diff --git a/__pycache__/config.cpython-312.pyc b/__pycache__/config.cpython-312.pyc index 281027a..6d38c54 100644 Binary files a/__pycache__/config.cpython-312.pyc and b/__pycache__/config.cpython-312.pyc differ diff --git a/__pycache__/extensions.cpython-312.pyc b/__pycache__/extensions.cpython-312.pyc index 4bf94c2..143ac3f 100644 Binary files a/__pycache__/extensions.cpython-312.pyc and b/__pycache__/extensions.cpython-312.pyc differ diff --git a/__pycache__/models.cpython-312.pyc b/__pycache__/models.cpython-312.pyc index 7e60e5a..3fca0f2 100644 Binary files a/__pycache__/models.cpython-312.pyc and b/__pycache__/models.cpython-312.pyc differ diff --git a/blueprints/__pycache__/admin.cpython-312.pyc b/blueprints/__pycache__/admin.cpython-312.pyc index 4455fa7..883a16b 100644 Binary files a/blueprints/__pycache__/admin.cpython-312.pyc and b/blueprints/__pycache__/admin.cpython-312.pyc differ diff --git a/blueprints/__pycache__/api.cpython-312.pyc b/blueprints/__pycache__/api.cpython-312.pyc index 9cab3a2..976e61f 100644 Binary files a/blueprints/__pycache__/api.cpython-312.pyc and b/blueprints/__pycache__/api.cpython-312.pyc differ diff --git a/blueprints/__pycache__/auth.cpython-312.pyc b/blueprints/__pycache__/auth.cpython-312.pyc index 321c4d0..2a5fea6 100644 Binary files a/blueprints/__pycache__/auth.cpython-312.pyc and b/blueprints/__pycache__/auth.cpython-312.pyc differ diff --git a/blueprints/__pycache__/payment.cpython-312.pyc b/blueprints/__pycache__/payment.cpython-312.pyc index 90581d1..cdc6df4 100644 Binary files a/blueprints/__pycache__/payment.cpython-312.pyc and b/blueprints/__pycache__/payment.cpython-312.pyc differ diff --git a/blueprints/payment.py b/blueprints/payment.py index 54a14c7..aaffe82 100644 --- a/blueprints/payment.py +++ b/blueprints/payment.py @@ -4,6 +4,9 @@ from models import Order, User from services.alipay_service import AlipayService import uuid from datetime import datetime +import logging + +logger = logging.getLogger(__name__) payment_bp = Blueprint('payment', __name__, url_prefix='/payment') @@ -60,52 +63,92 @@ def create_payment(): @payment_bp.route('/return') def payment_return(): """支付成功后的同步跳转页面""" - data = request.args.to_dict() - signature = data.pop("sign", None) - - if not signature: - return "参数错误", 400 + try: + logger.info(f"收到支付宝同步回调,参数: {dict(request.args)}") - alipay_service = AlipayService() - success = alipay_service.verify_notify(data, signature) - - out_trade_no = data.get('out_trade_no') - order = Order.query.filter_by(out_trade_no=out_trade_no).first() - - if success: - # 同步通知仅用于页面展示,实际业务逻辑在异步通知 notify 中处理 - return render_template('buy.html', success=True, order=order) - else: - return "支付验证失败", 400 + data = request.args.to_dict() + signature = data.get("sign") + + if not signature: + logger.error("同步回调缺少签名参数") + return "参数错误:缺少签名", 400 + + # 验证签名前,先记录关键信息 + logger.info(f"验证订单号: {data.get('out_trade_no')}") + logger.info(f"支付宝交易号: {data.get('trade_no')}") + logger.info(f"支付金额: {data.get('total_amount')}") + + # 从数据中移除sign参数以进行验证 + verify_data = data.copy() + verify_data.pop('sign', None) + verify_data.pop('sign_type', None) # 也要移除sign_type + + alipay_service = AlipayService() + success = alipay_service.verify_notify(verify_data, signature) + + out_trade_no = data.get('out_trade_no') + order = Order.query.filter_by(out_trade_no=out_trade_no).first() + + if success: + logger.info(f"同步回调验证成功,订单号: {out_trade_no}") + # 同步通知仅用于页面展示,实际业务逻辑在异步通知 notify 中处理 + return render_template('buy.html', success=True, order=order) + else: + 
logger.error(f"同步回调验证失败,订单号: {out_trade_no}") + return "支付验证失败", 400 + + except Exception as e: + logger.error(f"处理同步回调时发生异常: {str(e)}", exc_info=True) + return f"处理支付回调失败: {str(e)}", 500 @payment_bp.route('/notify', methods=['POST']) def payment_notify(): """支付宝异步通知""" - data = request.form.to_dict() - signature = data.pop("sign", None) - - if not signature: + try: + logger.info(f"收到支付宝异步通知,参数: {request.form.to_dict()}") + + data = request.form.to_dict() + signature = data.get("sign") # 不要pop,保留原始数据 + + if not signature: + logger.error("异步通知缺少签名参数") + return "fail" + + alipay_service = AlipayService() + success = alipay_service.verify_notify(data, signature) + + if success and data.get('trade_status') in ['TRADE_SUCCESS', 'TRADE_FINISHED']: + out_trade_no = data.get('out_trade_no') + trade_no = data.get('trade_no') + + logger.info(f"异步通知验证成功,订单号: {out_trade_no}, 支付宝交易号: {trade_no}") + + order = Order.query.filter_by(out_trade_no=out_trade_no).first() + if order and order.status == 'PENDING': + order.status = 'PAID' + order.trade_no = trade_no + order.paid_at = datetime.utcnow() + + # 给用户加积分 + user = User.query.get(order.user_id) + if user: + user.points += order.points + logger.info(f"用户 {user.id} 充值 {order.points} 积分") + + db.session.commit() + logger.info(f"订单 {out_trade_no} 处理成功") + return "success" + elif order: + logger.warning(f"订单 {out_trade_no} 状态为 {order.status},跳过处理") + return "success" # 已处理过的订单也返回success + else: + logger.error(f"未找到订单: {out_trade_no}") + return "fail" + else: + logger.error(f"异步通知验证失败或交易状态异常: {data.get('trade_status')}") + return "fail" + + except Exception as e: + logger.error(f"处理异步通知时发生异常: {str(e)}", exc_info=True) + db.session.rollback() return "fail" - - alipay_service = AlipayService() - success = alipay_service.verify_notify(data, signature) - - if success and data.get('trade_status') in ['TRADE_SUCCESS', 'TRADE_FINISHED']: - out_trade_no = data.get('out_trade_no') - trade_no = data.get('trade_no') - - order = Order.query.filter_by(out_trade_no=out_trade_no).first() - if order and order.status == 'PENDING': - order.status = 'PAID' - order.trade_no = trade_no - order.paid_at = datetime.utcnow() - - # 给用户加积分 - user = User.query.get(order.user_id) - if user: - user.points += order.points - - db.session.commit() - return "success" - - return "fail" diff --git a/check_alipay_config.py b/check_alipay_config.py new file mode 100644 index 0000000..0b4a778 --- /dev/null +++ b/check_alipay_config.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +支付宝配置检查脚本 +用于检查支付宝配置是否正确,不依赖虚拟环境 +""" + +import sys +import os +sys.path.append(os.path.dirname(os.path.abspath(__file__))) + +def check_alipay_config(): + """检查支付宝配置""" + print("="*60) + print("支付宝配置检查") + print("="*60) + + # 读取配置文件内容 + config_path = os.path.join(os.path.dirname(__file__), 'config.py') + + if not os.path.exists(config_path): + print("错误: config.py 文件不存在") + return False + + with open(config_path, 'r', encoding='utf-8') as f: + config_content = f.read() + + # 检查支付宝相关配置 + required_configs = [ + 'ALIPAY_APP_ID', + 'ALIPAY_APP_PRIVATE_KEY', + 'ALIPAY_PUBLIC_KEY', + 'ALIPAY_RETURN_URL', + 'ALIPAY_NOTIFY_URL', + 'ALIPAY_DEBUG' + ] + + print("检查必需的配置项:") + for config in required_configs: + if config in config_content: + print(f" ✓ {config}: 存在") + else: + print(f" ✗ {config}: 缺失") + + # 提取并显示配置值(部分显示以保护安全) + import re + + # 检查密钥格式 + private_key_match = re.search(r'ALIPAY_APP_PRIVATE_KEY\s*=\s*"""(.*?)"""', config_content, re.DOTALL) + public_key_match = 
re.search(r'ALIPAY_PUBLIC_KEY\s*=\s*"""(.*?)"""', config_content, re.DOTALL) + + if private_key_match: + private_key = private_key_match.group(1) + if "BEGIN RSA PRIVATE KEY" in private_key and "END RSA PRIVATE KEY" in private_key: + print(" ✓ 应用私钥格式正确") + else: + print(" ✗ 应用私钥格式错误") + else: + print(" ✗ 未找到应用私钥") + + if public_key_match: + public_key = public_key_match.group(1) + if "BEGIN PUBLIC KEY" in public_key and "END PUBLIC KEY" in public_key: + print(" ✓ 支付宝公钥格式正确") + else: + print(" ✗ 支付宝公钥格式错误") + else: + print(" ✗ 未找到支付宝公钥") + + # 检查调试模式 + debug_match = re.search(r'ALIPAY_DEBUG\s*=\s*(True|False)', config_content) + if debug_match: + debug_mode = debug_match.group(1) + print(f" ✓ 调试模式: {debug_mode}") + if debug_mode == "False": + print(" 注意: 当前为正式环境模式,确保使用正式环境的密钥") + else: + print(" 提示: 当前为沙箱环境模式,适用于测试") + + print("\n" + "="*60) + print("常见问题排查") + print("="*60) + print(""" +1. 密钥配置问题: + - 确保应用私钥和支付宝公钥格式正确 + - 检查BEGIN/END标签是否完整 + - 确认沙箱/正式环境配置一致 + +2. 回调地址问题: + - 确保ALIPAY_RETURN_URL和ALIPAY_NOTIFY_URL可以公网访问 + - 检查URL是否拼写正确 + +3. 签名算法: + - SDK默认使用RSA2算法 + - 确保支付宝开放平台应用设置中也是RSA2 + +4. 对于同步回调400错误: + - 主要是签名验证失败 + - 需要正确处理sign和sign_type参数 + - 确保使用正确的公钥验证 + +5. 环境问题: + - 如果使用沙箱环境,确保使用沙箱的AppID和密钥 + - 确保服务器时间准确 + """) + + return True + +def analyze_callback_request(): + """分析提供的回调请求""" + print("\n" + "="*60) + print("回调请求分析") + print("="*60) + + sample_request = "GET /payment/return?charset=utf-8&out_trade_no=20260114192935cfcf96&method=alipay.trade.page.pay.return&total_amount=5.00&sign=ksH8Nov1SA9U4fgovUXv%2BXxmZccCDaqVhPmm%2BAPlGL8QgMYWDN7NSqDQTDoVshe2agHT11rNuVEXuApE3lVOnBPPbUvUlyMdaWpx/0GlFBRS0tezfdUcCQsShOTj4YdKwa2K0bfoqQeStupG0LFVipsWiga9WIryFU5JWDK3lDOuaVLiw2gLFMemsz/Xg14UPQMWcmlyXVGYzeLYvNmVRbQQjnJL8m%2BFOq5tqMgopEtZmAC4wstIwm7n1kOrV%2Bs/HBxMeQqWOTtFEbDkzbU8o%2BhS5%2BavQm5BUvFTmjbsVs6Npo5qmmTkI8dRvqRO1HzqSv6ymL8%2BpiguKEBmaFaBeg%3D%3D&trade_no=2026011422001420011429097835&auth_app_id=2021006126656681&version=1.0&app_id=2021006126656681&sign_type=RSA2&seller_id=2088802857784696&timestamp=2026-01-14+19:30:21" + + print("从您提供的请求中,我们可以看到:") + print("- 订单号: 20260114192935cfcf96") + print("- 支付金额: 5.00") + print("- 支付宝交易号: 2026011422001420011429097835") + print("- 签名类型: RSA2") + print("- 时间戳: 2026-01-14 19:30:21") + print("\n这些参数应该被正确传递给验证函数") + + print("\n" + "="*60) + print("解决方案") + print("="*60) + print(""" +1. 确保签名验证时: + - 从参数中移除 'sign' 和 'sign_type' 字段 + - 使用剩余参数进行签名验证 + - 使用正确的支付宝公钥 + +2. 检查配置: + - 确认使用的AppID与请求中的app_id一致 + - 确认密钥对正确配对 + +3. 日志查看: + - 运行应用后,进行一次支付测试 + - 查看 logs/system.log 中的详细错误信息 + - 检查具体的验证失败原因 + +4. 沙箱测试: + - 如果使用沙箱环境,请确保配置正确 + - 使用沙箱提供的测试账号进行支付 + """) + +if __name__ == "__main__": + print("支付宝配置检查工具\n") + + check_alipay_config() + analyze_callback_request() + + print("\n" + "="*60) + print("下一步操作建议") + print("="*60) + print(""" +1. 运行应用: python app.py +2. 进行支付测试 +3. 检查日志文件 logs/system.log +4. 查看具体的错误信息 +5. 
根据错误信息进行相应修复 + """) \ No newline at end of file diff --git a/middlewares/__pycache__/auth.cpython-312.pyc b/middlewares/__pycache__/auth.cpython-312.pyc index 9e51700..60baaeb 100644 Binary files a/middlewares/__pycache__/auth.cpython-312.pyc and b/middlewares/__pycache__/auth.cpython-312.pyc differ diff --git a/services/__pycache__/alipay_service.cpython-312.pyc b/services/__pycache__/alipay_service.cpython-312.pyc index 9519a14..697c3d5 100644 Binary files a/services/__pycache__/alipay_service.cpython-312.pyc and b/services/__pycache__/alipay_service.cpython-312.pyc differ diff --git a/services/__pycache__/logger.cpython-312.pyc b/services/__pycache__/logger.cpython-312.pyc index e522000..7268fff 100644 Binary files a/services/__pycache__/logger.cpython-312.pyc and b/services/__pycache__/logger.cpython-312.pyc differ diff --git a/services/__pycache__/sms_service.cpython-312.pyc b/services/__pycache__/sms_service.cpython-312.pyc index 6e63e62..7d5fa63 100644 Binary files a/services/__pycache__/sms_service.cpython-312.pyc and b/services/__pycache__/sms_service.cpython-312.pyc differ diff --git a/services/alipay_service.py b/services/alipay_service.py index afdc000..5da5710 100644 --- a/services/alipay_service.py +++ b/services/alipay_service.py @@ -1,6 +1,9 @@ from alipay import AliPay from flask import current_app import os +import logging + +logger = logging.getLogger(__name__) class AlipayService: def __init__(self): @@ -38,5 +41,41 @@ class AlipayService: def verify_notify(self, data, signature): """验证通知签名""" - alipay = self.get_alipay_client() - return alipay.verify(data, signature) + try: + logger.info(f"开始验证支付宝签名,数据: {data}") + logger.info(f"签名值: {signature}") + + # 验证必要参数 + if not signature: + logger.error("签名为空") + return False + + if not data: + logger.error("数据为空") + return False + + # 创建数据副本,避免修改原数据 + verify_data = data.copy() + + # 移除sign和sign_type,这两个字段不参与验证 + verify_data.pop('sign', None) + verify_data.pop('sign_type', None) + + alipay = self.get_alipay_client() + result = alipay.verify(verify_data, signature) + + if result: + logger.info("签名验证成功") + else: + logger.error("签名验证失败") + logger.error(f"验证数据: {verify_data}") + logger.error(f"公钥配置: {self.public_key[:50]}...") + # 额外调试信息 + logger.error(f"App ID: {self.app_id}") + logger.error(f"是否调试模式: {self.debug}") + + return result + + except Exception as e: + logger.error(f"验证签名时发生异常: {str(e)}", exc_info=True) + return False diff --git a/test_alipay_verify.py b/test_alipay_verify.py new file mode 100644 index 0000000..07d8ae8 --- /dev/null +++ b/test_alipay_verify.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +支付宝签名验证测试脚本 +用于诊断支付宝回调验证失败的问题 +""" + +from alipay import AliPay +from config import Config +import logging + +# 配置日志 +logging.basicConfig( + level=logging.DEBUG, + format='[%(asctime)s] %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + +def test_alipay_config(): + """测试支付宝配置是否正确""" + print("="*60) + print("支付宝配置信息检查") + print("="*60) + + config = Config() + + print(f"AppID: {config.ALIPAY_APP_ID}") + print(f"调试模式: {config.ALIPAY_DEBUG}") + print(f"回调地址: {config.ALIPAY_RETURN_URL}") + print(f"异步通知地址: {config.ALIPAY_NOTIFY_URL}") + print(f"应用私钥长度: {len(config.ALIPAY_APP_PRIVATE_KEY)}") + print(f"支付宝公钥长度: {len(config.ALIPAY_PUBLIC_KEY)}") + + # 检查密钥格式 + if "BEGIN RSA PRIVATE KEY" in config.ALIPAY_APP_PRIVATE_KEY: + print("✓ 应用私钥格式正确") + else: + print("✗ 应用私钥格式错误") + + if "BEGIN PUBLIC KEY" in config.ALIPAY_PUBLIC_KEY: + print("✓ 支付宝公钥格式正确") + else: + print("✗ 支付宝公钥格式错误") + + 
print("\n") + +def test_alipay_client(): + """测试支付宝客户端初始化""" + print("="*60) + print("支付宝客户端初始化测试") + print("="*60) + + config = Config() + + try: + alipay = AliPay( + appid=config.ALIPAY_APP_ID, + app_notify_url=config.ALIPAY_NOTIFY_URL, + app_private_key_string=config.ALIPAY_APP_PRIVATE_KEY, + alipay_public_key_string=config.ALIPAY_PUBLIC_KEY, + sign_type="RSA2", + debug=config.ALIPAY_DEBUG + ) + print("✓ 支付宝客户端初始化成功") + return alipay + except Exception as e: + print(f"✗ 支付宝客户端初始化失败: {str(e)}") + logger.error("客户端初始化失败", exc_info=True) + return None + +def test_signature_verification(): + """测试签名验证功能""" + print("\n") + print("="*60) + print("签名验证功能测试") + print("="*60) + + alipay = test_alipay_client() + if not alipay: + return + + # 模拟支付宝回调数据(这是一个示例,需要替换为真实的回调数据) + test_data = { + "gmt_create": "2024-01-01 00:00:00", + "charset": "utf-8", + "seller_email": "test@example.com", + "subject": "测试订单", + "sign_type": "RSA2", + "buyer_id": "2088000000000000", + "invoice_amount": "10.00", + "notify_id": "test_notify_id", + "fund_bill_list": "[{\"amount\":\"10.00\",\"fundChannel\":\"ALIPAYACCOUNT\"}]", + "notify_type": "trade_status_sync", + "trade_status": "TRADE_SUCCESS", + "receipt_amount": "10.00", + "buyer_pay_amount": "10.00", + "app_id": Config().ALIPAY_APP_ID, + "notify_time": "2024-01-01 00:00:00", + "point_amount": "0.00", + "total_amount": "10.00", + "seller_id": "2088000000000000", + "trade_no": "2024010100000000000", + "auth_app_id": Config().ALIPAY_APP_ID, + "buyer_logon_id": "test***@example.com", + "out_trade_no": "test_order_123456" + } + + # 注意: 这里无法进行真实的签名验证测试,因为需要真实的签名值 + print("提示: 需要真实的支付宝回调数据才能进行完整测试") + print("请在日志中查看实际回调时的数据和签名") + print("\n") + +def print_troubleshooting_guide(): + """打印故障排查指南""" + print("="*60) + print("支付宝验证失败常见问题排查") + print("="*60) + print(""" +1. 密钥配置问题: + - 确认应用私钥和支付宝公钥是否匹配 + - 检查密钥格式是否包含完整的BEGIN/END标记 + - 确认沙箱环境/正式环境的配置一致性 + +2. 签名验证问题: + - 确认签名类型为RSA2 + - 检查是否正确移除了sign和sign_type参数 + - 验证参数顺序和编码格式 + +3. 环境配置问题: + - ALIPAY_DEBUG设置: True=沙箱环境, False=正式环境 + - 回调地址必须是外网可访问的地址 + - 检查防火墙/端口是否开放 + +4. 调试建议: + - 开启详细日志,查看完整的回调参数 + - 使用支付宝开放平台的"验签工具"验证签名 + - 检查服务器时间是否准确 + - 确认订单金额格式正确(字符串类型,保留两位小数) + +5. 沙箱环境测试: + - 确保使用沙箱环境的AppID和密钥 + - 使用沙箱账号进行测试 + - 沙箱环境域名: https://openapi-sandbox.dl.alipaydev.com/gateway.do + +6. 查看日志: + - 检查 logs/system.log 文件 + - 查看控制台输出的详细错误信息 + - 分析支付宝返回的错误码和错误信息 +""") + print("="*60) + +if __name__ == "__main__": + print("\n支付宝签名验证诊断工具\n") + + # 运行测试 + test_alipay_config() + test_alipay_client() + test_signature_verification() + print_troubleshooting_guide() + + print("\n测试完成!") + print("如果仍然有问题,请:") + print("1. 运行实际支付流程并查看日志") + print("2. 复制日志中的回调数据和签名") + print("3. 使用支付宝官方验签工具验证\n") diff --git a/venv/Include/site/python3.12/greenlet/greenlet.h b/venv/Include/site/python3.12/greenlet/greenlet.h deleted file mode 100644 index d02a16e..0000000 --- a/venv/Include/site/python3.12/greenlet/greenlet.h +++ /dev/null @@ -1,164 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ - -/* Greenlet object interface */ - -#ifndef Py_GREENLETOBJECT_H -#define Py_GREENLETOBJECT_H - - -#include - -#ifdef __cplusplus -extern "C" { -#endif - -/* This is deprecated and undocumented. It does not change. 
*/ -#define GREENLET_VERSION "1.0.0" - -#ifndef GREENLET_MODULE -#define implementation_ptr_t void* -#endif - -typedef struct _greenlet { - PyObject_HEAD - PyObject* weakreflist; - PyObject* dict; - implementation_ptr_t pimpl; -} PyGreenlet; - -#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) - - -/* C API functions */ - -/* Total number of symbols that are exported */ -#define PyGreenlet_API_pointers 12 - -#define PyGreenlet_Type_NUM 0 -#define PyExc_GreenletError_NUM 1 -#define PyExc_GreenletExit_NUM 2 - -#define PyGreenlet_New_NUM 3 -#define PyGreenlet_GetCurrent_NUM 4 -#define PyGreenlet_Throw_NUM 5 -#define PyGreenlet_Switch_NUM 6 -#define PyGreenlet_SetParent_NUM 7 - -#define PyGreenlet_MAIN_NUM 8 -#define PyGreenlet_STARTED_NUM 9 -#define PyGreenlet_ACTIVE_NUM 10 -#define PyGreenlet_GET_PARENT_NUM 11 - -#ifndef GREENLET_MODULE -/* This section is used by modules that uses the greenlet C API */ -static void** _PyGreenlet_API = NULL; - -# define PyGreenlet_Type \ - (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) - -# define PyExc_GreenletError \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) - -# define PyExc_GreenletExit \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) - -/* - * PyGreenlet_New(PyObject *args) - * - * greenlet.greenlet(run, parent=None) - */ -# define PyGreenlet_New \ - (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ - _PyGreenlet_API[PyGreenlet_New_NUM]) - -/* - * PyGreenlet_GetCurrent(void) - * - * greenlet.getcurrent() - */ -# define PyGreenlet_GetCurrent \ - (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) - -/* - * PyGreenlet_Throw( - * PyGreenlet *greenlet, - * PyObject *typ, - * PyObject *val, - * PyObject *tb) - * - * g.throw(...) - */ -# define PyGreenlet_Throw \ - (*(PyObject * (*)(PyGreenlet * self, \ - PyObject * typ, \ - PyObject * val, \ - PyObject * tb)) \ - _PyGreenlet_API[PyGreenlet_Throw_NUM]) - -/* - * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) - * - * g.switch(*args, **kwargs) - */ -# define PyGreenlet_Switch \ - (*(PyObject * \ - (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ - _PyGreenlet_API[PyGreenlet_Switch_NUM]) - -/* - * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) - * - * g.parent = new_parent - */ -# define PyGreenlet_SetParent \ - (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ - _PyGreenlet_API[PyGreenlet_SetParent_NUM]) - -/* - * PyGreenlet_GetParent(PyObject* greenlet) - * - * return greenlet.parent; - * - * This could return NULL even if there is no exception active. - * If it does not return NULL, you are responsible for decrementing the - * reference count. - */ -# define PyGreenlet_GetParent \ - (*(PyGreenlet* (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) - -/* - * deprecated, undocumented alias. - */ -# define PyGreenlet_GET_PARENT PyGreenlet_GetParent - -# define PyGreenlet_MAIN \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_MAIN_NUM]) - -# define PyGreenlet_STARTED \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_STARTED_NUM]) - -# define PyGreenlet_ACTIVE \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) - - - - -/* Macro that imports greenlet and initializes C API */ -/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we - keep the older definition to be sure older code that might have a copy of - the header still works. 
*/ -# define PyGreenlet_Import() \ - { \ - _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ - } - -#endif /* GREENLET_MODULE */ - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/AES.py b/venv/Lib/site-packages/Cryptodome/Cipher/AES.py deleted file mode 100644 index 42f4bab..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/AES.py +++ /dev/null @@ -1,235 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/AES.py : AES -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - c_size_t, c_uint8_ptr) - -from Cryptodome.Util import _cpu_features -from Cryptodome.Random import get_random_bytes - -MODE_ECB = 1 #: Electronic Code Book (:ref:`ecb_mode`) -MODE_CBC = 2 #: Cipher-Block Chaining (:ref:`cbc_mode`) -MODE_CFB = 3 #: Cipher Feedback (:ref:`cfb_mode`) -MODE_OFB = 5 #: Output Feedback (:ref:`ofb_mode`) -MODE_CTR = 6 #: Counter mode (:ref:`ctr_mode`) -MODE_OPENPGP = 7 #: OpenPGP mode (:ref:`openpgp_mode`) -MODE_CCM = 8 #: Counter with CBC-MAC (:ref:`ccm_mode`) -MODE_EAX = 9 #: :ref:`eax_mode` -MODE_SIV = 10 #: Synthetic Initialization Vector (:ref:`siv_mode`) -MODE_GCM = 11 #: Galois Counter Mode (:ref:`gcm_mode`) -MODE_OCB = 12 #: Offset Code Book (:ref:`ocb_mode`) -MODE_KW = 13 #: Key Wrap (:ref:`kw_mode`) -MODE_KWP = 14 #: Key Wrap with Padding (:ref:`kwp_mode`) - -_cproto = """ - int AES_start_operation(const uint8_t key[], - size_t key_len, - void **pResult); - int AES_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int AES_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int AES_stop_operation(void *state); - """ - - -# Load portable AES -_raw_aes_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_aes", - _cproto) - -# Try to load AES with AES NI instructions -try: - _raw_aesni_lib = None - if _cpu_features.have_aes_ni(): - _raw_aesni_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_aesni", - _cproto.replace("AES", - "AESNI")) -# _raw_aesni may not have been compiled in -except OSError: - pass - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a handle to a low-level - base cipher. 
It will absorb named parameters in the process.""" - - use_aesni = dict_parameters.pop("use_aesni", True) - - try: - key = dict_parameters.pop("key") - except KeyError: - raise TypeError("Missing 'key' parameter") - - if len(key) not in key_size: - raise ValueError("Incorrect AES key length (%d bytes)" % len(key)) - - if use_aesni and _raw_aesni_lib: - start_operation = _raw_aesni_lib.AESNI_start_operation - stop_operation = _raw_aesni_lib.AESNI_stop_operation - else: - start_operation = _raw_aes_lib.AES_start_operation - stop_operation = _raw_aes_lib.AES_stop_operation - - cipher = VoidPointer() - result = start_operation(c_uint8_ptr(key), - c_size_t(len(key)), - cipher.address_of()) - if result: - raise ValueError("Error %X while instantiating the AES cipher" - % result) - return SmartPointer(cipher.get(), stop_operation) - - -def _derive_Poly1305_key_pair(key, nonce): - """Derive a tuple (r, s, nonce) for a Poly1305 MAC. - - If nonce is ``None``, a new 16-byte nonce is generated. - """ - - if len(key) != 32: - raise ValueError("Poly1305 with AES requires a 32-byte key") - - if nonce is None: - nonce = get_random_bytes(16) - elif len(nonce) != 16: - raise ValueError("Poly1305 with AES requires a 16-byte nonce") - - s = new(key[:16], MODE_ECB).encrypt(nonce) - return key[16:], s, nonce - - -def new(key, mode, *args, **kwargs): - """Create a new AES cipher. - - Args: - key(bytes/bytearray/memoryview): - The secret key to use in the symmetric cipher. - - It must be 16 (*AES-128)*, 24 (*AES-192*) or 32 (*AES-256*) bytes long. - - For ``MODE_SIV`` only, it doubles to 32, 48, or 64 bytes. - mode (a ``MODE_*`` constant): - The chaining mode to use for encryption or decryption. - If in doubt, use ``MODE_EAX``. - - Keyword Args: - iv (bytes/bytearray/memoryview): - (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, - and ``MODE_OPENPGP`` modes). - - The initialization vector to use for encryption or decryption. - - For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 16 bytes long. - - For ``MODE_OPENPGP`` mode only, - it must be 16 bytes long for encryption - and 18 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - - If not provided, a random byte string is generated (you must then - read its value with the :attr:`iv` attribute). - - nonce (bytes/bytearray/memoryview): - (Only applicable for ``MODE_CCM``, ``MODE_EAX``, ``MODE_GCM``, - ``MODE_SIV``, ``MODE_OCB``, and ``MODE_CTR``). - - A value that must never be reused for any other encryption done - with this key (except possibly for ``MODE_SIV``, see below). - - For ``MODE_EAX``, ``MODE_GCM`` and ``MODE_SIV`` there are no - restrictions on its length (recommended: **16** bytes). - - For ``MODE_CCM``, its length must be in the range **[7..13]**. - Bear in mind that with CCM there is a trade-off between nonce - length and maximum message size. Recommendation: **11** bytes. - - For ``MODE_OCB``, its length must be in the range **[1..15]** - (recommended: **15**). - - For ``MODE_CTR``, its length must be in the range **[0..15]** - (recommended: **8**). - - For ``MODE_SIV``, the nonce is optional, if it is not specified, - then no nonce is being used, which renders the encryption - deterministic. - - If not provided, for modes other than ``MODE_SIV``, a random - byte string of the recommended length is used (you must then - read its value with the :attr:`nonce` attribute). 
- - segment_size (integer): - (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext - are segmented in. It must be a multiple of 8. - If not specified, it will be assumed to be 8. - - mac_len (integer): - (Only ``MODE_EAX``, ``MODE_GCM``, ``MODE_OCB``, ``MODE_CCM``) - Length of the authentication tag, in bytes. - - It must be even and in the range **[4..16]**. - The recommended value (and the default, if not specified) is **16**. - - msg_len (integer): - (Only ``MODE_CCM``). Length of the message to (de)cipher. - If not specified, ``encrypt`` must be called with the entire message. - Similarly, ``decrypt`` can only be called once. - - assoc_len (integer): - (Only ``MODE_CCM``). Length of the associated data. - If not specified, all associated data is buffered internally, - which may represent a problem for very large messages. - - initial_value (integer or bytes/bytearray/memoryview): - (Only ``MODE_CTR``). - The initial value for the counter. If not present, the cipher will - start counting from 0. The value is incremented by one for each block. - The counter number is encoded in big endian mode. - - counter (object): - (Only ``MODE_CTR``). - Instance of ``Cryptodome.Util.Counter``, which allows full customization - of the counter block. This parameter is incompatible to both ``nonce`` - and ``initial_value``. - - use_aesni: (boolean): - Use Intel AES-NI hardware extensions (default: use if available). - - Returns: - an AES object, of the applicable mode. - """ - - kwargs["add_aes_modes"] = True - return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) - - -# Size of a data block (in bytes) -block_size = 16 -# Size of a key (in bytes) -key_size = (16, 24, 32) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/AES.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/AES.pyi deleted file mode 100644 index 3f07b65..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/AES.pyi +++ /dev/null @@ -1,156 +0,0 @@ -from typing import Dict, Optional, Tuple, Union, overload -from typing_extensions import Literal - -Buffer=bytes|bytearray|memoryview - -from Cryptodome.Cipher._mode_ecb import EcbMode -from Cryptodome.Cipher._mode_cbc import CbcMode -from Cryptodome.Cipher._mode_cfb import CfbMode -from Cryptodome.Cipher._mode_ofb import OfbMode -from Cryptodome.Cipher._mode_ctr import CtrMode -from Cryptodome.Cipher._mode_openpgp import OpenPgpMode -from Cryptodome.Cipher._mode_ccm import CcmMode -from Cryptodome.Cipher._mode_eax import EaxMode -from Cryptodome.Cipher._mode_gcm import GcmMode -from Cryptodome.Cipher._mode_siv import SivMode -from Cryptodome.Cipher._mode_ocb import OcbMode - -MODE_ECB: Literal[1] -MODE_CBC: Literal[2] -MODE_CFB: Literal[3] -MODE_OFB: Literal[5] -MODE_CTR: Literal[6] -MODE_OPENPGP: Literal[7] -MODE_CCM: Literal[8] -MODE_EAX: Literal[9] -MODE_SIV: Literal[10] -MODE_GCM: Literal[11] -MODE_OCB: Literal[12] - -# MODE_ECB -@overload -def new(key: Buffer, - mode: Literal[1], - use_aesni : bool = ...) -> \ - EcbMode: ... - -# MODE_CBC -@overload -def new(key: Buffer, - mode: Literal[2], - iv : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - CbcMode: ... - -@overload -def new(key: Buffer, - mode: Literal[2], - IV : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - CbcMode: ... - -# MODE_CFB -@overload -def new(key: Buffer, - mode: Literal[3], - iv : Optional[Buffer] = ..., - segment_size : int = ..., - use_aesni : bool = ...) -> \ - CfbMode: ... 
- -@overload -def new(key: Buffer, - mode: Literal[3], - IV : Optional[Buffer] = ..., - segment_size : int = ..., - use_aesni : bool = ...) -> \ - CfbMode: ... - -# MODE_OFB -@overload -def new(key: Buffer, - mode: Literal[5], - iv : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - OfbMode: ... - -@overload -def new(key: Buffer, - mode: Literal[5], - IV : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - OfbMode: ... - -# MODE_CTR -@overload -def new(key: Buffer, - mode: Literal[6], - nonce : Optional[Buffer] = ..., - initial_value : Union[int, Buffer] = ..., - counter : Dict = ..., - use_aesni : bool = ...) -> \ - CtrMode: ... - -# MODE_OPENPGP -@overload -def new(key: Buffer, - mode: Literal[7], - iv : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - OpenPgpMode: ... - -@overload -def new(key: Buffer, - mode: Literal[7], - IV : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - OpenPgpMode: ... - -# MODE_CCM -@overload -def new(key: Buffer, - mode: Literal[8], - nonce : Optional[Buffer] = ..., - mac_len : int = ..., - assoc_len : int = ..., - use_aesni : bool = ...) -> \ - CcmMode: ... - -# MODE_EAX -@overload -def new(key: Buffer, - mode: Literal[9], - nonce : Optional[Buffer] = ..., - mac_len : int = ..., - use_aesni : bool = ...) -> \ - EaxMode: ... - -# MODE_GCM -@overload -def new(key: Buffer, - mode: Literal[10], - nonce : Optional[Buffer] = ..., - use_aesni : bool = ...) -> \ - SivMode: ... - -# MODE_SIV -@overload -def new(key: Buffer, - mode: Literal[11], - nonce : Optional[Buffer] = ..., - mac_len : int = ..., - use_aesni : bool = ...) -> \ - GcmMode: ... - -# MODE_OCB -@overload -def new(key: Buffer, - mode: Literal[12], - nonce : Optional[Buffer] = ..., - mac_len : int = ..., - use_aesni : bool = ...) -> \ - OcbMode: ... - - -block_size: int -key_size: Tuple[int, int, int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ARC2.py b/venv/Lib/site-packages/Cryptodome/Cipher/ARC2.py deleted file mode 100644 index 4dc1bb8..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ARC2.py +++ /dev/null @@ -1,175 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/ARC2.py : ARC2.py -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== -""" -Module's constants for the modes of operation supported with ARC2: - -:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` -:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` -:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` -:var MODE_OFB: :ref:`Output FeedBack (OFB) ` -:var MODE_CTR: :ref:`CounTer Mode (CTR) ` -:var MODE_OPENPGP: :ref:`OpenPGP Mode ` -:var MODE_EAX: :ref:`EAX Mode ` -""" - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util.py3compat import byte_string -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - c_size_t, c_uint8_ptr) - -_raw_arc2_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._raw_arc2", - """ - int ARC2_start_operation(const uint8_t key[], - size_t key_len, - size_t effective_key_len, - void **pResult); - int ARC2_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int ARC2_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int ARC2_stop_operation(void *state); - """ - ) - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a handle to a low-level - base cipher. It will absorb named parameters in the process.""" - - try: - key = dict_parameters.pop("key") - except KeyError: - raise TypeError("Missing 'key' parameter") - - effective_keylen = dict_parameters.pop("effective_keylen", 1024) - - if len(key) not in key_size: - raise ValueError("Incorrect ARC2 key length (%d bytes)" % len(key)) - - if not (40 <= effective_keylen <= 1024): - raise ValueError("'effective_key_len' must be at least 40 and no larger than 1024 " - "(not %d)" % effective_keylen) - - start_operation = _raw_arc2_lib.ARC2_start_operation - stop_operation = _raw_arc2_lib.ARC2_stop_operation - - cipher = VoidPointer() - result = start_operation(c_uint8_ptr(key), - c_size_t(len(key)), - c_size_t(effective_keylen), - cipher.address_of()) - if result: - raise ValueError("Error %X while instantiating the ARC2 cipher" - % result) - - return SmartPointer(cipher.get(), stop_operation) - - -def new(key, mode, *args, **kwargs): - """Create a new RC2 cipher. - - :param key: - The secret key to use in the symmetric cipher. - Its length can vary from 5 to 128 bytes; the actual search space - (and the cipher strength) can be reduced with the ``effective_keylen`` parameter. - :type key: bytes, bytearray, memoryview - - :param mode: - The chaining mode to use for encryption or decryption. - :type mode: One of the supported ``MODE_*`` constants - - :Keyword Arguments: - * **iv** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, - and ``MODE_OPENPGP`` modes). - - The initialization vector to use for encryption or decryption. - - For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. - - For ``MODE_OPENPGP`` mode only, - it must be 8 bytes long for encryption - and 10 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - - If not provided, a random byte string is generated (you must then - read its value with the :attr:`iv` attribute). - - * **nonce** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). - - A value that must never be reused for any other encryption done - with this key. - - For ``MODE_EAX`` there are no - restrictions on its length (recommended: **16** bytes). 
- - For ``MODE_CTR``, its length must be in the range **[0..7]**. - - If not provided for ``MODE_EAX``, a random byte string is generated (you - can read it back via the ``nonce`` attribute). - - * **effective_keylen** (*integer*) -- - Optional. Maximum strength in bits of the actual key used by the ARC2 algorithm. - If the supplied ``key`` parameter is longer (in bits) of the value specified - here, it will be weakened to match it. - If not specified, no limitation is applied. - - * **segment_size** (*integer*) -- - (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext - are segmented in. It must be a multiple of 8. - If not specified, it will be assumed to be 8. - - * **mac_len** : (*integer*) -- - (Only ``MODE_EAX``) - Length of the authentication tag, in bytes. - It must be no longer than 8 (default). - - * **initial_value** : (*integer*) -- - (Only ``MODE_CTR``). The initial value for the counter within - the counter block. By default it is **0**. - - :Return: an ARC2 object, of the applicable mode. - """ - - return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) - -MODE_ECB = 1 -MODE_CBC = 2 -MODE_CFB = 3 -MODE_OFB = 5 -MODE_CTR = 6 -MODE_OPENPGP = 7 -MODE_EAX = 9 - -# Size of a data block (in bytes) -block_size = 8 -# Size of a key (in bytes) -key_size = range(5, 128 + 1) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ARC2.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/ARC2.pyi deleted file mode 100644 index a122a52..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ARC2.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Union, Dict, Iterable, Optional - -Buffer = bytes|bytearray|memoryview - -from Cryptodome.Cipher._mode_ecb import EcbMode -from Cryptodome.Cipher._mode_cbc import CbcMode -from Cryptodome.Cipher._mode_cfb import CfbMode -from Cryptodome.Cipher._mode_ofb import OfbMode -from Cryptodome.Cipher._mode_ctr import CtrMode -from Cryptodome.Cipher._mode_openpgp import OpenPgpMode -from Cryptodome.Cipher._mode_eax import EaxMode - -ARC2Mode = int - -MODE_ECB: ARC2Mode -MODE_CBC: ARC2Mode -MODE_CFB: ARC2Mode -MODE_OFB: ARC2Mode -MODE_CTR: ARC2Mode -MODE_OPENPGP: ARC2Mode -MODE_EAX: ARC2Mode - -def new(key: Buffer, - mode: ARC2Mode, - iv : Optional[Buffer] = ..., - IV : Optional[Buffer] = ..., - nonce : Optional[Buffer] = ..., - segment_size : int = ..., - mac_len : int = ..., - initial_value : Union[int, Buffer] = ..., - counter : Dict = ...) -> \ - Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... - -block_size: int -key_size: Iterable[int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ARC4.py b/venv/Lib/site-packages/Cryptodome/Cipher/ARC4.py deleted file mode 100644 index 543a323..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ARC4.py +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/ARC4.py : ARC4 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr) - - -_raw_arc4_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._ARC4", """ - int ARC4_stream_encrypt(void *rc4State, const uint8_t in[], - uint8_t out[], size_t len); - int ARC4_stream_init(uint8_t *key, size_t keylen, - void **pRc4State); - int ARC4_stream_destroy(void *rc4State); - """) - - -class ARC4Cipher: - """ARC4 cipher object. Do not create it directly. Use - :func:`Cryptodome.Cipher.ARC4.new` instead. - """ - - def __init__(self, key, *args, **kwargs): - """Initialize an ARC4 cipher object - - See also `new()` at the module level.""" - - if len(args) > 0: - ndrop = args[0] - args = args[1:] - else: - ndrop = kwargs.pop('drop', 0) - - if len(key) not in key_size: - raise ValueError("Incorrect ARC4 key length (%d bytes)" % - len(key)) - - self._state = VoidPointer() - result = _raw_arc4_lib.ARC4_stream_init(c_uint8_ptr(key), - c_size_t(len(key)), - self._state.address_of()) - if result != 0: - raise ValueError("Error %d while creating the ARC4 cipher" - % result) - self._state = SmartPointer(self._state.get(), - _raw_arc4_lib.ARC4_stream_destroy) - - if ndrop > 0: - # This is OK even if the cipher is used for decryption, - # since encrypt and decrypt are actually the same thing - # with ARC4. - self.encrypt(b'\x00' * ndrop) - - self.block_size = 1 - self.key_size = len(key) - - def encrypt(self, plaintext): - """Encrypt a piece of data. - - :param plaintext: The data to encrypt, of any size. - :type plaintext: bytes, bytearray, memoryview - :returns: the encrypted byte string, of equal length as the - plaintext. - """ - - ciphertext = create_string_buffer(len(plaintext)) - result = _raw_arc4_lib.ARC4_stream_encrypt(self._state.get(), - c_uint8_ptr(plaintext), - ciphertext, - c_size_t(len(plaintext))) - if result: - raise ValueError("Error %d while encrypting with RC4" % result) - return get_raw_buffer(ciphertext) - - def decrypt(self, ciphertext): - """Decrypt a piece of data. - - :param ciphertext: The data to decrypt, of any size. - :type ciphertext: bytes, bytearray, memoryview - :returns: the decrypted byte string, of equal length as the - ciphertext. - """ - - try: - return self.encrypt(ciphertext) - except ValueError as e: - raise ValueError(str(e).replace("enc", "dec")) - - -def new(key, *args, **kwargs): - """Create a new ARC4 cipher. - - :param key: - The secret key to use in the symmetric cipher. - Its length must be in the range ``[1..256]``. - The recommended length is 16 bytes. - :type key: bytes, bytearray, memoryview - - :Keyword Arguments: - * *drop* (``integer``) -- - The amount of bytes to discard from the initial part of the keystream. - In fact, such part has been found to be distinguishable from random - data (while it shouldn't) and also correlated to key. - - The recommended value is 3072_ bytes. The default value is 0. - - :Return: an `ARC4Cipher` object - - .. 
_3072: http://eprint.iacr.org/2002/067.pdf - """ - return ARC4Cipher(key, *args, **kwargs) - - -# Size of a data block (in bytes) -block_size = 1 -# Size of a key (in bytes) -key_size = range(1, 256+1) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ARC4.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/ARC4.pyi deleted file mode 100644 index b081585..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ARC4.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Any, Union, Iterable - -Buffer = bytes|bytearray|memoryview - -class ARC4Cipher: - block_size: int - key_size: int - - def __init__(self, key: Buffer, *args: Any, **kwargs: Any) -> None: ... - def encrypt(self, plaintext: Buffer) -> bytes: ... - def decrypt(self, ciphertext: Buffer) -> bytes: ... - -def new(key: Buffer, drop : int = ...) -> ARC4Cipher: ... - -block_size: int -key_size: Iterable[int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/Blowfish.py b/venv/Lib/site-packages/Cryptodome/Cipher/Blowfish.py deleted file mode 100644 index 536cbc8..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/Blowfish.py +++ /dev/null @@ -1,159 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/Blowfish.py : Blowfish -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -""" -Module's constants for the modes of operation supported with Blowfish: - -:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` -:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` -:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` -:var MODE_OFB: :ref:`Output FeedBack (OFB) ` -:var MODE_CTR: :ref:`CounTer Mode (CTR) ` -:var MODE_OPENPGP: :ref:`OpenPGP Mode ` -:var MODE_EAX: :ref:`EAX Mode ` -""" - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, c_size_t, - c_uint8_ptr) - -_raw_blowfish_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._raw_blowfish", - """ - int Blowfish_start_operation(const uint8_t key[], - size_t key_len, - void **pResult); - int Blowfish_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int Blowfish_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int Blowfish_stop_operation(void *state); - """ - ) - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a smart pointer to - a low-level base cipher. 
It will absorb named parameters in - the process.""" - - try: - key = dict_parameters.pop("key") - except KeyError: - raise TypeError("Missing 'key' parameter") - - if len(key) not in key_size: - raise ValueError("Incorrect Blowfish key length (%d bytes)" % len(key)) - - start_operation = _raw_blowfish_lib.Blowfish_start_operation - stop_operation = _raw_blowfish_lib.Blowfish_stop_operation - - void_p = VoidPointer() - result = start_operation(c_uint8_ptr(key), - c_size_t(len(key)), - void_p.address_of()) - if result: - raise ValueError("Error %X while instantiating the Blowfish cipher" - % result) - return SmartPointer(void_p.get(), stop_operation) - - -def new(key, mode, *args, **kwargs): - """Create a new Blowfish cipher - - :param key: - The secret key to use in the symmetric cipher. - Its length can vary from 5 to 56 bytes. - :type key: bytes, bytearray, memoryview - - :param mode: - The chaining mode to use for encryption or decryption. - :type mode: One of the supported ``MODE_*`` constants - - :Keyword Arguments: - * **iv** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, - and ``MODE_OPENPGP`` modes). - - The initialization vector to use for encryption or decryption. - - For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. - - For ``MODE_OPENPGP`` mode only, - it must be 8 bytes long for encryption - and 10 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - - If not provided, a random byte string is generated (you must then - read its value with the :attr:`iv` attribute). - - * **nonce** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). - - A value that must never be reused for any other encryption done - with this key. - - For ``MODE_EAX`` there are no - restrictions on its length (recommended: **16** bytes). - - For ``MODE_CTR``, its length must be in the range **[0..7]**. - - If not provided for ``MODE_EAX``, a random byte string is generated (you - can read it back via the ``nonce`` attribute). - - * **segment_size** (*integer*) -- - (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext - are segmented in. It must be a multiple of 8. - If not specified, it will be assumed to be 8. - - * **mac_len** : (*integer*) -- - (Only ``MODE_EAX``) - Length of the authentication tag, in bytes. - It must be no longer than 8 (default). - - * **initial_value** : (*integer*) -- - (Only ``MODE_CTR``). The initial value for the counter within - the counter block. By default it is **0**. - - :Return: a Blowfish object, of the applicable mode. 
- """ - - return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) - -MODE_ECB = 1 -MODE_CBC = 2 -MODE_CFB = 3 -MODE_OFB = 5 -MODE_CTR = 6 -MODE_OPENPGP = 7 -MODE_EAX = 9 - -# Size of a data block (in bytes) -block_size = 8 -# Size of a key (in bytes) -key_size = range(4, 56 + 1) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/Blowfish.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/Blowfish.pyi deleted file mode 100644 index b8b21c6..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/Blowfish.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Union, Dict, Iterable, Optional - -Buffer = bytes|bytearray|memoryview - -from Cryptodome.Cipher._mode_ecb import EcbMode -from Cryptodome.Cipher._mode_cbc import CbcMode -from Cryptodome.Cipher._mode_cfb import CfbMode -from Cryptodome.Cipher._mode_ofb import OfbMode -from Cryptodome.Cipher._mode_ctr import CtrMode -from Cryptodome.Cipher._mode_openpgp import OpenPgpMode -from Cryptodome.Cipher._mode_eax import EaxMode - -BlowfishMode = int - -MODE_ECB: BlowfishMode -MODE_CBC: BlowfishMode -MODE_CFB: BlowfishMode -MODE_OFB: BlowfishMode -MODE_CTR: BlowfishMode -MODE_OPENPGP: BlowfishMode -MODE_EAX: BlowfishMode - -def new(key: Buffer, - mode: BlowfishMode, - iv : Optional[Buffer] = ..., - IV : Optional[Buffer] = ..., - nonce : Optional[Buffer] = ..., - segment_size : int = ..., - mac_len : int = ..., - initial_value : Union[int, Buffer] = ..., - counter : Dict = ...) -> \ - Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... - -block_size: int -key_size: Iterable[int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/CAST.py b/venv/Lib/site-packages/Cryptodome/Cipher/CAST.py deleted file mode 100644 index 84eb88e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/CAST.py +++ /dev/null @@ -1,159 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/CAST.py : CAST -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== -""" -Module's constants for the modes of operation supported with CAST: - -:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` -:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` -:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` -:var MODE_OFB: :ref:`Output FeedBack (OFB) ` -:var MODE_CTR: :ref:`CounTer Mode (CTR) ` -:var MODE_OPENPGP: :ref:`OpenPGP Mode ` -:var MODE_EAX: :ref:`EAX Mode ` -""" - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util.py3compat import byte_string -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - c_size_t, c_uint8_ptr) - -_raw_cast_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._raw_cast", - """ - int CAST_start_operation(const uint8_t key[], - size_t key_len, - void **pResult); - int CAST_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CAST_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CAST_stop_operation(void *state); - """) - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a handle to a low-level - base cipher. It will absorb named parameters in the process.""" - - try: - key = dict_parameters.pop("key") - except KeyError: - raise TypeError("Missing 'key' parameter") - - if len(key) not in key_size: - raise ValueError("Incorrect CAST key length (%d bytes)" % len(key)) - - start_operation = _raw_cast_lib.CAST_start_operation - stop_operation = _raw_cast_lib.CAST_stop_operation - - cipher = VoidPointer() - result = start_operation(c_uint8_ptr(key), - c_size_t(len(key)), - cipher.address_of()) - if result: - raise ValueError("Error %X while instantiating the CAST cipher" - % result) - - return SmartPointer(cipher.get(), stop_operation) - - -def new(key, mode, *args, **kwargs): - """Create a new CAST cipher - - :param key: - The secret key to use in the symmetric cipher. - Its length can vary from 5 to 16 bytes. - :type key: bytes, bytearray, memoryview - - :param mode: - The chaining mode to use for encryption or decryption. - :type mode: One of the supported ``MODE_*`` constants - - :Keyword Arguments: - * **iv** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, - and ``MODE_OPENPGP`` modes). - - The initialization vector to use for encryption or decryption. - - For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. - - For ``MODE_OPENPGP`` mode only, - it must be 8 bytes long for encryption - and 10 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - - If not provided, a random byte string is generated (you must then - read its value with the :attr:`iv` attribute). - - * **nonce** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). - - A value that must never be reused for any other encryption done - with this key. - - For ``MODE_EAX`` there are no - restrictions on its length (recommended: **16** bytes). - - For ``MODE_CTR``, its length must be in the range **[0..7]**. - - If not provided for ``MODE_EAX``, a random byte string is generated (you - can read it back via the ``nonce`` attribute). - - * **segment_size** (*integer*) -- - (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext - are segmented in. It must be a multiple of 8. - If not specified, it will be assumed to be 8. 
- - * **mac_len** : (*integer*) -- - (Only ``MODE_EAX``) - Length of the authentication tag, in bytes. - It must be no longer than 8 (default). - - * **initial_value** : (*integer*) -- - (Only ``MODE_CTR``). The initial value for the counter within - the counter block. By default it is **0**. - - :Return: a CAST object, of the applicable mode. - """ - - return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) - -MODE_ECB = 1 -MODE_CBC = 2 -MODE_CFB = 3 -MODE_OFB = 5 -MODE_CTR = 6 -MODE_OPENPGP = 7 -MODE_EAX = 9 - -# Size of a data block (in bytes) -block_size = 8 -# Size of a key (in bytes) -key_size = range(5, 16 + 1) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/CAST.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/CAST.pyi deleted file mode 100644 index be01f09..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/CAST.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Union, Dict, Iterable, Optional - -Buffer = bytes|bytearray|memoryview - -from Cryptodome.Cipher._mode_ecb import EcbMode -from Cryptodome.Cipher._mode_cbc import CbcMode -from Cryptodome.Cipher._mode_cfb import CfbMode -from Cryptodome.Cipher._mode_ofb import OfbMode -from Cryptodome.Cipher._mode_ctr import CtrMode -from Cryptodome.Cipher._mode_openpgp import OpenPgpMode -from Cryptodome.Cipher._mode_eax import EaxMode - -CASTMode = int - -MODE_ECB: CASTMode -MODE_CBC: CASTMode -MODE_CFB: CASTMode -MODE_OFB: CASTMode -MODE_CTR: CASTMode -MODE_OPENPGP: CASTMode -MODE_EAX: CASTMode - -def new(key: Buffer, - mode: CASTMode, - iv : Optional[Buffer] = ..., - IV : Optional[Buffer] = ..., - nonce : Optional[Buffer] = ..., - segment_size : int = ..., - mac_len : int = ..., - initial_value : Union[int, Buffer] = ..., - counter : Dict = ...) -> \ - Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... - -block_size: int -key_size : Iterable[int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20.py b/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20.py deleted file mode 100644 index b2759b9..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20.py +++ /dev/null @@ -1,291 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Random import get_random_bytes - -from Cryptodome.Util.py3compat import _copy_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - create_string_buffer, - get_raw_buffer, VoidPointer, - SmartPointer, c_size_t, - c_uint8_ptr, c_ulong, - is_writeable_buffer) - -_raw_chacha20_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._chacha20", - """ - int chacha20_init(void **pState, - const uint8_t *key, - size_t keySize, - const uint8_t *nonce, - size_t nonceSize); - - int chacha20_destroy(void *state); - - int chacha20_encrypt(void *state, - const uint8_t in[], - uint8_t out[], - size_t len); - - int chacha20_seek(void *state, - unsigned long block_high, - unsigned long block_low, - unsigned offset); - - int hchacha20( const uint8_t key[32], - const uint8_t nonce16[16], - uint8_t subkey[32]); - """) - - -def _HChaCha20(key, nonce): - - assert(len(key) == 32) - assert(len(nonce) == 16) - - subkey = bytearray(32) - result = _raw_chacha20_lib.hchacha20( - c_uint8_ptr(key), - c_uint8_ptr(nonce), - c_uint8_ptr(subkey)) - if result: - raise ValueError("Error %d when deriving subkey with HChaCha20" % result) - - return subkey - - -class ChaCha20Cipher(object): - """ChaCha20 (or XChaCha20) cipher object. - Do not create it directly. Use :py:func:`new` instead. - - :var nonce: The nonce with length 8, 12 or 24 bytes - :vartype nonce: bytes - """ - - block_size = 1 - - def __init__(self, key, nonce): - """Initialize a ChaCha20/XChaCha20 cipher object - - See also `new()` at the module level.""" - - self.nonce = _copy_bytes(None, None, nonce) - - # XChaCha20 requires a key derivation with HChaCha20 - # See 2.3 in https://tools.ietf.org/html/draft-arciszewski-xchacha-03 - if len(nonce) == 24: - key = _HChaCha20(key, nonce[:16]) - nonce = b'\x00' * 4 + nonce[16:] - self._name = "XChaCha20" - else: - self._name = "ChaCha20" - nonce = self.nonce - - self._next = ("encrypt", "decrypt") - - self._state = VoidPointer() - result = _raw_chacha20_lib.chacha20_init( - self._state.address_of(), - c_uint8_ptr(key), - c_size_t(len(key)), - nonce, - c_size_t(len(nonce))) - if result: - raise ValueError("Error %d instantiating a %s cipher" % (result, - self._name)) - self._state = SmartPointer(self._state.get(), - _raw_chacha20_lib.chacha20_destroy) - - def encrypt(self, plaintext, output=None): - """Encrypt a piece of data. - - Args: - plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. - Keyword Args: - output(bytes/bytearray/memoryview): The location where the ciphertext - is written to. If ``None``, the ciphertext is returned. - Returns: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. 
- """ - - if "encrypt" not in self._next: - raise TypeError("Cipher object can only be used for decryption") - self._next = ("encrypt",) - return self._encrypt(plaintext, output) - - def _encrypt(self, plaintext, output): - """Encrypt without FSM checks""" - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = _raw_chacha20_lib.chacha20_encrypt( - self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - raise ValueError("Error %d while encrypting with %s" % (result, self._name)) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt a piece of data. - - Args: - ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. - Keyword Args: - output(bytes/bytearray/memoryview): The location where the plaintext - is written to. If ``None``, the plaintext is returned. - Returns: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("Cipher object can only be used for encryption") - self._next = ("decrypt",) - - try: - return self._encrypt(ciphertext, output) - except ValueError as e: - raise ValueError(str(e).replace("enc", "dec")) - - def seek(self, position): - """Seek to a certain position in the key stream. - - If you want to seek to a certain block, - use ``seek(block_number * 64)``. - - Args: - position (integer): - The absolute position within the key stream, in bytes. - """ - - block_number, offset = divmod(position, 64) - block_low = block_number & 0xFFFFFFFF - block_high = block_number >> 32 - - result = _raw_chacha20_lib.chacha20_seek( - self._state.get(), - c_ulong(block_high), - c_ulong(block_low), - offset - ) - if result: - raise ValueError("Error %d while seeking with %s" % (result, self._name)) - - -def _derive_Poly1305_key_pair(key, nonce): - """Derive a tuple (r, s, nonce) for a Poly1305 MAC. - - If nonce is ``None``, a new 12-byte nonce is generated. - """ - - if len(key) != 32: - raise ValueError("Poly1305 with ChaCha20 requires a 32-byte key") - - if nonce is None: - padded_nonce = nonce = get_random_bytes(12) - elif len(nonce) == 8: - # See RFC7538, 2.6: [...] ChaCha20 as specified here requires a 96-bit - # nonce. So if the provided nonce is only 64-bit, then the first 32 - # bits of the nonce will be set to a constant number. - # This will usually be zero, but for protocols with multiple senders it may be - # different for each sender, but should be the same for all - # invocations of the function with the same key by a particular - # sender. - padded_nonce = b'\x00\x00\x00\x00' + nonce - elif len(nonce) == 12: - padded_nonce = nonce - else: - raise ValueError("Poly1305 with ChaCha20 requires an 8- or 12-byte nonce") - - rs = new(key=key, nonce=padded_nonce).encrypt(b'\x00' * 32) - return rs[:16], rs[16:], nonce - - -def new(**kwargs): - """Create a new ChaCha20 or XChaCha20 cipher - - Keyword Args: - key (bytes/bytearray/memoryview): The secret key to use. - It must be 32 bytes long. 
- nonce (bytes/bytearray/memoryview): A mandatory value that - must never be reused for any other encryption - done with this key. - - For ChaCha20, it must be 8 or 12 bytes long. - - For XChaCha20, it must be 24 bytes long. - - If not provided, 8 bytes will be randomly generated - (you can find them back in the ``nonce`` attribute). - - :Return: a :class:`Cryptodome.Cipher.ChaCha20.ChaCha20Cipher` object - """ - - try: - key = kwargs.pop("key") - except KeyError as e: - raise TypeError("Missing parameter %s" % e) - - nonce = kwargs.pop("nonce", None) - if nonce is None: - nonce = get_random_bytes(8) - - if len(key) != 32: - raise ValueError("ChaCha20/XChaCha20 key must be 32 bytes long") - - if len(nonce) not in (8, 12, 24): - raise ValueError("Nonce must be 8/12 bytes(ChaCha20) or 24 bytes (XChaCha20)") - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return ChaCha20Cipher(key, nonce) - -# Size of a data block (in bytes) -block_size = 1 - -# Size of a key (in bytes) -key_size = 32 diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20.pyi deleted file mode 100644 index f5001cd..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Union, overload, Optional - -Buffer = bytes|bytearray|memoryview - -def _HChaCha20(key: Buffer, nonce: Buffer) -> bytearray: ... - -class ChaCha20Cipher: - block_size: int - nonce: bytes - - def __init__(self, key: Buffer, nonce: Buffer) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - def seek(self, position: int) -> None: ... - -def new(key: Buffer, nonce: Optional[Buffer] = ...) -> ChaCha20Cipher: ... - -block_size: int -key_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20_Poly1305.py b/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20_Poly1305.py deleted file mode 100644 index 6a89e2a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20_Poly1305.py +++ /dev/null @@ -1,334 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2018, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
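The ChaCha20 module deleted above (and its XChaCha20 variant, selected by a 24-byte nonce) is a plain stream cipher with random keystream access via seek(); a small sketch, again assuming pycryptodome remains installed and with made-up values:

    from Cryptodome.Cipher import ChaCha20
    from Cryptodome.Random import get_random_bytes

    key = get_random_bytes(32)                     # always 32 bytes
    nonce = get_random_bytes(24)                   # 24 bytes selects XChaCha20
    cipher = ChaCha20.new(key=key, nonce=nonce)
    ciphertext = cipher.encrypt(b"some plaintext")

    # decryption reuses the same keystream; seek() jumps to any byte offset
    decipher = ChaCha20.new(key=key, nonce=nonce)
    decipher.seek(5)                               # skip the first 5 keystream bytes
    assert decipher.decrypt(ciphertext[5:]) == b"some plaintext"[5:]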
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from binascii import unhexlify - -from Cryptodome.Cipher import ChaCha20 -from Cryptodome.Cipher.ChaCha20 import _HChaCha20 -from Cryptodome.Hash import Poly1305, BLAKE2s - -from Cryptodome.Random import get_random_bytes - -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.Util.py3compat import _copy_bytes, bord -from Cryptodome.Util._raw_api import is_buffer - - -def _enum(**enums): - return type('Enum', (), enums) - - -_CipherStatus = _enum(PROCESSING_AUTH_DATA=1, - PROCESSING_CIPHERTEXT=2, - PROCESSING_DONE=3) - - -class ChaCha20Poly1305Cipher(object): - """ChaCha20-Poly1305 and XChaCha20-Poly1305 cipher object. - Do not create it directly. Use :py:func:`new` instead. - - :var nonce: The nonce with length 8, 12 or 24 bytes - :vartype nonce: byte string - """ - - def __init__(self, key, nonce): - """Initialize a ChaCha20-Poly1305 AEAD cipher object - - See also `new()` at the module level.""" - - self._next = ("update", "encrypt", "decrypt", "digest", - "verify") - - self._authenticator = Poly1305.new(key=key, nonce=nonce, cipher=ChaCha20) - - self._cipher = ChaCha20.new(key=key, nonce=nonce) - self._cipher.seek(64) # Block counter starts at 1 - - self._len_aad = 0 - self._len_ct = 0 - self._mac_tag = None - self._status = _CipherStatus.PROCESSING_AUTH_DATA - - def update(self, data): - """Protect the associated data. - - Associated data (also known as *additional authenticated data* - AAD) - is the piece of the message that must stay in the clear, while - still allowing the receiver to verify its integrity. - An example is packet headers. - - The associated data (possibly split into multiple segments) is - fed into :meth:`update` before any call to :meth:`decrypt` or :meth:`encrypt`. - If there is no associated data, :meth:`update` is not called. - - :param bytes/bytearray/memoryview assoc_data: - A piece of associated data. There are no restrictions on its size. - """ - - if "update" not in self._next: - raise TypeError("update() method cannot be called") - - self._len_aad += len(data) - self._authenticator.update(data) - - def _pad_aad(self): - - assert(self._status == _CipherStatus.PROCESSING_AUTH_DATA) - if self._len_aad & 0x0F: - self._authenticator.update(b'\x00' * (16 - (self._len_aad & 0x0F))) - self._status = _CipherStatus.PROCESSING_CIPHERTEXT - - def encrypt(self, plaintext, output=None): - """Encrypt a piece of data. - - Args: - plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. - Keyword Args: - output(bytes/bytearray/memoryview): The location where the ciphertext - is written to. If ``None``, the ciphertext is returned. - Returns: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. 
- """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() method cannot be called") - - if self._status == _CipherStatus.PROCESSING_AUTH_DATA: - self._pad_aad() - - self._next = ("encrypt", "digest") - - result = self._cipher.encrypt(plaintext, output=output) - self._len_ct += len(plaintext) - if output is None: - self._authenticator.update(result) - else: - self._authenticator.update(output) - return result - - def decrypt(self, ciphertext, output=None): - """Decrypt a piece of data. - - Args: - ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. - Keyword Args: - output(bytes/bytearray/memoryview): The location where the plaintext - is written to. If ``None``, the plaintext is returned. - Returns: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() method cannot be called") - - if self._status == _CipherStatus.PROCESSING_AUTH_DATA: - self._pad_aad() - - self._next = ("decrypt", "verify") - - self._len_ct += len(ciphertext) - self._authenticator.update(ciphertext) - return self._cipher.decrypt(ciphertext, output=output) - - def _compute_mac(self): - """Finalize the cipher (if not done already) and return the MAC.""" - - if self._mac_tag: - assert(self._status == _CipherStatus.PROCESSING_DONE) - return self._mac_tag - - assert(self._status != _CipherStatus.PROCESSING_DONE) - - if self._status == _CipherStatus.PROCESSING_AUTH_DATA: - self._pad_aad() - - if self._len_ct & 0x0F: - self._authenticator.update(b'\x00' * (16 - (self._len_ct & 0x0F))) - - self._status = _CipherStatus.PROCESSING_DONE - - self._authenticator.update(long_to_bytes(self._len_aad, 8)[::-1]) - self._authenticator.update(long_to_bytes(self._len_ct, 8)[::-1]) - self._mac_tag = self._authenticator.digest() - return self._mac_tag - - def digest(self): - """Compute the *binary* authentication tag (MAC). - - :Return: the MAC tag, as 16 ``bytes``. - """ - - if "digest" not in self._next: - raise TypeError("digest() method cannot be called") - self._next = ("digest",) - - return self._compute_mac() - - def hexdigest(self): - """Compute the *printable* authentication tag (MAC). - - This method is like :meth:`digest`. - - :Return: the MAC tag, as a hexadecimal string. - """ - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def verify(self, received_mac_tag): - """Validate the *binary* authentication tag (MAC). - - The receiver invokes this method at the very end, to - check if the associated data (if any) and the decrypted - messages are valid. - - :param bytes/bytearray/memoryview received_mac_tag: - This is the 16-byte *binary* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "verify" not in self._next: - raise TypeError("verify() cannot be called" - " when encrypting a message") - self._next = ("verify",) - - secret = get_random_bytes(16) - - self._compute_mac() - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, - data=self._mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, - data=received_mac_tag) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Validate the *printable* authentication tag (MAC). - - This method is like :meth:`verify`. - - :param string hex_mac_tag: - This is the *printable* MAC. - :Raises ValueError: - if the MAC does not match. 
The message has been tampered with - or the key is incorrect. - """ - - self.verify(unhexlify(hex_mac_tag)) - - def encrypt_and_digest(self, plaintext): - """Perform :meth:`encrypt` and :meth:`digest` in one step. - - :param plaintext: The data to encrypt, of any size. - :type plaintext: bytes/bytearray/memoryview - :return: a tuple with two ``bytes`` objects: - - - the ciphertext, of equal length as the plaintext - - the 16-byte MAC tag - """ - - return self.encrypt(plaintext), self.digest() - - def decrypt_and_verify(self, ciphertext, received_mac_tag): - """Perform :meth:`decrypt` and :meth:`verify` in one step. - - :param ciphertext: The piece of data to decrypt. - :type ciphertext: bytes/bytearray/memoryview - :param bytes received_mac_tag: - This is the 16-byte *binary* MAC, as received from the sender. - :return: the decrypted data (as ``bytes``) - :raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - plaintext = self.decrypt(ciphertext) - self.verify(received_mac_tag) - return plaintext - - -def new(**kwargs): - """Create a new ChaCha20-Poly1305 or XChaCha20-Poly1305 AEAD cipher. - - :keyword key: The secret key to use. It must be 32 bytes long. - :type key: byte string - - :keyword nonce: - A value that must never be reused for any other encryption - done with this key. - - For ChaCha20-Poly1305, it must be 8 or 12 bytes long. - - For XChaCha20-Poly1305, it must be 24 bytes long. - - If not provided, 12 ``bytes`` will be generated randomly - (you can find them back in the ``nonce`` attribute). - :type nonce: bytes, bytearray, memoryview - - :Return: a :class:`Cryptodome.Cipher.ChaCha20.ChaCha20Poly1305Cipher` object - """ - - try: - key = kwargs.pop("key") - except KeyError as e: - raise TypeError("Missing parameter %s" % e) - - if len(key) != 32: - raise ValueError("Key must be 32 bytes long") - - nonce = kwargs.pop("nonce", None) - if nonce is None: - nonce = get_random_bytes(12) - - if len(nonce) in (8, 12): - chacha20_poly1305_nonce = nonce - elif len(nonce) == 24: - key = _HChaCha20(key, nonce[:16]) - chacha20_poly1305_nonce = b'\x00\x00\x00\x00' + nonce[16:] - else: - raise ValueError("Nonce must be 8, 12 or 24 bytes long") - - if not is_buffer(nonce): - raise TypeError("nonce must be bytes, bytearray or memoryview") - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - cipher = ChaCha20Poly1305Cipher(key, chacha20_poly1305_nonce) - cipher.nonce = _copy_bytes(None, None, nonce) - return cipher - - -# Size of a key (in bytes) -key_size = 32 diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20_Poly1305.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20_Poly1305.pyi deleted file mode 100644 index 109e805..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/ChaCha20_Poly1305.pyi +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Union, Tuple, overload, Optional - -Buffer = bytes|bytearray|memoryview - -class ChaCha20Poly1305Cipher: - nonce: bytes - - def __init__(self, key: Buffer, nonce: Buffer) -> None: ... - def update(self, data: Buffer) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... 
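The AEAD wrapper above combines the ChaCha20 keystream with a Poly1305 tag; a minimal sketch of the round trip, including associated data, with illustrative values:

    from Cryptodome.Cipher import ChaCha20_Poly1305
    from Cryptodome.Random import get_random_bytes

    key = get_random_bytes(32)
    header = b"packet-header"                      # authenticated but not encrypted

    cipher = ChaCha20_Poly1305.new(key=key)        # a 12-byte nonce is generated for us
    cipher.update(header)
    ciphertext, tag = cipher.encrypt_and_digest(b"secret payload")

    receiver = ChaCha20_Poly1305.new(key=key, nonce=cipher.nonce)
    receiver.update(header)
    assert receiver.decrypt_and_verify(ciphertext, tag) == b"secret payload"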
- def verify(self, received_mac_tag: Buffer) -> None: ... - def hexverify(self, received_mac_tag: str) -> None: ... - def encrypt_and_digest(self, plaintext: Buffer) -> Tuple[bytes, bytes]: ... - def decrypt_and_verify(self, ciphertext: Buffer, received_mac_tag: Buffer) -> bytes: ... - -def new(key: Buffer, nonce: Optional[Buffer] = ...) -> ChaCha20Poly1305Cipher: ... - -block_size: int -key_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/DES.py b/venv/Lib/site-packages/Cryptodome/Cipher/DES.py deleted file mode 100644 index 026b491..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/DES.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/DES.py : DES -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -""" -Module's constants for the modes of operation supported with Single DES: - -:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` -:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` -:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` -:var MODE_OFB: :ref:`Output FeedBack (OFB) ` -:var MODE_CTR: :ref:`CounTer Mode (CTR) ` -:var MODE_OPENPGP: :ref:`OpenPGP Mode ` -:var MODE_EAX: :ref:`EAX Mode ` -""" - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util.py3compat import byte_string -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - c_size_t, c_uint8_ptr) - -_raw_des_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._raw_des", - """ - int DES_start_operation(const uint8_t key[], - size_t key_len, - void **pResult); - int DES_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int DES_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int DES_stop_operation(void *state); - """) - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a handle to a low-level - base cipher. It will absorb named parameters in the process.""" - - try: - key = dict_parameters.pop("key") - except KeyError: - raise TypeError("Missing 'key' parameter") - - if len(key) != key_size: - raise ValueError("Incorrect DES key length (%d bytes)" % len(key)) - - start_operation = _raw_des_lib.DES_start_operation - stop_operation = _raw_des_lib.DES_stop_operation - - cipher = VoidPointer() - result = start_operation(c_uint8_ptr(key), - c_size_t(len(key)), - cipher.address_of()) - if result: - raise ValueError("Error %X while instantiating the DES cipher" - % result) - return SmartPointer(cipher.get(), stop_operation) - - -def new(key, mode, *args, **kwargs): - """Create a new DES cipher. 
- - :param key: - The secret key to use in the symmetric cipher. - It must be 8 byte long. The parity bits will be ignored. - :type key: bytes/bytearray/memoryview - - :param mode: - The chaining mode to use for encryption or decryption. - :type mode: One of the supported ``MODE_*`` constants - - :Keyword Arguments: - * **iv** (*byte string*) -- - (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, - and ``MODE_OPENPGP`` modes). - - The initialization vector to use for encryption or decryption. - - For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. - - For ``MODE_OPENPGP`` mode only, - it must be 8 bytes long for encryption - and 10 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - - If not provided, a random byte string is generated (you must then - read its value with the :attr:`iv` attribute). - - * **nonce** (*byte string*) -- - (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). - - A value that must never be reused for any other encryption done - with this key. - - For ``MODE_EAX`` there are no - restrictions on its length (recommended: **16** bytes). - - For ``MODE_CTR``, its length must be in the range **[0..7]**. - - If not provided for ``MODE_EAX``, a random byte string is generated (you - can read it back via the ``nonce`` attribute). - - * **segment_size** (*integer*) -- - (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext - are segmented in. It must be a multiple of 8. - If not specified, it will be assumed to be 8. - - * **mac_len** : (*integer*) -- - (Only ``MODE_EAX``) - Length of the authentication tag, in bytes. - It must be no longer than 8 (default). - - * **initial_value** : (*integer*) -- - (Only ``MODE_CTR``). The initial value for the counter within - the counter block. By default it is **0**. - - :Return: a DES object, of the applicable mode. - """ - - return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) - -MODE_ECB = 1 -MODE_CBC = 2 -MODE_CFB = 3 -MODE_OFB = 5 -MODE_CTR = 6 -MODE_OPENPGP = 7 -MODE_EAX = 9 - -# Size of a data block (in bytes) -block_size = 8 -# Size of a key (in bytes) -key_size = 8 diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/DES.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/DES.pyi deleted file mode 100644 index 25a3b23..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/DES.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Union, Dict, Iterable, Optional - -Buffer = bytes|bytearray|memoryview - -from Cryptodome.Cipher._mode_ecb import EcbMode -from Cryptodome.Cipher._mode_cbc import CbcMode -from Cryptodome.Cipher._mode_cfb import CfbMode -from Cryptodome.Cipher._mode_ofb import OfbMode -from Cryptodome.Cipher._mode_ctr import CtrMode -from Cryptodome.Cipher._mode_openpgp import OpenPgpMode -from Cryptodome.Cipher._mode_eax import EaxMode - -DESMode = int - -MODE_ECB: DESMode -MODE_CBC: DESMode -MODE_CFB: DESMode -MODE_OFB: DESMode -MODE_CTR: DESMode -MODE_OPENPGP: DESMode -MODE_EAX: DESMode - -def new(key: Buffer, - mode: DESMode, - iv : Optional[Buffer] = ..., - IV : Optional[Buffer] = ..., - nonce : Optional[Buffer] = ..., - segment_size : int = ..., - mac_len : int = ..., - initial_value : Union[int, Buffer] = ..., - counter : Dict = ...) -> \ - Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... 
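Single DES, as deleted above, takes an exactly 8-byte key and works on 8-byte blocks, so block modes such as CBC need padding; a sketch using the pad/unpad helpers that live elsewhere in the same package (values are illustrative, and DES itself is legacy-only):

    from Cryptodome.Cipher import DES
    from Cryptodome.Random import get_random_bytes
    from Cryptodome.Util.Padding import pad, unpad

    key = get_random_bytes(8)                      # single DES: exactly 8 bytes
    cipher = DES.new(key, DES.MODE_CBC)            # an 8-byte IV is generated automatically
    ciphertext = cipher.encrypt(pad(b"legacy data", DES.block_size))

    decipher = DES.new(key, DES.MODE_CBC, iv=cipher.iv)
    assert unpad(decipher.decrypt(ciphertext), DES.block_size) == b"legacy data"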
- -block_size: int -key_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/DES3.py b/venv/Lib/site-packages/Cryptodome/Cipher/DES3.py deleted file mode 100644 index 3b2828e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/DES3.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/DES3.py : DES3 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -""" -Module's constants for the modes of operation supported with Triple DES: - -:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` -:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` -:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` -:var MODE_OFB: :ref:`Output FeedBack (OFB) ` -:var MODE_CTR: :ref:`CounTer Mode (CTR) ` -:var MODE_OPENPGP: :ref:`OpenPGP Mode ` -:var MODE_EAX: :ref:`EAX Mode ` -""" - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util.py3compat import byte_string, bchr, bord, bstr -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - c_size_t) - -_raw_des3_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._raw_des3", - """ - int DES3_start_operation(const uint8_t key[], - size_t key_len, - void **pResult); - int DES3_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int DES3_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int DES3_stop_operation(void *state); - """) - - -def adjust_key_parity(key_in): - """Set the parity bits in a TDES key. - - :param key_in: the TDES key whose bits need to be adjusted - :type key_in: byte string - - :returns: a copy of ``key_in``, with the parity bits correctly set - :rtype: byte string - - :raises ValueError: if the TDES key is not 16 or 24 bytes long - :raises ValueError: if the TDES key degenerates into Single DES - """ - - def parity_byte(key_byte): - parity = 1 - for i in range(1, 8): - parity ^= (key_byte >> i) & 1 - return (key_byte & 0xFE) | parity - - if len(key_in) not in key_size: - raise ValueError("Not a valid TDES key") - - key_out = b"".join([ bchr(parity_byte(bord(x))) for x in key_in ]) - - if key_out[:8] == key_out[8:16] or key_out[-16:-8] == key_out[-8:]: - raise ValueError("Triple DES key degenerates to single DES") - - return key_out - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a handle to a low-level base cipher. 
- It will absorb named parameters in the process.""" - - try: - key_in = dict_parameters.pop("key") - except KeyError: - raise TypeError("Missing 'key' parameter") - - key = adjust_key_parity(bstr(key_in)) - - start_operation = _raw_des3_lib.DES3_start_operation - stop_operation = _raw_des3_lib.DES3_stop_operation - - cipher = VoidPointer() - result = start_operation(key, - c_size_t(len(key)), - cipher.address_of()) - if result: - raise ValueError("Error %X while instantiating the TDES cipher" - % result) - return SmartPointer(cipher.get(), stop_operation) - - -def new(key, mode, *args, **kwargs): - """Create a new Triple DES cipher. - - :param key: - The secret key to use in the symmetric cipher. - It must be 16 or 24 byte long. The parity bits will be ignored. - :type key: bytes/bytearray/memoryview - - :param mode: - The chaining mode to use for encryption or decryption. - :type mode: One of the supported ``MODE_*`` constants - - :Keyword Arguments: - * **iv** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, - and ``MODE_OPENPGP`` modes). - - The initialization vector to use for encryption or decryption. - - For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. - - For ``MODE_OPENPGP`` mode only, - it must be 8 bytes long for encryption - and 10 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - - If not provided, a random byte string is generated (you must then - read its value with the :attr:`iv` attribute). - - * **nonce** (*bytes*, *bytearray*, *memoryview*) -- - (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). - - A value that must never be reused for any other encryption done - with this key. - - For ``MODE_EAX`` there are no - restrictions on its length (recommended: **16** bytes). - - For ``MODE_CTR``, its length must be in the range **[0..7]**. - - If not provided for ``MODE_EAX``, a random byte string is generated (you - can read it back via the ``nonce`` attribute). - - * **segment_size** (*integer*) -- - (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext - are segmented in. It must be a multiple of 8. - If not specified, it will be assumed to be 8. - - * **mac_len** : (*integer*) -- - (Only ``MODE_EAX``) - Length of the authentication tag, in bytes. - It must be no longer than 8 (default). - - * **initial_value** : (*integer*) -- - (Only ``MODE_CTR``). The initial value for the counter within - the counter block. By default it is **0**. - - :Return: a Triple DES object, of the applicable mode. 
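adjust_key_parity() above rejects keys that degenerate to single DES, so a fresh Triple DES key is usually drawn until one passes; a short sketch with illustrative data:

    from Cryptodome.Cipher import DES3
    from Cryptodome.Random import get_random_bytes
    from Cryptodome.Util.Padding import pad, unpad

    # keep drawing 24 random bytes until they form a non-degenerate TDES key
    while True:
        try:
            key = DES3.adjust_key_parity(get_random_bytes(24))
            break
        except ValueError:
            pass

    cipher = DES3.new(key, DES3.MODE_CBC)
    ciphertext = cipher.encrypt(pad(b"triple DES payload", DES3.block_size))

    decipher = DES3.new(key, DES3.MODE_CBC, iv=cipher.iv)
    assert unpad(decipher.decrypt(ciphertext), DES3.block_size) == b"triple DES payload"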
- """ - - return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) - -MODE_ECB = 1 -MODE_CBC = 2 -MODE_CFB = 3 -MODE_OFB = 5 -MODE_CTR = 6 -MODE_OPENPGP = 7 -MODE_EAX = 9 - -# Size of a data block (in bytes) -block_size = 8 -# Size of a key (in bytes) -key_size = (16, 24) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/DES3.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/DES3.pyi deleted file mode 100644 index 2c150f8..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/DES3.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from typing import Union, Dict, Tuple, Optional - -Buffer = bytes|bytearray|memoryview - -from Cryptodome.Cipher._mode_ecb import EcbMode -from Cryptodome.Cipher._mode_cbc import CbcMode -from Cryptodome.Cipher._mode_cfb import CfbMode -from Cryptodome.Cipher._mode_ofb import OfbMode -from Cryptodome.Cipher._mode_ctr import CtrMode -from Cryptodome.Cipher._mode_openpgp import OpenPgpMode -from Cryptodome.Cipher._mode_eax import EaxMode - -def adjust_key_parity(key_in: bytes) -> bytes: ... - -DES3Mode = int - -MODE_ECB: DES3Mode -MODE_CBC: DES3Mode -MODE_CFB: DES3Mode -MODE_OFB: DES3Mode -MODE_CTR: DES3Mode -MODE_OPENPGP: DES3Mode -MODE_EAX: DES3Mode - -def new(key: Buffer, - mode: DES3Mode, - iv : Optional[Buffer] = ..., - IV : Optional[Buffer] = ..., - nonce : Optional[Buffer] = ..., - segment_size : int = ..., - mac_len : int = ..., - initial_value : Union[int, Buffer] = ..., - counter : Dict = ...) -> \ - Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... - -block_size: int -key_size: Tuple[int, int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_OAEP.py b/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_OAEP.py deleted file mode 100644 index 08f9efe..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_OAEP.py +++ /dev/null @@ -1,231 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/PKCS1_OAEP.py : PKCS#1 OAEP -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Signature.pss import MGF1 -import Cryptodome.Hash.SHA1 - -from Cryptodome.Util.py3compat import _copy_bytes -import Cryptodome.Util.number -from Cryptodome.Util.number import ceil_div, bytes_to_long, long_to_bytes -from Cryptodome.Util.strxor import strxor -from Cryptodome import Random -from ._pkcs1_oaep_decode import oaep_decode - - -class PKCS1OAEP_Cipher: - """Cipher object for PKCS#1 v1.5 OAEP. - Do not create directly: use :func:`new` instead.""" - - def __init__(self, key, hashAlgo, mgfunc, label, randfunc): - """Initialize this PKCS#1 OAEP cipher object. 
- - :Parameters: - key : an RSA key object - If a private half is given, both encryption and decryption are possible. - If a public half is given, only encryption is possible. - hashAlgo : hash object - The hash function to use. This can be a module under `Cryptodome.Hash` - or an existing hash object created from any of such modules. If not specified, - `Cryptodome.Hash.SHA1` is used. - mgfunc : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - If not specified, the standard MGF1 consistent with ``hashAlgo`` is used (a safe choice). - label : bytes/bytearray/memoryview - A label to apply to this particular encryption. If not specified, - an empty string is used. Specifying a label does not improve - security. - randfunc : callable - A function that returns random bytes. - - :attention: Modify the mask generation function only if you know what you are doing. - Sender and receiver must use the same one. - """ - self._key = key - - if hashAlgo: - self._hashObj = hashAlgo - else: - self._hashObj = Cryptodome.Hash.SHA1 - - if mgfunc: - self._mgf = mgfunc - else: - self._mgf = lambda x, y: MGF1(x, y, self._hashObj) - - self._label = _copy_bytes(None, None, label) - self._randfunc = randfunc - - def can_encrypt(self): - """Legacy function to check if you can call :meth:`encrypt`. - - .. deprecated:: 3.0""" - return self._key.can_encrypt() - - def can_decrypt(self): - """Legacy function to check if you can call :meth:`decrypt`. - - .. deprecated:: 3.0""" - return self._key.can_decrypt() - - def encrypt(self, message): - """Encrypt a message with PKCS#1 OAEP. - - :param message: - The message to encrypt, also known as plaintext. It can be of - variable length, but not longer than the RSA modulus (in bytes) - minus 2, minus twice the hash output size. - For instance, if you use RSA 2048 and SHA-256, the longest message - you can encrypt is 190 byte long. - :type message: bytes/bytearray/memoryview - - :returns: The ciphertext, as large as the RSA modulus. - :rtype: bytes - - :raises ValueError: - if the message is too long. - """ - - # See 7.1.1 in RFC3447 - modBits = Cryptodome.Util.number.size(self._key.n) - k = ceil_div(modBits, 8) # Convert from bits to bytes - hLen = self._hashObj.digest_size - mLen = len(message) - - # Step 1b - ps_len = k - mLen - 2 * hLen - 2 - if ps_len < 0: - raise ValueError("Plaintext is too long.") - # Step 2a - lHash = self._hashObj.new(self._label).digest() - # Step 2b - ps = b'\x00' * ps_len - # Step 2c - db = lHash + ps + b'\x01' + _copy_bytes(None, None, message) - # Step 2d - ros = self._randfunc(hLen) - # Step 2e - dbMask = self._mgf(ros, k-hLen-1) - # Step 2f - maskedDB = strxor(db, dbMask) - # Step 2g - seedMask = self._mgf(maskedDB, hLen) - # Step 2h - maskedSeed = strxor(ros, seedMask) - # Step 2i - em = b'\x00' + maskedSeed + maskedDB - # Step 3a (OS2IP) - em_int = bytes_to_long(em) - # Step 3b (RSAEP) - m_int = self._key._encrypt(em_int) - # Step 3c (I2OSP) - c = long_to_bytes(m_int, k) - return c - - def decrypt(self, ciphertext): - """Decrypt a message with PKCS#1 OAEP. - - :param ciphertext: The encrypted message. - :type ciphertext: bytes/bytearray/memoryview - - :returns: The original message (plaintext). - :rtype: bytes - - :raises ValueError: - if the ciphertext has the wrong length, or if decryption - fails the integrity check (in which case, the decryption - key is probably wrong). - :raises TypeError: - if the RSA key has no private half (i.e. 
you are trying - to decrypt using a public key). - """ - - # See 7.1.2 in RFC3447 - modBits = Cryptodome.Util.number.size(self._key.n) - k = ceil_div(modBits, 8) # Convert from bits to bytes - hLen = self._hashObj.digest_size - - # Step 1b and 1c - if len(ciphertext) != k or k < hLen+2: - raise ValueError("Ciphertext with incorrect length.") - # Step 2a (O2SIP) - ct_int = bytes_to_long(ciphertext) - # Step 2b (RSADP) and step 2c (I2OSP) - em = self._key._decrypt_to_bytes(ct_int) - # Step 3a - lHash = self._hashObj.new(self._label).digest() - # y must be 0, but we MUST NOT check it here in order not to - # allow attacks like Manger's (http://dl.acm.org/citation.cfm?id=704143) - maskedSeed = em[1:hLen+1] - maskedDB = em[hLen+1:] - # Step 3c - seedMask = self._mgf(maskedDB, hLen) - # Step 3d - seed = strxor(maskedSeed, seedMask) - # Step 3e - dbMask = self._mgf(seed, k-hLen-1) - # Step 3f - db = strxor(maskedDB, dbMask) - # Step 3b + 3g - res = oaep_decode(em, lHash, db) - if res <= 0: - raise ValueError("Incorrect decryption.") - # Step 4 - return db[res:] - - -def new(key, hashAlgo=None, mgfunc=None, label=b'', randfunc=None): - """Return a cipher object :class:`PKCS1OAEP_Cipher` - that can be used to perform PKCS#1 OAEP encryption or decryption. - - :param key: - The key object to use to encrypt or decrypt the message. - Decryption is only possible with a private RSA key. - :type key: RSA key object - - :param hashAlgo: - The hash function to use. This can be a module under `Cryptodome.Hash` - or an existing hash object created from any of such modules. - If not specified, `Cryptodome.Hash.SHA1` is used. - :type hashAlgo: hash object - - :param mgfunc: - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - If not specified, the standard MGF1 consistent with ``hashAlgo`` is used (a safe choice). - :type mgfunc: callable - - :param label: - A label to apply to this particular encryption. If not specified, - an empty string is used. Specifying a label does not improve - security. - :type label: bytes/bytearray/memoryview - - :param randfunc: - A function that returns random bytes. - The default is `Random.get_random_bytes`. - :type randfunc: callable - """ - - if randfunc is None: - randfunc = Random.get_random_bytes - return PKCS1OAEP_Cipher(key, hashAlgo, mgfunc, label, randfunc) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_OAEP.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_OAEP.pyi deleted file mode 100644 index b54cd3f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_OAEP.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Optional, Union, Callable, Any, overload -from typing_extensions import Protocol - -from Cryptodome.PublicKey.RSA import RsaKey - -class HashLikeClass(Protocol): - digest_size : int - def new(self, data: Optional[bytes] = ...) -> Any: ... - -class HashLikeModule(Protocol): - digest_size : int - @staticmethod - def new(data: Optional[bytes] = ...) -> Any: ... - -HashLike = Union[HashLikeClass, HashLikeModule] - -Buffer = Union[bytes, bytearray, memoryview] - -class PKCS1OAEP_Cipher: - def __init__(self, - key: RsaKey, - hashAlgo: HashLike, - mgfunc: Callable[[bytes, int], bytes], - label: Buffer, - randfunc: Callable[[int], bytes]) -> None: ... - def can_encrypt(self) -> bool: ... - def can_decrypt(self) -> bool: ... - def encrypt(self, message: Buffer) -> bytes: ... - def decrypt(self, ciphertext: Buffer) -> bytes: ... 
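The OAEP cipher above is the scheme this package itself points to for RSA encryption; a minimal sketch of an encrypt/decrypt round trip (key generation shown inline purely for illustration, SHA-256 chosen explicitly):

    from Cryptodome.PublicKey import RSA
    from Cryptodome.Cipher import PKCS1_OAEP
    from Cryptodome.Hash import SHA256

    rsa_key = RSA.generate(2048)                   # slow; normally loaded from storage

    encryptor = PKCS1_OAEP.new(rsa_key.publickey(), hashAlgo=SHA256)
    ciphertext = encryptor.encrypt(b"short secret")  # at most 190 bytes with RSA-2048/SHA-256

    decryptor = PKCS1_OAEP.new(rsa_key, hashAlgo=SHA256)
    assert decryptor.decrypt(ciphertext) == b"short secret"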
- -def new(key: RsaKey, - hashAlgo: Optional[HashLike] = ..., - mgfunc: Optional[Callable[[bytes, int], bytes]] = ..., - label: Optional[Buffer] = ..., - randfunc: Optional[Callable[[int], bytes]] = ...) -> PKCS1OAEP_Cipher: ... diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_v1_5.py b/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_v1_5.py deleted file mode 100644 index d7a9b79..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_v1_5.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/PKCS1-v1_5.py : PKCS#1 v1.5 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__all__ = ['new', 'PKCS115_Cipher'] - -from Cryptodome import Random -from Cryptodome.Util.number import bytes_to_long, long_to_bytes -from Cryptodome.Util.py3compat import bord, is_bytes, _copy_bytes -from ._pkcs1_oaep_decode import pkcs1_decode - - -class PKCS115_Cipher: - """This cipher can perform PKCS#1 v1.5 RSA encryption or decryption. - Do not instantiate directly. Use :func:`Cryptodome.Cipher.PKCS1_v1_5.new` instead.""" - - def __init__(self, key, randfunc): - """Initialize this PKCS#1 v1.5 cipher object. - - :Parameters: - key : an RSA key object - If a private half is given, both encryption and decryption are possible. - If a public half is given, only encryption is possible. - randfunc : callable - Function that returns random bytes. - """ - - self._key = key - self._randfunc = randfunc - - def can_encrypt(self): - """Return True if this cipher object can be used for encryption.""" - return self._key.can_encrypt() - - def can_decrypt(self): - """Return True if this cipher object can be used for decryption.""" - return self._key.can_decrypt() - - def encrypt(self, message): - """Produce the PKCS#1 v1.5 encryption of a message. - - This function is named ``RSAES-PKCS1-V1_5-ENCRYPT``, and it is specified in - `section 7.2.1 of RFC8017 - `_. - - :param message: - The message to encrypt, also known as plaintext. It can be of - variable length, but not longer than the RSA modulus (in bytes) minus 11. - :type message: bytes/bytearray/memoryview - - :Returns: A byte string, the ciphertext in which the message is encrypted. - It is as long as the RSA modulus (in bytes). - - :Raises ValueError: - If the RSA key length is not sufficiently long to deal with the given - message. 
- """ - - # See 7.2.1 in RFC8017 - k = self._key.size_in_bytes() - mLen = len(message) - - # Step 1 - if mLen > k - 11: - raise ValueError("Plaintext is too long.") - # Step 2a - ps = [] - while len(ps) != k - mLen - 3: - new_byte = self._randfunc(1) - if bord(new_byte[0]) == 0x00: - continue - ps.append(new_byte) - ps = b"".join(ps) - # Step 2b - em = b'\x00\x02' + ps + b'\x00' + _copy_bytes(None, None, message) - # Step 3a (OS2IP) - em_int = bytes_to_long(em) - # Step 3b (RSAEP) - m_int = self._key._encrypt(em_int) - # Step 3c (I2OSP) - c = long_to_bytes(m_int, k) - return c - - def decrypt(self, ciphertext, sentinel, expected_pt_len=0): - r"""Decrypt a PKCS#1 v1.5 ciphertext. - - This is the function ``RSAES-PKCS1-V1_5-DECRYPT`` specified in - `section 7.2.2 of RFC8017 - `_. - - Args: - ciphertext (bytes/bytearray/memoryview): - The ciphertext that contains the message to recover. - sentinel (any type): - The object to return whenever an error is detected. - expected_pt_len (integer): - The length the plaintext is known to have, or 0 if unknown. - - Returns (byte string): - It is either the original message or the ``sentinel`` (in case of an error). - - .. warning:: - PKCS#1 v1.5 decryption is intrinsically vulnerable to timing - attacks (see `Bleichenbacher's`__ attack). - **Use PKCS#1 OAEP instead**. - - This implementation attempts to mitigate the risk - with some constant-time constructs. - However, they are not sufficient by themselves: the type of protocol you - implement and the way you handle errors make a big difference. - - Specifically, you should make it very hard for the (malicious) - party that submitted the ciphertext to quickly understand if decryption - succeeded or not. - - To this end, it is recommended that your protocol only encrypts - plaintexts of fixed length (``expected_pt_len``), - that ``sentinel`` is a random byte string of the same length, - and that processing continues for as long - as possible even if ``sentinel`` is returned (i.e. in case of - incorrect decryption). - - .. __: https://dx.doi.org/10.1007/BFb0055716 - """ - - # See 7.2.2 in RFC8017 - k = self._key.size_in_bytes() - - # Step 1 - if len(ciphertext) != k: - raise ValueError("Ciphertext with incorrect length (not %d bytes)" % k) - - # Step 2a (O2SIP) - ct_int = bytes_to_long(ciphertext) - - # Step 2b (RSADP) and Step 2c (I2OSP) - em = self._key._decrypt_to_bytes(ct_int) - - # Step 3 (not constant time when the sentinel is not a byte string) - output = bytes(bytearray(k)) - if not is_bytes(sentinel) or len(sentinel) > k: - size = pkcs1_decode(em, b'', expected_pt_len, output) - if size < 0: - return sentinel - else: - return output[size:] - - # Step 3 (somewhat constant time) - size = pkcs1_decode(em, sentinel, expected_pt_len, output) - return output[size:] - - -def new(key, randfunc=None): - """Create a cipher for performing PKCS#1 v1.5 encryption or decryption. - - :param key: - The key to use to encrypt or decrypt the message. This is a `Cryptodome.PublicKey.RSA` object. - Decryption is only possible if *key* is a private RSA key. - :type key: RSA key object - - :param randfunc: - Function that return random bytes. - The default is :func:`Cryptodome.Random.get_random_bytes`. - :type randfunc: callable - - :returns: A cipher object `PKCS115_Cipher`. 
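The decrypt() warning above is easiest to see in code: encrypt a fixed-length secret and pass a random sentinel of the same length, so the caller cannot tell a padding failure from a bad session key. A sketch under those assumptions, with illustrative values:

    from Cryptodome.PublicKey import RSA
    from Cryptodome.Cipher import PKCS1_v1_5
    from Cryptodome.Random import get_random_bytes

    rsa_key = RSA.generate(2048)
    session_key = get_random_bytes(16)             # fixed-length plaintext, e.g. an AES key

    ciphertext = PKCS1_v1_5.new(rsa_key.publickey()).encrypt(session_key)

    # on failure decrypt() returns the random sentinel instead of raising,
    # so downstream processing continues the same way in both cases
    sentinel = get_random_bytes(16)
    recovered = PKCS1_v1_5.new(rsa_key).decrypt(ciphertext, sentinel, expected_pt_len=16)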
- """ - - if randfunc is None: - randfunc = Random.get_random_bytes - return PKCS115_Cipher(key, randfunc) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_v1_5.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_v1_5.pyi deleted file mode 100644 index b69f509..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/PKCS1_v1_5.pyi +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Callable, Union, Any, Optional, TypeVar - -from Cryptodome.PublicKey.RSA import RsaKey - -Buffer = Union[bytes, bytearray, memoryview] -T = TypeVar('T') - -class PKCS115_Cipher: - def __init__(self, - key: RsaKey, - randfunc: Callable[[int], bytes]) -> None: ... - def can_encrypt(self) -> bool: ... - def can_decrypt(self) -> bool: ... - def encrypt(self, message: Buffer) -> bytes: ... - def decrypt(self, ciphertext: Buffer, - sentinel: T, - expected_pt_len: Optional[int] = ...) -> Union[bytes, T]: ... - -def new(key: RsaKey, - randfunc: Optional[Callable[[int], bytes]] = ...) -> PKCS115_Cipher: ... diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/Salsa20.py b/venv/Lib/site-packages/Cryptodome/Cipher/Salsa20.py deleted file mode 100644 index 79e6701..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/Salsa20.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/Salsa20.py : Salsa20 stream cipher (http://cr.yp.to/snuffle.html) -# -# Contributed by Fabrizio Tarizzo . -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import _copy_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - create_string_buffer, - get_raw_buffer, VoidPointer, - SmartPointer, c_size_t, - c_uint8_ptr, is_writeable_buffer) - -from Cryptodome.Random import get_random_bytes - -_raw_salsa20_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._Salsa20", - """ - int Salsa20_stream_init(uint8_t *key, size_t keylen, - uint8_t *nonce, size_t nonce_len, - void **pSalsaState); - int Salsa20_stream_destroy(void *salsaState); - int Salsa20_stream_encrypt(void *salsaState, - const uint8_t in[], - uint8_t out[], size_t len); - """) - - -class Salsa20Cipher: - """Salsa20 cipher object. Do not create it directly. Use :py:func:`new` - instead. 
- - :var nonce: The nonce with length 8 - :vartype nonce: byte string - """ - - def __init__(self, key, nonce): - """Initialize a Salsa20 cipher object - - See also `new()` at the module level.""" - - if len(key) not in key_size: - raise ValueError("Incorrect key length for Salsa20 (%d bytes)" % len(key)) - - if len(nonce) != 8: - raise ValueError("Incorrect nonce length for Salsa20 (%d bytes)" % - len(nonce)) - - self.nonce = _copy_bytes(None, None, nonce) - - self._state = VoidPointer() - result = _raw_salsa20_lib.Salsa20_stream_init( - c_uint8_ptr(key), - c_size_t(len(key)), - c_uint8_ptr(nonce), - c_size_t(len(nonce)), - self._state.address_of()) - if result: - raise ValueError("Error %d instantiating a Salsa20 cipher") - self._state = SmartPointer(self._state.get(), - _raw_salsa20_lib.Salsa20_stream_destroy) - - self.block_size = 1 - self.key_size = len(key) - - def encrypt(self, plaintext, output=None): - """Encrypt a piece of data. - - Args: - plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. - Keyword Args: - output(bytes/bytearray/memoryview): The location where the ciphertext - is written to. If ``None``, the ciphertext is returned. - Returns: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = _raw_salsa20_lib.Salsa20_stream_encrypt( - self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - raise ValueError("Error %d while encrypting with Salsa20" % result) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt a piece of data. - - Args: - ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. - Keyword Args: - output(bytes/bytearray/memoryview): The location where the plaintext - is written to. If ``None``, the plaintext is returned. - Returns: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. - """ - - try: - return self.encrypt(ciphertext, output=output) - except ValueError as e: - raise ValueError(str(e).replace("enc", "dec")) - - -def new(key, nonce=None): - """Create a new Salsa20 cipher - - :keyword key: The secret key to use. It must be 16 or 32 bytes long. - :type key: bytes/bytearray/memoryview - - :keyword nonce: - A value that must never be reused for any other encryption - done with this key. It must be 8 bytes long. - - If not provided, a random byte string will be generated (you can read - it back via the ``nonce`` attribute of the returned object). 
- :type nonce: bytes/bytearray/memoryview - - :Return: a :class:`Cryptodome.Cipher.Salsa20.Salsa20Cipher` object - """ - - if nonce is None: - nonce = get_random_bytes(8) - - return Salsa20Cipher(key, nonce) - -# Size of a data block (in bytes) -block_size = 1 - -# Size of a key (in bytes) -key_size = (16, 32) - diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/Salsa20.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/Salsa20.pyi deleted file mode 100644 index cf8690e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/Salsa20.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Union, Tuple, Optional, overload, Optional - -Buffer = bytes|bytearray|memoryview - -class Salsa20Cipher: - nonce: bytes - block_size: int - key_size: int - - def __init__(self, - key: Buffer, - nonce: Buffer) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - -def new(key: Buffer, nonce: Optional[Buffer] = ...) -> Salsa20Cipher: ... - -block_size: int -key_size: Tuple[int, int] - diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_ARC4.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_ARC4.pyd deleted file mode 100644 index 37e083a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_ARC4.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_EKSBlowfish.py b/venv/Lib/site-packages/Cryptodome/Cipher/_EKSBlowfish.py deleted file mode 100644 index c1c3249..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_EKSBlowfish.py +++ /dev/null @@ -1,131 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2019, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
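Salsa20, removed above, mirrors the ChaCha20 interface but with a fixed 8-byte nonce; a short sketch with made-up values:

    from Cryptodome.Cipher import Salsa20
    from Cryptodome.Random import get_random_bytes

    key = get_random_bytes(32)                     # 16 or 32 bytes
    cipher = Salsa20.new(key)                      # an 8-byte nonce is generated automatically
    ciphertext = cipher.encrypt(b"stream me")

    decipher = Salsa20.new(key, nonce=cipher.nonce)
    assert decipher.decrypt(ciphertext) == b"stream me"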
-# =================================================================== - -import sys - -from Cryptodome.Cipher import _create_cipher -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, c_size_t, - c_uint8_ptr, c_uint) - -_raw_blowfish_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._raw_eksblowfish", - """ - int EKSBlowfish_start_operation(const uint8_t key[], - size_t key_len, - const uint8_t salt[16], - size_t salt_len, - unsigned cost, - unsigned invert, - void **pResult); - int EKSBlowfish_encrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int EKSBlowfish_decrypt(const void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int EKSBlowfish_stop_operation(void *state); - """ - ) - - -def _create_base_cipher(dict_parameters): - """This method instantiates and returns a smart pointer to - a low-level base cipher. It will absorb named parameters in - the process.""" - - try: - key = dict_parameters.pop("key") - salt = dict_parameters.pop("salt") - cost = dict_parameters.pop("cost") - except KeyError as e: - raise TypeError("Missing EKSBlowfish parameter: " + str(e)) - invert = dict_parameters.pop("invert", True) - - if len(key) not in key_size: - raise ValueError("Incorrect EKSBlowfish key length (%d bytes)" % len(key)) - - start_operation = _raw_blowfish_lib.EKSBlowfish_start_operation - stop_operation = _raw_blowfish_lib.EKSBlowfish_stop_operation - - void_p = VoidPointer() - result = start_operation(c_uint8_ptr(key), - c_size_t(len(key)), - c_uint8_ptr(salt), - c_size_t(len(salt)), - c_uint(cost), - c_uint(int(invert)), - void_p.address_of()) - if result: - raise ValueError("Error %X while instantiating the EKSBlowfish cipher" - % result) - return SmartPointer(void_p.get(), stop_operation) - - -def new(key, mode, salt, cost, invert): - """Create a new EKSBlowfish cipher - - Args: - - key (bytes, bytearray, memoryview): - The secret key to use in the symmetric cipher. - Its length can vary from 0 to 72 bytes. - - mode (one of the supported ``MODE_*`` constants): - The chaining mode to use for encryption or decryption. - - salt (bytes, bytearray, memoryview): - The salt that bcrypt uses to thwart rainbow table attacks - - cost (integer): - The complexity factor in bcrypt - - invert (bool): - If ``False``, in the inner loop use ``ExpandKey`` first over the salt - and then over the key, as defined in - the `original bcrypt specification `_. - If ``True``, reverse the order, as in the first implementation of - `bcrypt` in OpenBSD. - - :Return: an EKSBlowfish object - """ - - kwargs = { 'salt':salt, 'cost':cost, 'invert':invert } - return _create_cipher(sys.modules[__name__], key, mode, **kwargs) - - -MODE_ECB = 1 - -# Size of a data block (in bytes) -block_size = 8 -# Size of a key (in bytes) -key_size = range(0, 72 + 1) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_EKSBlowfish.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_EKSBlowfish.pyi deleted file mode 100644 index 49c8448..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_EKSBlowfish.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Union, Iterable - -from Cryptodome.Cipher._mode_ecb import EcbMode - -MODE_ECB: int - -Buffer = Union[bytes, bytearray, memoryview] - -def new(key: Buffer, - mode: int, - salt: Buffer, - cost: int) -> EcbMode: ... 
- -block_size: int -key_size: Iterable[int] diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_Salsa20.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_Salsa20.pyd deleted file mode 100644 index 1f77d38..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_Salsa20.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__init__.py b/venv/Lib/site-packages/Cryptodome/Cipher/__init__.py deleted file mode 100644 index 8823711..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/__init__.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# A block cipher is instantiated as a combination of: -# 1. A base cipher (such as AES) -# 2. A mode of operation (such as CBC) -# -# Both items are implemented as C modules. -# -# The API of #1 is (replace "AES" with the name of the actual cipher): -# - AES_start_operaion(key) --> base_cipher_state -# - AES_encrypt(base_cipher_state, in, out, length) -# - AES_decrypt(base_cipher_state, in, out, length) -# - AES_stop_operation(base_cipher_state) -# -# Where base_cipher_state is AES_State, a struct with BlockBase (set of -# pointers to encrypt/decrypt/stop) followed by cipher-specific data. -# -# The API of #2 is (replace "CBC" with the name of the actual mode): -# - CBC_start_operation(base_cipher_state) --> mode_state -# - CBC_encrypt(mode_state, in, out, length) -# - CBC_decrypt(mode_state, in, out, length) -# - CBC_stop_operation(mode_state) -# -# where mode_state is a a pointer to base_cipher_state plus mode-specific data. - -def _create_cipher(factory, key, mode, *args, **kwargs): - - kwargs["key"] = key - - if args: - if mode in (8, 9, 10, 11, 12): - if len(args) > 1: - raise TypeError("Too many arguments for this mode") - kwargs["nonce"] = args[0] - elif mode in (2, 3, 5, 7): - if len(args) > 1: - raise TypeError("Too many arguments for this mode") - kwargs["IV"] = args[0] - elif mode == 6: - if len(args) > 0: - raise TypeError("Too many arguments for this mode") - elif mode == 1: - raise TypeError("IV is not meaningful for the ECB mode") - - res = None - extra_modes = kwargs.pop("add_aes_modes", False) - - if mode == 1: - from Cryptodome.Cipher._mode_ecb import _create_ecb_cipher - res = _create_ecb_cipher(factory, **kwargs) - elif mode == 2: - from Cryptodome.Cipher._mode_cbc import _create_cbc_cipher - res = _create_cbc_cipher(factory, **kwargs) - elif mode == 3: - from Cryptodome.Cipher._mode_cfb import _create_cfb_cipher - res = _create_cfb_cipher(factory, **kwargs) - elif mode == 5: - from Cryptodome.Cipher._mode_ofb import _create_ofb_cipher - res = _create_ofb_cipher(factory, **kwargs) - elif mode == 6: - from Cryptodome.Cipher._mode_ctr import _create_ctr_cipher - res = _create_ctr_cipher(factory, **kwargs) - elif mode == 7: - from Cryptodome.Cipher._mode_openpgp import _create_openpgp_cipher - res = _create_openpgp_cipher(factory, **kwargs) - elif mode == 9: - from Cryptodome.Cipher._mode_eax import _create_eax_cipher - res = _create_eax_cipher(factory, **kwargs) - elif extra_modes: - if mode == 8: - from Cryptodome.Cipher._mode_ccm import _create_ccm_cipher - res = _create_ccm_cipher(factory, **kwargs) - elif mode == 10: - from Cryptodome.Cipher._mode_siv import _create_siv_cipher - res = _create_siv_cipher(factory, **kwargs) - elif mode == 11: - from Cryptodome.Cipher._mode_gcm import _create_gcm_cipher - res = _create_gcm_cipher(factory, **kwargs) - elif mode == 12: - from Cryptodome.Cipher._mode_ocb import _create_ocb_cipher - res = _create_ocb_cipher(factory, **kwargs) - elif mode == 13: - from 
Cryptodome.Cipher._mode_kw import _create_kw_cipher - res = _create_kw_cipher(factory, **kwargs) - elif mode == 14: - from Cryptodome.Cipher._mode_kwp import _create_kwp_cipher - res = _create_kwp_cipher(factory, **kwargs) - - if res is None: - raise ValueError("Mode not supported") - - return res diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__init__.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/__init__.pyi deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/AES.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/AES.cpython-312.pyc deleted file mode 100644 index 095bc10..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/AES.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ARC2.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ARC2.cpython-312.pyc deleted file mode 100644 index 95b4e72..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ARC2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ARC4.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ARC4.cpython-312.pyc deleted file mode 100644 index a3964f9..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ARC4.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/Blowfish.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/Blowfish.cpython-312.pyc deleted file mode 100644 index 6b4bf2b..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/Blowfish.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/CAST.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/CAST.cpython-312.pyc deleted file mode 100644 index ffe59d0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/CAST.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ChaCha20.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ChaCha20.cpython-312.pyc deleted file mode 100644 index 9ffc197..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ChaCha20.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ChaCha20_Poly1305.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ChaCha20_Poly1305.cpython-312.pyc deleted file mode 100644 index add3c3d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/ChaCha20_Poly1305.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/DES.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/DES.cpython-312.pyc deleted file mode 100644 index f689b1d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/DES.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/DES3.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/DES3.cpython-312.pyc deleted file mode 100644 index 8b18aea..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/DES3.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/PKCS1_OAEP.cpython-312.pyc 
b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/PKCS1_OAEP.cpython-312.pyc deleted file mode 100644 index 5b4f0ae..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/PKCS1_OAEP.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/PKCS1_v1_5.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/PKCS1_v1_5.cpython-312.pyc deleted file mode 100644 index 2fb597c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/PKCS1_v1_5.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/Salsa20.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/Salsa20.cpython-312.pyc deleted file mode 100644 index 2f4ee27..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/Salsa20.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_EKSBlowfish.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_EKSBlowfish.cpython-312.pyc deleted file mode 100644 index c6a7437..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_EKSBlowfish.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c643bce..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_cbc.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_cbc.cpython-312.pyc deleted file mode 100644 index 347ef2d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_cbc.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ccm.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ccm.cpython-312.pyc deleted file mode 100644 index 009dd33..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ccm.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_cfb.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_cfb.cpython-312.pyc deleted file mode 100644 index 49c11ca..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_cfb.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ctr.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ctr.cpython-312.pyc deleted file mode 100644 index 52a5049..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ctr.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_eax.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_eax.cpython-312.pyc deleted file mode 100644 index 20323e0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_eax.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ecb.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ecb.cpython-312.pyc deleted file mode 100644 index 4d42133..0000000 Binary files 
a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ecb.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_gcm.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_gcm.cpython-312.pyc deleted file mode 100644 index 76632a0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_gcm.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_kw.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_kw.cpython-312.pyc deleted file mode 100644 index 43d97c0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_kw.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_kwp.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_kwp.cpython-312.pyc deleted file mode 100644 index 62f6084..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_kwp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ocb.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ocb.cpython-312.pyc deleted file mode 100644 index 80415c4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ocb.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ofb.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ofb.cpython-312.pyc deleted file mode 100644 index 94e6a54..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_ofb.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_openpgp.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_openpgp.cpython-312.pyc deleted file mode 100644 index ff2f1de..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_openpgp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_siv.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_siv.cpython-312.pyc deleted file mode 100644 index 509d879..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_mode_siv.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_pkcs1_oaep_decode.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_pkcs1_oaep_decode.cpython-312.pyc deleted file mode 100644 index d0f023d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/__pycache__/_pkcs1_oaep_decode.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_chacha20.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_chacha20.pyd deleted file mode 100644 index 6d16bb1..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_chacha20.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cbc.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cbc.py deleted file mode 100644 index 94d02e7..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cbc.py +++ /dev/null @@ -1,293 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Ciphertext Block Chaining (CBC) mode. -""" - -__all__ = ['CbcMode'] - -from Cryptodome.Util.py3compat import _copy_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr, - is_writeable_buffer) - -from Cryptodome.Random import get_random_bytes - -raw_cbc_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_cbc", """ - int CBC_start_operation(void *cipher, - const uint8_t iv[], - size_t iv_len, - void **pResult); - int CBC_encrypt(void *cbcState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CBC_decrypt(void *cbcState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CBC_stop_operation(void *state); - """ - ) - - -class CbcMode(object): - """*Cipher-Block Chaining (CBC)*. - - Each of the ciphertext blocks depends on the current - and all previous plaintext blocks. - - An Initialization Vector (*IV*) is required. - - See `NIST SP800-38A`_ , Section 6.2 . - - .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf - - :undocumented: __init__ - """ - - def __init__(self, block_cipher, iv): - """Create a new block cipher, configured in CBC mode. - - :Parameters: - block_cipher : C pointer - A smart pointer to the low-level block cipher instance. - - iv : bytes/bytearray/memoryview - The initialization vector to use for encryption or decryption. - It is as long as the cipher block. - - **The IV must be unpredictable**. Ideally it is picked randomly. - - Reusing the *IV* for encryptions performed with the same key - compromises confidentiality. 
- """ - - self._state = VoidPointer() - result = raw_cbc_lib.CBC_start_operation(block_cipher.get(), - c_uint8_ptr(iv), - c_size_t(len(iv)), - self._state.address_of()) - if result: - raise ValueError("Error %d while instantiating the CBC mode" - % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the cipher mode - self._state = SmartPointer(self._state.get(), - raw_cbc_lib.CBC_stop_operation) - - # Memory allocated for the underlying block cipher is now owed - # by the cipher mode - block_cipher.release() - - self.block_size = len(iv) - """The block size of the underlying cipher, in bytes.""" - - self.iv = _copy_bytes(None, None, iv) - """The Initialization Vector originally used to create the object. - The value does not change.""" - - self.IV = self.iv - """Alias for `iv`""" - - self._next = ["encrypt", "decrypt"] - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - That also means that you cannot reuse an object for encrypting - or decrypting other data with the same key. - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - Its lenght must be multiple of the cipher block size. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() cannot be called after decrypt()") - self._next = ["encrypt"] - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_cbc_lib.CBC_encrypt(self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - if result == 3: - raise ValueError("Data must be padded to %d byte boundary in CBC mode" % self.block_size) - raise ValueError("Error %d while encrypting in CBC mode" % result) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. 
- Its length must be multiple of the cipher block size. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() cannot be called after encrypt()") - self._next = ["decrypt"] - - if output is None: - plaintext = create_string_buffer(len(ciphertext)) - else: - plaintext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(ciphertext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_cbc_lib.CBC_decrypt(self._state.get(), - c_uint8_ptr(ciphertext), - c_uint8_ptr(plaintext), - c_size_t(len(ciphertext))) - if result: - if result == 3: - raise ValueError("Data must be padded to %d byte boundary in CBC mode" % self.block_size) - raise ValueError("Error %d while decrypting in CBC mode" % result) - - if output is None: - return get_raw_buffer(plaintext) - else: - return None - - -def _create_cbc_cipher(factory, **kwargs): - """Instantiate a cipher object that performs CBC encryption/decryption. - - :Parameters: - factory : module - The underlying block cipher, a module from ``Cryptodome.Cipher``. - - :Keywords: - iv : bytes/bytearray/memoryview - The IV to use for CBC. - - IV : bytes/bytearray/memoryview - Alias for ``iv``. - - Any other keyword will be passed to the underlying block cipher. - See the relevant documentation for details (at least ``key`` will need - to be present). - """ - - cipher_state = factory._create_base_cipher(kwargs) - iv = kwargs.pop("IV", None) - IV = kwargs.pop("iv", None) - - if (None, None) == (iv, IV): - iv = get_random_bytes(factory.block_size) - if iv is not None: - if IV is not None: - raise TypeError("You must either use 'iv' or 'IV', not both") - else: - iv = IV - - if len(iv) != factory.block_size: - raise ValueError("Incorrect IV length (it must be %d bytes long)" % - factory.block_size) - - if kwargs: - raise TypeError("Unknown parameters for CBC: %s" % str(kwargs)) - - return CbcMode(cipher_state, iv) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cbc.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cbc.pyi deleted file mode 100644 index 526632e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cbc.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Union, overload - -from Cryptodome.Util._raw_api import SmartPointer - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['CbcMode'] - -class CbcMode(object): - block_size: int - iv: Buffer - IV: Buffer - - def __init__(self, - block_cipher: SmartPointer, - iv: Buffer) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... 
- diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ccm.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ccm.py deleted file mode 100644 index ac27221..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ccm.py +++ /dev/null @@ -1,671 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Counter with CBC-MAC (CCM) mode. -""" - -__all__ = ['CcmMode'] - -import struct -from binascii import unhexlify - -from Cryptodome.Util.py3compat import (byte_string, bord, - _copy_bytes) -from Cryptodome.Util._raw_api import is_writeable_buffer - -from Cryptodome.Util.strxor import strxor -from Cryptodome.Util.number import long_to_bytes - -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Random import get_random_bytes - - -def enum(**enums): - return type('Enum', (), enums) - -MacStatus = enum(NOT_STARTED=0, PROCESSING_AUTH_DATA=1, PROCESSING_PLAINTEXT=2) - - -class CCMMessageTooLongError(ValueError): - pass - - -class CcmMode(object): - """Counter with CBC-MAC (CCM). - - This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. - It provides both confidentiality and authenticity. - - The header of the message may be left in the clear, if needed, and it will - still be subject to authentication. The decryption step tells the receiver - if the message comes from a source that really knowns the secret key. - Additionally, decryption detects if any part of the message - including the - header - has been modified or corrupted. - - This mode requires a nonce. The nonce shall never repeat for two - different messages encrypted with the same key, but it does not need - to be random. - Note that there is a trade-off between the size of the nonce and the - maximum size of a single message you can encrypt. - - It is important to use a large nonce if the key is reused across several - messages and the nonce is chosen randomly. - - It is acceptable to us a short nonce if the key is only used a few times or - if the nonce is taken from a counter. - - The following table shows the trade-off when the nonce is chosen at - random. 
The column on the left shows how many messages it takes - for the keystream to repeat **on average**. In practice, you will want to - stop using the key way before that. - - +--------------------+---------------+-------------------+ - | Avg. # of messages | nonce | Max. message | - | before keystream | size | size | - | repeats | (bytes) | (bytes) | - +====================+===============+===================+ - | 2^52 | 13 | 64K | - +--------------------+---------------+-------------------+ - | 2^48 | 12 | 16M | - +--------------------+---------------+-------------------+ - | 2^44 | 11 | 4G | - +--------------------+---------------+-------------------+ - | 2^40 | 10 | 1T | - +--------------------+---------------+-------------------+ - | 2^36 | 9 | 64P | - +--------------------+---------------+-------------------+ - | 2^32 | 8 | 16E | - +--------------------+---------------+-------------------+ - - This mode is only available for ciphers that operate on 128 bits blocks - (e.g. AES but not TDES). - - See `NIST SP800-38C`_ or RFC3610_. - - .. _`NIST SP800-38C`: http://csrc.nist.gov/publications/nistpubs/800-38C/SP800-38C.pdf - .. _RFC3610: https://tools.ietf.org/html/rfc3610 - .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html - - :undocumented: __init__ - """ - - def __init__(self, factory, key, nonce, mac_len, msg_len, assoc_len, - cipher_params): - - self.block_size = factory.block_size - """The block size of the underlying cipher, in bytes.""" - - self.nonce = _copy_bytes(None, None, nonce) - """The nonce used for this cipher instance""" - - self._factory = factory - self._key = _copy_bytes(None, None, key) - self._mac_len = mac_len - self._msg_len = msg_len - self._assoc_len = assoc_len - self._cipher_params = cipher_params - - self._mac_tag = None # Cache for MAC tag - - if self.block_size != 16: - raise ValueError("CCM mode is only available for ciphers" - " that operate on 128 bits blocks") - - # MAC tag length (Tlen) - if mac_len not in (4, 6, 8, 10, 12, 14, 16): - raise ValueError("Parameter 'mac_len' must be even" - " and in the range 4..16 (not %d)" % mac_len) - - # Nonce value - if not (7 <= len(nonce) <= 13): - raise ValueError("Length of parameter 'nonce' must be" - " in the range 7..13 bytes") - - # Message length (if known already) - q = 15 - len(nonce) # length of Q, the encoded message length - if msg_len and len(long_to_bytes(msg_len)) > q: - raise CCMMessageTooLongError("Message too long for a %u-byte nonce" % len(nonce)) - - # Create MAC object (the tag will be the last block - # bytes worth of ciphertext) - self._mac = self._factory.new(key, - factory.MODE_CBC, - iv=b'\x00' * 16, - **cipher_params) - self._mac_status = MacStatus.NOT_STARTED - self._t = None - - # Allowed transitions after initialization - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - # Cumulative lengths - self._cumul_assoc_len = 0 - self._cumul_msg_len = 0 - - # Cache for unaligned associated data/plaintext. - # This is a list with byte strings, but when the MAC starts, - # it will become a binary string no longer than the block size. 
- self._cache = [] - - # Start CTR cipher, by formatting the counter (A.3) - self._cipher = self._factory.new(key, - self._factory.MODE_CTR, - nonce=struct.pack("B", q - 1) + self.nonce, - **cipher_params) - - # S_0, step 6 in 6.1 for j=0 - self._s_0 = self._cipher.encrypt(b'\x00' * 16) - - # Try to start the MAC - if None not in (assoc_len, msg_len): - self._start_mac() - - def _start_mac(self): - - assert(self._mac_status == MacStatus.NOT_STARTED) - assert(None not in (self._assoc_len, self._msg_len)) - assert(isinstance(self._cache, list)) - - # Formatting control information and nonce (A.2.1) - q = 15 - len(self.nonce) # length of Q, the encoded message length (2..8) - flags = (self._assoc_len > 0) << 6 - flags |= ((self._mac_len - 2) // 2) << 3 - flags |= q - 1 - b_0 = struct.pack("B", flags) + self.nonce + long_to_bytes(self._msg_len, q) - - # Formatting associated data (A.2.2) - # Encoded 'a' is concatenated with the associated data 'A' - assoc_len_encoded = b'' - if self._assoc_len > 0: - if self._assoc_len < (2 ** 16 - 2 ** 8): - enc_size = 2 - elif self._assoc_len < (2 ** 32): - assoc_len_encoded = b'\xFF\xFE' - enc_size = 4 - else: - assoc_len_encoded = b'\xFF\xFF' - enc_size = 8 - assoc_len_encoded += long_to_bytes(self._assoc_len, enc_size) - - # b_0 and assoc_len_encoded must be processed first - self._cache.insert(0, b_0) - self._cache.insert(1, assoc_len_encoded) - - # Process all the data cached so far - first_data_to_mac = b"".join(self._cache) - self._cache = b"" - self._mac_status = MacStatus.PROCESSING_AUTH_DATA - self._update(first_data_to_mac) - - def _pad_cache_and_update(self): - - assert(self._mac_status != MacStatus.NOT_STARTED) - assert(len(self._cache) < self.block_size) - - # Associated data is concatenated with the least number - # of zero bytes (possibly none) to reach alignment to - # the 16 byte boundary (A.2.3) - len_cache = len(self._cache) - if len_cache > 0: - self._update(b'\x00' * (self.block_size - len_cache)) - - def update(self, assoc_data): - """Protect associated data - - If there is any associated data, the caller has to invoke - this function one or more times, before using - ``decrypt`` or ``encrypt``. - - By *associated data* it is meant any data (e.g. packet headers) that - will not be encrypted and will be transmitted in the clear. - However, the receiver is still able to detect any modification to it. - In CCM, the *associated data* is also called - *additional authenticated data* (AAD). - - If there is no associated data, this method must not be called. - - The caller may split associated data in segments of any size, and - invoke this method multiple times, each time with the next segment. - - :Parameters: - assoc_data : bytes/bytearray/memoryview - A piece of associated data. There are no restrictions on its size. - """ - - if "update" not in self._next: - raise TypeError("update() can only be called" - " immediately after initialization") - - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - self._cumul_assoc_len += len(assoc_data) - if self._assoc_len is not None and \ - self._cumul_assoc_len > self._assoc_len: - raise ValueError("Associated data is too long") - - self._update(assoc_data) - return self - - def _update(self, assoc_data_pt=b""): - """Update the MAC with associated data or plaintext - (without FSM checks)""" - - # If MAC has not started yet, we just park the data into a list. - # If the data is mutable, we create a copy and store that instead. 
- if self._mac_status == MacStatus.NOT_STARTED: - if is_writeable_buffer(assoc_data_pt): - assoc_data_pt = _copy_bytes(None, None, assoc_data_pt) - self._cache.append(assoc_data_pt) - return - - assert(len(self._cache) < self.block_size) - - if len(self._cache) > 0: - filler = min(self.block_size - len(self._cache), - len(assoc_data_pt)) - self._cache += _copy_bytes(None, filler, assoc_data_pt) - assoc_data_pt = _copy_bytes(filler, None, assoc_data_pt) - - if len(self._cache) < self.block_size: - return - - # The cache is exactly one block - self._t = self._mac.encrypt(self._cache) - self._cache = b"" - - update_len = len(assoc_data_pt) // self.block_size * self.block_size - self._cache = _copy_bytes(update_len, None, assoc_data_pt) - if update_len > 0: - self._t = self._mac.encrypt(assoc_data_pt[:update_len])[-16:] - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - This method can be called only **once** if ``msg_len`` was - not passed at initialization. - - If ``msg_len`` was given, the data to encrypt can be broken - up in two or more pieces and `encrypt` can be called - multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext as ``bytes``. - Otherwise, ``None``. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() can only be called after" - " initialization or an update()") - self._next = ["encrypt", "digest"] - - # No more associated data allowed from now - if self._assoc_len is None: - assert(isinstance(self._cache, list)) - self._assoc_len = sum([len(x) for x in self._cache]) - if self._msg_len is not None: - self._start_mac() - else: - if self._cumul_assoc_len < self._assoc_len: - raise ValueError("Associated data is too short") - - # Only once piece of plaintext accepted if message length was - # not declared in advance - if self._msg_len is None: - q = 15 - len(self.nonce) - if len(long_to_bytes(len(plaintext))) > q: - raise CCMMessageTooLongError("Message too long for a %u-byte nonce" % len(self.nonce)) - - self._msg_len = len(plaintext) - self._start_mac() - self._next = ["digest"] - - self._cumul_msg_len += len(plaintext) - if self._cumul_msg_len > self._msg_len: - msg = "Message longer than declared for (%u bytes vs %u bytes" % \ - (self._cumul_msg_len, self._msg_len) - raise CCMMessageTooLongError(msg) - - if self._mac_status == MacStatus.PROCESSING_AUTH_DATA: - # Associated data is concatenated with the least number - # of zero bytes (possibly none) to reach alignment to - # the 16 byte boundary (A.2.3) - self._pad_cache_and_update() - self._mac_status = MacStatus.PROCESSING_PLAINTEXT - - self._update(plaintext) - return self._cipher.encrypt(plaintext, output=output) - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. 
- - This method can be called only **once** if ``msg_len`` was - not passed at initialization. - - If ``msg_len`` was given, the data to decrypt can be - broken up in two or more pieces and `decrypt` can be - called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() can only be called" - " after initialization or an update()") - self._next = ["decrypt", "verify"] - - # No more associated data allowed from now - if self._assoc_len is None: - assert(isinstance(self._cache, list)) - self._assoc_len = sum([len(x) for x in self._cache]) - if self._msg_len is not None: - self._start_mac() - else: - if self._cumul_assoc_len < self._assoc_len: - raise ValueError("Associated data is too short") - - # Only once piece of ciphertext accepted if message length was - # not declared in advance - if self._msg_len is None: - q = 15 - len(self.nonce) - if len(long_to_bytes(len(ciphertext))) > q: - raise CCMMessageTooLongError("Message too long for a %u-byte nonce" % len(self.nonce)) - - self._msg_len = len(ciphertext) - self._start_mac() - self._next = ["verify"] - - self._cumul_msg_len += len(ciphertext) - if self._cumul_msg_len > self._msg_len: - msg = "Message longer than declared for (%u bytes vs %u bytes" % \ - (self._cumul_msg_len, self._msg_len) - raise CCMMessageTooLongError(msg) - - if self._mac_status == MacStatus.PROCESSING_AUTH_DATA: - # Associated data is concatenated with the least number - # of zero bytes (possibly none) to reach alignment to - # the 16 byte boundary (A.2.3) - self._pad_cache_and_update() - self._mac_status = MacStatus.PROCESSING_PLAINTEXT - - # Encrypt is equivalent to decrypt with the CTR mode - plaintext = self._cipher.encrypt(ciphertext, output=output) - if output is None: - self._update(plaintext) - else: - self._update(output) - return plaintext - - def digest(self): - """Compute the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method returns the MAC that shall be sent to the receiver, - together with the ciphertext. - - :Return: the MAC, as a byte string. 
- """ - - if "digest" not in self._next: - raise TypeError("digest() cannot be called when decrypting" - " or validating a message") - self._next = ["digest"] - return self._digest() - - def _digest(self): - if self._mac_tag: - return self._mac_tag - - if self._assoc_len is None: - assert(isinstance(self._cache, list)) - self._assoc_len = sum([len(x) for x in self._cache]) - if self._msg_len is not None: - self._start_mac() - else: - if self._cumul_assoc_len < self._assoc_len: - raise ValueError("Associated data is too short") - - if self._msg_len is None: - self._msg_len = 0 - self._start_mac() - - if self._cumul_msg_len != self._msg_len: - raise ValueError("Message is too short") - - # Both associated data and payload are concatenated with the least - # number of zero bytes (possibly none) that align it to the - # 16 byte boundary (A.2.2 and A.2.3) - self._pad_cache_and_update() - - # Step 8 in 6.1 (T xor MSB_Tlen(S_0)) - self._mac_tag = strxor(self._t, self._s_0)[:self._mac_len] - - return self._mac_tag - - def hexdigest(self): - """Compute the *printable* MAC tag. - - This method is like `digest`. - - :Return: the MAC, as a hexadecimal string. - """ - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def verify(self, received_mac_tag): - """Validate the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method checks if the decrypted message is indeed valid - (that is, if the key is correct) and it has not been - tampered with while in transit. - - :Parameters: - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "verify" not in self._next: - raise TypeError("verify() cannot be called" - " when encrypting a message") - self._next = ["verify"] - - self._digest() - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Validate the *printable* MAC tag. - - This method is like `verify`. - - :Parameters: - hex_mac_tag : string - This is the *printable* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - self.verify(unhexlify(hex_mac_tag)) - - def encrypt_and_digest(self, plaintext, output=None): - """Perform encrypt() and digest() in one step. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - a tuple with two items: - - - the ciphertext, as ``bytes`` - - the MAC tag, as ``bytes`` - - The first item becomes ``None`` when the ``output`` parameter - specified a location for the result. - """ - - return self.encrypt(plaintext, output=output), self.digest() - - def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): - """Perform decrypt() and verify() in one step. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. 
- :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` - parameter specified a location for the result. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - plaintext = self.decrypt(ciphertext, output=output) - self.verify(received_mac_tag) - return plaintext - - -def _create_ccm_cipher(factory, **kwargs): - """Create a new block cipher, configured in CCM mode. - - :Parameters: - factory : module - A symmetric cipher module from `Cryptodome.Cipher` (like - `Cryptodome.Cipher.AES`). - - :Keywords: - key : bytes/bytearray/memoryview - The secret key to use in the symmetric cipher. - - nonce : bytes/bytearray/memoryview - A value that must never be reused for any other encryption. - - Its length must be in the range ``[7..13]``. - 11 or 12 bytes are reasonable values in general. Bear in - mind that with CCM there is a trade-off between nonce length and - maximum message size. - - If not specified, a 11 byte long random string is used. - - mac_len : integer - Length of the MAC, in bytes. It must be even and in - the range ``[4..16]``. The default is 16. - - msg_len : integer - Length of the message to (de)cipher. - If not specified, ``encrypt`` or ``decrypt`` may only be called once. - - assoc_len : integer - Length of the associated data. - If not specified, all data is internally buffered. - """ - - try: - key = key = kwargs.pop("key") - except KeyError as e: - raise TypeError("Missing parameter: " + str(e)) - - nonce = kwargs.pop("nonce", None) # N - if nonce is None: - nonce = get_random_bytes(11) - mac_len = kwargs.pop("mac_len", factory.block_size) - msg_len = kwargs.pop("msg_len", None) # p - assoc_len = kwargs.pop("assoc_len", None) # a - cipher_params = dict(kwargs) - - return CcmMode(factory, key, nonce, mac_len, msg_len, - assoc_len, cipher_params) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ccm.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ccm.pyi deleted file mode 100644 index 98af96a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ccm.pyi +++ /dev/null @@ -1,52 +0,0 @@ -from types import ModuleType -from typing import Union, overload, Dict, Tuple, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['CcmMode'] - - -class CCMMessageTooLongError(ValueError): - pass - - -class CcmMode(object): - block_size: int - nonce: bytes - - def __init__(self, - factory: ModuleType, - key: Buffer, - nonce: Buffer, - mac_len: int, - msg_len: Optional[int], - assoc_len: Optional[int], - cipher_params: Dict) -> None: ... - - def update(self, assoc_data: Buffer) -> CcmMode: ... - - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, received_mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - @overload - def encrypt_and_digest(self, - plaintext: Buffer) -> Tuple[bytes, bytes]: ... - @overload - def encrypt_and_digest(self, - plaintext: Buffer, - output: Buffer) -> Tuple[None, bytes]: ... 
- def decrypt_and_verify(self, - ciphertext: Buffer, - received_mac_tag: Buffer, - output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cfb.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cfb.py deleted file mode 100644 index 1b1b6c3..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cfb.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/mode_cfb.py : CFB mode -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -""" -Counter Feedback (CFB) mode. -""" - -__all__ = ['CfbMode'] - -from Cryptodome.Util.py3compat import _copy_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr, - is_writeable_buffer) - -from Cryptodome.Random import get_random_bytes - -raw_cfb_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_cfb",""" - int CFB_start_operation(void *cipher, - const uint8_t iv[], - size_t iv_len, - size_t segment_len, /* In bytes */ - void **pResult); - int CFB_encrypt(void *cfbState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CFB_decrypt(void *cfbState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CFB_stop_operation(void *state);""" - ) - - -class CfbMode(object): - """*Cipher FeedBack (CFB)*. - - This mode is similar to CFB, but it transforms - the underlying block cipher into a stream cipher. - - Plaintext and ciphertext are processed in *segments* - of **s** bits. The mode is therefore sometimes - labelled **s**-bit CFB. - - An Initialization Vector (*IV*) is required. - - See `NIST SP800-38A`_ , Section 6.3. - - .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf - - :undocumented: __init__ - """ - - def __init__(self, block_cipher, iv, segment_size): - """Create a new block cipher, configured in CFB mode. - - :Parameters: - block_cipher : C pointer - A smart pointer to the low-level block cipher instance. - - iv : bytes/bytearray/memoryview - The initialization vector to use for encryption or decryption. - It is as long as the cipher block. - - **The IV must be unpredictable**. Ideally it is picked randomly. - - Reusing the *IV* for encryptions performed with the same key - compromises confidentiality. - - segment_size : integer - The number of bytes the plaintext and ciphertext are segmented in. 
- """ - - self._state = VoidPointer() - result = raw_cfb_lib.CFB_start_operation(block_cipher.get(), - c_uint8_ptr(iv), - c_size_t(len(iv)), - c_size_t(segment_size), - self._state.address_of()) - if result: - raise ValueError("Error %d while instantiating the CFB mode" % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the cipher mode - self._state = SmartPointer(self._state.get(), - raw_cfb_lib.CFB_stop_operation) - - # Memory allocated for the underlying block cipher is now owed - # by the cipher mode - block_cipher.release() - - self.block_size = len(iv) - """The block size of the underlying cipher, in bytes.""" - - self.iv = _copy_bytes(None, None, iv) - """The Initialization Vector originally used to create the object. - The value does not change.""" - - self.IV = self.iv - """Alias for `iv`""" - - self._next = ["encrypt", "decrypt"] - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() cannot be called after decrypt()") - self._next = ["encrypt"] - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_cfb_lib.CFB_encrypt(self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - raise ValueError("Error %d while encrypting in CFB mode" % result) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. 
- """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() cannot be called after encrypt()") - self._next = ["decrypt"] - - if output is None: - plaintext = create_string_buffer(len(ciphertext)) - else: - plaintext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(ciphertext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_cfb_lib.CFB_decrypt(self._state.get(), - c_uint8_ptr(ciphertext), - c_uint8_ptr(plaintext), - c_size_t(len(ciphertext))) - if result: - raise ValueError("Error %d while decrypting in CFB mode" % result) - - if output is None: - return get_raw_buffer(plaintext) - else: - return None - - -def _create_cfb_cipher(factory, **kwargs): - """Instantiate a cipher object that performs CFB encryption/decryption. - - :Parameters: - factory : module - The underlying block cipher, a module from ``Cryptodome.Cipher``. - - :Keywords: - iv : bytes/bytearray/memoryview - The IV to use for CFB. - - IV : bytes/bytearray/memoryview - Alias for ``iv``. - - segment_size : integer - The number of bit the plaintext and ciphertext are segmented in. - If not present, the default is 8. - - Any other keyword will be passed to the underlying block cipher. - See the relevant documentation for details (at least ``key`` will need - to be present). - """ - - cipher_state = factory._create_base_cipher(kwargs) - - iv = kwargs.pop("IV", None) - IV = kwargs.pop("iv", None) - - if (None, None) == (iv, IV): - iv = get_random_bytes(factory.block_size) - if iv is not None: - if IV is not None: - raise TypeError("You must either use 'iv' or 'IV', not both") - else: - iv = IV - - if len(iv) != factory.block_size: - raise ValueError("Incorrect IV length (it must be %d bytes long)" % - factory.block_size) - - segment_size_bytes, rem = divmod(kwargs.pop("segment_size", 8), 8) - if segment_size_bytes == 0 or rem != 0: - raise ValueError("'segment_size' must be positive and multiple of 8 bits") - - if kwargs: - raise TypeError("Unknown parameters for CFB: %s" % str(kwargs)) - return CfbMode(cipher_state, iv, segment_size_bytes) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cfb.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cfb.pyi deleted file mode 100644 index 228e464..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_cfb.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Union, overload - -from Cryptodome.Util._raw_api import SmartPointer - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['CfbMode'] - - -class CfbMode(object): - block_size: int - iv: Buffer - IV: Buffer - - def __init__(self, - block_cipher: SmartPointer, - iv: Buffer, - segment_size: int) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... 
diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ctr.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ctr.py deleted file mode 100644 index 9ce357f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ctr.py +++ /dev/null @@ -1,393 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/mode_ctr.py : CTR mode -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -""" -Counter (CTR) mode. -""" - -__all__ = ['CtrMode'] - -import struct - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr, - is_writeable_buffer) - -from Cryptodome.Random import get_random_bytes -from Cryptodome.Util.py3compat import _copy_bytes, is_native_int -from Cryptodome.Util.number import long_to_bytes - -raw_ctr_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_ctr", """ - int CTR_start_operation(void *cipher, - uint8_t initialCounterBlock[], - size_t initialCounterBlock_len, - size_t prefix_len, - unsigned counter_len, - unsigned littleEndian, - void **pResult); - int CTR_encrypt(void *ctrState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CTR_decrypt(void *ctrState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int CTR_stop_operation(void *ctrState);""" - ) - - -class CtrMode(object): - """*CounTeR (CTR)* mode. - - This mode is very similar to ECB, in that - encryption of one block is done independently of all other blocks. - - Unlike ECB, the block *position* contributes to the encryption - and no information leaks about symbol frequency. - - Each message block is associated to a *counter* which - must be unique across all messages that get encrypted - with the same key (not just within the same message). - The counter is as big as the block size. - - Counters can be generated in several ways. The most - straightword one is to choose an *initial counter block* - (which can be made public, similarly to the *IV* for the - other modes) and increment its lowest **m** bits by one - (modulo *2^m*) for each block. In most cases, **m** is - chosen to be half the block size. - - See `NIST SP800-38A`_, Section 6.5 (for the mode) and - Appendix B (for how to manage the *initial counter block*). - - .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf - - :undocumented: __init__ - """ - - def __init__(self, block_cipher, initial_counter_block, - prefix_len, counter_len, little_endian): - """Create a new block cipher, configured in CTR mode. 
- - :Parameters: - block_cipher : C pointer - A smart pointer to the low-level block cipher instance. - - initial_counter_block : bytes/bytearray/memoryview - The initial plaintext to use to generate the key stream. - - It is as large as the cipher block, and it embeds - the initial value of the counter. - - This value must not be reused. - It shall contain a nonce or a random component. - Reusing the *initial counter block* for encryptions - performed with the same key compromises confidentiality. - - prefix_len : integer - The amount of bytes at the beginning of the counter block - that never change. - - counter_len : integer - The length in bytes of the counter embedded in the counter - block. - - little_endian : boolean - True if the counter in the counter block is an integer encoded - in little endian mode. If False, it is big endian. - """ - - if len(initial_counter_block) == prefix_len + counter_len: - self.nonce = _copy_bytes(None, prefix_len, initial_counter_block) - """Nonce; not available if there is a fixed suffix""" - - self._state = VoidPointer() - result = raw_ctr_lib.CTR_start_operation(block_cipher.get(), - c_uint8_ptr(initial_counter_block), - c_size_t(len(initial_counter_block)), - c_size_t(prefix_len), - counter_len, - little_endian, - self._state.address_of()) - if result: - raise ValueError("Error %X while instantiating the CTR mode" - % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the cipher mode - self._state = SmartPointer(self._state.get(), - raw_ctr_lib.CTR_stop_operation) - - # Memory allocated for the underlying block cipher is now owed - # by the cipher mode - block_cipher.release() - - self.block_size = len(initial_counter_block) - """The block size of the underlying cipher, in bytes.""" - - self._next = ["encrypt", "decrypt"] - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. 
- """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() cannot be called after decrypt()") - self._next = ["encrypt"] - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_ctr_lib.CTR_encrypt(self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - if result == 0x60002: - raise OverflowError("The counter has wrapped around in" - " CTR mode") - raise ValueError("Error %X while encrypting in CTR mode" % result) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() cannot be called after encrypt()") - self._next = ["decrypt"] - - if output is None: - plaintext = create_string_buffer(len(ciphertext)) - else: - plaintext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(ciphertext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_ctr_lib.CTR_decrypt(self._state.get(), - c_uint8_ptr(ciphertext), - c_uint8_ptr(plaintext), - c_size_t(len(ciphertext))) - if result: - if result == 0x60002: - raise OverflowError("The counter has wrapped around in" - " CTR mode") - raise ValueError("Error %X while decrypting in CTR mode" % result) - - if output is None: - return get_raw_buffer(plaintext) - else: - return None - - -def _create_ctr_cipher(factory, **kwargs): - """Instantiate a cipher object that performs CTR encryption/decryption. - - :Parameters: - factory : module - The underlying block cipher, a module from ``Cryptodome.Cipher``. - - :Keywords: - nonce : bytes/bytearray/memoryview - The fixed part at the beginning of the counter block - the rest is - the counter number that gets increased when processing the next block. - The nonce must be such that no two messages are encrypted under the - same key and the same nonce. - - The nonce must be shorter than the block size (it can have - zero length; the counter is then as long as the block). - - If this parameter is not present, a random nonce will be created with - length equal to half the block size. 
No random nonce shorter than - 64 bits will be created though - you must really think through all - security consequences of using such a short block size. - - initial_value : posive integer or bytes/bytearray/memoryview - The initial value for the counter. If not present, the cipher will - start counting from 0. The value is incremented by one for each block. - The counter number is encoded in big endian mode. - - counter : object - Instance of ``Cryptodome.Util.Counter``, which allows full customization - of the counter block. This parameter is incompatible to both ``nonce`` - and ``initial_value``. - - Any other keyword will be passed to the underlying block cipher. - See the relevant documentation for details (at least ``key`` will need - to be present). - """ - - cipher_state = factory._create_base_cipher(kwargs) - - counter = kwargs.pop("counter", None) - nonce = kwargs.pop("nonce", None) - initial_value = kwargs.pop("initial_value", None) - if kwargs: - raise TypeError("Invalid parameters for CTR mode: %s" % str(kwargs)) - - if counter is not None and (nonce, initial_value) != (None, None): - raise TypeError("'counter' and 'nonce'/'initial_value'" - " are mutually exclusive") - - if counter is None: - # Cryptodome.Util.Counter is not used - if nonce is None: - if factory.block_size < 16: - raise TypeError("Impossible to create a safe nonce for short" - " block sizes") - nonce = get_random_bytes(factory.block_size // 2) - else: - if len(nonce) >= factory.block_size: - raise ValueError("Nonce is too long") - - # What is not nonce is counter - counter_len = factory.block_size - len(nonce) - - if initial_value is None: - initial_value = 0 - - if is_native_int(initial_value): - if (1 << (counter_len * 8)) - 1 < initial_value: - raise ValueError("Initial counter value is too large") - initial_counter_block = nonce + long_to_bytes(initial_value, counter_len) - else: - if len(initial_value) != counter_len: - raise ValueError("Incorrect length for counter byte string (%d bytes, expected %d)" % - (len(initial_value), counter_len)) - initial_counter_block = nonce + initial_value - - return CtrMode(cipher_state, - initial_counter_block, - len(nonce), # prefix - counter_len, - False) # little_endian - - # Cryptodome.Util.Counter is used - - # 'counter' used to be a callable object, but now it is - # just a dictionary for backward compatibility. 
- _counter = dict(counter) - try: - counter_len = _counter.pop("counter_len") - prefix = _counter.pop("prefix") - suffix = _counter.pop("suffix") - initial_value = _counter.pop("initial_value") - little_endian = _counter.pop("little_endian") - except KeyError: - raise TypeError("Incorrect counter object" - " (use Cryptodome.Util.Counter.new)") - - # Compute initial counter block - words = [] - while initial_value > 0: - words.append(struct.pack('B', initial_value & 255)) - initial_value >>= 8 - words += [b'\x00'] * max(0, counter_len - len(words)) - if not little_endian: - words.reverse() - initial_counter_block = prefix + b"".join(words) + suffix - - if len(initial_counter_block) != factory.block_size: - raise ValueError("Size of the counter block (%d bytes) must match" - " block size (%d)" % (len(initial_counter_block), - factory.block_size)) - - return CtrMode(cipher_state, initial_counter_block, - len(prefix), counter_len, little_endian) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ctr.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ctr.pyi deleted file mode 100644 index a68a890..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ctr.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Union, overload - -from Cryptodome.Util._raw_api import SmartPointer - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['CtrMode'] - -class CtrMode(object): - block_size: int - nonce: bytes - - def __init__(self, - block_cipher: SmartPointer, - initial_counter_block: Buffer, - prefix_len: int, - counter_len: int, - little_endian: bool) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_eax.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_eax.py deleted file mode 100644 index 44ef21f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_eax.py +++ /dev/null @@ -1,408 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -EAX mode. -""" - -__all__ = ['EaxMode'] - -import struct -from binascii import unhexlify - -from Cryptodome.Util.py3compat import byte_string, bord, _copy_bytes - -from Cryptodome.Util._raw_api import is_buffer - -from Cryptodome.Util.strxor import strxor -from Cryptodome.Util.number import long_to_bytes, bytes_to_long - -from Cryptodome.Hash import CMAC, BLAKE2s -from Cryptodome.Random import get_random_bytes - - -class EaxMode(object): - """*EAX* mode. - - This is an Authenticated Encryption with Associated Data - (`AEAD`_) mode. It provides both confidentiality and authenticity. - - The header of the message may be left in the clear, if needed, - and it will still be subject to authentication. - - The decryption step tells the receiver if the message comes - from a source that really knowns the secret key. - Additionally, decryption detects if any part of the message - - including the header - has been modified or corrupted. - - This mode requires a *nonce*. - - This mode is only available for ciphers that operate on 64 or - 128 bits blocks. - - There are no official standards defining EAX. - The implementation is based on `a proposal`__ that - was presented to NIST. - - .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html - .. 
__: http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/eax/eax-spec.pdf - - :undocumented: __init__ - """ - - def __init__(self, factory, key, nonce, mac_len, cipher_params): - """EAX cipher mode""" - - self.block_size = factory.block_size - """The block size of the underlying cipher, in bytes.""" - - self.nonce = _copy_bytes(None, None, nonce) - """The nonce originally used to create the object.""" - - self._mac_len = mac_len - self._mac_tag = None # Cache for MAC tag - - # Allowed transitions after initialization - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - # MAC tag length - if not (2 <= self._mac_len <= self.block_size): - raise ValueError("'mac_len' must be at least 2 and not larger than %d" - % self.block_size) - - # Nonce cannot be empty and must be a byte string - if len(self.nonce) == 0: - raise ValueError("Nonce cannot be empty in EAX mode") - if not is_buffer(nonce): - raise TypeError("nonce must be bytes, bytearray or memoryview") - - self._omac = [ - CMAC.new(key, - b'\x00' * (self.block_size - 1) + struct.pack('B', i), - ciphermod=factory, - cipher_params=cipher_params) - for i in range(0, 3) - ] - - # Compute MAC of nonce - self._omac[0].update(self.nonce) - self._signer = self._omac[1] - - # MAC of the nonce is also the initial counter for CTR encryption - counter_int = bytes_to_long(self._omac[0].digest()) - self._cipher = factory.new(key, - factory.MODE_CTR, - initial_value=counter_int, - nonce=b"", - **cipher_params) - - def update(self, assoc_data): - """Protect associated data - - If there is any associated data, the caller has to invoke - this function one or more times, before using - ``decrypt`` or ``encrypt``. - - By *associated data* it is meant any data (e.g. packet headers) that - will not be encrypted and will be transmitted in the clear. - However, the receiver is still able to detect any modification to it. - - If there is no associated data, this method must not be called. - - The caller may split associated data in segments of any size, and - invoke this method multiple times, each time with the next segment. - - :Parameters: - assoc_data : bytes/bytearray/memoryview - A piece of associated data. There are no restrictions on its size. - """ - - if "update" not in self._next: - raise TypeError("update() can only be called" - " immediately after initialization") - - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - self._signer.update(assoc_data) - return self - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext as ``bytes``. - Otherwise, ``None``. 
- """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() can only be called after" - " initialization or an update()") - self._next = ["encrypt", "digest"] - ct = self._cipher.encrypt(plaintext, output=output) - if output is None: - self._omac[2].update(ct) - else: - self._omac[2].update(output) - return ct - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() can only be called" - " after initialization or an update()") - self._next = ["decrypt", "verify"] - self._omac[2].update(ciphertext) - return self._cipher.decrypt(ciphertext, output=output) - - def digest(self): - """Compute the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method returns the MAC that shall be sent to the receiver, - together with the ciphertext. - - :Return: the MAC, as a byte string. - """ - - if "digest" not in self._next: - raise TypeError("digest() cannot be called when decrypting" - " or validating a message") - self._next = ["digest"] - - if not self._mac_tag: - tag = b'\x00' * self.block_size - for i in range(3): - tag = strxor(tag, self._omac[i].digest()) - self._mac_tag = tag[:self._mac_len] - - return self._mac_tag - - def hexdigest(self): - """Compute the *printable* MAC tag. - - This method is like `digest`. - - :Return: the MAC, as a hexadecimal string. - """ - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def verify(self, received_mac_tag): - """Validate the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method checks if the decrypted message is indeed valid - (that is, if the key is correct) and it has not been - tampered with while in transit. - - :Parameters: - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Raises MacMismatchError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "verify" not in self._next: - raise TypeError("verify() cannot be called" - " when encrypting a message") - self._next = ["verify"] - - if not self._mac_tag: - tag = b'\x00' * self.block_size - for i in range(3): - tag = strxor(tag, self._omac[i].digest()) - self._mac_tag = tag[:self._mac_len] - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Validate the *printable* MAC tag. - - This method is like `verify`. 
- - :Parameters: - hex_mac_tag : string - This is the *printable* MAC, as received from the sender. - :Raises MacMismatchError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - self.verify(unhexlify(hex_mac_tag)) - - def encrypt_and_digest(self, plaintext, output=None): - """Perform encrypt() and digest() in one step. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - a tuple with two items: - - - the ciphertext, as ``bytes`` - - the MAC tag, as ``bytes`` - - The first item becomes ``None`` when the ``output`` parameter - specified a location for the result. - """ - - return self.encrypt(plaintext, output=output), self.digest() - - def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): - """Perform decrypt() and verify() in one step. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` - parameter specified a location for the result. - :Raises MacMismatchError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - pt = self.decrypt(ciphertext, output=output) - self.verify(received_mac_tag) - return pt - - -def _create_eax_cipher(factory, **kwargs): - """Create a new block cipher, configured in EAX mode. - - :Parameters: - factory : module - A symmetric cipher module from `Cryptodome.Cipher` (like - `Cryptodome.Cipher.AES`). - - :Keywords: - key : bytes/bytearray/memoryview - The secret key to use in the symmetric cipher. - - nonce : bytes/bytearray/memoryview - A value that must never be reused for any other encryption. - There are no restrictions on its length, but it is recommended to use - at least 16 bytes. - - The nonce shall never repeat for two different messages encrypted with - the same key, but it does not need to be random. - - If not specified, a 16 byte long random string is used. - - mac_len : integer - Length of the MAC, in bytes. It must be no larger than the cipher - block bytes (which is the default). - """ - - try: - key = kwargs.pop("key") - nonce = kwargs.pop("nonce", None) - if nonce is None: - nonce = get_random_bytes(16) - mac_len = kwargs.pop("mac_len", factory.block_size) - except KeyError as e: - raise TypeError("Missing parameter: " + str(e)) - - return EaxMode(factory, key, nonce, mac_len, kwargs) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_eax.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_eax.pyi deleted file mode 100644 index cbfa467..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_eax.pyi +++ /dev/null @@ -1,45 +0,0 @@ -from types import ModuleType -from typing import Any, Union, Tuple, Dict, overload, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['EaxMode'] - -class EaxMode(object): - block_size: int - nonce: bytes - - def __init__(self, - factory: ModuleType, - key: Buffer, - nonce: Buffer, - mac_len: int, - cipher_params: Dict) -> None: ... - - def update(self, assoc_data: Buffer) -> EaxMode: ... 
- - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, received_mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - @overload - def encrypt_and_digest(self, - plaintext: Buffer) -> Tuple[bytes, bytes]: ... - @overload - def encrypt_and_digest(self, - plaintext: Buffer, - output: Buffer) -> Tuple[None, bytes]: ... - def decrypt_and_verify(self, - ciphertext: Buffer, - received_mac_tag: Buffer, - output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ecb.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ecb.py deleted file mode 100644 index a01a16f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ecb.py +++ /dev/null @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/mode_ecb.py : ECB mode -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -""" -Electronic Code Book (ECB) mode. -""" - -__all__ = [ 'EcbMode' ] - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, create_string_buffer, - get_raw_buffer, SmartPointer, - c_size_t, c_uint8_ptr, - is_writeable_buffer) - -raw_ecb_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_ecb", """ - int ECB_start_operation(void *cipher, - void **pResult); - int ECB_encrypt(void *ecbState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int ECB_decrypt(void *ecbState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int ECB_stop_operation(void *state); - """ - ) - - -class EcbMode(object): - """*Electronic Code Book (ECB)*. - - This is the simplest encryption mode. Each of the plaintext blocks - is directly encrypted into a ciphertext block, independently of - any other block. - - This mode is dangerous because it exposes frequency of symbols - in your plaintext. Other modes (e.g. *CBC*) should be used instead. - - See `NIST SP800-38A`_ , Section 6.1. - - .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf - - :undocumented: __init__ - """ - - def __init__(self, block_cipher): - """Create a new block cipher, configured in ECB mode. - - :Parameters: - block_cipher : C pointer - A smart pointer to the low-level block cipher instance. 
- """ - self.block_size = block_cipher.block_size - - self._state = VoidPointer() - result = raw_ecb_lib.ECB_start_operation(block_cipher.get(), - self._state.address_of()) - if result: - raise ValueError("Error %d while instantiating the ECB mode" - % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the cipher - # mode - self._state = SmartPointer(self._state.get(), - raw_ecb_lib.ECB_stop_operation) - - # Memory allocated for the underlying block cipher is now owned - # by the cipher mode - block_cipher.release() - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key set at initialization. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - The length must be multiple of the cipher block length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_ecb_lib.ECB_encrypt(self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - if result == 3: - raise ValueError("Data must be aligned to block boundary in ECB mode") - raise ValueError("Error %d while encrypting in ECB mode" % result) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key set at initialization. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - The length must be multiple of the cipher block length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. 
- """ - - if output is None: - plaintext = create_string_buffer(len(ciphertext)) - else: - plaintext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(ciphertext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_ecb_lib.ECB_decrypt(self._state.get(), - c_uint8_ptr(ciphertext), - c_uint8_ptr(plaintext), - c_size_t(len(ciphertext))) - if result: - if result == 3: - raise ValueError("Data must be aligned to block boundary in ECB mode") - raise ValueError("Error %d while decrypting in ECB mode" % result) - - if output is None: - return get_raw_buffer(plaintext) - else: - return None - - -def _create_ecb_cipher(factory, **kwargs): - """Instantiate a cipher object that performs ECB encryption/decryption. - - :Parameters: - factory : module - The underlying block cipher, a module from ``Cryptodome.Cipher``. - - All keywords are passed to the underlying block cipher. - See the relevant documentation for details (at least ``key`` will need - to be present""" - - cipher_state = factory._create_base_cipher(kwargs) - cipher_state.block_size = factory.block_size - if kwargs: - raise TypeError("Unknown parameters for ECB: %s" % str(kwargs)) - return EcbMode(cipher_state) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ecb.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ecb.pyi deleted file mode 100644 index 936195f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ecb.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, overload - -from Cryptodome.Util._raw_api import SmartPointer - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = [ 'EcbMode' ] - -class EcbMode(object): - def __init__(self, block_cipher: SmartPointer) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_gcm.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_gcm.py deleted file mode 100644 index 9914400..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_gcm.py +++ /dev/null @@ -1,620 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Galois/Counter Mode (GCM). -""" - -__all__ = ['GcmMode'] - -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bord, _copy_bytes - -from Cryptodome.Util._raw_api import is_buffer - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Random import get_random_bytes - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr) - -from Cryptodome.Util import _cpu_features - - -# C API by module implementing GHASH -_ghash_api_template = """ - int ghash_%imp%(uint8_t y_out[16], - const uint8_t block_data[], - size_t len, - const uint8_t y_in[16], - const void *exp_key); - int ghash_expand_%imp%(const uint8_t h[16], - void **ghash_tables); - int ghash_destroy_%imp%(void *ghash_tables); -""" - -def _build_impl(lib, postfix): - from collections import namedtuple - - funcs = ( "ghash", "ghash_expand", "ghash_destroy" ) - GHASH_Imp = namedtuple('_GHash_Imp', funcs) - try: - imp_funcs = [ getattr(lib, x + "_" + postfix) for x in funcs ] - except AttributeError: # Make sphinx stop complaining with its mocklib - imp_funcs = [ None ] * 3 - params = dict(zip(funcs, imp_funcs)) - return GHASH_Imp(**params) - - -def _get_ghash_portable(): - api = _ghash_api_template.replace("%imp%", "portable") - lib = load_pycryptodome_raw_lib("Cryptodome.Hash._ghash_portable", api) - result = _build_impl(lib, "portable") - return result -_ghash_portable = _get_ghash_portable() - - -def _get_ghash_clmul(): - """Return None if CLMUL implementation is not available""" - - if not _cpu_features.have_clmul(): - return None - try: - api = _ghash_api_template.replace("%imp%", "clmul") - lib = load_pycryptodome_raw_lib("Cryptodome.Hash._ghash_clmul", api) - result = _build_impl(lib, "clmul") - except OSError: - result = None - return result -_ghash_clmul = _get_ghash_clmul() - - -class _GHASH(object): - """GHASH function defined in NIST SP 800-38D, Algorithm 2. - - If X_1, X_2, .. X_m are the blocks of input data, the function - computes: - - X_1*H^{m} + X_2*H^{m-1} + ... + X_m*H - - in the Galois field GF(2^256) using the reducing polynomial - (x^128 + x^7 + x^2 + x + 1). 
- """ - - def __init__(self, subkey, ghash_c): - assert len(subkey) == 16 - - self.ghash_c = ghash_c - - self._exp_key = VoidPointer() - result = ghash_c.ghash_expand(c_uint8_ptr(subkey), - self._exp_key.address_of()) - if result: - raise ValueError("Error %d while expanding the GHASH key" % result) - - self._exp_key = SmartPointer(self._exp_key.get(), - ghash_c.ghash_destroy) - - # create_string_buffer always returns a string of zeroes - self._last_y = create_string_buffer(16) - - def update(self, block_data): - assert len(block_data) % 16 == 0 - - result = self.ghash_c.ghash(self._last_y, - c_uint8_ptr(block_data), - c_size_t(len(block_data)), - self._last_y, - self._exp_key.get()) - if result: - raise ValueError("Error %d while updating GHASH" % result) - - return self - - def digest(self): - return get_raw_buffer(self._last_y) - - -def enum(**enums): - return type('Enum', (), enums) - - -MacStatus = enum(PROCESSING_AUTH_DATA=1, PROCESSING_CIPHERTEXT=2) - - -class GcmMode(object): - """Galois Counter Mode (GCM). - - This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. - It provides both confidentiality and authenticity. - - The header of the message may be left in the clear, if needed, and it will - still be subject to authentication. The decryption step tells the receiver - if the message comes from a source that really knowns the secret key. - Additionally, decryption detects if any part of the message - including the - header - has been modified or corrupted. - - This mode requires a *nonce*. - - This mode is only available for ciphers that operate on 128 bits blocks - (e.g. AES but not TDES). - - See `NIST SP800-38D`_. - - .. _`NIST SP800-38D`: http://csrc.nist.gov/publications/nistpubs/800-38D/SP-800-38D.pdf - .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html - - :undocumented: __init__ - """ - - def __init__(self, factory, key, nonce, mac_len, cipher_params, ghash_c): - - self.block_size = factory.block_size - if self.block_size != 16: - raise ValueError("GCM mode is only available for ciphers" - " that operate on 128 bits blocks") - - if len(nonce) == 0: - raise ValueError("Nonce cannot be empty") - - if not is_buffer(nonce): - raise TypeError("Nonce must be bytes, bytearray or memoryview") - - # See NIST SP 800 38D, 5.2.1.1 - if len(nonce) > 2**64 - 1: - raise ValueError("Nonce exceeds maximum length") - - - self.nonce = _copy_bytes(None, None, nonce) - """Nonce""" - - self._factory = factory - self._key = _copy_bytes(None, None, key) - self._tag = None # Cache for MAC tag - - self._mac_len = mac_len - if not (4 <= mac_len <= 16): - raise ValueError("Parameter 'mac_len' must be in the range 4..16") - - # Allowed transitions after initialization - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - self._no_more_assoc_data = False - - # Length of associated data - self._auth_len = 0 - - # Length of the ciphertext or plaintext - self._msg_len = 0 - - # Step 1 in SP800-38D, Algorithm 4 (encryption) - Compute H - # See also Algorithm 5 (decryption) - hash_subkey = factory.new(key, - self._factory.MODE_ECB, - **cipher_params - ).encrypt(b'\x00' * 16) - - # Step 2 - Compute J0 - if len(self.nonce) == 12: - j0 = self.nonce + b"\x00\x00\x00\x01" - else: - fill = (16 - (len(self.nonce) % 16)) % 16 + 8 - ghash_in = (self.nonce + - b'\x00' * fill + - long_to_bytes(8 * len(self.nonce), 8)) - j0 = _GHASH(hash_subkey, ghash_c).update(ghash_in).digest() - - # Step 3 - Prepare GCTR cipher for 
encryption/decryption - nonce_ctr = j0[:12] - iv_ctr = (bytes_to_long(j0) + 1) & 0xFFFFFFFF - self._cipher = factory.new(key, - self._factory.MODE_CTR, - initial_value=iv_ctr, - nonce=nonce_ctr, - **cipher_params) - - # Step 5 - Bootstrat GHASH - self._signer = _GHASH(hash_subkey, ghash_c) - - # Step 6 - Prepare GCTR cipher for GMAC - self._tag_cipher = factory.new(key, - self._factory.MODE_CTR, - initial_value=j0, - nonce=b"", - **cipher_params) - - # Cache for data to authenticate - self._cache = b"" - - self._status = MacStatus.PROCESSING_AUTH_DATA - - def update(self, assoc_data): - """Protect associated data - - If there is any associated data, the caller has to invoke - this function one or more times, before using - ``decrypt`` or ``encrypt``. - - By *associated data* it is meant any data (e.g. packet headers) that - will not be encrypted and will be transmitted in the clear. - However, the receiver is still able to detect any modification to it. - In GCM, the *associated data* is also called - *additional authenticated data* (AAD). - - If there is no associated data, this method must not be called. - - The caller may split associated data in segments of any size, and - invoke this method multiple times, each time with the next segment. - - :Parameters: - assoc_data : bytes/bytearray/memoryview - A piece of associated data. There are no restrictions on its size. - """ - - if "update" not in self._next: - raise TypeError("update() can only be called" - " immediately after initialization") - - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - self._update(assoc_data) - self._auth_len += len(assoc_data) - - # See NIST SP 800 38D, 5.2.1.1 - if self._auth_len > 2**64 - 1: - raise ValueError("Additional Authenticated Data exceeds maximum length") - - return self - - def _update(self, data): - assert(len(self._cache) < 16) - - if len(self._cache) > 0: - filler = min(16 - len(self._cache), len(data)) - self._cache += _copy_bytes(None, filler, data) - data = data[filler:] - - if len(self._cache) < 16: - return - - # The cache is exactly one block - self._signer.update(self._cache) - self._cache = b"" - - update_len = len(data) // 16 * 16 - self._cache = _copy_bytes(update_len, None, data) - if update_len > 0: - self._signer.update(data[:update_len]) - - def _pad_cache_and_update(self): - assert(len(self._cache) < 16) - - # The authenticated data A is concatenated to the minimum - # number of zero bytes (possibly none) such that the - # - ciphertext C is aligned to the 16 byte boundary. - # See step 5 in section 7.1 - # - ciphertext C is aligned to the 16 byte boundary. - # See step 6 in section 7.2 - len_cache = len(self._cache) - if len_cache > 0: - self._update(b'\x00' * (16 - len_cache)) - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. 
- If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext as ``bytes``. - Otherwise, ``None``. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() can only be called after" - " initialization or an update()") - self._next = ["encrypt", "digest"] - - ciphertext = self._cipher.encrypt(plaintext, output=output) - - if self._status == MacStatus.PROCESSING_AUTH_DATA: - self._pad_cache_and_update() - self._status = MacStatus.PROCESSING_CIPHERTEXT - - self._update(ciphertext if output is None else output) - self._msg_len += len(plaintext) - - # See NIST SP 800 38D, 5.2.1.1 - if self._msg_len > 2**39 - 256: - raise ValueError("Plaintext exceeds maximum length") - - return ciphertext - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext as ``bytes``. - Otherwise, ``None``. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() can only be called" - " after initialization or an update()") - self._next = ["decrypt", "verify"] - - if self._status == MacStatus.PROCESSING_AUTH_DATA: - self._pad_cache_and_update() - self._status = MacStatus.PROCESSING_CIPHERTEXT - - self._update(ciphertext) - self._msg_len += len(ciphertext) - - return self._cipher.decrypt(ciphertext, output=output) - - def digest(self): - """Compute the *binary* MAC tag in an AEAD mode. - - The caller invokes this function at the very end. - - This method returns the MAC that shall be sent to the receiver, - together with the ciphertext. - - :Return: the MAC, as a byte string. - """ - - if "digest" not in self._next: - raise TypeError("digest() cannot be called when decrypting" - " or validating a message") - self._next = ["digest"] - - return self._compute_mac() - - def _compute_mac(self): - """Compute MAC without any FSM checks.""" - - if self._tag: - return self._tag - - # Step 5 in NIST SP 800-38D, Algorithm 4 - Compute S - self._pad_cache_and_update() - self._update(long_to_bytes(8 * self._auth_len, 8)) - self._update(long_to_bytes(8 * self._msg_len, 8)) - s_tag = self._signer.digest() - - # Step 6 - Compute T - self._tag = self._tag_cipher.encrypt(s_tag)[:self._mac_len] - - return self._tag - - def hexdigest(self): - """Compute the *printable* MAC tag. - - This method is like `digest`. - - :Return: the MAC, as a hexadecimal string. - """ - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def verify(self, received_mac_tag): - """Validate the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method checks if the decrypted message is indeed valid - (that is, if the key is correct) and it has not been - tampered with while in transit. 
- - :Parameters: - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "verify" not in self._next: - raise TypeError("verify() cannot be called" - " when encrypting a message") - self._next = ["verify"] - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, - data=self._compute_mac()) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, - data=received_mac_tag) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Validate the *printable* MAC tag. - - This method is like `verify`. - - :Parameters: - hex_mac_tag : string - This is the *printable* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - self.verify(unhexlify(hex_mac_tag)) - - def encrypt_and_digest(self, plaintext, output=None): - """Perform encrypt() and digest() in one step. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - a tuple with two items: - - - the ciphertext, as ``bytes`` - - the MAC tag, as ``bytes`` - - The first item becomes ``None`` when the ``output`` parameter - specified a location for the result. - """ - - return self.encrypt(plaintext, output=output), self.digest() - - def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): - """Perform decrypt() and verify() in one step. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - received_mac_tag : byte string - This is the *binary* MAC, as received from the sender. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. - If ``None``, the plaintext is returned. - :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` - parameter specified a location for the result. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - plaintext = self.decrypt(ciphertext, output=output) - self.verify(received_mac_tag) - return plaintext - - -def _create_gcm_cipher(factory, **kwargs): - """Create a new block cipher, configured in Galois Counter Mode (GCM). - - :Parameters: - factory : module - A block cipher module, taken from `Cryptodome.Cipher`. - The cipher must have block length of 16 bytes. - GCM has been only defined for `Cryptodome.Cipher.AES`. - - :Keywords: - key : bytes/bytearray/memoryview - The secret key to use in the symmetric cipher. - It must be 16 (e.g. *AES-128*), 24 (e.g. *AES-192*) - or 32 (e.g. *AES-256*) bytes long. - - nonce : bytes/bytearray/memoryview - A value that must never be reused for any other encryption. - - There are no restrictions on its length, - but it is recommended to use at least 16 bytes. - - The nonce shall never repeat for two - different messages encrypted with the same key, - but it does not need to be random. - - If not provided, a 16 byte nonce will be randomly created. - - mac_len : integer - Length of the MAC, in bytes. - It must be no larger than 16 bytes (which is the default). 
- """ - - try: - key = kwargs.pop("key") - except KeyError as e: - raise TypeError("Missing parameter:" + str(e)) - - nonce = kwargs.pop("nonce", None) - if nonce is None: - nonce = get_random_bytes(16) - mac_len = kwargs.pop("mac_len", 16) - - # Not documented - only used for testing - use_clmul = kwargs.pop("use_clmul", True) - if use_clmul and _ghash_clmul: - ghash_c = _ghash_clmul - else: - ghash_c = _ghash_portable - - return GcmMode(factory, key, nonce, mac_len, kwargs, ghash_c) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_gcm.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_gcm.pyi deleted file mode 100644 index 8912955..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_gcm.pyi +++ /dev/null @@ -1,45 +0,0 @@ -from types import ModuleType -from typing import Union, Tuple, Dict, overload, Optional - -__all__ = ['GcmMode'] - -Buffer = Union[bytes, bytearray, memoryview] - -class GcmMode(object): - block_size: int - nonce: Buffer - - def __init__(self, - factory: ModuleType, - key: Buffer, - nonce: Buffer, - mac_len: int, - cipher_params: Dict) -> None: ... - - def update(self, assoc_data: Buffer) -> GcmMode: ... - - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, received_mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - @overload - def encrypt_and_digest(self, - plaintext: Buffer) -> Tuple[bytes, bytes]: ... - @overload - def encrypt_and_digest(self, - plaintext: Buffer, - output: Buffer) -> Tuple[None, bytes]: ... - def decrypt_and_verify(self, - ciphertext: Buffer, - received_mac_tag: Buffer, - output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_kw.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_kw.py deleted file mode 100644 index 41c09ef..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_kw.py +++ /dev/null @@ -1,158 +0,0 @@ -import struct -from collections import deque - -from types import ModuleType -from typing import Union - -from Cryptodome.Util.strxor import strxor - - -def W(cipher: ModuleType, - plaintext: Union[bytes, bytearray]) -> bytes: - - S = [plaintext[i:i+8] for i in range(0, len(plaintext), 8)] - n = len(S) - s = 6 * (n - 1) - A = S[0] - R = deque(S[1:]) - - for t in range(1, s + 1): - t_64 = struct.pack('>Q', t) - ct = cipher.encrypt(A + R.popleft()) - A = strxor(ct[:8], t_64) - R.append(ct[8:]) - - return A + b''.join(R) - - -def W_inverse(cipher: ModuleType, - ciphertext: Union[bytes, bytearray]) -> bytes: - - C = [ciphertext[i:i+8] for i in range(0, len(ciphertext), 8)] - n = len(C) - s = 6 * (n - 1) - A = C[0] - R = deque(C[1:]) - - for t in range(s, 0, -1): - t_64 = struct.pack('>Q', t) - pt = cipher.decrypt(strxor(A, t_64) + R.pop()) - A = pt[:8] - R.appendleft(pt[8:]) - - return A + b''.join(R) - - -class KWMode(object): - """Key Wrap (KW) mode. - - This is a deterministic Authenticated Encryption (AE) mode - for protecting cryptographic keys. See `NIST SP800-38F`_. - - It provides both confidentiality and authenticity, and it designed - so that any bit of the ciphertext depends on all bits of the plaintext. 
- - This mode is only available for ciphers that operate on 128 bits blocks - (e.g., AES). - - .. _`NIST SP800-38F`: http://csrc.nist.gov/publications/nistpubs/800-38F/SP-800-38F.pdf - - :undocumented: __init__ - """ - - def __init__(self, - factory: ModuleType, - key: Union[bytes, bytearray]): - - self.block_size = factory.block_size - if self.block_size != 16: - raise ValueError("Key Wrap mode is only available for ciphers" - " that operate on 128 bits blocks") - - self._factory = factory - self._cipher = factory.new(key, factory.MODE_ECB) - self._done = False - - def seal(self, plaintext: Union[bytes, bytearray]) -> bytes: - """Encrypt and authenticate (wrap) a cryptographic key. - - Args: - plaintext: - The cryptographic key to wrap. - It must be at least 16 bytes long, and its length - must be a multiple of 8. - - Returns: - The wrapped key. - """ - - if self._done: - raise ValueError("The cipher cannot be used more than once") - - if len(plaintext) % 8: - raise ValueError("The plaintext must have length multiple of 8 bytes") - - if len(plaintext) < 16: - raise ValueError("The plaintext must be at least 16 bytes long") - - if len(plaintext) >= 2**32: - raise ValueError("The plaintext is too long") - - res = W(self._cipher, b'\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6' + plaintext) - self._done = True - return res - - def unseal(self, ciphertext: Union[bytes, bytearray]) -> bytes: - """Decrypt and authenticate (unwrap) a cryptographic key. - - Args: - ciphertext: - The cryptographic key to unwrap. - It must be at least 24 bytes long, and its length - must be a multiple of 8. - - Returns: - The original key. - - Raises: ValueError - If the ciphertext or the key are not valid. - """ - - if self._done: - raise ValueError("The cipher cannot be used more than once") - - if len(ciphertext) % 8: - raise ValueError("The ciphertext must have length multiple of 8 bytes") - - if len(ciphertext) < 24: - raise ValueError("The ciphertext must be at least 24 bytes long") - - pt = W_inverse(self._cipher, ciphertext) - - if pt[:8] != b'\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6': - raise ValueError("Incorrect integrity check value") - self._done = True - - return pt[8:] - - -def _create_kw_cipher(factory: ModuleType, - **kwargs: Union[bytes, bytearray]) -> KWMode: - """Create a new block cipher in Key Wrap mode. - - Args: - factory: - A block cipher module, taken from `Cryptodome.Cipher`. - The cipher must have block length of 16 bytes, such as AES. - - Keywords: - key: - The secret key to use to seal or unseal. - """ - - try: - key = kwargs["key"] - except KeyError as e: - raise TypeError("Missing parameter:" + str(e)) - - return KWMode(factory, key) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_kwp.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_kwp.py deleted file mode 100644 index 0868443..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_kwp.py +++ /dev/null @@ -1,135 +0,0 @@ -import struct - -from types import ModuleType -from typing import Union - -from ._mode_kw import W, W_inverse - - -class KWPMode(object): - """Key Wrap with Padding (KWP) mode. - - This is a deterministic Authenticated Encryption (AE) mode - for protecting cryptographic keys. See `NIST SP800-38F`_. - - It provides both confidentiality and authenticity, and it designed - so that any bit of the ciphertext depends on all bits of the plaintext. - - This mode is only available for ciphers that operate on 128 bits blocks - (e.g., AES). - - .. 
_`NIST SP800-38F`: http://csrc.nist.gov/publications/nistpubs/800-38F/SP-800-38F.pdf - - :undocumented: __init__ - """ - - def __init__(self, - factory: ModuleType, - key: Union[bytes, bytearray]): - - self.block_size = factory.block_size - if self.block_size != 16: - raise ValueError("Key Wrap with Padding mode is only available for ciphers" - " that operate on 128 bits blocks") - - self._factory = factory - self._cipher = factory.new(key, factory.MODE_ECB) - self._done = False - - def seal(self, plaintext: Union[bytes, bytearray]) -> bytes: - """Encrypt and authenticate (wrap) a cryptographic key. - - Args: - plaintext: - The cryptographic key to wrap. - - Returns: - The wrapped key. - """ - - if self._done: - raise ValueError("The cipher cannot be used more than once") - - if len(plaintext) == 0: - raise ValueError("The plaintext must be at least 1 byte") - - if len(plaintext) >= 2 ** 32: - raise ValueError("The plaintext is too long") - - padlen = (8 - len(plaintext)) % 8 - padded = plaintext + b'\x00' * padlen - - AIV = b'\xA6\x59\x59\xA6' + struct.pack('>I', len(plaintext)) - - if len(padded) == 8: - res = self._cipher.encrypt(AIV + padded) - else: - res = W(self._cipher, AIV + padded) - - return res - - def unseal(self, ciphertext: Union[bytes, bytearray]) -> bytes: - """Decrypt and authenticate (unwrap) a cryptographic key. - - Args: - ciphertext: - The cryptographic key to unwrap. - It must be at least 16 bytes long, and its length - must be a multiple of 8. - - Returns: - The original key. - - Raises: ValueError - If the ciphertext or the key are not valid. - """ - - if self._done: - raise ValueError("The cipher cannot be used more than once") - - if len(ciphertext) % 8: - raise ValueError("The ciphertext must have length multiple of 8 bytes") - - if len(ciphertext) < 16: - raise ValueError("The ciphertext must be at least 24 bytes long") - - if len(ciphertext) == 16: - S = self._cipher.decrypt(ciphertext) - else: - S = W_inverse(self._cipher, ciphertext) - - if S[:4] != b'\xA6\x59\x59\xA6': - raise ValueError("Incorrect decryption") - - Plen = struct.unpack('>I', S[4:8])[0] - - padlen = len(S) - 8 - Plen - if padlen < 0 or padlen > 7: - raise ValueError("Incorrect decryption") - - if S[len(S) - padlen:] != b'\x00' * padlen: - raise ValueError("Incorrect decryption") - - return S[8:len(S) - padlen] - - -def _create_kwp_cipher(factory: ModuleType, - **kwargs: Union[bytes, bytearray]) -> KWPMode: - """Create a new block cipher in Key Wrap with Padding mode. - - Args: - factory: - A block cipher module, taken from `Cryptodome.Cipher`. - The cipher must have block length of 16 bytes, such as AES. - - Keywords: - key: - The secret key to use to seal or unseal. - """ - - try: - key = kwargs["key"] - except KeyError as e: - raise TypeError("Missing parameter:" + str(e)) - - return KWPMode(factory, key) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ocb.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ocb.py deleted file mode 100644 index 1295e61..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ocb.py +++ /dev/null @@ -1,532 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Offset Codebook (OCB) mode. - -OCB is Authenticated Encryption with Associated Data (AEAD) cipher mode -designed by Prof. Phillip Rogaway and specified in `RFC7253`_. - -The algorithm provides both authenticity and privacy, it is very efficient, -it uses only one key and it can be used in online mode (so that encryption -or decryption can start before the end of the message is available). - -This module implements the third and last variant of OCB (OCB3) and it only -works in combination with a 128-bit block symmetric cipher, like AES. - -OCB is patented in US but `free licenses`_ exist for software implementations -meant for non-military purposes. - -Example: - >>> from Cryptodome.Cipher import AES - >>> from Cryptodome.Random import get_random_bytes - >>> - >>> key = get_random_bytes(32) - >>> cipher = AES.new(key, AES.MODE_OCB) - >>> plaintext = b"Attack at dawn" - >>> ciphertext, mac = cipher.encrypt_and_digest(plaintext) - >>> # Deliver cipher.nonce, ciphertext and mac - ... - >>> cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) - >>> try: - >>> plaintext = cipher.decrypt_and_verify(ciphertext, mac) - >>> except ValueError: - >>> print "Invalid message" - >>> else: - >>> print plaintext - -:undocumented: __package__ - -.. _RFC7253: http://www.rfc-editor.org/info/rfc7253 -.. _free licenses: http://web.cs.ucdavis.edu/~rogaway/ocb/license.htm -""" - -import struct -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bord, _copy_bytes, bchr -from Cryptodome.Util.number import long_to_bytes, bytes_to_long -from Cryptodome.Util.strxor import strxor - -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Random import get_random_bytes - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr, - is_buffer) - -_raw_ocb_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_ocb", """ - int OCB_start_operation(void *cipher, - const uint8_t *offset_0, - size_t offset_0_len, - void **pState); - int OCB_encrypt(void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int OCB_decrypt(void *state, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int OCB_update(void *state, - const uint8_t *in, - size_t data_len); - int OCB_digest(void *state, - uint8_t *tag, - size_t tag_len); - int OCB_stop_operation(void *state); - """) - - -class OcbMode(object): - """Offset Codebook (OCB) mode. 
- - :undocumented: __init__ - """ - - def __init__(self, factory, nonce, mac_len, cipher_params): - - if factory.block_size != 16: - raise ValueError("OCB mode is only available for ciphers" - " that operate on 128 bits blocks") - - self.block_size = 16 - """The block size of the underlying cipher, in bytes.""" - - self.nonce = _copy_bytes(None, None, nonce) - """Nonce used for this session.""" - if len(nonce) not in range(1, 16): - raise ValueError("Nonce must be at most 15 bytes long") - if not is_buffer(nonce): - raise TypeError("Nonce must be bytes, bytearray or memoryview") - - self._mac_len = mac_len - if not 8 <= mac_len <= 16: - raise ValueError("MAC tag must be between 8 and 16 bytes long") - - # Cache for MAC tag - self._mac_tag = None - - # Cache for unaligned associated data - self._cache_A = b"" - - # Cache for unaligned ciphertext/plaintext - self._cache_P = b"" - - # Allowed transitions after initialization - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - # Compute Offset_0 - params_without_key = dict(cipher_params) - key = params_without_key.pop("key") - - taglen_mod128 = (self._mac_len * 8) % 128 - if len(self.nonce) < 15: - nonce = bchr(taglen_mod128 << 1) +\ - b'\x00' * (14 - len(nonce)) +\ - b'\x01' +\ - self.nonce - else: - nonce = bchr((taglen_mod128 << 1) | 0x01) +\ - self.nonce - - bottom_bits = bord(nonce[15]) & 0x3F # 6 bits, 0..63 - top_bits = bord(nonce[15]) & 0xC0 # 2 bits - - ktop_cipher = factory.new(key, - factory.MODE_ECB, - **params_without_key) - ktop = ktop_cipher.encrypt(struct.pack('15sB', - nonce[:15], - top_bits)) - - stretch = ktop + strxor(ktop[:8], ktop[1:9]) # 192 bits - offset_0 = long_to_bytes(bytes_to_long(stretch) >> - (64 - bottom_bits), 24)[8:] - - # Create low-level cipher instance - raw_cipher = factory._create_base_cipher(cipher_params) - if cipher_params: - raise TypeError("Unknown keywords: " + str(cipher_params)) - - self._state = VoidPointer() - result = _raw_ocb_lib.OCB_start_operation(raw_cipher.get(), - offset_0, - c_size_t(len(offset_0)), - self._state.address_of()) - if result: - raise ValueError("Error %d while instantiating the OCB mode" - % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the cipher mode - self._state = SmartPointer(self._state.get(), - _raw_ocb_lib.OCB_stop_operation) - - # Memory allocated for the underlying block cipher is now owed - # by the cipher mode - raw_cipher.release() - - def _update(self, assoc_data, assoc_data_len): - result = _raw_ocb_lib.OCB_update(self._state.get(), - c_uint8_ptr(assoc_data), - c_size_t(assoc_data_len)) - if result: - raise ValueError("Error %d while computing MAC in OCB mode" % result) - - def update(self, assoc_data): - """Process the associated data. - - If there is any associated data, the caller has to invoke - this method one or more times, before using - ``decrypt`` or ``encrypt``. - - By *associated data* it is meant any data (e.g. packet headers) that - will not be encrypted and will be transmitted in the clear. - However, the receiver shall still able to detect modifications. - - If there is no associated data, this method must not be called. - - The caller may split associated data in segments of any size, and - invoke this method multiple times, each time with the next segment. - - :Parameters: - assoc_data : bytes/bytearray/memoryview - A piece of associated data. 
- """ - - if "update" not in self._next: - raise TypeError("update() can only be called" - " immediately after initialization") - - self._next = ["encrypt", "decrypt", "digest", - "verify", "update"] - - if len(self._cache_A) > 0: - filler = min(16 - len(self._cache_A), len(assoc_data)) - self._cache_A += _copy_bytes(None, filler, assoc_data) - assoc_data = assoc_data[filler:] - - if len(self._cache_A) < 16: - return self - - # Clear the cache, and proceeding with any other aligned data - self._cache_A, seg = b"", self._cache_A - self.update(seg) - - update_len = len(assoc_data) // 16 * 16 - self._cache_A = _copy_bytes(update_len, None, assoc_data) - self._update(assoc_data, update_len) - return self - - def _transcrypt_aligned(self, in_data, in_data_len, - trans_func, trans_desc): - - out_data = create_string_buffer(in_data_len) - result = trans_func(self._state.get(), - in_data, - out_data, - c_size_t(in_data_len)) - if result: - raise ValueError("Error %d while %sing in OCB mode" - % (result, trans_desc)) - return get_raw_buffer(out_data) - - def _transcrypt(self, in_data, trans_func, trans_desc): - # Last piece to encrypt/decrypt - if in_data is None: - out_data = self._transcrypt_aligned(self._cache_P, - len(self._cache_P), - trans_func, - trans_desc) - self._cache_P = b"" - return out_data - - # Try to fill up the cache, if it already contains something - prefix = b"" - if len(self._cache_P) > 0: - filler = min(16 - len(self._cache_P), len(in_data)) - self._cache_P += _copy_bytes(None, filler, in_data) - in_data = in_data[filler:] - - if len(self._cache_P) < 16: - # We could not manage to fill the cache, so there is certainly - # no output yet. - return b"" - - # Clear the cache, and proceeding with any other aligned data - prefix = self._transcrypt_aligned(self._cache_P, - len(self._cache_P), - trans_func, - trans_desc) - self._cache_P = b"" - - # Process data in multiples of the block size - trans_len = len(in_data) // 16 * 16 - result = self._transcrypt_aligned(c_uint8_ptr(in_data), - trans_len, - trans_func, - trans_desc) - if prefix: - result = prefix + result - - # Left-over - self._cache_P = _copy_bytes(trans_len, None, in_data) - - return result - - def encrypt(self, plaintext=None): - """Encrypt the next piece of plaintext. - - After the entire plaintext has been passed (but before `digest`), - you **must** call this method one last time with no arguments to collect - the final piece of ciphertext. - - If possible, use the method `encrypt_and_digest` instead. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The next piece of data to encrypt or ``None`` to signify - that encryption has finished and that any remaining ciphertext - has to be produced. - :Return: - the ciphertext, as a byte string. - Its length may not match the length of the *plaintext*. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() can only be called after" - " initialization or an update()") - - if plaintext is None: - self._next = ["digest"] - else: - self._next = ["encrypt"] - return self._transcrypt(plaintext, _raw_ocb_lib.OCB_encrypt, "encrypt") - - def decrypt(self, ciphertext=None): - """Decrypt the next piece of ciphertext. - - After the entire ciphertext has been passed (but before `verify`), - you **must** call this method one last time with no arguments to collect - the remaining piece of plaintext. - - If possible, use the method `decrypt_and_verify` instead. 
- - :Parameters: - ciphertext : bytes/bytearray/memoryview - The next piece of data to decrypt or ``None`` to signify - that decryption has finished and that any remaining plaintext - has to be produced. - :Return: - the plaintext, as a byte string. - Its length may not match the length of the *ciphertext*. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() can only be called after" - " initialization or an update()") - - if ciphertext is None: - self._next = ["verify"] - else: - self._next = ["decrypt"] - return self._transcrypt(ciphertext, - _raw_ocb_lib.OCB_decrypt, - "decrypt") - - def _compute_mac_tag(self): - - if self._mac_tag is not None: - return - - if self._cache_A: - self._update(self._cache_A, len(self._cache_A)) - self._cache_A = b"" - - mac_tag = create_string_buffer(16) - result = _raw_ocb_lib.OCB_digest(self._state.get(), - mac_tag, - c_size_t(len(mac_tag)) - ) - if result: - raise ValueError("Error %d while computing digest in OCB mode" - % result) - self._mac_tag = get_raw_buffer(mac_tag)[:self._mac_len] - - def digest(self): - """Compute the *binary* MAC tag. - - Call this method after the final `encrypt` (the one with no arguments) - to obtain the MAC tag. - - The MAC tag is needed by the receiver to determine authenticity - of the message. - - :Return: the MAC, as a byte string. - """ - - if "digest" not in self._next: - raise TypeError("digest() cannot be called now for this cipher") - - assert(len(self._cache_P) == 0) - - self._next = ["digest"] - - if self._mac_tag is None: - self._compute_mac_tag() - - return self._mac_tag - - def hexdigest(self): - """Compute the *printable* MAC tag. - - This method is like `digest`. - - :Return: the MAC, as a hexadecimal string. - """ - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def verify(self, received_mac_tag): - """Validate the *binary* MAC tag. - - Call this method after the final `decrypt` (the one with no arguments) - to check if the message is authentic and valid. - - :Parameters: - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "verify" not in self._next: - raise TypeError("verify() cannot be called now for this cipher") - - assert(len(self._cache_P) == 0) - - self._next = ["verify"] - - if self._mac_tag is None: - self._compute_mac_tag() - - secret = get_random_bytes(16) - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Validate the *printable* MAC tag. - - This method is like `verify`. - - :Parameters: - hex_mac_tag : string - This is the *printable* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - self.verify(unhexlify(hex_mac_tag)) - - def encrypt_and_digest(self, plaintext): - """Encrypt the message and create the MAC tag in one step. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The entire message to encrypt. 
- :Return: - a tuple with two byte strings: - - - the encrypted data - - the MAC - """ - - return self.encrypt(plaintext) + self.encrypt(), self.digest() - - def decrypt_and_verify(self, ciphertext, received_mac_tag): - """Decrypted the message and verify its authenticity in one step. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The entire message to decrypt. - received_mac_tag : byte string - This is the *binary* MAC, as received from the sender. - - :Return: the decrypted data (byte string). - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - plaintext = self.decrypt(ciphertext) + self.decrypt() - self.verify(received_mac_tag) - return plaintext - - -def _create_ocb_cipher(factory, **kwargs): - """Create a new block cipher, configured in OCB mode. - - :Parameters: - factory : module - A symmetric cipher module from `Cryptodome.Cipher` - (like `Cryptodome.Cipher.AES`). - - :Keywords: - nonce : bytes/bytearray/memoryview - A value that must never be reused for any other encryption. - Its length can vary from 1 to 15 bytes. - If not specified, a random 15 bytes long nonce is generated. - - mac_len : integer - Length of the MAC, in bytes. - It must be in the range ``[8..16]``. - The default is 16 (128 bits). - - Any other keyword will be passed to the underlying block cipher. - See the relevant documentation for details (at least ``key`` will need - to be present). - """ - - try: - nonce = kwargs.pop("nonce", None) - if nonce is None: - nonce = get_random_bytes(15) - mac_len = kwargs.pop("mac_len", 16) - except KeyError as e: - raise TypeError("Keyword missing: " + str(e)) - - return OcbMode(factory, nonce, mac_len, kwargs) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ocb.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ocb.pyi deleted file mode 100644 index a1909fc..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ocb.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from types import ModuleType -from typing import Union, Any, Optional, Tuple, Dict, overload - -Buffer = Union[bytes, bytearray, memoryview] - -class OcbMode(object): - block_size: int - nonce: Buffer - - def __init__(self, - factory: ModuleType, - nonce: Buffer, - mac_len: int, - cipher_params: Dict) -> None: ... - - def update(self, assoc_data: Buffer) -> OcbMode: ... - - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, received_mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - def encrypt_and_digest(self, - plaintext: Buffer) -> Tuple[bytes, bytes]: ... - def decrypt_and_verify(self, - ciphertext: Buffer, - received_mac_tag: Buffer) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ofb.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ofb.py deleted file mode 100644 index 8c0ccf6..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ofb.py +++ /dev/null @@ -1,282 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/mode_ofb.py : OFB mode -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -""" -Output Feedback (CFB) mode. -""" - -__all__ = ['OfbMode'] - -from Cryptodome.Util.py3compat import _copy_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - create_string_buffer, get_raw_buffer, - SmartPointer, c_size_t, c_uint8_ptr, - is_writeable_buffer) - -from Cryptodome.Random import get_random_bytes - -raw_ofb_lib = load_pycryptodome_raw_lib("Cryptodome.Cipher._raw_ofb", """ - int OFB_start_operation(void *cipher, - const uint8_t iv[], - size_t iv_len, - void **pResult); - int OFB_encrypt(void *ofbState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int OFB_decrypt(void *ofbState, - const uint8_t *in, - uint8_t *out, - size_t data_len); - int OFB_stop_operation(void *state); - """ - ) - - -class OfbMode(object): - """*Output FeedBack (OFB)*. - - This mode is very similar to CBC, but it - transforms the underlying block cipher into a stream cipher. - - The keystream is the iterated block encryption of the - previous ciphertext block. - - An Initialization Vector (*IV*) is required. - - See `NIST SP800-38A`_ , Section 6.4. - - .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf - - :undocumented: __init__ - """ - - def __init__(self, block_cipher, iv): - """Create a new block cipher, configured in OFB mode. - - :Parameters: - block_cipher : C pointer - A smart pointer to the low-level block cipher instance. - - iv : bytes/bytearray/memoryview - The initialization vector to use for encryption or decryption. - It is as long as the cipher block. - - **The IV must be a nonce, to to be reused for any other - message**. It shall be a nonce or a random value. - - Reusing the *IV* for encryptions performed with the same key - compromises confidentiality. - """ - - self._state = VoidPointer() - result = raw_ofb_lib.OFB_start_operation(block_cipher.get(), - c_uint8_ptr(iv), - c_size_t(len(iv)), - self._state.address_of()) - if result: - raise ValueError("Error %d while instantiating the OFB mode" - % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the cipher mode - self._state = SmartPointer(self._state.get(), - raw_ofb_lib.OFB_stop_operation) - - # Memory allocated for the underlying block cipher is now owed - # by the cipher mode - block_cipher.release() - - self.block_size = len(iv) - """The block size of the underlying cipher, in bytes.""" - - self.iv = _copy_bytes(None, None, iv) - """The Initialization Vector originally used to create the object. 
- The value does not change.""" - - self.IV = self.iv - """Alias for `iv`""" - - self._next = ["encrypt", "decrypt"] - - def encrypt(self, plaintext, output=None): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. - - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - If ``output`` is ``None``, the ciphertext is returned as ``bytes``. - Otherwise, ``None``. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() cannot be called after decrypt()") - self._next = ["encrypt"] - - if output is None: - ciphertext = create_string_buffer(len(plaintext)) - else: - ciphertext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(plaintext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_ofb_lib.OFB_encrypt(self._state.get(), - c_uint8_ptr(plaintext), - c_uint8_ptr(ciphertext), - c_size_t(len(plaintext))) - if result: - raise ValueError("Error %d while encrypting in OFB mode" % result) - - if output is None: - return get_raw_buffer(ciphertext) - else: - return None - - def decrypt(self, ciphertext, output=None): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - :Keywords: - output : bytearray/memoryview - The location where the plaintext is written to. - If ``None``, the plaintext is returned. - :Return: - If ``output`` is ``None``, the plaintext is returned as ``bytes``. - Otherwise, ``None``. 
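A short sketch of the OFB interface documented above, including the optional output buffer that makes encrypt()/decrypt() write into a caller-supplied bytearray instead of returning bytes; key, IV and message are placeholders.

from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes

key = get_random_bytes(16)
iv = get_random_bytes(16)           # must match the cipher block size
message = b"stream-cipher style encryption, no padding needed"

ct = AES.new(key, AES.MODE_OFB, iv=iv).encrypt(message)
assert AES.new(key, AES.MODE_OFB, iv=iv).decrypt(ct) == message

buf = bytearray(len(message))       # with output=..., nothing is returned
AES.new(key, AES.MODE_OFB, iv=iv).encrypt(message, output=buf)
assert bytes(buf) == ct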
- """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() cannot be called after encrypt()") - self._next = ["decrypt"] - - if output is None: - plaintext = create_string_buffer(len(ciphertext)) - else: - plaintext = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(ciphertext) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(plaintext)) - - result = raw_ofb_lib.OFB_decrypt(self._state.get(), - c_uint8_ptr(ciphertext), - c_uint8_ptr(plaintext), - c_size_t(len(ciphertext))) - if result: - raise ValueError("Error %d while decrypting in OFB mode" % result) - - if output is None: - return get_raw_buffer(plaintext) - else: - return None - - -def _create_ofb_cipher(factory, **kwargs): - """Instantiate a cipher object that performs OFB encryption/decryption. - - :Parameters: - factory : module - The underlying block cipher, a module from ``Cryptodome.Cipher``. - - :Keywords: - iv : bytes/bytearray/memoryview - The IV to use for OFB. - - IV : bytes/bytearray/memoryview - Alias for ``iv``. - - Any other keyword will be passed to the underlying block cipher. - See the relevant documentation for details (at least ``key`` will need - to be present). - """ - - cipher_state = factory._create_base_cipher(kwargs) - iv = kwargs.pop("IV", None) - IV = kwargs.pop("iv", None) - - if (None, None) == (iv, IV): - iv = get_random_bytes(factory.block_size) - if iv is not None: - if IV is not None: - raise TypeError("You must either use 'iv' or 'IV', not both") - else: - iv = IV - - if len(iv) != factory.block_size: - raise ValueError("Incorrect IV length (it must be %d bytes long)" % - factory.block_size) - - if kwargs: - raise TypeError("Unknown parameters for OFB: %s" % str(kwargs)) - - return OfbMode(cipher_state, iv) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ofb.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ofb.pyi deleted file mode 100644 index d28608e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_ofb.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Union, overload - -from Cryptodome.Util._raw_api import SmartPointer - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['OfbMode'] - -class OfbMode(object): - block_size: int - iv: Buffer - IV: Buffer - - def __init__(self, - block_cipher: SmartPointer, - iv: Buffer) -> None: ... - @overload - def encrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - @overload - def decrypt(self, plaintext: Buffer) -> bytes: ... - @overload - def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... - diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_openpgp.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_openpgp.py deleted file mode 100644 index d86ed19..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_openpgp.py +++ /dev/null @@ -1,206 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -OpenPGP mode. -""" - -__all__ = ['OpenPgpMode'] - -from Cryptodome.Util.py3compat import _copy_bytes -from Cryptodome.Random import get_random_bytes - -class OpenPgpMode(object): - """OpenPGP mode. - - This mode is a variant of CFB, and it is only used in PGP and - OpenPGP_ applications. If in doubt, use another mode. - - An Initialization Vector (*IV*) is required. - - Unlike CFB, the *encrypted* IV (not the IV itself) is - transmitted to the receiver. - - The IV is a random data block. For legacy reasons, two of its bytes are - duplicated to act as a checksum for the correctness of the key, which is now - known to be insecure and is ignored. The encrypted IV is therefore 2 bytes - longer than the clean IV. - - .. _OpenPGP: http://tools.ietf.org/html/rfc4880 - - :undocumented: __init__ - """ - - def __init__(self, factory, key, iv, cipher_params): - - #: The block size of the underlying cipher, in bytes. - self.block_size = factory.block_size - - self._done_first_block = False # True after the first encryption - - # Instantiate a temporary cipher to process the IV - IV_cipher = factory.new( - key, - factory.MODE_CFB, - IV=b'\x00' * self.block_size, - segment_size=self.block_size * 8, - **cipher_params) - - iv = _copy_bytes(None, None, iv) - - # The cipher will be used for... - if len(iv) == self.block_size: - # ... encryption - self._encrypted_IV = IV_cipher.encrypt(iv + iv[-2:]) - elif len(iv) == self.block_size + 2: - # ... decryption - self._encrypted_IV = iv - # Last two bytes are for a deprecated "quick check" feature that - # should not be used. (https://eprint.iacr.org/2005/033) - iv = IV_cipher.decrypt(iv)[:-2] - else: - raise ValueError("Length of IV must be %d or %d bytes" - " for MODE_OPENPGP" - % (self.block_size, self.block_size + 2)) - - self.iv = self.IV = iv - - # Instantiate the cipher for the real PGP data - self._cipher = factory.new( - key, - factory.MODE_CFB, - IV=self._encrypted_IV[-self.block_size:], - segment_size=self.block_size * 8, - **cipher_params) - - def encrypt(self, plaintext): - """Encrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have encrypted a message - you cannot encrypt (or decrypt) another message using the same - object. - - The data to encrypt can be broken up in two or - more pieces and `encrypt` can be called multiple times. 
- - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is equivalent to: - - >>> c.encrypt(a+b) - - This function does not add any padding to the plaintext. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - - :Return: - the encrypted data, as a byte string. - It is as long as *plaintext* with one exception: - when encrypting the first message chunk, - the encypted IV is prepended to the returned ciphertext. - """ - - res = self._cipher.encrypt(plaintext) - if not self._done_first_block: - res = self._encrypted_IV + res - self._done_first_block = True - return res - - def decrypt(self, ciphertext): - """Decrypt data with the key and the parameters set at initialization. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - The data to decrypt can be broken up in two or - more pieces and `decrypt` can be called multiple times. - - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is equivalent to: - - >>> c.decrypt(a+b) - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - - :Return: the decrypted data (byte string). - """ - - return self._cipher.decrypt(ciphertext) - - -def _create_openpgp_cipher(factory, **kwargs): - """Create a new block cipher, configured in OpenPGP mode. - - :Parameters: - factory : module - The module. - - :Keywords: - key : bytes/bytearray/memoryview - The secret key to use in the symmetric cipher. - - IV : bytes/bytearray/memoryview - The initialization vector to use for encryption or decryption. - - For encryption, the IV must be as long as the cipher block size. - - For decryption, it must be 2 bytes longer (it is actually the - *encrypted* IV which was prefixed to the ciphertext). - """ - - iv = kwargs.pop("IV", None) - IV = kwargs.pop("iv", None) - - if (None, None) == (iv, IV): - iv = get_random_bytes(factory.block_size) - if iv is not None: - if IV is not None: - raise TypeError("You must either use 'iv' or 'IV', not both") - else: - iv = IV - - try: - key = kwargs.pop("key") - except KeyError as e: - raise TypeError("Missing component: " + str(e)) - - return OpenPgpMode(factory, key, iv, kwargs) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_openpgp.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_openpgp.pyi deleted file mode 100644 index 14b8105..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_openpgp.pyi +++ /dev/null @@ -1,20 +0,0 @@ -from types import ModuleType -from typing import Union, Dict - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['OpenPgpMode'] - -class OpenPgpMode(object): - block_size: int - iv: Union[bytes, bytearray, memoryview] - IV: Union[bytes, bytearray, memoryview] - - def __init__(self, - factory: ModuleType, - key: Buffer, - iv: Buffer, - cipher_params: Dict) -> None: ... - def encrypt(self, plaintext: Buffer) -> bytes: ... - def decrypt(self, plaintext: Buffer) -> bytes: ... - diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_siv.py b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_siv.py deleted file mode 100644 index 4a76ad6..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_siv.py +++ /dev/null @@ -1,392 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. 
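The OpenPGP mode described above prepends the encrypted IV (block size + 2 bytes) to the first chunk of ciphertext, and the decrypting side must receive exactly that prefix as its IV. A sketch under those assumptions, with placeholder key and message:

from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes

key = get_random_bytes(16)
message = b"OpenPGP-style CFB payload"

enc = AES.new(key, AES.MODE_OPENPGP)        # a random IV is generated internally
ct = enc.encrypt(message)                   # the encrypted IV (16 + 2 bytes) is prepended here

prefix = AES.block_size + 2                 # 18 bytes for AES
dec = AES.new(key, AES.MODE_OPENPGP, iv=ct[:prefix])
assert dec.decrypt(ct[prefix:]) == message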
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Synthetic Initialization Vector (SIV) mode. -""" - -__all__ = ['SivMode'] - -from binascii import hexlify, unhexlify - -from Cryptodome.Util.py3compat import bord, _copy_bytes - -from Cryptodome.Util._raw_api import is_buffer - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long -from Cryptodome.Protocol.KDF import _S2V -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Random import get_random_bytes - - -class SivMode(object): - """Synthetic Initialization Vector (SIV). - - This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. - It provides both confidentiality and authenticity. - - The header of the message may be left in the clear, if needed, and it will - still be subject to authentication. The decryption step tells the receiver - if the message comes from a source that really knowns the secret key. - Additionally, decryption detects if any part of the message - including the - header - has been modified or corrupted. - - Unlike other AEAD modes such as CCM, EAX or GCM, accidental reuse of a - nonce is not catastrophic for the confidentiality of the message. The only - effect is that an attacker can tell when the same plaintext (and same - associated data) is protected with the same key. - - The length of the MAC is fixed to the block size of the underlying cipher. - The key size is twice the length of the key of the underlying cipher. - - This mode is only available for AES ciphers. - - +--------------------+---------------+-------------------+ - | Cipher | SIV MAC size | SIV key length | - | | (bytes) | (bytes) | - +====================+===============+===================+ - | AES-128 | 16 | 32 | - +--------------------+---------------+-------------------+ - | AES-192 | 16 | 48 | - +--------------------+---------------+-------------------+ - | AES-256 | 16 | 64 | - +--------------------+---------------+-------------------+ - - See `RFC5297`_ and the `original paper`__. - - .. _RFC5297: https://tools.ietf.org/html/rfc5297 - .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html - .. 
__: http://www.cs.ucdavis.edu/~rogaway/papers/keywrap.pdf - - :undocumented: __init__ - """ - - def __init__(self, factory, key, nonce, kwargs): - - self.block_size = factory.block_size - """The block size of the underlying cipher, in bytes.""" - - self._factory = factory - - self._cipher_params = kwargs - - if len(key) not in (32, 48, 64): - raise ValueError("Incorrect key length (%d bytes)" % len(key)) - - if nonce is not None: - if not is_buffer(nonce): - raise TypeError("When provided, the nonce must be bytes, bytearray or memoryview") - - if len(nonce) == 0: - raise ValueError("When provided, the nonce must be non-empty") - - self.nonce = _copy_bytes(None, None, nonce) - """Public attribute is only available in case of non-deterministic - encryption.""" - - subkey_size = len(key) // 2 - - self._mac_tag = None # Cache for MAC tag - self._kdf = _S2V(key[:subkey_size], - ciphermod=factory, - cipher_params=self._cipher_params) - self._subkey_cipher = key[subkey_size:] - - # Purely for the purpose of verifying that cipher_params are OK - factory.new(key[:subkey_size], factory.MODE_ECB, **kwargs) - - # Allowed transitions after initialization - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - def _create_ctr_cipher(self, v): - """Create a new CTR cipher from V in SIV mode""" - - v_int = bytes_to_long(v) - q = v_int & 0xFFFFFFFFFFFFFFFF7FFFFFFF7FFFFFFF - return self._factory.new( - self._subkey_cipher, - self._factory.MODE_CTR, - initial_value=q, - nonce=b"", - **self._cipher_params) - - def update(self, component): - """Protect one associated data component - - For SIV, the associated data is a sequence (*vector*) of non-empty - byte strings (*components*). - - This method consumes the next component. It must be called - once for each of the components that constitue the associated data. - - Note that the components have clear boundaries, so that: - - >>> cipher.update(b"builtin") - >>> cipher.update(b"securely") - - is not equivalent to: - - >>> cipher.update(b"built") - >>> cipher.update(b"insecurely") - - If there is no associated data, this method must not be called. - - :Parameters: - component : bytes/bytearray/memoryview - The next associated data component. - """ - - if "update" not in self._next: - raise TypeError("update() can only be called" - " immediately after initialization") - - self._next = ["update", "encrypt", "decrypt", - "digest", "verify"] - - return self._kdf.update(component) - - def encrypt(self, plaintext): - """ - For SIV, encryption and MAC authentication must take place at the same - point. This method shall not be used. - - Use `encrypt_and_digest` instead. - """ - - raise TypeError("encrypt() not allowed for SIV mode." - " Use encrypt_and_digest() instead.") - - def decrypt(self, ciphertext): - """ - For SIV, decryption and verification must take place at the same - point. This method shall not be used. - - Use `decrypt_and_verify` instead. - """ - - raise TypeError("decrypt() not allowed for SIV mode." - " Use decrypt_and_verify() instead.") - - def digest(self): - """Compute the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method returns the MAC that shall be sent to the receiver, - together with the ciphertext. - - :Return: the MAC, as a byte string. 
- """ - - if "digest" not in self._next: - raise TypeError("digest() cannot be called when decrypting" - " or validating a message") - self._next = ["digest"] - if self._mac_tag is None: - self._mac_tag = self._kdf.derive() - return self._mac_tag - - def hexdigest(self): - """Compute the *printable* MAC tag. - - This method is like `digest`. - - :Return: the MAC, as a hexadecimal string. - """ - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def verify(self, received_mac_tag): - """Validate the *binary* MAC tag. - - The caller invokes this function at the very end. - - This method checks if the decrypted message is indeed valid - (that is, if the key is correct) and it has not been - tampered with while in transit. - - :Parameters: - received_mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "verify" not in self._next: - raise TypeError("verify() cannot be called" - " when encrypting a message") - self._next = ["verify"] - - if self._mac_tag is None: - self._mac_tag = self._kdf.derive() - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Validate the *printable* MAC tag. - - This method is like `verify`. - - :Parameters: - hex_mac_tag : string - This is the *printable* MAC, as received from the sender. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - self.verify(unhexlify(hex_mac_tag)) - - def encrypt_and_digest(self, plaintext, output=None): - """Perform encrypt() and digest() in one step. - - :Parameters: - plaintext : bytes/bytearray/memoryview - The piece of data to encrypt. - :Keywords: - output : bytearray/memoryview - The location where the ciphertext must be written to. - If ``None``, the ciphertext is returned. - :Return: - a tuple with two items: - - - the ciphertext, as ``bytes`` - - the MAC tag, as ``bytes`` - - The first item becomes ``None`` when the ``output`` parameter - specified a location for the result. - """ - - if "encrypt" not in self._next: - raise TypeError("encrypt() can only be called after" - " initialization or an update()") - - self._next = ["digest"] - - # Compute V (MAC) - if hasattr(self, 'nonce'): - self._kdf.update(self.nonce) - self._kdf.update(plaintext) - self._mac_tag = self._kdf.derive() - - cipher = self._create_ctr_cipher(self._mac_tag) - - return cipher.encrypt(plaintext, output=output), self._mac_tag - - def decrypt_and_verify(self, ciphertext, mac_tag, output=None): - """Perform decryption and verification in one step. - - A cipher object is stateful: once you have decrypted a message - you cannot decrypt (or encrypt) another message with the same - object. - - You cannot reuse an object for encrypting - or decrypting other data with the same key. - - This function does not remove any padding from the plaintext. - - :Parameters: - ciphertext : bytes/bytearray/memoryview - The piece of data to decrypt. - It can be of any length. - mac_tag : bytes/bytearray/memoryview - This is the *binary* MAC, as received from the sender. - :Keywords: - output : bytearray/memoryview - The location where the plaintext must be written to. 
- If ``None``, the plaintext is returned. - :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` - parameter specified a location for the result. - :Raises ValueError: - if the MAC does not match. The message has been tampered with - or the key is incorrect. - """ - - if "decrypt" not in self._next: - raise TypeError("decrypt() can only be called" - " after initialization or an update()") - self._next = ["verify"] - - # Take the MAC and start the cipher for decryption - self._cipher = self._create_ctr_cipher(mac_tag) - - plaintext = self._cipher.decrypt(ciphertext, output=output) - - if hasattr(self, 'nonce'): - self._kdf.update(self.nonce) - self._kdf.update(plaintext if output is None else output) - self.verify(mac_tag) - - return plaintext - - -def _create_siv_cipher(factory, **kwargs): - """Create a new block cipher, configured in - Synthetic Initializaton Vector (SIV) mode. - - :Parameters: - - factory : object - A symmetric cipher module from `Cryptodome.Cipher` - (like `Cryptodome.Cipher.AES`). - - :Keywords: - - key : bytes/bytearray/memoryview - The secret key to use in the symmetric cipher. - It must be 32, 48 or 64 bytes long. - If AES is the chosen cipher, the variants *AES-128*, - *AES-192* and or *AES-256* will be used internally. - - nonce : bytes/bytearray/memoryview - For deterministic encryption, it is not present. - - Otherwise, it is a value that must never be reused - for encrypting message under this key. - - There are no restrictions on its length, - but it is recommended to use at least 16 bytes. - """ - - try: - key = kwargs.pop("key") - except KeyError as e: - raise TypeError("Missing parameter: " + str(e)) - - nonce = kwargs.pop("nonce", None) - - return SivMode(factory, key, nonce, kwargs) diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_siv.pyi b/venv/Lib/site-packages/Cryptodome/Cipher/_mode_siv.pyi deleted file mode 100644 index 2934f23..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_mode_siv.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from types import ModuleType -from typing import Union, Tuple, Dict, Optional, overload - -Buffer = Union[bytes, bytearray, memoryview] - -__all__ = ['SivMode'] - -class SivMode(object): - block_size: int - nonce: bytes - - def __init__(self, - factory: ModuleType, - key: Buffer, - nonce: Buffer, - kwargs: Dict) -> None: ... - - def update(self, component: Buffer) -> SivMode: ... - - def encrypt(self, plaintext: Buffer) -> bytes: ... - def decrypt(self, plaintext: Buffer) -> bytes: ... - - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, received_mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - @overload - def encrypt_and_digest(self, - plaintext: Buffer) -> Tuple[bytes, bytes]: ... - @overload - def encrypt_and_digest(self, - plaintext: Buffer, - output: Buffer) -> Tuple[None, bytes]: ... - def decrypt_and_verify(self, - ciphertext: Buffer, - received_mac_tag: Buffer, - output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... 
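A sketch of the deterministic-AEAD call pattern SivMode documents: the key is twice the AES key length, each update() call contributes one clearly delimited associated-data component, and only encrypt_and_digest()/decrypt_and_verify() may be used. Values are placeholders.

from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes

key = get_random_bytes(32)          # 32 bytes -> AES-128 is used internally for SIV
payload = b"deterministic AEAD payload"

enc = AES.new(key, AES.MODE_SIV)    # no nonce: encryption is fully deterministic
enc.update(b"header-1")             # two separate components, not one concatenated string
enc.update(b"header-2")
ciphertext, tag = enc.encrypt_and_digest(payload)

dec = AES.new(key, AES.MODE_SIV)
dec.update(b"header-1")
dec.update(b"header-2")
assert dec.decrypt_and_verify(ciphertext, tag) == payload

Because SIV is nonce-misuse resistant, repeating the same plaintext and components only reveals that they repeat; a nonce= keyword can still be supplied for randomized encryption.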
diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_pkcs1_decode.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_pkcs1_decode.pyd deleted file mode 100644 index 13059c4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_pkcs1_decode.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_pkcs1_oaep_decode.py b/venv/Lib/site-packages/Cryptodome/Cipher/_pkcs1_oaep_decode.py deleted file mode 100644 index 82bdaa7..0000000 --- a/venv/Lib/site-packages/Cryptodome/Cipher/_pkcs1_oaep_decode.py +++ /dev/null @@ -1,41 +0,0 @@ -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, c_size_t, - c_uint8_ptr) - - -_raw_pkcs1_decode = load_pycryptodome_raw_lib("Cryptodome.Cipher._pkcs1_decode", - """ - int pkcs1_decode(const uint8_t *em, size_t len_em, - const uint8_t *sentinel, size_t len_sentinel, - size_t expected_pt_len, - uint8_t *output); - - int oaep_decode(const uint8_t *em, - size_t em_len, - const uint8_t *lHash, - size_t hLen, - const uint8_t *db, - size_t db_len); - """) - - -def pkcs1_decode(em, sentinel, expected_pt_len, output): - if len(em) != len(output): - raise ValueError("Incorrect output length") - - ret = _raw_pkcs1_decode.pkcs1_decode(c_uint8_ptr(em), - c_size_t(len(em)), - c_uint8_ptr(sentinel), - c_size_t(len(sentinel)), - c_size_t(expected_pt_len), - c_uint8_ptr(output)) - return ret - - -def oaep_decode(em, lHash, db): - ret = _raw_pkcs1_decode.oaep_decode(c_uint8_ptr(em), - c_size_t(len(em)), - c_uint8_ptr(lHash), - c_size_t(len(lHash)), - c_uint8_ptr(db), - c_size_t(len(db))) - return ret diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_aes.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_aes.pyd deleted file mode 100644 index 70e520d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_aes.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_aesni.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_aesni.pyd deleted file mode 100644 index 97bf6e8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_aesni.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_arc2.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_arc2.pyd deleted file mode 100644 index 53f802e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_arc2.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_blowfish.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_blowfish.pyd deleted file mode 100644 index a03f704..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_blowfish.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cast.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cast.pyd deleted file mode 100644 index 61e6ec6..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cast.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cbc.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cbc.pyd deleted file mode 100644 index dce36b1..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cbc.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cfb.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cfb.pyd deleted file mode 100644 index aa91db3..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_cfb.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ctr.pyd 
b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ctr.pyd deleted file mode 100644 index 77950e2..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ctr.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_des.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_des.pyd deleted file mode 100644 index 4bbefdd..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_des.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_des3.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_des3.pyd deleted file mode 100644 index d24825e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_des3.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ecb.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ecb.pyd deleted file mode 100644 index 2170586..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ecb.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_eksblowfish.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_eksblowfish.pyd deleted file mode 100644 index dc39ffe..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_eksblowfish.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ocb.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ocb.pyd deleted file mode 100644 index 75b5675..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ocb.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ofb.pyd b/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ofb.pyd deleted file mode 100644 index 2266fdc..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Cipher/_raw_ofb.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2b.py b/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2b.py deleted file mode 100644 index 85da887..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2b.py +++ /dev/null @@ -1,247 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bord, tobytes - -from Cryptodome.Random import get_random_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_blake2b_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._BLAKE2b", - """ - int blake2b_init(void **state, - const uint8_t *key, - size_t key_size, - size_t digest_size); - int blake2b_destroy(void *state); - int blake2b_update(void *state, - const uint8_t *buf, - size_t len); - int blake2b_digest(const void *state, - uint8_t digest[64]); - int blake2b_copy(const void *src, void *dst); - """) - - -class BLAKE2b_Hash(object): - """A BLAKE2b hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The internal block size of the hash algorithm in bytes. - block_size = 64 - - def __init__(self, data, key, digest_bytes, update_after_digest): - - # The size of the resulting hash in bytes. - self.digest_size = digest_bytes - - self._update_after_digest = update_after_digest - self._digest_done = False - - # See https://tools.ietf.org/html/rfc7693 - if digest_bytes in (20, 32, 48, 64) and not key: - self.oid = "1.3.6.1.4.1.1722.12.2.1." + str(digest_bytes) - - state = VoidPointer() - result = _raw_blake2b_lib.blake2b_init(state.address_of(), - c_uint8_ptr(key), - c_size_t(len(key)), - c_size_t(digest_bytes) - ) - if result: - raise ValueError("Error %d while instantiating BLAKE2b" % result) - self._state = SmartPointer(state.get(), - _raw_blake2b_lib.blake2b_destroy) - if data: - self.update(data) - - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (bytes/bytearray/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_blake2b_lib.blake2b_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing BLAKE2b data" % result) - return self - - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(64) - result = _raw_blake2b_lib.blake2b_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while creating BLAKE2b digest" % result) - - self._digest_done = True - - return get_raw_buffer(bfr)[:self.digest_size] - - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) - - - def verify(self, mac_tag): - """Verify that a given **binary** MAC (computed by another party) - is valid. - - Args: - mac_tag (bytes/bytearray/memoryview): the expected MAC of the message. 
- - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - secret = get_random_bytes(16) - - mac1 = new(digest_bits=160, key=secret, data=mac_tag) - mac2 = new(digest_bits=160, key=secret, data=self.digest()) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - - def hexverify(self, hex_mac_tag): - """Verify that a given **printable** MAC (computed by another party) - is valid. - - Args: - hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - self.verify(unhexlify(tobytes(hex_mac_tag))) - - - def new(self, **kwargs): - """Return a new instance of a BLAKE2b hash object. - See :func:`new`. - """ - - if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: - kwargs["digest_bytes"] = self.digest_size - - return new(**kwargs) - - -def new(**kwargs): - """Create a new hash object. - - Args: - data (bytes/bytearray/memoryview): - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`BLAKE2b_Hash.update`. - digest_bytes (integer): - Optional. The size of the digest, in bytes (1 to 64). Default is 64. - digest_bits (integer): - Optional and alternative to ``digest_bytes``. - The size of the digest, in bits (8 to 512, in steps of 8). - Default is 512. - key (bytes/bytearray/memoryview): - Optional. The key to use to compute the MAC (1 to 64 bytes). - If not specified, no key will be used. - update_after_digest (boolean): - Optional. By default, a hash object cannot be updated anymore after - the digest is computed. When this flag is ``True``, such check - is no longer enforced. - - Returns: - A :class:`BLAKE2b_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - - digest_bytes = kwargs.pop("digest_bytes", None) - digest_bits = kwargs.pop("digest_bits", None) - if None not in (digest_bytes, digest_bits): - raise TypeError("Only one digest parameter must be provided") - if (None, None) == (digest_bytes, digest_bits): - digest_bytes = 64 - if digest_bytes is not None: - if not (1 <= digest_bytes <= 64): - raise ValueError("'digest_bytes' not in range 1..64") - else: - if not (8 <= digest_bits <= 512) or (digest_bits % 8): - raise ValueError("'digest_bits' not in range 8..512, " - "with steps of 8") - digest_bytes = digest_bits // 8 - - key = kwargs.pop("key", b"") - if len(key) > 64: - raise ValueError("BLAKE2b key cannot exceed 64 bytes") - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return BLAKE2b_Hash(data, key, digest_bytes, update_after_digest) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2b.pyi b/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2b.pyi deleted file mode 100644 index d37c374..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2b.pyi +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Any, Union -from types import ModuleType - -Buffer = Union[bytes, bytearray, memoryview] - -class BLAKE2b_Hash(object): - block_size: int - digest_size: int - oid: str - - def __init__(self, - data: Buffer, - key: Buffer, - digest_bytes: bytes, - update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> BLAKE2b_Hash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... 
- def verify(self, mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - def new(self, - data: Buffer = ..., - digest_bytes: int = ..., - digest_bits: int = ..., - key: Buffer = ..., - update_after_digest: bool = ...) -> BLAKE2b_Hash: ... - -def new(data: Buffer = ..., - digest_bytes: int = ..., - digest_bits: int = ..., - key: Buffer = ..., - update_after_digest: bool = ...) -> BLAKE2b_Hash: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2s.py b/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2s.py deleted file mode 100644 index 43be5c4..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2s.py +++ /dev/null @@ -1,247 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bord, tobytes - -from Cryptodome.Random import get_random_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_blake2s_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._BLAKE2s", - """ - int blake2s_init(void **state, - const uint8_t *key, - size_t key_size, - size_t digest_size); - int blake2s_destroy(void *state); - int blake2s_update(void *state, - const uint8_t *buf, - size_t len); - int blake2s_digest(const void *state, - uint8_t digest[32]); - int blake2s_copy(const void *src, void *dst); - """) - - -class BLAKE2s_Hash(object): - """A BLAKE2s hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The internal block size of the hash algorithm in bytes. - block_size = 32 - - def __init__(self, data, key, digest_bytes, update_after_digest): - - # The size of the resulting hash in bytes. 
- self.digest_size = digest_bytes - - self._update_after_digest = update_after_digest - self._digest_done = False - - # See https://tools.ietf.org/html/rfc7693 - if digest_bytes in (16, 20, 28, 32) and not key: - self.oid = "1.3.6.1.4.1.1722.12.2.2." + str(digest_bytes) - - state = VoidPointer() - result = _raw_blake2s_lib.blake2s_init(state.address_of(), - c_uint8_ptr(key), - c_size_t(len(key)), - c_size_t(digest_bytes) - ) - if result: - raise ValueError("Error %d while instantiating BLAKE2s" % result) - self._state = SmartPointer(state.get(), - _raw_blake2s_lib.blake2s_destroy) - if data: - self.update(data) - - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_blake2s_lib.blake2s_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing BLAKE2s data" % result) - return self - - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(32) - result = _raw_blake2s_lib.blake2s_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while creating BLAKE2s digest" % result) - - self._digest_done = True - - return get_raw_buffer(bfr)[:self.digest_size] - - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) - - - def verify(self, mac_tag): - """Verify that a given **binary** MAC (computed by another party) - is valid. - - Args: - mac_tag (byte string/byte array/memoryview): the expected MAC of the message. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - secret = get_random_bytes(16) - - mac1 = new(digest_bits=160, key=secret, data=mac_tag) - mac2 = new(digest_bits=160, key=secret, data=self.digest()) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - - def hexverify(self, hex_mac_tag): - """Verify that a given **printable** MAC (computed by another party) - is valid. - - Args: - hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - self.verify(unhexlify(tobytes(hex_mac_tag))) - - - def new(self, **kwargs): - """Return a new instance of a BLAKE2s hash object. - See :func:`new`. - """ - - if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: - kwargs["digest_bytes"] = self.digest_size - - return new(**kwargs) - - -def new(**kwargs): - """Create a new hash object. - - Args: - data (byte string/byte array/memoryview): - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`BLAKE2s_Hash.update`. - digest_bytes (integer): - Optional. The size of the digest, in bytes (1 to 32). Default is 32. 
- digest_bits (integer): - Optional and alternative to ``digest_bytes``. - The size of the digest, in bits (8 to 256, in steps of 8). - Default is 256. - key (byte string): - Optional. The key to use to compute the MAC (1 to 64 bytes). - If not specified, no key will be used. - update_after_digest (boolean): - Optional. By default, a hash object cannot be updated anymore after - the digest is computed. When this flag is ``True``, such check - is no longer enforced. - - Returns: - A :class:`BLAKE2s_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - - digest_bytes = kwargs.pop("digest_bytes", None) - digest_bits = kwargs.pop("digest_bits", None) - if None not in (digest_bytes, digest_bits): - raise TypeError("Only one digest parameter must be provided") - if (None, None) == (digest_bytes, digest_bits): - digest_bytes = 32 - if digest_bytes is not None: - if not (1 <= digest_bytes <= 32): - raise ValueError("'digest_bytes' not in range 1..32") - else: - if not (8 <= digest_bits <= 256) or (digest_bits % 8): - raise ValueError("'digest_bits' not in range 8..256, " - "with steps of 8") - digest_bytes = digest_bits // 8 - - key = kwargs.pop("key", b"") - if len(key) > 32: - raise ValueError("BLAKE2s key cannot exceed 32 bytes") - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return BLAKE2s_Hash(data, key, digest_bytes, update_after_digest) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2s.pyi b/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2s.pyi deleted file mode 100644 index 374b3a4..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/BLAKE2s.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Any, Union - -Buffer = Union[bytes, bytearray, memoryview] - -class BLAKE2s_Hash(object): - block_size: int - digest_size: int - oid: str - - def __init__(self, - data: Buffer, - key: Buffer, - digest_bytes: bytes, - update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> BLAKE2s_Hash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - def new(self, **kwargs: Any) -> BLAKE2s_Hash: ... - -def new(data: Buffer = ..., - digest_bytes: int = ..., - digest_bits: int = ..., - key: Buffer = ..., - update_after_digest: bool = ...) -> BLAKE2s_Hash: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/CMAC.py b/venv/Lib/site-packages/Cryptodome/Hash/CMAC.py deleted file mode 100644 index 8feb79f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/CMAC.py +++ /dev/null @@ -1,306 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Hash/CMAC.py - Implements the CMAC algorithm -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from binascii import unhexlify - -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Util.strxor import strxor -from Cryptodome.Util.number import long_to_bytes, bytes_to_long -from Cryptodome.Util.py3compat import bord, tobytes, _copy_bytes -from Cryptodome.Random import get_random_bytes - - -# The size of the authentication tag produced by the MAC. -digest_size = None - - -def _shift_bytes(bs, xor_lsb=0): - num = (bytes_to_long(bs) << 1) ^ xor_lsb - return long_to_bytes(num, len(bs))[-len(bs):] - - -class CMAC(object): - """A CMAC hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar digest_size: the size in bytes of the resulting MAC tag - :vartype digest_size: integer - """ - - digest_size = None - - def __init__(self, key, msg, ciphermod, cipher_params, mac_len, - update_after_digest): - - self.digest_size = mac_len - - self._key = _copy_bytes(None, None, key) - self._factory = ciphermod - self._cipher_params = cipher_params - self._block_size = bs = ciphermod.block_size - self._mac_tag = None - self._update_after_digest = update_after_digest - - # Section 5.3 of NIST SP 800 38B and Appendix B - if bs == 8: - const_Rb = 0x1B - self._max_size = 8 * (2 ** 21) - elif bs == 16: - const_Rb = 0x87 - self._max_size = 16 * (2 ** 48) - else: - raise TypeError("CMAC requires a cipher with a block size" - " of 8 or 16 bytes, not %d" % bs) - - # Compute sub-keys - zero_block = b'\x00' * bs - self._ecb = ciphermod.new(key, - ciphermod.MODE_ECB, - **self._cipher_params) - L = self._ecb.encrypt(zero_block) - if bord(L[0]) & 0x80: - self._k1 = _shift_bytes(L, const_Rb) - else: - self._k1 = _shift_bytes(L) - if bord(self._k1[0]) & 0x80: - self._k2 = _shift_bytes(self._k1, const_Rb) - else: - self._k2 = _shift_bytes(self._k1) - - # Initialize CBC cipher with zero IV - self._cbc = ciphermod.new(key, - ciphermod.MODE_CBC, - zero_block, - **self._cipher_params) - - # Cache for outstanding data to authenticate - self._cache = bytearray(bs) - self._cache_n = 0 - - # Last piece of ciphertext produced - self._last_ct = zero_block - - # Last block that was encrypted with AES - self._last_pt = None - - # Counter for total message size - self._data_size = 0 - - if msg: - self.update(msg) - - def update(self, msg): - """Authenticate the next chunk of message. 
- - Args: - data (byte string/byte array/memoryview): The next chunk of data - """ - - if self._mac_tag is not None and not self._update_after_digest: - raise TypeError("update() cannot be called after digest() or verify()") - - self._data_size += len(msg) - bs = self._block_size - - if self._cache_n > 0: - filler = min(bs - self._cache_n, len(msg)) - self._cache[self._cache_n:self._cache_n+filler] = msg[:filler] - self._cache_n += filler - - if self._cache_n < bs: - return self - - msg = memoryview(msg)[filler:] - self._update(self._cache) - self._cache_n = 0 - - remain = len(msg) % bs - if remain > 0: - self._update(msg[:-remain]) - self._cache[:remain] = msg[-remain:] - else: - self._update(msg) - self._cache_n = remain - return self - - def _update(self, data_block): - """Update a block aligned to the block boundary""" - - bs = self._block_size - assert len(data_block) % bs == 0 - - if len(data_block) == 0: - return - - ct = self._cbc.encrypt(data_block) - if len(data_block) == bs: - second_last = self._last_ct - else: - second_last = ct[-bs*2:-bs] - self._last_ct = ct[-bs:] - self._last_pt = strxor(second_last, data_block[-bs:]) - - def copy(self): - """Return a copy ("clone") of the CMAC object. - - The copy will have the same internal state as the original CMAC - object. - This can be used to efficiently compute the MAC tag of byte - strings that share a common initial substring. - - :return: An :class:`CMAC` - """ - - obj = self.__new__(CMAC) - obj.__dict__ = self.__dict__.copy() - obj._cbc = self._factory.new(self._key, - self._factory.MODE_CBC, - self._last_ct, - **self._cipher_params) - obj._cache = self._cache[:] - obj._last_ct = self._last_ct[:] - return obj - - def digest(self): - """Return the **binary** (non-printable) MAC tag of the message - that has been authenticated so far. - - :return: The MAC tag, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bs = self._block_size - - if self._mac_tag is not None and not self._update_after_digest: - return self._mac_tag - - if self._data_size > self._max_size: - raise ValueError("MAC is unsafe for this message") - - if self._cache_n == 0 and self._data_size > 0: - # Last block was full - pt = strxor(self._last_pt, self._k1) - else: - # Last block is partial (or message length is zero) - partial = self._cache[:] - partial[self._cache_n:] = b'\x80' + b'\x00' * (bs - self._cache_n - 1) - pt = strxor(strxor(self._last_ct, partial), self._k2) - - self._mac_tag = self._ecb.encrypt(pt)[:self.digest_size] - - return self._mac_tag - - def hexdigest(self): - """Return the **printable** MAC tag of the message authenticated so far. - - :return: The MAC tag, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) - for x in tuple(self.digest())]) - - def verify(self, mac_tag): - """Verify that a given **binary** MAC (computed by another party) - is valid. - - Args: - mac_tag (byte string/byte array/memoryview): the expected MAC of the message. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. 
- """ - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Verify that a given **printable** MAC (computed by another party) - is valid. - - Args: - hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - self.verify(unhexlify(tobytes(hex_mac_tag))) - - -def new(key, msg=None, ciphermod=None, cipher_params=None, mac_len=None, - update_after_digest=False): - """Create a new MAC object. - - Args: - key (byte string/byte array/memoryview): - key for the CMAC object. - The key must be valid for the underlying cipher algorithm. - For instance, it must be 16 bytes long for AES-128. - ciphermod (module): - A cipher module from :mod:`Cryptodome.Cipher`. - The cipher's block size has to be 128 bits, - like :mod:`Cryptodome.Cipher.AES`, to reduce the probability - of collisions. - msg (byte string/byte array/memoryview): - Optional. The very first chunk of the message to authenticate. - It is equivalent to an early call to `CMAC.update`. Optional. - cipher_params (dict): - Optional. A set of parameters to use when instantiating a cipher - object. - mac_len (integer): - Length of the MAC, in bytes. - It must be at least 4 bytes long. - The default (and recommended) length matches the size of a cipher block. - update_after_digest (boolean): - Optional. By default, a hash object cannot be updated anymore after - the digest is computed. When this flag is ``True``, such check - is no longer enforced. - Returns: - A :class:`CMAC` object - """ - - if ciphermod is None: - raise TypeError("ciphermod must be specified (try AES)") - - cipher_params = {} if cipher_params is None else dict(cipher_params) - - if mac_len is None: - mac_len = ciphermod.block_size - - if mac_len < 4: - raise ValueError("MAC tag length must be at least 4 bytes long") - - if mac_len > ciphermod.block_size: - raise ValueError("MAC tag length cannot be larger than a cipher block (%d) bytes" % ciphermod.block_size) - - return CMAC(key, msg, ciphermod, cipher_params, mac_len, - update_after_digest) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/CMAC.pyi b/venv/Lib/site-packages/Cryptodome/Hash/CMAC.pyi deleted file mode 100644 index acdf055..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/CMAC.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from types import ModuleType -from typing import Union, Dict, Any - -Buffer = Union[bytes, bytearray, memoryview] - -digest_size: int - -class CMAC(object): - digest_size: int - - def __init__(self, - key: Buffer, - msg: Buffer, - ciphermod: ModuleType, - cipher_params: Dict[str, Any], - mac_len: int, update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> CMAC: ... - def copy(self) -> CMAC: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - -def new(key: Buffer, - msg: Buffer = ..., - ciphermod: ModuleType = ..., - cipher_params: Dict[str, Any] = ..., - mac_len: int = ..., - update_after_digest: bool = ...) -> CMAC: ... 
diff --git a/venv/Lib/site-packages/Cryptodome/Hash/HMAC.py b/venv/Lib/site-packages/Cryptodome/Hash/HMAC.py deleted file mode 100644 index 615056a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/HMAC.py +++ /dev/null @@ -1,238 +0,0 @@ -# -# HMAC.py - Implements the HMAC algorithm as described by RFC 2104. -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord, tobytes - -from binascii import unhexlify - -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Util.strxor import strxor -from Cryptodome.Random import get_random_bytes - -__all__ = ['new', 'HMAC'] - -_hash2hmac_oid = { - '1.3.14.3.2.26': '1.2.840.113549.2.7', # SHA-1 - '2.16.840.1.101.3.4.2.4': '1.2.840.113549.2.8', # SHA-224 - '2.16.840.1.101.3.4.2.1': '1.2.840.113549.2.9', # SHA-256 - '2.16.840.1.101.3.4.2.2': '1.2.840.113549.2.10', # SHA-384 - '2.16.840.1.101.3.4.2.3': '1.2.840.113549.2.11', # SHA-512 - '2.16.840.1.101.3.4.2.5': '1.2.840.113549.2.12', # SHA-512_224 - '2.16.840.1.101.3.4.2.6': '1.2.840.113549.2.13', # SHA-512_256 - '2.16.840.1.101.3.4.2.7': '2.16.840.1.101.3.4.2.13', # SHA-3 224 - '2.16.840.1.101.3.4.2.8': '2.16.840.1.101.3.4.2.14', # SHA-3 256 - '2.16.840.1.101.3.4.2.9': '2.16.840.1.101.3.4.2.15', # SHA-3 384 - '2.16.840.1.101.3.4.2.10': '2.16.840.1.101.3.4.2.16', # SHA-3 512 -} - -_hmac2hash_oid = {v: k for k, v in _hash2hmac_oid.items()} - - -class HMAC(object): - """An HMAC hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar digest_size: the size in bytes of the resulting MAC tag - :vartype digest_size: integer - - :ivar oid: the ASN.1 object ID of the HMAC algorithm. - Only present if the algorithm was officially assigned one. 
- """ - - def __init__(self, key, msg=b"", digestmod=None): - - if digestmod is None: - from Cryptodome.Hash import MD5 - digestmod = MD5 - - if msg is None: - msg = b"" - - # Size of the MAC tag - self.digest_size = digestmod.digest_size - - self._digestmod = digestmod - - # Hash OID --> HMAC OID - try: - self.oid = _hash2hmac_oid[digestmod.oid] - except (KeyError, AttributeError): - pass - - if isinstance(key, memoryview): - key = key.tobytes() - - try: - if len(key) <= digestmod.block_size: - # Step 1 or 2 - key_0 = key + b"\x00" * (digestmod.block_size - len(key)) - else: - # Step 3 - hash_k = digestmod.new(key).digest() - key_0 = hash_k + b"\x00" * (digestmod.block_size - len(hash_k)) - except AttributeError: - # Not all hash types have "block_size" - raise ValueError("Hash type incompatible to HMAC") - - # Step 4 - key_0_ipad = strxor(key_0, b"\x36" * len(key_0)) - - # Start step 5 and 6 - self._inner = digestmod.new(key_0_ipad) - self._inner.update(msg) - - # Step 7 - key_0_opad = strxor(key_0, b"\x5c" * len(key_0)) - - # Start step 8 and 9 - self._outer = digestmod.new(key_0_opad) - - def update(self, msg): - """Authenticate the next chunk of message. - - Args: - data (byte string/byte array/memoryview): The next chunk of data - """ - - self._inner.update(msg) - return self - - def _pbkdf2_hmac_assist(self, first_digest, iterations): - """Carry out the expensive inner loop for PBKDF2-HMAC""" - - result = self._digestmod._pbkdf2_hmac_assist( - self._inner, - self._outer, - first_digest, - iterations) - return result - - def copy(self): - """Return a copy ("clone") of the HMAC object. - - The copy will have the same internal state as the original HMAC - object. - This can be used to efficiently compute the MAC tag of byte - strings that share a common initial substring. - - :return: An :class:`HMAC` - """ - - new_hmac = HMAC(b"fake key", digestmod=self._digestmod) - - # Syncronize the state - new_hmac._inner = self._inner.copy() - new_hmac._outer = self._outer.copy() - - return new_hmac - - def digest(self): - """Return the **binary** (non-printable) MAC tag of the message - authenticated so far. - - :return: The MAC tag digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - frozen_outer_hash = self._outer.copy() - frozen_outer_hash.update(self._inner.digest()) - return frozen_outer_hash.digest() - - def verify(self, mac_tag): - """Verify that a given **binary** MAC (computed by another party) - is valid. - - Args: - mac_tag (byte string/byte string/memoryview): the expected MAC of the message. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexdigest(self): - """Return the **printable** MAC tag of the message authenticated so far. - - :return: The MAC tag, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) - for x in tuple(self.digest())]) - - def hexverify(self, hex_mac_tag): - """Verify that a given **printable** MAC (computed by another party) - is valid. - - Args: - hex_mac_tag (string): the expected MAC of the message, - as a hexadecimal string. - - Raises: - ValueError: if the MAC does not match. 
It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - self.verify(unhexlify(tobytes(hex_mac_tag))) - - -def new(key, msg=b"", digestmod=None): - """Create a new MAC object. - - Args: - key (bytes/bytearray/memoryview): - key for the MAC object. - It must be long enough to match the expected security level of the - MAC. - msg (bytes/bytearray/memoryview): - Optional. The very first chunk of the message to authenticate. - It is equivalent to an early call to :meth:`HMAC.update`. - digestmod (module): - The hash to use to implement the HMAC. - Default is :mod:`Cryptodome.Hash.MD5`. - - Returns: - An :class:`HMAC` object - """ - - return HMAC(key, msg, digestmod) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/HMAC.pyi b/venv/Lib/site-packages/Cryptodome/Hash/HMAC.pyi deleted file mode 100644 index b577230..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/HMAC.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from types import ModuleType -from typing import Union, Dict - -Buffer = Union[bytes, bytearray, memoryview] - -digest_size: int - -class HMAC(object): - digest_size: int - - def __init__(self, - key: Buffer, - msg: Buffer, - digestmod: ModuleType) -> None: ... - def update(self, msg: Buffer) -> HMAC: ... - def copy(self) -> HMAC: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - - -def new(key: Buffer, - msg: Buffer = ..., - digestmod: ModuleType = ...) -> HMAC: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/KMAC128.py b/venv/Lib/site-packages/Cryptodome/Hash/KMAC128.py deleted file mode 100644 index afd91c4..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/KMAC128.py +++ /dev/null @@ -1,179 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bord, tobytes, is_bytes -from Cryptodome.Random import get_random_bytes - -from . 
import cSHAKE128, SHA3_256 -from .cSHAKE128 import _bytepad, _encode_str, _right_encode - - -class KMAC_Hash(object): - """A KMAC hash object. - Do not instantiate directly. - Use the :func:`new` function. - """ - - def __init__(self, data, key, mac_len, custom, - oid_variant, cshake, rate): - - # See https://tools.ietf.org/html/rfc8702 - self.oid = "2.16.840.1.101.3.4.2." + oid_variant - self.digest_size = mac_len - - self._mac = None - - partial_newX = _bytepad(_encode_str(tobytes(key)), rate) - self._cshake = cshake._new(partial_newX, custom, b"KMAC") - - if data: - self._cshake.update(data) - - def update(self, data): - """Authenticate the next chunk of message. - - Args: - data (bytes/bytearray/memoryview): The next chunk of the message to - authenticate. - """ - - if self._mac: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - self._cshake.update(data) - return self - - def digest(self): - """Return the **binary** (non-printable) MAC tag of the message. - - :return: The MAC tag. Binary form. - :rtype: byte string - """ - - if not self._mac: - self._cshake.update(_right_encode(self.digest_size * 8)) - self._mac = self._cshake.read(self.digest_size) - - return self._mac - - def hexdigest(self): - """Return the **printable** MAC tag of the message. - - :return: The MAC tag. Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) - - def verify(self, mac_tag): - """Verify that a given **binary** MAC (computed by another party) - is valid. - - Args: - mac_tag (bytes/bytearray/memoryview): the expected MAC of the message. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - secret = get_random_bytes(16) - - mac1 = SHA3_256.new(secret + mac_tag) - mac2 = SHA3_256.new(secret + self.digest()) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Verify that a given **printable** MAC (computed by another party) - is valid. - - Args: - hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - self.verify(unhexlify(tobytes(hex_mac_tag))) - - def new(self, **kwargs): - """Return a new instance of a KMAC hash object. - See :func:`new`. - """ - - if "mac_len" not in kwargs: - kwargs["mac_len"] = self.digest_size - - return new(**kwargs) - - -def new(**kwargs): - """Create a new KMAC128 object. - - Args: - key (bytes/bytearray/memoryview): - The key to use to compute the MAC. - It must be at least 128 bits long (16 bytes). - data (bytes/bytearray/memoryview): - Optional. The very first chunk of the message to authenticate. - It is equivalent to an early call to :meth:`KMAC_Hash.update`. - mac_len (integer): - Optional. The size of the authentication tag, in bytes. - Default is 64. Minimum is 8. - custom (bytes/bytearray/memoryview): - Optional. A customization byte string (``S`` in SP 800-185). 
- - Returns: - A :class:`KMAC_Hash` hash object - """ - - key = kwargs.pop("key", None) - if not is_bytes(key): - raise TypeError("You must pass a key to KMAC128") - if len(key) < 16: - raise ValueError("The key must be at least 128 bits long (16 bytes)") - - data = kwargs.pop("data", None) - - mac_len = kwargs.pop("mac_len", 64) - if mac_len < 8: - raise ValueError("'mac_len' must be 8 bytes or more") - - custom = kwargs.pop("custom", b"") - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return KMAC_Hash(data, key, mac_len, custom, "19", cSHAKE128, 168) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/KMAC128.pyi b/venv/Lib/site-packages/Cryptodome/Hash/KMAC128.pyi deleted file mode 100644 index 8947dab..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/KMAC128.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Union -from types import ModuleType - -Buffer = Union[bytes, bytearray, memoryview] - -class KMAC_Hash(object): - - def __init__(self, - data: Buffer, - key: Buffer, - mac_len: int, - custom: Buffer, - oid_variant: str, - cshake: ModuleType, - rate: int) -> None: ... - - def update(self, data: Buffer) -> KMAC_Hash: ... - - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - def new(self, - data: Buffer = ..., - mac_len: int = ..., - key: Buffer = ..., - custom: Buffer = ...) -> KMAC_Hash: ... - - -def new(key: Buffer, - data: Buffer = ..., - mac_len: int = ..., - custom: Buffer = ...) -> KMAC_Hash: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/KMAC256.py b/venv/Lib/site-packages/Cryptodome/Hash/KMAC256.py deleted file mode 100644 index 82da062..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/KMAC256.py +++ /dev/null @@ -1,74 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import is_bytes - -from .KMAC128 import KMAC_Hash -from . import cSHAKE256 - - -def new(**kwargs): - """Create a new KMAC256 object. 
- - Args: - key (bytes/bytearray/memoryview): - The key to use to compute the MAC. - It must be at least 256 bits long (32 bytes). - data (bytes/bytearray/memoryview): - Optional. The very first chunk of the message to authenticate. - It is equivalent to an early call to :meth:`KMAC_Hash.update`. - mac_len (integer): - Optional. The size of the authentication tag, in bytes. - Default is 64. Minimum is 8. - custom (bytes/bytearray/memoryview): - Optional. A customization byte string (``S`` in SP 800-185). - - Returns: - A :class:`KMAC_Hash` hash object - """ - - key = kwargs.pop("key", None) - if not is_bytes(key): - raise TypeError("You must pass a key to KMAC256") - if len(key) < 32: - raise ValueError("The key must be at least 256 bits long (32 bytes)") - - data = kwargs.pop("data", None) - - mac_len = kwargs.pop("mac_len", 64) - if mac_len < 8: - raise ValueError("'mac_len' must be 8 bytes or more") - - custom = kwargs.pop("custom", b"") - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return KMAC_Hash(data, key, mac_len, custom, "20", cSHAKE256, 136) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/KMAC256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/KMAC256.pyi deleted file mode 100644 index 86cc500..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/KMAC256.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Union - -from .KMAC128 import KMAC_Hash - -Buffer = Union[bytes, bytearray, memoryview] - -def new(key: Buffer, - data: Buffer = ..., - mac_len: int = ..., - custom: Buffer = ...) -> KMAC_Hash: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/KangarooTwelve.py b/venv/Lib/site-packages/Cryptodome/Hash/KangarooTwelve.py deleted file mode 100644 index 60ced57..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/KangarooTwelve.py +++ /dev/null @@ -1,222 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.Util.py3compat import bchr - -from . 
import TurboSHAKE128 - -def _length_encode(x): - if x == 0: - return b'\x00' - - S = long_to_bytes(x) - return S + bchr(len(S)) - - -# Possible states for a KangarooTwelve instance, which depend on the amount of data processed so far. -SHORT_MSG = 1 # Still within the first 8192 bytes, but it is not certain we will exceed them. -LONG_MSG_S0 = 2 # Still within the first 8192 bytes, and it is certain we will exceed them. -LONG_MSG_SX = 3 # Beyond the first 8192 bytes. -SQUEEZING = 4 # No more data to process. - - -class K12_XOF(object): - """A KangarooTwelve hash object. - Do not instantiate directly. - Use the :func:`new` function. - """ - - def __init__(self, data, custom): - - if custom == None: - custom = b'' - - self._custom = custom + _length_encode(len(custom)) - self._state = SHORT_MSG - self._padding = None # Final padding is only decided in read() - - # Internal hash that consumes FinalNode - # The real domain separation byte will be known before squeezing - self._hash1 = TurboSHAKE128.new(domain=1) - self._length1 = 0 - - # Internal hash that produces CV_i (reset each time) - self._hash2 = None - self._length2 = 0 - - # Incremented by one for each 8192-byte block - self._ctr = 0 - - if data: - self.update(data) - - def update(self, data): - """Hash the next piece of data. - - .. note:: - For better performance, submit chunks with a length multiple of 8192 bytes. - - Args: - data (byte string/byte array/memoryview): The next chunk of the - message to hash. - """ - - if self._state == SQUEEZING: - raise TypeError("You cannot call 'update' after the first 'read'") - - if self._state == SHORT_MSG: - next_length = self._length1 + len(data) - - if next_length + len(self._custom) <= 8192: - self._length1 = next_length - self._hash1.update(data) - return self - - # Switch to tree hashing - self._state = LONG_MSG_S0 - - if self._state == LONG_MSG_S0: - data_mem = memoryview(data) - assert(self._length1 < 8192) - dtc = min(len(data), 8192 - self._length1) - self._hash1.update(data_mem[:dtc]) - self._length1 += dtc - - if self._length1 < 8192: - return self - - # Finish hashing S_0 and start S_1 - assert(self._length1 == 8192) - - divider = b'\x03' + b'\x00' * 7 - self._hash1.update(divider) - self._length1 += 8 - - self._hash2 = TurboSHAKE128.new(domain=0x0B) - self._length2 = 0 - self._ctr = 1 - - self._state = LONG_MSG_SX - return self.update(data_mem[dtc:]) - - # LONG_MSG_SX - assert(self._state == LONG_MSG_SX) - index = 0 - len_data = len(data) - - # All iteractions could actually run in parallel - data_mem = memoryview(data) - while index < len_data: - - new_index = min(index + 8192 - self._length2, len_data) - self._hash2.update(data_mem[index:new_index]) - self._length2 += new_index - index - index = new_index - - if self._length2 == 8192: - cv_i = self._hash2.read(32) - self._hash1.update(cv_i) - self._length1 += 32 - self._hash2._reset() - self._length2 = 0 - self._ctr += 1 - - return self - - def read(self, length): - """ - Produce more bytes of the digest. - - .. note:: - You cannot use :meth:`update` anymore after the first call to - :meth:`read`. 
- - Args: - length (integer): the amount of bytes this method must return - - :return: the next piece of XOF output (of the given length) - :rtype: byte string - """ - - custom_was_consumed = False - - if self._state == SHORT_MSG: - self._hash1.update(self._custom) - self._padding = 0x07 - self._state = SQUEEZING - - if self._state == LONG_MSG_S0: - self.update(self._custom) - custom_was_consumed = True - assert(self._state == LONG_MSG_SX) - - if self._state == LONG_MSG_SX: - if not custom_was_consumed: - self.update(self._custom) - - # Is there still some leftover data in hash2? - if self._length2 > 0: - cv_i = self._hash2.read(32) - self._hash1.update(cv_i) - self._length1 += 32 - self._hash2._reset() - self._length2 = 0 - self._ctr += 1 - - trailer = _length_encode(self._ctr - 1) + b'\xFF\xFF' - self._hash1.update(trailer) - - self._padding = 0x06 - self._state = SQUEEZING - - self._hash1._domain = self._padding - return self._hash1.read(length) - - def new(self, data=None, custom=b''): - return type(self)(data, custom) - - -def new(data=None, custom=None): - """Return a fresh instance of a KangarooTwelve object. - - Args: - data (bytes/bytearray/memoryview): - Optional. - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - custom (bytes): - Optional. - A customization byte string. - - :Return: A :class:`K12_XOF` object - """ - - return K12_XOF(data, custom) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/KangarooTwelve.pyi b/venv/Lib/site-packages/Cryptodome/Hash/KangarooTwelve.pyi deleted file mode 100644 index 8b3fd74..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/KangarooTwelve.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class K12_XOF(object): - def __init__(self, - data: Optional[Buffer] = ..., - custom: Optional[bytes] = ...) -> None: ... - def update(self, data: Buffer) -> K12_XOF: ... - def read(self, length: int) -> bytes: ... - def new(self, - data: Optional[Buffer] = ..., - custom: Optional[bytes] = ...) -> None: ... - -def new(data: Optional[Buffer] = ..., - custom: Optional[Buffer] = ...) -> K12_XOF: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/MD2.py b/venv/Lib/site-packages/Cryptodome/Hash/MD2.py deleted file mode 100644 index 47ecc05..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/MD2.py +++ /dev/null @@ -1,166 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_md2_lib = load_pycryptodome_raw_lib( - "Cryptodome.Hash._MD2", - """ - int md2_init(void **shaState); - int md2_destroy(void *shaState); - int md2_update(void *hs, - const uint8_t *buf, - size_t len); - int md2_digest(const void *shaState, - uint8_t digest[20]); - int md2_copy(const void *src, void *dst); - """) - - -class MD2Hash(object): - """An MD2 hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 16 - # The internal block size of the hash algorithm in bytes. - block_size = 16 - # ASN.1 Object ID - oid = "1.2.840.113549.2.2" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_md2_lib.md2_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating MD2" - % result) - self._state = SmartPointer(state.get(), - _raw_md2_lib.md2_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_md2_lib.md2_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while instantiating MD2" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_md2_lib.md2_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while instantiating MD2" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. 
- - :return: A hash object of the same type - """ - - clone = MD2Hash() - result = _raw_md2_lib.md2_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying MD2" % result) - return clone - - def new(self, data=None): - return MD2Hash(data) - - -def new(data=None): - """Create a new hash object. - - :parameter data: - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`MD2Hash.update`. - :type data: bytes/bytearray/memoryview - - :Return: A :class:`MD2Hash` hash object - """ - - return MD2Hash().new(data) - -# The size of the resulting hash in bytes. -digest_size = MD2Hash.digest_size - -# The internal block size of the hash algorithm in bytes. -block_size = MD2Hash.block_size diff --git a/venv/Lib/site-packages/Cryptodome/Hash/MD2.pyi b/venv/Lib/site-packages/Cryptodome/Hash/MD2.pyi deleted file mode 100644 index 95a97a9..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/MD2.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union - -Buffer = Union[bytes, bytearray, memoryview] - -class MD4Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Buffer = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> MD4Hash: ... - def new(self, data: Buffer = ...) -> MD4Hash: ... - -def new(data: Buffer = ...) -> MD4Hash: ... -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/MD4.py b/venv/Lib/site-packages/Cryptodome/Hash/MD4.py deleted file mode 100644 index 668fa65..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/MD4.py +++ /dev/null @@ -1,185 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -MD4 is specified in RFC1320_ and produces the 128 bit digest of a message. - - >>> from Cryptodome.Hash import MD4 - >>> - >>> h = MD4.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -MD4 stand for Message Digest version 4, and it was invented by Rivest in 1990. 
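For reference, the MD2 module removed above follows the same hash-object pattern (new/update/digest/hexdigest/copy). A minimal sketch, assuming pycryptodomex is installed; MD2 is obsolete and only relevant for legacy interoperability:

    from Cryptodome.Hash import MD2

    h = MD2.new(b'legacy data')   # one-shot: the whole message can be passed to new()
    assert h.digest_size == 16    # MD2 produces a 128-bit digest
    print(h.hexdigest())
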
-This algorithm is insecure. Do not use it for new designs. - -.. _RFC1320: http://tools.ietf.org/html/rfc1320 -""" - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_md4_lib = load_pycryptodome_raw_lib( - "Cryptodome.Hash._MD4", - """ - int md4_init(void **shaState); - int md4_destroy(void *shaState); - int md4_update(void *hs, - const uint8_t *buf, - size_t len); - int md4_digest(const void *shaState, - uint8_t digest[20]); - int md4_copy(const void *src, void *dst); - """) - - -class MD4Hash(object): - """Class that implements an MD4 hash - """ - - #: The size of the resulting hash in bytes. - digest_size = 16 - #: The internal block size of the hash algorithm in bytes. - block_size = 64 - #: ASN.1 Object ID - oid = "1.2.840.113549.2.4" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_md4_lib.md4_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating MD4" - % result) - self._state = SmartPointer(state.get(), - _raw_md4_lib.md4_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Repeated calls are equivalent to a single call with the concatenation - of all the arguments. In other words: - - >>> m.update(a); m.update(b) - - is equivalent to: - - >>> m.update(a+b) - - :Parameters: - data : byte string/byte array/memoryview - The next chunk of the message being hashed. - """ - - result = _raw_md4_lib.md4_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while instantiating MD4" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that - has been hashed so far. - - This method does not change the state of the hash object. - You can continue updating the object after calling this function. - - :Return: A byte string of `digest_size` bytes. It may contain non-ASCII - characters, including null bytes. - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_md4_lib.md4_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while instantiating MD4" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been - hashed so far. - - This method does not change the state of the hash object. - - :Return: A string of 2* `digest_size` characters. It contains only - hexadecimal ASCII digits. - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :Return: A hash object of the same type - """ - - clone = MD4Hash() - result = _raw_md4_lib.md4_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying MD4" % result) - return clone - - def new(self, data=None): - return MD4Hash(data) - - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string/byte array/memoryview - The very first chunk of the message to hash. - It is equivalent to an early call to `MD4Hash.update()`. - Optional. 
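For reference, the update() docstring above states that repeated calls are equivalent to a single call on the concatenated message. A short sketch of that property with the MD4 module being removed (assumes pycryptodomex is installed; MD4 is insecure and shown for legacy use only):

    from Cryptodome.Hash import MD4

    a = MD4.new()
    a.update(b'hello ')
    a.update(b'world')

    b = MD4.new(b'hello world')   # update(x); update(y) matches update(x + y)
    assert a.hexdigest() == b.hexdigest()
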
- - :Return: A `MD4Hash` object - """ - return MD4Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = MD4Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. -block_size = MD4Hash.block_size diff --git a/venv/Lib/site-packages/Cryptodome/Hash/MD4.pyi b/venv/Lib/site-packages/Cryptodome/Hash/MD4.pyi deleted file mode 100644 index a9a7295..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/MD4.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class MD4Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Optional[Buffer] = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> MD4Hash: ... - def new(self, data: Optional[Buffer] = ...) -> MD4Hash: ... - -def new(data: Optional[Buffer] = ...) -> MD4Hash: ... -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/MD5.py b/venv/Lib/site-packages/Cryptodome/Hash/MD5.py deleted file mode 100644 index 8f573a9..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/MD5.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import * - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_md5_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._MD5", - """ - #define MD5_DIGEST_SIZE 16 - - int MD5_init(void **shaState); - int MD5_destroy(void *shaState); - int MD5_update(void *hs, - const uint8_t *buf, - size_t len); - int MD5_digest(const void *shaState, - uint8_t digest[MD5_DIGEST_SIZE]); - int MD5_copy(const void *src, void *dst); - - int MD5_pbkdf2_hmac_assist(const void *inner, - const void *outer, - const uint8_t first_digest[MD5_DIGEST_SIZE], - uint8_t final_digest[MD5_DIGEST_SIZE], - size_t iterations); - """) - -class MD5Hash(object): - """A MD5 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 16 - # The internal block size of the hash algorithm in bytes. 
- block_size = 64 - # ASN.1 Object ID - oid = "1.2.840.113549.2.5" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_md5_lib.MD5_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating MD5" - % result) - self._state = SmartPointer(state.get(), - _raw_md5_lib.MD5_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_md5_lib.MD5_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while instantiating MD5" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_md5_lib.MD5_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while instantiating MD5" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = MD5Hash() - result = _raw_md5_lib.MD5_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying MD5" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA-1 hash object.""" - - return MD5Hash(data) - - -def new(data=None): - """Create a new hash object. - - :parameter data: - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`MD5Hash.update`. - :type data: byte string/byte array/memoryview - - :Return: A :class:`MD5Hash` hash object - """ - return MD5Hash().new(data) - -# The size of the resulting hash in bytes. -digest_size = 16 - -# The internal block size of the hash algorithm in bytes. -block_size = 64 - - -def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): - """Compute the expensive inner loop in PBKDF-HMAC.""" - - assert len(first_digest) == digest_size - assert iterations > 0 - - bfr = create_string_buffer(digest_size); - result = _raw_md5_lib.MD5_pbkdf2_hmac_assist( - inner._state.get(), - outer._state.get(), - first_digest, - bfr, - c_size_t(iterations)) - - if result: - raise ValueError("Error %d with PBKDF2-HMAC assis for MD5" % result) - - return get_raw_buffer(bfr) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/MD5.pyi b/venv/Lib/site-packages/Cryptodome/Hash/MD5.pyi deleted file mode 100644 index d819556..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/MD5.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union - -Buffer = Union[bytes, bytearray, memoryview] - -class MD5Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Buffer = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... 
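For reference, the copy() method documented above exists to hash many messages that share a common prefix without reprocessing the prefix each time. A minimal sketch with the MD5 module being removed, assuming pycryptodomex is installed:

    from Cryptodome.Hash import MD5

    prefix = MD5.new(b'common header|')   # hash the shared prefix once
    h1 = prefix.copy()                    # the clone carries the internal state
    h1.update(b'record 1')
    h2 = prefix.copy()
    h2.update(b'record 2')
    print(h1.hexdigest(), h2.hexdigest())
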
- def hexdigest(self) -> str: ... - def copy(self) -> MD5Hash: ... - def new(self, data: Buffer = ...) -> MD5Hash: ... - -def new(data: Buffer = ...) -> MD5Hash: ... -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/Poly1305.py b/venv/Lib/site-packages/Cryptodome/Hash/Poly1305.py deleted file mode 100644 index c03f522..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/Poly1305.py +++ /dev/null @@ -1,217 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Hash/Poly1305.py - Implements the Poly1305 MAC -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bord, tobytes, _copy_bytes - -from Cryptodome.Hash import BLAKE2s -from Cryptodome.Random import get_random_bytes -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - - -_raw_poly1305 = load_pycryptodome_raw_lib("Cryptodome.Hash._poly1305", - """ - int poly1305_init(void **state, - const uint8_t *r, - size_t r_len, - const uint8_t *s, - size_t s_len); - int poly1305_destroy(void *state); - int poly1305_update(void *state, - const uint8_t *in, - size_t len); - int poly1305_digest(const void *state, - uint8_t *digest, - size_t len); - """) - - -class Poly1305_MAC(object): - """An Poly1305 MAC object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar digest_size: the size in bytes of the resulting MAC tag - :vartype digest_size: integer - """ - - digest_size = 16 - - def __init__(self, r, s, data): - - if len(r) != 16: - raise ValueError("Parameter r is not 16 bytes long") - if len(s) != 16: - raise ValueError("Parameter s is not 16 bytes long") - - self._mac_tag = None - - state = VoidPointer() - result = _raw_poly1305.poly1305_init(state.address_of(), - c_uint8_ptr(r), - c_size_t(len(r)), - c_uint8_ptr(s), - c_size_t(len(s)) - ) - if result: - raise ValueError("Error %d while instantiating Poly1305" % result) - self._state = SmartPointer(state.get(), - _raw_poly1305.poly1305_destroy) - if data: - self.update(data) - - def update(self, data): - """Authenticate the next chunk of message. 
- - Args: - data (byte string/byte array/memoryview): The next chunk of data - """ - - if self._mac_tag: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_poly1305.poly1305_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing Poly1305 data" % result) - return self - - def copy(self): - raise NotImplementedError() - - def digest(self): - """Return the **binary** (non-printable) MAC tag of the message - authenticated so far. - - :return: The MAC tag digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - if self._mac_tag: - return self._mac_tag - - bfr = create_string_buffer(16) - result = _raw_poly1305.poly1305_digest(self._state.get(), - bfr, - c_size_t(len(bfr))) - if result: - raise ValueError("Error %d while creating Poly1305 digest" % result) - - self._mac_tag = get_raw_buffer(bfr) - return self._mac_tag - - def hexdigest(self): - """Return the **printable** MAC tag of the message authenticated so far. - - :return: The MAC tag, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) - for x in tuple(self.digest())]) - - def verify(self, mac_tag): - """Verify that a given **binary** MAC (computed by another party) - is valid. - - Args: - mac_tag (byte string/byte string/memoryview): the expected MAC of the message. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) - - if mac1.digest() != mac2.digest(): - raise ValueError("MAC check failed") - - def hexverify(self, hex_mac_tag): - """Verify that a given **printable** MAC (computed by another party) - is valid. - - Args: - hex_mac_tag (string): the expected MAC of the message, - as a hexadecimal string. - - Raises: - ValueError: if the MAC does not match. It means that the message - has been tampered with or that the MAC key is incorrect. - """ - - self.verify(unhexlify(tobytes(hex_mac_tag))) - - - -def new(**kwargs): - """Create a new Poly1305 MAC object. - - Args: - key (bytes/bytearray/memoryview): - The 32-byte key for the Poly1305 object. - cipher (module from ``Cryptodome.Cipher``): - The cipher algorithm to use for deriving the Poly1305 - key pair *(r, s)*. - It can only be ``Cryptodome.Cipher.AES`` or ``Cryptodome.Cipher.ChaCha20``. - nonce (bytes/bytearray/memoryview): - Optional. The non-repeatable value to use for the MAC of this message. - It must be 16 bytes long for ``AES`` and 8 or 12 bytes for ``ChaCha20``. - If not passed, a random nonce is created; you will find it in the - ``nonce`` attribute of the new object. - data (bytes/bytearray/memoryview): - Optional. The very first chunk of the message to authenticate. - It is equivalent to an early call to ``update()``. 
- - Returns: - A :class:`Poly1305_MAC` object - """ - - cipher = kwargs.pop("cipher", None) - if not hasattr(cipher, '_derive_Poly1305_key_pair'): - raise ValueError("Parameter 'cipher' must be AES or ChaCha20") - - cipher_key = kwargs.pop("key", None) - if cipher_key is None: - raise TypeError("You must pass a parameter 'key'") - - nonce = kwargs.pop("nonce", None) - data = kwargs.pop("data", None) - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - r, s, nonce = cipher._derive_Poly1305_key_pair(cipher_key, nonce) - - new_mac = Poly1305_MAC(r, s, data) - new_mac.nonce = _copy_bytes(None, None, nonce) # nonce may still be just a memoryview - return new_mac diff --git a/venv/Lib/site-packages/Cryptodome/Hash/Poly1305.pyi b/venv/Lib/site-packages/Cryptodome/Hash/Poly1305.pyi deleted file mode 100644 index f97a14a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/Poly1305.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from types import ModuleType -from typing import Union - -Buffer = Union[bytes, bytearray, memoryview] - -class Poly1305_MAC(object): - block_size: int - digest_size: int - oid: str - - def __init__(self, - r : int, - s : int, - data : Buffer) -> None: ... - def update(self, data: Buffer) -> Poly1305_MAC: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def verify(self, mac_tag: Buffer) -> None: ... - def hexverify(self, hex_mac_tag: str) -> None: ... - -def new(key: Buffer, - cipher: ModuleType, - nonce: Buffer = ..., - data: Buffer = ...) -> Poly1305_MAC: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD.py b/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD.py deleted file mode 100644 index 35ad576..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
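For reference, Poly1305.new() as documented above derives the one-time (r, s) pair from a cipher and a nonce, and the nonce must accompany the tag so the receiver can recompute it. A minimal sketch, assuming pycryptodomex is installed:

    from Cryptodome.Cipher import AES
    from Cryptodome.Hash import Poly1305
    from Cryptodome.Random import get_random_bytes

    key = get_random_bytes(32)                         # 32-byte Poly1305 key
    msg = b'attack at dawn'

    mac = Poly1305.new(key=key, cipher=AES, data=msg)  # a random 16-byte nonce is generated
    tag, nonce = mac.hexdigest(), mac.nonce

    # Receiver side: same key, cipher and nonce; hexverify() raises ValueError on mismatch
    check = Poly1305.new(key=key, cipher=AES, nonce=nonce, data=msg)
    check.hexverify(tag)
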
-# =================================================================== - -# This file exists for backward compatibility with old code that refers to -# Cryptodome.Hash.RIPEMD - -"""Deprecated alias for `Cryptodome.Hash.RIPEMD160`""" - -from Cryptodome.Hash.RIPEMD160 import new, block_size, digest_size diff --git a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD.pyi b/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD.pyi deleted file mode 100644 index cfb2252..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD.pyi +++ /dev/null @@ -1,3 +0,0 @@ -# This file exists for backward compatibility with old code that refers to -# Cryptodome.Hash.SHA - diff --git a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD160.py b/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD160.py deleted file mode 100644 index f959027..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD160.py +++ /dev/null @@ -1,169 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_ripemd160_lib = load_pycryptodome_raw_lib( - "Cryptodome.Hash._RIPEMD160", - """ - int ripemd160_init(void **shaState); - int ripemd160_destroy(void *shaState); - int ripemd160_update(void *hs, - const uint8_t *buf, - size_t len); - int ripemd160_digest(const void *shaState, - uint8_t digest[20]); - int ripemd160_copy(const void *src, void *dst); - """) - - -class RIPEMD160Hash(object): - """A RIPEMD-160 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 20 - # The internal block size of the hash algorithm in bytes. 
- block_size = 64 - # ASN.1 Object ID - oid = "1.3.36.3.2.1" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_ripemd160_lib.ripemd160_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating RIPEMD160" - % result) - self._state = SmartPointer(state.get(), - _raw_ripemd160_lib.ripemd160_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_ripemd160_lib.ripemd160_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while instantiating ripemd160" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_ripemd160_lib.ripemd160_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while instantiating ripemd160" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = RIPEMD160Hash() - result = _raw_ripemd160_lib.ripemd160_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying ripemd160" % result) - return clone - - def new(self, data=None): - """Create a fresh RIPEMD-160 hash object.""" - - return RIPEMD160Hash(data) - - -def new(data=None): - """Create a new hash object. - - :parameter data: - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`RIPEMD160Hash.update`. - :type data: byte string/byte array/memoryview - - :Return: A :class:`RIPEMD160Hash` hash object - """ - - return RIPEMD160Hash().new(data) - -# The size of the resulting hash in bytes. -digest_size = RIPEMD160Hash.digest_size - -# The internal block size of the hash algorithm in bytes. -block_size = RIPEMD160Hash.block_size diff --git a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD160.pyi b/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD160.pyi deleted file mode 100644 index b619473..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/RIPEMD160.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union - -Buffer = Union[bytes, bytearray, memoryview] - -class RIPEMD160Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Buffer = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> RIPEMD160Hash: ... - def new(self, data: Buffer = ...) -> RIPEMD160Hash: ... - -def new(data: Buffer = ...) -> RIPEMD160Hash: ... 
-digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA.py deleted file mode 100644 index 95f8745..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -# This file exists for backward compatibility with old code that refers to -# Cryptodome.Hash.SHA - -from Cryptodome.Hash.SHA1 import __doc__, new, block_size, digest_size diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA.pyi deleted file mode 100644 index 7d01a5f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA.pyi +++ /dev/null @@ -1,4 +0,0 @@ -# This file exists for backward compatibility with old code that refers to -# Cryptodome.Hash.SHA - -from Cryptodome.Hash.SHA1 import __doc__, new, block_size, digest_size diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA1.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA1.py deleted file mode 100644 index dea51bc..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA1.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
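For reference, the RIPEMD160 module removed above (together with its deprecated Cryptodome.Hash.RIPEMD alias) exposes the usual hash-object API with a 20-byte digest. A minimal sketch, assuming pycryptodomex is installed:

    from Cryptodome.Hash import RIPEMD160

    h = RIPEMD160.new()
    h.update(b'payload')
    print(h.hexdigest())   # 160-bit digest, hex encoded
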
-# =================================================================== - -from Cryptodome.Util.py3compat import * - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_sha1_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._SHA1", - """ - #define SHA1_DIGEST_SIZE 20 - - int SHA1_init(void **shaState); - int SHA1_destroy(void *shaState); - int SHA1_update(void *hs, - const uint8_t *buf, - size_t len); - int SHA1_digest(const void *shaState, - uint8_t digest[SHA1_DIGEST_SIZE]); - int SHA1_copy(const void *src, void *dst); - - int SHA1_pbkdf2_hmac_assist(const void *inner, - const void *outer, - const uint8_t first_digest[SHA1_DIGEST_SIZE], - uint8_t final_digest[SHA1_DIGEST_SIZE], - size_t iterations); - """) - -class SHA1Hash(object): - """A SHA-1 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 20 - # The internal block size of the hash algorithm in bytes. - block_size = 64 - # ASN.1 Object ID - oid = "1.3.14.3.2.26" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_sha1_lib.SHA1_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating SHA1" - % result) - self._state = SmartPointer(state.get(), - _raw_sha1_lib.SHA1_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_sha1_lib.SHA1_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while instantiating SHA1" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_sha1_lib.SHA1_digest(self._state.get(), - bfr) - if result: - raise ValueError("Error %d while instantiating SHA1" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = SHA1Hash() - result = _raw_sha1_lib.SHA1_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA1" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA-1 hash object.""" - - return SHA1Hash(data) - - -def new(data=None): - """Create a new hash object. - - :parameter data: - Optional. 
The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`SHA1Hash.update`. - :type data: byte string/byte array/memoryview - - :Return: A :class:`SHA1Hash` hash object - """ - return SHA1Hash().new(data) - - -# The size of the resulting hash in bytes. -digest_size = SHA1Hash.digest_size - -# The internal block size of the hash algorithm in bytes. -block_size = SHA1Hash.block_size - - -def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): - """Compute the expensive inner loop in PBKDF-HMAC.""" - - assert len(first_digest) == digest_size - assert iterations > 0 - - bfr = create_string_buffer(digest_size); - result = _raw_sha1_lib.SHA1_pbkdf2_hmac_assist( - inner._state.get(), - outer._state.get(), - first_digest, - bfr, - c_size_t(iterations)) - - if result: - raise ValueError("Error %d with PBKDF2-HMAC assis for SHA1" % result) - - return get_raw_buffer(bfr) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA1.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA1.pyi deleted file mode 100644 index d6c8e25..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA1.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA1Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Optional[Buffer] = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA1Hash: ... - def new(self, data: Optional[Buffer] = ...) -> SHA1Hash: ... - -def new(data: Optional[Buffer] = ...) -> SHA1Hash: ... -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA224.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA224.py deleted file mode 100644 index fca7622..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA224.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
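For reference, SHA1 (and its Cryptodome.Hash.SHA compatibility alias) is usually consumed through higher-level constructions such as HMAC rather than called directly; the HMAC module belongs to the same package but does not appear in this diff. A minimal sketch, assuming pycryptodomex is installed:

    from Cryptodome.Hash import HMAC, SHA1

    print(SHA1.new(b'message').hexdigest())

    # SHA-1 as the underlying hash of an HMAC tag
    mac = HMAC.new(b'secret-key', b'message', digestmod=SHA1)
    print(mac.hexdigest())
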
-# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_sha224_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._SHA224", - """ - int SHA224_init(void **shaState); - int SHA224_destroy(void *shaState); - int SHA224_update(void *hs, - const uint8_t *buf, - size_t len); - int SHA224_digest(const void *shaState, - uint8_t *digest, - size_t digest_size); - int SHA224_copy(const void *src, void *dst); - - int SHA224_pbkdf2_hmac_assist(const void *inner, - const void *outer, - const uint8_t *first_digest, - uint8_t *final_digest, - size_t iterations, - size_t digest_size); - """) - -class SHA224Hash(object): - """A SHA-224 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 28 - # The internal block size of the hash algorithm in bytes. - block_size = 64 - # ASN.1 Object ID - oid = '2.16.840.1.101.3.4.2.4' - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_sha224_lib.SHA224_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating SHA224" - % result) - self._state = SmartPointer(state.get(), - _raw_sha224_lib.SHA224_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_sha224_lib.SHA224_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing data with SHA224" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_sha224_lib.SHA224_digest(self._state.get(), - bfr, - c_size_t(self.digest_size)) - if result: - raise ValueError("Error %d while making SHA224 digest" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. 
- - :return: A hash object of the same type - """ - - clone = SHA224Hash() - result = _raw_sha224_lib.SHA224_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA224" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA-224 hash object.""" - - return SHA224Hash(data) - - -def new(data=None): - """Create a new hash object. - - :parameter data: - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`SHA224Hash.update`. - :type data: byte string/byte array/memoryview - - :Return: A :class:`SHA224Hash` hash object - """ - return SHA224Hash().new(data) - - -# The size of the resulting hash in bytes. -digest_size = SHA224Hash.digest_size - -# The internal block size of the hash algorithm in bytes. -block_size = SHA224Hash.block_size - - -def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): - """Compute the expensive inner loop in PBKDF-HMAC.""" - - assert iterations > 0 - - bfr = create_string_buffer(len(first_digest)); - result = _raw_sha224_lib.SHA224_pbkdf2_hmac_assist( - inner._state.get(), - outer._state.get(), - first_digest, - bfr, - c_size_t(iterations), - c_size_t(len(first_digest))) - - if result: - raise ValueError("Error %d with PBKDF2-HMAC assist for SHA224" % result) - - return get_raw_buffer(bfr) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA224.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA224.pyi deleted file mode 100644 index 613a7f9..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA224.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA224Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Optional[Buffer] = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA224Hash: ... - def new(self, data: Optional[Buffer] = ...) -> SHA224Hash: ... - -def new(data: Optional[Buffer] = ...) -> SHA224Hash: ... -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA256.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA256.py deleted file mode 100644 index c1a81b1..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA256.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
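For reference, SHA224 shares the interface shown above, with a 28-byte digest and a 64-byte block. A minimal sketch, assuming pycryptodomex is installed:

    from Cryptodome.Hash import SHA224

    h = SHA224.new(b'payload')
    assert h.digest_size == 28   # 224-bit digest
    print(h.hexdigest())
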
-# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_sha256_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._SHA256", - """ - int SHA256_init(void **shaState); - int SHA256_destroy(void *shaState); - int SHA256_update(void *hs, - const uint8_t *buf, - size_t len); - int SHA256_digest(const void *shaState, - uint8_t *digest, - size_t digest_size); - int SHA256_copy(const void *src, void *dst); - - int SHA256_pbkdf2_hmac_assist(const void *inner, - const void *outer, - const uint8_t *first_digest, - uint8_t *final_digest, - size_t iterations, - size_t digest_size); - """) - -class SHA256Hash(object): - """A SHA-256 hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 32 - # The internal block size of the hash algorithm in bytes. - block_size = 64 - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.1" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_sha256_lib.SHA256_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating SHA256" - % result) - self._state = SmartPointer(state.get(), - _raw_sha256_lib.SHA256_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_sha256_lib.SHA256_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing data with SHA256" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_sha256_lib.SHA256_digest(self._state.get(), - bfr, - c_size_t(self.digest_size)) - if result: - raise ValueError("Error %d while making SHA256 digest" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = SHA256Hash() - result = _raw_sha256_lib.SHA256_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA256" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA-256 hash object.""" - - return SHA256Hash(data) - -def new(data=None): - """Create a new hash object. 
- - :parameter data: - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`SHA256Hash.update`. - :type data: byte string/byte array/memoryview - - :Return: A :class:`SHA256Hash` hash object - """ - - return SHA256Hash().new(data) - - -# The size of the resulting hash in bytes. -digest_size = SHA256Hash.digest_size - -# The internal block size of the hash algorithm in bytes. -block_size = SHA256Hash.block_size - - -def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): - """Compute the expensive inner loop in PBKDF-HMAC.""" - - assert iterations > 0 - - bfr = create_string_buffer(len(first_digest)); - result = _raw_sha256_lib.SHA256_pbkdf2_hmac_assist( - inner._state.get(), - outer._state.get(), - first_digest, - bfr, - c_size_t(iterations), - c_size_t(len(first_digest))) - - if result: - raise ValueError("Error %d with PBKDF2-HMAC assist for SHA256" % result) - - return get_raw_buffer(bfr) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA256.pyi deleted file mode 100644 index cbf21bf..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA256.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Union, Optional - - -class SHA256Hash(object): - digest_size: int - block_size: int - oid: str - def __init__(self, data: Optional[Union[bytes, bytearray, memoryview]]=None) -> None: ... - def update(self, data: Union[bytes, bytearray, memoryview]) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA256Hash: ... - def new(self, data: Optional[Union[bytes, bytearray, memoryview]]=None) -> SHA256Hash: ... - -def new(data: Optional[Union[bytes, bytearray, memoryview]]=None) -> SHA256Hash: ... - -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA384.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA384.py deleted file mode 100644 index 711aa73..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA384.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
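For reference, the _pbkdf2_hmac_assist() helper above is the native fast path that Cryptodome.Protocol.KDF.PBKDF2 can use when SHA-256 is selected through hmac_hash_module. A minimal sketch of direct hashing and PBKDF2 key derivation, assuming pycryptodomex is installed (the iteration count is illustrative):

    from Cryptodome.Hash import SHA256
    from Cryptodome.Protocol.KDF import PBKDF2
    from Cryptodome.Random import get_random_bytes

    print(SHA256.new(b'payload').hexdigest())

    # PBKDF2-HMAC-SHA256: derive a 32-byte key from a password and a random salt
    salt = get_random_bytes(16)
    key = PBKDF2(b'password', salt, dkLen=32, count=100_000, hmac_hash_module=SHA256)
    print(key.hex())
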
-# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_sha384_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._SHA384", - """ - int SHA384_init(void **shaState); - int SHA384_destroy(void *shaState); - int SHA384_update(void *hs, - const uint8_t *buf, - size_t len); - int SHA384_digest(const void *shaState, - uint8_t *digest, - size_t digest_size); - int SHA384_copy(const void *src, void *dst); - - int SHA384_pbkdf2_hmac_assist(const void *inner, - const void *outer, - const uint8_t *first_digest, - uint8_t *final_digest, - size_t iterations, - size_t digest_size); - """) - -class SHA384Hash(object): - """A SHA-384 hash object. - Do not instantiate directly. Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 48 - # The internal block size of the hash algorithm in bytes. - block_size = 128 - # ASN.1 Object ID - oid = '2.16.840.1.101.3.4.2.2' - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_sha384_lib.SHA384_init(state.address_of()) - if result: - raise ValueError("Error %d while instantiating SHA384" - % result) - self._state = SmartPointer(state.get(), - _raw_sha384_lib.SHA384_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_sha384_lib.SHA384_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing data with SHA384" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_sha384_lib.SHA384_digest(self._state.get(), - bfr, - c_size_t(self.digest_size)) - if result: - raise ValueError("Error %d while making SHA384 digest" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = SHA384Hash() - result = _raw_sha384_lib.SHA384_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA384" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA-384 hash object.""" - - return SHA384Hash(data) - - -def new(data=None): - """Create a new hash object. 
- - :parameter data: - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`SHA384Hash.update`. - :type data: byte string/byte array/memoryview - - :Return: A :class:`SHA384Hash` hash object - """ - - return SHA384Hash().new(data) - - -# The size of the resulting hash in bytes. -digest_size = SHA384Hash.digest_size - -# The internal block size of the hash algorithm in bytes. -block_size = SHA384Hash.block_size - - -def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): - """Compute the expensive inner loop in PBKDF-HMAC.""" - - assert iterations > 0 - - bfr = create_string_buffer(len(first_digest)); - result = _raw_sha384_lib.SHA384_pbkdf2_hmac_assist( - inner._state.get(), - outer._state.get(), - first_digest, - bfr, - c_size_t(iterations), - c_size_t(len(first_digest))) - - if result: - raise ValueError("Error %d with PBKDF2-HMAC assist for SHA384" % result) - - return get_raw_buffer(bfr) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA384.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA384.pyi deleted file mode 100644 index c2aab9e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA384.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA384Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, data: Optional[Buffer] = ...) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA384Hash: ... - def new(self, data: Optional[Buffer] = ...) -> SHA384Hash: ... - -def new(data: Optional[Buffer] = ...) -> SHA384Hash: ... -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_224.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_224.py deleted file mode 100644 index 34888c5..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_224.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Hash.keccak import _raw_keccak_lib - -class SHA3_224_Hash(object): - """A SHA3-224 hash object. - Do not instantiate directly. - Use the :func:`new` function. 
- - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 28 - - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.7" - - # Input block size for HMAC - block_size = 144 - - def __init__(self, data, update_after_digest): - self._update_after_digest = update_after_digest - self._digest_done = False - self._padding = 0x06 - - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(self.digest_size * 2), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating SHA-3/224" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data)) - ) - if result: - raise ValueError("Error %d while updating SHA-3/224" - % result) - return self - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - self._digest_done = True - - bfr = create_string_buffer(self.digest_size) - result = _raw_keccak_lib.keccak_digest(self._state.get(), - bfr, - c_size_t(self.digest_size), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while instantiating SHA-3/224" - % result) - - self._digest_value = get_raw_buffer(bfr) - return self._digest_value - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = self.new() - result = _raw_keccak_lib.keccak_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA3-224" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA3-224 hash object.""" - - return type(self)(data, self._update_after_digest) - - -def new(*args, **kwargs): - """Create a new hash object. - - Args: - data (byte string/byte array/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - update_after_digest (boolean): - Whether :meth:`digest` can be followed by another :meth:`update` - (default: ``False``). 
- - :Return: A :class:`SHA3_224_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - if len(args) == 1: - if data: - raise ValueError("Initial data for hash specified twice") - data = args[0] - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return SHA3_224_Hash(data, update_after_digest) - -# The size of the resulting hash in bytes. -digest_size = SHA3_224_Hash.digest_size - -# Input block size for HMAC -block_size = 144 diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_224.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_224.pyi deleted file mode 100644 index 2180821..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_224.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA3_224_Hash(object): - digest_size: int - block_size: int - oid: str - def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> SHA3_224_Hash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA3_224_Hash: ... - def new(self, data: Optional[Buffer]) -> SHA3_224_Hash: ... - -def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_224_Hash: ... - -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_256.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_256.py deleted file mode 100644 index 024962f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_256.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Hash.keccak import _raw_keccak_lib - -class SHA3_256_Hash(object): - """A SHA3-256 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. 
- digest_size = 32 - - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.8" - - # Input block size for HMAC - block_size = 136 - - def __init__(self, data, update_after_digest): - self._update_after_digest = update_after_digest - self._digest_done = False - self._padding = 0x06 - - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(self.digest_size * 2), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating SHA-3/256" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data)) - ) - if result: - raise ValueError("Error %d while updating SHA-3/256" - % result) - return self - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - self._digest_done = True - - bfr = create_string_buffer(self.digest_size) - result = _raw_keccak_lib.keccak_digest(self._state.get(), - bfr, - c_size_t(self.digest_size), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while instantiating SHA-3/256" - % result) - - self._digest_value = get_raw_buffer(bfr) - return self._digest_value - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = self.new() - result = _raw_keccak_lib.keccak_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA3-256" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA3-256 hash object.""" - - return type(self)(data, self._update_after_digest) - - -def new(*args, **kwargs): - """Create a new hash object. - - Args: - data (byte string/byte array/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - update_after_digest (boolean): - Whether :meth:`digest` can be followed by another :meth:`update` - (default: ``False``). - - :Return: A :class:`SHA3_256_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - if len(args) == 1: - if data: - raise ValueError("Initial data for hash specified twice") - data = args[0] - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return SHA3_256_Hash(data, update_after_digest) - -# The size of the resulting hash in bytes. 
-digest_size = SHA3_256_Hash.digest_size - -# Input block size for HMAC -block_size = 136 diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_256.pyi deleted file mode 100644 index 88436bd..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_256.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA3_256_Hash(object): - digest_size: int - block_size: int - oid: str - def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> SHA3_256_Hash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA3_256_Hash: ... - def new(self, data: Optional[Buffer]) -> SHA3_256_Hash: ... - -def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_256_Hash: ... - -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_384.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_384.py deleted file mode 100644 index 26eeb79..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_384.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Hash.keccak import _raw_keccak_lib - -class SHA3_384_Hash(object): - """A SHA3-384 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 48 - - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.9" - - # Input block size for HMAC - block_size = 104 - - def __init__(self, data, update_after_digest): - self._update_after_digest = update_after_digest - self._digest_done = False - self._padding = 0x06 - - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(self.digest_size * 2), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating SHA-3/384" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. 
- - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating SHA-3/384" - % result) - return self - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - self._digest_done = True - - bfr = create_string_buffer(self.digest_size) - result = _raw_keccak_lib.keccak_digest(self._state.get(), - bfr, - c_size_t(self.digest_size), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while instantiating SHA-3/384" - % result) - - self._digest_value = get_raw_buffer(bfr) - return self._digest_value - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = self.new() - result = _raw_keccak_lib.keccak_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA3-384" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA3-256 hash object.""" - - return type(self)(data, self._update_after_digest) - - - def new(self, data=None): - """Create a fresh SHA3-384 hash object.""" - - return type(self)(data, self._update_after_digest) - - -def new(*args, **kwargs): - """Create a new hash object. - - Args: - data (byte string/byte array/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - update_after_digest (boolean): - Whether :meth:`digest` can be followed by another :meth:`update` - (default: ``False``). - - :Return: A :class:`SHA3_384_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - if len(args) == 1: - if data: - raise ValueError("Initial data for hash specified twice") - data = args[0] - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return SHA3_384_Hash(data, update_after_digest) - -# The size of the resulting hash in bytes. -digest_size = SHA3_384_Hash.digest_size - -# Input block size for HMAC -block_size = 104 diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_384.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_384.pyi deleted file mode 100644 index 98d00c6..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_384.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA3_384_Hash(object): - digest_size: int - block_size: int - oid: str - def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> SHA3_384_Hash: ... - def digest(self) -> bytes: ... 
- def hexdigest(self) -> str: ... - def copy(self) -> SHA3_384_Hash: ... - def new(self, data: Optional[Buffer]) -> SHA3_384_Hash: ... - -def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_384_Hash: ... - -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_512.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_512.py deleted file mode 100644 index 99b1c37..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_512.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Hash.keccak import _raw_keccak_lib - -class SHA3_512_Hash(object): - """A SHA3-512 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The size of the resulting hash in bytes. - digest_size = 64 - - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.10" - - # Input block size for HMAC - block_size = 72 - - def __init__(self, data, update_after_digest): - self._update_after_digest = update_after_digest - self._digest_done = False - self._padding = 0x06 - - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(self.digest_size * 2), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating SHA-3/512" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating SHA-3/512" - % result) - return self - - def digest(self): - - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. 
- :rtype: byte string - """ - - self._digest_done = True - - bfr = create_string_buffer(self.digest_size) - result = _raw_keccak_lib.keccak_digest(self._state.get(), - bfr, - c_size_t(self.digest_size), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while instantiating SHA-3/512" - % result) - - self._digest_value = get_raw_buffer(bfr) - return self._digest_value - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = self.new() - result = _raw_keccak_lib.keccak_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA3-512" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA3-521 hash object.""" - - return type(self)(data, self._update_after_digest) - - -def new(*args, **kwargs): - """Create a new hash object. - - Args: - data (byte string/byte array/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - update_after_digest (boolean): - Whether :meth:`digest` can be followed by another :meth:`update` - (default: ``False``). - - :Return: A :class:`SHA3_512_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - if len(args) == 1: - if data: - raise ValueError("Initial data for hash specified twice") - data = args[0] - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return SHA3_512_Hash(data, update_after_digest) - -# The size of the resulting hash in bytes. -digest_size = SHA3_512_Hash.digest_size - -# Input block size for HMAC -block_size = 72 diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_512.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA3_512.pyi deleted file mode 100644 index cdeec16..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA3_512.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA3_512_Hash(object): - digest_size: int - block_size: int - oid: str - def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> SHA3_512_Hash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA3_512_Hash: ... - def new(self, data: Optional[Buffer]) -> SHA3_512_Hash: ... - -def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_512_Hash: ... - -digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA512.py b/venv/Lib/site-packages/Cryptodome/Hash/SHA512.py deleted file mode 100644 index 5066197..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA512.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr) - -_raw_sha512_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._SHA512", - """ - int SHA512_init(void **shaState, - size_t digest_size); - int SHA512_destroy(void *shaState); - int SHA512_update(void *hs, - const uint8_t *buf, - size_t len); - int SHA512_digest(const void *shaState, - uint8_t *digest, - size_t digest_size); - int SHA512_copy(const void *src, void *dst); - - int SHA512_pbkdf2_hmac_assist(const void *inner, - const void *outer, - const uint8_t *first_digest, - uint8_t *final_digest, - size_t iterations, - size_t digest_size); - """) - -class SHA512Hash(object): - """A SHA-512 hash object (possibly in its truncated version SHA-512/224 or - SHA-512/256. - Do not instantiate directly. Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - - :ivar block_size: the size in bytes of the internal message block, - input to the compression function - :vartype block_size: integer - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - # The internal block size of the hash algorithm in bytes. - block_size = 128 - - def __init__(self, data, truncate): - self._truncate = truncate - - if truncate is None: - self.oid = "2.16.840.1.101.3.4.2.3" - self.digest_size = 64 - elif truncate == "224": - self.oid = "2.16.840.1.101.3.4.2.5" - self.digest_size = 28 - elif truncate == "256": - self.oid = "2.16.840.1.101.3.4.2.6" - self.digest_size = 32 - else: - raise ValueError("Incorrect truncation length. It must be '224' or '256'.") - - state = VoidPointer() - result = _raw_sha512_lib.SHA512_init(state.address_of(), - c_size_t(self.digest_size)) - if result: - raise ValueError("Error %d while instantiating SHA-512" - % result) - self._state = SmartPointer(state.get(), - _raw_sha512_lib.SHA512_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - result = _raw_sha512_lib.SHA512_update(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while hashing data with SHA512" - % result) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. 
- :rtype: byte string - """ - - bfr = create_string_buffer(self.digest_size) - result = _raw_sha512_lib.SHA512_digest(self._state.get(), - bfr, - c_size_t(self.digest_size)) - if result: - raise ValueError("Error %d while making SHA512 digest" - % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :return: A hash object of the same type - """ - - clone = SHA512Hash(None, self._truncate) - result = _raw_sha512_lib.SHA512_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHA512" % result) - return clone - - def new(self, data=None): - """Create a fresh SHA-512 hash object.""" - - return SHA512Hash(data, self._truncate) - - -def new(data=None, truncate=None): - """Create a new hash object. - - Args: - data (bytes/bytearray/memoryview): - Optional. The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`SHA512Hash.update`. - truncate (string): - Optional. The desired length of the digest. It can be either "224" or - "256". If not present, the digest is 512 bits long. - Passing this parameter is **not** equivalent to simply truncating - the output digest. - - :Return: A :class:`SHA512Hash` hash object - """ - - return SHA512Hash(data, truncate) - - -# The size of the full SHA-512 hash in bytes. -digest_size = 64 - -# The internal block size of the hash algorithm in bytes. -block_size = 128 - - -def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): - """Compute the expensive inner loop in PBKDF-HMAC.""" - - assert iterations > 0 - - bfr = create_string_buffer(len(first_digest)); - result = _raw_sha512_lib.SHA512_pbkdf2_hmac_assist( - inner._state.get(), - outer._state.get(), - first_digest, - bfr, - c_size_t(iterations), - c_size_t(len(first_digest))) - - if result: - raise ValueError("Error %d with PBKDF2-HMAC assist for SHA512" % result) - - return get_raw_buffer(bfr) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHA512.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHA512.pyi deleted file mode 100644 index f219ee9..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHA512.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHA512Hash(object): - digest_size: int - block_size: int - oid: str - - def __init__(self, - data: Optional[Buffer], - truncate: Optional[str]) -> None: ... - def update(self, data: Buffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> SHA512Hash: ... - def new(self, data: Optional[Buffer] = ...) -> SHA512Hash: ... - -def new(data: Optional[Buffer] = ..., - truncate: Optional[str] = ...) -> SHA512Hash: ... 
-digest_size: int -block_size: int diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE128.py b/venv/Lib/site-packages/Cryptodome/Hash/SHAKE128.py deleted file mode 100644 index 847b514..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE128.py +++ /dev/null @@ -1,145 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Hash.keccak import _raw_keccak_lib - -class SHAKE128_XOF(object): - """A SHAKE128 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - """ - - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.11" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(32), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating SHAKE128" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - self._is_squeezing = False - self._padding = 0x1F - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._is_squeezing: - raise TypeError("You cannot call 'update' after the first 'read'") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating SHAKE128 state" - % result) - return self - - def read(self, length): - """ - Compute the next piece of XOF output. - - .. note:: - You cannot use :meth:`update` anymore after the first call to - :meth:`read`. 
- - Args: - length (integer): the amount of bytes this method must return - - :return: the next piece of XOF output (of the given length) - :rtype: byte string - """ - - self._is_squeezing = True - bfr = create_string_buffer(length) - result = _raw_keccak_lib.keccak_squeeze(self._state.get(), - bfr, - c_size_t(length), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while extracting from SHAKE128" - % result) - - return get_raw_buffer(bfr) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - - :return: A hash object of the same type - """ - - clone = self.new() - result = _raw_keccak_lib.keccak_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHAKE128" % result) - return clone - - def new(self, data=None): - return type(self)(data=data) - - -def new(data=None): - """Return a fresh instance of a SHAKE128 object. - - Args: - data (bytes/bytearray/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - Optional. - - :Return: A :class:`SHAKE128_XOF` object - """ - - return SHAKE128_XOF(data=data) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE128.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHAKE128.pyi deleted file mode 100644 index de51d8e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE128.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHAKE128_XOF(object): - oid: str - def __init__(self, - data: Optional[Buffer] = ...) -> None: ... - def update(self, data: Buffer) -> SHAKE128_XOF: ... - def read(self, length: int) -> bytes: ... - def copy(self) -> SHAKE128_XOF: ... - def new(self, data: Optional[Buffer] = ...) -> SHAKE128_XOF: ... - -def new(data: Optional[Buffer] = ...) -> SHAKE128_XOF: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE256.py b/venv/Lib/site-packages/Cryptodome/Hash/SHAKE256.py deleted file mode 100644 index 637044e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE256.py +++ /dev/null @@ -1,146 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Hash.keccak import _raw_keccak_lib - -class SHAKE256_XOF(object): - """A SHAKE256 hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar oid: ASN.1 Object ID - :vartype oid: string - """ - - # ASN.1 Object ID - oid = "2.16.840.1.101.3.4.2.12" - - def __init__(self, data=None): - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(64), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating SHAKE256" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - self._is_squeezing = False - self._padding = 0x1F - - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._is_squeezing: - raise TypeError("You cannot call 'update' after the first 'read'") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating SHAKE256 state" - % result) - return self - - def read(self, length): - """ - Compute the next piece of XOF output. - - .. note:: - You cannot use :meth:`update` anymore after the first call to - :meth:`read`. - - Args: - length (integer): the amount of bytes this method must return - - :return: the next piece of XOF output (of the given length) - :rtype: byte string - """ - - self._is_squeezing = True - bfr = create_string_buffer(length) - result = _raw_keccak_lib.keccak_squeeze(self._state.get(), - bfr, - c_size_t(length), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while extracting from SHAKE256" - % result) - - return get_raw_buffer(bfr) - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - - :return: A hash object of the same type - """ - - clone = self.new() - result = _raw_keccak_lib.keccak_copy(self._state.get(), - clone._state.get()) - if result: - raise ValueError("Error %d while copying SHAKE256" % result) - return clone - - def new(self, data=None): - return type(self)(data=data) - - -def new(data=None): - """Return a fresh instance of a SHAKE256 object. - - Args: - data (bytes/bytearray/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - Optional. 
- - :Return: A :class:`SHAKE256_XOF` object - """ - - return SHAKE256_XOF(data=data) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/SHAKE256.pyi deleted file mode 100644 index 72eb898..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/SHAKE256.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class SHAKE256_XOF(object): - oid: str - def __init__(self, - data: Optional[Buffer] = ...) -> None: ... - def update(self, data: Buffer) -> SHAKE256_XOF: ... - def read(self, length: int) -> bytes: ... - def copy(self) -> SHAKE256_XOF: ... - def new(self, data: Optional[Buffer] = ...) -> SHAKE256_XOF: ... - -def new(data: Optional[Buffer] = ...) -> SHAKE256_XOF: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash128.py b/venv/Lib/site-packages/Cryptodome/Hash/TupleHash128.py deleted file mode 100644 index 49aeccc..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash128.py +++ /dev/null @@ -1,136 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bord, is_bytes, tobytes - -from . import cSHAKE128 -from .cSHAKE128 import _encode_str, _right_encode - - -class TupleHash(object): - """A Tuple hash object. - Do not instantiate directly. - Use the :func:`new` function. - """ - - def __init__(self, custom, cshake, digest_size): - - self.digest_size = digest_size - - self._cshake = cshake._new(b'', custom, b'TupleHash') - self._digest = None - - def update(self, *data): - """Authenticate the next tuple of byte strings. - TupleHash guarantees the logical separation between each byte string. - - Args: - data (bytes/bytearray/memoryview): One or more items to hash. 
- """ - - if self._digest is not None: - raise TypeError("You cannot call 'update' after 'digest' or 'hexdigest'") - - for item in data: - if not is_bytes(item): - raise TypeError("You can only call 'update' on bytes" ) - self._cshake.update(_encode_str(item)) - - return self - - def digest(self): - """Return the **binary** (non-printable) digest of the tuple of byte strings. - - :return: The hash digest. Binary form. - :rtype: byte string - """ - - if self._digest is None: - self._cshake.update(_right_encode(self.digest_size * 8)) - self._digest = self._cshake.read(self.digest_size) - - return self._digest - - def hexdigest(self): - """Return the **printable** digest of the tuple of byte strings. - - :return: The hash digest. Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) - - def new(self, **kwargs): - """Return a new instance of a TupleHash object. - See :func:`new`. - """ - - if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: - kwargs["digest_bytes"] = self.digest_size - - return new(**kwargs) - - -def new(**kwargs): - """Create a new TupleHash128 object. - - Args: - digest_bytes (integer): - Optional. The size of the digest, in bytes. - Default is 64. Minimum is 8. - digest_bits (integer): - Optional and alternative to ``digest_bytes``. - The size of the digest, in bits (and in steps of 8). - Default is 512. Minimum is 64. - custom (bytes): - Optional. - A customization bytestring (``S`` in SP 800-185). - - :Return: A :class:`TupleHash` object - """ - - digest_bytes = kwargs.pop("digest_bytes", None) - digest_bits = kwargs.pop("digest_bits", None) - if None not in (digest_bytes, digest_bits): - raise TypeError("Only one digest parameter must be provided") - if (None, None) == (digest_bytes, digest_bits): - digest_bytes = 64 - if digest_bytes is not None: - if digest_bytes < 8: - raise ValueError("'digest_bytes' must be at least 8") - else: - if digest_bits < 64 or digest_bits % 8: - raise ValueError("'digest_bytes' must be at least 64 " - "in steps of 8") - digest_bytes = digest_bits // 8 - - custom = kwargs.pop("custom", b'') - - return TupleHash(custom, cSHAKE128, digest_bytes) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash128.pyi b/venv/Lib/site-packages/Cryptodome/Hash/TupleHash128.pyi deleted file mode 100644 index 2e0ea83..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash128.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Any, Union, List, Tuple -from types import ModuleType - -Buffer = Union[bytes, bytearray, memoryview] - -class TupleHash(object): - digest_size: int - def __init__(self, - custom: bytes, - cshake: ModuleType, - digest_size: int) -> None: ... - def update(self, *data: Buffer) -> TupleHash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def new(self, - digest_bytes: int = ..., - digest_bits: int = ..., - custom: int = ...) -> TupleHash: ... - -def new(digest_bytes: int = ..., - digest_bits: int = ..., - custom: int = ...) -> TupleHash: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash256.py b/venv/Lib/site-packages/Cryptodome/Hash/TupleHash256.py deleted file mode 100644 index 40a824a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash256.py +++ /dev/null @@ -1,70 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from . import cSHAKE256 -from .TupleHash128 import TupleHash - - -def new(**kwargs): - """Create a new TupleHash256 object. - - Args: - digest_bytes (integer): - Optional. The size of the digest, in bytes. - Default is 64. Minimum is 8. - digest_bits (integer): - Optional and alternative to ``digest_bytes``. - The size of the digest, in bits (and in steps of 8). - Default is 512. Minimum is 64. - custom (bytes): - Optional. - A customization bytestring (``S`` in SP 800-185). - - :Return: A :class:`TupleHash` object - """ - - digest_bytes = kwargs.pop("digest_bytes", None) - digest_bits = kwargs.pop("digest_bits", None) - if None not in (digest_bytes, digest_bits): - raise TypeError("Only one digest parameter must be provided") - if (None, None) == (digest_bytes, digest_bits): - digest_bytes = 64 - if digest_bytes is not None: - if digest_bytes < 8: - raise ValueError("'digest_bytes' must be at least 8") - else: - if digest_bits < 64 or digest_bits % 8: - raise ValueError("'digest_bytes' must be at least 64 " - "in steps of 8") - digest_bytes = digest_bits // 8 - - custom = kwargs.pop("custom", b'') - - return TupleHash(custom, cSHAKE256, digest_bytes) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/TupleHash256.pyi deleted file mode 100644 index 82d943f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TupleHash256.pyi +++ /dev/null @@ -1,5 +0,0 @@ -from .TupleHash128 import TupleHash - -def new(digest_bytes: int = ..., - digest_bits: int = ..., - custom: int = ...) -> TupleHash: ... 
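The hunks above delete the vendored TupleHash128 and TupleHash256 modules from the checked-in venv/. Per the removed modules' own docstrings, the API takes an optional digest_bytes/digest_bits/custom in new(), hashes one or more byte strings per update() call with logical separation between items, and returns the result via digest()/hexdigest(). The sketch below is illustrative only; it assumes the library is installed normally from PyPI (the pycryptodomex distribution, which provides the Cryptodome namespace) instead of the venv copy removed here.

    # Minimal sketch, assuming pycryptodomex is installed from PyPI
    # (it provides the same Cryptodome.Hash.TupleHash128 module deleted above).
    from Cryptodome.Hash import TupleHash128

    # Two tuples whose concatenation is identical still hash differently,
    # because TupleHash length-encodes each item before absorbing it.
    h1 = TupleHash128.new(digest_bytes=32)   # 32-byte digest; default is 64, minimum 8
    h1.update(b"ab", b"c")

    h2 = TupleHash128.new(digest_bytes=32)
    h2.update(b"a", b"bc")

    assert h1.hexdigest() != h2.hexdigest()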
diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE128.py b/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE128.py deleted file mode 100644 index 92ac59e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE128.py +++ /dev/null @@ -1,112 +0,0 @@ -from Cryptodome.Util._raw_api import (VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.Util.py3compat import bchr - -from .keccak import _raw_keccak_lib - - -class TurboSHAKE(object): - """A TurboSHAKE hash object. - Do not instantiate directly. - Use the :func:`new` function. - """ - - def __init__(self, capacity, domain_separation, data): - - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(capacity), - c_ubyte(12)) # Reduced number of rounds - if result: - raise ValueError("Error %d while instantiating TurboSHAKE" - % result) - self._state = SmartPointer(state.get(), _raw_keccak_lib.keccak_destroy) - - self._is_squeezing = False - self._capacity = capacity - self._domain = domain_separation - - if data: - self.update(data) - - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._is_squeezing: - raise TypeError("You cannot call 'update' after the first 'read'") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating TurboSHAKE state" - % result) - return self - - def read(self, length): - """ - Compute the next piece of XOF output. - - .. note:: - You cannot use :meth:`update` anymore after the first call to - :meth:`read`. - - Args: - length (integer): the amount of bytes this method must return - - :return: the next piece of XOF output (of the given length) - :rtype: byte string - """ - - self._is_squeezing = True - bfr = create_string_buffer(length) - result = _raw_keccak_lib.keccak_squeeze(self._state.get(), - bfr, - c_size_t(length), - c_ubyte(self._domain)) - if result: - raise ValueError("Error %d while extracting from TurboSHAKE" - % result) - - return get_raw_buffer(bfr) - - def new(self, data=None): - return type(self)(self._capacity, self._domain, data) - - def _reset(self): - result = _raw_keccak_lib.keccak_reset(self._state.get()) - if result: - raise ValueError("Error %d while resetting TurboSHAKE state" - % result) - self._is_squeezing = False - - -def new(**kwargs): - """Create a new TurboSHAKE128 object. - - Args: - domain (integer): - Optional - A domain separation byte, between 0x01 and 0x7F. - The default value is 0x1F. - data (bytes/bytearray/memoryview): - Optional - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. 
- - :Return: A :class:`TurboSHAKE` object - """ - - domain_separation = kwargs.get('domain', 0x1F) - if not (0x01 <= domain_separation <= 0x7F): - raise ValueError("Incorrect domain separation value (%d)" % - domain_separation) - data = kwargs.get('data') - return TurboSHAKE(32, domain_separation, data=data) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE128.pyi b/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE128.pyi deleted file mode 100644 index d74c9c0..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE128.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Union, Optional -from typing_extensions import TypedDict, Unpack, NotRequired - -Buffer = Union[bytes, bytearray, memoryview] - -class TurboSHAKE(object): - - def __init__(self, capacity: int, domain_separation: int, data: Union[Buffer, None]) -> None: ... - def update(self, data: Buffer) -> TurboSHAKE : ... - def read(self, length: int) -> bytes: ... - def new(self, data: Optional[Buffer]=None) -> TurboSHAKE: ... - -class Args(TypedDict): - domain: NotRequired[int] - data: NotRequired[Buffer] - -def new(**kwargs: Unpack[Args]) -> TurboSHAKE: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE256.py b/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE256.py deleted file mode 100644 index ce27a48..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE256.py +++ /dev/null @@ -1,22 +0,0 @@ -from .TurboSHAKE128 import TurboSHAKE - -def new(**kwargs): - """Create a new TurboSHAKE256 object. - - Args: - domain (integer): - Optional - A domain separation byte, between 0x01 and 0x7F. - The default value is 0x1F. - data (bytes/bytearray/memoryview): - Optional - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - - :Return: A :class:`TurboSHAKE` object - """ - - domain_separation = kwargs.get('domain', 0x1F) - if not (0x01 <= domain_separation <= 0x7F): - raise ValueError("Incorrect domain separation value (%d)" % - domain_separation) - data = kwargs.get('data') - return TurboSHAKE(64, domain_separation, data=data) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE256.pyi deleted file mode 100644 index 561e946..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/TurboSHAKE256.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Union -from typing_extensions import TypedDict, Unpack, NotRequired - -from .TurboSHAKE128 import TurboSHAKE - -Buffer = Union[bytes, bytearray, memoryview] - -class Args(TypedDict): - domain: NotRequired[int] - data: NotRequired[Buffer] - -def new(**kwargs: Unpack[Args]) -> TurboSHAKE: ... 
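Similarly, the TurboSHAKE128/TurboSHAKE256 modules deleted above are extendable-output functions (XOFs): data is absorbed with update() and an arbitrary number of output bytes is then squeezed with read(). A minimal sketch against that API, again assuming pycryptodomex is installed as a regular dependency:

from Cryptodome.Hash import TurboSHAKE128

xof = TurboSHAKE128.new(data=b"hello ")   # optional first chunk; default domain byte is 0x1F
xof.update(b"world")                      # keep absorbing
block1 = xof.read(16)                     # squeeze the first 16 output bytes
block2 = xof.read(16)                     # the next 16 bytes of the same output stream
# calling update() after the first read() raises TypeError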
diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_BLAKE2b.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_BLAKE2b.pyd deleted file mode 100644 index 6595612..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_BLAKE2b.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_BLAKE2s.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_BLAKE2s.pyd deleted file mode 100644 index 8e72ab5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_BLAKE2s.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_MD2.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_MD2.pyd deleted file mode 100644 index 6b851c4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_MD2.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_MD4.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_MD4.pyd deleted file mode 100644 index 605ff65..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_MD4.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_MD5.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_MD5.pyd deleted file mode 100644 index 1e90224..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_MD5.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_RIPEMD160.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_RIPEMD160.pyd deleted file mode 100644 index f14a4d5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_RIPEMD160.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_SHA1.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_SHA1.pyd deleted file mode 100644 index de7f64c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_SHA1.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_SHA224.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_SHA224.pyd deleted file mode 100644 index c1d5fb7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_SHA224.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_SHA256.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_SHA256.pyd deleted file mode 100644 index 0b62e23..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_SHA256.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_SHA384.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_SHA384.pyd deleted file mode 100644 index 1d3ee59..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_SHA384.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_SHA512.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_SHA512.pyd deleted file mode 100644 index e9dfbd8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_SHA512.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__init__.py b/venv/Lib/site-packages/Cryptodome/Hash/__init__.py deleted file mode 100644 index 80446e4..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/__init__.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__all__ = ['HMAC', 'MD2', 'MD4', 'MD5', 'RIPEMD160', 'SHA1', - 'SHA224', 'SHA256', 'SHA384', 'SHA512', - 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512', - 'CMAC', 'Poly1305', - 'cSHAKE128', 'cSHAKE256', 'KMAC128', 'KMAC256', - 'TupleHash128', 'TupleHash256', 'KangarooTwelve', - 'TurboSHAKE128', 'TurboSHAKE256'] - -def new(name): - """Return a new hash instance, based on its name or - on its ASN.1 Object ID""" - - name = name.upper() - if name in ("1.3.14.3.2.26", "SHA1", "SHA-1"): - from . import SHA1 - return SHA1.new() - if name in ("2.16.840.1.101.3.4.2.4", "SHA224", "SHA-224"): - from . import SHA224 - return SHA224.new() - if name in ("2.16.840.1.101.3.4.2.1", "SHA256", "SHA-256"): - from . import SHA256 - return SHA256.new() - if name in ("2.16.840.1.101.3.4.2.2", "SHA384", "SHA-384"): - from . import SHA384 - return SHA384.new() - if name in ("2.16.840.1.101.3.4.2.3", "SHA512", "SHA-512"): - from . import SHA512 - return SHA512.new() - if name in ("2.16.840.1.101.3.4.2.5", "SHA512-224", "SHA-512-224"): - from . import SHA512 - return SHA512.new(truncate='224') - if name in ("2.16.840.1.101.3.4.2.6", "SHA512-256", "SHA-512-256"): - from . import SHA512 - return SHA512.new(truncate='256') - if name in ("2.16.840.1.101.3.4.2.7", "SHA3-224", "SHA-3-224"): - from . import SHA3_224 - return SHA3_224.new() - if name in ("2.16.840.1.101.3.4.2.8", "SHA3-256", "SHA-3-256"): - from . import SHA3_256 - return SHA3_256.new() - if name in ("2.16.840.1.101.3.4.2.9", "SHA3-384", "SHA-3-384"): - from . import SHA3_384 - return SHA3_384.new() - if name in ("2.16.840.1.101.3.4.2.10", "SHA3-512", "SHA-3-512"): - from . import SHA3_512 - return SHA3_512.new() - else: - raise ValueError("Unknown hash %s" % str(name)) - diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__init__.pyi b/venv/Lib/site-packages/Cryptodome/Hash/__init__.pyi deleted file mode 100644 index b072157..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/__init__.pyi +++ /dev/null @@ -1,57 +0,0 @@ -from typing import overload -from typing_extensions import Literal - -from Cryptodome.Hash.SHA1 import SHA1Hash -from Cryptodome.Hash.SHA224 import SHA224Hash -from Cryptodome.Hash.SHA256 import SHA256Hash -from Cryptodome.Hash.SHA384 import SHA384Hash -from Cryptodome.Hash.SHA512 import SHA512Hash -from Cryptodome.Hash.SHA3_224 import SHA3_224_Hash -from Cryptodome.Hash.SHA3_256 import SHA3_256_Hash -from Cryptodome.Hash.SHA3_384 import SHA3_384_Hash -from Cryptodome.Hash.SHA3_512 import SHA3_512_Hash - -@overload -def new(name: Literal["1.3.14.3.2.26"]) -> SHA1Hash: ... -@overload -def new(name: Literal["SHA1"]) -> SHA1Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.4"]) -> SHA224Hash: ... -@overload -def new(name: Literal["SHA224"]) -> SHA224Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.1"]) -> SHA256Hash: ... -@overload -def new(name: Literal["SHA256"]) -> SHA256Hash: ... 
-@overload -def new(name: Literal["2.16.840.1.101.3.4.2.2"]) -> SHA384Hash: ... -@overload -def new(name: Literal["SHA384"]) -> SHA384Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.3"]) -> SHA512Hash: ... -@overload -def new(name: Literal["SHA512"]) -> SHA512Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.5"]) -> SHA512Hash: ... -@overload -def new(name: Literal["SHA512-224"]) -> SHA512Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.6"]) -> SHA512Hash: ... -@overload -def new(name: Literal["SHA512-256"]) -> SHA512Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.7"]) -> SHA3_224_Hash: ... -@overload -def new(name: Literal["SHA3-224"]) -> SHA3_224_Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.8"]) -> SHA3_256_Hash: ... -@overload -def new(name: Literal["SHA3-256"]) -> SHA3_256_Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.9"]) -> SHA3_384_Hash: ... -@overload -def new(name: Literal["SHA3-384"]) -> SHA3_384_Hash: ... -@overload -def new(name: Literal["2.16.840.1.101.3.4.2.10"]) -> SHA3_512_Hash: ... -@overload -def new(name: Literal["SHA3-512"]) -> SHA3_512_Hash: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/BLAKE2b.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/BLAKE2b.cpython-312.pyc deleted file mode 100644 index 56b2c0d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/BLAKE2b.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/BLAKE2s.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/BLAKE2s.cpython-312.pyc deleted file mode 100644 index 0b08ce8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/BLAKE2s.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/CMAC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/CMAC.cpython-312.pyc deleted file mode 100644 index 4b9c4de..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/CMAC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/HMAC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/HMAC.cpython-312.pyc deleted file mode 100644 index 9a584f1..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/HMAC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KMAC128.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KMAC128.cpython-312.pyc deleted file mode 100644 index 0fd3bec..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KMAC128.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KMAC256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KMAC256.cpython-312.pyc deleted file mode 100644 index 2749db9..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KMAC256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KangarooTwelve.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KangarooTwelve.cpython-312.pyc deleted file mode 100644 index ceb5300..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/KangarooTwelve.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD2.cpython-312.pyc 
b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD2.cpython-312.pyc deleted file mode 100644 index 317500e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD4.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD4.cpython-312.pyc deleted file mode 100644 index aba0265..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD4.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD5.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD5.cpython-312.pyc deleted file mode 100644 index fdcadc4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/MD5.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/Poly1305.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/Poly1305.cpython-312.pyc deleted file mode 100644 index 728ea56..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/Poly1305.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/RIPEMD.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/RIPEMD.cpython-312.pyc deleted file mode 100644 index eb34845..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/RIPEMD.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/RIPEMD160.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/RIPEMD160.cpython-312.pyc deleted file mode 100644 index 45da5fb..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/RIPEMD160.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA.cpython-312.pyc deleted file mode 100644 index 5ec80a1..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA1.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA1.cpython-312.pyc deleted file mode 100644 index 3dafd13..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA1.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA224.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA224.cpython-312.pyc deleted file mode 100644 index 06b393b..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA224.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA256.cpython-312.pyc deleted file mode 100644 index cea3767..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA384.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA384.cpython-312.pyc deleted file mode 100644 index 0344ebe..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA384.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_224.cpython-312.pyc 
b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_224.cpython-312.pyc deleted file mode 100644 index 94aae1d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_224.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_256.cpython-312.pyc deleted file mode 100644 index 5905c0c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_384.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_384.cpython-312.pyc deleted file mode 100644 index a26c2c4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_384.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_512.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_512.cpython-312.pyc deleted file mode 100644 index 6578ce9..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA3_512.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA512.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA512.cpython-312.pyc deleted file mode 100644 index 24eaa80..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHA512.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHAKE128.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHAKE128.cpython-312.pyc deleted file mode 100644 index 135bec8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHAKE128.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHAKE256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHAKE256.cpython-312.pyc deleted file mode 100644 index fdf961e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/SHAKE256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TupleHash128.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TupleHash128.cpython-312.pyc deleted file mode 100644 index 00c2327..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TupleHash128.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TupleHash256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TupleHash256.cpython-312.pyc deleted file mode 100644 index 3948dc7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TupleHash256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TurboSHAKE128.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TurboSHAKE128.cpython-312.pyc deleted file mode 100644 index fb86242..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TurboSHAKE128.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TurboSHAKE256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TurboSHAKE256.cpython-312.pyc deleted file mode 100644 index b82d18d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/TurboSHAKE256.cpython-312.pyc and /dev/null 
differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 48b0353..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/cSHAKE128.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/cSHAKE128.cpython-312.pyc deleted file mode 100644 index 3670a2a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/cSHAKE128.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/cSHAKE256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/cSHAKE256.cpython-312.pyc deleted file mode 100644 index 33e57f9..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/cSHAKE256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/keccak.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/keccak.cpython-312.pyc deleted file mode 100644 index 2957108..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/__pycache__/keccak.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_ghash_clmul.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_ghash_clmul.pyd deleted file mode 100644 index 55b4f71..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_ghash_clmul.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_ghash_portable.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_ghash_portable.pyd deleted file mode 100644 index 4da569c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_ghash_portable.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_keccak.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_keccak.pyd deleted file mode 100644 index 9aee0e8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_keccak.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/_poly1305.pyd b/venv/Lib/site-packages/Cryptodome/Hash/_poly1305.pyd deleted file mode 100644 index 0b7756e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Hash/_poly1305.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE128.py b/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE128.py deleted file mode 100644 index 064b3d6..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE128.py +++ /dev/null @@ -1,187 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bchr, concat_buffers - -from Cryptodome.Util._raw_api import (VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -from Cryptodome.Util.number import long_to_bytes - -from Cryptodome.Hash.keccak import _raw_keccak_lib - - -def _left_encode(x): - """Left encode function as defined in NIST SP 800-185""" - - assert (x < (1 << 2040) and x >= 0) - - # Get number of bytes needed to represent this integer. - num = 1 if x == 0 else (x.bit_length() + 7) // 8 - - return bchr(num) + long_to_bytes(x) - - -def _right_encode(x): - """Right encode function as defined in NIST SP 800-185""" - - assert (x < (1 << 2040) and x >= 0) - - # Get number of bytes needed to represent this integer. - num = 1 if x == 0 else (x.bit_length() + 7) // 8 - - return long_to_bytes(x) + bchr(num) - - -def _encode_str(x): - """Encode string function as defined in NIST SP 800-185""" - - bitlen = len(x) * 8 - if bitlen >= (1 << 2040): - raise ValueError("String too large to encode in cSHAKE") - - return concat_buffers(_left_encode(bitlen), x) - - -def _bytepad(x, length): - """Zero pad byte string as defined in NIST SP 800-185""" - - to_pad = concat_buffers(_left_encode(length), x) - - # Note: this implementation works with byte aligned strings, - # hence no additional bit padding is needed at this point. - npad = (length - len(to_pad) % length) % length - - return to_pad + b'\x00' * npad - - -class cSHAKE_XOF(object): - """A cSHAKE hash object. - Do not instantiate directly. - Use the :func:`new` function. - """ - - def __init__(self, data, custom, capacity, function): - state = VoidPointer() - - if custom or function: - prefix_unpad = _encode_str(function) + _encode_str(custom) - prefix = _bytepad(prefix_unpad, (1600 - capacity)//8) - self._padding = 0x04 - else: - prefix = None - self._padding = 0x1F # for SHAKE - - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(capacity//8), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating cSHAKE" - % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - self._is_squeezing = False - - if prefix: - self.update(prefix) - - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._is_squeezing: - raise TypeError("You cannot call 'update' after the first 'read'") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating %s state" - % (result, self.name)) - return self - - def read(self, length): - """ - Compute the next piece of XOF output. - - .. note:: - You cannot use :meth:`update` anymore after the first call to - :meth:`read`. 
- - Args: - length (integer): the amount of bytes this method must return - - :return: the next piece of XOF output (of the given length) - :rtype: byte string - """ - - self._is_squeezing = True - bfr = create_string_buffer(length) - result = _raw_keccak_lib.keccak_squeeze(self._state.get(), - bfr, - c_size_t(length), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while extracting from %s" - % (result, self.name)) - - return get_raw_buffer(bfr) - - -def _new(data, custom, function): - # Use Keccak[256] - return cSHAKE_XOF(data, custom, 256, function) - - -def new(data=None, custom=None): - """Return a fresh instance of a cSHAKE128 object. - - Args: - data (bytes/bytearray/memoryview): - Optional. - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - custom (bytes): - Optional. - A customization bytestring (``S`` in SP 800-185). - - :Return: A :class:`cSHAKE_XOF` object - """ - - # Use Keccak[256] - return cSHAKE_XOF(data, custom, 256, b'') diff --git a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE128.pyi b/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE128.pyi deleted file mode 100644 index 1452fea..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE128.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -class cSHAKE_XOF(object): - def __init__(self, - data: Optional[Buffer] = ..., - function: Optional[bytes] = ..., - custom: Optional[bytes] = ...) -> None: ... - def update(self, data: Buffer) -> cSHAKE_XOF: ... - def read(self, length: int) -> bytes: ... - -def new(data: Optional[Buffer] = ..., - custom: Optional[Buffer] = ...) -> cSHAKE_XOF: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE256.py b/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE256.py deleted file mode 100644 index a5b8701..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE256.py +++ /dev/null @@ -1,56 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2021, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -from Cryptodome.Util._raw_api import c_size_t -from Cryptodome.Hash.cSHAKE128 import cSHAKE_XOF - - -def _new(data, custom, function): - # Use Keccak[512] - return cSHAKE_XOF(data, custom, 512, function) - - -def new(data=None, custom=None): - """Return a fresh instance of a cSHAKE256 object. - - Args: - data (bytes/bytearray/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`update`. - Optional. - custom (bytes): - Optional. - A customization bytestring (``S`` in SP 800-185). - - :Return: A :class:`cSHAKE_XOF` object - """ - - # Use Keccak[512] - return cSHAKE_XOF(data, custom, 512, b'') diff --git a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE256.pyi b/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE256.pyi deleted file mode 100644 index b910bb6..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/cSHAKE256.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Union, Optional - -from Cryptodome.Hash.cSHAKE128 import cSHAKE_XOF - -Buffer = Union[bytes, bytearray, memoryview] - -def new(data: Optional[Buffer] = ..., - custom: Optional[Buffer] = ...) -> cSHAKE_XOF: ... diff --git a/venv/Lib/site-packages/Cryptodome/Hash/keccak.py b/venv/Lib/site-packages/Cryptodome/Hash/keccak.py deleted file mode 100644 index f2af202..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/keccak.py +++ /dev/null @@ -1,181 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - VoidPointer, SmartPointer, - create_string_buffer, - get_raw_buffer, c_size_t, - c_uint8_ptr, c_ubyte) - -_raw_keccak_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._keccak", - """ - int keccak_init(void **state, - size_t capacity_bytes, - uint8_t rounds); - int keccak_destroy(void *state); - int keccak_absorb(void *state, - const uint8_t *in, - size_t len); - int keccak_squeeze(const void *state, - uint8_t *out, - size_t len, - uint8_t padding); - int keccak_digest(void *state, - uint8_t *digest, - size_t len, - uint8_t padding); - int keccak_copy(const void *src, void *dst); - int keccak_reset(void *state); - """) - -class Keccak_Hash(object): - """A Keccak hash object. - Do not instantiate directly. - Use the :func:`new` function. - - :ivar digest_size: the size in bytes of the resulting hash - :vartype digest_size: integer - """ - - def __init__(self, data, digest_bytes, update_after_digest): - # The size of the resulting hash in bytes. - self.digest_size = digest_bytes - - self._update_after_digest = update_after_digest - self._digest_done = False - self._padding = 0x01 - - state = VoidPointer() - result = _raw_keccak_lib.keccak_init(state.address_of(), - c_size_t(self.digest_size * 2), - c_ubyte(24)) - if result: - raise ValueError("Error %d while instantiating keccak" % result) - self._state = SmartPointer(state.get(), - _raw_keccak_lib.keccak_destroy) - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Args: - data (byte string/byte array/memoryview): The next chunk of the message being hashed. - """ - - if self._digest_done and not self._update_after_digest: - raise TypeError("You can only call 'digest' or 'hexdigest' on this object") - - result = _raw_keccak_lib.keccak_absorb(self._state.get(), - c_uint8_ptr(data), - c_size_t(len(data))) - if result: - raise ValueError("Error %d while updating keccak" % result) - return self - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Binary form. - :rtype: byte string - """ - - self._digest_done = True - bfr = create_string_buffer(self.digest_size) - result = _raw_keccak_lib.keccak_digest(self._state.get(), - bfr, - c_size_t(self.digest_size), - c_ubyte(self._padding)) - if result: - raise ValueError("Error %d while squeezing keccak" % result) - - return get_raw_buffer(bfr) - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - :return: The hash digest, computed over the data processed so far. - Hexadecimal encoded. - :rtype: string - """ - - return "".join(["%02x" % bord(x) for x in self.digest()]) - - def new(self, **kwargs): - """Create a fresh Keccak hash object.""" - - if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: - kwargs["digest_bytes"] = self.digest_size - - return new(**kwargs) - - -def new(**kwargs): - """Create a new hash object. - - Args: - data (bytes/bytearray/memoryview): - The very first chunk of the message to hash. - It is equivalent to an early call to :meth:`Keccak_Hash.update`. - digest_bytes (integer): - The size of the digest, in bytes (28, 32, 48, 64). - digest_bits (integer): - The size of the digest, in bits (224, 256, 384, 512). 
- update_after_digest (boolean): - Whether :meth:`Keccak.digest` can be followed by another - :meth:`Keccak.update` (default: ``False``). - - :Return: A :class:`Keccak_Hash` hash object - """ - - data = kwargs.pop("data", None) - update_after_digest = kwargs.pop("update_after_digest", False) - - digest_bytes = kwargs.pop("digest_bytes", None) - digest_bits = kwargs.pop("digest_bits", None) - if None not in (digest_bytes, digest_bits): - raise TypeError("Only one digest parameter must be provided") - if (None, None) == (digest_bytes, digest_bits): - raise TypeError("Digest size (bits, bytes) not provided") - if digest_bytes is not None: - if digest_bytes not in (28, 32, 48, 64): - raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64") - else: - if digest_bits not in (224, 256, 384, 512): - raise ValueError("'digest_bytes' must be: 224, 256, 384 or 512") - digest_bytes = digest_bits // 8 - - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - return Keccak_Hash(data, digest_bytes, update_after_digest) diff --git a/venv/Lib/site-packages/Cryptodome/Hash/keccak.pyi b/venv/Lib/site-packages/Cryptodome/Hash/keccak.pyi deleted file mode 100644 index 844d256..0000000 --- a/venv/Lib/site-packages/Cryptodome/Hash/keccak.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Union, Any - -Buffer = Union[bytes, bytearray, memoryview] - -class Keccak_Hash(object): - digest_size: int - def __init__(self, - data: Buffer, - digest_bytes: int, - update_after_digest: bool) -> None: ... - def update(self, data: Buffer) -> Keccak_Hash: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def new(self, - data: Buffer = ..., - digest_bytes: int = ..., - digest_bits: int = ..., - update_after_digest: bool = ...) -> Keccak_Hash: ... - -def new(data: Buffer = ..., - digest_bytes: int = ..., - digest_bits: int = ..., - update_after_digest: bool = ...) -> Keccak_Hash: ... diff --git a/venv/Lib/site-packages/Cryptodome/IO/PEM.py b/venv/Lib/site-packages/Cryptodome/IO/PEM.py deleted file mode 100644 index 61fe920..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/PEM.py +++ /dev/null @@ -1,191 +0,0 @@ -# -# Util/PEM.py : Privacy Enhanced Mail utilities -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -__all__ = ['encode', 'decode'] - -import re -from binascii import a2b_base64, b2a_base64, hexlify, unhexlify - -from Cryptodome.Hash import MD5 -from Cryptodome.Util.Padding import pad, unpad -from Cryptodome.Cipher import DES, DES3, AES -from Cryptodome.Protocol.KDF import PBKDF1 -from Cryptodome.Random import get_random_bytes -from Cryptodome.Util.py3compat import tobytes, tostr - - -def encode(data, marker, passphrase=None, randfunc=None): - """Encode a piece of binary data into PEM format. - - Args: - data (byte string): - The piece of binary data to encode. - marker (string): - The marker for the PEM block (e.g. "PUBLIC KEY"). - Note that there is no official master list for all allowed markers. - Still, you can refer to the OpenSSL_ source code. - passphrase (byte string): - If given, the PEM block will be encrypted. The key is derived from - the passphrase. - randfunc (callable): - Random number generation function; it accepts an integer N and returns - a byte string of random data, N bytes long. If not given, a new one is - instantiated. - - Returns: - The PEM block, as a string. - - .. _OpenSSL: https://github.com/openssl/openssl/blob/master/include/openssl/pem.h - """ - - if randfunc is None: - randfunc = get_random_bytes - - out = "-----BEGIN %s-----\n" % marker - if passphrase: - # We only support 3DES for encryption - salt = randfunc(8) - key = PBKDF1(passphrase, salt, 16, 1, MD5) - key += PBKDF1(key + passphrase, salt, 8, 1, MD5) - objenc = DES3.new(key, DES3.MODE_CBC, salt) - out += "Proc-Type: 4,ENCRYPTED\nDEK-Info: DES-EDE3-CBC,%s\n\n" %\ - tostr(hexlify(salt).upper()) - # Encrypt with PKCS#7 padding - data = objenc.encrypt(pad(data, objenc.block_size)) - elif passphrase is not None: - raise ValueError("Empty password") - - # Each BASE64 line can take up to 64 characters (=48 bytes of data) - # b2a_base64 adds a new line character! - chunks = [tostr(b2a_base64(data[i:i + 48])) - for i in range(0, len(data), 48)] - out += "".join(chunks) - out += "-----END %s-----" % marker - return out - - -def _EVP_BytesToKey(data, salt, key_len): - d = [ b'' ] - m = (key_len + 15 ) // 16 - for _ in range(m): - nd = MD5.new(d[-1] + data + salt).digest() - d.append(nd) - return b"".join(d)[:key_len] - - -def decode(pem_data, passphrase=None): - """Decode a PEM block into binary. - - Args: - pem_data (string): - The PEM block. - passphrase (byte string): - If given and the PEM block is encrypted, - the key will be derived from the passphrase. - - Returns: - A tuple with the binary data, the marker string, and a boolean to - indicate if decryption was performed. - - Raises: - ValueError: if decoding fails, if the PEM file is encrypted and no passphrase has - been provided or if the passphrase is incorrect. 
- """ - - # Verify Pre-Encapsulation Boundary - r = re.compile(r"\s*-----BEGIN (.*)-----\s+") - m = r.match(pem_data) - if not m: - raise ValueError("Not a valid PEM pre boundary") - marker = m.group(1) - - # Verify Post-Encapsulation Boundary - r = re.compile(r"-----END (.*)-----\s*$") - m = r.search(pem_data) - if not m or m.group(1) != marker: - raise ValueError("Not a valid PEM post boundary") - - # Removes spaces and slit on lines - lines = pem_data.replace(" ", '').split() - if len(lines) < 3: - raise ValueError("A PEM file must have at least 3 lines") - - # Decrypts, if necessary - if lines[1].startswith('Proc-Type:4,ENCRYPTED'): - if not passphrase: - raise ValueError("PEM is encrypted, but no passphrase available") - DEK = lines[2].split(':') - if len(DEK) != 2 or DEK[0] != 'DEK-Info': - raise ValueError("PEM encryption format not supported.") - algo, salt = DEK[1].split(',') - salt = unhexlify(tobytes(salt)) - - padding = True - - if algo == "DES-CBC": - key = _EVP_BytesToKey(passphrase, salt, 8) - objdec = DES.new(key, DES.MODE_CBC, salt) - elif algo == "DES-EDE3-CBC": - key = _EVP_BytesToKey(passphrase, salt, 24) - objdec = DES3.new(key, DES3.MODE_CBC, salt) - elif algo == "AES-128-CBC": - key = _EVP_BytesToKey(passphrase, salt[:8], 16) - objdec = AES.new(key, AES.MODE_CBC, salt) - elif algo == "AES-192-CBC": - key = _EVP_BytesToKey(passphrase, salt[:8], 24) - objdec = AES.new(key, AES.MODE_CBC, salt) - elif algo == "AES-256-CBC": - key = _EVP_BytesToKey(passphrase, salt[:8], 32) - objdec = AES.new(key, AES.MODE_CBC, salt) - elif algo.lower() == "id-aes256-gcm": - key = _EVP_BytesToKey(passphrase, salt[:8], 32) - objdec = AES.new(key, AES.MODE_GCM, nonce=salt) - padding = False - else: - raise ValueError("Unsupport PEM encryption algorithm (%s)." % algo) - lines = lines[2:] - else: - objdec = None - - # Decode body - data = a2b_base64(''.join(lines[1:-1])) - enc_flag = False - if objdec: - if padding: - data = unpad(objdec.decrypt(data), objdec.block_size) - else: - # There is no tag, so we don't use decrypt_and_verify - data = objdec.decrypt(data) - enc_flag = True - - return (data, marker, enc_flag) diff --git a/venv/Lib/site-packages/Cryptodome/IO/PEM.pyi b/venv/Lib/site-packages/Cryptodome/IO/PEM.pyi deleted file mode 100644 index 2e324c4..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/PEM.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Tuple, Optional, Callable - -def encode(data: bytes, - marke: str, - passphrase: Optional[bytes] = ..., - randfunc: Optional[Callable[[int],bytes]] = ...) -> str: ... - - -def decode(pem_data: str, - passphrase: Optional[bytes] = ...) -> Tuple[bytes, str, bool]: ... diff --git a/venv/Lib/site-packages/Cryptodome/IO/PKCS8.py b/venv/Lib/site-packages/Cryptodome/IO/PKCS8.py deleted file mode 100644 index e770199..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/PKCS8.py +++ /dev/null @@ -1,226 +0,0 @@ -# -# PublicKey/PKCS8.py : PKCS#8 functions -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - - -from Cryptodome.Util.py3compat import * - -from Cryptodome.Util.asn1 import ( - DerNull, - DerSequence, - DerObjectId, - DerOctetString, - ) - -from Cryptodome.IO._PBES import PBES1, PBES2, PbesError - - -__all__ = ['wrap', 'unwrap'] - - -def wrap(private_key, key_oid, passphrase=None, protection=None, - prot_params=None, key_params=DerNull(), randfunc=None): - """Wrap a private key into a PKCS#8 blob (clear or encrypted). - - Args: - - private_key (bytes): - The private key encoded in binary form. The actual encoding is - algorithm specific. In most cases, it is DER. - - key_oid (string): - The object identifier (OID) of the private key to wrap. - It is a dotted string, like ``'1.2.840.113549.1.1.1'`` (for RSA keys) - or ``'1.2.840.10045.2.1'`` (for ECC keys). - - Keyword Args: - - passphrase (bytes or string): - The secret passphrase from which the wrapping key is derived. - Set it only if encryption is required. - - protection (string): - The identifier of the algorithm to use for securely wrapping the key. - Refer to :ref:`the encryption parameters` . - The default value is ``'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC'``. - - prot_params (dictionary): - Parameters for the key derivation function (KDF). - Refer to :ref:`the encryption parameters` . - - key_params (DER object or None): - The ``parameters`` field to use in the ``AlgorithmIdentifier`` - SEQUENCE. If ``None``, no ``parameters`` field will be added. - By default, the ASN.1 type ``NULL`` is used. - - randfunc (callable): - Random number generation function; it should accept a single integer - N and return a string of random data, N bytes long. - If not specified, a new RNG will be instantiated - from :mod:`Cryptodome.Random`. - - Returns: - bytes: The PKCS#8-wrapped private key (possibly encrypted). 
- """ - - # - # PrivateKeyInfo ::= SEQUENCE { - # version Version, - # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, - # privateKey PrivateKey, - # attributes [0] IMPLICIT Attributes OPTIONAL - # } - # - if key_params is None: - algorithm = DerSequence([DerObjectId(key_oid)]) - else: - algorithm = DerSequence([DerObjectId(key_oid), key_params]) - - pk_info = DerSequence([ - 0, - algorithm, - DerOctetString(private_key) - ]) - pk_info_der = pk_info.encode() - - if passphrase is None: - return pk_info_der - - if not passphrase: - raise ValueError("Empty passphrase") - - # Encryption with PBES2 - passphrase = tobytes(passphrase) - if protection is None: - protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' - return PBES2.encrypt(pk_info_der, passphrase, - protection, prot_params, randfunc) - - -def unwrap(p8_private_key, passphrase=None): - """Unwrap a private key from a PKCS#8 blob (clear or encrypted). - - Args: - p8_private_key (bytes): - The private key wrapped into a PKCS#8 container, DER encoded. - - Keyword Args: - passphrase (byte string or string): - The passphrase to use to decrypt the blob (if it is encrypted). - - Return: - A tuple containing - - #. the algorithm identifier of the wrapped key (OID, dotted string) - #. the private key (bytes, DER encoded) - #. the associated parameters (bytes, DER encoded) or ``None`` - - Raises: - ValueError : if decoding fails - """ - - if passphrase is not None: - passphrase = tobytes(passphrase) - - found = False - try: - p8_private_key = PBES1.decrypt(p8_private_key, passphrase) - found = True - except PbesError as e: - error_str = "PBES1[%s]" % str(e) - except ValueError: - error_str = "PBES1[Invalid]" - - if not found: - try: - p8_private_key = PBES2.decrypt(p8_private_key, passphrase) - found = True - except PbesError as e: - error_str += ",PBES2[%s]" % str(e) - except ValueError: - error_str += ",PBES2[Invalid]" - - if not found: - raise ValueError("Error decoding PKCS#8 (%s)" % error_str) - - pk_info = DerSequence().decode(p8_private_key, nr_elements=(2, 3, 4, 5)) - if len(pk_info) == 2 and not passphrase: - raise ValueError("Not a valid clear PKCS#8 structure " - "(maybe it is encrypted?)") - - # RFC5208, PKCS#8, version is v1(0) - # - # PrivateKeyInfo ::= SEQUENCE { - # version Version, - # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, - # privateKey PrivateKey, - # attributes [0] IMPLICIT Attributes OPTIONAL - # } - # - # RFC5915, Asymmetric Key Package, version is v2(1) - # - # OneAsymmetricKey ::= SEQUENCE { - # version Version, - # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, - # privateKey PrivateKey, - # attributes [0] Attributes OPTIONAL, - # ..., - # [[2: publicKey [1] PublicKey OPTIONAL ]], - # ... 
- # } - - if pk_info[0] == 0: - if len(pk_info) not in (3, 4): - raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") - elif pk_info[0] == 1: - if len(pk_info) not in (3, 4, 5): - raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") - else: - raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") - - algo = DerSequence().decode(pk_info[1], nr_elements=(1, 2)) - algo_oid = DerObjectId().decode(algo[0]).value - if len(algo) == 1: - algo_params = None - else: - try: - DerNull().decode(algo[1]) - algo_params = None - except: - algo_params = algo[1] - - # PrivateKey ::= OCTET STRING - private_key = DerOctetString().decode(pk_info[2]).payload - - # We ignore attributes and (for v2 only) publickey - - return (algo_oid, private_key, algo_params) diff --git a/venv/Lib/site-packages/Cryptodome/IO/PKCS8.pyi b/venv/Lib/site-packages/Cryptodome/IO/PKCS8.pyi deleted file mode 100644 index c8d0c10..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/PKCS8.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Tuple, Optional, Union, Callable -from typing_extensions import NotRequired - -from Cryptodome.Util.asn1 import DerObject -from Cryptodome.IO._PBES import ProtParams - - -def wrap(private_key: bytes, - key_oid: str, - passphrase: Union[bytes, str] = ..., - protection: str = ..., - prot_params: Optional[ProtParams] = ..., - key_params: Optional[DerObject] = ..., - randfunc: Optional[Callable[[int], str]] = ...) -> bytes: ... - - -def unwrap(p8_private_key: bytes, passphrase: Optional[Union[bytes, str]] = ...) -> Tuple[str, bytes, Optional[bytes]]: ... diff --git a/venv/Lib/site-packages/Cryptodome/IO/_PBES.py b/venv/Lib/site-packages/Cryptodome/IO/_PBES.py deleted file mode 100644 index 75d8cde..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/_PBES.py +++ /dev/null @@ -1,546 +0,0 @@ -# -# PublicKey/_PBES.py : Password-Based Encryption functions -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import re - -from Cryptodome import Hash -from Cryptodome import Random -from Cryptodome.Util.asn1 import ( - DerSequence, DerOctetString, - DerObjectId, DerInteger, - ) - -from Cryptodome.Cipher import AES -from Cryptodome.Util.Padding import pad, unpad -from Cryptodome.Protocol.KDF import PBKDF1, PBKDF2, scrypt - -_OID_PBE_WITH_MD5_AND_DES_CBC = "1.2.840.113549.1.5.3" -_OID_PBE_WITH_MD5_AND_RC2_CBC = "1.2.840.113549.1.5.6" -_OID_PBE_WITH_SHA1_AND_DES_CBC = "1.2.840.113549.1.5.10" -_OID_PBE_WITH_SHA1_AND_RC2_CBC = "1.2.840.113549.1.5.11" - -_OID_PBES2 = "1.2.840.113549.1.5.13" - -_OID_PBKDF2 = "1.2.840.113549.1.5.12" -_OID_SCRYPT = "1.3.6.1.4.1.11591.4.11" - -_OID_HMAC_SHA1 = "1.2.840.113549.2.7" - -_OID_DES_EDE3_CBC = "1.2.840.113549.3.7" -_OID_AES128_CBC = "2.16.840.1.101.3.4.1.2" -_OID_AES192_CBC = "2.16.840.1.101.3.4.1.22" -_OID_AES256_CBC = "2.16.840.1.101.3.4.1.42" -_OID_AES128_GCM = "2.16.840.1.101.3.4.1.6" -_OID_AES192_GCM = "2.16.840.1.101.3.4.1.26" -_OID_AES256_GCM = "2.16.840.1.101.3.4.1.46" - -class PbesError(ValueError): - pass - -# These are the ASN.1 definitions used by the PBES1/2 logic: -# -# EncryptedPrivateKeyInfo ::= SEQUENCE { -# encryptionAlgorithm EncryptionAlgorithmIdentifier, -# encryptedData EncryptedData -# } -# -# EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier -# -# EncryptedData ::= OCTET STRING -# -# AlgorithmIdentifier ::= SEQUENCE { -# algorithm OBJECT IDENTIFIER, -# parameters ANY DEFINED BY algorithm OPTIONAL -# } -# -# PBEParameter ::= SEQUENCE { -# salt OCTET STRING (SIZE(8)), -# iterationCount INTEGER -# } -# -# PBES2-params ::= SEQUENCE { -# keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, -# encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} -# } -# -# PBKDF2-params ::= SEQUENCE { -# salt CHOICE { -# specified OCTET STRING, -# otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} -# }, -# iterationCount INTEGER (1..MAX), -# keyLength INTEGER (1..MAX) OPTIONAL, -# prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 -# } -# -# PBKDF2-PRFs ALGORITHM-IDENTIFIER ::= { -# {NULL IDENTIFIED BY id-hmacWithSHA1}, -# {NULL IDENTIFIED BY id-hmacWithSHA224}, -# {NULL IDENTIFIED BY id-hmacWithSHA256}, -# {NULL IDENTIFIED BY id-hmacWithSHA384}, -# {NULL IDENTIFIED BY id-hmacWithSHA512}, -# {NULL IDENTIFIED BY id-hmacWithSHA512-224}, -# {NULL IDENTIFIED BY id-hmacWithSHA512-256}, -# ... -# } -# scrypt-params ::= SEQUENCE { -# salt OCTET STRING, -# costParameter INTEGER (1..MAX), -# blockSize INTEGER (1..MAX), -# parallelizationParameter INTEGER (1..MAX), -# keyLength INTEGER (1..MAX) OPTIONAL -# } - - -class PBES1(object): - """Deprecated encryption scheme with password-based key derivation - (originally defined in PKCS#5 v1.5, but still present in `v2.0`__). - - .. __: http://www.ietf.org/rfc/rfc2898.txt - """ - - @staticmethod - def decrypt(data, passphrase): - """Decrypt a piece of data using a passphrase and *PBES1*. - - The algorithm to use is automatically detected. - - :Parameters: - data : byte string - The piece of data to decrypt. - passphrase : byte string - The passphrase to use for decrypting the data. - :Returns: - The decrypted data, as a binary string. 
- """ - - enc_private_key_info = DerSequence().decode(data) - encrypted_algorithm = DerSequence().decode(enc_private_key_info[0]) - encrypted_data = DerOctetString().decode(enc_private_key_info[1]).payload - - pbe_oid = DerObjectId().decode(encrypted_algorithm[0]).value - cipher_params = {} - if pbe_oid == _OID_PBE_WITH_MD5_AND_DES_CBC: - # PBE_MD5_DES_CBC - from Cryptodome.Hash import MD5 - from Cryptodome.Cipher import DES - hashmod = MD5 - module = DES - elif pbe_oid == _OID_PBE_WITH_MD5_AND_RC2_CBC: - # PBE_MD5_RC2_CBC - from Cryptodome.Hash import MD5 - from Cryptodome.Cipher import ARC2 - hashmod = MD5 - module = ARC2 - cipher_params['effective_keylen'] = 64 - elif pbe_oid == _OID_PBE_WITH_SHA1_AND_DES_CBC: - # PBE_SHA1_DES_CBC - from Cryptodome.Hash import SHA1 - from Cryptodome.Cipher import DES - hashmod = SHA1 - module = DES - elif pbe_oid == _OID_PBE_WITH_SHA1_AND_RC2_CBC: - # PBE_SHA1_RC2_CBC - from Cryptodome.Hash import SHA1 - from Cryptodome.Cipher import ARC2 - hashmod = SHA1 - module = ARC2 - cipher_params['effective_keylen'] = 64 - else: - raise PbesError("Unknown OID for PBES1") - - pbe_params = DerSequence().decode(encrypted_algorithm[1], nr_elements=2) - salt = DerOctetString().decode(pbe_params[0]).payload - iterations = pbe_params[1] - - key_iv = PBKDF1(passphrase, salt, 16, iterations, hashmod) - key, iv = key_iv[:8], key_iv[8:] - - cipher = module.new(key, module.MODE_CBC, iv, **cipher_params) - pt = cipher.decrypt(encrypted_data) - return unpad(pt, cipher.block_size) - - -class PBES2(object): - """Encryption scheme with password-based key derivation - (defined in `PKCS#5 v2.0`__). - - .. __: http://www.ietf.org/rfc/rfc2898.txt.""" - - @staticmethod - def encrypt(data, passphrase, protection, prot_params=None, randfunc=None): - """Encrypt a piece of data using a passphrase and *PBES2*. - - :Parameters: - data : byte string - The piece of data to encrypt. - passphrase : byte string - The passphrase to use for encrypting the data. - protection : string - The identifier of the encryption algorithm to use. - The default value is '``PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC``'. - prot_params : dictionary - Parameters of the protection algorithm. - - +------------------+-----------------------------------------------+ - | Key | Description | - +==================+===============================================+ - | iteration_count | The KDF algorithm is repeated several times to| - | | slow down brute force attacks on passwords | - | | (called *N* or CPU/memory cost in scrypt). | - | | | - | | The default value for PBKDF2 is 1 000. | - | | The default value for scrypt is 16 384. | - +------------------+-----------------------------------------------+ - | salt_size | Salt is used to thwart dictionary and rainbow | - | | attacks on passwords. The default value is 8 | - | | bytes. | - +------------------+-----------------------------------------------+ - | block_size | *(scrypt only)* Memory-cost (r). The default | - | | value is 8. | - +------------------+-----------------------------------------------+ - | parallelization | *(scrypt only)* CPU-cost (p). The default | - | | value is 1. | - +------------------+-----------------------------------------------+ - - - randfunc : callable - Random number generation function; it should accept - a single integer N and return a string of random data, - N bytes long. If not specified, a new RNG will be - instantiated from ``Cryptodome.Random``. - - :Returns: - The encrypted data, as a binary string. 
- """ - - if prot_params is None: - prot_params = {} - - if randfunc is None: - randfunc = Random.new().read - - pattern = re.compile(r'^(PBKDF2WithHMAC-([0-9A-Z-]+)|scrypt)And([0-9A-Z-]+)$') - res = pattern.match(protection) - if res is None: - raise ValueError("Unknown protection %s" % protection) - - if protection.startswith("PBKDF"): - pbkdf = "pbkdf2" - pbkdf2_hmac_algo = res.group(2) - enc_algo = res.group(3) - else: - pbkdf = "scrypt" - enc_algo = res.group(3) - - aead = False - if enc_algo == 'DES-EDE3-CBC': - from Cryptodome.Cipher import DES3 - key_size = 24 - module = DES3 - cipher_mode = DES3.MODE_CBC - enc_oid = _OID_DES_EDE3_CBC - enc_param = {'iv': randfunc(8)} - elif enc_algo == 'AES128-CBC': - key_size = 16 - module = AES - cipher_mode = AES.MODE_CBC - enc_oid = _OID_AES128_CBC - enc_param = {'iv': randfunc(16)} - elif enc_algo == 'AES192-CBC': - key_size = 24 - module = AES - cipher_mode = AES.MODE_CBC - enc_oid = _OID_AES192_CBC - enc_param = {'iv': randfunc(16)} - elif enc_algo == 'AES256-CBC': - key_size = 32 - module = AES - cipher_mode = AES.MODE_CBC - enc_oid = _OID_AES256_CBC - enc_param = {'iv': randfunc(16)} - elif enc_algo == 'AES128-GCM': - key_size = 16 - module = AES - cipher_mode = AES.MODE_GCM - enc_oid = _OID_AES128_GCM - enc_param = {'nonce': randfunc(12)} - aead = True - elif enc_algo == 'AES192-GCM': - key_size = 24 - module = AES - cipher_mode = AES.MODE_GCM - enc_oid = _OID_AES192_GCM - enc_param = {'nonce': randfunc(12)} - aead = True - elif enc_algo == 'AES256-GCM': - key_size = 32 - module = AES - cipher_mode = AES.MODE_GCM - enc_oid = _OID_AES256_GCM - enc_param = {'nonce': randfunc(12)} - aead = True - else: - raise ValueError("Unknown encryption mode '%s'" % enc_algo) - - iv_nonce = list(enc_param.values())[0] - salt = randfunc(prot_params.get("salt_size", 8)) - - # Derive key from password - if pbkdf == 'pbkdf2': - - count = prot_params.get("iteration_count", 1000) - digestmod = Hash.new(pbkdf2_hmac_algo) - - key = PBKDF2(passphrase, - salt, - key_size, - count, - hmac_hash_module=digestmod) - - pbkdf2_params = DerSequence([ - DerOctetString(salt), - DerInteger(count) - ]) - - if pbkdf2_hmac_algo != 'SHA1': - try: - hmac_oid = Hash.HMAC.new(b'', digestmod=digestmod).oid - except KeyError: - raise ValueError("No OID for HMAC hash algorithm") - pbkdf2_params.append(DerSequence([DerObjectId(hmac_oid)])) - - kdf_info = DerSequence([ - DerObjectId(_OID_PBKDF2), # PBKDF2 - pbkdf2_params - ]) - - elif pbkdf == 'scrypt': - - count = prot_params.get("iteration_count", 16384) - scrypt_r = prot_params.get('block_size', 8) - scrypt_p = prot_params.get('parallelization', 1) - key = scrypt(passphrase, salt, key_size, - count, scrypt_r, scrypt_p) - kdf_info = DerSequence([ - DerObjectId(_OID_SCRYPT), # scrypt - DerSequence([ - DerOctetString(salt), - DerInteger(count), - DerInteger(scrypt_r), - DerInteger(scrypt_p) - ]) - ]) - - else: - raise ValueError("Unknown KDF " + res.group(1)) - - # Create cipher and use it - cipher = module.new(key, cipher_mode, **enc_param) - if aead: - ct, tag = cipher.encrypt_and_digest(data) - encrypted_data = ct + tag - else: - encrypted_data = cipher.encrypt(pad(data, cipher.block_size)) - enc_info = DerSequence([ - DerObjectId(enc_oid), - DerOctetString(iv_nonce) - ]) - - # Result - enc_private_key_info = DerSequence([ - # encryptionAlgorithm - DerSequence([ - DerObjectId(_OID_PBES2), - DerSequence([ - kdf_info, - enc_info - ]), - ]), - DerOctetString(encrypted_data) - ]) - return enc_private_key_info.encode() - - @staticmethod 
- def decrypt(data, passphrase): - """Decrypt a piece of data using a passphrase and *PBES2*. - - The algorithm to use is automatically detected. - - :Parameters: - data : byte string - The piece of data to decrypt. - passphrase : byte string - The passphrase to use for decrypting the data. - :Returns: - The decrypted data, as a binary string. - """ - - enc_private_key_info = DerSequence().decode(data, nr_elements=2) - enc_algo = DerSequence().decode(enc_private_key_info[0]) - encrypted_data = DerOctetString().decode(enc_private_key_info[1]).payload - - pbe_oid = DerObjectId().decode(enc_algo[0]).value - if pbe_oid != _OID_PBES2: - raise PbesError("Not a PBES2 object") - - pbes2_params = DerSequence().decode(enc_algo[1], nr_elements=2) - - # Key Derivation Function selection - kdf_info = DerSequence().decode(pbes2_params[0], nr_elements=2) - kdf_oid = DerObjectId().decode(kdf_info[0]).value - - kdf_key_length = None - - # We only support PBKDF2 or scrypt - if kdf_oid == _OID_PBKDF2: - - pbkdf2_params = DerSequence().decode(kdf_info[1], nr_elements=(2, 3, 4)) - salt = DerOctetString().decode(pbkdf2_params[0]).payload - iteration_count = pbkdf2_params[1] - - left = len(pbkdf2_params) - 2 - idx = 2 - - if left > 0: - try: - # Check if it's an INTEGER - kdf_key_length = pbkdf2_params[idx] - 0 - left -= 1 - idx += 1 - except TypeError: - # keyLength is not present - pass - - # Default is HMAC-SHA1 - pbkdf2_prf_oid = _OID_HMAC_SHA1 - if left > 0: - pbkdf2_prf_algo_id = DerSequence().decode(pbkdf2_params[idx]) - pbkdf2_prf_oid = DerObjectId().decode(pbkdf2_prf_algo_id[0]).value - - elif kdf_oid == _OID_SCRYPT: - - scrypt_params = DerSequence().decode(kdf_info[1], nr_elements=(4, 5)) - salt = DerOctetString().decode(scrypt_params[0]).payload - iteration_count, scrypt_r, scrypt_p = [scrypt_params[x] - for x in (1, 2, 3)] - if len(scrypt_params) > 4: - kdf_key_length = scrypt_params[4] - else: - kdf_key_length = None - else: - raise PbesError("Unsupported PBES2 KDF") - - # Cipher selection - enc_info = DerSequence().decode(pbes2_params[1]) - enc_oid = DerObjectId().decode(enc_info[0]).value - - aead = False - if enc_oid == _OID_DES_EDE3_CBC: - # DES_EDE3_CBC - from Cryptodome.Cipher import DES3 - module = DES3 - cipher_mode = DES3.MODE_CBC - key_size = 24 - cipher_param = 'iv' - elif enc_oid == _OID_AES128_CBC: - module = AES - cipher_mode = AES.MODE_CBC - key_size = 16 - cipher_param = 'iv' - elif enc_oid == _OID_AES192_CBC: - module = AES - cipher_mode = AES.MODE_CBC - key_size = 24 - cipher_param = 'iv' - elif enc_oid == _OID_AES256_CBC: - module = AES - cipher_mode = AES.MODE_CBC - key_size = 32 - cipher_param = 'iv' - elif enc_oid == _OID_AES128_GCM: - module = AES - cipher_mode = AES.MODE_GCM - key_size = 16 - cipher_param = 'nonce' - aead = True - elif enc_oid == _OID_AES192_GCM: - module = AES - cipher_mode = AES.MODE_GCM - key_size = 24 - cipher_param = 'nonce' - aead = True - elif enc_oid == _OID_AES256_GCM: - module = AES - cipher_mode = AES.MODE_GCM - key_size = 32 - cipher_param = 'nonce' - aead = True - else: - raise PbesError("Unsupported PBES2 cipher " + enc_algo) - - if kdf_key_length and kdf_key_length != key_size: - raise PbesError("Mismatch between PBES2 KDF parameters" - " and selected cipher") - - iv_nonce = DerOctetString().decode(enc_info[1]).payload - - # Create cipher - if kdf_oid == _OID_PBKDF2: - - try: - hmac_hash_module_oid = Hash.HMAC._hmac2hash_oid[pbkdf2_prf_oid] - except KeyError: - raise PbesError("Unsupported HMAC %s" % pbkdf2_prf_oid) - hmac_hash_module = 
Hash.new(hmac_hash_module_oid) - - key = PBKDF2(passphrase, salt, key_size, iteration_count, - hmac_hash_module=hmac_hash_module) - else: - key = scrypt(passphrase, salt, key_size, iteration_count, - scrypt_r, scrypt_p) - cipher = module.new(key, cipher_mode, **{cipher_param:iv_nonce}) - - # Decrypt data - if len(encrypted_data) < cipher.block_size: - raise ValueError("Too little data to decrypt") - - if aead: - tag_len = cipher.block_size - pt = cipher.decrypt_and_verify(encrypted_data[:-tag_len], - encrypted_data[-tag_len:]) - else: - pt_padded = cipher.decrypt(encrypted_data) - pt = unpad(pt_padded, cipher.block_size) - - return pt diff --git a/venv/Lib/site-packages/Cryptodome/IO/_PBES.pyi b/venv/Lib/site-packages/Cryptodome/IO/_PBES.pyi deleted file mode 100644 index 0673364..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/_PBES.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Optional, Callable, TypedDict -from typing_extensions import NotRequired - -class PbesError(ValueError): - ... - -class PBES1(object): - @staticmethod - def decrypt(data: bytes, passphrase: bytes) -> bytes: ... - -class ProtParams(TypedDict): - iteration_count: NotRequired[int] - salt_size: NotRequired[int] - block_size: NotRequired[int] - parallelization: NotRequired[int] - -class PBES2(object): - @staticmethod - def encrypt(data: bytes, - passphrase: bytes, - protection: str, - prot_params: Optional[ProtParams] = ..., - randfunc: Optional[Callable[[int],bytes]] = ...) -> bytes: ... - - @staticmethod - def decrypt(data:bytes, passphrase: bytes) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/IO/__init__.py b/venv/Lib/site-packages/Cryptodome/IO/__init__.py deleted file mode 100644 index 85a0d0b..0000000 --- a/venv/Lib/site-packages/Cryptodome/IO/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
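For context, a hedged sketch of the removed PBES2 helper in action (Cryptodome.IO._PBES is a private module that PKCS8 uses internally; the payload, passphrase and parameters below are illustrative):

from Cryptodome.IO._PBES import PBES2

payload = b'attack at dawn'
passphrase = b'correct horse battery staple'

# Encrypt with PBKDF2-HMAC-SHA1 + DES-EDE3-CBC, the default scheme that
# PKCS8.wrap() falls back to when no protection string is given.
wrapped = PBES2.encrypt(payload, passphrase,
                        'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC',
                        prot_params={'iteration_count': 10000,
                                     'salt_size': 16})

# Decryption auto-detects the KDF and cipher from the ASN.1 structure.
assert PBES2.decrypt(wrapped, passphrase) == payload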
-# =================================================================== - -__all__ = ['PEM', 'PKCS8'] diff --git a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/PEM.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/IO/__pycache__/PEM.cpython-312.pyc deleted file mode 100644 index e04d1e8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/PEM.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/PKCS8.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/IO/__pycache__/PKCS8.cpython-312.pyc deleted file mode 100644 index dc8d913..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/PKCS8.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/_PBES.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/IO/__pycache__/_PBES.cpython-312.pyc deleted file mode 100644 index 377b0b2..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/_PBES.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/IO/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index fa4a0b5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/IO/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/Numbers.py b/venv/Lib/site-packages/Cryptodome/Math/Numbers.py deleted file mode 100644 index 9e96686..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/Numbers.py +++ /dev/null @@ -1,47 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -__all__ = ["Integer"] - -import os - -try: - if os.getenv("PYCRYPTODOME_DISABLE_GMP"): - raise ImportError() - - from Cryptodome.Math._IntegerGMP import IntegerGMP as Integer - from Cryptodome.Math._IntegerGMP import implementation as _implementation -except (ImportError, OSError, AttributeError): - try: - from Cryptodome.Math._IntegerCustom import IntegerCustom as Integer - from Cryptodome.Math._IntegerCustom import implementation as _implementation - except (ImportError, OSError): - from Cryptodome.Math._IntegerNative import IntegerNative as Integer - _implementation = {} diff --git a/venv/Lib/site-packages/Cryptodome/Math/Numbers.pyi b/venv/Lib/site-packages/Cryptodome/Math/Numbers.pyi deleted file mode 100644 index b0206ca..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/Numbers.pyi +++ /dev/null @@ -1,2 +0,0 @@ -from Cryptodome.Math._IntegerBase import IntegerBase as Integer -__all__ = ['Integer'] diff --git a/venv/Lib/site-packages/Cryptodome/Math/Primality.py b/venv/Lib/site-packages/Cryptodome/Math/Primality.py deleted file mode 100644 index 33814fa..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/Primality.py +++ /dev/null @@ -1,369 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Functions to create and test prime numbers. - -:undocumented: __package__ -""" - -from Cryptodome import Random -from Cryptodome.Math.Numbers import Integer - -from Cryptodome.Util.py3compat import iter_range - -COMPOSITE = 0 -PROBABLY_PRIME = 1 - - -def miller_rabin_test(candidate, iterations, randfunc=None): - """Perform a Miller-Rabin primality test on an integer. - - The test is specified in Section C.3.1 of `FIPS PUB 186-4`__. - - :Parameters: - candidate : integer - The number to test for primality. - iterations : integer - The maximum number of iterations to perform before - declaring a candidate a probable prime. - randfunc : callable - An RNG function where bases are taken from. - - :Returns: - ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``. - - .. 
__: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf - """ - - if not isinstance(candidate, Integer): - candidate = Integer(candidate) - - if candidate in (1, 2, 3, 5): - return PROBABLY_PRIME - - if candidate.is_even(): - return COMPOSITE - - one = Integer(1) - minus_one = Integer(candidate - 1) - - if randfunc is None: - randfunc = Random.new().read - - # Step 1 and 2 - m = Integer(minus_one) - a = 0 - while m.is_even(): - m >>= 1 - a += 1 - - # Skip step 3 - - # Step 4 - for i in iter_range(iterations): - - # Step 4.1-2 - base = 1 - while base in (one, minus_one): - base = Integer.random_range(min_inclusive=2, - max_inclusive=candidate - 2, - randfunc=randfunc) - assert(2 <= base <= candidate - 2) - - # Step 4.3-4.4 - z = pow(base, m, candidate) - if z in (one, minus_one): - continue - - # Step 4.5 - for j in iter_range(1, a): - z = pow(z, 2, candidate) - if z == minus_one: - break - if z == one: - return COMPOSITE - else: - return COMPOSITE - - # Step 5 - return PROBABLY_PRIME - - -def lucas_test(candidate): - """Perform a Lucas primality test on an integer. - - The test is specified in Section C.3.3 of `FIPS PUB 186-4`__. - - :Parameters: - candidate : integer - The number to test for primality. - - :Returns: - ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``. - - .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf - """ - - if not isinstance(candidate, Integer): - candidate = Integer(candidate) - - # Step 1 - if candidate in (1, 2, 3, 5): - return PROBABLY_PRIME - if candidate.is_even() or candidate.is_perfect_square(): - return COMPOSITE - - # Step 2 - def alternate(): - value = 5 - while True: - yield value - if value > 0: - value += 2 - else: - value -= 2 - value = -value - - for D in alternate(): - if candidate in (D, -D): - continue - js = Integer.jacobi_symbol(D, candidate) - if js == 0: - return COMPOSITE - if js == -1: - break - # Found D. P=1 and Q=(1-D)/4 (note that Q is guaranteed to be an integer) - - # Step 3 - # This is \delta(n) = n - jacobi(D/n) - K = candidate + 1 - # Step 4 - r = K.size_in_bits() - 1 - # Step 5 - # U_1=1 and V_1=P - U_i = Integer(1) - V_i = Integer(1) - U_temp = Integer(0) - V_temp = Integer(0) - # Step 6 - for i in iter_range(r - 1, -1, -1): - # Square - # U_temp = U_i * V_i % candidate - U_temp.set(U_i) - U_temp *= V_i - U_temp %= candidate - # V_temp = (((V_i ** 2 + (U_i ** 2 * D)) * K) >> 1) % candidate - V_temp.set(U_i) - V_temp *= U_i - V_temp *= D - V_temp.multiply_accumulate(V_i, V_i) - if V_temp.is_odd(): - V_temp += candidate - V_temp >>= 1 - V_temp %= candidate - # Multiply - if K.get_bit(i): - # U_i = (((U_temp + V_temp) * K) >> 1) % candidate - U_i.set(U_temp) - U_i += V_temp - if U_i.is_odd(): - U_i += candidate - U_i >>= 1 - U_i %= candidate - # V_i = (((V_temp + U_temp * D) * K) >> 1) % candidate - V_i.set(V_temp) - V_i.multiply_accumulate(U_temp, D) - if V_i.is_odd(): - V_i += candidate - V_i >>= 1 - V_i %= candidate - else: - U_i.set(U_temp) - V_i.set(V_temp) - # Step 7 - if U_i == 0: - return PROBABLY_PRIME - return COMPOSITE - - -from Cryptodome.Util.number import sieve_base as _sieve_base_large -## The optimal number of small primes to use for the sieve -## is probably dependent on the platform and the candidate size -_sieve_base = set(_sieve_base_large[:100]) - - -def test_probable_prime(candidate, randfunc=None): - """Test if a number is prime. 
- - A number is qualified as prime if it passes a certain - number of Miller-Rabin tests (dependent on the size - of the number, but such that probability of a false - positive is less than 10^-30) and a single Lucas test. - - For instance, a 1024-bit candidate will need to pass - 4 Miller-Rabin tests. - - :Parameters: - candidate : integer - The number to test for primality. - randfunc : callable - The routine to draw random bytes from to select Miller-Rabin bases. - :Returns: - ``PROBABLE_PRIME`` if the number if prime with very high probability. - ``COMPOSITE`` if the number is a composite. - For efficiency reasons, ``COMPOSITE`` is also returned for small primes. - """ - - if randfunc is None: - randfunc = Random.new().read - - if not isinstance(candidate, Integer): - candidate = Integer(candidate) - - # First, check trial division by the smallest primes - if int(candidate) in _sieve_base: - return PROBABLY_PRIME - try: - map(candidate.fail_if_divisible_by, _sieve_base) - except ValueError: - return COMPOSITE - - # These are the number of Miller-Rabin iterations s.t. p(k, t) < 1E-30, - # with p(k, t) being the probability that a randomly chosen k-bit number - # is composite but still survives t MR iterations. - mr_ranges = ((220, 30), (280, 20), (390, 15), (512, 10), - (620, 7), (740, 6), (890, 5), (1200, 4), - (1700, 3), (3700, 2)) - - bit_size = candidate.size_in_bits() - try: - mr_iterations = list(filter(lambda x: bit_size < x[0], - mr_ranges))[0][1] - except IndexError: - mr_iterations = 1 - - if miller_rabin_test(candidate, mr_iterations, - randfunc=randfunc) == COMPOSITE: - return COMPOSITE - if lucas_test(candidate) == COMPOSITE: - return COMPOSITE - return PROBABLY_PRIME - - -def generate_probable_prime(**kwargs): - """Generate a random probable prime. - - The prime will not have any specific properties - (e.g. it will not be a *strong* prime). - - Random numbers are evaluated for primality until one - passes all tests, consisting of a certain number of - Miller-Rabin tests with random bases followed by - a single Lucas test. - - The number of Miller-Rabin iterations is chosen such that - the probability that the output number is a non-prime is - less than 1E-30 (roughly 2^{-100}). - - This approach is compliant to `FIPS PUB 186-4`__. - - :Keywords: - exact_bits : integer - The desired size in bits of the probable prime. - It must be at least 160. - randfunc : callable - An RNG function where candidate primes are taken from. - prime_filter : callable - A function that takes an Integer as parameter and returns - True if the number can be passed to further primality tests, - False if it should be immediately discarded. - - :Return: - A probable prime in the range 2^exact_bits > p > 2^(exact_bits-1). - - .. 
__: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf - """ - - exact_bits = kwargs.pop("exact_bits", None) - randfunc = kwargs.pop("randfunc", None) - prime_filter = kwargs.pop("prime_filter", lambda x: True) - if kwargs: - raise ValueError("Unknown parameters: " + kwargs.keys()) - - if exact_bits is None: - raise ValueError("Missing exact_bits parameter") - if exact_bits < 160: - raise ValueError("Prime number is not big enough.") - - if randfunc is None: - randfunc = Random.new().read - - result = COMPOSITE - while result == COMPOSITE: - candidate = Integer.random(exact_bits=exact_bits, - randfunc=randfunc) | 1 - if not prime_filter(candidate): - continue - result = test_probable_prime(candidate, randfunc) - return candidate - - -def generate_probable_safe_prime(**kwargs): - """Generate a random, probable safe prime. - - Note this operation is much slower than generating a simple prime. - - :Keywords: - exact_bits : integer - The desired size in bits of the probable safe prime. - randfunc : callable - An RNG function where candidate primes are taken from. - - :Return: - A probable safe prime in the range - 2^exact_bits > p > 2^(exact_bits-1). - """ - - exact_bits = kwargs.pop("exact_bits", None) - randfunc = kwargs.pop("randfunc", None) - if kwargs: - raise ValueError("Unknown parameters: " + kwargs.keys()) - - if randfunc is None: - randfunc = Random.new().read - - result = COMPOSITE - while result == COMPOSITE: - q = generate_probable_prime(exact_bits=exact_bits - 1, randfunc=randfunc) - candidate = q * 2 + 1 - if candidate.size_in_bits() != exact_bits: - continue - result = test_probable_prime(candidate, randfunc=randfunc) - return candidate diff --git a/venv/Lib/site-packages/Cryptodome/Math/Primality.pyi b/venv/Lib/site-packages/Cryptodome/Math/Primality.pyi deleted file mode 100644 index 7813483..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/Primality.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Callable, Optional, Union, Set - -PrimeResult = int - -COMPOSITE: PrimeResult -PROBABLY_PRIME: PrimeResult - -def miller_rabin_test(candidate: int, iterations: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ... -def lucas_test(candidate: int) -> PrimeResult: ... -_sieve_base: Set[int] -def test_probable_prime(candidate: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ... -def generate_probable_prime(*, - exact_bits: int = ..., - randfunc: Callable[[int],bytes] = ..., - prime_filter: Callable[[int],bool] = ...) -> int: ... -def generate_probable_safe_prime(*, - exact_bits: int = ..., - randfunc: Callable[[int],bytes] = ...) -> int: ... diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerBase.py b/venv/Lib/site-packages/Cryptodome/Math/_IntegerBase.py deleted file mode 100644 index 03dd591..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerBase.py +++ /dev/null @@ -1,412 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2018, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import abc - -from Cryptodome.Util.py3compat import iter_range, bord, bchr, ABC - -from Cryptodome import Random - - -class IntegerBase(ABC): - - # Conversions - @abc.abstractmethod - def __int__(self): - pass - - @abc.abstractmethod - def __str__(self): - pass - - @abc.abstractmethod - def __repr__(self): - pass - - @abc.abstractmethod - def to_bytes(self, block_size=0, byteorder='big'): - pass - - @staticmethod - @abc.abstractmethod - def from_bytes(byte_string, byteorder='big'): - pass - - # Relations - @abc.abstractmethod - def __eq__(self, term): - pass - - @abc.abstractmethod - def __ne__(self, term): - pass - - @abc.abstractmethod - def __lt__(self, term): - pass - - @abc.abstractmethod - def __le__(self, term): - pass - - @abc.abstractmethod - def __gt__(self, term): - pass - - @abc.abstractmethod - def __ge__(self, term): - pass - - @abc.abstractmethod - def __nonzero__(self): - pass - __bool__ = __nonzero__ - - @abc.abstractmethod - def is_negative(self): - pass - - # Arithmetic operations - @abc.abstractmethod - def __add__(self, term): - pass - - @abc.abstractmethod - def __sub__(self, term): - pass - - @abc.abstractmethod - def __mul__(self, factor): - pass - - @abc.abstractmethod - def __floordiv__(self, divisor): - pass - - @abc.abstractmethod - def __mod__(self, divisor): - pass - - @abc.abstractmethod - def inplace_pow(self, exponent, modulus=None): - pass - - @abc.abstractmethod - def __pow__(self, exponent, modulus=None): - pass - - @abc.abstractmethod - def __abs__(self): - pass - - @abc.abstractmethod - def sqrt(self, modulus=None): - pass - - @abc.abstractmethod - def __iadd__(self, term): - pass - - @abc.abstractmethod - def __isub__(self, term): - pass - - @abc.abstractmethod - def __imul__(self, term): - pass - - @abc.abstractmethod - def __imod__(self, term): - pass - - # Boolean/bit operations - @abc.abstractmethod - def __and__(self, term): - pass - - @abc.abstractmethod - def __or__(self, term): - pass - - @abc.abstractmethod - def __rshift__(self, pos): - pass - - @abc.abstractmethod - def __irshift__(self, pos): - pass - - @abc.abstractmethod - def __lshift__(self, pos): - pass - - @abc.abstractmethod - def __ilshift__(self, pos): - pass - - @abc.abstractmethod - def get_bit(self, n): - pass - - # Extra - @abc.abstractmethod - def is_odd(self): - pass - - @abc.abstractmethod - def is_even(self): - pass - - @abc.abstractmethod - def size_in_bits(self): - pass - - @abc.abstractmethod - def 
size_in_bytes(self): - pass - - @abc.abstractmethod - def is_perfect_square(self): - pass - - @abc.abstractmethod - def fail_if_divisible_by(self, small_prime): - pass - - @abc.abstractmethod - def multiply_accumulate(self, a, b): - pass - - @abc.abstractmethod - def set(self, source): - pass - - @abc.abstractmethod - def inplace_inverse(self, modulus): - pass - - @abc.abstractmethod - def inverse(self, modulus): - pass - - @abc.abstractmethod - def gcd(self, term): - pass - - @abc.abstractmethod - def lcm(self, term): - pass - - @staticmethod - @abc.abstractmethod - def jacobi_symbol(a, n): - pass - - @staticmethod - def _tonelli_shanks(n, p): - """Tonelli-shanks algorithm for computing the square root - of n modulo a prime p. - - n must be in the range [0..p-1]. - p must be at least even. - - The return value r is the square root of modulo p. If non-zero, - another solution will also exist (p-r). - - Note we cannot assume that p is really a prime: if it's not, - we can either raise an exception or return the correct value. - """ - - # See https://rosettacode.org/wiki/Tonelli-Shanks_algorithm - - if n in (0, 1): - return n - - if p % 4 == 3: - root = pow(n, (p + 1) // 4, p) - if pow(root, 2, p) != n: - raise ValueError("Cannot compute square root") - return root - - s = 1 - q = (p - 1) // 2 - while not (q & 1): - s += 1 - q >>= 1 - - z = n.__class__(2) - while True: - euler = pow(z, (p - 1) // 2, p) - if euler == 1: - z += 1 - continue - if euler == p - 1: - break - # Most probably p is not a prime - raise ValueError("Cannot compute square root") - - m = s - c = pow(z, q, p) - t = pow(n, q, p) - r = pow(n, (q + 1) // 2, p) - - while t != 1: - for i in iter_range(0, m): - if pow(t, 2**i, p) == 1: - break - if i == m: - raise ValueError("Cannot compute square root of %d mod %d" % (n, p)) - b = pow(c, 2**(m - i - 1), p) - m = i - c = b**2 % p - t = (t * b**2) % p - r = (r * b) % p - - if pow(r, 2, p) != n: - raise ValueError("Cannot compute square root") - - return r - - @classmethod - def random(cls, **kwargs): - """Generate a random natural integer of a certain size. - - :Keywords: - exact_bits : positive integer - The length in bits of the resulting random Integer number. - The number is guaranteed to fulfil the relation: - - 2^bits > result >= 2^(bits - 1) - - max_bits : positive integer - The maximum length in bits of the resulting random Integer number. - The number is guaranteed to fulfil the relation: - - 2^bits > result >=0 - - randfunc : callable - A function that returns a random byte string. The length of the - byte string is passed as parameter. Optional. - If not provided (or ``None``), randomness is read from the system RNG. 
- - :Return: a Integer object - """ - - exact_bits = kwargs.pop("exact_bits", None) - max_bits = kwargs.pop("max_bits", None) - randfunc = kwargs.pop("randfunc", None) - - if randfunc is None: - randfunc = Random.new().read - - if exact_bits is None and max_bits is None: - raise ValueError("Either 'exact_bits' or 'max_bits' must be specified") - - if exact_bits is not None and max_bits is not None: - raise ValueError("'exact_bits' and 'max_bits' are mutually exclusive") - - bits = exact_bits or max_bits - bytes_needed = ((bits - 1) // 8) + 1 - significant_bits_msb = 8 - (bytes_needed * 8 - bits) - msb = bord(randfunc(1)[0]) - if exact_bits is not None: - msb |= 1 << (significant_bits_msb - 1) - msb &= (1 << significant_bits_msb) - 1 - - return cls.from_bytes(bchr(msb) + randfunc(bytes_needed - 1)) - - @classmethod - def random_range(cls, **kwargs): - """Generate a random integer within a given internal. - - :Keywords: - min_inclusive : integer - The lower end of the interval (inclusive). - max_inclusive : integer - The higher end of the interval (inclusive). - max_exclusive : integer - The higher end of the interval (exclusive). - randfunc : callable - A function that returns a random byte string. The length of the - byte string is passed as parameter. Optional. - If not provided (or ``None``), randomness is read from the system RNG. - :Returns: - An Integer randomly taken in the given interval. - """ - - min_inclusive = kwargs.pop("min_inclusive", None) - max_inclusive = kwargs.pop("max_inclusive", None) - max_exclusive = kwargs.pop("max_exclusive", None) - randfunc = kwargs.pop("randfunc", None) - - if kwargs: - raise ValueError("Unknown keywords: " + str(kwargs.keys)) - if None not in (max_inclusive, max_exclusive): - raise ValueError("max_inclusive and max_exclusive cannot be both" - " specified") - if max_exclusive is not None: - max_inclusive = max_exclusive - 1 - if None in (min_inclusive, max_inclusive): - raise ValueError("Missing keyword to identify the interval") - - if randfunc is None: - randfunc = Random.new().read - - norm_maximum = max_inclusive - min_inclusive - bits_needed = cls(norm_maximum).size_in_bits() - - norm_candidate = -1 - while not 0 <= norm_candidate <= norm_maximum: - norm_candidate = cls.random( - max_bits=bits_needed, - randfunc=randfunc - ) - return norm_candidate + min_inclusive - - @staticmethod - @abc.abstractmethod - def _mult_modulo_bytes(term1, term2, modulus): - """Multiply two integers, take the modulo, and encode as big endian. - This specialized method is used for RSA decryption. - - Args: - term1 : integer - The first term of the multiplication, non-negative. - term2 : integer - The second term of the multiplication, non-negative. - modulus: integer - The modulus, a positive odd number. - :Returns: - A byte string, with the result of the modular multiplication - encoded in big endian mode. - It is as long as the modulus would be, with zero padding - on the left if needed. - """ - pass diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerBase.pyi b/venv/Lib/site-packages/Cryptodome/Math/_IntegerBase.pyi deleted file mode 100644 index ea23532..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerBase.pyi +++ /dev/null @@ -1,67 +0,0 @@ -from typing import Optional, Union, Callable - -RandFunc = Callable[[int],int] - -class IntegerBase: - - def __init__(self, value: Union[IntegerBase, int]): ... - - def __int__(self) -> int: ... - def __str__(self) -> str: ... - def __repr__(self) -> str: ... 
- def to_bytes(self, block_size: Optional[int]=0, byteorder: str= ...) -> bytes: ... - @staticmethod - def from_bytes(byte_string: bytes, byteorder: Optional[str] = ...) -> IntegerBase: ... - def __eq__(self, term: object) -> bool: ... - def __ne__(self, term: object) -> bool: ... - def __lt__(self, term: Union[IntegerBase, int]) -> bool: ... - def __le__(self, term: Union[IntegerBase, int]) -> bool: ... - def __gt__(self, term: Union[IntegerBase, int]) -> bool: ... - def __ge__(self, term: Union[IntegerBase, int]) -> bool: ... - def __nonzero__(self) -> bool: ... - def is_negative(self) -> bool: ... - def __add__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __sub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __mul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __floordiv__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... - def __mod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... - def inplace_pow(self, exponent: int, modulus: Optional[Union[IntegerBase, int]]=None) -> IntegerBase: ... - def __pow__(self, exponent: int, modulus: Optional[int]) -> IntegerBase: ... - def __abs__(self) -> IntegerBase: ... - def sqrt(self, modulus: Optional[int]) -> IntegerBase: ... - def __iadd__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __isub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __imul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __imod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... - def __and__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __or__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def __rshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... - def __irshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... - def __lshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... - def __ilshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... - def get_bit(self, n: int) -> bool: ... - def is_odd(self) -> bool: ... - def is_even(self) -> bool: ... - def size_in_bits(self) -> int: ... - def size_in_bytes(self) -> int: ... - def is_perfect_square(self) -> bool: ... - def fail_if_divisible_by(self, small_prime: Union[IntegerBase, int]) -> None: ... - def multiply_accumulate(self, a: Union[IntegerBase, int], b: Union[IntegerBase, int]) -> IntegerBase: ... - def set(self, source: Union[IntegerBase, int]) -> IntegerBase: ... - def inplace_inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ... - def inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ... - def gcd(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - def lcm(self, term: Union[IntegerBase, int]) -> IntegerBase: ... - @staticmethod - def jacobi_symbol(a: Union[IntegerBase, int], n: Union[IntegerBase, int]) -> IntegerBase: ... - @staticmethod - def _tonelli_shanks(n: Union[IntegerBase, int], p: Union[IntegerBase, int]) -> IntegerBase : ... - @classmethod - def random(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ... - @classmethod - def random_range(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ... - @staticmethod - def _mult_modulo_bytes(term1: Union[IntegerBase, int], - term2: Union[IntegerBase, int], - modulus: Union[IntegerBase, int]) -> bytes: ... 
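For context, a brief sketch of how the removed Cryptodome.Math helpers fit together (assuming pycryptodomex is installed; the bit sizes and ranges are illustrative):

from Cryptodome.Math.Numbers import Integer
from Cryptodome.Math.Primality import (generate_probable_prime,
                                        test_probable_prime, PROBABLY_PRIME)

# Draw a random Integer in a closed interval (see IntegerBase.random_range above).
n = Integer.random_range(min_inclusive=1, max_inclusive=100)
assert 1 <= n <= 100

# Generate a 256-bit probable prime and re-check it with the
# Miller-Rabin + Lucas combination implemented in Primality.py.
p = generate_probable_prime(exact_bits=256)
assert p.size_in_bits() == 256
assert test_probable_prime(p) == PROBABLY_PRIME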
- diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerCustom.py b/venv/Lib/site-packages/Cryptodome/Math/_IntegerCustom.py deleted file mode 100644 index 20eadca..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerCustom.py +++ /dev/null @@ -1,162 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2018, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from ._IntegerNative import IntegerNative - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - create_string_buffer, - get_raw_buffer, backend, - c_size_t, c_ulonglong) - - -from Cryptodome.Random.random import getrandbits - -c_defs = """ -int monty_pow(uint8_t *out, - const uint8_t *base, - const uint8_t *exp, - const uint8_t *modulus, - size_t len, - uint64_t seed); - -int monty_multiply(uint8_t *out, - const uint8_t *term1, - const uint8_t *term2, - const uint8_t *modulus, - size_t len); -""" - - -_raw_montgomery = load_pycryptodome_raw_lib("Cryptodome.Math._modexp", c_defs) -implementation = {"library": "custom", "api": backend} - - -class IntegerCustom(IntegerNative): - - @staticmethod - def from_bytes(byte_string, byteorder='big'): - if byteorder == 'big': - pass - elif byteorder == 'little': - byte_string = bytearray(byte_string) - byte_string.reverse() - else: - raise ValueError("Incorrect byteorder") - return IntegerCustom(bytes_to_long(byte_string)) - - def inplace_pow(self, exponent, modulus=None): - exp_value = int(exponent) - if exp_value < 0: - raise ValueError("Exponent must not be negative") - - # No modular reduction - if modulus is None: - self._value = pow(self._value, exp_value) - return self - - # With modular reduction - mod_value = int(modulus) - if mod_value < 0: - raise ValueError("Modulus must be positive") - if mod_value == 0: - raise ZeroDivisionError("Modulus cannot be zero") - - # C extension only works with odd moduli - if (mod_value & 1) == 0: - self._value = pow(self._value, exp_value, mod_value) - return self - - # C extension only works with bases smaller than modulus - if self._value >= mod_value: - self._value %= 
mod_value - - max_len = len(long_to_bytes(max(self._value, exp_value, mod_value))) - - base_b = long_to_bytes(self._value, max_len) - exp_b = long_to_bytes(exp_value, max_len) - modulus_b = long_to_bytes(mod_value, max_len) - - out = create_string_buffer(max_len) - - error = _raw_montgomery.monty_pow( - out, - base_b, - exp_b, - modulus_b, - c_size_t(max_len), - c_ulonglong(getrandbits(64)) - ) - - if error: - raise ValueError("monty_pow failed with error: %d" % error) - - result = bytes_to_long(get_raw_buffer(out)) - self._value = result - return self - - @staticmethod - def _mult_modulo_bytes(term1, term2, modulus): - - # With modular reduction - mod_value = int(modulus) - if mod_value < 0: - raise ValueError("Modulus must be positive") - if mod_value == 0: - raise ZeroDivisionError("Modulus cannot be zero") - - # C extension only works with odd moduli - if (mod_value & 1) == 0: - raise ValueError("Odd modulus is required") - - # C extension only works with non-negative terms smaller than modulus - if term1 >= mod_value or term1 < 0: - term1 %= mod_value - if term2 >= mod_value or term2 < 0: - term2 %= mod_value - - modulus_b = long_to_bytes(mod_value) - numbers_len = len(modulus_b) - term1_b = long_to_bytes(term1, numbers_len) - term2_b = long_to_bytes(term2, numbers_len) - out = create_string_buffer(numbers_len) - - error = _raw_montgomery.monty_multiply( - out, - term1_b, - term2_b, - modulus_b, - c_size_t(numbers_len) - ) - if error: - raise ValueError("monty_multiply failed with error: %d" % error) - - return get_raw_buffer(out) diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerCustom.pyi b/venv/Lib/site-packages/Cryptodome/Math/_IntegerCustom.pyi deleted file mode 100644 index 2dd75c7..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerCustom.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Any - -from ._IntegerNative import IntegerNative - -_raw_montgomery = Any - -class IntegerCustom(IntegerNative): - pass diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerGMP.py b/venv/Lib/site-packages/Cryptodome/Math/_IntegerGMP.py deleted file mode 100644 index be77372..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerGMP.py +++ /dev/null @@ -1,799 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import sys -import struct - -from Cryptodome.Util.py3compat import is_native_int - -from Cryptodome.Util._raw_api import (backend, load_lib, - c_ulong, c_size_t, c_uint8_ptr) - -from ._IntegerBase import IntegerBase - -gmp_defs = """typedef unsigned long UNIX_ULONG; - typedef struct { int a; int b; void *c; } MPZ; - typedef MPZ mpz_t[1]; - typedef UNIX_ULONG mp_bitcnt_t; - - void __gmpz_init (mpz_t x); - void __gmpz_init_set (mpz_t rop, const mpz_t op); - void __gmpz_init_set_ui (mpz_t rop, UNIX_ULONG op); - - UNIX_ULONG __gmpz_get_ui (const mpz_t op); - void __gmpz_set (mpz_t rop, const mpz_t op); - void __gmpz_set_ui (mpz_t rop, UNIX_ULONG op); - void __gmpz_add (mpz_t rop, const mpz_t op1, const mpz_t op2); - void __gmpz_add_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); - void __gmpz_sub_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); - void __gmpz_addmul (mpz_t rop, const mpz_t op1, const mpz_t op2); - void __gmpz_addmul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); - void __gmpz_submul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); - void __gmpz_import (mpz_t rop, size_t count, int order, size_t size, - int endian, size_t nails, const void *op); - void * __gmpz_export (void *rop, size_t *countp, int order, - size_t size, - int endian, size_t nails, const mpz_t op); - size_t __gmpz_sizeinbase (const mpz_t op, int base); - void __gmpz_sub (mpz_t rop, const mpz_t op1, const mpz_t op2); - void __gmpz_mul (mpz_t rop, const mpz_t op1, const mpz_t op2); - void __gmpz_mul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); - int __gmpz_cmp (const mpz_t op1, const mpz_t op2); - void __gmpz_powm (mpz_t rop, const mpz_t base, const mpz_t exp, const - mpz_t mod); - void __gmpz_powm_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp, - const mpz_t mod); - void __gmpz_pow_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp); - void __gmpz_sqrt(mpz_t rop, const mpz_t op); - void __gmpz_mod (mpz_t r, const mpz_t n, const mpz_t d); - void __gmpz_neg (mpz_t rop, const mpz_t op); - void __gmpz_abs (mpz_t rop, const mpz_t op); - void __gmpz_and (mpz_t rop, const mpz_t op1, const mpz_t op2); - void __gmpz_ior (mpz_t rop, const mpz_t op1, const mpz_t op2); - void __gmpz_clear (mpz_t x); - void __gmpz_tdiv_q_2exp (mpz_t q, const mpz_t n, mp_bitcnt_t b); - void __gmpz_fdiv_q (mpz_t q, const mpz_t n, const mpz_t d); - void __gmpz_mul_2exp (mpz_t rop, const mpz_t op1, mp_bitcnt_t op2); - int __gmpz_tstbit (const mpz_t op, mp_bitcnt_t bit_index); - int __gmpz_perfect_square_p (const mpz_t op); - int __gmpz_jacobi (const mpz_t a, const mpz_t b); - void __gmpz_gcd (mpz_t rop, const mpz_t op1, const mpz_t op2); - UNIX_ULONG __gmpz_gcd_ui (mpz_t rop, const mpz_t op1, - UNIX_ULONG op2); - void __gmpz_lcm (mpz_t rop, const mpz_t op1, const mpz_t op2); - int __gmpz_invert (mpz_t rop, const mpz_t op1, const mpz_t op2); - int __gmpz_divisible_p (const mpz_t n, const mpz_t d); - int __gmpz_divisible_ui_p (const mpz_t n, 
UNIX_ULONG d); - - size_t __gmpz_size (const mpz_t op); - UNIX_ULONG __gmpz_getlimbn (const mpz_t op, size_t n); - """ - -if sys.platform == "win32": - raise ImportError("Not using GMP on Windows") - -lib = load_lib("gmp", gmp_defs) -implementation = {"library": "gmp", "api": backend} - -if hasattr(lib, "__mpir_version"): - raise ImportError("MPIR library detected") - - -# Lazy creation of GMP methods -class _GMP(object): - - def __getattr__(self, name): - if name.startswith("mpz_"): - func_name = "__gmpz_" + name[4:] - elif name.startswith("gmp_"): - func_name = "__gmp_" + name[4:] - else: - raise AttributeError("Attribute %s is invalid" % name) - func = getattr(lib, func_name) - setattr(self, name, func) - return func - - -_gmp = _GMP() - - -# In order to create a function that returns a pointer to -# a new MPZ structure, we need to break the abstraction -# and know exactly what ffi backend we have -if implementation["api"] == "ctypes": - from ctypes import Structure, c_int, c_void_p, byref - - class _MPZ(Structure): - _fields_ = [('_mp_alloc', c_int), - ('_mp_size', c_int), - ('_mp_d', c_void_p)] - - def new_mpz(): - return byref(_MPZ()) - - _gmp.mpz_getlimbn.restype = c_ulong - -else: - # We are using CFFI - from Cryptodome.Util._raw_api import ffi - - def new_mpz(): - return ffi.new("MPZ*") - - -# Size of a native word -_sys_bits = 8 * struct.calcsize("P") - - -class IntegerGMP(IntegerBase): - """A fast, arbitrary precision integer""" - - _zero_mpz_p = new_mpz() - _gmp.mpz_init_set_ui(_zero_mpz_p, c_ulong(0)) - - def __init__(self, value): - """Initialize the integer to the given value.""" - - self._mpz_p = new_mpz() - self._initialized = False - - if isinstance(value, float): - raise ValueError("A floating point type is not a natural number") - - if is_native_int(value): - _gmp.mpz_init(self._mpz_p) - self._initialized = True - if value == 0: - return - - tmp = new_mpz() - _gmp.mpz_init(tmp) - - try: - positive = value >= 0 - reduce = abs(value) - slots = (reduce.bit_length() - 1) // 32 + 1 - - while slots > 0: - slots = slots - 1 - _gmp.mpz_set_ui(tmp, - c_ulong(0xFFFFFFFF & (reduce >> (slots * 32)))) - _gmp.mpz_mul_2exp(tmp, tmp, c_ulong(slots * 32)) - _gmp.mpz_add(self._mpz_p, self._mpz_p, tmp) - finally: - _gmp.mpz_clear(tmp) - - if not positive: - _gmp.mpz_neg(self._mpz_p, self._mpz_p) - - elif isinstance(value, IntegerGMP): - _gmp.mpz_init_set(self._mpz_p, value._mpz_p) - self._initialized = True - else: - raise NotImplementedError - - # Conversions - def __int__(self): - tmp = new_mpz() - _gmp.mpz_init_set(tmp, self._mpz_p) - - try: - value = 0 - slot = 0 - while _gmp.mpz_cmp(tmp, self._zero_mpz_p) != 0: - lsb = _gmp.mpz_get_ui(tmp) & 0xFFFFFFFF - value |= lsb << (slot * 32) - _gmp.mpz_tdiv_q_2exp(tmp, tmp, c_ulong(32)) - slot = slot + 1 - finally: - _gmp.mpz_clear(tmp) - - if self < 0: - value = -value - return int(value) - - def __str__(self): - return str(int(self)) - - def __repr__(self): - return "Integer(%s)" % str(self) - - # Only Python 2.x - def __hex__(self): - return hex(int(self)) - - # Only Python 3.x - def __index__(self): - return int(self) - - def to_bytes(self, block_size=0, byteorder='big'): - """Convert the number into a byte string. - - This method encodes the number in network order and prepends - as many zero bytes as required. It only works for non-negative - values. - - :Parameters: - block_size : integer - The exact size the output byte string must have. - If zero, the string has the minimal length. 
- byteorder : string - 'big' for big-endian integers (default), 'little' for litte-endian. - :Returns: - A byte string. - :Raise ValueError: - If the value is negative or if ``block_size`` is - provided and the length of the byte string would exceed it. - """ - - if self < 0: - raise ValueError("Conversion only valid for non-negative numbers") - - num_limbs = _gmp.mpz_size(self._mpz_p) - if _sys_bits == 32: - spchar = "L" - num_limbs = max(1, num_limbs, (block_size + 3) // 4) - elif _sys_bits == 64: - spchar = "Q" - num_limbs = max(1, num_limbs, (block_size + 7) // 8) - else: - raise ValueError("Unknown limb size") - - # mpz_getlimbn returns 0 if i is larger than the number of actual limbs - limbs = [_gmp.mpz_getlimbn(self._mpz_p, num_limbs - i - 1) for i in range(num_limbs)] - - result = struct.pack(">" + spchar * num_limbs, *limbs) - cutoff_len = len(result) - block_size - if block_size == 0: - result = result.lstrip(b'\x00') - elif cutoff_len > 0: - if result[:cutoff_len] != b'\x00' * (cutoff_len): - raise ValueError("Number is too big to convert to " - "byte string of prescribed length") - result = result[cutoff_len:] - elif cutoff_len < 0: - result = b'\x00' * (-cutoff_len) + result - - if byteorder == 'little': - result = result[::-1] - elif byteorder == 'big': - pass - else: - raise ValueError("Incorrect byteorder") - - if len(result) == 0: - result = b'\x00' - - return result - - @staticmethod - def from_bytes(byte_string, byteorder='big'): - """Convert a byte string into a number. - - :Parameters: - byte_string : byte string - The input number, encoded in network order. - It can only be non-negative. - byteorder : string - 'big' for big-endian integers (default), 'little' for litte-endian. - - :Return: - The ``Integer`` object carrying the same value as the input. 
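
For reference, a round-trip sketch of the to_bytes()/from_bytes() behaviour documented above, assuming the public wrapper Cryptodome.Math.Numbers.Integer (which dispatches to the GMP or native backend) is importable:

    from Cryptodome.Math.Numbers import Integer

    n = Integer(0xDEADBEEF)
    raw = n.to_bytes(block_size=8)                   # zero-padded to exactly 8 bytes
    assert raw == b'\x00\x00\x00\x00\xde\xad\xbe\xef'
    assert Integer.from_bytes(raw) == n              # big-endian round trip
    assert n.to_bytes() == b'\xde\xad\xbe\xef'       # minimal length when block_size=0
    assert n.to_bytes(byteorder='little') == b'\xef\xbe\xad\xde'
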
- """ - result = IntegerGMP(0) - if byteorder == 'big': - pass - elif byteorder == 'little': - byte_string = bytearray(byte_string) - byte_string.reverse() - else: - raise ValueError("Incorrect byteorder") - _gmp.mpz_import( - result._mpz_p, - c_size_t(len(byte_string)), # Amount of words to read - 1, # Big endian - c_size_t(1), # Each word is 1 byte long - 0, # Endianess within a word - not relevant - c_size_t(0), # No nails - c_uint8_ptr(byte_string)) - return result - - # Relations - def _apply_and_return(self, func, term): - if not isinstance(term, IntegerGMP): - term = IntegerGMP(term) - return func(self._mpz_p, term._mpz_p) - - def __eq__(self, term): - if not (isinstance(term, IntegerGMP) or is_native_int(term)): - return False - return self._apply_and_return(_gmp.mpz_cmp, term) == 0 - - def __ne__(self, term): - if not (isinstance(term, IntegerGMP) or is_native_int(term)): - return True - return self._apply_and_return(_gmp.mpz_cmp, term) != 0 - - def __lt__(self, term): - return self._apply_and_return(_gmp.mpz_cmp, term) < 0 - - def __le__(self, term): - return self._apply_and_return(_gmp.mpz_cmp, term) <= 0 - - def __gt__(self, term): - return self._apply_and_return(_gmp.mpz_cmp, term) > 0 - - def __ge__(self, term): - return self._apply_and_return(_gmp.mpz_cmp, term) >= 0 - - def __nonzero__(self): - return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) != 0 - __bool__ = __nonzero__ - - def is_negative(self): - return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) < 0 - - # Arithmetic operations - def __add__(self, term): - result = IntegerGMP(0) - if not isinstance(term, IntegerGMP): - try: - term = IntegerGMP(term) - except NotImplementedError: - return NotImplemented - _gmp.mpz_add(result._mpz_p, - self._mpz_p, - term._mpz_p) - return result - - def __sub__(self, term): - result = IntegerGMP(0) - if not isinstance(term, IntegerGMP): - try: - term = IntegerGMP(term) - except NotImplementedError: - return NotImplemented - _gmp.mpz_sub(result._mpz_p, - self._mpz_p, - term._mpz_p) - return result - - def __mul__(self, term): - result = IntegerGMP(0) - if not isinstance(term, IntegerGMP): - try: - term = IntegerGMP(term) - except NotImplementedError: - return NotImplemented - _gmp.mpz_mul(result._mpz_p, - self._mpz_p, - term._mpz_p) - return result - - def __floordiv__(self, divisor): - if not isinstance(divisor, IntegerGMP): - divisor = IntegerGMP(divisor) - if _gmp.mpz_cmp(divisor._mpz_p, - self._zero_mpz_p) == 0: - raise ZeroDivisionError("Division by zero") - result = IntegerGMP(0) - _gmp.mpz_fdiv_q(result._mpz_p, - self._mpz_p, - divisor._mpz_p) - return result - - def __mod__(self, divisor): - if not isinstance(divisor, IntegerGMP): - divisor = IntegerGMP(divisor) - comp = _gmp.mpz_cmp(divisor._mpz_p, - self._zero_mpz_p) - if comp == 0: - raise ZeroDivisionError("Division by zero") - if comp < 0: - raise ValueError("Modulus must be positive") - result = IntegerGMP(0) - _gmp.mpz_mod(result._mpz_p, - self._mpz_p, - divisor._mpz_p) - return result - - def inplace_pow(self, exponent, modulus=None): - - if modulus is None: - if exponent < 0: - raise ValueError("Exponent must not be negative") - - # Normal exponentiation - if exponent > 256: - raise ValueError("Exponent is too big") - _gmp.mpz_pow_ui(self._mpz_p, - self._mpz_p, # Base - c_ulong(int(exponent)) - ) - else: - # Modular exponentiation - if not isinstance(modulus, IntegerGMP): - modulus = IntegerGMP(modulus) - if not modulus: - raise ZeroDivisionError("Division by zero") - if modulus.is_negative(): - raise ValueError("Modulus 
must be positive") - if is_native_int(exponent): - if exponent < 0: - raise ValueError("Exponent must not be negative") - if exponent < 65536: - _gmp.mpz_powm_ui(self._mpz_p, - self._mpz_p, - c_ulong(exponent), - modulus._mpz_p) - return self - exponent = IntegerGMP(exponent) - elif exponent.is_negative(): - raise ValueError("Exponent must not be negative") - _gmp.mpz_powm(self._mpz_p, - self._mpz_p, - exponent._mpz_p, - modulus._mpz_p) - return self - - def __pow__(self, exponent, modulus=None): - result = IntegerGMP(self) - return result.inplace_pow(exponent, modulus) - - def __abs__(self): - result = IntegerGMP(0) - _gmp.mpz_abs(result._mpz_p, self._mpz_p) - return result - - def sqrt(self, modulus=None): - """Return the largest Integer that does not - exceed the square root""" - - if modulus is None: - if self < 0: - raise ValueError("Square root of negative value") - result = IntegerGMP(0) - _gmp.mpz_sqrt(result._mpz_p, - self._mpz_p) - else: - if modulus <= 0: - raise ValueError("Modulus must be positive") - modulus = int(modulus) - result = IntegerGMP(self._tonelli_shanks(int(self) % modulus, modulus)) - - return result - - def __iadd__(self, term): - if is_native_int(term): - if 0 <= term < 65536: - _gmp.mpz_add_ui(self._mpz_p, - self._mpz_p, - c_ulong(term)) - return self - if -65535 < term < 0: - _gmp.mpz_sub_ui(self._mpz_p, - self._mpz_p, - c_ulong(-term)) - return self - term = IntegerGMP(term) - _gmp.mpz_add(self._mpz_p, - self._mpz_p, - term._mpz_p) - return self - - def __isub__(self, term): - if is_native_int(term): - if 0 <= term < 65536: - _gmp.mpz_sub_ui(self._mpz_p, - self._mpz_p, - c_ulong(term)) - return self - if -65535 < term < 0: - _gmp.mpz_add_ui(self._mpz_p, - self._mpz_p, - c_ulong(-term)) - return self - term = IntegerGMP(term) - _gmp.mpz_sub(self._mpz_p, - self._mpz_p, - term._mpz_p) - return self - - def __imul__(self, term): - if is_native_int(term): - if 0 <= term < 65536: - _gmp.mpz_mul_ui(self._mpz_p, - self._mpz_p, - c_ulong(term)) - return self - if -65535 < term < 0: - _gmp.mpz_mul_ui(self._mpz_p, - self._mpz_p, - c_ulong(-term)) - _gmp.mpz_neg(self._mpz_p, self._mpz_p) - return self - term = IntegerGMP(term) - _gmp.mpz_mul(self._mpz_p, - self._mpz_p, - term._mpz_p) - return self - - def __imod__(self, divisor): - if not isinstance(divisor, IntegerGMP): - divisor = IntegerGMP(divisor) - comp = _gmp.mpz_cmp(divisor._mpz_p, - divisor._zero_mpz_p) - if comp == 0: - raise ZeroDivisionError("Division by zero") - if comp < 0: - raise ValueError("Modulus must be positive") - _gmp.mpz_mod(self._mpz_p, - self._mpz_p, - divisor._mpz_p) - return self - - # Boolean/bit operations - def __and__(self, term): - result = IntegerGMP(0) - if not isinstance(term, IntegerGMP): - term = IntegerGMP(term) - _gmp.mpz_and(result._mpz_p, - self._mpz_p, - term._mpz_p) - return result - - def __or__(self, term): - result = IntegerGMP(0) - if not isinstance(term, IntegerGMP): - term = IntegerGMP(term) - _gmp.mpz_ior(result._mpz_p, - self._mpz_p, - term._mpz_p) - return result - - def __rshift__(self, pos): - result = IntegerGMP(0) - if pos < 0: - raise ValueError("negative shift count") - if pos > 65536: - if self < 0: - return -1 - else: - return 0 - _gmp.mpz_tdiv_q_2exp(result._mpz_p, - self._mpz_p, - c_ulong(int(pos))) - return result - - def __irshift__(self, pos): - if pos < 0: - raise ValueError("negative shift count") - if pos > 65536: - if self < 0: - return -1 - else: - return 0 - _gmp.mpz_tdiv_q_2exp(self._mpz_p, - self._mpz_p, - c_ulong(int(pos))) - return self - - def 
__lshift__(self, pos): - result = IntegerGMP(0) - if not 0 <= pos < 65536: - raise ValueError("Incorrect shift count") - _gmp.mpz_mul_2exp(result._mpz_p, - self._mpz_p, - c_ulong(int(pos))) - return result - - def __ilshift__(self, pos): - if not 0 <= pos < 65536: - raise ValueError("Incorrect shift count") - _gmp.mpz_mul_2exp(self._mpz_p, - self._mpz_p, - c_ulong(int(pos))) - return self - - def get_bit(self, n): - """Return True if the n-th bit is set to 1. - Bit 0 is the least significant.""" - - if self < 0: - raise ValueError("no bit representation for negative values") - if n < 0: - raise ValueError("negative bit count") - if n > 65536: - return 0 - return bool(_gmp.mpz_tstbit(self._mpz_p, - c_ulong(int(n)))) - - # Extra - def is_odd(self): - return _gmp.mpz_tstbit(self._mpz_p, 0) == 1 - - def is_even(self): - return _gmp.mpz_tstbit(self._mpz_p, 0) == 0 - - def size_in_bits(self): - """Return the minimum number of bits that can encode the number.""" - - if self < 0: - raise ValueError("Conversion only valid for non-negative numbers") - return _gmp.mpz_sizeinbase(self._mpz_p, 2) - - def size_in_bytes(self): - """Return the minimum number of bytes that can encode the number.""" - return (self.size_in_bits() - 1) // 8 + 1 - - def is_perfect_square(self): - return _gmp.mpz_perfect_square_p(self._mpz_p) != 0 - - def fail_if_divisible_by(self, small_prime): - """Raise an exception if the small prime is a divisor.""" - - if is_native_int(small_prime): - if 0 < small_prime < 65536: - if _gmp.mpz_divisible_ui_p(self._mpz_p, - c_ulong(small_prime)): - raise ValueError("The value is composite") - return - small_prime = IntegerGMP(small_prime) - if _gmp.mpz_divisible_p(self._mpz_p, - small_prime._mpz_p): - raise ValueError("The value is composite") - - def multiply_accumulate(self, a, b): - """Increment the number by the product of a and b.""" - - if not isinstance(a, IntegerGMP): - a = IntegerGMP(a) - if is_native_int(b): - if 0 < b < 65536: - _gmp.mpz_addmul_ui(self._mpz_p, - a._mpz_p, - c_ulong(b)) - return self - if -65535 < b < 0: - _gmp.mpz_submul_ui(self._mpz_p, - a._mpz_p, - c_ulong(-b)) - return self - b = IntegerGMP(b) - _gmp.mpz_addmul(self._mpz_p, - a._mpz_p, - b._mpz_p) - return self - - def set(self, source): - """Set the Integer to have the given value""" - - if not isinstance(source, IntegerGMP): - source = IntegerGMP(source) - _gmp.mpz_set(self._mpz_p, - source._mpz_p) - return self - - def inplace_inverse(self, modulus): - """Compute the inverse of this number in the ring of - modulo integers. - - Raise an exception if no inverse exists. 
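
A short sketch of the bit-level helpers and multiply_accumulate() shown above, again reached through the public Cryptodome.Math.Numbers.Integer wrapper (an assumption about how the backend is normally used):

    from Cryptodome.Math.Numbers import Integer

    n = Integer(0b1011)                              # 11
    assert n.size_in_bits() == 4 and n.size_in_bytes() == 1
    assert n.is_odd() and not n.is_even()
    assert n.get_bit(1) == 1 and n.get_bit(2) == 0   # bit 0 is the least significant

    acc = Integer(10)
    acc.multiply_accumulate(3, 4)                    # acc += 3 * 4
    assert acc == 22
    assert Integer(49).is_perfect_square()
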
- """ - - if not isinstance(modulus, IntegerGMP): - modulus = IntegerGMP(modulus) - - comp = _gmp.mpz_cmp(modulus._mpz_p, - self._zero_mpz_p) - if comp == 0: - raise ZeroDivisionError("Modulus cannot be zero") - if comp < 0: - raise ValueError("Modulus must be positive") - - result = _gmp.mpz_invert(self._mpz_p, - self._mpz_p, - modulus._mpz_p) - if not result: - raise ValueError("No inverse value can be computed") - return self - - def inverse(self, modulus): - result = IntegerGMP(self) - result.inplace_inverse(modulus) - return result - - def gcd(self, term): - """Compute the greatest common denominator between this - number and another term.""" - - result = IntegerGMP(0) - if is_native_int(term): - if 0 < term < 65535: - _gmp.mpz_gcd_ui(result._mpz_p, - self._mpz_p, - c_ulong(term)) - return result - term = IntegerGMP(term) - _gmp.mpz_gcd(result._mpz_p, self._mpz_p, term._mpz_p) - return result - - def lcm(self, term): - """Compute the least common multiplier between this - number and another term.""" - - result = IntegerGMP(0) - if not isinstance(term, IntegerGMP): - term = IntegerGMP(term) - _gmp.mpz_lcm(result._mpz_p, self._mpz_p, term._mpz_p) - return result - - @staticmethod - def jacobi_symbol(a, n): - """Compute the Jacobi symbol""" - - if not isinstance(a, IntegerGMP): - a = IntegerGMP(a) - if not isinstance(n, IntegerGMP): - n = IntegerGMP(n) - if n <= 0 or n.is_even(): - raise ValueError("n must be positive odd for the Jacobi symbol") - return _gmp.mpz_jacobi(a._mpz_p, n._mpz_p) - - @staticmethod - def _mult_modulo_bytes(term1, term2, modulus): - if not isinstance(term1, IntegerGMP): - term1 = IntegerGMP(term1) - if not isinstance(term2, IntegerGMP): - term2 = IntegerGMP(term2) - if not isinstance(modulus, IntegerGMP): - modulus = IntegerGMP(modulus) - - if modulus < 0: - raise ValueError("Modulus must be positive") - if modulus == 0: - raise ZeroDivisionError("Modulus cannot be zero") - if (modulus & 1) == 0: - raise ValueError("Odd modulus is required") - - product = (term1 * term2) % modulus - return product.to_bytes(modulus.size_in_bytes()) - - # Clean-up - def __del__(self): - - try: - if self._mpz_p is not None: - if self._initialized: - _gmp.mpz_clear(self._mpz_p) - - self._mpz_p = None - except AttributeError: - pass diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerGMP.pyi b/venv/Lib/site-packages/Cryptodome/Math/_IntegerGMP.pyi deleted file mode 100644 index 2181b47..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerGMP.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from ._IntegerBase import IntegerBase -class IntegerGMP(IntegerBase): - pass diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerNative.py b/venv/Lib/site-packages/Cryptodome/Math/_IntegerNative.py deleted file mode 100644 index 5f768e2..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerNative.py +++ /dev/null @@ -1,382 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from ._IntegerBase import IntegerBase - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long, inverse, GCD - - -class IntegerNative(IntegerBase): - """A class to model a natural integer (including zero)""" - - def __init__(self, value): - if isinstance(value, float): - raise ValueError("A floating point type is not a natural number") - try: - self._value = value._value - except AttributeError: - self._value = value - - # Conversions - def __int__(self): - return self._value - - def __str__(self): - return str(int(self)) - - def __repr__(self): - return "Integer(%s)" % str(self) - - # Only Python 2.x - def __hex__(self): - return hex(self._value) - - # Only Python 3.x - def __index__(self): - return int(self._value) - - def to_bytes(self, block_size=0, byteorder='big'): - if self._value < 0: - raise ValueError("Conversion only valid for non-negative numbers") - result = long_to_bytes(self._value, block_size) - if len(result) > block_size > 0: - raise ValueError("Value too large to encode") - if byteorder == 'big': - pass - elif byteorder == 'little': - result = bytearray(result) - result.reverse() - result = bytes(result) - else: - raise ValueError("Incorrect byteorder") - return result - - @classmethod - def from_bytes(cls, byte_string, byteorder='big'): - if byteorder == 'big': - pass - elif byteorder == 'little': - byte_string = bytearray(byte_string) - byte_string.reverse() - else: - raise ValueError("Incorrect byteorder") - return cls(bytes_to_long(byte_string)) - - # Relations - def __eq__(self, term): - if term is None: - return False - return self._value == int(term) - - def __ne__(self, term): - return not self.__eq__(term) - - def __lt__(self, term): - return self._value < int(term) - - def __le__(self, term): - return self.__lt__(term) or self.__eq__(term) - - def __gt__(self, term): - return not self.__le__(term) - - def __ge__(self, term): - return not self.__lt__(term) - - def __nonzero__(self): - return self._value != 0 - __bool__ = __nonzero__ - - def is_negative(self): - return self._value < 0 - - # Arithmetic operations - def __add__(self, term): - try: - return self.__class__(self._value + int(term)) - except (ValueError, AttributeError, TypeError): - return NotImplemented - - def __sub__(self, term): - try: - return self.__class__(self._value - int(term)) - except (ValueError, AttributeError, TypeError): - return NotImplemented - - def __mul__(self, factor): - try: - return self.__class__(self._value * int(factor)) - except (ValueError, AttributeError, TypeError): - return NotImplemented - - def __floordiv__(self, divisor): - return self.__class__(self._value // int(divisor)) - - def __mod__(self, divisor): 
- divisor_value = int(divisor) - if divisor_value < 0: - raise ValueError("Modulus must be positive") - return self.__class__(self._value % divisor_value) - - def inplace_pow(self, exponent, modulus=None): - exp_value = int(exponent) - if exp_value < 0: - raise ValueError("Exponent must not be negative") - - if modulus is not None: - mod_value = int(modulus) - if mod_value < 0: - raise ValueError("Modulus must be positive") - if mod_value == 0: - raise ZeroDivisionError("Modulus cannot be zero") - else: - mod_value = None - self._value = pow(self._value, exp_value, mod_value) - return self - - def __pow__(self, exponent, modulus=None): - result = self.__class__(self) - return result.inplace_pow(exponent, modulus) - - def __abs__(self): - return abs(self._value) - - def sqrt(self, modulus=None): - - value = self._value - if modulus is None: - if value < 0: - raise ValueError("Square root of negative value") - # http://stackoverflow.com/questions/15390807/integer-square-root-in-python - - x = value - y = (x + 1) // 2 - while y < x: - x = y - y = (x + value // x) // 2 - result = x - else: - if modulus <= 0: - raise ValueError("Modulus must be positive") - result = self._tonelli_shanks(self % modulus, modulus) - - return self.__class__(result) - - def __iadd__(self, term): - self._value += int(term) - return self - - def __isub__(self, term): - self._value -= int(term) - return self - - def __imul__(self, term): - self._value *= int(term) - return self - - def __imod__(self, term): - modulus = int(term) - if modulus == 0: - raise ZeroDivisionError("Division by zero") - if modulus < 0: - raise ValueError("Modulus must be positive") - self._value %= modulus - return self - - # Boolean/bit operations - def __and__(self, term): - return self.__class__(self._value & int(term)) - - def __or__(self, term): - return self.__class__(self._value | int(term)) - - def __rshift__(self, pos): - try: - return self.__class__(self._value >> int(pos)) - except OverflowError: - if self._value >= 0: - return 0 - else: - return -1 - - def __irshift__(self, pos): - try: - self._value >>= int(pos) - except OverflowError: - if self._value >= 0: - return 0 - else: - return -1 - return self - - def __lshift__(self, pos): - try: - return self.__class__(self._value << int(pos)) - except OverflowError: - raise ValueError("Incorrect shift count") - - def __ilshift__(self, pos): - try: - self._value <<= int(pos) - except OverflowError: - raise ValueError("Incorrect shift count") - return self - - def get_bit(self, n): - if self._value < 0: - raise ValueError("no bit representation for negative values") - try: - try: - result = (self._value >> n._value) & 1 - if n._value < 0: - raise ValueError("negative bit count") - except AttributeError: - result = (self._value >> n) & 1 - if n < 0: - raise ValueError("negative bit count") - except OverflowError: - result = 0 - return result - - # Extra - def is_odd(self): - return (self._value & 1) == 1 - - def is_even(self): - return (self._value & 1) == 0 - - def size_in_bits(self): - - if self._value < 0: - raise ValueError("Conversion only valid for non-negative numbers") - - if self._value == 0: - return 1 - - return self._value.bit_length() - - def size_in_bytes(self): - return (self.size_in_bits() - 1) // 8 + 1 - - def is_perfect_square(self): - if self._value < 0: - return False - if self._value in (0, 1): - return True - - x = self._value // 2 - square_x = x ** 2 - - while square_x > self._value: - x = (square_x + self._value) // (2 * x) - square_x = x ** 2 - - return self._value 
== x ** 2 - - def fail_if_divisible_by(self, small_prime): - if (self._value % int(small_prime)) == 0: - raise ValueError("Value is composite") - - def multiply_accumulate(self, a, b): - self._value += int(a) * int(b) - return self - - def set(self, source): - self._value = int(source) - - def inplace_inverse(self, modulus): - self._value = inverse(self._value, int(modulus)) - return self - - def inverse(self, modulus): - result = self.__class__(self) - result.inplace_inverse(modulus) - return result - - def gcd(self, term): - return self.__class__(GCD(abs(self._value), abs(int(term)))) - - def lcm(self, term): - term = int(term) - if self._value == 0 or term == 0: - return self.__class__(0) - return self.__class__(abs((self._value * term) // self.gcd(term)._value)) - - @staticmethod - def jacobi_symbol(a, n): - a = int(a) - n = int(n) - - if n <= 0: - raise ValueError("n must be a positive integer") - - if (n & 1) == 0: - raise ValueError("n must be odd for the Jacobi symbol") - - # Step 1 - a = a % n - # Step 2 - if a == 1 or n == 1: - return 1 - # Step 3 - if a == 0: - return 0 - # Step 4 - e = 0 - a1 = a - while (a1 & 1) == 0: - a1 >>= 1 - e += 1 - # Step 5 - if (e & 1) == 0: - s = 1 - elif n % 8 in (1, 7): - s = 1 - else: - s = -1 - # Step 6 - if n % 4 == 3 and a1 % 4 == 3: - s = -s - # Step 7 - n1 = n % a1 - # Step 8 - return s * IntegerNative.jacobi_symbol(n1, a1) - - @staticmethod - def _mult_modulo_bytes(term1, term2, modulus): - if modulus < 0: - raise ValueError("Modulus must be positive") - if modulus == 0: - raise ZeroDivisionError("Modulus cannot be zero") - if (modulus & 1) == 0: - raise ValueError("Odd modulus is required") - - number_len = len(long_to_bytes(modulus)) - return long_to_bytes((term1 * term2) % modulus, number_len) diff --git a/venv/Lib/site-packages/Cryptodome/Math/_IntegerNative.pyi b/venv/Lib/site-packages/Cryptodome/Math/_IntegerNative.pyi deleted file mode 100644 index 3f65a39..0000000 --- a/venv/Lib/site-packages/Cryptodome/Math/_IntegerNative.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from ._IntegerBase import IntegerBase -class IntegerNative(IntegerBase): - pass diff --git a/venv/Lib/site-packages/Cryptodome/Math/__init__.py b/venv/Lib/site-packages/Cryptodome/Math/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/Numbers.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/Numbers.cpython-312.pyc deleted file mode 100644 index 83ca990..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/Numbers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/Primality.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/Primality.cpython-312.pyc deleted file mode 100644 index 93df83d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/Primality.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerBase.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerBase.cpython-312.pyc deleted file mode 100644 index 8bcdd55..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerBase.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerCustom.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerCustom.cpython-312.pyc deleted file mode 100644 index 4a8251c..0000000 Binary files 
a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerCustom.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerGMP.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerGMP.cpython-312.pyc deleted file mode 100644 index f036a0c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerGMP.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerNative.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerNative.cpython-312.pyc deleted file mode 100644 index 6df66bc..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/_IntegerNative.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Math/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 00045e6..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Math/_modexp.pyd b/venv/Lib/site-packages/Cryptodome/Math/_modexp.pyd deleted file mode 100644 index d783b62..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Math/_modexp.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/DH.py b/venv/Lib/site-packages/Cryptodome/Protocol/DH.py deleted file mode 100644 index 41793c1..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/DH.py +++ /dev/null @@ -1,192 +0,0 @@ -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.PublicKey.ECC import (EccKey, - construct, - _import_curve25519_public_key, - _import_curve448_public_key) - - -def _compute_ecdh(key_priv, key_pub): - pointP = key_pub.pointQ * key_priv.d - if pointP.is_point_at_infinity(): - raise ValueError("Invalid ECDH point") - - if key_priv.curve == "Curve25519": - z = bytearray(pointP.x.to_bytes(32, byteorder='little')) - elif key_priv.curve == "Curve448": - z = bytearray(pointP.x.to_bytes(56, byteorder='little')) - else: - # See Section 5.7.1.2 in NIST SP 800-56Ar3 - z = long_to_bytes(pointP.x, pointP.size_in_bytes()) - return z - - -def import_x25519_public_key(encoded): - """Create a new X25519 public key object, - starting from the key encoded as raw ``bytes``, - in the format described in RFC7748. - - Args: - encoded (bytes): - The x25519 public key to import. - It must be 32 bytes. - - Returns: - :class:`Cryptodome.PublicKey.EccKey` : a new ECC key object. - - Raises: - ValueError: when the given key cannot be parsed. - """ - - x = _import_curve25519_public_key(encoded) - return construct(curve='Curve25519', point_x=x) - - -def import_x25519_private_key(encoded): - """Create a new X25519 private key object, - starting from the key encoded as raw ``bytes``, - in the format described in RFC7748. - - Args: - encoded (bytes): - The X25519 private key to import. - It must be 32 bytes. - - Returns: - :class:`Cryptodome.PublicKey.EccKey` : a new ECC key object. - - Raises: - ValueError: when the given key cannot be parsed. - """ - - return construct(seed=encoded, curve="Curve25519") - - -def import_x448_public_key(encoded): - """Create a new X448 public key object, - starting from the key encoded as raw ``bytes``, - in the format described in RFC7748. - - Args: - encoded (bytes): - The x448 public key to import. - It must be 56 bytes. 
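
A small sketch of the raw-key import helpers above, assuming Cryptodome.Protocol.DH exposes them as shown and that the installed pycryptodome supports Curve25519 key generation:

    from Cryptodome.PublicKey import ECC
    from Cryptodome.Protocol.DH import import_x25519_public_key

    priv = ECC.generate(curve='Curve25519')
    raw_pub = priv.public_key().export_key(format='raw')   # 32 bytes, RFC 7748 encoding
    assert len(raw_pub) == 32

    pub = import_x25519_public_key(raw_pub)                 # back to an EccKey object
    assert pub.export_key(format='raw') == raw_pub
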
- - Returns: - :class:`Cryptodome.PublicKey.EccKey` : a new ECC key object. - - Raises: - ValueError: when the given key cannot be parsed. - """ - - x = _import_curve448_public_key(encoded) - return construct(curve='Curve448', point_x=x) - - -def import_x448_private_key(encoded): - """Create a new X448 private key object, - starting from the key encoded as raw ``bytes``, - in the format described in RFC7748. - - Args: - encoded (bytes): - The X448 private key to import. - It must be 56 bytes. - - Returns: - :class:`Cryptodome.PublicKey.EccKey` : a new ECC key object. - - Raises: - ValueError: when the given key cannot be parsed. - """ - - return construct(seed=encoded, curve="Curve448") - - -def key_agreement(**kwargs): - """Perform a Diffie-Hellman key agreement. - - Keywords: - kdf (callable): - A key derivation function that accepts ``bytes`` as input and returns - ``bytes``. - static_priv (EccKey): - The local static private key. Optional. - static_pub (EccKey): - The static public key that belongs to the peer. Optional. - eph_priv (EccKey): - The local ephemeral private key, generated for this session. Optional. - eph_pub (EccKey): - The ephemeral public key, received from the peer for this session. Optional. - - At least two keys must be passed, of which one is a private key and one - a public key. - - Returns (bytes): - The derived secret key material. - """ - - static_priv = kwargs.get('static_priv', None) - static_pub = kwargs.get('static_pub', None) - eph_priv = kwargs.get('eph_priv', None) - eph_pub = kwargs.get('eph_pub', None) - kdf = kwargs.get('kdf', None) - - if kdf is None: - raise ValueError("'kdf' is mandatory") - - count_priv = 0 - count_pub = 0 - curve = None - - def check_curve(curve, key, name, private): - if not isinstance(key, EccKey): - raise TypeError("'%s' must be an ECC key" % name) - if private and not key.has_private(): - raise TypeError("'%s' must be a private ECC key" % name) - if curve is None: - curve = key.curve - elif curve != key.curve: - raise TypeError("'%s' is defined on an incompatible curve" % name) - return curve - - if static_priv is not None: - curve = check_curve(curve, static_priv, 'static_priv', True) - count_priv += 1 - - if static_pub is not None: - curve = check_curve(curve, static_pub, 'static_pub', False) - count_pub += 1 - - if eph_priv is not None: - curve = check_curve(curve, eph_priv, 'eph_priv', True) - count_priv += 1 - - if eph_pub is not None: - curve = check_curve(curve, eph_pub, 'eph_pub', False) - count_pub += 1 - - if (count_priv + count_pub) < 2 or count_priv == 0 or count_pub == 0: - raise ValueError("Too few keys for the ECDH key agreement") - - Zs = b'' - Ze = b'' - - if static_priv and static_pub: - # C(*, 2s) - Zs = _compute_ecdh(static_priv, static_pub) - - if eph_priv and eph_pub: - # C(2e, 0s) or C(2e, 2s) - if bool(static_priv) != bool(static_pub): - raise ValueError("DH mode C(2e, 1s) is not supported") - Ze = _compute_ecdh(eph_priv, eph_pub) - elif eph_priv and static_pub: - # C(1e, 2s) or C(1e, 1s) - Ze = _compute_ecdh(eph_priv, static_pub) - elif eph_pub and static_priv: - # C(1e, 2s) or C(1e, 1s) - Ze = _compute_ecdh(static_priv, eph_pub) - - Z = Ze + Zs - - return kdf(Z) diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/DH.pyi b/venv/Lib/site-packages/Cryptodome/Protocol/DH.pyi deleted file mode 100644 index 017d863..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/DH.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import TypedDict, Callable, TypeVar, Generic -from typing_extensions import 
Unpack, NotRequired - -from Cryptodome.PublicKey.ECC import EccKey - -T = TypeVar('T') - -class RequestParams(TypedDict, Generic[T]): - kdf: Callable[[bytes|bytearray|memoryview], T] - static_priv: NotRequired[EccKey] - static_pub: NotRequired[EccKey] - eph_priv: NotRequired[EccKey] - eph_pub: NotRequired[EccKey] - -def import_x25519_public_key(encoded: bytes) -> EccKey: ... -def import_x25519_private_key(encoded: bytes) -> EccKey: ... -def import_x448_public_key(encoded: bytes) -> EccKey: ... -def import_x448_private_key(encoded: bytes) -> EccKey: ... -def key_agreement(**kwargs: Unpack[RequestParams[T]]) -> T: ... diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/HPKE.py b/venv/Lib/site-packages/Cryptodome/Protocol/HPKE.py deleted file mode 100644 index 13823ec..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/HPKE.py +++ /dev/null @@ -1,483 +0,0 @@ -import struct -from enum import IntEnum - -from types import ModuleType -from typing import Optional - -from .KDF import _HKDF_extract, _HKDF_expand -from .DH import key_agreement, import_x25519_public_key, import_x448_public_key -from Cryptodome.Util.strxor import strxor -from Cryptodome.PublicKey import ECC -from Cryptodome.PublicKey.ECC import EccKey -from Cryptodome.Hash import SHA256, SHA384, SHA512 -from Cryptodome.Cipher import AES, ChaCha20_Poly1305 - - -class MODE(IntEnum): - """HPKE modes""" - BASE = 0x00 - PSK = 0x01 - AUTH = 0x02 - AUTH_PSK = 0x03 - - -class AEAD(IntEnum): - """Authenticated Encryption with Associated Data (AEAD) Functions""" - AES128_GCM = 0x0001 - AES256_GCM = 0x0002 - CHACHA20_POLY1305 = 0x0003 - - -class DeserializeError(ValueError): - pass - -class MessageLimitReachedError(ValueError): - pass - -# CURVE to (KEM ID, KDF ID, HASH) -_Curve_Config = { - "NIST P-256": (0x0010, 0x0001, SHA256), - "NIST P-384": (0x0011, 0x0002, SHA384), - "NIST P-521": (0x0012, 0x0003, SHA512), - "Curve25519": (0x0020, 0x0001, SHA256), - "Curve448": (0x0021, 0x0003, SHA512), -} - - -def _labeled_extract(salt: bytes, - label: bytes, - ikm: bytes, - suite_id: bytes, - hashmod: ModuleType): - labeled_ikm = b"HPKE-v1" + suite_id + label + ikm - return _HKDF_extract(salt, labeled_ikm, hashmod) - - -def _labeled_expand(prk: bytes, - label: bytes, - info: bytes, - L: int, - suite_id: bytes, - hashmod: ModuleType): - labeled_info = struct.pack('>H', L) + b"HPKE-v1" + suite_id + \ - label + info - return _HKDF_expand(prk, labeled_info, L, hashmod) - - -def _extract_and_expand(dh: bytes, - kem_context: bytes, - suite_id: bytes, - hashmod: ModuleType): - Nsecret = hashmod.digest_size - - eae_prk = _labeled_extract(b"", - b"eae_prk", - dh, - suite_id, - hashmod) - - shared_secret = _labeled_expand(eae_prk, - b"shared_secret", - kem_context, - Nsecret, - suite_id, - hashmod) - return shared_secret - - -class HPKE_Cipher: - - def __init__(self, - receiver_key: EccKey, - enc: Optional[bytes], - sender_key: Optional[EccKey], - psk_pair: tuple[bytes, bytes], - info: bytes, - aead_id: AEAD, - mode: MODE): - - self.enc: bytes = b'' if enc is None else enc - """The encapsulated session key.""" - - self._verify_psk_inputs(mode, psk_pair) - - self._curve = receiver_key.curve - self._aead_id = aead_id - self._mode = mode - - try: - self._kem_id, \ - self._kdf_id, \ - self._hashmod = _Curve_Config[self._curve] - except KeyError as ke: - raise ValueError("Curve {} is not supported by HPKE".format(self._curve)) from ke - - self._Nk = 16 if self._aead_id == AEAD.AES128_GCM else 32 - self._Nn = 12 - self._Nt = 16 - self._Nh = 
self._hashmod.digest_size - - self._encrypt = not receiver_key.has_private() - - if self._encrypt: - # SetupBaseS (encryption) - if enc is not None: - raise ValueError("Parameter 'enc' cannot be an input when sealing") - shared_secret, self.enc = self._encap(receiver_key, - self._kem_id, - self._hashmod, - sender_key) - else: - # SetupBaseR (decryption) - if enc is None: - raise ValueError("Parameter 'enc' required when unsealing") - shared_secret = self._decap(enc, - receiver_key, - self._kem_id, - self._hashmod, - sender_key) - - self._sequence = 0 - self._max_sequence = (1 << (8 * self._Nn)) - 1 - - self._key, \ - self._base_nonce, \ - self._export_secret = self._key_schedule(shared_secret, - info, - *psk_pair) - - @staticmethod - def _encap(receiver_key: EccKey, - kem_id: int, - hashmod: ModuleType, - sender_key: Optional[EccKey] = None, - eph_key: Optional[EccKey] = None): - - assert (sender_key is None) or sender_key.has_private() - assert (eph_key is None) or eph_key.has_private() - - if eph_key is None: - eph_key = ECC.generate(curve=receiver_key.curve) - enc = eph_key.public_key().export_key(format='raw') - - pkRm = receiver_key.public_key().export_key(format='raw') - kem_context = enc + pkRm - extra_param = {} - if sender_key: - kem_context += sender_key.public_key().export_key(format='raw') - extra_param = {'static_priv': sender_key} - - suite_id = b"KEM" + struct.pack('>H', kem_id) - - def kdf(dh, - kem_context=kem_context, - suite_id=suite_id, - hashmod=hashmod): - return _extract_and_expand(dh, kem_context, suite_id, hashmod) - - shared_secret = key_agreement(eph_priv=eph_key, - static_pub=receiver_key, - kdf=kdf, - **extra_param) - return shared_secret, enc - - @staticmethod - def _decap(enc: bytes, - receiver_key: EccKey, - kem_id: int, - hashmod: ModuleType, - sender_key: Optional[EccKey] = None): - - assert receiver_key.has_private() - - try: - if receiver_key.curve == 'Curve25519': - pkE = import_x25519_public_key(enc) - elif receiver_key.curve == 'Curve448': - pkE = import_x448_public_key(enc) - else: - pkE = ECC.import_key(enc, curve_name=receiver_key.curve) - except ValueError as ve: - raise DeserializeError("'enc' is not a valid encapsulated HPKE key") from ve - - pkRm = receiver_key.public_key().export_key(format='raw') - kem_context = enc + pkRm - extra_param = {} - if sender_key: - kem_context += sender_key.public_key().export_key(format='raw') - extra_param = {'static_pub': sender_key} - - suite_id = b"KEM" + struct.pack('>H', kem_id) - - def kdf(dh, - kem_context=kem_context, - suite_id=suite_id, - hashmod=hashmod): - return _extract_and_expand(dh, kem_context, suite_id, hashmod) - - shared_secret = key_agreement(eph_pub=pkE, - static_priv=receiver_key, - kdf=kdf, - **extra_param) - return shared_secret - - @staticmethod - def _verify_psk_inputs(mode: MODE, psk_pair: tuple[bytes, bytes]): - psk_id, psk = psk_pair - - if (psk == b'') ^ (psk_id == b''): - raise ValueError("Inconsistent PSK inputs") - - if (psk == b''): - if mode in (MODE.PSK, MODE.AUTH_PSK): - raise ValueError(f"PSK is required with mode {mode.name}") - else: - if len(psk) < 32: - raise ValueError("PSK must be at least 32 byte long") - if mode in (MODE.BASE, MODE.AUTH): - raise ValueError("PSK is not compatible with this mode") - - def _key_schedule(self, - shared_secret: bytes, - info: bytes, - psk_id: bytes, - psk: bytes): - - suite_id = b"HPKE" + struct.pack('>HHH', - self._kem_id, - self._kdf_id, - self._aead_id) - - psk_id_hash = _labeled_extract(b'', - b'psk_id_hash', - psk_id, - suite_id, - 
self._hashmod) - - info_hash = _labeled_extract(b'', - b'info_hash', - info, - suite_id, - self._hashmod) - - key_schedule_context = self._mode.to_bytes(1, 'big') + psk_id_hash + info_hash - - secret = _labeled_extract(shared_secret, - b'secret', - psk, - suite_id, - self._hashmod) - - key = _labeled_expand(secret, - b'key', - key_schedule_context, - self._Nk, - suite_id, - self._hashmod) - - base_nonce = _labeled_expand(secret, - b'base_nonce', - key_schedule_context, - self._Nn, - suite_id, - self._hashmod) - - exporter_secret = _labeled_expand(secret, - b'exp', - key_schedule_context, - self._Nh, - suite_id, - self._hashmod) - - return key, base_nonce, exporter_secret - - def _new_cipher(self): - nonce = strxor(self._base_nonce, self._sequence.to_bytes(self._Nn, 'big')) - if self._aead_id in (AEAD.AES128_GCM, AEAD.AES256_GCM): - cipher = AES.new(self._key, AES.MODE_GCM, nonce=nonce, mac_len=self._Nt) - elif self._aead_id == AEAD.CHACHA20_POLY1305: - cipher = ChaCha20_Poly1305.new(key=self._key, nonce=nonce) - else: - raise ValueError(f"Unknown AEAD cipher ID {self._aead_id:#x}") - if self._sequence >= self._max_sequence: - raise MessageLimitReachedError() - self._sequence += 1 - return cipher - - def seal(self, plaintext: bytes, auth_data: Optional[bytes] = None): - """Encrypt and authenticate a message. - - This method can be invoked multiple times - to seal an ordered sequence of messages. - - Arguments: - plaintext: bytes - The message to seal. - auth_data: bytes - Optional. Additional Authenticated data (AAD) that is not encrypted - but that will be also covered by the authentication tag. - - Returns: - The ciphertext concatenated with the authentication tag. - """ - - if not self._encrypt: - raise ValueError("This cipher can only be used to seal") - cipher = self._new_cipher() - if auth_data: - cipher.update(auth_data) - ct, tag = cipher.encrypt_and_digest(plaintext) - return ct + tag - - def unseal(self, ciphertext: bytes, auth_data: Optional[bytes] = None): - """Decrypt a message and validate its authenticity. - - This method can be invoked multiple times - to unseal an ordered sequence of messages. - - Arguments: - cipertext: bytes - The message to unseal. - auth_data: bytes - Optional. Additional Authenticated data (AAD) that - was also covered by the authentication tag. - - Returns: - The original plaintext. - - Raises: ValueError - If the ciphertext (in combination with the AAD) is not valid. - - But if it is the first time you call ``unseal()`` this - exception may also mean that any of the parameters or keys - used to establish the session is wrong or that one is missing. - """ - - if self._encrypt: - raise ValueError("This cipher can only be used to unseal") - if len(ciphertext) < self._Nt: - raise ValueError("Ciphertext is too small") - cipher = self._new_cipher() - if auth_data: - cipher.update(auth_data) - - try: - pt = cipher.decrypt_and_verify(ciphertext[:-self._Nt], - ciphertext[-self._Nt:]) - except ValueError: - if self._sequence == 1: - raise ValueError("Incorrect HPKE keys/parameters or invalid message (wrong MAC tag)") - raise ValueError("Invalid message (wrong MAC tag)") - return pt - - -def new(*, receiver_key: EccKey, - aead_id: AEAD, - enc: Optional[bytes] = None, - sender_key: Optional[EccKey] = None, - psk: Optional[tuple[bytes, bytes]] = None, - info: Optional[bytes] = None) -> HPKE_Cipher: - """Create an HPKE context which can be used: - - - by the sender to seal (encrypt) a message or - - by the receiver to unseal (decrypt) it. 
- - As a minimum, the two parties agree on the receiver's asymmetric key - (of which the sender will only know the public half). - - Additionally, for authentication purposes, they may also agree on: - - * the sender's asymmetric key (of which the receiver will only know the public half) - - * a shared secret (e.g., a symmetric key derived from a password) - - Args: - receiver_key: - The ECC key of the receiver. - It must be on one of the following curves: ``NIST P-256``, - ``NIST P-384``, ``NIST P-521``, ``X25519`` or ``X448``. - - If this is a **public** key, the HPKE context can only be used to - **seal** (**encrypt**). - - If this is a **private** key, the HPKE context can only be used to - **unseal** (**decrypt**). - - aead_id: - The HPKE identifier of the symmetric cipher. - The possible values are: - - * ``HPKE.AEAD.AES128_GCM`` - * ``HPKE.AEAD.AES256_GCM`` - * ``HPKE.AEAD.CHACHA20_POLY1305`` - - enc: - The encapsulated session key (i.e., the KEM shared secret). - - The receiver must always specify this parameter. - - The sender must always omit this parameter. - - sender_key: - The ECC key of the sender. - It must be on the same curve as the ``receiver_key``. - If the ``receiver_key`` is a public key, ``sender_key`` must be a - private key, and vice versa. - - psk: - A Pre-Shared Key (PSK) as a 2-tuple of non-empty - byte strings: the identifier and the actual secret value. - Sender and receiver must use the same PSK (or none). - - The secret value must be at least 32 bytes long, - but it must not be a low-entropy password - (use a KDF like PBKDF2 or scrypt to derive a secret - from a password). - - info: - A non-secret parameter that contributes - to the generation of all session keys. - Sender and receive must use the same **info** parameter (or none). - - Returns: - An object that can be used for - sealing (if ``receiver_key`` is a public key) or - unsealing (if ``receiver_key`` is a private key). - In the latter case, - correctness of all the keys and parameters will only - be assessed with the first call to ``unseal()``. - """ - - if aead_id not in AEAD: - raise ValueError(f"Unknown AEAD cipher ID {aead_id:#x}") - - curve = receiver_key.curve - if curve not in ('NIST P-256', 'NIST P-384', 'NIST P-521', - 'Curve25519', 'Curve448'): - raise ValueError(f"Unsupported curve {curve}") - - if sender_key: - count_private_keys = int(receiver_key.has_private()) + \ - int(sender_key.has_private()) - if count_private_keys != 1: - raise ValueError("Exactly 1 private key required") - if sender_key.curve != curve: - raise ValueError("Sender key uses {} but recipient key {}". - format(sender_key.curve, curve)) - mode = MODE.AUTH if psk is None else MODE.AUTH_PSK - else: - mode = MODE.BASE if psk is None else MODE.PSK - - if psk is None: - psk = b'', b'' - - if info is None: - info = b'' - - return HPKE_Cipher(receiver_key, - enc, - sender_key, - psk, - info, - aead_id, - mode) diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/KDF.py b/venv/Lib/site-packages/Cryptodome/Protocol/KDF.py deleted file mode 100644 index 5f8170a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/KDF.py +++ /dev/null @@ -1,647 +0,0 @@ -# coding=utf-8 -# -# KDF.py : a collection of Key Derivation Functions -# -# Part of the Python Cryptography Toolkit -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
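
Putting the new()/seal()/unseal() API above together, a base-mode round trip might look like the following sketch (assuming a pycryptodome release that ships Cryptodome.Protocol.HPKE):

    from Cryptodome.PublicKey import ECC
    from Cryptodome.Protocol import HPKE

    receiver_key = ECC.generate(curve='NIST P-256')

    # Sender: only the receiver's public key is needed; 'enc' is produced as a side effect
    sender = HPKE.new(receiver_key=receiver_key.public_key(),
                      aead_id=HPKE.AEAD.AES256_GCM)
    ct = sender.seal(b'attack at dawn', auth_data=b'header')

    # Receiver: the private key plus the encapsulated key received from the sender
    receiver = HPKE.new(receiver_key=receiver_key,
                        aead_id=HPKE.AEAD.AES256_GCM,
                        enc=sender.enc)
    assert receiver.unseal(ct, auth_data=b'header') == b'attack at dawn'
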
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -import re -import struct -from functools import reduce - -from Cryptodome.Util.py3compat import (tobytes, bord, _copy_bytes, iter_range, - tostr, bchr, bstr) - -from Cryptodome.Hash import SHA1, SHA256, HMAC, CMAC, BLAKE2s -from Cryptodome.Util.strxor import strxor -from Cryptodome.Random import get_random_bytes -from Cryptodome.Util.number import size as bit_size, long_to_bytes, bytes_to_long - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - create_string_buffer, - get_raw_buffer, c_size_t) - -_raw_salsa20_lib = load_pycryptodome_raw_lib( - "Cryptodome.Cipher._Salsa20", - """ - int Salsa20_8_core(const uint8_t *x, const uint8_t *y, - uint8_t *out); - """) - -_raw_scrypt_lib = load_pycryptodome_raw_lib( - "Cryptodome.Protocol._scrypt", - """ - typedef int (core_t)(const uint8_t [64], const uint8_t [64], uint8_t [64]); - int scryptROMix(const uint8_t *data_in, uint8_t *data_out, - size_t data_len, unsigned N, core_t *core); - """) - - -def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): - """Derive one key from a password (or passphrase). - - This function performs key derivation according to an old version of - the PKCS#5 standard (v1.5) or `RFC2898 - `_. - - Args: - password (string): - The secret password to generate the key from. - salt (byte string): - An 8 byte string to use for better protection from dictionary attacks. - This value does not need to be kept secret, but it should be randomly - chosen for each derivation. - dkLen (integer): - The length of the desired key. The default is 16 bytes, suitable for - instance for :mod:`Cryptodome.Cipher.AES`. - count (integer): - The number of iterations to carry out. The recommendation is 1000 or - more. - hashAlgo (module): - The hash algorithm to use, as a module or an object from the :mod:`Cryptodome.Hash` package. - The digest length must be no shorter than ``dkLen``. - The default algorithm is :mod:`Cryptodome.Hash.SHA1`. - - Return: - A byte string of length ``dkLen`` that can be used as key. - """ - - if not hashAlgo: - hashAlgo = SHA1 - password = tobytes(password) - pHash = hashAlgo.new(password+salt) - digest = pHash.digest_size - if dkLen > digest: - raise TypeError("Selected hash algorithm has a too short digest (%d bytes)." % digest) - if len(salt) != 8: - raise ValueError("Salt is not 8 bytes long (%d bytes instead)." % len(salt)) - for i in iter_range(count-1): - pHash = pHash.new(pHash.digest()) - return pHash.digest()[:dkLen] - - -def PBKDF2(password, salt, dkLen=16, count=1000, prf=None, hmac_hash_module=None): - """Derive one or more keys from a password (or passphrase). - - This function performs key derivation according to the PKCS#5 standard (v2.0). 
- - Args: - password (string or byte string): - The secret password to generate the key from. - - Strings will be encoded as ISO 8859-1 (also known as Latin-1), - which does not allow any characters with codepoints > 255. - salt (string or byte string): - A (byte) string to use for better protection from dictionary attacks. - This value does not need to be kept secret, but it should be randomly - chosen for each derivation. It is recommended to use at least 16 bytes. - - Strings will be encoded as ISO 8859-1 (also known as Latin-1), - which does not allow any characters with codepoints > 255. - dkLen (integer): - The cumulative length of the keys to produce. - - Due to a flaw in the PBKDF2 design, you should not request more bytes - than the ``prf`` can output. For instance, ``dkLen`` should not exceed - 20 bytes in combination with ``HMAC-SHA1``. - count (integer): - The number of iterations to carry out. The higher the value, the slower - and the more secure the function becomes. - - You should find the maximum number of iterations that keeps the - key derivation still acceptable on the slowest hardware you must support. - - Although the default value is 1000, **it is recommended to use at least - 1000000 (1 million) iterations**. - prf (callable): - A pseudorandom function. It must be a function that returns a - pseudorandom byte string from two parameters: a secret and a salt. - The slower the algorithm, the more secure the derivation function. - If not specified, **HMAC-SHA1** is used. - hmac_hash_module (module): - A module from ``Cryptodome.Hash`` implementing a Merkle-Damgard cryptographic - hash, which PBKDF2 must use in combination with HMAC. - This parameter is mutually exclusive with ``prf``. - - Return: - A byte string of length ``dkLen`` that can be used as key material. - If you want multiple keys, just break up this string into segments of the desired length. - """ - - password = tobytes(password) - salt = tobytes(salt) - - if prf and hmac_hash_module: - raise ValueError("'prf' and 'hmac_hash_module' are mutually exlusive") - - if prf is None and hmac_hash_module is None: - hmac_hash_module = SHA1 - - if prf or not hasattr(hmac_hash_module, "_pbkdf2_hmac_assist"): - # Generic (and slow) implementation - - if prf is None: - prf = lambda p, s: HMAC.new(p, s, hmac_hash_module).digest() - - def link(s): - s[0], s[1] = s[1], prf(password, s[1]) - return s[0] - - key = b'' - i = 1 - while len(key) < dkLen: - s = [prf(password, salt + struct.pack(">I", i))] * 2 - key += reduce(strxor, (link(s) for j in range(count))) - i += 1 - - else: - # Optimized implementation - key = b'' - i = 1 - while len(key) < dkLen: - base = HMAC.new(password, b"", hmac_hash_module) - first_digest = base.copy().update(salt + struct.pack(">I", i)).digest() - key += base._pbkdf2_hmac_assist(first_digest, count) - i += 1 - - return key[:dkLen] - - -class _S2V(object): - """String-to-vector PRF as defined in `RFC5297`_. - - This class implements a pseudorandom function family - based on CMAC that takes as input a vector of strings. - - .. _RFC5297: http://tools.ietf.org/html/rfc5297 - """ - - def __init__(self, key, ciphermod, cipher_params=None): - """Initialize the S2V PRF. - - :Parameters: - key : byte string - A secret that can be used as key for CMACs - based on ciphers from ``ciphermod``. - ciphermod : module - A block cipher module from `Cryptodome.Cipher`. - cipher_params : dictionary - A set of extra parameters to use to create a cipher instance. 
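
A typical PBKDF2 call following the recommendations in the docstring above (SHA-256 HMAC, a random 16-byte salt, and an iteration count of at least one million):

    from Cryptodome.Protocol.KDF import PBKDF2
    from Cryptodome.Hash import SHA256
    from Cryptodome.Random import get_random_bytes

    salt = get_random_bytes(16)
    key = PBKDF2('correct horse battery staple', salt,
                 dkLen=32, count=1_000_000, hmac_hash_module=SHA256)
    assert len(key) == 32
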
- """ - - self._key = _copy_bytes(None, None, key) - self._ciphermod = ciphermod - self._last_string = self._cache = b'\x00' * ciphermod.block_size - - # Max number of update() call we can process - self._n_updates = ciphermod.block_size * 8 - 1 - - if cipher_params is None: - self._cipher_params = {} - else: - self._cipher_params = dict(cipher_params) - - @staticmethod - def new(key, ciphermod): - """Create a new S2V PRF. - - :Parameters: - key : byte string - A secret that can be used as key for CMACs - based on ciphers from ``ciphermod``. - ciphermod : module - A block cipher module from `Cryptodome.Cipher`. - """ - return _S2V(key, ciphermod) - - def _double(self, bs): - doubled = bytes_to_long(bs) << 1 - if bord(bs[0]) & 0x80: - doubled ^= 0x87 - return long_to_bytes(doubled, len(bs))[-len(bs):] - - def update(self, item): - """Pass the next component of the vector. - - The maximum number of components you can pass is equal to the block - length of the cipher (in bits) minus 1. - - :Parameters: - item : byte string - The next component of the vector. - :Raise TypeError: when the limit on the number of components has been reached. - """ - - if self._n_updates == 0: - raise TypeError("Too many components passed to S2V") - self._n_updates -= 1 - - mac = CMAC.new(self._key, - msg=self._last_string, - ciphermod=self._ciphermod, - cipher_params=self._cipher_params) - self._cache = strxor(self._double(self._cache), mac.digest()) - self._last_string = _copy_bytes(None, None, item) - - def derive(self): - """"Derive a secret from the vector of components. - - :Return: a byte string, as long as the block length of the cipher. - """ - - if len(self._last_string) >= 16: - # xorend - final = self._last_string[:-16] + strxor(self._last_string[-16:], self._cache) - else: - # zero-pad & xor - padded = (self._last_string + b'\x80' + b'\x00' * 15)[:16] - final = strxor(padded, self._double(self._cache)) - mac = CMAC.new(self._key, - msg=final, - ciphermod=self._ciphermod, - cipher_params=self._cipher_params) - return mac.digest() - - -def _HKDF_extract(salt, ikm, hashmod): - prk = HMAC.new(salt, ikm, digestmod=hashmod).digest() - return prk - - -def _HKDF_expand(prk, info, L, hashmod): - t = [b""] - n = 1 - tlen = 0 - while tlen < L: - hmac = HMAC.new(prk, t[-1] + info + struct.pack('B', n), digestmod=hashmod) - t.append(hmac.digest()) - tlen += hashmod.digest_size - n += 1 - okm = b"".join(t) - return okm[:L] - - -def HKDF(master, key_len, salt, hashmod, num_keys=1, context=None): - """Derive one or more keys from a master secret using - the HMAC-based KDF defined in RFC5869_. - - Args: - master (byte string): - The unguessable value used by the KDF to generate the other keys. - It must be a high-entropy secret, though not necessarily uniform. - It must not be a password. - key_len (integer): - The length in bytes of every derived key. - salt (byte string): - A non-secret, reusable value that strengthens the randomness - extraction step. - Ideally, it is as long as the digest size of the chosen hash. - If empty, a string of zeroes in used. - hashmod (module): - A cryptographic hash algorithm from :mod:`Cryptodome.Hash`. - :mod:`Cryptodome.Hash.SHA512` is a good choice. - num_keys (integer): - The number of keys to derive. Every key is :data:`key_len` bytes long. - The maximum cumulative length of all keys is - 255 times the digest size. - context (byte string): - Optional identifier describing what the keys are used for. - - Return: - A byte string or a tuple of byte strings. - - .. 
_RFC5869: http://tools.ietf.org/html/rfc5869 - """ - - output_len = key_len * num_keys - if output_len > (255 * hashmod.digest_size): - raise ValueError("Too much secret data to derive") - if not salt: - salt = b'\x00' * hashmod.digest_size - if context is None: - context = b"" - - prk = _HKDF_extract(salt, master, hashmod) - okm = _HKDF_expand(prk, context, output_len, hashmod) - - if num_keys == 1: - return okm[:key_len] - kol = [okm[idx:idx + key_len] - for idx in iter_range(0, output_len, key_len)] - return list(kol[:num_keys]) - - -def scrypt(password, salt, key_len, N, r, p, num_keys=1): - """Derive one or more keys from a passphrase. - - Args: - password (string): - The secret pass phrase to generate the keys from. - salt (string): - A string to use for better protection from dictionary attacks. - This value does not need to be kept secret, - but it should be randomly chosen for each derivation. - It is recommended to be at least 16 bytes long. - key_len (integer): - The length in bytes of each derived key. - N (integer): - CPU/Memory cost parameter. It must be a power of 2 and less - than :math:`2^{32}`. - r (integer): - Block size parameter. - p (integer): - Parallelization parameter. - It must be no greater than :math:`(2^{32}-1)/(4r)`. - num_keys (integer): - The number of keys to derive. Every key is :data:`key_len` bytes long. - By default, only 1 key is generated. - The maximum cumulative length of all keys is :math:`(2^{32}-1)*32` - (that is, 128TB). - - A good choice of parameters *(N, r , p)* was suggested - by Colin Percival in his `presentation in 2009`__: - - - *( 2¹⁴, 8, 1 )* for interactive logins (≤100ms) - - *( 2²⁰, 8, 1 )* for file encryption (≤5s) - - Return: - A byte string or a tuple of byte strings. - - .. __: http://www.tarsnap.com/scrypt/scrypt-slides.pdf - """ - - if 2 ** (bit_size(N) - 1) != N: - raise ValueError("N must be a power of 2") - if N >= 2 ** 32: - raise ValueError("N is too big") - if p > ((2 ** 32 - 1) * 32) // (128 * r): - raise ValueError("p or r are too big") - - prf_hmac_sha256 = lambda p, s: HMAC.new(p, s, SHA256).digest() - - stage_1 = PBKDF2(password, salt, p * 128 * r, 1, prf=prf_hmac_sha256) - - scryptROMix = _raw_scrypt_lib.scryptROMix - core = _raw_salsa20_lib.Salsa20_8_core - - # Parallelize into p flows - data_out = [] - for flow in iter_range(p): - idx = flow * 128 * r - buffer_out = create_string_buffer(128 * r) - result = scryptROMix(stage_1[idx: idx + 128 * r], - buffer_out, - c_size_t(128 * r), - N, - core) - if result: - raise ValueError("Error %X while running scrypt" % result) - data_out += [get_raw_buffer(buffer_out)] - - dk = PBKDF2(password, - b"".join(data_out), - key_len * num_keys, 1, - prf=prf_hmac_sha256) - - if num_keys == 1: - return dk - - kol = [dk[idx:idx + key_len] - for idx in iter_range(0, key_len * num_keys, key_len)] - return kol - - -def _bcrypt_encode(data): - s = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - - bits = [] - for c in data: - bits_c = bin(bord(c))[2:].zfill(8) - bits.append(bstr(bits_c)) - bits = b"".join(bits) - - bits6 = [bits[idx:idx+6] for idx in range(0, len(bits), 6)] - - result = [] - for g in bits6[:-1]: - idx = int(g, 2) - result.append(s[idx]) - - g = bits6[-1] - idx = int(g, 2) << (6 - len(g)) - result.append(s[idx]) - result = "".join(result) - - return tobytes(result) - - -def _bcrypt_decode(data): - s = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - - bits = [] - for c in tostr(data): - idx = s.find(c) - bits6 = 
bin(idx)[2:].zfill(6) - bits.append(bits6) - bits = "".join(bits) - - modulo4 = len(data) % 4 - if modulo4 == 1: - raise ValueError("Incorrect length") - elif modulo4 == 2: - bits = bits[:-4] - elif modulo4 == 3: - bits = bits[:-2] - - bits8 = [bits[idx:idx+8] for idx in range(0, len(bits), 8)] - - result = [] - for g in bits8: - result.append(bchr(int(g, 2))) - result = b"".join(result) - - return result - - -def _bcrypt_hash(password, cost, salt, constant, invert): - from Cryptodome.Cipher import _EKSBlowfish - - if len(password) > 72: - raise ValueError("The password is too long. It must be 72 bytes at most.") - - if not (4 <= cost <= 31): - raise ValueError("bcrypt cost factor must be in the range 4..31") - - cipher = _EKSBlowfish.new(password, _EKSBlowfish.MODE_ECB, salt, cost, invert) - ctext = constant - for _ in range(64): - ctext = cipher.encrypt(ctext) - return ctext - - -def bcrypt(password, cost, salt=None): - """Hash a password into a key, using the OpenBSD bcrypt protocol. - - Args: - password (byte string or string): - The secret password or pass phrase. - It must be at most 72 bytes long. - It must not contain the zero byte. - Unicode strings will be encoded as UTF-8. - cost (integer): - The exponential factor that makes it slower to compute the hash. - It must be in the range 4 to 31. - A value of at least 12 is recommended. - salt (byte string): - Optional. Random byte string to thwarts dictionary and rainbow table - attacks. It must be 16 bytes long. - If not passed, a random value is generated. - - Return (byte string): - The bcrypt hash - - Raises: - ValueError: if password is longer than 72 bytes or if it contains the zero byte - - """ - - password = tobytes(password, "utf-8") - - if password.find(bchr(0)[0]) != -1: - raise ValueError("The password contains the zero byte") - - if len(password) < 72: - password += b"\x00" - - if salt is None: - salt = get_random_bytes(16) - if len(salt) != 16: - raise ValueError("bcrypt salt must be 16 bytes long") - - ctext = _bcrypt_hash(password, cost, salt, b"OrpheanBeholderScryDoubt", True) - - cost_enc = b"$" + bstr(str(cost).zfill(2)) - salt_enc = b"$" + _bcrypt_encode(salt) - hash_enc = _bcrypt_encode(ctext[:-1]) # only use 23 bytes, not 24 - return b"$2a" + cost_enc + salt_enc + hash_enc - - -def bcrypt_check(password, bcrypt_hash): - """Verify if the provided password matches the given bcrypt hash. - - Args: - password (byte string or string): - The secret password or pass phrase to test. - It must be at most 72 bytes long. - It must not contain the zero byte. - Unicode strings will be encoded as UTF-8. - bcrypt_hash (byte string, bytearray): - The reference bcrypt hash the password needs to be checked against. 
- - Raises: - ValueError: if the password does not match - """ - - bcrypt_hash = tobytes(bcrypt_hash) - - if len(bcrypt_hash) != 60: - raise ValueError("Incorrect length of the bcrypt hash: %d bytes instead of 60" % len(bcrypt_hash)) - - if bcrypt_hash[:4] != b'$2a$': - raise ValueError("Unsupported prefix") - - p = re.compile(br'\$2a\$([0-9][0-9])\$([A-Za-z0-9./]{22,22})([A-Za-z0-9./]{31,31})') - r = p.match(bcrypt_hash) - if not r: - raise ValueError("Incorrect bcrypt hash format") - - cost = int(r.group(1)) - if not (4 <= cost <= 31): - raise ValueError("Incorrect cost") - - salt = _bcrypt_decode(r.group(2)) - - bcrypt_hash2 = bcrypt(password, cost, salt) - - secret = get_random_bytes(16) - - mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=bcrypt_hash).digest() - mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=bcrypt_hash2).digest() - if mac1 != mac2: - raise ValueError("Incorrect bcrypt hash") - - -def SP800_108_Counter(master, key_len, prf, num_keys=None, label=b'', context=b''): - """Derive one or more keys from a master secret using - a pseudorandom function in Counter Mode, as specified in - `NIST SP 800-108r1 `_. - - Args: - master (byte string): - The secret value used by the KDF to derive the other keys. - It must not be a password. - The length on the secret must be consistent with the input expected by - the :data:`prf` function. - key_len (integer): - The length in bytes of each derived key. - prf (function): - A pseudorandom function that takes two byte strings as parameters: - the secret and an input. It returns another byte string. - num_keys (integer): - The number of keys to derive. Every key is :data:`key_len` bytes long. - By default, only 1 key is derived. - label (byte string): - Optional description of the purpose of the derived keys. - It must not contain zero bytes. - context (byte string): - Optional information pertaining to - the protocol that uses the keys, such as the identity of the - participants, nonces, session IDs, etc. - It must not contain zero bytes. - - Return: - - a byte string (if ``num_keys`` is not specified), or - - a tuple of byte strings (if ``num_key`` is specified). - """ - - if num_keys is None: - num_keys = 1 - - if context.find(b'\x00') != -1: - raise ValueError("Null byte found in context") - - key_len_enc = long_to_bytes(key_len * num_keys * 8, 4) - output_len = key_len * num_keys - - i = 1 - dk = b"" - while len(dk) < output_len: - info = long_to_bytes(i, 4) + label + b'\x00' + context + key_len_enc - dk += prf(master, info) - i += 1 - if i > 0xFFFFFFFF: - raise ValueError("Overflow in SP800 108 counter") - - if num_keys == 1: - return dk[:key_len] - else: - kol = [dk[idx:idx + key_len] - for idx in iter_range(0, output_len, key_len)] - return kol diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/KDF.pyi b/venv/Lib/site-packages/Cryptodome/Protocol/KDF.pyi deleted file mode 100644 index 80691e0..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/KDF.pyi +++ /dev/null @@ -1,44 +0,0 @@ -from types import ModuleType -from typing import Optional, Callable, Tuple, Union, Dict, Any, overload -from typing_extensions import Literal - -Buffer=bytes|bytearray|memoryview - -RNG = Callable[[int], bytes] -PRF = Callable[[bytes, bytes], bytes] - -def PBKDF1(password: str, salt: bytes, dkLen: int, count: Optional[int]=1000, hashAlgo: Optional[ModuleType]=None) -> bytes: ... 
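For orientation while reading the deleted KDF module above: its docstrings describe PBKDF2, HKDF, scrypt and bcrypt in detail, and a minimal usage sketch of those public functions (a hypothetical caller, assuming the pycryptodomex package providing the Cryptodome namespace is installed; all other names below are illustrative) might look like this:

# Hedged sketch only -- exercises the KDF functions documented in the deleted module above.
from Cryptodome.Hash import SHA512
from Cryptodome.Protocol.KDF import PBKDF2, HKDF, scrypt, bcrypt, bcrypt_check
from Cryptodome.Random import get_random_bytes

salt = get_random_bytes(16)

# PBKDF2: the docstring above recommends at least 1,000,000 iterations.
aes_key = PBKDF2(b"correct horse battery staple", salt, dkLen=32,
                 count=1_000_000, hmac_hash_module=SHA512)

# HKDF: derive two 32-byte keys from a high-entropy master secret (not a password).
enc_key, mac_key = HKDF(get_random_bytes(32), 32, salt, SHA512, num_keys=2)

# scrypt with the "interactive login" parameters (N=2**14, r=8, p=1) cited above.
key = scrypt(b"correct horse battery staple", salt, 32, N=2**14, r=8, p=1)

# bcrypt: hash a password, then verify it (bcrypt_check raises ValueError on mismatch).
h = bcrypt(b"hunter2", cost=12)
bcrypt_check(b"hunter2", h)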
-def PBKDF2(password: str, salt: bytes, dkLen: Optional[int]=16, count: Optional[int]=1000, prf: Optional[RNG]=None, hmac_hash_module: Optional[ModuleType]=None) -> bytes: ... - -class _S2V(object): - def __init__(self, key: bytes, ciphermod: ModuleType, cipher_params: Optional[Dict[Any, Any]]=None) -> None: ... - - @staticmethod - def new(key: bytes, ciphermod: ModuleType) -> None: ... - def update(self, item: bytes) -> None: ... - def derive(self) -> bytes: ... - -def _HKDF_extract(salt: Buffer, ikm: Buffer, hashmod: ModuleType) -> bytes: ... -def _HKDF_expand(prk: Buffer, info: Buffer, L: int, hashmod) -> bytes : ... -def HKDF(master: bytes, key_len: int, salt: bytes, hashmod: ModuleType, num_keys: Optional[int]=1, context: Optional[bytes]=None) -> Union[bytes, Tuple[bytes, ...]]: ... - -def scrypt(password: str, salt: str, key_len: int, N: int, r: int, p: int, num_keys: Optional[int]=1) -> Union[bytes, Tuple[bytes, ...]]: ... - -def _bcrypt_decode(data: bytes) -> bytes: ... -def _bcrypt_hash(password:bytes , cost: int, salt: bytes, constant:bytes, invert:bool) -> bytes: ... -def bcrypt(password: Union[bytes, str], cost: int, salt: Optional[bytes]=None) -> bytes: ... -def bcrypt_check(password: Union[bytes, str], bcrypt_hash: Union[bytes, bytearray, str]) -> None: ... - -@overload -def SP800_108_Counter(master: Buffer, - key_len: int, - prf: PRF, - num_keys: Literal[None] = None, - label: Buffer = b'', context: Buffer = b'') -> bytes: ... - -@overload -def SP800_108_Counter(master: Buffer, - key_len: int, - prf: PRF, - num_keys: int, - label: Buffer = b'', context: Buffer = b'') -> Tuple[bytes]: ... diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/SecretSharing.py b/venv/Lib/site-packages/Cryptodome/Protocol/SecretSharing.py deleted file mode 100644 index 1034909..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/SecretSharing.py +++ /dev/null @@ -1,297 +0,0 @@ -# -# SecretSharing.py : distribute a secret amongst a group of participants -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -from Cryptodome.Util.py3compat import is_native_int -from Cryptodome.Util import number -from Cryptodome.Util.number import long_to_bytes, bytes_to_long -from Cryptodome.Random import get_random_bytes as rng - - -def _mult_gf2(f1, f2): - """Multiply two polynomials in GF(2)""" - - # Ensure f2 is the smallest - if f2 > f1: - f1, f2 = f2, f1 - z = 0 - while f2: - if f2 & 1: - z ^= f1 - f1 <<= 1 - f2 >>= 1 - return z - - -def _div_gf2(a, b): - """ - Compute division of polynomials over GF(2). - Given a and b, it finds two polynomials q and r such that: - - a = b*q + r with deg(r)= d: - s = 1 << (deg(r) - d) - q ^= s - r ^= _mult_gf2(b, s) - return (q, r) - - -class _Element(object): - """Element of GF(2^128) field""" - - # The irreducible polynomial defining - # this field is 1 + x + x^2 + x^7 + x^128 - irr_poly = 1 + 2 + 4 + 128 + 2 ** 128 - - def __init__(self, encoded_value): - """Initialize the element to a certain value. - - The value passed as parameter is internally encoded as - a 128-bit integer, where each bit represents a polynomial - coefficient. The LSB is the constant coefficient. - """ - - if is_native_int(encoded_value): - self._value = encoded_value - elif len(encoded_value) == 16: - self._value = bytes_to_long(encoded_value) - else: - raise ValueError("The encoded value must be an integer or a 16 byte string") - - def __eq__(self, other): - return self._value == other._value - - def __int__(self): - """Return the field element, encoded as a 128-bit integer.""" - return self._value - - def encode(self): - """Return the field element, encoded as a 16 byte string.""" - return long_to_bytes(self._value, 16) - - def __mul__(self, factor): - - f1 = self._value - f2 = factor._value - - # Make sure that f2 is the smallest, to speed up the loop - if f2 > f1: - f1, f2 = f2, f1 - - if self.irr_poly in (f1, f2): - return _Element(0) - - mask1 = 2 ** 128 - v, z = f1, 0 - while f2: - # if f2 ^ 1: z ^= v - mask2 = int(bin(f2 & 1)[2:] * 128, base=2) - z = (mask2 & (z ^ v)) | ((mask1 - mask2 - 1) & z) - v <<= 1 - # if v & mask1: v ^= self.irr_poly - mask3 = int(bin((v >> 128) & 1)[2:] * 128, base=2) - v = (mask3 & (v ^ self.irr_poly)) | ((mask1 - mask3 - 1) & v) - f2 >>= 1 - return _Element(z) - - def __add__(self, term): - return _Element(self._value ^ term._value) - - def inverse(self): - """Return the inverse of this element in GF(2^128).""" - - # We use the Extended GCD algorithm - # http://en.wikipedia.org/wiki/Polynomial_greatest_common_divisor - - if self._value == 0: - raise ValueError("Inversion of zero") - - r0, r1 = self._value, self.irr_poly - s0, s1 = 1, 0 - while r1 > 0: - q = _div_gf2(r0, r1)[0] - r0, r1 = r1, r0 ^ _mult_gf2(q, r1) - s0, s1 = s1, s0 ^ _mult_gf2(q, s1) - return _Element(s0) - - def __pow__(self, exponent): - result = _Element(self._value) - for _ in range(exponent - 1): - result = result * self - return result - - -class Shamir(object): - """Shamir's secret sharing scheme. - - A secret is split into ``n`` shares, and it is sufficient to collect - ``k`` of them to reconstruct the secret. - """ - - @staticmethod - def split(k, n, secret, ssss=False): - """Split a secret into ``n`` shares. - - The secret can be reconstructed later using just ``k`` shares - out of the original ``n``. - Each share must be kept confidential to the person it was - assigned to. - - Each share is associated to an index (starting from 1). 
- - Args: - k (integer): - The number of shares needed to reconstruct the secret. - n (integer): - The number of shares to create (at least ``k``). - secret (byte string): - A byte string of 16 bytes (e.g. an AES 128 key). - ssss (bool): - If ``True``, the shares can be used with the ``ssss`` utility - (without using the "diffusion layer"). - Default: ``False``. - - Return (tuples): - ``n`` tuples, one per participant. - A tuple contains two items: - - 1. the unique index (an integer) - 2. the share (16 bytes) - """ - - # - # We create a polynomial with random coefficients in GF(2^128): - # - # p(x) = c_0 + \sum_{i=1}^{k-1} c_i * x^i - # - # c_0 is the secret. - # - - coeffs = [_Element(rng(16)) for i in range(k - 1)] - coeffs.append(_Element(secret)) - - # Each share is y_i = p(x_i) where x_i - # is the index assigned to the share. - - def make_share(user, coeffs, ssss): - idx = _Element(user) - - # Horner's method - share = _Element(0) - for coeff in coeffs: - share = idx * share + coeff - - # The ssss utility actually uses: - # - # p(x) = c_0 + \sum_{i=1}^{k-1} c_i * x^i + x^k - # - if ssss: - share += _Element(user) ** len(coeffs) - - return share.encode() - - return [(i, make_share(i, coeffs, ssss)) for i in range(1, n + 1)] - - @staticmethod - def combine(shares, ssss=False): - """Recombine a secret, if enough shares are presented. - - Args: - shares (tuples): - The *k* tuples, each containing the index (an integer) and - the share (a byte string, 16 bytes long) that were assigned to - a participant. - - .. note:: - - Pass exactly as many share as they are required, - and no more. - - ssss (bool): - If ``True``, the shares were produced by the ``ssss`` utility - (without using the "diffusion layer"). - Default: ``False``. - - Return: - The original secret, as a byte string (16 bytes long). - """ - - # - # Given k points (x,y), the interpolation polynomial of degree k-1 is: - # - # L(x) = \sum_{j=0}^{k-1} y_i * l_j(x) - # - # where: - # - # l_j(x) = \prod_{ \overset{0 \le m \le k-1}{m \ne j} } - # \frac{x - x_m}{x_j - x_m} - # - # However, in this case we are purely interested in the constant - # coefficient of L(x). - # - - k = len(shares) - - gf_shares = [] - for x in shares: - idx = _Element(x[0]) - value = _Element(x[1]) - if any(y[0] == idx for y in gf_shares): - raise ValueError("Duplicate share") - if ssss: - value += idx ** k - gf_shares.append((idx, value)) - - result = _Element(0) - for j in range(k): - x_j, y_j = gf_shares[j] - - numerator = _Element(1) - denominator = _Element(1) - - for m in range(k): - x_m = gf_shares[m][0] - if m != j: - numerator *= x_m - denominator *= x_j + x_m - result += y_j * numerator * denominator.inverse() - - return result.encode() diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/SecretSharing.pyi b/venv/Lib/site-packages/Cryptodome/Protocol/SecretSharing.pyi deleted file mode 100644 index 5952c99..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/SecretSharing.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Union, List, Tuple, Optional - -def _mult_gf2(f1: int, f2: int) -> int : ... -def _div_gf2(a: int, b: int) -> int : ... - -class _Element(object): - irr_poly: int - def __init__(self, encoded_value: Union[int, bytes]) -> None: ... - def __eq__(self, other) -> bool: ... - def __int__(self) -> int: ... - def encode(self) -> bytes: ... - def __mul__(self, factor: int) -> _Element: ... - def __add__(self, term: _Element) -> _Element: ... - def inverse(self) -> _Element: ... - def __pow__(self, exponent) -> _Element: ... 
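The deleted SecretSharing module above documents Shamir's scheme via Shamir.split() and Shamir.combine(); a minimal usage sketch (again assuming pycryptodomex is installed, with a 2-of-5 split of a 16-byte key as an illustrative example) could be:

# Hedged sketch only -- Shamir secret sharing as documented in the deleted module above.
from Cryptodome.Protocol.SecretSharing import Shamir
from Cryptodome.Random import get_random_bytes

secret = get_random_bytes(16)             # e.g. an AES-128 key
shares = Shamir.split(2, 5, secret)       # five (index, 16-byte share) tuples

# Any two shares reconstruct the secret; pass exactly k shares, no more.
recovered = Shamir.combine(shares[:2])
assert recovered == secret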
- -class Shamir(object): - @staticmethod - def split(k: int, n: int, secret: bytes, ssss: Optional[bool]) -> List[Tuple[int, bytes]]: ... - @staticmethod - def combine(shares: List[Tuple[int, bytes]], ssss: Optional[bool]) -> bytes: ... - diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__init__.py b/venv/Lib/site-packages/Cryptodome/Protocol/__init__.py deleted file mode 100644 index 76e22bf..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -__all__ = ['KDF', 'SecretSharing', 'DH'] diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__init__.pyi b/venv/Lib/site-packages/Cryptodome/Protocol/__init__.pyi deleted file mode 100644 index 377ed90..0000000 --- a/venv/Lib/site-packages/Cryptodome/Protocol/__init__.pyi +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['KDF.pyi', 'SecretSharing.pyi'] diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/DH.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/DH.cpython-312.pyc deleted file mode 100644 index 66bbf99..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/DH.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/HPKE.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/HPKE.cpython-312.pyc deleted file mode 100644 index c12a870..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/HPKE.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/KDF.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/KDF.cpython-312.pyc deleted file mode 100644 index c3dddcc..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/KDF.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/SecretSharing.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/SecretSharing.cpython-312.pyc deleted file mode 100644 index 69eb129..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/SecretSharing.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4ed0053..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Protocol/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Protocol/_scrypt.pyd b/venv/Lib/site-packages/Cryptodome/Protocol/_scrypt.pyd deleted file mode 100644 index 0353374..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Protocol/_scrypt.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/DSA.py b/venv/Lib/site-packages/Cryptodome/PublicKey/DSA.py deleted file mode 100644 index dddd304..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/DSA.py +++ /dev/null @@ -1,682 +0,0 @@ -# -*- coding: utf-8 -*- -# -# PublicKey/DSA.py : DSA signature primitive -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__all__ = ['generate', 'construct', 'DsaKey', 'import_key' ] - -import binascii -import struct -import itertools - -from Cryptodome.Util.py3compat import bchr, bord, tobytes, tostr, iter_range - -from Cryptodome import Random -from Cryptodome.IO import PKCS8, PEM -from Cryptodome.Hash import SHA256 -from Cryptodome.Util.asn1 import ( - DerObject, DerSequence, - DerInteger, DerObjectId, - DerBitString, - ) - -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Math.Primality import (test_probable_prime, COMPOSITE, - PROBABLY_PRIME) - -from Cryptodome.PublicKey import (_expand_subject_public_key_info, - _create_subject_public_key_info, - _extract_subject_public_key_info) - -# ; The following ASN.1 types are relevant for DSA -# -# SubjectPublicKeyInfo ::= SEQUENCE { -# algorithm AlgorithmIdentifier, -# subjectPublicKey BIT STRING -# } -# -# id-dsa ID ::= { iso(1) member-body(2) us(840) x9-57(10040) x9cm(4) 1 } -# -# ; See RFC3279 -# Dss-Parms ::= SEQUENCE { -# p INTEGER, -# q INTEGER, -# g INTEGER -# } -# -# DSAPublicKey ::= INTEGER -# -# DSSPrivatKey_OpenSSL ::= SEQUENCE -# version INTEGER, -# p INTEGER, -# q INTEGER, -# g INTEGER, -# y INTEGER, -# x INTEGER -# } -# - -class DsaKey(object): - r"""Class defining an actual DSA key. - Do not instantiate directly. - Use :func:`generate`, :func:`construct` or :func:`import_key` instead. - - :ivar p: DSA modulus - :vartype p: integer - - :ivar q: Order of the subgroup - :vartype q: integer - - :ivar g: Generator - :vartype g: integer - - :ivar y: Public key - :vartype y: integer - - :ivar x: Private key - :vartype x: integer - - :undocumented: exportKey, publickey - """ - - _keydata = ['y', 'g', 'p', 'q', 'x'] - - def __init__(self, key_dict): - input_set = set(key_dict.keys()) - public_set = set(('y' , 'g', 'p', 'q')) - if not public_set.issubset(input_set): - raise ValueError("Some DSA components are missing = %s" % - str(public_set - input_set)) - extra_set = input_set - public_set - if extra_set and extra_set != set(('x',)): - raise ValueError("Unknown DSA components = %s" % - str(extra_set - set(('x',)))) - self._key = dict(key_dict) - - def _sign(self, m, k): - if not self.has_private(): - raise TypeError("DSA public key cannot be used for signing") - if not (1 < k < self.q): - raise ValueError("k is not between 2 and q-1") - - x, q, p, g = [self._key[comp] for comp in ['x', 'q', 'p', 'g']] - - blind_factor = Integer.random_range(min_inclusive=1, - max_exclusive=q) - inv_blind_k = (blind_factor * k).inverse(q) - blind_x = x * blind_factor - - r = pow(g, k, p) % q # r = (g**k mod p) mod q - s = (inv_blind_k * (blind_factor * m + blind_x * r)) % q - return map(int, (r, s)) - - def _verify(self, m, sig): - r, s = sig - y, q, p, g = [self._key[comp] for comp in ['y', 'q', 'p', 'g']] - if not (0 < r < q) or not (0 < s < q): - return False - w = Integer(s).inverse(q) - u1 = (w * m) % q - u2 = (w * r) % q - v = (pow(g, u1, p) * pow(y, u2, p) % p) % q - return v == r - - def has_private(self): - """Whether this is a DSA private key""" - - return 'x' in self._key - - def can_encrypt(self): # legacy - return False - - def can_sign(self): # legacy - return True - - def public_key(self): - """A matching 
DSA public key. - - Returns: - a new :class:`DsaKey` object - """ - - public_components = dict((k, self._key[k]) for k in ('y', 'g', 'p', 'q')) - return DsaKey(public_components) - - def __eq__(self, other): - if bool(self.has_private()) != bool(other.has_private()): - return False - - result = True - for comp in self._keydata: - result = result and (getattr(self._key, comp, None) == - getattr(other._key, comp, None)) - return result - - def __ne__(self, other): - return not self.__eq__(other) - - def __getstate__(self): - # DSA key is not pickable - from pickle import PicklingError - raise PicklingError - - def domain(self): - """The DSA domain parameters. - - Returns - tuple : (p,q,g) - """ - - return [int(self._key[comp]) for comp in ('p', 'q', 'g')] - - def __repr__(self): - attrs = [] - for k in self._keydata: - if k == 'p': - bits = Integer(self.p).size_in_bits() - attrs.append("p(%d)" % (bits,)) - elif hasattr(self, k): - attrs.append(k) - if self.has_private(): - attrs.append("private") - # PY3K: This is meant to be text, do not change to bytes (data) - return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) - - def __getattr__(self, item): - try: - return int(self._key[item]) - except KeyError: - raise AttributeError(item) - - def export_key(self, format='PEM', pkcs8=None, passphrase=None, - protection=None, randfunc=None): - """Export this DSA key. - - Args: - format (string): - The encoding for the output: - - - *'PEM'* (default). ASCII as per `RFC1421`_/ `RFC1423`_. - - *'DER'*. Binary ASN.1 encoding. - - *'OpenSSH'*. ASCII one-liner as per `RFC4253`_. - Only suitable for public keys, not for private keys. - - passphrase (string): - *Private keys only*. The pass phrase to protect the output. - - pkcs8 (boolean): - *Private keys only*. If ``True`` (default), the key is encoded - with `PKCS#8`_. If ``False``, it is encoded in the custom - OpenSSL/OpenSSH container. - - protection (string): - *Only in combination with a pass phrase*. - The encryption scheme to use to protect the output. - - If :data:`pkcs8` takes value ``True``, this is the PKCS#8 - algorithm to use for deriving the secret and encrypting - the private DSA key. - For a complete list of algorithms, see :mod:`Cryptodome.IO.PKCS8`. - The default is *PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC*. - - If :data:`pkcs8` is ``False``, the obsolete PEM encryption scheme is - used. It is based on MD5 for key derivation, and Triple DES for - encryption. Parameter :data:`protection` is then ignored. - - The combination ``format='DER'`` and ``pkcs8=False`` is not allowed - if a passphrase is present. - - randfunc (callable): - A function that returns random bytes. - By default it is :func:`Cryptodome.Random.get_random_bytes`. - - Returns: - byte string : the encoded key - - Raises: - ValueError : when the format is unknown or when you try to encrypt a private - key with *DER* format and OpenSSL/OpenSSH. - - .. warning:: - If you don't provide a pass phrase, the private key will be - exported in the clear! - - .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt - .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt - .. _RFC4253: http://www.ietf.org/rfc/rfc4253.txt - .. 
_`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt - """ - - if passphrase is not None: - passphrase = tobytes(passphrase) - - if randfunc is None: - randfunc = Random.get_random_bytes - - if format == 'OpenSSH': - tup1 = [self._key[x].to_bytes() for x in ('p', 'q', 'g', 'y')] - - def func(x): - if (bord(x[0]) & 0x80): - return bchr(0) + x - else: - return x - - tup2 = [func(x) for x in tup1] - keyparts = [b'ssh-dss'] + tup2 - keystring = b''.join( - [struct.pack(">I", len(kp)) + kp for kp in keyparts] - ) - return b'ssh-dss ' + binascii.b2a_base64(keystring)[:-1] - - # DER format is always used, even in case of PEM, which simply - # encodes it into BASE64. - params = DerSequence([self.p, self.q, self.g]) - if self.has_private(): - if pkcs8 is None: - pkcs8 = True - if pkcs8: - if not protection: - protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' - private_key = DerInteger(self.x).encode() - binary_key = PKCS8.wrap( - private_key, oid, passphrase, - protection, key_params=params, - randfunc=randfunc - ) - if passphrase: - key_type = 'ENCRYPTED PRIVATE' - else: - key_type = 'PRIVATE' - passphrase = None - else: - if format != 'PEM' and passphrase: - raise ValueError("DSA private key cannot be encrypted") - ints = [0, self.p, self.q, self.g, self.y, self.x] - binary_key = DerSequence(ints).encode() - key_type = "DSA PRIVATE" - else: - if pkcs8: - raise ValueError("PKCS#8 is only meaningful for private keys") - - binary_key = _create_subject_public_key_info(oid, - DerInteger(self.y), params) - key_type = "PUBLIC" - - if format == 'DER': - return binary_key - if format == 'PEM': - pem_str = PEM.encode( - binary_key, key_type + " KEY", - passphrase, randfunc - ) - return tobytes(pem_str) - raise ValueError("Unknown key format '%s'. Cannot export the DSA key." 
% format) - - # Backward-compatibility - exportKey = export_key - publickey = public_key - - # Methods defined in PyCryptodome that we don't support anymore - - def sign(self, M, K): - raise NotImplementedError("Use module Cryptodome.Signature.DSS instead") - - def verify(self, M, signature): - raise NotImplementedError("Use module Cryptodome.Signature.DSS instead") - - def encrypt(self, plaintext, K): - raise NotImplementedError - - def decrypt(self, ciphertext): - raise NotImplementedError - - def blind(self, M, B): - raise NotImplementedError - - def unblind(self, M, B): - raise NotImplementedError - - def size(self): - raise NotImplementedError - - -def _generate_domain(L, randfunc): - """Generate a new set of DSA domain parameters""" - - N = { 1024:160, 2048:224, 3072:256 }.get(L) - if N is None: - raise ValueError("Invalid modulus length (%d)" % L) - - outlen = SHA256.digest_size * 8 - n = (L + outlen - 1) // outlen - 1 # ceil(L/outlen) -1 - b_ = L - 1 - (n * outlen) - - # Generate q (A.1.1.2) - q = Integer(4) - upper_bit = 1 << (N - 1) - while test_probable_prime(q, randfunc) != PROBABLY_PRIME: - seed = randfunc(64) - U = Integer.from_bytes(SHA256.new(seed).digest()) & (upper_bit - 1) - q = U | upper_bit | 1 - - assert(q.size_in_bits() == N) - - # Generate p (A.1.1.2) - offset = 1 - upper_bit = 1 << (L - 1) - while True: - V = [ SHA256.new(seed + Integer(offset + j).to_bytes()).digest() - for j in iter_range(n + 1) ] - V = [ Integer.from_bytes(v) for v in V ] - W = sum([V[i] * (1 << (i * outlen)) for i in iter_range(n)], - (V[n] & ((1 << b_) - 1)) * (1 << (n * outlen))) - - X = Integer(W + upper_bit) # 2^{L-1} < X < 2^{L} - assert(X.size_in_bits() == L) - - c = X % (q * 2) - p = X - (c - 1) # 2q divides (p-1) - if p.size_in_bits() == L and \ - test_probable_prime(p, randfunc) == PROBABLY_PRIME: - break - offset += n + 1 - - # Generate g (A.2.3, index=1) - e = (p - 1) // q - for count in itertools.count(1): - U = seed + b"ggen" + bchr(1) + Integer(count).to_bytes() - W = Integer.from_bytes(SHA256.new(U).digest()) - g = pow(W, e, p) - if g != 1: - break - - return (p, q, g, seed) - - -def generate(bits, randfunc=None, domain=None): - """Generate a new DSA key pair. - - The algorithm follows Appendix A.1/A.2 and B.1 of `FIPS 186-4`_, - respectively for domain generation and key pair generation. - - Args: - bits (integer): - Key length, or size (in bits) of the DSA modulus *p*. - It must be 1024, 2048 or 3072. - - randfunc (callable): - Random number generation function; it accepts a single integer N - and return a string of random data N bytes long. - If not specified, :func:`Cryptodome.Random.get_random_bytes` is used. - - domain (tuple): - The DSA domain parameters *p*, *q* and *g* as a list of 3 - integers. Size of *p* and *q* must comply to `FIPS 186-4`_. - If not specified, the parameters are created anew. - - Returns: - :class:`DsaKey` : a new DSA key object - - Raises: - ValueError : when **bits** is too little, too big, or not a multiple of 64. - - .. 
_FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf - """ - - if randfunc is None: - randfunc = Random.get_random_bytes - - if domain: - p, q, g = map(Integer, domain) - - ## Perform consistency check on domain parameters - # P and Q must be prime - fmt_error = test_probable_prime(p) == COMPOSITE - fmt_error |= test_probable_prime(q) == COMPOSITE - # Verify Lagrange's theorem for sub-group - fmt_error |= ((p - 1) % q) != 0 - fmt_error |= g <= 1 or g >= p - fmt_error |= pow(g, q, p) != 1 - if fmt_error: - raise ValueError("Invalid DSA domain parameters") - else: - p, q, g, _ = _generate_domain(bits, randfunc) - - L = p.size_in_bits() - N = q.size_in_bits() - - if L != bits: - raise ValueError("Mismatch between size of modulus (%d)" - " and 'bits' parameter (%d)" % (L, bits)) - - if (L, N) not in [(1024, 160), (2048, 224), - (2048, 256), (3072, 256)]: - raise ValueError("Lengths of p and q (%d, %d) are not compatible" - "to FIPS 186-3" % (L, N)) - - if not 1 < g < p: - raise ValueError("Incorrent DSA generator") - - # B.1.1 - c = Integer.random(exact_bits=N + 64, randfunc=randfunc) - x = c % (q - 1) + 1 # 1 <= x <= q-1 - y = pow(g, x, p) - - key_dict = { 'y':y, 'g':g, 'p':p, 'q':q, 'x':x } - return DsaKey(key_dict) - - -def construct(tup, consistency_check=True): - """Construct a DSA key from a tuple of valid DSA components. - - Args: - tup (tuple): - A tuple of long integers, with 4 or 5 items - in the following order: - - 1. Public key (*y*). - 2. Sub-group generator (*g*). - 3. Modulus, finite field order (*p*). - 4. Sub-group order (*q*). - 5. Private key (*x*). Optional. - - consistency_check (boolean): - If ``True``, the library will verify that the provided components - fulfil the main DSA properties. - - Raises: - ValueError: when the key being imported fails the most basic DSA validity checks. 
- - Returns: - :class:`DsaKey` : a DSA key object - """ - - key_dict = dict(zip(('y', 'g', 'p', 'q', 'x'), map(Integer, tup))) - key = DsaKey(key_dict) - - fmt_error = False - if consistency_check: - # P and Q must be prime - fmt_error = test_probable_prime(key.p) == COMPOSITE - fmt_error |= test_probable_prime(key.q) == COMPOSITE - # Verify Lagrange's theorem for sub-group - fmt_error |= ((key.p - 1) % key.q) != 0 - fmt_error |= key.g <= 1 or key.g >= key.p - fmt_error |= pow(key.g, key.q, key.p) != 1 - # Public key - fmt_error |= key.y <= 0 or key.y >= key.p - if hasattr(key, 'x'): - fmt_error |= key.x <= 0 or key.x >= key.q - fmt_error |= pow(key.g, key.x, key.p) != key.y - - if fmt_error: - raise ValueError("Invalid DSA key components") - - return key - - -# Dss-Parms ::= SEQUENCE { -# p OCTET STRING, -# q OCTET STRING, -# g OCTET STRING -# } -# DSAPublicKey ::= INTEGER -- public key, y - -def _import_openssl_private(encoded, passphrase, params): - if params: - raise ValueError("DSA private key already comes with parameters") - der = DerSequence().decode(encoded, nr_elements=6, only_ints_expected=True) - if der[0] != 0: - raise ValueError("No version found") - tup = [der[comp] for comp in (4, 3, 1, 2, 5)] - return construct(tup) - - -def _import_subjectPublicKeyInfo(encoded, passphrase, params): - - algoid, encoded_key, emb_params = _expand_subject_public_key_info(encoded) - if algoid != oid: - raise ValueError("No DSA subjectPublicKeyInfo") - if params and emb_params: - raise ValueError("Too many DSA parameters") - - y = DerInteger().decode(encoded_key).value - p, q, g = list(DerSequence().decode(params or emb_params)) - tup = (y, g, p, q) - return construct(tup) - - -def _import_x509_cert(encoded, passphrase, params): - - sp_info = _extract_subject_public_key_info(encoded) - return _import_subjectPublicKeyInfo(sp_info, None, params) - - -def _import_pkcs8(encoded, passphrase, params): - if params: - raise ValueError("PKCS#8 already includes parameters") - k = PKCS8.unwrap(encoded, passphrase) - if k[0] != oid: - raise ValueError("No PKCS#8 encoded DSA key") - x = DerInteger().decode(k[1]).value - p, q, g = list(DerSequence().decode(k[2])) - tup = (pow(g, x, p), g, p, q, x) - return construct(tup) - - -def _import_key_der(key_data, passphrase, params): - """Import a DSA key (public or private half), encoded in DER form.""" - - decodings = (_import_openssl_private, - _import_subjectPublicKeyInfo, - _import_x509_cert, - _import_pkcs8) - - for decoding in decodings: - try: - return decoding(key_data, passphrase, params) - except ValueError: - pass - - raise ValueError("DSA key format is not supported") - - -def import_key(extern_key, passphrase=None): - """Import a DSA key. - - Args: - extern_key (string or byte string): - The DSA key to import. - - The following formats are supported for a DSA **public** key: - - - X.509 certificate (binary DER or PEM) - - X.509 ``subjectPublicKeyInfo`` (binary DER or PEM) - - OpenSSH (ASCII one-liner, see `RFC4253`_) - - The following formats are supported for a DSA **private** key: - - - `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo`` - DER SEQUENCE (binary or PEM) - - OpenSSL/OpenSSH custom format (binary or PEM) - - For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. - - passphrase (string): - In case of an encrypted private key, this is the pass phrase - from which the decryption key is derived. - - Encryption may be applied either at the `PKCS#8`_ or at the PEM level. 
- - Returns: - :class:`DsaKey` : a DSA key object - - Raises: - ValueError : when the given key cannot be parsed (possibly because - the pass phrase is wrong). - - .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt - .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt - .. _RFC4253: http://www.ietf.org/rfc/rfc4253.txt - .. _PKCS#8: http://www.ietf.org/rfc/rfc5208.txt - """ - - extern_key = tobytes(extern_key) - if passphrase is not None: - passphrase = tobytes(passphrase) - - if extern_key.startswith(b'-----'): - # This is probably a PEM encoded key - (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase) - if enc_flag: - passphrase = None - return _import_key_der(der, passphrase, None) - - if extern_key.startswith(b'ssh-dss '): - # This is probably a public OpenSSH key - keystring = binascii.a2b_base64(extern_key.split(b' ')[1]) - keyparts = [] - while len(keystring) > 4: - length = struct.unpack(">I", keystring[:4])[0] - keyparts.append(keystring[4:4 + length]) - keystring = keystring[4 + length:] - if keyparts[0] == b"ssh-dss": - tup = [Integer.from_bytes(keyparts[x]) for x in (4, 3, 1, 2)] - return construct(tup) - - if len(extern_key) > 0 and bord(extern_key[0]) == 0x30: - # This is probably a DER encoded key - return _import_key_der(extern_key, passphrase, None) - - raise ValueError("DSA key format is not supported") - - -# Backward compatibility -importKey = import_key - -#: `Object ID`_ for a DSA key. -#: -#: id-dsa ID ::= { iso(1) member-body(2) us(840) x9-57(10040) x9cm(4) 1 } -#: -#: .. _`Object ID`: http://www.alvestrand.no/objectid/1.2.840.10040.4.1.html -oid = "1.2.840.10040.4.1" diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/DSA.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/DSA.pyi deleted file mode 100644 index 354ac1f..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/DSA.pyi +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Dict, Tuple, Callable, Union, Optional - -__all__ = ['generate', 'construct', 'DsaKey', 'import_key' ] - -RNG = Callable[[int], bytes] - -class DsaKey(object): - def __init__(self, key_dict: Dict[str, int]) -> None: ... - def has_private(self) -> bool: ... - def can_encrypt(self) -> bool: ... # legacy - def can_sign(self) -> bool: ... # legacy - def public_key(self) -> DsaKey: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - def __getstate__(self) -> None: ... - def domain(self) -> Tuple[int, int, int]: ... - def __repr__(self) -> str: ... - def __getattr__(self, item: str) -> int: ... - def export_key(self, format: Optional[str]="PEM", pkcs8: Optional[bool]=None, passphrase: Optional[str]=None, - protection: Optional[str]=None, randfunc: Optional[RNG]=None) -> bytes: ... - # Backward-compatibility - exportKey = export_key - publickey = public_key - -def generate(bits: int, randfunc: Optional[RNG]=None, domain: Optional[Tuple[int, int, int]]=None) -> DsaKey: ... -def construct(tup: Union[Tuple[int, int, int, int], Tuple[int, int, int, int, int]], consistency_check: Optional[bool]=True) -> DsaKey: ... -def import_key(extern_key: Union[str, bytes], passphrase: Optional[str]=None) -> DsaKey: ... 
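The deleted DSA module above covers generate(), construct(), import_key() and DsaKey.export_key(), and its stubbed sign()/verify() methods point callers to Cryptodome.Signature.DSS. A minimal end-to-end sketch under those assumptions (pycryptodomex installed; the passphrase and message are illustrative) might be:

# Hedged sketch only -- DSA key handling as documented in the deleted module above.
from Cryptodome.PublicKey import DSA
from Cryptodome.Signature import DSS
from Cryptodome.Hash import SHA256

key = DSA.generate(2048)

# Export the private half as an encrypted PKCS#8 PEM and the public half separately.
private_pem = key.export_key(format='PEM', pkcs8=True,
                             passphrase='example-passphrase')
public_pem = key.public_key().export_key(format='PEM')

# Re-import, then sign and verify with DSS in FIPS 186-3 mode.
imported = DSA.import_key(private_pem, passphrase='example-passphrase')
h = SHA256.new(b"message to sign")
signature = DSS.new(imported, 'fips-186-3').sign(h)
DSS.new(DSA.import_key(public_pem), 'fips-186-3').verify(h, signature)  # raises ValueError on mismatch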
-# Backward compatibility -importKey = import_key - -oid: str diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/ECC.py b/venv/Lib/site-packages/Cryptodome/PublicKey/ECC.py deleted file mode 100644 index bd9c8cb..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/ECC.py +++ /dev/null @@ -1,1342 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from __future__ import print_function - -import re -import struct -import binascii - -from Cryptodome.Util.py3compat import bord, tobytes, tostr, bchr, is_string - -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Util.asn1 import (DerObjectId, DerOctetString, DerSequence, - DerBitString) - -from Cryptodome.PublicKey import (_expand_subject_public_key_info, - _create_subject_public_key_info, - _extract_subject_public_key_info) - -from Cryptodome.Hash import SHA512, SHAKE256 - -from Cryptodome.Random import get_random_bytes - -from ._point import EccPoint, EccXPoint, _curves -from ._point import CurveID as _CurveID - - -class UnsupportedEccFeature(ValueError): - pass - - -class EccKey(object): - r"""Class defining an ECC key. - Do not instantiate directly. - Use :func:`generate`, :func:`construct` or :func:`import_key` instead. - - :ivar curve: The **canonical** name of the curve as defined in the `ECC table`_. - :vartype curve: string - - :ivar pointQ: an ECC point representing the public component. - :vartype pointQ: :class:`EccPoint` or :class:`EccXPoint` - - :ivar d: A scalar that represents the private component - in NIST P curves. It is smaller than the - order of the generator point. - :vartype d: integer - - :ivar seed: A seed that represents the private component - in Ed22519 (32 bytes), Curve25519 (32 bytes), - Curve448 (56 bytes), Ed448 (57 bytes). - :vartype seed: bytes - """ - - def __init__(self, **kwargs): - """Create a new ECC key - - Keywords: - curve : string - The name of the curve. - d : integer - Mandatory for a private key one NIST P curves. - It must be in the range ``[1..order-1]``. 
- seed : bytes - Mandatory for a private key on Ed25519 (32 bytes), - Curve25519 (32 bytes), Curve448 (56 bytes) or Ed448 (57 bytes). - point : EccPoint or EccXPoint - Mandatory for a public key. If provided for a private key, - the implementation will NOT check whether it matches ``d``. - - Only one parameter among ``d``, ``seed`` or ``point`` may be used. - """ - - kwargs_ = dict(kwargs) - curve_name = kwargs_.pop("curve", None) - self._d = kwargs_.pop("d", None) - self._seed = kwargs_.pop("seed", None) - self._point = kwargs_.pop("point", None) - if curve_name is None and self._point: - curve_name = self._point.curve - if kwargs_: - raise TypeError("Unknown parameters: " + str(kwargs_)) - - if curve_name not in _curves: - raise ValueError("Unsupported curve (%s)" % curve_name) - self._curve = _curves[curve_name] - self.curve = self._curve.canonical - - count = int(self._d is not None) + int(self._seed is not None) - - if count == 0: - if self._point is None: - raise ValueError("At lest one between parameters 'point', 'd' or 'seed' must be specified") - return - - if count == 2: - raise ValueError("Parameters d and seed are mutually exclusive") - - # NIST P curves work with d, EdDSA works with seed - - # RFC 8032, 5.1.5 - if self._curve.id == _CurveID.ED25519: - if self._d is not None: - raise ValueError("Parameter d can only be used with NIST P curves") - if len(self._seed) != 32: - raise ValueError("Parameter seed must be 32 bytes long for Ed25519") - seed_hash = SHA512.new(self._seed).digest() # h - self._prefix = seed_hash[32:] - tmp = bytearray(seed_hash[:32]) - tmp[0] &= 0xF8 - tmp[31] = (tmp[31] & 0x7F) | 0x40 - self._d = Integer.from_bytes(tmp, byteorder='little') - # RFC 8032, 5.2.5 - elif self._curve.id == _CurveID.ED448: - if self._d is not None: - raise ValueError("Parameter d can only be used with NIST P curves") - if len(self._seed) != 57: - raise ValueError("Parameter seed must be 57 bytes long for Ed448") - seed_hash = SHAKE256.new(self._seed).read(114) # h - self._prefix = seed_hash[57:] - tmp = bytearray(seed_hash[:57]) - tmp[0] &= 0xFC - tmp[55] |= 0x80 - tmp[56] = 0 - self._d = Integer.from_bytes(tmp, byteorder='little') - # RFC 7748, 5 - elif self._curve.id == _CurveID.CURVE25519: - if self._d is not None: - raise ValueError("Parameter d can only be used with NIST P curves") - if len(self._seed) != 32: - raise ValueError("Parameter seed must be 32 bytes long for Curve25519") - tmp = bytearray(self._seed) - tmp[0] &= 0xF8 - tmp[31] = (tmp[31] & 0x7F) | 0x40 - self._d = Integer.from_bytes(tmp, byteorder='little') - elif self._curve.id == _CurveID.CURVE448: - if self._d is not None: - raise ValueError("Parameter d can only be used with NIST P curves") - if len(self._seed) != 56: - raise ValueError("Parameter seed must be 56 bytes long for Curve448") - tmp = bytearray(self._seed) - tmp[0] &= 0xFC - tmp[55] |= 0x80 - self._d = Integer.from_bytes(tmp, byteorder='little') - - else: - if self._seed is not None: - raise ValueError("Parameter 'seed' cannot be used with NIST P-curves") - self._d = Integer(self._d) - if not 1 <= self._d < self._curve.order: - raise ValueError("Parameter d must be an integer smaller than the curve order") - - def __eq__(self, other): - if not isinstance(other, EccKey): - return False - - if other.has_private() != self.has_private(): - return False - - return other.pointQ == self.pointQ - - def __repr__(self): - if self.has_private(): - if self._curve.is_edwards: - extra = ", seed=%s" % tostr(binascii.hexlify(self._seed)) - else: - extra = ", 
d=%d" % int(self._d) - else: - extra = "" - if self._curve.id in (_CurveID.CURVE25519, - _CurveID.CURVE448): - x = self.pointQ.x - result = "EccKey(curve='%s', point_x=%d%s)" % (self._curve.canonical, x, extra) - else: - x, y = self.pointQ.xy - result = "EccKey(curve='%s', point_x=%d, point_y=%d%s)" % (self._curve.canonical, x, y, extra) - return result - - def has_private(self): - """``True`` if this key can be used for making signatures or decrypting data.""" - - return self._d is not None - - # ECDSA - def _sign(self, z, k): - assert 0 < k < self._curve.order - - order = self._curve.order - blind = Integer.random_range(min_inclusive=1, - max_exclusive=order) - - blind_d = self._d * blind - inv_blind_k = (blind * k).inverse(order) - - r = (self._curve.G * k).x % order - s = inv_blind_k * (blind * z + blind_d * r) % order - return (r, s) - - # ECDSA - def _verify(self, z, rs): - order = self._curve.order - sinv = rs[1].inverse(order) - point1 = self._curve.G * ((sinv * z) % order) - point2 = self.pointQ * ((sinv * rs[0]) % order) - return (point1 + point2).x == rs[0] - - @property - def d(self): - if not self.has_private(): - raise ValueError("This is not a private ECC key") - return self._d - - @property - def seed(self): - if not self.has_private(): - raise ValueError("This is not a private ECC key") - return self._seed - - @property - def pointQ(self): - if self._point is None: - self._point = self._curve.G * self._d - return self._point - - def public_key(self): - """A matching ECC public key. - - Returns: - a new :class:`EccKey` object - """ - - return EccKey(curve=self._curve.canonical, point=self.pointQ) - - def _export_SEC1(self, compress): - if not self._curve.is_weierstrass: - raise ValueError("SEC1 format is only supported for NIST P curves") - - # See 2.2 in RFC5480 and 2.3.3 in SEC1 - # - # The first byte is: - # - 0x02: compressed, only X-coordinate, Y-coordinate is even - # - 0x03: compressed, only X-coordinate, Y-coordinate is odd - # - 0x04: uncompressed, X-coordinate is followed by Y-coordinate - # - # PAI is in theory encoded as 0x00. 
- - modulus_bytes = self.pointQ.size_in_bytes() - - if compress: - if self.pointQ.y.is_odd(): - first_byte = b'\x03' - else: - first_byte = b'\x02' - public_key = (first_byte + - self.pointQ.x.to_bytes(modulus_bytes)) - else: - public_key = (b'\x04' + - self.pointQ.x.to_bytes(modulus_bytes) + - self.pointQ.y.to_bytes(modulus_bytes)) - return public_key - - def _export_eddsa_public(self): - x, y = self.pointQ.xy - if self._curve.id == _CurveID.ED25519: - result = bytearray(y.to_bytes(32, byteorder='little')) - result[31] = ((x & 1) << 7) | result[31] - elif self._curve.id == _CurveID.ED448: - result = bytearray(y.to_bytes(57, byteorder='little')) - result[56] = (x & 1) << 7 - else: - raise ValueError("Not an EdDSA key to export") - return bytes(result) - - def _export_montgomery_public(self): - if not self._curve.is_montgomery: - raise ValueError("Not a Montgomery key to export") - x = self.pointQ.x - field_size = self.pointQ.size_in_bytes() - result = bytearray(x.to_bytes(field_size, byteorder='little')) - return bytes(result) - - def _export_subjectPublicKeyInfo(self, compress): - if self._curve.is_edwards: - oid = self._curve.oid - public_key = self._export_eddsa_public() - params = None - elif self._curve.is_montgomery: - oid = self._curve.oid - public_key = self._export_montgomery_public() - params = None - else: - oid = "1.2.840.10045.2.1" # unrestricted - public_key = self._export_SEC1(compress) - params = DerObjectId(self._curve.oid) - - return _create_subject_public_key_info(oid, - public_key, - params) - - def _export_rfc5915_private_der(self, include_ec_params=True): - - assert self.has_private() - - # ECPrivateKey ::= SEQUENCE { - # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), - # privateKey OCTET STRING, - # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, - # publicKey [1] BIT STRING OPTIONAL - # } - - # Public key - uncompressed form - modulus_bytes = self.pointQ.size_in_bytes() - public_key = (b'\x04' + - self.pointQ.x.to_bytes(modulus_bytes) + - self.pointQ.y.to_bytes(modulus_bytes)) - - seq = [1, - DerOctetString(self.d.to_bytes(modulus_bytes)), - DerObjectId(self._curve.oid, explicit=0), - DerBitString(public_key, explicit=1)] - - if not include_ec_params: - del seq[2] - - return DerSequence(seq).encode() - - def _export_pkcs8(self, **kwargs): - from Cryptodome.IO import PKCS8 - - if kwargs.get('passphrase', None) is not None and 'protection' not in kwargs: - raise ValueError("At least the 'protection' parameter must be present") - - if self._seed is not None: - oid = self._curve.oid - private_key = DerOctetString(self._seed).encode() - params = None - else: - oid = "1.2.840.10045.2.1" # unrestricted - private_key = self._export_rfc5915_private_der(include_ec_params=False) - params = DerObjectId(self._curve.oid) - - result = PKCS8.wrap(private_key, - oid, - key_params=params, - **kwargs) - return result - - def _export_public_pem(self, compress): - from Cryptodome.IO import PEM - - encoded_der = self._export_subjectPublicKeyInfo(compress) - return PEM.encode(encoded_der, "PUBLIC KEY") - - def _export_private_pem(self, passphrase, **kwargs): - from Cryptodome.IO import PEM - - encoded_der = self._export_rfc5915_private_der() - return PEM.encode(encoded_der, "EC PRIVATE KEY", passphrase, **kwargs) - - def _export_private_clear_pkcs8_in_clear_pem(self): - from Cryptodome.IO import PEM - - encoded_der = self._export_pkcs8() - return PEM.encode(encoded_der, "PRIVATE KEY") - - def _export_private_encrypted_pkcs8_in_clear_pem(self, passphrase, **kwargs): - from 
Cryptodome.IO import PEM - - assert passphrase - if 'protection' not in kwargs: - raise ValueError("At least the 'protection' parameter should be present") - encoded_der = self._export_pkcs8(passphrase=passphrase, **kwargs) - return PEM.encode(encoded_der, "ENCRYPTED PRIVATE KEY") - - def _export_openssh(self, compress): - if self.has_private(): - raise ValueError("Cannot export OpenSSH private keys") - - desc = self._curve.openssh - - if desc is None: - raise ValueError("Cannot export %s keys as OpenSSH" % self.curve) - elif desc == "ssh-ed25519": - public_key = self._export_eddsa_public() - comps = (tobytes(desc), tobytes(public_key)) - else: - modulus_bytes = self.pointQ.size_in_bytes() - - if compress: - first_byte = 2 + self.pointQ.y.is_odd() - public_key = (bchr(first_byte) + - self.pointQ.x.to_bytes(modulus_bytes)) - else: - public_key = (b'\x04' + - self.pointQ.x.to_bytes(modulus_bytes) + - self.pointQ.y.to_bytes(modulus_bytes)) - - middle = desc.split("-")[2] - comps = (tobytes(desc), tobytes(middle), public_key) - - blob = b"".join([struct.pack(">I", len(x)) + x for x in comps]) - return desc + " " + tostr(binascii.b2a_base64(blob)) - - def export_key(self, **kwargs): - """Export this ECC key. - - Args: - format (string): - The output format: - - - ``'DER'``. The key will be encoded in ASN.1 DER format (binary). - For a public key, the ASN.1 ``subjectPublicKeyInfo`` structure - defined in `RFC5480`_ will be used. - For a private key, the ASN.1 ``ECPrivateKey`` structure defined - in `RFC5915`_ is used instead (possibly within a PKCS#8 envelope, - see the ``use_pkcs8`` flag below). - - ``'PEM'``. The key will be encoded in a PEM_ envelope (ASCII). - - ``'OpenSSH'``. The key will be encoded in the OpenSSH_ format - (ASCII, public keys only). - - ``'SEC1'``. The public key (i.e., the EC point) will be encoded - into ``bytes`` according to Section 2.3.3 of `SEC1`_ - (which is a subset of the older X9.62 ITU standard). - Only for NIST P-curves. - - ``'raw'``. The public key will be encoded as ``bytes``, - without any metadata. - - * For NIST P-curves: equivalent to ``'SEC1'``. - * For Ed25519 and Ed448: ``bytes`` in the format - defined in `RFC8032`_. - * For Curve25519 and Curve448: ``bytes`` in the format - defined in `RFC7748`_. - - passphrase (bytes or string): - (*Private keys only*) The passphrase to protect the - private key. - - use_pkcs8 (boolean): - (*Private keys only*) - If ``True`` (default and recommended), the `PKCS#8`_ representation - will be used. - It must be ``True`` for Ed25519, Ed448, Curve25519, and Curve448. - - If ``False`` and a passphrase is present, the obsolete PEM - encryption will be used. - - protection (string): - When a private key is exported with password-protection - and PKCS#8 (both ``DER`` and ``PEM`` formats), this parameter MUST be - present, - For all possible protection schemes, - refer to :ref:`the encryption parameters of PKCS#8`. - It is recommended to use ``'PBKDF2WithHMAC-SHA512AndAES128-CBC'``. - - compress (boolean): - If ``True``, the method returns a more compact representation - of the public key, with the X-coordinate only. - - If ``False`` (default), the method returns the full public key. - - This parameter is ignored for Ed25519/Ed448/Curve25519/Curve448, - as compression is mandatory. - - prot_params (dict): - When a private key is exported with password-protection - and PKCS#8 (both ``DER`` and ``PEM`` formats), this dictionary - contains the parameters to use to derive the encryption key - from the passphrase. 
- For all possible values, - refer to :ref:`the encryption parameters of PKCS#8`. - The recommendation is to use ``{'iteration_count':21000}`` for PBKDF2, - and ``{'iteration_count':131072}`` for scrypt. - - .. warning:: - If you don't provide a passphrase, the private key will be - exported in the clear! - - .. note:: - When exporting a private key with password-protection and `PKCS#8`_ - (both ``DER`` and ``PEM`` formats), any extra parameters - to ``export_key()`` will be passed to :mod:`Cryptodome.IO.PKCS8`. - - .. _PEM: http://www.ietf.org/rfc/rfc1421.txt - .. _`PEM encryption`: http://www.ietf.org/rfc/rfc1423.txt - .. _OpenSSH: http://www.openssh.com/txt/rfc5656.txt - .. _RFC5480: https://tools.ietf.org/html/rfc5480 - .. _SEC1: https://www.secg.org/sec1-v2.pdf - .. _RFC7748: https://tools.ietf.org/html/rfc7748 - - Returns: - A multi-line string (for ``'PEM'`` and ``'OpenSSH'``) or - ``bytes`` (for ``'DER'``, ``'SEC1'``, and ``'raw'``) with the encoded key. - """ - - args = kwargs.copy() - ext_format = args.pop("format") - if ext_format not in ("PEM", "DER", "OpenSSH", "SEC1", "raw"): - raise ValueError("Unknown format '%s'" % ext_format) - - compress = args.pop("compress", False) - - if self.has_private(): - passphrase = args.pop("passphrase", None) - if is_string(passphrase): - passphrase = tobytes(passphrase) - if not passphrase: - raise ValueError("Empty passphrase") - - use_pkcs8 = args.pop("use_pkcs8", True) - if use_pkcs8 is False: - if self._curve.is_edwards: - raise ValueError("'pkcs8' must be True for EdDSA curves") - if self._curve.is_montgomery: - raise ValueError("'pkcs8' must be True for Curve25519") - if 'protection' in args: - raise ValueError("'protection' is only supported for PKCS#8") - - if ext_format == "PEM": - if use_pkcs8: - if passphrase: - return self._export_private_encrypted_pkcs8_in_clear_pem(passphrase, **args) - else: - return self._export_private_clear_pkcs8_in_clear_pem() - else: - return self._export_private_pem(passphrase, **args) - elif ext_format == "DER": - # DER - if passphrase and not use_pkcs8: - raise ValueError("Private keys can only be encrpyted with DER using PKCS#8") - if use_pkcs8: - return self._export_pkcs8(passphrase=passphrase, **args) - else: - return self._export_rfc5915_private_der() - else: - raise ValueError("Private keys cannot be exported " - "in the '%s' format" % ext_format) - else: # Public key - if args: - raise ValueError("Unexpected parameters: '%s'" % args) - if ext_format == "PEM": - return self._export_public_pem(compress) - elif ext_format == "DER": - return self._export_subjectPublicKeyInfo(compress) - elif ext_format == "SEC1": - return self._export_SEC1(compress) - elif ext_format == "raw": - if self._curve.is_edwards: - return self._export_eddsa_public() - elif self._curve.is_montgomery: - return self._export_montgomery_public() - else: - return self._export_SEC1(compress) - else: - return self._export_openssh(compress) - - -def generate(**kwargs): - """Generate a new private key on the given curve. - - Args: - - curve (string): - Mandatory. It must be a curve name defined in the `ECC table`_. - - randfunc (callable): - Optional. The RNG to read randomness from. - If ``None``, :func:`Cryptodome.Random.get_random_bytes` is used. 
- """ - - curve_name = kwargs.pop("curve") - curve = _curves[curve_name] - randfunc = kwargs.pop("randfunc", get_random_bytes) - if kwargs: - raise TypeError("Unknown parameters: " + str(kwargs)) - - if _curves[curve_name].id == _CurveID.ED25519: - seed = randfunc(32) - new_key = EccKey(curve=curve_name, seed=seed) - elif _curves[curve_name].id == _CurveID.ED448: - seed = randfunc(57) - new_key = EccKey(curve=curve_name, seed=seed) - elif _curves[curve_name].id == _CurveID.CURVE25519: - seed = randfunc(32) - new_key = EccKey(curve=curve_name, seed=seed) - _curves[curve_name].validate(new_key.pointQ) - elif _curves[curve_name].id == _CurveID.CURVE448: - seed = randfunc(56) - new_key = EccKey(curve=curve_name, seed=seed) - _curves[curve_name].validate(new_key.pointQ) - else: - d = Integer.random_range(min_inclusive=1, - max_exclusive=curve.order, - randfunc=randfunc) - new_key = EccKey(curve=curve_name, d=d) - - return new_key - - -def construct(**kwargs): - """Build a new ECC key (private or public) starting - from some base components. - - In most cases, you will already have an existing key - which you can read in with :func:`import_key` instead - of this function. - - Args: - curve (string): - Mandatory. The name of the elliptic curve, as defined in the `ECC table`_. - - d (integer): - Mandatory for a private key and a NIST P-curve (e.g., P-256). - It must be an integer in the range ``[1..order-1]``. - - seed (bytes): - Mandatory for a private key and curves Ed25519 (32 bytes), - Curve25519 (32 bytes), Curve448 (56 bytes) and Ed448 (57 bytes). - - point_x (integer): - The X coordinate (affine) of the ECC point. - Mandatory for a public key. - - point_y (integer): - The Y coordinate (affine) of the ECC point. - Mandatory for a public key, - except for Curve25519 and Curve448. 
- - Returns: - :class:`EccKey` : a new ECC key object - """ - - curve_name = kwargs["curve"] - curve = _curves[curve_name] - point_x = kwargs.pop("point_x", None) - point_y = kwargs.pop("point_y", None) - - if "point" in kwargs: - raise TypeError("Unknown keyword: point") - - if curve.id == _CurveID.CURVE25519: - - if point_x is not None: - kwargs["point"] = EccXPoint(point_x, curve_name) - new_key = EccKey(**kwargs) - curve.validate(new_key.pointQ) - - elif curve.id == _CurveID.CURVE448: - - if point_x is not None: - kwargs["point"] = EccXPoint(point_x, curve_name) - new_key = EccKey(**kwargs) - curve.validate(new_key.pointQ) - - else: - - if None not in (point_x, point_y): - kwargs["point"] = EccPoint(point_x, point_y, curve_name) - new_key = EccKey(**kwargs) - - # Validate that the private key matches the public one - # because EccKey will not do that automatically - if new_key.has_private() and 'point' in kwargs: - pub_key = curve.G * new_key.d - if pub_key.xy != (point_x, point_y): - raise ValueError("Private and public ECC keys do not match") - - return new_key - - -def _import_public_der(ec_point, curve_oid=None, curve_name=None): - """Convert an encoded EC point into an EccKey object - - ec_point: byte string with the EC point (SEC1-encoded) - curve_oid: string with the name the curve - curve_name: string with the OID of the curve - - Either curve_id or curve_name must be specified - - """ - - for _curve_name, curve in _curves.items(): - if curve_oid and curve.oid == curve_oid: - break - if curve_name == _curve_name: - break - else: - if curve_oid: - raise UnsupportedEccFeature("Unsupported ECC curve (OID: %s)" % curve_oid) - else: - raise UnsupportedEccFeature("Unsupported ECC curve (%s)" % curve_name) - - # See 2.2 in RFC5480 and 2.3.3 in SEC1 - # The first byte is: - # - 0x02: compressed, only X-coordinate, Y-coordinate is even - # - 0x03: compressed, only X-coordinate, Y-coordinate is odd - # - 0x04: uncompressed, X-coordinate is followed by Y-coordinate - # - # PAI is in theory encoded as 0x00. 
- - modulus_bytes = curve.p.size_in_bytes() - point_type = bord(ec_point[0]) - - # Uncompressed point - if point_type == 0x04: - if len(ec_point) != (1 + 2 * modulus_bytes): - raise ValueError("Incorrect EC point length") - x = Integer.from_bytes(ec_point[1:modulus_bytes+1]) - y = Integer.from_bytes(ec_point[modulus_bytes+1:]) - # Compressed point - elif point_type in (0x02, 0x03): - if len(ec_point) != (1 + modulus_bytes): - raise ValueError("Incorrect EC point length") - x = Integer.from_bytes(ec_point[1:]) - # Right now, we only support Short Weierstrass curves - y = (x**3 - x*3 + curve.b).sqrt(curve.p) - if point_type == 0x02 and y.is_odd(): - y = curve.p - y - if point_type == 0x03 and y.is_even(): - y = curve.p - y - else: - raise ValueError("Incorrect EC point encoding") - - return construct(curve=_curve_name, point_x=x, point_y=y) - - -def _import_subjectPublicKeyInfo(encoded, *kwargs): - """Convert a subjectPublicKeyInfo into an EccKey object""" - - # See RFC5480 - - # Parse the generic subjectPublicKeyInfo structure - oid, ec_point, params = _expand_subject_public_key_info(encoded) - - nist_p_oids = ( - "1.2.840.10045.2.1", # id-ecPublicKey (unrestricted) - "1.3.132.1.12", # id-ecDH - "1.3.132.1.13" # id-ecMQV - ) - eddsa_oids = { - "1.3.101.112": ("Ed25519", _import_ed25519_public_key), # id-Ed25519 - "1.3.101.113": ("Ed448", _import_ed448_public_key) # id-Ed448 - } - xdh_oids = { - "1.3.101.110": ("Curve25519", _import_curve25519_public_key), # id-X25519 - "1.3.101.111": ("Curve448", _import_curve448_public_key), # id-X448 - } - - if oid in nist_p_oids: - # See RFC5480 - - # Parameters are mandatory and encoded as ECParameters - # ECParameters ::= CHOICE { - # namedCurve OBJECT IDENTIFIER - # -- implicitCurve NULL - # -- specifiedCurve SpecifiedECDomain - # } - # implicitCurve and specifiedCurve are not supported (as per RFC) - if not params: - raise ValueError("Missing ECC parameters for ECC OID %s" % oid) - try: - curve_oid = DerObjectId().decode(params).value - except ValueError: - raise ValueError("Error decoding namedCurve") - - # ECPoint ::= OCTET STRING - return _import_public_der(ec_point, curve_oid=curve_oid) - - elif oid in eddsa_oids: - # See RFC8410 - curve_name, import_eddsa_public_key = eddsa_oids[oid] - - # Parameters must be absent - if params: - raise ValueError("Unexpected ECC parameters for ECC OID %s" % oid) - - x, y = import_eddsa_public_key(ec_point) - return construct(point_x=x, point_y=y, curve=curve_name) - - elif oid in xdh_oids: - curve_name, import_xdh_public_key = xdh_oids[oid] - - # Parameters must be absent - if params: - raise ValueError("Unexpected ECC parameters for ECC OID %s" % oid) - - x = import_xdh_public_key(ec_point) - return construct(point_x=x, curve=curve_name) - - else: - raise UnsupportedEccFeature("Unsupported ECC OID: %s" % oid) - - -def _import_rfc5915_der(encoded, passphrase, curve_oid=None): - - # See RFC5915 https://tools.ietf.org/html/rfc5915 - # - # ECPrivateKey ::= SEQUENCE { - # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), - # privateKey OCTET STRING, - # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, - # publicKey [1] BIT STRING OPTIONAL - # } - - ec_private_key = DerSequence().decode(encoded, nr_elements=(2, 3, 4)) - if ec_private_key[0] != 1: - raise ValueError("Incorrect ECC private key version") - - scalar_bytes = DerOctetString().decode(ec_private_key[1]).payload - - next_element = 2 - - # Try to decode 'parameters' - if next_element < len(ec_private_key): - try: - parameters = 
DerObjectId(explicit=0).decode(ec_private_key[next_element]).value - if curve_oid is not None and parameters != curve_oid: - raise ValueError("Curve mismatch") - curve_oid = parameters - next_element += 1 - except ValueError: - pass - - if curve_oid is None: - raise ValueError("No curve found") - - for curve_name, curve in _curves.items(): - if curve.oid == curve_oid: - break - else: - raise UnsupportedEccFeature("Unsupported ECC curve (OID: %s)" % curve_oid) - - modulus_bytes = curve.p.size_in_bytes() - if len(scalar_bytes) != modulus_bytes: - raise ValueError("Private key is too small") - - # Try to decode 'publicKey' - point_x = point_y = None - if next_element < len(ec_private_key): - try: - public_key_enc = DerBitString(explicit=1).decode(ec_private_key[next_element]).value - public_key = _import_public_der(public_key_enc, curve_oid=curve_oid) - point_x = public_key.pointQ.x - point_y = public_key.pointQ.y - next_element += 1 - except ValueError: - pass - - d = Integer.from_bytes(scalar_bytes) - return construct(curve=curve_name, d=d, point_x=point_x, point_y=point_y) - - -def _import_pkcs8(encoded, passphrase): - from Cryptodome.IO import PKCS8 - - algo_oid, private_key, params = PKCS8.unwrap(encoded, passphrase) - - nist_p_oids = ( - "1.2.840.10045.2.1", # id-ecPublicKey (unrestricted) - "1.3.132.1.12", # id-ecDH - "1.3.132.1.13" # id-ecMQV - ) - eddsa_oids = { - "1.3.101.112": "Ed25519", # id-Ed25519 - "1.3.101.113": "Ed448", # id-Ed448 - } - xdh_oids = { - "1.3.101.110": "Curve25519", # id-X25519 - "1.3.101.111": "Curve448", # id-X448 - } - - if algo_oid in nist_p_oids: - curve_oid = DerObjectId().decode(params).value - return _import_rfc5915_der(private_key, passphrase, curve_oid) - elif algo_oid in eddsa_oids: - if params is not None: - raise ValueError("EdDSA ECC private key must not have parameters") - curve_oid = None - seed = DerOctetString().decode(private_key).payload - return construct(curve=eddsa_oids[algo_oid], seed=seed) - elif algo_oid in xdh_oids: - curve_name = xdh_oids[algo_oid] - if params is not None: - raise ValueError("%s ECC private key must not have parameters" % - curve_name) - curve_oid = None - seed = DerOctetString().decode(private_key).payload - return construct(curve=xdh_oids[algo_oid], seed=seed) - else: - raise UnsupportedEccFeature("Unsupported ECC purpose (OID: %s)" % algo_oid) - - -def _import_x509_cert(encoded, *kwargs): - - sp_info = _extract_subject_public_key_info(encoded) - return _import_subjectPublicKeyInfo(sp_info) - - -def _import_der(encoded, passphrase): - - try: - return _import_subjectPublicKeyInfo(encoded, passphrase) - except UnsupportedEccFeature as err: - raise err - except (ValueError, TypeError, IndexError): - pass - - try: - return _import_x509_cert(encoded, passphrase) - except UnsupportedEccFeature as err: - raise err - except (ValueError, TypeError, IndexError): - pass - - try: - return _import_rfc5915_der(encoded, passphrase) - except UnsupportedEccFeature as err: - raise err - except (ValueError, TypeError, IndexError): - pass - - try: - return _import_pkcs8(encoded, passphrase) - except UnsupportedEccFeature as err: - raise err - except (ValueError, TypeError, IndexError): - pass - - raise ValueError("Not an ECC DER key") - - -def _import_openssh_public(encoded): - parts = encoded.split(b' ') - if len(parts) not in (2, 3): - raise ValueError("Not an openssh public key") - - try: - keystring = binascii.a2b_base64(parts[1]) - - keyparts = [] - while len(keystring) > 4: - lk = struct.unpack(">I", keystring[:4])[0] - 
keyparts.append(keystring[4:4 + lk]) - keystring = keystring[4 + lk:] - - if parts[0] != keyparts[0]: - raise ValueError("Mismatch in openssh public key") - - # NIST P curves - if parts[0].startswith(b"ecdsa-sha2-"): - - for curve_name, curve in _curves.items(): - if curve.openssh is None: - continue - if not curve.openssh.startswith("ecdsa-sha2"): - continue - middle = tobytes(curve.openssh.split("-")[2]) - if keyparts[1] == middle: - break - else: - raise ValueError("Unsupported ECC curve: " + middle) - - ecc_key = _import_public_der(keyparts[2], curve_oid=curve.oid) - - # EdDSA - elif parts[0] == b"ssh-ed25519": - x, y = _import_ed25519_public_key(keyparts[1]) - ecc_key = construct(curve="Ed25519", point_x=x, point_y=y) - else: - raise ValueError("Unsupported SSH key type: " + parts[0]) - - except (IndexError, TypeError, binascii.Error): - raise ValueError("Error parsing SSH key type: " + parts[0]) - - return ecc_key - - -def _import_openssh_private_ecc(data, password): - - from ._openssh import (import_openssh_private_generic, - read_bytes, read_string, check_padding) - - key_type, decrypted = import_openssh_private_generic(data, password) - - eddsa_keys = { - "ssh-ed25519": ("Ed25519", _import_ed25519_public_key, 32), - } - - # https://datatracker.ietf.org/doc/html/draft-miller-ssh-agent-04 - if key_type.startswith("ecdsa-sha2"): - - ecdsa_curve_name, decrypted = read_string(decrypted) - if ecdsa_curve_name not in _curves: - raise UnsupportedEccFeature("Unsupported ECC curve %s" % ecdsa_curve_name) - curve = _curves[ecdsa_curve_name] - modulus_bytes = (curve.modulus_bits + 7) // 8 - - public_key, decrypted = read_bytes(decrypted) - - if bord(public_key[0]) != 4: - raise ValueError("Only uncompressed OpenSSH EC keys are supported") - if len(public_key) != 2 * modulus_bytes + 1: - raise ValueError("Incorrect public key length") - - point_x = Integer.from_bytes(public_key[1:1+modulus_bytes]) - point_y = Integer.from_bytes(public_key[1+modulus_bytes:]) - - private_key, decrypted = read_bytes(decrypted) - d = Integer.from_bytes(private_key) - - params = {'d': d, 'curve': ecdsa_curve_name} - - elif key_type in eddsa_keys: - - curve_name, import_eddsa_public_key, seed_len = eddsa_keys[key_type] - - public_key, decrypted = read_bytes(decrypted) - point_x, point_y = import_eddsa_public_key(public_key) - - private_public_key, decrypted = read_bytes(decrypted) - seed = private_public_key[:seed_len] - - params = {'seed': seed, 'curve': curve_name} - else: - raise ValueError("Unsupport SSH agent key type:" + key_type) - - _, padded = read_string(decrypted) # Comment - check_padding(padded) - - return construct(point_x=point_x, point_y=point_y, **params) - - -def _import_ed25519_public_key(encoded): - """Import an Ed25519 ECC public key, encoded as raw bytes as described - in RFC8032_. - - Args: - encoded (bytes): - The Ed25519 public key to import. It must be 32 bytes long. - - Returns: - x and y (integer) - - Raises: - ValueError: when the given key cannot be parsed. - - .. _RFC8032: https://datatracker.ietf.org/doc/html/rfc8032 - """ - - if len(encoded) != 32: - raise ValueError("Incorrect length. 
Only Ed25519 public keys are supported.") - - p = Integer(0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed) # 2**255 - 19 - d = 37095705934669439343138083508754565189542113879843219016388785533085940283555 - - y = bytearray(encoded) - x_lsb = y[31] >> 7 - y[31] &= 0x7F - point_y = Integer.from_bytes(y, byteorder='little') - if point_y >= p: - raise ValueError("Invalid Ed25519 key (y)") - if point_y == 1: - return 0, 1 - - u = (point_y**2 - 1) % p - v = ((point_y**2 % p) * d + 1) % p - try: - v_inv = v.inverse(p) - x2 = (u * v_inv) % p - point_x = Integer._tonelli_shanks(x2, p) - if (point_x & 1) != x_lsb: - point_x = p - point_x - except ValueError: - raise ValueError("Invalid Ed25519 public key") - return point_x, point_y - - -def _import_curve25519_public_key(encoded): - """Import a Curve25519 ECC public key, - encoded as raw bytes as described in RFC7748_. - - Args: - encoded (bytes): - The Curve25519 public key to import. It must be 32 bytes long. - - Returns: - x (integer) - - Raises: - ValueError: when the given key cannot be parsed. - - .. _RFC7748: https://datatracker.ietf.org/doc/html/rfc7748 - """ - - if len(encoded) != 32: - raise ValueError("Incorrect Curve25519 key length") - - x = bytearray(encoded) - # RFC 7741, Section 5 - x[31] &= 0x7F - point_x = Integer.from_bytes(x, byteorder='little') - - return point_x - - -def _import_curve448_public_key(encoded): - """Import a Curve448 ECC public key, - encoded as raw bytes as described in RFC7748_. - - Args: - encoded (bytes): - The Curve448 public key to import. It must be 56 bytes long. - - Returns: - x (integer) - - Raises: - ValueError: when the given key cannot be parsed. - - .. _RFC7748: https://datatracker.ietf.org/doc/html/rfc7748 - """ - - if len(encoded) != 56: - raise ValueError("Incorrect Curve448 key length") - - point_x = Integer.from_bytes(encoded, byteorder='little') - - return point_x - - -def _import_ed448_public_key(encoded): - """Import an Ed448 ECC public key, encoded as raw bytes as described - in RFC8032_. - - Args: - encoded (bytes): - The Ed448 public key to import. It must be 57 bytes long. - - Returns: - x and y (integer) - - Raises: - ValueError: when the given key cannot be parsed. - - .. _RFC8032: https://datatracker.ietf.org/doc/html/rfc8032 - """ - - if len(encoded) != 57: - raise ValueError("Incorrect length. Only Ed448 public keys are supported.") - - p = _curves['curve448'].p - d = p - 39081 - - y = encoded[:56] - x_lsb = bord(encoded[56]) >> 7 - point_y = Integer.from_bytes(y, byteorder='little') - if point_y >= p: - raise ValueError("Invalid Ed448 key (y)") - if point_y == 1: - return 0, 1 - - u = (point_y**2 - 1) % p - v = ((point_y**2 % p) * d - 1) % p - try: - v_inv = v.inverse(p) - x2 = (u * v_inv) % p - point_x = Integer._tonelli_shanks(x2, p) - if (point_x & 1) != x_lsb: - point_x = p - point_x - except ValueError: - raise ValueError("Invalid Ed448 public key") - return point_x, point_y - - -def import_key(encoded, passphrase=None, curve_name=None): - """Import an ECC key (public or private). - - Args: - encoded (bytes or multi-line string): - The ECC key to import. - The function will try to automatically detect the right format. - - Supported formats for an ECC **public** key: - - * X.509 certificate: binary (DER) or ASCII (PEM). - * X.509 ``subjectPublicKeyInfo``: binary (DER) or ASCII (PEM). - * SEC1_ (or X9.62), as ``bytes``. NIST P curves only. 
- You must also provide the ``curve_name`` (with a value from the `ECC table`_) - * OpenSSH line, defined in RFC5656_ and RFC8709_ (ASCII). - This is normally the content of files like ``~/.ssh/id_ecdsa.pub``. - - Supported formats for an ECC **private** key: - - * A binary ``ECPrivateKey`` structure, as defined in `RFC5915`_ (DER). - NIST P curves only. - * A `PKCS#8`_ structure (or the more recent Asymmetric Key - Package, RFC5958_): binary (DER) or ASCII (PEM). - * `OpenSSH 6.5`_ and newer versions (ASCII). - - Private keys can be in the clear or password-protected. - - For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. - - passphrase (byte string): - The passphrase to use for decrypting a private key. - Encryption may be applied protected at the PEM level (not recommended) - or at the PKCS#8 level (recommended). - This parameter is ignored if the key in input is not encrypted. - - curve_name (string): - For a SEC1 encoding only. This is the name of the curve, - as defined in the `ECC table`_. - - .. note:: - - To import EdDSA private and public keys, when encoded as raw ``bytes``, use: - - * :func:`Cryptodome.Signature.eddsa.import_public_key`, or - * :func:`Cryptodome.Signature.eddsa.import_private_key`. - - .. note:: - - To import X25519/X448 private and public keys, when encoded as raw ``bytes``, use: - - * :func:`Cryptodome.Protocol.DH.import_x25519_public_key` - * :func:`Cryptodome.Protocol.DH.import_x25519_private_key` - * :func:`Cryptodome.Protocol.DH.import_x448_public_key` - * :func:`Cryptodome.Protocol.DH.import_x448_private_key` - - Returns: - :class:`EccKey` : a new ECC key object - - Raises: - ValueError: when the given key cannot be parsed (possibly because - the pass phrase is wrong). - - .. _RFC1421: https://datatracker.ietf.org/doc/html/rfc1421 - .. _RFC1423: https://datatracker.ietf.org/doc/html/rfc1423 - .. _RFC5915: https://datatracker.ietf.org/doc/html/rfc5915 - .. _RFC5656: https://datatracker.ietf.org/doc/html/rfc5656 - .. _RFC8709: https://datatracker.ietf.org/doc/html/rfc8709 - .. _RFC5958: https://datatracker.ietf.org/doc/html/rfc5958 - .. _`PKCS#8`: https://datatracker.ietf.org/doc/html/rfc5208 - .. _`OpenSSH 6.5`: https://flak.tedunangst.com/post/new-openssh-key-format-and-bcrypt-pbkdf - .. _SEC1: https://www.secg.org/sec1-v2.pdf - """ - - from Cryptodome.IO import PEM - - encoded = tobytes(encoded) - if passphrase is not None: - passphrase = tobytes(passphrase) - - # PEM - if encoded.startswith(b'-----BEGIN OPENSSH PRIVATE KEY'): - text_encoded = tostr(encoded) - openssh_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) - result = _import_openssh_private_ecc(openssh_encoded, passphrase) - return result - - elif encoded.startswith(b'-----'): - - text_encoded = tostr(encoded) - - # Remove any EC PARAMETERS section - # Ignore its content because the curve type must be already given in the key - ecparams_start = "-----BEGIN EC PARAMETERS-----" - ecparams_end = "-----END EC PARAMETERS-----" - text_encoded = re.sub(ecparams_start + ".*?" 
+ ecparams_end, "", - text_encoded, - flags=re.DOTALL) - - der_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) - if enc_flag: - passphrase = None - try: - result = _import_der(der_encoded, passphrase) - except UnsupportedEccFeature as uef: - raise uef - except ValueError: - raise ValueError("Invalid DER encoding inside the PEM file") - return result - - # OpenSSH - if encoded.startswith((b'ecdsa-sha2-', b'ssh-ed25519')): - return _import_openssh_public(encoded) - - # DER - if len(encoded) > 0 and bord(encoded[0]) == 0x30: - return _import_der(encoded, passphrase) - - # SEC1 - if len(encoded) > 0 and bord(encoded[0]) in (0x02, 0x03, 0x04): - if curve_name is None: - raise ValueError("No curve name was provided") - return _import_public_der(encoded, curve_name=curve_name) - - raise ValueError("ECC key format is not supported") - - -if __name__ == "__main__": - - import time - - d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd - - point = _curves['p256'].G.copy() - count = 3000 - - start = time.time() - for x in range(count): - pointX = point * d - print("(P-256 G)", (time.time() - start) / count * 1000, "ms") - - start = time.time() - for x in range(count): - pointX = pointX * d - print("(P-256 arbitrary point)", (time.time() - start) / count * 1000, "ms") diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/ECC.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/ECC.pyi deleted file mode 100644 index 3d64727..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/ECC.pyi +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -from typing import Union, Callable, Optional, Tuple, Dict, NamedTuple, Any, overload, Literal -from typing_extensions import TypedDict, Unpack, NotRequired - -from Cryptodome.Math.Numbers import Integer -from Cryptodome.IO._PBES import ProtParams - -from ._point import EccPoint as EccPoint -from ._point import EccXPoint as EccXPoint - -RNG = Callable[[int], bytes] - - -class UnsupportedEccFeature(ValueError): - ... - -class ExportParams(TypedDict): - passphrase: NotRequired[Union[bytes, str]] - use_pkcs8: NotRequired[bool] - protection: NotRequired[str] - compress: NotRequired[bool] - prot_params: NotRequired[ProtParams] - - -class EccKey(object): - curve: str - def __init__(self, *, curve: str = ..., d: int = ..., point: EccPoint = ...) -> None: ... - def __eq__(self, other: object) -> bool: ... - def __repr__(self) -> str: ... - def has_private(self) -> bool: ... - @property - def d(self) -> int: ... - @property - def pointQ(self) -> EccPoint: ... - def public_key(self) -> EccKey: ... - - @overload - def export_key(self, - *, - format: Literal['PEM', 'OpenSSH'], - **kwargs: Unpack[ExportParams]) -> str: ... - - @overload - def export_key(self, - *, - format: Literal['DER', 'SEC1', 'raw'], - **kwargs: Unpack[ExportParams]) -> bytes: ... - - -_Curve = NamedTuple("_Curve", [('p', Integer), - ('order', Integer), - ('b', Integer), - ('Gx', Integer), - ('Gy', Integer), - ('G', EccPoint), - ('modulus_bits', int), - ('oid', str), - ('context', Any), - ('desc', str), - ('openssh', Union[str, None]), - ]) - -_curves: Dict[str, _Curve] - -def _import_rfc5915_der(encoded: bytes, - passphrase: Optional[str] = None, - curve_oid: Optional[str] = None) -> EccKey: ... - -def generate(**kwargs: Union[str, RNG]) -> EccKey: ... -def construct(**kwargs: Union[str, int]) -> EccKey: ... - - -def import_key(encoded: Union[bytes, str], - passphrase: Optional[str] = None, - curve_name: Optional[str] = None) -> EccKey: ... 
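For reference, the export_key/import_key docstrings above describe a round-trip that can be exercised as follows. This is a hedged usage sketch, assuming the Cryptodome package bundled in this virtual environment is importable; the passphrase, protection scheme and iteration count simply mirror the recommendations quoted in the docstrings:

from Cryptodome.PublicKey import ECC

key = ECC.generate(curve='p256')                     # fresh NIST P-256 private key

pub_pem = key.public_key().export_key(format='PEM')  # subjectPublicKeyInfo in a PEM envelope
priv_der = key.export_key(format='DER',
                          passphrase='correct horse',
                          use_pkcs8=True,
                          protection='PBKDF2WithHMAC-SHA512AndAES128-CBC',
                          prot_params={'iteration_count': 21000})

restored = ECC.import_key(priv_der, passphrase='correct horse')
assert restored.pointQ == key.pointQ and restored.d == key.d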
- - -def _import_ed25519_public_key(encoded: bytes) -> EccKey: ... -def _import_ed448_public_key(encoded: bytes) -> EccKey: ... diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/ElGamal.py b/venv/Lib/site-packages/Cryptodome/PublicKey/ElGamal.py deleted file mode 100644 index 95c219e..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/ElGamal.py +++ /dev/null @@ -1,286 +0,0 @@ -# -# ElGamal.py : ElGamal encryption/decryption and signatures -# -# Part of the Python Cryptography Toolkit -# -# Originally written by: A.M. Kuchling -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__all__ = ['generate', 'construct', 'ElGamalKey'] - -from Cryptodome import Random -from Cryptodome.Math.Primality import ( generate_probable_safe_prime, - test_probable_prime, COMPOSITE ) -from Cryptodome.Math.Numbers import Integer - -# Generate an ElGamal key with N bits -def generate(bits, randfunc): - """Randomly generate a fresh, new ElGamal key. - - The key will be safe for use for both encryption and signature - (although it should be used for **only one** purpose). - - Args: - bits (int): - Key length, or size (in bits) of the modulus *p*. - The recommended value is 2048. - randfunc (callable): - Random number generation function; it should accept - a single integer *N* and return a string of random - *N* random bytes. - - Return: - an :class:`ElGamalKey` object - """ - - obj=ElGamalKey() - - # Generate a safe prime p - # See Algorithm 4.86 in Handbook of Applied Cryptography - obj.p = generate_probable_safe_prime(exact_bits=bits, randfunc=randfunc) - q = (obj.p - 1) >> 1 - - # Generate generator g - while 1: - # Choose a square residue; it will generate a cyclic group of order q. 
- obj.g = pow(Integer.random_range(min_inclusive=2, - max_exclusive=obj.p, - randfunc=randfunc), 2, obj.p) - - # We must avoid g=2 because of Bleichenbacher's attack described - # in "Generating ElGamal signatures without knowning the secret key", - # 1996 - if obj.g in (1, 2): - continue - - # Discard g if it divides p-1 because of the attack described - # in Note 11.67 (iii) in HAC - if (obj.p - 1) % obj.g == 0: - continue - - # g^{-1} must not divide p-1 because of Khadir's attack - # described in "Conditions of the generator for forging ElGamal - # signature", 2011 - ginv = obj.g.inverse(obj.p) - if (obj.p - 1) % ginv == 0: - continue - - # Found - break - - # Generate private key x - obj.x = Integer.random_range(min_inclusive=2, - max_exclusive=obj.p-1, - randfunc=randfunc) - # Generate public key y - obj.y = pow(obj.g, obj.x, obj.p) - return obj - -def construct(tup): - r"""Construct an ElGamal key from a tuple of valid ElGamal components. - - The modulus *p* must be a prime. - The following conditions must apply: - - .. math:: - - \begin{align} - &1 < g < p-1 \\ - &g^{p-1} = 1 \text{ mod } 1 \\ - &1 < x < p-1 \\ - &g^x = y \text{ mod } p - \end{align} - - Args: - tup (tuple): - A tuple with either 3 or 4 integers, - in the following order: - - 1. Modulus (*p*). - 2. Generator (*g*). - 3. Public key (*y*). - 4. Private key (*x*). Optional. - - Raises: - ValueError: when the key being imported fails the most basic ElGamal validity checks. - - Returns: - an :class:`ElGamalKey` object - """ - - obj=ElGamalKey() - if len(tup) not in [3,4]: - raise ValueError('argument for construct() wrong length') - for i in range(len(tup)): - field = obj._keydata[i] - setattr(obj, field, Integer(tup[i])) - - fmt_error = test_probable_prime(obj.p) == COMPOSITE - fmt_error |= obj.g<=1 or obj.g>=obj.p - fmt_error |= pow(obj.g, obj.p-1, obj.p)!=1 - fmt_error |= obj.y<1 or obj.y>=obj.p - if len(tup)==4: - fmt_error |= obj.x<=1 or obj.x>=obj.p - fmt_error |= pow(obj.g, obj.x, obj.p)!=obj.y - - if fmt_error: - raise ValueError("Invalid ElGamal key components") - - return obj - -class ElGamalKey(object): - r"""Class defining an ElGamal key. - Do not instantiate directly. - Use :func:`generate` or :func:`construct` instead. - - :ivar p: Modulus - :vartype d: integer - - :ivar g: Generator - :vartype e: integer - - :ivar y: Public key component - :vartype y: integer - - :ivar x: Private key component - :vartype x: integer - """ - - #: Dictionary of ElGamal parameters. - #: - #: A public key will only have the following entries: - #: - #: - **y**, the public key. - #: - **g**, the generator. - #: - **p**, the modulus. - #: - #: A private key will also have: - #: - #: - **x**, the private key. 
- _keydata=['p', 'g', 'y', 'x'] - - def __init__(self, randfunc=None): - if randfunc is None: - randfunc = Random.new().read - self._randfunc = randfunc - - def _encrypt(self, M, K): - a=pow(self.g, K, self.p) - b=( pow(self.y, K, self.p)*M ) % self.p - return [int(a), int(b)] - - def _decrypt(self, M): - if (not hasattr(self, 'x')): - raise TypeError('Private key not available in this object') - r = Integer.random_range(min_inclusive=2, - max_exclusive=self.p-1, - randfunc=self._randfunc) - a_blind = (pow(self.g, r, self.p) * M[0]) % self.p - ax=pow(a_blind, self.x, self.p) - plaintext_blind = (ax.inverse(self.p) * M[1] ) % self.p - plaintext = (plaintext_blind * pow(self.y, r, self.p)) % self.p - return int(plaintext) - - def _sign(self, M, K): - if (not hasattr(self, 'x')): - raise TypeError('Private key not available in this object') - p1=self.p-1 - K = Integer(K) - if (K.gcd(p1)!=1): - raise ValueError('Bad K value: GCD(K,p-1)!=1') - a=pow(self.g, K, self.p) - t=(Integer(M)-self.x*a) % p1 - while t<0: t=t+p1 - b=(t*K.inverse(p1)) % p1 - return [int(a), int(b)] - - def _verify(self, M, sig): - sig = [Integer(x) for x in sig] - if sig[0]<1 or sig[0]>self.p-1: - return 0 - v1=pow(self.y, sig[0], self.p) - v1=(v1*pow(sig[0], sig[1], self.p)) % self.p - v2=pow(self.g, M, self.p) - if v1==v2: - return 1 - return 0 - - def has_private(self): - """Whether this is an ElGamal private key""" - - if hasattr(self, 'x'): - return 1 - else: - return 0 - - def can_encrypt(self): - return True - - def can_sign(self): - return True - - def publickey(self): - """A matching ElGamal public key. - - Returns: - a new :class:`ElGamalKey` object - """ - return construct((self.p, self.g, self.y)) - - def __eq__(self, other): - if bool(self.has_private()) != bool(other.has_private()): - return False - - result = True - for comp in self._keydata: - result = result and (getattr(self.key, comp, None) == - getattr(other.key, comp, None)) - return result - - def __ne__(self, other): - return not self.__eq__(other) - - def __getstate__(self): - # ElGamal key is not pickable - from pickle import PicklingError - raise PicklingError - - # Methods defined in PyCryptodome that we don't support anymore - - def sign(self, M, K): - raise NotImplementedError - - def verify(self, M, signature): - raise NotImplementedError - - def encrypt(self, plaintext, K): - raise NotImplementedError - - def decrypt(self, ciphertext): - raise NotImplementedError - - def blind(self, M, B): - raise NotImplementedError - - def unblind(self, M, B): - raise NotImplementedError - - def size(self): - raise NotImplementedError diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/ElGamal.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/ElGamal.pyi deleted file mode 100644 index 9048531..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/ElGamal.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Callable, Union, Tuple, Optional - -__all__ = ['generate', 'construct', 'ElGamalKey'] - -RNG = Callable[[int], bytes] - -def generate(bits: int, randfunc: RNG) -> ElGamalKey: ... -def construct(tup: Union[Tuple[int, int, int], Tuple[int, int, int, int]]) -> ElGamalKey: ... - -class ElGamalKey(object): - def __init__(self, randfunc: Optional[RNG]=None) -> None: ... - def has_private(self) -> bool: ... - def can_encrypt(self) -> bool: ... - def can_sign(self) -> bool: ... - def publickey(self) -> ElGamalKey: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - def __getstate__(self) -> None: ... 
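The ElGamal module removed above now only exposes key handling (generate, construct, publickey, has_private); encryption and signing raise NotImplementedError and live in other modules. A small sketch of the retained surface, assuming Cryptodome is importable and using deliberately tiny toy parameters so construct() returns instantly:

from Cryptodome.PublicKey import ElGamal

# Toy parameters: p = 23 is a safe prime, g = 5 generates the group, y = g^x mod p.
# Real keys come from ElGamal.generate(2048, randfunc), which searches for a 2048-bit safe prime.
p, g, x = 23, 5, 6
y = pow(g, x, p)                        # 8

key = ElGamal.construct((p, g, y, x))   # re-checks primality of p and that y == g^x mod p
pub = key.publickey()                   # drops the private component x

assert key.has_private() and not pub.has_private()
assert int(pub.y) == y and int(pub.p) == p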
diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/RSA.py b/venv/Lib/site-packages/Cryptodome/PublicKey/RSA.py deleted file mode 100644 index 476785e..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/RSA.py +++ /dev/null @@ -1,871 +0,0 @@ -# -*- coding: utf-8 -*- -# =================================================================== -# -# Copyright (c) 2016, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -__all__ = ['generate', 'construct', 'import_key', - 'RsaKey', 'oid'] - -import binascii -import struct - -from Cryptodome import Random -from Cryptodome.Util.py3compat import tobytes, bord, tostr -from Cryptodome.Util.asn1 import DerSequence, DerNull -from Cryptodome.Util.number import bytes_to_long - -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Math.Primality import (test_probable_prime, - generate_probable_prime, COMPOSITE) - -from Cryptodome.PublicKey import (_expand_subject_public_key_info, - _create_subject_public_key_info, - _extract_subject_public_key_info) - - -class RsaKey(object): - r"""Class defining an RSA key, private or public. - Do not instantiate directly. - Use :func:`generate`, :func:`construct` or :func:`import_key` instead. - - :ivar n: RSA modulus - :vartype n: integer - - :ivar e: RSA public exponent - :vartype e: integer - - :ivar d: RSA private exponent - :vartype d: integer - - :ivar p: First factor of the RSA modulus - :vartype p: integer - - :ivar q: Second factor of the RSA modulus - :vartype q: integer - - :ivar invp: Chinese remainder component (:math:`p^{-1} \text{mod } q`) - :vartype invp: integer - - :ivar invq: Chinese remainder component (:math:`q^{-1} \text{mod } p`) - :vartype invq: integer - - :ivar u: Same as ``invp`` - :vartype u: integer - """ - - def __init__(self, **kwargs): - """Build an RSA key. - - :Keywords: - n : integer - The modulus. - e : integer - The public exponent. - d : integer - The private exponent. Only required for private keys. - p : integer - The first factor of the modulus. Only required for private keys. - q : integer - The second factor of the modulus. Only required for private keys. - u : integer - The CRT coefficient (inverse of p modulo q). 
Only required for - private keys. - """ - - input_set = set(kwargs.keys()) - public_set = set(('n', 'e')) - private_set = public_set | set(('p', 'q', 'd', 'u')) - if input_set not in (private_set, public_set): - raise ValueError("Some RSA components are missing") - for component, value in kwargs.items(): - setattr(self, "_" + component, value) - if input_set == private_set: - self._dp = self._d % (self._p - 1) # = (e⁻¹) mod (p-1) - self._dq = self._d % (self._q - 1) # = (e⁻¹) mod (q-1) - self._invq = None # will be computed on demand - - @property - def n(self): - return int(self._n) - - @property - def e(self): - return int(self._e) - - @property - def d(self): - if not self.has_private(): - raise AttributeError("No private exponent available for public keys") - return int(self._d) - - @property - def p(self): - if not self.has_private(): - raise AttributeError("No CRT component 'p' available for public keys") - return int(self._p) - - @property - def q(self): - if not self.has_private(): - raise AttributeError("No CRT component 'q' available for public keys") - return int(self._q) - - @property - def dp(self): - if not self.has_private(): - raise AttributeError("No CRT component 'dp' available for public keys") - return int(self._dp) - - @property - def dq(self): - if not self.has_private(): - raise AttributeError("No CRT component 'dq' available for public keys") - return int(self._dq) - - @property - def invq(self): - if not self.has_private(): - raise AttributeError("No CRT component 'invq' available for public keys") - if self._invq is None: - self._invq = self._q.inverse(self._p) - return int(self._invq) - - @property - def invp(self): - return self.u - - @property - def u(self): - if not self.has_private(): - raise AttributeError("No CRT component 'u' available for public keys") - return int(self._u) - - def size_in_bits(self): - """Size of the RSA modulus in bits""" - return self._n.size_in_bits() - - def size_in_bytes(self): - """The minimal amount of bytes that can hold the RSA modulus""" - return (self._n.size_in_bits() - 1) // 8 + 1 - - def _encrypt(self, plaintext): - if not 0 <= plaintext < self._n: - raise ValueError("Plaintext too large") - return int(pow(Integer(plaintext), self._e, self._n)) - - def _decrypt_to_bytes(self, ciphertext): - if not 0 <= ciphertext < self._n: - raise ValueError("Ciphertext too large") - if not self.has_private(): - raise TypeError("This is not a private key") - - # Blinded RSA decryption (to prevent timing attacks): - # Step 1: Generate random secret blinding factor r, - # such that 0 < r < n-1 - r = Integer.random_range(min_inclusive=1, max_exclusive=self._n) - # Step 2: Compute c' = c * r**e mod n - cp = Integer(ciphertext) * pow(r, self._e, self._n) % self._n - # Step 3: Compute m' = c'**d mod n (normal RSA decryption) - m1 = pow(cp, self._dp, self._p) - m2 = pow(cp, self._dq, self._q) - h = ((m2 - m1) * self._u) % self._q - mp = h * self._p + m1 - # Step 4: Compute m = m' * (r**(-1)) mod n - # then encode into a big endian byte string - result = Integer._mult_modulo_bytes( - r.inverse(self._n), - mp, - self._n) - return result - - def _decrypt(self, ciphertext): - """Legacy private method""" - - return bytes_to_long(self._decrypt_to_bytes(ciphertext)) - - def has_private(self): - """Whether this is an RSA private key""" - - return hasattr(self, "_d") - - def can_encrypt(self): # legacy - return True - - def can_sign(self): # legacy - return True - - def public_key(self): - """A matching RSA public key. 
- - Returns: - a new :class:`RsaKey` object - """ - return RsaKey(n=self._n, e=self._e) - - def __eq__(self, other): - if self.has_private() != other.has_private(): - return False - if self.n != other.n or self.e != other.e: - return False - if not self.has_private(): - return True - return (self.d == other.d) - - def __ne__(self, other): - return not (self == other) - - def __getstate__(self): - # RSA key is not pickable - from pickle import PicklingError - raise PicklingError - - def __repr__(self): - if self.has_private(): - extra = ", d=%d, p=%d, q=%d, u=%d" % (int(self._d), int(self._p), - int(self._q), int(self._u)) - else: - extra = "" - return "RsaKey(n=%d, e=%d%s)" % (int(self._n), int(self._e), extra) - - def __str__(self): - if self.has_private(): - key_type = "Private" - else: - key_type = "Public" - return "%s RSA key at 0x%X" % (key_type, id(self)) - - def export_key(self, format='PEM', passphrase=None, pkcs=1, - protection=None, randfunc=None, prot_params=None): - """Export this RSA key. - - Keyword Args: - format (string): - The desired output format: - - - ``'PEM'``. (default) Text output, according to `RFC1421`_/`RFC1423`_. - - ``'DER'``. Binary output. - - ``'OpenSSH'``. Text output, according to the OpenSSH specification. - Only suitable for public keys (not private keys). - - Note that PEM contains a DER structure. - - passphrase (bytes or string): - (*Private keys only*) The passphrase to protect the - private key. - - pkcs (integer): - (*Private keys only*) The standard to use for - serializing the key: PKCS#1 or PKCS#8. - - With ``pkcs=1`` (*default*), the private key is encoded with a - simple `PKCS#1`_ structure (``RSAPrivateKey``). The key cannot be - securely encrypted. - - With ``pkcs=8``, the private key is encoded with a `PKCS#8`_ structure - (``PrivateKeyInfo``). PKCS#8 offers the best ways to securely - encrypt the key. - - .. note:: - This parameter is ignored for a public key. - For DER and PEM, the output is always an - ASN.1 DER ``SubjectPublicKeyInfo`` structure. - - protection (string): - (*For private keys only*) - The encryption scheme to use for protecting the private key - using the passphrase. - - You can only specify a value if ``pkcs=8``. - For all possible protection schemes, - refer to :ref:`the encryption parameters of PKCS#8`. - The recommended value is - ``'PBKDF2WithHMAC-SHA512AndAES256-CBC'``. - - If ``None`` (default), the behavior depends on :attr:`format`: - - - if ``format='PEM'``, the obsolete PEM encryption scheme is used. - It is based on MD5 for key derivation, and 3DES for encryption. - - - if ``format='DER'``, the ``'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC'`` - scheme is used. - - prot_params (dict): - (*For private keys only*) - - The parameters to use to derive the encryption key - from the passphrase. ``'protection'`` must be also specified. - For all possible values, - refer to :ref:`the encryption parameters of PKCS#8`. - The recommendation is to use ``{'iteration_count':21000}`` for PBKDF2, - and ``{'iteration_count':131072}`` for scrypt. - - randfunc (callable): - A function that provides random bytes. Only used for PEM encoding. - The default is :func:`Cryptodome.Random.get_random_bytes`. - - Returns: - bytes: the encoded key - - Raises: - ValueError:when the format is unknown or when you try to encrypt a private - key with *DER* format and PKCS#1. - - .. warning:: - If you don't provide a pass phrase, the private key will be - exported in the clear! - - .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt - .. 
_RFC1423: http://www.ietf.org/rfc/rfc1423.txt - .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt - .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt - """ - - if passphrase is not None: - passphrase = tobytes(passphrase) - - if randfunc is None: - randfunc = Random.get_random_bytes - - if format == 'OpenSSH': - e_bytes, n_bytes = [x.to_bytes() for x in (self._e, self._n)] - if bord(e_bytes[0]) & 0x80: - e_bytes = b'\x00' + e_bytes - if bord(n_bytes[0]) & 0x80: - n_bytes = b'\x00' + n_bytes - keyparts = [b'ssh-rsa', e_bytes, n_bytes] - keystring = b''.join([struct.pack(">I", len(kp)) + kp for kp in keyparts]) - return b'ssh-rsa ' + binascii.b2a_base64(keystring)[:-1] - - # DER format is always used, even in case of PEM, which simply - # encodes it into BASE64. - if self.has_private(): - binary_key = DerSequence([0, - self.n, - self.e, - self.d, - self.p, - self.q, - self.d % (self.p-1), - self.d % (self.q-1), - Integer(self.q).inverse(self.p) - ]).encode() - if pkcs == 1: - key_type = 'RSA PRIVATE KEY' - if format == 'DER' and passphrase: - raise ValueError("PKCS#1 private key cannot be encrypted") - else: # PKCS#8 - from Cryptodome.IO import PKCS8 - - if format == 'PEM' and protection is None: - key_type = 'PRIVATE KEY' - binary_key = PKCS8.wrap(binary_key, oid, None, - key_params=DerNull()) - else: - key_type = 'ENCRYPTED PRIVATE KEY' - if not protection: - if prot_params: - raise ValueError("'protection' parameter must be set") - protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' - binary_key = PKCS8.wrap(binary_key, oid, - passphrase, protection, - prot_params=prot_params, - key_params=DerNull()) - passphrase = None - else: - key_type = "PUBLIC KEY" - binary_key = _create_subject_public_key_info(oid, - DerSequence([self.n, - self.e]), - DerNull() - ) - - if format == 'DER': - return binary_key - if format == 'PEM': - from Cryptodome.IO import PEM - - pem_str = PEM.encode(binary_key, key_type, passphrase, randfunc) - return tobytes(pem_str) - - raise ValueError("Unknown key format '%s'. Cannot export the RSA key." % format) - - # Backward compatibility - def exportKey(self, *args, **kwargs): - """:meta private:""" - return self.export_key(*args, **kwargs) - - def publickey(self): - """:meta private:""" - return self.public_key() - - # Methods defined in PyCryptodome that we don't support anymore - def sign(self, M, K): - """:meta private:""" - raise NotImplementedError("Use module Cryptodome.Signature.pkcs1_15 instead") - - def verify(self, M, signature): - """:meta private:""" - raise NotImplementedError("Use module Cryptodome.Signature.pkcs1_15 instead") - - def encrypt(self, plaintext, K): - """:meta private:""" - raise NotImplementedError("Use module Cryptodome.Cipher.PKCS1_OAEP instead") - - def decrypt(self, ciphertext): - """:meta private:""" - raise NotImplementedError("Use module Cryptodome.Cipher.PKCS1_OAEP instead") - - def blind(self, M, B): - """:meta private:""" - raise NotImplementedError - - def unblind(self, M, B): - """:meta private:""" - raise NotImplementedError - - def size(self): - """:meta private:""" - raise NotImplementedError - - -def generate(bits, randfunc=None, e=65537): - """Create a new RSA key pair. - - The algorithm closely follows NIST `FIPS 186-4`_ in its - sections B.3.1 and B.3.3. The modulus is the product of - two non-strong probable primes. - Each prime passes a suitable number of Miller-Rabin tests - with random bases and a single Lucas test. - - Args: - bits (integer): - Key length, or size (in bits) of the RSA modulus. 
- It must be at least 1024, but **2048 is recommended.** - The FIPS standard only defines 1024, 2048 and 3072. - Keyword Args: - randfunc (callable): - Function that returns random bytes. - The default is :func:`Cryptodome.Random.get_random_bytes`. - e (integer): - Public RSA exponent. It must be an odd positive integer. - It is typically a small number with very few ones in its - binary representation. - The FIPS standard requires the public exponent to be - at least 65537 (the default). - - Returns: an RSA key object (:class:`RsaKey`, with private key). - - .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf - """ - - if bits < 1024: - raise ValueError("RSA modulus length must be >= 1024") - if e % 2 == 0 or e < 3: - raise ValueError("RSA public exponent must be a positive, odd integer larger than 2.") - - if randfunc is None: - randfunc = Random.get_random_bytes - - d = n = Integer(1) - e = Integer(e) - - while n.size_in_bits() != bits and d < (1 << (bits // 2)): - # Generate the prime factors of n: p and q. - # By construciton, their product is always - # 2^{bits-1} < p*q < 2^bits. - size_q = bits // 2 - size_p = bits - size_q - - min_p = min_q = (Integer(1) << (2 * size_q - 1)).sqrt() - if size_q != size_p: - min_p = (Integer(1) << (2 * size_p - 1)).sqrt() - - def filter_p(candidate): - return candidate > min_p and (candidate - 1).gcd(e) == 1 - - p = generate_probable_prime(exact_bits=size_p, - randfunc=randfunc, - prime_filter=filter_p) - - min_distance = Integer(1) << (bits // 2 - 100) - - def filter_q(candidate): - return (candidate > min_q and - (candidate - 1).gcd(e) == 1 and - abs(candidate - p) > min_distance) - - q = generate_probable_prime(exact_bits=size_q, - randfunc=randfunc, - prime_filter=filter_q) - - n = p * q - lcm = (p - 1).lcm(q - 1) - d = e.inverse(lcm) - - if p > q: - p, q = q, p - - u = p.inverse(q) - - return RsaKey(n=n, e=e, d=d, p=p, q=q, u=u) - - -def construct(rsa_components, consistency_check=True): - r"""Construct an RSA key from a tuple of valid RSA components. - - The modulus **n** must be the product of two primes. - The public exponent **e** must be odd and larger than 1. - - In case of a private key, the following equations must apply: - - .. math:: - - \begin{align} - p*q &= n \\ - e*d &\equiv 1 ( \text{mod lcm} [(p-1)(q-1)]) \\ - p*u &\equiv 1 ( \text{mod } q) - \end{align} - - Args: - rsa_components (tuple): - A tuple of integers, with at least 2 and no - more than 6 items. The items come in the following order: - - 1. RSA modulus *n*. - 2. Public exponent *e*. - 3. Private exponent *d*. - Only required if the key is private. - 4. First factor of *n* (*p*). - Optional, but the other factor *q* must also be present. - 5. Second factor of *n* (*q*). Optional. - 6. CRT coefficient *q*, that is :math:`p^{-1} \text{mod }q`. Optional. - - Keyword Args: - consistency_check (boolean): - If ``True``, the library will verify that the provided components - fulfil the main RSA properties. - - Raises: - ValueError: when the key being imported fails the most basic RSA validity checks. - - Returns: An RSA key object (:class:`RsaKey`). 
- """ - - class InputComps(object): - pass - - input_comps = InputComps() - for (comp, value) in zip(('n', 'e', 'd', 'p', 'q', 'u'), rsa_components): - setattr(input_comps, comp, Integer(value)) - - n = input_comps.n - e = input_comps.e - if not hasattr(input_comps, 'd'): - key = RsaKey(n=n, e=e) - else: - d = input_comps.d - if hasattr(input_comps, 'q'): - p = input_comps.p - q = input_comps.q - else: - # Compute factors p and q from the private exponent d. - # We assume that n has no more than two factors. - # See 8.2.2(i) in Handbook of Applied Cryptography. - ktot = d * e - 1 - # The quantity d*e-1 is a multiple of phi(n), even, - # and can be represented as t*2^s. - t = ktot - while t % 2 == 0: - t //= 2 - # Cycle through all multiplicative inverses in Zn. - # The algorithm is non-deterministic, but there is a 50% chance - # any candidate a leads to successful factoring. - # See "Digitalized Signatures and Public Key Functions as Intractable - # as Factorization", M. Rabin, 1979 - spotted = False - a = Integer(2) - while not spotted and a < 100: - k = Integer(t) - # Cycle through all values a^{t*2^i}=a^k - while k < ktot: - cand = pow(a, k, n) - # Check if a^k is a non-trivial root of unity (mod n) - if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: - # We have found a number such that (cand-1)(cand+1)=0 (mod n). - # Either of the terms divides n. - p = Integer(n).gcd(cand + 1) - spotted = True - break - k *= 2 - # This value was not any good... let's try another! - a += 2 - if not spotted: - raise ValueError("Unable to compute factors p and q from exponent d.") - # Found ! - assert ((n % p) == 0) - q = n // p - - if hasattr(input_comps, 'u'): - u = input_comps.u - else: - u = p.inverse(q) - - # Build key object - key = RsaKey(n=n, e=e, d=d, p=p, q=q, u=u) - - # Verify consistency of the key - if consistency_check: - - # Modulus and public exponent must be coprime - if e <= 1 or e >= n: - raise ValueError("Invalid RSA public exponent") - if Integer(n).gcd(e) != 1: - raise ValueError("RSA public exponent is not coprime to modulus") - - # For RSA, modulus must be odd - if not n & 1: - raise ValueError("RSA modulus is not odd") - - if key.has_private(): - # Modulus and private exponent must be coprime - if d <= 1 or d >= n: - raise ValueError("Invalid RSA private exponent") - if Integer(n).gcd(d) != 1: - raise ValueError("RSA private exponent is not coprime to modulus") - # Modulus must be product of 2 primes - if p * q != n: - raise ValueError("RSA factors do not match modulus") - if test_probable_prime(p) == COMPOSITE: - raise ValueError("RSA factor p is composite") - if test_probable_prime(q) == COMPOSITE: - raise ValueError("RSA factor q is composite") - # See Carmichael theorem - phi = (p - 1) * (q - 1) - lcm = phi // (p - 1).gcd(q - 1) - if (e * d % int(lcm)) != 1: - raise ValueError("Invalid RSA condition") - if hasattr(key, 'u'): - # CRT coefficient - if u <= 1 or u >= q: - raise ValueError("Invalid RSA component u") - if (p * u % q) != 1: - raise ValueError("Invalid RSA component u with p") - - return key - - -def _import_pkcs1_private(encoded, *kwargs): - # RSAPrivateKey ::= SEQUENCE { - # version Version, - # modulus INTEGER, -- n - # publicExponent INTEGER, -- e - # privateExponent INTEGER, -- d - # prime1 INTEGER, -- p - # prime2 INTEGER, -- q - # exponent1 INTEGER, -- d mod (p-1) - # exponent2 INTEGER, -- d mod (q-1) - # coefficient INTEGER -- (inverse of q) mod p - # } - # - # Version ::= INTEGER - der = DerSequence().decode(encoded, nr_elements=9, 
only_ints_expected=True) - if der[0] != 0: - raise ValueError("No PKCS#1 encoding of an RSA private key") - return construct(der[1:6] + [Integer(der[4]).inverse(der[5])]) - - -def _import_pkcs1_public(encoded, *kwargs): - # RSAPublicKey ::= SEQUENCE { - # modulus INTEGER, -- n - # publicExponent INTEGER -- e - # } - der = DerSequence().decode(encoded, nr_elements=2, only_ints_expected=True) - return construct(der) - - -def _import_subjectPublicKeyInfo(encoded, *kwargs): - - oids = (oid, "1.2.840.113549.1.1.10") - - algoid, encoded_key, params = _expand_subject_public_key_info(encoded) - if algoid not in oids or params is not None: - raise ValueError("No RSA subjectPublicKeyInfo") - return _import_pkcs1_public(encoded_key) - - -def _import_x509_cert(encoded, *kwargs): - - sp_info = _extract_subject_public_key_info(encoded) - return _import_subjectPublicKeyInfo(sp_info) - - -def _import_pkcs8(encoded, passphrase): - from Cryptodome.IO import PKCS8 - - oids = (oid, "1.2.840.113549.1.1.10") - - k = PKCS8.unwrap(encoded, passphrase) - if k[0] not in oids: - raise ValueError("No PKCS#8 encoded RSA key") - return _import_keyDER(k[1], passphrase) - - -def _import_keyDER(extern_key, passphrase): - """Import an RSA key (public or private half), encoded in DER form.""" - - decodings = (_import_pkcs1_private, - _import_pkcs1_public, - _import_subjectPublicKeyInfo, - _import_x509_cert, - _import_pkcs8) - - for decoding in decodings: - try: - return decoding(extern_key, passphrase) - except ValueError: - pass - - raise ValueError("RSA key format is not supported") - - -def _import_openssh_private_rsa(data, password): - - from ._openssh import (import_openssh_private_generic, - read_bytes, read_string, check_padding) - - ssh_name, decrypted = import_openssh_private_generic(data, password) - - if ssh_name != "ssh-rsa": - raise ValueError("This SSH key is not RSA") - - n, decrypted = read_bytes(decrypted) - e, decrypted = read_bytes(decrypted) - d, decrypted = read_bytes(decrypted) - iqmp, decrypted = read_bytes(decrypted) - p, decrypted = read_bytes(decrypted) - q, decrypted = read_bytes(decrypted) - - _, padded = read_string(decrypted) # Comment - check_padding(padded) - - build = [Integer.from_bytes(x) for x in (n, e, d, q, p, iqmp)] - return construct(build) - - -def import_key(extern_key, passphrase=None): - """Import an RSA key (public or private). - - Args: - extern_key (string or byte string): - The RSA key to import. - - The following formats are supported for an RSA **public key**: - - - X.509 certificate (binary or PEM format) - - X.509 ``subjectPublicKeyInfo`` DER SEQUENCE (binary or PEM - encoding) - - `PKCS#1`_ ``RSAPublicKey`` DER SEQUENCE (binary or PEM encoding) - - An OpenSSH line (e.g. the content of ``~/.ssh/id_ecdsa``, ASCII) - - The following formats are supported for an RSA **private key**: - - - PKCS#1 ``RSAPrivateKey`` DER SEQUENCE (binary or PEM encoding) - - `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo`` - DER SEQUENCE (binary or PEM encoding) - - OpenSSH (text format, introduced in `OpenSSH 6.5`_) - - For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. - - passphrase (string or byte string): - For private keys only, the pass phrase that encrypts the key. - - Returns: An RSA key object (:class:`RsaKey`). - - Raises: - ValueError/IndexError/TypeError: - When the given key cannot be parsed (possibly because the pass - phrase is wrong). - - .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt - .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt - .. 
_`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt - .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt - .. _`OpenSSH 6.5`: https://flak.tedunangst.com/post/new-openssh-key-format-and-bcrypt-pbkdf - """ - - from Cryptodome.IO import PEM - - extern_key = tobytes(extern_key) - if passphrase is not None: - passphrase = tobytes(passphrase) - - if extern_key.startswith(b'-----BEGIN OPENSSH PRIVATE KEY'): - text_encoded = tostr(extern_key) - openssh_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) - result = _import_openssh_private_rsa(openssh_encoded, passphrase) - return result - - if extern_key.startswith(b'-----'): - # This is probably a PEM encoded key. - (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase) - if enc_flag: - passphrase = None - return _import_keyDER(der, passphrase) - - if extern_key.startswith(b'ssh-rsa '): - # This is probably an OpenSSH key - keystring = binascii.a2b_base64(extern_key.split(b' ')[1]) - keyparts = [] - while len(keystring) > 4: - length = struct.unpack(">I", keystring[:4])[0] - keyparts.append(keystring[4:4 + length]) - keystring = keystring[4 + length:] - e = Integer.from_bytes(keyparts[1]) - n = Integer.from_bytes(keyparts[2]) - return construct([n, e]) - - if len(extern_key) > 0 and bord(extern_key[0]) == 0x30: - # This is probably a DER encoded key - return _import_keyDER(extern_key, passphrase) - - raise ValueError("RSA key format is not supported") - - -# Backward compatibility -importKey = import_key - -#: `Object ID`_ for the RSA encryption algorithm. This OID often indicates -#: a generic RSA key, even when such key will be actually used for digital -#: signatures. -#: -#: .. note: -#: An RSA key meant for PSS padding has a dedicated Object ID ``1.2.840.113549.1.1.10`` -#: -#: .. _`Object ID`: http://www.alvestrand.no/objectid/1.2.840.113549.1.1.1.html -oid = "1.2.840.113549.1.1.1" diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/RSA.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/RSA.pyi deleted file mode 100644 index 85f6c4a..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/RSA.pyi +++ /dev/null @@ -1,78 +0,0 @@ -from typing import Callable, Union, Tuple, Optional, overload, Literal - -from Cryptodome.Math.Numbers import Integer -from Cryptodome.IO._PBES import ProtParams - -__all__ = ['generate', 'construct', 'import_key', - 'RsaKey', 'oid'] - -RNG = Callable[[int], bytes] - -class RsaKey(object): - def __init__(self, **kwargs: int) -> None: ... - - @property - def n(self) -> int: ... - @property - def e(self) -> int: ... - @property - def d(self) -> int: ... - @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def u(self) -> int: ... - @property - def invp(self) -> int: ... - @property - def invq(self) -> int: ... - - def size_in_bits(self) -> int: ... - def size_in_bytes(self) -> int: ... - def has_private(self) -> bool: ... - def can_encrypt(self) -> bool: ... # legacy - def can_sign(self) -> bool:... # legacy - def public_key(self) -> RsaKey: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - def __getstate__(self) -> None: ... - def __repr__(self) -> str: ... - def __str__(self) -> str: ... - - @overload - def export_key(self, - format: Optional[str]="PEM", - passphrase: Optional[str]=None, - pkcs: Optional[int]=1, - protection: Optional[str]=None, - randfunc: Optional[RNG]=None - ) -> bytes: ... 
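
Both OpenSSH code paths above (the 'OpenSSH' branch of export_key() and the 'ssh-rsa ' branch of import_key()) rely on the same framing: each field in the base64 blob is a 4-byte big-endian length followed by the raw value, and a leading zero byte is prepended to e and n whenever their most significant bit is set. A self-contained sketch of that framing using only the standard library (the helper names are illustrative, not part of the package):

    import base64
    import struct

    def ssh_rsa_public_line(e: int, n: int) -> bytes:
        def mpint(x: int) -> bytes:
            b = x.to_bytes((x.bit_length() + 7) // 8, "big")
            # keep the value non-negative when the high bit is set
            return b"\x00" + b if b[0] & 0x80 else b
        parts = [b"ssh-rsa", mpint(e), mpint(n)]
        blob = b"".join(struct.pack(">I", len(p)) + p for p in parts)
        return b"ssh-rsa " + base64.b64encode(blob)

    def parse_ssh_rsa_public_line(line: bytes):
        blob = base64.b64decode(line.split(b" ")[1])
        fields = []
        while blob:
            (length,) = struct.unpack(">I", blob[:4])
            fields.append(blob[4:4 + length])
            blob = blob[4 + length:]
        # fields[0] is b"ssh-rsa"; e and n follow
        return int.from_bytes(fields[1], "big"), int.from_bytes(fields[2], "big")

    e, n = 65537, (0xC0FFEE << 2040) | 1   # toy values, not a real modulus
    assert parse_ssh_rsa_public_line(ssh_rsa_public_line(e, n)) == (e, n)
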
- @overload - def export_key(self, *, - format: Optional[str]="PEM", - passphrase: str, - pkcs: Literal[8], - protection: str, - randfunc: Optional[RNG]=None, - prot_params: ProtParams, - ) -> bytes: ... - - # Backward compatibility - exportKey = export_key - publickey = public_key - -Int = Union[int, Integer] - -def generate(bits: int, randfunc: Optional[RNG]=None, e: Optional[int]=65537) -> RsaKey: ... -def construct(rsa_components: Union[Tuple[Int, Int], # n, e - Tuple[Int, Int, Int], # n, e, d - Tuple[Int, Int, Int, Int, Int], # n, e, d, p, q - Tuple[Int, Int, Int, Int, Int, Int]], # n, e, d, p, q, crt_q - consistency_check: Optional[bool]=True) -> RsaKey: ... -def import_key(extern_key: Union[str, bytes], passphrase: Optional[str]=None) -> RsaKey: ... - -# Backward compatibility -importKey = import_key - -oid: str diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__init__.py b/venv/Lib/site-packages/Cryptodome/PublicKey/__init__.py deleted file mode 100644 index 99b67a4..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/__init__.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Cryptodome.Util.asn1 import (DerSequence, DerInteger, DerBitString, - DerObjectId, DerNull) - - -def _expand_subject_public_key_info(encoded): - """Parse a SubjectPublicKeyInfo structure. 
- - It returns a triple with: - * OID (string) - * encoded public key (bytes) - * Algorithm parameters (bytes or None) - """ - - # - # SubjectPublicKeyInfo ::= SEQUENCE { - # algorithm AlgorithmIdentifier, - # subjectPublicKey BIT STRING - # } - # - # AlgorithmIdentifier ::= SEQUENCE { - # algorithm OBJECT IDENTIFIER, - # parameters ANY DEFINED BY algorithm OPTIONAL - # } - # - - spki = DerSequence().decode(encoded, nr_elements=2) - algo = DerSequence().decode(spki[0], nr_elements=(1,2)) - algo_oid = DerObjectId().decode(algo[0]) - spk = DerBitString().decode(spki[1]).value - - if len(algo) == 1: - algo_params = None - else: - try: - DerNull().decode(algo[1]) - algo_params = None - except: - algo_params = algo[1] - - return algo_oid.value, spk, algo_params - - -def _create_subject_public_key_info(algo_oid, public_key, params): - - if params is None: - algorithm = DerSequence([DerObjectId(algo_oid)]) - else: - algorithm = DerSequence([DerObjectId(algo_oid), params]) - - spki = DerSequence([algorithm, - DerBitString(public_key) - ]) - return spki.encode() - - -def _extract_subject_public_key_info(x509_certificate): - """Extract subjectPublicKeyInfo from a DER X.509 certificate.""" - - certificate = DerSequence().decode(x509_certificate, nr_elements=3) - tbs_certificate = DerSequence().decode(certificate[0], - nr_elements=range(6, 11)) - - index = 5 - try: - tbs_certificate[0] + 1 - # Version not present - version = 1 - except TypeError: - version = DerInteger(explicit=0).decode(tbs_certificate[0]).value - if version not in (2, 3): - raise ValueError("Incorrect X.509 certificate version") - index = 6 - - return tbs_certificate[index] diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__init__.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/__init__.pyi deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/DSA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/DSA.cpython-312.pyc deleted file mode 100644 index f190b27..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/DSA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/ECC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/ECC.cpython-312.pyc deleted file mode 100644 index 9fb3508..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/ECC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/ElGamal.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/ElGamal.cpython-312.pyc deleted file mode 100644 index 3fefca0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/ElGamal.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/RSA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/RSA.cpython-312.pyc deleted file mode 100644 index 3ec48e7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/RSA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 72b6d83..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_curve.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_curve.cpython-312.pyc deleted file mode 100644 index 5a0fc33..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_curve.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_edwards.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_edwards.cpython-312.pyc deleted file mode 100644 index 4223546..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_edwards.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_montgomery.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_montgomery.cpython-312.pyc deleted file mode 100644 index 5c4d3b2..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_montgomery.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_nist_ecc.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_nist_ecc.cpython-312.pyc deleted file mode 100644 index a637435..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_nist_ecc.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_openssh.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_openssh.cpython-312.pyc deleted file mode 100644 index 7304451..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_openssh.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_point.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_point.cpython-312.pyc deleted file mode 100644 index f053c68..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/__pycache__/_point.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_curve.py b/venv/Lib/site-packages/Cryptodome/PublicKey/_curve.py deleted file mode 100644 index 0027f61..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_curve.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. - -# This is the element of a database of curve parameters. Items are indexed by their -# human-friendly name, such as "P-256". 
The element has the following fields: -# -# - p the prime number that defines the finite field for all modulo operations -# - b the constant in the Short Weierstrass curve equation (can be None) -# - order the number of elements in the group with the generator below -# - Gx the affine coordinate X of the generator point -# - Gy the affine coordinate Y of the generator point -# - G the generator, as an EccPoint object -# - modulus_bits the minimum number of bits for encoding the modulus p -# - oid an ASCII string with the registered ASN.1 Object ID -# - context a raw pointer to memory holding a context for all curve operations (can be None) -# - canonical the canonical name of the curve -# - openssh the ASCII string used in OpenSSH id files for public keys on this curve -# - rawlib the reference to the dynamic libary with the low-level functions -# - validate a function that raises an exception if the the input point is invalid - -class _Curve(object): - - def __init__(self, p, b, order, Gx, Gy, G, modulus_bits, oid, context, - canonical, openssh, rawlib, validate=None): - self.p = p - self.b = b - self.order = order - self.Gx = Gx - self.Gy = Gy - self.G = G - self.modulus_bits = modulus_bits - self.oid = oid - self.context = context - self.canonical = canonical - self.openssh = openssh - self.rawlib = rawlib - self.validate = validate diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_curve25519.pyd b/venv/Lib/site-packages/Cryptodome/PublicKey/_curve25519.pyd deleted file mode 100644 index c40148a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/_curve25519.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_curve448.pyd b/venv/Lib/site-packages/Cryptodome/PublicKey/_curve448.pyd deleted file mode 100644 index 040d188..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/_curve448.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_ec_ws.pyd b/venv/Lib/site-packages/Cryptodome/PublicKey/_ec_ws.pyd deleted file mode 100644 index a47bb55..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/_ec_ws.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_ed25519.pyd b/venv/Lib/site-packages/Cryptodome/PublicKey/_ed25519.pyd deleted file mode 100644 index 380de9a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/_ed25519.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_ed448.pyd b/venv/Lib/site-packages/Cryptodome/PublicKey/_ed448.pyd deleted file mode 100644 index 42a9b95..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/PublicKey/_ed448.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_edwards.py b/venv/Lib/site-packages/Cryptodome/PublicKey/_edwards.py deleted file mode 100644 index 55de7b7..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_edwards.py +++ /dev/null @@ -1,116 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. 
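
The _Curve record documented above is a plain attribute container; every *_curve() factory in the modules removed below builds and returns one, and the registry keys them by name. A throw-away sketch with made-up numbers (not a real curve), only to show the shape of an entry and the field order expected by __init__:

    toy = _Curve(p=23, b=7, order=29, Gx=1, Gy=12, G=None, modulus_bits=5,
                 oid="1.3.999.1", context=None, canonical="toy-curve",
                 openssh=None, rawlib=None)
    assert toy.validate is None          # the optional validator defaults to None
    print(toy.canonical, toy.modulus_bits, hex(toy.p))
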
- -from ._curve import _Curve -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - SmartPointer) - - -def ed25519_curve(): - p = 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed # 2**255 - 19 - order = 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed - Gx = 0x216936d3cd6e53fec0a4e231fdd6dc5c692cc7609525a7b2c9562d608f25d51a - Gy = 0x6666666666666666666666666666666666666666666666666666666666666658 - - _ed25519_lib = load_pycryptodome_raw_lib("Cryptodome.PublicKey._ed25519", """ -typedef void Point; -int ed25519_new_point(Point **out, - const uint8_t x[32], - const uint8_t y[32], - size_t modsize, - const void *context); -int ed25519_clone(Point **P, const Point *Q); -void ed25519_free_point(Point *p); -int ed25519_cmp(const Point *p1, const Point *p2); -int ed25519_neg(Point *p); -int ed25519_get_xy(uint8_t *xb, uint8_t *yb, size_t modsize, Point *p); -int ed25519_double(Point *p); -int ed25519_add(Point *P1, const Point *P2); -int ed25519_scalar(Point *P, const uint8_t *scalar, size_t scalar_len, uint64_t seed); -""") - - class EcLib(object): - new_point = _ed25519_lib.ed25519_new_point - clone = _ed25519_lib.ed25519_clone - free_point = _ed25519_lib.ed25519_free_point - cmp = _ed25519_lib.ed25519_cmp - neg = _ed25519_lib.ed25519_neg - get_xy = _ed25519_lib.ed25519_get_xy - double = _ed25519_lib.ed25519_double - add = _ed25519_lib.ed25519_add - scalar = _ed25519_lib.ed25519_scalar - - ed25519 = _Curve(Integer(p), - None, - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 255, - "1.3.101.112", # RFC8410 - None, - "Ed25519", - "ssh-ed25519", - EcLib) - return ed25519 - - -def ed448_curve(): - p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffff # 2**448 - 2**224 - 1 - order = 0x3fffffffffffffffffffffffffffffffffffffffffffffffffffffff7cca23e9c44edb49aed63690216cc2728dc58f552378c292ab5844f3 - Gx = 0x4f1970c66bed0ded221d15a622bf36da9e146570470f1767ea6de324a3d3a46412ae1af72ab66511433b80e18b00938e2626a82bc70cc05e - Gy = 0x693f46716eb6bc248876203756c9c7624bea73736ca3984087789c1e05a0c2d73ad3ff1ce67c39c4fdbd132c4ed7c8ad9808795bf230fa14 - - _ed448_lib = load_pycryptodome_raw_lib("Cryptodome.PublicKey._ed448", """ -typedef void EcContext; -typedef void PointEd448; -int ed448_new_context(EcContext **pec_ctx); -void ed448_context(EcContext *ec_ctx); -void ed448_free_context(EcContext *ec_ctx); -int ed448_new_point(PointEd448 **out, - const uint8_t x[56], - const uint8_t y[56], - size_t len, - const EcContext *context); -int ed448_clone(PointEd448 **P, const PointEd448 *Q); -void ed448_free_point(PointEd448 *p); -int ed448_cmp(const PointEd448 *p1, const PointEd448 *p2); -int ed448_neg(PointEd448 *p); -int ed448_get_xy(uint8_t *xb, uint8_t *yb, size_t len, const PointEd448 *p); -int ed448_double(PointEd448 *p); -int ed448_add(PointEd448 *P1, const PointEd448 *P2); -int ed448_scalar(PointEd448 *P, const uint8_t *scalar, size_t scalar_len, uint64_t seed); -""") - - class EcLib(object): - new_point = _ed448_lib.ed448_new_point - clone = _ed448_lib.ed448_clone - free_point = _ed448_lib.ed448_free_point - cmp = _ed448_lib.ed448_cmp - neg = _ed448_lib.ed448_neg - get_xy = _ed448_lib.ed448_get_xy - double = _ed448_lib.ed448_double - add = _ed448_lib.ed448_add - scalar = _ed448_lib.ed448_scalar - - ed448_context = VoidPointer() - result = _ed448_lib.ed448_new_context(ed448_context.address_of()) - if result: - raise ImportError("Error 
%d initializing Ed448 context" % result) - - context = SmartPointer(ed448_context.get(), _ed448_lib.ed448_free_context) - - ed448 = _Curve(Integer(p), - None, - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 448, - "1.3.101.113", # RFC8410 - context, - "Ed448", - None, - EcLib) - return ed448 diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_montgomery.py b/venv/Lib/site-packages/Cryptodome/PublicKey/_montgomery.py deleted file mode 100644 index 5e5fd51..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_montgomery.py +++ /dev/null @@ -1,152 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. - -from ._curve import _Curve -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - SmartPointer) - - -def curve25519_curve(): - p = 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed # 2**255 - 19 - order = 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed - - _curve25519_lib = load_pycryptodome_raw_lib("Cryptodome.PublicKey._curve25519", """ -typedef void Point; - -int curve25519_new_point(Point **out, - const uint8_t x[32], - size_t modsize, - const void* context); -int curve25519_clone(Point **P, const Point *Q); -void curve25519_free_point(Point *p); -int curve25519_get_x(uint8_t *xb, size_t modsize, Point *p); -int curve25519_scalar(Point *P, const uint8_t *scalar, size_t scalar_len, uint64_t seed); -int curve25519_cmp(const Point *ecp1, const Point *ecp2); -""") - - class EcLib(object): - new_point = _curve25519_lib.curve25519_new_point - clone = _curve25519_lib.curve25519_clone - free_point = _curve25519_lib.curve25519_free_point - get_x = _curve25519_lib.curve25519_get_x - scalar = _curve25519_lib.curve25519_scalar - cmp = _curve25519_lib.curve25519_cmp - - def _validate_x25519_point(point): - - p2 = p * 2 - x1 = 325606250916557431795983626356110631294008115727848805560023387167927233504 - x2 = 39382357235489614581723060781553021112529911719440698176882885853963445705823 - - # http://cr.yp.to/ecdh.html#validate - deny_list = ( - 0, - 1, - x1, - x2, - p - 1, - p, - p + 1, - p + x1, - p + x2, - p2 - 1, - p2, - p2 + 1, - ) - - try: - valid = point.x not in deny_list - except ValueError: - valid = False - - if not valid: - raise ValueError("Invalid Curve25519 public key") - - curve25519 = _Curve(Integer(p), - None, - Integer(order), - Integer(9), - None, - None, - 255, - "1.3.101.110", # RFC8410 - None, - "Curve25519", - None, - EcLib, - _validate_x25519_point, - ) - - return curve25519 - - -def curve448_curve(): - p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffff # 2**448 - 2**224 - 1 - order = 0x3fffffffffffffffffffffffffffffffffffffffffffffffffffffff7cca23e9c44edb49aed63690216cc2728dc58f552378c292ab5844f3 - - _curve448_lib = load_pycryptodome_raw_lib("Cryptodome.PublicKey._curve448", """ -typedef void Curve448Context; -typedef void Curve448Point; - -int curve448_new_context(Curve448Context **pec_ctx); -void curve448_free_context(Curve448Context *ec_ctx); -int curve448_new_point(Curve448Point **out, - const uint8_t *x, - size_t len, - const Curve448Context *ec_ctx); -void curve448_free_point(Curve448Point *p); -int curve448_clone(Curve448Point **P, const Curve448Point *Q); -int curve448_get_x(uint8_t *xb, size_t modsize, const Curve448Point *p); -int curve448_scalar(Curve448Point *P, const uint8_t *scalar, size_t 
scalar_len, uint64_t seed); -int curve448_cmp(const Curve448Point *ecp1, const Curve448Point *ecp2); -""") - - class EcLib(object): - new_context = _curve448_lib.curve448_new_context - free_context = _curve448_lib.curve448_free_context - new_point = _curve448_lib.curve448_new_point - clone = _curve448_lib.curve448_clone - free_point = _curve448_lib.curve448_free_point - get_x = _curve448_lib.curve448_get_x - scalar = _curve448_lib.curve448_scalar - cmp = _curve448_lib.curve448_cmp - - curve448_context = VoidPointer() - result = EcLib.new_context(curve448_context.address_of()) - if result: - raise ImportError("Error %d initializing Curve448 context" % result) - - def _validate_x448_point(point): - deny_list = ( - 0, - 1, - p - 1, - p, - p + 1, - ) - - try: - valid = point.x not in deny_list - except ValueError: - valid = False - - if not valid: - raise ValueError("Invalid Curve448 public key") - - curve448 = _Curve(Integer(p), - None, - Integer(order), - Integer(5), - None, - None, - 448, - "1.3.101.111", # RFC8410 - SmartPointer(curve448_context.get(), EcLib.free_context), - "Curve448", - None, - EcLib, - _validate_x448_point, - ) - - return curve448 diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_nist_ecc.py b/venv/Lib/site-packages/Cryptodome/PublicKey/_nist_ecc.py deleted file mode 100644 index 3065c65..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_nist_ecc.py +++ /dev/null @@ -1,246 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. - -from ._curve import _Curve -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, - SmartPointer, c_size_t, c_uint8_ptr, - c_ulonglong) -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.Random.random import getrandbits - - -_ec_lib = load_pycryptodome_raw_lib("Cryptodome.PublicKey._ec_ws", """ -typedef void EcContext; -typedef void EcPoint; -int ec_ws_new_context(EcContext **pec_ctx, - const uint8_t *modulus, - const uint8_t *b, - const uint8_t *order, - size_t len, - uint64_t seed); -void ec_ws_free_context(EcContext *ec_ctx); -int ec_ws_new_point(EcPoint **pecp, - const uint8_t *x, - const uint8_t *y, - size_t len, - const EcContext *ec_ctx); -void ec_ws_free_point(EcPoint *ecp); -int ec_ws_get_xy(uint8_t *x, - uint8_t *y, - size_t len, - const EcPoint *ecp); -int ec_ws_double(EcPoint *p); -int ec_ws_add(EcPoint *ecpa, EcPoint *ecpb); -int ec_ws_scalar(EcPoint *ecp, - const uint8_t *k, - size_t len, - uint64_t seed); -int ec_ws_clone(EcPoint **pecp2, const EcPoint *ecp); -int ec_ws_cmp(const EcPoint *ecp1, const EcPoint *ecp2); -int ec_ws_neg(EcPoint *p); -""") - - -class EcLib(object): - new_context = _ec_lib.ec_ws_new_context - free_context = _ec_lib.ec_ws_free_context - new_point = _ec_lib.ec_ws_new_point - free_point = _ec_lib.ec_ws_free_point - get_xy = _ec_lib.ec_ws_get_xy - double = _ec_lib.ec_ws_double - add = _ec_lib.ec_ws_add - scalar = _ec_lib.ec_ws_scalar - clone = _ec_lib.ec_ws_clone - cmp = _ec_lib.ec_ws_cmp - neg = _ec_lib.ec_ws_neg - - -def p192_curve(): - p = 0xfffffffffffffffffffffffffffffffeffffffffffffffff - b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1 - order = 0xffffffffffffffffffffffff99def836146bc9b1b4d22831 - Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012 - Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811 - - p192_modulus = long_to_bytes(p, 24) - p192_b = long_to_bytes(b, 24) - p192_order = long_to_bytes(order, 24) - 
- ec_p192_context = VoidPointer() - result = _ec_lib.ec_ws_new_context(ec_p192_context.address_of(), - c_uint8_ptr(p192_modulus), - c_uint8_ptr(p192_b), - c_uint8_ptr(p192_order), - c_size_t(len(p192_modulus)), - c_ulonglong(getrandbits(64)) - ) - if result: - raise ImportError("Error %d initializing P-192 context" % result) - - context = SmartPointer(ec_p192_context.get(), _ec_lib.ec_ws_free_context) - p192 = _Curve(Integer(p), - Integer(b), - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 192, - "1.2.840.10045.3.1.1", # ANSI X9.62 / SEC2 - context, - "NIST P-192", - "ecdsa-sha2-nistp192", - EcLib) - return p192 - - -def p224_curve(): - p = 0xffffffffffffffffffffffffffffffff000000000000000000000001 - b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4 - order = 0xffffffffffffffffffffffffffff16a2e0b8f03e13dd29455c5c2a3d - Gx = 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21 - Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34 - - p224_modulus = long_to_bytes(p, 28) - p224_b = long_to_bytes(b, 28) - p224_order = long_to_bytes(order, 28) - - ec_p224_context = VoidPointer() - result = _ec_lib.ec_ws_new_context(ec_p224_context.address_of(), - c_uint8_ptr(p224_modulus), - c_uint8_ptr(p224_b), - c_uint8_ptr(p224_order), - c_size_t(len(p224_modulus)), - c_ulonglong(getrandbits(64)) - ) - if result: - raise ImportError("Error %d initializing P-224 context" % result) - - context = SmartPointer(ec_p224_context.get(), _ec_lib.ec_ws_free_context) - p224 = _Curve(Integer(p), - Integer(b), - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 224, - "1.3.132.0.33", # SEC 2 - context, - "NIST P-224", - "ecdsa-sha2-nistp224", - EcLib) - return p224 - - -def p256_curve(): - p = 0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff - b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b - order = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551 - Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296 - Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5 - - p256_modulus = long_to_bytes(p, 32) - p256_b = long_to_bytes(b, 32) - p256_order = long_to_bytes(order, 32) - - ec_p256_context = VoidPointer() - result = _ec_lib.ec_ws_new_context(ec_p256_context.address_of(), - c_uint8_ptr(p256_modulus), - c_uint8_ptr(p256_b), - c_uint8_ptr(p256_order), - c_size_t(len(p256_modulus)), - c_ulonglong(getrandbits(64)) - ) - if result: - raise ImportError("Error %d initializing P-256 context" % result) - - context = SmartPointer(ec_p256_context.get(), _ec_lib.ec_ws_free_context) - p256 = _Curve(Integer(p), - Integer(b), - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 256, - "1.2.840.10045.3.1.7", # ANSI X9.62 / SEC2 - context, - "NIST P-256", - "ecdsa-sha2-nistp256", - EcLib) - return p256 - - -def p384_curve(): - p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffff - b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef - order = 0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973 - Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760aB7 - Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5F - - p384_modulus = long_to_bytes(p, 48) - p384_b = long_to_bytes(b, 48) - p384_order = long_to_bytes(order, 48) - - ec_p384_context = VoidPointer() - 
result = _ec_lib.ec_ws_new_context(ec_p384_context.address_of(), - c_uint8_ptr(p384_modulus), - c_uint8_ptr(p384_b), - c_uint8_ptr(p384_order), - c_size_t(len(p384_modulus)), - c_ulonglong(getrandbits(64)) - ) - if result: - raise ImportError("Error %d initializing P-384 context" % result) - - context = SmartPointer(ec_p384_context.get(), _ec_lib.ec_ws_free_context) - p384 = _Curve(Integer(p), - Integer(b), - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 384, - "1.3.132.0.34", # SEC 2 - context, - "NIST P-384", - "ecdsa-sha2-nistp384", - EcLib) - return p384 - - -def p521_curve(): - p = 0x000001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff - b = 0x00000051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00 - order = 0x000001fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409 - Gx = 0x000000c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66 - Gy = 0x0000011839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650 - - p521_modulus = long_to_bytes(p, 66) - p521_b = long_to_bytes(b, 66) - p521_order = long_to_bytes(order, 66) - - ec_p521_context = VoidPointer() - result = _ec_lib.ec_ws_new_context(ec_p521_context.address_of(), - c_uint8_ptr(p521_modulus), - c_uint8_ptr(p521_b), - c_uint8_ptr(p521_order), - c_size_t(len(p521_modulus)), - c_ulonglong(getrandbits(64)) - ) - if result: - raise ImportError("Error %d initializing P-521 context" % result) - - context = SmartPointer(ec_p521_context.get(), _ec_lib.ec_ws_free_context) - p521 = _Curve(Integer(p), - Integer(b), - Integer(order), - Integer(Gx), - Integer(Gy), - None, - 521, - "1.3.132.0.35", # SEC 2 - context, - "NIST P-521", - "ecdsa-sha2-nistp521", - EcLib) - return p521 diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_openssh.py b/venv/Lib/site-packages/Cryptodome/PublicKey/_openssh.py deleted file mode 100644 index 53b16df..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_openssh.py +++ /dev/null @@ -1,135 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2019, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
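
All of the curves in _nist_ecc.py above are short Weierstrass curves with a = -3, so a quick plausibility check on the transcribed constants is that each generator satisfies y^2 ≡ x^3 - 3x + b (mod p). A sketch for P-256, assuming the hex values listed above were copied correctly:

    p  = 0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff
    b  = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
    Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
    Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5

    # The generator must lie on the curve y^2 = x^3 - 3x + b over GF(p)
    assert pow(Gy, 2, p) == (pow(Gx, 3, p) - 3 * Gx + b) % p
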
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import struct - -from Cryptodome.Cipher import AES -from Cryptodome.Hash import SHA512 -from Cryptodome.Protocol.KDF import _bcrypt_hash -from Cryptodome.Util.strxor import strxor -from Cryptodome.Util.py3compat import tostr, bchr, bord - - -def read_int4(data): - if len(data) < 4: - raise ValueError("Insufficient data") - value = struct.unpack(">I", data[:4])[0] - return value, data[4:] - - -def read_bytes(data): - size, data = read_int4(data) - if len(data) < size: - raise ValueError("Insufficient data (V)") - return data[:size], data[size:] - - -def read_string(data): - s, d = read_bytes(data) - return tostr(s), d - - -def check_padding(pad): - for v, x in enumerate(pad): - if bord(x) != ((v + 1) & 0xFF): - raise ValueError("Incorrect padding") - - -def import_openssh_private_generic(data, password): - # https://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.key?annotate=HEAD - # https://github.com/openssh/openssh-portable/blob/master/sshkey.c - # https://coolaj86.com/articles/the-openssh-private-key-format/ - # https://coolaj86.com/articles/the-ssh-public-key-format/ - - if not data.startswith(b'openssh-key-v1\x00'): - raise ValueError("Incorrect magic value") - data = data[15:] - - ciphername, data = read_string(data) - kdfname, data = read_string(data) - kdfoptions, data = read_bytes(data) - number_of_keys, data = read_int4(data) - - if number_of_keys != 1: - raise ValueError("We only handle 1 key at a time") - - _, data = read_string(data) # Public key - encrypted, data = read_bytes(data) - if data: - raise ValueError("Too much data") - - if len(encrypted) % 8 != 0: - raise ValueError("Incorrect payload length") - - # Decrypt if necessary - if ciphername == 'none': - decrypted = encrypted - else: - if (ciphername, kdfname) != ('aes256-ctr', 'bcrypt'): - raise ValueError("Unsupported encryption scheme %s/%s" % (ciphername, kdfname)) - - salt, kdfoptions = read_bytes(kdfoptions) - iterations, kdfoptions = read_int4(kdfoptions) - - if len(salt) != 16: - raise ValueError("Incorrect salt length") - if kdfoptions: - raise ValueError("Too much data in kdfoptions") - - pwd_sha512 = SHA512.new(password).digest() - # We need 32+16 = 48 bytes, therefore 2 bcrypt outputs are sufficient - stripes = [] - constant = b"OxychromaticBlowfishSwatDynamite" - for count in range(1, 3): - salt_sha512 = SHA512.new(salt + struct.pack(">I", count)).digest() - out_le = _bcrypt_hash(pwd_sha512, 6, salt_sha512, constant, False) - out = struct.pack("<IIIIIIII", *struct.unpack(">IIIIIIII", out_le)) - acc = bytearray(out) - for _ in range(1, iterations): - out_le = _bcrypt_hash(pwd_sha512, 6, SHA512.new(out).digest(), constant, False) - out = struct.pack("<IIIIIIII", *struct.unpack(">IIIIIIII", out_le)) - strxor(acc, out, output=acc) - stripes.append(acc[:24]) - - result = b"".join([bchr(a)+bchr(b) for (a, b) in zip(*stripes)]) - - cipher = AES.new(result[:32], - AES.MODE_CTR, - nonce=b"", - initial_value=result[32:32+16]) - decrypted = 
cipher.decrypt(encrypted) - - checkint1, decrypted = read_int4(decrypted) - checkint2, decrypted = read_int4(decrypted) - if checkint1 != checkint2: - raise ValueError("Incorrect checksum") - ssh_name, decrypted = read_string(decrypted) - - return ssh_name, decrypted diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_openssh.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/_openssh.pyi deleted file mode 100644 index 15f3677..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_openssh.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Tuple - -def read_int4(data: bytes) -> Tuple[int, bytes]: ... -def read_bytes(data: bytes) -> Tuple[bytes, bytes]: ... -def read_string(data: bytes) -> Tuple[str, bytes]: ... -def check_padding(pad: bytes) -> None: ... -def import_openssh_private_generic(data: bytes, password: bytes) -> Tuple[str, bytes]: ... diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_point.py b/venv/Lib/site-packages/Cryptodome/PublicKey/_point.py deleted file mode 100644 index 2afce9b..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_point.py +++ /dev/null @@ -1,493 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. - -import threading - -from Cryptodome.Util.number import bytes_to_long, long_to_bytes -from Cryptodome.Util._raw_api import (VoidPointer, null_pointer, - SmartPointer, c_size_t, c_uint8_ptr, - c_ulonglong) -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Random.random import getrandbits - - -class CurveID(object): - P192 = 1 - P224 = 2 - P256 = 3 - P384 = 4 - P521 = 5 - ED25519 = 6 - ED448 = 7 - CURVE25519 = 8 - CURVE448 = 9 - - -class _Curves(object): - - curves = {} - curves_lock = threading.RLock() - - p192_names = ["p192", "NIST P-192", "P-192", "prime192v1", "secp192r1", - "nistp192"] - p224_names = ["p224", "NIST P-224", "P-224", "prime224v1", "secp224r1", - "nistp224"] - p256_names = ["p256", "NIST P-256", "P-256", "prime256v1", "secp256r1", - "nistp256"] - p384_names = ["p384", "NIST P-384", "P-384", "prime384v1", "secp384r1", - "nistp384"] - p521_names = ["p521", "NIST P-521", "P-521", "prime521v1", "secp521r1", - "nistp521"] - ed25519_names = ["ed25519", "Ed25519"] - ed448_names = ["ed448", "Ed448"] - curve25519_names = ["curve25519", "Curve25519", "X25519"] - curve448_names = ["curve448", "Curve448", "X448"] - - all_names = p192_names + p224_names + p256_names + p384_names + p521_names + \ - ed25519_names + ed448_names + curve25519_names + curve448_names - - def __contains__(self, item): - return item in self.all_names - - def __dir__(self): - return self.all_names - - def load(self, name): - if name in self.p192_names: - from . import _nist_ecc - p192 = _nist_ecc.p192_curve() - p192.id = CurveID.P192 - self.curves.update(dict.fromkeys(self.p192_names, p192)) - elif name in self.p224_names: - from . import _nist_ecc - p224 = _nist_ecc.p224_curve() - p224.id = CurveID.P224 - self.curves.update(dict.fromkeys(self.p224_names, p224)) - elif name in self.p256_names: - from . import _nist_ecc - p256 = _nist_ecc.p256_curve() - p256.id = CurveID.P256 - self.curves.update(dict.fromkeys(self.p256_names, p256)) - elif name in self.p384_names: - from . import _nist_ecc - p384 = _nist_ecc.p384_curve() - p384.id = CurveID.P384 - self.curves.update(dict.fromkeys(self.p384_names, p384)) - elif name in self.p521_names: - from . 
import _nist_ecc - p521 = _nist_ecc.p521_curve() - p521.id = CurveID.P521 - self.curves.update(dict.fromkeys(self.p521_names, p521)) - elif name in self.ed25519_names: - from . import _edwards - ed25519 = _edwards.ed25519_curve() - ed25519.id = CurveID.ED25519 - self.curves.update(dict.fromkeys(self.ed25519_names, ed25519)) - elif name in self.ed448_names: - from . import _edwards - ed448 = _edwards.ed448_curve() - ed448.id = CurveID.ED448 - self.curves.update(dict.fromkeys(self.ed448_names, ed448)) - elif name in self.curve25519_names: - from . import _montgomery - curve25519 = _montgomery.curve25519_curve() - curve25519.id = CurveID.CURVE25519 - self.curves.update(dict.fromkeys(self.curve25519_names, curve25519)) - elif name in self.curve448_names: - from . import _montgomery - curve448 = _montgomery.curve448_curve() - curve448.id = CurveID.CURVE448 - self.curves.update(dict.fromkeys(self.curve448_names, curve448)) - else: - raise ValueError("Unsupported curve '%s'" % name) - return self.curves[name] - - def __getitem__(self, name): - with self.curves_lock: - curve = self.curves.get(name) - if curve is None: - curve = self.load(name) - if name in self.curve25519_names or name in self.curve448_names: - curve.G = EccXPoint(curve.Gx, name) - else: - curve.G = EccPoint(curve.Gx, curve.Gy, name) - curve.is_edwards = curve.id in (CurveID.ED25519, CurveID.ED448) - curve.is_montgomery = curve.id in (CurveID.CURVE25519, - CurveID.CURVE448) - curve.is_weierstrass = not (curve.is_edwards or - curve.is_montgomery) - return curve - - def items(self): - # Load all curves - for name in self.all_names: - _ = self[name] - return self.curves.items() - - -_curves = _Curves() - - -class EccPoint(object): - """A class to model a point on an Elliptic Curve. - - The class supports operators for: - - * Adding two points: ``R = S + T`` - * In-place addition: ``S += T`` - * Negating a point: ``R = -T`` - * Comparing two points: ``if S == T: ...`` or ``if S != T: ...`` - * Multiplying a point by a scalar: ``R = S*k`` - * In-place multiplication by a scalar: ``T *= k`` - - :ivar curve: The **canonical** name of the curve as defined in the `ECC table`_. 
- :vartype curve: string - - :ivar x: The affine X-coordinate of the ECC point - :vartype x: integer - - :ivar y: The affine Y-coordinate of the ECC point - :vartype y: integer - - :ivar xy: The tuple with affine X- and Y- coordinates - """ - - def __init__(self, x, y, curve="p256"): - - try: - self._curve = _curves[curve] - except KeyError: - raise ValueError("Unknown curve name %s" % str(curve)) - self.curve = self._curve.canonical - - if self._curve.id == CurveID.CURVE25519: - raise ValueError("EccPoint cannot be created for Curve25519") - - modulus_bytes = self.size_in_bytes() - - xb = long_to_bytes(x, modulus_bytes) - yb = long_to_bytes(y, modulus_bytes) - if len(xb) != modulus_bytes or len(yb) != modulus_bytes: - raise ValueError("Incorrect coordinate length") - - new_point = self._curve.rawlib.new_point - free_func = self._curve.rawlib.free_point - - self._point = VoidPointer() - try: - context = self._curve.context.get() - except AttributeError: - context = null_pointer - result = new_point(self._point.address_of(), - c_uint8_ptr(xb), - c_uint8_ptr(yb), - c_size_t(modulus_bytes), - context) - - if result: - if result == 15: - raise ValueError("The EC point does not belong to the curve") - raise ValueError("Error %d while instantiating an EC point" % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the EC point - self._point = SmartPointer(self._point.get(), free_func) - - def set(self, point): - clone = self._curve.rawlib.clone - free_func = self._curve.rawlib.free_point - - self._point = VoidPointer() - result = clone(self._point.address_of(), - point._point.get()) - - if result: - raise ValueError("Error %d while cloning an EC point" % result) - - self._point = SmartPointer(self._point.get(), free_func) - return self - - def __eq__(self, point): - if not isinstance(point, EccPoint): - return False - - cmp_func = self._curve.rawlib.cmp - return 0 == cmp_func(self._point.get(), point._point.get()) - - # Only needed for Python 2 - def __ne__(self, point): - return not self == point - - def __neg__(self): - neg_func = self._curve.rawlib.neg - np = self.copy() - result = neg_func(np._point.get()) - if result: - raise ValueError("Error %d while inverting an EC point" % result) - return np - - def copy(self): - """Return a copy of this point.""" - x, y = self.xy - np = EccPoint(x, y, self.curve) - return np - - def is_point_at_infinity(self): - """``True`` if this is the *point-at-infinity*.""" - - if self._curve.is_edwards: - return self.x == 0 - else: - return self.xy == (0, 0) - - def point_at_infinity(self): - """Return the *point-at-infinity* for the curve.""" - - if self._curve.is_edwards: - return EccPoint(0, 1, self.curve) - else: - return EccPoint(0, 0, self.curve) - - @property - def x(self): - return self.xy[0] - - @property - def y(self): - return self.xy[1] - - @property - def xy(self): - modulus_bytes = self.size_in_bytes() - xb = bytearray(modulus_bytes) - yb = bytearray(modulus_bytes) - get_xy = self._curve.rawlib.get_xy - result = get_xy(c_uint8_ptr(xb), - c_uint8_ptr(yb), - c_size_t(modulus_bytes), - self._point.get()) - if result: - raise ValueError("Error %d while encoding an EC point" % result) - - return (Integer(bytes_to_long(xb)), Integer(bytes_to_long(yb))) - - def size_in_bytes(self): - """Size of each coordinate, in bytes.""" - return (self.size_in_bits() + 7) // 8 - - def size_in_bits(self): - """Size of each coordinate, in bits.""" - return self._curve.modulus_bits - - def 
double(self): - """Double this point (in-place operation). - - Returns: - This same object (to enable chaining). - """ - - double_func = self._curve.rawlib.double - result = double_func(self._point.get()) - if result: - raise ValueError("Error %d while doubling an EC point" % result) - return self - - def __iadd__(self, point): - """Add a second point to this one""" - - add_func = self._curve.rawlib.add - result = add_func(self._point.get(), point._point.get()) - if result: - if result == 16: - raise ValueError("EC points are not on the same curve") - raise ValueError("Error %d while adding two EC points" % result) - return self - - def __add__(self, point): - """Return a new point, the addition of this one and another""" - - np = self.copy() - np += point - return np - - def __imul__(self, scalar): - """Multiply this point by a scalar""" - - scalar_func = self._curve.rawlib.scalar - if scalar < 0: - raise ValueError("Scalar multiplication is only defined for non-negative integers") - sb = long_to_bytes(scalar) - result = scalar_func(self._point.get(), - c_uint8_ptr(sb), - c_size_t(len(sb)), - c_ulonglong(getrandbits(64))) - if result: - raise ValueError("Error %d during scalar multiplication" % result) - return self - - def __mul__(self, scalar): - """Return a new point, the scalar product of this one""" - - np = self.copy() - np *= scalar - return np - - def __rmul__(self, left_hand): - return self.__mul__(left_hand) - - -class EccXPoint(object): - """A class to model a point on an Elliptic Curve, - where only the X-coordinate is exposed. - - The class supports operators for: - - * Multiplying a point by a scalar: ``R = S*k`` - * In-place multiplication by a scalar: ``T *= k`` - - :ivar curve: The **canonical** name of the curve as defined in the `ECC table`_. 
- :vartype curve: string - - :ivar x: The affine X-coordinate of the ECC point - :vartype x: integer - """ - - def __init__(self, x, curve): - # Once encoded, x must not exceed the length of the modulus, - # but its value may match or exceed the modulus itself - # (i.e., non-canonical value) - - try: - self._curve = _curves[curve] - except KeyError: - raise ValueError("Unknown curve name %s" % str(curve)) - self.curve = self._curve.canonical - - if self._curve.id not in (CurveID.CURVE25519, CurveID.CURVE448): - raise ValueError("EccXPoint can only be created for Curve25519/Curve448") - - new_point = self._curve.rawlib.new_point - free_func = self._curve.rawlib.free_point - - self._point = VoidPointer() - try: - context = self._curve.context.get() - except AttributeError: - context = null_pointer - - modulus_bytes = self.size_in_bytes() - - if x is None: - xb = null_pointer - else: - xb = c_uint8_ptr(long_to_bytes(x, modulus_bytes)) - if len(xb) != modulus_bytes: - raise ValueError("Incorrect coordinate length") - - self._point = VoidPointer() - result = new_point(self._point.address_of(), - xb, - c_size_t(modulus_bytes), - context) - - if result == 15: - raise ValueError("The EC point does not belong to the curve") - if result: - raise ValueError("Error %d while instantiating an EC point" % result) - - # Ensure that object disposal of this Python object will (eventually) - # free the memory allocated by the raw library for the EC point - self._point = SmartPointer(self._point.get(), free_func) - - def set(self, point): - clone = self._curve.rawlib.clone - free_func = self._curve.rawlib.free_point - - self._point = VoidPointer() - result = clone(self._point.address_of(), - point._point.get()) - if result: - raise ValueError("Error %d while cloning an EC point" % result) - - self._point = SmartPointer(self._point.get(), free_func) - return self - - def __eq__(self, point): - if not isinstance(point, EccXPoint): - return False - - cmp_func = self._curve.rawlib.cmp - p1 = self._point.get() - p2 = point._point.get() - res = cmp_func(p1, p2) - return 0 == res - - def copy(self): - """Return a copy of this point.""" - - try: - x = self.x - except ValueError: - return self.point_at_infinity() - return EccXPoint(x, self.curve) - - def is_point_at_infinity(self): - """``True`` if this is the *point-at-infinity*.""" - - try: - _ = self.x - except ValueError: - return True - return False - - def point_at_infinity(self): - """Return the *point-at-infinity* for the curve.""" - - return EccXPoint(None, self.curve) - - @property - def x(self): - modulus_bytes = self.size_in_bytes() - xb = bytearray(modulus_bytes) - get_x = self._curve.rawlib.get_x - result = get_x(c_uint8_ptr(xb), - c_size_t(modulus_bytes), - self._point.get()) - if result == 19: # ERR_ECC_PAI - raise ValueError("No X coordinate for the point at infinity") - if result: - raise ValueError("Error %d while getting X of an EC point" % result) - return Integer(bytes_to_long(xb)) - - def size_in_bytes(self): - """Size of each coordinate, in bytes.""" - return (self.size_in_bits() + 7) // 8 - - def size_in_bits(self): - """Size of each coordinate, in bits.""" - return self._curve.modulus_bits - - def __imul__(self, scalar): - """Multiply this point by a scalar""" - - scalar_func = self._curve.rawlib.scalar - if scalar < 0: - raise ValueError("Scalar multiplication is only defined for non-negative integers") - sb = long_to_bytes(scalar) - result = scalar_func(self._point.get(), - c_uint8_ptr(sb), - c_size_t(len(sb)), - 
c_ulonglong(getrandbits(64))) - if result: - raise ValueError("Error %d during scalar multiplication" % result) - return self - - def __mul__(self, scalar): - """Return a new point, the scalar product of this one""" - - np = self.copy() - np *= scalar - return np - - def __rmul__(self, left_hand): - return self.__mul__(left_hand) diff --git a/venv/Lib/site-packages/Cryptodome/PublicKey/_point.pyi b/venv/Lib/site-packages/Cryptodome/PublicKey/_point.pyi deleted file mode 100644 index 2518e2e..0000000 --- a/venv/Lib/site-packages/Cryptodome/PublicKey/_point.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Union, Optional, Tuple - -from Cryptodome.Math.Numbers import Integer - -class EccPoint(object): - curve: str - def __init__(self, - x: Union[int, Integer], - y: Union[int, Integer], - curve: Optional[str] = ...) -> None: ... - - def set(self, point: EccPoint) -> EccPoint: ... - def __eq__(self, point: object) -> bool: ... - def __neg__(self) -> EccPoint: ... - def copy(self) -> EccPoint: ... - def is_point_at_infinity(self) -> bool: ... - def point_at_infinity(self) -> EccPoint: ... - @property - def x(self) -> int: ... - @property - def y(self) -> int: ... - @property - def xy(self) -> Tuple[int, int]: ... - def size_in_bytes(self) -> int: ... - def size_in_bits(self) -> int: ... - def double(self) -> EccPoint: ... - def __iadd__(self, point: EccPoint) -> EccPoint: ... - def __add__(self, point: EccPoint) -> EccPoint: ... - def __imul__(self, scalar: int) -> EccPoint: ... - def __mul__(self, scalar: int) -> EccPoint: ... - - -class EccXPoint(object): - curve: str - def __init__(self, - x: Union[int, Integer], - curve: Optional[str] = ...) -> None: ... - def set(self, point: EccXPoint) -> EccXPoint: ... - def __eq__(self, point: object) -> bool: ... - def copy(self) -> EccXPoint: ... - def is_point_at_infinity(self) -> bool: ... - def point_at_infinity(self) -> EccXPoint: ... - @property - def x(self) -> int: ... - def size_in_bytes(self) -> int: ... - def size_in_bits(self) -> int: ... - def __imul__(self, scalar: int) -> EccXPoint: ... - def __mul__(self, scalar: int) -> EccXPoint: ... - def __rmul__(self, left_hand: int) -> EccXPoint: ... diff --git a/venv/Lib/site-packages/Cryptodome/Random/__init__.py b/venv/Lib/site-packages/Cryptodome/Random/__init__.py deleted file mode 100644 index fd18d86..0000000 --- a/venv/Lib/site-packages/Cryptodome/Random/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Random/__init__.py : PyCryptodome random number generation -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -__all__ = ['new', 'get_random_bytes'] - -from os import urandom - -class _UrandomRNG(object): - - def read(self, n): - """Return a random byte string of the desired size.""" - return urandom(n) - - def flush(self): - """Method provided for backward compatibility only.""" - pass - - def reinit(self): - """Method provided for backward compatibility only.""" - pass - - def close(self): - """Method provided for backward compatibility only.""" - pass - - -def new(*args, **kwargs): - """Return a file-like object that outputs cryptographically random bytes.""" - return _UrandomRNG() - - -def atfork(): - pass - - -#: Function that returns a random byte string of the desired size. -get_random_bytes = urandom - diff --git a/venv/Lib/site-packages/Cryptodome/Random/__init__.pyi b/venv/Lib/site-packages/Cryptodome/Random/__init__.pyi deleted file mode 100644 index ddc5b9b..0000000 --- a/venv/Lib/site-packages/Cryptodome/Random/__init__.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -__all__ = ['new', 'get_random_bytes'] - -from os import urandom - -class _UrandomRNG(object): - - def read(self, n: int) -> bytes:... - def flush(self) -> None: ... - def reinit(self) -> None: ... - def close(self) -> None: ... - -def new(*args: Any, **kwargs: Any) -> _UrandomRNG: ... - -def atfork() -> None: ... - -get_random_bytes = urandom - diff --git a/venv/Lib/site-packages/Cryptodome/Random/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Random/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 87a1a08..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Random/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Random/__pycache__/random.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Random/__pycache__/random.cpython-312.pyc deleted file mode 100644 index 46b43ec..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Random/__pycache__/random.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Random/random.py b/venv/Lib/site-packages/Cryptodome/Random/random.py deleted file mode 100644 index da30795..0000000 --- a/venv/Lib/site-packages/Cryptodome/Random/random.py +++ /dev/null @@ -1,138 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Random/random.py : Strong alternative for the standard 'random' module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] - -from Cryptodome import Random - -from Cryptodome.Util.py3compat import is_native_int - -class StrongRandom(object): - def __init__(self, rng=None, randfunc=None): - if randfunc is None and rng is None: - self._randfunc = None - elif randfunc is not None and rng is None: - self._randfunc = randfunc - elif randfunc is None and rng is not None: - self._randfunc = rng.read - else: - raise ValueError("Cannot specify both 'rng' and 'randfunc'") - - def getrandbits(self, k): - """Return an integer with k random bits.""" - - if self._randfunc is None: - self._randfunc = Random.new().read - mask = (1 << k) - 1 - return mask & bytes_to_long(self._randfunc(ceil_div(k, 8))) - - def randrange(self, *args): - """randrange([start,] stop[, step]): - Return a randomly-selected element from range(start, stop, step).""" - if len(args) == 3: - (start, stop, step) = args - elif len(args) == 2: - (start, stop) = args - step = 1 - elif len(args) == 1: - (stop,) = args - start = 0 - step = 1 - else: - raise TypeError("randrange expected at most 3 arguments, got %d" % (len(args),)) - if (not is_native_int(start) or not is_native_int(stop) or not - is_native_int(step)): - raise TypeError("randrange requires integer arguments") - if step == 0: - raise ValueError("randrange step argument must not be zero") - - num_choices = ceil_div(stop - start, step) - if num_choices < 0: - num_choices = 0 - if num_choices < 1: - raise ValueError("empty range for randrange(%r, %r, %r)" % (start, stop, step)) - - # Pick a random number in the range of possible numbers - r = num_choices - while r >= num_choices: - r = self.getrandbits(size(num_choices)) - - return start + (step * r) - - def randint(self, a, b): - """Return a random integer N such that a <= N <= b.""" - if not is_native_int(a) or not is_native_int(b): - raise TypeError("randint requires integer arguments") - N = self.randrange(a, b+1) - assert a <= N <= b - return N - - def choice(self, seq): - """Return a random element from a (non-empty) sequence. - - If the seqence is empty, raises IndexError. - """ - if len(seq) == 0: - raise IndexError("empty sequence") - return seq[self.randrange(len(seq))] - - def shuffle(self, x): - """Shuffle the sequence in place.""" - # Fisher-Yates shuffle. O(n) - # See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle - # Working backwards from the end of the array, we choose a random item - # from the remaining items until all items have been chosen. 
- for i in range(len(x)-1, 0, -1): # iterate from len(x)-1 downto 1 - j = self.randrange(0, i+1) # choose random j such that 0 <= j <= i - x[i], x[j] = x[j], x[i] # exchange x[i] and x[j] - - def sample(self, population, k): - """Return a k-length list of unique elements chosen from the population sequence.""" - - num_choices = len(population) - if k > num_choices: - raise ValueError("sample larger than population") - - retval = [] - selected = {} # we emulate a set using a dict here - for i in range(k): - r = None - while r is None or r in selected: - r = self.randrange(num_choices) - retval.append(population[r]) - selected[r] = 1 - return retval - -_r = StrongRandom() -getrandbits = _r.getrandbits -randrange = _r.randrange -randint = _r.randint -choice = _r.choice -shuffle = _r.shuffle -sample = _r.sample - -# These are at the bottom to avoid problems with recursive imports -from Cryptodome.Util.number import ceil_div, bytes_to_long, long_to_bytes, size - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/Random/random.pyi b/venv/Lib/site-packages/Cryptodome/Random/random.pyi deleted file mode 100644 index 9b7cf7e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Random/random.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Callable, Tuple, Union, Sequence, Any, Optional, TypeVar - -__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] - -T = TypeVar('T') - -class StrongRandom(object): - def __init__(self, rng: Optional[Any]=None, randfunc: Optional[Callable]=None) -> None: ... # TODO What is rng? - def getrandbits(self, k: int) -> int: ... - def randrange(self, start: int, stop: int = ..., step: int = ...) -> int: ... - def randint(self, a: int, b: int) -> int: ... - def choice(self, seq: Sequence[T]) -> T: ... - def shuffle(self, x: Sequence) -> None: ... - def sample(self, population: Sequence, k: int) -> list: ... - -_r = StrongRandom() -getrandbits = _r.getrandbits -randrange = _r.randrange -randint = _r.randint -choice = _r.choice -shuffle = _r.shuffle -sample = _r.sample diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__init__.py deleted file mode 100644 index 40f865d..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/__init__.py: Self-test for cipher modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for cipher modules""" - -import sys - - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_Salsa20; tests += test_Salsa20.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_ChaCha20; tests += test_ChaCha20.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_ChaCha20_Poly1305; tests += test_ChaCha20_Poly1305.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_OCB; tests += test_OCB.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_CBC; tests += test_CBC.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_CFB; tests += test_CFB.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_OpenPGP; tests += test_OpenPGP.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_OFB; tests += test_OFB.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_CTR; tests += test_CTR.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_CCM; tests += test_CCM.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_EAX; tests += test_EAX.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_GCM; tests += test_GCM.get_tests(config=config) - from Cryptodome.SelfTest.Cipher import test_SIV; tests += test_SIV.get_tests(config=config) - - if sys.version_info >= (3, 9): - from Cryptodome.SelfTest.Cipher import test_KW - tests += test_KW.get_tests(config=config) - - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 7cd33e0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/common.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/common.cpython-312.pyc deleted file mode 100644 index 580c8cd..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/common.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_AES.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_AES.cpython-312.pyc deleted file mode 100644 index 
d171b6f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_AES.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ARC2.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ARC2.cpython-312.pyc deleted file mode 100644 index 885e559..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ARC2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ARC4.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ARC4.cpython-312.pyc deleted file mode 100644 index 222aaa8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ARC4.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_Blowfish.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_Blowfish.cpython-312.pyc deleted file mode 100644 index bb09118..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_Blowfish.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CAST.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CAST.cpython-312.pyc deleted file mode 100644 index b2c4dae..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CAST.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CBC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CBC.cpython-312.pyc deleted file mode 100644 index 3bb799f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CBC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CCM.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CCM.cpython-312.pyc deleted file mode 100644 index 969a27f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CCM.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CFB.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CFB.cpython-312.pyc deleted file mode 100644 index 2883938..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CFB.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CTR.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CTR.cpython-312.pyc deleted file mode 100644 index adaa2b5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_CTR.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ChaCha20.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ChaCha20.cpython-312.pyc deleted file mode 100644 index 9b00a43..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ChaCha20.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ChaCha20_Poly1305.cpython-312.pyc 
b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ChaCha20_Poly1305.cpython-312.pyc deleted file mode 100644 index 7685985..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_ChaCha20_Poly1305.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_DES.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_DES.cpython-312.pyc deleted file mode 100644 index 3b2e109..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_DES.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_DES3.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_DES3.cpython-312.pyc deleted file mode 100644 index 400b745..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_DES3.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_EAX.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_EAX.cpython-312.pyc deleted file mode 100644 index 4832bf8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_EAX.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_GCM.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_GCM.cpython-312.pyc deleted file mode 100644 index 53d562b..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_GCM.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_KW.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_KW.cpython-312.pyc deleted file mode 100644 index ad9bf34..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_KW.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OCB.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OCB.cpython-312.pyc deleted file mode 100644 index a6a10d7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OCB.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OFB.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OFB.cpython-312.pyc deleted file mode 100644 index 26c24ff..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OFB.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OpenPGP.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OpenPGP.cpython-312.pyc deleted file mode 100644 index fa64dfd..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_OpenPGP.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_SIV.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_SIV.cpython-312.pyc deleted file mode 100644 index fbf78b0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_SIV.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_Salsa20.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_Salsa20.cpython-312.pyc deleted file mode 100644 index 21a297a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_Salsa20.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_pkcs1_15.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_pkcs1_15.cpython-312.pyc deleted file mode 100644 index 6b2e5eb..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_pkcs1_15.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_pkcs1_oaep.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_pkcs1_oaep.cpython-312.pyc deleted file mode 100644 index c2054c8..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/__pycache__/test_pkcs1_oaep.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/common.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/common.py deleted file mode 100644 index a13d4fb..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/common.py +++ /dev/null @@ -1,510 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/common.py: Common code for Cryptodome.SelfTest.Hash -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-testing for PyCryptodome hash modules""" - -import unittest -from binascii import a2b_hex, b2a_hex, hexlify - -from Cryptodome.Util.py3compat import b -from Cryptodome.Util.strxor import strxor_c - -class _NoDefault: pass # sentinel object -def _extract(d, k, default=_NoDefault): - """Get an item from a dictionary, and remove it from the dictionary.""" - try: - retval = d[k] - except KeyError: - if default is _NoDefault: - raise - return default - del d[k] - return retval - -# Generic cipher test case -class CipherSelfTest(unittest.TestCase): - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - - # Extract the parameters - params = params.copy() - self.description = _extract(params, 'description') - self.key = b(_extract(params, 'key')) - self.plaintext = b(_extract(params, 'plaintext')) - self.ciphertext = b(_extract(params, 'ciphertext')) - self.module_name = _extract(params, 'module_name', None) - self.assoc_data = _extract(params, 'assoc_data', None) - self.mac = _extract(params, 'mac', None) - if self.assoc_data: - self.mac = b(self.mac) - - mode = _extract(params, 'mode', None) - self.mode_name = str(mode) - - if mode is not None: - # Block cipher - self.mode = getattr(self.module, "MODE_" + mode) - - self.iv = _extract(params, 'iv', None) - if self.iv is None: - self.iv = _extract(params, 'nonce', None) - if self.iv is not None: - self.iv = b(self.iv) - - else: - # Stream cipher - self.mode = None - self.iv = _extract(params, 'iv', None) - if self.iv is not None: - self.iv = b(self.iv) - - self.extra_params = params - - def shortDescription(self): - return self.description - - def _new(self): - params = self.extra_params.copy() - key = a2b_hex(self.key) - - old_style = [] - if self.mode is not None: - old_style = [ self.mode ] - if self.iv is not None: - old_style += [ a2b_hex(self.iv) ] - - return self.module.new(key, *old_style, **params) - - def isMode(self, name): - if not hasattr(self.module, "MODE_"+name): - return False - return self.mode == getattr(self.module, "MODE_"+name) - - def runTest(self): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - assoc_data = [] - if self.assoc_data: - assoc_data = [ a2b_hex(b(x)) for x in self.assoc_data] - - ct = None - pt = None - - # - # Repeat the same encryption or decryption twice and verify - # that the result is always the same - # - for i in range(2): - cipher = self._new() - decipher = self._new() - - # Only AEAD modes - for comp in assoc_data: - cipher.update(comp) - decipher.update(comp) - - ctX = b2a_hex(cipher.encrypt(plaintext)) - ptX = b2a_hex(decipher.decrypt(ciphertext)) - - if ct: - self.assertEqual(ct, ctX) - self.assertEqual(pt, ptX) - ct, pt = ctX, ptX - - self.assertEqual(self.ciphertext, ct) # encrypt - self.assertEqual(self.plaintext, pt) # decrypt - - if self.mac: - mac = b2a_hex(cipher.digest()) - self.assertEqual(self.mac, mac) - decipher.verify(a2b_hex(self.mac)) - -class CipherStreamingSelfTest(CipherSelfTest): - - def shortDescription(self): - desc = self.module_name - if self.mode is not None: - desc += " in %s mode" % (self.mode_name,) - return "%s should behave like a stream cipher" % (desc,) - - def runTest(self): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - - # The cipher should work like a stream cipher - - # Test counter mode encryption, 3 bytes at a time - ct3 = [] - cipher = self._new() - for i in range(0, 
len(plaintext), 3): - ct3.append(cipher.encrypt(plaintext[i:i+3])) - ct3 = b2a_hex(b("").join(ct3)) - self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time) - - # Test counter mode decryption, 3 bytes at a time - pt3 = [] - cipher = self._new() - for i in range(0, len(ciphertext), 3): - pt3.append(cipher.encrypt(ciphertext[i:i+3])) - # PY3K: This is meant to be text, do not change to bytes (data) - pt3 = b2a_hex(b("").join(pt3)) - self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time) - - -class RoundtripTest(unittest.TestCase): - def __init__(self, module, params): - from Cryptodome import Random - unittest.TestCase.__init__(self) - self.module = module - self.iv = Random.get_random_bytes(module.block_size) - self.key = b(params['key']) - self.plaintext = 100 * b(params['plaintext']) - self.module_name = params.get('module_name', None) - - def shortDescription(self): - return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,) - - def runTest(self): - - ## ECB mode - mode = self.module.MODE_ECB - encryption_cipher = self.module.new(a2b_hex(self.key), mode) - ciphertext = encryption_cipher.encrypt(self.plaintext) - decryption_cipher = self.module.new(a2b_hex(self.key), mode) - decrypted_plaintext = decryption_cipher.decrypt(ciphertext) - self.assertEqual(self.plaintext, decrypted_plaintext) - - -class IVLengthTest(unittest.TestCase): - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - - def shortDescription(self): - return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length" - - def runTest(self): - self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), - self.module.MODE_ECB, b("")) - - def _dummy_counter(self): - return "\0" * self.module.block_size - - -class NoDefaultECBTest(unittest.TestCase): - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - - def runTest(self): - self.assertRaises(TypeError, self.module.new, a2b_hex(self.key)) - - -class BlockSizeTest(unittest.TestCase): - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = a2b_hex(b(params['key'])) - - def runTest(self): - cipher = self.module.new(self.key, self.module.MODE_ECB) - self.assertEqual(cipher.block_size, self.module.block_size) - - -class ByteArrayTest(unittest.TestCase): - """Verify we can use bytearray's for encrypting and decrypting""" - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - - # Extract the parameters - params = params.copy() - self.description = _extract(params, 'description') - self.key = b(_extract(params, 'key')) - self.plaintext = b(_extract(params, 'plaintext')) - self.ciphertext = b(_extract(params, 'ciphertext')) - self.module_name = _extract(params, 'module_name', None) - self.assoc_data = _extract(params, 'assoc_data', None) - self.mac = _extract(params, 'mac', None) - if self.assoc_data: - self.mac = b(self.mac) - - mode = _extract(params, 'mode', None) - self.mode_name = str(mode) - - if mode is not None: - # Block cipher - self.mode = getattr(self.module, "MODE_" + mode) - - self.iv = _extract(params, 'iv', None) - if self.iv is None: - self.iv = _extract(params, 'nonce', None) - if self.iv is not None: - self.iv = b(self.iv) - else: - # Stream cipher - self.mode = None - self.iv = _extract(params, 'iv', None) - if self.iv is not 
None: - self.iv = b(self.iv) - - self.extra_params = params - - def _new(self): - params = self.extra_params.copy() - key = a2b_hex(self.key) - - old_style = [] - if self.mode is not None: - old_style = [ self.mode ] - if self.iv is not None: - old_style += [ a2b_hex(self.iv) ] - - return self.module.new(key, *old_style, **params) - - def runTest(self): - - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - assoc_data = [] - if self.assoc_data: - assoc_data = [ bytearray(a2b_hex(b(x))) for x in self.assoc_data] - - cipher = self._new() - decipher = self._new() - - # Only AEAD modes - for comp in assoc_data: - cipher.update(comp) - decipher.update(comp) - - ct = b2a_hex(cipher.encrypt(bytearray(plaintext))) - pt = b2a_hex(decipher.decrypt(bytearray(ciphertext))) - - self.assertEqual(self.ciphertext, ct) # encrypt - self.assertEqual(self.plaintext, pt) # decrypt - - if self.mac: - mac = b2a_hex(cipher.digest()) - self.assertEqual(self.mac, mac) - decipher.verify(bytearray(a2b_hex(self.mac))) - - -class MemoryviewTest(unittest.TestCase): - """Verify we can use memoryviews for encrypting and decrypting""" - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - - # Extract the parameters - params = params.copy() - self.description = _extract(params, 'description') - self.key = b(_extract(params, 'key')) - self.plaintext = b(_extract(params, 'plaintext')) - self.ciphertext = b(_extract(params, 'ciphertext')) - self.module_name = _extract(params, 'module_name', None) - self.assoc_data = _extract(params, 'assoc_data', None) - self.mac = _extract(params, 'mac', None) - if self.assoc_data: - self.mac = b(self.mac) - - mode = _extract(params, 'mode', None) - self.mode_name = str(mode) - - if mode is not None: - # Block cipher - self.mode = getattr(self.module, "MODE_" + mode) - - self.iv = _extract(params, 'iv', None) - if self.iv is None: - self.iv = _extract(params, 'nonce', None) - if self.iv is not None: - self.iv = b(self.iv) - else: - # Stream cipher - self.mode = None - self.iv = _extract(params, 'iv', None) - if self.iv is not None: - self.iv = b(self.iv) - - self.extra_params = params - - def _new(self): - params = self.extra_params.copy() - key = a2b_hex(self.key) - - old_style = [] - if self.mode is not None: - old_style = [ self.mode ] - if self.iv is not None: - old_style += [ a2b_hex(self.iv) ] - - return self.module.new(key, *old_style, **params) - - def runTest(self): - - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - assoc_data = [] - if self.assoc_data: - assoc_data = [ memoryview(a2b_hex(b(x))) for x in self.assoc_data] - - cipher = self._new() - decipher = self._new() - - # Only AEAD modes - for comp in assoc_data: - cipher.update(comp) - decipher.update(comp) - - ct = b2a_hex(cipher.encrypt(memoryview(plaintext))) - pt = b2a_hex(decipher.decrypt(memoryview(ciphertext))) - - self.assertEqual(self.ciphertext, ct) # encrypt - self.assertEqual(self.plaintext, pt) # decrypt - - if self.mac: - mac = b2a_hex(cipher.digest()) - self.assertEqual(self.mac, mac) - decipher.verify(memoryview(a2b_hex(self.mac))) - - -def make_block_tests(module, module_name, test_data, additional_params=dict()): - tests = [] - extra_tests_added = False - for i in range(len(test_data)): - row = test_data[i] - - # Build the "params" dictionary with - # - plaintext - # - ciphertext - # - key - # - mode (default is ECB) - # - (optionally) description - # - (optionally) any other parameter that this cipher mode 
requires - params = {} - if len(row) == 3: - (params['plaintext'], params['ciphertext'], params['key']) = row - elif len(row) == 4: - (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row - elif len(row) == 5: - (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row - params.update(extra_params) - else: - raise AssertionError("Unsupported tuple size %d" % (len(row),)) - - if not "mode" in params: - params["mode"] = "ECB" - - # Build the display-name for the test - p2 = params.copy() - p_key = _extract(p2, 'key') - p_plaintext = _extract(p2, 'plaintext') - p_ciphertext = _extract(p2, 'ciphertext') - p_mode = _extract(p2, 'mode') - p_description = _extract(p2, 'description', None) - - if p_description is not None: - description = p_description - elif p_mode == 'ECB' and not p2: - description = "p=%s, k=%s" % (p_plaintext, p_key) - else: - description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) - name = "%s #%d: %s" % (module_name, i+1, description) - params['description'] = name - params['module_name'] = module_name - params.update(additional_params) - - # Add extra test(s) to the test suite before the current test - if not extra_tests_added: - tests += [ - RoundtripTest(module, params), - IVLengthTest(module, params), - NoDefaultECBTest(module, params), - ByteArrayTest(module, params), - BlockSizeTest(module, params), - ] - extra_tests_added = True - - # Add the current test to the test suite - tests.append(CipherSelfTest(module, params)) - - return tests - -def make_stream_tests(module, module_name, test_data): - tests = [] - extra_tests_added = False - for i in range(len(test_data)): - row = test_data[i] - - # Build the "params" dictionary - params = {} - if len(row) == 3: - (params['plaintext'], params['ciphertext'], params['key']) = row - elif len(row) == 4: - (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row - elif len(row) == 5: - (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row - params.update(extra_params) - else: - raise AssertionError("Unsupported tuple size %d" % (len(row),)) - - # Build the display-name for the test - p2 = params.copy() - p_key = _extract(p2, 'key') - p_plaintext = _extract(p2, 'plaintext') - p_ciphertext = _extract(p2, 'ciphertext') - p_description = _extract(p2, 'description', None) - - if p_description is not None: - description = p_description - elif not p2: - description = "p=%s, k=%s" % (p_plaintext, p_key) - else: - description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) - name = "%s #%d: %s" % (module_name, i+1, description) - params['description'] = name - params['module_name'] = module_name - - # Add extra test(s) to the test suite before the current test - if not extra_tests_added: - tests += [ - ByteArrayTest(module, params), - ] - - tests.append(MemoryviewTest(module, params)) - extra_tests_added = True - - # Add the test to the test suite - tests.append(CipherSelfTest(module, params)) - tests.append(CipherStreamingSelfTest(module, params)) - return tests - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_AES.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_AES.py deleted file mode 100644 index bd6c40e..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_AES.py +++ /dev/null @@ -1,1351 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/AES.py: Self-test for the AES cipher -# -# Written in 
2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.AES""" - -from __future__ import print_function - -import unittest -from Cryptodome.Hash import SHA256 -from Cryptodome.Cipher import AES -from Cryptodome.Util.py3compat import * -from binascii import hexlify - -# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. -test_data = [ - # FIPS PUB 197 test vectors - # http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf - - ('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a', - '000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'), - - ('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191', - '000102030405060708090a0b0c0d0e0f1011121314151617', - 'FIPS 197 C.2 (AES-192)'), - - ('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089', - '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - 'FIPS 197 C.3 (AES-256)'), - - # Rijndael128 test vectors - # Downloaded 2008-09-13 from - # http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz - - # ecb_tbl.txt, KEYSIZE=128 - ('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9', - '00010203050607080a0b0c0d0f101112', - 'ecb-tbl-128: I=1'), - ('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c', - '14151617191a1b1c1e1f202123242526', - 'ecb-tbl-128: I=2'), - ('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1', - '28292a2b2d2e2f30323334353738393a', - 'ecb-tbl-128: I=3'), - ('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2', - '3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-128: I=4'), - ('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b', - '50515253555657585a5b5c5d5f606162', - 'ecb-tbl-128: I=5'), - ('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea', - '64656667696a6b6c6e6f707173747576', - 'ecb-tbl-128: I=6'), - ('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1', - '78797a7b7d7e7f80828384858788898a', - 'ecb-tbl-128: I=7'), - ('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5', - '8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-128: I=8'), - ('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-128: I=9'), - ('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-128: I=10'), - ('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5', - 
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', - 'ecb-tbl-128: I=11'), - ('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f', - 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-128: I=12'), - ('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c', - 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', - 'ecb-tbl-128: I=13'), - ('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37', - '04050607090a0b0c0e0f101113141516', - 'ecb-tbl-128: I=14'), - ('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd', - '2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-128: I=15'), - ('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc', - '40414243454647484a4b4c4d4f505152', - 'ecb-tbl-128: I=16'), - ('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93', - '54555657595a5b5c5e5f606163646566', - 'ecb-tbl-128: I=17'), - ('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394', - '68696a6b6d6e6f70727374757778797a', - 'ecb-tbl-128: I=18'), - ('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d', - '7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-128: I=19'), - ('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1', - 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-128: I=20'), - ('bdc2691d4f1b73d2700679c3bcbf9c6e', 'c84113d68b666ab2a50a8bdb222e91b9', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', - 'ecb-tbl-128: I=21'), - ('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6', - '08090a0b0d0e0f10121314151718191a', - 'ecb-tbl-128: I=22'), - ('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414', - '6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-128: I=23'), - ('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90', - '80818283858687888a8b8c8d8f909192', - 'ecb-tbl-128: I=24'), - ('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258', - '94959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-128: I=25'), - ('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', - 'ecb-tbl-128: I=26'), - ('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c', - 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-128: I=27'), - ('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', - 'ecb-tbl-128: I=28'), - ('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3', - 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-128: I=29'), - ('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579', - 'f8f9fafbfdfefe00020304050708090a', - 'ecb-tbl-128: I=30'), - ('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8', - '0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-128: I=31'), - ('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6', - '20212223252627282a2b2c2d2f303132', - 'ecb-tbl-128: I=32'), - ('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff', - '34353637393a3b3c3e3f404143444546', - 'ecb-tbl-128: I=33'), - ('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9', - '48494a4b4d4e4f50525354555758595a', - 'ecb-tbl-128: I=34'), - ('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67', - '5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-128: I=35'), - ('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a', - '70717273757677787a7b7c7d7f808182', - 'ecb-tbl-128: I=36'), - ('7d82a43ddf4fefa2fc5947499884d386', 
'37785901863f5c81260ea41e7580cda5', - '84858687898a8b8c8e8f909193949596', - 'ecb-tbl-128: I=37'), - ('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', - 'ecb-tbl-128: I=38'), - ('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c', - 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-128: I=39'), - ('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', - 'ecb-tbl-128: I=40'), - ('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48', - 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-128: I=41'), - ('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', - 'ecb-tbl-128: I=42'), - ('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d', - 'fcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-128: I=43'), - ('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003', - '10111213151617181a1b1c1d1f202122', - 'ecb-tbl-128: I=44'), - ('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75', - '24252627292a2b2c2e2f303133343536', - 'ecb-tbl-128: I=45'), - ('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c', - '38393a3b3d3e3f40424344454748494a', - 'ecb-tbl-128: I=46'), - ('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89', - '4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-128: I=47'), - ('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed', - '60616263656667686a6b6c6d6f707172', - 'ecb-tbl-128: I=48'), - ('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe', - '74757677797a7b7c7e7f808183848586', - 'ecb-tbl-128: I=49'), - ('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259', - '88898a8b8d8e8f90929394959798999a', - 'ecb-tbl-128: I=50'), - ('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d', - '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-128: I=51'), - ('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', - 'ecb-tbl-128: I=52'), - ('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270', - 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-128: I=53'), - ('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', - 'ecb-tbl-128: I=54'), - ('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df', - 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-128: I=55'), - ('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c', - '00010203050607080a0b0c0d0f101112', - 'ecb-tbl-128: I=56'), - ('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258', - '14151617191a1b1c1e1f202123242526', - 'ecb-tbl-128: I=57'), - ('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e', - '28292a2b2d2e2f30323334353738393a', - 'ecb-tbl-128: I=58'), - ('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d', - '3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-128: I=59'), - ('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9', - '50515253555657585a5b5c5d5f606162', - 'ecb-tbl-128: I=60'), - ('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217', - '64656667696a6b6c6e6f707173747576', - 'ecb-tbl-128: I=61'), - ('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236', - '78797a7b7d7e7f80828384858788898a', - 'ecb-tbl-128: I=62'), - 
('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a', - '8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-128: I=63'), - ('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-128: I=64'), - ('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-128: I=65'), - ('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', - 'ecb-tbl-128: I=66'), - ('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074', - 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-128: I=67'), - ('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b', - 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', - 'ecb-tbl-128: I=68'), - ('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e', - '04050607090a0b0c0e0f101113141516', - 'ecb-tbl-128: I=69'), - ('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75', - '18191a1b1d1e1f20222324252728292a', - 'ecb-tbl-128: I=70'), - ('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528', - '2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-128: I=71'), - ('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98', - '40414243454647484a4b4c4d4f505152', - 'ecb-tbl-128: I=72'), - ('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d', - '54555657595a5b5c5e5f606163646566', - 'ecb-tbl-128: I=73'), - ('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f', - '68696a6b6d6e6f70727374757778797a', - 'ecb-tbl-128: I=74'), - ('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13', - '7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-128: I=75'), - ('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8', - '90919293959697989a9b9c9d9fa0a1a2', - 'ecb-tbl-128: I=76'), - ('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa', - 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-128: I=77'), - ('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', - 'ecb-tbl-128: I=78'), - ('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f', - 'cccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-128: I=79'), - ('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', - 'ecb-tbl-128: I=80'), - ('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17', - 'f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-128: I=81'), - ('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5', - '08090a0b0d0e0f10121314151718191a', - 'ecb-tbl-128: I=82'), - ('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69', - '1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-128: I=83'), - ('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021', - '30313233353637383a3b3c3d3f404142', - 'ecb-tbl-128: I=84'), - ('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b', - '44454647494a4b4c4e4f505153545556', - 'ecb-tbl-128: I=85'), - ('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9', - '58595a5b5d5e5f60626364656768696a', - 'ecb-tbl-128: I=86'), - ('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b', - '6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-128: I=87'), - ('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca', - '80818283858687888a8b8c8d8f909192', - 'ecb-tbl-128: 
I=88'), - ('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2', - '94959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-128: I=89'), - ('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', - 'ecb-tbl-128: I=90'), - ('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72', - 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-128: I=91'), - ('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', - 'ecb-tbl-128: I=92'), - ('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57', - 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-128: I=93'), - ('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52', - 'f8f9fafbfdfefe00020304050708090a', - 'ecb-tbl-128: I=94'), - ('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3', - '0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-128: I=95'), - ('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025', - '20212223252627282a2b2c2d2f303132', - 'ecb-tbl-128: I=96'), - ('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd', - '34353637393a3b3c3e3f404143444546', - 'ecb-tbl-128: I=97'), - ('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870', - '48494a4b4d4e4f50525354555758595a', - 'ecb-tbl-128: I=98'), - ('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef', - '5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-128: I=99'), - ('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc', - '70717273757677787a7b7c7d7f808182', - 'ecb-tbl-128: I=100'), - ('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574', - '84858687898a8b8c8e8f909193949596', - 'ecb-tbl-128: I=101'), - ('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', - 'ecb-tbl-128: I=102'), - ('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519', - 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-128: I=103'), - ('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', - 'ecb-tbl-128: I=104'), - ('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5', - 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-128: I=105'), - ('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', - 'ecb-tbl-128: I=106'), - ('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff', - 'fcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-128: I=107'), - ('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047', - '10111213151617181a1b1c1d1f202122', - 'ecb-tbl-128: I=108'), - ('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9', - '24252627292a2b2c2e2f303133343536', - 'ecb-tbl-128: I=109'), - ('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb', - '38393a3b3d3e3f40424344454748494a', - 'ecb-tbl-128: I=110'), - ('d6d7d4d58a89888f96979899a5a2a3a0', '1d190219f290e0f1715d152d41a23593', - '4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-128: I=111'), - ('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b', - '60616263656667686a6b6c6d6f707172', - 'ecb-tbl-128: I=112'), - ('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102', - '74757677797a7b7c7e7f808183848586', - 'ecb-tbl-128: I=113'), - ('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46', - 
'88898a8b8d8e8f90929394959798999a', - 'ecb-tbl-128: I=114'), - ('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a', - '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-128: I=115'), - ('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', - 'ecb-tbl-128: I=116'), - ('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588', - 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-128: I=117'), - ('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', - 'ecb-tbl-128: I=118'), - ('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf', - 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-128: I=119'), - ('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24', - '00010203050607080a0b0c0d0f101112', - 'ecb-tbl-128: I=120'), - ('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe', - '14151617191a1b1c1e1f202123242526', - 'ecb-tbl-128: I=121'), - ('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769', - '28292a2b2d2e2f30323334353738393a', - 'ecb-tbl-128: I=122'), - ('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf', - '3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-128: I=123'), - ('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e', - '50515253555657585a5b5c5d5f606162', - 'ecb-tbl-128: I=124'), - ('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488', - '64656667696a6b6c6e6f707173747576', - 'ecb-tbl-128: I=125'), - ('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882', - '78797a7b7d7e7f80828384858788898a', - 'ecb-tbl-128: I=126'), - ('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36', - '8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-128: I=127'), - ('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-128: I=128'), - - # ecb_tbl.txt, KEYSIZE=192 - ('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f', - '00010203050607080a0b0c0d0f10111214151617191a1b1c', - 'ecb-tbl-192: I=1'), - ('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252', - '1e1f20212324252628292a2b2d2e2f30323334353738393a', - 'ecb-tbl-192: I=2'), - ('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6', - '3c3d3e3f41424344464748494b4c4d4e5051525355565758', - 'ecb-tbl-192: I=3'), - ('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0', - '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-192: I=4'), - ('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', - 'ecb-tbl-192: I=5'), - ('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3', - '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-192: I=6'), - ('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', - 'ecb-tbl-192: I=7'), - ('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff', - 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-192: I=8'), - ('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', - 'ecb-tbl-192: I=9'), - ('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559', - '0e0f10111314151618191a1b1d1e1f20222324252728292a', - 'ecb-tbl-192: I=10'), - 
('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da', - '2c2d2e2f31323334363738393b3c3d3e4041424345464748', - 'ecb-tbl-192: I=11'), - ('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61', - '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-192: I=12'), - ('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', - 'ecb-tbl-192: I=13'), - ('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b', - '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', - 'ecb-tbl-192: I=14'), - ('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c', - 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0', - 'ecb-tbl-192: I=15'), - ('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92', - 'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-192: I=16'), - ('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc', - 'ecb-tbl-192: I=17'), - ('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9', - 'fefe01010304050608090a0b0d0e0f10121314151718191a', - 'ecb-tbl-192: I=18'), - ('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d', - '1c1d1e1f21222324262728292b2c2d2e3031323335363738', - 'ecb-tbl-192: I=19'), - ('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b', - '3a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-192: I=20'), - ('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374', - 'ecb-tbl-192: I=21'), - ('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6', - '767778797b7c7d7e80818283858687888a8b8c8d8f909192', - 'ecb-tbl-192: I=22'), - ('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c', - '94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0', - 'ecb-tbl-192: I=23'), - ('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec', - 'ecb-tbl-192: I=24'), - ('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88', - '2a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-192: I=25'), - ('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364', - 'ecb-tbl-192: I=26'), - ('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43', - '84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0', - 'ecb-tbl-192: I=27'), - ('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc', - 'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-192: I=28'), - ('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc', - 'ecb-tbl-192: I=29'), - ('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866', - '1a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-192: I=30'), - ('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354', - 'ecb-tbl-192: I=31'), - ('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6', - '929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-192: I=32'), - ('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef', - '464748494b4c4d4e50515253555657585a5b5c5d5f606162', - 'ecb-tbl-192: I=33'), - 
('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e', - '828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-192: I=34'), - ('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc', - 'ecb-tbl-192: I=35'), - ('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d', - 'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', - 'ecb-tbl-192: I=36'), - ('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017', - 'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8', - 'ecb-tbl-192: I=37'), - ('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e', - 'fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-192: I=38'), - ('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334', - 'ecb-tbl-192: I=39'), - ('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90', - '363738393b3c3d3e40414243454647484a4b4c4d4f505152', - 'ecb-tbl-192: I=40'), - ('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36', - '54555657595a5b5c5e5f60616364656668696a6b6d6e6f70', - 'ecb-tbl-192: I=41'), - ('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9', - '727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-192: I=42'), - ('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac', - 'ecb-tbl-192: I=43'), - ('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f', - 'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', - 'ecb-tbl-192: I=44'), - ('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b', - 'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8', - 'ecb-tbl-192: I=45'), - ('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa', - 'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-192: I=46'), - ('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324', - 'ecb-tbl-192: I=47'), - ('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90', - '262728292b2c2d2e30313233353637383a3b3c3d3f404142', - 'ecb-tbl-192: I=48'), - ('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6', - '44454647494a4b4c4e4f50515354555658595a5b5d5e5f60', - 'ecb-tbl-192: I=49'), - ('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0', - '626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-192: I=50'), - ('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d', - '80818283858687888a8b8c8d8f90919294959697999a9b9c', - 'ecb-tbl-192: I=51'), - ('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb', - '9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', - 'ecb-tbl-192: I=52'), - ('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568', - 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8', - 'ecb-tbl-192: I=53'), - ('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60', - 'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-192: I=54'), - ('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314', - 'ecb-tbl-192: I=55'), - ('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4', - '161718191b1c1d1e20212223252627282a2b2c2d2f303132', - 'ecb-tbl-192: I=56'), - 
('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3', - '34353637393a3b3c3e3f40414344454648494a4b4d4e4f50', - 'ecb-tbl-192: I=57'), - ('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462', - '525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-192: I=58'), - ('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20', - '70717273757677787a7b7c7d7f80818284858687898a8b8c', - 'ecb-tbl-192: I=59'), - ('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb', - '8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa', - 'ecb-tbl-192: I=60'), - ('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c', - 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8', - 'ecb-tbl-192: I=61'), - ('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f', - 'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-192: I=62'), - ('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304', - 'ecb-tbl-192: I=63'), - ('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc', - '060708090b0c0d0e10111213151617181a1b1c1d1f202122', - 'ecb-tbl-192: I=64'), - ('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570', - '24252627292a2b2c2e2f30313334353638393a3b3d3e3f40', - 'ecb-tbl-192: I=65'), - ('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b', - '424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-192: I=66'), - ('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2', - '60616263656667686a6b6c6d6f70717274757677797a7b7c', - 'ecb-tbl-192: I=67'), - ('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8', - '7e7f80818384858688898a8b8d8e8f90929394959798999a', - 'ecb-tbl-192: I=68'), - ('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f', - '9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8', - 'ecb-tbl-192: I=69'), - ('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c', - 'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-192: I=70'), - ('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4', - 'ecb-tbl-192: I=71'), - ('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57', - 'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112', - 'ecb-tbl-192: I=72'), - ('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c', - '14151617191a1b1c1e1f20212324252628292a2b2d2e2f30', - 'ecb-tbl-192: I=73'), - ('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568', - '323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-192: I=74'), - ('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc', - '50515253555657585a5b5c5d5f60616264656667696a6b6c', - 'ecb-tbl-192: I=75'), - ('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f', - '6e6f70717374757678797a7b7d7e7f80828384858788898a', - 'ecb-tbl-192: I=76'), - ('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9', - '8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8', - 'ecb-tbl-192: I=77'), - ('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da', - 'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-192: I=78'), - ('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4', - 'ecb-tbl-192: I=79'), - 
('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500', - 'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002', - 'ecb-tbl-192: I=80'), - ('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa', - '04050607090a0b0c0e0f10111314151618191a1b1d1e1f20', - 'ecb-tbl-192: I=81'), - ('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663', - '222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-192: I=82'), - ('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f', - '40414243454647484a4b4c4d4f50515254555657595a5b5c', - 'ecb-tbl-192: I=83'), - ('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a', - '5e5f60616364656668696a6b6d6e6f70727374757778797a', - 'ecb-tbl-192: I=84'), - ('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9', - '7c7d7e7f81828384868788898b8c8d8e9091929395969798', - 'ecb-tbl-192: I=85'), - ('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a', - '9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-192: I=86'), - ('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4', - 'ecb-tbl-192: I=87'), - ('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309', - 'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2', - 'ecb-tbl-192: I=88'), - ('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4', - 'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10', - 'ecb-tbl-192: I=89'), - ('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29', - '121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-192: I=90'), - ('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747', - '30313233353637383a3b3c3d3f40414244454647494a4b4c', - 'ecb-tbl-192: I=91'), - ('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1', - '4e4f50515354555658595a5b5d5e5f60626364656768696a', - 'ecb-tbl-192: I=92'), - ('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51', - '6c6d6e6f71727374767778797b7c7d7e8081828385868788', - 'ecb-tbl-192: I=93'), - ('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8', - '8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-192: I=94'), - ('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4', - 'ecb-tbl-192: I=95'), - ('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8', - 'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', - 'ecb-tbl-192: I=96'), - ('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a', - 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00', - 'ecb-tbl-192: I=97'), - ('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8', - '020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-192: I=98'), - ('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279', - '20212223252627282a2b2c2d2f30313234353637393a3b3c', - 'ecb-tbl-192: I=99'), - ('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574', - '3e3f40414344454648494a4b4d4e4f50525354555758595a', - 'ecb-tbl-192: I=100'), - ('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8', - '5c5d5e5f61626364666768696b6c6d6e7071727375767778', - 'ecb-tbl-192: I=101'), - ('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357', - '7a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-192: I=102'), - 
('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4', - 'ecb-tbl-192: I=103'), - ('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764', - 'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', - 'ecb-tbl-192: I=104'), - ('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48', - 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0', - 'ecb-tbl-192: I=105'), - ('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c', - 'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-192: I=106'), - ('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a', - '10111213151617181a1b1c1d1f20212224252627292a2b2c', - 'ecb-tbl-192: I=107'), - ('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec', - '2e2f30313334353638393a3b3d3e3f40424344454748494a', - 'ecb-tbl-192: I=108'), - ('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4', - '4c4d4e4f51525354565758595b5c5d5e6061626365666768', - 'ecb-tbl-192: I=109'), - ('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284', - '6a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-192: I=110'), - ('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4', - 'ecb-tbl-192: I=111'), - ('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92', - 'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', - 'ecb-tbl-192: I=112'), - ('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a', - 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0', - 'ecb-tbl-192: I=113'), - ('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2', - 'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-192: I=114'), - ('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8', - '00010203050607080a0b0c0d0f10111214151617191a1b1c', - 'ecb-tbl-192: I=115'), - ('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22', - '1e1f20212324252628292a2b2d2e2f30323334353738393a', - 'ecb-tbl-192: I=116'), - ('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696', - '3c3d3e3f41424344464748494b4c4d4e5051525355565758', - 'ecb-tbl-192: I=117'), - ('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1', - '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-192: I=118'), - ('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', - 'ecb-tbl-192: I=119'), - ('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78', - '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-192: I=120'), - ('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', - 'ecb-tbl-192: I=121'), - ('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8', - 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-192: I=122'), - ('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', - 'ecb-tbl-192: I=123'), - ('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb', - '0e0f10111314151618191a1b1d1e1f20222324252728292a', - 'ecb-tbl-192: I=124'), - ('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58', - '2c2d2e2f31323334363738393b3c3d3e4041424345464748', - 'ecb-tbl-192: I=125'), - 
('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a', - '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-192: I=126'), - ('89888b8a0407061966676061141b1a19', 'f53c464c705ee0f28d9a4c59374928bd', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', - 'ecb-tbl-192: I=127'), - ('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154', - '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', - 'ecb-tbl-192: I=128'), - - # ecb_tbl.txt, KEYSIZE=256 - ('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=1'), - ('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=2'), - ('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=3'), - ('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=4'), - ('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=5'), - ('e6248f55c5fdcbca9cbbb01c88a2ea77', '85399c01f59fffb5204f19f8482f00b8', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=6'), - ('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=7'), - ('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=8'), - ('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=9'), - ('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=10'), - ('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=11'), - ('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=12'), - ('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=13'), - ('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=14'), - ('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=15'), - ('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=16'), - ('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=17'), - ('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=18'), - 
('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=19'), - ('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=20'), - ('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=21'), - ('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=22'), - ('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=23'), - ('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=24'), - ('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=25'), - ('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=26'), - ('d9f6ff44646c4725bd4c0103ff5552a7', '430bf9570804185e1ab6365fc6a6860c', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=27'), - ('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=28'), - ('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=29'), - ('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=30'), - ('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=31'), - ('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=32'), - ('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=33'), - ('253548ffca461c67c8cbc78cd59f4756', '60436ad45ac7d30d99195f815d98d2ae', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=34'), - ('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=35'), - ('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=36'), - ('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=37'), - ('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=38'), - ('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=39'), - 
('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=40'), - ('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=41'), - ('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=42'), - ('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=43'), - ('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=44'), - ('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=45'), - ('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=46'), - ('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=47'), - ('77b1ce386b551b995f2f2a1da994eef8', '697c9245b9937f32f5d1c82319f0363a', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=48'), - ('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-256: I=49'), - ('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d', - '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-256: I=50'), - ('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-256: I=51'), - ('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=52'), - ('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=53'), - ('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=54'), - ('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=55'), - ('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=56'), - ('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=57'), - ('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=58'), - ('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=59'), - ('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=60'), - 
('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=61'), - ('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=62'), - ('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=63'), - ('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=64'), - ('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=65'), - ('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=66'), - ('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=67'), - ('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=68'), - ('64656667fefdfcc31b1a1d1ca5aaaba8', 'b192bd472a4d2eafb786e97458967626', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=69'), - ('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=70'), - ('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=71'), - ('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=72'), - ('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=73'), - ('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=74'), - ('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=75'), - ('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=76'), - ('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=77'), - ('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=78'), - ('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=79'), - ('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=80'), - ('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-256: I=81'), - 
('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661', - '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-256: I=82'), - ('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-256: I=83'), - ('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=84'), - ('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=85'), - ('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=86'), - ('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=87'), - ('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=88'), - ('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=89'), - ('ecedeeefd9dadbd4b9b8bfbe657a7b78', 'e0d343e14da75c917b4a5cec4810d7c2', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=90'), - ('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=91'), - ('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=92'), - ('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=93'), - ('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=94'), - ('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=95'), - ('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=96'), - ('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=97'), - ('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=98'), - ('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=99'), - ('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=100'), - ('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=101'), - ('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=102'), - 
('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=103'), - ('8c8d8e8fe0e3e2ed45444342f1cecfcc', 'a25b25a61f612669e7d91265c7d476ba', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=104'), - ('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=105'), - ('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=106'), - ('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=107'), - ('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=108'), - ('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=109'), - ('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=110'), - ('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=111'), - ('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=112'), - ('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-256: I=113'), - ('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5', - '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-256: I=114'), - ('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-256: I=115'), - ('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=116'), - ('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=117'), - ('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=118'), - ('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=119'), - ('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=120'), - ('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=121'), - ('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=122'), - ('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 
'ecb-tbl-256: I=123'), - ('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=124'), - ('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=125'), - ('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=126'), - ('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=127'), - ('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=128'), - - # FIPS PUB 800-38A test vectors, 2001 edition. Annex F. - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+ - '43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.1.1, ECB and AES-128'), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+ - 'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.1.3, ECB and AES-192'), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+ - 'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.1.3, ECB and AES-256'), - -] - -test_data_8_lanes = [] -for td in test_data: - test_data_8_lanes.append((td[0] * 8, td[1] * 8, td[2], td[3])) -test_data += test_data_8_lanes - -class TestMultipleBlocks(unittest.TestCase): - - def __init__(self, use_aesni): - unittest.TestCase.__init__(self) - self.use_aesni = use_aesni - - def runTest(self): - # Encrypt data which is 8*2+4 bytes long, so as to trigger (for the - # AESNI variant) both the path that parallelizes 8 lanes and the one - # that processes data serially - - tvs = [ - (b'a' * 16, 'c0b27011eb15bf144d2fc9fae80ea16d4c231cb230416c5fac02e6835ad9d7d0'), - (b'a' * 24, 'df8435ce361a78c535b41dcb57da952abbf9ee5954dc6fbcd75fd00fa626915d'), - (b'a' * 32, '211402de6c80db1f92ba255881178e1f70783b8cfd3b37808205e48b80486cd8') - ] - - for key, expected in tvs: - - cipher = AES.new(key, AES.MODE_ECB, use_aesni=self.use_aesni) - h = SHA256.new() - - pt = b"".join([ tobytes('{0:016x}'.format(x)) for x in range(20) ]) - ct = cipher.encrypt(pt) - self.assertEqual(SHA256.new(ct).hexdigest(), expected) - - -class TestIncompleteBlocks(unittest.TestCase): - - def __init__(self, use_aesni): - unittest.TestCase.__init__(self) - self.use_aesni = use_aesni - - def runTest(self): - # Encrypt data with length not multiple of 16 bytes - - cipher = AES.new(b'4'*16, AES.MODE_ECB, use_aesni=self.use_aesni) - - for msg_len in range(1, 16): - self.assertRaises(ValueError, cipher.encrypt, b'1' * msg_len) - 
self.assertRaises(ValueError, cipher.encrypt, b'1' * (msg_len+16)) - self.assertRaises(ValueError, cipher.decrypt, b'1' * msg_len) - self.assertRaises(ValueError, cipher.decrypt, b'1' * (msg_len+16)) - - self.assertEqual(cipher.encrypt(b''), b'') - self.assertEqual(cipher.decrypt(b''), b'') - - -class TestOutput(unittest.TestCase): - - def __init__(self, use_aesni): - unittest.TestCase.__init__(self) - self.use_aesni = use_aesni - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - cipher = AES.new(b'4'*16, AES.MODE_ECB, use_aesni=self.use_aesni) - - pt = b'5' * 16 - ct = cipher.encrypt(pt) - - output = bytearray(16) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(16)) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(15) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - from Cryptodome.Util import _cpu_features - from .common import make_block_tests - - tests = make_block_tests(AES, "AES", test_data, {'use_aesni': False}) - tests += [ TestMultipleBlocks(False) ] - tests += [ TestIncompleteBlocks(False) ] - if _cpu_features.have_aes_ni(): - # Run tests with AES-NI instructions if they are available. - tests += make_block_tests(AES, "AESNI", test_data, {'use_aesni': True}) - tests += [ TestMultipleBlocks(True) ] - tests += [ TestIncompleteBlocks(True) ] - tests += [ TestOutput(True) ] - else: - print("Skipping AESNI tests") - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ARC2.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ARC2.py deleted file mode 100644 index 0072506..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ARC2.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/ARC2.py: Self-test for the Alleged-RC2 cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.ARC2""" - -import unittest - -from Cryptodome.Util.py3compat import b, bchr - -from Cryptodome.Cipher import ARC2 - -# This is a list of (plaintext, ciphertext, key[, description[, extra_params]]) tuples. -test_data = [ - # Test vectors from RFC 2268 - - # 63-bit effective key length - ('0000000000000000', 'ebb773f993278eff', '0000000000000000', - 'RFC2268-1', dict(effective_keylen=63)), - - # 64-bit effective key length - ('ffffffffffffffff', '278b27e42e2f0d49', 'ffffffffffffffff', - 'RFC2268-2', dict(effective_keylen=64)), - ('1000000000000001', '30649edf9be7d2c2', '3000000000000000', - 'RFC2268-3', dict(effective_keylen=64)), - #('0000000000000000', '61a8a244adacccf0', '88', - # 'RFC2268-4', dict(effective_keylen=64)), - ('0000000000000000', '6ccf4308974c267f', '88bca90e90875a', - 'RFC2268-5', dict(effective_keylen=64)), - ('0000000000000000', '1a807d272bbe5db1', '88bca90e90875a7f0f79c384627bafb2', - 'RFC2268-6', dict(effective_keylen=64)), - - # 128-bit effective key length - ('0000000000000000', '2269552ab0f85ca6', '88bca90e90875a7f0f79c384627bafb2', - "RFC2268-7", dict(effective_keylen=128)), - ('0000000000000000', '5b78d3a43dfff1f1', - '88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e', - "RFC2268-8", dict(effective_keylen=129)), - - # Test vectors from PyCryptodome 2.0.1's testdata.py - # 1024-bit effective key length - ('0000000000000000', '624fb3e887419e48', '5068696c6970476c617373', - 'PCTv201-0'), - ('ffffffffffffffff', '79cadef44c4a5a85', '5068696c6970476c617373', - 'PCTv201-1'), - ('0001020304050607', '90411525b34e4c2c', '5068696c6970476c617373', - 'PCTv201-2'), - ('0011223344556677', '078656aaba61cbfb', '5068696c6970476c617373', - 'PCTv201-3'), - ('0000000000000000', 'd7bcc5dbb4d6e56a', 'ffffffffffffffff', - 'PCTv201-4'), - ('ffffffffffffffff', '7259018ec557b357', 'ffffffffffffffff', - 'PCTv201-5'), - ('0001020304050607', '93d20a497f2ccb62', 'ffffffffffffffff', - 'PCTv201-6'), - ('0011223344556677', 'cb15a7f819c0014d', 'ffffffffffffffff', - 'PCTv201-7'), - ('0000000000000000', '63ac98cdf3843a7a', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-8'), - ('ffffffffffffffff', '3fb49e2fa12371dd', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-9'), - ('0001020304050607', '46414781ab387d5f', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-10'), - ('0011223344556677', 'be09dc81feaca271', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-11'), - ('0000000000000000', 'e64221e608be30ab', '53e5ffe553', - 'PCTv201-12'), - ('ffffffffffffffff', '862bc60fdcd4d9a9', '53e5ffe553', - 'PCTv201-13'), - ('0001020304050607', '6a34da50fa5e47de', '53e5ffe553', - 'PCTv201-14'), - ('0011223344556677', '584644c34503122c', '53e5ffe553', - 'PCTv201-15'), -] - -class BufferOverflowTest(unittest.TestCase): - # Test a buffer overflow found in older versions of PyCrypto - - def runTest(self): - """ARC2 with keylength > 128""" - key = b("x") * 16384 - self.assertRaises(ValueError, ARC2.new, key, ARC2.MODE_ECB) - -class KeyLength(unittest.TestCase): - - def runTest(self): - ARC2.new(b'\x00' * 16, ARC2.MODE_ECB, effective_keylen=40) - self.assertRaises(ValueError, ARC2.new, bchr(0) * 4, ARC2.MODE_ECB) - self.assertRaises(ValueError, ARC2.new, bchr(0) * 129, ARC2.MODE_ECB) - - self.assertRaises(ValueError, ARC2.new, bchr(0) * 16, ARC2.MODE_ECB, - effective_keylen=39) - self.assertRaises(ValueError, 
ARC2.new, bchr(0) * 16, ARC2.MODE_ECB, - effective_keylen=1025) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - cipher = ARC2.new(b'4'*16, ARC2.MODE_ECB) - - pt = b'5' * 16 - ct = cipher.encrypt(pt) - - output = bytearray(16) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(16)) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(7) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - from Cryptodome.Cipher import ARC2 - from .common import make_block_tests - - tests = make_block_tests(ARC2, "ARC2", test_data) - tests.append(BufferOverflowTest()) - tests.append(KeyLength()) - tests += [TestOutput()] - - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ARC4.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ARC4.py deleted file mode 100644 index a160c98..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ARC4.py +++ /dev/null @@ -1,471 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/ARC4.py: Self-test for the Alleged-RC4 cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.ARC4""" - -import unittest - -from Cryptodome.Util.py3compat import b -from Cryptodome.SelfTest.st_common import list_test_cases -from binascii import unhexlify - -from Cryptodome.Cipher import ARC4 - -# This is a list of (plaintext, ciphertext, key[, description]) tuples. -test_data = [ - # Test vectors from Eric Rescorla's message with the subject - # "RC4 compatibility testing", sent to the cipherpunks mailing list on - # September 13, 1994. 
- # http://cypherpunks.venona.com/date/1994/09/msg00420.html - - ('0123456789abcdef', '75b7878099e0c596', '0123456789abcdef', - 'Test vector 0'), - - ('0000000000000000', '7494c2e7104b0879', '0123456789abcdef', - 'Test vector 1'), - - ('0000000000000000', 'de188941a3375d3a', '0000000000000000', - 'Test vector 2'), - - ('00000000000000000000', 'd6a141a7ec3c38dfbd61', 'ef012345', - 'Test vector 3'), - - ('01' * 512, - '7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76533449b6778dcad8' - + 'c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b1b13b6b919b8' - + '47c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377108f98fd' - + 'cbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d7a43' - + '03dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747' - + 'b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f' - + '5d44c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421' - + 'd43df9b42e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5' - + '585cb009290e2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb2729' - + '12426445998514c15d53a18c864ce3a2b7555793988126520eacf2e3066e230c' - + '91bee4dd5304f5fd0405b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1' - + 'eaa595d8bfc0066ff8d31509eb0c6caa006c807a623ef84c3d33c195d23ee320' - + 'c40de0558157c822d4b8c569d849aed59d4e0fd7f379586b4b7ff684ed6a189f' - + '7486d49b9c4bad9ba24b96abf924372c8a8fffb10d55354900a77a3db5f205e1' - + 'b99fcd8660863a159ad4abe40fa48934163ddde542a6585540fd683cbfd8c00f' - + '12129a284deacc4cdefe58be7137541c047126c8d49e2755ab181ab7e940b0c0', - '0123456789abcdef', - "Test vector 4"), - # shortest key - generated with arc4 package - ('7468697320697320616e206578616d706c65', - '7260677d38495a09585d69321e17eaf3cdd0', - '01'), -] - - -class RFC6229_Tests(unittest.TestCase): - # Test vectors from RFC 6229. Each test vector is a tuple with two items: - # the ARC4 key and a dictionary. The dictionary has keystream offsets as keys - # and the 16-byte keystream starting at the relevant offset as value. 
- rfc6229_data = [ - # Page 3 - ( - '0102030405', - { - 0: 'b2 39 63 05 f0 3d c0 27 cc c3 52 4a 0a 11 18 a8', - 16: '69 82 94 4f 18 fc 82 d5 89 c4 03 a4 7a 0d 09 19', - 240: '28 cb 11 32 c9 6c e2 86 42 1d ca ad b8 b6 9e ae', - 256: '1c fc f6 2b 03 ed db 64 1d 77 df cf 7f 8d 8c 93', - 496: '42 b7 d0 cd d9 18 a8 a3 3d d5 17 81 c8 1f 40 41', - 512: '64 59 84 44 32 a7 da 92 3c fb 3e b4 98 06 61 f6', - 752: 'ec 10 32 7b de 2b ee fd 18 f9 27 76 80 45 7e 22', - 768: 'eb 62 63 8d 4f 0b a1 fe 9f ca 20 e0 5b f8 ff 2b', - 1008: '45 12 90 48 e6 a0 ed 0b 56 b4 90 33 8f 07 8d a5', - 1024: '30 ab bc c7 c2 0b 01 60 9f 23 ee 2d 5f 6b b7 df', - 1520: '32 94 f7 44 d8 f9 79 05 07 e7 0f 62 e5 bb ce ea', - 1536: 'd8 72 9d b4 18 82 25 9b ee 4f 82 53 25 f5 a1 30', - 2032: '1e b1 4a 0c 13 b3 bf 47 fa 2a 0b a9 3a d4 5b 8b', - 2048: 'cc 58 2f 8b a9 f2 65 e2 b1 be 91 12 e9 75 d2 d7', - 3056: 'f2 e3 0f 9b d1 02 ec bf 75 aa ad e9 bc 35 c4 3c', - 3072: 'ec 0e 11 c4 79 dc 32 9d c8 da 79 68 fe 96 56 81', - 4080: '06 83 26 a2 11 84 16 d2 1f 9d 04 b2 cd 1c a0 50', - 4096: 'ff 25 b5 89 95 99 67 07 e5 1f bd f0 8b 34 d8 75' - } - ), - # Page 4 - ( - '01020304050607', - { - 0: '29 3f 02 d4 7f 37 c9 b6 33 f2 af 52 85 fe b4 6b', - 16: 'e6 20 f1 39 0d 19 bd 84 e2 e0 fd 75 20 31 af c1', - 240: '91 4f 02 53 1c 92 18 81 0d f6 0f 67 e3 38 15 4c', - 256: 'd0 fd b5 83 07 3c e8 5a b8 39 17 74 0e c0 11 d5', - 496: '75 f8 14 11 e8 71 cf fa 70 b9 0c 74 c5 92 e4 54', - 512: '0b b8 72 02 93 8d ad 60 9e 87 a5 a1 b0 79 e5 e4', - 752: 'c2 91 12 46 b6 12 e7 e7 b9 03 df ed a1 da d8 66', - 768: '32 82 8f 91 50 2b 62 91 36 8d e8 08 1d e3 6f c2', - 1008: 'f3 b9 a7 e3 b2 97 bf 9a d8 04 51 2f 90 63 ef f1', - 1024: '8e cb 67 a9 ba 1f 55 a5 a0 67 e2 b0 26 a3 67 6f', - 1520: 'd2 aa 90 2b d4 2d 0d 7c fd 34 0c d4 58 10 52 9f', - 1536: '78 b2 72 c9 6e 42 ea b4 c6 0b d9 14 e3 9d 06 e3', - 2032: 'f4 33 2f d3 1a 07 93 96 ee 3c ee 3f 2a 4f f0 49', - 2048: '05 45 97 81 d4 1f da 7f 30 c1 be 7e 12 46 c6 23', - 3056: 'ad fd 38 68 b8 e5 14 85 d5 e6 10 01 7e 3d d6 09', - 3072: 'ad 26 58 1c 0c 5b e4 5f 4c ea 01 db 2f 38 05 d5', - 4080: 'f3 17 2c ef fc 3b 3d 99 7c 85 cc d5 af 1a 95 0c', - 4096: 'e7 4b 0b 97 31 22 7f d3 7c 0e c0 8a 47 dd d8 b8' - } - ), - ( - '0102030405060708', - { - 0: '97 ab 8a 1b f0 af b9 61 32 f2 f6 72 58 da 15 a8', - 16: '82 63 ef db 45 c4 a1 86 84 ef 87 e6 b1 9e 5b 09', - 240: '96 36 eb c9 84 19 26 f4 f7 d1 f3 62 bd df 6e 18', - 256: 'd0 a9 90 ff 2c 05 fe f5 b9 03 73 c9 ff 4b 87 0a', - 496: '73 23 9f 1d b7 f4 1d 80 b6 43 c0 c5 25 18 ec 63', - 512: '16 3b 31 99 23 a6 bd b4 52 7c 62 61 26 70 3c 0f', - 752: '49 d6 c8 af 0f 97 14 4a 87 df 21 d9 14 72 f9 66', - 768: '44 17 3a 10 3b 66 16 c5 d5 ad 1c ee 40 c8 63 d0', - 1008: '27 3c 9c 4b 27 f3 22 e4 e7 16 ef 53 a4 7d e7 a4', - 1024: 'c6 d0 e7 b2 26 25 9f a9 02 34 90 b2 61 67 ad 1d', - 1520: '1f e8 98 67 13 f0 7c 3d 9a e1 c1 63 ff 8c f9 d3', - 1536: '83 69 e1 a9 65 61 0b e8 87 fb d0 c7 91 62 aa fb', - 2032: '0a 01 27 ab b4 44 84 b9 fb ef 5a bc ae 1b 57 9f', - 2048: 'c2 cd ad c6 40 2e 8e e8 66 e1 f3 7b db 47 e4 2c', - 3056: '26 b5 1e a3 7d f8 e1 d6 f7 6f c3 b6 6a 74 29 b3', - 3072: 'bc 76 83 20 5d 4f 44 3d c1 f2 9d da 33 15 c8 7b', - 4080: 'd5 fa 5a 34 69 d2 9a aa f8 3d 23 58 9d b8 c8 5b', - 4096: '3f b4 6e 2c 8f 0f 06 8e dc e8 cd cd 7d fc 58 62' - } - ), - # Page 5 - ( - '0102030405060708090a', - { - 0: 'ed e3 b0 46 43 e5 86 cc 90 7d c2 18 51 70 99 02', - 16: '03 51 6b a7 8f 41 3b eb 22 3a a5 d4 d2 df 67 11', - 240: '3c fd 6c b5 8e e0 fd de 64 01 76 ad 00 00 04 4d', - 256: '48 53 2b 21 fb 60 79 c9 11 4c 
0f fd 9c 04 a1 ad', - 496: '3e 8c ea 98 01 71 09 97 90 84 b1 ef 92 f9 9d 86', - 512: 'e2 0f b4 9b db 33 7e e4 8b 8d 8d c0 f4 af ef fe', - 752: '5c 25 21 ea cd 79 66 f1 5e 05 65 44 be a0 d3 15', - 768: 'e0 67 a7 03 19 31 a2 46 a6 c3 87 5d 2f 67 8a cb', - 1008: 'a6 4f 70 af 88 ae 56 b6 f8 75 81 c0 e2 3e 6b 08', - 1024: 'f4 49 03 1d e3 12 81 4e c6 f3 19 29 1f 4a 05 16', - 1520: 'bd ae 85 92 4b 3c b1 d0 a2 e3 3a 30 c6 d7 95 99', - 1536: '8a 0f ed db ac 86 5a 09 bc d1 27 fb 56 2e d6 0a', - 2032: 'b5 5a 0a 5b 51 a1 2a 8b e3 48 99 c3 e0 47 51 1a', - 2048: 'd9 a0 9c ea 3c e7 5f e3 96 98 07 03 17 a7 13 39', - 3056: '55 22 25 ed 11 77 f4 45 84 ac 8c fa 6c 4e b5 fc', - 3072: '7e 82 cb ab fc 95 38 1b 08 09 98 44 21 29 c2 f8', - 4080: '1f 13 5e d1 4c e6 0a 91 36 9d 23 22 be f2 5e 3c', - 4096: '08 b6 be 45 12 4a 43 e2 eb 77 95 3f 84 dc 85 53' - } - ), - ( - '0102030405060708090a0b0c0d0e0f10', - { - 0: '9a c7 cc 9a 60 9d 1e f7 b2 93 28 99 cd e4 1b 97', - 16: '52 48 c4 95 90 14 12 6a 6e 8a 84 f1 1d 1a 9e 1c', - 240: '06 59 02 e4 b6 20 f6 cc 36 c8 58 9f 66 43 2f 2b', - 256: 'd3 9d 56 6b c6 bc e3 01 07 68 15 15 49 f3 87 3f', - 496: 'b6 d1 e6 c4 a5 e4 77 1c ad 79 53 8d f2 95 fb 11', - 512: 'c6 8c 1d 5c 55 9a 97 41 23 df 1d bc 52 a4 3b 89', - 752: 'c5 ec f8 8d e8 97 fd 57 fe d3 01 70 1b 82 a2 59', - 768: 'ec cb e1 3d e1 fc c9 1c 11 a0 b2 6c 0b c8 fa 4d', - 1008: 'e7 a7 25 74 f8 78 2a e2 6a ab cf 9e bc d6 60 65', - 1024: 'bd f0 32 4e 60 83 dc c6 d3 ce dd 3c a8 c5 3c 16', - 1520: 'b4 01 10 c4 19 0b 56 22 a9 61 16 b0 01 7e d2 97', - 1536: 'ff a0 b5 14 64 7e c0 4f 63 06 b8 92 ae 66 11 81', - 2032: 'd0 3d 1b c0 3c d3 3d 70 df f9 fa 5d 71 96 3e bd', - 2048: '8a 44 12 64 11 ea a7 8b d5 1e 8d 87 a8 87 9b f5', - 3056: 'fa be b7 60 28 ad e2 d0 e4 87 22 e4 6c 46 15 a3', - 3072: 'c0 5d 88 ab d5 03 57 f9 35 a6 3c 59 ee 53 76 23', - 4080: 'ff 38 26 5c 16 42 c1 ab e8 d3 c2 fe 5e 57 2b f8', - 4096: 'a3 6a 4c 30 1a e8 ac 13 61 0c cb c1 22 56 ca cc' - } - ), - # Page 6 - ( - '0102030405060708090a0b0c0d0e0f101112131415161718', - { - 0: '05 95 e5 7f e5 f0 bb 3c 70 6e da c8 a4 b2 db 11', - 16: 'df de 31 34 4a 1a f7 69 c7 4f 07 0a ee 9e 23 26', - 240: 'b0 6b 9b 1e 19 5d 13 d8 f4 a7 99 5c 45 53 ac 05', - 256: '6b d2 37 8e c3 41 c9 a4 2f 37 ba 79 f8 8a 32 ff', - 496: 'e7 0b ce 1d f7 64 5a db 5d 2c 41 30 21 5c 35 22', - 512: '9a 57 30 c7 fc b4 c9 af 51 ff da 89 c7 f1 ad 22', - 752: '04 85 05 5f d4 f6 f0 d9 63 ef 5a b9 a5 47 69 82', - 768: '59 1f c6 6b cd a1 0e 45 2b 03 d4 55 1f 6b 62 ac', - 1008: '27 53 cc 83 98 8a fa 3e 16 88 a1 d3 b4 2c 9a 02', - 1024: '93 61 0d 52 3d 1d 3f 00 62 b3 c2 a3 bb c7 c7 f0', - 1520: '96 c2 48 61 0a ad ed fe af 89 78 c0 3d e8 20 5a', - 1536: '0e 31 7b 3d 1c 73 b9 e9 a4 68 8f 29 6d 13 3a 19', - 2032: 'bd f0 e6 c3 cc a5 b5 b9 d5 33 b6 9c 56 ad a1 20', - 2048: '88 a2 18 b6 e2 ec e1 e6 24 6d 44 c7 59 d1 9b 10', - 3056: '68 66 39 7e 95 c1 40 53 4f 94 26 34 21 00 6e 40', - 3072: '32 cb 0a 1e 95 42 c6 b3 b8 b3 98 ab c3 b0 f1 d5', - 4080: '29 a0 b8 ae d5 4a 13 23 24 c6 2e 42 3f 54 b4 c8', - 4096: '3c b0 f3 b5 02 0a 98 b8 2a f9 fe 15 44 84 a1 68' - } - ), - ( - '0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', - { - 0: 'ea a6 bd 25 88 0b f9 3d 3f 5d 1e 4c a2 61 1d 91', - 16: 'cf a4 5c 9f 7e 71 4b 54 bd fa 80 02 7c b1 43 80', - 240: '11 4a e3 44 de d7 1b 35 f2 e6 0f eb ad 72 7f d8', - 256: '02 e1 e7 05 6b 0f 62 39 00 49 64 22 94 3e 97 b6', - 496: '91 cb 93 c7 87 96 4e 10 d9 52 7d 99 9c 6f 93 6b', - 512: '49 b1 8b 42 f8 e8 36 7c be b5 ef 10 4b a1 c7 cd', - 752: '87 08 4b 3b a7 00 ba de 95 56 10 67 
27 45 b3 74', - 768: 'e7 a7 b9 e9 ec 54 0d 5f f4 3b db 12 79 2d 1b 35', - 1008: 'c7 99 b5 96 73 8f 6b 01 8c 76 c7 4b 17 59 bd 90', - 1024: '7f ec 5b fd 9f 9b 89 ce 65 48 30 90 92 d7 e9 58', - 1520: '40 f2 50 b2 6d 1f 09 6a 4a fd 4c 34 0a 58 88 15', - 1536: '3e 34 13 5c 79 db 01 02 00 76 76 51 cf 26 30 73', - 2032: 'f6 56 ab cc f8 8d d8 27 02 7b 2c e9 17 d4 64 ec', - 2048: '18 b6 25 03 bf bc 07 7f ba bb 98 f2 0d 98 ab 34', - 3056: '8a ed 95 ee 5b 0d cb fb ef 4e b2 1d 3a 3f 52 f9', - 3072: '62 5a 1a b0 0e e3 9a 53 27 34 6b dd b0 1a 9c 18', - 4080: 'a1 3a 7c 79 c7 e1 19 b5 ab 02 96 ab 28 c3 00 b9', - 4096: 'f3 e4 c0 a2 e0 2d 1d 01 f7 f0 a7 46 18 af 2b 48' - } - ), - # Page 7 - ( - '833222772a', - { - 0: '80 ad 97 bd c9 73 df 8a 2e 87 9e 92 a4 97 ef da', - 16: '20 f0 60 c2 f2 e5 12 65 01 d3 d4 fe a1 0d 5f c0', - 240: 'fa a1 48 e9 90 46 18 1f ec 6b 20 85 f3 b2 0e d9', - 256: 'f0 da f5 ba b3 d5 96 83 98 57 84 6f 73 fb fe 5a', - 496: '1c 7e 2f c4 63 92 32 fe 29 75 84 b2 96 99 6b c8', - 512: '3d b9 b2 49 40 6c c8 ed ff ac 55 cc d3 22 ba 12', - 752: 'e4 f9 f7 e0 06 61 54 bb d1 25 b7 45 56 9b c8 97', - 768: '75 d5 ef 26 2b 44 c4 1a 9c f6 3a e1 45 68 e1 b9', - 1008: '6d a4 53 db f8 1e 82 33 4a 3d 88 66 cb 50 a1 e3', - 1024: '78 28 d0 74 11 9c ab 5c 22 b2 94 d7 a9 bf a0 bb', - 1520: 'ad b8 9c ea 9a 15 fb e6 17 29 5b d0 4b 8c a0 5c', - 1536: '62 51 d8 7f d4 aa ae 9a 7e 4a d5 c2 17 d3 f3 00', - 2032: 'e7 11 9b d6 dd 9b 22 af e8 f8 95 85 43 28 81 e2', - 2048: '78 5b 60 fd 7e c4 e9 fc b6 54 5f 35 0d 66 0f ab', - 3056: 'af ec c0 37 fd b7 b0 83 8e b3 d7 0b cd 26 83 82', - 3072: 'db c1 a7 b4 9d 57 35 8c c9 fa 6d 61 d7 3b 7c f0', - 4080: '63 49 d1 26 a3 7a fc ba 89 79 4f 98 04 91 4f dc', - 4096: 'bf 42 c3 01 8c 2f 7c 66 bf de 52 49 75 76 81 15' - } - ), - ( - '1910833222772a', - { - 0: 'bc 92 22 db d3 27 4d 8f c6 6d 14 cc bd a6 69 0b', - 16: '7a e6 27 41 0c 9a 2b e6 93 df 5b b7 48 5a 63 e3', - 240: '3f 09 31 aa 03 de fb 30 0f 06 01 03 82 6f 2a 64', - 256: 'be aa 9e c8 d5 9b b6 81 29 f3 02 7c 96 36 11 81', - 496: '74 e0 4d b4 6d 28 64 8d 7d ee 8a 00 64 b0 6c fe', - 512: '9b 5e 81 c6 2f e0 23 c5 5b e4 2f 87 bb f9 32 b8', - 752: 'ce 17 8f c1 82 6e fe cb c1 82 f5 79 99 a4 61 40', - 768: '8b df 55 cd 55 06 1c 06 db a6 be 11 de 4a 57 8a', - 1008: '62 6f 5f 4d ce 65 25 01 f3 08 7d 39 c9 2c c3 49', - 1024: '42 da ac 6a 8f 9a b9 a7 fd 13 7c 60 37 82 56 82', - 1520: 'cc 03 fd b7 91 92 a2 07 31 2f 53 f5 d4 dc 33 d9', - 1536: 'f7 0f 14 12 2a 1c 98 a3 15 5d 28 b8 a0 a8 a4 1d', - 2032: '2a 3a 30 7a b2 70 8a 9c 00 fe 0b 42 f9 c2 d6 a1', - 2048: '86 26 17 62 7d 22 61 ea b0 b1 24 65 97 ca 0a e9', - 3056: '55 f8 77 ce 4f 2e 1d db bf 8e 13 e2 cd e0 fd c8', - 3072: '1b 15 56 cb 93 5f 17 33 37 70 5f bb 5d 50 1f c1', - 4080: 'ec d0 e9 66 02 be 7f 8d 50 92 81 6c cc f2 c2 e9', - 4096: '02 78 81 fa b4 99 3a 1c 26 20 24 a9 4f ff 3f 61' - } - ), - # Page 8 - ( - '641910833222772a', - { - 0: 'bb f6 09 de 94 13 17 2d 07 66 0c b6 80 71 69 26', - 16: '46 10 1a 6d ab 43 11 5d 6c 52 2b 4f e9 36 04 a9', - 240: 'cb e1 ff f2 1c 96 f3 ee f6 1e 8f e0 54 2c bd f0', - 256: '34 79 38 bf fa 40 09 c5 12 cf b4 03 4b 0d d1 a7', - 496: '78 67 a7 86 d0 0a 71 47 90 4d 76 dd f1 e5 20 e3', - 512: '8d 3e 9e 1c ae fc cc b3 fb f8 d1 8f 64 12 0b 32', - 752: '94 23 37 f8 fd 76 f0 fa e8 c5 2d 79 54 81 06 72', - 768: 'b8 54 8c 10 f5 16 67 f6 e6 0e 18 2f a1 9b 30 f7', - 1008: '02 11 c7 c6 19 0c 9e fd 12 37 c3 4c 8f 2e 06 c4', - 1024: 'bd a6 4f 65 27 6d 2a ac b8 f9 02 12 20 3a 80 8e', - 1520: 'bd 38 20 f7 32 ff b5 3e c1 93 e7 9d 33 e2 7c 73', - 1536: 'd0 16 86 16 86 
19 07 d4 82 e3 6c da c8 cf 57 49', - 2032: '97 b0 f0 f2 24 b2 d2 31 71 14 80 8f b0 3a f7 a0', - 2048: 'e5 96 16 e4 69 78 79 39 a0 63 ce ea 9a f9 56 d1', - 3056: 'c4 7e 0d c1 66 09 19 c1 11 01 20 8f 9e 69 aa 1f', - 3072: '5a e4 f1 28 96 b8 37 9a 2a ad 89 b5 b5 53 d6 b0', - 4080: '6b 6b 09 8d 0c 29 3b c2 99 3d 80 bf 05 18 b6 d9', - 4096: '81 70 cc 3c cd 92 a6 98 62 1b 93 9d d3 8f e7 b9' - } - ), - ( - '8b37641910833222772a', - { - 0: 'ab 65 c2 6e dd b2 87 60 0d b2 fd a1 0d 1e 60 5c', - 16: 'bb 75 90 10 c2 96 58 f2 c7 2d 93 a2 d1 6d 29 30', - 240: 'b9 01 e8 03 6e d1 c3 83 cd 3c 4c 4d d0 a6 ab 05', - 256: '3d 25 ce 49 22 92 4c 55 f0 64 94 33 53 d7 8a 6c', - 496: '12 c1 aa 44 bb f8 7e 75 e6 11 f6 9b 2c 38 f4 9b', - 512: '28 f2 b3 43 4b 65 c0 98 77 47 00 44 c6 ea 17 0d', - 752: 'bd 9e f8 22 de 52 88 19 61 34 cf 8a f7 83 93 04', - 768: '67 55 9c 23 f0 52 15 84 70 a2 96 f7 25 73 5a 32', - 1008: '8b ab 26 fb c2 c1 2b 0f 13 e2 ab 18 5e ab f2 41', - 1024: '31 18 5a 6d 69 6f 0c fa 9b 42 80 8b 38 e1 32 a2', - 1520: '56 4d 3d ae 18 3c 52 34 c8 af 1e 51 06 1c 44 b5', - 1536: '3c 07 78 a7 b5 f7 2d 3c 23 a3 13 5c 7d 67 b9 f4', - 2032: 'f3 43 69 89 0f cf 16 fb 51 7d ca ae 44 63 b2 dd', - 2048: '02 f3 1c 81 e8 20 07 31 b8 99 b0 28 e7 91 bf a7', - 3056: '72 da 64 62 83 22 8c 14 30 08 53 70 17 95 61 6f', - 3072: '4e 0a 8c 6f 79 34 a7 88 e2 26 5e 81 d6 d0 c8 f4', - 4080: '43 8d d5 ea fe a0 11 1b 6f 36 b4 b9 38 da 2a 68', - 4096: '5f 6b fc 73 81 58 74 d9 71 00 f0 86 97 93 57 d8' - } - ), - # Page 9 - ( - 'ebb46227c6cc8b37641910833222772a', - { - 0: '72 0c 94 b6 3e df 44 e1 31 d9 50 ca 21 1a 5a 30', - 16: 'c3 66 fd ea cf 9c a8 04 36 be 7c 35 84 24 d2 0b', - 240: 'b3 39 4a 40 aa bf 75 cb a4 22 82 ef 25 a0 05 9f', - 256: '48 47 d8 1d a4 94 2d bc 24 9d ef c4 8c 92 2b 9f', - 496: '08 12 8c 46 9f 27 53 42 ad da 20 2b 2b 58 da 95', - 512: '97 0d ac ef 40 ad 98 72 3b ac 5d 69 55 b8 17 61', - 752: '3c b8 99 93 b0 7b 0c ed 93 de 13 d2 a1 10 13 ac', - 768: 'ef 2d 67 6f 15 45 c2 c1 3d c6 80 a0 2f 4a db fe', - 1008: 'b6 05 95 51 4f 24 bc 9f e5 22 a6 ca d7 39 36 44', - 1024: 'b5 15 a8 c5 01 17 54 f5 90 03 05 8b db 81 51 4e', - 1520: '3c 70 04 7e 8c bc 03 8e 3b 98 20 db 60 1d a4 95', - 1536: '11 75 da 6e e7 56 de 46 a5 3e 2b 07 56 60 b7 70', - 2032: '00 a5 42 bb a0 21 11 cc 2c 65 b3 8e bd ba 58 7e', - 2048: '58 65 fd bb 5b 48 06 41 04 e8 30 b3 80 f2 ae de', - 3056: '34 b2 1a d2 ad 44 e9 99 db 2d 7f 08 63 f0 d9 b6', - 3072: '84 a9 21 8f c3 6e 8a 5f 2c cf be ae 53 a2 7d 25', - 4080: 'a2 22 1a 11 b8 33 cc b4 98 a5 95 40 f0 54 5f 4a', - 4096: '5b be b4 78 7d 59 e5 37 3f db ea 6c 6f 75 c2 9b' - } - ), - ( - 'c109163908ebe51debb46227c6cc8b37641910833222772a', - { - 0: '54 b6 4e 6b 5a 20 b5 e2 ec 84 59 3d c7 98 9d a7', - 16: 'c1 35 ee e2 37 a8 54 65 ff 97 dc 03 92 4f 45 ce', - 240: 'cf cc 92 2f b4 a1 4a b4 5d 61 75 aa bb f2 d2 01', - 256: '83 7b 87 e2 a4 46 ad 0e f7 98 ac d0 2b 94 12 4f', - 496: '17 a6 db d6 64 92 6a 06 36 b3 f4 c3 7a 4f 46 94', - 512: '4a 5f 9f 26 ae ee d4 d4 a2 5f 63 2d 30 52 33 d9', - 752: '80 a3 d0 1e f0 0c 8e 9a 42 09 c1 7f 4e eb 35 8c', - 768: 'd1 5e 7d 5f fa aa bc 02 07 bf 20 0a 11 77 93 a2', - 1008: '34 96 82 bf 58 8e aa 52 d0 aa 15 60 34 6a ea fa', - 1024: 'f5 85 4c db 76 c8 89 e3 ad 63 35 4e 5f 72 75 e3', - 1520: '53 2c 7c ec cb 39 df 32 36 31 84 05 a4 b1 27 9c', - 1536: 'ba ef e6 d9 ce b6 51 84 22 60 e0 d1 e0 5e 3b 90', - 2032: 'e8 2d 8c 6d b5 4e 3c 63 3f 58 1c 95 2b a0 42 07', - 2048: '4b 16 e5 0a bd 38 1b d7 09 00 a9 cd 9a 62 cb 23', - 3056: '36 82 ee 33 bd 14 8b d9 f5 86 56 cd 8f 30 d9 fb', - 3072: '1e 
5a 0b 84 75 04 5d 9b 20 b2 62 86 24 ed fd 9e', - 4080: '63 ed d6 84 fb 82 62 82 fe 52 8f 9c 0e 92 37 bc', - 4096: 'e4 dd 2e 98 d6 96 0f ae 0b 43 54 54 56 74 33 91' - } - ), - # Page 10 - ( - '1ada31d5cf688221c109163908ebe51debb46227c6cc8b37641910833222772a', - { - 0: 'dd 5b cb 00 18 e9 22 d4 94 75 9d 7c 39 5d 02 d3', - 16: 'c8 44 6f 8f 77 ab f7 37 68 53 53 eb 89 a1 c9 eb', - 240: 'af 3e 30 f9 c0 95 04 59 38 15 15 75 c3 fb 90 98', - 256: 'f8 cb 62 74 db 99 b8 0b 1d 20 12 a9 8e d4 8f 0e', - 496: '25 c3 00 5a 1c b8 5d e0 76 25 98 39 ab 71 98 ab', - 512: '9d cb c1 83 e8 cb 99 4b 72 7b 75 be 31 80 76 9c', - 752: 'a1 d3 07 8d fa 91 69 50 3e d9 d4 49 1d ee 4e b2', - 768: '85 14 a5 49 58 58 09 6f 59 6e 4b cd 66 b1 06 65', - 1008: '5f 40 d5 9e c1 b0 3b 33 73 8e fa 60 b2 25 5d 31', - 1024: '34 77 c7 f7 64 a4 1b ac ef f9 0b f1 4f 92 b7 cc', - 1520: 'ac 4e 95 36 8d 99 b9 eb 78 b8 da 8f 81 ff a7 95', - 1536: '8c 3c 13 f8 c2 38 8b b7 3f 38 57 6e 65 b7 c4 46', - 2032: '13 c4 b9 c1 df b6 65 79 ed dd 8a 28 0b 9f 73 16', - 2048: 'dd d2 78 20 55 01 26 69 8e fa ad c6 4b 64 f6 6e', - 3056: 'f0 8f 2e 66 d2 8e d1 43 f3 a2 37 cf 9d e7 35 59', - 3072: '9e a3 6c 52 55 31 b8 80 ba 12 43 34 f5 7b 0b 70', - 4080: 'd5 a3 9e 3d fc c5 02 80 ba c4 a6 b5 aa 0d ca 7d', - 4096: '37 0b 1c 1f e6 55 91 6d 97 fd 0d 47 ca 1d 72 b8' - } - ) - ] - - def test_keystream(self): - for tv in self.rfc6229_data: - key = unhexlify(b((tv[0]))) - cipher = ARC4.new(key) - count = 0 - for offset in range(0, 4096+1, 16): - ct = cipher.encrypt(b('\x00')*16) - expected = tv[1].get(offset) - if expected: - expected = unhexlify(b(expected.replace(" ", ''))) - self.assertEqual(ct, expected) - count += 1 - self.assertEqual(count, len(tv[1])) - - -class Drop_Tests(unittest.TestCase): - key = b('\xAA')*16 - data = b('\x00')*5000 - - def setUp(self): - self.cipher = ARC4.new(self.key) - - def test_drop256_encrypt(self): - cipher_drop = ARC4.new(self.key, 256) - ct_drop = cipher_drop.encrypt(self.data[:16]) - ct = self.cipher.encrypt(self.data)[256:256+16] - self.assertEqual(ct_drop, ct) - - def test_drop256_decrypt(self): - cipher_drop = ARC4.new(self.key, 256) - pt_drop = cipher_drop.decrypt(self.data[:16]) - pt = self.cipher.decrypt(self.data)[256:256+16] - self.assertEqual(pt_drop, pt) - - -class KeyLength(unittest.TestCase): - - def runTest(self): - self.assertRaises(ValueError, ARC4.new, b'') - self.assertRaises(ValueError, ARC4.new, b'\x00' * 257) - - -def get_tests(config={}): - from .common import make_stream_tests - tests = make_stream_tests(ARC4, "ARC4", test_data) - tests += list_test_cases(RFC6229_Tests) - tests += list_test_cases(Drop_Tests) - tests.append(KeyLength()) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Blowfish.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Blowfish.py deleted file mode 100644 index ca5c603..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Blowfish.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.Blowfish""" - -import unittest - -from Cryptodome.Util.py3compat import bchr - -from Cryptodome.Cipher import Blowfish - -# This is a list of (plaintext, ciphertext, key) tuples. -test_data = [ - # Test vectors from http://www.schneier.com/code/vectors.txt - ('0000000000000000', '4ef997456198dd78', '0000000000000000'), - ('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'), - ('1000000000000001', '7d856f9a613063f2', '3000000000000000'), - ('1111111111111111', '2466dd878b963c9d', '1111111111111111'), - ('1111111111111111', '61f9c3802281b096', '0123456789abcdef'), - ('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'), - ('0000000000000000', '4ef997456198dd78', '0000000000000000'), - ('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'), - ('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'), - ('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'), - ('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'), - ('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'), - ('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'), - ('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'), - ('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'), - ('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'), - ('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'), - ('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'), - ('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'), - ('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'), - ('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'), - ('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'), - ('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'), - ('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'), - ('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'), - ('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'), - ('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'), - ('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'), - ('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'), - ('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'), - ('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'), - ('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'), - ('0000000000000000', '245946885754369a', '0123456789abcdef'), - ('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'), - #('fedcba9876543210', 'f9ad597c49db005e', 'f0'), - #('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'), - #('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'), - ('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'), - ('fedcba9876543210', 
'b39e44481bdb1e6e', 'f0e1d2c3b4'), - ('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'), - ('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'), - ('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'), - ('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'), - ('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'), - ('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'), - ('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'), - ('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'), - ('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'), - ('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'), - ('fedcba9876543210', '93142887ee3be15c', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f'), - ('fedcba9876543210', '03429e838ce2d14b', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f00'), - ('fedcba9876543210', 'a4299e27469ff67b', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'), - ('fedcba9876543210', 'afd5aed1c1bc96a8', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'), - ('fedcba9876543210', '10851c0e3858da9f', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'), - ('fedcba9876543210', 'e6f51ed79b9db21f', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'), - ('fedcba9876543210', '64a6e14afd36b46f', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'), - ('fedcba9876543210', '80c7d7d45a5479ad', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'), - ('fedcba9876543210', '05044b62fa52d080', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'), -] - - -class KeyLength(unittest.TestCase): - - def runTest(self): - self.assertRaises(ValueError, Blowfish.new, bchr(0) * 3, - Blowfish.MODE_ECB) - self.assertRaises(ValueError, Blowfish.new, bchr(0) * 57, - Blowfish.MODE_ECB) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - cipher = Blowfish.new(b'4'*16, Blowfish.MODE_ECB) - - pt = b'5' * 16 - ct = cipher.encrypt(pt) - - output = bytearray(16) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(16)) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(7) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - from .common import make_block_tests - tests = make_block_tests(Blowfish, "Blowfish", test_data) - tests.append(KeyLength()) - tests += [TestOutput()] - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CAST.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CAST.py deleted file mode 100644 index 8bc21fd..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CAST.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher -# -# Written in 2008 by Dwayne C. 
Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.CAST""" - -import unittest - -from Cryptodome.Util.py3compat import bchr - -from Cryptodome.Cipher import CAST - -# This is a list of (plaintext, ciphertext, key) tuples. -test_data = [ - # Test vectors from RFC 2144, B.1 - ('0123456789abcdef', '238b4fe5847e44b2', - '0123456712345678234567893456789a', - '128-bit key'), - - ('0123456789abcdef', 'eb6a711a2c02271b', - '01234567123456782345', - '80-bit key'), - - ('0123456789abcdef', '7ac816d16e9b302e', - '0123456712', - '40-bit key'), -] - - -class KeyLength(unittest.TestCase): - - def runTest(self): - self.assertRaises(ValueError, CAST.new, bchr(0) * 4, CAST.MODE_ECB) - self.assertRaises(ValueError, CAST.new, bchr(0) * 17, CAST.MODE_ECB) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - cipher = CAST.new(b'4'*16, CAST.MODE_ECB) - - pt = b'5' * 16 - ct = cipher.encrypt(pt) - - output = bytearray(16) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(16)) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(7) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - from .common import make_block_tests - - tests = make_block_tests(CAST, "CAST", test_data) - tests.append(KeyLength()) - tests.append(TestOutput()) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CBC.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CBC.py deleted file mode 100644 index f118eb6..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CBC.py +++ /dev/null @@ -1,556 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tobytes, is_string -from Cryptodome.Cipher import AES, DES3, DES -from Cryptodome.Hash import SHAKE128 - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - -class BlockChainingTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - key_192 = get_tag_random("key_192", 24) - iv_128 = get_tag_random("iv_128", 16) - iv_64 = get_tag_random("iv_64", 8) - data_128 = get_tag_random("data_128", 16) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_loopback_64(self): - cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) - pt = get_tag_random("plaintext", 8 * 100) - ct = cipher.encrypt(pt) - - cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_iv(self): - # If not passed, the iv is created randomly - cipher = AES.new(self.key_128, self.aes_mode) - iv1 = cipher.iv - cipher = AES.new(self.key_128, self.aes_mode) - iv2 = cipher.iv - self.assertNotEqual(iv1, iv2) - self.assertEqual(len(iv1), 16) - - # IV can be passed in uppercase or lowercase - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - ct = cipher.encrypt(self.data_128) - - cipher = AES.new(self.key_128, self.aes_mode, iv=self.iv_128) - self.assertEqual(ct, cipher.encrypt(self.data_128)) - - cipher = AES.new(self.key_128, self.aes_mode, IV=self.iv_128) - self.assertEqual(ct, cipher.encrypt(self.data_128)) - - def test_iv_must_be_bytes(self): - self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, - iv = u'test1234567890-*') - - def test_only_one_iv(self): - # Only one IV/iv keyword allowed - self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, - iv=self.iv_128, 
IV=self.iv_128) - - def test_iv_with_matching_length(self): - self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, - b"") - self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, - self.iv_128[:15]) - self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, - self.iv_128 + b"0") - - def test_block_size_128(self): - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - self.assertEqual(cipher.block_size, AES.block_size) - - def test_block_size_64(self): - cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) - self.assertEqual(cipher.block_size, DES3.block_size) - - def test_unaligned_data_128(self): - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - for wrong_length in range(1,16): - self.assertRaises(ValueError, cipher.encrypt, b"5" * wrong_length) - - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - for wrong_length in range(1,16): - self.assertRaises(ValueError, cipher.decrypt, b"5" * wrong_length) - - def test_unaligned_data_64(self): - cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) - for wrong_length in range(1,8): - self.assertRaises(ValueError, cipher.encrypt, b"5" * wrong_length) - - cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) - for wrong_length in range(1,8): - self.assertRaises(ValueError, cipher.decrypt, b"5" * wrong_length) - - def test_IV_iv_attributes(self): - data = get_tag_random("data", 16 * 100) - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - getattr(cipher, func)(data) - self.assertEqual(cipher.iv, self.iv_128) - self.assertEqual(cipher.IV, self.iv_128) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, - self.iv_128, 7) - self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, - iv=self.iv_128, unknown=7) - # But some are only known by the base cipher (e.g. 
use_aesni consumed by the AES module) - AES.new(self.key_128, self.aes_mode, iv=self.iv_128, use_aesni=False) - - def test_null_encryption_decryption(self): - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - result = getattr(cipher, func)(b"") - self.assertEqual(result, b"") - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_data_must_be_bytes(self): - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) - self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') - - def test_bytearray(self): - data = b"1" * 128 - data_ba = bytearray(data) - - # Encrypt - key_ba = bytearray(self.key_128) - iv_ba = bytearray(self.iv_128) - - cipher1 = AES.new(self.key_128, self.aes_mode, self.iv_128) - ref1 = cipher1.encrypt(data) - - cipher2 = AES.new(key_ba, self.aes_mode, iv_ba) - key_ba[:3] = b'\xFF\xFF\xFF' - iv_ba[:3] = b'\xFF\xFF\xFF' - ref2 = cipher2.encrypt(data_ba) - - self.assertEqual(ref1, ref2) - self.assertEqual(cipher1.iv, cipher2.iv) - - # Decrypt - key_ba = bytearray(self.key_128) - iv_ba = bytearray(self.iv_128) - - cipher3 = AES.new(self.key_128, self.aes_mode, self.iv_128) - ref3 = cipher3.decrypt(data) - - cipher4 = AES.new(key_ba, self.aes_mode, iv_ba) - key_ba[:3] = b'\xFF\xFF\xFF' - iv_ba[:3] = b'\xFF\xFF\xFF' - ref4 = cipher4.decrypt(data_ba) - - self.assertEqual(ref3, ref4) - - def test_memoryview(self): - data = b"1" * 128 - data_mv = memoryview(bytearray(data)) - - # Encrypt - key_mv = memoryview(bytearray(self.key_128)) - iv_mv = memoryview(bytearray(self.iv_128)) - - cipher1 = AES.new(self.key_128, self.aes_mode, self.iv_128) - ref1 = cipher1.encrypt(data) - - cipher2 = AES.new(key_mv, self.aes_mode, iv_mv) - key_mv[:3] = b'\xFF\xFF\xFF' - iv_mv[:3] = b'\xFF\xFF\xFF' - ref2 = cipher2.encrypt(data_mv) - - self.assertEqual(ref1, ref2) - self.assertEqual(cipher1.iv, cipher2.iv) - - # Decrypt - key_mv = memoryview(bytearray(self.key_128)) - iv_mv = memoryview(bytearray(self.iv_128)) - - cipher3 = AES.new(self.key_128, self.aes_mode, self.iv_128) - ref3 = cipher3.decrypt(data) - - cipher4 = AES.new(key_mv, self.aes_mode, iv_mv) - key_mv[:3] = b'\xFF\xFF\xFF' - iv_mv[:3] = b'\xFF\xFF\xFF' - ref4 = cipher4.decrypt(data_mv) - - self.assertEqual(ref3, ref4) - - def test_output_param(self): - - pt = b'5' * 128 - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - ct = cipher.encrypt(pt) - - output = bytearray(128) - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - - def test_output_param_same_buffer(self): - - pt = b'5' * 128 - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - ct = cipher.encrypt(pt) - - pt_ba = bytearray(pt) - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - res = cipher.encrypt(pt_ba, output=pt_ba) - self.assertEqual(ct, pt_ba) - self.assertEqual(res, None) - - ct_ba = bytearray(ct) - cipher = 
AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - res = cipher.decrypt(ct_ba, output=ct_ba) - self.assertEqual(pt, ct_ba) - self.assertEqual(res, None) - - - def test_output_param_memoryview(self): - - pt = b'5' * 128 - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - ct = cipher.encrypt(pt) - - output = memoryview(bytearray(128)) - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - def test_output_param_neg(self): - LEN_PT = 128 - - pt = b'5' * LEN_PT - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - ct = cipher.encrypt(pt) - - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) - - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) - - shorter_output = bytearray(LEN_PT - 1) - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -class CbcTests(BlockChainingTests): - aes_mode = AES.MODE_CBC - des3_mode = DES3.MODE_CBC - - -class NistBlockChainingVectors(unittest.TestCase): - - def _do_kat_aes_test(self, file_name): - - test_vectors = load_test_vectors(("Cipher", "AES"), - file_name, - "AES CBC KAT", - { "count" : lambda x: int(x) } ) - if test_vectors is None: - return - - direction = None - for tv in test_vectors: - - # The test vector file contains some directive lines - if is_string(tv): - direction = tv - continue - - self.description = tv.desc - - cipher = AES.new(tv.key, self.aes_mode, tv.iv) - if direction == "[ENCRYPT]": - self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) - elif direction == "[DECRYPT]": - self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) - else: - assert False - - # See Section 6.4.2 in AESAVS - def _do_mct_aes_test(self, file_name): - - test_vectors = load_test_vectors(("Cipher", "AES"), - file_name, - "AES CBC Montecarlo", - { "count" : lambda x: int(x) } ) - if test_vectors is None: - return - - direction = None - for tv in test_vectors: - - # The test vector file contains some directive lines - if is_string(tv): - direction = tv - continue - - self.description = tv.desc - cipher = AES.new(tv.key, self.aes_mode, tv.iv) - - if direction == '[ENCRYPT]': - cts = [ tv.iv ] - for count in range(1000): - cts.append(cipher.encrypt(tv.plaintext)) - tv.plaintext = cts[-2] - self.assertEqual(cts[-1], tv.ciphertext) - elif direction == '[DECRYPT]': - pts = [ tv.iv] - for count in range(1000): - pts.append(cipher.decrypt(tv.ciphertext)) - tv.ciphertext = pts[-2] - self.assertEqual(pts[-1], tv.plaintext) - else: - assert False - - def _do_tdes_test(self, file_name): - - test_vectors = load_test_vectors(("Cipher", "TDES"), - file_name, - "TDES CBC KAT", - { "count" : lambda x: int(x) } ) - if test_vectors is None: - return - - direction = None - for tv in test_vectors: - - # The test vector file contains some directive lines - if is_string(tv): - direction = tv - continue - - self.description = tv.desc - if hasattr(tv, "keys"): - cipher = DES.new(tv.keys, self.des_mode, tv.iv) - else: - if tv.key1 != tv.key3: - key = tv.key1 + tv.key2 + tv.key3 # Option 3 - 
else: - key = tv.key1 + tv.key2 # Option 2 - cipher = DES3.new(key, self.des3_mode, tv.iv) - - if direction == "[ENCRYPT]": - self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) - elif direction == "[DECRYPT]": - self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) - else: - assert False - - -class NistCbcVectors(NistBlockChainingVectors): - aes_mode = AES.MODE_CBC - des_mode = DES.MODE_CBC - des3_mode = DES3.MODE_CBC - - -# Create one test method per file -nist_aes_kat_mmt_files = ( - # KAT - "CBCGFSbox128.rsp", - "CBCGFSbox192.rsp", - "CBCGFSbox256.rsp", - "CBCKeySbox128.rsp", - "CBCKeySbox192.rsp", - "CBCKeySbox256.rsp", - "CBCVarKey128.rsp", - "CBCVarKey192.rsp", - "CBCVarKey256.rsp", - "CBCVarTxt128.rsp", - "CBCVarTxt192.rsp", - "CBCVarTxt256.rsp", - # MMT - "CBCMMT128.rsp", - "CBCMMT192.rsp", - "CBCMMT256.rsp", - ) -nist_aes_mct_files = ( - "CBCMCT128.rsp", - "CBCMCT192.rsp", - "CBCMCT256.rsp", - ) - -for file_name in nist_aes_kat_mmt_files: - def new_func(self, file_name=file_name): - self._do_kat_aes_test(file_name) - setattr(NistCbcVectors, "test_AES_" + file_name, new_func) - -for file_name in nist_aes_mct_files: - def new_func(self, file_name=file_name): - self._do_mct_aes_test(file_name) - setattr(NistCbcVectors, "test_AES_" + file_name, new_func) -del file_name, new_func - -nist_tdes_files = ( - "TCBCMMT2.rsp", # 2TDES - "TCBCMMT3.rsp", # 3TDES - "TCBCinvperm.rsp", # Single DES - "TCBCpermop.rsp", - "TCBCsubtab.rsp", - "TCBCvarkey.rsp", - "TCBCvartext.rsp", - ) - -for file_name in nist_tdes_files: - def new_func(self, file_name=file_name): - self._do_tdes_test(file_name) - setattr(NistCbcVectors, "test_TDES_" + file_name, new_func) - -# END OF NIST CBC TEST VECTORS - - -class SP800TestVectors(unittest.TestCase): - """Class exercising the CBC test vectors found in Section F.2 - of NIST SP 800-3A""" - - def test_aes_128(self): - key = '2b7e151628aed2a6abf7158809cf4f3c' - iv = '000102030405060708090a0b0c0d0e0f' - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '7649abac8119b246cee98e9b12e9197d' +\ - '5086cb9b507219ee95db113a917678b2' +\ - '73bed6b8e3c1743b7116e69e22229516' +\ - '3ff1caa1681fac09120eca307586e1a7' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CBC, iv) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CBC, iv) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_192(self): - key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' - iv = '000102030405060708090a0b0c0d0e0f' - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '4f021db243bc633d7178183a9fa071e8' +\ - 'b4d9ada9ad7dedf4e5e738763f69145a' +\ - '571b242012fb7ae07fa9baac3df102e0' +\ - '08b0e27988598881d920a9e64f5615cd' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CBC, iv) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CBC, iv) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_256(self): - key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' - iv = 
'000102030405060708090a0b0c0d0e0f' - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = 'f58c4c04d6e5f1ba779eabfb5f7bfbd6' +\ - '9cfc4e967edb808d679f777bc6702c7d' +\ - '39f23369a9d9bacfa530e26304231461' +\ - 'b2eb05e2c39be9fcda6c19078c6a9d1b' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CBC, iv) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CBC, iv) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(CbcTests) - if config.get('slow_tests'): - tests += list_test_cases(NistCbcVectors) - tests += list_test_cases(SP800TestVectors) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CCM.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CCM.py deleted file mode 100644 index c179f16..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CCM.py +++ /dev/null @@ -1,970 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof -from Cryptodome.Util.py3compat import tobytes, bchr -from Cryptodome.Cipher import AES -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.Util.strxor import strxor - -from Cryptodome.Cipher._mode_ccm import CCMMessageTooLongError - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class CcmTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 128) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_nonce(self): - # If not passed, the nonce is created randomly - cipher = AES.new(self.key_128, AES.MODE_CCM) - nonce1 = cipher.nonce - cipher = AES.new(self.key_128, AES.MODE_CCM) - nonce2 = cipher.nonce - self.assertEqual(len(nonce1), 11) - self.assertNotEqual(nonce1, nonce2) - - cipher = AES.new(self.key_128, AES.MODE_CCM, self.nonce_96) - ct = cipher.encrypt(self.data) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertEqual(ct, cipher.encrypt(self.data)) - - def test_nonce_must_be_bytes(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, - nonce=u'test12345678') - - def test_nonce_length(self): - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, - nonce=b"") - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, - nonce=bchr(1) * 6) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, - nonce=bchr(1) * 14) - for x in range(7, 13 + 1): - AES.new(self.key_128, AES.MODE_CCM, nonce=bchr(1) * x) - - def test_block_size(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertEqual(cipher.block_size, AES.block_size) - - def test_nonce_attribute(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertEqual(cipher.nonce, self.nonce_96) - - # By default, a 11 bytes long nonce is randomly generated - nonce1 = AES.new(self.key_128, AES.MODE_CCM).nonce - nonce2 = AES.new(self.key_128, AES.MODE_CCM).nonce - self.assertEqual(len(nonce1), 11) - self.assertNotEqual(nonce1, nonce2) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, - self.nonce_96, 7) - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, unknown=7) - - # But some are only known by the base cipher - # (e.g. 
use_aesni consumed by the AES module) - AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - use_aesni=False) - - def test_null_encryption_decryption(self): - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - result = getattr(cipher, func)(b"") - self.assertEqual(result, b"") - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_data_must_be_bytes(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') - - def test_mac_len(self): - # Invalid MAC length - for mac_len in range(3, 17 + 1, 2): - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, mac_len=mac_len) - - # Valid MAC length - for mac_len in range(4, 16 + 1, 2): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - mac_len=mac_len) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), mac_len) - - # Default MAC length - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), 16) - - def test_invalid_mac(self): - from Cryptodome.Util.strxor import strxor_c - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data) - - invalid_mac = strxor_c(mac, 0x01) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, - invalid_mac) - - def test_hex_mac(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - mac_hex = cipher.hexdigest() - self.assertEqual(cipher.digest(), unhexlify(mac_hex)) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.hexverify(mac_hex) - - def test_longer_assoc_data_than_declared(self): - # More than zero - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - assoc_len=0) - self.assertRaises(ValueError, cipher.update, b"1") - - # Too large - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - assoc_len=15) - self.assertRaises(ValueError, cipher.update, self.data) - - def test_shorter_assoc_data_than_expected(self): - DATA_LEN = len(self.data) - - # With plaintext - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - assoc_len=DATA_LEN + 1) - cipher.update(self.data) - self.assertRaises(ValueError, cipher.encrypt, self.data) - - # With empty plaintext - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - assoc_len=DATA_LEN + 1) - cipher.update(self.data) - self.assertRaises(ValueError, cipher.digest) - - # With ciphertext - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - assoc_len=DATA_LEN + 1) - cipher.update(self.data) - self.assertRaises(ValueError, cipher.decrypt, self.data) - - # With empty ciphertext - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.update(self.data) - mac = cipher.digest() - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - assoc_len=DATA_LEN + 1) - 
cipher.update(self.data) - self.assertRaises(ValueError, cipher.verify, mac) - - def test_shorter_and_longer_plaintext_than_declared(self): - DATA_LEN = len(self.data) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - msg_len=DATA_LEN + 1) - cipher.encrypt(self.data) - self.assertRaises(ValueError, cipher.digest) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - msg_len=DATA_LEN - 1) - self.assertRaises(ValueError, cipher.encrypt, self.data) - - def test_shorter_ciphertext_than_declared(self): - DATA_LEN = len(self.data) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - msg_len=DATA_LEN + 1) - cipher.decrypt(ct) - self.assertRaises(ValueError, cipher.verify, mac) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - msg_len=DATA_LEN - 1) - self.assertRaises(ValueError, cipher.decrypt, ct) - - def test_message_chunks(self): - # Validate that both associated data and plaintext/ciphertext - # can be broken up in chunks of arbitrary length - - auth_data = get_tag_random("authenticated data", 127) - plaintext = get_tag_random("plaintext", 127) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.update(auth_data) - ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) - - def break_up(data, chunk_length): - return [data[i:i+chunk_length] for i in range(0, len(data), - chunk_length)] - - # Encryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - msg_len=127, assoc_len=127) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - pt2 = b"" - for chunk in break_up(ciphertext, chunk_length): - pt2 += cipher.decrypt(chunk) - self.assertEqual(plaintext, pt2) - cipher.verify(ref_mac) - - # Decryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, - msg_len=127, assoc_len=127) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - ct2 = b"" - for chunk in break_up(plaintext, chunk_length): - ct2 += cipher.encrypt(chunk) - self.assertEqual(ciphertext, ct2) - self.assertEqual(cipher.digest(), ref_mac) - - def test_bytearray(self): - - # Encrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data) - data_ba = bytearray(self.data) - - cipher1 = AES.new(self.key_128, - AES.MODE_CCM, - nonce=self.nonce_96) - cipher1.update(self.data) - ct = cipher1.encrypt(self.data) - tag = cipher1.digest() - - cipher2 = AES.new(key_ba, - AES.MODE_CCM, - nonce=nonce_ba) - key_ba[:3] = b"\xFF\xFF\xFF" - nonce_ba[:3] = b"\xFF\xFF\xFF" - cipher2.update(header_ba) - header_ba[:3] = b"\xFF\xFF\xFF" - ct_test = cipher2.encrypt(data_ba) - data_ba[:3] = b"\xFF\xFF\xFF" - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data) - del data_ba - - cipher4 = AES.new(key_ba, - AES.MODE_CCM, - nonce=nonce_ba) - key_ba[:3] = b"\xFF\xFF\xFF" - nonce_ba[:3] = b"\xFF\xFF\xFF" - cipher4.update(header_ba) - header_ba[:3] = b"\xFF\xFF\xFF" - pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) - - self.assertEqual(self.data, 
pt_test) - - def test_memoryview(self): - - # Encrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data)) - data_mv = memoryview(bytearray(self.data)) - - cipher1 = AES.new(self.key_128, - AES.MODE_CCM, - nonce=self.nonce_96) - cipher1.update(self.data) - ct = cipher1.encrypt(self.data) - tag = cipher1.digest() - - cipher2 = AES.new(key_mv, - AES.MODE_CCM, - nonce=nonce_mv) - key_mv[:3] = b"\xFF\xFF\xFF" - nonce_mv[:3] = b"\xFF\xFF\xFF" - cipher2.update(header_mv) - header_mv[:3] = b"\xFF\xFF\xFF" - ct_test = cipher2.encrypt(data_mv) - data_mv[:3] = b"\xFF\xFF\xFF" - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data)) - del data_mv - - cipher4 = AES.new(key_mv, - AES.MODE_CCM, - nonce=nonce_mv) - key_mv[:3] = b"\xFF\xFF\xFF" - nonce_mv[:3] = b"\xFF\xFF\xFF" - cipher4.update(header_mv) - header_mv[:3] = b"\xFF\xFF\xFF" - pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) - - self.assertEqual(self.data, pt_test) - - def test_output_param(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - tag = cipher.digest() - - output = bytearray(128) - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - res, tag_out = cipher.encrypt_and_digest(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - self.assertEqual(tag, tag_out) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - res = cipher.decrypt_and_verify(ct, tag, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - def test_output_param_memoryview(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - - output = memoryview(bytearray(128)) - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - def test_output_param_neg(self): - - pt = b'5' * 16 - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(15) - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - def test_message_too_long(self): - - nonce = b'N' * 13 - 
self.assertRaises(CCMMessageTooLongError, - AES.new, - self.key_128, - AES.MODE_CCM, - nonce=nonce, - assoc_len=20, - msg_len=0x10000) - - nonce = b'N' * 7 - self.assertRaises(CCMMessageTooLongError, - AES.new, - self.key_128, - AES.MODE_CCM, - nonce=nonce, - assoc_len=20, - msg_len=2**64) - - nonce = b'N' * 13 - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=nonce) - self.assertRaises(CCMMessageTooLongError, - cipher.encrypt, - b'C' * 0x10000) - - nonce = b'N' * 13 - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=nonce) - self.assertRaises(CCMMessageTooLongError, - cipher.decrypt, - b'C' * 0x10000) - - -class CcmFSMTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 16) - - def test_valid_init_encrypt_decrypt_digest_verify(self): - # No authenticated data, fixed plaintext - for assoc_len in (None, 0): - for msg_len in (None, len(self.data)): - # Verify path INIT->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - assoc_len=assoc_len, - msg_len=msg_len) - ct = cipher.encrypt(self.data) - mac = cipher.digest() - - # Verify path INIT->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - assoc_len=assoc_len, - msg_len=msg_len) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_update_digest_verify(self): - # No plaintext, fixed authenticated data - for assoc_len in (None, len(self.data)): - for msg_len in (None, 0): - # Verify path INIT->UPDATE->DIGEST - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - assoc_len=assoc_len, - msg_len=msg_len) - cipher.update(self.data) - mac = cipher.digest() - - # Verify path INIT->UPDATE->VERIFY - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - assoc_len=assoc_len, - msg_len=msg_len) - cipher.update(self.data) - cipher.verify(mac) - - def test_valid_full_path(self): - # Fixed authenticated data, fixed plaintext - for assoc_len in (None, len(self.data)): - for msg_len in (None, len(self.data)): - # Verify path INIT->UPDATE->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - assoc_len=assoc_len, - msg_len=msg_len) - cipher.update(self.data) - ct = cipher.encrypt(self.data) - mac = cipher.digest() - - # Verify path INIT->UPDATE->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - assoc_len=assoc_len, - msg_len=msg_len) - cipher.update(self.data) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_digest(self): - # Verify path INIT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.digest() - - def test_valid_init_verify(self): - # Verify path INIT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - mac = cipher.digest() - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.verify(mac) - - def test_valid_multiple_encrypt_or_decrypt(self): - # Only possible if msg_len is declared in advance - for method_name in "encrypt", "decrypt": - for auth_data in (None, b"333", self.data, - self.data + b"3"): - if auth_data is None: - assoc_len = None - else: - assoc_len = len(auth_data) - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - msg_len=64, - assoc_len=assoc_len) - if auth_data is not None: - cipher.update(auth_data) - method = getattr(cipher, method_name) - method(self.data) - method(self.data) - method(self.data) - method(self.data) - - def 
test_valid_multiple_digest_or_verify(self): - # Multiple calls to digest - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.update(self.data) - first_mac = cipher.digest() - for x in range(4): - self.assertEqual(first_mac, cipher.digest()) - - # Multiple calls to verify - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.update(self.data) - for x in range(5): - cipher.verify(first_mac) - - def test_valid_encrypt_and_digest_decrypt_and_verify(self): - # encrypt_and_digest - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.update(self.data) - ct, mac = cipher.encrypt_and_digest(self.data) - - # decrypt_and_verify - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.update(self.data) - pt = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(self.data, pt) - - def test_invalid_multiple_encrypt_decrypt_without_msg_len(self): - # Once per method, with or without assoc. data - for method_name in "encrypt", "decrypt": - for assoc_data_present in (True, False): - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96) - if assoc_data_present: - cipher.update(self.data) - method = getattr(cipher, method_name) - method(self.data) - self.assertRaises(TypeError, method, self.data) - - def test_invalid_mixing_encrypt_decrypt(self): - # Once per method, with or without assoc. data - for method1_name, method2_name in (("encrypt", "decrypt"), - ("decrypt", "encrypt")): - for assoc_data_present in (True, False): - cipher = AES.new(self.key_128, AES.MODE_CCM, - nonce=self.nonce_96, - msg_len=32) - if assoc_data_present: - cipher.update(self.data) - getattr(cipher, method1_name)(self.data) - self.assertRaises(TypeError, getattr(cipher, method2_name), - self.data) - - def test_invalid_encrypt_or_update_after_digest(self): - for method_name in "encrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.encrypt(self.data) - cipher.digest() - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data) - - def test_invalid_decrypt_or_update_after_verify(self): - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - ct = cipher.encrypt(self.data) - mac = cipher.digest() - - for method_name in "decrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - -class TestVectors(unittest.TestCase): - """Class exercising the CCM test vectors found in Appendix C - of NIST SP 800-38C and in RFC 3610""" - - # List of test vectors, each made up of: - # - authenticated data - # - plaintext - # - ciphertext - # - MAC - # - AES key - # - nonce - test_vectors_hex = [ - # NIST SP 800 38C - ( '0001020304050607', - '20212223', - '7162015b', - '4dac255d', - '404142434445464748494a4b4c4d4e4f', - '10111213141516'), - ( '000102030405060708090a0b0c0d0e0f', - '202122232425262728292a2b2c2d2e2f', - 'd2a1f0e051ea5f62081a7792073d593d', - '1fc64fbfaccd', - '404142434445464748494a4b4c4d4e4f', - '1011121314151617'), - ( '000102030405060708090a0b0c0d0e0f10111213', - '202122232425262728292a2b2c2d2e2f3031323334353637', - 
'e3b201a9f5b71a7a9b1ceaeccd97e70b6176aad9a4428aa5', - '484392fbc1b09951', - '404142434445464748494a4b4c4d4e4f', - '101112131415161718191a1b'), - ( (''.join(["%02X" % (x*16+y) for x in range(0,16) for y in range(0,16)]))*256, - '202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f', - '69915dad1e84c6376a68c2967e4dab615ae0fd1faec44cc484828529463ccf72', - 'b4ac6bec93e8598e7f0dadbcea5b', - '404142434445464748494a4b4c4d4e4f', - '101112131415161718191a1b1c'), - # RFC3610 - ( '0001020304050607', - '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e', - '588c979a61c663d2f066d0c2c0f989806d5f6b61dac384', - '17e8d12cfdf926e0', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000003020100a0a1a2a3a4a5'), - ( - '0001020304050607', - '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - '72c91a36e135f8cf291ca894085c87e3cc15c439c9e43a3b', - 'a091d56e10400916', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000004030201a0a1a2a3a4a5'), - ( '0001020304050607', - '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', - '51b1e5f44a197d1da46b0f8e2d282ae871e838bb64da859657', - '4adaa76fbd9fb0c5', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000005040302A0A1A2A3A4A5'), - ( '000102030405060708090a0b', - '0c0d0e0f101112131415161718191a1b1c1d1e', - 'a28c6865939a9a79faaa5c4c2a9d4a91cdac8c', - '96c861b9c9e61ef1', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000006050403a0a1a2a3a4a5'), - ( '000102030405060708090a0b', - '0c0d0e0f101112131415161718191a1b1c1d1e1f', - 'dcf1fb7b5d9e23fb9d4e131253658ad86ebdca3e', - '51e83f077d9c2d93', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000007060504a0a1a2a3a4a5'), - ( '000102030405060708090a0b', - '0c0d0e0f101112131415161718191a1b1c1d1e1f20', - '6fc1b011f006568b5171a42d953d469b2570a4bd87', - '405a0443ac91cb94', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000008070605a0a1a2a3a4a5'), - ( '0001020304050607', - '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e', - '0135d1b2c95f41d5d1d4fec185d166b8094e999dfed96c', - '048c56602c97acbb7490', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '00000009080706a0a1a2a3a4a5'), - ( '0001020304050607', - '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - '7b75399ac0831dd2f0bbd75879a2fd8f6cae6b6cd9b7db24', - 'c17b4433f434963f34b4', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '0000000a090807a0a1a2a3a4a5'), - ( '0001020304050607', - '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', - '82531a60cc24945a4b8279181ab5c84df21ce7f9b73f42e197', - 'ea9c07e56b5eb17e5f4e', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '0000000b0a0908a0a1a2a3a4a5'), - ( '000102030405060708090a0b', - '0c0d0e0f101112131415161718191a1b1c1d1e', - '07342594157785152b074098330abb141b947b', - '566aa9406b4d999988dd', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '0000000c0b0a09a0a1a2a3a4a5'), - ( '000102030405060708090a0b', - '0c0d0e0f101112131415161718191a1b1c1d1e1f', - '676bb20380b0e301e8ab79590a396da78b834934', - 'f53aa2e9107a8b6c022c', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '0000000d0c0b0aa0a1a2a3a4a5'), - ( '000102030405060708090a0b', - '0c0d0e0f101112131415161718191a1b1c1d1e1f20', - 'c0ffa0d6f05bdb67f24d43a4338d2aa4bed7b20e43', - 'cd1aa31662e7ad65d6db', - 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', - '0000000e0d0c0ba0a1a2a3a4a5'), - ( '0be1a88bace018b1', - '08e8cf97d820ea258460e96ad9cf5289054d895ceac47c', - '4cb97f86a2a4689a877947ab8091ef5386a6ffbdd080f8', - 'e78cf7cb0cddd7b3', - 'd7828d13b2b0bdc325a76236df93cc6b', - '00412b4ea9cdbe3c9696766cfa'), - ( '63018f76dc8a1bcb', - '9020ea6f91bdd85afa0039ba4baff9bfb79c7028949cd0ec', - '4ccb1e7ca981befaa0726c55d378061298c85c92814abc33', - 'c52ee81d7d77c08a', - 
'd7828d13b2b0bdc325a76236df93cc6b', - '0033568ef7b2633c9696766cfa'), - ( 'aa6cfa36cae86b40', - 'b916e0eacc1c00d7dcec68ec0b3bbb1a02de8a2d1aa346132e', - 'b1d23a2220ddc0ac900d9aa03c61fcf4a559a4417767089708', - 'a776796edb723506', - 'd7828d13b2b0bdc325a76236df93cc6b', - '00103fe41336713c9696766cfa'), - ( 'd0d0735c531e1becf049c244', - '12daac5630efa5396f770ce1a66b21f7b2101c', - '14d253c3967b70609b7cbb7c49916028324526', - '9a6f49975bcadeaf', - 'd7828d13b2b0bdc325a76236df93cc6b', - '00764c63b8058e3c9696766cfa'), - ( '77b60f011c03e1525899bcae', - 'e88b6a46c78d63e52eb8c546efb5de6f75e9cc0d', - '5545ff1a085ee2efbf52b2e04bee1e2336c73e3f', - '762c0c7744fe7e3c', - 'd7828d13b2b0bdc325a76236df93cc6b', - '00f8b678094e3b3c9696766cfa'), - ( 'cd9044d2b71fdb8120ea60c0', - '6435acbafb11a82e2f071d7ca4a5ebd93a803ba87f', - '009769ecabdf48625594c59251e6035722675e04c8', - '47099e5ae0704551', - 'd7828d13b2b0bdc325a76236df93cc6b', - '00d560912d3f703c9696766cfa'), - ( 'd85bc7e69f944fb8', - '8a19b950bcf71a018e5e6701c91787659809d67dbedd18', - 'bc218daa947427b6db386a99ac1aef23ade0b52939cb6a', - '637cf9bec2408897c6ba', - 'd7828d13b2b0bdc325a76236df93cc6b', - '0042fff8f1951c3c9696766cfa'), - ( '74a0ebc9069f5b37', - '1761433c37c5a35fc1f39f406302eb907c6163be38c98437', - '5810e6fd25874022e80361a478e3e9cf484ab04f447efff6', - 'f0a477cc2fc9bf548944', - 'd7828d13b2b0bdc325a76236df93cc6b', - '00920f40e56cdc3c9696766cfa'), - ( '44a3aa3aae6475ca', - 'a434a8e58500c6e41530538862d686ea9e81301b5ae4226bfa', - 'f2beed7bc5098e83feb5b31608f8e29c38819a89c8e776f154', - '4d4151a4ed3a8b87b9ce', - 'd7828d13b2b0bdc325a76236df93cc6b', - '0027ca0c7120bc3c9696766cfa'), - ( 'ec46bb63b02520c33c49fd70', - 'b96b49e21d621741632875db7f6c9243d2d7c2', - '31d750a09da3ed7fddd49a2032aabf17ec8ebf', - '7d22c8088c666be5c197', - 'd7828d13b2b0bdc325a76236df93cc6b', - '005b8ccbcd9af83c9696766cfa'), - ( '47a65ac78b3d594227e85e71', - 'e2fcfbb880442c731bf95167c8ffd7895e337076', - 'e882f1dbd38ce3eda7c23f04dd65071eb41342ac', - 'df7e00dccec7ae52987d', - 'd7828d13b2b0bdc325a76236df93cc6b', - '003ebe94044b9a3c9696766cfa'), - ( '6e37a6ef546d955d34ab6059', - 'abf21c0b02feb88f856df4a37381bce3cc128517d4', - 'f32905b88a641b04b9c9ffb58cc390900f3da12ab1', - '6dce9e82efa16da62059', - 'd7828d13b2b0bdc325a76236df93cc6b', - '008d493b30ae8b3c9696766cfa'), - ] - - test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] - - def runTest(self): - for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: - # Encrypt - cipher = AES.new(key, AES.MODE_CCM, nonce, mac_len=len(mac)) - cipher.update(assoc_data) - ct2, mac2 = cipher.encrypt_and_digest(pt) - self.assertEqual(ct, ct2) - self.assertEqual(mac, mac2) - - # Decrypt - cipher = AES.new(key, AES.MODE_CCM, nonce, mac_len=len(mac)) - cipher.update(assoc_data) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings, **extra_params): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._extra_params = extra_params - self._id = "None" - - def setUp(self): - - def filter_tag(group): - return group['tagSize'] // 8 - - self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "aes_ccm_test.json", - "Wycheproof AES CCM", - group_tag={'tag_size': filter_tag}) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def 
test_encrypt(self, tv): - self._id = "Wycheproof Encrypt CCM Test #" + str(tv.id) - - try: - cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, - **self._extra_params) - except ValueError as e: - if len(tv.iv) not in range(7, 13 + 1, 2) and "Length of parameter 'nonce'" in str(e): - assert not tv.valid - return - if tv.tag_size not in range(4, 16 + 1, 2) and "Parameter 'mac_len'" in str(e): - assert not tv.valid - return - raise e - - cipher.update(tv.aad) - ct, tag = cipher.encrypt_and_digest(tv.msg) - if tv.valid: - self.assertEqual(ct, tv.ct) - self.assertEqual(tag, tv.tag) - self.warn(tv) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt CCM Test #" + str(tv.id) - - try: - cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, - **self._extra_params) - except ValueError as e: - if len(tv.iv) not in range(7, 13 + 1, 2) and "Length of parameter 'nonce'" in str(e): - assert not tv.valid - return - if tv.tag_size not in range(4, 16 + 1, 2) and "Parameter 'mac_len'" in str(e): - assert not tv.valid - return - raise e - - cipher.update(tv.aad) - try: - pt = cipher.decrypt_and_verify(tv.ct, tv.tag) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - self.warn(tv) - - def test_corrupt_decrypt(self, tv): - self._id = "Wycheproof Corrupt Decrypt CCM Test #" + str(tv.id) - if len(tv.iv) not in range(7, 13 + 1, 2) or len(tv.ct) == 0: - return - cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, - **self._extra_params) - cipher.update(tv.aad) - ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) - - def runTest(self): - - for tv in self.tv: - self.test_encrypt(tv) - self.test_decrypt(tv) - self.test_corrupt_decrypt(tv) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(CcmTests) - tests += list_test_cases(CcmFSMTests) - tests += [TestVectors()] - tests += [TestVectorsWycheproof(wycheproof_warnings)] - - return tests - - -if __name__ == '__main__': - def suite(): - unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CFB.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CFB.py deleted file mode 100644 index 673bf8e..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CFB.py +++ /dev/null @@ -1,411 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tobytes, is_string -from Cryptodome.Cipher import AES, DES3, DES -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.SelfTest.Cipher.test_CBC import BlockChainingTests - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class CfbTests(BlockChainingTests): - - aes_mode = AES.MODE_CFB - des3_mode = DES3.MODE_CFB - - # Redefine test_unaligned_data_128/64 - - def test_unaligned_data_128(self): - plaintexts = [ b"7777777" ] * 100 - - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - def test_unaligned_data_64(self): - plaintexts = [ b"7777777" ] * 100 - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - # Extra - - def test_segment_size_128(self): - for bits in range(8, 129, 8): - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, - segment_size=bits) - - for bits in 0, 7, 9, 127, 129: - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CFB, - self.iv_128, - segment_size=bits) - - def test_segment_size_64(self): - for bits in range(8, 65, 8): - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, - segment_size=bits) - - for bits in 0, 7, 9, 63, 65: - self.assertRaises(ValueError, DES3.new, self.key_192, AES.MODE_CFB, - self.iv_64, - segment_size=bits) - - -class NistCfbVectors(unittest.TestCase): - - def _do_kat_aes_test(self, file_name, segment_size): - - test_vectors = load_test_vectors(("Cipher", "AES"), - file_name, - "AES CFB%d KAT" % segment_size, - { "count" : lambda x: int(x) } ) - if test_vectors is None: - return - - direction = None - for tv in test_vectors: - - # The test vector file contains some directive lines - if is_string(tv): - direction = tv 
- continue - - self.description = tv.desc - cipher = AES.new(tv.key, AES.MODE_CFB, tv.iv, - segment_size=segment_size) - if direction == "[ENCRYPT]": - self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) - elif direction == "[DECRYPT]": - self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) - else: - assert False - - # See Section 6.4.5 in AESAVS - def _do_mct_aes_test(self, file_name, segment_size): - - test_vectors = load_test_vectors(("Cipher", "AES"), - file_name, - "AES CFB%d Montecarlo" % segment_size, - { "count" : lambda x: int(x) } ) - if test_vectors is None: - return - - assert(segment_size in (8, 128)) - - direction = None - for tv in test_vectors: - - # The test vector file contains some directive lines - if is_string(tv): - direction = tv - continue - - self.description = tv.desc - cipher = AES.new(tv.key, AES.MODE_CFB, tv.iv, - segment_size=segment_size) - - def get_input(input_text, output_seq, j): - # CFB128 - if segment_size == 128: - if j >= 2: - return output_seq[-2] - return [input_text, tv.iv][j] - # CFB8 - if j == 0: - return input_text - elif j <= 16: - return tv.iv[j - 1:j] - return output_seq[j - 17] - - if direction == '[ENCRYPT]': - cts = [] - for j in range(1000): - plaintext = get_input(tv.plaintext, cts, j) - cts.append(cipher.encrypt(plaintext)) - self.assertEqual(cts[-1], tv.ciphertext) - elif direction == '[DECRYPT]': - pts = [] - for j in range(1000): - ciphertext = get_input(tv.ciphertext, pts, j) - pts.append(cipher.decrypt(ciphertext)) - self.assertEqual(pts[-1], tv.plaintext) - else: - assert False - - def _do_tdes_test(self, file_name, segment_size): - - test_vectors = load_test_vectors(("Cipher", "TDES"), - file_name, - "TDES CFB%d KAT" % segment_size, - { "count" : lambda x: int(x) } ) - if test_vectors is None: - return - - direction = None - for tv in test_vectors: - - # The test vector file contains some directive lines - if is_string(tv): - direction = tv - continue - - self.description = tv.desc - if hasattr(tv, "keys"): - cipher = DES.new(tv.keys, DES.MODE_CFB, tv.iv, - segment_size=segment_size) - else: - if tv.key1 != tv.key3: - key = tv.key1 + tv.key2 + tv.key3 # Option 3 - else: - key = tv.key1 + tv.key2 # Option 2 - cipher = DES3.new(key, DES3.MODE_CFB, tv.iv, - segment_size=segment_size) - if direction == "[ENCRYPT]": - self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) - elif direction == "[DECRYPT]": - self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) - else: - assert False - - -# Create one test method per file -nist_aes_kat_mmt_files = ( - # KAT - "CFB?GFSbox128.rsp", - "CFB?GFSbox192.rsp", - "CFB?GFSbox256.rsp", - "CFB?KeySbox128.rsp", - "CFB?KeySbox192.rsp", - "CFB?KeySbox256.rsp", - "CFB?VarKey128.rsp", - "CFB?VarKey192.rsp", - "CFB?VarKey256.rsp", - "CFB?VarTxt128.rsp", - "CFB?VarTxt192.rsp", - "CFB?VarTxt256.rsp", - # MMT - "CFB?MMT128.rsp", - "CFB?MMT192.rsp", - "CFB?MMT256.rsp", - ) -nist_aes_mct_files = ( - "CFB?MCT128.rsp", - "CFB?MCT192.rsp", - "CFB?MCT256.rsp", - ) - -for file_gen_name in nist_aes_kat_mmt_files: - for bits in "8", "128": - file_name = file_gen_name.replace("?", bits) - def new_func(self, file_name=file_name, bits=bits): - self._do_kat_aes_test(file_name, int(bits)) - setattr(NistCfbVectors, "test_AES_" + file_name, new_func) - -for file_gen_name in nist_aes_mct_files: - for bits in "8", "128": - file_name = file_gen_name.replace("?", bits) - def new_func(self, file_name=file_name, bits=bits): - self._do_mct_aes_test(file_name, int(bits)) - setattr(NistCfbVectors, 
"test_AES_" + file_name, new_func) -del file_name, new_func - -nist_tdes_files = ( - "TCFB?MMT2.rsp", # 2TDES - "TCFB?MMT3.rsp", # 3TDES - "TCFB?invperm.rsp", # Single DES - "TCFB?permop.rsp", - "TCFB?subtab.rsp", - "TCFB?varkey.rsp", - "TCFB?vartext.rsp", - ) - -for file_gen_name in nist_tdes_files: - for bits in "8", "64": - file_name = file_gen_name.replace("?", bits) - def new_func(self, file_name=file_name, bits=bits): - self._do_tdes_test(file_name, int(bits)) - setattr(NistCfbVectors, "test_TDES_" + file_name, new_func) - -# END OF NIST CBC TEST VECTORS - - -class SP800TestVectors(unittest.TestCase): - """Class exercising the CFB test vectors found in Section F.3 - of NIST SP 800-3A""" - - def test_aes_128_cfb8(self): - plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' - ciphertext = '3b79424c9c0dd436bace9e0ed4586a4f32b9' - key = '2b7e151628aed2a6abf7158809cf4f3c' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_192_cfb8(self): - plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' - ciphertext = 'cda2521ef0a905ca44cd057cbf0d47a0678a' - key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_256_cfb8(self): - plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' - ciphertext = 'dc1f1a8520a64db55fcc8ac554844e889700' - key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_128_cfb128(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ - 'c8a64537a0b3a93fcde3cdad9f1ce58b' +\ - '26751f67a3cbb140b1808cf187a4f4df' +\ - 'c04b05357c5d1c0eeac4c66f9ff7f2e6' - key = '2b7e151628aed2a6abf7158809cf4f3c' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_192_cfb128(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ - 
'67ce7f7f81173621961a2b70171d3d7a' +\ - '2e1e8a1dd59b88b1c8e60fed1efac4c9' +\ - 'c05f9f9ca9834fa042ae8fba584b09ff' - key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_256_cfb128(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - - ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ - '39ffed143b28b1c832113c6331e5407b' +\ - 'df10132415e54b92a13ed0a8267ae2f9' +\ - '75a385741ab9cef82031623d55b1e471' - key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(CfbTests) - if config.get('slow_tests'): - tests += list_test_cases(NistCfbVectors) - tests += list_test_cases(SP800TestVectors) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CTR.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CTR.py deleted file mode 100644 index ef5be5d..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_CTR.py +++ /dev/null @@ -1,472 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tobytes, bchr -from Cryptodome.Cipher import AES, DES3 -from Cryptodome.Hash import SHAKE128, SHA256 -from Cryptodome.Util import Counter - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - -class CtrTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - key_192 = get_tag_random("key_192", 24) - nonce_32 = get_tag_random("nonce_32", 4) - nonce_64 = get_tag_random("nonce_64", 8) - ctr_64 = Counter.new(32, prefix=nonce_32) - ctr_128 = Counter.new(64, prefix=nonce_64) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_loopback_64(self): - cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) - pt = get_tag_random("plaintext", 8 * 100) - ct = cipher.encrypt(pt) - - cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_invalid_counter_parameter(self): - # Counter object is required for ciphers with short block size - self.assertRaises(TypeError, DES3.new, self.key_192, AES.MODE_CTR) - # Positional arguments are not allowed (Counter must be passed as - # keyword) - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, self.ctr_128) - - def test_nonce_attribute(self): - # Nonce attribute is the prefix passed to Counter (DES3) - cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) - self.assertEqual(cipher.nonce, self.nonce_32) - - # Nonce attribute is the prefix passed to Counter (AES) - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - self.assertEqual(cipher.nonce, self.nonce_64) - - # Nonce attribute is not defined if suffix is used in Counter - counter = Counter.new(64, prefix=self.nonce_32, suffix=self.nonce_32) - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - self.assertFalse(hasattr(cipher, "nonce")) - - def test_nonce_parameter(self): - # Nonce parameter becomes nonce attribute - cipher1 = AES.new(self.key_128, AES.MODE_CTR, nonce=self.nonce_64) - self.assertEqual(cipher1.nonce, self.nonce_64) - - counter = Counter.new(64, prefix=self.nonce_64, initial_value=0) - cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - pt = get_tag_random("plaintext", 65536) - self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) - - # Nonce is implicitly created (for AES) when no parameters are passed - nonce1 = AES.new(self.key_128, AES.MODE_CTR).nonce - nonce2 = AES.new(self.key_128, AES.MODE_CTR).nonce - self.assertNotEqual(nonce1, nonce2) - self.assertEqual(len(nonce1), 8) - - # Nonce can be zero-length - cipher = AES.new(self.key_128, AES.MODE_CTR, nonce=b"") - self.assertEqual(b"", cipher.nonce) - cipher.encrypt(b'0'*300) - - # Nonce and Counter are mutually exclusive - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, - counter=self.ctr_128, nonce=self.nonce_64) - - def test_initial_value_parameter(self): - # Test with nonce parameter - cipher1 = AES.new(self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, 
initial_value=0xFFFF) - counter = Counter.new(64, prefix=self.nonce_64, initial_value=0xFFFF) - cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - pt = get_tag_random("plaintext", 65536) - self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) - - # Test without nonce parameter - cipher1 = AES.new(self.key_128, AES.MODE_CTR, - initial_value=0xFFFF) - counter = Counter.new(64, prefix=cipher1.nonce, initial_value=0xFFFF) - cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - pt = get_tag_random("plaintext", 65536) - self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) - - # Initial_value and Counter are mutually exclusive - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, - counter=self.ctr_128, initial_value=0) - - def test_initial_value_bytes_parameter(self): - # Same result as when passing an integer - cipher1 = AES.new(self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, - initial_value=b"\x00"*6+b"\xFF\xFF") - cipher2 = AES.new(self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, initial_value=0xFFFF) - pt = get_tag_random("plaintext", 65536) - self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) - - # Fail if the iv is too large - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, - initial_value=b"5"*17) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, initial_value=b"5"*9) - - # Fail if the iv is too short - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, - initial_value=b"5"*15) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, initial_value=b"5"*7) - - def test_iv_with_matching_length(self): - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, - counter=Counter.new(120)) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, - counter=Counter.new(136)) - - def test_block_size_128(self): - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - self.assertEqual(cipher.block_size, AES.block_size) - - def test_block_size_64(self): - cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) - self.assertEqual(cipher.block_size, DES3.block_size) - - def test_unaligned_data_128(self): - plaintexts = [ b"7777777" ] * 100 - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - def test_unaligned_data_64(self): - plaintexts = [ b"7777777" ] * 100 - cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, 
self.key_128, AES.MODE_CTR, - 7, counter=self.ctr_128) - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, - counter=self.ctr_128, unknown=7) - # But some are only known by the base cipher (e.g. use_aesni consumed by the AES module) - AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128, use_aesni=False) - - def test_null_encryption_decryption(self): - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - result = getattr(cipher, func)(b"") - self.assertEqual(result, b"") - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_wrap_around(self): - # Counter is only 8 bits, so we can only encrypt/decrypt 256 blocks (=4096 bytes) - counter = Counter.new(8, prefix=bchr(9) * 15) - max_bytes = 4096 - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - cipher.encrypt(b'9' * max_bytes) - self.assertRaises(OverflowError, cipher.encrypt, b'9') - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - self.assertRaises(OverflowError, cipher.encrypt, b'9' * (max_bytes + 1)) - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - cipher.decrypt(b'9' * max_bytes) - self.assertRaises(OverflowError, cipher.decrypt, b'9') - - cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) - self.assertRaises(OverflowError, cipher.decrypt, b'9' * (max_bytes + 1)) - - def test_bytearray(self): - data = b"1" * 16 - iv = b"\x00" * 6 + b"\xFF\xFF" - - # Encrypt - cipher1 = AES.new(self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, - initial_value=iv) - ref1 = cipher1.encrypt(data) - - cipher2 = AES.new(self.key_128, AES.MODE_CTR, - nonce=bytearray(self.nonce_64), - initial_value=bytearray(iv)) - ref2 = cipher2.encrypt(bytearray(data)) - - self.assertEqual(ref1, ref2) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - cipher3 = AES.new(self.key_128, AES.MODE_CTR, - nonce=self.nonce_64, - initial_value=iv) - ref3 = cipher3.decrypt(data) - - cipher4 = AES.new(self.key_128, AES.MODE_CTR, - nonce=bytearray(self.nonce_64), - initial_value=bytearray(iv)) - ref4 = cipher4.decrypt(bytearray(data)) - - self.assertEqual(ref3, ref4) - - def test_very_long_data(self): - cipher = AES.new(b'A' * 32, AES.MODE_CTR, nonce=b'') - ct = cipher.encrypt(b'B' * 1000000) - digest = SHA256.new(ct).hexdigest() - self.assertEqual(digest, "96204fc470476561a3a8f3b6fe6d24be85c87510b638142d1d0fb90989f8a6a6") - - def test_output_param(self): - - pt = b'5' * 128 - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - ct = cipher.encrypt(pt) - - output = bytearray(128) - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - def test_output_param_memoryview(self): - - pt = b'5' * 128 - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - ct = cipher.encrypt(pt) - - output = memoryview(bytearray(128)) - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = 
AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - def test_output_param_neg(self): - LEN_PT = 128 - - pt = b'5' * LEN_PT - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - ct = cipher.encrypt(pt) - - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) - - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) - - shorter_output = bytearray(LEN_PT - 1) - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -class SP800TestVectors(unittest.TestCase): - """Class exercising the CTR test vectors found in Section F.5 - of NIST SP 800-38A""" - - def test_aes_128(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '874d6191b620e3261bef6864990db6ce' +\ - '9806f66b7970fdff8617187bb9fffdff' +\ - '5ae4df3edbd5d35e5b4f09020db03eab' +\ - '1e031dda2fbe03d1792170a0f3009cee' - key = '2b7e151628aed2a6abf7158809cf4f3c' - counter = Counter.new(nbits=16, - prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), - initial_value=0xfeff) - - key = unhexlify(key) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_192(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '1abc932417521ca24f2b0459fe7e6e0b' +\ - '090339ec0aa6faefd5ccc2c6f4ce8e94' +\ - '1e36b26bd1ebc670d1bd1d665620abf7' +\ - '4f78a7f6d29809585a97daec58c6b050' - key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' - counter = Counter.new(nbits=16, - prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), - initial_value=0xfeff) - - key = unhexlify(key) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - def test_aes_256(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '601ec313775789a5b7a7f504bbf3d228' +\ - 'f443e3ca4d62b59aca84e990cacaf5c5' +\ - '2b0930daa23de94ce87017ba2d84988d' +\ - 'dfc9c58db67aada613c2dd08457941a6' - key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' - counter = Counter.new(nbits=16, - prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), - initial_value=0xfeff) - key = unhexlify(key) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - 
self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - -class RFC3686TestVectors(unittest.TestCase): - - # Each item is a test vector with: - # - plaintext - # - ciphertext - # - key (AES 128, 192 or 256 bits) - # - counter prefix (4 byte nonce + 8 byte nonce) - data = ( - ('53696e676c6520626c6f636b206d7367', - 'e4095d4fb7a7b3792d6175a3261311b8', - 'ae6852f8121067cc4bf7a5765577f39e', - '000000300000000000000000'), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - '5104a106168a72d9790d41ee8edad388eb2e1efc46da57c8fce630df9141be28', - '7e24067817fae0d743d6ce1f32539163', - '006cb6dbc0543b59da48d90b'), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', - 'c1cf48a89f2ffdd9cf4652e9efdb72d74540a42bde6d7836d59a5ceaaef3105325b2072f', - '7691be035e5020a8ac6e618529f9a0dc', - '00e0017b27777f3f4a1786f0'), - ('53696e676c6520626c6f636b206d7367', - '4b55384fe259c9c84e7935a003cbe928', - '16af5b145fc9f579c175f93e3bfb0eed863d06ccfdb78515', - '0000004836733c147d6d93cb'), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - '453243fc609b23327edfaafa7131cd9f8490701c5ad4a79cfc1fe0ff42f4fb00', - '7c5cb2401b3dc33c19e7340819e0f69c678c3db8e6f6a91a', - '0096b03b020c6eadc2cb500d'), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', - '96893fc55e5c722f540b7dd1ddf7e758d288bc95c69165884536c811662f2188abee0935', - '02bf391ee8ecb159b959617b0965279bf59b60a786d3e0fe', - '0007bdfd5cbd60278dcc0912'), - ('53696e676c6520626c6f636b206d7367', - '145ad01dbf824ec7560863dc71e3e0c0', - '776beff2851db06f4c8a0542c8696f6c6a81af1eec96b4d37fc1d689e6c1c104', - '00000060db5672c97aa8f0b2'), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - 'f05e231b3894612c49ee000b804eb2a9b8306b508f839d6a5530831d9344af1c', - 'f6d66d6bd52d59bb0796365879eff886c66dd51a5b6a99744b50590c87a23884', - '00faac24c1585ef15a43d875'), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', - 'eb6c52821d0bbbf7ce7594462aca4faab407df866569fd07f48cc0b583d6071f1ec0e6b8', - 'ff7a617ce69148e4f1726e2f43581de2aa62d9f805532edff1eed687fb54153d', - '001cc5b751a51d70a1c11148') - ) - - bindata = [] - for tv in data: - bindata.append([unhexlify(x) for x in tv]) - - def runTest(self): - for pt, ct, key, prefix in self.bindata: - counter = Counter.new(32, prefix=prefix) - cipher = AES.new(key, AES.MODE_CTR, counter=counter) - result = cipher.encrypt(pt) - self.assertEqual(hexlify(ct), hexlify(result)) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(CtrTests) - tests += list_test_cases(SP800TestVectors) - tests += [ RFC3686TestVectors() ] - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ChaCha20.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ChaCha20.py deleted file mode 100644 index 92c6f3c..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ChaCha20.py +++ /dev/null @@ -1,529 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import os -import re -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.Util.py3compat import b, tobytes, bchr -from Cryptodome.Util.strxor import strxor_c -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Cipher import ChaCha20 - - -class ChaCha20Test(unittest.TestCase): - - def test_new_positive(self): - cipher = ChaCha20.new(key=b("0")*32, nonce=b"0"*8) - self.assertEqual(cipher.nonce, b"0" * 8) - cipher = ChaCha20.new(key=b("0")*32, nonce=b"0"*12) - self.assertEqual(cipher.nonce, b"0" * 12) - - def test_new_negative(self): - new = ChaCha20.new - self.assertRaises(TypeError, new) - self.assertRaises(TypeError, new, nonce=b("0")) - self.assertRaises(ValueError, new, nonce=b("0")*8, key=b("0")) - self.assertRaises(ValueError, new, nonce=b("0"), key=b("0")*32) - - def test_default_nonce(self): - cipher1 = ChaCha20.new(key=bchr(1) * 32) - cipher2 = ChaCha20.new(key=bchr(1) * 32) - self.assertEqual(len(cipher1.nonce), 8) - self.assertNotEqual(cipher1.nonce, cipher2.nonce) - - def test_nonce(self): - key = b'A' * 32 - - nonce1 = b'P' * 8 - cipher1 = ChaCha20.new(key=key, nonce=nonce1) - self.assertEqual(nonce1, cipher1.nonce) - - nonce2 = b'Q' * 12 - cipher2 = ChaCha20.new(key=key, nonce=nonce2) - self.assertEqual(nonce2, cipher2.nonce) - - def test_eiter_encrypt_or_decrypt(self): - """Verify that a cipher cannot be used for both decrypting and encrypting""" - - c1 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) - c1.encrypt(b("8")) - self.assertRaises(TypeError, c1.decrypt, b("9")) - - c2 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) - c2.decrypt(b("8")) - self.assertRaises(TypeError, c2.encrypt, b("9")) - - def test_round_trip(self): - pt = b("A") * 1024 - c1 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) - c2 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) - ct = c1.encrypt(pt) - self.assertEqual(c2.decrypt(ct), pt) - - self.assertEqual(c1.encrypt(b("")), b("")) - self.assertEqual(c2.decrypt(b("")), b("")) - - def test_streaming(self): - """Verify that an arbitrary number of bytes can be encrypted/decrypted""" - from Cryptodome.Hash import SHA1 - - segments = (1, 3, 5, 7, 11, 17, 23) - total = sum(segments) - - pt = b("") - while len(pt) < total: - pt += SHA1.new(pt).digest() - - cipher1 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) - ct = cipher1.encrypt(pt) - - cipher2 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) - cipher3 = ChaCha20.new(key=b("7") * 32, 
nonce=b("t") * 8) - idx = 0 - for segment in segments: - self.assertEqual(cipher2.decrypt(ct[idx:idx+segment]), pt[idx:idx+segment]) - self.assertEqual(cipher3.encrypt(pt[idx:idx+segment]), ct[idx:idx+segment]) - idx += segment - - def test_seek(self): - cipher1 = ChaCha20.new(key=b("9") * 32, nonce=b("e") * 8) - - offset = 64 * 900 + 7 - pt = b("1") * 64 - - cipher1.encrypt(b("0") * offset) - ct1 = cipher1.encrypt(pt) - - cipher2 = ChaCha20.new(key=b("9") * 32, nonce=b("e") * 8) - cipher2.seek(offset) - ct2 = cipher2.encrypt(pt) - - self.assertEqual(ct1, ct2) - - def test_seek_tv(self): - # Test Vector #4, A.1 from - # http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 - key = bchr(0) + bchr(255) + bchr(0) * 30 - nonce = bchr(0) * 8 - cipher = ChaCha20.new(key=key, nonce=nonce) - cipher.seek(64 * 2) - expected_key_stream = unhexlify(b( - "72d54dfbf12ec44b362692df94137f32" - "8fea8da73990265ec1bbbea1ae9af0ca" - "13b25aa26cb4a648cb9b9d1be65b2c09" - "24a66c54d545ec1b7374f4872e99f096" - )) - ct = cipher.encrypt(bchr(0) * len(expected_key_stream)) - self.assertEqual(expected_key_stream, ct) - - def test_rfc7539(self): - # from https://tools.ietf.org/html/rfc7539 Annex A.1 - # Each item is: key, nonce, block #, plaintext, ciphertext - tvs = [ - # Test Vector #1 - ( - "00"*32, - "00"*12, - 0, - "00"*16*4, - "76b8e0ada0f13d90405d6ae55386bd28" - "bdd219b8a08ded1aa836efcc8b770dc7" - "da41597c5157488d7724e03fb8d84a37" - "6a43b8f41518a11cc387b669b2ee6586" - ), - # Test Vector #2 - ( - "00"*31 + "01", - "00"*11 + "02", - 1, - "416e79207375626d697373696f6e2074" - "6f20746865204945544620696e74656e" - "6465642062792074686520436f6e7472" - "696275746f7220666f72207075626c69" - "636174696f6e20617320616c6c206f72" - "2070617274206f6620616e2049455446" - "20496e7465726e65742d447261667420" - "6f722052464320616e6420616e792073" - "746174656d656e74206d616465207769" - "7468696e2074686520636f6e74657874" - "206f6620616e20494554462061637469" - "7669747920697320636f6e7369646572" - "656420616e20224945544620436f6e74" - "7269627574696f6e222e205375636820" - "73746174656d656e747320696e636c75" - "6465206f72616c2073746174656d656e" - "747320696e2049455446207365737369" - "6f6e732c2061732077656c6c20617320" - "7772697474656e20616e6420656c6563" - "74726f6e696320636f6d6d756e696361" - "74696f6e73206d61646520617420616e" - "792074696d65206f7220706c6163652c" - "20776869636820617265206164647265" - "7373656420746f", - "a3fbf07df3fa2fde4f376ca23e827370" - "41605d9f4f4f57bd8cff2c1d4b7955ec" - "2a97948bd3722915c8f3d337f7d37005" - "0e9e96d647b7c39f56e031ca5eb6250d" - "4042e02785ececfa4b4bb5e8ead0440e" - "20b6e8db09d881a7c6132f420e527950" - "42bdfa7773d8a9051447b3291ce1411c" - "680465552aa6c405b7764d5e87bea85a" - "d00f8449ed8f72d0d662ab052691ca66" - "424bc86d2df80ea41f43abf937d3259d" - "c4b2d0dfb48a6c9139ddd7f76966e928" - "e635553ba76c5c879d7b35d49eb2e62b" - "0871cdac638939e25e8a1e0ef9d5280f" - "a8ca328b351c3c765989cbcf3daa8b6c" - "cc3aaf9f3979c92b3720fc88dc95ed84" - "a1be059c6499b9fda236e7e818b04b0b" - "c39c1e876b193bfe5569753f88128cc0" - "8aaa9b63d1a16f80ef2554d7189c411f" - "5869ca52c5b83fa36ff216b9c1d30062" - "bebcfd2dc5bce0911934fda79a86f6e6" - "98ced759c3ff9b6477338f3da4f9cd85" - "14ea9982ccafb341b2384dd902f3d1ab" - "7ac61dd29c6f21ba5b862f3730e37cfd" - "c4fd806c22f221" - ), - # Test Vector #3 - ( - "1c9240a5eb55d38af333888604f6b5f0" - "473917c1402b80099dca5cbc207075c0", - "00"*11 + "02", - 42, - "2754776173206272696c6c69672c2061" - "6e642074686520736c6974687920746f" - "7665730a446964206779726520616e64" - 
"2067696d626c6520696e207468652077" - "6162653a0a416c6c206d696d73792077" - "6572652074686520626f726f676f7665" - "732c0a416e6420746865206d6f6d6520" - "7261746873206f757467726162652e", - "62e6347f95ed87a45ffae7426f27a1df" - "5fb69110044c0d73118effa95b01e5cf" - "166d3df2d721caf9b21e5fb14c616871" - "fd84c54f9d65b283196c7fe4f60553eb" - "f39c6402c42234e32a356b3e764312a6" - "1a5532055716ead6962568f87d3f3f77" - "04c6a8d1bcd1bf4d50d6154b6da731b1" - "87b58dfd728afa36757a797ac188d1" - ) - ] - - for tv in tvs: - key = unhexlify(tv[0]) - nonce = unhexlify(tv[1]) - offset = tv[2] * 64 - pt = unhexlify(tv[3]) - ct_expect = unhexlify(tv[4]) - - cipher = ChaCha20.new(key=key, nonce=nonce) - if offset != 0: - cipher.seek(offset) - ct = cipher.encrypt(pt) - assert(ct == ct_expect) - - -class XChaCha20Test(unittest.TestCase): - - # From https://tools.ietf.org/html/draft-arciszewski-xchacha-03 - - def test_hchacha20(self): - # Section 2.2.1 - - from Cryptodome.Cipher.ChaCha20 import _HChaCha20 - - key = b"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f" - key = unhexlify(key.replace(b":", b"")) - - nonce = b"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27" - nonce = unhexlify(nonce.replace(b":", b"")) - - subkey = _HChaCha20(key, nonce) - - expected = b"82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc" - expected = unhexlify(expected.replace(b" ", b"")) - - self.assertEqual(subkey, expected) - - def test_nonce(self): - key = b'A' * 32 - nonce = b'P' * 24 - cipher = ChaCha20.new(key=key, nonce=nonce) - self.assertEqual(nonce, cipher.nonce) - - def test_encrypt(self): - # Section A.3.2 - - pt = b""" - 5468652064686f6c65202870726f6e6f756e6365642022646f6c652229206973 - 20616c736f206b6e6f776e2061732074686520417369617469632077696c6420 - 646f672c2072656420646f672c20616e642077686973746c696e6720646f672e - 2049742069732061626f7574207468652073697a65206f662061204765726d61 - 6e20736865706865726420627574206c6f6f6b73206d6f7265206c696b652061 - 206c6f6e672d6c656767656420666f782e205468697320686967686c7920656c - 757369766520616e6420736b696c6c6564206a756d70657220697320636c6173 - 736966696564207769746820776f6c7665732c20636f796f7465732c206a6163 - 6b616c732c20616e6420666f78657320696e20746865207461786f6e6f6d6963 - 2066616d696c792043616e696461652e""" - pt = unhexlify(pt.replace(b"\n", b"").replace(b" ", b"")) - - key = unhexlify(b"808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f") - iv = unhexlify(b"404142434445464748494a4b4c4d4e4f5051525354555658") - - ct = b""" - 7d0a2e6b7f7c65a236542630294e063b7ab9b555a5d5149aa21e4ae1e4fbce87 - ecc8e08a8b5e350abe622b2ffa617b202cfad72032a3037e76ffdcdc4376ee05 - 3a190d7e46ca1de04144850381b9cb29f051915386b8a710b8ac4d027b8b050f - 7cba5854e028d564e453b8a968824173fc16488b8970cac828f11ae53cabd201 - 12f87107df24ee6183d2274fe4c8b1485534ef2c5fbc1ec24bfc3663efaa08bc - 047d29d25043532db8391a8a3d776bf4372a6955827ccb0cdd4af403a7ce4c63 - d595c75a43e045f0cce1f29c8b93bd65afc5974922f214a40b7c402cdb91ae73 - c0b63615cdad0480680f16515a7ace9d39236464328a37743ffc28f4ddb324f4 - d0f5bbdc270c65b1749a6efff1fbaa09536175ccd29fb9e6057b307320d31683 - 8a9c71f70b5b5907a66f7ea49aadc409""" - ct = unhexlify(ct.replace(b"\n", b"").replace(b" ", b"")) - - cipher = ChaCha20.new(key=key, nonce=iv) - cipher.seek(64) # Counter = 1 - ct_test = cipher.encrypt(pt) - self.assertEqual(ct, ct_test) - - -class ByteArrayTest(unittest.TestCase): - """Verify we can encrypt or decrypt bytearrays""" - - def runTest(self): - - data = b"0123" - key = b"9" * 32 - nonce 
= b"t" * 8 - - # Encryption - data_ba = bytearray(data) - key_ba = bytearray(key) - nonce_ba = bytearray(nonce) - - cipher1 = ChaCha20.new(key=key, nonce=nonce) - ct = cipher1.encrypt(data) - - cipher2 = ChaCha20.new(key=key_ba, nonce=nonce_ba) - key_ba[:1] = b'\xFF' - nonce_ba[:1] = b'\xFF' - ct_test = cipher2.encrypt(data_ba) - - self.assertEqual(ct, ct_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decryption - key_ba = bytearray(key) - nonce_ba = bytearray(nonce) - ct_ba = bytearray(ct) - - cipher3 = ChaCha20.new(key=key_ba, nonce=nonce_ba) - key_ba[:1] = b'\xFF' - nonce_ba[:1] = b'\xFF' - pt_test = cipher3.decrypt(ct_ba) - - self.assertEqual(data, pt_test) - - -class MemoryviewTest(unittest.TestCase): - """Verify we can encrypt or decrypt bytearrays""" - - def runTest(self): - - data = b"0123" - key = b"9" * 32 - nonce = b"t" * 8 - - # Encryption - data_mv = memoryview(bytearray(data)) - key_mv = memoryview(bytearray(key)) - nonce_mv = memoryview(bytearray(nonce)) - - cipher1 = ChaCha20.new(key=key, nonce=nonce) - ct = cipher1.encrypt(data) - - cipher2 = ChaCha20.new(key=key_mv, nonce=nonce_mv) - key_mv[:1] = b'\xFF' - nonce_mv[:1] = b'\xFF' - ct_test = cipher2.encrypt(data_mv) - - self.assertEqual(ct, ct_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decryption - key_mv = memoryview(bytearray(key)) - nonce_mv = memoryview(bytearray(nonce)) - ct_mv = memoryview(bytearray(ct)) - - cipher3 = ChaCha20.new(key=key_mv, nonce=nonce_mv) - key_mv[:1] = b'\xFF' - nonce_mv[:1] = b'\xFF' - pt_test = cipher3.decrypt(ct_mv) - - self.assertEqual(data, pt_test) - - -class ChaCha20_AGL_NIR(unittest.TestCase): - - # From http://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04 - # and http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 - tv = [ - ( "00" * 32, - "00" * 8, - "76b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc" - "8b770dc7da41597c5157488d7724e03fb8d84a376a43b8f41518a11c" - "c387b669b2ee6586" - "9f07e7be5551387a98ba977c732d080d" - "cb0f29a048e3656912c6533e32ee7aed" - "29b721769ce64e43d57133b074d839d5" - "31ed1f28510afb45ace10a1f4b794d6f" - ), - ( "00" * 31 + "01", - "00" * 8, - "4540f05a9f1fb296d7736e7b208e3c96eb4fe1834688d2604f450952" - "ed432d41bbe2a0b6ea7566d2a5d1e7e20d42af2c53d792b1c43fea81" - "7e9ad275ae546963" - "3aeb5224ecf849929b9d828db1ced4dd" - "832025e8018b8160b82284f3c949aa5a" - "8eca00bbb4a73bdad192b5c42f73f2fd" - "4e273644c8b36125a64addeb006c13a0" - ), - ( "00" * 32, - "00" * 7 + "01", - "de9cba7bf3d69ef5e786dc63973f653a0b49e015adbff7134fcb7df1" - "37821031e85a050278a7084527214f73efc7fa5b5277062eb7a0433e" - "445f41e3" - ), - ( "00" * 32, - "01" + "00" * 7, - "ef3fdfd6c61578fbf5cf35bd3dd33b8009631634d21e42ac33960bd1" - "38e50d32111e4caf237ee53ca8ad6426194a88545ddc497a0b466e7d" - "6bbdb0041b2f586b" - ), - ( "000102030405060708090a0b0c0d0e0f101112131415161718191a1b" - "1c1d1e1f", - "0001020304050607", - "f798a189f195e66982105ffb640bb7757f579da31602fc93ec01ac56" - "f85ac3c134a4547b733b46413042c9440049176905d3be59ea1c53f1" - "5916155c2be8241a38008b9a26bc35941e2444177c8ade6689de9526" - "4986d95889fb60e84629c9bd9a5acb1cc118be563eb9b3a4a472f82e" - "09a7e778492b562ef7130e88dfe031c79db9d4f7c7a899151b9a4750" - "32b63fc385245fe054e3dd5a97a5f576fe064025d3ce042c566ab2c5" - "07b138db853e3d6959660996546cc9c4a6eafdc777c040d70eaf46f7" - "6dad3979e5c5360c3317166a1c894c94a371876a94df7628fe4eaaf2" - "ccb27d5aaae0ad7ad0f9d4b6ad3b54098746d4524d38407a6deb3ab7" - "8fab78c9" - ), - ( "00" * 32, - "00" * 7 + "02", - "c2c64d378cd536374ae204b9ef933fcd" - 
"1a8b2288b3dfa49672ab765b54ee27c7" - "8a970e0e955c14f3a88e741b97c286f7" - "5f8fc299e8148362fa198a39531bed6d" - ), - ] - - def runTest(self): - for (key, nonce, stream) in self.tv: - c = ChaCha20.new(key=unhexlify(b(key)), nonce=unhexlify(b(nonce))) - ct = unhexlify(b(stream)) - pt = b("\x00") * len(ct) - self.assertEqual(c.encrypt(pt), ct) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - key = b'4' * 32 - nonce = b'5' * 8 - cipher = ChaCha20.new(key=key, nonce=nonce) - - pt = b'5' * 300 - ct = cipher.encrypt(pt) - - output = bytearray(len(pt)) - cipher = ChaCha20.new(key=key, nonce=nonce) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = ChaCha20.new(key=key, nonce=nonce) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(len(pt))) - cipher = ChaCha20.new(key=key, nonce=nonce) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = ChaCha20.new(key=key, nonce=nonce) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - cipher = ChaCha20.new(key=key, nonce=nonce) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*len(pt)) - - cipher = ChaCha20.new(key=key, nonce=nonce) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*len(pt)) - - shorter_output = bytearray(len(pt) - 1) - - cipher = ChaCha20.new(key=key, nonce=nonce) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - - cipher = ChaCha20.new(key=key, nonce=nonce) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(ChaCha20Test) - tests += list_test_cases(XChaCha20Test) - tests.append(ChaCha20_AGL_NIR()) - tests.append(ByteArrayTest()) - tests.append(MemoryviewTest()) - tests.append(TestOutput()) - - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ChaCha20_Poly1305.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ChaCha20_Poly1305.py deleted file mode 100644 index 495028a..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_ChaCha20_Poly1305.py +++ /dev/null @@ -1,776 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2018, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.Cipher import ChaCha20_Poly1305 -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.Util.strxor import strxor - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class ChaCha20Poly1305Tests(unittest.TestCase): - - key_256 = get_tag_random("key_256", 32) - nonce_96 = get_tag_random("nonce_96", 12) - data_128 = get_tag_random("data_128", 16) - - def test_loopback(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_nonce(self): - # Nonce can only be 8 or 12 bytes - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=b'H' * 8) - self.assertEqual(len(cipher.nonce), 8) - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=b'H' * 12) - self.assertEqual(len(cipher.nonce), 12) - - # If not passed, the nonce is created randomly - cipher = ChaCha20_Poly1305.new(key=self.key_256) - nonce1 = cipher.nonce - cipher = ChaCha20_Poly1305.new(key=self.key_256) - nonce2 = cipher.nonce - self.assertEqual(len(nonce1), 12) - self.assertNotEqual(nonce1, nonce2) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data_128) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - self.assertEqual(ct, cipher.encrypt(self.data_128)) - - def test_nonce_must_be_bytes(self): - self.assertRaises(TypeError, - ChaCha20_Poly1305.new, - key=self.key_256, - nonce=u'test12345678') - - def test_nonce_length(self): - # nonce can only be 8 or 12 bytes long - self.assertRaises(ValueError, - ChaCha20_Poly1305.new, - key=self.key_256, - nonce=b'0' * 7) - self.assertRaises(ValueError, - ChaCha20_Poly1305.new, - key=self.key_256, - nonce=b'') - - def test_block_size(self): - # Not based on block ciphers - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - self.assertFalse(hasattr(cipher, 'block_size')) - - def test_nonce_attribute(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - self.assertEqual(cipher.nonce, self.nonce_96) - - # By default, a 12 bytes long nonce is randomly generated - nonce1 = ChaCha20_Poly1305.new(key=self.key_256).nonce - nonce2 = ChaCha20_Poly1305.new(key=self.key_256).nonce - self.assertEqual(len(nonce1), 12) - self.assertNotEqual(nonce1, nonce2) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, - ChaCha20_Poly1305.new, - key=self.key_256, - param=9) - - def test_null_encryption_decryption(self): - for func in "encrypt", 
"decrypt": - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - result = getattr(cipher, func)(b"") - self.assertEqual(result, b"") - - def test_either_encrypt_or_decrypt(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_data_must_be_bytes(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') - - def test_mac_len(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - _, mac = cipher.encrypt_and_digest(self.data_128) - self.assertEqual(len(mac), 16) - - def test_invalid_mac(self): - from Cryptodome.Util.strxor import strxor_c - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data_128) - - invalid_mac = strxor_c(mac, 0x01) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, - invalid_mac) - - def test_hex_mac(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - mac_hex = cipher.hexdigest() - self.assertEqual(cipher.digest(), unhexlify(mac_hex)) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.hexverify(mac_hex) - - def test_message_chunks(self): - # Validate that both associated data and plaintext/ciphertext - # can be broken up in chunks of arbitrary length - - auth_data = get_tag_random("authenticated data", 127) - plaintext = get_tag_random("plaintext", 127) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(auth_data) - ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) - - def break_up(data, chunk_length): - return [data[i:i+chunk_length] for i in range(0, len(data), - chunk_length)] - - # Encryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - pt2 = b"" - for chunk in break_up(ciphertext, chunk_length): - pt2 += cipher.decrypt(chunk) - self.assertEqual(plaintext, pt2) - cipher.verify(ref_mac) - - # Decryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - ct2 = b"" - for chunk in break_up(plaintext, chunk_length): - ct2 += cipher.encrypt(chunk) - self.assertEqual(ciphertext, ct2) - self.assertEqual(cipher.digest(), ref_mac) - - def test_bytearray(self): - - # Encrypt - key_ba = bytearray(self.key_256) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data_128) - data_ba = bytearray(self.data_128) - - cipher1 = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher1.update(self.data_128) - ct = cipher1.encrypt(self.data_128) - tag = cipher1.digest() - - cipher2 = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - key_ba[:3] = b'\xFF\xFF\xFF' - nonce_ba[:3] = b'\xFF\xFF\xFF' - cipher2.update(header_ba) - 
header_ba[:3] = b'\xFF\xFF\xFF' - ct_test = cipher2.encrypt(data_ba) - data_ba[:3] = b'\x99\x99\x99' - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_ba = bytearray(self.key_256) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data_128) - ct_ba = bytearray(ct) - tag_ba = bytearray(tag) - del data_ba - - cipher3 = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - key_ba[:3] = b'\xFF\xFF\xFF' - nonce_ba[:3] = b'\xFF\xFF\xFF' - cipher3.update(header_ba) - header_ba[:3] = b'\xFF\xFF\xFF' - pt_test = cipher3.decrypt(ct_ba) - ct_ba[:3] = b'\xFF\xFF\xFF' - cipher3.verify(tag_ba) - - self.assertEqual(pt_test, self.data_128) - - def test_memoryview(self): - - # Encrypt - key_mv = memoryview(bytearray(self.key_256)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data_128)) - data_mv = memoryview(bytearray(self.data_128)) - - cipher1 = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher1.update(self.data_128) - ct = cipher1.encrypt(self.data_128) - tag = cipher1.digest() - - cipher2 = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - key_mv[:3] = b'\xFF\xFF\xFF' - nonce_mv[:3] = b'\xFF\xFF\xFF' - cipher2.update(header_mv) - header_mv[:3] = b'\xFF\xFF\xFF' - ct_test = cipher2.encrypt(data_mv) - data_mv[:3] = b'\x99\x99\x99' - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_mv = memoryview(bytearray(self.key_256)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data_128)) - ct_mv = memoryview(bytearray(ct)) - tag_mv = memoryview(bytearray(tag)) - del data_mv - - cipher3 = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - key_mv[:3] = b'\xFF\xFF\xFF' - nonce_mv[:3] = b'\xFF\xFF\xFF' - cipher3.update(header_mv) - header_mv[:3] = b'\xFF\xFF\xFF' - pt_test = cipher3.decrypt(ct_mv) - ct_mv[:3] = b'\x99\x99\x99' - cipher3.verify(tag_mv) - - self.assertEqual(pt_test, self.data_128) - - -class XChaCha20Poly1305Tests(unittest.TestCase): - - def test_nonce(self): - # Nonce can only be 24 bytes - cipher = ChaCha20_Poly1305.new(key=b'Y' * 32, - nonce=b'H' * 24) - self.assertEqual(len(cipher.nonce), 24) - self.assertEqual(cipher.nonce, b'H' * 24) - - def test_encrypt(self): - # From https://tools.ietf.org/html/draft-arciszewski-xchacha-03 - # Section A.3.1 - - pt = b""" - 4c616469657320616e642047656e746c656d656e206f662074686520636c6173 - 73206f66202739393a204966204920636f756c64206f6666657220796f75206f - 6e6c79206f6e652074697020666f7220746865206675747572652c2073756e73 - 637265656e20776f756c642062652069742e""" - pt = unhexlify(pt.replace(b"\n", b"").replace(b" ", b"")) - - aad = unhexlify(b"50515253c0c1c2c3c4c5c6c7") - key = unhexlify(b"808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f") - iv = unhexlify(b"404142434445464748494a4b4c4d4e4f5051525354555657") - - ct = b""" - bd6d179d3e83d43b9576579493c0e939572a1700252bfaccbed2902c21396cbb - 731c7f1b0b4aa6440bf3a82f4eda7e39ae64c6708c54c216cb96b72e1213b452 - 2f8c9ba40db5d945b11b69b982c1bb9e3f3fac2bc369488f76b2383565d3fff9 - 21f9664c97637da9768812f615c68b13b52e""" - ct = unhexlify(ct.replace(b"\n", b"").replace(b" ", b"")) - - tag = unhexlify(b"c0875924c1c7987947deafd8780acf49") - - cipher = ChaCha20_Poly1305.new(key=key, nonce=iv) - cipher.update(aad) - 
ct_test, tag_test = cipher.encrypt_and_digest(pt) - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=iv) - cipher.update(aad) - cipher.decrypt_and_verify(ct, tag) - - -class ChaCha20Poly1305FSMTests(unittest.TestCase): - - key_256 = get_tag_random("key_256", 32) - nonce_96 = get_tag_random("nonce_96", 12) - data_128 = get_tag_random("data_128", 16) - - def test_valid_init_encrypt_decrypt_digest_verify(self): - # No authenticated data, fixed plaintext - # Verify path INIT->ENCRYPT->DIGEST - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data_128) - mac = cipher.digest() - - # Verify path INIT->DECRYPT->VERIFY - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_update_digest_verify(self): - # No plaintext, fixed authenticated data - # Verify path INIT->UPDATE->DIGEST - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - mac = cipher.digest() - - # Verify path INIT->UPDATE->VERIFY - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - cipher.verify(mac) - - def test_valid_full_path(self): - # Fixed authenticated data, fixed plaintext - # Verify path INIT->UPDATE->ENCRYPT->DIGEST - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - ct = cipher.encrypt(self.data_128) - mac = cipher.digest() - - # Verify path INIT->UPDATE->DECRYPT->VERIFY - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_digest(self): - # Verify path INIT->DIGEST - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.digest() - - def test_valid_init_verify(self): - # Verify path INIT->VERIFY - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - mac = cipher.digest() - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.verify(mac) - - def test_valid_multiple_encrypt_or_decrypt(self): - for method_name in "encrypt", "decrypt": - for auth_data in (None, b"333", self.data_128, - self.data_128 + b"3"): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - if auth_data is not None: - cipher.update(auth_data) - method = getattr(cipher, method_name) - method(self.data_128) - method(self.data_128) - method(self.data_128) - method(self.data_128) - - def test_valid_multiple_digest_or_verify(self): - # Multiple calls to digest - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - first_mac = cipher.digest() - for x in range(4): - self.assertEqual(first_mac, cipher.digest()) - - # Multiple calls to verify - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - for x in range(5): - cipher.verify(first_mac) - - def test_valid_encrypt_and_digest_decrypt_and_verify(self): - # encrypt_and_digest - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - ct, mac = cipher.encrypt_and_digest(self.data_128) - - # decrypt_and_verify - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.update(self.data_128) - pt = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(self.data_128, pt) - - def 
test_invalid_mixing_encrypt_decrypt(self): - # Once per method, with or without assoc. data - for method1_name, method2_name in (("encrypt", "decrypt"), - ("decrypt", "encrypt")): - for assoc_data_present in (True, False): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - if assoc_data_present: - cipher.update(self.data_128) - getattr(cipher, method1_name)(self.data_128) - self.assertRaises(TypeError, getattr(cipher, method2_name), - self.data_128) - - def test_invalid_encrypt_or_update_after_digest(self): - for method_name in "encrypt", "update": - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.encrypt(self.data_128) - cipher.digest() - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data_128) - - def test_invalid_decrypt_or_update_after_verify(self): - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data_128) - mac = cipher.digest() - - for method_name in "decrypt", "update": - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - cipher = ChaCha20_Poly1305.new(key=self.key_256, - nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - -def compact(x): - return unhexlify(x.replace(" ", "").replace(":", "")) - - -class TestVectorsRFC(unittest.TestCase): - """Test cases from RFC7539""" - - # AAD, PT, CT, MAC, KEY, NONCE - test_vectors_hex = [ - ( '50 51 52 53 c0 c1 c2 c3 c4 c5 c6 c7', - '4c 61 64 69 65 73 20 61 6e 64 20 47 65 6e 74 6c' - '65 6d 65 6e 20 6f 66 20 74 68 65 20 63 6c 61 73' - '73 20 6f 66 20 27 39 39 3a 20 49 66 20 49 20 63' - '6f 75 6c 64 20 6f 66 66 65 72 20 79 6f 75 20 6f' - '6e 6c 79 20 6f 6e 65 20 74 69 70 20 66 6f 72 20' - '74 68 65 20 66 75 74 75 72 65 2c 20 73 75 6e 73' - '63 72 65 65 6e 20 77 6f 75 6c 64 20 62 65 20 69' - '74 2e', - 'd3 1a 8d 34 64 8e 60 db 7b 86 af bc 53 ef 7e c2' - 'a4 ad ed 51 29 6e 08 fe a9 e2 b5 a7 36 ee 62 d6' - '3d be a4 5e 8c a9 67 12 82 fa fb 69 da 92 72 8b' - '1a 71 de 0a 9e 06 0b 29 05 d6 a5 b6 7e cd 3b 36' - '92 dd bd 7f 2d 77 8b 8c 98 03 ae e3 28 09 1b 58' - 'fa b3 24 e4 fa d6 75 94 55 85 80 8b 48 31 d7 bc' - '3f f4 de f0 8e 4b 7a 9d e5 76 d2 65 86 ce c6 4b' - '61 16', - '1a:e1:0b:59:4f:09:e2:6a:7e:90:2e:cb:d0:60:06:91', - '80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f' - '90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f', - '07 00 00 00' + '40 41 42 43 44 45 46 47', - ), - ( 'f3 33 88 86 00 00 00 00 00 00 4e 91', - '49 6e 74 65 72 6e 65 74 2d 44 72 61 66 74 73 20' - '61 72 65 20 64 72 61 66 74 20 64 6f 63 75 6d 65' - '6e 74 73 20 76 61 6c 69 64 20 66 6f 72 20 61 20' - '6d 61 78 69 6d 75 6d 20 6f 66 20 73 69 78 20 6d' - '6f 6e 74 68 73 20 61 6e 64 20 6d 61 79 20 62 65' - '20 75 70 64 61 74 65 64 2c 20 72 65 70 6c 61 63' - '65 64 2c 20 6f 72 20 6f 62 73 6f 6c 65 74 65 64' - '20 62 79 20 6f 74 68 65 72 20 64 6f 63 75 6d 65' - '6e 74 73 20 61 74 20 61 6e 79 20 74 69 6d 65 2e' - '20 49 74 20 69 73 20 69 6e 61 70 70 72 6f 70 72' - '69 61 74 65 20 74 6f 20 75 73 65 20 49 6e 74 65' - '72 
6e 65 74 2d 44 72 61 66 74 73 20 61 73 20 72' - '65 66 65 72 65 6e 63 65 20 6d 61 74 65 72 69 61' - '6c 20 6f 72 20 74 6f 20 63 69 74 65 20 74 68 65' - '6d 20 6f 74 68 65 72 20 74 68 61 6e 20 61 73 20' - '2f e2 80 9c 77 6f 72 6b 20 69 6e 20 70 72 6f 67' - '72 65 73 73 2e 2f e2 80 9d', - '64 a0 86 15 75 86 1a f4 60 f0 62 c7 9b e6 43 bd' - '5e 80 5c fd 34 5c f3 89 f1 08 67 0a c7 6c 8c b2' - '4c 6c fc 18 75 5d 43 ee a0 9e e9 4e 38 2d 26 b0' - 'bd b7 b7 3c 32 1b 01 00 d4 f0 3b 7f 35 58 94 cf' - '33 2f 83 0e 71 0b 97 ce 98 c8 a8 4a bd 0b 94 81' - '14 ad 17 6e 00 8d 33 bd 60 f9 82 b1 ff 37 c8 55' - '97 97 a0 6e f4 f0 ef 61 c1 86 32 4e 2b 35 06 38' - '36 06 90 7b 6a 7c 02 b0 f9 f6 15 7b 53 c8 67 e4' - 'b9 16 6c 76 7b 80 4d 46 a5 9b 52 16 cd e7 a4 e9' - '90 40 c5 a4 04 33 22 5e e2 82 a1 b0 a0 6c 52 3e' - 'af 45 34 d7 f8 3f a1 15 5b 00 47 71 8c bc 54 6a' - '0d 07 2b 04 b3 56 4e ea 1b 42 22 73 f5 48 27 1a' - '0b b2 31 60 53 fa 76 99 19 55 eb d6 31 59 43 4e' - 'ce bb 4e 46 6d ae 5a 10 73 a6 72 76 27 09 7a 10' - '49 e6 17 d9 1d 36 10 94 fa 68 f0 ff 77 98 71 30' - '30 5b ea ba 2e da 04 df 99 7b 71 4d 6c 6f 2c 29' - 'a6 ad 5c b4 02 2b 02 70 9b', - 'ee ad 9d 67 89 0c bb 22 39 23 36 fe a1 85 1f 38', - '1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0' - '47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0', - '00 00 00 00 01 02 03 04 05 06 07 08', - ) - ] - - test_vectors = [[unhexlify(x.replace(" ", "").replace(":", "")) for x in tv] for tv in test_vectors_hex] - - def runTest(self): - for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: - # Encrypt - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - cipher.update(assoc_data) - ct2, mac2 = cipher.encrypt_and_digest(pt) - self.assertEqual(ct, ct2) - self.assertEqual(mac, mac2) - - # Decrypt - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - cipher.update(assoc_data) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._id = "None" - - def load_tests(self, filename): - - def filter_tag(group): - return group['tagSize'] // 8 - - def filter_algo(root): - return root['algorithm'] - - result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - filename, - "Wycheproof ChaCha20-Poly1305", - root_tag={'algo': filter_algo}, - group_tag={'tag_size': filter_tag}) - return result - - def setUp(self): - self.tv = [] - self.tv.extend(self.load_tests("chacha20_poly1305_test.json")) - self.tv.extend(self.load_tests("xchacha20_poly1305_test.json")) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_encrypt(self, tv): - self._id = "Wycheproof Encrypt %s Test #%s" % (tv.algo, tv.id) - - try: - cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) - except ValueError as e: - assert len(tv.iv) not in (8, 12) and "Nonce must be" in str(e) - return - - cipher.update(tv.aad) - ct, tag = cipher.encrypt_and_digest(tv.msg) - if tv.valid: - self.assertEqual(ct, tv.ct) - self.assertEqual(tag, tv.tag) - self.warn(tv) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt %s Test #%s" % (tv.algo, tv.id) - - try: - cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) - except ValueError as e: - assert len(tv.iv) not in (8, 12) and "Nonce must be" in str(e) - return - - 
cipher.update(tv.aad) - try: - pt = cipher.decrypt_and_verify(tv.ct, tv.tag) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - self.warn(tv) - - def test_corrupt_decrypt(self, tv): - self._id = "Wycheproof Corrupt Decrypt ChaCha20-Poly1305 Test #" + str(tv.id) - if len(tv.iv) == 0 or len(tv.ct) < 1: - return - cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) - cipher.update(tv.aad) - ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) - - def runTest(self): - - for tv in self.tv: - self.test_encrypt(tv) - self.test_decrypt(tv) - self.test_corrupt_decrypt(tv) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - key = b'4' * 32 - nonce = b'5' * 12 - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - - pt = b'5' * 16 - ct = cipher.encrypt(pt) - - output = bytearray(16) - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(16)) - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(7) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - - cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(ChaCha20Poly1305Tests) - tests += list_test_cases(XChaCha20Poly1305Tests) - tests += list_test_cases(ChaCha20Poly1305FSMTests) - tests += [TestVectorsRFC()] - tests += [TestVectorsWycheproof(wycheproof_warnings)] - tests += [TestOutput()] - return tests - - -if __name__ == '__main__': - def suite(): - unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_DES.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_DES.py deleted file mode 100644 index df1313a..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_DES.py +++ /dev/null @@ -1,374 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/DES.py: Self-test for the (Single) DES cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
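[Editor's note: the ChaCha20-Poly1305 self-tests deleted above cover the AEAD workflow (associated data, tag generation, tag verification). A minimal sketch of that workflow, assuming illustrative key/nonce/header values; every call shown (new, update, encrypt_and_digest, decrypt_and_verify) appears in the removed tests.]

from Cryptodome.Cipher import ChaCha20_Poly1305

key = b"K" * 32                       # 256-bit key
nonce = b"N" * 12                     # 8-, 12-, or 24-byte nonces are accepted

sender = ChaCha20_Poly1305.new(key=key, nonce=nonce)
sender.update(b"header")              # associated data: authenticated but not encrypted
ct, tag = sender.encrypt_and_digest(b"example plaintext")

receiver = ChaCha20_Poly1305.new(key=key, nonce=nonce)
receiver.update(b"header")
pt = receiver.decrypt_and_verify(ct, tag)   # raises ValueError on any tampering
assert pt == b"example plaintext"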
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.DES""" - -import unittest - -from Cryptodome.Cipher import DES - -# This is a list of (plaintext, ciphertext, key, description) tuples. -SP800_17_B1_KEY = '01' * 8 -SP800_17_B2_PT = '00' * 8 -test_data = [ - # Test vectors from Appendix A of NIST SP 800-17 - # "Modes of Operation Validation System (MOVS): Requirements and Procedures" - # http://csrc.nist.gov/publications/nistpubs/800-17/800-17.pdf - - # Appendix A - "Sample Round Outputs for the DES" - ('0000000000000000', '82dcbafbdeab6602', '10316e028c8f3b4a', - "NIST SP800-17 A"), - - # Table B.1 - Variable Plaintext Known Answer Test - ('8000000000000000', '95f8a5e5dd31d900', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #0'), - ('4000000000000000', 'dd7f121ca5015619', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #1'), - ('2000000000000000', '2e8653104f3834ea', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #2'), - ('1000000000000000', '4bd388ff6cd81d4f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #3'), - ('0800000000000000', '20b9e767b2fb1456', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #4'), - ('0400000000000000', '55579380d77138ef', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #5'), - ('0200000000000000', '6cc5defaaf04512f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #6'), - ('0100000000000000', '0d9f279ba5d87260', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #7'), - ('0080000000000000', 'd9031b0271bd5a0a', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #8'), - ('0040000000000000', '424250b37c3dd951', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #9'), - ('0020000000000000', 'b8061b7ecd9a21e5', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #10'), - ('0010000000000000', 'f15d0f286b65bd28', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #11'), - ('0008000000000000', 'add0cc8d6e5deba1', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #12'), - ('0004000000000000', 'e6d5f82752ad63d1', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #13'), - ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #14'), - ('0001000000000000', 'f356834379d165cd', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #15'), - ('0000800000000000', '2b9f982f20037fa9', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #16'), - ('0000400000000000', '889de068a16f0be6', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #17'), - ('0000200000000000', 'e19e275d846a1298', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #18'), - ('0000100000000000', '329a8ed523d71aec', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #19'), - ('0000080000000000', 'e7fce22557d23c97', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #20'), - ('0000040000000000', '12a9f5817ff2d65d', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #21'), - ('0000020000000000', 'a484c3ad38dc9c19', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #22'), - ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #23'), - ('0000008000000000', '750d079407521363', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #24'), - ('0000004000000000', '64feed9c724c2faf', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #25'), - ('0000002000000000', 'f02b263b328e2b60', SP800_17_B1_KEY, - 'NIST SP800-17 
B.1 #26'), - ('0000001000000000', '9d64555a9a10b852', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #27'), - ('0000000800000000', 'd106ff0bed5255d7', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #28'), - ('0000000400000000', 'e1652c6b138c64a5', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #29'), - ('0000000200000000', 'e428581186ec8f46', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #30'), - ('0000000100000000', 'aeb5f5ede22d1a36', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #31'), - ('0000000080000000', 'e943d7568aec0c5c', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #32'), - ('0000000040000000', 'df98c8276f54b04b', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #33'), - ('0000000020000000', 'b160e4680f6c696f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #34'), - ('0000000010000000', 'fa0752b07d9c4ab8', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #35'), - ('0000000008000000', 'ca3a2b036dbc8502', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #36'), - ('0000000004000000', '5e0905517bb59bcf', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #37'), - ('0000000002000000', '814eeb3b91d90726', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #38'), - ('0000000001000000', '4d49db1532919c9f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #39'), - ('0000000000800000', '25eb5fc3f8cf0621', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #40'), - ('0000000000400000', 'ab6a20c0620d1c6f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #41'), - ('0000000000200000', '79e90dbc98f92cca', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #42'), - ('0000000000100000', '866ecedd8072bb0e', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #43'), - ('0000000000080000', '8b54536f2f3e64a8', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #44'), - ('0000000000040000', 'ea51d3975595b86b', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #45'), - ('0000000000020000', 'caffc6ac4542de31', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #46'), - ('0000000000010000', '8dd45a2ddf90796c', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #47'), - ('0000000000008000', '1029d55e880ec2d0', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #48'), - ('0000000000004000', '5d86cb23639dbea9', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #49'), - ('0000000000002000', '1d1ca853ae7c0c5f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #50'), - ('0000000000001000', 'ce332329248f3228', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #51'), - ('0000000000000800', '8405d1abe24fb942', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #52'), - ('0000000000000400', 'e643d78090ca4207', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #53'), - ('0000000000000200', '48221b9937748a23', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #54'), - ('0000000000000100', 'dd7c0bbd61fafd54', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #55'), - ('0000000000000080', '2fbc291a570db5c4', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #56'), - ('0000000000000040', 'e07c30d7e4e26e12', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #57'), - ('0000000000000020', '0953e2258e8e90a1', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #58'), - ('0000000000000010', '5b711bc4ceebf2ee', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #59'), - ('0000000000000008', 'cc083f1e6d9e85f6', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #60'), - ('0000000000000004', 'd2fd8867d50d2dfe', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #61'), - ('0000000000000002', '06e7ea22ce92708f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #62'), - ('0000000000000001', '166b40b44aba4bd6', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #63'), - - # Table B.2 - Variable Key Known Answer Test - (SP800_17_B2_PT, '95a8d72813daa94d', '8001010101010101', - 'NIST SP800-17 B.2 #0'), - (SP800_17_B2_PT, '0eec1487dd8c26d5', '4001010101010101', - 'NIST SP800-17 B.2 #1'), - (SP800_17_B2_PT, '7ad16ffb79c45926', '2001010101010101', - 'NIST 
SP800-17 B.2 #2'), - (SP800_17_B2_PT, 'd3746294ca6a6cf3', '1001010101010101', - 'NIST SP800-17 B.2 #3'), - (SP800_17_B2_PT, '809f5f873c1fd761', '0801010101010101', - 'NIST SP800-17 B.2 #4'), - (SP800_17_B2_PT, 'c02faffec989d1fc', '0401010101010101', - 'NIST SP800-17 B.2 #5'), - (SP800_17_B2_PT, '4615aa1d33e72f10', '0201010101010101', - 'NIST SP800-17 B.2 #6'), - (SP800_17_B2_PT, '2055123350c00858', '0180010101010101', - 'NIST SP800-17 B.2 #7'), - (SP800_17_B2_PT, 'df3b99d6577397c8', '0140010101010101', - 'NIST SP800-17 B.2 #8'), - (SP800_17_B2_PT, '31fe17369b5288c9', '0120010101010101', - 'NIST SP800-17 B.2 #9'), - (SP800_17_B2_PT, 'dfdd3cc64dae1642', '0110010101010101', - 'NIST SP800-17 B.2 #10'), - (SP800_17_B2_PT, '178c83ce2b399d94', '0108010101010101', - 'NIST SP800-17 B.2 #11'), - (SP800_17_B2_PT, '50f636324a9b7f80', '0104010101010101', - 'NIST SP800-17 B.2 #12'), - (SP800_17_B2_PT, 'a8468ee3bc18f06d', '0102010101010101', - 'NIST SP800-17 B.2 #13'), - (SP800_17_B2_PT, 'a2dc9e92fd3cde92', '0101800101010101', - 'NIST SP800-17 B.2 #14'), - (SP800_17_B2_PT, 'cac09f797d031287', '0101400101010101', - 'NIST SP800-17 B.2 #15'), - (SP800_17_B2_PT, '90ba680b22aeb525', '0101200101010101', - 'NIST SP800-17 B.2 #16'), - (SP800_17_B2_PT, 'ce7a24f350e280b6', '0101100101010101', - 'NIST SP800-17 B.2 #17'), - (SP800_17_B2_PT, '882bff0aa01a0b87', '0101080101010101', - 'NIST SP800-17 B.2 #18'), - (SP800_17_B2_PT, '25610288924511c2', '0101040101010101', - 'NIST SP800-17 B.2 #19'), - (SP800_17_B2_PT, 'c71516c29c75d170', '0101020101010101', - 'NIST SP800-17 B.2 #20'), - (SP800_17_B2_PT, '5199c29a52c9f059', '0101018001010101', - 'NIST SP800-17 B.2 #21'), - (SP800_17_B2_PT, 'c22f0a294a71f29f', '0101014001010101', - 'NIST SP800-17 B.2 #22'), - (SP800_17_B2_PT, 'ee371483714c02ea', '0101012001010101', - 'NIST SP800-17 B.2 #23'), - (SP800_17_B2_PT, 'a81fbd448f9e522f', '0101011001010101', - 'NIST SP800-17 B.2 #24'), - (SP800_17_B2_PT, '4f644c92e192dfed', '0101010801010101', - 'NIST SP800-17 B.2 #25'), - (SP800_17_B2_PT, '1afa9a66a6df92ae', '0101010401010101', - 'NIST SP800-17 B.2 #26'), - (SP800_17_B2_PT, 'b3c1cc715cb879d8', '0101010201010101', - 'NIST SP800-17 B.2 #27'), - (SP800_17_B2_PT, '19d032e64ab0bd8b', '0101010180010101', - 'NIST SP800-17 B.2 #28'), - (SP800_17_B2_PT, '3cfaa7a7dc8720dc', '0101010140010101', - 'NIST SP800-17 B.2 #29'), - (SP800_17_B2_PT, 'b7265f7f447ac6f3', '0101010120010101', - 'NIST SP800-17 B.2 #30'), - (SP800_17_B2_PT, '9db73b3c0d163f54', '0101010110010101', - 'NIST SP800-17 B.2 #31'), - (SP800_17_B2_PT, '8181b65babf4a975', '0101010108010101', - 'NIST SP800-17 B.2 #32'), - (SP800_17_B2_PT, '93c9b64042eaa240', '0101010104010101', - 'NIST SP800-17 B.2 #33'), - (SP800_17_B2_PT, '5570530829705592', '0101010102010101', - 'NIST SP800-17 B.2 #34'), - (SP800_17_B2_PT, '8638809e878787a0', '0101010101800101', - 'NIST SP800-17 B.2 #35'), - (SP800_17_B2_PT, '41b9a79af79ac208', '0101010101400101', - 'NIST SP800-17 B.2 #36'), - (SP800_17_B2_PT, '7a9be42f2009a892', '0101010101200101', - 'NIST SP800-17 B.2 #37'), - (SP800_17_B2_PT, '29038d56ba6d2745', '0101010101100101', - 'NIST SP800-17 B.2 #38'), - (SP800_17_B2_PT, '5495c6abf1e5df51', '0101010101080101', - 'NIST SP800-17 B.2 #39'), - (SP800_17_B2_PT, 'ae13dbd561488933', '0101010101040101', - 'NIST SP800-17 B.2 #40'), - (SP800_17_B2_PT, '024d1ffa8904e389', '0101010101020101', - 'NIST SP800-17 B.2 #41'), - (SP800_17_B2_PT, 'd1399712f99bf02e', '0101010101018001', - 'NIST SP800-17 B.2 #42'), - (SP800_17_B2_PT, '14c1d7c1cffec79e', '0101010101014001', - 
'NIST SP800-17 B.2 #43'), - (SP800_17_B2_PT, '1de5279dae3bed6f', '0101010101012001', - 'NIST SP800-17 B.2 #44'), - (SP800_17_B2_PT, 'e941a33f85501303', '0101010101011001', - 'NIST SP800-17 B.2 #45'), - (SP800_17_B2_PT, 'da99dbbc9a03f379', '0101010101010801', - 'NIST SP800-17 B.2 #46'), - (SP800_17_B2_PT, 'b7fc92f91d8e92e9', '0101010101010401', - 'NIST SP800-17 B.2 #47'), - (SP800_17_B2_PT, 'ae8e5caa3ca04e85', '0101010101010201', - 'NIST SP800-17 B.2 #48'), - (SP800_17_B2_PT, '9cc62df43b6eed74', '0101010101010180', - 'NIST SP800-17 B.2 #49'), - (SP800_17_B2_PT, 'd863dbb5c59a91a0', '0101010101010140', - 'NIST SP800-17 B.2 #50'), - (SP800_17_B2_PT, 'a1ab2190545b91d7', '0101010101010120', - 'NIST SP800-17 B.2 #51'), - (SP800_17_B2_PT, '0875041e64c570f7', '0101010101010110', - 'NIST SP800-17 B.2 #52'), - (SP800_17_B2_PT, '5a594528bebef1cc', '0101010101010108', - 'NIST SP800-17 B.2 #53'), - (SP800_17_B2_PT, 'fcdb3291de21f0c0', '0101010101010104', - 'NIST SP800-17 B.2 #54'), - (SP800_17_B2_PT, '869efd7f9f265a09', '0101010101010102', - 'NIST SP800-17 B.2 #55'), -] - -class RonRivestTest(unittest.TestCase): - """ Ronald L. Rivest's DES test, see - http://people.csail.mit.edu/rivest/Destest.txt - ABSTRACT - -------- - - We present a simple way to test the correctness of a DES implementation: - Use the recurrence relation: - - X0 = 9474B8E8C73BCA7D (hexadecimal) - - X(i+1) = IF (i is even) THEN E(Xi,Xi) ELSE D(Xi,Xi) - - to compute a sequence of 64-bit values: X0, X1, X2, ..., X16. Here - E(X,K) denotes the DES encryption of X using key K, and D(X,K) denotes - the DES decryption of X using key K. If you obtain - - X16 = 1B1A2DDB4C642438 - - your implementation does not have any of the 36,568 possible single-fault - errors described herein. - """ - def runTest(self): - from binascii import b2a_hex - - X = [] - X[0:] = [b'\x94\x74\xB8\xE8\xC7\x3B\xCA\x7D'] - - for i in range(16): - c = DES.new(X[i],DES.MODE_ECB) - if not (i&1): # (num&1) returns 1 for odd numbers - X[i+1:] = [c.encrypt(X[i])] # even - else: - X[i+1:] = [c.decrypt(X[i])] # odd - - self.assertEqual(b2a_hex(X[16]), - b2a_hex(b'\x1B\x1A\x2D\xDB\x4C\x64\x24\x38')) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - cipher = DES.new(b'4'*8, DES.MODE_ECB) - - pt = b'5' * 8 - ct = cipher.encrypt(pt) - - output = bytearray(8) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(8)) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*8) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*8) - - shorter_output = bytearray(7) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - from .common import make_block_tests - tests = make_block_tests(DES, "DES", test_data) - tests += [RonRivestTest()] - tests += [TestOutput()] - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_DES3.py 
b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_DES3.py deleted file mode 100644 index 8f8479b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_DES3.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.DES3""" - -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.Cipher import DES3 - -from Cryptodome.Util.strxor import strxor_c -from Cryptodome.Util.py3compat import bchr, tostr -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases - -# This is a list of (plaintext, ciphertext, key, description) tuples. -test_data = [ - # Test vector from Appendix B of NIST SP 800-67 - # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block - # Cipher" - # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf - ('54686520717566636b2062726f776e20666f78206a756d70', - 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', - '0123456789abcdef23456789abcdef01456789abcdef0123', - 'NIST SP800-67 B.1'), - - # This test is designed to test the DES3 API, not the correctness of the - # output. 
- ('21e81b7ade88a259', '5c577d4d9b20c0f8', - '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), -] - -# NIST CAVP test vectors - -nist_tdes_mmt_files = ("TECBMMT2.rsp", "TECBMMT3.rsp") - -for tdes_file in nist_tdes_mmt_files: - - test_vectors = load_test_vectors( - ("Cipher", "TDES"), - tdes_file, - "TDES ECB (%s)" % tdes_file, - {"count": lambda x: int(x)}) or [] - - for index, tv in enumerate(test_vectors): - - # The test vector file contains some directive lines - if isinstance(tv, str): - continue - - key = tv.key1 + tv.key2 + tv.key3 - test_data_item = (tostr(hexlify(tv.plaintext)), - tostr(hexlify(tv.ciphertext)), - tostr(hexlify(key)), - "%s (%s)" % (tdes_file, index)) - test_data.append(test_data_item) - - -class CheckParity(unittest.TestCase): - - def test_parity_option2(self): - before_2k = unhexlify("CABF326FA56734324FFCCABCDEFACABF") - after_2k = DES3.adjust_key_parity(before_2k) - self.assertEqual(after_2k, - unhexlify("CBBF326EA46734324FFDCBBCDFFBCBBF")) - - def test_parity_option3(self): - before_3k = unhexlify("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC") - after_3k = DES3.adjust_key_parity(before_3k) - self.assertEqual(after_3k, - unhexlify("ABABABABABABABABBABABABABABABABACDCDCDCDCDCDCDCD")) - - def test_degradation(self): - sub_key1 = bchr(1) * 8 - sub_key2 = bchr(255) * 8 - - # K1 == K2 - self.assertRaises(ValueError, DES3.adjust_key_parity, - sub_key1 * 2 + sub_key2) - - # K2 == K3 - self.assertRaises(ValueError, DES3.adjust_key_parity, - sub_key1 + sub_key2 * 2) - - # K1 == K2 == K3 - self.assertRaises(ValueError, DES3.adjust_key_parity, - sub_key1 * 3) - - # K1 == K2 (with different parity) - self.assertRaises(ValueError, DES3.adjust_key_parity, - sub_key1 + strxor_c(sub_key1, 1) + sub_key2) - - -class DegenerateToDESTest(unittest.TestCase): - - def runTest(self): - sub_key1 = bchr(1) * 8 - sub_key2 = bchr(255) * 8 - - # K1 == K2 - self.assertRaises(ValueError, DES3.new, - sub_key1 * 2 + sub_key2, - DES3.MODE_ECB) - - # K2 == K3 - self.assertRaises(ValueError, DES3.new, - sub_key1 + sub_key2 * 2, - DES3.MODE_ECB) - - # K1 == K2 == K3 - self.assertRaises(ValueError, DES3.new, - sub_key1 * 3, - DES3.MODE_ECB) - - # K2 == K3 (parity is ignored) - self.assertRaises(ValueError, DES3.new, - sub_key1 + sub_key2 + strxor_c(sub_key2, 0x1), - DES3.MODE_ECB) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - cipher = DES3.new(b'4'*8 + b'G'*8 + b'T'*8, DES3.MODE_ECB) - - pt = b'5' * 16 - ct = cipher.encrypt(pt) - - output = bytearray(16) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(16)) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) - - shorter_output = bytearray(7) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - from .common import make_block_tests - - tests = [] - tests = make_block_tests(DES3, "DES3", test_data) - tests.append(DegenerateToDESTest()) - tests += list_test_cases(CheckParity) - tests += [TestOutput()] - return tests - - -if __name__ == 
'__main__': - import unittest - - def suite(): - unittest.TestSuite(get_tests()) - - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_EAX.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_EAX.py deleted file mode 100644 index 4127a88..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_EAX.py +++ /dev/null @@ -1,773 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
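[Editor's note: the EAX-mode self-tests deleted below follow the same authenticated-encryption pattern as the ChaCha20-Poly1305 tests above, but through AES.MODE_EAX. A minimal sketch under assumed key/nonce values; the calls used (new with MODE_EAX, update, encrypt_and_digest, decrypt_and_verify) all appear in the removed test file.]

from Cryptodome.Cipher import AES

key = b"0" * 16                       # AES-128 key
nonce = b"1" * 12                     # EAX accepts nonces of any non-empty length

sender = AES.new(key, AES.MODE_EAX, nonce=nonce)
sender.update(b"associated data")
ct, tag = sender.encrypt_and_digest(b"example plaintext")

receiver = AES.new(key, AES.MODE_EAX, nonce=nonce)
receiver.update(b"associated data")
pt = receiver.decrypt_and_verify(ct, tag)   # raises ValueError if the tag does not match
assert pt == b"example plaintext"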
-# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof -from Cryptodome.Util.py3compat import tobytes, bchr -from Cryptodome.Cipher import AES, DES3 -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.Util.strxor import strxor - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class EaxTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - key_192 = get_tag_random("key_192", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data_128 = get_tag_random("data_128", 16) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_loopback_64(self): - cipher = DES3.new(self.key_192, DES3.MODE_EAX, nonce=self.nonce_96) - pt = get_tag_random("plaintext", 8 * 100) - ct = cipher.encrypt(pt) - - cipher = DES3.new(self.key_192, DES3.MODE_EAX, nonce=self.nonce_96) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_nonce(self): - # If not passed, the nonce is created randomly - cipher = AES.new(self.key_128, AES.MODE_EAX) - nonce1 = cipher.nonce - cipher = AES.new(self.key_128, AES.MODE_EAX) - nonce2 = cipher.nonce - self.assertEqual(len(nonce1), 16) - self.assertNotEqual(nonce1, nonce2) - - cipher = AES.new(self.key_128, AES.MODE_EAX, self.nonce_96) - ct = cipher.encrypt(self.data_128) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertEqual(ct, cipher.encrypt(self.data_128)) - - def test_nonce_must_be_bytes(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, - nonce=u'test12345678') - - def test_nonce_length(self): - # nonce can be of any length (but not empty) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, - nonce=b"") - - for x in range(1, 128): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=bchr(1) * x) - cipher.encrypt(bchr(1)) - - def test_block_size_128(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertEqual(cipher.block_size, AES.block_size) - - def test_block_size_64(self): - cipher = DES3.new(self.key_192, AES.MODE_EAX, nonce=self.nonce_96) - self.assertEqual(cipher.block_size, DES3.block_size) - - def test_nonce_attribute(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertEqual(cipher.nonce, self.nonce_96) - - # By default, a 16 bytes long nonce is randomly generated - nonce1 = AES.new(self.key_128, AES.MODE_EAX).nonce - nonce2 = AES.new(self.key_128, AES.MODE_EAX).nonce - self.assertEqual(len(nonce1), 16) - self.assertNotEqual(nonce1, nonce2) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, - self.nonce_96, 7) - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, - nonce=self.nonce_96, unknown=7) - - # But some are only known by the base cipher - # (e.g. 
use_aesni consumed by the AES module) - AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96, - use_aesni=False) - - def test_null_encryption_decryption(self): - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - result = getattr(cipher, func)(b"") - self.assertEqual(result, b"") - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_data_must_be_bytes(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') - - def test_mac_len(self): - # Invalid MAC length - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, - nonce=self.nonce_96, mac_len=2-1) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, - nonce=self.nonce_96, mac_len=16+1) - - # Valid MAC length - for mac_len in range(2, 16 + 1): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96, - mac_len=mac_len) - _, mac = cipher.encrypt_and_digest(self.data_128) - self.assertEqual(len(mac), mac_len) - - # Default MAC length - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - _, mac = cipher.encrypt_and_digest(self.data_128) - self.assertEqual(len(mac), 16) - - def test_invalid_mac(self): - from Cryptodome.Util.strxor import strxor_c - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data_128) - - invalid_mac = strxor_c(mac, 0x01) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, - invalid_mac) - - def test_hex_mac(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - mac_hex = cipher.hexdigest() - self.assertEqual(cipher.digest(), unhexlify(mac_hex)) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.hexverify(mac_hex) - - def test_message_chunks(self): - # Validate that both associated data and plaintext/ciphertext - # can be broken up in chunks of arbitrary length - - auth_data = get_tag_random("authenticated data", 127) - plaintext = get_tag_random("plaintext", 127) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.update(auth_data) - ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) - - def break_up(data, chunk_length): - return [data[i:i+chunk_length] for i in range(0, len(data), - chunk_length)] - - # Encryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - pt2 = b"" - for chunk in break_up(ciphertext, chunk_length): - pt2 += cipher.decrypt(chunk) - self.assertEqual(plaintext, pt2) - cipher.verify(ref_mac) - - # Decryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - ct2 = b"" - for chunk in break_up(plaintext, chunk_length): - ct2 += cipher.encrypt(chunk) - 
self.assertEqual(ciphertext, ct2) - self.assertEqual(cipher.digest(), ref_mac) - - def test_bytearray(self): - - # Encrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data_128) - data_ba = bytearray(self.data_128) - - cipher1 = AES.new(self.key_128, - AES.MODE_EAX, - nonce=self.nonce_96) - cipher1.update(self.data_128) - ct = cipher1.encrypt(self.data_128) - tag = cipher1.digest() - - cipher2 = AES.new(key_ba, - AES.MODE_EAX, - nonce=nonce_ba) - key_ba[:3] = b'\xFF\xFF\xFF' - nonce_ba[:3] = b'\xFF\xFF\xFF' - cipher2.update(header_ba) - header_ba[:3] = b'\xFF\xFF\xFF' - ct_test = cipher2.encrypt(data_ba) - data_ba[:3] = b'\x99\x99\x99' - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data_128) - ct_ba = bytearray(ct) - tag_ba = bytearray(tag) - del data_ba - - cipher3 = AES.new(key_ba, - AES.MODE_EAX, - nonce=nonce_ba) - key_ba[:3] = b'\xFF\xFF\xFF' - nonce_ba[:3] = b'\xFF\xFF\xFF' - cipher3.update(header_ba) - header_ba[:3] = b'\xFF\xFF\xFF' - pt_test = cipher3.decrypt(ct_ba) - ct_ba[:3] = b'\xFF\xFF\xFF' - cipher3.verify(tag_ba) - - self.assertEqual(pt_test, self.data_128) - - def test_memoryview(self): - - # Encrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data_128)) - data_mv = memoryview(bytearray(self.data_128)) - - cipher1 = AES.new(self.key_128, - AES.MODE_EAX, - nonce=self.nonce_96) - cipher1.update(self.data_128) - ct = cipher1.encrypt(self.data_128) - tag = cipher1.digest() - - cipher2 = AES.new(key_mv, - AES.MODE_EAX, - nonce=nonce_mv) - key_mv[:3] = b'\xFF\xFF\xFF' - nonce_mv[:3] = b'\xFF\xFF\xFF' - cipher2.update(header_mv) - header_mv[:3] = b'\xFF\xFF\xFF' - ct_test = cipher2.encrypt(data_mv) - data_mv[:3] = b'\x99\x99\x99' - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data_128)) - ct_mv = memoryview(bytearray(ct)) - tag_mv = memoryview(bytearray(tag)) - del data_mv - - cipher3 = AES.new(key_mv, - AES.MODE_EAX, - nonce=nonce_mv) - key_mv[:3] = b'\xFF\xFF\xFF' - nonce_mv[:3] = b'\xFF\xFF\xFF' - cipher3.update(header_mv) - header_mv[:3] = b'\xFF\xFF\xFF' - pt_test = cipher3.decrypt(ct_mv) - ct_mv[:3] = b'\x99\x99\x99' - cipher3.verify(tag_mv) - - self.assertEqual(pt_test, self.data_128) - - def test_output_param(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - tag = cipher.digest() - - output = bytearray(128) - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - res, tag_out = cipher.encrypt_and_digest(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - self.assertEqual(tag, tag_out) - - cipher = 
AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - res = cipher.decrypt_and_verify(ct, tag, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - def test_output_param_memoryview(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - - output = memoryview(bytearray(128)) - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - def test_output_param_neg(self): - LEN_PT = 16 - - pt = b'5' * LEN_PT - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) - - shorter_output = bytearray(LEN_PT - 1) - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -class EaxFSMTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data_128 = get_tag_random("data_128", 16) - - def test_valid_init_encrypt_decrypt_digest_verify(self): - # No authenticated data, fixed plaintext - # Verify path INIT->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data_128) - mac = cipher.digest() - - # Verify path INIT->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_update_digest_verify(self): - # No plaintext, fixed authenticated data - # Verify path INIT->UPDATE->DIGEST - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - cipher.update(self.data_128) - mac = cipher.digest() - - # Verify path INIT->UPDATE->VERIFY - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - cipher.update(self.data_128) - cipher.verify(mac) - - def test_valid_full_path(self): - # Fixed authenticated data, fixed plaintext - # Verify path INIT->UPDATE->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - cipher.update(self.data_128) - ct = cipher.encrypt(self.data_128) - mac = cipher.digest() - - # Verify path INIT->UPDATE->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - cipher.update(self.data_128) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_digest(self): - # Verify path INIT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.digest() - - def test_valid_init_verify(self): - # Verify path INIT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - mac = cipher.digest() - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.verify(mac) - - def test_valid_multiple_encrypt_or_decrypt(self): - for method_name in "encrypt", "decrypt": - for auth_data in (None, b"333", self.data_128, - self.data_128 + b"3"): - if auth_data is None: - assoc_len = None - else: 
- assoc_len = len(auth_data) - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - if auth_data is not None: - cipher.update(auth_data) - method = getattr(cipher, method_name) - method(self.data_128) - method(self.data_128) - method(self.data_128) - method(self.data_128) - - def test_valid_multiple_digest_or_verify(self): - # Multiple calls to digest - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.update(self.data_128) - first_mac = cipher.digest() - for x in range(4): - self.assertEqual(first_mac, cipher.digest()) - - # Multiple calls to verify - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.update(self.data_128) - for x in range(5): - cipher.verify(first_mac) - - def test_valid_encrypt_and_digest_decrypt_and_verify(self): - # encrypt_and_digest - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.update(self.data_128) - ct, mac = cipher.encrypt_and_digest(self.data_128) - - # decrypt_and_verify - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.update(self.data_128) - pt = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(self.data_128, pt) - - def test_invalid_mixing_encrypt_decrypt(self): - # Once per method, with or without assoc. data - for method1_name, method2_name in (("encrypt", "decrypt"), - ("decrypt", "encrypt")): - for assoc_data_present in (True, False): - cipher = AES.new(self.key_128, AES.MODE_EAX, - nonce=self.nonce_96) - if assoc_data_present: - cipher.update(self.data_128) - getattr(cipher, method1_name)(self.data_128) - self.assertRaises(TypeError, getattr(cipher, method2_name), - self.data_128) - - def test_invalid_encrypt_or_update_after_digest(self): - for method_name in "encrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.encrypt(self.data_128) - cipher.digest() - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data_128) - - def test_invalid_decrypt_or_update_after_verify(self): - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - ct = cipher.encrypt(self.data_128) - mac = cipher.digest() - - for method_name in "decrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data_128) - - -class TestVectorsPaper(unittest.TestCase): - """Class exercising the EAX test vectors found in - http://www.cs.ucdavis.edu/~rogaway/papers/eax.pdf""" - - test_vectors_hex = [ - ( '6bfb914fd07eae6b', - '', - '', - 'e037830e8389f27b025a2d6527e79d01', - '233952dee4d5ed5f9b9c6d6ff80ff478', - '62EC67F9C3A4A407FCB2A8C49031A8B3' - ), - ( - 'fa3bfd4806eb53fa', - 'f7fb', - '19dd', - '5c4c9331049d0bdab0277408f67967e5', - '91945d3f4dcbee0bf45ef52255f095a4', - 'BECAF043B0A23D843194BA972C66DEBD' - ), - ( '234a3463c1264ac6', - '1a47cb4933', - 'd851d5bae0', - '3a59f238a23e39199dc9266626c40f80', - '01f74ad64077f2e704c0f60ada3dd523', - '70C3DB4F0D26368400A10ED05D2BFF5E' - ), - ( - '33cce2eabff5a79d', - '481c9e39b1', - '632a9d131a', - 'd4c168a4225d8e1ff755939974a7bede', - 'd07cf6cbb7f313bdde66b727afd3c5e8', - '8408DFFF3C1A2B1292DC199E46B7D617' - ), - ( - 
'aeb96eaebe2970e9', - '40d0c07da5e4', - '071dfe16c675', - 'cb0677e536f73afe6a14b74ee49844dd', - '35b6d0580005bbc12b0587124557d2c2', - 'FDB6B06676EEDC5C61D74276E1F8E816' - ), - ( - 'd4482d1ca78dce0f', - '4de3b35c3fc039245bd1fb7d', - '835bb4f15d743e350e728414', - 'abb8644fd6ccb86947c5e10590210a4f', - 'bd8e6e11475e60b268784c38c62feb22', - '6EAC5C93072D8E8513F750935E46DA1B' - ), - ( - '65d2017990d62528', - '8b0a79306c9ce7ed99dae4f87f8dd61636', - '02083e3979da014812f59f11d52630da30', - '137327d10649b0aa6e1c181db617d7f2', - '7c77d6e813bed5ac98baa417477a2e7d', - '1A8C98DCD73D38393B2BF1569DEEFC19' - ), - ( - '54b9f04e6a09189a', - '1bda122bce8a8dbaf1877d962b8592dd2d56', - '2ec47b2c4954a489afc7ba4897edcdae8cc3', - '3b60450599bd02c96382902aef7f832a', - '5fff20cafab119ca2fc73549e20f5b0d', - 'DDE59B97D722156D4D9AFF2BC7559826' - ), - ( - '899a175897561d7e', - '6cf36720872b8513f6eab1a8a44438d5ef11', - '0de18fd0fdd91e7af19f1d8ee8733938b1e8', - 'e7f6d2231618102fdb7fe55ff1991700', - 'a4a4782bcffd3ec5e7ef6d8c34a56123', - 'B781FCF2F75FA5A8DE97A9CA48E522EC' - ), - ( - '126735fcc320d25a', - 'ca40d7446e545ffaed3bd12a740a659ffbbb3ceab7', - 'cb8920f87a6c75cff39627b56e3ed197c552d295a7', - 'cfc46afc253b4652b1af3795b124ab6e', - '8395fcf1e95bebd697bd010bc766aac3', - '22E7ADD93CFC6393C57EC0B3C17D6B44' - ), - ] - - test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] - - def runTest(self): - for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: - # Encrypt - cipher = AES.new(key, AES.MODE_EAX, nonce, mac_len=len(mac)) - cipher.update(assoc_data) - ct2, mac2 = cipher.encrypt_and_digest(pt) - self.assertEqual(ct, ct2) - self.assertEqual(mac, mac2) - - # Decrypt - cipher = AES.new(key, AES.MODE_EAX, nonce, mac_len=len(mac)) - cipher.update(assoc_data) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._id = "None" - - def setUp(self): - - def filter_tag(group): - return group['tagSize'] // 8 - - self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "aes_eax_test.json", - "Wycheproof EAX", - group_tag={'tag_size': filter_tag}) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_encrypt(self, tv): - self._id = "Wycheproof Encrypt EAX Test #" + str(tv.id) - - try: - cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) - except ValueError as e: - assert len(tv.iv) == 0 and "Nonce cannot be empty" in str(e) - return - - cipher.update(tv.aad) - ct, tag = cipher.encrypt_and_digest(tv.msg) - if tv.valid: - self.assertEqual(ct, tv.ct) - self.assertEqual(tag, tv.tag) - self.warn(tv) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt EAX Test #" + str(tv.id) - - try: - cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) - except ValueError as e: - assert len(tv.iv) == 0 and "Nonce cannot be empty" in str(e) - return - - cipher.update(tv.aad) - try: - pt = cipher.decrypt_and_verify(tv.ct, tv.tag) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - self.warn(tv) - - def test_corrupt_decrypt(self, tv): - self._id = "Wycheproof Corrupt Decrypt EAX Test #" + str(tv.id) - if len(tv.iv) == 0 or len(tv.ct) < 1: - return - cipher = 
AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) - cipher.update(tv.aad) - ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) - - def runTest(self): - - for tv in self.tv: - self.test_encrypt(tv) - self.test_decrypt(tv) - self.test_corrupt_decrypt(tv) - - -class TestOtherCiphers(unittest.TestCase): - - @classmethod - def create_test(cls, name, factory, key_size): - - def test_template(self, factory=factory, key_size=key_size): - cipher = factory.new(get_tag_random("cipher", key_size), - factory.MODE_EAX, - nonce=b"nonce") - ct, mac = cipher.encrypt_and_digest(b"plaintext") - - cipher = factory.new(get_tag_random("cipher", key_size), - factory.MODE_EAX, - nonce=b"nonce") - pt2 = cipher.decrypt_and_verify(ct, mac) - - self.assertEqual(b"plaintext", pt2) - - setattr(cls, "test_" + name, test_template) - - -from Cryptodome.Cipher import DES, DES3, ARC2, CAST, Blowfish - -TestOtherCiphers.create_test("DES_" + str(DES.key_size), DES, DES.key_size) -for ks in DES3.key_size: - TestOtherCiphers.create_test("DES3_" + str(ks), DES3, ks) -for ks in ARC2.key_size: - TestOtherCiphers.create_test("ARC2_" + str(ks), ARC2, ks) -for ks in CAST.key_size: - TestOtherCiphers.create_test("CAST_" + str(ks), CAST, ks) -for ks in Blowfish.key_size: - TestOtherCiphers.create_test("Blowfish_" + str(ks), Blowfish, ks) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(EaxTests) - tests += list_test_cases(EaxFSMTests) - tests += [ TestVectorsPaper() ] - tests += [ TestVectorsWycheproof(wycheproof_warnings) ] - tests += list_test_cases(TestOtherCiphers) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_GCM.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_GCM.py deleted file mode 100644 index ac8e741..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_GCM.py +++ /dev/null @@ -1,951 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from __future__ import print_function - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof - -from Cryptodome.Util.py3compat import tobytes, bchr -from Cryptodome.Cipher import AES -from Cryptodome.Hash import SHAKE128, SHA256 - -from Cryptodome.Util.strxor import strxor - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class GcmTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 128) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_nonce(self): - # Nonce is optional (a random one will be created) - AES.new(self.key_128, AES.MODE_GCM) - - cipher = AES.new(self.key_128, AES.MODE_GCM, self.nonce_96) - ct = cipher.encrypt(self.data) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertEqual(ct, cipher.encrypt(self.data)) - - def test_nonce_must_be_bytes(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, - nonce=u'test12345678') - - def test_nonce_length(self): - # nonce can be of any length (but not empty) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, - nonce=b"") - - for x in range(1, 128): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=bchr(1) * x) - cipher.encrypt(bchr(1)) - - def test_block_size_128(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertEqual(cipher.block_size, AES.block_size) - - def test_nonce_attribute(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertEqual(cipher.nonce, self.nonce_96) - - # By default, a 15 bytes long nonce is randomly generated - nonce1 = AES.new(self.key_128, AES.MODE_GCM).nonce - nonce2 = AES.new(self.key_128, AES.MODE_GCM).nonce - self.assertEqual(len(nonce1), 16) - self.assertNotEqual(nonce1, nonce2) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, - self.nonce_96, 7) - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, - nonce=self.nonce_96, unknown=7) - - # But some are only known by the base cipher - # (e.g. 
use_aesni consumed by the AES module) - AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96, - use_aesni=False) - - def test_null_encryption_decryption(self): - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - result = getattr(cipher, func)(b"") - self.assertEqual(result, b"") - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_data_must_be_bytes(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') - - def test_mac_len(self): - # Invalid MAC length - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, - nonce=self.nonce_96, mac_len=3) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, - nonce=self.nonce_96, mac_len=16+1) - - # Valid MAC length - for mac_len in range(5, 16 + 1): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96, - mac_len=mac_len) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), mac_len) - - # Default MAC length - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), 16) - - def test_invalid_mac(self): - from Cryptodome.Util.strxor import strxor_c - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data) - - invalid_mac = strxor_c(mac, 0x01) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, - invalid_mac) - - def test_hex_mac(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - mac_hex = cipher.hexdigest() - self.assertEqual(cipher.digest(), unhexlify(mac_hex)) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.hexverify(mac_hex) - - def test_message_chunks(self): - # Validate that both associated data and plaintext/ciphertext - # can be broken up in chunks of arbitrary length - - auth_data = get_tag_random("authenticated data", 127) - plaintext = get_tag_random("plaintext", 127) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.update(auth_data) - ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) - - def break_up(data, chunk_length): - return [data[i:i+chunk_length] for i in range(0, len(data), - chunk_length)] - - # Encryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - pt2 = b"" - for chunk in break_up(ciphertext, chunk_length): - pt2 += cipher.decrypt(chunk) - self.assertEqual(plaintext, pt2) - cipher.verify(ref_mac) - - # Decryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - ct2 = b"" - for chunk in break_up(plaintext, chunk_length): - ct2 += cipher.encrypt(chunk) - 
self.assertEqual(ciphertext, ct2) - self.assertEqual(cipher.digest(), ref_mac) - - def test_bytearray(self): - - # Encrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data) - data_ba = bytearray(self.data) - - cipher1 = AES.new(self.key_128, - AES.MODE_GCM, - nonce=self.nonce_96) - cipher1.update(self.data) - ct = cipher1.encrypt(self.data) - tag = cipher1.digest() - - cipher2 = AES.new(key_ba, - AES.MODE_GCM, - nonce=nonce_ba) - key_ba[:3] = b"\xFF\xFF\xFF" - nonce_ba[:3] = b"\xFF\xFF\xFF" - cipher2.update(header_ba) - header_ba[:3] = b"\xFF\xFF\xFF" - ct_test = cipher2.encrypt(data_ba) - data_ba[:3] = b"\xFF\xFF\xFF" - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data) - del data_ba - - cipher4 = AES.new(key_ba, - AES.MODE_GCM, - nonce=nonce_ba) - key_ba[:3] = b"\xFF\xFF\xFF" - nonce_ba[:3] = b"\xFF\xFF\xFF" - cipher4.update(header_ba) - header_ba[:3] = b"\xFF\xFF\xFF" - pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) - - self.assertEqual(self.data, pt_test) - - def test_memoryview(self): - - # Encrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data)) - data_mv = memoryview(bytearray(self.data)) - - cipher1 = AES.new(self.key_128, - AES.MODE_GCM, - nonce=self.nonce_96) - cipher1.update(self.data) - ct = cipher1.encrypt(self.data) - tag = cipher1.digest() - - cipher2 = AES.new(key_mv, - AES.MODE_GCM, - nonce=nonce_mv) - key_mv[:3] = b"\xFF\xFF\xFF" - nonce_mv[:3] = b"\xFF\xFF\xFF" - cipher2.update(header_mv) - header_mv[:3] = b"\xFF\xFF\xFF" - ct_test = cipher2.encrypt(data_mv) - data_mv[:3] = b"\xFF\xFF\xFF" - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data)) - del data_mv - - cipher4 = AES.new(key_mv, - AES.MODE_GCM, - nonce=nonce_mv) - key_mv[:3] = b"\xFF\xFF\xFF" - nonce_mv[:3] = b"\xFF\xFF\xFF" - cipher4.update(header_mv) - header_mv[:3] = b"\xFF\xFF\xFF" - pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) - - self.assertEqual(self.data, pt_test) - - def test_output_param(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - tag = cipher.digest() - - output = bytearray(128) - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - res, tag_out = cipher.encrypt_and_digest(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - self.assertEqual(tag, tag_out) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - res = cipher.decrypt_and_verify(ct, tag, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - def 
test_output_param_memoryview(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - - output = memoryview(bytearray(128)) - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - def test_output_param_neg(self): - LEN_PT = 128 - - pt = b'5' * LEN_PT - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - ct = cipher.encrypt(pt) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) - - shorter_output = bytearray(LEN_PT - 1) - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -class GcmFSMTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 128) - - def test_valid_init_encrypt_decrypt_digest_verify(self): - # No authenticated data, fixed plaintext - # Verify path INIT->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data) - mac = cipher.digest() - - # Verify path INIT->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_update_digest_verify(self): - # No plaintext, fixed authenticated data - # Verify path INIT->UPDATE->DIGEST - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - cipher.update(self.data) - mac = cipher.digest() - - # Verify path INIT->UPDATE->VERIFY - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.verify(mac) - - def test_valid_full_path(self): - # Fixed authenticated data, fixed plaintext - # Verify path INIT->UPDATE->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - cipher.update(self.data) - ct = cipher.encrypt(self.data) - mac = cipher.digest() - - # Verify path INIT->UPDATE->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.decrypt(ct) - cipher.verify(mac) - - def test_valid_init_digest(self): - # Verify path INIT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.digest() - - def test_valid_init_verify(self): - # Verify path INIT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - mac = cipher.digest() - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.verify(mac) - - def test_valid_multiple_encrypt_or_decrypt(self): - for method_name in "encrypt", "decrypt": - for auth_data in (None, b"333", self.data, - self.data + b"3"): - if auth_data is None: - assoc_len = None - else: - assoc_len = len(auth_data) - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - if auth_data is not None: - cipher.update(auth_data) - method = getattr(cipher, method_name) - method(self.data) - 
method(self.data) - method(self.data) - method(self.data) - - def test_valid_multiple_digest_or_verify(self): - # Multiple calls to digest - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.update(self.data) - first_mac = cipher.digest() - for x in range(4): - self.assertEqual(first_mac, cipher.digest()) - - # Multiple calls to verify - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.update(self.data) - for x in range(5): - cipher.verify(first_mac) - - def test_valid_encrypt_and_digest_decrypt_and_verify(self): - # encrypt_and_digest - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.update(self.data) - ct, mac = cipher.encrypt_and_digest(self.data) - - # decrypt_and_verify - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.update(self.data) - pt = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(self.data, pt) - - def test_invalid_mixing_encrypt_decrypt(self): - # Once per method, with or without assoc. data - for method1_name, method2_name in (("encrypt", "decrypt"), - ("decrypt", "encrypt")): - for assoc_data_present in (True, False): - cipher = AES.new(self.key_128, AES.MODE_GCM, - nonce=self.nonce_96) - if assoc_data_present: - cipher.update(self.data) - getattr(cipher, method1_name)(self.data) - self.assertRaises(TypeError, getattr(cipher, method2_name), - self.data) - - def test_invalid_encrypt_or_update_after_digest(self): - for method_name in "encrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.encrypt(self.data) - cipher.digest() - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data) - - def test_invalid_decrypt_or_update_after_verify(self): - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - ct = cipher.encrypt(self.data) - mac = cipher.digest() - - for method_name in "decrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.verify(mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - -class TestVectors(unittest.TestCase): - """Class exercising the GCM test vectors found in - http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/gcm/gcm-revised-spec.pdf""" - - # List of test vectors, each made up of: - # - authenticated data - # - plaintext - # - ciphertext - # - MAC - # - AES key - # - nonce - test_vectors_hex = [ - ( - '', - '', - '', - '58e2fccefa7e3061367f1d57a4e7455a', - '00000000000000000000000000000000', - '000000000000000000000000' - ), - ( - '', - '00000000000000000000000000000000', - '0388dace60b6a392f328c2b971b2fe78', - 'ab6e47d42cec13bdf53a67b21257bddf', - '00000000000000000000000000000000', - '000000000000000000000000' - ), - ( - '', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', - '42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e' + - '21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091473f5985', - '4d5c2af327cd64a62cf35abd2ba6fab4', - 'feffe9928665731c6d6a8f9467308308', - 'cafebabefacedbaddecaf888' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 
'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e' + - '21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091', - '5bc94fbc3221a5db94fae95ae7121a47', - 'feffe9928665731c6d6a8f9467308308', - 'cafebabefacedbaddecaf888' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '61353b4c2806934a777ff51fa22a4755699b2a714fcdc6f83766e5f97b6c7423' + - '73806900e49f24b22b097544d4896b424989b5e1ebac0f07c23f4598', - '3612d2e79e3b0785561be14aaca2fccb', - 'feffe9928665731c6d6a8f9467308308', - 'cafebabefacedbad' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '8ce24998625615b603a033aca13fb894be9112a5c3a211a8ba262a3cca7e2ca7' + - '01e4a9a4fba43c90ccdcb281d48c7c6fd62875d2aca417034c34aee5', - '619cc5aefffe0bfa462af43c1699d050', - 'feffe9928665731c6d6a8f9467308308', - '9313225df88406e555909c5aff5269aa' + - '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + - '16aedbf5a0de6a57a637b39b' - ), - ( - '', - '', - '', - 'cd33b28ac773f74ba00ed1f312572435', - '000000000000000000000000000000000000000000000000', - '000000000000000000000000' - ), - ( - '', - '00000000000000000000000000000000', - '98e7247c07f0fe411c267e4384b0f600', - '2ff58d80033927ab8ef4d4587514f0fb', - '000000000000000000000000000000000000000000000000', - '000000000000000000000000' - ), - ( - '', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', - '3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c' + - '7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710acade256', - '9924a7c8587336bfb118024db8674a14', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c', - 'cafebabefacedbaddecaf888' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c' + - '7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710', - '2519498e80f1478f37ba55bd6d27618c', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c', - 'cafebabefacedbaddecaf888' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '0f10f599ae14a154ed24b36e25324db8c566632ef2bbb34f8347280fc4507057' + - 'fddc29df9a471f75c66541d4d4dad1c9e93a19a58e8b473fa0f062f7', - '65dcc57fcf623a24094fcca40d3533f8', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c', - 'cafebabefacedbad' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - 'd27e88681ce3243c4830165a8fdcf9ff1de9a1d8e6b447ef6ef7b79828666e45' + - '81e79012af34ddd9e2f037589b292db3e67c036745fa22e7e9b7373b', - 'dcf566ff291c25bbb8568fc3d376a6d9', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c', - '9313225df88406e555909c5aff5269aa' + - '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + - '16aedbf5a0de6a57a637b39b' - ), - ( - '', - '', - 
'', - '530f8afbc74536b9a963b4f1c4cb738b', - '0000000000000000000000000000000000000000000000000000000000000000', - '000000000000000000000000' - ), - ( - '', - '00000000000000000000000000000000', - 'cea7403d4d606b6e074ec5d3baf39d18', - 'd0d1c8a799996bf0265b98b5d48ab919', - '0000000000000000000000000000000000000000000000000000000000000000', - '000000000000000000000000' - ), - ( '', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', - '522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa' + - '8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662898015ad', - 'b094dac5d93471bdec1a502270e3cc6c', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', - 'cafebabefacedbaddecaf888' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa' + - '8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662', - '76fc6ece0f4e1768cddf8853bb2d551b', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', - 'cafebabefacedbaddecaf888' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - 'c3762df1ca787d32ae47c13bf19844cbaf1ae14d0b976afac52ff7d79bba9de0' + - 'feb582d33934a4f0954cc2363bc73f7862ac430e64abe499f47c9b1f', - '3a337dbf46a792c45e454913fe2ea8f2', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', - 'cafebabefacedbad' - ), - ( - 'feedfacedeadbeeffeedfacedeadbeefabaddad2', - 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + - '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', - '5a8def2f0c9e53f1f75d7853659e2a20eeb2b22aafde6419a058ab4f6f746bf4' + - '0fc0c3b780f244452da3ebf1c5d82cdea2418997200ef82e44ae7e3f', - 'a44a8266ee1c8eb0c8b5d4cf5ae9f19a', - 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', - '9313225df88406e555909c5aff5269aa' + - '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + - '16aedbf5a0de6a57a637b39b' - ) - ] - - test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] - - def runTest(self): - for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: - - # Encrypt - cipher = AES.new(key, AES.MODE_GCM, nonce, mac_len=len(mac)) - cipher.update(assoc_data) - ct2, mac2 = cipher.encrypt_and_digest(pt) - self.assertEqual(ct, ct2) - self.assertEqual(mac, mac2) - - # Decrypt - cipher = AES.new(key, AES.MODE_GCM, nonce, mac_len=len(mac)) - cipher.update(assoc_data) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - -class TestVectorsGueronKrasnov(unittest.TestCase): - """Class exercising the GCM test vectors found in - 'The fragility of AES-GCM authentication algorithm', Gueron, Krasnov - https://eprint.iacr.org/2013/157.pdf""" - - def test_1(self): - key = unhexlify("3da6c536d6295579c0959a7043efb503") - iv = unhexlify("2b926197d34e091ef722db94") - aad = unhexlify("00000000000000000000000000000000" + - "000102030405060708090a0b0c0d0e0f" + - "101112131415161718191a1b1c1d1e1f" + - "202122232425262728292a2b2c2d2e2f" + - "303132333435363738393a3b3c3d3e3f") - digest = unhexlify("69dd586555ce3fcc89663801a71d957b") - - cipher = AES.new(key, AES.MODE_GCM, iv).update(aad) - self.assertEqual(digest, cipher.digest()) - - def 
test_2(self): - key = unhexlify("843ffcf5d2b72694d19ed01d01249412") - iv = unhexlify("dbcca32ebf9b804617c3aa9e") - aad = unhexlify("00000000000000000000000000000000" + - "101112131415161718191a1b1c1d1e1f") - pt = unhexlify("000102030405060708090a0b0c0d0e0f" + - "101112131415161718191a1b1c1d1e1f" + - "202122232425262728292a2b2c2d2e2f" + - "303132333435363738393a3b3c3d3e3f" + - "404142434445464748494a4b4c4d4e4f") - ct = unhexlify("6268c6fa2a80b2d137467f092f657ac0" + - "4d89be2beaa623d61b5a868c8f03ff95" + - "d3dcee23ad2f1ab3a6c80eaf4b140eb0" + - "5de3457f0fbc111a6b43d0763aa422a3" + - "013cf1dc37fe417d1fbfc449b75d4cc5") - digest = unhexlify("3b629ccfbc1119b7319e1dce2cd6fd6d") - - cipher = AES.new(key, AES.MODE_GCM, iv).update(aad) - ct2, digest2 = cipher.encrypt_and_digest(pt) - - self.assertEqual(ct, ct2) - self.assertEqual(digest, digest2) - - -class NISTTestVectorsGCM(unittest.TestCase): - - def __init__(self, a): - self.use_clmul = True - unittest.TestCase.__init__(self, a) - - -class NISTTestVectorsGCM_no_clmul(unittest.TestCase): - - def __init__(self, a): - self.use_clmul = False - unittest.TestCase.__init__(self, a) - - -test_vectors_nist = load_test_vectors( - ("Cipher", "AES"), - "gcmDecrypt128.rsp", - "GCM decrypt", - {"count": lambda x: int(x)}) or [] - -test_vectors_nist += load_test_vectors( - ("Cipher", "AES"), - "gcmEncryptExtIV128.rsp", - "GCM encrypt", - {"count": lambda x: int(x)}) or [] - -for idx, tv in enumerate(test_vectors_nist): - - # The test vector file contains some directive lines - if isinstance(tv, str): - continue - - def single_test(self, tv=tv): - - self.description = tv.desc - cipher = AES.new(tv.key, AES.MODE_GCM, nonce=tv.iv, - mac_len=len(tv.tag), use_clmul=self.use_clmul) - cipher.update(tv.aad) - if "FAIL" in tv.others: - self.assertRaises(ValueError, cipher.decrypt_and_verify, - tv.ct, tv.tag) - else: - pt = cipher.decrypt_and_verify(tv.ct, tv.tag) - self.assertEqual(pt, tv.pt) - - setattr(NISTTestVectorsGCM, "test_%d" % idx, single_test) - setattr(NISTTestVectorsGCM_no_clmul, "test_%d" % idx, single_test) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings, **extra_params): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._extra_params = extra_params - self._id = "None" - - def setUp(self): - - def filter_tag(group): - return group['tagSize'] // 8 - - self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "aes_gcm_test.json", - "Wycheproof GCM", - group_tag={'tag_size': filter_tag}) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_encrypt(self, tv): - self._id = "Wycheproof Encrypt GCM Test #" + str(tv.id) - - try: - cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, - **self._extra_params) - except ValueError as e: - if len(tv.iv) == 0 and "Nonce cannot be empty" in str(e): - return - raise e - - cipher.update(tv.aad) - ct, tag = cipher.encrypt_and_digest(tv.msg) - if tv.valid: - self.assertEqual(ct, tv.ct) - self.assertEqual(tag, tv.tag) - self.warn(tv) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt GCM Test #" + str(tv.id) - - try: - cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, - **self._extra_params) - except ValueError as e: - if len(tv.iv) == 0 and "Nonce cannot be empty" in str(e): - return - raise e - - 
cipher.update(tv.aad) - try: - pt = cipher.decrypt_and_verify(tv.ct, tv.tag) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - self.warn(tv) - - def test_corrupt_decrypt(self, tv): - self._id = "Wycheproof Corrupt Decrypt GCM Test #" + str(tv.id) - if len(tv.iv) == 0 or len(tv.ct) < 1: - return - cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, - **self._extra_params) - cipher.update(tv.aad) - ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) - - def runTest(self): - - for tv in self.tv: - self.test_encrypt(tv) - self.test_decrypt(tv) - self.test_corrupt_decrypt(tv) - - -class TestVariableLength(unittest.TestCase): - - def __init__(self, **extra_params): - unittest.TestCase.__init__(self) - self._extra_params = extra_params - - def runTest(self): - key = b'0' * 16 - h = SHA256.new() - - for length in range(160): - nonce = '{0:04d}'.format(length).encode('utf-8') - data = bchr(length) * length - cipher = AES.new(key, AES.MODE_GCM, nonce=nonce, **self._extra_params) - ct, tag = cipher.encrypt_and_digest(data) - h.update(ct) - h.update(tag) - - self.assertEqual(h.hexdigest(), "7b7eb1ffbe67a2e53a912067c0ec8e62ebc7ce4d83490ea7426941349811bdf4") - - -def get_tests(config={}): - from Cryptodome.Util import _cpu_features - - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(GcmTests) - tests += list_test_cases(GcmFSMTests) - tests += [TestVectors()] - tests += [TestVectorsWycheproof(wycheproof_warnings)] - tests += list_test_cases(TestVectorsGueronKrasnov) - tests += [TestVariableLength()] - if config.get('slow_tests'): - tests += list_test_cases(NISTTestVectorsGCM) - - if _cpu_features.have_clmul(): - tests += [TestVectorsWycheproof(wycheproof_warnings, use_clmul=False)] - tests += [TestVariableLength(use_clmul=False)] - if config.get('slow_tests'): - tests += list_test_cases(NISTTestVectorsGCM_no_clmul) - else: - print("Skipping test of PCLMULDQD in AES GCM") - - return tests - - -if __name__ == '__main__': - def suite(): - unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_KW.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_KW.py deleted file mode 100644 index 4b530cf..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_KW.py +++ /dev/null @@ -1,175 +0,0 @@ -import unittest - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof - -from Cryptodome.Cipher import AES - - -class KW_Tests(unittest.TestCase): - - # From RFC3394 - tvs = [ - ("000102030405060708090A0B0C0D0E0F", - "00112233445566778899AABBCCDDEEFF", - "1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5"), - ("000102030405060708090A0B0C0D0E0F1011121314151617", - "00112233445566778899AABBCCDDEEFF", - "96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D"), - ("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F", - "00112233445566778899AABBCCDDEEFF", - "64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7"), - ("000102030405060708090A0B0C0D0E0F1011121314151617", - "00112233445566778899AABBCCDDEEFF0001020304050607", - "031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2"), - ("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F", - "00112233445566778899AABBCCDDEEFF0001020304050607", - 
"A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1"), - ("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F", - "00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F", - "28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21"), - ] - - def test_rfc3394(self): - for tv in self.tvs: - kek, pt, ct = [bytes.fromhex(x) for x in tv] - - cipher = AES.new(kek, AES.MODE_KW) - ct2 = cipher.seal(pt) - - self.assertEqual(ct, ct2) - - cipher = AES.new(kek, AES.MODE_KW) - pt2 = cipher.unseal(ct) - self.assertEqual(pt, pt2) - - def test_neg1(self): - - cipher = AES.new(b'-' * 16, AES.MODE_KW) - - with self.assertRaises(ValueError): - cipher.seal(b'') - - with self.assertRaises(ValueError): - cipher.seal(b'8' * 17) - - def test_neg2(self): - - cipher = AES.new(b'-' * 16, AES.MODE_KW) - ct = bytearray(cipher.seal(b'7' * 16)) - - cipher = AES.new(b'-' * 16, AES.MODE_KW) - cipher.unseal(ct) - - cipher = AES.new(b'-' * 16, AES.MODE_KW) - ct[0] ^= 0xFF - with self.assertRaises(ValueError): - cipher.unseal(ct) - - -class KW_Wycheproof(unittest.TestCase): - - def setUp(self): - self.vectors = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "kw_test.json", - "Wycheproof tests for KW") - - def test_wycheproof(self): - - if not self.vectors: - self.skipTest("No test vectors available") - - for vector in self.vectors: - with self.subTest(testId=vector.id): - cipher = AES.new(vector.key, AES.MODE_KW) - - try: - cipher.seal(vector.msg) - except ValueError: - if vector.valid: - raise - continue - - cipher = AES.new(vector.key, AES.MODE_KW) - try: - pt = cipher.unseal(vector.ct) - except ValueError: - if vector.valid: - raise - continue - - self.assertEqual(pt, vector.msg) - - -class KWP_Tests(unittest.TestCase): - - tvs = [ - ("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8", - "c37b7e6492584340bed12207808941155068f738", - "138bdeaa9b8fa7fc61f97742e72248ee5ae6ae5360d1ae6a5f54f373fa543b6a"), - ("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8", - "466f7250617369", - "afbeb0f07dfbf5419200f2ccb50bb24f"), - ] - - def test_rfc5649(self): - for tv in self.tvs: - kek, pt, ct = [bytes.fromhex(x) for x in tv] - - cipher = AES.new(kek, AES.MODE_KWP) - ct2 = cipher.seal(pt) - - self.assertEqual(ct, ct2) - - cipher = AES.new(kek, AES.MODE_KWP) - pt2 = cipher.unseal(ct) - self.assertEqual(pt, pt2) - - -class KWP_Wycheproof(unittest.TestCase): - - def setUp(self): - self.vectors = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "kwp_test.json", - "Wycheproof tests for KWP") - - def test_wycheproof(self): - - if not self.vectors: - self.skipTest("No test vectors available") - - for vector in self.vectors: - with self.subTest(testId=vector.id): - cipher = AES.new(vector.key, AES.MODE_KWP) - - try: - cipher.seal(vector.msg) - except ValueError: - if vector.valid and not vector.warning: - raise - continue - - cipher = AES.new(vector.key, AES.MODE_KWP) - try: - pt = cipher.unseal(vector.ct) - except ValueError: - if vector.valid and not vector.warning: - raise - continue - - self.assertEqual(pt, vector.msg) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(KW_Tests) - tests += list_test_cases(KWP_Tests) - tests += list_test_cases(KW_Wycheproof) - tests += list_test_cases(KWP_Wycheproof) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OCB.py 
b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OCB.py deleted file mode 100644 index 1f2ffbc..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OCB.py +++ /dev/null @@ -1,845 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.Util.py3compat import b, tobytes, bchr -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Cipher import AES -from Cryptodome.Hash import SHAKE128 - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class OcbTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 128) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - pt = get_tag_random("plaintext", 16 * 100) - ct, mac = cipher.encrypt_and_digest(pt) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - def test_nonce(self): - # Nonce is optional - AES.new(self.key_128, AES.MODE_OCB) - - cipher = AES.new(self.key_128, AES.MODE_OCB, self.nonce_96) - ct = cipher.encrypt(self.data) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - self.assertEqual(ct, cipher.encrypt(self.data)) - - def test_nonce_must_be_bytes(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, - nonce=u'test12345678') - - def test_nonce_length(self): - # nonce cannot be empty - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, - nonce=b("")) - - # nonce can be up to 15 bytes long - for length in range(1, 16): - AES.new(self.key_128, AES.MODE_OCB, nonce=self.data[:length]) - - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, - nonce=self.data) - - def test_block_size_128(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, 
nonce=self.nonce_96) - self.assertEqual(cipher.block_size, AES.block_size) - - # By default, a 15 bytes long nonce is randomly generated - nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce - nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce - self.assertEqual(len(nonce1), 15) - self.assertNotEqual(nonce1, nonce2) - - def test_nonce_attribute(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - self.assertEqual(cipher.nonce, self.nonce_96) - - # By default, a 15 bytes long nonce is randomly generated - nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce - nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce - self.assertEqual(len(nonce1), 15) - self.assertNotEqual(nonce1, nonce2) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, - self.nonce_96, 7) - self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, - nonce=self.nonce_96, unknown=7) - - # But some are only known by the base cipher - # (e.g. use_aesni consumed by the AES module) - AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96, - use_aesni=False) - - def test_null_encryption_decryption(self): - for func in "encrypt", "decrypt": - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - result = getattr(cipher, func)(b("")) - self.assertEqual(result, b("")) - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.encrypt(b("xyz")) - self.assertRaises(TypeError, cipher.decrypt, b("xyz")) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.decrypt(b("xyz")) - self.assertRaises(TypeError, cipher.encrypt, b("xyz")) - - def test_data_must_be_bytes(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') - - def test_mac_len(self): - # Invalid MAC length - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, - nonce=self.nonce_96, mac_len=7) - self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, - nonce=self.nonce_96, mac_len=16+1) - - # Valid MAC length - for mac_len in range(8, 16 + 1): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96, - mac_len=mac_len) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), mac_len) - - # Default MAC length - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), 16) - - def test_invalid_mac(self): - from Cryptodome.Util.strxor import strxor_c - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data) - - invalid_mac = strxor_c(mac, 0x01) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, - invalid_mac) - - def test_hex_mac(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - mac_hex = cipher.hexdigest() - self.assertEqual(cipher.digest(), unhexlify(mac_hex)) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.hexverify(mac_hex) - - def test_message_chunks(self): - # Validate that both associated data and plaintext/ciphertext - # can be broken up in chunks of arbitrary length - - auth_data = get_tag_random("authenticated data", 127) - plaintext = 
get_tag_random("plaintext", 127) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.update(auth_data) - ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) - - def break_up(data, chunk_length): - return [data[i:i+chunk_length] for i in range(0, len(data), - chunk_length)] - - # Encryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - pt2 = b("") - for chunk in break_up(ciphertext, chunk_length): - pt2 += cipher.decrypt(chunk) - pt2 += cipher.decrypt() - self.assertEqual(plaintext, pt2) - cipher.verify(ref_mac) - - # Decryption - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - - for chunk in break_up(auth_data, chunk_length): - cipher.update(chunk) - ct2 = b("") - for chunk in break_up(plaintext, chunk_length): - ct2 += cipher.encrypt(chunk) - ct2 += cipher.encrypt() - self.assertEqual(ciphertext, ct2) - self.assertEqual(cipher.digest(), ref_mac) - - def test_bytearray(self): - - # Encrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data) - data_ba = bytearray(self.data) - - cipher1 = AES.new(self.key_128, - AES.MODE_OCB, - nonce=self.nonce_96) - cipher1.update(self.data) - ct = cipher1.encrypt(self.data) + cipher1.encrypt() - tag = cipher1.digest() - - cipher2 = AES.new(key_ba, - AES.MODE_OCB, - nonce=nonce_ba) - key_ba[:3] = b"\xFF\xFF\xFF" - nonce_ba[:3] = b"\xFF\xFF\xFF" - cipher2.update(header_ba) - header_ba[:3] = b"\xFF\xFF\xFF" - ct_test = cipher2.encrypt(data_ba) + cipher2.encrypt() - data_ba[:3] = b"\xFF\xFF\xFF" - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_ba = bytearray(self.key_128) - nonce_ba = bytearray(self.nonce_96) - header_ba = bytearray(self.data) - del data_ba - - cipher4 = AES.new(key_ba, - AES.MODE_OCB, - nonce=nonce_ba) - key_ba[:3] = b"\xFF\xFF\xFF" - nonce_ba[:3] = b"\xFF\xFF\xFF" - cipher4.update(header_ba) - header_ba[:3] = b"\xFF\xFF\xFF" - pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) - - self.assertEqual(self.data, pt_test) - - def test_memoryview(self): - - # Encrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data)) - data_mv = memoryview(bytearray(self.data)) - - cipher1 = AES.new(self.key_128, - AES.MODE_OCB, - nonce=self.nonce_96) - cipher1.update(self.data) - ct = cipher1.encrypt(self.data) + cipher1.encrypt() - tag = cipher1.digest() - - cipher2 = AES.new(key_mv, - AES.MODE_OCB, - nonce=nonce_mv) - key_mv[:3] = b"\xFF\xFF\xFF" - nonce_mv[:3] = b"\xFF\xFF\xFF" - cipher2.update(header_mv) - header_mv[:3] = b"\xFF\xFF\xFF" - ct_test = cipher2.encrypt(data_mv) + cipher2.encrypt() - data_mv[:3] = b"\xFF\xFF\xFF" - tag_test = cipher2.digest() - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key_mv = memoryview(bytearray(self.key_128)) - nonce_mv = memoryview(bytearray(self.nonce_96)) - header_mv = memoryview(bytearray(self.data)) - del data_mv - - cipher4 = AES.new(key_mv, - AES.MODE_OCB, - nonce=nonce_mv) - key_mv[:3] = b"\xFF\xFF\xFF" - nonce_mv[:3] = b"\xFF\xFF\xFF" - cipher4.update(header_mv) - 
header_mv[:3] = b"\xFF\xFF\xFF" - pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) - - self.assertEqual(self.data, pt_test) - - -class OcbFSMTests(unittest.TestCase): - - key_128 = get_tag_random("key_128", 16) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 128) - - def test_valid_init_encrypt_decrypt_digest_verify(self): - # No authenticated data, fixed plaintext - # Verify path INIT->ENCRYPT->ENCRYPT(NONE)->DIGEST - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data) - ct += cipher.encrypt() - mac = cipher.digest() - - # Verify path INIT->DECRYPT->DECRYPT(NONCE)->VERIFY - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.decrypt() - cipher.verify(mac) - - def test_invalid_init_encrypt_decrypt_digest_verify(self): - # No authenticated data, fixed plaintext - # Verify path INIT->ENCRYPT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - ct = cipher.encrypt(self.data) - self.assertRaises(TypeError, cipher.digest) - - # Verify path INIT->DECRYPT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.decrypt(ct) - self.assertRaises(TypeError, cipher.verify) - - def test_valid_init_update_digest_verify(self): - # No plaintext, fixed authenticated data - # Verify path INIT->UPDATE->DIGEST - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - mac = cipher.digest() - - # Verify path INIT->UPDATE->VERIFY - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.verify(mac) - - def test_valid_full_path(self): - # Fixed authenticated data, fixed plaintext - # Verify path INIT->UPDATE->ENCRYPT->ENCRYPT(NONE)->DIGEST - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - ct = cipher.encrypt(self.data) - ct += cipher.encrypt() - mac = cipher.digest() - - # Verify path INIT->UPDATE->DECRYPT->DECRYPT(NONE)->VERIFY - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.decrypt(ct) - cipher.decrypt() - cipher.verify(mac) - - # Verify path INIT->UPDATE->ENCRYPT->ENCRYPT_AND_DIGEST - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - ct1 = cipher.encrypt(self.data[:2]) - ct2, mac = cipher.encrypt_and_digest(self.data[2:]) - - # Verify path INIT->UPDATE->DECRYPT->DECRYPT_AND_VERIFY - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.decrypt(ct1) - cipher.decrypt_and_verify(ct2, mac) - - def test_invalid_encrypt_after_final(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.encrypt(self.data) - cipher.encrypt() - self.assertRaises(TypeError, cipher.encrypt, self.data) - - def test_invalid_decrypt_after_final(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.decrypt(self.data) - cipher.decrypt() - self.assertRaises(TypeError, cipher.decrypt, self.data) - - def test_valid_init_digest(self): - # Verify path INIT->DIGEST - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.digest() - - def test_valid_init_verify(self): - # Verify path INIT->VERIFY - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - mac = cipher.digest() - - cipher = 
AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.verify(mac) - - def test_valid_multiple_encrypt_or_decrypt(self): - for method_name in "encrypt", "decrypt": - for auth_data in (None, b("333"), self.data, - self.data + b("3")): - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - if auth_data is not None: - cipher.update(auth_data) - method = getattr(cipher, method_name) - method(self.data) - method(self.data) - method(self.data) - method(self.data) - method() - - def test_valid_multiple_digest_or_verify(self): - # Multiple calls to digest - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.update(self.data) - first_mac = cipher.digest() - for x in range(4): - self.assertEqual(first_mac, cipher.digest()) - - # Multiple calls to verify - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.update(self.data) - for x in range(5): - cipher.verify(first_mac) - - def test_valid_encrypt_and_digest_decrypt_and_verify(self): - # encrypt_and_digest - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.update(self.data) - ct, mac = cipher.encrypt_and_digest(self.data) - - # decrypt_and_verify - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.update(self.data) - pt = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(self.data, pt) - - def test_invalid_mixing_encrypt_decrypt(self): - # Once per method, with or without assoc. data - for method1_name, method2_name in (("encrypt", "decrypt"), - ("decrypt", "encrypt")): - for assoc_data_present in (True, False): - cipher = AES.new(self.key_128, AES.MODE_OCB, - nonce=self.nonce_96) - if assoc_data_present: - cipher.update(self.data) - getattr(cipher, method1_name)(self.data) - self.assertRaises(TypeError, getattr(cipher, method2_name), - self.data) - - def test_invalid_encrypt_or_update_after_digest(self): - for method_name in "encrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.encrypt(self.data) - cipher.encrypt() - cipher.digest() - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data) - - def test_invalid_decrypt_or_update_after_verify(self): - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - ct = cipher.encrypt(self.data) - ct += cipher.encrypt() - mac = cipher.digest() - - for method_name in "decrypt", "update": - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.decrypt(ct) - cipher.decrypt() - cipher.verify(mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, mac) - self.assertRaises(TypeError, getattr(cipher, method_name), - self.data) - - -def algo_rfc7253(keylen, taglen, noncelen): - """Implement the algorithm at page 18 of RFC 7253""" - - key = bchr(0) * (keylen // 8 - 1) + bchr(taglen) - C = b"" - - for i in range(128): - S = bchr(0) * i - - N = long_to_bytes(3 * i + 1, noncelen // 8) - cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) - cipher.update(S) - C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest() - - N = long_to_bytes(3 * i + 2, noncelen // 8) - cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) - C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest() - - N = long_to_bytes(3 * i + 3, noncelen // 8) - cipher = 
AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) - cipher.update(S) - C += cipher.encrypt() + cipher.digest() - - N = long_to_bytes(385, noncelen // 8) - cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) - cipher.update(C) - return cipher.encrypt() + cipher.digest() - - -class OcbRfc7253Test(unittest.TestCase): - - # Tuple with - # - nonce - # - authenticated data - # - plaintext - # - ciphertext and 16 byte MAC tag - tv1_key = "000102030405060708090A0B0C0D0E0F" - tv1 = ( - ( - "BBAA99887766554433221100", - "", - "", - "785407BFFFC8AD9EDCC5520AC9111EE6" - ), - ( - "BBAA99887766554433221101", - "0001020304050607", - "0001020304050607", - "6820B3657B6F615A5725BDA0D3B4EB3A257C9AF1F8F03009" - ), - ( - "BBAA99887766554433221102", - "0001020304050607", - "", - "81017F8203F081277152FADE694A0A00" - ), - ( - "BBAA99887766554433221103", - "", - "0001020304050607", - "45DD69F8F5AAE72414054CD1F35D82760B2CD00D2F99BFA9" - ), - ( - "BBAA99887766554433221104", - "000102030405060708090A0B0C0D0E0F", - "000102030405060708090A0B0C0D0E0F", - "571D535B60B277188BE5147170A9A22C3AD7A4FF3835B8C5" - "701C1CCEC8FC3358" - ), - ( - "BBAA99887766554433221105", - "000102030405060708090A0B0C0D0E0F", - "", - "8CF761B6902EF764462AD86498CA6B97" - ), - ( - "BBAA99887766554433221106", - "", - "000102030405060708090A0B0C0D0E0F", - "5CE88EC2E0692706A915C00AEB8B2396F40E1C743F52436B" - "DF06D8FA1ECA343D" - ), - ( - "BBAA99887766554433221107", - "000102030405060708090A0B0C0D0E0F1011121314151617", - "000102030405060708090A0B0C0D0E0F1011121314151617", - "1CA2207308C87C010756104D8840CE1952F09673A448A122" - "C92C62241051F57356D7F3C90BB0E07F" - ), - ( - "BBAA99887766554433221108", - "000102030405060708090A0B0C0D0E0F1011121314151617", - "", - "6DC225A071FC1B9F7C69F93B0F1E10DE" - ), - ( - "BBAA99887766554433221109", - "", - "000102030405060708090A0B0C0D0E0F1011121314151617", - "221BD0DE7FA6FE993ECCD769460A0AF2D6CDED0C395B1C3C" - "E725F32494B9F914D85C0B1EB38357FF" - ), - ( - "BBAA9988776655443322110A", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F", - "BD6F6C496201C69296C11EFD138A467ABD3C707924B964DE" - "AFFC40319AF5A48540FBBA186C5553C68AD9F592A79A4240" - ), - ( - "BBAA9988776655443322110B", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F", - "", - "FE80690BEE8A485D11F32965BC9D2A32" - ), - ( - "BBAA9988776655443322110C", - "", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F", - "2942BFC773BDA23CABC6ACFD9BFD5835BD300F0973792EF4" - "6040C53F1432BCDFB5E1DDE3BC18A5F840B52E653444D5DF" - ), - ( - "BBAA9988776655443322110D", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627", - "D5CA91748410C1751FF8A2F618255B68A0A12E093FF45460" - "6E59F9C1D0DDC54B65E8628E568BAD7AED07BA06A4A69483" - "A7035490C5769E60" - ), - ( - "BBAA9988776655443322110E", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627", - "", - "C5CD9D1850C141E358649994EE701B68" - ), - ( - "BBAA9988776655443322110F", - "", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627", - "4412923493C57D5DE0D700F753CCE0D1D2D95060122E9F15" - "A5DDBFC5787E50B5CC55EE507BCB084E479AD363AC366B95" - "A98CA5F3000B1479" - ) - ) - - # Tuple with - # - key - # - nonce - # - authenticated data - # - plaintext - # - ciphertext and 12 byte MAC 
tag - tv2 = ( - "0F0E0D0C0B0A09080706050403020100", - "BBAA9988776655443322110D", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627", - "000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627", - "1792A4E31E0755FB03E31B22116E6C2DDF9EFD6E33D536F1" - "A0124B0A55BAE884ED93481529C76B6AD0C515F4D1CDD4FD" - "AC4F02AA" - ) - - # Tuple with - # - key length - # - MAC tag length - # - Expected output - tv3 = ( - (128, 128, "67E944D23256C5E0B6C61FA22FDF1EA2"), - (192, 128, "F673F2C3E7174AAE7BAE986CA9F29E17"), - (256, 128, "D90EB8E9C977C88B79DD793D7FFA161C"), - (128, 96, "77A3D8E73589158D25D01209"), - (192, 96, "05D56EAD2752C86BE6932C5E"), - (256, 96, "5458359AC23B0CBA9E6330DD"), - (128, 64, "192C9B7BD90BA06A"), - (192, 64, "0066BC6E0EF34E24"), - (256, 64, "7D4EA5D445501CBE"), - ) - - def test1(self): - key = unhexlify(b(self.tv1_key)) - for tv in self.tv1: - nonce, aad, pt, ct = [unhexlify(b(x)) for x in tv] - ct, mac_tag = ct[:-16], ct[-16:] - - cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) - cipher.update(aad) - ct2 = cipher.encrypt(pt) + cipher.encrypt() - self.assertEqual(ct, ct2) - self.assertEqual(mac_tag, cipher.digest()) - - cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) - cipher.update(aad) - pt2 = cipher.decrypt(ct) + cipher.decrypt() - self.assertEqual(pt, pt2) - cipher.verify(mac_tag) - - def test2(self): - - key, nonce, aad, pt, ct = [unhexlify(b(x)) for x in self.tv2] - ct, mac_tag = ct[:-12], ct[-12:] - - cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12) - cipher.update(aad) - ct2 = cipher.encrypt(pt) + cipher.encrypt() - self.assertEqual(ct, ct2) - self.assertEqual(mac_tag, cipher.digest()) - - cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12) - cipher.update(aad) - pt2 = cipher.decrypt(ct) + cipher.decrypt() - self.assertEqual(pt, pt2) - cipher.verify(mac_tag) - - def test3(self): - for keylen, taglen, result in self.tv3: - result2 = algo_rfc7253(keylen, taglen, 96) - self.assertEqual(unhexlify(b(result)), result2) - - -class OcbDkgTest(unittest.TestCase): - """Test vectors from https://gitlab.com/dkg/ocb-test-vectors""" - - def test_1_2(self): - tvs = [] - for fi in (1, 2): - for nb in (104, 112, 120): - tv_file = load_test_vectors(("Cipher", "AES"), - "test-vector-%d-nonce%d.txt" % (fi, nb), - "DKG tests, %d, %d bits" % (fi, nb), - {}) - if tv_file is None: - break - key = tv_file[0].k - for tv in tv_file[1:]: - tv.k = key - tvs.append(tv) - - for tv in tvs: - k, n, a, p, c = tv.k, tv.n, tv.a, tv.p, tv.c - mac_len = len(c) - len(p) - cipher = AES.new(k, AES.MODE_OCB, nonce=n, mac_len=mac_len) - cipher.update(a) - c_out, tag_out = cipher.encrypt_and_digest(p) - self.assertEqual(c, c_out + tag_out) - - def test_3(self): - - def check(keylen, taglen, noncelen, exp): - result = algo_rfc7253(keylen, taglen, noncelen) - self.assertEqual(result, unhexlify(exp)) - - # test-vector-3-nonce104.txt - check(128, 128, 104, "C47F5F0341E15326D4D1C46F47F05062") - check(192, 128, 104, "95B9167A38EB80495DFC561A8486E109") - check(256, 128, 104, "AFE1CDDB97028FD92F8FB3C8CFBA7D83") - check(128, 96, 104, "F471B4983BA80946DF217A54") - check(192, 96, 104, "5AE828BC51C24D85FA5CC7B2") - check(256, 96, 104, "8C8335982E2B734616CAD14C") - check(128, 64, 104, "B553F74B85FD1E5B") - check(192, 64, 104, "3B49D20E513531F9") - check(256, 64, 104, "ED6DA5B1216BF8BB") - - # test-vector-3-nonce112.txt - check(128, 128, 112, "CA8AFCA031BAC3F480A583BD6C50A547") - check(192, 128, 112, "D170C1DF356308079DA9A3F619147148") - 
check(256, 128, 112, "57F94381F2F9231EFB04AECD323757C3") - check(128, 96, 112, "3A618B2531ED39F260C750DC") - check(192, 96, 112, "9071EB89FEDBADDA88FD286E") - check(256, 96, 112, "FDF0EFB97F21A39AC4BAB5AC") - check(128, 64, 112, "FAB2FF3A8DD82A13") - check(192, 64, 112, "AC01D912BD0737D3") - check(256, 64, 112, "9D1FD0B500EA4ECF") - - # test-vector-3-nonce120.txt - check(128, 128, 120, "9E043A7140A25FB91F43BCC9DD7E0F46") - check(192, 128, 120, "680000E53908323A7F396B955B8EC641") - check(256, 128, 120, "8304B97FAACDA56E676602E1878A7E6F") - check(128, 96, 120, "81F978AC9867E825D339847D") - check(192, 96, 120, "EFCF2D60B24926ADA48CF5B1") - check(256, 96, 120, "84961DC56E917B165E58C174") - check(128, 64, 120, "227AEE6C9D905A61") - check(192, 64, 120, "541DE691B9E1A2F9") - check(256, 64, 120, "B0E761381C7129FC") - - def test_2_bugfix(self): - nonce = unhexlify("EEDDCCBBAA9988776655443322110D") - key = unhexlify("0F0E0D0C0B0A09080706050403020100") - A = unhexlify("000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627") - P = unhexlify("000102030405060708090A0B0C0D0E0F1011121314151617" - "18191A1B1C1D1E1F2021222324252627") - C = unhexlify("07E903BFC49552411ABC865F5ECE60F6FAD1F5A9F14D3070" - "FA2F1308A563207FFE14C1EEA44B22059C7484319D8A2C53" - "C236A7B3") - mac_len = len(C) - len(P) - - # Prior to version 3.17, a nonce of maximum length (15 bytes) - # was actually used as a 14 byte nonce. The last byte was erroneously - # ignored. - buggy_result = unhexlify("BA015C4E5AE54D76C890AE81BD40DC57" - "03EDC30E8AC2A58BC5D8FA4D61C5BAE6" - "C39BEAC435B2FD56A2A5085C1B135D77" - "0C8264B7") - cipher = AES.new(key, AES.MODE_OCB, nonce=nonce[:-1], mac_len=mac_len) - cipher.update(A) - C_out2, tag_out2 = cipher.encrypt_and_digest(P) - self.assertEqual(buggy_result, C_out2 + tag_out2) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(OcbTests) - tests += list_test_cases(OcbFSMTests) - tests += list_test_cases(OcbRfc7253Test) - tests += list_test_cases(OcbDkgTest) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OFB.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OFB.py deleted file mode 100644 index 9a8ef0a..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OFB.py +++ /dev/null @@ -1,238 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.Cipher import AES, DES3, DES -from Cryptodome.Hash import SHAKE128 -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - -from Cryptodome.SelfTest.Cipher.test_CBC import BlockChainingTests - -class OfbTests(BlockChainingTests): - - aes_mode = AES.MODE_OFB - des3_mode = DES3.MODE_OFB - - # Redefine test_unaligned_data_128/64 - - def test_unaligned_data_128(self): - plaintexts = [ b"7777777" ] * 100 - - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - def test_unaligned_data_64(self): - plaintexts = [ b"7777777" ] * 100 - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - -from Cryptodome.SelfTest.Cipher.test_CBC import NistBlockChainingVectors - -class NistOfbVectors(NistBlockChainingVectors): - aes_mode = AES.MODE_OFB - des_mode = DES.MODE_OFB - des3_mode = DES3.MODE_OFB - - -# Create one test method per file -nist_aes_kat_mmt_files = ( - # KAT - "OFBGFSbox128.rsp", - "OFBGFSbox192.rsp", - "OFBGFSbox256.rsp", - "OFBKeySbox128.rsp", - "OFBKeySbox192.rsp", - "OFBKeySbox256.rsp", - "OFBVarKey128.rsp", - "OFBVarKey192.rsp", - "OFBVarKey256.rsp", - "OFBVarTxt128.rsp", - "OFBVarTxt192.rsp", - "OFBVarTxt256.rsp", - # MMT - "OFBMMT128.rsp", - "OFBMMT192.rsp", - "OFBMMT256.rsp", - ) -nist_aes_mct_files = ( - "OFBMCT128.rsp", - "OFBMCT192.rsp", - "OFBMCT256.rsp", - ) - -for file_name in nist_aes_kat_mmt_files: - def new_func(self, file_name=file_name): - self._do_kat_aes_test(file_name) - setattr(NistOfbVectors, "test_AES_" + file_name, new_func) - -for file_name in nist_aes_mct_files: - def new_func(self, file_name=file_name): - self._do_mct_aes_test(file_name) - 
setattr(NistOfbVectors, "test_AES_" + file_name, new_func) -del file_name, new_func - -nist_tdes_files = ( - "TOFBMMT2.rsp", # 2TDES - "TOFBMMT3.rsp", # 3TDES - "TOFBinvperm.rsp", # Single DES - "TOFBpermop.rsp", - "TOFBsubtab.rsp", - "TOFBvarkey.rsp", - "TOFBvartext.rsp", - ) - -for file_name in nist_tdes_files: - def new_func(self, file_name=file_name): - self._do_tdes_test(file_name) - setattr(NistOfbVectors, "test_TDES_" + file_name, new_func) - -# END OF NIST OFB TEST VECTORS - - -class SP800TestVectors(unittest.TestCase): - """Class exercising the OFB test vectors found in Section F.4 - of NIST SP 800-3A""" - - def test_aes_128(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ - '7789508d16918f03f53c52dac54ed825' +\ - '9740051e9c5fecf64344f7a82260edcc' +\ - '304c6528f659c77866a510d9c1d6ae5e' - key = '2b7e151628aed2a6abf7158809cf4f3c' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) - - def test_aes_192(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ - 'fcc28b8d4c63837c09e81700c1100401' +\ - '8d9a9aeac0f6596f559c6d4daf59a5f2' +\ - '6d9f200857ca6c3e9cac524bd9acc92a' - key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) - - def test_aes_256(self): - plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ - 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ - '30c81c46a35ce411e5fbc1191a0a52ef' +\ - 'f69f2445df4f9b17ad2b417be66c3710' - ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ - '4febdc6740d20b3ac88f6ad82a4fb08d' +\ - '71ab47a086e86eedf39d1c5bba97c408' +\ - '0126141d67f37be8538f5a8be740e484' - key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' - iv = '000102030405060708090a0b0c0d0e0f' - - key = unhexlify(key) - iv = unhexlify(iv) - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.encrypt(plaintext), ciphertext) - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.decrypt(ciphertext), plaintext) - - cipher = AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) - cipher = 
AES.new(key, AES.MODE_OFB, iv) - self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(OfbTests) - if config.get('slow_tests'): - tests += list_test_cases(NistOfbVectors) - tests += list_test_cases(SP800TestVectors) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OpenPGP.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OpenPGP.py deleted file mode 100644 index 4090a1a..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_OpenPGP.py +++ /dev/null @@ -1,218 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.Cipher import AES, DES3, DES -from Cryptodome.Hash import SHAKE128 - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -from Cryptodome.SelfTest.Cipher.test_CBC import BlockChainingTests - -class OpenPGPTests(BlockChainingTests): - - aes_mode = AES.MODE_OPENPGP - des3_mode = DES3.MODE_OPENPGP - - # Redefine test_unaligned_data_128/64 - - key_128 = get_tag_random("key_128", 16) - key_192 = get_tag_random("key_192", 24) - iv_128 = get_tag_random("iv_128", 16) - iv_64 = get_tag_random("iv_64", 8) - data_128 = get_tag_random("data_128", 16) - - def test_loopback_128(self): - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) - pt = get_tag_random("plaintext", 16 * 100) - ct = cipher.encrypt(pt) - - eiv, ct = ct[:18], ct[18:] - - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_loopback_64(self): - cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) - pt = get_tag_random("plaintext", 8 * 100) - ct = cipher.encrypt(pt) - - eiv, ct = ct[:10], ct[10:] - - cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, eiv) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def test_IV_iv_attributes(self): - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) - eiv = cipher.encrypt(b"") - self.assertEqual(cipher.iv, self.iv_128) - - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) - self.assertEqual(cipher.iv, self.iv_128) - - def test_null_encryption_decryption(self): - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) - eiv = cipher.encrypt(b"") - - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) - self.assertEqual(cipher.decrypt(b""), b"") - - def test_either_encrypt_or_decrypt(self): - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) - eiv = cipher.encrypt(b"") - self.assertRaises(TypeError, cipher.decrypt, b"") - - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) - cipher.decrypt(b"") - self.assertRaises(TypeError, cipher.encrypt, b"") - - def test_unaligned_data_128(self): - plaintexts = [ b"7777777" ] * 100 - - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - def test_unaligned_data_64(self): - plaintexts = [ b"7777777" ] * 100 - - cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) - ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] - cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) - self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) - - def test_output_param(self): - pass - - def test_output_param_same_buffer(self): - pass - - def test_output_param_memoryview(self): - pass - - def test_output_param_neg(self): - pass - - -class TestVectors(unittest.TestCase): - - def test_aes(self): - # The following test vectors have been generated with gpg v1.4.0. 
- # The command line used was: - # - # gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \ - # --disable-mdc --s2k-mode 0 --output ct pt - # - # As result, the content of the file 'pt' is encrypted with a key derived - # from 'secret_passphrase' and written to file 'ct'. - # Test vectors must be extracted from 'ct', which is a collection of - # TLVs (see RFC4880 for all details): - # - the encrypted data (with the encrypted IV as prefix) is the payload - # of the TLV with tag 9 (Symmetrical Encrypted Data Packet). - # This is the ciphertext in the test vector. - # - inside the encrypted part, there is a further layer of TLVs. One must - # look for tag 11 (Literal Data Packet); in its payload, after a short - # but time dependent header, there is the content of file 'pt'. - # In the test vector, the plaintext is the complete set of TLVs that gets - # encrypted. It is not just the content of 'pt'. - # - the key is the leftmost 16 bytes of the SHA1 digest of the password. - # The test vector contains such shortened digest. - # - # Note that encryption uses a clear IV, and decryption an encrypted IV - - plaintext = 'ac18620270744fb4f647426c61636b4361745768697465436174' - ciphertext = 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb' - key = '5baa61e4c9b93f3f0682250b6cf8331b' - iv = '3d7d3e62282add7eb203eeba5c800733' - encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef' - - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - key = unhexlify(key) - iv = unhexlify(iv) - encrypted_iv = unhexlify(encrypted_iv) - - cipher = AES.new(key, AES.MODE_OPENPGP, iv) - ct = cipher.encrypt(plaintext) - self.assertEqual(ct[:18], encrypted_iv) - self.assertEqual(ct[18:], ciphertext) - - cipher = AES.new(key, AES.MODE_OPENPGP, encrypted_iv) - pt = cipher.decrypt(ciphertext) - self.assertEqual(pt, plaintext) - - def test_des3(self): - # The following test vectors have been generated with gpg v1.4.0. - # The command line used was: - # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ - # --disable-mdc --s2k-mode 0 --output ct pt - # For an explanation, see test_AES.py . - - plaintext = 'ac1762037074324fb53ba3596f73656d69746556616c6c6579' - ciphertext = '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d' - key = '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2' - iv='cd47e2afb8b7e4b0' - encrypted_iv='6a7eef0b58050e8b904a' - - plaintext = unhexlify(plaintext) - ciphertext = unhexlify(ciphertext) - key = unhexlify(key) - iv = unhexlify(iv) - encrypted_iv = unhexlify(encrypted_iv) - - cipher = DES3.new(key, DES3.MODE_OPENPGP, iv) - ct = cipher.encrypt(plaintext) - self.assertEqual(ct[:10], encrypted_iv) - self.assertEqual(ct[10:], ciphertext) - - cipher = DES3.new(key, DES3.MODE_OPENPGP, encrypted_iv) - pt = cipher.decrypt(ciphertext) - self.assertEqual(pt, plaintext) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(OpenPGPTests) - tests += list_test_cases(TestVectors) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_SIV.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_SIV.py deleted file mode 100644 index d4bb5a9..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_SIV.py +++ /dev/null @@ -1,552 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import json -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof - -from Cryptodome.Util.py3compat import tobytes, bchr -from Cryptodome.Cipher import AES -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.Util.strxor import strxor - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class SivTests(unittest.TestCase): - - key_256 = get_tag_random("key_256", 32) - key_384 = get_tag_random("key_384", 48) - key_512 = get_tag_random("key_512", 64) - nonce_96 = get_tag_random("nonce_128", 12) - data = get_tag_random("data", 128) - - def test_loopback_128(self): - for key in self.key_256, self.key_384, self.key_512: - cipher = AES.new(key, AES.MODE_SIV, nonce=self.nonce_96) - pt = get_tag_random("plaintext", 16 * 100) - ct, mac = cipher.encrypt_and_digest(pt) - - cipher = AES.new(key, AES.MODE_SIV, nonce=self.nonce_96) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - def test_nonce(self): - # Deterministic encryption - AES.new(self.key_256, AES.MODE_SIV) - - cipher = AES.new(self.key_256, AES.MODE_SIV, self.nonce_96) - ct1, tag1 = cipher.encrypt_and_digest(self.data) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct2, tag2 = cipher.encrypt_and_digest(self.data) - self.assertEqual(ct1 + tag1, ct2 + tag2) - - def test_nonce_must_be_bytes(self): - self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, - nonce=u'test12345678') - - def test_nonce_length(self): - # nonce can be of any length (but not empty) - self.assertRaises(ValueError, AES.new, self.key_256, AES.MODE_SIV, - nonce=b"") - - for x in range(1, 128): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=bchr(1) * x) - cipher.encrypt_and_digest(b'\x01') - - def test_block_size_128(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertEqual(cipher.block_size, AES.block_size) - - def test_nonce_attribute(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertEqual(cipher.nonce, 
self.nonce_96) - - # By default, no nonce is randomly generated - self.assertFalse(hasattr(AES.new(self.key_256, AES.MODE_SIV), "nonce")) - - def test_unknown_parameters(self): - self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, - self.nonce_96, 7) - self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, - nonce=self.nonce_96, unknown=7) - - # But some are only known by the base cipher - # (e.g. use_aesni consumed by the AES module) - AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96, - use_aesni=False) - - def test_encrypt_excludes_decrypt(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data) - self.assertRaises(TypeError, cipher.decrypt, self.data) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.encrypt_and_digest(self.data) - self.assertRaises(TypeError, cipher.decrypt_and_verify, - self.data, self.data) - - def test_data_must_be_bytes(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt_and_verify, - u'test1234567890-*', b"xxxx") - - def test_mac_len(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - _, mac = cipher.encrypt_and_digest(self.data) - self.assertEqual(len(mac), 16) - - def test_invalid_mac(self): - from Cryptodome.Util.strxor import strxor_c - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct, mac = cipher.encrypt_and_digest(self.data) - - invalid_mac = strxor_c(mac, 0x01) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, - invalid_mac) - - def test_hex_mac(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - mac_hex = cipher.hexdigest() - self.assertEqual(cipher.digest(), unhexlify(mac_hex)) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.hexverify(mac_hex) - - def test_bytearray(self): - - # Encrypt - key = bytearray(self.key_256) - nonce = bytearray(self.nonce_96) - data = bytearray(self.data) - header = bytearray(self.data) - - cipher1 = AES.new(self.key_256, - AES.MODE_SIV, - nonce=self.nonce_96) - cipher1.update(self.data) - ct, tag = cipher1.encrypt_and_digest(self.data) - - cipher2 = AES.new(key, - AES.MODE_SIV, - nonce=nonce) - key[:3] = b'\xFF\xFF\xFF' - nonce[:3] = b'\xFF\xFF\xFF' - cipher2.update(header) - header[:3] = b'\xFF\xFF\xFF' - ct_test, tag_test = cipher2.encrypt_and_digest(data) - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key = bytearray(self.key_256) - nonce = bytearray(self.nonce_96) - header = bytearray(self.data) - ct_ba = bytearray(ct) - tag_ba = bytearray(tag) - - cipher3 = AES.new(key, - AES.MODE_SIV, - nonce=nonce) - key[:3] = b'\xFF\xFF\xFF' - nonce[:3] = b'\xFF\xFF\xFF' - cipher3.update(header) - header[:3] = b'\xFF\xFF\xFF' - pt_test = cipher3.decrypt_and_verify(ct_ba, tag_ba) - - self.assertEqual(self.data, pt_test) - - def test_memoryview(self): - - # Encrypt - key = memoryview(bytearray(self.key_256)) - nonce = memoryview(bytearray(self.nonce_96)) - data = memoryview(bytearray(self.data)) - header = memoryview(bytearray(self.data)) - - cipher1 = AES.new(self.key_256, - AES.MODE_SIV, - nonce=self.nonce_96) - cipher1.update(self.data) - ct, tag = 
cipher1.encrypt_and_digest(self.data) - - cipher2 = AES.new(key, - AES.MODE_SIV, - nonce=nonce) - key[:3] = b'\xFF\xFF\xFF' - nonce[:3] = b'\xFF\xFF\xFF' - cipher2.update(header) - header[:3] = b'\xFF\xFF\xFF' - ct_test, tag_test= cipher2.encrypt_and_digest(data) - - self.assertEqual(ct, ct_test) - self.assertEqual(tag, tag_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decrypt - key = memoryview(bytearray(self.key_256)) - nonce = memoryview(bytearray(self.nonce_96)) - header = memoryview(bytearray(self.data)) - ct_ba = memoryview(bytearray(ct)) - tag_ba = memoryview(bytearray(tag)) - - cipher3 = AES.new(key, - AES.MODE_SIV, - nonce=nonce) - key[:3] = b'\xFF\xFF\xFF' - nonce[:3] = b'\xFF\xFF\xFF' - cipher3.update(header) - header[:3] = b'\xFF\xFF\xFF' - pt_test = cipher3.decrypt_and_verify(ct_ba, tag_ba) - - self.assertEqual(self.data, pt_test) - - def test_output_param(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct, tag = cipher.encrypt_and_digest(pt) - - output = bytearray(128) - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - res, tag_out = cipher.encrypt_and_digest(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - self.assertEqual(tag, tag_out) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - res = cipher.decrypt_and_verify(ct, tag, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - def test_output_param_memoryview(self): - - pt = b'5' * 128 - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct, tag = cipher.encrypt_and_digest(pt) - - output = memoryview(bytearray(128)) - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.encrypt_and_digest(pt, output=output) - self.assertEqual(ct, output) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, tag, output=output) - self.assertEqual(pt, output) - - def test_output_param_neg(self): - LEN_PT = 128 - - pt = b'5' * LEN_PT - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct, tag = cipher.encrypt_and_digest(pt) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt_and_digest, pt, output=b'0' * LEN_PT) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt_and_verify, ct, tag, output=b'0' * LEN_PT) - - shorter_output = bytearray(LEN_PT - 1) - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.encrypt_and_digest, pt, output=shorter_output) - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, tag, output=shorter_output) - - -class SivFSMTests(unittest.TestCase): - - key_256 = get_tag_random("key_256", 32) - nonce_96 = get_tag_random("nonce_96", 12) - data = get_tag_random("data", 128) - - def test_invalid_init_encrypt(self): - # Path INIT->ENCRYPT fails - cipher = AES.new(self.key_256, AES.MODE_SIV, - nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.encrypt, b"xxx") - - def test_invalid_init_decrypt(self): - # Path INIT->DECRYPT fails - cipher = AES.new(self.key_256, AES.MODE_SIV, - nonce=self.nonce_96) - self.assertRaises(TypeError, cipher.decrypt, b"xxx") - - def test_valid_init_update_digest_verify(self): - # No plaintext, fixed authenticated data - # Verify path INIT->UPDATE->DIGEST - cipher = 
AES.new(self.key_256, AES.MODE_SIV, - nonce=self.nonce_96) - cipher.update(self.data) - mac = cipher.digest() - - # Verify path INIT->UPDATE->VERIFY - cipher = AES.new(self.key_256, AES.MODE_SIV, - nonce=self.nonce_96) - cipher.update(self.data) - cipher.verify(mac) - - def test_valid_init_digest(self): - # Verify path INIT->DIGEST - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.digest() - - def test_valid_init_verify(self): - # Verify path INIT->VERIFY - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - mac = cipher.digest() - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.verify(mac) - - def test_valid_multiple_digest_or_verify(self): - # Multiple calls to digest - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.update(self.data) - first_mac = cipher.digest() - for x in range(4): - self.assertEqual(first_mac, cipher.digest()) - - # Multiple calls to verify - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.update(self.data) - for x in range(5): - cipher.verify(first_mac) - - def test_valid_encrypt_and_digest_decrypt_and_verify(self): - # encrypt_and_digest - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.update(self.data) - ct, mac = cipher.encrypt_and_digest(self.data) - - # decrypt_and_verify - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.update(self.data) - pt = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(self.data, pt) - - def test_invalid_multiple_encrypt_and_digest(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct, tag = cipher.encrypt_and_digest(self.data) - self.assertRaises(TypeError, cipher.encrypt_and_digest, b'') - - def test_invalid_multiple_decrypt_and_verify(self): - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - ct, tag = cipher.encrypt_and_digest(self.data) - - cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) - cipher.decrypt_and_verify(ct, tag) - self.assertRaises(TypeError, cipher.decrypt_and_verify, ct, tag) - - -def transform(tv): - new_tv = [[unhexlify(x) for x in tv[0].split("-")]] - new_tv += [ unhexlify(x) for x in tv[1:5]] - if tv[5]: - nonce = unhexlify(tv[5]) - else: - nonce = None - new_tv += [ nonce ] - return new_tv - - -class TestVectors(unittest.TestCase): - """Class exercising the SIV test vectors found in RFC5297""" - - # This is a list of tuples with 5 items: - # - # 1. Header + '|' + plaintext - # 2. Header + '|' + ciphertext + '|' + MAC - # 3. AES-128 key - # 4. Description - # 5. Dictionary of parameters to be passed to AES.new(). - # It must include the nonce. - # - # A "Header" is a dash ('-') separated sequece of components. 
- # - test_vectors_hex = [ - ( - '101112131415161718191a1b1c1d1e1f2021222324252627', - '112233445566778899aabbccddee', - '40c02b9690c4dc04daef7f6afe5c', - '85632d07c6e8f37f950acd320a2ecc93', - 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff', - None - ), - ( - '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddccbbaa9988' + - '7766554433221100-102030405060708090a0', - '7468697320697320736f6d6520706c61696e7465787420746f20656e63727970' + - '74207573696e67205349562d414553', - 'cb900f2fddbe404326601965c889bf17dba77ceb094fa663b7a3f748ba8af829' + - 'ea64ad544a272e9c485b62a3fd5c0d', - '7bdb6e3b432667eb06f4d14bff2fbd0f', - '7f7e7d7c7b7a79787776757473727170404142434445464748494a4b4c4d4e4f', - '09f911029d74e35bd84156c5635688c0' - ), - ] - - test_vectors = [ transform(tv) for tv in test_vectors_hex ] - - def runTest(self): - for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: - - # Encrypt - cipher = AES.new(key, AES.MODE_SIV, nonce=nonce) - for x in assoc_data: - cipher.update(x) - ct2, mac2 = cipher.encrypt_and_digest(pt) - self.assertEqual(ct, ct2) - self.assertEqual(mac, mac2) - - # Decrypt - cipher = AES.new(key, AES.MODE_SIV, nonce=nonce) - for x in assoc_data: - cipher.update(x) - pt2 = cipher.decrypt_and_verify(ct, mac) - self.assertEqual(pt, pt2) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self): - unittest.TestCase.__init__(self) - self._id = "None" - - def setUp(self): - self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "aes_siv_cmac_test.json", - "Wycheproof AES SIV") - - def shortDescription(self): - return self._id - - def test_encrypt(self, tv): - self._id = "Wycheproof Encrypt AES-SIV Test #" + str(tv.id) - - cipher = AES.new(tv.key, AES.MODE_SIV) - cipher.update(tv.aad) - ct, tag = cipher.encrypt_and_digest(tv.msg) - if tv.valid: - self.assertEqual(tag + ct, tv.ct) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt AES_SIV Test #" + str(tv.id) - - cipher = AES.new(tv.key, AES.MODE_SIV) - cipher.update(tv.aad) - try: - pt = cipher.decrypt_and_verify(tv.ct[16:], tv.ct[:16]) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - - def runTest(self): - - for tv in self.tv: - self.test_encrypt(tv) - self.test_decrypt(tv) - - -class TestVectorsWycheproof2(unittest.TestCase): - - def __init__(self): - unittest.TestCase.__init__(self) - self._id = "None" - - def setUp(self): - self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - "aead_aes_siv_cmac_test.json", - "Wycheproof AEAD SIV") - - def shortDescription(self): - return self._id - - def test_encrypt(self, tv): - self._id = "Wycheproof Encrypt AEAD-AES-SIV Test #" + str(tv.id) - - cipher = AES.new(tv.key, AES.MODE_SIV, nonce=tv.iv) - cipher.update(tv.aad) - ct, tag = cipher.encrypt_and_digest(tv.msg) - if tv.valid: - self.assertEqual(ct, tv.ct) - self.assertEqual(tag, tv.tag) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt AEAD-AES-SIV Test #" + str(tv.id) - - cipher = AES.new(tv.key, AES.MODE_SIV, nonce=tv.iv) - cipher.update(tv.aad) - try: - pt = cipher.decrypt_and_verify(tv.ct, tv.tag) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - - def runTest(self): - - for tv in self.tv: - self.test_encrypt(tv) - self.test_decrypt(tv) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(SivTests) - tests += list_test_cases(SivFSMTests) - tests += [ 
TestVectors() ] - tests += [ TestVectorsWycheproof() ] - tests += [ TestVectorsWycheproof2() ] - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Salsa20.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Salsa20.py deleted file mode 100644 index a444906..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Salsa20.py +++ /dev/null @@ -1,367 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/Salsa20.py: Self-test for the Salsa20 stream cipher -# -# Written in 2013 by Fabrizio Tarizzo -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Cipher.Salsa20""" - -import unittest - -from Cryptodome.Util.py3compat import bchr - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Cipher import Salsa20 - -from .common import make_stream_tests - -# This is a list of (plaintext, ciphertext, key[, description[, params]]) -# tuples. 
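The test_data list below holds the ECRYPT verified vectors for the Salsa20 stream cipher; outside the test harness, the same API reduces to a few calls. A minimal sketch assuming pycryptodomex; the key size and message are illustrative.

# Sketch of the Salsa20 stream-cipher API covered by the removed tests
# (key must be 16 or 32 bytes, nonce 8 bytes).
from Cryptodome.Cipher import Salsa20
from Cryptodome.Random import get_random_bytes

key = get_random_bytes(32)
cipher = Salsa20.new(key=key)          # an 8-byte nonce is generated automatically
ct = cipher.encrypt(b"attack at dawn")

decipher = Salsa20.new(key=key, nonce=cipher.nonce)
assert decipher.decrypt(ct) == b"attack at dawn"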
-test_data = [ - # Test vectors are taken from - # http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/verified.test-vectors - ( '00' * 512, - '4dfa5e481da23ea09a31022050859936da52fcee218005164f267cb65f5cfd7f' - + '2b4f97e0ff16924a52df269515110a07f9e460bc65ef95da58f740b7d1dbb0aa' - + 'd64cec189c7eb8c6bbf3d7376c80a481d43e628701f6a27afb9fe23919f24114' - + '8db44f70d7063efcc3dd55a0893a613c3c6fe1c127bd6f59910589293bb6ef9e' - + 'e24819066dee1a64f49b0bbad5988635272b169af861f85df881939f29ada6fd' - + '0241410e8d332ae4798d929434a2630de451ec4e0169694cbaa7ebb121ea6a2b' - + 'da9c1581f429e0a00f7d67e23b730676783b262e8eb43a25f55fb90b3e753aef' - + '8c6713ec66c51881111593ccb3e8cb8f8de124080501eeeb389c4bcb6977cf95' - + '7d5789631eb4554400e1e025935dfa7b3e9039d61bdc58a8697d36815bf1985c' - + 'efdf7ae112e5bb81e37ecf0616ce7147fc08a93a367e08631f23c03b00a8da2f' - + 'aa5024e5c8d30aca43fc2d5082067b21b234bc741d68fb292c6012c3764ccee3' - + '1e364a5403e00cfee338a21a01e7d3cefd5a770ca0ab48c435ea6116435f7ad8' - + '30b217b49f978a68e207ed9f462af7fb195b2115fe8f24f152e4ddc32202d6f2' - + 'b52fafbcfbc202d8a259a611e901d3f62d065eb13f09bbc45cd45119b843efaa' - + 'b375703739daced4dd4059fd71c3c47fc2f9939670fad4a46066adcc6a564578' - + '3308b90ffb72be04a6b147cbe38cc0c3b9267c296a92a7c69873f9f263be9703', - '80000000000000000000000000000000', - '128 bits key, set 1, vector 0', - dict (iv='00'*8)), - - ( '00' * 512, - 'e3be8fdd8beca2e3ea8ef9475b29a6e7003951e1097a5c38d23b7a5fad9f6844' - + 'b22c97559e2723c7cbbd3fe4fc8d9a0744652a83e72a9c461876af4d7ef1a117' - + '8da2b74eef1b6283e7e20166abcae538e9716e4669e2816b6b20c5c356802001' - + 'cc1403a9a117d12a2669f456366d6ebb0f1246f1265150f793cdb4b253e348ae' - + '203d89bc025e802a7e0e00621d70aa36b7e07cb1e7d5b38d5e222b8b0e4b8407' - + '0142b1e29504767d76824850320b5368129fdd74e861b498e3be8d16f2d7d169' - + '57be81f47b17d9ae7c4ff15429a73e10acf250ed3a90a93c711308a74c6216a9' - + 'ed84cd126da7f28e8abf8bb63517e1ca98e712f4fb2e1a6aed9fdc73291faa17' - + '958211c4ba2ebd5838c635edb81f513a91a294e194f1c039aeec657dce40aa7e' - + '7c0af57cacefa40c9f14b71a4b3456a63e162ec7d8d10b8ffb1810d71001b618' - + '2f9f73da53b85405c11f7b2d890fa8ae0c7f2e926d8a98c7ec4e91b65120e988' - + '349631a700c6facec3471cb0413656e75e309456584084d7e12c5b43a41c43ed' - + '9a048abd9b880da65f6a665a20fe7b77cd292fe62cae644b7f7df69f32bdb331' - + '903e6505ce44fdc293920c6a9ec7057e23df7dad298f82ddf4efb7fdc7bfc622' - + '696afcfd0cddcc83c7e77f11a649d79acdc3354e9635ff137e929933a0bd6f53' - + '77efa105a3a4266b7c0d089d08f1e855cc32b15b93784a36e56a76cc64bc8477', - '8000000000000000000000000000000000000000000000000000000000000000', - '256 bits key, set 1, vector 0', - dict (iv='00'*8)), - - ( '00' * 512, - '169060ccb42bea7bee4d8012a02f3635eb7bca12859fa159cd559094b3507db8' - + '01735d1a1300102a9c9415546829cbd2021ba217b39b81d89c55b13d0c603359' - + '3f84159a3c84f4b4f4a0edcd9d38ff261a737909e0b66d68b5cac496f3a5be99' - + 'cb12c321ab711afaab36cc0947955e1a9bb952ed54425e7711279fbc81bb83f5' - + '6e55cea44e6daddb05858a153ea6213b3350c12aa1a83ef2726f09485fa71790' - + 'f9b9f922c7dda1113b1f9d56658ed3402803f511bc1f122601d5e7f0ff036e23' - + '23ef24bb24195b9fd574823cd8a40c29d86bd35c191e2038779ff696c712b6d8' - + '2e7014dbe1ac5d527af076c088c4a8d44317958189f6ef54933a7e0816b5b916' - + 'd8f12ed8afe9422b85e5cc9b8adec9d6cfabe8dbc1082bccc02f5a7266aa074c' - + 'a284e583a35837798cc0e69d4ce937653b8cdd65ce414b89138615ccb165ad19' - + '3c6b9c3d05eef4be921a10ea811fe61d11c6867600188e065daff90b509ec56b' - + 'd41e7e8968c478c78d590c2d2ee24ea009c8f49bc3d81672cfc47895a9e21c9a' - + 
'471ebf8e294bee5d2de436ac8d052bf31111b345f1da23c3a4d13b9fc5f0900a' - + 'a298f98f538973b8fad40d4d159777de2cfe2a3dead1645ddb49794827dba040' - + 'f70a0ff4ecd155e0f033604693a51e2363880e2ecf98699e7174af7c2c6b0fc6' - + '59ae329599a3949272a37b9b2183a0910922a3f325ae124dcbdd735364055ceb', - '09090909090909090909090909090909', - '128 bits key, set 2, vector 9', - dict (iv='00'*8)), - - ( '00' * 512, - '7041e747ceb22ed7812985465f50333124f971da1c5d6efe5ca201b886f31046' - + 'e757e5c3ec914f60ed1f6bce2819b6810953f12b8ba1199bf82d746a8b8a88f1' - + '142002978ec4c35b95dc2c82990f9e847a0ab45f2ca72625f5190c820f29f3aa' - + 'f5f0b5572b06b70a144f2a240c3b3098d4831fa1ce1459f8d1df226a6a79b0ab' - + '41e91799ef31b5ff3d756c19126b19025858ee70fbd69f2be955cb011c005e31' - + '32b271b378f39b0cb594e95c99ce6ff17735a541891845bbf0450afcb4a850b9' - + '4ee90afb713ae7e01295c74381180a3816d7020d5a396c0d97aaa783eaabb6ec' - + '44d5111157f2212d1b1b8fca7893e8b520cd482418c272ab119b569a2b9598eb' - + '355624d12e79adab81153b58cd22eaf1b2a32395dedc4a1c66f4d274070b9800' - + 'ea95766f0245a8295f8aadb36ddbbdfa936417c8dbc6235d19494036964d3e70' - + 'b125b0f800c3d53881d9d11e7970f827c2f9556935cd29e927b0aceb8cae5fd4' - + '0fd88a8854010a33db94c96c98735858f1c5df6844f864feaca8f41539313e7f' - + '3c0610214912cd5e6362197646207e2d64cd5b26c9dfe0822629dcbeb16662e8' - + '9ff5bf5cf2e499138a5e27bd5027329d0e68ddf53103e9e409523662e27f61f6' - + '5cf38c1232023e6a6ef66c315bcb2a4328642faabb7ca1e889e039e7c444b34b' - + 'b3443f596ac730f3df3dfcdb343c307c80f76e43e8898c5e8f43dc3bb280add0', - '0909090909090909090909090909090909090909090909090909090909090909', - '256 bits key, set 2, vector 9', - dict (iv='00'*8)), - - ( '00' * 1024, - '71daee5142d0728b41b6597933ebf467e43279e30978677078941602629cbf68' - + 'b73d6bd2c95f118d2b3e6ec955dabb6dc61c4143bc9a9b32b99dbe6866166dc0' - + '8631b7d6553050303d7252c264d3a90d26c853634813e09ad7545a6ce7e84a5d' - + 'fc75ec43431207d5319970b0faadb0e1510625bb54372c8515e28e2accf0a993' - + '0ad15f431874923d2a59e20d9f2a5367dba6051564f150287debb1db536ff9b0' - + '9ad981f25e5010d85d76ee0c305f755b25e6f09341e0812f95c94f42eead346e' - + '81f39c58c5faa2c88953dc0cac90469db2063cb5cdb22c9eae22afbf0506fca4' - + '1dc710b846fbdfe3c46883dd118f3a5e8b11b6afd9e71680d8666557301a2daa' - + 'fb9496c559784d35a035360885f9b17bd7191977deea932b981ebdb29057ae3c' - + '92cfeff5e6c5d0cb62f209ce342d4e35c69646ccd14e53350e488bb310a32f8b' - + '0248e70acc5b473df537ced3f81a014d4083932bedd62ed0e447b6766cd2604b' - + '706e9b346c4468beb46a34ecf1610ebd38331d52bf33346afec15eefb2a7699e' - + '8759db5a1f636a48a039688e39de34d995df9f27ed9edc8dd795e39e53d9d925' - + 'b278010565ff665269042f05096d94da3433d957ec13d2fd82a0066283d0d1ee' - + 'b81bf0ef133b7fd90248b8ffb499b2414cd4fa003093ff0864575a43749bf596' - + '02f26c717fa96b1d057697db08ebc3fa664a016a67dcef8807577cc3a09385d3' - + 'f4dc79b34364bb3b166ce65fe1dd28e3950fe6fa81063f7b16ce1c0e6daac1f8' - + '188455b77752045e863c9b256ad92bc6e2d08314c5bba191c274f42dfbb3d652' - + 'bb771956555e880f84cd8b827a4c5a52f3a099fa0259bd4aac3efd541f191170' - + '4412d6e85fbcc628b335875b9fef24807f6e1bc66c3186159e1e7f5a13913e02' - + 'd241ce2efdbcaa275039fb14eac5923d17ffbc7f1abd3b45e92127575bfbabf9' - + '3a257ebef0aa1437b326e41b585af572f7239c33b32981a1577a4f629b027e1e' - + 'b49d58cc497e944d79cef44357c2bf25442ab779651e991147bf79d6fd3a8868' - + '0cd3b1748e07fd10d78aceef6db8a5e563570d40127f754146c34a440f2a991a' - + '23fa39d365141f255041f2135c5cba4373452c114da1801bacca38610e3a6524' - + '2b822d32de4ab5a7d3cf9b61b37493c863bd12e2cae10530cddcda2cb7a5436b' - + 
'ef8988d4d24e8cdc31b2d2a3586340bc5141f8f6632d0dd543bfed81eb471ba1' - + 'f3dc2225a15ffddcc03eb48f44e27e2aa390598adf83f15c6608a5f18d4dfcf0' - + 'f547d467a4d70b281c83a595d7660d0b62de78b9cca023cca89d7b1f83484638' - + '0e228c25f049184a612ef5bb3d37454e6cfa5b10dceda619d898a699b3c8981a' - + '173407844bb89b4287bf57dd6600c79e352c681d74b03fa7ea0d7bf6ad69f8a6' - + '8ecb001963bd2dd8a2baa0083ec09751cd9742402ad716be16d5c052304cfca1', - '0F62B5085BAE0154A7FA4DA0F34699EC', - '128 bits key, Set 6, vector# 3', - dict (iv='288FF65DC42B92F9')), - - ( '00' * 1024, - '5e5e71f90199340304abb22a37b6625bf883fb89ce3b21f54a10b81066ef87da' - + '30b77699aa7379da595c77dd59542da208e5954f89e40eb7aa80a84a6176663f' - + 'd910cde567cf1ff60f7040548d8f376bfd1f44c4774aac37410ede7d5c3463fc' - + '4508a603201d8495ad257894e5eb1914b53e8da5e4bf2bc83ac87ce55cc67df7' - + '093d9853d2a83a9c8be969175df7c807a17156df768445dd0874a9271c6537f5' - + 'ce0466473582375f067fa4fcdaf65dbc0139cd75e8c21a482f28c0fb8c3d9f94' - + '22606cc8e88fe28fe73ec3cb10ff0e8cc5f2a49e540f007265c65b7130bfdb98' - + '795b1df9522da46e48b30e55d9f0d787955ece720205b29c85f3ad9be33b4459' - + '7d21b54d06c9a60b04b8e640c64e566e51566730e86cf128ab14174f91bd8981' - + 'a6fb00fe587bbd6c38b5a1dfdb04ea7e61536fd229f957aa9b070ca931358e85' - + '11b92c53c523cb54828fb1513c5636fa9a0645b4a3c922c0db94986d92f314ff' - + '7852c03b231e4dceea5dd8cced621869cff818daf3c270ff3c8be2e5c74be767' - + 'a4e1fdf3327a934fe31e46df5a74ae2021cee021d958c4f615263d99a5ddae7f' - + 'eab45e6eccbafefe4761c57750847b7e75ee2e2f14333c0779ce4678f47b1e1b' - + '760a03a5f17d6e91d4b42313b3f1077ee270e432fe04917ed1fc8babebf7c941' - + '42b80dfb44a28a2a3e59093027606f6860bfb8c2e5897078cfccda7314c70035' - + 'f137de6f05daa035891d5f6f76e1df0fce1112a2ff0ac2bd3534b5d1bf4c7165' - + 'fb40a1b6eacb7f295711c4907ae457514a7010f3a342b4427593d61ba993bc59' - + '8bd09c56b9ee53aac5dd861fa4b4bb53888952a4aa9d8ca8671582de716270e1' - + '97375b3ee49e51fa2bf4ef32015dd9a764d966aa2ae541592d0aa650849e99ca' - + '5c6c39beebf516457cc32fe4c105bff314a12f1ec94bdf4d626f5d9b1cbbde42' - + 'e5733f0885765ba29e2e82c829d312f5fc7e180679ac84826c08d0a644b326d0' - + '44da0fdcc75fa53cfe4ced0437fa4df5a7ecbca8b4cb7c4a9ecf9a60d00a56eb' - + '81da52adc21f508dbb60a9503a3cc94a896616d86020d5b0e5c637329b6d396a' - + '41a21ba2c4a9493cf33fa2d4f10f77d5b12fdad7e478ccfe79b74851fc96a7ca' - + '6320c5efd561a222c0ab0fb44bbda0e42149611d2262bb7d1719150fa798718a' - + '0eec63ee297cad459869c8b0f06c4e2b56cbac03cd2605b2a924efedf85ec8f1' - + '9b0b6c90e7cbd933223ffeb1b3a3f9677657905829294c4c70acdb8b0891b47d' - + '0875d0cd6c0f4efe2917fc44b581ef0d1e4280197065d07da34ab33283364552' - + 'efad0bd9257b059acdd0a6f246812feb69e7e76065f27dbc2eee94da9cc41835' - + 'bf826e36e5cebe5d4d6a37a6a666246290ce51a0c082718ab0ec855668db1add' - + 'a658e5f257e0db39384d02e6145c4c00eaa079098f6d820d872de711b6ed08cf', - '0F62B5085BAE0154A7FA4DA0F34699EC3F92E5388BDE3184D72A7DD02376C91C', - '256 bits key, Set 6, vector# 3', - dict (iv='288FF65DC42B92F9')), - -] - - -class KeyLength(unittest.TestCase): - - def runTest(self): - - nonce = bchr(0) * 8 - for key_length in (15, 30, 33): - key = bchr(1) * key_length - self.assertRaises(ValueError, Salsa20.new, key, nonce) - - -class NonceTests(unittest.TestCase): - - def test_invalid_nonce_length(self): - key = bchr(1) * 16 - self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 7) - self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 9) - - def test_default_nonce(self): - - cipher1 = Salsa20.new(bchr(1) * 16) - cipher2 = Salsa20.new(bchr(1) * 16) - self.assertEqual(len(cipher1.nonce), 
8) - self.assertNotEqual(cipher1.nonce, cipher2.nonce) - - -class ByteArrayTest(unittest.TestCase): - """Verify we can encrypt or decrypt bytearrays""" - - def runTest(self): - - data = b"0123" - key = b"9" * 32 - nonce = b"t" * 8 - - # Encryption - data_ba = bytearray(data) - key_ba = bytearray(key) - nonce_ba = bytearray(nonce) - - cipher1 = Salsa20.new(key=key, nonce=nonce) - ct = cipher1.encrypt(data) - - cipher2 = Salsa20.new(key=key_ba, nonce=nonce_ba) - key_ba[:1] = b'\xFF' - nonce_ba[:1] = b'\xFF' - ct_test = cipher2.encrypt(data_ba) - - self.assertEqual(ct, ct_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decryption - key_ba = bytearray(key) - nonce_ba = bytearray(nonce) - ct_ba = bytearray(ct) - - cipher3 = Salsa20.new(key=key_ba, nonce=nonce_ba) - key_ba[:1] = b'\xFF' - nonce_ba[:1] = b'\xFF' - pt_test = cipher3.decrypt(ct_ba) - - self.assertEqual(data, pt_test) - - -class MemoryviewTest(unittest.TestCase): - """Verify we can encrypt or decrypt bytearrays""" - - def runTest(self): - - data = b"0123" - key = b"9" * 32 - nonce = b"t" * 8 - - # Encryption - data_mv = memoryview(bytearray(data)) - key_mv = memoryview(bytearray(key)) - nonce_mv = memoryview(bytearray(nonce)) - - cipher1 = Salsa20.new(key=key, nonce=nonce) - ct = cipher1.encrypt(data) - - cipher2 = Salsa20.new(key=key_mv, nonce=nonce_mv) - key_mv[:1] = b'\xFF' - nonce_mv[:1] = b'\xFF' - ct_test = cipher2.encrypt(data_mv) - - self.assertEqual(ct, ct_test) - self.assertEqual(cipher1.nonce, cipher2.nonce) - - # Decryption - key_mv = memoryview(bytearray(key)) - nonce_mv = memoryview(bytearray(nonce)) - ct_mv = memoryview(bytearray(ct)) - - cipher3 = Salsa20.new(key=key_mv, nonce=nonce_mv) - key_mv[:1] = b'\xFF' - nonce_mv[:1] = b'\xFF' - pt_test = cipher3.decrypt(ct_mv) - - self.assertEqual(data, pt_test) - - -class TestOutput(unittest.TestCase): - - def runTest(self): - # Encrypt/Decrypt data and test output parameter - - key = b'4' * 32 - nonce = b'5' * 8 - cipher = Salsa20.new(key=key, nonce=nonce) - - pt = b'5' * 300 - ct = cipher.encrypt(pt) - - output = bytearray(len(pt)) - cipher = Salsa20.new(key=key, nonce=nonce) - res = cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - self.assertEqual(res, None) - - cipher = Salsa20.new(key=key, nonce=nonce) - res = cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - self.assertEqual(res, None) - - output = memoryview(bytearray(len(pt))) - cipher = Salsa20.new(key=key, nonce=nonce) - cipher.encrypt(pt, output=output) - self.assertEqual(ct, output) - - cipher = Salsa20.new(key=key, nonce=nonce) - cipher.decrypt(ct, output=output) - self.assertEqual(pt, output) - - cipher = Salsa20.new(key=key, nonce=nonce) - self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*len(pt)) - - cipher = Salsa20.new(key=key, nonce=nonce) - self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*len(ct)) - - shorter_output = bytearray(len(pt) - 1) - - cipher = Salsa20.new(key=key, nonce=nonce) - self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) - - cipher = Salsa20.new(key=key, nonce=nonce) - self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) - - -def get_tests(config={}): - tests = make_stream_tests(Salsa20, "Salsa20", test_data) - tests.append(KeyLength()) - tests += list_test_cases(NonceTests) - tests.append(ByteArrayTest()) - tests.append(MemoryviewTest()) - tests.append(TestOutput()) - - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: 
unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_pkcs1_15.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_pkcs1_15.py deleted file mode 100644 index 12c09dd..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_pkcs1_15.py +++ /dev/null @@ -1,283 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 encryption -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from __future__ import print_function - -import unittest - -from Cryptodome.PublicKey import RSA -from Cryptodome.SelfTest.st_common import list_test_cases, a2b_hex -from Cryptodome import Random -from Cryptodome.Cipher import PKCS1_v1_5 as PKCS -from Cryptodome.Util.py3compat import b -from Cryptodome.Util.number import bytes_to_long, long_to_bytes -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof - - -def rws(t): - """Remove white spaces, tabs, and new lines from a string""" - for c in ['\n', '\t', ' ']: - t = t.replace(c, '') - return t - - -def t2b(t): - """Convert a text string with bytes in hex form to a byte string""" - clean = b(rws(t)) - if len(clean) % 2 == 1: - raise ValueError("Even number of characters expected") - return a2b_hex(clean) - - -class PKCS1_15_Tests(unittest.TestCase): - - def setUp(self): - self.rng = Random.new().read - self.key1024 = RSA.generate(1024, self.rng) - - # List of tuples with test data for PKCS#1 v1.5. 
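The PKCS#1 v1.5 test data that follows pairs a fixed RSA key with known plaintext, ciphertext and random bytes; in application code the same cipher boils down to the sketch below, assuming pycryptodomex. The sentinel passed to decrypt() is returned verbatim when the padding check fails, which is why the removed tests compare against it rather than catching an exception; the 2048-bit key and message here are illustrative.

# Sketch of the PKCS#1 v1.5 encryption API exercised by the removed tests.
from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import PKCS1_v1_5
from Cryptodome.Random import get_random_bytes

key = RSA.generate(2048)
cipher = PKCS1_v1_5.new(key)

message = b"hello"
ct = cipher.encrypt(message)

sentinel = get_random_bytes(16)        # returned instead of raising on bad padding
pt = PKCS1_v1_5.new(key).decrypt(ct, sentinel)
assert pt == message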
- # Each tuple is made up by: - # Item #0: dictionary with RSA key component, or key to import - # Item #1: plaintext - # Item #2: ciphertext - # Item #3: random data - - _testData = ( - - # - # Generated with openssl 0.9.8o - # - ( - # Private key - '''-----BEGIN RSA PRIVATE KEY----- -MIICXAIBAAKBgQDAiAnvIAOvqVwJTaYzsKnefZftgtXGE2hPJppGsWl78yz9jeXY -W/FxX/gTPURArNhdnhP6n3p2ZaDIBrO2zizbgIXs0IsljTTcr4vnI8fMXzyNUOjA -zP3nzMqZDZK6757XQAobOssMkBFqRWwilT/3DsBhRpl3iMUhF+wvpTSHewIDAQAB -AoGAC4HV/inOrpgTvSab8Wj0riyZgQOZ3U3ZpSlsfR8ra9Ib9Uee3jCYnKscu6Gk -y6zI/cdt8EPJ4PuwAWSNJzbpbVaDvUq25OD+CX8/uRT08yBS4J8TzBitZJTD4lS7 -atdTnKT0Wmwk+u8tDbhvMKwnUHdJLcuIsycts9rwJVapUtkCQQDvDpx2JMun0YKG -uUttjmL8oJ3U0m3ZvMdVwBecA0eebZb1l2J5PvI3EJD97eKe91Nsw8T3lwpoN40k -IocSVDklAkEAzi1HLHE6EzVPOe5+Y0kGvrIYRRhncOb72vCvBZvD6wLZpQgqo6c4 -d3XHFBBQWA6xcvQb5w+VVEJZzw64y25sHwJBAMYReRl6SzL0qA0wIYrYWrOt8JeQ -8mthulcWHXmqTgC6FEXP9Es5GD7/fuKl4wqLKZgIbH4nqvvGay7xXLCXD/ECQH9a -1JYNMtRen5unSAbIOxRcKkWz92F0LKpm9ZW/S9vFHO+mBcClMGoKJHiuQxLBsLbT -NtEZfSJZAeS2sUtn3/0CQDb2M2zNBTF8LlM0nxmh0k9VGm5TVIyBEMcipmvOgqIs -HKukWBcq9f/UOmS0oEhai/6g+Uf7VHJdWaeO5LzuvwU= ------END RSA PRIVATE KEY-----''', - # Plaintext - '''THIS IS PLAINTEXT\x0A''', - # Ciphertext - '''3f dc fd 3c cd 5c 9b 12 af 65 32 e3 f7 d0 da 36 - 8f 8f d9 e3 13 1c 7f c8 b3 f9 c1 08 e4 eb 79 9c - 91 89 1f 96 3b 94 77 61 99 a4 b1 ee 5d e6 17 c9 - 5d 0a b5 63 52 0a eb 00 45 38 2a fb b0 71 3d 11 - f7 a1 9e a7 69 b3 af 61 c0 bb 04 5b 5d 4b 27 44 - 1f 5b 97 89 ba 6a 08 95 ee 4f a2 eb 56 64 e5 0f - da 7c f9 9a 61 61 06 62 ed a0 bc 5f aa 6c 31 78 - 70 28 1a bb 98 3c e3 6a 60 3c d1 0b 0f 5a f4 75''', - # Random data - '''eb d7 7d 86 a4 35 23 a3 54 7e 02 0b 42 1d - 61 6c af 67 b8 4e 17 56 80 66 36 04 64 34 26 8a - 47 dd 44 b3 1a b2 17 60 f4 91 2e e2 b5 95 64 cc - f9 da c8 70 94 54 86 4c ef 5b 08 7d 18 c4 ab 8d - 04 06 33 8f ca 15 5f 52 60 8a a1 0c f5 08 b5 4c - bb 99 b8 94 25 04 9c e6 01 75 e6 f9 63 7a 65 61 - 13 8a a7 47 77 81 ae 0d b8 2c 4d 50 a5''' - ), - ) - - def testEncrypt1(self): - for test in self._testData: - # Build the key - key = RSA.importKey(test[0]) - # RNG that takes its random numbers from a pool given - # at initialization - class randGen: - def __init__(self, data): - self.data = data - self.idx = 0 - def __call__(self, N): - r = self.data[self.idx:self.idx+N] - self.idx += N - return r - # The real test - cipher = PKCS.new(key, randfunc=randGen(t2b(test[3]))) - ct = cipher.encrypt(b(test[1])) - self.assertEqual(ct, t2b(test[2])) - - def testEncrypt2(self): - # Verify that encryption fail if plaintext is too long - pt = '\x00'*(128-11+1) - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.encrypt, pt) - - def testVerify1(self): - for test in self._testData: - key = RSA.importKey(test[0]) - expected_pt = b(test[1]) - ct = t2b(test[2]) - cipher = PKCS.new(key) - - # The real test - pt = cipher.decrypt(ct, None) - self.assertEqual(pt, expected_pt) - - pt = cipher.decrypt(ct, b'\xFF' * len(expected_pt)) - self.assertEqual(pt, expected_pt) - - def testVerify2(self): - # Verify that decryption fails if ciphertext is not as long as - # RSA modulus - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.decrypt, '\x00'*127, "---") - self.assertRaises(ValueError, cipher.decrypt, '\x00'*129, "---") - - # Verify that decryption fails if there are less then 8 non-zero padding - # bytes - pt = b('\x00\x02' + '\xFF'*7 + '\x00' + '\x45'*118) - pt_int = bytes_to_long(pt) - ct_int = self.key1024._encrypt(pt_int) - ct = long_to_bytes(ct_int, 128) - 
self.assertEqual(b"---", cipher.decrypt(ct, b"---")) - - def testEncryptVerify1(self): - # Encrypt/Verify messages of length [0..RSAlen-11] - # and therefore padding [8..117] - for pt_len in range(0, 128 - 11 + 1): - pt = self.rng(pt_len) - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(pt) - pt2 = cipher.decrypt(ct, b'\xAA' * pt_len) - self.assertEqual(pt, pt2) - - def test_encrypt_verify_exp_pt_len(self): - - cipher = PKCS.new(self.key1024) - pt = b'5' * 16 - ct = cipher.encrypt(pt) - sentinel = b'\xAA' * 16 - - pt_A = cipher.decrypt(ct, sentinel, 16) - self.assertEqual(pt, pt_A) - - pt_B = cipher.decrypt(ct, sentinel, 15) - self.assertEqual(sentinel, pt_B) - - pt_C = cipher.decrypt(ct, sentinel, 17) - self.assertEqual(sentinel, pt_C) - - def testByteArray(self): - pt = b"XER" - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(bytearray(pt)) - pt2 = cipher.decrypt(bytearray(ct), '\xFF' * len(pt)) - self.assertEqual(pt, pt2) - - def testMemoryview(self): - pt = b"XER" - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(memoryview(bytearray(pt))) - pt2 = cipher.decrypt(memoryview(bytearray(ct)), b'\xFF' * len(pt)) - self.assertEqual(pt, pt2) - - def test_return_type(self): - pt = b"XYZ" - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(pt) - self.assertTrue(isinstance(ct, bytes)) - pt2 = cipher.decrypt(ct, b'\xAA' * 3) - self.assertTrue(isinstance(pt2, bytes)) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings, skip_slow_tests): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._skip_slow_tests = skip_slow_tests - self._id = "None" - - def load_tests(self, filename): - - def filter_rsa(group): - return RSA.import_key(group['privateKeyPem']) - - result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - filename, - "Wycheproof PKCS#1v1.5 (%s)" % filename, - group_tag={'rsa_key': filter_rsa} - ) - return result - - def setUp(self): - self.tv = [] - self.tv.extend(self.load_tests("rsa_pkcs1_2048_test.json")) - if not self._skip_slow_tests: - self.tv.extend(self.load_tests("rsa_pkcs1_3072_test.json")) - self.tv.extend(self.load_tests("rsa_pkcs1_4096_test.json")) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt PKCS#1v1.5 Test #%s" % tv.id - sentinel = b'\xAA' * max(3, len(tv.msg)) - cipher = PKCS.new(tv.rsa_key) - try: - pt = cipher.decrypt(tv.ct, sentinel=sentinel) - except ValueError: - assert not tv.valid - else: - if pt == sentinel: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - self.warn(tv) - - def runTest(self): - - for tv in self.tv: - self.test_decrypt(tv) - - -def get_tests(config={}): - skip_slow_tests = not config.get('slow_tests') - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(PKCS1_15_Tests) - tests += [TestVectorsWycheproof(wycheproof_warnings, skip_slow_tests)] - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_pkcs1_oaep.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_pkcs1_oaep.py deleted file mode 100644 index aa00c9c..0000000 --- 
a/venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_pkcs1_oaep.py +++ /dev/null @@ -1,506 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -import unittest - -from Cryptodome.SelfTest.st_common import list_test_cases, a2b_hex -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof - -from Cryptodome.PublicKey import RSA -from Cryptodome.Cipher import PKCS1_OAEP as PKCS -from Cryptodome.Hash import MD2, MD5, SHA1, SHA256, RIPEMD160, SHA224, SHA384, SHA512 -from Cryptodome import Random -from Cryptodome.Signature.pss import MGF1 - -from Cryptodome.Util.py3compat import b, bchr - - -def rws(t): - """Remove white spaces, tabs, and new lines from a string""" - for c in ['\n', '\t', ' ']: - t = t.replace(c, '') - return t - - -def t2b(t): - """Convert a text string with bytes in hex form to a byte string""" - clean = rws(t) - if len(clean) % 2 == 1: - raise ValueError("Even number of characters expected") - return a2b_hex(clean) - - -class PKCS1_OAEP_Tests(unittest.TestCase): - - def setUp(self): - self.rng = Random.new().read - self.key1024 = RSA.generate(1024, self.rng) - - # List of tuples with test data for PKCS#1 OAEP - # Each tuple is made up by: - # Item #0: dictionary with RSA key component - # Item #1: plaintext - # Item #2: ciphertext - # Item #3: random data (=seed) - # Item #4: hash object - - _testData = ( - - # - # From in oaep-int.txt to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 - 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f - b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 - 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f - af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 - ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e - e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f - e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', - # Public key - 'e':'11', - # In the test vector, only p and q were given... 
- # d is computed offline as e^{-1} mod (p-1)(q-1) - 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 - 668b42784813801579641b29410b3c7998d6bc465745e5c3 - 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 - 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef - 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b - 8883fe4463a4bc85b1cb3c1''' - } - , - # Plaintext - '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', - # Ciphertext - '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 - 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 - 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 - 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb - 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 - 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 - da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d - 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', - # Random - '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 - f0 6c b5 8f''', - # Hash - SHA1, - ), - - # - # From in oaep-vect.txt to be found in Example 1.1 - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 - 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab - c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 - 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 - f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 - c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14 - 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 - 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', - 'e':'''01 00 01''', - 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c - 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd - 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b - 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 - fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 - 2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf - b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de - f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' - } - , - # Plaintext - '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23 - 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', - # Ciphertext - '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f - 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb - 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 - 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f - a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 - d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 - d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c - 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', - # Random - '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 - dd a0 a5 ef''', - SHA1 - ), - - # - # From in oaep-vect.txt to be found in Example 2.1 - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 - e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e - 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba - 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d - 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d - de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 - 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 - 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f - 45''', - 'e':'''01 00 01''', - 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa - 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 - c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 - 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc - d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 - 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 - 41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25 - 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 
24 62 fe 39''' - }, - # Plaintext - '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 - 52 b5 8c f1 d9 2f ec 57 0b ee e7''', - # Ciphertext - '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 - 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a - 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a - ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 - 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d - 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a - bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f - ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 - 0e''', - # Random - '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 - e7 1b 17 82''', - SHA1 - ), - - # - # From in oaep-vect.txt to be found in Example 10.1 - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d - db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 - df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 - 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 - 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d - 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f - a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 - 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 - 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a - 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 - c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 - 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef - a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 - 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 - d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc - 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', - 'e':'''01 00 01''', - 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 - 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 - 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 5f c0 - d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d - 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 - 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be - be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 - 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 - 2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 - 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 - fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 - 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 - 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 - 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 - 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e - 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' - }, - # Plaintext - '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 - b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', - # Ciphertext - '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 - 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 - ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee - e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 - 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a - 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc - c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb - ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 - 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec - 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 - fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 - 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 - ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 - ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 - 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 - bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', - # Random - '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 - aa 63 bd 33''', - SHA1 - ), - ) - - def testEncrypt1(self): - # Verify encryption using all test vectors - 
for test in self._testData: - # Build the key - comps = [int(rws(test[0][x]), 16) for x in ('n', 'e')] - key = RSA.construct(comps) - - # RNG that takes its random numbers from a pool given - # at initialization - class randGen: - - def __init__(self, data): - self.data = data - self.idx = 0 - - def __call__(self, N): - r = self.data[self.idx:N] - self.idx += N - return r - - # The real test - cipher = PKCS.new(key, test[4], randfunc=randGen(t2b(test[3]))) - ct = cipher.encrypt(t2b(test[1])) - self.assertEqual(ct, t2b(test[2])) - - def testEncrypt2(self): - # Verify that encryption fails if plaintext is too long - pt = '\x00'*(128-2*20-2+1) - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.encrypt, pt) - - def testDecrypt1(self): - # Verify decryption using all test vectors - for test in self._testData: - # Build the key - comps = [int(rws(test[0][x]),16) for x in ('n', 'e', 'd')] - key = RSA.construct(comps) - # The real test - cipher = PKCS.new(key, test[4]) - pt = cipher.decrypt(t2b(test[2])) - self.assertEqual(pt, t2b(test[1])) - - def testDecrypt2(self): - # Simplest possible negative tests - for ct_size in (127, 128, 129): - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) - - def testEncryptDecrypt1(self): - # Encrypt/Decrypt messages of length [0..128-2*20-2] - for pt_len in range(0, 128-2*20-2): - pt = self.rng(pt_len) - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(pt) - pt2 = cipher.decrypt(ct) - self.assertEqual(pt, pt2) - - def testEncryptDecrypt2(self): - # Helper function to monitor what's requested from RNG - global asked - - def localRng(N): - global asked - asked += N - return self.rng(N) - - # Verify that OAEP is friendly to all hashes - for hashmod in (MD2, MD5, SHA1, SHA256, RIPEMD160): - # Verify that encrypt() asks for as many random bytes - # as the hash output size - asked = 0 - pt = self.rng(40) - cipher = PKCS.new(self.key1024, hashmod, randfunc=localRng) - ct = cipher.encrypt(pt) - self.assertEqual(cipher.decrypt(ct), pt) - self.assertEqual(asked, hashmod.digest_size) - - def testEncryptDecrypt3(self): - # Verify that OAEP supports labels - pt = self.rng(35) - xlabel = self.rng(22) - cipher = PKCS.new(self.key1024, label=xlabel) - ct = cipher.encrypt(pt) - self.assertEqual(cipher.decrypt(ct), pt) - - def testEncryptDecrypt4(self): - # Verify that encrypt() uses the custom MGF - global mgfcalls - # Helper function to monitor what's requested from MGF - - def newMGF(seed, maskLen): - global mgfcalls - mgfcalls += 1 - return b'\x00' * maskLen - - mgfcalls = 0 - pt = self.rng(32) - cipher = PKCS.new(self.key1024, mgfunc=newMGF) - ct = cipher.encrypt(pt) - self.assertEqual(mgfcalls, 2) - self.assertEqual(cipher.decrypt(ct), pt) - - def testByteArray(self): - pt = b("XER") - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(bytearray(pt)) - pt2 = cipher.decrypt(bytearray(ct)) - self.assertEqual(pt, pt2) - - def testMemoryview(self): - pt = b("XER") - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(memoryview(bytearray(pt))) - pt2 = cipher.decrypt(memoryview(bytearray(ct))) - self.assertEqual(pt, pt2) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings, skip_slow_tests): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._skip_slow_tests = skip_slow_tests - self._id = "None" - - def load_tests(self, filename): - - def filter_rsa(group): - return RSA.import_key(group['privateKeyPem']) - - def 
filter_sha(group): - if group['sha'] == "SHA-1": - return SHA1 - elif group['sha'] == "SHA-224": - return SHA224 - elif group['sha'] == "SHA-256": - return SHA256 - elif group['sha'] == "SHA-384": - return SHA384 - elif group['sha'] == "SHA-512": - return SHA512 - else: - raise ValueError("Unknown sha " + group['sha']) - - def filter_mgf(group): - if group['mgfSha'] == "SHA-1": - return lambda x, y: MGF1(x, y, SHA1) - elif group['mgfSha'] == "SHA-224": - return lambda x, y: MGF1(x, y, SHA224) - elif group['mgfSha'] == "SHA-256": - return lambda x, y: MGF1(x, y, SHA256) - elif group['mgfSha'] == "SHA-384": - return lambda x, y: MGF1(x, y, SHA384) - elif group['mgfSha'] == "SHA-512": - return lambda x, y: MGF1(x, y, SHA512) - else: - raise ValueError("Unknown mgf/sha " + group['mgfSha']) - - def filter_algo(group): - return "%s with MGF1/%s" % (group['sha'], group['mgfSha']) - - result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), - filename, - "Wycheproof PKCS#1 OAEP (%s)" % filename, - group_tag={'rsa_key': filter_rsa, - 'hash_mod': filter_sha, - 'mgf': filter_mgf, - 'algo': filter_algo} - ) - return result - - def setUp(self): - self.tv = [] - self.tv.extend(self.load_tests("rsa_oaep_2048_sha1_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha224_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha224_mgf1sha224_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha256_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha256_mgf1sha256_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha384_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha384_mgf1sha384_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha512_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_2048_sha512_mgf1sha512_test.json")) - if not self._skip_slow_tests: - self.tv.extend(self.load_tests("rsa_oaep_3072_sha256_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_3072_sha256_mgf1sha256_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_3072_sha512_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_3072_sha512_mgf1sha512_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_4096_sha256_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_4096_sha256_mgf1sha256_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha1_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha512_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha512_test.json")) - self.tv.extend(self.load_tests("rsa_oaep_misc_test.json")) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_decrypt(self, tv): - self._id = "Wycheproof Decrypt %s Test #%s" % (tv.algo, tv.id) - - cipher = PKCS.new(tv.rsa_key, hashAlgo=tv.hash_mod, mgfunc=tv.mgf, label=tv.label) - try: - pt = cipher.decrypt(tv.ct) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.assertEqual(pt, tv.msg) - self.warn(tv) - - def runTest(self): - - for tv in self.tv: - self.test_decrypt(tv) - - -def get_tests(config={}): - skip_slow_tests = not config.get('slow_tests') - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(PKCS1_OAEP_Tests) - tests += [TestVectorsWycheproof(wycheproof_warnings, skip_slow_tests)] 
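The Wycheproof loader above maps each test group's hash and MGF names onto Cryptodome objects before replaying the vectors; for reference, ordinary OAEP usage with an explicit hash and label looks like this. A sketch assuming pycryptodomex; SHA-256 and the b"invoice" label are illustrative choices, and both sides must agree on hash, MGF and label or decrypt() raises ValueError.

# Sketch of the PKCS#1 OAEP API exercised by the removed tests.
from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import PKCS1_OAEP
from Cryptodome.Hash import SHA256

key = RSA.generate(2048)
cipher = PKCS1_OAEP.new(key, hashAlgo=SHA256, label=b"invoice")
ct = cipher.encrypt(b"secret")

# Decryption needs the same hash, MGF and label; a mismatch raises ValueError
plain = PKCS1_OAEP.new(key, hashAlgo=SHA256, label=b"invoice").decrypt(ct)
assert plain == b"secret"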
- return tests - - -if __name__ == '__main__': - def suite(): - unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__init__.py deleted file mode 100644 index 5f5b999..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/__init__.py: Self-test for hash modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test for hash modules""" - -__revision__ = "$Id$" - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_CMAC; tests += test_CMAC.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_RIPEMD160; tests += test_RIPEMD160.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA1; tests += test_SHA1.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA3_224; tests += test_SHA3_224.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA3_256; tests += test_SHA3_256.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA3_384; tests += test_SHA3_384.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHA3_512; tests += test_SHA3_512.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_keccak; tests += test_keccak.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_SHAKE; tests += test_SHAKE.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_BLAKE2; tests += test_BLAKE2.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_Poly1305; tests += test_Poly1305.get_tests(config=config) - from 
Cryptodome.SelfTest.Hash import test_cSHAKE; tests += test_cSHAKE.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_KMAC; tests += test_KMAC.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_TupleHash; tests += test_TupleHash.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_KangarooTwelve; tests += test_KangarooTwelve.get_tests(config=config) - from Cryptodome.SelfTest.Hash import test_TurboSHAKE; tests += test_TurboSHAKE.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 0d43581..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/common.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/common.cpython-312.pyc deleted file mode 100644 index 399b043..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/common.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_BLAKE2.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_BLAKE2.cpython-312.pyc deleted file mode 100644 index 708d0e7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_BLAKE2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_CMAC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_CMAC.cpython-312.pyc deleted file mode 100644 index 8536750..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_CMAC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_HMAC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_HMAC.cpython-312.pyc deleted file mode 100644 index 3792030..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_HMAC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_KMAC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_KMAC.cpython-312.pyc deleted file mode 100644 index 4c0192b..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_KMAC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_KangarooTwelve.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_KangarooTwelve.cpython-312.pyc deleted file mode 100644 index 4367df7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_KangarooTwelve.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD2.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD2.cpython-312.pyc deleted file mode 100644 index f1129c1..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD2.cpython-312.pyc 
and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD4.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD4.cpython-312.pyc deleted file mode 100644 index 4e81b8f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD4.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD5.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD5.cpython-312.pyc deleted file mode 100644 index 524e1e0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_MD5.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_Poly1305.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_Poly1305.cpython-312.pyc deleted file mode 100644 index f21d059..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_Poly1305.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_RIPEMD160.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_RIPEMD160.cpython-312.pyc deleted file mode 100644 index 5fad895..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_RIPEMD160.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA1.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA1.cpython-312.pyc deleted file mode 100644 index 0a27429..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA1.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA224.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA224.cpython-312.pyc deleted file mode 100644 index c5ca261..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA224.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA256.cpython-312.pyc deleted file mode 100644 index 7650611..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA384.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA384.cpython-312.pyc deleted file mode 100644 index 21e6f00..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA384.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_224.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_224.cpython-312.pyc deleted file mode 100644 index cc54034..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_224.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_256.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_256.cpython-312.pyc deleted file mode 100644 index 392e0c1..0000000 Binary files 
a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_256.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_384.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_384.cpython-312.pyc deleted file mode 100644 index 60cc4c2..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_384.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_512.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_512.cpython-312.pyc deleted file mode 100644 index 3f471ed..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA3_512.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA512.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA512.cpython-312.pyc deleted file mode 100644 index 2197196..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHA512.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHAKE.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHAKE.cpython-312.pyc deleted file mode 100644 index beb7663..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_SHAKE.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_TupleHash.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_TupleHash.cpython-312.pyc deleted file mode 100644 index cb6b63c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_TupleHash.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_TurboSHAKE.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_TurboSHAKE.cpython-312.pyc deleted file mode 100644 index f12b7f6..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_TurboSHAKE.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_cSHAKE.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_cSHAKE.cpython-312.pyc deleted file mode 100644 index 874e3ba..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_cSHAKE.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_keccak.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_keccak.cpython-312.pyc deleted file mode 100644 index 96263ad..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/__pycache__/test_keccak.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/common.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/common.py deleted file mode 100644 index 4ed9234..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/common.py +++ /dev/null @@ -1,290 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/common.py: Common code for Cryptodome.SelfTest.Hash -# -# Written in 2008 by Dwayne C. 
Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-testing for PyCryptodome hash modules""" - -import re -import sys -import unittest -import binascii -import Cryptodome.Hash -from binascii import hexlify, unhexlify -from Cryptodome.Util.py3compat import b, tobytes -from Cryptodome.Util.strxor import strxor_c - -def t2b(hex_string): - shorter = re.sub(br'\s+', b'', tobytes(hex_string)) - return unhexlify(shorter) - - -class HashDigestSizeSelfTest(unittest.TestCase): - - def __init__(self, hashmod, description, expected, extra_params): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.expected = expected - self.description = description - self.extra_params = extra_params - - def shortDescription(self): - return self.description - - def runTest(self): - if "truncate" not in self.extra_params: - self.assertTrue(hasattr(self.hashmod, "digest_size")) - self.assertEqual(self.hashmod.digest_size, self.expected) - h = self.hashmod.new(**self.extra_params) - self.assertTrue(hasattr(h, "digest_size")) - self.assertEqual(h.digest_size, self.expected) - - -class HashSelfTest(unittest.TestCase): - - def __init__(self, hashmod, description, expected, input, extra_params): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.expected = expected.lower() - self.input = input - self.description = description - self.extra_params = extra_params - - def shortDescription(self): - return self.description - - def runTest(self): - h = self.hashmod.new(**self.extra_params) - h.update(self.input) - - out1 = binascii.b2a_hex(h.digest()) - out2 = h.hexdigest() - - h = self.hashmod.new(self.input, **self.extra_params) - - out3 = h.hexdigest() - out4 = binascii.b2a_hex(h.digest()) - - # PY3K: hexdigest() should return str(), and digest() bytes - self.assertEqual(self.expected, out1) # h = .new(); h.update(data); h.digest() - if sys.version_info[0] == 2: - self.assertEqual(self.expected, out2) # h = .new(); h.update(data); h.hexdigest() - self.assertEqual(self.expected, out3) # h = .new(data); h.hexdigest() - else: - self.assertEqual(self.expected.decode(), out2) # h = .new(); h.update(data); h.hexdigest() - self.assertEqual(self.expected.decode(), out3) # h = .new(data); h.hexdigest() - self.assertEqual(self.expected, out4) # h = .new(data); h.digest() - - # Verify that the .new() method produces a fresh hash object, except - # for MD5 and SHA1, which are hashlib objects. (But test any .new() - # method that does exist.) 
- if self.hashmod.__name__ not in ('Cryptodome.Hash.MD5', 'Cryptodome.Hash.SHA1') or hasattr(h, 'new'): - h2 = h.new() - h2.update(self.input) - out5 = binascii.b2a_hex(h2.digest()) - self.assertEqual(self.expected, out5) - - -class HashTestOID(unittest.TestCase): - def __init__(self, hashmod, oid, extra_params): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.oid = oid - self.extra_params = extra_params - - def runTest(self): - h = self.hashmod.new(**self.extra_params) - self.assertEqual(h.oid, self.oid) - - -class ByteArrayTest(unittest.TestCase): - - def __init__(self, module, extra_params): - unittest.TestCase.__init__(self) - self.module = module - self.extra_params = extra_params - - def runTest(self): - data = b("\x00\x01\x02") - - # Data can be a bytearray (during initialization) - ba = bytearray(data) - - h1 = self.module.new(data, **self.extra_params) - h2 = self.module.new(ba, **self.extra_params) - ba[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a bytearray (during operation) - ba = bytearray(data) - - h1 = self.module.new(**self.extra_params) - h2 = self.module.new(**self.extra_params) - - h1.update(data) - h2.update(ba) - - ba[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - -class MemoryViewTest(unittest.TestCase): - - def __init__(self, module, extra_params): - unittest.TestCase.__init__(self) - self.module = module - self.extra_params = extra_params - - def runTest(self): - - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in get_mv_ro, get_mv_rw: - - # Data can be a memoryview (during initialization) - mv = get_mv(data) - - h1 = self.module.new(data, **self.extra_params) - h2 = self.module.new(mv, **self.extra_params) - if not mv.readonly: - mv[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a memoryview (during operation) - mv = get_mv(data) - - h1 = self.module.new(**self.extra_params) - h2 = self.module.new(**self.extra_params) - h1.update(data) - h2.update(mv) - if not mv.readonly: - mv[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - -class MACSelfTest(unittest.TestCase): - - def __init__(self, module, description, result, data, key, params): - unittest.TestCase.__init__(self) - self.module = module - self.result = t2b(result) - self.data = t2b(data) - self.key = t2b(key) - self.params = params - self.description = description - - def shortDescription(self): - return self.description - - def runTest(self): - - result_hex = hexlify(self.result) - - # Verify result - h = self.module.new(self.key, **self.params) - h.update(self.data) - self.assertEqual(self.result, h.digest()) - self.assertEqual(hexlify(self.result).decode('ascii'), h.hexdigest()) - - # Verify that correct MAC does not raise any exception - h.verify(self.result) - h.hexverify(result_hex) - - # Verify that incorrect MAC does raise ValueError exception - wrong_mac = strxor_c(self.result, 255) - self.assertRaises(ValueError, h.verify, wrong_mac) - self.assertRaises(ValueError, h.hexverify, "4556") - - # Verify again, with data passed to new() - h = self.module.new(self.key, self.data, **self.params) - self.assertEqual(self.result, h.digest()) - self.assertEqual(hexlify(self.result).decode('ascii'), h.hexdigest()) - - # Test .copy() - try: - h = self.module.new(self.key, self.data, **self.params) - h2 = h.copy() - h3 = h.copy() - - # Verify that changing the copy does not change the original - 
h2.update(b"bla") - self.assertEqual(h3.digest(), self.result) - - # Verify that both can reach the same state - h.update(b"bla") - self.assertEqual(h.digest(), h2.digest()) - except NotImplementedError: - pass - - # PY3K: Check that hexdigest() returns str and digest() returns bytes - self.assertTrue(isinstance(h.digest(), type(b""))) - self.assertTrue(isinstance(h.hexdigest(), type(""))) - - # PY3K: Check that .hexverify() accepts bytes or str - h.hexverify(h.hexdigest()) - h.hexverify(h.hexdigest().encode('ascii')) - - -def make_hash_tests(module, module_name, test_data, digest_size, oid=None, - extra_params={}): - tests = [] - for i in range(len(test_data)): - row = test_data[i] - (expected, input) = map(tobytes,row[0:2]) - if len(row) < 3: - description = repr(input) - else: - description = row[2] - name = "%s #%d: %s" % (module_name, i+1, description) - tests.append(HashSelfTest(module, name, expected, input, extra_params)) - - name = "%s #%d: digest_size" % (module_name, len(test_data) + 1) - tests.append(HashDigestSizeSelfTest(module, name, digest_size, extra_params)) - - if oid is not None: - tests.append(HashTestOID(module, oid, extra_params)) - - tests.append(ByteArrayTest(module, extra_params)) - - tests.append(MemoryViewTest(module, extra_params)) - - return tests - - -def make_mac_tests(module, module_name, test_data): - tests = [] - for i, row in enumerate(test_data): - if len(row) == 4: - (key, data, results, description, params) = list(row) + [ {} ] - else: - (key, data, results, description, params) = row - name = "%s #%d: %s" % (module_name, i+1, description) - tests.append(MACSelfTest(module, name, results, data, key, params)) - return tests - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_BLAKE2.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_BLAKE2.py deleted file mode 100644 index e5ed63b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_BLAKE2.py +++ /dev/null @@ -1,482 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import os -import re -import unittest -import warnings -from binascii import unhexlify, hexlify - -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.Util.strxor import strxor_c -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import BLAKE2b, BLAKE2s - - -class Blake2Test(unittest.TestCase): - - def test_new_positive(self): - - h = self.BLAKE2.new(digest_bits=self.max_bits) - for new_func in self.BLAKE2.new, h.new: - - for dbits in range(8, self.max_bits + 1, 8): - hobj = new_func(digest_bits=dbits) - self.assertEqual(hobj.digest_size, dbits // 8) - - for dbytes in range(1, self.max_bytes + 1): - hobj = new_func(digest_bytes=dbytes) - self.assertEqual(hobj.digest_size, dbytes) - - digest1 = new_func(data=b"\x90", digest_bytes=self.max_bytes).digest() - digest2 = new_func(digest_bytes=self.max_bytes).update(b"\x90").digest() - self.assertEqual(digest1, digest2) - - new_func(data=b"A", key=b"5", digest_bytes=self.max_bytes) - - hobj = h.new() - self.assertEqual(hobj.digest_size, self.max_bytes) - - def test_new_negative(self): - - h = self.BLAKE2.new(digest_bits=self.max_bits) - for new_func in self.BLAKE2.new, h.new: - self.assertRaises(TypeError, new_func, - digest_bytes=self.max_bytes, - digest_bits=self.max_bits) - self.assertRaises(ValueError, new_func, digest_bytes=0) - self.assertRaises(ValueError, new_func, - digest_bytes=self.max_bytes + 1) - self.assertRaises(ValueError, new_func, digest_bits=7) - self.assertRaises(ValueError, new_func, digest_bits=15) - self.assertRaises(ValueError, new_func, - digest_bits=self.max_bits + 1) - self.assertRaises(TypeError, new_func, - digest_bytes=self.max_bytes, - key=u"string") - self.assertRaises(TypeError, new_func, - digest_bytes=self.max_bytes, - data=u"string") - - def test_default_digest_size(self): - digest = self.BLAKE2.new(data=b'abc').digest() - self.assertEqual(len(digest), self.max_bytes) - - def test_update(self): - pieces = [b"\x0A" * 200, b"\x14" * 300] - h = self.BLAKE2.new(digest_bytes=self.max_bytes) - h.update(pieces[0]).update(pieces[1]) - digest = h.digest() - h = self.BLAKE2.new(digest_bytes=self.max_bytes) - h.update(pieces[0] + pieces[1]) - self.assertEqual(h.digest(), digest) - - def test_update_negative(self): - h = self.BLAKE2.new(digest_bytes=self.max_bytes) - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = self.BLAKE2.new(digest_bytes=self.max_bytes) - digest = h.digest() - - # hexdigest does not change the state - self.assertEqual(h.digest(), digest) - # digest returns a byte string - self.assertTrue(isinstance(digest, type(b"digest"))) - - def test_update_after_digest(self): - msg = b"rrrrttt" - - # Normally, update() cannot be done after digest() - h = self.BLAKE2.new(digest_bits=256, data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = self.BLAKE2.new(digest_bits=256, data=msg).digest() - - # With the proper flag, it is allowed - h = self.BLAKE2.new(digest_bits=256, data=msg[:4], update_after_digest=True) - self.assertEqual(h.digest(), dig1) - # ... 
and the subsequent digest applies to the entire message - # up to that point - h.update(msg[4:]) - self.assertEqual(h.digest(), dig2) - - def test_hex_digest(self): - mac = self.BLAKE2.new(digest_bits=self.max_bits) - digest = mac.digest() - hexdigest = mac.hexdigest() - - # hexdigest is equivalent to digest - self.assertEqual(hexlify(digest), tobytes(hexdigest)) - # hexdigest does not change the state - self.assertEqual(mac.hexdigest(), hexdigest) - # hexdigest returns a string - self.assertTrue(isinstance(hexdigest, type("digest"))) - - def test_verify(self): - h = self.BLAKE2.new(digest_bytes=self.max_bytes, key=b"4") - mac = h.digest() - h.verify(mac) - wrong_mac = strxor_c(mac, 255) - self.assertRaises(ValueError, h.verify, wrong_mac) - - def test_hexverify(self): - h = self.BLAKE2.new(digest_bytes=self.max_bytes, key=b"4") - mac = h.hexdigest() - h.hexverify(mac) - self.assertRaises(ValueError, h.hexverify, "4556") - - def test_oid(self): - - prefix = "1.3.6.1.4.1.1722.12.2." + self.oid_variant + "." - - for digest_bits in self.digest_bits_oid: - h = self.BLAKE2.new(digest_bits=digest_bits) - self.assertEqual(h.oid, prefix + str(digest_bits // 8)) - - h = self.BLAKE2.new(digest_bits=digest_bits, key=b"secret") - self.assertRaises(AttributeError, lambda: h.oid) - - for digest_bits in (8, self.max_bits): - if digest_bits in self.digest_bits_oid: - continue - self.assertRaises(AttributeError, lambda: h.oid) - - def test_bytearray(self): - - key = b'0' * 16 - data = b"\x00\x01\x02" - - # Data and key can be a bytearray (during initialization) - key_ba = bytearray(key) - data_ba = bytearray(data) - - h1 = self.BLAKE2.new(data=data, key=key) - h2 = self.BLAKE2.new(data=data_ba, key=key_ba) - key_ba[:1] = b'\xFF' - data_ba[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a bytearray (during operation) - data_ba = bytearray(data) - - h1 = self.BLAKE2.new() - h2 = self.BLAKE2.new() - h1.update(data) - h2.update(data_ba) - data_ba[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - def test_memoryview(self): - - key = b'0' * 16 - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in (get_mv_ro, get_mv_rw): - - # Data and key can be a memoryview (during initialization) - key_mv = get_mv(key) - data_mv = get_mv(data) - - h1 = self.BLAKE2.new(data=data, key=key) - h2 = self.BLAKE2.new(data=data_mv, key=key_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - key_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a memoryview (during operation) - data_mv = get_mv(data) - - h1 = self.BLAKE2.new() - h2 = self.BLAKE2.new() - h1.update(data) - h2.update(data_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - -class Blake2bTest(Blake2Test): - #: Module - BLAKE2 = BLAKE2b - #: Max output size (in bits) - max_bits = 512 - #: Max output size (in bytes) - max_bytes = 64 - #: Bit size of the digests for which an ASN OID exists - digest_bits_oid = (160, 256, 384, 512) - # http://tools.ietf.org/html/draft-saarinen-blake2-02 - oid_variant = "1" - - -class Blake2sTest(Blake2Test): - #: Module - BLAKE2 = BLAKE2s - #: Max output size (in bits) - max_bits = 256 - #: Max output size (in bytes) - max_bytes = 32 - #: Bit size of the digests for which an ASN OID exists - digest_bits_oid = (128, 160, 224, 256) - # http://tools.ietf.org/html/draft-saarinen-blake2-02 - oid_variant = "2" - - 
-class Blake2OfficialTestVector(unittest.TestCase): - - def _load_tests(self, test_vector_file): - expected = "in" - test_vectors = [] - with open(test_vector_file, "rt") as test_vector_fd: - for line_number, line in enumerate(test_vector_fd): - - if line.strip() == "" or line.startswith("#"): - continue - - res = re.match("%s:\t([0-9A-Fa-f]*)" % expected, line) - if not res: - raise ValueError("Incorrect test vector format (line %d)" - % line_number) - - if res.group(1): - bin_value = unhexlify(tobytes(res.group(1))) - else: - bin_value = b"" - if expected == "in": - input_data = bin_value - expected = "key" - elif expected == "key": - key = bin_value - expected = "hash" - else: - result = bin_value - expected = "in" - test_vectors.append((input_data, key, result)) - return test_vectors - - def setUp(self): - - dir_comps = ("Hash", self.name) - file_name = self.name.lower() + "-test.txt" - self.description = "%s tests" % self.name - - try: - import pycryptodome_test_vectors # type: ignore - except ImportError: - warnings.warn("Warning: skipping extended tests for %s" % self.name, - UserWarning) - self.test_vectors = [] - return - - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - self.test_vectors = self._load_tests(full_file_name) - - def runTest(self): - for (input_data, key, result) in self.test_vectors: - mac = self.BLAKE2.new(key=key, digest_bytes=self.max_bytes) - mac.update(input_data) - self.assertEqual(mac.digest(), result) - - -class Blake2bOfficialTestVector(Blake2OfficialTestVector): - #: Module - BLAKE2 = BLAKE2b - #: Hash name - name = "BLAKE2b" - #: Max digest size - max_bytes = 64 - - -class Blake2sOfficialTestVector(Blake2OfficialTestVector): - #: Module - BLAKE2 = BLAKE2s - #: Hash name - name = "BLAKE2s" - #: Max digest size - max_bytes = 32 - - -class Blake2TestVector1(unittest.TestCase): - - def _load_tests(self, test_vector_file): - test_vectors = [] - with open(test_vector_file, "rt") as test_vector_fd: - for line_number, line in enumerate(test_vector_fd): - if line.strip() == "" or line.startswith("#"): - continue - res = re.match("digest: ([0-9A-Fa-f]*)", line) - if not res: - raise ValueError("Incorrect test vector format (line %d)" - % line_number) - - test_vectors.append(unhexlify(tobytes(res.group(1)))) - return test_vectors - - def setUp(self): - dir_comps = ("Hash", self.name) - file_name = "tv1.txt" - self.description = "%s tests" % self.name - - try: - import pycryptodome_test_vectors - except ImportError: - warnings.warn("Warning: skipping extended tests for %s" % self.name, - UserWarning) - self.test_vectors = [] - return - - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - self.test_vectors = self._load_tests(full_file_name) - - def runTest(self): - - for tv in self.test_vectors: - digest_bytes = len(tv) - next_data = b"" - for _ in range(100): - h = self.BLAKE2.new(digest_bytes=digest_bytes) - h.update(next_data) - next_data = h.digest() + next_data - self.assertEqual(h.digest(), tv) - - -class Blake2bTestVector1(Blake2TestVector1): - #: Module - BLAKE2 = BLAKE2b - #: Hash name - name = "BLAKE2b" - - -class Blake2sTestVector1(Blake2TestVector1): - #: Module - BLAKE2 = BLAKE2s - #: Hash name - name = "BLAKE2s" - - -class Blake2TestVector2(unittest.TestCase): - - def _load_tests(self, test_vector_file): - test_vectors = [] - with open(test_vector_file, "rt") as 
test_vector_fd: - for line_number, line in enumerate(test_vector_fd): - if line.strip() == "" or line.startswith("#"): - continue - res = re.match(r"digest\(([0-9]+)\): ([0-9A-Fa-f]*)", line) - if not res: - raise ValueError("Incorrect test vector format (line %d)" - % line_number) - key_size = int(res.group(1)) - result = unhexlify(tobytes(res.group(2))) - test_vectors.append((key_size, result)) - return test_vectors - - def setUp(self): - dir_comps = ("Hash", self.name) - file_name = "tv2.txt" - self.description = "%s tests" % self.name - - try: - import pycryptodome_test_vectors # type: ignore - except ImportError: - warnings.warn("Warning: skipping extended tests for %s" % self.name, - UserWarning) - self.test_vectors = [] - return - - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - self.test_vectors = self._load_tests(full_file_name) - - def runTest(self): - - for key_size, result in self.test_vectors: - next_data = b"" - for _ in range(100): - h = self.BLAKE2.new(digest_bytes=self.max_bytes, - key=b"A" * key_size) - h.update(next_data) - next_data = h.digest() + next_data - self.assertEqual(h.digest(), result) - - -class Blake2bTestVector2(Blake2TestVector1): - #: Module - BLAKE2 = BLAKE2b - #: Hash name - name = "BLAKE2b" - #: Max digest size in bytes - max_bytes = 64 - - -class Blake2sTestVector2(Blake2TestVector1): - #: Module - BLAKE2 = BLAKE2s - #: Hash name - name = "BLAKE2s" - #: Max digest size in bytes - max_bytes = 32 - - -def get_tests(config={}): - tests = [] - - tests += list_test_cases(Blake2bTest) - tests.append(Blake2bOfficialTestVector()) - tests.append(Blake2bTestVector1()) - tests.append(Blake2bTestVector2()) - - tests += list_test_cases(Blake2sTest) - tests.append(Blake2sOfficialTestVector()) - tests.append(Blake2sTestVector1()) - tests.append(Blake2sTestVector2()) - - return tests - - -if __name__ == '__main__': - import unittest - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_CMAC.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_CMAC.py deleted file mode 100644 index f88f1cd..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_CMAC.py +++ /dev/null @@ -1,448 +0,0 @@ -# -# SelfTest/Hash/CMAC.py: Self-test for the CMAC module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.CMAC""" - -import json -import unittest -from binascii import unhexlify - -from Cryptodome.Util.py3compat import tobytes - -from Cryptodome.Hash import CMAC -from Cryptodome.Cipher import AES, DES3 -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.Util.strxor import strxor - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof - -# This is a list of (key, data, result, description, module) tuples. -test_data = [ - - ## Test vectors from RFC 4493 ## - ## They are also in NIST SP 800 38B D.2 ## - ( '2b7e151628aed2a6abf7158809cf4f3c', - '', - 'bb1d6929e95937287fa37d129b756746', - 'RFC 4493 #1', - AES - ), - - ( '2b7e151628aed2a6abf7158809cf4f3c', - '6bc1bee22e409f96e93d7e117393172a', - '070a16b46b4d4144f79bdd9dd04a287c', - 'RFC 4493 #2', - AES - ), - - ( '2b7e151628aed2a6abf7158809cf4f3c', - '6bc1bee22e409f96e93d7e117393172a'+ - 'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411', - 'dfa66747de9ae63030ca32611497c827', - 'RFC 4493 #3', - AES - ), - - ( '2b7e151628aed2a6abf7158809cf4f3c', - '6bc1bee22e409f96e93d7e117393172a'+ - 'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+ - 'f69f2445df4f9b17ad2b417be66c3710', - '51f0bebf7e3b9d92fc49741779363cfe', - 'RFC 4493 #4', - AES - ), - - ## The rest of Appendix D of NIST SP 800 38B - ## was not totally correct. - ## Values in Examples 14, 15, 18, and 19 were wrong.
- ## The updated test values are published in: - ## http://csrc.nist.gov/publications/nistpubs/800-38B/Updated_CMAC_Examples.pdf - - ( '8e73b0f7da0e6452c810f32b809079e5'+ - '62f8ead2522c6b7b', - '', - 'd17ddf46adaacde531cac483de7a9367', - 'NIST SP 800 38B D.2 Example 5', - AES - ), - - ( '8e73b0f7da0e6452c810f32b809079e5'+ - '62f8ead2522c6b7b', - '6bc1bee22e409f96e93d7e117393172a', - '9e99a7bf31e710900662f65e617c5184', - 'NIST SP 800 38B D.2 Example 6', - AES - ), - - ( '8e73b0f7da0e6452c810f32b809079e5'+ - '62f8ead2522c6b7b', - '6bc1bee22e409f96e93d7e117393172a'+ - 'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411', - '8a1de5be2eb31aad089a82e6ee908b0e', - 'NIST SP 800 38B D.2 Example 7', - AES - ), - - ( '8e73b0f7da0e6452c810f32b809079e5'+ - '62f8ead2522c6b7b', - '6bc1bee22e409f96e93d7e117393172a'+ - 'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+ - 'f69f2445df4f9b17ad2b417be66c3710', - 'a1d5df0eed790f794d77589659f39a11', - 'NIST SP 800 38B D.2 Example 8', - AES - ), - - ( '603deb1015ca71be2b73aef0857d7781'+ - '1f352c073b6108d72d9810a30914dff4', - '', - '028962f61b7bf89efc6b551f4667d983', - 'NIST SP 800 38B D.3 Example 9', - AES - ), - - ( '603deb1015ca71be2b73aef0857d7781'+ - '1f352c073b6108d72d9810a30914dff4', - '6bc1bee22e409f96e93d7e117393172a', - '28a7023f452e8f82bd4bf28d8c37c35c', - 'NIST SP 800 38B D.3 Example 10', - AES - ), - - ( '603deb1015ca71be2b73aef0857d7781'+ - '1f352c073b6108d72d9810a30914dff4', - '6bc1bee22e409f96e93d7e117393172a'+ - 'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411', - 'aaf3d8f1de5640c232f5b169b9c911e6', - 'NIST SP 800 38B D.3 Example 11', - AES - ), - - ( '603deb1015ca71be2b73aef0857d7781'+ - '1f352c073b6108d72d9810a30914dff4', - '6bc1bee22e409f96e93d7e117393172a'+ - 'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+ - 'f69f2445df4f9b17ad2b417be66c3710', - 'e1992190549f6ed5696a2c056c315410', - 'NIST SP 800 38B D.3 Example 12', - AES - ), - - ( '8aa83bf8cbda1062'+ - '0bc1bf19fbb6cd58'+ - 'bc313d4a371ca8b5', - '', - 'b7a688e122ffaf95', - 'NIST SP 800 38B D.4 Example 13', - DES3 - ), - - ( '8aa83bf8cbda1062'+ - '0bc1bf19fbb6cd58'+ - 'bc313d4a371ca8b5', - '6bc1bee22e409f96', - '8e8f293136283797', - 'NIST SP 800 38B D.4 Example 14', - DES3 - ), - - ( '8aa83bf8cbda1062'+ - '0bc1bf19fbb6cd58'+ - 'bc313d4a371ca8b5', - '6bc1bee22e409f96'+ - 'e93d7e117393172a'+ - 'ae2d8a57', - '743ddbe0ce2dc2ed', - 'NIST SP 800 38B D.4 Example 15', - DES3 - ), - - ( '8aa83bf8cbda1062'+ - '0bc1bf19fbb6cd58'+ - 'bc313d4a371ca8b5', - '6bc1bee22e409f96'+ - 'e93d7e117393172a'+ - 'ae2d8a571e03ac9c'+ - '9eb76fac45af8e51', - '33e6b1092400eae5', - 'NIST SP 800 38B D.4 Example 16', - DES3 - ), - - ( '4cf15134a2850dd5'+ - '8a3d10ba80570d38', - '', - 'bd2ebf9a3ba00361', - 'NIST SP 800 38B D.7 Example 17', - DES3 - ), - - ( '4cf15134a2850dd5'+ - '8a3d10ba80570d38', - '6bc1bee22e409f96', - '4ff2ab813c53ce83', - 'NIST SP 800 38B D.7 Example 18', - DES3 - ), - - ( '4cf15134a2850dd5'+ - '8a3d10ba80570d38', - '6bc1bee22e409f96'+ - 'e93d7e117393172a'+ - 'ae2d8a57', - '62dd1b471902bd4e', - 'NIST SP 800 38B D.7 Example 19', - DES3 - ), - - ( '4cf15134a2850dd5'+ - '8a3d10ba80570d38', - '6bc1bee22e409f96'+ - 'e93d7e117393172a'+ - 'ae2d8a571e03ac9c'+ - '9eb76fac45af8e51', - '31b1e431dabc4eb8', - 'NIST SP 800 38B D.7 Example 20', - DES3 - ), - -] - - -def get_tag_random(tag, length): - return SHAKE128.new(data=tobytes(tag)).read(length) - - -class TestCMAC(unittest.TestCase): - - def test_internal_caching(self): - """Verify that internal 
caching is implemented correctly""" - - data_to_mac = get_tag_random("data_to_mac", 128) - key = get_tag_random("key", 16) - ref_mac = CMAC.new(key, msg=data_to_mac, ciphermod=AES).digest() - - # Break up in chunks of different length - # The result must always be the same - for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: - - chunks = [data_to_mac[i:i+chunk_length] for i in - range(0, len(data_to_mac), chunk_length)] - - mac = CMAC.new(key, ciphermod=AES) - for chunk in chunks: - mac.update(chunk) - self.assertEqual(ref_mac, mac.digest()) - - def test_update_after_digest(self): - msg = b"rrrrttt" - key = b"4" * 16 - - # Normally, update() cannot be done after digest() - h = CMAC.new(key, msg[:4], ciphermod=AES) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = CMAC.new(key, msg, ciphermod=AES).digest() - - # With the proper flag, it is allowed - h2 = CMAC.new(key, msg[:4], ciphermod=AES, update_after_digest=True) - self.assertEqual(h2.digest(), dig1) - # ... and the subsequent digest applies to the entire message - # up to that point - h2.update(msg[4:]) - self.assertEqual(h2.digest(), dig2) - - -class ByteArrayTests(unittest.TestCase): - - def runTest(self): - - key = b"0" * 16 - data = b"\x00\x01\x02" - - # Data and key can be a bytearray (during initialization) - key_ba = bytearray(key) - data_ba = bytearray(data) - - h1 = CMAC.new(key, data, ciphermod=AES) - h2 = CMAC.new(key_ba, data_ba, ciphermod=AES) - key_ba[:1] = b'\xFF' - data_ba[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a bytearray (during operation) - key_ba = bytearray(key) - data_ba = bytearray(data) - - h1 = CMAC.new(key, ciphermod=AES) - h2 = CMAC.new(key, ciphermod=AES) - h1.update(data) - h2.update(data_ba) - data_ba[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - -class MemoryViewTests(unittest.TestCase): - - def runTest(self): - - key = b"0" * 16 - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in (get_mv_ro, get_mv_rw): - - # Data and key can be a memoryview (during initialization) - key_mv = get_mv(key) - data_mv = get_mv(data) - - h1 = CMAC.new(key, data, ciphermod=AES) - h2 = CMAC.new(key_mv, data_mv, ciphermod=AES) - if not data_mv.readonly: - key_mv[:1] = b'\xFF' - data_mv[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a memoryview (during operation) - data_mv = get_mv(data) - - h1 = CMAC.new(key, ciphermod=AES) - h2 = CMAC.new(key, ciphermod=AES) - h1.update(data) - h2.update(data_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._id = "None" - - def setUp(self): - - def filter_tag(group): - return group['tagSize'] // 8 - - self.tv = load_test_vectors_wycheproof(("Hash", "wycheproof"), - "aes_cmac_test.json", - "Wycheproof CMAC", - group_tag={'tag_size': filter_tag}) - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_create_mac(self, tv): - self._id = "Wycheproof MAC creation Test #" + str(tv.id) - - try: - tag = CMAC.new(tv.key, tv.msg, ciphermod=AES, mac_len=tv.tag_size).digest() - except 
ValueError as e: - if len(tv.key) not in (16, 24, 32) and "key length" in str(e): - return - raise e - if tv.valid: - self.assertEqual(tag, tv.tag) - self.warn(tv) - - def test_verify_mac(self, tv): - self._id = "Wycheproof MAC verification Test #" + str(tv.id) - - try: - mac = CMAC.new(tv.key, tv.msg, ciphermod=AES, mac_len=tv.tag_size) - except ValueError as e: - if len(tv.key) not in (16, 24, 32) and "key length" in str(e): - return - raise e - try: - mac.verify(tv.tag) - except ValueError: - assert not tv.valid - else: - assert tv.valid - self.warn(tv) - - def runTest(self): - - for tv in self.tv: - self.test_create_mac(tv) - self.test_verify_mac(tv) - - -def get_tests(config={}): - global test_data - import types - from .common import make_mac_tests - - wycheproof_warnings = config.get('wycheproof_warnings') - - # Add new() parameters to the back of each test vector - params_test_data = [] - for row in test_data: - t = list(row) - t[4] = dict(ciphermod=t[4]) - params_test_data.append(t) - - tests = make_mac_tests(CMAC, "CMAC", params_test_data) - tests.append(ByteArrayTests()) - tests.append(list_test_cases(TestCMAC)) - tests.append(MemoryViewTests()) - tests += [ TestVectorsWycheproof(wycheproof_warnings) ] - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_HMAC.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_HMAC.py deleted file mode 100644 index ecec1a8..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_HMAC.py +++ /dev/null @@ -1,548 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/HMAC.py: Self-test for the HMAC module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.HMAC""" - -import unittest -from binascii import hexlify -from Cryptodome.Util.py3compat import tostr, tobytes - -from Cryptodome.Hash import (HMAC, MD5, SHA1, SHA256, - SHA224, SHA384, SHA512, - RIPEMD160, - SHA3_224, SHA3_256, SHA3_384, SHA3_512) - - -hash_modules = dict(MD5=MD5, SHA1=SHA1, SHA256=SHA256, - SHA224=SHA224, SHA384=SHA384, SHA512=SHA512, - RIPEMD160=RIPEMD160, - SHA3_224=SHA3_224, SHA3_256=SHA3_256, - SHA3_384=SHA3_384, SHA3_512=SHA3_512) - -default_hash = None - -def xl(text): - return tostr(hexlify(tobytes(text))) - -# This is a list of (key, data, results, description) tuples. 
-test_data = [ - ## Test vectors from RFC 2202 ## - # Test that the default hashmod is MD5 - ('0b' * 16, - '4869205468657265', - dict(default_hash='9294727a3638bb1c13f48ef8158bfc9d'), - 'default-is-MD5'), - - # Test case 1 (MD5) - ('0b' * 16, - '4869205468657265', - dict(MD5='9294727a3638bb1c13f48ef8158bfc9d'), - 'RFC 2202 #1-MD5 (HMAC-MD5)'), - - # Test case 1 (SHA1) - ('0b' * 20, - '4869205468657265', - dict(SHA1='b617318655057264e28bc0b6fb378c8ef146be00'), - 'RFC 2202 #1-SHA1 (HMAC-SHA1)'), - - # Test case 2 - ('4a656665', - '7768617420646f2079612077616e7420666f72206e6f7468696e673f', - dict(MD5='750c783e6ab0b503eaa86e310a5db738', - SHA1='effcdf6ae5eb2fa2d27416d5f184df9c259a7c79'), - 'RFC 2202 #2 (HMAC-MD5/SHA1)'), - - # Test case 3 (MD5) - ('aa' * 16, - 'dd' * 50, - dict(MD5='56be34521d144c88dbb8c733f0e8b3f6'), - 'RFC 2202 #3-MD5 (HMAC-MD5)'), - - # Test case 3 (SHA1) - ('aa' * 20, - 'dd' * 50, - dict(SHA1='125d7342b9ac11cd91a39af48aa17b4f63f175d3'), - 'RFC 2202 #3-SHA1 (HMAC-SHA1)'), - - # Test case 4 - ('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'cd' * 50, - dict(MD5='697eaf0aca3a3aea3a75164746ffaa79', - SHA1='4c9007f4026250c6bc8414f9bf50c86c2d7235da'), - 'RFC 2202 #4 (HMAC-MD5/SHA1)'), - - # Test case 5 (MD5) - ('0c' * 16, - '546573742057697468205472756e636174696f6e', - dict(MD5='56461ef2342edc00f9bab995690efd4c'), - 'RFC 2202 #5-MD5 (HMAC-MD5)'), - - # Test case 5 (SHA1) - # NB: We do not implement hash truncation, so we only test the full hash here. - ('0c' * 20, - '546573742057697468205472756e636174696f6e', - dict(SHA1='4c1a03424b55e07fe7f27be1d58bb9324a9a5a04'), - 'RFC 2202 #5-SHA1 (HMAC-SHA1)'), - - # Test case 6 - ('aa' * 80, - '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' - + '65204b6579202d2048617368204b6579204669727374', - dict(MD5='6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd', - SHA1='aa4ae5e15272d00e95705637ce8a3b55ed402112'), - 'RFC 2202 #6 (HMAC-MD5/SHA1)'), - - # Test case 7 - ('aa' * 80, - '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' - + '65204b657920616e64204c6172676572205468616e204f6e6520426c6f636b2d' - + '53697a652044617461', - dict(MD5='6f630fad67cda0ee1fb1f562db3aa53e', - SHA1='e8e99d0f45237d786d6bbaa7965c7808bbff1a91'), - 'RFC 2202 #7 (HMAC-MD5/SHA1)'), - - ## Test vectors from RFC 4231 ## - # 4.2. Test Case 1 - ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', - '4869205468657265', - dict(SHA256=''' - b0344c61d8db38535ca8afceaf0bf12b - 881dc200c9833da726e9376c2e32cff7 - '''), - 'RFC 4231 #1 (HMAC-SHA256)'), - - # 4.3. Test Case 2 - Test with a key shorter than the length of the HMAC - # output. - ('4a656665', - '7768617420646f2079612077616e7420666f72206e6f7468696e673f', - dict(SHA256=''' - 5bdcc146bf60754e6a042426089575c7 - 5a003f089d2739839dec58b964ec3843 - '''), - 'RFC 4231 #2 (HMAC-SHA256)'), - - # 4.4. Test Case 3 - Test with a combined length of key and data that is - # larger than 64 bytes (= block-size of SHA-224 and SHA-256). - ('aa' * 20, - 'dd' * 50, - dict(SHA256=''' - 773ea91e36800e46854db8ebd09181a7 - 2959098b3ef8c122d9635514ced565fe - '''), - 'RFC 4231 #3 (HMAC-SHA256)'), - - # 4.5. Test Case 4 - Test with a combined length of key and data that is - # larger than 64 bytes (= block-size of SHA-224 and SHA-256). - ('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'cd' * 50, - dict(SHA256=''' - 82558a389a443c0ea4cc819899f2083a - 85f0faa3e578f8077a2e3ff46729665b - '''), - 'RFC 4231 #4 (HMAC-SHA256)'), - - # 4.6. Test Case 5 - Test with a truncation of output to 128 bits. 
- # - # Not included because we do not implement hash truncation. - # - - # 4.7. Test Case 6 - Test with a key larger than 128 bytes (= block-size of - # SHA-384 and SHA-512). - ('aa' * 131, - '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' - + '65204b6579202d2048617368204b6579204669727374', - dict(SHA256=''' - 60e431591ee0b67f0d8a26aacbf5b77f - 8e0bc6213728c5140546040f0ee37f54 - '''), - 'RFC 4231 #6 (HMAC-SHA256)'), - - # 4.8. Test Case 7 - Test with a key and data that is larger than 128 bytes - # (= block-size of SHA-384 and SHA-512). - ('aa' * 131, - '5468697320697320612074657374207573696e672061206c6172676572207468' - + '616e20626c6f636b2d73697a65206b657920616e642061206c61726765722074' - + '68616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565' - + '647320746f20626520686173686564206265666f7265206265696e6720757365' - + '642062792074686520484d414320616c676f726974686d2e', - dict(SHA256=''' - 9b09ffa71b942fcb27635fbcd5b0e944 - bfdc63644f0713938a7f51535c3a35e2 - '''), - 'RFC 4231 #7 (HMAC-SHA256)'), - - # Test case 8 (SHA224) - ('4a656665', - '7768617420646f2079612077616e74' - + '20666f72206e6f7468696e673f', - dict(SHA224='a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44'), - 'RFC 4634 8.4 SHA224 (HMAC-SHA224)'), - - # Test case 9 (SHA384) - ('4a656665', - '7768617420646f2079612077616e74' - + '20666f72206e6f7468696e673f', - dict(SHA384='af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649'), - 'RFC 4634 8.4 SHA384 (HMAC-SHA384)'), - - # Test case 10 (SHA512) - ('4a656665', - '7768617420646f2079612077616e74' - + '20666f72206e6f7468696e673f', - dict(SHA512='164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737'), - 'RFC 4634 8.4 SHA512 (HMAC-SHA512)'), - - # Test case 11 (RIPEMD) - ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', - xl("Hi There"), - dict(RIPEMD160='24cb4bd67d20fc1a5d2ed7732dcc39377f0a5668'), - 'RFC 2286 #1 (HMAC-RIPEMD)'), - - # Test case 12 (RIPEMD) - (xl("Jefe"), - xl("what do ya want for nothing?"), - dict(RIPEMD160='dda6c0213a485a9e24f4742064a7f033b43c4069'), - 'RFC 2286 #2 (HMAC-RIPEMD)'), - - # Test case 13 (RIPEMD) - ('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - 'dd' * 50, - dict(RIPEMD160='b0b105360de759960ab4f35298e116e295d8e7c1'), - 'RFC 2286 #3 (HMAC-RIPEMD)'), - - # Test case 14 (RIPEMD) - ('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'cd' * 50, - dict(RIPEMD160='d5ca862f4d21d5e610e18b4cf1beb97a4365ecf4'), - 'RFC 2286 #4 (HMAC-RIPEMD)'), - - # Test case 15 (RIPEMD) - ('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', - xl("Test With Truncation"), - dict(RIPEMD160='7619693978f91d90539ae786500ff3d8e0518e39'), - 'RFC 2286 #5 (HMAC-RIPEMD)'), - - # Test case 16 (RIPEMD) - ('aa' * 80, - xl("Test Using Larger Than Block-Size Key - Hash Key First"), - dict(RIPEMD160='6466ca07ac5eac29e1bd523e5ada7605b791fd8b'), - 'RFC 2286 #6 (HMAC-RIPEMD)'), - - # Test case 17 (RIPEMD) - ('aa' * 80, - xl("Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data"), - dict(RIPEMD160='69ea60798d71616cce5fd0871e23754cd75d5a0a'), - 'RFC 2286 #7 (HMAC-RIPEMD)'), - - # From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-224.pdf - ( - '000102030405060708090a0b0c0d0e0f' - '101112131415161718191a1b', - xl('Sample message for keylenblocklen'), - dict(SHA3_224='078695eecc227c636ad31d063a15dd05a7e819a66ec6d8de1e193e59'), - 'NIST CSRC Sample #3 (SHA3-224)' - ), - - # 
From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-256.pdf - ( - '000102030405060708090a0b0c0d0e0f'\ - '101112131415161718191a1b1c1d1e1f', - xl('Sample message for keylenblocklen'), - dict(SHA3_256='9bcf2c238e235c3ce88404e813bd2f3a97185ac6f238c63d6229a00b07974258'), - 'NIST CSRC Sample #3 (SHA3-256)' - ), - - # From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-384.pdf - ( - '000102030405060708090a0b0c0d0e0f'\ - '101112131415161718191a1b1c1d1e1f' - '202122232425262728292a2b2c2d2e2f', - xl('Sample message for keylenblocklen'), - dict(SHA3_384='e5ae4c739f455279368ebf36d4f5354c95aa184c899d3870e460ebc288ef1f9470053f73f7c6da2a71bcaec38ce7d6ac'), - 'NIST CSRC Sample #3 (SHA3-384)' - ), - - # From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-512.pdf - ( - '000102030405060708090a0b0c0d0e0f'\ - '101112131415161718191a1b1c1d1e1f'\ - '202122232425262728292a2b2c2d2e2f'\ - '303132333435363738393a3b3c3d3e3f', - xl('Sample message for keylenblocklen'), - dict(SHA3_512='5f464f5e5b7848e3885e49b2c385f0694985d0e38966242dc4a5fe3fea4b37d46b65ceced5dcf59438dd840bab22269f0ba7febdb9fcf74602a35666b2a32915'), - 'NIST CSRC Sample #3 (SHA3-512)' - ), - -] - - -class HMAC_Module_and_Instance_Test(unittest.TestCase): - """Test the HMAC construction and verify that it does not - matter if you initialize it with a hash module or - with an hash instance. - - See https://bugs.launchpad.net/pycrypto/+bug/1209399 - """ - - def __init__(self, hashmods): - """Initialize the test with a dictionary of hash modules - indexed by their names""" - - unittest.TestCase.__init__(self) - self.hashmods = hashmods - self.description = "" - - def shortDescription(self): - return self.description - - def runTest(self): - key = b"\x90\x91\x92\x93" * 4 - payload = b"\x00" * 100 - - for hashname, hashmod in self.hashmods.items(): - if hashmod is None: - continue - self.description = "Test HMAC in combination with " + hashname - one = HMAC.new(key, payload, hashmod).digest() - two = HMAC.new(key, payload, hashmod.new()).digest() - self.assertEqual(one, two) - - -class HMAC_None(unittest.TestCase): - - def runTest(self): - - key = b"\x04" * 20 - one = HMAC.new(key, b"", SHA1).digest() - two = HMAC.new(key, None, SHA1).digest() - self.assertEqual(one, two) - - -class ByteArrayTests(unittest.TestCase): - - def runTest(self): - - key = b"0" * 16 - data = b"\x00\x01\x02" - - # Data and key can be a bytearray (during initialization) - key_ba = bytearray(key) - data_ba = bytearray(data) - - h1 = HMAC.new(key, data) - h2 = HMAC.new(key_ba, data_ba) - key_ba[:1] = b'\xFF' - data_ba[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a bytearray (during operation) - key_ba = bytearray(key) - data_ba = bytearray(data) - - h1 = HMAC.new(key) - h2 = HMAC.new(key) - h1.update(data) - h2.update(data_ba) - data_ba[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - -class MemoryViewTests(unittest.TestCase): - - def runTest(self): - - key = b"0" * 16 - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in (get_mv_ro, get_mv_rw): - - # Data and key can be a memoryview (during initialization) - key_mv = get_mv(key) - data_mv = get_mv(data) - - h1 = HMAC.new(key, data) - h2 = HMAC.new(key_mv, data_mv) - if not data_mv.readonly: - key_mv[:1] 
= b'\xFF' - data_mv[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a memoryview (during operation) - data_mv = get_mv(data) - - h1 = HMAC.new(key) - h2 = HMAC.new(key) - h1.update(data) - h2.update(data_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - self.assertEqual(h1.digest(), h2.digest()) - - -def get_tests(config={}): - global test_data - import types - from .common import make_mac_tests - - # A test vector contains multiple results, each one for a - # different hash algorithm. - # Here we expand each test vector into multiple ones, - # and add the relevant parameters that will be passed to new() - exp_test_data = [] - for row in test_data: - for modname in row[2].keys(): - t = list(row) - t[2] = row[2][modname] - t.append(dict(digestmod=globals()[modname])) - exp_test_data.append(t) - tests = make_mac_tests(HMAC, "HMAC", exp_test_data) - tests.append(HMAC_Module_and_Instance_Test(hash_modules)) - tests.append(HMAC_None()) - - tests.append(ByteArrayTests()) - tests.append(MemoryViewTests()) - - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_KMAC.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_KMAC.py deleted file mode 100644 index 0543a4c..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_KMAC.py +++ /dev/null @@ -1,346 +0,0 @@ -import unittest -from binascii import unhexlify, hexlify - -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.Util.strxor import strxor_c -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import KMAC128, KMAC256 - - -class KMACTest(unittest.TestCase): - - def new(self, *args, **kwargs): - return self.KMAC.new(key=b'X' * (self.minimum_key_bits // 8), *args, **kwargs) - - def test_new_positive(self): - - key = b'X' * 32 - - h = self.new() - for new_func in self.KMAC.new, h.new: - - for dbytes in range(self.minimum_bytes, 128 + 1): - hobj = new_func(key=key, mac_len=dbytes) - self.assertEqual(hobj.digest_size, dbytes) - - digest1 = new_func(key=key, data=b"\x90").digest() - digest2 = new_func(key=key).update(b"\x90").digest() - self.assertEqual(digest1, digest2) - - new_func(data=b"A", key=key, custom=b"g") - - hobj = h.new(key=key) - self.assertEqual(hobj.digest_size, self.default_bytes) - - def test_new_negative(self): - - h = self.new() - for new_func in self.KMAC.new, h.new: - self.assertRaises(ValueError, new_func, key=b'X'*32, - mac_len=0) - self.assertRaises(ValueError, new_func, key=b'X'*32, - mac_len=self.minimum_bytes - 1) - self.assertRaises(TypeError, new_func, - key=u"string") - self.assertRaises(TypeError, new_func, - data=u"string") - - def test_default_digest_size(self): - digest = self.new(data=b'abc').digest() - self.assertEqual(len(digest), self.default_bytes) - - def test_update(self): - pieces = [b"\x0A" * 200, b"\x14" * 300] - h = self.new() - h.update(pieces[0]).update(pieces[1]) - digest = h.digest() - h = self.new() - h.update(pieces[0] + pieces[1]) - self.assertEqual(h.digest(), digest) - - def test_update_negative(self): - h = self.new() - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = self.new() - digest = h.digest() - - # hexdigest does not change the state - self.assertEqual(h.digest(), digest) - # digest returns a byte string - self.assertTrue(isinstance(digest, type(b"digest"))) - - def test_update_after_digest(self): - msg = b"rrrrttt" - 
- # Normally, update() cannot be done after digest() - h = self.new(mac_len=32, data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, dig1) - - def test_hex_digest(self): - mac = self.new() - digest = mac.digest() - hexdigest = mac.hexdigest() - - # hexdigest is equivalent to digest - self.assertEqual(hexlify(digest), tobytes(hexdigest)) - # hexdigest does not change the state - self.assertEqual(mac.hexdigest(), hexdigest) - # hexdigest returns a string - self.assertTrue(isinstance(hexdigest, type("digest"))) - - def test_verify(self): - h = self.new() - mac = h.digest() - h.verify(mac) - wrong_mac = strxor_c(mac, 255) - self.assertRaises(ValueError, h.verify, wrong_mac) - - def test_hexverify(self): - h = self.new() - mac = h.hexdigest() - h.hexverify(mac) - self.assertRaises(ValueError, h.hexverify, "4556") - - def test_oid(self): - - oid = "2.16.840.1.101.3.4.2." + self.oid_variant - h = self.new() - self.assertEqual(h.oid, oid) - - def test_bytearray(self): - - key = b'0' * 32 - data = b"\x00\x01\x02" - - # Data and key can be a bytearray (during initialization) - key_ba = bytearray(key) - data_ba = bytearray(data) - - h1 = self.KMAC.new(data=data, key=key) - h2 = self.KMAC.new(data=data_ba, key=key_ba) - key_ba[:1] = b'\xFF' - data_ba[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a bytearray (during operation) - data_ba = bytearray(data) - - h1 = self.new() - h2 = self.new() - h1.update(data) - h2.update(data_ba) - data_ba[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - def test_memoryview(self): - - key = b'0' * 32 - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in (get_mv_ro, get_mv_rw): - - # Data and key can be a memoryview (during initialization) - key_mv = get_mv(key) - data_mv = get_mv(data) - - h1 = self.KMAC.new(data=data, key=key) - h2 = self.KMAC.new(data=data_mv, key=key_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - key_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - # Data can be a memoryview (during operation) - data_mv = get_mv(data) - - h1 = self.new() - h2 = self.new() - h1.update(data) - h2.update(data_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - -class KMAC128Test(KMACTest): - - KMAC = KMAC128 - - minimum_key_bits = 128 - - minimum_bytes = 8 - default_bytes = 64 - - oid_variant = "19" - - -class KMAC256Test(KMACTest): - - KMAC = KMAC256 - - minimum_key_bits = 256 - - minimum_bytes = 8 - default_bytes = 64 - - oid_variant = "20" - - -class NISTExampleTestVectors(unittest.TestCase): - - # https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/KMAC_samples.pdf - test_data = [ - ( - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", - "00 01 02 03", - "", - "E5 78 0B 0D 3E A6 F7 D3 A4 29 C5 70 6A A4 3A 00" - "FA DB D7 D4 96 28 83 9E 31 87 24 3F 45 6E E1 4E", - "Sample #1 NIST", - KMAC128 - ), - ( - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", - "00 01 02 03", - "My Tagged Application", - "3B 1F BA 96 3C D8 B0 B5 9E 8C 1A 6D 71 88 8B 71" - "43 65 1A F8 BA 0A 70 70 C0 97 9E 28 11 32 4A A5", - "Sample #2 NIST", - KMAC128 - ), - ( - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", - "00 01 02 03 04 05 06 
07 08 09 0A 0B 0C 0D 0E 0F" - "10 11 12 13 14 15 16 17 18 19 1A 1B 1C 1D 1E 1F" - "20 21 22 23 24 25 26 27 28 29 2A 2B 2C 2D 2E 2F" - "30 31 32 33 34 35 36 37 38 39 3A 3B 3C 3D 3E 3F" - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F" - "60 61 62 63 64 65 66 67 68 69 6A 6B 6C 6D 6E 6F" - "70 71 72 73 74 75 76 77 78 79 7A 7B 7C 7D 7E 7F" - "80 81 82 83 84 85 86 87 88 89 8A 8B 8C 8D 8E 8F" - "90 91 92 93 94 95 96 97 98 99 9A 9B 9C 9D 9E 9F" - "A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 AA AB AC AD AE AF" - "B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 BA BB BC BD BE BF" - "C0 C1 C2 C3 C4 C5 C6 C7", - "My Tagged Application", - "1F 5B 4E 6C CA 02 20 9E 0D CB 5C A6 35 B8 9A 15" - "E2 71 EC C7 60 07 1D FD 80 5F AA 38 F9 72 92 30", - "Sample #3 NIST", - KMAC128 - ), - ( - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", - "00 01 02 03", - "My Tagged Application", - "20 C5 70 C3 13 46 F7 03 C9 AC 36 C6 1C 03 CB 64" - "C3 97 0D 0C FC 78 7E 9B 79 59 9D 27 3A 68 D2 F7" - "F6 9D 4C C3 DE 9D 10 4A 35 16 89 F2 7C F6 F5 95" - "1F 01 03 F3 3F 4F 24 87 10 24 D9 C2 77 73 A8 DD", - "Sample #4 NIST", - KMAC256 - ), - ( - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", - "00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F" - "10 11 12 13 14 15 16 17 18 19 1A 1B 1C 1D 1E 1F" - "20 21 22 23 24 25 26 27 28 29 2A 2B 2C 2D 2E 2F" - "30 31 32 33 34 35 36 37 38 39 3A 3B 3C 3D 3E 3F" - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F" - "60 61 62 63 64 65 66 67 68 69 6A 6B 6C 6D 6E 6F" - "70 71 72 73 74 75 76 77 78 79 7A 7B 7C 7D 7E 7F" - "80 81 82 83 84 85 86 87 88 89 8A 8B 8C 8D 8E 8F" - "90 91 92 93 94 95 96 97 98 99 9A 9B 9C 9D 9E 9F" - "A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 AA AB AC AD AE AF" - "B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 BA BB BC BD BE BF" - "C0 C1 C2 C3 C4 C5 C6 C7", - "", - "75 35 8C F3 9E 41 49 4E 94 97 07 92 7C EE 0A F2" - "0A 3F F5 53 90 4C 86 B0 8F 21 CC 41 4B CF D6 91" - "58 9D 27 CF 5E 15 36 9C BB FF 8B 9A 4C 2E B1 78" - "00 85 5D 02 35 FF 63 5D A8 25 33 EC 6B 75 9B 69", - "Sample #5 NIST", - KMAC256 - ), - ( - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", - "00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F" - "10 11 12 13 14 15 16 17 18 19 1A 1B 1C 1D 1E 1F" - "20 21 22 23 24 25 26 27 28 29 2A 2B 2C 2D 2E 2F" - "30 31 32 33 34 35 36 37 38 39 3A 3B 3C 3D 3E 3F" - "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" - "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F" - "60 61 62 63 64 65 66 67 68 69 6A 6B 6C 6D 6E 6F" - "70 71 72 73 74 75 76 77 78 79 7A 7B 7C 7D 7E 7F" - "80 81 82 83 84 85 86 87 88 89 8A 8B 8C 8D 8E 8F" - "90 91 92 93 94 95 96 97 98 99 9A 9B 9C 9D 9E 9F" - "A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 AA AB AC AD AE AF" - "B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 BA BB BC BD BE BF" - "C0 C1 C2 C3 C4 C5 C6 C7", - "My Tagged Application", - "B5 86 18 F7 1F 92 E1 D5 6C 1B 8C 55 DD D7 CD 18" - "8B 97 B4 CA 4D 99 83 1E B2 69 9A 83 7D A2 E4 D9" - "70 FB AC FD E5 00 33 AE A5 85 F1 A2 70 85 10 C3" - "2D 07 88 08 01 BD 18 28 98 FE 47 68 76 FC 89 65", - "Sample #6 NIST", - KMAC256 - ), - ] - - def setUp(self): - td = [] - for key, data, custom, mac, text, module in self.test_data: - ni = ( - unhexlify(key.replace(" ", "")), - unhexlify(data.replace(" ", "")), - custom.encode(), - unhexlify(mac.replace(" ", "")), - text, - module - ) - td.append(ni) - self.test_data = td - - def runTest(self): - - 
for key, data, custom, mac, text, module in self.test_data: - h = module.new(data=data, key=key, custom=custom, mac_len=len(mac)) - mac_tag = h.digest() - self.assertEqual(mac_tag, mac, msg=text) - - -def get_tests(config={}): - tests = [] - - tests += list_test_cases(KMAC128Test) - tests += list_test_cases(KMAC256Test) - tests.append(NISTExampleTestVectors()) - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_KangarooTwelve.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_KangarooTwelve.py deleted file mode 100644 index c9ad363..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_KangarooTwelve.py +++ /dev/null @@ -1,367 +0,0 @@ -# =================================================================== -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Hash.KangarooTwelve""" - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import KangarooTwelve as K12 -from Cryptodome.Util.py3compat import b, bchr - - -class KangarooTwelveTest(unittest.TestCase): - - def test_length_encode(self): - self.assertEqual(K12._length_encode(0), b'\x00') - self.assertEqual(K12._length_encode(12), b'\x0C\x01') - self.assertEqual(K12._length_encode(65538), b'\x01\x00\x02\x03') - - def test_new_positive(self): - - xof1 = K12.new() - xof2 = K12.new(data=b("90")) - xof3 = K12.new().update(b("90")) - - self.assertNotEqual(xof1.read(10), xof2.read(10)) - xof3.read(10) - self.assertEqual(xof2.read(10), xof3.read(10)) - - xof1 = K12.new() - ref = xof1.read(10) - xof2 = K12.new(custom=b("")) - xof3 = K12.new(custom=b("foo")) - - self.assertEqual(ref, xof2.read(10)) - self.assertNotEqual(ref, xof3.read(10)) - - xof1 = K12.new(custom=b("foo")) - xof2 = K12.new(custom=b("foo"), data=b("90")) - xof3 = K12.new(custom=b("foo")).update(b("90")) - - self.assertNotEqual(xof1.read(10), xof2.read(10)) - xof3.read(10) - self.assertEqual(xof2.read(10), xof3.read(10)) - - def test_update(self): - pieces = [bchr(10) * 200, bchr(20) * 300] - h = K12.new() - h.update(pieces[0]).update(pieces[1]) - digest = h.read(10) - h = K12.new() - h.update(pieces[0] + pieces[1]) - self.assertEqual(h.read(10), digest) - - def test_update_negative(self): - h = K12.new() - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = K12.new() - digest = h.read(90) - - # read returns a byte string of the right length - self.assertTrue(isinstance(digest, type(b("digest")))) - self.assertEqual(len(digest), 90) - - def test_update_after_read(self): - mac = K12.new() - mac.update(b("rrrr")) - mac.read(90) - self.assertRaises(TypeError, mac.update, b("ttt")) - - -def txt2bin(txt): - clean = txt.replace(" ", "").replace("\n", "").replace("\r", "") - return unhexlify(clean) - - -def ptn(n): - res = bytearray(n) - pattern = b"".join([bchr(x) for x in range(0, 0xFB)]) - for base in range(0, n - 0xFB, 0xFB): - res[base:base + 0xFB] = pattern - remain = n % 0xFB - if remain: - base = (n // 0xFB) * 0xFB - res[base:] = pattern[:remain] - assert(len(res) == n) - return res - - -def chunked(source, size): - for i in range(0, len(source), size): - yield source[i:i+size] - - -class KangarooTwelveTV(unittest.TestCase): - - # https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KangarooTwelve.txt - - def test_zero_1(self): - tv = """1A C2 D4 50 FC 3B 42 05 D1 9D A7 BF CA 1B 37 51 - 3C 08 03 57 7A C7 16 7F 06 FE 2C E1 F0 EF 39 E5""" - - btv = txt2bin(tv) - res = K12.new().read(32) - self.assertEqual(res, btv) - - def test_zero_2(self): - tv = """1A C2 D4 50 FC 3B 42 05 D1 9D A7 BF CA 1B 37 51 - 3C 08 03 57 7A C7 16 7F 06 FE 2C E1 F0 EF 39 E5 - 42 69 C0 56 B8 C8 2E 48 27 60 38 B6 D2 92 96 6C - C0 7A 3D 46 45 27 2E 31 FF 38 50 81 39 EB 0A 71""" - - btv = txt2bin(tv) - res = K12.new().read(64) - self.assertEqual(res, btv) - - def test_zero_3(self): - tv = """E8 DC 56 36 42 F7 22 8C 84 68 4C 89 84 05 D3 A8 - 34 79 91 58 C0 79 B1 28 80 27 7A 1D 28 E2 FF 6D""" - - btv = txt2bin(tv) - res = K12.new().read(10032) - self.assertEqual(res[-32:], btv) - - def test_ptn_1(self): - tv = """2B DA 92 45 0E 8B 14 7F 8A 7C B6 29 E7 84 A0 58 - EF CA 7C F7 D8 21 8E 02 D3 45 DF AA 65 24 4A 1F""" - - btv = txt2bin(tv) - res = 
K12.new(data=ptn(1)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17(self): - tv = """6B F7 5F A2 23 91 98 DB 47 72 E3 64 78 F8 E1 9B - 0F 37 12 05 F6 A9 A9 3A 27 3F 51 DF 37 12 28 88""" - - btv = txt2bin(tv) - res = K12.new(data=ptn(17)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17_2(self): - tv = """0C 31 5E BC DE DB F6 14 26 DE 7D CF 8F B7 25 D1 - E7 46 75 D7 F5 32 7A 50 67 F3 67 B1 08 EC B6 7C""" - - btv = txt2bin(tv) - res = K12.new(data=ptn(17**2)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17_3(self): - tv = """CB 55 2E 2E C7 7D 99 10 70 1D 57 8B 45 7D DF 77 - 2C 12 E3 22 E4 EE 7F E4 17 F9 2C 75 8F 0D 59 D0""" - - btv = txt2bin(tv) - res = K12.new(data=ptn(17**3)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17_4(self): - tv = """87 01 04 5E 22 20 53 45 FF 4D DA 05 55 5C BB 5C - 3A F1 A7 71 C2 B8 9B AE F3 7D B4 3D 99 98 B9 FE""" - - btv = txt2bin(tv) - data = ptn(17**4) - - # All at once - res = K12.new(data=data).read(32) - self.assertEqual(res, btv) - - # Byte by byte - k12 = K12.new() - for x in data: - k12.update(bchr(x)) - res = k12.read(32) - self.assertEqual(res, btv) - - # Chunks of various prime sizes - for chunk_size in (13, 17, 19, 23, 31): - k12 = K12.new() - for x in chunked(data, chunk_size): - k12.update(x) - res = k12.read(32) - self.assertEqual(res, btv) - - def test_ptn_17_5(self): - tv = """84 4D 61 09 33 B1 B9 96 3C BD EB 5A E3 B6 B0 5C - C7 CB D6 7C EE DF 88 3E B6 78 A0 A8 E0 37 16 82""" - - btv = txt2bin(tv) - data = ptn(17**5) - - # All at once - res = K12.new(data=data).read(32) - self.assertEqual(res, btv) - - # Chunks - k12 = K12.new() - for chunk in chunked(data, 8192): - k12.update(chunk) - res = k12.read(32) - self.assertEqual(res, btv) - - def test_ptn_17_6(self): - tv = """3C 39 07 82 A8 A4 E8 9F A6 36 7F 72 FE AA F1 32 - 55 C8 D9 58 78 48 1D 3C D8 CE 85 F5 8E 88 0A F8""" - - btv = txt2bin(tv) - data = ptn(17**6) - - # All at once - res = K12.new(data=data).read(32) - self.assertEqual(res, btv) - - def test_ptn_c_1(self): - tv = """FA B6 58 DB 63 E9 4A 24 61 88 BF 7A F6 9A 13 30 - 45 F4 6E E9 84 C5 6E 3C 33 28 CA AF 1A A1 A5 83""" - - btv = txt2bin(tv) - custom = ptn(1) - - # All at once - res = K12.new(custom=custom).read(32) - self.assertEqual(res, btv) - - def test_ptn_c_41(self): - tv = """D8 48 C5 06 8C ED 73 6F 44 62 15 9B 98 67 FD 4C - 20 B8 08 AC C3 D5 BC 48 E0 B0 6B A0 A3 76 2E C4""" - - btv = txt2bin(tv) - custom = ptn(41) - - # All at once - res = K12.new(data=b'\xFF', custom=custom).read(32) - self.assertEqual(res, btv) - - def test_ptn_c_41_2(self): - tv = """C3 89 E5 00 9A E5 71 20 85 4C 2E 8C 64 67 0A C0 - 13 58 CF 4C 1B AF 89 44 7A 72 42 34 DC 7C ED 74""" - - btv = txt2bin(tv) - custom = ptn(41**2) - - # All at once - res = K12.new(data=b'\xFF' * 3, custom=custom).read(32) - self.assertEqual(res, btv) - - def test_ptn_c_41_3(self): - tv = """75 D2 F8 6A 2E 64 45 66 72 6B 4F BC FC 56 57 B9 - DB CF 07 0C 7B 0D CA 06 45 0A B2 91 D7 44 3B CF""" - - btv = txt2bin(tv) - custom = ptn(41**3) - - # All at once - res = K12.new(data=b'\xFF' * 7, custom=custom).read(32) - self.assertEqual(res, btv) - - # https://datatracker.ietf.org/doc/draft-irtf-cfrg-kangarootwelve/ - - def test_ptn_8191(self): - tv = """1B 57 76 36 F7 23 64 3E 99 0C C7 D6 A6 59 83 74 - 36 FD 6A 10 36 26 60 0E B8 30 1C D1 DB E5 53 D6""" - - btv = txt2bin(tv) - - # All at once - res = K12.new(data=ptn(8191)).read(32) - self.assertEqual(res, btv) - - def test_ptn_8192(self): - tv = """48 F2 56 F6 77 2F 9E DF B6 A8 B6 61 EC 92 DC 93 - B9 
5E BD 05 A0 8A 17 B3 9A E3 49 08 70 C9 26 C3""" - - btv = txt2bin(tv) - - # All at once - res = K12.new(data=ptn(8192)).read(32) - self.assertEqual(res, btv) - - def test_ptn_8192_8189(self): - tv = """3E D1 2F 70 FB 05 DD B5 86 89 51 0A B3 E4 D2 3C - 6C 60 33 84 9A A0 1E 1D 8C 22 0A 29 7F ED CD 0B""" - - btv = txt2bin(tv) - - # All at once - res = K12.new(data=ptn(8192), custom=ptn(8189)).read(32) - self.assertEqual(res, btv) - - def test_ptn_8192_8190(self): - tv = """6A 7C 1B 6A 5C D0 D8 C9 CA 94 3A 4A 21 6C C6 46 - 04 55 9A 2E A4 5F 78 57 0A 15 25 3D 67 BA 00 AE""" - - btv = txt2bin(tv) - - # All at once - res = K12.new(data=ptn(8192), custom=ptn(8190)).read(32) - self.assertEqual(res, btv) - - ### - - def test_1(self): - tv = "fd608f91d81904a9916e78a18f65c157a78d63f93d8f6367db0524526a5ea2bb" - - btv = txt2bin(tv) - res = K12.new(data=b'', custom=ptn(100)).read(32) - self.assertEqual(res, btv) - - def test_2(self): - tv4 = "5a4ec9a649f81916d4ce1553492962f7868abf8dd1ceb2f0cb3682ea95cda6a6" - tv3 = "441688fe4fe4ae9425eb3105eb445eb2b3a6f67b66eff8e74ebfbc49371f6d4c" - tv2 = "17269a57759af0214c84a0fd9bc851f4d95f80554cfed4e7da8a6ee1ff080131" - tv1 = "33826990c09dc712ba7224f0d9be319e2720de95a4c1afbd2211507dae1c703a" - tv0 = "9f4d3aba908ddc096e4d3a71da954f917b9752f05052b9d26d916a6fbc75bf3e" - - res = K12.new(data=b'A' * (8192 - 4), custom=b'B').read(32) - self.assertEqual(res, txt2bin(tv4)) - - res = K12.new(data=b'A' * (8192 - 3), custom=b'B').read(32) - self.assertEqual(res, txt2bin(tv3)) - - res = K12.new(data=b'A' * (8192 - 2), custom=b'B').read(32) - self.assertEqual(res, txt2bin(tv2)) - - res = K12.new(data=b'A' * (8192 - 1), custom=b'B').read(32) - self.assertEqual(res, txt2bin(tv1)) - - res = K12.new(data=b'A' * (8192 - 0), custom=b'B').read(32) - self.assertEqual(res, txt2bin(tv0)) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(KangarooTwelveTest) - tests += list_test_cases(KangarooTwelveTV) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD2.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD2.py deleted file mode 100644 index beae38a..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD2.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Hash.MD2""" - -from Cryptodome.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. -test_data = [ - # Test vectors from RFC 1319 - ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"), - ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'), - ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'), - ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'), - - ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('da33def2a42df13975352846c30338cd', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('d5976f79d83d3a0dc9806c3c66f3efd8', - '1234567890123456789012345678901234567890123456' - + '7890123456789012345678901234567890', - "'1234567890' * 8"), -] - -def get_tests(config={}): - from Cryptodome.Hash import MD2 - from .common import make_hash_tests - return make_hash_tests(MD2, "MD2", test_data, - digest_size=16, - oid="1.2.840.113549.2.2") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD4.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD4.py deleted file mode 100644 index 41de977..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD4.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/MD4.py: Self-test for the MD4 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.MD4""" - -__revision__ = "$Id$" - -from Cryptodome.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - # Test vectors from RFC 1320 - ('31d6cfe0d16ae931b73c59d7e0c089c0', '', "'' (empty string)"), - ('bde52cb31de33e46245e05fbdbd6fb24', 'a'), - ('a448017aaf21d8525fc10ae87aa6729d', 'abc'), - ('d9130a8164549fe818874806e1c7014b', 'message digest'), - - ('d79e1c308aa5bbcdeea8ed63df412da9', 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('043f8582f241db351ce627e153e7f0e4', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('e33b4ddc9c38f2199c3e7b164fcc0536', - '1234567890123456789012345678901234567890123456' - + '7890123456789012345678901234567890', - "'1234567890' * 8"), -] - -def get_tests(config={}): - from Cryptodome.Hash import MD4 - from .common import make_hash_tests - return make_hash_tests(MD4, "MD4", test_data, - digest_size=16, - oid="1.2.840.113549.2.4") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD5.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD5.py deleted file mode 100644 index 3f7a005..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_MD5.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/MD5.py: Self-test for the MD5 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.MD5""" - -from Cryptodome.Util.py3compat import * -from Cryptodome.Hash import MD5 -from binascii import unhexlify -import unittest -from Cryptodome.SelfTest.st_common import list_test_cases - - -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - # Test vectors from RFC 1321 - ('d41d8cd98f00b204e9800998ecf8427e', '', "'' (empty string)"), - ('0cc175b9c0f1b6a831c399e269772661', 'a'), - ('900150983cd24fb0d6963f7d28e17f72', 'abc'), - ('f96b697d7cb7938d525a2f31aaf161d0', 'message digest'), - - ('c3fcd3d76192e4007dfb496cca67e13b', 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('d174ab98d277d9f5a5611c2c9f419d9f', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('57edf4a22be3c955ac49da2e2107b67a', - '1234567890123456789012345678901234567890123456' - + '7890123456789012345678901234567890', - "'1234567890' * 8"), - - # https://www.cosic.esat.kuleuven.be/nessie/testvectors/hash/md5/Md5-128.unverified.test-vectors - ('57EDF4A22BE3C955AC49DA2E2107B67A', '1234567890' * 8, 'Set 1, vector #7'), - ('7707D6AE4E027C70EEA2A935C2296F21', 'a'*1000000, 'Set 1, vector #8'), -] - - -class Md5IterTest(unittest.TestCase): - - def runTest(self): - message = b("\x00") * 16 - result1 = "4AE71336E44BF9BF79D2752E234818A5".lower() - result2 = "1A83F51285E4D89403D00C46EF8508FE".lower() - - h = MD5.new(message) - message = h.digest() - self.assertEqual(h.hexdigest(), result1) - - for _ in range(99999): - h = MD5.new(message) - message = h.digest() - - self.assertEqual(h.hexdigest(), result2) - - -def get_tests(config={}): - from .common import make_hash_tests - - tests = make_hash_tests(MD5, "MD5", test_data, - digest_size=16, - oid="1.2.840.113549.2.5") - if config.get('slow_tests'): - tests += [ Md5IterTest() ] - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_Poly1305.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_Poly1305.py deleted file mode 100644 index 19cacb4..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_Poly1305.py +++ /dev/null @@ -1,542 +0,0 @@ -# -# SelfTest/Hash/test_Poly1305.py: Self-test for the Poly1305 module -# -# =================================================================== -# -# Copyright (c) 2018, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Hash._Poly1305""" - -import json -import unittest -from binascii import unhexlify, hexlify - -from .common import make_mac_tests -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import Poly1305 -from Cryptodome.Cipher import AES, ChaCha20 - -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.Util.strxor import strxor_c - -# This is a list of (r+s keypair, data, result, description, keywords) tuples. -test_data_basic = [ - ( - "85d6be7857556d337f4452fe42d506a80103808afb0db2fd4abff6af4149f51b", - hexlify(b"Cryptographic Forum Research Group").decode(), - "a8061dc1305136c6c22b8baf0c0127a9", - "RFC7539" - ), - ( - "746869732069732033322d62797465206b657920666f7220506f6c7931333035", - "0000000000000000000000000000000000000000000000000000000000000000", - "49ec78090e481ec6c26b33b91ccc0307", - "https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-00#section-7 A", - ), - ( - "746869732069732033322d62797465206b657920666f7220506f6c7931333035", - "48656c6c6f20776f726c6421", - "a6f745008f81c916a20dcc74eef2b2f0", - "https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-00#section-7 B", - ), - ( - "746869732069732033322d62797465206b657920666f7220506f6c7931333035", - "", - "6b657920666f7220506f6c7931333035", - "Generated with pure Python", - ), - ( - "746869732069732033322d62797465206b657920666f7220506f6c7931333035", - "FF", - "f7e4e0ef4c46d106219da3d1bdaeb3ff", - "Generated with pure Python", - ), - ( - "746869732069732033322d62797465206b657920666f7220506f6c7931333035", - "FF00", - "7471eceeb22988fc936da1d6e838b70e", - "Generated with pure Python", - ), - ( - "746869732069732033322d62797465206b657920666f7220506f6c7931333035", - "AA" * 17, - "32590bc07cb2afaccca3f67f122975fe", - "Generated with pure Python", - ), - ( - "00" * 32, - "00" * 64, - "00" * 16, - "RFC7539 A.3 #1", - ), - ( - "0000000000000000000000000000000036e5f6b5c5e06070f0efca96227a863e", - hexlify( - b"Any submission t" - b"o the IETF inten" - b"ded by the Contr" - b"ibutor for publi" - b"cation as all or" - b" part of an IETF" - b" Internet-Draft " - b"or RFC and any s" - b"tatement made wi" - b"thin the context" - b" of an IETF acti" - b"vity is consider" - b"ed an \"IETF Cont" - b"ribution\". Such " - b"statements inclu" - b"de oral statemen" - b"ts in IETF sessi" - b"ons, as well as " - b"written and elec" - b"tronic communica" - b"tions made at an" - b"y time or place," - b" which are addre" - b"ssed to").decode(), - "36e5f6b5c5e06070f0efca96227a863e", - "RFC7539 A.3 #2", - ), - ( - "36e5f6b5c5e06070f0efca96227a863e00000000000000000000000000000000", - hexlify( - b"Any submission t" - b"o the IETF inten" - b"ded by the Contr" - b"ibutor for publi" - b"cation as all or" - b" part of an IETF" - b" Internet-Draft " - b"or RFC and any s" - b"tatement made wi" - b"thin the context" - b" of an IETF acti" - b"vity is consider" - b"ed an \"IETF Cont" - b"ribution\". 
Such " - b"statements inclu" - b"de oral statemen" - b"ts in IETF sessi" - b"ons, as well as " - b"written and elec" - b"tronic communica" - b"tions made at an" - b"y time or place," - b" which are addre" - b"ssed to").decode(), - "f3477e7cd95417af89a6b8794c310cf0", - "RFC7539 A.3 #3", - ), - ( - "1c9240a5eb55d38af333888604f6b5f0473917c1402b80099dca5cbc207075c0", - "2754776173206272696c6c69672c2061" - "6e642074686520736c6974687920746f" - "7665730a446964206779726520616e64" - "2067696d626c6520696e207468652077" - "6162653a0a416c6c206d696d73792077" - "6572652074686520626f726f676f7665" - "732c0a416e6420746865206d6f6d6520" - "7261746873206f757467726162652e", - "4541669a7eaaee61e708dc7cbcc5eb62", - "RFC7539 A.3 #4", - ), - ( - "02" + "00" * 31, - "FF" * 16, - "03" + "00" * 15, - "RFC7539 A.3 #5", - ), - ( - "02" + "00" * 15 + "FF" * 16, - "02" + "00" * 15, - "03" + "00" * 15, - "RFC7539 A.3 #6", - ), - ( - "01" + "00" * 31, - "FF" * 16 + "F0" + "FF" * 15 + "11" + "00" * 15, - "05" + "00" * 15, - "RFC7539 A.3 #7", - ), - ( - "01" + "00" * 31, - "FF" * 16 + "FB" + "FE" * 15 + "01" * 16, - "00" * 16, - "RFC7539 A.3 #8", - ), - ( - "02" + "00" * 31, - "FD" + "FF" * 15, - "FA" + "FF" * 15, - "RFC7539 A.3 #9", - ), - ( - "01 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00" - "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", - "E3 35 94 D7 50 5E 43 B9 00 00 00 00 00 00 00 00" - "33 94 D7 50 5E 43 79 CD 01 00 00 00 00 00 00 00" - "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00" - "01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", - "14 00 00 00 00 00 00 00 55 00 00 00 00 00 00 00", - "RFC7539 A.3 #10", - ), - ( - "01 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00" - "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", - "E3 35 94 D7 50 5E 43 B9 00 00 00 00 00 00 00 00" - "33 94 D7 50 5E 43 79 CD 01 00 00 00 00 00 00 00" - "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", - "13" + "00" * 15, - "RFC7539 A.3 #11", - ), -] - -# This is a list of (key(k+r), data, result, description, keywords) tuples. 
-test_data_aes = [ - ( - "ec074c835580741701425b623235add6851fc40c3467ac0be05cc20404f3f700", - "f3f6", - "f4c633c3044fc145f84f335cb81953de", - "http://cr.yp.to/mac/poly1305-20050329.pdf", - { 'cipher':AES, 'nonce':unhexlify("fb447350c4e868c52ac3275cf9d4327e") } - ), - ( - "75deaa25c09f208e1dc4ce6b5cad3fbfa0f3080000f46400d0c7e9076c834403", - "", - "dd3fab2251f11ac759f0887129cc2ee7", - "http://cr.yp.to/mac/poly1305-20050329.pdf", - { 'cipher':AES, 'nonce':unhexlify("61ee09218d29b0aaed7e154a2c5509cc") } - ), - ( - "6acb5f61a7176dd320c5c1eb2edcdc7448443d0bb0d21109c89a100b5ce2c208", - "663cea190ffb83d89593f3f476b6bc24" - "d7e679107ea26adb8caf6652d0656136", - "0ee1c16bb73f0f4fd19881753c01cdbe", - "http://cr.yp.to/mac/poly1305-20050329.pdf", - { 'cipher':AES, 'nonce':unhexlify("ae212a55399729595dea458bc621ff0e") } - ), - ( - "e1a5668a4d5b66a5f68cc5424ed5982d12976a08c4426d0ce8a82407c4f48207", - "ab0812724a7f1e342742cbed374d94d1" - "36c6b8795d45b3819830f2c04491faf0" - "990c62e48b8018b2c3e4a0fa3134cb67" - "fa83e158c994d961c4cb21095c1bf9", - "5154ad0d2cb26e01274fc51148491f1b", - "http://cr.yp.to/mac/poly1305-20050329.pdf", - { 'cipher':AES, 'nonce':unhexlify("9ae831e743978d3a23527c7128149e3a") } - ), -] - -test_data_chacha20 = [ - ( - "00" * 32, - "FF" * 15, - "13cc5bbadc36b03a5163928f0bcb65aa", - "RFC7539 A.4 #1", - { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 12) } - ), - ( - "00" * 31 + "01", - "FF" * 15, - "0baf33c1d6df211bdd50a6767e98e00a", - "RFC7539 A.4 #2", - { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 11 + "02") } - ), - ( - "1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0" - "47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0", - "FF" * 15, - "e8b4c6db226cd8939e65e02eebf834ce", - "RFC7539 A.4 #3", - { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 11 + "02") } - ), - ( - "1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0" - "47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0", - "f3 33 88 86 00 00 00 00 00 00 4e 91 00 00 00 00" - "64 a0 86 15 75 86 1a f4 60 f0 62 c7 9b e6 43 bd" - "5e 80 5c fd 34 5c f3 89 f1 08 67 0a c7 6c 8c b2" - "4c 6c fc 18 75 5d 43 ee a0 9e e9 4e 38 2d 26 b0" - "bd b7 b7 3c 32 1b 01 00 d4 f0 3b 7f 35 58 94 cf" - "33 2f 83 0e 71 0b 97 ce 98 c8 a8 4a bd 0b 94 81" - "14 ad 17 6e 00 8d 33 bd 60 f9 82 b1 ff 37 c8 55" - "97 97 a0 6e f4 f0 ef 61 c1 86 32 4e 2b 35 06 38" - "36 06 90 7b 6a 7c 02 b0 f9 f6 15 7b 53 c8 67 e4" - "b9 16 6c 76 7b 80 4d 46 a5 9b 52 16 cd e7 a4 e9" - "90 40 c5 a4 04 33 22 5e e2 82 a1 b0 a0 6c 52 3e" - "af 45 34 d7 f8 3f a1 15 5b 00 47 71 8c bc 54 6a" - "0d 07 2b 04 b3 56 4e ea 1b 42 22 73 f5 48 27 1a" - "0b b2 31 60 53 fa 76 99 19 55 eb d6 31 59 43 4e" - "ce bb 4e 46 6d ae 5a 10 73 a6 72 76 27 09 7a 10" - "49 e6 17 d9 1d 36 10 94 fa 68 f0 ff 77 98 71 30" - "30 5b ea ba 2e da 04 df 99 7b 71 4d 6c 6f 2c 29" - "a6 ad 5c b4 02 2b 02 70 9b 00 00 00 00 00 00 00" - "0c 00 00 00 00 00 00 00 09 01 00 00 00 00 00 00", - "ee ad 9d 67 89 0c bb 22 39 23 36 fe a1 85 1f 38", - "RFC7539 A.5", - { 'cipher':ChaCha20, 'nonce':unhexlify("000000000102030405060708") } - ), -] - - -class Poly1305Test_AES(unittest.TestCase): - - key = b'\x11' * 32 - - def test_new_positive(self): - - data = b'r' * 100 - - h1 = Poly1305.new(key=self.key, cipher=AES) - self.assertEqual(h1.digest_size, 16) - self.assertEqual(len(h1.nonce), 16) - d1 = h1.update(data).digest() - self.assertEqual(len(d1), 16) - - h2 = Poly1305.new(key=self.key, nonce=h1.nonce, data=data, cipher=AES) - d2 = h2.digest() - self.assertEqual(h1.nonce, h2.nonce) - self.assertEqual(d1, d2) - - def test_new_negative(self): - from 
Cryptodome.Cipher import DES3 - - self.assertRaises(ValueError, Poly1305.new, key=self.key[:31], cipher=AES) - self.assertRaises(ValueError, Poly1305.new, key=self.key, cipher=DES3) - self.assertRaises(ValueError, Poly1305.new, key=self.key, nonce=b'1' * 15, cipher=AES) - self.assertRaises(TypeError, Poly1305.new, key=u"2" * 32, cipher=AES) - self.assertRaises(TypeError, Poly1305.new, key=self.key, data=u"2" * 100, cipher=AES) - - def test_update(self): - pieces = [b"\x0A" * 200, b"\x14" * 300] - h1 = Poly1305.new(key=self.key, cipher=AES) - h1.update(pieces[0]).update(pieces[1]) - d1 = h1.digest() - - h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) - h2.update(pieces[0] + pieces[1]) - d2 = h2.digest() - self.assertEqual(d1, d2) - - def test_update_negative(self): - h = Poly1305.new(key=self.key, cipher=AES) - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = Poly1305.new(key=self.key, cipher=AES) - digest = h.digest() - - # hexdigest does not change the state - self.assertEqual(h.digest(), digest) - # digest returns a byte string - self.assertTrue(isinstance(digest, type(b"digest"))) - - def test_update_after_digest(self): - msg=b"rrrrttt" - - # Normally, update() cannot be done after digest() - h = Poly1305.new(key=self.key, data=msg[:4], cipher=AES) - h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - - def test_hex_digest(self): - mac = Poly1305.new(key=self.key, cipher=AES) - digest = mac.digest() - hexdigest = mac.hexdigest() - - # hexdigest is equivalent to digest - self.assertEqual(hexlify(digest), tobytes(hexdigest)) - # hexdigest does not change the state - self.assertEqual(mac.hexdigest(), hexdigest) - # hexdigest returns a string - self.assertTrue(isinstance(hexdigest, type("digest"))) - - def test_verify(self): - h = Poly1305.new(key=self.key, cipher=AES) - mac = h.digest() - h.verify(mac) - wrong_mac = strxor_c(mac, 255) - self.assertRaises(ValueError, h.verify, wrong_mac) - - def test_hexverify(self): - h = Poly1305.new(key=self.key, cipher=AES) - mac = h.hexdigest() - h.hexverify(mac) - self.assertRaises(ValueError, h.hexverify, "4556") - - def test_bytearray(self): - - data = b"\x00\x01\x02" - h0 = Poly1305.new(key=self.key, data=data, cipher=AES) - d_ref = h0.digest() - - # Data and key can be a bytearray (during initialization) - key_ba = bytearray(self.key) - data_ba = bytearray(data) - - h1 = Poly1305.new(key=self.key, data=data, cipher=AES, nonce=h0.nonce) - h2 = Poly1305.new(key=key_ba, data=data_ba, cipher=AES, nonce=h0.nonce) - key_ba[:1] = b'\xFF' - data_ba[:1] = b'\xEE' - - self.assertEqual(h1.digest(), d_ref) - self.assertEqual(h2.digest(), d_ref) - - # Data can be a bytearray (during operation) - data_ba = bytearray(data) - - h1 = Poly1305.new(key=self.key, cipher=AES) - h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) - h1.update(data) - h2.update(data_ba) - data_ba[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - def test_memoryview(self): - - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in (get_mv_ro, get_mv_rw): - - # Data and key can be a memoryview (during initialization) - key_mv = get_mv(self.key) - data_mv = get_mv(data) - - h1 = Poly1305.new(key=self.key, data=data, cipher=AES) - h2 = Poly1305.new(key=key_mv, data=data_mv, cipher=AES, - nonce=h1.nonce) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - key_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), 
h2.digest()) - - # Data can be a memoryview (during operation) - data_mv = get_mv(data) - - h1 = Poly1305.new(key=self.key, cipher=AES) - h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) - h1.update(data) - h2.update(data_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - -class Poly1305Test_ChaCha20(unittest.TestCase): - - key = b'\x11' * 32 - - def test_new_positive(self): - data = b'r' * 100 - - h1 = Poly1305.new(key=self.key, cipher=ChaCha20) - self.assertEqual(h1.digest_size, 16) - self.assertEqual(len(h1.nonce), 12) - - h2 = Poly1305.new(key=self.key, cipher=ChaCha20, nonce = b'8' * 8) - self.assertEqual(len(h2.nonce), 8) - self.assertEqual(h2.nonce, b'8' * 8) - - def test_new_negative(self): - - self.assertRaises(ValueError, Poly1305.new, key=self.key, nonce=b'1' * 7, cipher=ChaCha20) - - -# -# make_mac_tests() expect a new() function with signature new(key, data, -# **kwargs), and we need to adapt Poly1305's, as it only uses keywords -# -class Poly1305_New(object): - - @staticmethod - def new(key, *data, **kwds): - _kwds = dict(kwds) - if len(data) == 1: - _kwds['data'] = data[0] - _kwds['key'] = key - return Poly1305.new(**_kwds) - - -class Poly1305_Basic(object): - - @staticmethod - def new(key, *data, **kwds): - from Cryptodome.Hash.Poly1305 import Poly1305_MAC - - if len(data) == 1: - msg = data[0] - else: - msg = None - - return Poly1305_MAC(key[:16], key[16:], msg) - - -class Poly1305AES_MC(unittest.TestCase): - - def runTest(self): - tag = unhexlify(b"fb447350c4e868c52ac3275cf9d4327e") - - msg = b'' - for msg_len in range(5000 + 1): - key = tag + strxor_c(tag, 0xFF) - nonce = tag[::-1] - if msg_len > 0: - msg = msg + tobytes(tag[0]) - auth = Poly1305.new(key=key, nonce=nonce, cipher=AES, data=msg) - tag = auth.digest() - - # Compare against output of original DJB's poly1305aes-20050218 - self.assertEqual("CDFA436DDD629C7DC20E1128530BAED2", auth.hexdigest().upper()) - - -def get_tests(config={}): - tests = make_mac_tests(Poly1305_Basic, "Poly1305", test_data_basic) - tests += make_mac_tests(Poly1305_New, "Poly1305", test_data_aes) - tests += make_mac_tests(Poly1305_New, "Poly1305", test_data_chacha20) - tests += [ Poly1305AES_MC() ] - tests += list_test_cases(Poly1305Test_AES) - tests += list_test_cases(Poly1305Test_ChaCha20) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_RIPEMD160.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_RIPEMD160.py deleted file mode 100644 index c05a877..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_RIPEMD160.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_RIPEMD160.py: Self-test for the RIPEMD-160 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -#"""Self-test suite for Cryptodome.Hash.RIPEMD160""" - -from Cryptodome.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. -test_data = [ - # Test vectors downloaded 2008-09-12 from - # http://homes.esat.kuleuven.be/~bosselae/ripemd160.html - ('9c1185a5c5e9fc54612808977ee8f548b2258d31', '', "'' (empty string)"), - ('0bdc9d2d256b3ee9daae347be6f4dc835a467ffe', 'a'), - ('8eb208f7e05d987a9b044a8e98c6b087f15a0bfc', 'abc'), - ('5d0689ef49d2fae572b881b123a85ffa21595f36', 'message digest'), - - ('f71c27109c692c1b56bbdceb5b9d2865b3708dbc', - 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('12a053384a9c0c88e405a06c27dcf49ada62eb2b', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq', - 'abcdbcd...pnopq'), - - ('b0e20b6e3116640286ed3a87a5713079b21f5189', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('9b752e45573d4b39f4dbd3323cab82bf63326bfb', - '1234567890' * 8, - "'1234567890' * 8"), - - ('52783243c1697bdbe16d37f97f68f08325dc1528', - 'a' * 10**6, - '"a" * 10**6'), -] - -def get_tests(config={}): - from Cryptodome.Hash import RIPEMD160 - from .common import make_hash_tests - return make_hash_tests(RIPEMD160, "RIPEMD160", test_data, - digest_size=20, - oid="1.3.36.3.2.1") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA1.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA1.py deleted file mode 100644 index a879e68..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA1.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/SHA1.py: Self-test for the SHA-1 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA""" - -from binascii import hexlify - -from Cryptodome.SelfTest.loader import load_test_vectors - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. -test_data_various = [ - # FIPS PUB 180-2, A.1 - "One-Block Message" - ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'), - - # FIPS PUB 180-2, A.2 - "Multi-Block Message" - ('84983e441c3bd26ebaae4aa1f95129e5e54670f1', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), - - # FIPS PUB 180-2, A.3 - "Long Message" -# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f', -# 'a' * 10**6, -# '"a" * 10**6'), - - # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits) - ('dea356a2cddd90c7a7ecedc5ebb563934f460452', - '01234567' * 80, - '"01234567" * 80'), -] - -def get_tests(config={}): - from Cryptodome.Hash import SHA1 - from .common import make_hash_tests - - tests = [] - - test_vectors = load_test_vectors(("Hash", "SHA1"), - "SHA1ShortMsg.rsp", - "KAT SHA-1", - { "len" : lambda x: int(x) } ) or [] - - test_data = test_data_various[:] - for tv in test_vectors: - try: - if tv.startswith('['): - continue - except AttributeError: - pass - if tv.len == 0: - tv.msg = b"" - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests = make_hash_tests(SHA1, "SHA1", test_data, - digest_size=20, - oid="1.3.14.3.2.26") - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA224.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA224.py deleted file mode 100644 index da32423..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA224.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA224.py: Self-test for the SHA-224 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA224""" - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - - # RFC 3874: Section 3.1, "Test Vector #1 - ('23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7', 'abc'), - - # RFC 3874: Section 3.2, "Test Vector #2 - ('75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525', 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), - - # RFC 3874: Section 3.3, "Test Vector #3 - ('20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67', 'a' * 10**6, "'a' * 10**6"), - - # Examples from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', ''), - - ('49b08defa65e644cbf8a2dd9270bdededabc741997d1dadd42026d7b', - 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), - - ('58911e7fccf2971a7d07f93162d8bd13568e71aa8fc86fc1fe9043d1', - 'Frank jagt im komplett verwahrlosten Taxi quer durch Bayern'), - -] - -def get_tests(config={}): - from Cryptodome.Hash import SHA224 - from .common import make_hash_tests - return make_hash_tests(SHA224, "SHA224", test_data, - digest_size=28, - oid='2.16.840.1.101.3.4.2.4') - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA256.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA256.py deleted file mode 100644 index 23d1145..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA256.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA256""" - -import unittest -from Cryptodome.Util.py3compat import * - -class LargeSHA256Test(unittest.TestCase): - def runTest(self): - """SHA256: 512/520 MiB test""" - from Cryptodome.Hash import SHA256 - zeros = bchr(0x00) * (1024*1024) - - h = SHA256.new(zeros) - for i in range(511): - h.update(zeros) - - # This test vector is from PyCrypto's old testdata.py file. - self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB - - for i in range(8): - h.update(zeros) - - # This test vector is from PyCrypto's old testdata.py file. - self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB - -def get_tests(config={}): - # Test vectors from FIPS PUB 180-2 - # This is a list of (expected_result, input[, description]) tuples. 
- test_data = [ - # FIPS PUB 180-2, B.1 - "One-Block Message" - ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad', - 'abc'), - - # FIPS PUB 180-2, B.2 - "Multi-Block Message" - ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), - - # FIPS PUB 180-2, B.3 - "Long Message" - ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0', - 'a' * 10**6, - '"a" * 10**6'), - - # Test for an old PyCryptodome bug. - ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103', - 'This message is precisely 55 bytes long, to test a bug.', - 'Length = 55 (mod 64)'), - - # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''), - - ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8', - 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), - ] - - from Cryptodome.Hash import SHA256 - from .common import make_hash_tests - tests = make_hash_tests(SHA256, "SHA256", test_data, - digest_size=32, - oid="2.16.840.1.101.3.4.2.1") - - if config.get('slow_tests'): - tests += [LargeSHA256Test()] - - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA384.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA384.py deleted file mode 100644 index 5233d13..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA384.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA.py: Self-test for the SHA-384 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA384""" - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - - # RFC 4634: Section Page 8.4, "Test 1" - ('cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7', 'abc'), - - # RFC 4634: Section Page 8.4, "Test 2.2" - ('09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), - - # RFC 4634: Section Page 8.4, "Test 3" - ('9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985', 'a' * 10**6, "'a' * 10**6"), - - # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', ''), - - # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('71e8383a4cea32d6fd6877495db2ee353542f46fa44bc23100bca48f3366b84e809f0708e81041f427c6d5219a286677', - 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), - -] - -def get_tests(config={}): - from Cryptodome.Hash import SHA384 - from .common import make_hash_tests - return make_hash_tests(SHA384, "SHA384", test_data, - digest_size=48, - oid='2.16.840.1.101.3.4.2.2') - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_224.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_224.py deleted file mode 100644 index 3141880..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_224.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA3_224.py: Self-test for the SHA-3/224 hash function -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA3_224""" - -import unittest -from binascii import hexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Hash import SHA3_224 as SHA3 -from Cryptodome.Util.py3compat import b - - -class APITest(unittest.TestCase): - - def test_update_after_digest(self): - msg=b("rrrrttt") - - # Normally, update() cannot be done after digest() - h = SHA3.new(data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = SHA3.new(data=msg).digest() - - # With the proper flag, it is allowed - h = SHA3.new(data=msg[:4], update_after_digest=True) - self.assertEqual(h.digest(), dig1) - # ... 
and the subsequent digest applies to the entire message - # up to that point - h.update(msg[4:]) - self.assertEqual(h.digest(), dig2) - - -def get_tests(config={}): - from .common import make_hash_tests - - tests = [] - - test_vectors = load_test_vectors(("Hash", "SHA3"), - "ShortMsgKAT_SHA3-224.txt", - "KAT SHA-3 224", - { "len" : lambda x: int(x) } ) or [] - - test_data = [] - for tv in test_vectors: - if tv.len == 0: - tv.msg = b("") - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests += make_hash_tests(SHA3, "SHA3_224", test_data, - digest_size=SHA3.digest_size, - oid="2.16.840.1.101.3.4.2.7") - tests += list_test_cases(APITest) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_256.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_256.py deleted file mode 100644 index 9dee551..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_256.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA3_256.py: Self-test for the SHA-3/256 hash function -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA3_256""" - -import unittest -from binascii import hexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Hash import SHA3_256 as SHA3 -from Cryptodome.Util.py3compat import b - - -class APITest(unittest.TestCase): - - def test_update_after_digest(self): - msg=b("rrrrttt") - - # Normally, update() cannot be done after digest() - h = SHA3.new(data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = SHA3.new(data=msg).digest() - - # With the proper flag, it is allowed - h = SHA3.new(data=msg[:4], update_after_digest=True) - self.assertEqual(h.digest(), dig1) - # ... 
and the subsequent digest applies to the entire message - # up to that point - h.update(msg[4:]) - self.assertEqual(h.digest(), dig2) - - -def get_tests(config={}): - from .common import make_hash_tests - - tests = [] - - test_vectors = load_test_vectors(("Hash", "SHA3"), - "ShortMsgKAT_SHA3-256.txt", - "KAT SHA-3 256", - { "len" : lambda x: int(x) } ) or [] - - test_data = [] - for tv in test_vectors: - if tv.len == 0: - tv.msg = b("") - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - - tests += make_hash_tests(SHA3, "SHA3_256", test_data, - digest_size=SHA3.digest_size, - oid="2.16.840.1.101.3.4.2.8") - tests += list_test_cases(APITest) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_384.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_384.py deleted file mode 100644 index c5030b5..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_384.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA3_384.py: Self-test for the SHA-3/384 hash function -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA3_384""" - -import unittest -from binascii import hexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Hash import SHA3_384 as SHA3 -from Cryptodome.Util.py3compat import b - - -class APITest(unittest.TestCase): - - def test_update_after_digest(self): - msg=b("rrrrttt") - - # Normally, update() cannot be done after digest() - h = SHA3.new(data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = SHA3.new(data=msg).digest() - - # With the proper flag, it is allowed - h = SHA3.new(data=msg[:4], update_after_digest=True) - self.assertEqual(h.digest(), dig1) - # ... 
and the subsequent digest applies to the entire message - # up to that point - h.update(msg[4:]) - self.assertEqual(h.digest(), dig2) - - -def get_tests(config={}): - from .common import make_hash_tests - - tests = [] - - test_vectors = load_test_vectors(("Hash", "SHA3"), - "ShortMsgKAT_SHA3-384.txt", - "KAT SHA-3 384", - { "len" : lambda x: int(x) } ) or [] - - test_data = [] - for tv in test_vectors: - if tv.len == 0: - tv.msg = b("") - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests += make_hash_tests(SHA3, "SHA3_384", test_data, - digest_size=SHA3.digest_size, - oid="2.16.840.1.101.3.4.2.9") - tests += list_test_cases(APITest) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_512.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_512.py deleted file mode 100644 index b7a57f8..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA3_512.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA3_512.py: Self-test for the SHA-3/512 hash function -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA3_512""" - -import unittest -from binascii import hexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Hash import SHA3_512 as SHA3 -from Cryptodome.Util.py3compat import b - - -class APITest(unittest.TestCase): - - def test_update_after_digest(self): - msg=b("rrrrttt") - - # Normally, update() cannot be done after digest() - h = SHA3.new(data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = SHA3.new(data=msg).digest() - - # With the proper flag, it is allowed - h = SHA3.new(data=msg[:4], update_after_digest=True) - self.assertEqual(h.digest(), dig1) - # ... 
and the subsequent digest applies to the entire message - # up to that point - h.update(msg[4:]) - self.assertEqual(h.digest(), dig2) - - -def get_tests(config={}): - from .common import make_hash_tests - - tests = [] - - test_vectors = load_test_vectors(("Hash", "SHA3"), - "ShortMsgKAT_SHA3-512.txt", - "KAT SHA-3 512", - { "len" : lambda x: int(x) } ) or [] - - test_data = [] - for tv in test_vectors: - if tv.len == 0: - tv.msg = b("") - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests += make_hash_tests(SHA3, "SHA3_512", test_data, - digest_size=SHA3.digest_size, - oid="2.16.840.1.101.3.4.2.10") - tests += list_test_cases(APITest) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA512.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA512.py deleted file mode 100644 index e6c74b3..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHA512.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHA512""" - -from binascii import hexlify - -from Cryptodome.Hash import SHA512 -from .common import make_hash_tests -from Cryptodome.SelfTest.loader import load_test_vectors - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
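The SHA-512 tests below also cover the truncated SHA-512/224 and SHA-512/256 variants, which the deleted code selects through a truncate parameter (passed via extra_params). A short usage sketch of that API, under the same Cryptodome import assumption:

    from Cryptodome.Hash import SHA512

    # Plain SHA-512 of the RFC 4634 "abc" vector quoted below
    assert SHA512.new(data=b'abc').hexdigest().startswith('ddaf35a193617aba')

    # Truncated variants: same algorithm, shorter output
    h224 = SHA512.new(data=b'abc', truncate='224')
    h256 = SHA512.new(data=b'abc', truncate='256')
    assert h224.digest_size == 28 and h256.digest_size == 32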
-test_data_512_other = [ - - # RFC 4634: Section Page 8.4, "Test 1" - ('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'), - - # RFC 4634: Section Page 8.4, "Test 2.1" - ('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), - - # RFC 4634: Section Page 8.4, "Test 3" - ('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"), - - # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''), - - ('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), -] - - -def get_tests_SHA512(): - - test_vectors = load_test_vectors(("Hash", "SHA2"), - "SHA512ShortMsg.rsp", - "KAT SHA-512", - {"len": lambda x: int(x)}) or [] - - test_data = test_data_512_other[:] - for tv in test_vectors: - try: - if tv.startswith('['): - continue - except AttributeError: - pass - if tv.len == 0: - tv.msg = b"" - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests = make_hash_tests(SHA512, "SHA512", test_data, - digest_size=64, - oid="2.16.840.1.101.3.4.2.3") - return tests - - -def get_tests_SHA512_224(): - - test_vectors = load_test_vectors(("Hash", "SHA2"), - "SHA512_224ShortMsg.rsp", - "KAT SHA-512/224", - {"len": lambda x: int(x)}) or [] - - test_data = [] - for tv in test_vectors: - try: - if tv.startswith('['): - continue - except AttributeError: - pass - if tv.len == 0: - tv.msg = b"" - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests = make_hash_tests(SHA512, "SHA512/224", test_data, - digest_size=28, - oid="2.16.840.1.101.3.4.2.5", - extra_params={ "truncate" : "224" }) - return tests - - -def get_tests_SHA512_256(): - - test_vectors = load_test_vectors(("Hash", "SHA2"), - "SHA512_256ShortMsg.rsp", - "KAT SHA-512/256", - {"len": lambda x: int(x)}) or [] - - test_data = [] - for tv in test_vectors: - try: - if tv.startswith('['): - continue - except AttributeError: - pass - if tv.len == 0: - tv.msg = b"" - test_data.append((hexlify(tv.md), tv.msg, tv.desc)) - - tests = make_hash_tests(SHA512, "SHA512/256", test_data, - digest_size=32, - oid="2.16.840.1.101.3.4.2.6", - extra_params={ "truncate" : "256" }) - return tests - - -def get_tests(config={}): - - tests = [] - tests += get_tests_SHA512() - tests += get_tests_SHA512_224() - tests += get_tests_SHA512_256() - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHAKE.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHAKE.py deleted file mode 100644 index 07965f6..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_SHAKE.py +++ /dev/null @@ -1,151 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. 
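The SHAKE self-tests that follow exercise the extendable-output (XOF) interface: read() takes the place of digest() and may be called repeatedly to pull more of the output stream. A minimal sketch:

    from Cryptodome.Hash import SHAKE256

    xof = SHAKE256.new(data=b'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern')
    first = xof.read(32)    # first 32 bytes of the output stream
    second = xof.read(32)   # next 32 bytes; the stream continues, so this differs
    assert first != second and len(first) == len(second) == 32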
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-test suite for Cryptodome.Hash.SHAKE128 and SHAKE256""" - -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import SHAKE128, SHAKE256 -from Cryptodome.Util.py3compat import b, bchr, bord, tobytes - -class SHAKETest(unittest.TestCase): - - def test_new_positive(self): - - xof1 = self.shake.new() - xof2 = self.shake.new(data=b("90")) - xof3 = self.shake.new().update(b("90")) - - self.assertNotEqual(xof1.read(10), xof2.read(10)) - xof3.read(10) - self.assertEqual(xof2.read(10), xof3.read(10)) - - def test_update(self): - pieces = [bchr(10) * 200, bchr(20) * 300] - h = self.shake.new() - h.update(pieces[0]).update(pieces[1]) - digest = h.read(10) - h = self.shake.new() - h.update(pieces[0] + pieces[1]) - self.assertEqual(h.read(10), digest) - - def test_update_negative(self): - h = self.shake.new() - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = self.shake.new() - digest = h.read(90) - - # read returns a byte string of the right length - self.assertTrue(isinstance(digest, type(b("digest")))) - self.assertEqual(len(digest), 90) - - def test_update_after_read(self): - mac = self.shake.new() - mac.update(b("rrrr")) - mac.read(90) - self.assertRaises(TypeError, mac.update, b("ttt")) - - def test_copy(self): - mac = self.shake.new() - mac.update(b("rrrr")) - mac2 = mac.copy() - x1 = mac.read(90) - x2 = mac2.read(90) - self.assertEqual(x1, x2) - - -class SHAKE128Test(SHAKETest): - shake = SHAKE128 - - -class SHAKE256Test(SHAKETest): - shake = SHAKE256 - - -class SHAKEVectors(unittest.TestCase): - pass - - -test_vectors_128 = load_test_vectors(("Hash", "SHA3"), - "ShortMsgKAT_SHAKE128.txt", - "Short Messages KAT SHAKE128", - { "len" : lambda x: int(x) } ) or [] - -for idx, tv in enumerate(test_vectors_128): - if tv.len == 0: - data = b("") - else: - data = tobytes(tv.msg) - - def new_test(self, data=data, result=tv.md): - hobj = SHAKE128.new(data=data) - digest = hobj.read(len(result)) - self.assertEqual(digest, result) - - setattr(SHAKEVectors, 
"test_128_%d" % idx, new_test) - - -test_vectors_256 = load_test_vectors(("Hash", "SHA3"), - "ShortMsgKAT_SHAKE256.txt", - "Short Messages KAT SHAKE256", - { "len" : lambda x: int(x) } ) or [] - -for idx, tv in enumerate(test_vectors_256): - if tv.len == 0: - data = b("") - else: - data = tobytes(tv.msg) - - def new_test(self, data=data, result=tv.md): - hobj = SHAKE256.new(data=data) - digest = hobj.read(len(result)) - self.assertEqual(digest, result) - - setattr(SHAKEVectors, "test_256_%d" % idx, new_test) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(SHAKE128Test) - tests += list_test_cases(SHAKE256Test) - tests += list_test_cases(SHAKEVectors) - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_TupleHash.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_TupleHash.py deleted file mode 100644 index 2f93d7b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_TupleHash.py +++ /dev/null @@ -1,302 +0,0 @@ -import unittest -from binascii import unhexlify, hexlify - -from Cryptodome.Util.py3compat import tobytes -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import TupleHash128, TupleHash256 - - -class TupleHashTest(unittest.TestCase): - - def new(self, *args, **kwargs): - return self.TupleHash.new(*args, **kwargs) - - def test_new_positive(self): - - h = self.new() - for new_func in self.TupleHash.new, h.new: - - for dbits in range(64, 1024 + 1, 8): - hobj = new_func(digest_bits=dbits) - self.assertEqual(hobj.digest_size * 8, dbits) - - for dbytes in range(8, 128 + 1): - hobj = new_func(digest_bytes=dbytes) - self.assertEqual(hobj.digest_size, dbytes) - - hobj = h.new() - self.assertEqual(hobj.digest_size, self.default_bytes) - - def test_new_negative(self): - - h = self.new() - for new_func in self.TupleHash.new, h.new: - self.assertRaises(TypeError, new_func, - digest_bytes=self.minimum_bytes, - digest_bits=self.minimum_bits) - self.assertRaises(ValueError, new_func, digest_bytes=0) - self.assertRaises(ValueError, new_func, - digest_bits=self.minimum_bits + 7) - self.assertRaises(ValueError, new_func, - digest_bits=self.minimum_bits - 8) - self.assertRaises(ValueError, new_func, - digest_bits=self.minimum_bytes - 1) - - def test_default_digest_size(self): - digest = self.new().digest() - self.assertEqual(len(digest), self.default_bytes) - - def test_update(self): - h = self.new() - h.update(b'') - h.digest() - - h = self.new() - h.update(b'') - h.update(b'STRING1') - h.update(b'STRING2') - mac1 = h.digest() - - h = self.new() - h.update(b'STRING1') - h.update(b'STRING2') - mac2 = h.digest() - self.assertNotEqual(mac1, mac2) - - h = self.new() - h.update(b'STRING1', b'STRING2') - self.assertEqual(mac2, h.digest()) - - h = self.new() - t = b'STRING1', b'STRING2' - h.update(*t) - self.assertEqual(mac2, h.digest()) - - def test_update_negative(self): - h = self.new() - self.assertRaises(TypeError, h.update, u"string") - self.assertRaises(TypeError, h.update, None) - self.assertRaises(TypeError, h.update, (b'STRING1', b'STRING2')) - - def test_digest(self): - h = self.new() - digest = h.digest() - - # hexdigest does not change the state - self.assertEqual(h.digest(), digest) - # digest returns a byte string - self.assertTrue(isinstance(digest, type(b"digest"))) - - def test_update_after_digest(self): - msg = b"rrrrttt" - - # Normally, update() 
cannot be done after digest() - h = self.new() - h.update(msg) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, dig1) - - def test_hex_digest(self): - mac = self.new() - digest = mac.digest() - hexdigest = mac.hexdigest() - - # hexdigest is equivalent to digest - self.assertEqual(hexlify(digest), tobytes(hexdigest)) - # hexdigest does not change the state - self.assertEqual(mac.hexdigest(), hexdigest) - # hexdigest returns a string - self.assertTrue(isinstance(hexdigest, type("digest"))) - - def test_bytearray(self): - - data = b"\x00\x01\x02" - - # Data can be a bytearray (during operation) - data_ba = bytearray(data) - - h1 = self.new() - h2 = self.new() - h1.update(data) - h2.update(data_ba) - data_ba[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - def test_memoryview(self): - - data = b"\x00\x01\x02" - - def get_mv_ro(data): - return memoryview(data) - - def get_mv_rw(data): - return memoryview(bytearray(data)) - - for get_mv in (get_mv_ro, get_mv_rw): - - # Data can be a memoryview (during operation) - data_mv = get_mv(data) - - h1 = self.new() - h2 = self.new() - h1.update(data) - h2.update(data_mv) - if not data_mv.readonly: - data_mv[:1] = b'\xFF' - - self.assertEqual(h1.digest(), h2.digest()) - - -class TupleHash128Test(TupleHashTest): - - TupleHash = TupleHash128 - - minimum_bytes = 8 - default_bytes = 64 - - minimum_bits = 64 - default_bits = 512 - - -class TupleHash256Test(TupleHashTest): - - TupleHash = TupleHash256 - - minimum_bytes = 8 - default_bytes = 64 - - minimum_bits = 64 - default_bits = 512 - - -class NISTExampleTestVectors(unittest.TestCase): - - # http://csrc.nist.gov/groups/ST/toolkit/documents/Examples/TupleHash_samples.pdf - test_data = [ - ( - ( - "00 01 02", - "10 11 12 13 14 15", - ), - "", - "C5 D8 78 6C 1A FB 9B 82 11 1A B3 4B 65 B2 C0 04" - "8F A6 4E 6D 48 E2 63 26 4C E1 70 7D 3F FC 8E D1", - "KMAC128 Sample #1 NIST", - TupleHash128 - ), - ( - ( - "00 01 02", - "10 11 12 13 14 15", - ), - "My Tuple App", - "75 CD B2 0F F4 DB 11 54 E8 41 D7 58 E2 41 60 C5" - "4B AE 86 EB 8C 13 E7 F5 F4 0E B3 55 88 E9 6D FB", - "KMAC128 Sample #2 NIST", - TupleHash128 - ), - ( - ( - "00 01 02", - "10 11 12 13 14 15", - "20 21 22 23 24 25 26 27 28", - ), - "My Tuple App", - "E6 0F 20 2C 89 A2 63 1E DA 8D 4C 58 8C A5 FD 07" - "F3 9E 51 51 99 8D EC CF 97 3A DB 38 04 BB 6E 84", - "KMAC128 Sample #3 NIST", - TupleHash128 - ), - ( - ( - "00 01 02", - "10 11 12 13 14 15", - ), - "", - "CF B7 05 8C AC A5 E6 68 F8 1A 12 A2 0A 21 95 CE" - "97 A9 25 F1 DB A3 E7 44 9A 56 F8 22 01 EC 60 73" - "11 AC 26 96 B1 AB 5E A2 35 2D F1 42 3B DE 7B D4" - "BB 78 C9 AE D1 A8 53 C7 86 72 F9 EB 23 BB E1 94", - "KMAC256 Sample #4 NIST", - TupleHash256 - ), - ( - ( - "00 01 02", - "10 11 12 13 14 15", - ), - "My Tuple App", - "14 7C 21 91 D5 ED 7E FD 98 DB D9 6D 7A B5 A1 16" - "92 57 6F 5F E2 A5 06 5F 3E 33 DE 6B BA 9F 3A A1" - "C4 E9 A0 68 A2 89 C6 1C 95 AA B3 0A EE 1E 41 0B" - "0B 60 7D E3 62 0E 24 A4 E3 BF 98 52 A1 D4 36 7E", - "KMAC256 Sample #5 NIST", - TupleHash256 - ), - ( - ( - "00 01 02", - "10 11 12 13 14 15", - "20 21 22 23 24 25 26 27 28", - ), - "My Tuple App", - "45 00 0B E6 3F 9B 6B FD 89 F5 47 17 67 0F 69 A9" - "BC 76 35 91 A4 F0 5C 50 D6 88 91 A7 44 BC C6 E7" - "D6 D5 B5 E8 2C 01 8D A9 99 ED 35 B0 BB 49 C9 67" - "8E 52 6A BD 8E 85 C1 3E D2 54 02 1D B9 E7 90 CE", - "KMAC256 Sample #6 NIST", - TupleHash256 - ), - - - - ] - - def setUp(self): - td = [] - for tv_in in self.test_data: - tv_out = [None] * len(tv_in) - - tv_out[0] = [] - for string in tv_in[0]: - 
tv_out[0].append(unhexlify(string.replace(" ", ""))) - - tv_out[1] = tobytes(tv_in[1]) # Custom - tv_out[2] = unhexlify(tv_in[2].replace(" ", "")) - tv_out[3] = tv_in[3] - tv_out[4] = tv_in[4] - td.append(tv_out) - self.test_data = td - - def runTest(self): - - for data, custom, digest, text, module in self.test_data: - hd1 = module.new(custom=custom, digest_bytes=len(digest)) - hd2 = module.new(custom=custom, digest_bytes=len(digest)) - - # Call update() for each element - for string in data: - hd1.update(string) - - # One single update for all elements - hd2.update(*data) - - self.assertEqual(hd1.digest(), digest, msg=text) - self.assertEqual(hd2.digest(), digest, msg=text) - -def get_tests(config={}): - tests = [] - - tests += list_test_cases(TupleHash128Test) - tests += list_test_cases(TupleHash256Test) - tests.append(NISTExampleTestVectors()) - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_TurboSHAKE.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_TurboSHAKE.py deleted file mode 100644 index 7c13d1e..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_TurboSHAKE.py +++ /dev/null @@ -1,468 +0,0 @@ -"""Self-test suite for Cryptodome.Hash.TurboSHAKE128 and TurboSHAKE256""" - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import TurboSHAKE128, TurboSHAKE256 -from Cryptodome.Util.py3compat import bchr - - -class TurboSHAKETest(unittest.TestCase): - - def test_new_positive(self): - - xof1 = self.TurboSHAKE.new() - xof1.update(b'90') - - xof2 = self.TurboSHAKE.new(domain=0x1F) - xof2.update(b'90') - - xof3 = self.TurboSHAKE.new(data=b'90') - - out1 = xof1.read(128) - out2 = xof2.read(128) - out3 = xof3.read(128) - - self.assertEqual(out1, out2) - self.assertEqual(out1, out3) - - def test_new_domain(self): - xof1 = self.TurboSHAKE.new(domain=0x1D) - xof2 = self.TurboSHAKE.new(domain=0x20) - self.assertNotEqual(xof1.read(128), xof2.read(128)) - - def test_update(self): - pieces = [bchr(10) * 200, bchr(20) * 300] - - xof1 = self.TurboSHAKE.new() - xof1.update(pieces[0]).update(pieces[1]) - digest1 = xof1.read(10) - - xof2 = self.TurboSHAKE.new() - xof2.update(pieces[0] + pieces[1]) - digest2 = xof2.read(10) - - self.assertEqual(digest1, digest2) - - def test_update_negative(self): - xof1 = self.TurboSHAKE.new() - self.assertRaises(TypeError, xof1.update, u"string") - - def test_read(self): - xof1 = self.TurboSHAKE.new() - digest = xof1.read(90) - - # read returns a byte string of the right length - self.assertTrue(isinstance(digest, bytes)) - self.assertEqual(len(digest), 90) - - def test_update_after_read(self): - xof1 = self.TurboSHAKE.new() - xof1.update(b"rrrr") - xof1.read(90) - self.assertRaises(TypeError, xof1.update, b"ttt") - - def test_new(self): - xof1 = self.TurboSHAKE.new(domain=0x07) - xof1.update(b'90') - digest1 = xof1.read(100) - - xof2 = xof1.new() - xof2.update(b'90') - digest2 = xof2.read(100) - - self.assertEqual(digest1, digest2) - - self.assertRaises(TypeError, xof1.new, domain=0x07) - - -class TurboSHAKE128Test(TurboSHAKETest): - TurboSHAKE = TurboSHAKE128 - - -class TurboSHAKE256Test(TurboSHAKETest): - TurboSHAKE = TurboSHAKE256 - - -def txt2bin(txt): - clean = txt.replace(" ", "").replace("\n", "").replace("\r", "") - return unhexlify(clean) - - -def ptn(n): - res = bytearray(n) - pattern = 
b"".join([bchr(x) for x in range(0, 0xFB)]) - for base in range(0, n - 0xFB, 0xFB): - res[base:base + 0xFB] = pattern - remain = n % 0xFB - if remain: - base = (n // 0xFB) * 0xFB - res[base:] = pattern[:remain] - assert len(res) == n - return res - - -def chunked(source, size): - for i in range(0, len(source), size): - yield source[i:i+size] - - -class TurboSHAKE128TV(unittest.TestCase): - - def test_zero_1(self): - tv = """1E 41 5F 1C 59 83 AF F2 16 92 17 27 7D 17 BB 53 - 8C D9 45 A3 97 DD EC 54 1F 1C E4 1A F2 C1 B7 4C""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new().read(32) - self.assertEqual(res, btv) - - def test_zero_2(self): - tv = """1E 41 5F 1C 59 83 AF F2 16 92 17 27 7D 17 BB 53 - 8C D9 45 A3 97 DD EC 54 1F 1C E4 1A F2 C1 B7 4C - 3E 8C CA E2 A4 DA E5 6C 84 A0 4C 23 85 C0 3C 15 - E8 19 3B DF 58 73 73 63 32 16 91 C0 54 62 C8 DF""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new().read(64) - self.assertEqual(res, btv) - - def test_zero_3(self): - tv = """A3 B9 B0 38 59 00 CE 76 1F 22 AE D5 48 E7 54 DA - 10 A5 24 2D 62 E8 C6 58 E3 F3 A9 23 A7 55 56 07""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new().read(10032)[-32:] - self.assertEqual(res, btv) - - def test_ptn_1(self): - tv = """55 CE DD 6F 60 AF 7B B2 9A 40 42 AE 83 2E F3 F5 - 8D B7 29 9F 89 3E BB 92 47 24 7D 85 69 58 DA A9""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=ptn(1)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17(self): - tv = """9C 97 D0 36 A3 BA C8 19 DB 70 ED E0 CA 55 4E C6 - E4 C2 A1 A4 FF BF D9 EC 26 9C A6 A1 11 16 12 33""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=ptn(17)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17_2(self): - tv = """96 C7 7C 27 9E 01 26 F7 FC 07 C9 B0 7F 5C DA E1 - E0 BE 60 BD BE 10 62 00 40 E7 5D 72 23 A6 24 D2""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=ptn(17**2)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17_3(self): - tv = """D4 97 6E B5 6B CF 11 85 20 58 2B 70 9F 73 E1 D6 - 85 3E 00 1F DA F8 0E 1B 13 E0 D0 59 9D 5F B3 72""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=ptn(17**3)).read(32) - self.assertEqual(res, btv) - - def test_ptn_17_4(self): - tv = """DA 67 C7 03 9E 98 BF 53 0C F7 A3 78 30 C6 66 4E - 14 CB AB 7F 54 0F 58 40 3B 1B 82 95 13 18 EE 5C""" - - btv = txt2bin(tv) - data = ptn(17**4) - - # All at once - res = TurboSHAKE128.new(data=data).read(32) - self.assertEqual(res, btv) - - # Byte by byte - xof = TurboSHAKE128.new() - for x in data: - xof.update(bchr(x)) - res = xof.read(32) - self.assertEqual(res, btv) - - # Chunks of various prime sizes - for chunk_size in (13, 17, 19, 23, 31): - xof = TurboSHAKE128.new() - for x in chunked(data, chunk_size): - xof.update(x) - res = xof.read(32) - self.assertEqual(res, btv) - - def test_ptn_17_5(self): - tv = """B9 7A 90 6F BF 83 EF 7C 81 25 17 AB F3 B2 D0 AE - A0 C4 F6 03 18 CE 11 CF 10 39 25 12 7F 59 EE CD""" - - btv = txt2bin(tv) - data = ptn(17**5) - - # All at once - res = TurboSHAKE128.new(data=data).read(32) - self.assertEqual(res, btv) - - # Chunks - xof = TurboSHAKE128.new() - for chunk in chunked(data, 8192): - xof.update(chunk) - res = xof.read(32) - self.assertEqual(res, btv) - - def test_ptn_17_6(self): - tv = """35 CD 49 4A DE DE D2 F2 52 39 AF 09 A7 B8 EF 0C - 4D 1C A4 FE 2D 1A C3 70 FA 63 21 6F E7 B4 C2 B1""" - - btv = txt2bin(tv) - data = ptn(17**6) - - res = TurboSHAKE128.new(data=data).read(32) - self.assertEqual(res, btv) - - def test_ffffff_d01(self): - tv = """BF 32 3F 94 04 94 E8 8E E1 C5 40 FE 66 0B E8 A0 - C9 3F 43 D1 5E C0 06 99 
84 62 FA 99 4E ED 5D AB""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=b"\xff\xff\xff", domain=0x01).read(32) - self.assertEqual(res, btv) - - def test_ff_d06(self): - tv = """8E C9 C6 64 65 ED 0D 4A 6C 35 D1 35 06 71 8D 68 - 7A 25 CB 05 C7 4C CA 1E 42 50 1A BD 83 87 4A 67""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=b'\xFF', domain=0x06).read(32) - self.assertEqual(res, btv) - - def test_ffffff_d07(self): - tv = """B6 58 57 60 01 CA D9 B1 E5 F3 99 A9 F7 77 23 BB - A0 54 58 04 2D 68 20 6F 72 52 68 2D BA 36 63 ED""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=b'\xFF' * 3, domain=0x07).read(32) - self.assertEqual(res, btv) - - def test_ffffffffffff_d0b(self): - tv = """8D EE AA 1A EC 47 CC EE 56 9F 65 9C 21 DF A8 E1 - 12 DB 3C EE 37 B1 81 78 B2 AC D8 05 B7 99 CC 37""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=b'\xFF' * 7, domain=0x0B).read(32) - self.assertEqual(res, btv) - - def test_ff_d30(self): - tv = """55 31 22 E2 13 5E 36 3C 32 92 BE D2 C6 42 1F A2 - 32 BA B0 3D AA 07 C7 D6 63 66 03 28 65 06 32 5B""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=b'\xFF', domain=0x30).read(32) - self.assertEqual(res, btv) - - def test_ffffff_d7f(self): - tv = """16 27 4C C6 56 D4 4C EF D4 22 39 5D 0F 90 53 BD - A6 D2 8E 12 2A BA 15 C7 65 E5 AD 0E 6E AF 26 F9""" - - btv = txt2bin(tv) - res = TurboSHAKE128.new(data=b'\xFF' * 3, domain=0x7F).read(32) - self.assertEqual(res, btv) - - -class TurboSHAKE256TV(unittest.TestCase): - - def test_zero_1(self): - tv = """36 7A 32 9D AF EA 87 1C 78 02 EC 67 F9 05 AE 13 - C5 76 95 DC 2C 66 63 C6 10 35 F5 9A 18 F8 E7 DB - 11 ED C0 E1 2E 91 EA 60 EB 6B 32 DF 06 DD 7F 00 - 2F BA FA BB 6E 13 EC 1C C2 0D 99 55 47 60 0D B0""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new().read(64) - self.assertEqual(res, btv) - - def test_zero_2(self): - tv = """AB EF A1 16 30 C6 61 26 92 49 74 26 85 EC 08 2F - 20 72 65 DC CF 2F 43 53 4E 9C 61 BA 0C 9D 1D 75""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new().read(10032)[-32:] - self.assertEqual(res, btv) - - def test_ptn_1(self): - tv = """3E 17 12 F9 28 F8 EA F1 05 46 32 B2 AA 0A 24 6E - D8 B0 C3 78 72 8F 60 BC 97 04 10 15 5C 28 82 0E - 90 CC 90 D8 A3 00 6A A2 37 2C 5C 5E A1 76 B0 68 - 2B F2 2B AE 74 67 AC 94 F7 4D 43 D3 9B 04 82 E2""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=ptn(1)).read(64) - self.assertEqual(res, btv) - - def test_ptn_17(self): - tv = """B3 BA B0 30 0E 6A 19 1F BE 61 37 93 98 35 92 35 - 78 79 4E A5 48 43 F5 01 10 90 FA 2F 37 80 A9 E5 - CB 22 C5 9D 78 B4 0A 0F BF F9 E6 72 C0 FB E0 97 - 0B D2 C8 45 09 1C 60 44 D6 87 05 4D A5 D8 E9 C7""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=ptn(17)).read(64) - self.assertEqual(res, btv) - - def test_ptn_17_2(self): - tv = """66 B8 10 DB 8E 90 78 04 24 C0 84 73 72 FD C9 57 - 10 88 2F DE 31 C6 DF 75 BE B9 D4 CD 93 05 CF CA - E3 5E 7B 83 E8 B7 E6 EB 4B 78 60 58 80 11 63 16 - FE 2C 07 8A 09 B9 4A D7 B8 21 3C 0A 73 8B 65 C0""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=ptn(17**2)).read(64) - self.assertEqual(res, btv) - - def test_ptn_17_3(self): - tv = """C7 4E BC 91 9A 5B 3B 0D D1 22 81 85 BA 02 D2 9E - F4 42 D6 9D 3D 42 76 A9 3E FE 0B F9 A1 6A 7D C0 - CD 4E AB AD AB 8C D7 A5 ED D9 66 95 F5 D3 60 AB - E0 9E 2C 65 11 A3 EC 39 7D A3 B7 6B 9E 16 74 FB""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=ptn(17**3)).read(64) - self.assertEqual(res, btv) - - def test_ptn_17_4(self): - tv = """02 CC 3A 88 97 E6 F4 F6 CC B6 FD 46 63 1B 1F 52 - 07 B6 6C 6D E9 C7 B5 5B 2D 1A 23 13 4A 17 0A FD - AC 23 4E AB A9 
A7 7C FF 88 C1 F0 20 B7 37 24 61 - 8C 56 87 B3 62 C4 30 B2 48 CD 38 64 7F 84 8A 1D""" - - btv = txt2bin(tv) - data = ptn(17**4) - - # All at once - res = TurboSHAKE256.new(data=data).read(64) - self.assertEqual(res, btv) - - # Byte by byte - xof = TurboSHAKE256.new() - for x in data: - xof.update(bchr(x)) - res = xof.read(64) - self.assertEqual(res, btv) - - # Chunks of various prime sizes - for chunk_size in (13, 17, 19, 23, 31): - xof = TurboSHAKE256.new() - for x in chunked(data, chunk_size): - xof.update(x) - res = xof.read(64) - self.assertEqual(res, btv) - - def test_ptn_17_5(self): - tv = """AD D5 3B 06 54 3E 58 4B 58 23 F6 26 99 6A EE 50 - FE 45 ED 15 F2 02 43 A7 16 54 85 AC B4 AA 76 B4 - FF DA 75 CE DF 6D 8C DC 95 C3 32 BD 56 F4 B9 86 - B5 8B B1 7D 17 78 BF C1 B1 A9 75 45 CD F4 EC 9F""" - - btv = txt2bin(tv) - data = ptn(17**5) - - # All at once - res = TurboSHAKE256.new(data=data).read(64) - self.assertEqual(res, btv) - - # Chunks - xof = TurboSHAKE256.new() - for chunk in chunked(data, 8192): - xof.update(chunk) - res = xof.read(64) - self.assertEqual(res, btv) - - def test_ptn_17_6(self): - tv = """9E 11 BC 59 C2 4E 73 99 3C 14 84 EC 66 35 8E F7 - 1D B7 4A EF D8 4E 12 3F 78 00 BA 9C 48 53 E0 2C - FE 70 1D 9E 6B B7 65 A3 04 F0 DC 34 A4 EE 3B A8 - 2C 41 0F 0D A7 0E 86 BF BD 90 EA 87 7C 2D 61 04""" - - btv = txt2bin(tv) - data = ptn(17**6) - - res = TurboSHAKE256.new(data=data).read(64) - self.assertEqual(res, btv) - - def test_ffffff_d01(self): - tv = """D2 1C 6F BB F5 87 FA 22 82 F2 9A EA 62 01 75 FB - 02 57 41 3A F7 8A 0B 1B 2A 87 41 9C E0 31 D9 33 - AE 7A 4D 38 33 27 A8 A1 76 41 A3 4F 8A 1D 10 03 - AD 7D A6 B7 2D BA 84 BB 62 FE F2 8F 62 F1 24 24""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=b"\xff\xff\xff", domain=0x01).read(64) - self.assertEqual(res, btv) - - def test_ff_d06(self): - tv = """73 8D 7B 4E 37 D1 8B 7F 22 AD 1B 53 13 E3 57 E3 - DD 7D 07 05 6A 26 A3 03 C4 33 FA 35 33 45 52 80 - F4 F5 A7 D4 F7 00 EF B4 37 FE 6D 28 14 05 E0 7B - E3 2A 0A 97 2E 22 E6 3A DC 1B 09 0D AE FE 00 4B""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=b'\xFF', domain=0x06).read(64) - self.assertEqual(res, btv) - - def test_ffffff_d07(self): - tv = """18 B3 B5 B7 06 1C 2E 67 C1 75 3A 00 E6 AD 7E D7 - BA 1C 90 6C F9 3E FB 70 92 EA F2 7F BE EB B7 55 - AE 6E 29 24 93 C1 10 E4 8D 26 00 28 49 2B 8E 09 - B5 50 06 12 B8 F2 57 89 85 DE D5 35 7D 00 EC 67""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=b'\xFF' * 3, domain=0x07).read(64) - self.assertEqual(res, btv) - - def test_ffffffffffff_d0b(self): - tv = """BB 36 76 49 51 EC 97 E9 D8 5F 7E E9 A6 7A 77 18 - FC 00 5C F4 25 56 BE 79 CE 12 C0 BD E5 0E 57 36 - D6 63 2B 0D 0D FB 20 2D 1B BB 8F FE 3D D7 4C B0 - 08 34 FA 75 6C B0 34 71 BA B1 3A 1E 2C 16 B3 C0""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=b'\xFF' * 7, domain=0x0B).read(64) - self.assertEqual(res, btv) - - def test_ff_d30(self): - tv = """F3 FE 12 87 3D 34 BC BB 2E 60 87 79 D6 B7 0E 7F - 86 BE C7 E9 0B F1 13 CB D4 FD D0 C4 E2 F4 62 5E - 14 8D D7 EE 1A 52 77 6C F7 7F 24 05 14 D9 CC FC - 3B 5D DA B8 EE 25 5E 39 EE 38 90 72 96 2C 11 1A""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=b'\xFF', domain=0x30).read(64) - self.assertEqual(res, btv) - - def test_ffffff_d7f(self): - tv = """AB E5 69 C1 F7 7E C3 40 F0 27 05 E7 D3 7C 9A B7 - E1 55 51 6E 4A 6A 15 00 21 D7 0B 6F AC 0B B4 0C - 06 9F 9A 98 28 A0 D5 75 CD 99 F9 BA E4 35 AB 1A - CF 7E D9 11 0B A9 7C E0 38 8D 07 4B AC 76 87 76""" - - btv = txt2bin(tv) - res = TurboSHAKE256.new(data=b'\xFF' * 3, 
domain=0x7F).read(64) - self.assertEqual(res, btv) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TurboSHAKE128Test) - tests += list_test_cases(TurboSHAKE256Test) - tests += list_test_cases(TurboSHAKE128TV) - tests += list_test_cases(TurboSHAKE256TV) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_cSHAKE.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_cSHAKE.py deleted file mode 100644 index 6797160..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_cSHAKE.py +++ /dev/null @@ -1,178 +0,0 @@ -# =================================================================== -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
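The TurboSHAKE vectors above (an API only present in recent pycryptodome releases) use the same read()-based XOF interface plus a domain-separation byte. For example, the empty-message case from test_zero_1 above can be reproduced like this:

    from Cryptodome.Hash import TurboSHAKE128

    xof = TurboSHAKE128.new()    # default domain byte is 0x1F, per test_new_positive above
    out = xof.read(32)
    assert out.hex() == ('1e415f1c5983aff2169217277d17bb53'
                         '8cd945a397ddec541f1ce41af2c1b74c')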
-# =================================================================== - -"""Self-test suite for Cryptodome.Hash.cSHAKE128 and cSHAKE256""" - -import unittest - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import cSHAKE128, cSHAKE256, SHAKE128, SHAKE256 -from Cryptodome.Util.py3compat import b, bchr, tobytes - - -class cSHAKETest(unittest.TestCase): - - def test_left_encode(self): - from Cryptodome.Hash.cSHAKE128 import _left_encode - self.assertEqual(_left_encode(0), b'\x01\x00') - self.assertEqual(_left_encode(1), b'\x01\x01') - self.assertEqual(_left_encode(256), b'\x02\x01\x00') - - def test_bytepad(self): - from Cryptodome.Hash.cSHAKE128 import _bytepad - self.assertEqual(_bytepad(b'', 4), b'\x01\x04\x00\x00') - self.assertEqual(_bytepad(b'A', 4), b'\x01\x04A\x00') - self.assertEqual(_bytepad(b'AA', 4), b'\x01\x04AA') - self.assertEqual(_bytepad(b'AAA', 4), b'\x01\x04AAA\x00\x00\x00') - self.assertEqual(_bytepad(b'AAAA', 4), b'\x01\x04AAAA\x00\x00') - self.assertEqual(_bytepad(b'AAAAA', 4), b'\x01\x04AAAAA\x00') - self.assertEqual(_bytepad(b'AAAAAA', 4), b'\x01\x04AAAAAA') - self.assertEqual(_bytepad(b'AAAAAAA', 4), b'\x01\x04AAAAAAA\x00\x00\x00') - - def test_new_positive(self): - - xof1 = self.cshake.new() - xof2 = self.cshake.new(data=b("90")) - xof3 = self.cshake.new().update(b("90")) - - self.assertNotEqual(xof1.read(10), xof2.read(10)) - xof3.read(10) - self.assertEqual(xof2.read(10), xof3.read(10)) - - xof1 = self.cshake.new() - ref = xof1.read(10) - xof2 = self.cshake.new(custom=b("")) - xof3 = self.cshake.new(custom=b("foo")) - - self.assertEqual(ref, xof2.read(10)) - self.assertNotEqual(ref, xof3.read(10)) - - xof1 = self.cshake.new(custom=b("foo")) - xof2 = self.cshake.new(custom=b("foo"), data=b("90")) - xof3 = self.cshake.new(custom=b("foo")).update(b("90")) - - self.assertNotEqual(xof1.read(10), xof2.read(10)) - xof3.read(10) - self.assertEqual(xof2.read(10), xof3.read(10)) - - def test_update(self): - pieces = [bchr(10) * 200, bchr(20) * 300] - h = self.cshake.new() - h.update(pieces[0]).update(pieces[1]) - digest = h.read(10) - h = self.cshake.new() - h.update(pieces[0] + pieces[1]) - self.assertEqual(h.read(10), digest) - - def test_update_negative(self): - h = self.cshake.new() - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = self.cshake.new() - digest = h.read(90) - - # read returns a byte string of the right length - self.assertTrue(isinstance(digest, type(b("digest")))) - self.assertEqual(len(digest), 90) - - def test_update_after_read(self): - mac = self.cshake.new() - mac.update(b("rrrr")) - mac.read(90) - self.assertRaises(TypeError, mac.update, b("ttt")) - - def test_shake(self): - # When no customization string is passed, results must match SHAKE - for digest_len in range(64): - xof1 = self.cshake.new(b'TEST') - xof2 = self.shake.new(b'TEST') - self.assertEqual(xof1.read(digest_len), xof2.read(digest_len)) - - -class cSHAKE128Test(cSHAKETest): - cshake = cSHAKE128 - shake = SHAKE128 - - -class cSHAKE256Test(cSHAKETest): - cshake = cSHAKE256 - shake = SHAKE256 - - -class cSHAKEVectors(unittest.TestCase): - pass - - -vector_files = [("ShortMsgSamples_cSHAKE128.txt", "Short Message Samples cSHAKE128", "128_cshake", cSHAKE128), - ("ShortMsgSamples_cSHAKE256.txt", "Short Message Samples cSHAKE256", "256_cshake", cSHAKE256), - ("CustomMsgSamples_cSHAKE128.txt", "Custom Message Samples cSHAKE128", "custom_128_cshake", cSHAKE128), - 
("CustomMsgSamples_cSHAKE256.txt", "Custom Message Samples cSHAKE256", "custom_256_cshake", cSHAKE256), - ] - -for file, descr, tag, test_class in vector_files: - - test_vectors = load_test_vectors(("Hash", "SHA3"), file, descr, - {"len": lambda x: int(x), - "nlen": lambda x: int(x), - "slen": lambda x: int(x)}) or [] - - for idx, tv in enumerate(test_vectors): - if getattr(tv, "len", 0) == 0: - data = b("") - else: - data = tobytes(tv.msg) - assert(tv.len == len(tv.msg)*8) - if getattr(tv, "nlen", 0) != 0: - raise ValueError("Unsupported cSHAKE test vector") - if getattr(tv, "slen", 0) == 0: - custom = b("") - else: - custom = tobytes(tv.s) - assert(tv.slen == len(tv.s)*8) - - def new_test(self, data=data, result=tv.md, custom=custom, test_class=test_class): - hobj = test_class.new(data=data, custom=custom) - digest = hobj.read(len(result)) - self.assertEqual(digest, result) - - setattr(cSHAKEVectors, "test_%s_%d" % (tag, idx), new_test) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(cSHAKE128Test) - tests += list_test_cases(cSHAKE256Test) - tests += list_test_cases(cSHAKEVectors) - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_keccak.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_keccak.py deleted file mode 100644 index dcc0d13..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Hash/test_keccak.py +++ /dev/null @@ -1,250 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Hash.keccak""" - -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.SelfTest.loader import load_test_vectors -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Hash import keccak -from Cryptodome.Util.py3compat import b, tobytes, bchr - -class KeccakTest(unittest.TestCase): - - def test_new_positive(self): - - for digest_bits in (224, 256, 384, 512): - hobj = keccak.new(digest_bits=digest_bits) - self.assertEqual(hobj.digest_size, digest_bits // 8) - - hobj2 = hobj.new() - self.assertEqual(hobj2.digest_size, digest_bits // 8) - - for digest_bytes in (28, 32, 48, 64): - hobj = keccak.new(digest_bytes=digest_bytes) - self.assertEqual(hobj.digest_size, digest_bytes) - - hobj2 = hobj.new() - self.assertEqual(hobj2.digest_size, digest_bytes) - - def test_new_positive2(self): - - digest1 = keccak.new(data=b("\x90"), digest_bytes=64).digest() - digest2 = keccak.new(digest_bytes=64).update(b("\x90")).digest() - self.assertEqual(digest1, digest2) - - def test_new_negative(self): - - # keccak.new needs digest size - self.assertRaises(TypeError, keccak.new) - - h = keccak.new(digest_bits=512) - - # Either bits or bytes can be specified - self.assertRaises(TypeError, keccak.new, - digest_bytes=64, - digest_bits=512) - - # Range - self.assertRaises(ValueError, keccak.new, digest_bytes=0) - self.assertRaises(ValueError, keccak.new, digest_bytes=1) - self.assertRaises(ValueError, keccak.new, digest_bytes=65) - self.assertRaises(ValueError, keccak.new, digest_bits=0) - self.assertRaises(ValueError, keccak.new, digest_bits=1) - self.assertRaises(ValueError, keccak.new, digest_bits=513) - - def test_update(self): - pieces = [bchr(10) * 200, bchr(20) * 300] - h = keccak.new(digest_bytes=64) - h.update(pieces[0]).update(pieces[1]) - digest = h.digest() - h = keccak.new(digest_bytes=64) - h.update(pieces[0] + pieces[1]) - self.assertEqual(h.digest(), digest) - - def test_update_negative(self): - h = keccak.new(digest_bytes=64) - self.assertRaises(TypeError, h.update, u"string") - - def test_digest(self): - h = keccak.new(digest_bytes=64) - digest = h.digest() - - # hexdigest does not change the state - self.assertEqual(h.digest(), digest) - # digest returns a byte string - self.assertTrue(isinstance(digest, type(b("digest")))) - - def test_hex_digest(self): - mac = keccak.new(digest_bits=512) - digest = mac.digest() - hexdigest = mac.hexdigest() - - # hexdigest is equivalent to digest - self.assertEqual(hexlify(digest), tobytes(hexdigest)) - # hexdigest does not change the state - self.assertEqual(mac.hexdigest(), hexdigest) - # hexdigest returns a string - self.assertTrue(isinstance(hexdigest, type("digest"))) - - def test_update_after_digest(self): - msg=b("rrrrttt") - - # Normally, update() cannot be done after digest() - h = keccak.new(digest_bits=512, data=msg[:4]) - dig1 = h.digest() - self.assertRaises(TypeError, h.update, msg[4:]) - dig2 = keccak.new(digest_bits=512, data=msg).digest() - - # With the proper flag, it is allowed - h = keccak.new(digest_bits=512, data=msg[:4], update_after_digest=True) - self.assertEqual(h.digest(), dig1) - # ... 
and the subsequent digest applies to the entire message - # up to that point - h.update(msg[4:]) - self.assertEqual(h.digest(), dig2) - - -class KeccakVectors(unittest.TestCase): - pass - - # TODO: add ExtremelyLong tests - - -test_vectors_224 = load_test_vectors(("Hash", "keccak"), - "ShortMsgKAT_224.txt", - "Short Messages KAT 224", - {"len": lambda x: int(x)}) or [] - -test_vectors_224 += load_test_vectors(("Hash", "keccak"), - "LongMsgKAT_224.txt", - "Long Messages KAT 224", - {"len": lambda x: int(x)}) or [] - -for idx, tv in enumerate(test_vectors_224): - if tv.len == 0: - data = b("") - else: - data = tobytes(tv.msg) - - def new_test(self, data=data, result=tv.md): - hobj = keccak.new(digest_bits=224, data=data) - self.assertEqual(hobj.digest(), result) - - setattr(KeccakVectors, "test_224_%d" % idx, new_test) - -# --- - -test_vectors_256 = load_test_vectors(("Hash", "keccak"), - "ShortMsgKAT_256.txt", - "Short Messages KAT 256", - { "len" : lambda x: int(x) } ) or [] - -test_vectors_256 += load_test_vectors(("Hash", "keccak"), - "LongMsgKAT_256.txt", - "Long Messages KAT 256", - { "len" : lambda x: int(x) } ) or [] - -for idx, tv in enumerate(test_vectors_256): - if tv.len == 0: - data = b("") - else: - data = tobytes(tv.msg) - - def new_test(self, data=data, result=tv.md): - hobj = keccak.new(digest_bits=256, data=data) - self.assertEqual(hobj.digest(), result) - - setattr(KeccakVectors, "test_256_%d" % idx, new_test) - - -# --- - -test_vectors_384 = load_test_vectors(("Hash", "keccak"), - "ShortMsgKAT_384.txt", - "Short Messages KAT 384", - {"len": lambda x: int(x)}) or [] - -test_vectors_384 += load_test_vectors(("Hash", "keccak"), - "LongMsgKAT_384.txt", - "Long Messages KAT 384", - {"len": lambda x: int(x)}) or [] - -for idx, tv in enumerate(test_vectors_384): - if tv.len == 0: - data = b("") - else: - data = tobytes(tv.msg) - - def new_test(self, data=data, result=tv.md): - hobj = keccak.new(digest_bits=384, data=data) - self.assertEqual(hobj.digest(), result) - - setattr(KeccakVectors, "test_384_%d" % idx, new_test) - -# --- - -test_vectors_512 = load_test_vectors(("Hash", "keccak"), - "ShortMsgKAT_512.txt", - "Short Messages KAT 512", - {"len": lambda x: int(x)}) or [] - -test_vectors_512 += load_test_vectors(("Hash", "keccak"), - "LongMsgKAT_512.txt", - "Long Messages KAT 512", - {"len": lambda x: int(x)}) or [] - -for idx, tv in enumerate(test_vectors_512): - if tv.len == 0: - data = b("") - else: - data = tobytes(tv.msg) - - def new_test(self, data=data, result=tv.md): - hobj = keccak.new(digest_bits=512, data=data) - self.assertEqual(hobj.digest(), result) - - setattr(KeccakVectors, "test_512_%d" % idx, new_test) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(KeccakTest) - tests += list_test_cases(KeccakVectors) - return tests - - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__init__.py deleted file mode 100644 index f15f141..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# SelfTest/IO/__init__.py: Self-test for input/output module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. 
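The keccak tests above differ from the SHA3_* ones in that keccak.new() requires an explicit digest size. For instance, the original (pre-FIPS-202 padding) Keccak-256 of the empty message:

    from Cryptodome.Hash import keccak

    h = keccak.new(digest_bits=256)    # digest_bits or digest_bytes is mandatory
    assert h.hexdigest() == ('c5d2460186f7233c927e7db2dcc703c0'
                             'e500b653ca82273b7bfad8045d85a470')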
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-test for I/O""" - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.IO import test_PKCS8; tests += test_PKCS8.get_tests(config=config) - from Cryptodome.SelfTest.IO import test_PBES; tests += test_PBES.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - - diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 2958b27..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/test_PBES.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/test_PBES.cpython-312.pyc deleted file mode 100644 index e2b4004..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/test_PBES.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/test_PKCS8.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/test_PKCS8.cpython-312.pyc deleted file mode 100644 index 92420d4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/__pycache__/test_PKCS8.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/test_PBES.py b/venv/Lib/site-packages/Cryptodome/SelfTest/IO/test_PBES.py deleted file mode 100644 index 19762f3..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/test_PBES.py +++ /dev/null @@ -1,118 +0,0 @@ -# -# SelfTest/IO/test_PBES.py: Self-test for the _PBES module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. 
Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-tests for Cryptodome.IO._PBES module""" - -import unittest - -from Cryptodome.IO._PBES import PBES2 - - -class TestPBES2(unittest.TestCase): - - def setUp(self): - self.ref = b"Test data" - self.passphrase = b"Passphrase" - - def test1(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test2(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA224AndAES128-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test3(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA256AndAES192-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test4(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA384AndAES256-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test5(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA512AndAES128-GCM') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test6(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA512-224AndAES192-GCM') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test7(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'PBKDF2WithHMAC-SHA3-256AndAES256-GCM') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test8(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'scryptAndAES128-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test9(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'scryptAndAES192-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - def test10(self): - ct = PBES2.encrypt(self.ref, self.passphrase, - 'scryptAndAES256-CBC') - pt = PBES2.decrypt(ct, self.passphrase) - self.assertEqual(self.ref, pt) - - -def get_tests(config={}): - from Cryptodome.SelfTest.st_common import list_test_cases - listTests = [] - listTests += list_test_cases(TestPBES2) - return listTests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git 
a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/test_PKCS8.py b/venv/Lib/site-packages/Cryptodome/SelfTest/IO/test_PKCS8.py deleted file mode 100644 index 718b69d..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/IO/test_PKCS8.py +++ /dev/null @@ -1,459 +0,0 @@ -# -# SelfTest/IO/test_PKCS8.py: Self-test for the PKCS8 module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-tests for Cryptodome.IO.PKCS8 module""" - -import unittest -from binascii import unhexlify - -from Cryptodome.Util.py3compat import * -from Cryptodome.IO import PKCS8 - -from Cryptodome.Util.asn1 import DerNull - -oid_key = '1.2.840.113549.1.1.1' - -# Original RSA key (in DER format) -# hexdump -v -e '32/1 "%02x" "\n"' key.der -clear_key=""" -308201ab020100025a00b94a7f7075ab9e79e8196f47be707781e80dd965cf16 -0c951a870b71783b6aaabbd550c0e65e5a3dfe15b8620009f6d7e5efec42a3f0 -6fe20faeebb0c356e79cdec6db4dd427e82d8ae4a5b90996227b8ba54ccfc4d2 -5c08050203010001025a00afa09c70d528299b7552fe766b5d20f9a221d66938 -c3b68371d48515359863ff96f0978d700e08cd6fd3d8a3f97066fc2e0d5f78eb -3a50b8e17ba297b24d1b8e9cdfd18d608668198d724ad15863ef0329195dee89 -3f039395022d0ebe0518df702a8b25954301ec60a97efdcec8eaa4f2e76ca7e8 -8dfbc3f7e0bb83f9a0e8dc47c0f8c746e9df6b022d0c9195de13f09b7be1fdd7 -1f56ae7d973e08bd9fd2c3dfd8936bb05be9cc67bd32d663c7f00d70932a0be3 -c24f022d0ac334eb6cabf1933633db007b763227b0d9971a9ea36aca8b669ec9 -4fcf16352f6b3dcae28e4bd6137db4ddd3022d0400a09f15ee7b351a2481cb03 -09920905c236d09c87afd3022f3afc2a19e3b746672b635238956ee7e6dd62d5 -022d0cd88ed14fcfbda5bbf0257f700147137bbab9c797af7df866704b889aa3 -7e2e93df3ff1a0fd3490111dcdbc4c -""" - -# Same key as above, wrapped in PKCS#8 but w/o password -# -# openssl pkcs8 -topk8 -inform DER -nocrypt -in key.der -outform DER -out keyp8.der -# hexdump -v -e '32/1 "%02x" "\n"' keyp8.der -wrapped_clear_key=""" -308201c5020100300d06092a864886f70d0101010500048201af308201ab0201 -00025a00b94a7f7075ab9e79e8196f47be707781e80dd965cf160c951a870b71 -783b6aaabbd550c0e65e5a3dfe15b8620009f6d7e5efec42a3f06fe20faeebb0 
-c356e79cdec6db4dd427e82d8ae4a5b90996227b8ba54ccfc4d25c0805020301 -0001025a00afa09c70d528299b7552fe766b5d20f9a221d66938c3b68371d485 -15359863ff96f0978d700e08cd6fd3d8a3f97066fc2e0d5f78eb3a50b8e17ba2 -97b24d1b8e9cdfd18d608668198d724ad15863ef0329195dee893f039395022d -0ebe0518df702a8b25954301ec60a97efdcec8eaa4f2e76ca7e88dfbc3f7e0bb -83f9a0e8dc47c0f8c746e9df6b022d0c9195de13f09b7be1fdd71f56ae7d973e -08bd9fd2c3dfd8936bb05be9cc67bd32d663c7f00d70932a0be3c24f022d0ac3 -34eb6cabf1933633db007b763227b0d9971a9ea36aca8b669ec94fcf16352f6b -3dcae28e4bd6137db4ddd3022d0400a09f15ee7b351a2481cb0309920905c236 -d09c87afd3022f3afc2a19e3b746672b635238956ee7e6dd62d5022d0cd88ed1 -4fcfbda5bbf0257f700147137bbab9c797af7df866704b889aa37e2e93df3ff1 -a0fd3490111dcdbc4c -""" - -### -# -# The key above will now be encrypted with different algorithms. -# The password is always 'TestTest'. -# -# Each item in the wrapped_enc_keys list contains: -# * wrap algorithm -# * iteration count -# * Salt -# * IV -# * Expected result -### -wrapped_enc_keys = [] - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -outform DER -out keyenc.der -v2 des3 -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC', -2048, -"47EA7227D8B22E2F", # IV -"E3F7A838AB911A4D", # Salt -""" -30820216304006092a864886f70d01050d3033301b06092a864886f70d01050c -300e0408e3f7a838ab911a4d02020800301406082a864886f70d0307040847ea -7227d8b22e2f048201d0ea388b374d2d0e4ceb7a5139f850fdff274884a6e6c0 -64326e09d00dbba9018834edb5a51a6ae3d1806e6e91eebf33788ce71fee0637 -a2ebf58859dd32afc644110c390274a6128b50c39b8d907823810ec471bada86 -6f5b75d8ea04ad310fad2e73621696db8e426cd511ee93ec1714a1a7db45e036 -4bf20d178d1f16bbb250b32c2d200093169d588de65f7d99aad9ddd0104b44f1 -326962e1520dfac3c2a800e8a14f678dff2b3d0bb23f69da635bf2a643ac934e -219a447d2f4460b67149e860e54f365da130763deefa649c72b0dcd48966a2d3 -4a477444782e3e66df5a582b07bbb19778a79bd355074ce331f4a82eb966b0c4 -52a09eab6116f2722064d314ae433b3d6e81d2436e93fdf446112663cde93b87 -9c8be44beb45f18e2c78fee9b016033f01ecda51b9b142091fa69f65ab784d2c -5ad8d34be6f7f1464adfc1e0ef3f7848f40d3bdea4412758f2fcb655c93d8f4d -f6fa48fc5aa4b75dd1c017ab79ac9d737233a6d668f5364ccf47786debd37334 -9c10c9e6efbe78430a61f71c89948aa32cdc3cc7338cf994147819ce7ab23450 -c8f7d9b94c3bb377d17a3fa204b601526317824b142ff6bc843fa7815ece89c0 -839573f234dac8d80cc571a045353d61db904a4398d8ef3df5ac -""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -outform DER -out keyenc.der -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'skip encryption', # pbeWithMD5AndDES-CBC, only decoding is supported --1, -"", -"", -""" -308201f1301b06092a864886f70d010503300e0408f9b990c89af1d41b020208 -00048201d0c6267fe8592903891933d559e71a7ca68b2e39150f19daca0f7921 -52f97e249d72f670d5140e9150433310ed7c7ee51927693fd39884cb9551cea5 -a7b746f7edf199f8787d4787a35dad930d7db057b2118851211b645ac8b90fa6 -b0e7d49ac8567cbd5fff226e87aa9129a0f52c45e9307752e8575c3b0ff756b7 -31fda6942d15ecb6b27ea19370ccc79773f47891e80d22b440d81259c4c28eac -e0ca839524116bcf52d8c566e49a95ddb0e5493437279a770a39fd333f3fca91 -55884fad0ba5aaf273121f893059d37dd417da7dcfd0d6fa7494968f13b2cc95 -65633f2c891340193e5ec00e4ee0b0e90b3b93da362a4906360845771ade1754 -9df79140be5993f3424c012598eadd3e7c7c0b4db2c72cf103d7943a5cf61420 -93370b9702386c3dd4eb0a47f34b579624a46a108b2d13921fa1b367495fe345 -6aa128aa70f8ca80ae13eb301e96c380724ce67c54380bbea2316c1faf4d058e 
-b4ca2e23442047606b9bc4b3bf65b432cb271bea4eb35dd3eb360d3be8612a87 -a50e96a2264490aeabdc07c6e78e5dbf4fe3388726d0e2a228346bf3c2907d68 -2a6276b22ae883fb30fa611f4e4193e7a08480fcd7db48308bacbd72bf4807aa -11fd394859f97d22982f7fe890b2e2a0f7e7ffb693 -""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -# -outform DER -out keyenc.der -v1 PBE-SHA1-RC2-64 -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'skip encryption', # pbeWithSHA1AndRC2-CBC, only decoding is supported --1, -"", -"", -""" -308201f1301b06092a864886f70d01050b300e04083ee943bdae185008020208 -00048201d0e4614d9371d3ff10ceabc2f6a7a13a0f449f9a714144e46518ea55 -e3e6f0cde24031d01ef1f37ec40081449ef01914faf45983dde0d2bc496712de -8dd15a5527dff4721d9016c13f34fb93e3ce68577e30146266d71b539f854e56 -753a192cf126ed4812734d86f81884374f1100772f78d0646e9946407637c565 -d070acab413c55952f7237437f2e48cae7fa0ff8d370de2bf446dd08049a3663 -d9c813ac197468c02e2b687e7ca994cf7f03f01b6eca87dbfed94502c2094157 -ea39f73fe4e591df1a68b04d19d9adab90bb9898467c1464ad20bf2b8fb9a5ff -d3ec91847d1c67fd768a4b9cfb46572eccc83806601372b6fad0243f58f623b7 -1c5809dea0feb8278fe27e5560eed8448dc93f5612f546e5dd7c5f6404365eb2 -5bf3396814367ae8b15c5c432b57eaed1f882c05c7f6517ee9e42b87b7b8d071 -9d6125d1b52f7b2cca1f6bd5f584334bf90bce1a7d938274cafe27b68e629698 -b16e27ae528db28593af9adcfccbebb3b9e1f2af5cd5531b51968389caa6c091 -e7de1f1b96f0d258e54e540d961a7c0ef51fda45d6da5fddd33e9bbfd3a5f8d7 -d7ab2e971de495cddbc86d38444fee9f0ac097b00adaf7802dabe0cff5b43b45 -4f26b7b547016f89be52676866189911c53e2f2477""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -# -outform DER -out keyenc.der -v1 PBE-MD5-RC2-64 -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'skip encryption', # pbeWithMD5AndRC2-CBC, only decoding is supported --1, -"", -"", -""" -308201f1301b06092a864886f70d010506300e0408f5cd2fee56d9b4b8020208 -00048201d086454942d6166a19d6b108465bd111e7080911f573d54b1369c676 -df28600e84936bfec04f91023ff16499e2e07178c340904f12ffa6886ab66228 -32bf43c2bff5a0ed14e765918cf5fc543ad49566246f7eb3fc044fa5a9c25f40 -8fc8c8296b91658d3bb1067c0aba008c4fefd9e2bcdbbbd63fdc8085482bccf4 -f150cec9a084259ad441a017e5d81a1034ef2484696a7a50863836d0eeda45cd -8cee8ecabfed703f8d9d4bbdf3a767d32a0ccdc38550ee2928d7fe3fa27eda5b -5c7899e75ad55d076d2c2d3c37d6da3d95236081f9671dab9a99afdb1cbc890e -332d1a91105d9a8ce08b6027aa07367bd1daec3059cb51f5d896124da16971e4 -0ca4bcadb06c854bdf39f42dd24174011414e51626d198775eff3449a982df7b -ace874e77e045eb6d7c3faef0750792b29a068a6291f7275df1123fac5789c51 -27ace42836d81633faf9daf38f6787fff0394ea484bbcd465b57d4dbee3cf8df -b77d1db287b3a6264c466805be5a4fe85cfbca180699859280f2dd8e2c2c10b5 -7a7d2ac670c6039d41952fbb0e4f99b560ebe1d020e1b96d02403283819c00cc -529c51f0b0101555e4c58002ba3c6e3c12e3fde1aec94382792e96d9666a2b33 -3dc397b22ecab67ee38a552fec29a1d4ff8719c748""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -# -outform DER -out keyenc.der -v1 PBE-SHA1-DES -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'skip encryption', # pbeWithSHA1AndDES-CBC, only decoding is supported --1, -"", -"", -""" -308201f1301b06092a864886f70d01050a300e04089bacc9cf1e8f734e020208 -00048201d03e502f3ceafe8fd19ab2939576bfdded26d719b2441db1459688f5 -9673218b41ec1f739edf1e460bd927bc28470c87b2d4fc8ea02ba17b47a63c49 -c5c1bee40529dadfd3ef8b4472c730bc136678c78abfb34670ec9d7dcd17ee3f -892f93f2629e6e0f4b24ecb9f954069bf722f466dece3913bb6abbd2c471d9a5 
-c5eea89b14aaccda43d30b0dd0f6eb6e9850d9747aa8aa8414c383ad01c374ee -26d3552abec9ba22669cc9622ccf2921e3d0c8ecd1a70e861956de0bec6104b5 -b649ac994970c83f8a9e84b14a7dff7843d4ca3dd4af87cea43b5657e15ae0b5 -a940ce5047f006ab3596506600724764f23757205fe374fee04911336d655acc -03e159ec27789191d1517c4f3f9122f5242d44d25eab8f0658cafb928566ca0e -8f6589aa0c0ab13ca7a618008ae3eafd4671ee8fe0b562e70b3623b0e2a16eee -97fd388087d2e03530c9fe7db6e52eccc7c48fd701ede35e08922861a9508d12 -bc8bbf24f0c6bee6e63dbcb489b603d4c4a78ce45bf2eab1d5d10456c42a65a8 -3a606f4e4b9b46eb13b57f2624b651859d3d2d5192b45dbd5a2ead14ff20ca76 -48f321309aa56d8c0c4a192b580821cc6c70c75e6f19d1c5414da898ec4dd39d -b0eb93d6ba387a80702dfd2db610757ba340f63230 -""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -# -outform DER -out keyenc.der -v2 aes128 -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'PBKDF2WithHMAC-SHA1AndAES128-CBC', -2048, -"4F66EE5D3BCD531FE6EBF4B4E73016B8", # IV -"479F25156176C53A", # Salt -""" -3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c -300e0408479f25156176c53a02020800301d060960864801650304010204104f -66ee5d3bcd531fe6ebf4b4e73016b8048201d0e33cfa560423f589d097d21533 -3b880a5ebac5b2ac58b4e73b0d787aee7764f034fe34ca1d1bd845c0a7c3316f -afbfb2129e03dcaf5a5031394206492828dacef1e04639bee5935e0f46114202 -10bc6c37182f4889be11c5d0486c398f4be952e5740f65de9d8edeb275e2b406 -e19bc29ad5ebb97fa536344fc3d84c7e755696f12b810898de4e6f069b8a81c8 -0aab0d45d7d062303aaa4a10c2ce84fdb5a03114039cfe138e38bb15b2ced717 -93549cdad85e730b14d9e2198b663dfdc8d04a4349eb3de59b076ad40b116d4a -25ed917c576bc7c883c95ef0f1180e28fc9981bea069594c309f1aa1b253ceab -a2f0313bb1372bcb51a745056be93d77a1f235a762a45e8856512d436b2ca0f7 -dd60fbed394ba28978d2a2b984b028529d0a58d93aba46c6bbd4ac1e4013cbaa -63b00988bc5f11ccc40141c346762d2b28f64435d4be98ec17c1884985e3807e -e550db606600993efccf6de0dfc2d2d70b5336a3b018fa415d6bdd59f5777118 -16806b7bc17c4c7e20ad7176ebfa5a1aa3f6bc10f04b77afd443944642ac9cca -d740e082b4a3bbb8bafdd34a0b3c5f2f3c2aceccccdccd092b78994b845bfa61 -706c3b9df5165ed1dbcbf1244fe41fc9bf993f52f7658e2f87e1baaeacb0f562 -9d905c -""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -# -outform DER -out keyenc.der -v2 aes192 -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'PBKDF2WithHMAC-SHA1AndAES192-CBC', -2048, -"5CFC2A4FF7B63201A4A8A5B021148186", # IV -"D718541C264944CE", # Salt -""" -3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c -300e0408d718541c264944ce02020800301d060960864801650304011604105c -fc2a4ff7b63201a4a8a5b021148186048201d08e74aaa21b8bcfb15b9790fe95 -b0e09ddb0f189b6fb1682fdb9f122b804650ddec3c67a1df093a828b3e5fbcc6 -286abbcc5354c482fd796d972e919ca8a5eba1eaa2293af1d648013ddad72106 -75622264dfba55dafdda39e338f058f1bdb9846041ffff803797d3fdf3693135 -8a192729ea8346a7e5e58e925a2e2e4af0818581859e8215d87370eb4194a5ff -bae900857d4c591dbc651a241865a817eaede9987c9f9ae4f95c0bf930eea88c -4d7596e535ffb7ca369988aba75027a96b9d0bc9c8b0b75f359067fd145a378b -02aaa15e9db7a23176224da48a83249005460cc6e429168657f2efa8b1af7537 -d7d7042f2d683e8271b21d591090963eeb57aea6172f88da139e1614d6a7d1a2 -1002d5a7a93d6d21156e2b4777f6fc069287a85a1538c46b7722ccde591ab55c -630e1ceeb1ac42d1b41f3f654e9da86b5efced43775ea68b2594e50e4005e052 -0fe753c0898120c2c07265367ff157f6538a1e4080d6f9d1ca9eb51939c9574e -f2e4e1e87c1434affd5808563cddd376776dbbf790c6a40028f311a8b58dafa2 -0970ed34acd6e3e89d063987893b2b9570ddb8cc032b05a723bba9444933ebf3 
-c624204be72f4190e0245197d0cb772bec933fd8442445f9a28bd042d5a3a1e9 -9a8a07 -""" -)) - -# -# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -# -outform DER -out keyenc.der -v2 aes192 -# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der -# -wrapped_enc_keys.append(( -'PBKDF2WithHMAC-SHA1AndAES256-CBC', -2048, -"323351F94462AC563E053A056252C2C4", # IV -"02A6CD0D12E727B5", # Salt -""" -3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c -300e040802a6cd0d12e727b502020800301d060960864801650304012a041032 -3351f94462ac563e053a056252c2c4048201d07f4ef1c7be21aae738a20c5632 -b8bdbbb9083b6e7f68822267b1f481fd27fdafd61a90660de6e4058790e4c912 -bf3f319a7c37e6eb3d956daaa143865020d554bf6215e8d7492359aaeef45d6e -d85a686ed26c0bf7c18d071d827a86f0b73e1db0c0e7f3d42201544093302a90 -551ad530692468c47ac15c69500b8ca67d4a17b64d15cecc035ae50b768a36cf -07c395afa091e9e6f86f665455fbdc1b21ad79c0908b73da5de75a9b43508d5d -44dc97a870cd3cd9f01ca24452e9b11c1b4982946702cfcbfda5b2fcc0203fb5 -0b52a115760bd635c94d4c95ac2c640ee9a04ffaf6ccff5a8d953dd5d88ca478 -c377811c521f2191639c643d657a9e364af88bb7c14a356c2b0b4870a23c2f54 -d41f8157afff731471dccc6058b15e1151bcf84b39b5e622a3a1d65859c912a5 -591b85e034a1f6af664f030a6bfc8c3d20c70f32b54bcf4da9c2da83cef49cf8 -e9a74f0e5d358fe50b88acdce6a9db9a7ad61536212fc5f877ebfc7957b8bda4 -b1582a0f10d515a20ee06cf768db9c977aa6fbdca7540d611ff953012d009dac -e8abd059f8e8ffea637c9c7721f817aaf0bb23403e26a0ef0ff0e2037da67d41 -af728481f53443551a9bff4cea023164e9622b5441a309e1f4bff98e5bf76677 -8d7cd9 -""" -)) - -# hexdump -v -e '32/1 "%02x" "\n"' botan_scrypt.der -botan_scrypt = """ -3081f1305206092a864886f70d01050d3045302806092b06010401da47040b30 -1b040c316c5c7a847276a838a668280202200002010102010102012030190609 -60864801650304012e040c293e9bcddc0d59d64e060cb604819ab92318063480 -16148081a3123bb092b636ec0cc3b964628e181504c13eaf94987e6fb9f171d4 -9c45baeeb79c1d805d5a762d9bfd6d1995669df60a2cd0174b6d204693964de7 -05bc3fdc3a4ce5db01f30a994c82b0aac786e4f8655138c952f1cf2cc6093f90 -b5e5ca507beb539ff497b7b6370ba7f31f4928d3385dbe8bcd2395813ba1324e -6795e81a8518aff0f0a9e01396539f937b8b7b08 -""" - -# hexdump -v -e '32/1 "%02x" "\n"' botan_pbkdf2.der -botan_pbkdf2 = """ -3081f3305e06092a864886f70d01050d3051303006092a864886f70d01050c30 -23040cc91c89b368db578d2ec4c32002020fa0020118300c06082a864886f70d -02090500301d060960864801650304011604102a7147289e7c914a7d8257e4a1 -a2135b048190a648955fc96ecae56dcb4d0ab19edc5b7ef1219c88c7c3b2d0ed -b21e25d2559447f53e20b90b2f20e72456d943561c4925aad6067a4c720afb3d -691e14dfffa10ef77898e21d134f19136d35088a7aac508b296fd00d5742ad69 -8c693293b6a591e3660b130d718724d23d696f4da9bf4031475fafb682d7955c -996363f37032e10ac85afebb7cc1cbfc0e5d4c60a4c2 -""" - -def txt2bin(inputs): - s = b('').join([b(x) for x in inputs if not (x in '\n\r\t ')]) - return unhexlify(s) - -class Rng: - def __init__(self, output): - self.output=output - self.idx=0 - def __call__(self, n): - output = self.output[self.idx:self.idx+n] - self.idx += n - return output - -class PKCS8_Decrypt(unittest.TestCase): - - def setUp(self): - self.oid_key = oid_key - self.clear_key = txt2bin(clear_key) - self.wrapped_clear_key = txt2bin(wrapped_clear_key) - self.wrapped_enc_keys = [] - for t in wrapped_enc_keys: - self.wrapped_enc_keys.append(( - t[0], - t[1], - txt2bin(t[2]), - txt2bin(t[3]), - txt2bin(t[4]) - )) - - ### NO ENCRYTION - - def test1(self): - """Verify unwrapping w/o encryption""" - res1, res2, res3 = PKCS8.unwrap(self.wrapped_clear_key) - self.assertEqual(res1, self.oid_key) - self.assertEqual(res2, 
self.clear_key) - - def test2(self): - """Verify wrapping w/o encryption""" - wrapped = PKCS8.wrap(self.clear_key, self.oid_key) - res1, res2, res3 = PKCS8.unwrap(wrapped) - self.assertEqual(res1, self.oid_key) - self.assertEqual(res2, self.clear_key) - - ## ENCRYPTION - - def test3(self): - """Verify unwrapping with encryption""" - - for t in self.wrapped_enc_keys: - res1, res2, res3 = PKCS8.unwrap(t[4], b"TestTest") - self.assertEqual(res1, self.oid_key) - self.assertEqual(res2, self.clear_key) - - def test4(self): - """Verify wrapping with encryption""" - - for t in self.wrapped_enc_keys: - if t[0] == 'skip encryption': - continue - rng = Rng(t[2]+t[3]) - params = { 'iteration_count':t[1] } - wrapped = PKCS8.wrap( - self.clear_key, - self.oid_key, - b("TestTest"), - protection=t[0], - prot_params=params, - key_params=DerNull(), - randfunc=rng) - self.assertEqual(wrapped, t[4]) - - def test_import_botan_keys(self): - botan_scrypt_der = txt2bin(botan_scrypt) - key1 = PKCS8.unwrap(botan_scrypt_der, - b'your_password') - botan_pbkdf2_der = txt2bin(botan_pbkdf2) - key2 = PKCS8.unwrap(botan_pbkdf2_der, - b'your_password') - self.assertEqual(key1, key2) - - -def get_tests(config={}): - from Cryptodome.SelfTest.st_common import list_test_cases - listTests = [] - listTests += list_test_cases(PKCS8_Decrypt) - return listTests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__init__.py deleted file mode 100644 index 9f732ba..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# SelfTest/Math/__init__.py: Self-test for math module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -"""Self-test for Math""" - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.Math import test_Numbers - from Cryptodome.SelfTest.Math import test_Primality - from Cryptodome.SelfTest.Math import test_modexp - from Cryptodome.SelfTest.Math import test_modmult - tests += test_Numbers.get_tests(config=config) - tests += test_Primality.get_tests(config=config) - tests += test_modexp.get_tests(config=config) - tests += test_modmult.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4f714ff..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_Numbers.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_Numbers.cpython-312.pyc deleted file mode 100644 index 8a051bc..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_Numbers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_Primality.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_Primality.cpython-312.pyc deleted file mode 100644 index 756898d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_Primality.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_modexp.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_modexp.cpython-312.pyc deleted file mode 100644 index 242734f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_modexp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_modmult.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_modmult.cpython-312.pyc deleted file mode 100644 index e6e3ccb..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/__pycache__/test_modmult.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_Numbers.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_Numbers.py deleted file mode 100644 index f974d5a..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_Numbers.py +++ /dev/null @@ -1,838 +0,0 @@ -# -# SelfTest/Math/test_Numbers.py: Self-test for Numbers module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-test for Math.Numbers""" - -import sys -import unittest - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Util.py3compat import * - -from Cryptodome.Math._IntegerNative import IntegerNative - - -class TestIntegerBase(unittest.TestCase): - - def setUp(self): - raise NotImplementedError("To be implemented") - - def Integers(self, *arg): - return map(self.Integer, arg) - - def test_init_and_equality(self): - Integer = self.Integer - - v1 = Integer(23) - v2 = Integer(v1) - v3 = Integer(-9) - self.assertRaises(ValueError, Integer, 1.0) - - v4 = Integer(10**10) - v5 = Integer(-10**10) - - v6 = Integer(0xFFFF) - v7 = Integer(0xFFFFFFFF) - v8 = Integer(0xFFFFFFFFFFFFFFFF) - - self.assertEqual(v1, v1) - self.assertEqual(v1, 23) - self.assertEqual(v1, v2) - self.assertEqual(v3, -9) - self.assertEqual(v4, 10 ** 10) - self.assertEqual(v5, -10 ** 10) - self.assertEqual(v6, 0xFFFF) - self.assertEqual(v7, 0xFFFFFFFF) - self.assertEqual(v8, 0xFFFFFFFFFFFFFFFF) - - self.assertFalse(v1 == v4) - - # Init and comparison between Integer's - v6 = Integer(v1) - self.assertEqual(v1, v6) - - self.assertFalse(Integer(0) == None) - - def test_conversion_to_int(self): - v1, v2 = self.Integers(-23, 2 ** 1000) - self.assertEqual(int(v1), -23) - self.assertEqual(int(v2), 2 ** 1000) - - def test_equality_with_ints(self): - v1, v2, v3 = self.Integers(23, -89, 2 ** 1000) - self.assertTrue(v1 == 23) - self.assertTrue(v2 == -89) - self.assertFalse(v1 == 24) - self.assertTrue(v3 == 2 ** 1000) - - def test_conversion_to_str(self): - v1, v2, v3, v4 = self.Integers(20, 0, -20, 2 ** 1000) - self.assertTrue(str(v1) == "20") - self.assertTrue(str(v2) == "0") - self.assertTrue(str(v3) == "-20") - self.assertTrue(str(v4) == "10715086071862673209484250490600018105614048117055336074437503883703510511249361224931983788156958581275946729175531468251871452856923140435984577574698574803934567774824230985421074605062371141877954182153046474983581941267398767559165543946077062914571196477686542167660429831652624386837205668069376") - - def test_repr(self): - v1, v2 = self.Integers(-1, 2**80) - self.assertEqual(repr(v1), "Integer(-1)") - self.assertEqual(repr(v2), "Integer(1208925819614629174706176)") - - def test_conversion_to_bytes(self): - Integer = self.Integer - - v0 = Integer(0) - self.assertEqual(b"\x00", v0.to_bytes()) - - v1 = Integer(0x17) - self.assertEqual(b"\x17", v1.to_bytes()) - - v2 = Integer(0xFFFE) - self.assertEqual(b"\xFF\xFE", v2.to_bytes()) - self.assertEqual(b"\x00\xFF\xFE", v2.to_bytes(3)) - self.assertRaises(ValueError, v2.to_bytes, 1) - - self.assertEqual(b"\xFE\xFF", v2.to_bytes(byteorder='little')) - 
self.assertEqual(b"\xFE\xFF\x00", v2.to_bytes(3, byteorder='little')) - - v3 = Integer(0xFF00AABBCCDDEE1122) - self.assertEqual(b"\xFF\x00\xAA\xBB\xCC\xDD\xEE\x11\x22", v3.to_bytes()) - self.assertEqual(b"\x22\x11\xEE\xDD\xCC\xBB\xAA\x00\xFF", - v3.to_bytes(byteorder='little')) - self.assertEqual(b"\x00\xFF\x00\xAA\xBB\xCC\xDD\xEE\x11\x22", - v3.to_bytes(10)) - self.assertEqual(b"\x22\x11\xEE\xDD\xCC\xBB\xAA\x00\xFF\x00", - v3.to_bytes(10, byteorder='little')) - self.assertRaises(ValueError, v3.to_bytes, 8) - - v4 = Integer(-90) - self.assertRaises(ValueError, v4.to_bytes) - self.assertRaises(ValueError, v4.to_bytes, byteorder='bittle') - - def test_conversion_from_bytes(self): - Integer = self.Integer - - v1 = Integer.from_bytes(b"\x00") - self.assertTrue(isinstance(v1, Integer)) - self.assertEqual(0, v1) - - v2 = Integer.from_bytes(b"\x00\x01") - self.assertEqual(1, v2) - - v3 = Integer.from_bytes(b"\xFF\xFF") - self.assertEqual(0xFFFF, v3) - - v4 = Integer.from_bytes(b"\x00\x01", 'big') - self.assertEqual(1, v4) - - v5 = Integer.from_bytes(b"\x00\x01", byteorder='big') - self.assertEqual(1, v5) - - v6 = Integer.from_bytes(b"\x00\x01", byteorder='little') - self.assertEqual(0x0100, v6) - - self.assertRaises(ValueError, Integer.from_bytes, b'\x09', 'bittle') - - def test_inequality(self): - # Test Integer!=Integer and Integer!=int - v1, v2, v3, v4 = self.Integers(89, 89, 90, -8) - self.assertTrue(v1 != v3) - self.assertTrue(v1 != 90) - self.assertFalse(v1 != v2) - self.assertFalse(v1 != 89) - self.assertTrue(v1 != v4) - self.assertTrue(v4 != v1) - self.assertTrue(self.Integer(0) != None) - - def test_less_than(self): - # Test IntegerInteger and Integer>int - v1, v2, v3, v4, v5 = self.Integers(13, 13, 14, -8, 2 ** 10) - self.assertTrue(v3 > v1) - self.assertTrue(v3 > 13) - self.assertFalse(v1 > v1) - self.assertFalse(v1 > v2) - self.assertFalse(v1 > 13) - self.assertTrue(v1 > v4) - self.assertFalse(v4 > v1) - self.assertTrue(v5 > v1) - self.assertFalse(v1 > v5) - - def test_more_than_or_equal(self): - # Test Integer>=Integer and Integer>=int - v1, v2, v3, v4 = self.Integers(13, 13, 14, -4) - self.assertTrue(v3 >= v1) - self.assertTrue(v3 >= 13) - self.assertTrue(v1 >= v2) - self.assertTrue(v1 >= v1) - self.assertTrue(v1 >= 13) - self.assertFalse(v4 >= v1) - - def test_bool(self): - v1, v2, v3, v4 = self.Integers(0, 10, -9, 2 ** 10) - self.assertFalse(v1) - self.assertFalse(bool(v1)) - self.assertTrue(v2) - self.assertTrue(bool(v2)) - self.assertTrue(v3) - self.assertTrue(v4) - - def test_is_negative(self): - v1, v2, v3, v4, v5 = self.Integers(-3 ** 100, -3, 0, 3, 3**100) - self.assertTrue(v1.is_negative()) - self.assertTrue(v2.is_negative()) - self.assertFalse(v4.is_negative()) - self.assertFalse(v5.is_negative()) - - def test_addition(self): - # Test Integer+Integer and Integer+int - v1, v2, v3 = self.Integers(7, 90, -7) - self.assertTrue(isinstance(v1 + v2, self.Integer)) - self.assertEqual(v1 + v2, 97) - self.assertEqual(v1 + 90, 97) - self.assertEqual(v1 + v3, 0) - self.assertEqual(v1 + (-7), 0) - self.assertEqual(v1 + 2 ** 10, 2 ** 10 + 7) - - def test_subtraction(self): - # Test Integer-Integer and Integer-int - v1, v2, v3 = self.Integers(7, 90, -7) - self.assertTrue(isinstance(v1 - v2, self.Integer)) - self.assertEqual(v2 - v1, 83) - self.assertEqual(v2 - 7, 83) - self.assertEqual(v2 - v3, 97) - self.assertEqual(v1 - (-7), 14) - self.assertEqual(v1 - 2 ** 10, 7 - 2 ** 10) - - def test_multiplication(self): - # Test Integer-Integer and Integer-int - v1, v2, v3, v4 = self.Integers(4, 
5, -2, 2 ** 10) - self.assertTrue(isinstance(v1 * v2, self.Integer)) - self.assertEqual(v1 * v2, 20) - self.assertEqual(v1 * 5, 20) - self.assertEqual(v1 * -2, -8) - self.assertEqual(v1 * 2 ** 10, 4 * (2 ** 10)) - - def test_floor_div(self): - v1, v2, v3 = self.Integers(3, 8, 2 ** 80) - self.assertTrue(isinstance(v1 // v2, self.Integer)) - self.assertEqual(v2 // v1, 2) - self.assertEqual(v2 // 3, 2) - self.assertEqual(v2 // -3, -3) - self.assertEqual(v3 // 2 ** 79, 2) - self.assertRaises(ZeroDivisionError, lambda: v1 // 0) - - def test_remainder(self): - # Test Integer%Integer and Integer%int - v1, v2, v3 = self.Integers(23, 5, -4) - self.assertTrue(isinstance(v1 % v2, self.Integer)) - self.assertEqual(v1 % v2, 3) - self.assertEqual(v1 % 5, 3) - self.assertEqual(v3 % 5, 1) - self.assertEqual(v1 % 2 ** 10, 23) - self.assertRaises(ZeroDivisionError, lambda: v1 % 0) - self.assertRaises(ValueError, lambda: v1 % -6) - - def test_simple_exponentiation(self): - v1, v2, v3 = self.Integers(4, 3, -2) - self.assertTrue(isinstance(v1 ** v2, self.Integer)) - self.assertEqual(v1 ** v2, 64) - self.assertEqual(pow(v1, v2), 64) - self.assertEqual(v1 ** 3, 64) - self.assertEqual(pow(v1, 3), 64) - self.assertEqual(v3 ** 2, 4) - self.assertEqual(v3 ** 3, -8) - - self.assertRaises(ValueError, pow, v1, -3) - - def test_modular_exponentiation(self): - v1, v2, v3 = self.Integers(23, 5, 17) - - self.assertTrue(isinstance(pow(v1, v2, v3), self.Integer)) - self.assertEqual(pow(v1, v2, v3), 7) - self.assertEqual(pow(v1, 5, v3), 7) - self.assertEqual(pow(v1, v2, 17), 7) - self.assertEqual(pow(v1, 5, 17), 7) - self.assertEqual(pow(v1, 0, 17), 1) - self.assertEqual(pow(v1, 1, 2 ** 80), 23) - self.assertEqual(pow(v1, 2 ** 80, 89298), 17689) - - self.assertRaises(ZeroDivisionError, pow, v1, 5, 0) - self.assertRaises(ValueError, pow, v1, 5, -4) - self.assertRaises(ValueError, pow, v1, -3, 8) - - def test_inplace_exponentiation(self): - v1 = self.Integer(4) - v1.inplace_pow(2) - self.assertEqual(v1, 16) - - v1 = self.Integer(4) - v1.inplace_pow(2, 15) - self.assertEqual(v1, 1) - - def test_abs(self): - v1, v2, v3, v4, v5 = self.Integers(-2 ** 100, -2, 0, 2, 2 ** 100) - self.assertEqual(abs(v1), 2 ** 100) - self.assertEqual(abs(v2), 2) - self.assertEqual(abs(v3), 0) - self.assertEqual(abs(v4), 2) - self.assertEqual(abs(v5), 2 ** 100) - - def test_sqrt(self): - v1, v2, v3, v4 = self.Integers(-2, 0, 49, 10**100) - - self.assertRaises(ValueError, v1.sqrt) - self.assertEqual(v2.sqrt(), 0) - self.assertEqual(v3.sqrt(), 7) - self.assertEqual(v4.sqrt(), 10**50) - - def test_sqrt_module(self): - - # Invalid modulus (non positive) - self.assertRaises(ValueError, self.Integer(5).sqrt, 0) - self.assertRaises(ValueError, self.Integer(5).sqrt, -1) - - # Simple cases - assert self.Integer(0).sqrt(5) == 0 - assert self.Integer(1).sqrt(5) in (1, 4) - - # Test with all quadratic residues in several fields - for p in (11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53): - for i in range(0, p): - square = i**2 % p - res = self.Integer(square).sqrt(p) - assert res in (i, p - i) - - # 2 is a non-quadratic reside in Z_11 - self.assertRaises(ValueError, self.Integer(2).sqrt, 11) - - # 10 is not a prime - self.assertRaises(ValueError, self.Integer(4).sqrt, 10) - - # 5 is square residue of 4 and 7 - assert self.Integer(5 - 11).sqrt(11) in (4, 7) - assert self.Integer(5 + 11).sqrt(11) in (4, 7) - - def test_in_place_add(self): - v1, v2 = self.Integers(10, 20) - - v1 += v2 - self.assertEqual(v1, 30) - v1 += 10 - self.assertEqual(v1, 40) - v1 += -1 - 
self.assertEqual(v1, 39) - v1 += 2 ** 1000 - self.assertEqual(v1, 39 + 2 ** 1000) - - def test_in_place_sub(self): - v1, v2 = self.Integers(10, 20) - - v1 -= v2 - self.assertEqual(v1, -10) - v1 -= -100 - self.assertEqual(v1, 90) - v1 -= 90000 - self.assertEqual(v1, -89910) - v1 -= -100000 - self.assertEqual(v1, 10090) - - def test_in_place_mul(self): - v1, v2 = self.Integers(3, 5) - - v1 *= v2 - self.assertEqual(v1, 15) - v1 *= 2 - self.assertEqual(v1, 30) - v1 *= -2 - self.assertEqual(v1, -60) - v1 *= 2 ** 1000 - self.assertEqual(v1, -60 * (2 ** 1000)) - - def test_in_place_modulus(self): - v1, v2 = self.Integers(20, 7) - - v1 %= v2 - self.assertEqual(v1, 6) - v1 %= 2 ** 1000 - self.assertEqual(v1, 6) - v1 %= 2 - self.assertEqual(v1, 0) - def t(): - v3 = self.Integer(9) - v3 %= 0 - self.assertRaises(ZeroDivisionError, t) - - def test_and(self): - v1, v2, v3 = self.Integers(0xF4, 0x31, -0xF) - self.assertTrue(isinstance(v1 & v2, self.Integer)) - self.assertEqual(v1 & v2, 0x30) - self.assertEqual(v1 & 0x31, 0x30) - self.assertEqual(v1 & v3, 0xF0) - self.assertEqual(v1 & -0xF, 0xF0) - self.assertEqual(v3 & -0xF, -0xF) - self.assertEqual(v2 & (2 ** 1000 + 0x31), 0x31) - - def test_or(self): - v1, v2, v3 = self.Integers(0x40, 0x82, -0xF) - self.assertTrue(isinstance(v1 | v2, self.Integer)) - self.assertEqual(v1 | v2, 0xC2) - self.assertEqual(v1 | 0x82, 0xC2) - self.assertEqual(v2 | v3, -0xD) - self.assertEqual(v2 | 2 ** 1000, 2 ** 1000 + 0x82) - - def test_right_shift(self): - v1, v2, v3 = self.Integers(0x10, 1, -0x10) - self.assertEqual(v1 >> 0, v1) - self.assertTrue(isinstance(v1 >> v2, self.Integer)) - self.assertEqual(v1 >> v2, 0x08) - self.assertEqual(v1 >> 1, 0x08) - self.assertRaises(ValueError, lambda: v1 >> -1) - self.assertEqual(v1 >> (2 ** 1000), 0) - - self.assertEqual(v3 >> 1, -0x08) - self.assertEqual(v3 >> (2 ** 1000), -1) - - def test_in_place_right_shift(self): - v1, v2, v3 = self.Integers(0x10, 1, -0x10) - v1 >>= 0 - self.assertEqual(v1, 0x10) - v1 >>= 1 - self.assertEqual(v1, 0x08) - v1 >>= v2 - self.assertEqual(v1, 0x04) - v3 >>= 1 - self.assertEqual(v3, -0x08) - def l(): - v4 = self.Integer(0x90) - v4 >>= -1 - self.assertRaises(ValueError, l) - def m1(): - v4 = self.Integer(0x90) - v4 >>= 2 ** 1000 - return v4 - self.assertEqual(0, m1()) - def m2(): - v4 = self.Integer(-1) - v4 >>= 2 ** 1000 - return v4 - self.assertEqual(-1, m2()) - - def _test_left_shift(self): - v1, v2, v3 = self.Integers(0x10, 1, -0x10) - self.assertEqual(v1 << 0, v1) - self.assertTrue(isinstance(v1 << v2, self.Integer)) - self.assertEqual(v1 << v2, 0x20) - self.assertEqual(v1 << 1, 0x20) - self.assertEqual(v3 << 1, -0x20) - self.assertRaises(ValueError, lambda: v1 << -1) - self.assertRaises(ValueError, lambda: v1 << (2 ** 1000)) - - def test_in_place_left_shift(self): - v1, v2, v3 = self.Integers(0x10, 1, -0x10) - v1 <<= 0 - self.assertEqual(v1, 0x10) - v1 <<= 1 - self.assertEqual(v1, 0x20) - v1 <<= v2 - self.assertEqual(v1, 0x40) - v3 <<= 1 - self.assertEqual(v3, -0x20) - def l(): - v4 = self.Integer(0x90) - v4 <<= -1 - self.assertRaises(ValueError, l) - def m(): - v4 = self.Integer(0x90) - v4 <<= 2 ** 1000 - self.assertRaises(ValueError, m) - - - def test_get_bit(self): - v1, v2, v3 = self.Integers(0x102, -3, 1) - self.assertEqual(v1.get_bit(0), 0) - self.assertEqual(v1.get_bit(1), 1) - self.assertEqual(v1.get_bit(v3), 1) - self.assertEqual(v1.get_bit(8), 1) - self.assertEqual(v1.get_bit(9), 0) - - self.assertRaises(ValueError, v1.get_bit, -1) - self.assertEqual(v1.get_bit(2 ** 1000), 0) - - 
self.assertRaises(ValueError, v2.get_bit, -1) - self.assertRaises(ValueError, v2.get_bit, 0) - self.assertRaises(ValueError, v2.get_bit, 1) - self.assertRaises(ValueError, v2.get_bit, 2 * 1000) - - def test_odd_even(self): - v1, v2, v3, v4, v5 = self.Integers(0, 4, 17, -4, -17) - - self.assertTrue(v1.is_even()) - self.assertTrue(v2.is_even()) - self.assertFalse(v3.is_even()) - self.assertTrue(v4.is_even()) - self.assertFalse(v5.is_even()) - - self.assertFalse(v1.is_odd()) - self.assertFalse(v2.is_odd()) - self.assertTrue(v3.is_odd()) - self.assertFalse(v4.is_odd()) - self.assertTrue(v5.is_odd()) - - def test_size_in_bits(self): - v1, v2, v3, v4 = self.Integers(0, 1, 0x100, -90) - self.assertEqual(v1.size_in_bits(), 1) - self.assertEqual(v2.size_in_bits(), 1) - self.assertEqual(v3.size_in_bits(), 9) - self.assertRaises(ValueError, v4.size_in_bits) - - def test_size_in_bytes(self): - v1, v2, v3, v4, v5, v6 = self.Integers(0, 1, 0xFF, 0x1FF, 0x10000, -9) - self.assertEqual(v1.size_in_bytes(), 1) - self.assertEqual(v2.size_in_bytes(), 1) - self.assertEqual(v3.size_in_bytes(), 1) - self.assertEqual(v4.size_in_bytes(), 2) - self.assertEqual(v5.size_in_bytes(), 3) - self.assertRaises(ValueError, v6.size_in_bits) - - def test_perfect_square(self): - - self.assertFalse(self.Integer(-9).is_perfect_square()) - self.assertTrue(self.Integer(0).is_perfect_square()) - self.assertTrue(self.Integer(1).is_perfect_square()) - self.assertFalse(self.Integer(2).is_perfect_square()) - self.assertFalse(self.Integer(3).is_perfect_square()) - self.assertTrue(self.Integer(4).is_perfect_square()) - self.assertTrue(self.Integer(39*39).is_perfect_square()) - self.assertFalse(self.Integer(39*39+1).is_perfect_square()) - - for x in range(100, 1000): - self.assertFalse(self.Integer(x**2+1).is_perfect_square()) - self.assertTrue(self.Integer(x**2).is_perfect_square()) - - def test_fail_if_divisible_by(self): - v1, v2, v3 = self.Integers(12, -12, 4) - - # No failure expected - v1.fail_if_divisible_by(7) - v2.fail_if_divisible_by(7) - v2.fail_if_divisible_by(2 ** 80) - - # Failure expected - self.assertRaises(ValueError, v1.fail_if_divisible_by, 4) - self.assertRaises(ValueError, v1.fail_if_divisible_by, v3) - - def test_multiply_accumulate(self): - v1, v2, v3 = self.Integers(4, 3, 2) - v1.multiply_accumulate(v2, v3) - self.assertEqual(v1, 10) - v1.multiply_accumulate(v2, 2) - self.assertEqual(v1, 16) - v1.multiply_accumulate(3, v3) - self.assertEqual(v1, 22) - v1.multiply_accumulate(1, -2) - self.assertEqual(v1, 20) - v1.multiply_accumulate(-2, 1) - self.assertEqual(v1, 18) - v1.multiply_accumulate(1, 2 ** 1000) - self.assertEqual(v1, 18 + 2 ** 1000) - v1.multiply_accumulate(2 ** 1000, 1) - self.assertEqual(v1, 18 + 2 ** 1001) - - def test_set(self): - v1, v2 = self.Integers(3, 6) - v1.set(v2) - self.assertEqual(v1, 6) - v1.set(9) - self.assertEqual(v1, 9) - v1.set(-2) - self.assertEqual(v1, -2) - v1.set(2 ** 1000) - self.assertEqual(v1, 2 ** 1000) - - def test_inverse(self): - v1, v2, v3, v4, v5, v6 = self.Integers(2, 5, -3, 0, 723872, 3433) - - self.assertTrue(isinstance(v1.inverse(v2), self.Integer)) - self.assertEqual(v1.inverse(v2), 3) - self.assertEqual(v1.inverse(5), 3) - self.assertEqual(v3.inverse(5), 3) - self.assertEqual(v5.inverse(92929921), 58610507) - self.assertEqual(v6.inverse(9912), 5353) - - self.assertRaises(ValueError, v2.inverse, 10) - self.assertRaises(ValueError, v1.inverse, -3) - self.assertRaises(ValueError, v4.inverse, 10) - self.assertRaises(ZeroDivisionError, v2.inverse, 0) - - def 
test_inplace_inverse(self): - v1, v2 = self.Integers(2, 5) - - v1.inplace_inverse(v2) - self.assertEqual(v1, 3) - - def test_gcd(self): - v1, v2, v3, v4 = self.Integers(6, 10, 17, -2) - self.assertTrue(isinstance(v1.gcd(v2), self.Integer)) - self.assertEqual(v1.gcd(v2), 2) - self.assertEqual(v1.gcd(10), 2) - self.assertEqual(v1.gcd(v3), 1) - self.assertEqual(v1.gcd(-2), 2) - self.assertEqual(v4.gcd(6), 2) - - def test_lcm(self): - v1, v2, v3, v4, v5 = self.Integers(6, 10, 17, -2, 0) - self.assertTrue(isinstance(v1.lcm(v2), self.Integer)) - self.assertEqual(v1.lcm(v2), 30) - self.assertEqual(v1.lcm(10), 30) - self.assertEqual(v1.lcm(v3), 102) - self.assertEqual(v1.lcm(-2), 6) - self.assertEqual(v4.lcm(6), 6) - self.assertEqual(v1.lcm(0), 0) - self.assertEqual(v5.lcm(0), 0) - - def test_jacobi_symbol(self): - - data = ( - (1001, 1, 1), - (19, 45, 1), - (8, 21, -1), - (5, 21, 1), - (610, 987, -1), - (1001, 9907, -1), - (5, 3439601197, -1) - ) - - js = self.Integer.jacobi_symbol - - # Jacobi symbol is always 1 for k==1 or n==1 - for k in range(1, 30): - self.assertEqual(js(k, 1), 1) - for n in range(1, 30, 2): - self.assertEqual(js(1, n), 1) - - # Fail if n is not positive odd - self.assertRaises(ValueError, js, 6, -2) - self.assertRaises(ValueError, js, 6, -1) - self.assertRaises(ValueError, js, 6, 0) - self.assertRaises(ValueError, js, 0, 0) - self.assertRaises(ValueError, js, 6, 2) - self.assertRaises(ValueError, js, 6, 4) - self.assertRaises(ValueError, js, 6, 6) - self.assertRaises(ValueError, js, 6, 8) - - for tv in data: - self.assertEqual(js(tv[0], tv[1]), tv[2]) - self.assertEqual(js(self.Integer(tv[0]), tv[1]), tv[2]) - self.assertEqual(js(tv[0], self.Integer(tv[1])), tv[2]) - - def test_jacobi_symbol_wikipedia(self): - - # Test vectors from https://en.wikipedia.org/wiki/Jacobi_symbol - tv = [ - (3, [(1, 1), (2, -1), (3, 0), (4, 1), (5, -1), (6, 0), (7, 1), (8, -1), (9, 0), (10, 1), (11, -1), (12, 0), (13, 1), (14, -1), (15, 0), (16, 1), (17, -1), (18, 0), (19, 1), (20, -1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, -1), (27, 0), (28, 1), (29, -1), (30, 0)]), - (5, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 0), (6, 1), (7, -1), (8, -1), (9, 1), (10, 0), (11, 1), (12, -1), (13, -1), (14, 1), (15, 0), (16, 1), (17, -1), (18, -1), (19, 1), (20, 0), (21, 1), (22, -1), (23, -1), (24, 1), (25, 0), (26, 1), (27, -1), (28, -1), (29, 1), (30, 0)]), - (7, [(1, 1), (2, 1), (3, -1), (4, 1), (5, -1), (6, -1), (7, 0), (8, 1), (9, 1), (10, -1), (11, 1), (12, -1), (13, -1), (14, 0), (15, 1), (16, 1), (17, -1), (18, 1), (19, -1), (20, -1), (21, 0), (22, 1), (23, 1), (24, -1), (25, 1), (26, -1), (27, -1), (28, 0), (29, 1), (30, 1)]), - (9, [(1, 1), (2, 1), (3, 0), (4, 1), (5, 1), (6, 0), (7, 1), (8, 1), (9, 0), (10, 1), (11, 1), (12, 0), (13, 1), (14, 1), (15, 0), (16, 1), (17, 1), (18, 0), (19, 1), (20, 1), (21, 0), (22, 1), (23, 1), (24, 0), (25, 1), (26, 1), (27, 0), (28, 1), (29, 1), (30, 0)]), - (11, [(1, 1), (2, -1), (3, 1), (4, 1), (5, 1), (6, -1), (7, -1), (8, -1), (9, 1), (10, -1), (11, 0), (12, 1), (13, -1), (14, 1), (15, 1), (16, 1), (17, -1), (18, -1), (19, -1), (20, 1), (21, -1), (22, 0), (23, 1), (24, -1), (25, 1), (26, 1), (27, 1), (28, -1), (29, -1), (30, -1)]), - (13, [(1, 1), (2, -1), (3, 1), (4, 1), (5, -1), (6, -1), (7, -1), (8, -1), (9, 1), (10, 1), (11, -1), (12, 1), (13, 0), (14, 1), (15, -1), (16, 1), (17, 1), (18, -1), (19, -1), (20, -1), (21, -1), (22, 1), (23, 1), (24, -1), (25, 1), (26, 0), (27, 1), (28, -1), (29, 1), (30, 1)]), - (15, [(1, 1), (2, 1), (3, 0), (4, 1), 
(5, 0), (6, 0), (7, -1), (8, 1), (9, 0), (10, 0), (11, -1), (12, 0), (13, -1), (14, -1), (15, 0), (16, 1), (17, 1), (18, 0), (19, 1), (20, 0), (21, 0), (22, -1), (23, 1), (24, 0), (25, 0), (26, -1), (27, 0), (28, -1), (29, -1), (30, 0)]), - (17, [(1, 1), (2, 1), (3, -1), (4, 1), (5, -1), (6, -1), (7, -1), (8, 1), (9, 1), (10, -1), (11, -1), (12, -1), (13, 1), (14, -1), (15, 1), (16, 1), (17, 0), (18, 1), (19, 1), (20, -1), (21, 1), (22, -1), (23, -1), (24, -1), (25, 1), (26, 1), (27, -1), (28, -1), (29, -1), (30, 1)]), - (19, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 1), (6, 1), (7, 1), (8, -1), (9, 1), (10, -1), (11, 1), (12, -1), (13, -1), (14, -1), (15, -1), (16, 1), (17, 1), (18, -1), (19, 0), (20, 1), (21, -1), (22, -1), (23, 1), (24, 1), (25, 1), (26, 1), (27, -1), (28, 1), (29, -1), (30, 1)]), - (21, [(1, 1), (2, -1), (3, 0), (4, 1), (5, 1), (6, 0), (7, 0), (8, -1), (9, 0), (10, -1), (11, -1), (12, 0), (13, -1), (14, 0), (15, 0), (16, 1), (17, 1), (18, 0), (19, -1), (20, 1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, 1), (27, 0), (28, 0), (29, -1), (30, 0)]), - (23, [(1, 1), (2, 1), (3, 1), (4, 1), (5, -1), (6, 1), (7, -1), (8, 1), (9, 1), (10, -1), (11, -1), (12, 1), (13, 1), (14, -1), (15, -1), (16, 1), (17, -1), (18, 1), (19, -1), (20, -1), (21, -1), (22, -1), (23, 0), (24, 1), (25, 1), (26, 1), (27, 1), (28, -1), (29, 1), (30, -1)]), - (25, [(1, 1), (2, 1), (3, 1), (4, 1), (5, 0), (6, 1), (7, 1), (8, 1), (9, 1), (10, 0), (11, 1), (12, 1), (13, 1), (14, 1), (15, 0), (16, 1), (17, 1), (18, 1), (19, 1), (20, 0), (21, 1), (22, 1), (23, 1), (24, 1), (25, 0), (26, 1), (27, 1), (28, 1), (29, 1), (30, 0)]), - (27, [(1, 1), (2, -1), (3, 0), (4, 1), (5, -1), (6, 0), (7, 1), (8, -1), (9, 0), (10, 1), (11, -1), (12, 0), (13, 1), (14, -1), (15, 0), (16, 1), (17, -1), (18, 0), (19, 1), (20, -1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, -1), (27, 0), (28, 1), (29, -1), (30, 0)]), - (29, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 1), (6, 1), (7, 1), (8, -1), (9, 1), (10, -1), (11, -1), (12, -1), (13, 1), (14, -1), (15, -1), (16, 1), (17, -1), (18, -1), (19, -1), (20, 1), (21, -1), (22, 1), (23, 1), (24, 1), (25, 1), (26, -1), (27, -1), (28, 1), (29, 0), (30, 1)]), - ] - - js = self.Integer.jacobi_symbol - - for n, kj in tv: - for k, j in kj: - self.assertEqual(js(k, n), j) - - def test_hex(self): - v1, = self.Integers(0x10) - self.assertEqual(hex(v1), "0x10") - - def test_mult_modulo_bytes(self): - modmult = self.Integer._mult_modulo_bytes - - res = modmult(4, 5, 19) - self.assertEqual(res, b'\x01') - - res = modmult(4 - 19, 5, 19) - self.assertEqual(res, b'\x01') - - res = modmult(4, 5 - 19, 19) - self.assertEqual(res, b'\x01') - - res = modmult(4 + 19, 5, 19) - self.assertEqual(res, b'\x01') - - res = modmult(4, 5 + 19, 19) - self.assertEqual(res, b'\x01') - - modulus = 2**512 - 1 # 64 bytes - t1 = 13**100 - t2 = 17**100 - expect = b"\xfa\xb2\x11\x87\xc3(y\x07\xf8\xf1n\xdepq\x0b\xca\xf3\xd3B,\xef\xf2\xfbf\xcc)\x8dZ*\x95\x98r\x96\xa8\xd5\xc3}\xe2q:\xa2'z\xf48\xde%\xef\t\x07\xbc\xc4[C\x8bUE2\x90\xef\x81\xaa:\x08" - self.assertEqual(expect, modmult(t1, t2, modulus)) - - self.assertRaises(ZeroDivisionError, modmult, 4, 5, 0) - self.assertRaises(ValueError, modmult, 4, 5, -1) - self.assertRaises(ValueError, modmult, 4, 5, 4) - - -class TestIntegerInt(TestIntegerBase): - - def setUp(self): - self.Integer = IntegerNative - - -class testIntegerRandom(unittest.TestCase): - - def test_random_exact_bits(self): - - for _ in range(1000): - a = IntegerNative.random(exact_bits=8) - 
self.assertFalse(a < 128) - self.assertFalse(a >= 256) - - for bits_value in range(1024, 1024 + 8): - a = IntegerNative.random(exact_bits=bits_value) - self.assertFalse(a < 2**(bits_value - 1)) - self.assertFalse(a >= 2**bits_value) - - def test_random_max_bits(self): - - flag = False - for _ in range(1000): - a = IntegerNative.random(max_bits=8) - flag = flag or a < 128 - self.assertFalse(a>=256) - self.assertTrue(flag) - - for bits_value in range(1024, 1024 + 8): - a = IntegerNative.random(max_bits=bits_value) - self.assertFalse(a >= 2**bits_value) - - def test_random_bits_custom_rng(self): - - class CustomRNG(object): - def __init__(self): - self.counter = 0 - - def __call__(self, size): - self.counter += size - return bchr(0) * size - - custom_rng = CustomRNG() - a = IntegerNative.random(exact_bits=32, randfunc=custom_rng) - self.assertEqual(custom_rng.counter, 4) - - def test_random_range(self): - - func = IntegerNative.random_range - - for x in range(200): - a = func(min_inclusive=1, max_inclusive=15) - self.assertTrue(1 <= a <= 15) - - for x in range(200): - a = func(min_inclusive=1, max_exclusive=15) - self.assertTrue(1 <= a < 15) - - self.assertRaises(ValueError, func, min_inclusive=1, max_inclusive=2, - max_exclusive=3) - self.assertRaises(ValueError, func, max_inclusive=2, max_exclusive=3) - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestIntegerInt) - - try: - from Cryptodome.Math._IntegerGMP import IntegerGMP - - class TestIntegerGMP(TestIntegerBase): - def setUp(self): - self.Integer = IntegerGMP - - tests += list_test_cases(TestIntegerGMP) - except (ImportError, OSError) as e: - if sys.platform == "win32": - sys.stdout.write("Skipping GMP tests on Windows\n") - else: - sys.stdout.write("Skipping GMP tests (%s)\n" % str(e) ) - - try: - from Cryptodome.Math._IntegerCustom import IntegerCustom - - class TestIntegerCustomModexp(TestIntegerBase): - def setUp(self): - self.Integer = IntegerCustom - - tests += list_test_cases(TestIntegerCustomModexp) - except (ImportError, OSError) as e: - sys.stdout.write("Skipping custom modexp tests (%s)\n" % str(e) ) - - tests += list_test_cases(testIntegerRandom) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_Primality.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_Primality.py deleted file mode 100644 index 475d1d4..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_Primality.py +++ /dev/null @@ -1,118 +0,0 @@ -# -# SelfTest/Math/test_Primality.py: Self-test for Primality module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-test for Math.Numbers""" - -import unittest - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Util.py3compat import * - -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Math.Primality import ( - PROBABLY_PRIME, COMPOSITE, - miller_rabin_test, lucas_test, - test_probable_prime, - generate_probable_prime, - generate_probable_safe_prime, - ) - - -class TestPrimality(unittest.TestCase): - - primes = (1, 2, 3, 5, 7, 11, 13, 17, 19, 23, 2**127-1, 175637383534939453397801320455508570374088202376942372758907369518414308188137781042871856139027160010343454418881888953150175357127346872102307696660678617989191485418582475696230580407111841072614783095326672517315988762029036079794994990250662362650625650262324085116467511357592728695033227611029693067539) - composites = (0, 4, 6, 8, 9, 10, 12, 14, 15, 16, 18, 20, 21, 7*23, (2**19-1)*(2**67-1), 9746347772161,) - - def test_miller_rabin(self): - for prime in self.primes: - self.assertEqual(miller_rabin_test(prime, 3), PROBABLY_PRIME) - for composite in self.composites: - self.assertEqual(miller_rabin_test(composite, 3), COMPOSITE) - self.assertRaises(ValueError, miller_rabin_test, -1, 3) - - def test_lucas(self): - for prime in self.primes: - res = lucas_test(prime) - self.assertEqual(res, PROBABLY_PRIME) - for composite in self.composites: - res = lucas_test(composite) - self.assertEqual(res, COMPOSITE) - self.assertRaises(ValueError, lucas_test, -1) - - def test_is_prime(self): - primes = (170141183460469231731687303715884105727, - 19175002942688032928599, - 1363005552434666078217421284621279933627102780881053358473, - 2 ** 521 - 1) - for p in primes: - self.assertEqual(test_probable_prime(p), PROBABLY_PRIME) - - not_primes = ( - 4754868377601046732119933839981363081972014948522510826417784001, - 1334733877147062382486934807105197899496002201113849920496510541601, - 260849323075371835669784094383812120359260783810157225730623388382401, - ) - for np in not_primes: - self.assertEqual(test_probable_prime(np), COMPOSITE) - - from Cryptodome.Util.number import sieve_base - for p in sieve_base[:100]: - res = test_probable_prime(p) - self.assertEqual(res, PROBABLY_PRIME) - - def test_generate_prime_bit_size(self): - p = generate_probable_prime(exact_bits=512) - self.assertEqual(p.size_in_bits(), 512) - - def test_generate_prime_filter(self): - def ending_with_one(number): - return number % 10 == 1 - - for x in range(20): - q = generate_probable_prime(exact_bits=160, - prime_filter=ending_with_one) - self.assertEqual(q % 10, 1) - - def test_generate_safe_prime(self): - p = generate_probable_safe_prime(exact_bits=161) - self.assertEqual(p.size_in_bits(), 
161) - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestPrimality) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_modexp.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_modexp.py deleted file mode 100644 index d63f43c..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_modexp.py +++ /dev/null @@ -1,201 +0,0 @@ -# -# SelfTest/Math/test_modexp.py: Self-test for module exponentiation -# -# =================================================================== -# -# Copyright (c) 2017, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -"""Self-test for the custom module exponentiation""" - -import unittest - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long - -from Cryptodome.Util.py3compat import * - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, - create_string_buffer, - get_raw_buffer, - c_size_t, - c_ulonglong) - -from Cryptodome.Hash import SHAKE128 -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Math._IntegerCustom import _raw_montgomery - -from Cryptodome.Random.random import StrongRandom - - -def create_rng(tag): - rng = StrongRandom(SHAKE128.new(data=tag)) - return rng - -class ExceptionModulus(ValueError): - pass - -def monty_pow(base, exp, modulus): - max_len = len(long_to_bytes(max(base, exp, modulus))) - - base_b, exp_b, modulus_b = [ long_to_bytes(x, max_len) for x in - (base, exp, modulus) ] - - out = create_string_buffer(max_len) - error = _raw_montgomery.monty_pow( - out, - base_b, - exp_b, - modulus_b, - c_size_t(max_len), - c_ulonglong(32) - ) - - if error == 17: - raise ExceptionModulus() - if error: - raise ValueError("monty_pow failed with error: %d" % error) - - result = bytes_to_long(get_raw_buffer(out)) - return result - -exponent1 = 0x2ce0af628901460a419a08ef950d498b9fd6f271a1a52ac293b86fe5c60efe8e8ba93fa1ebe1eb3d614d2e7b328cb60a2591440e163441a190ecf101ceec245f600fffdcf3f5b3a17a7baeacb96a424db1d7ec985e8ec998bb479fecfffed6a75f9a90fc97062fd973303bce855ad7b8d8272a94025e8532be9aabd54a183f303538d2a7e621b4131d59e823a4625f39bd7d518d7784f7c3a8f19061da74974ff42fa1c063dec2db97d461e291a7d6e721708a5229de166c1246363372854e27f3f08ae274bc16bfd205b028a4d81386494433d516dfbb35f495acba5e4e1d1843cb3c3129b6642a85fc7244ce5845fac071c7f622e4ee12ac43fabeeaa0cd01 -modulus1 = 0xd66691b20071be4d66d4b71032b37fa007cfabf579fcb91e50bfc2753b3f0ce7be74e216aef7e26d4ae180bc20d7bd3ea88a6cbf6f87380e613c8979b5b043b200a8ff8856a3b12875e36e98a7569f3852d028e967551000b02c19e9fa52e83115b89309aabb1e1cf1e2cb6369d637d46775ce4523ea31f64ad2794cbc365dd8a35e007ed3b57695877fbf102dbeb8b3212491398e494314e93726926e1383f8abb5889bea954eb8c0ca1c62c8e9d83f41888095c5e645ed6d32515fe0c58c1368cad84694e18da43668c6f43e61d7c9bca633ddcda7aef5b79bc396d4a9f48e2a9abe0836cc455e435305357228e93d25aaed46b952defae0f57339bf26f5a9 - - -class TestModExp(unittest.TestCase): - - def test_small(self): - self.assertEqual(1, monty_pow(11,12,19)) - - def test_large_1(self): - base = 0xfffffffffffffffffffffffffffffffffffffffffffffffffff - expected = pow(base, exponent1, modulus1) - result = monty_pow(base, exponent1, modulus1) - self.assertEqual(result, expected) - - def test_zero_exp(self): - base = 0xfffffffffffffffffffffffffffffffffffffffffffffffffff - result = monty_pow(base, 0, modulus1) - self.assertEqual(result, 1) - - def test_zero_base(self): - result = monty_pow(0, exponent1, modulus1) - self.assertEqual(result, 0) - - def test_zero_modulus(self): - base = 0xfffffffffffffffffffffffffffffffffffffffffffffffff - self.assertRaises(ExceptionModulus, monty_pow, base, exponent1, 0) - self.assertRaises(ExceptionModulus, monty_pow, 0, 0, 0) - - def test_larger_exponent(self): - base = modulus1 - 0xFFFFFFF - expected = pow(base, modulus1<<64, modulus1) - result = monty_pow(base, modulus1<<64, modulus1) - self.assertEqual(result, expected) - - def test_even_modulus(self): - base = modulus1 >> 4 - self.assertRaises(ExceptionModulus, monty_pow, base, exponent1, modulus1-1) - - def 
test_several_lengths(self): - prng = SHAKE128.new().update(b('Test')) - for length in range(1, 100): - modulus2 = Integer.from_bytes(prng.read(length)) | 1 - base = Integer.from_bytes(prng.read(length)) % modulus2 - exponent2 = Integer.from_bytes(prng.read(length)) - - expected = pow(base, exponent2, modulus2) - result = monty_pow(base, exponent2, modulus2) - self.assertEqual(result, expected) - - def test_variable_exponent(self): - prng = create_rng(b('Test variable exponent')) - for i in range(20): - for j in range(7): - modulus = prng.getrandbits(8*30) | 1 - base = prng.getrandbits(8*30) % modulus - exponent = prng.getrandbits(i*8+j) - - expected = pow(base, exponent, modulus) - result = monty_pow(base, exponent, modulus) - self.assertEqual(result, expected) - - exponent ^= (1 << (i*8+j)) - 1 - - expected = pow(base, exponent, modulus) - result = monty_pow(base, exponent, modulus) - self.assertEqual(result, expected) - - def test_stress_63(self): - prng = create_rng(b('Test 63')) - length = 63 - for _ in range(2000): - modulus = prng.getrandbits(8*length) | 1 - base = prng.getrandbits(8*length) % modulus - exponent = prng.getrandbits(8*length) - - expected = pow(base, exponent, modulus) - result = monty_pow(base, exponent, modulus) - self.assertEqual(result, expected) - - def test_stress_64(self): - prng = create_rng(b('Test 64')) - length = 64 - for _ in range(2000): - modulus = prng.getrandbits(8*length) | 1 - base = prng.getrandbits(8*length) % modulus - exponent = prng.getrandbits(8*length) - - expected = pow(base, exponent, modulus) - result = monty_pow(base, exponent, modulus) - self.assertEqual(result, expected) - - def test_stress_65(self): - prng = create_rng(b('Test 65')) - length = 65 - for _ in range(2000): - modulus = prng.getrandbits(8*length) | 1 - base = prng.getrandbits(8*length) % modulus - exponent = prng.getrandbits(8*length) - - expected = pow(base, exponent, modulus) - result = monty_pow(base, exponent, modulus) - self.assertEqual(result, expected) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestModExp) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_modmult.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_modmult.py deleted file mode 100644 index df794e4..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Math/test_modmult.py +++ /dev/null @@ -1,120 +0,0 @@ -# -# SelfTest/Math/test_modmult.py: Self-test for custom modular multiplication -# -# =================================================================== -# -# Copyright (c) 2023, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Self-test for the custom modular multiplication""" - -import unittest - -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long - -from Cryptodome.Util._raw_api import (create_string_buffer, - get_raw_buffer, - c_size_t) - -from Cryptodome.Math._IntegerCustom import _raw_montgomery - - -class ExceptionModulus(ValueError): - pass - - -def monty_mult(term1, term2, modulus): - - if term1 >= modulus: - term1 %= modulus - if term2 >= modulus: - term2 %= modulus - - modulus_b = long_to_bytes(modulus) - numbers_len = len(modulus_b) - term1_b = long_to_bytes(term1, numbers_len) - term2_b = long_to_bytes(term2, numbers_len) - - out = create_string_buffer(numbers_len) - error = _raw_montgomery.monty_multiply( - out, - term1_b, - term2_b, - modulus_b, - c_size_t(numbers_len) - ) - - if error == 17: - raise ExceptionModulus() - if error: - raise ValueError("monty_multiply() failed with error: %d" % error) - - return get_raw_buffer(out) - - -modulus1 = 0xd66691b20071be4d66d4b71032b37fa007cfabf579fcb91e50bfc2753b3f0ce7be74e216aef7e26d4ae180bc20d7bd3ea88a6cbf6f87380e613c8979b5b043b200a8ff8856a3b12875e36e98a7569f3852d028e967551000b02c19e9fa52e83115b89309aabb1e1cf1e2cb6369d637d46775ce4523ea31f64ad2794cbc365dd8a35e007ed3b57695877fbf102dbeb8b3212491398e494314e93726926e1383f8abb5889bea954eb8c0ca1c62c8e9d83f41888095c5e645ed6d32515fe0c58c1368cad84694e18da43668c6f43e61d7c9bca633ddcda7aef5b79bc396d4a9f48e2a9abe0836cc455e435305357228e93d25aaed46b952defae0f57339bf26f5a9 - - -class TestModMultiply(unittest.TestCase): - - def test_small(self): - self.assertEqual(b"\x01", monty_mult(5, 6, 29)) - - def test_large(self): - numbers_len = (modulus1.bit_length() + 7) // 8 - - t1 = modulus1 // 2 - t2 = modulus1 - 90 - expect = b'\x00' * (numbers_len - 1) + b'\x2d' - self.assertEqual(expect, monty_mult(t1, t2, modulus1)) - - def test_zero_term(self): - numbers_len = (modulus1.bit_length() + 7) // 8 - expect = b'\x00' * numbers_len - self.assertEqual(expect, monty_mult(0x100, 0, modulus1)) - self.assertEqual(expect, monty_mult(0, 0x100, modulus1)) - - def test_larger_term(self): - t1 = 2**2047 - expect_int = 0x8edf4071f78e3d7ba622cdbbbef74612e301d69186776ae6bf87ff38c320d9aebaa64889c2f67de2324e6bccd2b10ad89e91fd21ba4bb523904d033eff5e70e62f01a84f41fa90a4f248ef249b82e1d2729253fdfc2a3b5b740198123df8bfbf7057d03e15244ad5f26eb9a099763b5c5972121ec076b0bf899f59bd95f7cc129abddccf24217bce52ca0f3a44c9ccc504765dbb89734205f3ae6a8cc560494a60ea84b27d8e00fa24bdd5b4f1d4232edb61e47d3d984c1fa50a3820a2e580fbc3fc8bc11e99df53b9efadf5a40ac75d384e400905aa6f1d88950cd53b1c54dc2222115ad84a27260fa4d978155c1434c551de1ee7361a17a2f79d4388f78a5d - res = bytes_to_long(monty_mult(t1, t1, modulus1)) - self.assertEqual(res, expect_int) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestModMultiply) - return tests - - -if __name__ == '__main__': - def suite(): - 
return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__init__.py deleted file mode 100644 index e40f772..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# SelfTest/Protocol/__init__.py: Self-tests for Cryptodome.Protocol -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -import sys - -"""Self-test for Cryptodome.Protocol""" - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.Protocol import test_rfc1751; tests += test_rfc1751.get_tests(config=config) - from Cryptodome.SelfTest.Protocol import test_KDF; tests += test_KDF.get_tests(config=config) - from Cryptodome.SelfTest.Protocol import test_ecdh; tests += test_ecdh.get_tests(config=config) - - from Cryptodome.SelfTest.Protocol import test_SecretSharing - tests += test_SecretSharing.get_tests(config=config) - - if sys.version_info >= (3, 9): - from Cryptodome.SelfTest.Protocol import test_HPKE - tests += test_HPKE.get_tests(config=config) - - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index db882e5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_HPKE.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_HPKE.cpython-312.pyc deleted file mode 100644 index 74ad427..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_HPKE.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_KDF.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_KDF.cpython-312.pyc deleted file mode 100644 index f83dd20..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_KDF.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_SecretSharing.cpython-312.pyc 
b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_SecretSharing.cpython-312.pyc deleted file mode 100644 index 53ac58f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_SecretSharing.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_ecdh.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_ecdh.cpython-312.pyc deleted file mode 100644 index f67151d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_ecdh.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_rfc1751.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_rfc1751.cpython-312.pyc deleted file mode 100644 index a4672c4..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/__pycache__/test_rfc1751.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_HPKE.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_HPKE.py deleted file mode 100644 index d633967..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_HPKE.py +++ /dev/null @@ -1,491 +0,0 @@ -import os -import json -import unittest -from binascii import unhexlify - -from Cryptodome.Protocol import HPKE -from Cryptodome.Protocol.HPKE import DeserializeError - -from Cryptodome.PublicKey import ECC -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Protocol import DH -from Cryptodome.Hash import SHA256, SHA384, SHA512 - - -class HPKE_Tests(unittest.TestCase): - - key1 = ECC.generate(curve='p256') - key2 = ECC.generate(curve='p256') - - # name, size of enc - curves = { - 'p256': 65, - 'p384': 97, - 'p521': 133, - 'curve25519': 32, - 'curve448': 56, - } - - def round_trip(self, curve, aead_id): - key1 = ECC.generate(curve=curve) - aead_id = aead_id - - encryptor = HPKE.new(receiver_key=key1.public_key(), - aead_id=aead_id) - self.assertEqual(len(encryptor.enc), self.curves[curve]) - - # First message - ct = encryptor.seal(b'ABC', auth_data=b'DEF') - - decryptor = HPKE.new(receiver_key=key1, - aead_id=aead_id, - enc=encryptor.enc) - - pt = decryptor.unseal(ct, auth_data=b'DEF') - self.assertEqual(b'ABC', pt) - - # Second message - ct2 = encryptor.seal(b'GHI') - pt2 = decryptor.unseal(ct2) - self.assertEqual(b'GHI', pt2) - - def test_round_trip(self): - for curve in self.curves.keys(): - for aead_id in HPKE.AEAD: - self.round_trip(curve, aead_id) - - def test_psk(self): - aead_id = HPKE.AEAD.AES128_GCM - HPKE.new(receiver_key=self.key1.public_key(), - aead_id=aead_id, - psk=(b'a', b'c' * 32)) - - def test_info(self): - aead_id = HPKE.AEAD.AES128_GCM - HPKE.new(receiver_key=self.key1.public_key(), - aead_id=aead_id, - info=b'baba') - - def test_neg_unsupported_curve(self): - key3 = ECC.generate(curve='p224') - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=key3.public_key(), - aead_id=HPKE.AEAD.AES128_GCM) - self.assertIn("Unsupported curve", str(cm.exception)) - - def test_neg_too_many_private_keys(self): - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=self.key1, - sender_key=self.key2, - aead_id=HPKE.AEAD.AES128_GCM) - self.assertIn("Exactly 1 private key", str(cm.exception)) - - def test_neg_curve_mismatch(self): - key3 = ECC.generate(curve='p384') - with self.assertRaises(ValueError) as cm: - 
HPKE.new(receiver_key=self.key1.public_key(), - sender_key=key3, - aead_id=HPKE.AEAD.AES128_GCM) - self.assertIn("but recipient key", str(cm.exception)) - - def test_neg_psk(self): - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=self.key1.public_key(), - psk=(b'', b'G' * 32), - aead_id=HPKE.AEAD.AES128_GCM) - - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=self.key1.public_key(), - psk=(b'JJJ', b''), - aead_id=HPKE.AEAD.AES128_GCM) - - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=self.key1.public_key(), - psk=(b'JJJ', b'Y' * 31), - aead_id=HPKE.AEAD.AES128_GCM) - self.assertIn("at least 32", str(cm.exception)) - - def test_neg_wrong_enc(self): - wrong_enc = b'\xFF' + b'8' * 64 - with self.assertRaises(DeserializeError): - HPKE.new(receiver_key=self.key1, - aead_id=HPKE.AEAD.AES128_GCM, - enc=wrong_enc) - - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=self.key1.public_key(), - enc=self.key1.public_key().export_key(format='raw'), - aead_id=HPKE.AEAD.AES128_GCM) - self.assertIn("'enc' cannot be an input", str(cm.exception)) - - with self.assertRaises(ValueError) as cm: - HPKE.new(receiver_key=self.key1, - aead_id=HPKE.AEAD.AES128_GCM) - self.assertIn("'enc' required", str(cm.exception)) - - def test_neg_unseal_wrong_ct(self): - decryptor = HPKE.new(receiver_key=self.key1, - aead_id=HPKE.AEAD.CHACHA20_POLY1305, - enc=self.key2.public_key().export_key(format='raw')) - - with self.assertRaises(ValueError): - decryptor.unseal(b'XYZ' * 20) - - def test_neg_unseal_no_auth_data(self): - aead_id = HPKE.AEAD.CHACHA20_POLY1305 - - encryptor = HPKE.new(receiver_key=self.key1.public_key(), - aead_id=aead_id) - - ct = encryptor.seal(b'ABC', auth_data=b'DEF') - - decryptor = HPKE.new(receiver_key=self.key1, - aead_id=aead_id, - enc=encryptor.enc) - - with self.assertRaises(ValueError): - decryptor.unseal(ct) - - def test_x25519_mode_0(self): - # RFC x9180, A.1.1.1, seq 0 and 1 - - keyR_hex = "4612c550263fc8ad58375df3f557aac531d26850903e55a9f23f21d8534e8ac8" - keyR = DH.import_x25519_private_key(bytes.fromhex(keyR_hex)) - - pt_hex = "4265617574792069732074727574682c20747275746820626561757479" - pt = bytes.fromhex(pt_hex) - - ct0_hex = "f938558b5d72f1a23810b4be2ab4f84331acc02fc97babc53a52ae8218a355a96d8770ac83d07bea87e13c512a" - ct0 = bytes.fromhex(ct0_hex) - - enc_hex = "37fda3567bdbd628e88668c3c8d7e97d1d1253b6d4ea6d44c150f741f1bf4431" - enc = bytes.fromhex(enc_hex) - - aad0_hex = "436f756e742d30" - aad0 = bytes.fromhex(aad0_hex) - - aad1_hex = "436f756e742d31" - aad1 = bytes.fromhex(aad1_hex) - - info_hex = "4f6465206f6e2061204772656369616e2055726e" - info = bytes.fromhex(info_hex) - - ct1_hex = "af2d7e9ac9ae7e270f46ba1f975be53c09f8d875bdc8535458c2494e8a6eab251c03d0c22a56b8ca42c2063b84" - ct1 = bytes.fromhex(ct1_hex) - - aead_id = HPKE.AEAD.AES128_GCM - - decryptor = HPKE.new(receiver_key=keyR, - aead_id=aead_id, - info=info, - enc=enc) - - pt_X0 = decryptor.unseal(ct0, aad0) - self.assertEqual(pt_X0, pt) - - pt_X1 = decryptor.unseal(ct1, aad1) - self.assertEqual(pt_X1, pt) - - def test_x25519_mode_1(self): - # RFC x9180, A.1.2.1, seq 0 and 1 - - keyR_hex = "c5eb01eb457fe6c6f57577c5413b931550a162c71a03ac8d196babbd4e5ce0fd" - keyR = DH.import_x25519_private_key(bytes.fromhex(keyR_hex)) - - psk_id_hex = "456e6e796e20447572696e206172616e204d6f726961" - psk_id = bytes.fromhex(psk_id_hex) - - psk_hex = "0247fd33b913760fa1fa51e1892d9f307fbe65eb171e8132c2af18555a738b82" - psk = bytes.fromhex(psk_hex) - - pt_hex = 
"4265617574792069732074727574682c20747275746820626561757479" - pt = bytes.fromhex(pt_hex) - - ct0_hex = "e52c6fed7f758d0cf7145689f21bc1be6ec9ea097fef4e959440012f4feb73fb611b946199e681f4cfc34db8ea" - ct0 = bytes.fromhex(ct0_hex) - - enc_hex = "0ad0950d9fb9588e59690b74f1237ecdf1d775cd60be2eca57af5a4b0471c91b" - enc = bytes.fromhex(enc_hex) - - aad0_hex = "436f756e742d30" - aad0 = bytes.fromhex(aad0_hex) - - aad1_hex = "436f756e742d31" - aad1 = bytes.fromhex(aad1_hex) - - info_hex = "4f6465206f6e2061204772656369616e2055726e" - info = bytes.fromhex(info_hex) - - ct1_hex = "49f3b19b28a9ea9f43e8c71204c00d4a490ee7f61387b6719db765e948123b45b61633ef059ba22cd62437c8ba" - ct1 = bytes.fromhex(ct1_hex) - - aead_id = HPKE.AEAD.AES128_GCM - - decryptor = HPKE.new(receiver_key=keyR, - aead_id=aead_id, - info=info, - psk=(psk_id, psk), - enc=enc) - - pt_X0 = decryptor.unseal(ct0, aad0) - self.assertEqual(pt_X0, pt) - - pt_X1 = decryptor.unseal(ct1, aad1) - self.assertEqual(pt_X1, pt) - - def test_x25519_mode_2(self): - # RFC x9180, A.1.3.1, seq 0 and 1 - - keyR_hex = "fdea67cf831f1ca98d8e27b1f6abeb5b7745e9d35348b80fa407ff6958f9137e" - keyR = DH.import_x25519_private_key(bytes.fromhex(keyR_hex)) - - keyS_hex = "dc4a146313cce60a278a5323d321f051c5707e9c45ba21a3479fecdf76fc69dd" - keyS = DH.import_x25519_private_key(bytes.fromhex(keyS_hex)) - - pt_hex = "4265617574792069732074727574682c20747275746820626561757479" - pt = bytes.fromhex(pt_hex) - - ct0_hex = "5fd92cc9d46dbf8943e72a07e42f363ed5f721212cd90bcfd072bfd9f44e06b80fd17824947496e21b680c141b" - ct0 = bytes.fromhex(ct0_hex) - - enc_hex = "23fb952571a14a25e3d678140cd0e5eb47a0961bb18afcf85896e5453c312e76" - enc = bytes.fromhex(enc_hex) - - aad0_hex = "436f756e742d30" - aad0 = bytes.fromhex(aad0_hex) - - aad1_hex = "436f756e742d31" - aad1 = bytes.fromhex(aad1_hex) - - info_hex = "4f6465206f6e2061204772656369616e2055726e" - info = bytes.fromhex(info_hex) - - ct1_hex = "d3736bb256c19bfa93d79e8f80b7971262cb7c887e35c26370cfed62254369a1b52e3d505b79dd699f002bc8ed" - ct1 = bytes.fromhex(ct1_hex) - - aead_id = HPKE.AEAD.AES128_GCM - - decryptor = HPKE.new(receiver_key=keyR, - sender_key=keyS.public_key(), - aead_id=aead_id, - info=info, - enc=enc) - - pt_X0 = decryptor.unseal(ct0, aad0) - self.assertEqual(pt_X0, pt) - - pt_X1 = decryptor.unseal(ct1, aad1) - self.assertEqual(pt_X1, pt) - - def test_x25519_mode_3(self): - # RFC x9180, A.1.4.1, seq 0 and 1 - - keyR_hex = "cb29a95649dc5656c2d054c1aa0d3df0493155e9d5da6d7e344ed8b6a64a9423" - keyR = DH.import_x25519_private_key(bytes.fromhex(keyR_hex)) - - keyS_hex = "fc1c87d2f3832adb178b431fce2ac77c7ca2fd680f3406c77b5ecdf818b119f4" - keyS = DH.import_x25519_private_key(bytes.fromhex(keyS_hex)) - - psk_id_hex = "456e6e796e20447572696e206172616e204d6f726961" - psk_id = bytes.fromhex(psk_id_hex) - - psk_hex = "0247fd33b913760fa1fa51e1892d9f307fbe65eb171e8132c2af18555a738b82" - psk = bytes.fromhex(psk_hex) - - pt_hex = "4265617574792069732074727574682c20747275746820626561757479" - pt = bytes.fromhex(pt_hex) - - ct0_hex = "a84c64df1e11d8fd11450039d4fe64ff0c8a99fca0bd72c2d4c3e0400bc14a40f27e45e141a24001697737533e" - ct0 = bytes.fromhex(ct0_hex) - - enc_hex = "820818d3c23993492cc5623ab437a48a0a7ca3e9639c140fe1e33811eb844b7c" - enc = bytes.fromhex(enc_hex) - - aad0_hex = "436f756e742d30" - aad0 = bytes.fromhex(aad0_hex) - - aad1_hex = "436f756e742d31" - aad1 = bytes.fromhex(aad1_hex) - - info_hex = "4f6465206f6e2061204772656369616e2055726e" - info = bytes.fromhex(info_hex) - - ct1_hex = 
"4d19303b848f424fc3c3beca249b2c6de0a34083b8e909b6aa4c3688505c05ffe0c8f57a0a4c5ab9da127435d9" - ct1 = bytes.fromhex(ct1_hex) - - aead_id = HPKE.AEAD.AES128_GCM - - decryptor = HPKE.new(receiver_key=keyR, - sender_key=keyS.public_key(), - aead_id=aead_id, - psk=(psk_id, psk), - info=info, - enc=enc) - - pt_X0 = decryptor.unseal(ct0, aad0) - self.assertEqual(pt_X0, pt) - - pt_X1 = decryptor.unseal(ct1, aad1) - self.assertEqual(pt_X1, pt) - - -class HPKE_TestVectors(unittest.TestCase): - - def setUp(self): - self.vectors = [] - try: - import pycryptodome_test_vectors # type: ignore - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(init_dir, "Protocol", "wycheproof", "HPKE-test-vectors.json") - with open(full_file_name, "r") as f: - self.vectors = json.load(f) - except (FileNotFoundError, ImportError): - print("\nWarning: skipping extended tests for HPKE (install pycryptodome-test-vectors)") - - def import_private_key(self, key_hex, kem_id): - key_bin = unhexlify(key_hex) - if kem_id == 0x0010: - return ECC.construct(curve='p256', d=int.from_bytes(key_bin, - byteorder="big")) - elif kem_id == 0x0011: - return ECC.construct(curve='p384', d=int.from_bytes(key_bin, - byteorder="big")) - elif kem_id == 0x0012: - return ECC.construct(curve='p521', d=int.from_bytes(key_bin, - byteorder="big")) - elif kem_id == 0x0020: - return DH.import_x25519_private_key(key_bin) - elif kem_id == 0x0021: - return DH.import_x448_private_key(key_bin) - - def test_hpke_encap(self): - """Test HPKE encapsulation using test vectors.""" - - if not self.vectors: - self.skipTest("No test vectors available") - - for idx, vector in enumerate(self.vectors): - - kem_id = vector["kem_id"] - kdf_id = vector["kdf_id"] - aead_id = vector["aead_id"] - - # No export-only pseudo-cipher - if aead_id == 0xffff: - continue - - # We support only one KDF per curve - supported_combi = { - (0x10, 0x1): SHA256, - (0x11, 0x2): SHA384, - (0x12, 0x3): SHA512, - (0x20, 0x1): SHA256, - (0x21, 0x3): SHA512, - } - hashmod = supported_combi.get((kem_id, kdf_id)) - if hashmod is None: - continue - - with self.subTest(idx=idx, kem_id=kem_id, aead_id=aead_id): - - receiver_pub = self.import_private_key(vector["skRm"], - kem_id).public_key() - - sender_priv = None - if "skSm" in vector: - sender_priv = self.import_private_key(vector["skSm"], - kem_id) - - encap_key = self.import_private_key(vector["skEm"], kem_id) - - shared_secret, enc = HPKE.HPKE_Cipher._encap(receiver_pub, - kem_id, - hashmod, - sender_priv, - encap_key) - self.assertEqual(enc.hex(), vector["enc"]) - self.assertEqual(shared_secret, - unhexlify(vector["shared_secret"])) - - print(".", end="", flush=True) - - def test_hpke_unseal(self): - """Test HPKE encryption and decryption using test vectors.""" - - if not self.vectors: - self.skipTest("No test vectors available") - - for idx, vector in enumerate(self.vectors): - - kem_id = vector["kem_id"] - kdf_id = vector["kdf_id"] - aead_id = vector["aead_id"] - - # No export-only pseudo-cipher - if aead_id == 0xffff: - continue - - # We support only one KDF per curve - supported_combi = ( - (0x10, 0x1), - (0x11, 0x2), - (0x12, 0x3), - (0x20, 0x1), - (0x21, 0x3), - ) - if (kem_id, kdf_id) not in supported_combi: - continue - - with self.subTest(idx=idx, kem_id=kem_id, aead_id=aead_id): - - receiver_priv = self.import_private_key(vector["skRm"], - kem_id) - - sender_pub = None - if "skSm" in vector: - sender_priv = self.import_private_key(vector["skSm"], - kem_id) - sender_pub = 
sender_priv.public_key() - - encap_key = unhexlify(vector["enc"]) - - psk = None - if "psk_id" in vector: - psk = unhexlify(vector["psk_id"]), unhexlify(vector["psk"]) - - receiver_hpke = HPKE.new(receiver_key=receiver_priv, - aead_id=HPKE.AEAD(aead_id), - enc=encap_key, - sender_key=sender_pub, - psk=psk, - info=unhexlify(vector["info"])) - - for encryption in vector['encryptions']: - - plaintext = unhexlify(encryption["pt"]) - ciphertext = unhexlify(encryption["ct"]) - aad = unhexlify(encryption["aad"]) - - # Decrypt (unseal) - decrypted = receiver_hpke.unseal(ciphertext, aad) - self.assertEqual(decrypted, plaintext, "Decryption failed") - - print(".", end="", flush=True) - - -def get_tests(config={}): - - tests = [] - tests += list_test_cases(HPKE_Tests) - - if config.get('slow_tests'): - tests += list_test_cases(HPKE_TestVectors) - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_KDF.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_KDF.py deleted file mode 100644 index eafe349..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_KDF.py +++ /dev/null @@ -1,809 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -import re -import unittest -from binascii import unhexlify - -from Cryptodome.Util.py3compat import b, bchr - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof -from Cryptodome.Hash import SHA1, HMAC, SHA256, MD5, SHA224, SHA384, SHA512 -from Cryptodome.Cipher import AES, DES3 - -from Cryptodome.Protocol.KDF import (PBKDF1, PBKDF2, _S2V, HKDF, scrypt, - bcrypt, bcrypt_check, - SP800_108_Counter) - -from Cryptodome.Protocol.KDF import _bcrypt_decode - - -def t2b(t): - if t is None: - return None - t2 = t.replace(" ", "").replace("\n", "") - return unhexlify(b(t2)) - - -class TestVector(object): - pass - - -class PBKDF1_Tests(unittest.TestCase): - - # List of tuples with test data. 
- # Each tuple is made up by: - # Item #0: a pass phrase - # Item #1: salt (8 bytes encoded in hex) - # Item #2: output key length - # Item #3: iterations to use - # Item #4: expected result (encoded in hex) - _testData = ( - # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf - ("password", "78578E5A5D63CB06", 16, 1000, "DC19847E05C64D2FAF10EBFB4A3D2A20"), - ) - - def test1(self): - v = self._testData[0] - res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1) - self.assertEqual(res, t2b(v[4])) - - -class PBKDF2_Tests(unittest.TestCase): - - # List of tuples with test data. - # Each tuple is made up by: - # Item #0: a pass phrase - # Item #1: salt (encoded in hex) - # Item #2: output key length - # Item #3: iterations to use - # Item #4: hash module - # Item #5: expected result (encoded in hex) - _testData = ( - # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf - ("password","78578E5A5D63CB06",24,2048, SHA1, "BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"), - # From RFC 6050 - ("password","73616c74", 20, 1, SHA1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"), - ("password","73616c74", 20, 2, SHA1, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), - ("password","73616c74", 20, 4096, SHA1, "4b007901b765489abead49d926f721d065a429c1"), - ("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74", - 25, 4096, SHA1, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), - ( 'pass\x00word',"7361006c74",16,4096, SHA1, "56fa6aa75548099dcc37d7f03425e0c3"), - # From draft-josefsson-scrypt-kdf-01, Chapter 10 - ( 'passwd', '73616c74', 64, 1, SHA256, "55ac046e56e3089fec1691c22544b605f94185216dde0465e68b9d57c20dacbc49ca9cccf179b645991664b39d77ef317c71b845b1e30bd509112041d3a19783"), - ( 'Password', '4e61436c', 64, 80000, SHA256, "4ddcd8f60b98be21830cee5ef22701f9641a4418d04c0414aeff08876b34ab56a1d425a1225833549adb841b51c9b3176a272bdebba1d078478f62b397f33c8d"), - ) - - def test1(self): - # Test only for HMAC-SHA1 as PRF - - def prf_SHA1(p,s): - return HMAC.new(p,s,SHA1).digest() - - def prf_SHA256(p,s): - return HMAC.new(p,s,SHA256).digest() - - for i in range(len(self._testData)): - v = self._testData[i] - password = v[0] - salt = t2b(v[1]) - out_len = v[2] - iters = v[3] - hash_mod = v[4] - expected = t2b(v[5]) - - if hash_mod is SHA1: - res = PBKDF2(password, salt, out_len, iters) - self.assertEqual(res, expected) - - res = PBKDF2(password, salt, out_len, iters, prf_SHA1) - self.assertEqual(res, expected) - else: - res = PBKDF2(password, salt, out_len, iters, prf_SHA256) - self.assertEqual(res, expected) - - def test2(self): - # Verify that prf and hmac_hash_module are mutual exclusive - def prf_SHA1(p,s): - return HMAC.new(p,s,SHA1).digest() - - self.assertRaises(ValueError, PBKDF2, b("xxx"), b("yyy"), 16, 100, - prf=prf_SHA1, hmac_hash_module=SHA1) - - def test3(self): - # Verify that hmac_hash_module works like prf - - password = b("xxx") - salt = b("yyy") - - for hashmod in (MD5, SHA1, SHA224, SHA256, SHA384, SHA512): - - pr1 = PBKDF2(password, salt, 16, 100, - prf=lambda p, s: HMAC.new(p,s,hashmod).digest()) - pr2 = PBKDF2(password, salt, 16, 100, hmac_hash_module=hashmod) - - self.assertEqual(pr1, pr2) - - def test4(self): - # Verify that PBKDF2 can take bytes or strings as password or salt - k1 = PBKDF2("xxx", b("yyy"), 16, 10) - k2 = PBKDF2(b("xxx"), b("yyy"), 16, 10) - self.assertEqual(k1, k2) - - k1 = PBKDF2(b("xxx"), "yyy", 16, 10) - k2 = PBKDF2(b("xxx"), b("yyy"), 16, 10) - self.assertEqual(k1, k2) - - -class 
S2V_Tests(unittest.TestCase): - - # Sequence of test vectors. - # Each test vector is made up by: - # Item #0: a tuple of strings - # Item #1: an AES key - # Item #2: the result - # Item #3: the cipher module S2V is based on - # Everything is hex encoded - _testData = [ - - # RFC5297, A.1 - ( - ( '101112131415161718191a1b1c1d1e1f2021222324252627', - '112233445566778899aabbccddee' ), - 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0', - '85632d07c6e8f37f950acd320a2ecc93', - AES - ), - - # RFC5297, A.2 - ( - ( '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddcc'+ - 'bbaa99887766554433221100', - '102030405060708090a0', - '09f911029d74e35bd84156c5635688c0', - '7468697320697320736f6d6520706c61'+ - '696e7465787420746f20656e63727970'+ - '74207573696e67205349562d414553'), - '7f7e7d7c7b7a79787776757473727170', - '7bdb6e3b432667eb06f4d14bff2fbd0f', - AES - ), - - ] - - def test1(self): - """Verify correctness of test vector""" - for tv in self._testData: - s2v = _S2V.new(t2b(tv[1]), tv[3]) - for s in tv[0]: - s2v.update(t2b(s)) - result = s2v.derive() - self.assertEqual(result, t2b(tv[2])) - - def test2(self): - """Verify that no more than 127(AES) and 63(TDES) - components are accepted.""" - key = bchr(0) * 8 + bchr(255) * 8 - for module in (AES, DES3): - s2v = _S2V.new(key, module) - max_comps = module.block_size*8-1 - for i in range(max_comps): - s2v.update(b("XX")) - self.assertRaises(TypeError, s2v.update, b("YY")) - - -class HKDF_Tests(unittest.TestCase): - - # Test vectors from RFC5869, Appendix A - # Each tuple is made up by: - # Item #0: hash module - # Item #1: secret - # Item #2: salt - # Item #3: context - # Item #4: expected result - _test_vector = ( - ( - SHA256, - "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", - "000102030405060708090a0b0c", - "f0f1f2f3f4f5f6f7f8f9", - 42, - "3cb25f25faacd57a90434f64d0362f2a" + - "2d2d0a90cf1a5a4c5db02d56ecc4c5bf" + - "34007208d5b887185865" - ), - ( - SHA256, - "000102030405060708090a0b0c0d0e0f" + - "101112131415161718191a1b1c1d1e1f" + - "202122232425262728292a2b2c2d2e2f" + - "303132333435363738393a3b3c3d3e3f" + - "404142434445464748494a4b4c4d4e4f", - "606162636465666768696a6b6c6d6e6f" + - "707172737475767778797a7b7c7d7e7f" + - "808182838485868788898a8b8c8d8e8f" + - "909192939495969798999a9b9c9d9e9f" + - "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf", - "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" + - "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" + - "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" + - "e0e1e2e3e4e5e6e7e8e9eaebecedeeef" + - "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", - 82, - "b11e398dc80327a1c8e7f78c596a4934" + - "4f012eda2d4efad8a050cc4c19afa97c" + - "59045a99cac7827271cb41c65e590e09" + - "da3275600c2f09b8367793a9aca3db71" + - "cc30c58179ec3e87c14c01d5c1f3434f" + - "1d87" - ), - ( - SHA256, - "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", - None, - None, - 42, - "8da4e775a563c18f715f802a063c5a31" + - "b8a11f5c5ee1879ec3454e5f3c738d2d" + - "9d201395faa4b61a96c8" - ), - ( - SHA1, - "0b0b0b0b0b0b0b0b0b0b0b", - "000102030405060708090a0b0c", - "f0f1f2f3f4f5f6f7f8f9", - 42, - "085a01ea1b10f36933068b56efa5ad81" + - "a4f14b822f5b091568a9cdd4f155fda2" + - "c22e422478d305f3f896" - ), - ( - SHA1, - "000102030405060708090a0b0c0d0e0f" + - "101112131415161718191a1b1c1d1e1f" + - "202122232425262728292a2b2c2d2e2f" + - "303132333435363738393a3b3c3d3e3f" + - "404142434445464748494a4b4c4d4e4f", - "606162636465666768696a6b6c6d6e6f" + - "707172737475767778797a7b7c7d7e7f" + - "808182838485868788898a8b8c8d8e8f" + - "909192939495969798999a9b9c9d9e9f" + - "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf", - 
"b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" + - "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" + - "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" + - "e0e1e2e3e4e5e6e7e8e9eaebecedeeef" + - "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", - 82, - "0bd770a74d1160f7c9f12cd5912a06eb" + - "ff6adcae899d92191fe4305673ba2ffe" + - "8fa3f1a4e5ad79f3f334b3b202b2173c" + - "486ea37ce3d397ed034c7f9dfeb15c5e" + - "927336d0441f4c4300e2cff0d0900b52" + - "d3b4" - ), - ( - SHA1, - "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", - "", - "", - 42, - "0ac1af7002b3d761d1e55298da9d0506" + - "b9ae52057220a306e07b6b87e8df21d0" + - "ea00033de03984d34918" - ), - ( - SHA1, - "0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c", - None, - "", - 42, - "2c91117204d745f3500d636a62f64f0a" + - "b3bae548aa53d423b0d1f27ebba6f5e5" + - "673a081d70cce7acfc48" - ) - ) - - def test1(self): - for tv in self._test_vector: - secret, salt, info, exp = [ t2b(tv[x]) for x in (1,2,3,5) ] - key_len, hashmod = [ tv[x] for x in (4,0) ] - - output = HKDF(secret, key_len, salt, hashmod, 1, info) - self.assertEqual(output, exp) - - def test2(self): - ref = HKDF(b("XXXXXX"), 12, b("YYYY"), SHA1) - - # Same output, but this time split over 2 keys - key1, key2 = HKDF(b("XXXXXX"), 6, b("YYYY"), SHA1, 2) - self.assertEqual((ref[:6], ref[6:]), (key1, key2)) - - # Same output, but this time split over 3 keys - key1, key2, key3 = HKDF(b("XXXXXX"), 4, b("YYYY"), SHA1, 3) - self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3)) - - -class scrypt_Tests(unittest.TestCase): - - # Test vectors taken from - # https://tools.ietf.org/html/rfc7914 - # - password - # - salt - # - N - # - r - # - p - data = ( - ( - "", - "", - 16, # 2K - 1, - 1, - """ - 77 d6 57 62 38 65 7b 20 3b 19 ca 42 c1 8a 04 97 - f1 6b 48 44 e3 07 4a e8 df df fa 3f ed e2 14 42 - fc d0 06 9d ed 09 48 f8 32 6a 75 3a 0f c8 1f 17 - e8 d3 e0 fb 2e 0d 36 28 cf 35 e2 0c 38 d1 89 06 - """ - ), - ( - "password", - "NaCl", - 1024, # 1M - 8, - 16, - """ - fd ba be 1c 9d 34 72 00 78 56 e7 19 0d 01 e9 fe - 7c 6a d7 cb c8 23 78 30 e7 73 76 63 4b 37 31 62 - 2e af 30 d9 2e 22 a3 88 6f f1 09 27 9d 98 30 da - c7 27 af b9 4a 83 ee 6d 83 60 cb df a2 cc 06 40 - """ - ), - ( - "pleaseletmein", - "SodiumChloride", - 16384, # 16M - 8, - 1, - """ - 70 23 bd cb 3a fd 73 48 46 1c 06 cd 81 fd 38 eb - fd a8 fb ba 90 4f 8e 3e a9 b5 43 f6 54 5d a1 f2 - d5 43 29 55 61 3f 0f cf 62 d4 97 05 24 2a 9a f9 - e6 1e 85 dc 0d 65 1e 40 df cf 01 7b 45 57 58 87 - """ - ), - ( - "pleaseletmein", - "SodiumChloride", - 1048576, # 1G - 8, - 1, - """ - 21 01 cb 9b 6a 51 1a ae ad db be 09 cf 70 f8 81 - ec 56 8d 57 4a 2f fd 4d ab e5 ee 98 20 ad aa 47 - 8e 56 fd 8f 4b a5 d0 9f fa 1c 6d 92 7c 40 f4 c3 - 37 30 40 49 e8 a9 52 fb cb f4 5c 6f a7 7a 41 a4 - """ - ), - ) - - def setUp(self): - new_test_vectors = [] - for tv in self.data: - new_tv = TestVector() - new_tv.P = b(tv[0]) - new_tv.S = b(tv[1]) - new_tv.N = tv[2] - new_tv.r = tv[3] - new_tv.p = tv[4] - new_tv.output = t2b(tv[5]) - new_tv.dkLen = len(new_tv.output) - new_test_vectors.append(new_tv) - self.data = new_test_vectors - - def test2(self): - - for tv in self.data: - try: - output = scrypt(tv.P, tv.S, tv.dkLen, tv.N, tv.r, tv.p) - except ValueError as e: - if " 2 " in str(e) and tv.N >= 1048576: - import warnings - warnings.warn("Not enough memory to unit test scrypt() with N=1048576", RuntimeWarning) - continue - else: - raise e - self.assertEqual(output, tv.output) - - def test3(self): - ref = scrypt(b("password"), b("salt"), 12, 16, 1, 1) - - # Same output, but this time split over 2 keys - key1, key2 = 
scrypt(b("password"), b("salt"), 6, 16, 1, 1, 2) - self.assertEqual((ref[:6], ref[6:]), (key1, key2)) - - # Same output, but this time split over 3 keys - key1, key2, key3 = scrypt(b("password"), b("salt"), 4, 16, 1, 1, 3) - self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3)) - - -class bcrypt_Tests(unittest.TestCase): - - def test_negative_cases(self): - self.assertRaises(ValueError, bcrypt, b"1" * 73, 10) - self.assertRaises(ValueError, bcrypt, b"1" * 10, 3) - self.assertRaises(ValueError, bcrypt, b"1" * 10, 32) - self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"") - self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"1") - self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"1" * 17) - self.assertRaises(ValueError, bcrypt, b"1\x00" * 10, 4) - - def test_bytearray_mismatch(self): - ref = bcrypt("pwd", 4) - bcrypt_check("pwd", ref) - bref = bytearray(ref) - bcrypt_check("pwd", bref) - - wrong = ref[:-1] + bchr(bref[-1] ^ 0x01) - self.assertRaises(ValueError, bcrypt_check, "pwd", wrong) - - wrong = b"x" + ref[1:] - self.assertRaises(ValueError, bcrypt_check, "pwd", wrong) - - # https://github.com/patrickfav/bcrypt/wiki/Published-Test-Vectors - - def test_empty_password(self): - # password, cost, salt, bcrypt hash - tvs = [ - (b"", 4, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$04$zVHmKQtGGQob.b/Nc7l9NO8UlrYcW05FiuCj/SxsFO/ZtiN9.mNzy"), - (b"", 5, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$05$zVHmKQtGGQob.b/Nc7l9NOWES.1hkVBgy5IWImh9DOjKNU8atY4Iy"), - (b"", 6, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$06$zVHmKQtGGQob.b/Nc7l9NOjOl7l4oz3WSh5fJ6414Uw8IXRAUoiaO"), - (b"", 7, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$07$zVHmKQtGGQob.b/Nc7l9NOBsj1dQpBA1HYNGpIETIByoNX9jc.hOi"), - (b"", 8, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$08$zVHmKQtGGQob.b/Nc7l9NOiLTUh/9MDpX86/DLyEzyiFjqjBFePgO"), - ] - - for (idx, (password, cost, salt64, result)) in enumerate(tvs): - x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) - self.assertEqual(x, result) - bcrypt_check(password, result) - - def test_random_password_and_salt_short_pw(self): - # password, cost, salt, bcrypt hash - tvs = [ - (b"<.S.2K(Zq'", 4, b"VYAclAMpaXY/oqAo9yUpku", b"$2a$04$VYAclAMpaXY/oqAo9yUpkuWmoYywaPzyhu56HxXpVltnBIfmO9tgu"), - (b"5.rApO%5jA", 5, b"kVNDrnYKvbNr5AIcxNzeIu", b"$2a$05$kVNDrnYKvbNr5AIcxNzeIuRcyIF5cZk6UrwHGxENbxP5dVv.WQM/G"), - (b"oW++kSrQW^", 6, b"QLKkRMH9Am6irtPeSKN5sO", b"$2a$06$QLKkRMH9Am6irtPeSKN5sObJGr3j47cO6Pdf5JZ0AsJXuze0IbsNm"), - (b"ggJ\\KbTnDG", 7, b"4H896R09bzjhapgCPS/LYu", b"$2a$07$4H896R09bzjhapgCPS/LYuMzAQluVgR5iu/ALF8L8Aln6lzzYXwbq"), - (b"49b0:;VkH/", 8, b"hfvO2retKrSrx5f2RXikWe", b"$2a$08$hfvO2retKrSrx5f2RXikWeFWdtSesPlbj08t/uXxCeZoHRWDz/xFe"), - (b">9N^5jc##'", 9, b"XZLvl7rMB3EvM0c1.JHivu", b"$2a$09$XZLvl7rMB3EvM0c1.JHivuIDPJWeNJPTVrpjZIEVRYYB/mF6cYgJK"), - (b"\\$ch)s4WXp", 10, b"aIjpMOLK5qiS9zjhcHR5TO", b"$2a$10$aIjpMOLK5qiS9zjhcHR5TOU7v2NFDmcsBmSFDt5EHOgp/jeTF3O/q"), - (b"RYoj\\_>2P7", 12, b"esIAHiQAJNNBrsr5V13l7.", b"$2a$12$esIAHiQAJNNBrsr5V13l7.RFWWJI2BZFtQlkFyiWXjou05GyuREZa"), - ] - - for (idx, (password, cost, salt64, result)) in enumerate(tvs): - x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) - self.assertEqual(x, result) - bcrypt_check(password, result) - - def test_random_password_and_salt_long_pw(self): - # password, cost, salt, bcrypt hash - tvs = [ - (b"^Q&\"]A`%/A(BVGt>QaX0M-#1ghq_+\":Y0CRmY", 5, b"YuQvhokOGVnevctykUYpKu", b"$2a$05$YuQvhokOGVnevctykUYpKutZD2pWeGGYn3auyLOasguMY3/0BbIyq"), - (b"F%uN/j>[GuB7-jB'_Yj!Tnb7Y!u^6)", 6, b"5L3vpQ0tG9O7k5gQ8nAHAe", 
b"$2a$06$5L3vpQ0tG9O7k5gQ8nAHAe9xxQiOcOLh8LGcI0PLWhIznsDt.S.C6"), - (b"Z>BobP32ub\"Cfe*Q<-q-=tRSjOBh8\\mLNW.", 9, b"nArqOfdCsD9kIbVnAixnwe", b"$2a$09$nArqOfdCsD9kIbVnAixnwe6s8QvyPYWtQBpEXKir2OJF9/oNBsEFe"), - (b"/MH51`!BP&0tj3%YCA;Xk%e3S`o\\EI", 10, b"ePiAc.s.yoBi3B6p1iQUCe", b"$2a$10$ePiAc.s.yoBi3B6p1iQUCezn3mraLwpVJ5XGelVyYFKyp5FZn/y.u"), - (b"ptAP\"mcg6oH.\";c0U2_oll.OKi5?Ui\"^ai#iQH7ZFtNMfs3AROnIncE9\"BNNoEgO[[*Yk8;RQ(#S,;I+aT", - 5, b"wgkOlGNXIVE2fWkT3gyRoO", b"$2a$05$wgkOlGNXIVE2fWkT3gyRoOqWi4gbi1Wv2Q2Jx3xVs3apl1w.Wtj8C"), - (b"M.E1=dt<.L0Q&p;94NfGm_Oo23+Kpl@M5?WIAL.[@/:'S)W96G8N^AWb7_smmC]>7#fGoB", - 6, b"W9zTCl35nEvUukhhFzkKMe", b"$2a$06$W9zTCl35nEvUukhhFzkKMekjT9/pj7M0lihRVEZrX3m8/SBNZRX7i"), - ] - - for (idx, (password, cost, salt64, result)) in enumerate(tvs): - x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) - self.assertEqual(x, result) - bcrypt_check(password, result) - - def test_increasing_password_length(self): - # password, cost, salt, bcrypt hash - tvs = [ - (b"a", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.l4WvgHIVg17ZawDIrDM2IjlE64GDNQS"), - (b"aa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.AyUxBk.ThHlsLvRTH7IqcG7yVHJ3SXq"), - (b"aaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.BxOVac5xPB6XFdRc/ZrzM9FgZkqmvbW"), - (b"aaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.Qbr209bpCtfl5hN7UQlG/L4xiD3AKau"), - (b"aaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.oWszihPjDZI0ypReKsaDOW1jBl7oOii"), - (b"aaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ./k.Xxn9YiqtV/sxh3EHbnOHd0Qsq27K"), - (b"aaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.PYJqRFQbgRbIjMd5VNKmdKS4sBVOyDe"), - (b"aaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ..VMYfzaw1wP/SGxowpLeGf13fxCCt.q"), - (b"aaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.5B0p054nO5WgAD1n04XslDY/bqY9RJi"), - (b"aaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.INBTgqm7sdlBJDg.J5mLMSRK25ri04y"), - (b"aaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.s3y7CdFD0OR5p6rsZw/eZ.Dla40KLfm"), - (b"aaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.Jx742Djra6Q7PqJWnTAS.85c28g.Siq"), - (b"aaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.oKMXW3EZcPHcUV0ib5vDBnh9HojXnLu"), - (b"aaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.w6nIjWpDPNSH5pZUvLjC1q25ONEQpeS"), - (b"aaaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.k1b2/r9A/hxdwKEKurg6OCn4MwMdiGq"), - (b"aaaaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.3prCNHVX1Ws.7Hm2bJxFUnQOX9f7DFa"), - ] - - for (idx, (password, cost, salt64, result)) in enumerate(tvs): - x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) - self.assertEqual(x, result) - bcrypt_check(password, result) - - def test_non_ascii_characters(self): - # password, cost, salt, bcrypt hash - tvs = [ - ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 4, b"D3qS2aoTVyqM7z8v8crLm.", b"$2a$04$D3qS2aoTVyqM7z8v8crLm.3nKt4CzBZJbyFB.ZebmfCvRw7BGs.Xm"), - ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 5, b"VA1FujiOCMPkUHQ8kF7IaO", b"$2a$05$VA1FujiOCMPkUHQ8kF7IaOg7NGaNvpxwWzSluQutxEVmbZItRTsAa"), - ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 6, b"TXiaNrPeBSz5ugiQlehRt.", b"$2a$06$TXiaNrPeBSz5ugiQlehRt.gwpeDQnXWteQL4z2FulouBr6G7D9KUi"), - ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 4, b"YTn1Qlvps8e1odqMn6G5x.", 
b"$2a$04$YTn1Qlvps8e1odqMn6G5x.85pqKql6w773EZJAExk7/BatYAI4tyO"), - ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 5, b"C.8k5vJKD2NtfrRI9o17DO", b"$2a$05$C.8k5vJKD2NtfrRI9o17DOfIW0XnwItA529vJnh2jzYTb1QdoY0py"), - ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 6, b"xqfRPj3RYAgwurrhcA6uRO", b"$2a$06$xqfRPj3RYAgwurrhcA6uROtGlXDp/U6/gkoDYHwlubtcVcNft5.vW"), - ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 4, b"y8vGgMmr9EdyxP9rmMKjH.", b"$2a$04$y8vGgMmr9EdyxP9rmMKjH.wv2y3r7yRD79gykQtmb3N3zrwjKsyay"), - ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 5, b"iYH4XIKAOOm/xPQs7xKP1u", b"$2a$05$iYH4XIKAOOm/xPQs7xKP1upD0cWyMn3Jf0ZWiizXbEkVpS41K1dcO"), - ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 6, b"wCOob.D0VV8twafNDB2ape", b"$2a$06$wCOob.D0VV8twafNDB2apegiGD5nqF6Y1e6K95q6Y.R8C4QGd265q"), - ("ΔημοσιεύθηκεστηνΕφημερίδατης", 4, b"E5SQtS6P4568MDXW7cyUp.", b"$2a$04$E5SQtS6P4568MDXW7cyUp.18wfDisKZBxifnPZjAI1d/KTYMfHPYO"), - ("АБбВвГгДдЕеЁёЖжЗзИиЙйКкЛлМмН", 4, b"03e26gQFHhQwRNf81/ww9.", b"$2a$04$03e26gQFHhQwRNf81/ww9.p1UbrNwxpzWjLuT.zpTLH4t/w5WhAhC"), - ("нОоПпРрСсТтУуФфХхЦцЧчШшЩщЪъЫыЬьЭэЮю", 4, b"PHNoJwpXCfe32nUtLv2Upu", b"$2a$04$PHNoJwpXCfe32nUtLv2UpuhJXOzd4k7IdFwnEpYwfJVCZ/f/.8Pje"), - ("電电電島岛島兔兔兎龜龟亀國国国區区区", 4, b"wU4/0i1TmNl2u.1jIwBX.u", b"$2a$04$wU4/0i1TmNl2u.1jIwBX.uZUaOL3Rc5ID7nlQRloQh6q5wwhV/zLW"), - ("诶比伊艾弗豆贝尔维吾艾尺开艾丝维贼德", 4, b"P4kreGLhCd26d4WIy7DJXu", b"$2a$04$P4kreGLhCd26d4WIy7DJXusPkhxLvBouzV6OXkL5EB0jux0osjsry"), - ] - - for (idx, (password, cost, salt64, result)) in enumerate(tvs): - x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) - self.assertEqual(x, result) - bcrypt_check(password, result) - - def test_special_case_salt(self): - # password, cost, salt, bcrypt hash - tvs = [ - ("-O_=*N!2JP", 4, b"......................", b"$2a$04$......................JjuKLOX9OOwo5PceZZXSkaLDvdmgb82"), - ("7B[$Q<4b>U", 5, b"......................", b"$2a$05$......................DRiedDQZRL3xq5A5FL8y7/6NM8a2Y5W"), - (">d5-I_8^.h", 6, b"......................", b"$2a$06$......................5Mq1Ng8jgDY.uHNU4h5p/x6BedzNH2W"), - (")V`/UM/]1t", 4, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$04$.OC/.OC/.OC/.OC/.OC/.OQIvKRDAam.Hm5/IaV/.hc7P8gwwIbmi"), - (":@t2.bWuH]", 5, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$05$.OC/.OC/.OC/.OC/.OC/.ONDbUvdOchUiKmQORX6BlkPofa/QxW9e"), - ("b(#KljF5s\"", 6, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$06$.OC/.OC/.OC/.OC/.OC/.OHfTd9e7svOu34vi1PCvOcAEq07ST7.K"), - ("@3YaJ^Xs]*", 4, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$04$eGA.eGA.eGA.eGA.eGA.e.stcmvh.R70m.0jbfSFVxlONdj1iws0C"), - ("'\"5\\!k*C(p", 5, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$05$eGA.eGA.eGA.eGA.eGA.e.vR37mVSbfdHwu.F0sNMvgn8oruQRghy"), - ("edEu7C?$'W", 6, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$06$eGA.eGA.eGA.eGA.eGA.e.tSq0FN8MWHQXJXNFnHTPQKtA.n2a..G"), - ("N7dHmg\\PI^", 4, b"999999999999999999999u", b"$2a$04$999999999999999999999uCZfA/pLrlyngNDMq89r1uUk.bQ9icOu"), - ("\"eJuHh!)7*", 5, b"999999999999999999999u", b"$2a$05$999999999999999999999uj8Pfx.ufrJFAoWFLjapYBS5vVEQQ/hK"), - ("ZeDRJ:_tu:", 6, b"999999999999999999999u", b"$2a$06$999999999999999999999u6RB0P9UmbdbQgjoQFEJsrvrKe.BoU6q"), - ] - - for (idx, (password, cost, salt64, result)) in enumerate(tvs): - x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) - self.assertEqual(x, result) - bcrypt_check(password, result) - - -class TestVectorsHKDFWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._id = "None" - - def add_tests(self, filename): - - def filter_algo(root): - algo_name = root['algorithm'] - if algo_name == "HKDF-SHA-1": - return SHA1 - elif algo_name == "HKDF-SHA-256": - return 
SHA256 - elif algo_name == "HKDF-SHA-384": - return SHA384 - elif algo_name == "HKDF-SHA-512": - return SHA512 - else: - raise ValueError("Unknown algorithm " + algo_name) - - def filter_size(unit): - return int(unit['size']) - - result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), - filename, - "Wycheproof HMAC (%s)" % filename, - root_tag={'hash_module': filter_algo}, - unit_tag={'size': filter_size}) - return result - - def setUp(self): - self.tv = [] - self.add_tests("hkdf_sha1_test.json") - self.add_tests("hkdf_sha256_test.json") - self.add_tests("hkdf_sha384_test.json") - self.add_tests("hkdf_sha512_test.json") - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_verify(self, tv): - self._id = "Wycheproof HKDF Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - - try: - key = HKDF(tv.ikm, tv.size, tv.salt, tv.hash_module, 1, tv.info) - except ValueError: - assert not tv.valid - else: - if key != tv.okm: - assert not tv.valid - else: - assert tv.valid - self.warn(tv) - - def runTest(self): - for tv in self.tv: - self.test_verify(tv) - - -def load_hash_by_name(hash_name): - return __import__("Cryptodome.Hash." + hash_name, globals(), locals(), ["new"]) - - -class SP800_108_Counter_Tests(unittest.TestCase): - - def test_negative_zeroes(self): - def prf(s, x): - return HMAC.new(s, x, SHA256).digest() - - try: - _ = SP800_108_Counter(b'0' * 16, 1, prf, label=b'A\x00B') - except ValueError: - self.fail('SP800_108_Counter failed with zero in label') - self.assertRaises(ValueError, SP800_108_Counter, b'0' * 16, 1, prf, - context=b'A\x00B') - - def test_multiple_keys(self): - def prf(s, x): - return HMAC.new(s, x, SHA256).digest() - - key = b'0' * 16 - expected = SP800_108_Counter(key, 2*3*23, prf) - for r in (1, 2, 3, 23): - dks = SP800_108_Counter(key, r, prf, 138//r) - self.assertEqual(len(dks), 138//r) - self.assertEqual(len(dks[0]), r) - self.assertEqual(b''.join(dks), expected) - - -def add_tests_sp800_108_counter(cls): - - test_vectors_sp800_108_counter = load_test_vectors(("Protocol", ), - "KDF_SP800_108_COUNTER.txt", - "NIST SP 800 108 KDF Counter Mode", - {'count': lambda x: int(x)}, - ) or [] - - mac_type = None - for idx, tv in enumerate(test_vectors_sp800_108_counter): - - if isinstance(tv, str): - res = re.match(r"\[HMAC-(SHA-[0-9]+)\]", tv) - if res: - hash_name = res.group(1).replace("-", "") - hash_module = load_hash_by_name(hash_name) - mac_type = "hmac" - continue - res = re.match(r"\[CMAC-AES-128\]", tv) - if res: - mac_type = "cmac" - continue - assert res - - if mac_type == "hmac": - def prf(s, x, hash_module=hash_module): - return HMAC.new(s, x, hash_module).digest() - elif mac_type == "cmac": - def prf(s, x, hash_module=hash_module): - return CMAC.new(s, x, AES).digest() - continue - - def kdf_test(self, prf=prf, kin=tv.kin, label=tv.label, - context=tv.context, kout=tv.kout, count=tv.count): - result = SP800_108_Counter(kin, len(kout), prf, 1, label, context) - assert(len(result) == len(kout)) - self.assertEqual(result, kout) - - setattr(cls, "test_kdf_sp800_108_counter_%d" % idx, kdf_test) - - -add_tests_sp800_108_counter(SP800_108_Counter_Tests) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - if not config.get('slow_tests'): - PBKDF2_Tests._testData = PBKDF2_Tests._testData[:3] - scrypt_Tests.data = scrypt_Tests.data[:3] - - tests = [] - 
tests += list_test_cases(PBKDF1_Tests) - tests += list_test_cases(PBKDF2_Tests) - tests += list_test_cases(S2V_Tests) - tests += list_test_cases(HKDF_Tests) - tests += [TestVectorsHKDFWycheproof(wycheproof_warnings)] - tests += list_test_cases(scrypt_Tests) - tests += list_test_cases(bcrypt_Tests) - tests += list_test_cases(SP800_108_Counter_Tests) - - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_SecretSharing.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_SecretSharing.py deleted file mode 100644 index afcbb23..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_SecretSharing.py +++ /dev/null @@ -1,290 +0,0 @@ -# -# SelfTest/Protocol/test_secret_sharing.py: Self-test for secret sharing protocols -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
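For context, the test_KDF.py file removed above exercises the counter-mode KDF from NIST SP 800-108 via Cryptodome.Protocol.KDF.SP800_108_Counter. A minimal usage sketch mirroring the calls in those tests (the master key, label and context below are illustrative only, not values from this diff):

from Cryptodome.Hash import HMAC, SHA256
from Cryptodome.Protocol.KDF import SP800_108_Counter

def prf(s, x):
    # HMAC-SHA256 as the PRF, exactly as in the removed tests
    return HMAC.new(s, x, SHA256).digest()

master = b'0' * 16  # illustrative master key
# (master, key_len, prf, num_keys, label, context), as in the removed kdf_test
derived = SP800_108_Counter(master, 32, prf, 1, b'encryption', b'session-1')
assert len(derived) == 32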
-# =================================================================== - -from unittest import main, TestCase, TestSuite -from binascii import unhexlify, hexlify - -from Cryptodome.Util.py3compat import * -from Cryptodome.Hash import SHAKE128 -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Protocol.SecretSharing import Shamir, _Element, \ - _mult_gf2, _div_gf2 - - -class GF2_Tests(TestCase): - - def test_mult_gf2(self): - # Prove mult by zero - x = _mult_gf2(0,0) - self.assertEqual(x, 0) - - # Prove mult by unity - x = _mult_gf2(34, 1) - self.assertEqual(x, 34) - - z = 3 # (x+1) - y = _mult_gf2(z, z) - self.assertEqual(y, 5) # (x+1)^2 = x^2 + 1 - y = _mult_gf2(y, z) - self.assertEqual(y, 15) # (x+1)^3 = x^3 + x^2 + x + 1 - y = _mult_gf2(y, z) - self.assertEqual(y, 17) # (x+1)^4 = x^4 + 1 - - # Prove linearity works - comps = [1, 4, 128, 2**34] - sum_comps = 1+4+128+2**34 - y = 908 - z = _mult_gf2(sum_comps, y) - w = 0 - for x in comps: - w ^= _mult_gf2(x, y) - self.assertEqual(w, z) - - def test_div_gf2(self): - from Cryptodome.Util.number import size as deg - - x, y = _div_gf2(567, 7) - self.assertTrue(deg(y) < deg(7)) - - w = _mult_gf2(x, 7) ^ y - self.assertEqual(567, w) - - x, y = _div_gf2(7, 567) - self.assertEqual(x, 0) - self.assertEqual(y, 7) - -class Element_Tests(TestCase): - - def test1(self): - # Test encondings - e = _Element(256) - self.assertEqual(int(e), 256) - self.assertEqual(e.encode(), bchr(0)*14 + b("\x01\x00")) - - e = _Element(bchr(0)*14 + b("\x01\x10")) - self.assertEqual(int(e), 0x110) - self.assertEqual(e.encode(), bchr(0)*14 + b("\x01\x10")) - - # Only 16 byte string are a valid encoding - self.assertRaises(ValueError, _Element, bchr(0)) - - def test2(self): - # Test addition - e = _Element(0x10) - f = _Element(0x0A) - self.assertEqual(int(e+f), 0x1A) - - def test3(self): - # Test multiplication - zero = _Element(0) - one = _Element(1) - two = _Element(2) - - x = _Element(6) * zero - self.assertEqual(int(x), 0) - - x = _Element(6) * one - self.assertEqual(int(x), 6) - - x = _Element(2**127) * two - self.assertEqual(int(x), 1 + 2 + 4 + 128) - - def test4(self): - # Test inversion - one = _Element(1) - - x = one.inverse() - self.assertEqual(int(x), 1) - - x = _Element(82323923) - y = x.inverse() - self.assertEqual(int(x * y), 1) - - -class Shamir_Tests(TestCase): - - def test1(self): - # Test splitting - shares = Shamir.split(2, 3, bchr(90)*16) - self.assertEqual(len(shares), 3) - for index in range(3): - self.assertEqual(shares[index][0], index+1) - self.assertEqual(len(shares[index][1]), 16) - - def test2(self): - # Test recombine - from itertools import permutations - - # Generated by ssss (index, secret, shares) - # in hex mode, without "diffusion" mode - test_vectors = ( - (2, "d9fe73909bae28b3757854c0af7ad405", - "1-594ae8964294174d95c33756d2504170", - "2-d897459d29da574eb40e93ec552ffe6e", - "3-5823de9bf0e068b054b5f07a28056b1b", - "4-db2c1f8bff46d748f795da995bd080cb"), - (2, "bf4f902d9a7efafd1f3ffd9291fd5de9", - "1-557bd3b0748064b533469722d1cc7935", - "2-6b2717164783c66d47cd28f2119f14d0", - "3-8113548ba97d58256bb4424251ae300c", - "4-179e9e5a218483ddaeda57539139cf04"), - (3, "ec96aa5c14c9faa699354cf1da74e904", - "1-64579fbf1908d66f7239bf6e2b4e41e1", - "2-6cd9428df8017b52322561e8c672ae3e", - "3-e418776ef5c0579bd9299277374806dd", - "4-ab3f77a0107398d23b323e581bb43f5d", - "5-23fe42431db2b41bd03ecdc7ea8e97ac"), - (3, "44cf249b68b80fcdc27b47be60c2c145", - "1-d6515a3905cd755119b86e311c801e31", - "2-16693d9ac9f10c254036ced5f8917fa3", - 
"3-84f74338a48476b99bf5e75a84d3a0d1", - "4-3fe8878dc4a5d35811cf3cbcd33dbe52", - "5-ad76f92fa9d0a9c4ca0c1533af7f6132"), - (5, "5398717c982db935d968eebe53a47f5a", - "1-be7be2dd4c068e7ef576aaa1b1c11b01", - "2-f821f5848441cb98b3eb467e2733ee21", - "3-25ee52f53e203f6e29a0297b5ab486b5", - "4-fc9fb58ef74dab947fbf9acd9d5d83cd", - "5-b1949cce46d81552e65f248d3f74cc5c", - "6-d64797f59977c4d4a7956ad916da7699", - "7-ab608a6546a8b9af8820ff832b1135c7"), - (5, "4a78db90fbf35da5545d2fb728e87596", - "1-08daf9a25d8aa184cfbf02b30a0ed6a0", - "2-dda28261e36f0b14168c2cf153fb734e", - "3-e9fdec5505d674a57f9836c417c1ecaa", - "4-4dce5636ae06dee42d2c82e65f06c735", - "5-3963dc118afc2ba798fa1d452b28ef00", - "6-6dfe6ff5b09e94d2f84c382b12f42424", - "7-6faea9d4d4a4e201bf6c90b9000630c3"), - (10, "eccbf6d66d680b49b073c4f1ddf804aa", - "01-7d8ac32fe4ae209ead1f3220fda34466", - "02-f9144e76988aad647d2e61353a6e96d5", - "03-b14c3b80179203363922d60760271c98", - "04-770bb2a8c28f6cee89e00f4d5cc7f861", - "05-6e3d7073ea368334ef67467871c66799", - "06-248792bc74a98ce024477c13c8fb5f8d", - "07-fcea4640d2db820c0604851e293d2487", - "08-2776c36fb714bb1f8525a0be36fc7dba", - "09-6ee7ac8be773e473a4bf75ee5f065762", - "10-33657fc073354cf91d4a68c735aacfc8", - "11-7645c65094a5868bf225c516fdee2d0c", - "12-840485aacb8226631ecd9c70e3018086"), - (10, "377e63bdbb5f7d4dc58a483d035212bb", - "01-32c53260103be431c843b1a633afe3bd", - "02-0107eb16cb8695084d452d2cc50bc7d6", - "03-df1e5c66cd755287fb0446faccd72a06", - "04-361bbcd5d40797f49dfa1898652da197", - "05-160d3ad1512f7dec7fd9344aed318591", - "06-659af6d95df4f25beca4fb9bfee3b7e8", - "07-37f3b208977bad50b3724566b72bfa9d", - "08-6c1de2dfc69c2986142c26a8248eb316", - "09-5e19220837a396bd4bc8cd685ff314c3", - "10-86e7b864fb0f3d628e46d50c1ba92f1c", - "11-065d0082c80b1aea18f4abe0c49df72e", - "12-84a09430c1d20ea9f388f3123c3733a3"), - ) - - def get_share(p): - pos = p.find('-') - return int(p[:pos]), unhexlify(p[pos + 1:]) - - for tv in test_vectors: - k = tv[0] - secret = unhexlify(tv[1]) - max_perms = 10 - for perm, shares_idx in enumerate(permutations(range(2, len(tv)), k)): - if perm > max_perms: - break - shares = [ get_share(tv[x]) for x in shares_idx ] - result = Shamir.combine(shares, True) - self.assertEqual(secret, result) - - def test3(self): - # Loopback split/recombine - - rng = SHAKE128.new(b"test3") - - for _ in range(100): - - secret = rng.read(16) - - shares = Shamir.split(2, 3, secret) - - secret2 = Shamir.combine(shares[:2]) - self.assertEqual(secret, secret2) - - secret3 = Shamir.combine([ shares[0], shares[2] ]) - self.assertEqual(secret, secret3) - - def test4(self): - # Loopback split/recombine (SSSS) - - rng = SHAKE128.new(b"test4") - - for _ in range(10): - secret = rng.read(16) - - shares = Shamir.split(2, 3, secret, ssss=True) - - secret2 = Shamir.combine(shares[:2], ssss=True) - self.assertEqual(secret, secret2) - - for _ in range(10): - secret = rng.read(16) - - shares = Shamir.split(3, 7, secret, ssss=True) - - secret2 = Shamir.combine([shares[3], shares[4], shares[6]], ssss=True) - self.assertEqual(secret, secret2) - - - def test5(self): - # Detect duplicate shares - secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) - - shares = Shamir.split(2, 3, secret) - self.assertRaises(ValueError, Shamir.combine, (shares[0], shares[0])) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(GF2_Tests) - tests += list_test_cases(Element_Tests) - tests += list_test_cases(Shamir_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: TestSuite(get_tests()) - 
main(defaultTest='suite') - diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_ecdh.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_ecdh.py deleted file mode 100644 index 8bea787..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_ecdh.py +++ /dev/null @@ -1,770 +0,0 @@ -import re -import base64 -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.Util.py3compat import bord - -from Cryptodome.Hash import SHA256 -from Cryptodome.PublicKey import ECC -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof - -from Cryptodome.Protocol import DH -from Cryptodome.Protocol.DH import (key_agreement, - import_x25519_public_key, - import_x25519_private_key, - import_x448_public_key, - import_x448_private_key) - - -class FIPS_ECDH_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_verify = load_test_vectors(("Protocol", ), - "KAS_ECC_CDH_PrimitiveTest.txt", - "ECC CDH Primitive (SP800-56A Section 5.7.1.2)", - { - 'qcavsx': lambda x: int(x, 16), - 'qcavsy': lambda x: int(x, 16), - 'diut': lambda x: int(x, 16), - 'qiutx': lambda x: int(x, 16), - 'qiuty': lambda x: int(x, 16), - }) or [] - -for idx, tv in enumerate(test_vectors_verify): - - # Stand-alone header with curve name - if isinstance(tv, str): - res = re.match(r"\[([A-Za-z0-9-]+)\]", tv) - assert res - curve_name = res.group(1) - continue - - public_key = ECC.construct(curve=curve_name, - point_x=tv.qcavsx, - point_y=tv.qcavsy) - - private_key = ECC.construct(curve=curve_name, - d=tv.diut) - - exp_response = tv.ziut - - def ecdh_test(self, - public_key=public_key, - private_key=private_key, - exp_response=exp_response): - z = key_agreement( - static_pub=public_key, - static_priv=private_key, - kdf=lambda x: x) - self.assertEqual(z, exp_response) - - def ecdh_test_rev(self, - public_key=public_key, - private_key=private_key, - exp_response=exp_response): - z = key_agreement( - static_pub=public_key, - static_priv=private_key, - kdf=lambda x: x) - self.assertEqual(z, exp_response) - - setattr(FIPS_ECDH_Tests_KAT, "test_verify_positive_%d" % idx, ecdh_test) - if idx == 1: - setattr(FIPS_ECDH_Tests_KAT, "test_verify_positive_rev_%d" % idx, ecdh_test_rev) - - -class TestVectorsECDHWycheproof(unittest.TestCase): - - desc = "Wycheproof ECDH tests" - - def add_tests(self, filename): - - def curve(g): - return g['curve'] - - def private(u): - return int(u['private'], 16) - - result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), - filename, - "Wycheproof ECDH (%s)" - % filename, - group_tag={'curve': curve}, - unit_tag={'private': private}, - ) - self.tv += result - - def setUp(self): - self.tv = [] - self.desc = None - - self.add_tests("ecdh_secp224r1_ecpoint_test.json") - self.add_tests("ecdh_secp256r1_ecpoint_test.json") - self.add_tests("ecdh_secp384r1_ecpoint_test.json") - self.add_tests("ecdh_secp521r1_ecpoint_test.json") - - self.add_tests("ecdh_secp224r1_test.json") - self.add_tests("ecdh_secp256r1_test.json") - self.add_tests("ecdh_secp384r1_test.json") - self.add_tests("ecdh_secp521r1_test.json") - - def shortDescription(self): - return self.desc - - def test_verify(self, tv): - - if len(tv.public) == 0: - return - - try: - if bord(tv.public[0]) == 4: # SEC1 - public_key = ECC.import_key(tv.public, curve_name=tv.curve) - else: - public_key = ECC.import_key(tv.public) - except ValueError: - assert tv.warning or not tv.valid - return - - private_key = 
ECC.construct(curve=tv.curve, d=tv.private) - - try: - z = key_agreement(static_pub=public_key, - static_priv=private_key, - kdf=lambda x: x) - except ValueError: - assert not tv.valid - except TypeError as e: - assert not tv.valid - assert "incompatible curve" in str(e) - else: - self.assertEqual(z, tv.shared) - assert tv.valid - - def runTest(self): - for tv in self.tv: - self.desc = "Wycheproof ECDH Verify Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - self.test_verify(tv) - - -class ECDH_Tests(unittest.TestCase): - - static_priv = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg9VHFVKh2a1aVFifH\n+BiyNaRa2kttEg3165Ye/dJxJ7KhRANCAARImIEXro5ZOcyWU2mq/+d79FEZXtTA\nbKkz1aICQXihQdCMzRNbeNtC9LFLzhu1slRKJ2xsDAlw9r6w6vwtkRzr\n-----END PRIVATE KEY-----') - static_pub = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgHhmv8zmZ+Nw8fsZd\ns8tlZflyfw2NE1CRS9DWr3Y3O46hRANCAAS3hZVUCbk+uk3w4S/YOraEVGG+WYpk\nNO/vrwzufUUks2GV2OnBQESe0EBk4Jq8gn4ij8Lvs3rZX2yT+XfeATYd\n-----END PRIVATE KEY-----').public_key() - - eph_priv = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgGPdJmFFFKzLPspIr\nE1T2cEjeIf4ajS9CpneP0e2b3AyhRANCAAQBexAA5BYDcXHs2KOksTYUsst4HhPt\nkp0zkgI2virc3OGJFNGPaCCPfFCQJHwLRaEpiq3SoQlgoBwSc8ZPsl3y\n-----END PRIVATE KEY-----') - - eph_pub = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQghaVZXElSEGEojFKF\nOU0JCpxWUWHvWQUR81gwWrOp76ShRANCAATi1Ib2K+YR3AckD8wxypWef7pw5PRw\ntBaB3RDPyE7IjHZC6yu1DbcXoCdtaw+F5DM+4zpl59n5ZaIy/Yl1BdIy\n-----END PRIVATE KEY-----') - - def test_1(self): - # C(0, 2s) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - static_pub=self.static_pub, - static_priv=self.static_priv) - self.assertEqual(hexlify(z), - b"3960a1101d1193cbaffef4cc7202ebff783c22c6d2e0d5d530ffc66dc197ea9c") - - def test_2(self): - # C(2e, 2s) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - static_pub=self.static_pub, - static_priv=self.static_priv, - eph_pub=self.eph_pub, - eph_priv=self.eph_priv) - self.assertEqual(hexlify(z), - b"7447b733d40c8fab2c633b3dc61e4a8c742f3a6af7e16fb0cc486f5bdb5d6ba2") - - def test_3(self): - # C(1e, 2s) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - static_pub=self.static_pub, - static_priv=self.static_priv, - eph_priv=self.eph_priv) - self.assertEqual(hexlify(z), - b"9e977ae45f33bf67f285d064d83e6632bcafe3a7d33fe571233bab4794ace759") - - def test_4(self): - # C(1e, 2s) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - static_pub=self.static_pub, - static_priv=self.static_priv, - eph_pub=self.eph_pub) - self.assertEqual(hexlify(z), - b"c9532df6aa7e9dbe5fe85da31ee25ff19c179c88691ec4b8328cc2036dcdadf2") - - def test_5(self): - # C(2e, 1s) is not supported - kdf = lambda x: SHA256.new(x).digest() - self.assertRaises(ValueError, - key_agreement, - kdf=kdf, - static_priv=self.static_priv, - eph_pub=self.eph_pub, - eph_priv=self.eph_priv) - - def test_6(self): - # C(2e, 1s) is not supported - kdf = lambda x: SHA256.new(x).digest() - self.assertRaises(ValueError, - key_agreement, - kdf=kdf, - static_pub=self.static_pub, - eph_pub=self.eph_pub, - eph_priv=self.eph_priv) - - def test_7(self): - # C(2e, 0) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - eph_pub=self.eph_pub, - eph_priv=self.eph_priv) - self.assertEqual(hexlify(z), - 
b"feb257ebe063078b1391aac07913283d7b642ad7df61b46dfc9cd6f420bb896a") - - def test_8(self): - # C(1e, 1s) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - static_priv=self.static_priv, - eph_pub=self.eph_pub) - self.assertEqual(hexlify(z), - b"ee4dc995117476ed57fd17ff0ed44e9f0466d46b929443bc0db9380317583b04") - - def test_9(self): - # C(1e, 1s) - kdf = lambda x: SHA256.new(x).digest() - z = key_agreement( - kdf=kdf, - static_pub=self.static_pub, - eph_priv=self.eph_priv) - self.assertEqual(hexlify(z), - b"2351cc2014f7c40468fa072b5d30f706eeaeef7507311cd8e59bab3b43f03c51") - - def test_10(self): - # No private (local) keys - kdf = lambda x: SHA256.new(x).digest() - self.assertRaises(ValueError, - key_agreement, - kdf=kdf, - static_pub=self.static_pub, - eph_pub=self.eph_pub) - - def test_11(self): - # No public (peer) keys - kdf = lambda x: SHA256.new(x).digest() - self.assertRaises(ValueError, - key_agreement, - kdf=kdf, - static_priv=self.static_priv, - eph_priv=self.eph_priv) - - def test_12(self): - # failure if kdf is missing - self.assertRaises(ValueError, - key_agreement, - static_pub=self.static_pub, - static_priv=self.static_priv) - - -class X25519_Tests(unittest.TestCase): - - def test_rfc7748_1(self): - tvs = ( - ("a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4", - "e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c", - "c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552"), - ("4b66e9d4d1b4673c5ad22691957d6af5c11b6421e0ea01d42ca4169e7918ba0d", - "e5210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493", - "95cbde9476e8907d7aade45cb4b873f88b595a68799fa152e6f8f7647aac7957"), - ) - - for tv1, tv2, tv3 in tvs: - priv_key = DH.import_x25519_private_key(unhexlify(tv1)) - pub_key = DH.import_x25519_public_key(unhexlify(tv2)) - result = key_agreement(static_pub=pub_key, - static_priv=priv_key, - kdf=lambda x: x) - self.assertEqual(result, unhexlify(tv3)) - - def test_rfc7748_2(self): - k = unhexlify("0900000000000000000000000000000000000000000000000000000000000000") - - priv_key = DH.import_x25519_private_key(k) - pub_key = DH.import_x25519_public_key(k) - result = key_agreement(static_pub=pub_key, - static_priv=priv_key, - kdf=lambda x: x) - self.assertEqual( - result, - unhexlify("422c8e7a6227d7bca1350b3e2bb7279f7897b87bb6854b783c60e80311ae3079") - ) - - for _ in range(999): - priv_key = DH.import_x25519_private_key(result) - pub_key = DH.import_x25519_public_key(k) - k = result - result = key_agreement(static_pub=pub_key, - static_priv=priv_key, - kdf=lambda x: x) - - self.assertEqual( - result, - unhexlify("684cf59ba83309552800ef566f2f4d3c1c3887c49360e3875f2eb94d99532c51") - ) - - def test_rfc7748_3(self): - tv1 = "77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a" - tv2 = "8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a" - tv3 = "5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb" - tv4 = "de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f" - tv5 = "4a5d9d5ba4ce2de1728e3bf480350f25e07e21c947d19e3376f09b3c1e161742" - - alice_priv_key = DH.import_x25519_private_key(unhexlify(tv1)) - alice_pub_key = DH.import_x25519_public_key(unhexlify(tv2)) - bob_priv_key = DH.import_x25519_private_key(unhexlify(tv3)) - bob_pub_key = DH.import_x25519_public_key(unhexlify(tv4)) - secret = unhexlify(tv5) - - result1 = key_agreement(static_pub=alice_pub_key, - static_priv=bob_priv_key, - kdf=lambda x: x) - result2 = key_agreement(static_pub=bob_pub_key, - 
static_priv=alice_priv_key, - kdf=lambda x: x) - self.assertEqual(result1, secret) - self.assertEqual(result2, secret) - - def test_weak(self): - - weak_keys = ( - "0000000000000000000000000000000000000000000000000000000000000000", - "0100000000000000000000000000000000000000000000000000000000000000", - "e0eb7a7c3b41b8ae1656e3faf19fc46ada098deb9c32b1fd866205165f49b800", - "5f9c95bca3508c24b1d0b1559c83ef5b04445cc4581c8e86d8224eddd09f1157", - "ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", - "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", - "eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", - # The implementation will accept these value, but only because - # it will set the MSB to zero (as required by RFC7748, Section 5), - # therefore leading to another public key (and to a point which is - # not of low order anymore). - # "cdeb7a7c3b41b8ae1656e3faf19fc46ada098deb9c32b1fd866205165f49b880", - # "4c9c95bca3508c24b1d0b1559c83ef5b04445cc4581c8e86d8224eddd09f11d7", - # "d9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", - # "daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", - # "dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", - ) - - for x in weak_keys: - self.assertRaises(ValueError, - DH.import_x25519_public_key, - unhexlify(x)) - - -class X448_Tests(unittest.TestCase): - - def test_rfc7748_1(self): - tvs = ( - ("3d262fddf9ec8e88495266fea19a34d28882acef045104d0d1aae121700a779c984c24f8cdd78fbff44943eba368f54b29259a4f1c600ad3", - "06fce640fa3487bfda5f6cf2d5263f8aad88334cbd07437f020f08f9814dc031ddbdc38c19c6da2583fa5429db94ada18aa7a7fb4ef8a086", - "ce3e4ff95a60dc6697da1db1d85e6afbdf79b50a2412d7546d5f239fe14fbaadeb445fc66a01b0779d98223961111e21766282f73dd96b6f"), - ("203d494428b8399352665ddca42f9de8fef600908e0d461cb021f8c538345dd77c3e4806e25f46d3315c44e0a5b4371282dd2c8d5be3095f", - "0fbcc2f993cd56d3305b0b7d9e55d4c1a8fb5dbb52f8e9a1e9b6201b165d015894e56c4d3570bee52fe205e28a78b91cdfbde71ce8d157db", - "884a02576239ff7a2f2f63b2db6a9ff37047ac13568e1e30fe63c4a7ad1b3ee3a5700df34321d62077e63633c575c1c954514e99da7c179d"), - ) - - for tv1, tv2, tv3 in tvs: - priv_key = DH.import_x448_private_key(unhexlify(tv1)) - pub_key = DH.import_x448_public_key(unhexlify(tv2)) - result = key_agreement(static_pub=pub_key, - static_priv=priv_key, - kdf=lambda x: x) - self.assertEqual(result, unhexlify(tv3)) - - def test_rfc7748_2(self): - k = unhexlify("0500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000") - - priv_key = DH.import_x448_private_key(k) - pub_key = DH.import_x448_public_key(k) - result = key_agreement(static_pub=pub_key, - static_priv=priv_key, - kdf=lambda x: x) - self.assertEqual( - result, - unhexlify("3f482c8a9f19b01e6c46ee9711d9dc14fd4bf67af30765c2ae2b846a4d23a8cd0db897086239492caf350b51f833868b9bc2b3bca9cf4113") - ) - - for _ in range(999): - priv_key = DH.import_x448_private_key(result) - pub_key = DH.import_x448_public_key(k) - k = result - result = key_agreement(static_pub=pub_key, - static_priv=priv_key, - kdf=lambda x: x) - - self.assertEqual( - result, - unhexlify("aa3b4749d55b9daf1e5b00288826c467274ce3ebbdd5c17b975e09d4af6c67cf10d087202db88286e2b79fceea3ec353ef54faa26e219f38") - ) - - def test_rfc7748_3(self): - tv1 = "9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28dd9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b" - tv2 = 
"9b08f7cc31b7e3e67d22d5aea121074a273bd2b83de09c63faa73d2c22c5d9bbc836647241d953d40c5b12da88120d53177f80e532c41fa0" - tv3 = "1c306a7ac2a0e2e0990b294470cba339e6453772b075811d8fad0d1d6927c120bb5ee8972b0d3e21374c9c921b09d1b0366f10b65173992d" - tv4 = "3eb7a829b0cd20f5bcfc0b599b6feccf6da4627107bdb0d4f345b43027d8b972fc3e34fb4232a13ca706dcb57aec3dae07bdc1c67bf33609" - tv5 = "07fff4181ac6cc95ec1c16a94a0f74d12da232ce40a77552281d282bb60c0b56fd2464c335543936521c24403085d59a449a5037514a879d" - - alice_priv_key = DH.import_x448_private_key(unhexlify(tv1)) - alice_pub_key = DH.import_x448_public_key(unhexlify(tv2)) - bob_priv_key = DH.import_x448_private_key(unhexlify(tv3)) - bob_pub_key = DH.import_x448_public_key(unhexlify(tv4)) - secret = unhexlify(tv5) - - result1 = key_agreement(static_pub=alice_pub_key, - static_priv=bob_priv_key, - kdf=lambda x: x) - result2 = key_agreement(static_pub=bob_pub_key, - static_priv=alice_priv_key, - kdf=lambda x: x) - self.assertEqual(result1, secret) - self.assertEqual(result2, secret) - - def test_weak(self): - - weak_keys = ( - "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "0100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "fefffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffff", - "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffff", - "00000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff", - ) - - for x in weak_keys: - self.assertRaises(ValueError, - DH.import_x448_public_key, - unhexlify(x)) - - -class TestVectorsX25519Wycheproof(unittest.TestCase): - - desc = "Wycheproof X25519 tests" - - def add_tests_hex(self, filename): - - def encoding(g): - return g['type'] - - def private(u): - return unhexlify(u['private']) - - result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), - filename, - "Wycheproof ECDH (%s)" - % filename, - group_tag={'encoding': encoding}, - unit_tag={'private': private} - ) - self.tv += result - - def add_tests_ascii(self, filename): - - def encoding(g): - return g['type'] - - def public(u): - return u['public'] - - def private(u): - return u['private'] - - result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), - filename, - "Wycheproof ECDH (%s)" - % filename, - group_tag={'encoding': encoding}, - unit_tag={'public': public, - 'private': private} - ) - self.tv += result - - def setUp(self): - self.tv = [] - self.desc = None - - self.add_tests_hex("x25519_test.json") - self.add_tests_hex("x25519_asn_test.json") - self.add_tests_ascii("x25519_pem_test.json") - self.add_tests_ascii("x25519_jwk_test.json") - - def shortDescription(self): - return self.desc - - def test_verify(self, tv): - - if tv.encoding == "XdhComp": - try: - public_key = import_x25519_public_key(tv.public) - except ValueError as e: - assert tv.valid - assert tv.warning - assert "LowOrderPublic" in tv.flags - assert "Invalid Curve25519" in str(e) - return - private_key = import_x25519_private_key(tv.private) - elif tv.encoding in ("XdhAsnComp", "XdhPemComp"): - try: - public_key = ECC.import_key(tv.public) - private_key = ECC.import_key(tv.private) - except ECC.UnsupportedEccFeature as e: - assert not tv.valid - assert "Unsupported ECC" in str(e) - return - except ValueError: - assert tv.valid - assert tv.warning - assert "LowOrderPublic" in 
tv.flags - return - elif tv.encoding == "XdhJwkComp": - - if 'y' in tv.public: - return - if 'x' not in tv.public: - return - if 'x' not in tv.private: - return - if tv.public.get('kty') != 'OKP': - return - if tv.public.get('crv') != 'X25519': - return - if tv.private.get('crv') != 'X25519': - return - - def base64url_decode(input_str): - input_str = input_str.replace('-', '+').replace('_', '/') - padding = 4 - (len(input_str) % 4) - if padding != 4: - input_str += '=' * padding - decoded_bytes = base64.b64decode(input_str) - return decoded_bytes - - jwk_public = base64url_decode(tv.public['x']) - jwk_private = base64url_decode(tv.private['d']) - - try: - public_key = import_x25519_public_key(jwk_public) - private_key = import_x25519_private_key(jwk_private) - except ValueError as e: - if tv.valid: - assert tv.warning - assert "LowOrderPublic" in tv.flags - assert "Invalid Curve25519" in str(e) - return - else: - assert "Incorrect length" in str(e) - return - except ValueError: - assert tv.valid - else: - raise ValueError("Unknown encoding", tv.encoding) - - try: - z = key_agreement(static_pub=public_key, - static_priv=private_key, - kdf=lambda x: x) - except ValueError: - assert not tv.valid - except TypeError as e: - assert not tv.valid - assert "incompatible curve" in str(e) - else: - self.assertEqual(z, tv.shared) - assert tv.valid - - def runTest(self): - for tv in self.tv: - self.desc = "Wycheproof XECDH Verify Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - self.test_verify(tv) - - -class TestVectorsX448Wycheproof(unittest.TestCase): - - desc = "Wycheproof X448 tests" - - def add_tests_hex(self, filename): - - def encoding(g): - return g['type'] - - def private(u): - return unhexlify(u['private']) - - result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), - filename, - "Wycheproof ECDH (%s)" - % filename, - group_tag={'encoding': encoding}, - unit_tag={'private': private} - ) - self.tv += result - - def add_tests_ascii(self, filename): - - def encoding(g): - return g['type'] - - def public(u): - return u['public'] - - def private(u): - return u['private'] - - result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), - filename, - "Wycheproof ECDH (%s)" - % filename, - group_tag={'encoding': encoding}, - unit_tag={'public': public, - 'private': private} - ) - self.tv += result - - def setUp(self): - self.tv = [] - self.desc = None - - self.add_tests_hex("x448_test.json") - self.add_tests_hex("x448_asn_test.json") - self.add_tests_ascii("x448_pem_test.json") - self.add_tests_ascii("x448_jwk_test.json") - - def shortDescription(self): - return self.desc - - def test_verify(self, tv): - - if tv.encoding == "XdhComp": - try: - public_key = import_x448_public_key(tv.public) - except ValueError as e: - assert tv.valid - assert tv.warning - if len(tv.public) == 56: - assert "LowOrderPublic" in tv.flags - assert "Invalid Curve448" in str(e) - else: - assert "Incorrect Curve448" in str(e) - return - private_key = import_x448_private_key(tv.private) - elif tv.encoding in ("XdhAsnComp", "XdhPemComp"): - try: - public_key = ECC.import_key(tv.public) - private_key = ECC.import_key(tv.private) - except ECC.UnsupportedEccFeature as e: - assert not tv.valid - assert "Unsupported ECC" in str(e) - return - except ValueError as e: - assert tv.valid - assert tv.warning - assert "LowOrderPublic" in tv.flags or "NonCanonicalPublic" in tv.flags - return - elif tv.encoding == "XdhJwkComp": - - if 'y' in tv.public: - return - if 'x' not in tv.public: - return - if 'x' not in 
tv.private: - return - if tv.public.get('kty') != 'OKP': - return - if tv.public.get('crv') != 'X448': - return - if tv.private.get('crv') != 'X448': - return - - def base64url_decode(input_str): - input_str = input_str.replace('-', '+').replace('_', '/') - padding = 4 - (len(input_str) % 4) - if padding != 4: - input_str += '=' * padding - decoded_bytes = base64.b64decode(input_str) - return decoded_bytes - - jwk_public = base64url_decode(tv.public['x']) - jwk_private = base64url_decode(tv.private['d']) - - try: - public_key = import_x448_public_key(jwk_public) - private_key = import_x448_private_key(jwk_private) - except ValueError as e: - if tv.valid: - assert tv.warning - if len(tv.public['x']) == 75: - assert "LowOrderPublic" in tv.flags or \ - "NonCanonicalPublic" in tv.flags - assert "Invalid Curve448" in str(e) - else: - assert "Incorrect Curve448" in str(e) - return - else: - assert "Incorrect length" in str(e) - return - except ValueError: - assert tv.valid - else: - raise ValueError("Unknown encoding", tv.encoding) - - try: - z = key_agreement(static_pub=public_key, - static_priv=private_key, - kdf=lambda x: x) - except ValueError: - assert not tv.valid - except TypeError as e: - assert not tv.valid - assert "incompatible curve" in str(e) - else: - self.assertEqual(z, tv.shared) - assert tv.valid - - def runTest(self): - for tv in self.tv: - self.desc = "Wycheproof XECDH Verify Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - self.test_verify(tv) - - -def get_tests(config={}): - - tests = [] - tests += list_test_cases(FIPS_ECDH_Tests_KAT) - tests += [TestVectorsECDHWycheproof()] - tests += list_test_cases(ECDH_Tests) - tests += list_test_cases(X25519_Tests) - tests += list_test_cases(X448_Tests) - tests += [TestVectorsX25519Wycheproof()] - tests += [TestVectorsX448Wycheproof()] - - slow_tests = config.get('slow_tests') - if slow_tests: - pass - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_rfc1751.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_rfc1751.py deleted file mode 100644 index a79769c..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Protocol/test_rfc1751.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Test script for Cryptodome.Util.RFC1751. -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -__revision__ = "$Id$" - -import binascii -import unittest -from Cryptodome.Util import RFC1751 -from Cryptodome.Util.py3compat import * - -test_data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), - ('CCAC2AED591056BE4F90FD441C534766', - 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), - ('EFF81F9BFBC65350920CDD7416DE8009', - 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') - ] - -class RFC1751Test_k2e (unittest.TestCase): - - def runTest (self): - "Check converting keys to English" - for key, words in test_data: - key=binascii.a2b_hex(b(key)) - self.assertEqual(RFC1751.key_to_english(key), words) - -class RFC1751Test_e2k (unittest.TestCase): - - def runTest (self): - "Check converting English strings to keys" - for key, words in test_data: - key=binascii.a2b_hex(b(key)) - self.assertEqual(RFC1751.english_to_key(words), key) - -# class RFC1751Test - -def get_tests(config={}): - return [RFC1751Test_k2e(), RFC1751Test_e2k()] - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__init__.py deleted file mode 100644 index f16a6ff..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__init__.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/__init__.py: Self-test for public key crypto -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
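The removed test_rfc1751.py round-trips keys through the RFC 1751 word encoding in Cryptodome.Util.RFC1751. A sketch using one of that file's own vectors:

import binascii
from Cryptodome.Util import RFC1751

key = binascii.a2b_hex('EB33F77EE73D4053')
words = RFC1751.key_to_english(key)      # 'TIDE ITCH SLOW REIN RULE MOT'
assert RFC1751.english_to_key(words) == key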
-# =================================================================== - -"""Self-test for public-key crypto""" - -import unittest -from Cryptodome.SelfTest.PublicKey import (test_DSA, test_RSA, - test_ECC_NIST, - test_ECC_Ed25519, - test_ECC_Curve25519, - test_ECC_Ed448, - test_ECC_Curve448, - test_import_DSA, test_import_RSA, - test_import_ECC, test_ElGamal, - test_import_Curve25519, - test_import_Curve448) - - -def get_tests(config={}): - tests = [] - tests += test_DSA.get_tests(config=config) - tests += test_RSA.get_tests(config=config) - tests += test_ECC_NIST.get_tests(config=config) - tests += test_ECC_Ed25519.get_tests(config=config) - tests += test_ECC_Curve25519.get_tests(config=config) - tests += test_ECC_Ed448.get_tests(config=config) - tests += test_ECC_Curve448.get_tests(config=config) - - tests += test_import_DSA.get_tests(config=config) - tests += test_import_RSA.get_tests(config=config) - tests += test_import_ECC.get_tests(config=config) - tests += test_import_Curve25519.get_tests(config=config) - tests += test_import_Curve448.get_tests(config=config) - - tests += test_ElGamal.get_tests(config=config) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 47a9ce2..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_DSA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_DSA.cpython-312.pyc deleted file mode 100644 index 46205af..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_DSA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Curve25519.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Curve25519.cpython-312.pyc deleted file mode 100644 index 6fdff75..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Curve25519.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Curve448.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Curve448.cpython-312.pyc deleted file mode 100644 index 208edaf..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Curve448.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Ed25519.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Ed25519.cpython-312.pyc deleted file mode 100644 index 86dd55f..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Ed25519.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Ed448.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Ed448.cpython-312.pyc deleted file mode 100644 index 5358269..0000000 Binary files 
a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_Ed448.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_NIST.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_NIST.cpython-312.pyc deleted file mode 100644 index c38e5ab..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ECC_NIST.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ElGamal.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ElGamal.cpython-312.pyc deleted file mode 100644 index ac17aea..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_ElGamal.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_RSA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_RSA.cpython-312.pyc deleted file mode 100644 index 5161444..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_RSA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_Curve25519.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_Curve25519.cpython-312.pyc deleted file mode 100644 index 7544515..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_Curve25519.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_Curve448.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_Curve448.cpython-312.pyc deleted file mode 100644 index 73fe4f5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_Curve448.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_DSA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_DSA.cpython-312.pyc deleted file mode 100644 index 74b071c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_DSA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_ECC.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_ECC.cpython-312.pyc deleted file mode 100644 index 5928d3e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_ECC.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_RSA.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_RSA.cpython-312.pyc deleted file mode 100644 index 6f44519..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/__pycache__/test_import_RSA.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_DSA.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_DSA.py deleted file mode 100644 index 160d882..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_DSA.py +++ /dev/null @@ -1,247 +0,0 @@ -# -*- coding: 
utf-8 -*- -# -# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.PublicKey.DSA""" - -import os -from Cryptodome.Util.py3compat import * - -import unittest -from Cryptodome.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex - -def _sws(s): - """Remove whitespace from a text or byte string""" - if isinstance(s,str): - return "".join(s.split()) - else: - return b("").join(s.split()) - -class DSATest(unittest.TestCase): - # Test vector from "Appendix 5. Example of the DSA" of - # "Digital Signature Standard (DSS)", - # U.S. Department of Commerce/National Institute of Standards and Technology - # FIPS 186-2 (+Change Notice), 2000 January 27. - # http://csrc.nist.gov/publications/fips/fips186-2/fips186-2-change1.pdf - - y = _sws("""19131871 d75b1612 a819f29d 78d1b0d7 346f7aa7 7bb62a85 - 9bfd6c56 75da9d21 2d3a36ef 1672ef66 0b8c7c25 5cc0ec74 - 858fba33 f44c0669 9630a76b 030ee333""") - - g = _sws("""626d0278 39ea0a13 413163a5 5b4cb500 299d5522 956cefcb - 3bff10f3 99ce2c2e 71cb9de5 fa24babf 58e5b795 21925c9c - c42e9f6f 464b088c c572af53 e6d78802""") - - p = _sws("""8df2a494 492276aa 3d25759b b06869cb eac0d83a fb8d0cf7 - cbb8324f 0d7882e5 d0762fc5 b7210eaf c2e9adac 32ab7aac - 49693dfb f83724c2 ec0736ee 31c80291""") - - q = _sws("""c773218c 737ec8ee 993b4f2d ed30f48e dace915f""") - - x = _sws("""2070b322 3dba372f de1c0ffc 7b2e3b49 8b260614""") - - k = _sws("""358dad57 1462710f 50e254cf 1a376b2b deaadfbf""") - k_inverse = _sws("""0d516729 8202e49b 4116ac10 4fc3f415 ae52f917""") - m = b2a_hex(b("abc")) - m_hash = _sws("""a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d""") - r = _sws("""8bac1ab6 6410435c b7181f95 b16ab97c 92b341c0""") - s = _sws("""41e2345f 1f56df24 58f426d1 55b4ba2d b6dcd8c8""") - - def setUp(self): - global DSA, Random, bytes_to_long, size - from Cryptodome.PublicKey import DSA - from Cryptodome import Random - from Cryptodome.Util.number import bytes_to_long, inverse, size - - self.dsa = DSA - - def test_generate_1arg(self): - """DSA (default implementation) generated key (1 argument)""" - dsaObj = self.dsa.generate(1024) - self._check_private_key(dsaObj) - pub = dsaObj.public_key() - self._check_public_key(pub) - - def test_generate_2arg(self): - """DSA (default implementation) generated key (2 arguments)""" - dsaObj = self.dsa.generate(1024, Random.new().read) - self._check_private_key(dsaObj) - pub = dsaObj.public_key() - self._check_public_key(pub) - - def test_construct_4tuple(self): - """DSA (default 
implementation) constructed key (4-tuple)""" - (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] - dsaObj = self.dsa.construct((y, g, p, q)) - self._test_verification(dsaObj) - - def test_construct_5tuple(self): - """DSA (default implementation) constructed key (5-tuple)""" - (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] - dsaObj = self.dsa.construct((y, g, p, q, x)) - self._test_signing(dsaObj) - self._test_verification(dsaObj) - - def test_construct_bad_key4(self): - (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] - tup = (y, g, p+1, q) - self.assertRaises(ValueError, self.dsa.construct, tup) - - tup = (y, g, p, q+1) - self.assertRaises(ValueError, self.dsa.construct, tup) - - tup = (y, 1, p, q) - self.assertRaises(ValueError, self.dsa.construct, tup) - - def test_construct_bad_key5(self): - (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] - tup = (y, g, p, q, x+1) - self.assertRaises(ValueError, self.dsa.construct, tup) - - tup = (y, g, p, q, q+10) - self.assertRaises(ValueError, self.dsa.construct, tup) - - def _check_private_key(self, dsaObj): - # Check capabilities - self.assertEqual(1, dsaObj.has_private()) - self.assertEqual(1, dsaObj.can_sign()) - self.assertEqual(0, dsaObj.can_encrypt()) - - # Sanity check key data - self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q - self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits - self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 - self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p - self.assertEqual(1, 0 < dsaObj.x < dsaObj.q) # 0 < x < q - - def _check_public_key(self, dsaObj): - k = bytes_to_long(a2b_hex(self.k)) - m_hash = bytes_to_long(a2b_hex(self.m_hash)) - - # Check capabilities - self.assertEqual(0, dsaObj.has_private()) - self.assertEqual(1, dsaObj.can_sign()) - self.assertEqual(0, dsaObj.can_encrypt()) - - # Check that private parameters are all missing - self.assertEqual(0, hasattr(dsaObj, 'x')) - - # Sanity check key data - self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q - self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits - self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 - - # Public-only key objects should raise an error when .sign() is called - self.assertRaises(TypeError, dsaObj._sign, m_hash, k) - - # Check __eq__ and __ne__ - self.assertEqual(dsaObj.public_key() == dsaObj.public_key(),True) # assert_ - self.assertEqual(dsaObj.public_key() != dsaObj.public_key(),False) # assertFalse - - self.assertEqual(dsaObj.public_key(), dsaObj.publickey()) - - def _test_signing(self, dsaObj): - k = bytes_to_long(a2b_hex(self.k)) - m_hash = bytes_to_long(a2b_hex(self.m_hash)) - r = bytes_to_long(a2b_hex(self.r)) - s = bytes_to_long(a2b_hex(self.s)) - (r_out, s_out) = dsaObj._sign(m_hash, k) - self.assertEqual((r, s), (r_out, s_out)) - - def _test_verification(self, dsaObj): - m_hash = bytes_to_long(a2b_hex(self.m_hash)) - r = bytes_to_long(a2b_hex(self.r)) - s = bytes_to_long(a2b_hex(self.s)) - self.assertTrue(dsaObj._verify(m_hash, (r, s))) - self.assertFalse(dsaObj._verify(m_hash + 1, (r, s))) - - def test_repr(self): - (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] - dsaObj = self.dsa.construct((y, g, p, q)) - repr(dsaObj) - - -class DSADomainTest(unittest.TestCase): - - def test_domain1(self): - 
"""Verify we can generate new keys in a given domain""" - dsa_key_1 = DSA.generate(1024) - domain_params = dsa_key_1.domain() - - dsa_key_2 = DSA.generate(1024, domain=domain_params) - self.assertEqual(dsa_key_1.p, dsa_key_2.p) - self.assertEqual(dsa_key_1.q, dsa_key_2.q) - self.assertEqual(dsa_key_1.g, dsa_key_2.g) - - self.assertEqual(dsa_key_1.domain(), dsa_key_2.domain()) - - def _get_weak_domain(self): - - from Cryptodome.Math.Numbers import Integer - from Cryptodome.Math import Primality - - p = Integer(4) - while p.size_in_bits() != 1024 or Primality.test_probable_prime(p) != Primality.PROBABLY_PRIME: - q1 = Integer.random(exact_bits=80) - q2 = Integer.random(exact_bits=80) - q = q1 * q2 - z = Integer.random(exact_bits=1024-160) - p = z * q + 1 - - h = Integer(2) - g = 1 - while g == 1: - g = pow(h, z, p) - h += 1 - - return (p, q, g) - - - def test_generate_error_weak_domain(self): - """Verify that domain parameters with composite q are rejected""" - - domain_params = self._get_weak_domain() - self.assertRaises(ValueError, DSA.generate, 1024, domain=domain_params) - - - def test_construct_error_weak_domain(self): - """Verify that domain parameters with composite q are rejected""" - - from Cryptodome.Math.Numbers import Integer - - p, q, g = self._get_weak_domain() - y = pow(g, 89, p) - self.assertRaises(ValueError, DSA.construct, (y, g, p, q)) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(DSATest) - tests += list_test_cases(DSADomainTest) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Curve25519.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Curve25519.py deleted file mode 100644 index 8defd00..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Curve25519.py +++ /dev/null @@ -1,283 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2024, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.PublicKey import ECC -from Cryptodome.PublicKey.ECC import EccKey, EccXPoint, _curves - -# Test vectors for scalar multiplication using point with X=9 as base -# generated with nickovs' Python-only code https://gist.github.com/nickovs/cc3c22d15f239a2640c185035c06f8a3 -# The order is 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed -# Each tuple is (exponent, X-coordinate) -scalar_base9_test = [ - (1, 9), - (2, 0x20d342d51873f1b7d9750c687d1571148f3f5ced1e350b5c5cae469cdd684efb), - (3, 0x1c12bc1a6d57abe645534d91c21bba64f8824e67621c0859c00a03affb713c12), - (4, 0x79ce98b7e0689d7de7d1d074a15b315ffe1805dfcd5d2a230fee85e4550013ef), - (6, 0x26954ccdc99ebf34f8f1dde5e6bb080685fec73640494c28f9fe0bfa8c794531), - (9, 0x192b929197d07748db44600da41bab7499b1c2e6e2f87c6f0e337980668164ba), - (129, 0x7332096a738900085e721103fce2cbf13aee50fef0788ea0d669008eb09ceab7), - (255, 0x1534582fc2b1cea45e8cb776547e209da4fd54a9e473b50c5b8c6b0ae023a9b3), - (256, 0x4300017536976a742ec8747f7505cd6bc80e610d669acab1a1eed36f680d98e8), - (257, 0x6c410611cb484c9016adfb884d37a0e682e075daca1d46f45bb7a4afed10b125), - (0x10101, 0xa679e9d7e043bf76c03362576e2c88abe9093c5d4f6b4a202c64a8397467cf), - (0xAA55CC, 0x2cc02f84c067e3586f4278326689be163e606d69ccae505bb09488e11f295887), - (0x1B29A0E579E0A000567, 0x50c38a72d7bfd7864c8b9083fa123e8d359068e6b491a019a885036e073f6604), - (0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed + 1, 9), -] - - -class TestEccPoint_Curve25519(unittest.TestCase): - - v1 = 0x09fa78b39b00a72930bcd8039be789a0997830bb99f79aeeb93493715390b4e8 - v2 = 0x15210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493 - - def test_init(self): - EccXPoint(9, "curve25519") - EccXPoint(2**255 - 19 + 5, "curve25519") - - def test_curve_attribute(self): - point = EccXPoint(9, "curve25519") - self.assertEqual(point.curve, "Curve25519") - - def test_init_fail(self): - self.assertRaises(ValueError, EccXPoint, 3*(2**255 - 19), "curve25519") - self.assertRaises(ValueError, EccXPoint, 9, "curve25518") - - def test_equal_set(self): - point1 = EccXPoint(self.v1, "curve25519") - point2 = EccXPoint(self.v2, "curve25519") - - self.assertEqual(point1, point1) - self.assertNotEqual(point1, point2) - - point2.set(point1) - self.assertEqual(point1.x, point2.x) - - def test_copy(self): - point1 = EccXPoint(self.v1, "curve25519") - point2 = point1.copy() - self.assertEqual(point1.x, point2.x) - - def test_pai(self): - point1 = EccXPoint(self.v1, "curve25519") - pai = point1.point_at_infinity() - self.assertTrue(pai.point_at_infinity()) - - point2 = EccXPoint(None, "curve25519") - self.assertTrue(point2.point_at_infinity()) - - def test_scalar_multiply(self): - base = EccXPoint(9, "curve25519") - - pointH = 0 * base - self.assertTrue(pointH.point_at_infinity()) - - pointH = 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed * base - self.assertTrue(pointH.point_at_infinity()) - - pointH = base * 1 - self.assertEqual(pointH.x, 9) - - for d, result in scalar_base9_test: - pointH = d * base - self.assertEqual(pointH.x, result) - - def test_sizes(self): - point = EccXPoint(9, "curve25519") - self.assertEqual(point.size_in_bits(), 255) - self.assertEqual(point.size_in_bytes(), 32) - - -class 
TestEccKey_Curve25519(unittest.TestCase): - - def test_private_key(self): - # RFC7748 Section 6.1 - Alice - alice_priv = unhexlify("77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a") - alice_pub = unhexlify("8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a") - alice_pub_x = Integer.from_bytes(alice_pub, byteorder='little') - - key = EccKey(curve="Curve25519", seed=alice_priv) - self.assertEqual(key.seed, alice_priv) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, alice_pub_x) - - # RFC7748 Section 6.1 - Bob - bob_priv = unhexlify("5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb") - bob_pub = unhexlify("de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f") - bob_pub_x = Integer.from_bytes(bob_pub, byteorder='little') - - key = EccKey(curve="Curve25519", seed=bob_priv) - self.assertEqual(key.seed, bob_priv) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, bob_pub_x) - - # Other names - key = EccKey(curve="curve25519", seed=alice_priv) - - # Must not accept d parameter - self.assertRaises(ValueError, EccKey, curve="curve25519", d=1) - - def test_public_key(self): - point = EccXPoint(_curves['curve25519'].Gx, - curve='curve25519') - key = EccKey(curve="curve25519", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - priv_key = EccKey(curve="curve25519", seed=b'H'*32) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_seed(self): - self.assertRaises(ValueError, lambda: EccKey(curve="curve25519", seed=b'H' * 31)) - - def test_equality(self): - private_key = ECC.construct(seed=b'H'*32, curve="Curve25519") - private_key2 = ECC.construct(seed=b'H'*32, curve="curve25519") - private_key3 = ECC.construct(seed=b'C'*32, curve="Curve25519") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='curve25519') - self.assertIn("curve='Curve25519'", repr(key)) - self.assertEqual(key.curve, 'Curve25519') - self.assertEqual(key.public_key().curve, 'Curve25519') - - -class TestEccModule_Curve25519(unittest.TestCase): - - def test_generate(self): - key = ECC.generate(curve="Curve25519") - self.assertTrue(key.has_private()) - point = EccXPoint(_curves['Curve25519'].Gx, curve="Curve25519") * key.d - self.assertEqual(key.pointQ, point) - - # Always random - key2 = ECC.generate(curve="Curve25519") - self.assertNotEqual(key, key2) - - # Other names - ECC.generate(curve="curve25519") - - # Random source - key1 = ECC.generate(curve="Curve25519", randfunc=SHAKE128.new().read) - key2 = ECC.generate(curve="Curve25519", randfunc=SHAKE128.new().read) - self.assertEqual(key1, key2) - - def test_construct(self): - seed = unhexlify("77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a") - point_hex = unhexlify("8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a") - Px = Integer.from_bytes(point_hex, byteorder='little') - point = EccXPoint(Px, curve="Curve25519") - - # Private key only - key = ECC.construct(curve="Curve25519", seed=seed) - 
self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Public key only - key = ECC.construct(curve="Curve25519", point_x=Px) - self.assertEqual(key.pointQ, point) - self.assertFalse(key.has_private()) - - # Private and public key - key = ECC.construct(curve="Curve25519", seed=seed, point_x=Px) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Other names - key = ECC.construct(curve="curve25519", seed=seed) - - def test_negative_construct(self): - coordG = dict(point_x=_curves['curve25519'].Gx) - - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", d=2, **coordG) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", seed=b'H'*31) - - # Verify you cannot construct weak keys (small-order points) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=0) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=1) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=325606250916557431795983626356110631294008115727848805560023387167927233504) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=39382357235489614581723060781553021112529911719440698176882885853963445705823) - p = 2**255 - 19 - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p-1) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p+1) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p+325606250916557431795983626356110631294008115727848805560023387167927233504) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p+39382357235489614581723060781553021112529911719440698176882885853963445705823) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p*2-1) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p*2) - self.assertRaises(ValueError, ECC.construct, curve="Curve25519", - point_x=p*2+1) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestEccPoint_Curve25519) - tests += list_test_cases(TestEccKey_Curve25519) - tests += list_test_cases(TestEccModule_Curve25519) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Curve448.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Curve448.py deleted file mode 100644 index 6a05a9b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Curve448.py +++ /dev/null @@ -1,246 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. 
- -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Math.Numbers import Integer -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.PublicKey import ECC -from Cryptodome.PublicKey.ECC import EccKey, EccXPoint, _curves - -CURVE448_P = 2**448 - 2**224 - 1 -CURVE448_ORDER = 2**446 - 0x8335dc163bb124b65129c96fde933d8d723a70aadc873d6d54a7bb0d - -# Test vectors for scalar multiplication using point with X=5 as base -# Each tuple is (exponent, X-coordinate) -scalar_base5_test = [ - (1, 5), - (2, 0x6391322257cae3d49aef4665d8bd5cccac9abefb511e83d75f3c766616266fc1bf3747f1da00ed7125e8f0255a1208087d32a4bc1c743cb6), - (3, 0x1fbe4b3584cab86170c14b9325840b8a2429b61fb93c42492c002a2807a4e7ea63138ea59bf95652ce9a7d13d0321c7511e3314d0553f34c), - (4, 0x93b44a7b78726ba8d0b048bd7144074f8bdad24ef9d0a6c8264f6c00b135ffcea11545e80d18364acc8ebfbcc45358e0da5fd5e5146e2b1), - (6, 0x693d165f453bd62871e5e53845f33e9e5b18b24d79c1f9102608aa7ba6f18ac24864012171d64c90b698f5ce5631cd02cee4e4336b1ad88c), - (9, 0xb970d576e7d9aa427dbf7cb9b7dd65170721d04ee060c9ea8d499dc361d4cfde1ceb19068eae853bac8f5d92827bdbf3d94c22de2fb42dae), - (129, 0x9fbdb50a1450438fe656aa32aa1bb2548d077d5c3a5d327689093a2996a4f94eacd1fb4f90315edb2afe41908a759f0d6db83fa791df80db), - (255, 0x31bc3e9385dfd12e1238927061eb0c911466da394e459bf058ba3b08260a258a3c392b0f85ddbd23828657137b88577a85b83774139fab9e), - (256, 0x735c7f30e6872e5e4215c0147c8a112d697f668c9bd0f92f5f1e4e6badc128a0b654e697cd4bae2144d54e726b54c1fa63a09b00dd3c17f), - (257, 0x95c1b0ce01286dc047aeb5922a5e62b3effb5b9296273a5004eb456f592728dd494a6fb5996a2ea7011ae6423874a48c2927bfa62d8ce8b0), - (0x10101, 0x113bb172c9dc52ab45bd665dd9751ed44e33b8596f943c6cb2f8dd329160ece802960b3eb0d2c21ef3a3ac12c20fccbc2a271fc2f061c1b2), - (0xAA55CC, 0xcf42585d2e0b1e45c0bfd601c91af4b137d7faf139fc761178c7ded432417c307ee1759af2deec6a14dbaf6b868eb13a6039fbdde4b61898), - (0x1B29A0E579E0A000567, 0x7bd9ec9775a664f4d860d82d6be60895113a7c36f92db25583dbba5dc17f09c136ec27e14857bfd6a705311327030aa657dd036325fad330), - (CURVE448_ORDER + 1, 5), -] - - -class TestEccPoint_Curve448(unittest.TestCase): - - v1 = 0x09fa78b39b00a72930bcd8039be789a0997830bb99f79aeeb93493715390b4e8 - v2 = 0x15210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493 - - def test_init(self): - EccXPoint(5, "curve448") - EccXPoint(CURVE448_P - 5, "curve448") - - def test_curve_attribute(self): - point = EccXPoint(5, "curve448") - self.assertEqual(point.curve, "Curve448") - - def test_init_fail(self): - self.assertRaises(ValueError, EccXPoint, 3*CURVE448_P, "curve448") - self.assertRaises(ValueError, EccXPoint, 3, "curve449") - - def test_equal_set(self): - point1 = EccXPoint(self.v1, "curve448") - point2 = EccXPoint(self.v2, "curve448") - - self.assertEqual(point1, point1) - self.assertNotEqual(point1, point2) - - point2.set(point1) - self.assertEqual(point1.x, point2.x) - - def test_copy(self): - point1 = EccXPoint(self.v1, "curve448") - point2 = point1.copy() - self.assertEqual(point1.x, point2.x) - - def test_pai(self): - point1 = EccXPoint(self.v1, "curve448") - pai = point1.point_at_infinity() - self.assertTrue(pai.point_at_infinity()) - - point2 = EccXPoint(None, "curve448") - self.assertTrue(point2.point_at_infinity()) - - def test_scalar_multiply(self): - base = EccXPoint(5, "curve448") - - pointH = 0 * base - self.assertTrue(pointH.point_at_infinity()) - - pointH = CURVE448_ORDER * base - self.assertTrue(pointH.point_at_infinity()) - - pointH = base * 1 - 
self.assertEqual(pointH.x, 5) - - for d, result in scalar_base5_test: - pointH = d * base - self.assertEqual(pointH.x, result) - - def test_sizes(self): - point = EccXPoint(5, "curve448") - self.assertEqual(point.size_in_bits(), 448) - self.assertEqual(point.size_in_bytes(), 56) - - -class TestEccKey_Curve448(unittest.TestCase): - - def test_private_key(self): - # RFC7748 Section 6.2 - Alice - alice_priv = unhexlify("9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28dd9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b") - alice_pub = unhexlify("9b08f7cc31b7e3e67d22d5aea121074a273bd2b83de09c63faa73d2c22c5d9bbc836647241d953d40c5b12da88120d53177f80e532c41fa0") - alice_pub_x = Integer.from_bytes(alice_pub, byteorder='little') - - key = EccKey(curve="Curve448", seed=alice_priv) - self.assertEqual(key.seed, alice_priv) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, alice_pub_x) - - # RFC7748 Section 6.2 - Bob - bob_priv = unhexlify("1c306a7ac2a0e2e0990b294470cba339e6453772b075811d8fad0d1d6927c120bb5ee8972b0d3e21374c9c921b09d1b0366f10b65173992d") - bob_pub = unhexlify("3eb7a829b0cd20f5bcfc0b599b6feccf6da4627107bdb0d4f345b43027d8b972fc3e34fb4232a13ca706dcb57aec3dae07bdc1c67bf33609") - bob_pub_x = Integer.from_bytes(bob_pub, byteorder='little') - - key = EccKey(curve="Curve448", seed=bob_priv) - self.assertEqual(key.seed, bob_priv) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, bob_pub_x) - - # Other names - key = EccKey(curve="curve448", seed=alice_priv) - - # Must not accept d parameter - self.assertRaises(ValueError, EccKey, curve="curve448", d=1) - - def test_public_key(self): - point = EccXPoint(_curves['curve448'].Gx, - curve='curve448') - key = EccKey(curve="curve448", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - priv_key = EccKey(curve="curve448", seed=b'H'*56) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_seed(self): - self.assertRaises(ValueError, lambda: EccKey(curve="curve448", - seed=b'H' * 55)) - - def test_equality(self): - private_key = ECC.construct(seed=b'H'*56, curve="Curve448") - private_key2 = ECC.construct(seed=b'H'*56, curve="curve448") - private_key3 = ECC.construct(seed=b'C'*56, curve="Curve448") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='curve448') - self.assertIn("curve='Curve448'", repr(key)) - self.assertEqual(key.curve, 'Curve448') - self.assertEqual(key.public_key().curve, 'Curve448') - - -class TestEccModule_Curve448(unittest.TestCase): - - def test_generate(self): - key = ECC.generate(curve="Curve448") - self.assertTrue(key.has_private()) - point = EccXPoint(_curves['Curve448'].Gx, curve="Curve448") * key.d - self.assertEqual(key.pointQ, point) - - # Always random - key2 = ECC.generate(curve="Curve448") - self.assertNotEqual(key, key2) - - # Other names - ECC.generate(curve="curve448") - - # Random source - key1 = ECC.generate(curve="Curve448", randfunc=SHAKE128.new().read) - key2 = ECC.generate(curve="Curve448", randfunc=SHAKE128.new().read) - 
self.assertEqual(key1, key2) - - def test_construct(self): - seed = unhexlify("9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28dd9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b") - point_hex = unhexlify("9b08f7cc31b7e3e67d22d5aea121074a273bd2b83de09c63faa73d2c22c5d9bbc836647241d953d40c5b12da88120d53177f80e532c41fa0") - Px = Integer.from_bytes(point_hex, byteorder='little') - point = EccXPoint(Px, curve="Curve448") - - # Private key only - key = ECC.construct(curve="Curve448", seed=seed) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Public key only - key = ECC.construct(curve="Curve448", point_x=Px) - self.assertEqual(key.pointQ, point) - self.assertFalse(key.has_private()) - - # Private and public key - key = ECC.construct(curve="Curve448", seed=seed, point_x=Px) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Other names - key = ECC.construct(curve="curve448", seed=seed) - - def test_negative_construct(self): - coordG = dict(point_x=_curves['curve448'].Gx) - - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - d=2, **coordG) - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - seed=b'H'*55) - - # Verify you cannot construct weak keys (small-order points) - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - point_x=0) - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - point_x=1) - p = 2**448 - 2**224 - 1 - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - point_x=p-1) - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - point_x=p) - self.assertRaises(ValueError, ECC.construct, curve="Curve448", - point_x=p+1) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestEccPoint_Curve448) - tests += list_test_cases(TestEccKey_Curve448) - tests += list_test_cases(TestEccModule_Curve448) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Ed25519.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Ed25519.py deleted file mode 100644 index 5018552..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Ed25519.py +++ /dev/null @@ -1,341 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2022, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors - -from Cryptodome.PublicKey import ECC -from Cryptodome.PublicKey.ECC import EccPoint, _curves, EccKey - -from Cryptodome.Math.Numbers import Integer - -from Cryptodome.Hash import SHAKE128 - - -class TestEccPoint_Ed25519(unittest.TestCase): - - Gxy = {"x": 15112221349535400772501151409588531511454012693041857206046113283949847762202, - "y": 46316835694926478169428394003475163141307993866256225615783033603165251855960} - - G2xy = {"x": 24727413235106541002554574571675588834622768167397638456726423682521233608206, - "y": 15549675580280190176352668710449542251549572066445060580507079593062643049417} - - G3xy = {"x": 46896733464454938657123544595386787789046198280132665686241321779790909858396, - "y": 8324843778533443976490377120369201138301417226297555316741202210403726505172} - - pointG = EccPoint(Gxy['x'], Gxy['y'], curve="ed25519") - pointG2 = EccPoint(G2xy['x'], G2xy['y'], curve="ed25519") - pointG3 = EccPoint(G3xy['x'], G3xy['y'], curve="ed25519") - - def test_curve_attribute(self): - self.assertEqual(self.pointG.curve, "Ed25519") - - def test_init_xy(self): - EccPoint(self.Gxy['x'], self.Gxy['y'], curve="Ed25519") - - # Neutral point - pai = EccPoint(0, 1, curve="Ed25519") - self.assertEqual(pai.x, 0) - self.assertEqual(pai.y, 1) - self.assertEqual(pai.xy, (0, 1)) - - # G - bp = self.pointG.copy() - self.assertEqual(bp.x, 15112221349535400772501151409588531511454012693041857206046113283949847762202) - self.assertEqual(bp.y, 46316835694926478169428394003475163141307993866256225615783033603165251855960) - self.assertEqual(bp.xy, (bp.x, bp.y)) - - # 2G - bp2 = self.pointG2.copy() - self.assertEqual(bp2.x, 24727413235106541002554574571675588834622768167397638456726423682521233608206) - self.assertEqual(bp2.y, 15549675580280190176352668710449542251549572066445060580507079593062643049417) - self.assertEqual(bp2.xy, (bp2.x, bp2.y)) - - # 5G - EccPoint(x=33467004535436536005251147249499675200073690106659565782908757308821616914995, - y=43097193783671926753355113395909008640284023746042808659097434958891230611693, - curve="Ed25519") - - # Catch if point is not on the curve - self.assertRaises(ValueError, EccPoint, 34, 35, curve="Ed25519") - - def test_set(self): - pointW = EccPoint(0, 1, curve="Ed25519") - pointW.set(self.pointG) - self.assertEqual(pointW.x, self.pointG.x) - self.assertEqual(pointW.y, self.pointG.y) - - def test_copy(self): - pointW = self.pointG.copy() - self.assertEqual(pointW.x, self.pointG.x) - self.assertEqual(pointW.y, self.pointG.y) - - def test_equal(self): - pointH = self.pointG.copy() - pointI = self.pointG2.copy() - self.assertEqual(self.pointG, pointH) - self.assertNotEqual(self.pointG, pointI) - - def test_pai(self): - pai = EccPoint(0, 1, curve="Ed25519") - 
self.assertTrue(pai.is_point_at_infinity()) - self.assertEqual(pai, pai.point_at_infinity()) - - def test_negate(self): - negG = -self.pointG - G100 = self.pointG * 100 - sum_zero = G100 + negG * 100 - self.assertTrue(sum_zero.is_point_at_infinity()) - - sum_99 = G100 + negG - expected = self.pointG * 99 - self.assertEqual(sum_99, expected) - - def test_addition(self): - self.assertEqual(self.pointG + self.pointG2, self.pointG3) - self.assertEqual(self.pointG2 + self.pointG, self.pointG3) - self.assertEqual(self.pointG2 + self.pointG.point_at_infinity(), self.pointG2) - self.assertEqual(self.pointG.point_at_infinity() + self.pointG2, self.pointG2) - - G5 = self.pointG2 + self.pointG3 - self.assertEqual(G5.x, 33467004535436536005251147249499675200073690106659565782908757308821616914995) - self.assertEqual(G5.y, 43097193783671926753355113395909008640284023746042808659097434958891230611693) - - def test_inplace_addition(self): - pointH = self.pointG.copy() - pointH += self.pointG - self.assertEqual(pointH, self.pointG2) - pointH += self.pointG - self.assertEqual(pointH, self.pointG3) - pointH += self.pointG.point_at_infinity() - self.assertEqual(pointH, self.pointG3) - - def test_doubling(self): - pointH = self.pointG.copy() - pointH.double() - self.assertEqual(pointH.x, self.pointG2.x) - self.assertEqual(pointH.y, self.pointG2.y) - - # 2*0 - pai = self.pointG.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - def test_scalar_multiply(self): - d = 0 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0) - self.assertEqual(pointH.y, 1) - - d = 1 - pointH = d * self.pointG - self.assertEqual(pointH.x, self.pointG.x) - self.assertEqual(pointH.y, self.pointG.y) - - d = 2 - pointH = d * self.pointG - self.assertEqual(pointH.x, self.pointG2.x) - self.assertEqual(pointH.y, self.pointG2.y) - - d = 3 - pointH = d * self.pointG - self.assertEqual(pointH.x, self.pointG3.x) - self.assertEqual(pointH.y, self.pointG3.y) - - d = 4 - pointH = d * self.pointG - self.assertEqual(pointH.x, 14582954232372986451776170844943001818709880559417862259286374126315108956272) - self.assertEqual(pointH.y, 32483318716863467900234833297694612235682047836132991208333042722294373421359) - - d = 5 - pointH = d * self.pointG - self.assertEqual(pointH.x, 33467004535436536005251147249499675200073690106659565782908757308821616914995) - self.assertEqual(pointH.y, 43097193783671926753355113395909008640284023746042808659097434958891230611693) - - d = 10 - pointH = d * self.pointG - self.assertEqual(pointH.x, 43500613248243327786121022071801015118933854441360174117148262713429272820047) - self.assertEqual(pointH.y, 45005105423099817237495816771148012388779685712352441364231470781391834741548) - - d = 20 - pointH = d * self.pointG - self.assertEqual(pointH.x, 46694936775300686710656303283485882876784402425210400817529601134760286812591) - self.assertEqual(pointH.y, 8786390172762935853260670851718824721296437982862763585171334833968259029560) - - d = 255 - pointH = d * self.pointG - self.assertEqual(pointH.x, 36843863416400016952258312492144504209624961884991522125275155377549541182230) - self.assertEqual(pointH.y, 22327030283879720808995671630924669697661065034121040761798775626517750047180) - - d = 256 - pointH = d * self.pointG - self.assertEqual(pointH.x, 42740085206947573681423002599456489563927820004573071834350074001818321593686) - self.assertEqual(pointH.y, 6935684722522267618220753829624209639984359598320562595061366101608187623111) - - def test_sizes(self): - 
self.assertEqual(self.pointG.size_in_bits(), 255) - self.assertEqual(self.pointG.size_in_bytes(), 32) - - -class TestEccKey_Ed25519(unittest.TestCase): - - def test_private_key(self): - seed = unhexlify("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") - Px = 38815646466658113194383306759739515082307681141926459231621296960732224964046 - Py = 11903303657706407974989296177215005343713679411332034699907763981919547054807 - - key = EccKey(curve="Ed25519", seed=seed) - self.assertEqual(key.seed, seed) - self.assertEqual(key.d, 36144925721603087658594284515452164870581325872720374094707712194495455132720) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, Px) - self.assertEqual(key.pointQ.y, Py) - - point = EccPoint(Px, Py, "ed25519") - key = EccKey(curve="Ed25519", seed=seed, point=point) - self.assertEqual(key.d, 36144925721603087658594284515452164870581325872720374094707712194495455132720) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="ed25519", seed=seed) - - # Must not accept d parameter - self.assertRaises(ValueError, EccKey, curve="ed25519", d=1) - - def test_public_key(self): - point = EccPoint(_curves['ed25519'].Gx, _curves['ed25519'].Gy, curve='ed25519') - key = EccKey(curve="ed25519", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - priv_key = EccKey(curve="ed25519", seed=b'H'*32) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_seed(self): - self.assertRaises(ValueError, lambda: EccKey(curve="ed25519", seed=b'H' * 31)) - - def test_equality(self): - private_key = ECC.construct(seed=b'H'*32, curve="Ed25519") - private_key2 = ECC.construct(seed=b'H'*32, curve="ed25519") - private_key3 = ECC.construct(seed=b'C'*32, curve="Ed25519") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='ed25519') - self.assertIn("curve='Ed25519'", repr(key)) - self.assertEqual(key.curve, 'Ed25519') - self.assertEqual(key.public_key().curve, 'Ed25519') - - -class TestEccModule_Ed25519(unittest.TestCase): - - def test_generate(self): - key = ECC.generate(curve="Ed25519") - self.assertTrue(key.has_private()) - point = EccPoint(_curves['Ed25519'].Gx, _curves['Ed25519'].Gy, curve="Ed25519") * key.d - self.assertEqual(key.pointQ, point) - - # Always random - key2 = ECC.generate(curve="Ed25519") - self.assertNotEqual(key, key2) - - # Other names - ECC.generate(curve="Ed25519") - - # Random source - key1 = ECC.generate(curve="Ed25519", randfunc=SHAKE128.new().read) - key2 = ECC.generate(curve="Ed25519", randfunc=SHAKE128.new().read) - self.assertEqual(key1, key2) - - def test_construct(self): - seed = unhexlify("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") - Px = 38815646466658113194383306759739515082307681141926459231621296960732224964046 - Py = 11903303657706407974989296177215005343713679411332034699907763981919547054807 - d = 36144925721603087658594284515452164870581325872720374094707712194495455132720 - point = EccPoint(Px, Py, 
curve="Ed25519") - - # Private key only - key = ECC.construct(curve="Ed25519", seed=seed) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Public key only - key = ECC.construct(curve="Ed25519", point_x=Px, point_y=Py) - self.assertEqual(key.pointQ, point) - self.assertFalse(key.has_private()) - - # Private and public key - key = ECC.construct(curve="Ed25519", seed=seed, point_x=Px, point_y=Py) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Other names - key = ECC.construct(curve="ed25519", seed=seed) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['ed25519'].Gx, point_y=_curves['ed25519'].Gy) - - self.assertRaises(ValueError, ECC.construct, curve="Ed25519", **coord) - self.assertRaises(ValueError, ECC.construct, curve="Ed25519", d=2, **coordG) - self.assertRaises(ValueError, ECC.construct, curve="Ed25519", seed=b'H'*31) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestEccPoint_Ed25519) - tests += list_test_cases(TestEccKey_Ed25519) - tests += list_test_cases(TestEccModule_Ed25519) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Ed448.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Ed448.py deleted file mode 100644 index 3a7b0eb..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_Ed448.py +++ /dev/null @@ -1,336 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2022, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors - -from Cryptodome.PublicKey import ECC -from Cryptodome.PublicKey.ECC import EccPoint, _curves, EccKey - -from Cryptodome.Math.Numbers import Integer - -from Cryptodome.Hash import SHAKE128 - - -class TestEccPoint_Ed448(unittest.TestCase): - - Gxy = {"x": 0x4f1970c66bed0ded221d15a622bf36da9e146570470f1767ea6de324a3d3a46412ae1af72ab66511433b80e18b00938e2626a82bc70cc05e, - "y": 0x693f46716eb6bc248876203756c9c7624bea73736ca3984087789c1e05a0c2d73ad3ff1ce67c39c4fdbd132c4ed7c8ad9808795bf230fa14} - - G2xy = {"x": 0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa955555555555555555555555555555555555555555555555555555555, - "y": 0xae05e9634ad7048db359d6205086c2b0036ed7a035884dd7b7e36d728ad8c4b80d6565833a2a3098bbbcb2bed1cda06bdaeafbcdea9386ed} - - G3xy = {"x": 0x865886b9108af6455bd64316cb6943332241b8b8cda82c7e2ba077a4a3fcfe8daa9cbf7f6271fd6e862b769465da8575728173286ff2f8f, - "y": 0xe005a8dbd5125cf706cbda7ad43aa6449a4a8d952356c3b9fce43c82ec4e1d58bb3a331bdb6767f0bffa9a68fed02dafb822ac13588ed6fc} - - pointG = EccPoint(Gxy['x'], Gxy['y'], curve="ed448") - pointG2 = EccPoint(G2xy['x'], G2xy['y'], curve="ed448") - pointG3 = EccPoint(G3xy['x'], G3xy['y'], curve="ed448") - - def test_curve_attribute(self): - self.assertEqual(self.pointG.curve, "Ed448") - - def test_init_xy(self): - EccPoint(self.Gxy['x'], self.Gxy['y'], curve="Ed448") - - # Neutral point - pai = EccPoint(0, 1, curve="Ed448") - self.assertEqual(pai.x, 0) - self.assertEqual(pai.y, 1) - self.assertEqual(pai.xy, (0, 1)) - - # G - bp = self.pointG.copy() - self.assertEqual(bp.x, 0x4f1970c66bed0ded221d15a622bf36da9e146570470f1767ea6de324a3d3a46412ae1af72ab66511433b80e18b00938e2626a82bc70cc05e) - self.assertEqual(bp.y, 0x693f46716eb6bc248876203756c9c7624bea73736ca3984087789c1e05a0c2d73ad3ff1ce67c39c4fdbd132c4ed7c8ad9808795bf230fa14) - self.assertEqual(bp.xy, (bp.x, bp.y)) - - # 2G - bp2 = self.pointG2.copy() - self.assertEqual(bp2.x, 0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa955555555555555555555555555555555555555555555555555555555) - self.assertEqual(bp2.y, 0xae05e9634ad7048db359d6205086c2b0036ed7a035884dd7b7e36d728ad8c4b80d6565833a2a3098bbbcb2bed1cda06bdaeafbcdea9386ed) - self.assertEqual(bp2.xy, (bp2.x, bp2.y)) - - # 5G - EccPoint(x=0x7a9f9335a48dcb0e2ba7601eedb50def80cbcf728562ada756d761e8958812808bc0d57a920c3c96f07b2d8cefc6f950d0a99d1092030034, - y=0xadfd751a2517edd3b9109ce4fd580ade260ca1823ab18fced86551f7b698017127d7a4ee59d2b33c58405512881f225443b4731472f435eb, - curve="Ed448") - - # Catch if point is not on the curve - self.assertRaises(ValueError, EccPoint, 34, 35, curve="Ed448") - - def test_set(self): - pointW = EccPoint(0, 1, curve="Ed448") - pointW.set(self.pointG) - self.assertEqual(pointW.x, self.pointG.x) - self.assertEqual(pointW.y, self.pointG.y) - - def test_copy(self): - pointW = self.pointG.copy() - self.assertEqual(pointW.x, self.pointG.x) - self.assertEqual(pointW.y, self.pointG.y) - - def test_equal(self): - pointH = self.pointG.copy() - pointI = self.pointG2.copy() - self.assertEqual(self.pointG, pointH) - self.assertNotEqual(self.pointG, pointI) - - def test_pai(self): - pai = EccPoint(0, 1, curve="Ed448") - self.assertTrue(pai.is_point_at_infinity()) - self.assertEqual(pai, pai.point_at_infinity()) - - def test_negate(self): - negG = -self.pointG - sum = 
self.pointG + negG - self.assertTrue(sum.is_point_at_infinity()) - - def test_addition(self): - self.assertEqual(self.pointG + self.pointG2, self.pointG3) - self.assertEqual(self.pointG2 + self.pointG, self.pointG3) - self.assertEqual(self.pointG2 + self.pointG.point_at_infinity(), self.pointG2) - self.assertEqual(self.pointG.point_at_infinity() + self.pointG2, self.pointG2) - - G5 = self.pointG2 + self.pointG3 - self.assertEqual(G5.x, 0x7a9f9335a48dcb0e2ba7601eedb50def80cbcf728562ada756d761e8958812808bc0d57a920c3c96f07b2d8cefc6f950d0a99d1092030034) - self.assertEqual(G5.y, 0xadfd751a2517edd3b9109ce4fd580ade260ca1823ab18fced86551f7b698017127d7a4ee59d2b33c58405512881f225443b4731472f435eb) - - def test_inplace_addition(self): - pointH = self.pointG.copy() - pointH += self.pointG - self.assertEqual(pointH, self.pointG2) - pointH += self.pointG - self.assertEqual(pointH, self.pointG3) - pointH += self.pointG.point_at_infinity() - self.assertEqual(pointH, self.pointG3) - - def test_doubling(self): - pointH = self.pointG.copy() - pointH.double() - self.assertEqual(pointH.x, self.pointG2.x) - self.assertEqual(pointH.y, self.pointG2.y) - - # 2*0 - pai = self.pointG.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - def test_scalar_multiply(self): - d = 0 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0) - self.assertEqual(pointH.y, 1) - - d = 1 - pointH = d * self.pointG - self.assertEqual(pointH.x, self.pointG.x) - self.assertEqual(pointH.y, self.pointG.y) - - d = 2 - pointH = d * self.pointG - self.assertEqual(pointH.x, self.pointG2.x) - self.assertEqual(pointH.y, self.pointG2.y) - - d = 3 - pointH = d * self.pointG - self.assertEqual(pointH.x, self.pointG3.x) - self.assertEqual(pointH.y, self.pointG3.y) - - d = 4 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0x49dcbc5c6c0cce2c1419a17226f929ea255a09cf4e0891c693fda4be70c74cc301b7bdf1515dd8ba21aee1798949e120e2ce42ac48ba7f30) - self.assertEqual(pointH.y, 0xd49077e4accde527164b33a5de021b979cb7c02f0457d845c90dc3227b8a5bc1c0d8f97ea1ca9472b5d444285d0d4f5b32e236f86de51839) - - d = 5 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0x7a9f9335a48dcb0e2ba7601eedb50def80cbcf728562ada756d761e8958812808bc0d57a920c3c96f07b2d8cefc6f950d0a99d1092030034) - self.assertEqual(pointH.y, 0xadfd751a2517edd3b9109ce4fd580ade260ca1823ab18fced86551f7b698017127d7a4ee59d2b33c58405512881f225443b4731472f435eb) - - d = 10 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0x77486f9d19f6411cdd35d30d1c3235f71936452c787e5c034134d3e8172278aca61622bc805761ce3dab65118a0122d73b403165d0ed303d) - self.assertEqual(pointH.y, 0x4d2fea0b026be11024f1f0fe7e94e618e8ac17381ada1d1bf7ee293a68ff5d0bf93c1997dc1aabdc0c7e6381428d85b6b1954a89e4cddf67) - - d = 20 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0x3c236422354600fe6763defcc1503737e4ed89e262d0de3ec1e552020f2a56fe3b9e1e012d021072598c3c2821e18268bb8fb8339c0d1216) - self.assertEqual(pointH.y, 0xb555b9721f630ccb05fc466de4c74d3d2781e69eca88e1b040844f04cab39fd946f91c688fa42402bb38fb9c3e61231017020b219b4396e1) - - d = 255 - pointH = d * self.pointG - self.assertEqual(pointH.x, 0xbeb7f8388b05cd9c1aa2e3c0dcf31e2b563659361826225390e7748654f627d5c36cbe627e9019936b56d15d4dad7c337c09bac64ff4197f) - self.assertEqual(pointH.y, 0x1e37312b2dd4e9440c43c6e7725fc4fa3d11e582d4863f1d018e28f50c0efdb1f53f9b01ada7c87fa162b1f0d72401015d57613d25f1ad53) - - d = 256 - pointH = d * self.pointG - self.assertEqual(pointH.x, 
0xf19c34feb56730e3e2be761ac0a2a2b24853b281dda019fc35a5ab58e3696beb39609ae756b0d20fb7ccf0d79aaf5f3bca2e4fdb25bfac1c) - self.assertEqual(pointH.y, 0x3beb69cc9111bffcaddc61d363ce6fe5dd44da4aadce78f52e92e985d5442344ced72c4611ed0daac9f4f5661eab73d7a12d25ce8a30241e) - - def test_sizes(self): - self.assertEqual(self.pointG.size_in_bits(), 448) - self.assertEqual(self.pointG.size_in_bytes(), 56) - - -class TestEccKey_Ed448(unittest.TestCase): - - def test_private_key(self): - seed = unhexlify("4adf5d37ac6785e83e99a924f92676d366a78690af59c92b6bdf14f9cdbcf26fdad478109607583d633b60078d61d51d81b7509c5433b0d4c9") - Px = 0x72a01eea003a35f9ac44231dc4aae2a382f351d80bf32508175b0855edcf389aa2bbf308dd961ce361a6e7c2091bc78957f6ebcf3002a617 - Py = 0x9e0d08d84586e9aeefecacb41d049b831f1a3ee0c3eada63e34557b30702b50ab59fb372feff7c30b8cbb7dd51afbe88444ec56238722ec1 - - key = EccKey(curve="Ed448", seed=seed) - self.assertEqual(key.seed, seed) - self.assertEqual(key.d, 0xb07cf179604f83433186e5178760c759c15125ee54ff6f8dcde46e872b709ac82ed0bd0a4e036d774034dcb18a9fb11894657a1485895f80) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, Px) - self.assertEqual(key.pointQ.y, Py) - - point = EccPoint(Px, Py, "ed448") - key = EccKey(curve="Ed448", seed=seed, point=point) - self.assertEqual(key.d, 0xb07cf179604f83433186e5178760c759c15125ee54ff6f8dcde46e872b709ac82ed0bd0a4e036d774034dcb18a9fb11894657a1485895f80) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="ed448", seed=seed) - - # Must not accept d parameter - self.assertRaises(ValueError, EccKey, curve="ed448", d=1) - - def test_public_key(self): - point = EccPoint(_curves['ed448'].Gx, _curves['ed448'].Gy, curve='ed448') - key = EccKey(curve="ed448", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - priv_key = EccKey(curve="ed448", seed=b'H'*57) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_seed(self): - self.assertRaises(ValueError, lambda: EccKey(curve="ed448", seed=b'H' * 56)) - - def test_equality(self): - private_key = ECC.construct(seed=b'H'*57, curve="Ed448") - private_key2 = ECC.construct(seed=b'H'*57, curve="ed448") - private_key3 = ECC.construct(seed=b'C'*57, curve="Ed448") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='ed448') - self.assertIn("curve='Ed448'", repr(key)) - self.assertEqual(key.curve, 'Ed448') - self.assertEqual(key.public_key().curve, 'Ed448') - - -class TestEccModule_Ed448(unittest.TestCase): - - def test_generate(self): - key = ECC.generate(curve="Ed448") - self.assertTrue(key.has_private()) - point = EccPoint(_curves['Ed448'].Gx, _curves['Ed448'].Gy, curve="Ed448") * key.d - self.assertEqual(key.pointQ, point) - - # Always random - key2 = ECC.generate(curve="Ed448") - self.assertNotEqual(key, key2) - - # Other names - ECC.generate(curve="Ed448") - - # Random source - key1 = ECC.generate(curve="Ed448", randfunc=SHAKE128.new().read) - key2 = ECC.generate(curve="Ed448", randfunc=SHAKE128.new().read) - 
self.assertEqual(key1, key2) - - def test_construct(self): - seed = unhexlify("4adf5d37ac6785e83e99a924f92676d366a78690af59c92b6bdf14f9cdbcf26fdad478109607583d633b60078d61d51d81b7509c5433b0d4c9") - Px = 0x72a01eea003a35f9ac44231dc4aae2a382f351d80bf32508175b0855edcf389aa2bbf308dd961ce361a6e7c2091bc78957f6ebcf3002a617 - Py = 0x9e0d08d84586e9aeefecacb41d049b831f1a3ee0c3eada63e34557b30702b50ab59fb372feff7c30b8cbb7dd51afbe88444ec56238722ec1 - d = 0xb07cf179604f83433186e5178760c759c15125ee54ff6f8dcde46e872b709ac82ed0bd0a4e036d774034dcb18a9fb11894657a1485895f80 - point = EccPoint(Px, Py, curve="Ed448") - - # Private key only - key = ECC.construct(curve="Ed448", seed=seed) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Public key only - key = ECC.construct(curve="Ed448", point_x=Px, point_y=Py) - self.assertEqual(key.pointQ, point) - self.assertFalse(key.has_private()) - - # Private and public key - key = ECC.construct(curve="Ed448", seed=seed, point_x=Px, point_y=Py) - self.assertEqual(key.pointQ, point) - self.assertTrue(key.has_private()) - - # Other names - key = ECC.construct(curve="ed448", seed=seed) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['ed448'].Gx, point_y=_curves['ed448'].Gy) - - self.assertRaises(ValueError, ECC.construct, curve="Ed448", **coord) - self.assertRaises(ValueError, ECC.construct, curve="Ed448", d=2, **coordG) - self.assertRaises(ValueError, ECC.construct, curve="Ed448", seed=b'H'*58) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestEccPoint_Ed448) - tests += list_test_cases(TestEccKey_Ed448) - tests += list_test_cases(TestEccModule_Ed448) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_NIST.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_NIST.py deleted file mode 100644 index ae790b1..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ECC_NIST.py +++ /dev/null @@ -1,1440 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors - -from Cryptodome.PublicKey import ECC -from Cryptodome.PublicKey.ECC import EccPoint, _curves, EccKey - -from Cryptodome.Math.Numbers import Integer - - -class TestEccPoint(unittest.TestCase): - - def test_mix(self): - - p1 = ECC.generate(curve='P-256').pointQ - p2 = ECC.generate(curve='P-384').pointQ - - try: - p1 + p2 - assert(False) - except ValueError as e: - assert "not on the same curve" in str(e) - - try: - p1 += p2 - assert(False) - except ValueError as e: - assert "not on the same curve" in str(e) - - class OtherKeyType: - pass - - self.assertFalse(p1 == OtherKeyType()) - self.assertTrue(p1 != OtherKeyType()) - - def test_repr(self): - p1 = ECC.construct(curve='P-256', - d=75467964919405407085864614198393977741148485328036093939970922195112333446269, - point_x=20573031766139722500939782666697015100983491952082159880539639074939225934381, - point_y=108863130203210779921520632367477406025152638284581252625277850513266505911389) - self.assertEqual(repr(p1), "EccKey(curve='NIST P-256', point_x=20573031766139722500939782666697015100983491952082159880539639074939225934381, point_y=108863130203210779921520632367477406025152638284581252625277850513266505911389, d=75467964919405407085864614198393977741148485328036093939970922195112333446269)") - - -class TestEccPoint_NIST_P192(unittest.TestCase): - """Tests defined in section 4.1 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" - - pointS = EccPoint( - 0xd458e7d127ae671b0c330266d246769353a012073e97acf8, - 0x325930500d851f336bddc050cf7fb11b5673a1645086df3b, - curve='p192') - - pointT = EccPoint( - 0xf22c4395213e9ebe67ddecdd87fdbd01be16fb059b9753a4, - 0x264424096af2b3597796db48f8dfb41fa9cecc97691a9c79, - curve='p192') - - def test_curve_attribute(self): - self.assertEqual(self.pointS.curve, "NIST P-192") - - def test_set(self): - pointW = EccPoint(0, 0) - pointW.set(self.pointS) - self.assertEqual(pointW, self.pointS) - - def test_copy(self): - pointW = self.pointS.copy() - self.assertEqual(pointW, self.pointS) - pointW.set(self.pointT) - self.assertEqual(pointW, self.pointT) - self.assertNotEqual(self.pointS, self.pointT) - - def test_negate(self): - negS = -self.pointS - sum = self.pointS + negS - self.assertEqual(sum, self.pointS.point_at_infinity()) - - def test_addition(self): - pointRx = 0x48e1e4096b9b8e5ca9d0f1f077b8abf58e843894de4d0290 - pointRy = 0x408fa77c797cd7dbfb16aa48a3648d3d63c94117d7b6aa4b - - pointR = self.pointS + self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS + pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai + self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR 
= pai + pai - self.assertEqual(pointR, pai) - - def test_inplace_addition(self): - pointRx = 0x48e1e4096b9b8e5ca9d0f1f077b8abf58e843894de4d0290 - pointRy = 0x408fa77c797cd7dbfb16aa48a3648d3d63c94117d7b6aa4b - - pointR = self.pointS.copy() - pointR += self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS.copy() - pointR += pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai.copy() - pointR += self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai.copy() - pointR += pai - self.assertEqual(pointR, pai) - - def test_doubling(self): - pointRx = 0x30c5bc6b8c7da25354b373dc14dd8a0eba42d25a3f6e6962 - pointRy = 0x0dde14bc4249a721c407aedbf011e2ddbbcb2968c9d889cf - - pointR = self.pointS.copy() - pointR.double() - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 2*0 - pai = self.pointS.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - # S + S - pointR = self.pointS.copy() - pointR += pointR - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_scalar_multiply(self): - d = 0xa78a236d60baec0c5dd41b33a542463a8255391af64c74ee - pointRx = 0x1faee4205a4f669d2d0a8f25e3bcec9a62a6952965bf6d31 - pointRy = 0x5ff2cdfa508a2581892367087c696f179e7a4d7e8260fb06 - - pointR = self.pointS * d - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 0*S - pai = self.pointS.point_at_infinity() - pointR = self.pointS * 0 - self.assertEqual(pointR, pai) - - # -1*S - self.assertRaises(ValueError, lambda: self.pointS * -1) - - # Reverse order - pointR = d * self.pointS - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pointR = Integer(d) * self.pointS - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_joint_scalar_multiply(self): - d = 0xa78a236d60baec0c5dd41b33a542463a8255391af64c74ee - e = 0xc4be3d53ec3089e71e4de8ceab7cce889bc393cd85b972bc - pointRx = 0x019f64eed8fa9b72b7dfea82c17c9bfa60ecb9e1778b5bde - pointRy = 0x16590c5fcd8655fa4ced33fb800e2a7e3c61f35d83503644 - - pointR = self.pointS * d + self.pointT * e - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_sizes(self): - self.assertEqual(self.pointS.size_in_bits(), 192) - self.assertEqual(self.pointS.size_in_bytes(), 24) - - -class TestEccPoint_NIST_P224(unittest.TestCase): - """Tests defined in section 4.2 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" - - pointS = EccPoint( - 0x6eca814ba59a930843dc814edd6c97da95518df3c6fdf16e9a10bb5b, - 0xef4b497f0963bc8b6aec0ca0f259b89cd80994147e05dc6b64d7bf22, - curve='p224') - - pointT = EccPoint( - 0xb72b25aea5cb03fb88d7e842002969648e6ef23c5d39ac903826bd6d, - 0xc42a8a4d34984f0b71b5b4091af7dceb33ea729c1a2dc8b434f10c34, - curve='p224') - - def test_curve_attribute(self): - self.assertEqual(self.pointS.curve, "NIST P-224") - - def test_set(self): - pointW = EccPoint(0, 0) - pointW.set(self.pointS) - self.assertEqual(pointW, self.pointS) - - def test_copy(self): - pointW = self.pointS.copy() - self.assertEqual(pointW, self.pointS) - pointW.set(self.pointT) - self.assertEqual(pointW, self.pointT) - self.assertNotEqual(self.pointS, self.pointT) - - def test_negate(self): - negS = -self.pointS - sum = self.pointS + negS - self.assertEqual(sum, self.pointS.point_at_infinity()) - - def test_addition(self): - 
pointRx = 0x236f26d9e84c2f7d776b107bd478ee0a6d2bcfcaa2162afae8d2fd15 - pointRy = 0xe53cc0a7904ce6c3746f6a97471297a0b7d5cdf8d536ae25bb0fda70 - - pointR = self.pointS + self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS + pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai + self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai + pai - self.assertEqual(pointR, pai) - - def test_inplace_addition(self): - pointRx = 0x236f26d9e84c2f7d776b107bd478ee0a6d2bcfcaa2162afae8d2fd15 - pointRy = 0xe53cc0a7904ce6c3746f6a97471297a0b7d5cdf8d536ae25bb0fda70 - - pointR = self.pointS.copy() - pointR += self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS.copy() - pointR += pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai.copy() - pointR += self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai.copy() - pointR += pai - self.assertEqual(pointR, pai) - - def test_doubling(self): - pointRx = 0xa9c96f2117dee0f27ca56850ebb46efad8ee26852f165e29cb5cdfc7 - pointRy = 0xadf18c84cf77ced4d76d4930417d9579207840bf49bfbf5837dfdd7d - - pointR = self.pointS.copy() - pointR.double() - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 2*0 - pai = self.pointS.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - # S + S - pointR = self.pointS.copy() - pointR += pointR - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_scalar_multiply(self): - d = 0xa78ccc30eaca0fcc8e36b2dd6fbb03df06d37f52711e6363aaf1d73b - pointRx = 0x96a7625e92a8d72bff1113abdb95777e736a14c6fdaacc392702bca4 - pointRy = 0x0f8e5702942a3c5e13cd2fd5801915258b43dfadc70d15dbada3ed10 - - pointR = self.pointS * d - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 0*S - pai = self.pointS.point_at_infinity() - pointR = self.pointS * 0 - self.assertEqual(pointR, pai) - - # -1*S - self.assertRaises(ValueError, lambda: self.pointS * -1) - - # Reverse order - pointR = d * self.pointS - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pointR = Integer(d) * self.pointS - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_joing_scalar_multiply(self): - d = 0xa78ccc30eaca0fcc8e36b2dd6fbb03df06d37f52711e6363aaf1d73b - e = 0x54d549ffc08c96592519d73e71e8e0703fc8177fa88aa77a6ed35736 - pointRx = 0xdbfe2958c7b2cda1302a67ea3ffd94c918c5b350ab838d52e288c83e - pointRy = 0x2f521b83ac3b0549ff4895abcc7f0c5a861aacb87acbc5b8147bb18b - - pointR = self.pointS * d + self.pointT * e - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_sizes(self): - self.assertEqual(self.pointS.size_in_bits(), 224) - self.assertEqual(self.pointS.size_in_bytes(), 28) - - -class TestEccPoint_NIST_P256(unittest.TestCase): - """Tests defined in section 4.3 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" - - pointS = EccPoint( - 0xde2444bebc8d36e682edd27e0f271508617519b3221a8fa0b77cab3989da97c9, - 0xc093ae7ff36e5380fc01a5aad1e66659702de80f53cec576b6350b243042a256) - - pointT = EccPoint( - 0x55a8b00f8da1d44e62f6b3b25316212e39540dc861c89575bb8cf92e35e0986b, - 0x5421c3209c2d6c704835d82ac4c3dd90f61a8a52598b9e7ab656e9d8c8b24316) - - def 
test_curve_attribute(self): - self.assertEqual(self.pointS.curve, "NIST P-256") - - def test_set(self): - pointW = EccPoint(0, 0) - pointW.set(self.pointS) - self.assertEqual(pointW, self.pointS) - - def test_copy(self): - pointW = self.pointS.copy() - self.assertEqual(pointW, self.pointS) - pointW.set(self.pointT) - self.assertEqual(pointW, self.pointT) - self.assertNotEqual(self.pointS, self.pointT) - - def test_negate(self): - negS = -self.pointS - sum = self.pointS + negS - self.assertEqual(sum, self.pointS.point_at_infinity()) - - def test_addition(self): - pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e - pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 - - pointR = self.pointS + self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS + pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai + self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai + pai - self.assertEqual(pointR, pai) - - def test_inplace_addition(self): - pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e - pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 - - pointR = self.pointS.copy() - pointR += self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS.copy() - pointR += pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai.copy() - pointR += self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai.copy() - pointR += pai - self.assertEqual(pointR, pai) - - def test_doubling(self): - pointRx = 0x7669e6901606ee3ba1a8eef1e0024c33df6c22f3b17481b82a860ffcdb6127b0 - pointRy = 0xfa878162187a54f6c39f6ee0072f33de389ef3eecd03023de10ca2c1db61d0c7 - - pointR = self.pointS.copy() - pointR.double() - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 2*0 - pai = self.pointS.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - # S + S - pointR = self.pointS.copy() - pointR += pointR - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_scalar_multiply(self): - d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd - pointRx = 0x51d08d5f2d4278882946d88d83c97d11e62becc3cfc18bedacc89ba34eeca03f - pointRy = 0x75ee68eb8bf626aa5b673ab51f6e744e06f8fcf8a6c0cf3035beca956a7b41d5 - - pointR = self.pointS * d - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 0*S - pai = self.pointS.point_at_infinity() - pointR = self.pointS * 0 - self.assertEqual(pointR, pai) - - # -1*S - self.assertRaises(ValueError, lambda: self.pointS * -1) - - # Reverse order - pointR = d * self.pointS - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pointR = Integer(d) * self.pointS - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_joing_scalar_multiply(self): - d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd - e = 0xd37f628ece72a462f0145cbefe3f0b355ee8332d37acdd83a358016aea029db7 - pointRx = 0xd867b4679221009234939221b8046245efcf58413daacbeff857b8588341f6b8 - pointRy = 0xf2504055c03cede12d22720dad69c745106b6607ec7e50dd35d54bd80f615275 - - pointR = self.pointS * d + self.pointT * e - self.assertEqual(pointR.x, pointRx) - 
self.assertEqual(pointR.y, pointRy) - - def test_sizes(self): - self.assertEqual(self.pointS.size_in_bits(), 256) - self.assertEqual(self.pointS.size_in_bytes(), 32) - - -class TestEccPoint_NIST_P384(unittest.TestCase): - """Tests defined in section 4.4 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" - - pointS = EccPoint( - 0xfba203b81bbd23f2b3be971cc23997e1ae4d89e69cb6f92385dda82768ada415ebab4167459da98e62b1332d1e73cb0e, - 0x5ffedbaefdeba603e7923e06cdb5d0c65b22301429293376d5c6944e3fa6259f162b4788de6987fd59aed5e4b5285e45, - "p384") - - pointT = EccPoint( - 0xaacc05202e7fda6fc73d82f0a66220527da8117ee8f8330ead7d20ee6f255f582d8bd38c5a7f2b40bcdb68ba13d81051, - 0x84009a263fefba7c2c57cffa5db3634d286131afc0fca8d25afa22a7b5dce0d9470da89233cee178592f49b6fecb5092, - "p384") - - def test_curve_attribute(self): - self.assertEqual(self.pointS.curve, "NIST P-384") - - def test_set(self): - pointW = EccPoint(0, 0, "p384") - pointW.set(self.pointS) - self.assertEqual(pointW, self.pointS) - - def test_copy(self): - pointW = self.pointS.copy() - self.assertEqual(pointW, self.pointS) - pointW.set(self.pointT) - self.assertEqual(pointW, self.pointT) - self.assertNotEqual(self.pointS, self.pointT) - - def test_negate(self): - negS = -self.pointS - sum = self.pointS + negS - self.assertEqual(sum, self.pointS.point_at_infinity()) - - def test_addition(self): - pointRx = 0x12dc5ce7acdfc5844d939f40b4df012e68f865b89c3213ba97090a247a2fc009075cf471cd2e85c489979b65ee0b5eed - pointRy = 0x167312e58fe0c0afa248f2854e3cddcb557f983b3189b67f21eee01341e7e9fe67f6ee81b36988efa406945c8804a4b0 - - pointR = self.pointS + self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS + pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai + self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai + pai - self.assertEqual(pointR, pai) - - def _test_inplace_addition(self): - pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e - pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 - - pointR = self.pointS.copy() - pointR += self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS.copy() - pointR += pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai.copy() - pointR += self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai.copy() - pointR += pai - self.assertEqual(pointR, pai) - - def test_doubling(self): - pointRx = 0x2a2111b1e0aa8b2fc5a1975516bc4d58017ff96b25e1bdff3c229d5fac3bacc319dcbec29f9478f42dee597b4641504c - pointRy = 0xfa2e3d9dc84db8954ce8085ef28d7184fddfd1344b4d4797343af9b5f9d837520b450f726443e4114bd4e5bdb2f65ddd - - pointR = self.pointS.copy() - pointR.double() - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 2*0 - pai = self.pointS.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - # S + S - pointR = self.pointS.copy() - pointR += pointR - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_scalar_multiply(self): - d = 0xa4ebcae5a665983493ab3e626085a24c104311a761b5a8fdac052ed1f111a5c44f76f45659d2d111a61b5fdd97583480 - pointRx = 0xe4f77e7ffeb7f0958910e3a680d677a477191df166160ff7ef6bb5261f791aa7b45e3e653d151b95dad3d93ca0290ef2 - pointRy = 
0xac7dee41d8c5f4a7d5836960a773cfc1376289d3373f8cf7417b0c6207ac32e913856612fc9ff2e357eb2ee05cf9667f - - pointR = self.pointS * d - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 0*S - pai = self.pointS.point_at_infinity() - pointR = self.pointS * 0 - self.assertEqual(pointR, pai) - - # -1*S - self.assertRaises(ValueError, lambda: self.pointS * -1) - - def test_joing_scalar_multiply(self): - d = 0xa4ebcae5a665983493ab3e626085a24c104311a761b5a8fdac052ed1f111a5c44f76f45659d2d111a61b5fdd97583480 - e = 0xafcf88119a3a76c87acbd6008e1349b29f4ba9aa0e12ce89bcfcae2180b38d81ab8cf15095301a182afbc6893e75385d - pointRx = 0x917ea28bcd641741ae5d18c2f1bd917ba68d34f0f0577387dc81260462aea60e2417b8bdc5d954fc729d211db23a02dc - pointRy = 0x1a29f7ce6d074654d77b40888c73e92546c8f16a5ff6bcbd307f758d4aee684beff26f6742f597e2585c86da908f7186 - - pointR = self.pointS * d + self.pointT * e - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_sizes(self): - self.assertEqual(self.pointS.size_in_bits(), 384) - self.assertEqual(self.pointS.size_in_bytes(), 48) - - -class TestEccPoint_NIST_P521(unittest.TestCase): - """Tests defined in section 4.5 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" - - pointS = EccPoint( - 0x000001d5c693f66c08ed03ad0f031f937443458f601fd098d3d0227b4bf62873af50740b0bb84aa157fc847bcf8dc16a8b2b8bfd8e2d0a7d39af04b089930ef6dad5c1b4, - 0x00000144b7770963c63a39248865ff36b074151eac33549b224af5c8664c54012b818ed037b2b7c1a63ac89ebaa11e07db89fcee5b556e49764ee3fa66ea7ae61ac01823, - "p521") - - pointT = EccPoint( - 0x000000f411f2ac2eb971a267b80297ba67c322dba4bb21cec8b70073bf88fc1ca5fde3ba09e5df6d39acb2c0762c03d7bc224a3e197feaf760d6324006fe3be9a548c7d5, - 0x000001fdf842769c707c93c630df6d02eff399a06f1b36fb9684f0b373ed064889629abb92b1ae328fdb45534268384943f0e9222afe03259b32274d35d1b9584c65e305, - "p521") - - def test_curve_attribute(self): - self.assertEqual(self.pointS.curve, "NIST P-521") - - def test_set(self): - pointW = EccPoint(0, 0) - pointW.set(self.pointS) - self.assertEqual(pointW, self.pointS) - - def test_copy(self): - pointW = self.pointS.copy() - self.assertEqual(pointW, self.pointS) - pointW.set(self.pointT) - self.assertEqual(pointW, self.pointT) - self.assertNotEqual(self.pointS, self.pointT) - - def test_negate(self): - negS = -self.pointS - sum = self.pointS + negS - self.assertEqual(sum, self.pointS.point_at_infinity()) - - def test_addition(self): - pointRx = 0x000001264ae115ba9cbc2ee56e6f0059e24b52c8046321602c59a339cfb757c89a59c358a9a8e1f86d384b3f3b255ea3f73670c6dc9f45d46b6a196dc37bbe0f6b2dd9e9 - pointRy = 0x00000062a9c72b8f9f88a271690bfa017a6466c31b9cadc2fc544744aeb817072349cfddc5ad0e81b03f1897bd9c8c6efbdf68237dc3bb00445979fb373b20c9a967ac55 - - pointR = self.pointS + self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS + pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai + self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai + pai - self.assertEqual(pointR, pai) - - def test_inplace_addition(self): - pointRx = 0x000001264ae115ba9cbc2ee56e6f0059e24b52c8046321602c59a339cfb757c89a59c358a9a8e1f86d384b3f3b255ea3f73670c6dc9f45d46b6a196dc37bbe0f6b2dd9e9 - pointRy = 0x00000062a9c72b8f9f88a271690bfa017a6466c31b9cadc2fc544744aeb817072349cfddc5ad0e81b03f1897bd9c8c6efbdf68237dc3bb00445979fb373b20c9a967ac55 - - pointR = self.pointS.copy() - 
pointR += self.pointT - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - pai = pointR.point_at_infinity() - - # S + 0 - pointR = self.pointS.copy() - pointR += pai - self.assertEqual(pointR, self.pointS) - - # 0 + S - pointR = pai.copy() - pointR += self.pointS - self.assertEqual(pointR, self.pointS) - - # 0 + 0 - pointR = pai.copy() - pointR += pai - self.assertEqual(pointR, pai) - - def test_doubling(self): - pointRx = 0x0000012879442f2450c119e7119a5f738be1f1eba9e9d7c6cf41b325d9ce6d643106e9d61124a91a96bcf201305a9dee55fa79136dc700831e54c3ca4ff2646bd3c36bc6 - pointRy = 0x0000019864a8b8855c2479cbefe375ae553e2393271ed36fadfc4494fc0583f6bd03598896f39854abeae5f9a6515a021e2c0eef139e71de610143f53382f4104dccb543 - - pointR = self.pointS.copy() - pointR.double() - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 2*0 - pai = self.pointS.point_at_infinity() - pointR = pai.copy() - pointR.double() - self.assertEqual(pointR, pai) - - # S + S - pointR = self.pointS.copy() - pointR += pointR - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_scalar_multiply(self): - d = 0x000001eb7f81785c9629f136a7e8f8c674957109735554111a2a866fa5a166699419bfa9936c78b62653964df0d6da940a695c7294d41b2d6600de6dfcf0edcfc89fdcb1 - pointRx = 0x00000091b15d09d0ca0353f8f96b93cdb13497b0a4bb582ae9ebefa35eee61bf7b7d041b8ec34c6c00c0c0671c4ae063318fb75be87af4fe859608c95f0ab4774f8c95bb - pointRy = 0x00000130f8f8b5e1abb4dd94f6baaf654a2d5810411e77b7423965e0c7fd79ec1ae563c207bd255ee9828eb7a03fed565240d2cc80ddd2cecbb2eb50f0951f75ad87977f - - pointR = self.pointS * d - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - # 0*S - pai = self.pointS.point_at_infinity() - pointR = self.pointS * 0 - self.assertEqual(pointR, pai) - - # -1*S - self.assertRaises(ValueError, lambda: self.pointS * -1) - - def test_joing_scalar_multiply(self): - d = 0x000001eb7f81785c9629f136a7e8f8c674957109735554111a2a866fa5a166699419bfa9936c78b62653964df0d6da940a695c7294d41b2d6600de6dfcf0edcfc89fdcb1 - e = 0x00000137e6b73d38f153c3a7575615812608f2bab3229c92e21c0d1c83cfad9261dbb17bb77a63682000031b9122c2f0cdab2af72314be95254de4291a8f85f7c70412e3 - pointRx = 0x0000009d3802642b3bea152beb9e05fba247790f7fc168072d363340133402f2585588dc1385d40ebcb8552f8db02b23d687cae46185b27528adb1bf9729716e4eba653d - pointRy = 0x0000000fe44344e79da6f49d87c1063744e5957d9ac0a505bafa8281c9ce9ff25ad53f8da084a2deb0923e46501de5797850c61b229023dd9cf7fc7f04cd35ebb026d89d - - pointR = self.pointS * d - pointR += self.pointT * e - self.assertEqual(pointR.x, pointRx) - self.assertEqual(pointR.y, pointRy) - - def test_sizes(self): - self.assertEqual(self.pointS.size_in_bits(), 521) - self.assertEqual(self.pointS.size_in_bytes(), 66) - - -class TestEccPoint_PAI_P192(unittest.TestCase): - """Test vectors from http://point-at-infinity.org/ecc/nisttv""" - - curve = _curves['p192'] - pointG = EccPoint(curve.Gx, curve.Gy, "p192") - - -tv_pai = load_test_vectors(("PublicKey", "ECC"), - "point-at-infinity.org-P192.txt", - "P-192 tests from point-at-infinity.org", - {"k": lambda k: int(k), - "x": lambda x: int(x, 16), - "y": lambda y: int(y, 16)}) or [] -for tv in tv_pai: - def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): - result = self.pointG * scalar - self.assertEqual(result.x, x) - self.assertEqual(result.y, y) - setattr(TestEccPoint_PAI_P192, "test_%d" % tv.count, new_test) - - -class TestEccPoint_PAI_P224(unittest.TestCase): - """Test vectors from 
http://point-at-infinity.org/ecc/nisttv""" - - curve = _curves['p224'] - pointG = EccPoint(curve.Gx, curve.Gy, "p224") - - -tv_pai = load_test_vectors(("PublicKey", "ECC"), - "point-at-infinity.org-P224.txt", - "P-224 tests from point-at-infinity.org", - {"k": lambda k: int(k), - "x": lambda x: int(x, 16), - "y": lambda y: int(y, 16)}) or [] -for tv in tv_pai: - def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): - result = self.pointG * scalar - self.assertEqual(result.x, x) - self.assertEqual(result.y, y) - setattr(TestEccPoint_PAI_P224, "test_%d" % tv.count, new_test) - - -class TestEccPoint_PAI_P256(unittest.TestCase): - """Test vectors from http://point-at-infinity.org/ecc/nisttv""" - - curve = _curves['p256'] - pointG = EccPoint(curve.Gx, curve.Gy, "p256") - - -tv_pai = load_test_vectors(("PublicKey", "ECC"), - "point-at-infinity.org-P256.txt", - "P-256 tests from point-at-infinity.org", - {"k": lambda k: int(k), - "x": lambda x: int(x, 16), - "y": lambda y: int(y, 16)}) or [] -for tv in tv_pai: - def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): - result = self.pointG * scalar - self.assertEqual(result.x, x) - self.assertEqual(result.y, y) - setattr(TestEccPoint_PAI_P256, "test_%d" % tv.count, new_test) - - -class TestEccPoint_PAI_P384(unittest.TestCase): - """Test vectors from http://point-at-infinity.org/ecc/nisttv""" - - curve = _curves['p384'] - pointG = EccPoint(curve.Gx, curve.Gy, "p384") - - -tv_pai = load_test_vectors(("PublicKey", "ECC"), - "point-at-infinity.org-P384.txt", - "P-384 tests from point-at-infinity.org", - {"k": lambda k: int(k), - "x": lambda x: int(x, 16), - "y": lambda y: int(y, 16)}) or [] -for tv in tv_pai: - def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): - result = self.pointG * scalar - self.assertEqual(result.x, x) - self.assertEqual(result.y, y) - setattr(TestEccPoint_PAI_P384, "test_%d" % tv.count, new_test) - - -class TestEccPoint_PAI_P521(unittest.TestCase): - """Test vectors from http://point-at-infinity.org/ecc/nisttv""" - - curve = _curves['p521'] - pointG = EccPoint(curve.Gx, curve.Gy, "p521") - - -tv_pai = load_test_vectors(("PublicKey", "ECC"), - "point-at-infinity.org-P521.txt", - "P-521 tests from point-at-infinity.org", - {"k": lambda k: int(k), - "x": lambda x: int(x, 16), - "y": lambda y: int(y, 16)}) or [] -for tv in tv_pai: - def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): - result = self.pointG * scalar - self.assertEqual(result.x, x) - self.assertEqual(result.y, y) - setattr(TestEccPoint_PAI_P521, "test_%d" % tv.count, new_test) - - -class TestEccKey_P192(unittest.TestCase): - - def test_private_key(self): - - key = EccKey(curve="P-192", d=1) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, _curves['p192'].Gx) - self.assertEqual(key.pointQ.y, _curves['p192'].Gy) - - point = EccPoint(_curves['p192'].Gx, _curves['p192'].Gy, curve='P-192') - key = EccKey(curve="P-192", d=1, point=point) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="secp192r1", d=1) - key = EccKey(curve="prime192v1", d=1) - - def test_public_key(self): - - point = EccPoint(_curves['p192'].Gx, _curves['p192'].Gy, curve='P-192') - key = EccKey(curve="P-192", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - - priv_key = EccKey(curve="P-192", d=3) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - 
self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_curve(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-193", d=1)) - - def test_invalid_d(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-192", d=0)) - self.assertRaises(ValueError, lambda: EccKey(curve="P-192", - d=_curves['p192'].order)) - - def test_equality(self): - - private_key = ECC.construct(d=3, curve="P-192") - private_key2 = ECC.construct(d=3, curve="P-192") - private_key3 = ECC.construct(d=4, curve="P-192") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='p192') - self.assertIn("curve='NIST P-192'", repr(key)) - self.assertEqual(key.curve, 'NIST P-192') - self.assertEqual(key.public_key().curve, 'NIST P-192') - - -class TestEccKey_P224(unittest.TestCase): - - def test_private_key(self): - - key = EccKey(curve="P-224", d=1) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, _curves['p224'].Gx) - self.assertEqual(key.pointQ.y, _curves['p224'].Gy) - - point = EccPoint(_curves['p224'].Gx, _curves['p224'].Gy, curve='P-224') - key = EccKey(curve="P-224", d=1, point=point) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="secp224r1", d=1) - key = EccKey(curve="prime224v1", d=1) - - def test_public_key(self): - - point = EccPoint(_curves['p224'].Gx, _curves['p224'].Gy, curve='P-224') - key = EccKey(curve="P-224", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - - priv_key = EccKey(curve="P-224", d=3) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_curve(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-225", d=1)) - - def test_invalid_d(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-224", d=0)) - self.assertRaises(ValueError, lambda: EccKey(curve="P-224", - d=_curves['p224'].order)) - - def test_equality(self): - - private_key = ECC.construct(d=3, curve="P-224") - private_key2 = ECC.construct(d=3, curve="P-224") - private_key3 = ECC.construct(d=4, curve="P-224") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='p224') - self.assertIn("curve='NIST P-224'", repr(key)) - self.assertEqual(key.curve, 'NIST P-224') - self.assertEqual(key.public_key().curve, 'NIST P-224') - - -class TestEccKey_P256(unittest.TestCase): - - def test_private_key(self): - - key = EccKey(curve="P-256", d=1) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, _curves['p256'].Gx) - self.assertEqual(key.pointQ.y, _curves['p256'].Gy) - - point = EccPoint(_curves['p256'].Gx, 
_curves['p256'].Gy) - key = EccKey(curve="P-256", d=1, point=point) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="secp256r1", d=1) - key = EccKey(curve="prime256v1", d=1) - - # Must not accept d parameter - self.assertRaises(ValueError, EccKey, curve="p256", seed=b'H'*32) - - def test_public_key(self): - - point = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy) - key = EccKey(curve="P-256", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - - priv_key = EccKey(curve="P-256", d=3) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_curve(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-257", d=1)) - - def test_invalid_d(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-256", d=0)) - self.assertRaises(ValueError, lambda: EccKey(curve="P-256", d=_curves['p256'].order)) - - def test_equality(self): - - private_key = ECC.construct(d=3, curve="P-256") - private_key2 = ECC.construct(d=3, curve="P-256") - private_key3 = ECC.construct(d=4, curve="P-256") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='p256') - self.assertIn("curve='NIST P-256'", repr(key)) - self.assertEqual(key.curve, 'NIST P-256') - self.assertEqual(key.public_key().curve, 'NIST P-256') - - -class TestEccKey_P384(unittest.TestCase): - - def test_private_key(self): - - p384 = _curves['p384'] - - key = EccKey(curve="P-384", d=1) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, p384.Gx) - self.assertEqual(key.pointQ.y, p384.Gy) - - point = EccPoint(p384.Gx, p384.Gy, "p384") - key = EccKey(curve="P-384", d=1, point=point) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="p384", d=1) - key = EccKey(curve="secp384r1", d=1) - key = EccKey(curve="prime384v1", d=1) - - def test_public_key(self): - - p384 = _curves['p384'] - point = EccPoint(p384.Gx, p384.Gy, 'p384') - key = EccKey(curve="P-384", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - - priv_key = EccKey(curve="P-384", d=3) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_curve(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-385", d=1)) - - def test_invalid_d(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-384", d=0)) - self.assertRaises(ValueError, lambda: EccKey(curve="P-384", - d=_curves['p384'].order)) - - def test_equality(self): - - private_key = ECC.construct(d=3, curve="P-384") - private_key2 = ECC.construct(d=3, curve="P-384") - private_key3 = ECC.construct(d=4, curve="P-384") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - 
self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='p384') - self.assertIn("curve='NIST P-384'", repr(key)) - self.assertEqual(key.curve, 'NIST P-384') - self.assertEqual(key.public_key().curve, 'NIST P-384') - - -class TestEccKey_P521(unittest.TestCase): - - def test_private_key(self): - - p521 = _curves['p521'] - - key = EccKey(curve="P-521", d=1) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ.x, p521.Gx) - self.assertEqual(key.pointQ.y, p521.Gy) - - point = EccPoint(p521.Gx, p521.Gy, "p521") - key = EccKey(curve="P-521", d=1, point=point) - self.assertEqual(key.d, 1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, point) - - # Other names - key = EccKey(curve="p521", d=1) - key = EccKey(curve="secp521r1", d=1) - key = EccKey(curve="prime521v1", d=1) - - def test_public_key(self): - - p521 = _curves['p521'] - point = EccPoint(p521.Gx, p521.Gy, 'p521') - key = EccKey(curve="P-521", point=point) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, point) - - def test_public_key_derived(self): - - priv_key = EccKey(curve="P-521", d=3) - pub_key = priv_key.public_key() - self.assertFalse(pub_key.has_private()) - self.assertEqual(priv_key.pointQ, pub_key.pointQ) - - def test_invalid_curve(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-522", d=1)) - - def test_invalid_d(self): - self.assertRaises(ValueError, lambda: EccKey(curve="P-521", d=0)) - self.assertRaises(ValueError, lambda: EccKey(curve="P-521", - d=_curves['p521'].order)) - - def test_equality(self): - - private_key = ECC.construct(d=3, curve="P-521") - private_key2 = ECC.construct(d=3, curve="P-521") - private_key3 = ECC.construct(d=4, curve="P-521") - - public_key = private_key.public_key() - public_key2 = private_key2.public_key() - public_key3 = private_key3.public_key() - - self.assertEqual(private_key, private_key2) - self.assertNotEqual(private_key, private_key3) - - self.assertEqual(public_key, public_key2) - self.assertNotEqual(public_key, public_key3) - - self.assertNotEqual(public_key, private_key) - - def test_name_consistency(self): - key = ECC.generate(curve='p521') - self.assertIn("curve='NIST P-521'", repr(key)) - self.assertEqual(key.curve, 'NIST P-521') - self.assertEqual(key.public_key().curve, 'NIST P-521') - - -class TestEccModule_P192(unittest.TestCase): - - def test_generate(self): - - key = ECC.generate(curve="P-192") - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, EccPoint(_curves['p192'].Gx, - _curves['p192'].Gy, - "P-192") * key.d, - "p192") - - # Other names - ECC.generate(curve="secp192r1") - ECC.generate(curve="prime192v1") - - def test_construct(self): - - key = ECC.construct(curve="P-192", d=1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, _curves['p192'].G) - - key = ECC.construct(curve="P-192", point_x=_curves['p192'].Gx, - point_y=_curves['p192'].Gy) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, _curves['p192'].G) - - # Other names - ECC.construct(curve="p192", d=1) - ECC.construct(curve="secp192r1", d=1) - ECC.construct(curve="prime192v1", d=1) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['p192'].Gx, point_y=_curves['p192'].Gy) - - self.assertRaises(ValueError, 
ECC.construct, curve="P-192", **coord) - self.assertRaises(ValueError, ECC.construct, curve="P-192", d=2, **coordG) - - -class TestEccModule_P224(unittest.TestCase): - - def test_generate(self): - - key = ECC.generate(curve="P-224") - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, EccPoint(_curves['p224'].Gx, - _curves['p224'].Gy, - "P-224") * key.d, - "p224") - - # Other names - ECC.generate(curve="secp224r1") - ECC.generate(curve="prime224v1") - - def test_construct(self): - - key = ECC.construct(curve="P-224", d=1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, _curves['p224'].G) - - key = ECC.construct(curve="P-224", point_x=_curves['p224'].Gx, - point_y=_curves['p224'].Gy) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, _curves['p224'].G) - - # Other names - ECC.construct(curve="p224", d=1) - ECC.construct(curve="secp224r1", d=1) - ECC.construct(curve="prime224v1", d=1) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['p224'].Gx, point_y=_curves['p224'].Gy) - - self.assertRaises(ValueError, ECC.construct, curve="P-224", **coord) - self.assertRaises(ValueError, ECC.construct, curve="P-224", d=2, **coordG) - - -class TestEccModule_P256(unittest.TestCase): - - def test_generate(self): - - key = ECC.generate(curve="P-256") - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, EccPoint(_curves['p256'].Gx, - _curves['p256'].Gy) * key.d, - "p256") - - # Other names - ECC.generate(curve="secp256r1") - ECC.generate(curve="prime256v1") - - def test_construct(self): - - key = ECC.construct(curve="P-256", d=1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, _curves['p256'].G) - - key = ECC.construct(curve="P-256", point_x=_curves['p256'].Gx, - point_y=_curves['p256'].Gy) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, _curves['p256'].G) - - # Other names - ECC.construct(curve="p256", d=1) - ECC.construct(curve="secp256r1", d=1) - ECC.construct(curve="prime256v1", d=1) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['p256'].Gx, point_y=_curves['p256'].Gy) - - self.assertRaises(ValueError, ECC.construct, curve="P-256", **coord) - self.assertRaises(ValueError, ECC.construct, curve="P-256", d=2, **coordG) - - -class TestEccModule_P384(unittest.TestCase): - - def test_generate(self): - - curve = _curves['p384'] - key = ECC.generate(curve="P-384") - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, EccPoint(curve.Gx, curve.Gy, "p384") * key.d) - - # Other names - ECC.generate(curve="secp384r1") - ECC.generate(curve="prime384v1") - - def test_construct(self): - - curve = _curves['p384'] - key = ECC.construct(curve="P-384", d=1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, _curves['p384'].G) - - key = ECC.construct(curve="P-384", point_x=curve.Gx, point_y=curve.Gy) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, curve.G) - - # Other names - ECC.construct(curve="p384", d=1) - ECC.construct(curve="secp384r1", d=1) - ECC.construct(curve="prime384v1", d=1) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['p384'].Gx, point_y=_curves['p384'].Gy) - - self.assertRaises(ValueError, ECC.construct, curve="P-384", **coord) - self.assertRaises(ValueError, ECC.construct, curve="P-384", d=2, **coordG) - - -class TestEccModule_P521(unittest.TestCase): - - def 
test_generate(self): - - curve = _curves['p521'] - key = ECC.generate(curve="P-521") - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, EccPoint(curve.Gx, curve.Gy, "p521") * key.d) - - # Other names - ECC.generate(curve="secp521r1") - ECC.generate(curve="prime521v1") - - def test_construct(self): - - curve = _curves['p521'] - key = ECC.construct(curve="P-521", d=1) - self.assertTrue(key.has_private()) - self.assertEqual(key.pointQ, _curves['p521'].G) - - key = ECC.construct(curve="P-521", point_x=curve.Gx, point_y=curve.Gy) - self.assertFalse(key.has_private()) - self.assertEqual(key.pointQ, curve.G) - - # Other names - ECC.construct(curve="p521", d=1) - ECC.construct(curve="secp521r1", d=1) - ECC.construct(curve="prime521v1", d=1) - - def test_negative_construct(self): - coord = dict(point_x=10, point_y=4) - coordG = dict(point_x=_curves['p521'].Gx, point_y=_curves['p521'].Gy) - - self.assertRaises(ValueError, ECC.construct, curve="P-521", **coord) - self.assertRaises(ValueError, ECC.construct, curve="P-521", d=2, **coordG) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestEccPoint) - tests += list_test_cases(TestEccPoint_NIST_P192) - tests += list_test_cases(TestEccPoint_NIST_P224) - tests += list_test_cases(TestEccPoint_NIST_P256) - tests += list_test_cases(TestEccPoint_NIST_P384) - tests += list_test_cases(TestEccPoint_NIST_P521) - tests += list_test_cases(TestEccPoint_PAI_P192) - tests += list_test_cases(TestEccPoint_PAI_P224) - tests += list_test_cases(TestEccPoint_PAI_P256) - tests += list_test_cases(TestEccPoint_PAI_P384) - tests += list_test_cases(TestEccPoint_PAI_P521) - tests += list_test_cases(TestEccKey_P192) - tests += list_test_cases(TestEccKey_P224) - tests += list_test_cases(TestEccKey_P256) - tests += list_test_cases(TestEccKey_P384) - tests += list_test_cases(TestEccKey_P521) - tests += list_test_cases(TestEccModule_P192) - tests += list_test_cases(TestEccModule_P224) - tests += list_test_cases(TestEccModule_P256) - tests += list_test_cases(TestEccModule_P384) - tests += list_test_cases(TestEccModule_P521) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ElGamal.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ElGamal.py deleted file mode 100644 index 67d2e0b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_ElGamal.py +++ /dev/null @@ -1,217 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Cryptodome.PublicKey.ElGamal""" - -__revision__ = "$Id$" - -import unittest -from Cryptodome.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex -from Cryptodome import Random -from Cryptodome.PublicKey import ElGamal -from Cryptodome.Util.number import bytes_to_long -from Cryptodome.Util.py3compat import * - -class ElGamalTest(unittest.TestCase): - - # - # Test vectors - # - # There seem to be no real ElGamal test vectors available in the - # public domain. The following test vectors have been generated - # with libgcrypt 1.5.0. - # - # Encryption - tve=[ - { - # 256 bits - 'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933', - 'g' :'05', - 'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF', - 'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932', - 'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1', - 'pt' :'48656C6C6F207468657265', - 'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7', - 'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68' - }, - - { - # 512 bits - 'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227', - 'g' :'07', - 'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C', - 'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68', - 'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7', - 'pt' :'48656C6C6F207468657265', - 'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B', - 'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0' - } - ] - - # Signature - tvs=[ - { - # 256 bits - 'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7', - 'g' :'05', - 'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7', - 'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB', - 'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F', - 'h' :'48656C6C6F207468657265', - 'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2', - 'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7', - }, - { - # 512 bits - 'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF', - 'g' :'0B', - 'y' :'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC', - 'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB', - 'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69', - 'h' :'48656C6C6F207468657265', - 'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F', - 'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE', - } - ] - - def 
test_generate_180(self): - self._test_random_key(180) - - def test_encryption(self): - for tv in self.tve: - d = self.convert_tv(tv, True) - key = ElGamal.construct(d['key']) - ct = key._encrypt(d['pt'], d['k']) - self.assertEqual(ct[0], d['ct1']) - self.assertEqual(ct[1], d['ct2']) - - def test_decryption(self): - for tv in self.tve: - d = self.convert_tv(tv, True) - key = ElGamal.construct(d['key']) - pt = key._decrypt((d['ct1'], d['ct2'])) - self.assertEqual(pt, d['pt']) - - def test_signing(self): - for tv in self.tvs: - d = self.convert_tv(tv, True) - key = ElGamal.construct(d['key']) - sig1, sig2 = key._sign(d['h'], d['k']) - self.assertEqual(sig1, d['sig1']) - self.assertEqual(sig2, d['sig2']) - - def test_verification(self): - for tv in self.tvs: - d = self.convert_tv(tv, True) - key = ElGamal.construct(d['key']) - # Positive test - res = key._verify( d['h'], (d['sig1'],d['sig2']) ) - self.assertTrue(res) - # Negative test - res = key._verify( d['h'], (d['sig1']+1,d['sig2']) ) - self.assertFalse(res) - - def test_bad_key3(self): - tup = tup0 = list(self.convert_tv(self.tvs[0], 1)['key'])[:3] - tup[0] += 1 # p += 1 (not prime) - self.assertRaises(ValueError, ElGamal.construct, tup) - - tup = tup0 - tup[1] = 1 # g = 1 - self.assertRaises(ValueError, ElGamal.construct, tup) - - tup = tup0 - tup[2] = tup[0]*2 # y = 2*p - self.assertRaises(ValueError, ElGamal.construct, tup) - - def test_bad_key4(self): - tup = tup0 = list(self.convert_tv(self.tvs[0], 1)['key']) - tup[3] += 1 # x += 1 - self.assertRaises(ValueError, ElGamal.construct, tup) - - def convert_tv(self, tv, as_longs=0): - """Convert a test vector from textual form (hexadecimal ascii - to either integers or byte strings.""" - key_comps = 'p','g','y','x' - tv2 = {} - for c in tv.keys(): - tv2[c] = a2b_hex(tv[c]) - if as_longs or c in key_comps or c in ('sig1','sig2'): - tv2[c] = bytes_to_long(tv2[c]) - tv2['key']=[] - for c in key_comps: - tv2['key'] += [tv2[c]] - del tv2[c] - return tv2 - - def _test_random_key(self, bits): - elgObj = ElGamal.generate(bits, Random.new().read) - self._check_private_key(elgObj) - self._exercise_primitive(elgObj) - pub = elgObj.publickey() - self._check_public_key(pub) - self._exercise_public_primitive(elgObj) - - def _check_private_key(self, elgObj): - - # Check capabilities - self.assertTrue(elgObj.has_private()) - - # Sanity check key data - self.assertTrue(1 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.PublicKey.RSA""" - -__revision__ = "$Id$" - -import os -import pickle -from pickle import PicklingError -from Cryptodome.Util.py3compat import * - -import unittest -from Cryptodome.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex - -class RSATest(unittest.TestCase): - # Test vectors from "RSA-OAEP and RSA-PSS test vectors (.zip file)" - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # See RSADSI's PKCS#1 page at - # http://www.rsa.com/rsalabs/node.asp?id=2125 - - # from oaep-int.txt - - # TODO: PyCryptodome treats the message as starting *after* the leading "00" - # TODO: That behaviour should probably be changed in the future. - plaintext = """ - eb 7a 19 ac e9 e3 00 63 50 e3 29 50 4b 45 e2 - ca 82 31 0b 26 dc d8 7d 5c 68 f1 ee a8 f5 52 67 - c3 1b 2e 8b b4 25 1f 84 d7 e0 b2 c0 46 26 f5 af - f9 3e dc fb 25 c9 c2 b3 ff 8a e1 0e 83 9a 2d db - 4c dc fe 4f f4 77 28 b4 a1 b7 c1 36 2b aa d2 9a - b4 8d 28 69 d5 02 41 21 43 58 11 59 1b e3 92 f9 - 82 fb 3e 87 d0 95 ae b4 04 48 db 97 2f 3a c1 4f - 7b c2 75 19 52 81 ce 32 d2 f1 b7 6d 4d 35 3e 2d - """ - - ciphertext = """ - 12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 - 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 - 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 - 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb - 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 - 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 - da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d - 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55 - """ - - modulus = """ - bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 - 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f - b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 - 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f - af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 - ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e - e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f - e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb - """ - - e = 0x11 # public exponent - - prime_factor = """ - c9 7f b1 f0 27 f4 53 f6 34 12 33 ea aa d1 d9 35 - 3f 6c 42 d0 88 66 b1 d0 5a 0f 20 35 02 8b 9d 86 - 98 40 b4 16 66 b4 2e 92 ea 0d a3 b4 32 04 b5 cf - ce 33 52 52 4d 04 16 a5 a4 41 e7 00 af 46 15 03 - """ - - def setUp(self): - global RSA, Random, bytes_to_long - from Cryptodome.PublicKey import RSA - from Cryptodome import Random - from Cryptodome.Util.number import bytes_to_long, inverse - self.n = bytes_to_long(a2b_hex(self.modulus)) - self.p = bytes_to_long(a2b_hex(self.prime_factor)) - - # Compute q, d, and u from n, e, and p - self.q = self.n // self.p - self.d = inverse(self.e, (self.p-1)*(self.q-1)) - self.u = inverse(self.p, self.q) # u = e**-1 (mod q) - - self.rsa = RSA - - def test_generate_1arg(self): - """RSA (default implementation) generated key (1 argument)""" - rsaObj = self.rsa.generate(1024) - self._check_private_key(rsaObj) - self._exercise_primitive(rsaObj) - pub = rsaObj.public_key() - self._check_public_key(pub) - self._exercise_public_primitive(rsaObj) - - def test_generate_2arg(self): - """RSA (default implementation) generated key (2 arguments)""" - rsaObj = self.rsa.generate(1024, Random.new().read) - self._check_private_key(rsaObj) - self._exercise_primitive(rsaObj) - pub = rsaObj.public_key() - self._check_public_key(pub) - self._exercise_public_primitive(rsaObj) - - def test_generate_3args(self): - rsaObj = self.rsa.generate(1024, Random.new().read,e=65537) - self._check_private_key(rsaObj) - 
self._exercise_primitive(rsaObj) - pub = rsaObj.public_key() - self._check_public_key(pub) - self._exercise_public_primitive(rsaObj) - self.assertEqual(65537,rsaObj.e) - - def test_construct_2tuple(self): - """RSA (default implementation) constructed key (2-tuple)""" - pub = self.rsa.construct((self.n, self.e)) - self._check_public_key(pub) - self._check_encryption(pub) - - def test_construct_3tuple(self): - """RSA (default implementation) constructed key (3-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d)) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - - def test_construct_4tuple(self): - """RSA (default implementation) constructed key (4-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p)) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - - def test_construct_5tuple(self): - """RSA (default implementation) constructed key (5-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) - self._check_private_key(rsaObj) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - - def test_construct_6tuple(self): - """RSA (default implementation) constructed key (6-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q, self.u)) - self._check_private_key(rsaObj) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - - def test_construct_bad_key2(self): - tup = (self.n, 1) - self.assertRaises(ValueError, self.rsa.construct, tup) - - # An even modulus is wrong - tup = (self.n+1, self.e) - self.assertRaises(ValueError, self.rsa.construct, tup) - - def test_construct_bad_key3(self): - tup = (self.n, self.e, self.d+1) - self.assertRaises(ValueError, self.rsa.construct, tup) - - def test_construct_bad_key5(self): - tup = (self.n, self.e, self.d, self.p, self.p) - self.assertRaises(ValueError, self.rsa.construct, tup) - - tup = (self.p*self.p, self.e, self.p, self.p) - self.assertRaises(ValueError, self.rsa.construct, tup) - - tup = (self.p*self.p, 3, self.p, self.q) - self.assertRaises(ValueError, self.rsa.construct, tup) - - def test_construct_bad_key6(self): - tup = (self.n, self.e, self.d, self.p, self.q, 10) - self.assertRaises(ValueError, self.rsa.construct, tup) - - from Cryptodome.Util.number import inverse - tup = (self.n, self.e, self.d, self.p, self.q, inverse(self.q, self.p)) - self.assertRaises(ValueError, self.rsa.construct, tup) - - def test_factoring(self): - rsaObj = self.rsa.construct([self.n, self.e, self.d]) - self.assertTrue(rsaObj.p==self.p or rsaObj.p==self.q) - self.assertTrue(rsaObj.q==self.p or rsaObj.q==self.q) - self.assertTrue(rsaObj.q*rsaObj.p == self.n) - - self.assertRaises(ValueError, self.rsa.construct, [self.n, self.e, self.n-1]) - - def test_repr(self): - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) - repr(rsaObj) - - def test_serialization(self): - """RSA keys are unpickable""" - - rsa_key = self.rsa.generate(1024) - self.assertRaises(PicklingError, pickle.dumps, rsa_key) - - def test_raw_rsa_boundary(self): - # The argument of every RSA raw operation (encrypt/decrypt) must be - # non-negative and no larger than the modulus - rsa_obj = self.rsa.generate(1024) - - self.assertRaises(ValueError, rsa_obj._decrypt, rsa_obj.n) - self.assertRaises(ValueError, rsa_obj._decrypt_to_bytes, rsa_obj.n) - self.assertRaises(ValueError, rsa_obj._encrypt, rsa_obj.n) - - self.assertRaises(ValueError, rsa_obj._decrypt, -1) - self.assertRaises(ValueError, rsa_obj._decrypt_to_bytes, -1) - 
self.assertRaises(ValueError, rsa_obj._encrypt, -1) - - def test_size(self): - pub = self.rsa.construct((self.n, self.e)) - self.assertEqual(pub.size_in_bits(), 1024) - self.assertEqual(pub.size_in_bytes(), 128) - - def _check_private_key(self, rsaObj): - from Cryptodome.Math.Numbers import Integer - - # Check capabilities - self.assertEqual(1, rsaObj.has_private()) - - # Sanity check key data - self.assertEqual(rsaObj.n, rsaObj.p * rsaObj.q) # n = pq - lcm = int(Integer(rsaObj.p-1).lcm(rsaObj.q-1)) - self.assertEqual(1, rsaObj.d * rsaObj.e % lcm) # ed = 1 (mod LCM(p-1, q-1)) - self.assertEqual(1, rsaObj.p * rsaObj.u % rsaObj.q) # pu = 1 (mod q) - self.assertEqual(1, rsaObj.p > 1) # p > 1 - self.assertEqual(1, rsaObj.q > 1) # q > 1 - self.assertEqual(1, rsaObj.e > 1) # e > 1 - self.assertEqual(1, rsaObj.d > 1) # d > 1 - - self.assertEqual(rsaObj.u, rsaObj.invp) - self.assertEqual(1, rsaObj.q * rsaObj.invq % rsaObj.p) - - def _check_public_key(self, rsaObj): - ciphertext = a2b_hex(self.ciphertext) - - # Check capabilities - self.assertEqual(0, rsaObj.has_private()) - - # Check rsaObj.[ne] -> rsaObj.[ne] mapping - self.assertEqual(rsaObj.n, rsaObj.n) - self.assertEqual(rsaObj.e, rsaObj.e) - - # Check that private parameters are all missing - self.assertEqual(0, hasattr(rsaObj, 'd')) - self.assertEqual(0, hasattr(rsaObj, 'p')) - self.assertEqual(0, hasattr(rsaObj, 'q')) - self.assertEqual(0, hasattr(rsaObj, 'u')) - - # Sanity check key data - self.assertEqual(1, rsaObj.e > 1) # e > 1 - - # Public keys should not be able to sign or decrypt - self.assertRaises(TypeError, rsaObj._decrypt, - bytes_to_long(ciphertext)) - self.assertRaises(TypeError, rsaObj._decrypt_to_bytes, - bytes_to_long(ciphertext)) - - # Check __eq__ and __ne__ - self.assertEqual(rsaObj.public_key() == rsaObj.public_key(),True) # assert_ - self.assertEqual(rsaObj.public_key() != rsaObj.public_key(),False) # assertFalse - - self.assertEqual(rsaObj.publickey(), rsaObj.public_key()) - - def _exercise_primitive(self, rsaObj): - # Since we're using a randomly-generated key, we can't check the test - # vector, but we can make sure encryption and decryption are inverse - # operations. 
- ciphertext = bytes_to_long(a2b_hex(self.ciphertext)) - - # Test decryption - plaintext = rsaObj._decrypt(ciphertext) - - # Test encryption (2 arguments) - new_ciphertext2 = rsaObj._encrypt(plaintext) - self.assertEqual(ciphertext, new_ciphertext2) - - def _exercise_public_primitive(self, rsaObj): - plaintext = a2b_hex(self.plaintext) - - # Test encryption (2 arguments) - new_ciphertext2 = rsaObj._encrypt(bytes_to_long(plaintext)) - - def _check_encryption(self, rsaObj): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - - # Test encryption - new_ciphertext2 = rsaObj._encrypt(bytes_to_long(plaintext)) - self.assertEqual(bytes_to_long(ciphertext), new_ciphertext2) - - def _check_decryption(self, rsaObj): - plaintext = bytes_to_long(a2b_hex(self.plaintext)) - ciphertext = bytes_to_long(a2b_hex(self.ciphertext)) - - # Test plain decryption - new_plaintext = rsaObj._decrypt(ciphertext) - self.assertEqual(plaintext, new_plaintext) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(RSATest) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_Curve25519.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_Curve25519.py deleted file mode 100644 index 2401eac..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_Curve25519.py +++ /dev/null @@ -1,385 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2024, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import os -import errno -import warnings -import unittest -from binascii import unhexlify -from unittest import SkipTest - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tostr, FileNotFoundError -from Cryptodome.Util.asn1 import DerSequence, DerBitString -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.PublicKey import ECC - -try: - import pycryptodome_test_vectors # type: ignore - test_vectors_available = True -except ImportError: - test_vectors_available = False - - -def load_file(file_name, mode="rb"): - results = None - - try: - if not test_vectors_available: - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), - file_name) - - dir_comps = ("PublicKey", "ECC") - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - with open(full_file_name, mode) as file_in: - results = file_in.read() - - except FileNotFoundError: - warnings.warn("Warning: skipping extended tests for ECC", - UserWarning, - stacklevel=2) - - if results is None: - raise SkipTest("Missing %s" % file_name) - - return results - - -def compact(lines): - ext = b"".join(lines) - return unhexlify(tostr(ext).replace(" ", "").replace(":", "")) - - -def create_ref_keys_x25519(): - key_lines = load_file("ecc_x25519.txt").splitlines() - seed = compact(key_lines[5:8]) - key = ECC.construct(curve="Curve25519", seed=seed) - return (key, key.public_key()) - - -def get_fixed_prng(): - return SHAKE128.new().update(b"SEED").read - - -def extract_bitstring_from_spki(data): - seq = DerSequence() - seq.decode(data) - bs = DerBitString() - bs.decode(seq[1]) - return bs.value - - -class TestImport(unittest.TestCase): - - def test_empty(self): - self.assertRaises(ValueError, ECC.import_key, b"") - - def test_mismatch(self): - # Private key with X448 Object ID but X25519 key - mismatch_hex = "302e020100300506032b656f042204207009906b64ec727d5cb5c23007bf0425b3fd79014c6cd62ca3dddfcf0f278f79" - mismatch = unhexlify(mismatch_hex) - self.assertRaises(ValueError, ECC.import_key, mismatch) - - -class TestImport_Curve25519(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_Curve25519, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_x25519() - - def test_import_public_der(self): - key_file = load_file("ecc_x25519_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_pkcs8_der(self): - key_file = load_file("ecc_x25519_private.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_x25519_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_x25519_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = 
load_file("ecc_x25519_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_x25519_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_x25519_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_x25519_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256": - key_file = load_file("ecc_x25519_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_x25519_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - -class TestExport_Curve25519(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_Curve25519, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_x25519() - - def test_export_public_der(self): - key_file = load_file("ecc_x25519_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(True) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_x25519_private.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - self.assertRaises(ValueError, self.ref_private.export_key, - format="DER", use_pkcs8=False) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA256AndAES128-CBC", - prot_params={'iteration_count': 123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem(self): - key_file_ref = load_file("ecc_x25519_public.pem", "rt").strip() - key_file = self.ref_public.export_key(format="PEM").strip() - self.assertEqual(key_file_ref, key_file) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_x25519_private.pem", "rt").strip() - encoded = self.ref_private.export_key(format="PEM").strip() - 
self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_raw(self): - encoded = self.ref_public.export_key(format='raw') - self.assertEqual(len(encoded), 32) - self.assertEqual(encoded, unhexlify(b'ff7561ef60c9c8a757f6d6372ec14142c9be208d0e719136d8d3c715dfcf7e15')) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.assertRaises(ValueError, - self.ref_private.export_key, - format="PEM", - passphrase="secret") - - # Empty password - self.assertRaises(ValueError, - self.ref_private.export_key, - format="PEM", - passphrase="", - use_pkcs8=False) - self.assertRaises(ValueError, - self.ref_private.export_key, - format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, - self.ref_private.export_key, - format="OpenSSH", - passphrase="secret") - - -class TestImport_Curve25519_Weak(unittest.TestCase): - - def test_weak_pem(self): - - p = 2**255 - 19 - weak_x = (0, - 1, - 325606250916557431795983626356110631294008115727848805560023387167927233504, - 39382357235489614581723060781553021112529911719440698176882885853963445705823, - p - 1, - p, - p + 1, - p + 325606250916557431795983626356110631294008115727848805560023387167927233504, - p + 39382357235489614581723060781553021112529911719440698176882885853963445705823, - p * 2 - 1, - p * 2, - p * 2 + 1) - - for x in weak_x: - low_order_point = ECC.EccXPoint(x, "curve25519") - weak_key = ECC.EccKey(point=low_order_point, curve="curve25519") - encoded = weak_key.export_key(format="PEM") - - self.assertRaises(ValueError, - ECC.import_key, - encoded) - - -def get_tests(config={}): - tests = [] - try: - tests += list_test_cases(TestImport) - tests += list_test_cases(TestImport_Curve25519) - tests += list_test_cases(TestExport_Curve25519) - tests += list_test_cases(TestImport_Curve25519_Weak) - except SkipTest: - pass - return tests - - -if __name__ == '__main__': - def suit(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_Curve448.py 
b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_Curve448.py deleted file mode 100644 index e1b343b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_Curve448.py +++ /dev/null @@ -1,351 +0,0 @@ -# This file is licensed under the BSD 2-Clause License. -# See https://opensource.org/licenses/BSD-2-Clause for details. - -import os -import errno -import warnings -import unittest -from binascii import unhexlify -from unittest import SkipTest - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import tostr, FileNotFoundError -from Cryptodome.Util.asn1 import DerSequence, DerBitString -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.PublicKey import ECC - -try: - import pycryptodome_test_vectors # type: ignore - test_vectors_available = True -except ImportError: - test_vectors_available = False - - -def load_file(file_name, mode="rb"): - results = None - - try: - if not test_vectors_available: - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), - file_name) - - dir_comps = ("PublicKey", "ECC") - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - with open(full_file_name, mode) as file_in: - results = file_in.read() - - except FileNotFoundError: - warnings.warn("Warning: skipping extended tests for ECC", - UserWarning, - stacklevel=2) - - if results is None: - raise SkipTest("Missing %s" % file_name) - - return results - - -def compact(lines): - ext = b"".join(lines) - return unhexlify(tostr(ext).replace(" ", "").replace(":", "")) - - -def create_ref_keys_x448(): - key_lines = load_file("ecc_x448.txt").splitlines() - seed = compact(key_lines[6:10]) - key = ECC.construct(curve="Curve448", seed=seed) - return (key, key.public_key()) - - -def get_fixed_prng(): - return SHAKE128.new().update(b"SEED").read - - -def extract_bitstring_from_spki(data): - seq = DerSequence() - seq.decode(data) - bs = DerBitString() - bs.decode(seq[1]) - return bs.value - - -class TestImport(unittest.TestCase): - - def test_empty(self): - self.assertRaises(ValueError, ECC.import_key, b"") - - def test_mismatch(self): - # Private key with X448 Object ID but X448 key - mismatch_hex = "302e020100300506032b656f042204207009906b64ec727d5cb5c23007bf0425b3fd79014c6cd62ca3dddfcf0f278f79" - mismatch = unhexlify(mismatch_hex) - self.assertRaises(ValueError, ECC.import_key, mismatch) - - -class TestImport_Curve448(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_Curve448, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_x448() - - def test_import_public_der(self): - key_file = load_file("ecc_x448_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_pkcs8_der(self): - key_file = load_file("ecc_x448_private.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_x448_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, 
key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_x448_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_x448_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_x448_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_x448_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_x448_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256": - key_file = load_file("ecc_x448_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_x448_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - -class TestExport_Curve448(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_Curve448, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_x448() - - def test_export_public_der(self): - key_file = load_file("ecc_x448_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(True) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_x448_private.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - self.assertRaises(ValueError, self.ref_private.export_key, - format="DER", use_pkcs8=False) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA256AndAES128-CBC", - prot_params={'iteration_count': 123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem(self): - key_file_ref = load_file("ecc_x448_public.pem", "rt").strip() - key_file = 
self.ref_public.export_key(format="PEM").strip() - self.assertEqual(key_file_ref, key_file) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_x448_private.pem", "rt").strip() - encoded = self.ref_private.export_key(format="PEM").strip() - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_raw(self): - encoded = self.ref_public.export_key(format='raw') - self.assertEqual(len(encoded), 56) - self.assertEqual(encoded, unhexlify(b'e2abae24ab8f65b01969e61f84fee615b525f413a90e3d727f71d0ffe60fb1d0a1a0285f2a7fd88789206e0aa4f3e9fcb9e4ba5d644e691e')) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.assertRaises(ValueError, - self.ref_private.export_key, - format="PEM", - passphrase="secret") - - # Empty password - self.assertRaises(ValueError, - self.ref_private.export_key, - format="PEM", - passphrase="", - use_pkcs8=False) - self.assertRaises(ValueError, - self.ref_private.export_key, - format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, - self.ref_private.export_key, - format="OpenSSH", - passphrase="secret") - - -class TestImport_Curve448_Weak(unittest.TestCase): - - def test_weak_pem(self): - - p = 2**448 - 2**224 - 1 - weak_x = (0, - 1, - p - 1, - p, - p + 1) - - for x in weak_x: - low_order_point = ECC.EccXPoint(x, "curve448") - weak_key = ECC.EccKey(point=low_order_point, curve="curve448") - encoded = weak_key.export_key(format="PEM") - - self.assertRaises(ValueError, - ECC.import_key, - encoded) - - -def get_tests(config={}): - tests = [] - try: - tests += list_test_cases(TestImport) - tests += list_test_cases(TestImport_Curve448) - tests += list_test_cases(TestExport_Curve448) - tests += list_test_cases(TestImport_Curve448_Weak) - except SkipTest: - pass - return tests - - -if __name__ == '__main__': - def suit(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_DSA.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_DSA.py deleted file mode 100644 index 
5ff0113..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_DSA.py +++ /dev/null @@ -1,554 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/test_import_DSA.py: Self-test for importing DSA keys -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -import unittest -import re - -from Cryptodome.PublicKey import DSA -from Cryptodome.SelfTest.st_common import * -from Cryptodome.Util.py3compat import * - -from binascii import unhexlify - -class ImportKeyTests(unittest.TestCase): - - y = 92137165128186062214622779787483327510946462589285775188003362705875131352591574106484271700740858696583623951844732128165434284507709057439633739849986759064015013893156866539696757799934634945787496920169462601722830899660681779448742875054459716726855443681559131362852474817534616736104831095601710736729 - p = 162452170958135306109773853318304545923250830605675936228618290525164105310663722368377131295055868997377338797580997938253236213714988311430600065853662861806894003694743806769284131194035848116051021923956699231855223389086646903420682639786976554552864568460372266462812137447840653688476258666833303658691 - q = 988791743931120302950649732173330531512663554851 - g = 85583152299197514738065570254868711517748965097380456700369348466136657764813442044039878840094809620913085570225318356734366886985903212775602770761953571967834823306046501307810937486758039063386311593890777319935391363872375452381836756832784184928202587843258855704771836753434368484556809100537243908232 - x = 540873410045082450874416847965843801027716145253 - - def setUp(self): - - # It is easier to write test vectors in text form, - # and convert them to byte strigs dynamically here - for mname, mvalue in ImportKeyTests.__dict__.items(): - if mname[:4] in ('der_', 'pem_', 'ssh_'): - if mname[:4] == 'der_': - mvalue = unhexlify(tobytes(mvalue)) - mvalue = tobytes(mvalue) - setattr(self, mname, mvalue) - - # 1. 
SubjectPublicKeyInfo - der_public=\ - '308201b73082012b06072a8648ce3804013082011e02818100e756ee1717f4b6'+\ - '794c7c214724a19763742c45572b4b3f8ff3b44f3be9f44ce039a2757695ec91'+\ - '5697da74ef914fcd1b05660e2419c761d639f45d2d79b802dbd23e7ab8b81b47'+\ - '9a380e1f30932584ba2a0b955032342ebc83cb5ca906e7b0d7cd6fe656cecb4c'+\ - '8b5a77123a8c6750a481e3b06057aff6aa6eba620b832d60c3021500ad32f48c'+\ - 'd3ae0c45a198a61fa4b5e20320763b2302818079dfdc3d614fe635fceb7eaeae'+\ - '3718dc2efefb45282993ac6749dc83c223d8c1887296316b3b0b54466cf444f3'+\ - '4b82e3554d0b90a778faaf1306f025dae6a3e36c7f93dd5bac4052b92370040a'+\ - 'ca70b8d5820599711900efbc961812c355dd9beffe0981da85c5548074b41c56'+\ - 'ae43fd300d89262e4efd89943f99a651b03888038185000281810083352a69a1'+\ - '32f34843d2a0eb995bff4e2f083a73f0049d2c91ea2f0ce43d144abda48199e4'+\ - 'b003c570a8af83303d45105f606c5c48d925a40ed9c2630c2fa4cdbf838539de'+\ - 'b9a29f919085f2046369f627ca84b2cb1e2c7940564b670f963ab1164d4e2ca2'+\ - 'bf6ffd39f12f548928bf4d2d1b5e6980b4f1be4c92a91986fba559' - - def testImportKey1(self): - key_obj = DSA.importKey(self.der_public) - self.assertFalse(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - - def testExportKey1(self): - tup = (self.y, self.g, self.p, self.q) - key = DSA.construct(tup) - encoded = key.export_key('DER') - self.assertEqual(self.der_public, encoded) - - # 2. - pem_public="""\ ------BEGIN PUBLIC KEY----- -MIIBtzCCASsGByqGSM44BAEwggEeAoGBAOdW7hcX9LZ5THwhRyShl2N0LEVXK0s/ -j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4uBtH -mjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47BgV6/2 -qm66YguDLWDDAhUArTL0jNOuDEWhmKYfpLXiAyB2OyMCgYB539w9YU/mNfzrfq6u -NxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG8CXa -5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0tBxW -rkP9MA2JJi5O/YmUP5mmUbA4iAOBhQACgYEAgzUqaaEy80hD0qDrmVv/Ti8IOnPw -BJ0skeovDOQ9FEq9pIGZ5LADxXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTne -uaKfkZCF8gRjafYnyoSyyx4seUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmA -tPG+TJKpGYb7pVk= ------END PUBLIC KEY-----""" - - def testImportKey2(self): - for pem in (self.pem_public, tostr(self.pem_public)): - key_obj = DSA.importKey(pem) - self.assertFalse(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - - def testExportKey2(self): - tup = (self.y, self.g, self.p, self.q) - key = DSA.construct(tup) - encoded = key.export_key('PEM') - self.assertEqual(self.pem_public, encoded) - - # 3. 
OpenSSL/OpenSSH format - der_private=\ - '308201bb02010002818100e756ee1717f4b6794c7c214724a19763742c45572b'+\ - '4b3f8ff3b44f3be9f44ce039a2757695ec915697da74ef914fcd1b05660e2419'+\ - 'c761d639f45d2d79b802dbd23e7ab8b81b479a380e1f30932584ba2a0b955032'+\ - '342ebc83cb5ca906e7b0d7cd6fe656cecb4c8b5a77123a8c6750a481e3b06057'+\ - 'aff6aa6eba620b832d60c3021500ad32f48cd3ae0c45a198a61fa4b5e2032076'+\ - '3b2302818079dfdc3d614fe635fceb7eaeae3718dc2efefb45282993ac6749dc'+\ - '83c223d8c1887296316b3b0b54466cf444f34b82e3554d0b90a778faaf1306f0'+\ - '25dae6a3e36c7f93dd5bac4052b92370040aca70b8d5820599711900efbc9618'+\ - '12c355dd9beffe0981da85c5548074b41c56ae43fd300d89262e4efd89943f99'+\ - 'a651b038880281810083352a69a132f34843d2a0eb995bff4e2f083a73f0049d'+\ - '2c91ea2f0ce43d144abda48199e4b003c570a8af83303d45105f606c5c48d925'+\ - 'a40ed9c2630c2fa4cdbf838539deb9a29f919085f2046369f627ca84b2cb1e2c'+\ - '7940564b670f963ab1164d4e2ca2bf6ffd39f12f548928bf4d2d1b5e6980b4f1'+\ - 'be4c92a91986fba55902145ebd9a3f0b82069d98420986b314215025756065' - - def testImportKey3(self): - key_obj = DSA.importKey(self.der_private) - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - def testExportKey3(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - encoded = key.export_key('DER', pkcs8=False) - self.assertEqual(self.der_private, encoded) - - # 4. - pem_private="""\ ------BEGIN DSA PRIVATE KEY----- -MIIBuwIBAAKBgQDnVu4XF/S2eUx8IUckoZdjdCxFVytLP4/ztE876fRM4DmidXaV -7JFWl9p075FPzRsFZg4kGcdh1jn0XS15uALb0j56uLgbR5o4Dh8wkyWEuioLlVAy -NC68g8tcqQbnsNfNb+ZWzstMi1p3EjqMZ1CkgeOwYFev9qpuumILgy1gwwIVAK0y -9IzTrgxFoZimH6S14gMgdjsjAoGAed/cPWFP5jX8636urjcY3C7++0UoKZOsZ0nc -g8Ij2MGIcpYxazsLVEZs9ETzS4LjVU0LkKd4+q8TBvAl2uaj42x/k91brEBSuSNw -BArKcLjVggWZcRkA77yWGBLDVd2b7/4JgdqFxVSAdLQcVq5D/TANiSYuTv2JlD+Z -plGwOIgCgYEAgzUqaaEy80hD0qDrmVv/Ti8IOnPwBJ0skeovDOQ9FEq9pIGZ5LAD -xXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTneuaKfkZCF8gRjafYnyoSyyx4s -eUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmAtPG+TJKpGYb7pVkCFF69mj8L -ggadmEIJhrMUIVAldWBl ------END DSA PRIVATE KEY-----""" - - def testImportKey4(self): - for pem in (self.pem_private, tostr(self.pem_private)): - key_obj = DSA.importKey(pem) - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - def testExportKey4(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - encoded = key.export_key('PEM', pkcs8=False) - self.assertEqual(self.pem_private, encoded) - - # 5. 
PKCS8 (unencrypted) - der_pkcs8=\ - '3082014a0201003082012b06072a8648ce3804013082011e02818100e756ee17'+\ - '17f4b6794c7c214724a19763742c45572b4b3f8ff3b44f3be9f44ce039a27576'+\ - '95ec915697da74ef914fcd1b05660e2419c761d639f45d2d79b802dbd23e7ab8'+\ - 'b81b479a380e1f30932584ba2a0b955032342ebc83cb5ca906e7b0d7cd6fe656'+\ - 'cecb4c8b5a77123a8c6750a481e3b06057aff6aa6eba620b832d60c3021500ad'+\ - '32f48cd3ae0c45a198a61fa4b5e20320763b2302818079dfdc3d614fe635fceb'+\ - '7eaeae3718dc2efefb45282993ac6749dc83c223d8c1887296316b3b0b54466c'+\ - 'f444f34b82e3554d0b90a778faaf1306f025dae6a3e36c7f93dd5bac4052b923'+\ - '70040aca70b8d5820599711900efbc961812c355dd9beffe0981da85c5548074'+\ - 'b41c56ae43fd300d89262e4efd89943f99a651b03888041602145ebd9a3f0b82'+\ - '069d98420986b314215025756065' - - def testImportKey5(self): - key_obj = DSA.importKey(self.der_pkcs8) - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - def testExportKey5(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - encoded = key.export_key('DER') - self.assertEqual(self.der_pkcs8, encoded) - encoded = key.export_key('DER', pkcs8=True) - self.assertEqual(self.der_pkcs8, encoded) - - # 6. - pem_pkcs8="""\ ------BEGIN PRIVATE KEY----- -MIIBSgIBADCCASsGByqGSM44BAEwggEeAoGBAOdW7hcX9LZ5THwhRyShl2N0LEVX -K0s/j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4 -uBtHmjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47Bg -V6/2qm66YguDLWDDAhUArTL0jNOuDEWhmKYfpLXiAyB2OyMCgYB539w9YU/mNfzr -fq6uNxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG -8CXa5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0 -tBxWrkP9MA2JJi5O/YmUP5mmUbA4iAQWAhRevZo/C4IGnZhCCYazFCFQJXVgZQ== ------END PRIVATE KEY-----""" - - def testImportKey6(self): - for pem in (self.pem_pkcs8, tostr(self.pem_pkcs8)): - key_obj = DSA.importKey(pem) - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - def testExportKey6(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - encoded = key.export_key('PEM') - self.assertEqual(self.pem_pkcs8, encoded) - encoded = key.export_key('PEM', pkcs8=True) - self.assertEqual(self.pem_pkcs8, encoded) - - # 7. 
OpenSSH/RFC4253 - ssh_pub="""ssh-dss AAAAB3NzaC1kc3MAAACBAOdW7hcX9LZ5THwhRyShl2N0LEVXK0s/j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4uBtHmjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47BgV6/2qm66YguDLWDDAAAAFQCtMvSM064MRaGYph+kteIDIHY7IwAAAIB539w9YU/mNfzrfq6uNxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG8CXa5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0tBxWrkP9MA2JJi5O/YmUP5mmUbA4iAAAAIEAgzUqaaEy80hD0qDrmVv/Ti8IOnPwBJ0skeovDOQ9FEq9pIGZ5LADxXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTneuaKfkZCF8gRjafYnyoSyyx4seUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmAtPG+TJKpGYb7pVk=""" - - def testImportKey7(self): - for ssh in (self.ssh_pub, tostr(self.ssh_pub)): - key_obj = DSA.importKey(ssh) - self.assertFalse(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - - def testExportKey7(self): - tup = (self.y, self.g, self.p, self.q) - key = DSA.construct(tup) - encoded = key.export_key('OpenSSH') - self.assertEqual(self.ssh_pub, encoded) - - # 8. Encrypted OpenSSL/OpenSSH - pem_private_encrypted="""\ ------BEGIN DSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: AES-128-CBC,70B6908939D65E9F2EB999E8729788CE - -4V6GHRDpCrdZ8MBjbyp5AlGUrjvr2Pn2e2zVxy5RBt4FBj9/pa0ae0nnyUPMLSUU -kKyOR0topRYTVRLElm4qVrb5uNZ3hRwfbklr+pSrB7O9eHz9V5sfOQxyODS07JxK -k1OdOs70/ouMXLF9EWfAZOmWUccZKHNblUwg1p1UrZIz5jXw4dUE/zqhvXh6d+iC -ADsICaBCjCrRQJKDp50h3+ndQjkYBKVH+pj8TiQ79U7lAvdp3+iMghQN6YXs9mdI -gFpWw/f97oWM4GHZFqHJ+VSMNFjBiFhAvYV587d7Lk4dhD8sCfbxj42PnfRgUItc -nnPqHxmhMQozBWzYM4mQuo3XbF2WlsNFbOzFVyGhw1Bx1s91qvXBVWJh2ozrW0s6 -HYDV7ZkcTml/4kjA/d+mve6LZ8kuuR1qCiZx6rkffhh1gDN/1Xz3HVvIy/dQ+h9s -5zp7PwUoWbhqp3WCOr156P6gR8qo7OlT6wMh33FSXK/mxikHK136fV2shwTKQVII -rJBvXpj8nACUmi7scKuTWGeUoXa+dwTZVVe+b+L2U1ZM7+h/neTJiXn7u99PFUwu -xVJtxaV37m3aXxtCsPnbBg== ------END DSA PRIVATE KEY-----""" - - def testImportKey8(self): - for pem in (self.pem_private_encrypted, tostr(self.pem_private_encrypted)): - key_obj = DSA.importKey(pem, "PWDTEST") - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - def testExportKey8(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - encoded = key.export_key('PEM', pkcs8=False, passphrase="PWDTEST") - key = DSA.importKey(encoded, "PWDTEST") - self.assertEqual(self.y, key.y) - self.assertEqual(self.p, key.p) - self.assertEqual(self.q, key.q) - self.assertEqual(self.g, key.g) - self.assertEqual(self.x, key.x) - - # 9. 
Encrypted PKCS8 - # pbeWithMD5AndDES-CBC - pem_pkcs8_encrypted="""\ ------BEGIN ENCRYPTED PRIVATE KEY----- -MIIBcTAbBgkqhkiG9w0BBQMwDgQI0GC3BJ/jSw8CAggABIIBUHc1cXZpExIE9tC7 -7ryiW+5ihtF2Ekurq3e408GYSAu5smJjN2bvQXmzRFBz8W38K8eMf1sbWroZ4+zn -kZSbb9nSm5kAa8lR2+oF2k+WRswMR/PTC3f/D9STO2X0QxdrzKgIHEcSGSHp5jTx -aVvbkCDHo9vhBTl6S3ogZ48As/MEro76+9igUwJ1jNhIQZPJ7e20QH5qDpQFFJN4 -CKl2ENSEuwGiqBszItFy4dqH0g63ZGZV/xt9wSO9Rd7SK/EbA/dklOxBa5Y/VItM -gnIhs9XDMoGYyn6F023EicNJm6g/bVQk81BTTma4tm+12TKGdYm+QkeZvCOMZylr -Wv67cKwO3cAXt5C3QXMDgYR64XvuaT5h7C0igMp2afSXJlnbHEbFxQVJlv83T4FM -eZ4k+NQDbEL8GiHmFxzDWQAuPPZKJWEEEV2p/To+WOh+kSDHQw== ------END ENCRYPTED PRIVATE KEY-----""" - - def testImportKey9(self): - for pem in (self.pem_pkcs8_encrypted, tostr(self.pem_pkcs8_encrypted)): - key_obj = DSA.importKey(pem, "PWDTEST") - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - # 10. Encrypted PKCS8 - # pkcs5PBES2 / - # pkcs5PBKDF2 (rounds=1000, salt=D725BF1B6B8239F4) / - # des-EDE3-CBC (iv=27A1C66C42AFEECE) - # - der_pkcs8_encrypted=\ - '30820196304006092a864886f70d01050d3033301b06092a864886f70d01050c'+\ - '300e0408d725bf1b6b8239f4020203e8301406082a864886f70d0307040827a1'+\ - 'c66c42afeece048201505cacfde7bf8edabb3e0d387950dc872662ea7e9b1ed4'+\ - '400d2e7e6186284b64668d8d0328c33a9d9397e6f03df7cb68268b0a06b4e22f'+\ - '7d132821449ecf998a8b696dbc6dd2b19e66d7eb2edfeb4153c1771d49702395'+\ - '4f36072868b5fcccf93413a5ac4b2eb47d4b3f681c6bd67ae363ed776f45ae47'+\ - '174a00098a7c930a50f820b227ddf50f9742d8e950d02586ff2dac0e3c372248'+\ - 'e5f9b6a7a02f4004f20c87913e0f7b52bccc209b95d478256a890b31d4c9adec'+\ - '21a4d157a179a93a3dad06f94f3ce486b46dfa7fc15fd852dd7680bbb2f17478'+\ - '7e71bd8dbaf81eca7518d76c1d26256e95424864ba45ca5d47d7c5a421be02fa'+\ - 'b94ab01e18593f66cf9094eb5c94b9ecf3aa08b854a195cf87612fbe5e96c426'+\ - '2b0d573e52dc71ba3f5e468c601e816c49b7d32c698b22175e89aaef0c443770'+\ - '5ef2f88a116d99d8e2869a4fd09a771b84b49e4ccb79aadcb1c9' - - def testImportKey10(self): - key_obj = DSA.importKey(self.der_pkcs8_encrypted, "PWDTEST") - self.assertTrue(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - self.assertEqual(self.x, key_obj.x) - - def testExportKey10(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - randfunc = BytesIO(unhexlify(b("27A1C66C42AFEECE") + b("D725BF1B6B8239F4"))).read - encoded = key.export_key('DER', pkcs8=True, passphrase="PWDTEST", randfunc=randfunc) - self.assertEqual(self.der_pkcs8_encrypted, encoded) - - # ---- - - def testImportError1(self): - self.assertRaises(ValueError, DSA.importKey, self.der_pkcs8_encrypted, "wrongpwd") - - def testExportError2(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - self.assertRaises(ValueError, key.export_key, 'DER', pkcs8=False, passphrase="PWDTEST") - - def test_import_key(self): - """Verify importKey is an alias to import_key""" - - key_obj = DSA.import_key(self.der_public) - self.assertFalse(key_obj.has_private()) - self.assertEqual(self.y, key_obj.y) - self.assertEqual(self.p, key_obj.p) - self.assertEqual(self.q, key_obj.q) - self.assertEqual(self.g, key_obj.g) - - def test_exportKey(self): - tup = (self.y, self.g, self.p, self.q, self.x) - key = DSA.construct(tup) - 
self.assertEqual(key.exportKey(), key.export_key()) - - - def test_import_empty(self): - self.assertRaises(ValueError, DSA.import_key, b'') - - -class ImportKeyFromX509Cert(unittest.TestCase): - - def test_x509v1(self): - - # Sample V1 certificate with a 1024 bit DSA key - x509_v1_cert = """ ------BEGIN CERTIFICATE----- -MIIDUjCCArsCAQIwDQYJKoZIhvcNAQEFBQAwfjENMAsGA1UEChMEQWNtZTELMAkG -A1UECxMCUkQxHDAaBgkqhkiG9w0BCQEWDXNwYW1AYWNtZS5vcmcxEzARBgNVBAcT -Ck1ldHJvcG9saXMxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzENMAsG -A1UEAxMEdGVzdDAeFw0xNDA3MTEyMDM4NDNaFw0xNzA0MDYyMDM4NDNaME0xCzAJ -BgNVBAYTAlVTMREwDwYDVQQIEwhOZXcgWW9yazENMAsGA1UEChMEQWNtZTELMAkG -A1UECxMCUkQxDzANBgNVBAMTBnBvbGFuZDCCAbYwggErBgcqhkjOOAQBMIIBHgKB -gQDOrN4Ox4+t3T6wKeHfhzArhcrNEFMQ4Ss+4PIKyimDy9Bn64WPkL1B/9dvYIga -23GLu6tVJmXo6EdJnVOHEMhr99EeOwuDWWeP7Awq7RSlKEejokr4BEzMTW/tExSD -cO6/GI7xzh0eTH+VTTPDfyrJMYCkh0rJAfCP+5xrmPNetwIVALtXYOV1yoRrzJ2Q -M5uEjidH6GiZAoGAfUqA1SAm5g5U68SILMVX9l5rq0OpB0waBMpJQ31/R/yXNDqo -c3gGWZTOJFU4IzwNpGhrGNADUByz/lc1SAOAdEJIr0JVrhbGewQjB4pWqoLGbBKz -RoavTNDc/zD7SYa12evWDHADwvlXoeQg+lWop1zS8OqaDC7aLGKpWN3/m8kDgYQA -AoGAKoirPAfcp1rbbl4y2FFAIktfW8f4+T7d2iKSg73aiVfujhNOt1Zz1lfC0NI2 -eonLWO3tAM4XGKf1TLjb5UXngGn40okPsaA81YE6ZIKm20ywjlOY3QkAEdMaLVY3 -9PJvM8RGB9m7pLKxyHfGMfF40MVN4222zKeGp7xhM0CNiCUwDQYJKoZIhvcNAQEF -BQADgYEAfbNZfpYa2KlALEM1FZnwvQDvJHntHz8LdeJ4WM7CXDlKi67wY2HKM30w -s2xej75imkVOFd1kF2d0A8sjfriXLVIt1Hwq9ANZomhu4Edx0xpH8tqdh/bDtnM2 -TmduZNY9OWkb07h0CtWD6Zt8fhRllVsSSrlWd/2or7FXNC5weFQ= ------END CERTIFICATE----- - """.strip() - - # DSA public key as dumped by openssl - y_str = """ -2a:88:ab:3c:07:dc:a7:5a:db:6e:5e:32:d8:51:40: -22:4b:5f:5b:c7:f8:f9:3e:dd:da:22:92:83:bd:da: -89:57:ee:8e:13:4e:b7:56:73:d6:57:c2:d0:d2:36: -7a:89:cb:58:ed:ed:00:ce:17:18:a7:f5:4c:b8:db: -e5:45:e7:80:69:f8:d2:89:0f:b1:a0:3c:d5:81:3a: -64:82:a6:db:4c:b0:8e:53:98:dd:09:00:11:d3:1a: -2d:56:37:f4:f2:6f:33:c4:46:07:d9:bb:a4:b2:b1: -c8:77:c6:31:f1:78:d0:c5:4d:e3:6d:b6:cc:a7:86: -a7:bc:61:33:40:8d:88:25 - """ - p_str = """ -00:ce:ac:de:0e:c7:8f:ad:dd:3e:b0:29:e1:df:87: -30:2b:85:ca:cd:10:53:10:e1:2b:3e:e0:f2:0a:ca: -29:83:cb:d0:67:eb:85:8f:90:bd:41:ff:d7:6f:60: -88:1a:db:71:8b:bb:ab:55:26:65:e8:e8:47:49:9d: -53:87:10:c8:6b:f7:d1:1e:3b:0b:83:59:67:8f:ec: -0c:2a:ed:14:a5:28:47:a3:a2:4a:f8:04:4c:cc:4d: -6f:ed:13:14:83:70:ee:bf:18:8e:f1:ce:1d:1e:4c: -7f:95:4d:33:c3:7f:2a:c9:31:80:a4:87:4a:c9:01: -f0:8f:fb:9c:6b:98:f3:5e:b7 - """ - q_str = """ -00:bb:57:60:e5:75:ca:84:6b:cc:9d:90:33:9b:84: -8e:27:47:e8:68:99 - """ - g_str = """ -7d:4a:80:d5:20:26:e6:0e:54:eb:c4:88:2c:c5:57: -f6:5e:6b:ab:43:a9:07:4c:1a:04:ca:49:43:7d:7f: -47:fc:97:34:3a:a8:73:78:06:59:94:ce:24:55:38: -23:3c:0d:a4:68:6b:18:d0:03:50:1c:b3:fe:57:35: -48:03:80:74:42:48:af:42:55:ae:16:c6:7b:04:23: -07:8a:56:aa:82:c6:6c:12:b3:46:86:af:4c:d0:dc: -ff:30:fb:49:86:b5:d9:eb:d6:0c:70:03:c2:f9:57: -a1:e4:20:fa:55:a8:a7:5c:d2:f0:ea:9a:0c:2e:da: -2c:62:a9:58:dd:ff:9b:c9 - """ - - key = DSA.importKey(x509_v1_cert) - for comp_name in ('y', 'p', 'q', 'g'): - comp_str = locals()[comp_name + "_str"] - comp = int(re.sub("[^0-9a-f]", "", comp_str), 16) - self.assertEqual(getattr(key, comp_name), comp) - self.assertFalse(key.has_private()) - - def test_x509v3(self): - - # Sample V3 certificate with a 1024 bit DSA key - x509_v3_cert = """ ------BEGIN CERTIFICATE----- -MIIFhjCCA26gAwIBAgIBAzANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEL -MAkGA1UECAwCTUQxEjAQBgNVBAcMCUJhbHRpbW9yZTEQMA4GA1UEAwwHVGVzdCBD -QTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0xNDA3MTMyMDUz 
-MjBaFw0xNzA0MDgyMDUzMjBaMEAxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJNRDES -MBAGA1UEBwwJQmFsdGltb3JlMRAwDgYDVQQDDAdhdXN0cmlhMIIBtjCCASsGByqG -SM44BAEwggEeAoGBALfd8gyEpVPA0ZI69Kp3nyJcu5N0ZZ3K1K9hleQLNqKEcZOh -7a/C2J1TPdmHTLJ0rAwBZ1nWxnARSgRphziGDFspKCYQwYcSMz8KoFgvXbXpuchy -oFACiQ2LqZnc5MakuLQtLcQciSYGYj3zmZdYMoa904F1aDWr+DxQI6DVC3/bAhUA -hqXMCJ6fQK3G2O9S3/CC/yVZXCsCgYBRXROl3R2khX7l10LQjDEgo3B1IzjXU/jP -McMBl6XO+nBJXxr/scbq8Ajiv7LTnGpSjgryHtvfj887kfvo8QbSS3kp3vq5uSqI -ui7E7r3jguWaLj616AG1HWOctXJUjqsiabZwsp2h09gHTzmHEXBOmiARu8xFxKAH -xsuo7onAbwOBhAACgYBylWjWSnKHE8mHx1A5m/0GQx6xnhWIe3+MJAnEhRGxA2J4 -SCsfWU0OwglIQToh1z5uUU9oDi9cYgNPBevOFRnDhc2yaJY6VAYnI+D+6J5IU6Yd -0iaG/iSc4sV4bFr0axcPpse3SN0XaQxiKeSFBfFnoMqL+dd9Gb3QPZSllBcVD6OB -1TCB0jAdBgNVHQ4EFgQUx5wN0Puotv388M9Tp/fsPbZpzAUwHwYDVR0jBBgwFoAU -a0hkif3RMaraiWtsOOZZlLu9wJwwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwSgYD -VR0RBEMwQYILZXhhbXBsZS5jb22CD3d3dy5leGFtcGxlLmNvbYIQbWFpbC5leGFt -cGxlLmNvbYIPZnRwLmV4YW1wbGUuY29tMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM -IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsFAAOCAgEAyWf1TiJI -aNEIA9o/PG8/JiGASTS2/HBVTJbkq03k6NkJVk/GxC1DPziTUJ+CdWlHWcAi1EOW -Ach3QxNDRrVfCOfCMDgElIO1094/reJgdFYG00LRi8QkRJuxANV7YS4tLudhyHJC -kR2lhdMNmEuzWK+s2y+5cLrdm7qdvdENQCcV67uvGPx4sc+EaE7x13SczKjWBtbo -QCs6JTOW+EkPRl4Zo27K4OIZ43/J+GxvwU9QUVH3wPVdbbLNw+QeTFBYMTEcxyc4 -kv50HPBFaithziXBFyvdIs19FjkFzu0Uz/e0zb1+vMzQlJMD94HVOrMnIj5Sb2cL -KKdYXS4uhxFJmdV091Xur5JkYYwEzuaGav7J3zOzYutrIGTgDluLCvA+VQkRcTsy -jZ065SkY/v+38QHp+cmm8WRluupJTs8wYzVp6Fu0iFaaK7ztFmaZmHpiPIfDFjva -aCIgzzT5NweJd/b71A2SyzHXJ14zBXsr1PMylMp2TpHIidhuuNuQL6I0HaollB4M -Z3FsVBMhVDw4Z76qnFPr8mZE2tar33hSlJI/3pS/bBiukuBk8U7VB0X8OqaUnP3C -7b2Z4G8GtqDVcKGMzkvMjT4n9rKd/Le+qHSsQOGO9W/0LB7UDAZSwUsfAPnoBgdS -5t9tIomLCOstByXi+gGZue1TcdCa3Ph4kO0= ------END CERTIFICATE----- - """.strip() - - # DSA public key as dumped by openssl - y_str = """ -72:95:68:d6:4a:72:87:13:c9:87:c7:50:39:9b:fd: -06:43:1e:b1:9e:15:88:7b:7f:8c:24:09:c4:85:11: -b1:03:62:78:48:2b:1f:59:4d:0e:c2:09:48:41:3a: -21:d7:3e:6e:51:4f:68:0e:2f:5c:62:03:4f:05:eb: -ce:15:19:c3:85:cd:b2:68:96:3a:54:06:27:23:e0: -fe:e8:9e:48:53:a6:1d:d2:26:86:fe:24:9c:e2:c5: -78:6c:5a:f4:6b:17:0f:a6:c7:b7:48:dd:17:69:0c: -62:29:e4:85:05:f1:67:a0:ca:8b:f9:d7:7d:19:bd: -d0:3d:94:a5:94:17:15:0f - """ - p_str = """ -00:b7:dd:f2:0c:84:a5:53:c0:d1:92:3a:f4:aa:77: -9f:22:5c:bb:93:74:65:9d:ca:d4:af:61:95:e4:0b: -36:a2:84:71:93:a1:ed:af:c2:d8:9d:53:3d:d9:87: -4c:b2:74:ac:0c:01:67:59:d6:c6:70:11:4a:04:69: -87:38:86:0c:5b:29:28:26:10:c1:87:12:33:3f:0a: -a0:58:2f:5d:b5:e9:b9:c8:72:a0:50:02:89:0d:8b: -a9:99:dc:e4:c6:a4:b8:b4:2d:2d:c4:1c:89:26:06: -62:3d:f3:99:97:58:32:86:bd:d3:81:75:68:35:ab: -f8:3c:50:23:a0:d5:0b:7f:db - """ - q_str = """ -00:86:a5:cc:08:9e:9f:40:ad:c6:d8:ef:52:df:f0: -82:ff:25:59:5c:2b - """ - g_str = """ -51:5d:13:a5:dd:1d:a4:85:7e:e5:d7:42:d0:8c:31: -20:a3:70:75:23:38:d7:53:f8:cf:31:c3:01:97:a5: -ce:fa:70:49:5f:1a:ff:b1:c6:ea:f0:08:e2:bf:b2: -d3:9c:6a:52:8e:0a:f2:1e:db:df:8f:cf:3b:91:fb: -e8:f1:06:d2:4b:79:29:de:fa:b9:b9:2a:88:ba:2e: -c4:ee:bd:e3:82:e5:9a:2e:3e:b5:e8:01:b5:1d:63: -9c:b5:72:54:8e:ab:22:69:b6:70:b2:9d:a1:d3:d8: -07:4f:39:87:11:70:4e:9a:20:11:bb:cc:45:c4:a0: -07:c6:cb:a8:ee:89:c0:6f - """ - - key = DSA.importKey(x509_v3_cert) - for comp_name in ('y', 'p', 'q', 'g'): - comp_str = locals()[comp_name + "_str"] - comp = int(re.sub("[^0-9a-f]", "", comp_str), 16) - self.assertEqual(getattr(key, comp_name), comp) - self.assertFalse(key.has_private()) - - -if __name__ == '__main__': - unittest.main() - -def get_tests(config={}): - tests = [] - tests += 
list_test_cases(ImportKeyTests) - tests += list_test_cases(ImportKeyFromX509Cert) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_ECC.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_ECC.py deleted file mode 100644 index ee80000..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_ECC.py +++ /dev/null @@ -1,2782 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2015, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import os -import errno -import warnings -import unittest -from binascii import unhexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.py3compat import bord, tostr, FileNotFoundError -from Cryptodome.Util.asn1 import DerSequence, DerBitString -from Cryptodome.Util.number import bytes_to_long -from Cryptodome.Hash import SHAKE128 - -from Cryptodome.PublicKey import ECC - -from Cryptodome.PublicKey.ECC import _import_rfc5915_der - -try: - import pycryptodome_test_vectors # type: ignore - test_vectors_available = True -except ImportError: - test_vectors_available = False - - -class MissingTestVectorException(ValueError): - pass - - -def load_file(file_name, mode="rb"): - results = None - - try: - if not test_vectors_available: - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), - file_name) - - dir_comps = ("PublicKey", "ECC") - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - with open(full_file_name, mode) as file_in: - results = file_in.read() - - except FileNotFoundError: - warnings.warn("Skipping extended tests for ECC", - UserWarning, - stacklevel=2) - - if results is None: - raise MissingTestVectorException("Missing %s" % file_name) - - return results - - -def compact(lines): - ext = b"".join(lines) - return unhexlify(tostr(ext).replace(" ", "").replace(":", "")) - - -def create_ref_keys_p192(): - key_len = 24 - key_lines = load_file("ecc_p192.txt").splitlines() - private_key_d = bytes_to_long(compact(key_lines[2:4])) - public_key_xy = compact(key_lines[5:9]) - assert bord(public_key_xy[0]) == 4 # Uncompressed - public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) - public_key_y = bytes_to_long(public_key_xy[key_len+1:]) - - return (ECC.construct(curve="P-192", d=private_key_d), - ECC.construct(curve="P-192", point_x=public_key_x, point_y=public_key_y)) - - -def create_ref_keys_p224(): - key_len = 28 - key_lines = load_file("ecc_p224.txt").splitlines() - private_key_d = bytes_to_long(compact(key_lines[2:4])) - public_key_xy = compact(key_lines[5:9]) - assert bord(public_key_xy[0]) == 4 # Uncompressed - public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) - public_key_y = bytes_to_long(public_key_xy[key_len+1:]) - - return (ECC.construct(curve="P-224", d=private_key_d), - ECC.construct(curve="P-224", point_x=public_key_x, point_y=public_key_y)) - - -def create_ref_keys_p256(): - key_len = 32 - key_lines = load_file("ecc_p256.txt").splitlines() - private_key_d = bytes_to_long(compact(key_lines[2:5])) - public_key_xy = compact(key_lines[6:11]) - assert bord(public_key_xy[0]) == 4 # Uncompressed - public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) - public_key_y = bytes_to_long(public_key_xy[key_len+1:]) - - return (ECC.construct(curve="P-256", d=private_key_d), - ECC.construct(curve="P-256", point_x=public_key_x, point_y=public_key_y)) - - -def create_ref_keys_p384(): - key_len = 48 - key_lines = load_file("ecc_p384.txt").splitlines() - private_key_d = bytes_to_long(compact(key_lines[2:6])) - public_key_xy = compact(key_lines[7:14]) - assert bord(public_key_xy[0]) == 4 # Uncompressed - public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) - public_key_y = bytes_to_long(public_key_xy[key_len+1:]) - - return (ECC.construct(curve="P-384", d=private_key_d), - ECC.construct(curve="P-384", point_x=public_key_x, point_y=public_key_y)) - - -def 
create_ref_keys_p521(): - key_len = 66 - key_lines = load_file("ecc_p521.txt").splitlines() - private_key_d = bytes_to_long(compact(key_lines[2:7])) - public_key_xy = compact(key_lines[8:17]) - assert bord(public_key_xy[0]) == 4 # Uncompressed - public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) - public_key_y = bytes_to_long(public_key_xy[key_len+1:]) - - return (ECC.construct(curve="P-521", d=private_key_d), - ECC.construct(curve="P-521", point_x=public_key_x, point_y=public_key_y)) - - -def create_ref_keys_ed25519(): - key_lines = load_file("ecc_ed25519.txt").splitlines() - seed = compact(key_lines[5:8]) - key = ECC.construct(curve="Ed25519", seed=seed) - return (key, key.public_key()) - - -def create_ref_keys_ed448(): - key_lines = load_file("ecc_ed448.txt").splitlines() - seed = compact(key_lines[6:10]) - key = ECC.construct(curve="Ed448", seed=seed) - return (key, key.public_key()) - - -# Create reference key pair -# ref_private, ref_public = create_ref_keys_p521() - -def get_fixed_prng(): - return SHAKE128.new().update(b"SEED").read - - -def extract_bitstring_from_spki(data): - seq = DerSequence() - seq.decode(data) - bs = DerBitString() - bs.decode(seq[1]) - return bs.value - - -class TestImport(unittest.TestCase): - - def test_empty(self): - self.assertRaises(ValueError, ECC.import_key, b"") - - def test_mismatch(self): - # The private key does not match the public key - mismatch = """-----BEGIN PRIVATE KEY----- -MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJChZANiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXk= ------END PRIVATE KEY-----""" - self.assertRaises(ValueError, ECC.import_key, mismatch) - - def test_import_private_rfc5915_none(self): - # ECPrivateKey with a P256 private key, without [0] and [1] - data_hex = "302502010104205c4e4320ef260f91ed9fc597aee98c8236b60e0ced692cc7a057d5e45798a052" - key = _import_rfc5915_der(unhexlify(data_hex), None, "1.2.840.10045.3.1.7") - self.assertEqual(key.d, 0x5c4e4320ef260f91ed9fc597aee98c8236b60e0ced692cc7a057d5e45798a052) - - def test_import_private_rfc5915_only_0(self): - # ECPrivateKey with a P256 private key, with [0] only - data_hex = "303102010104205c4e4320ef260f91ed9fc597aee98c8236b60e0ced692cc7a057d5e45798a052a00a06082a8648ce3d030107" - key = _import_rfc5915_der(unhexlify(data_hex), None) - self.assertEqual(key.d, 0x5c4e4320ef260f91ed9fc597aee98c8236b60e0ced692cc7a057d5e45798a052) - - def test_import_private_rfc5915_only_1(self): - # ECPrivateKey with a P256 private key, with [1] only - data_hex = "306b02010104205c4e4320ef260f91ed9fc597aee98c8236b60e0ced692cc7a057d5e45798a052a14403420004a40ad59a2050ebe92479bd5fb16bb2e45b6465eb3cb2b1effe423fabe6cb7424db8219ef0bab80acf26fd70595b61fe4760d33eed80dd03d2fd0dfb27b8ce75c" - key = _import_rfc5915_der(unhexlify(data_hex), None, "1.2.840.10045.3.1.7") - self.assertEqual(key.d, 0x5c4e4320ef260f91ed9fc597aee98c8236b60e0ced692cc7a057d5e45798a052) - -class TestImport_P192(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_P192, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p192() - - def test_import_public_der(self): - key_file = load_file("ecc_p192_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = 
ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_sec1_uncompressed(self): - key_file = load_file("ecc_p192_public.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P192') - self.assertEqual(self.ref_public, key) - - def test_import_sec1_compressed(self): - key_file = load_file("ecc_p192_public_compressed.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P192') - self.assertEqual(self.ref_public, key) - - def test_import_rfc5915_der(self): - key_file = load_file("ecc_p192_private.der") - - key = ECC._import_rfc5915_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_clear(self): - key_file = load_file("ecc_p192_private_p8_clear.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_in_pem_clear(self): - key_file = load_file("ecc_p192_private_p8_clear.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_p192_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_p192_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_p192_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_p192_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_p192_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": - key_file = load_file("ecc_p192_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_p192_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - -class TestImport_P224(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_P224, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p224() - - def test_import_public_der(self): - key_file = load_file("ecc_p224_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_sec1_uncompressed(self): - key_file = load_file("ecc_p224_public.der") - value = extract_bitstring_from_spki(key_file) 
- key = ECC.import_key(key_file, curve_name='P224') - self.assertEqual(self.ref_public, key) - - def test_import_sec1_compressed(self): - key_file = load_file("ecc_p224_public_compressed.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P224') - self.assertEqual(self.ref_public, key) - - def test_import_rfc5915_der(self): - key_file = load_file("ecc_p224_private.der") - - key = ECC._import_rfc5915_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_clear(self): - key_file = load_file("ecc_p224_private_p8_clear.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_in_pem_clear(self): - key_file = load_file("ecc_p224_private_p8_clear.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_p224_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_p224_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_p224_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_p224_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_p224_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_p224_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": - key_file = load_file("ecc_p224_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_p224_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - -class TestImport_P256(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_P256, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p256() - - def test_import_public_der(self): - key_file = load_file("ecc_p256_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_sec1_uncompressed(self): - key_file = 
load_file("ecc_p256_public.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P256') - self.assertEqual(self.ref_public, key) - - def test_import_sec1_compressed(self): - key_file = load_file("ecc_p256_public_compressed.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P256') - self.assertEqual(self.ref_public, key) - - def test_import_rfc5915_der(self): - key_file = load_file("ecc_p256_private.der") - - key = ECC._import_rfc5915_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_clear(self): - key_file = load_file("ecc_p256_private_p8_clear.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_in_pem_clear(self): - key_file = load_file("ecc_p256_private_p8_clear.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_p256_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_p256_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_p256_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_p256_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_p256_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_p256_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_with_ecparams(self): - key_file = load_file("ecc_p256_private_ecparams.pem") - key = ECC.import_key(key_file) - # We just check if the import succeeds - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": - key_file = load_file("ecc_p256_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_p256_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_public(self): - key_file = load_file("ecc_p256_public_openssh.txt") - - key = ECC._import_openssh_public(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_private_clear(self): - key_file = 
load_file("ecc_p256_private_openssh.pem") - key_file_old = load_file("ecc_p256_private_openssh_old.pem") - - key = ECC.import_key(key_file) - key_old = ECC.import_key(key_file_old) - self.assertEqual(key, key_old) - - def test_import_openssh_private_password(self): - key_file = load_file("ecc_p256_private_openssh_pwd.pem") - key_file_old = load_file("ecc_p256_private_openssh_pwd_old.pem") - - key = ECC.import_key(key_file, b"password") - key_old = ECC.import_key(key_file_old) - self.assertEqual(key, key_old) - - -class TestImport_P384(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_P384, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p384() - - def test_import_public_der(self): - key_file = load_file("ecc_p384_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_sec1_uncompressed(self): - key_file = load_file("ecc_p384_public.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P384') - self.assertEqual(self.ref_public, key) - - def test_import_sec1_compressed(self): - key_file = load_file("ecc_p384_public_compressed.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P384') - self.assertEqual(self.ref_public, key) - - def test_import_rfc5915_der(self): - key_file = load_file("ecc_p384_private.der") - - key = ECC._import_rfc5915_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_clear(self): - key_file = load_file("ecc_p384_private_p8_clear.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_in_pem_clear(self): - key_file = load_file("ecc_p384_private_p8_clear.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_p384_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_p384_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_p384_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_p384_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_p384_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_p384_private.pem") - 
- key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": - key_file = load_file("ecc_p384_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_p384_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_public(self): - key_file = load_file("ecc_p384_public_openssh.txt") - - key = ECC._import_openssh_public(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_private_clear(self): - key_file = load_file("ecc_p384_private_openssh.pem") - key_file_old = load_file("ecc_p384_private_openssh_old.pem") - - key = ECC.import_key(key_file) - key_old = ECC.import_key(key_file_old) - self.assertEqual(key, key_old) - - def test_import_openssh_private_password(self): - key_file = load_file("ecc_p384_private_openssh_pwd.pem") - key_file_old = load_file("ecc_p384_private_openssh_pwd_old.pem") - - key = ECC.import_key(key_file, b"password") - key_old = ECC.import_key(key_file_old) - self.assertEqual(key, key_old) - - -class TestImport_P521(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_P521, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p521() - - def test_import_public_der(self): - key_file = load_file("ecc_p521_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_sec1_uncompressed(self): - key_file = load_file("ecc_p521_public.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P521') - self.assertEqual(self.ref_public, key) - - def test_import_sec1_compressed(self): - key_file = load_file("ecc_p521_public_compressed.der") - value = extract_bitstring_from_spki(key_file) - key = ECC.import_key(key_file, curve_name='P521') - self.assertEqual(self.ref_public, key) - - def test_import_rfc5915_der(self): - key_file = load_file("ecc_p521_private.der") - - key = ECC._import_rfc5915_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_clear(self): - key_file = load_file("ecc_p521_private_p8_clear.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_in_pem_clear(self): - key_file = load_file("ecc_p521_private_p8_clear.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_p521_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def 
test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_p521_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_p521_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_p521_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_p521_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_p521_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": - key_file = load_file("ecc_p521_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_p521_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_public(self): - key_file = load_file("ecc_p521_public_openssh.txt") - - key = ECC._import_openssh_public(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_private_clear(self): - key_file = load_file("ecc_p521_private_openssh.pem") - key_file_old = load_file("ecc_p521_private_openssh_old.pem") - - key = ECC.import_key(key_file) - key_old = ECC.import_key(key_file_old) - self.assertEqual(key, key_old) - - def test_import_openssh_private_password(self): - key_file = load_file("ecc_p521_private_openssh_pwd.pem") - key_file_old = load_file("ecc_p521_private_openssh_pwd_old.pem") - - key = ECC.import_key(key_file, b"password") - key_old = ECC.import_key(key_file_old) - self.assertEqual(key, key_old) - - -class TestExport_P192(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_P192, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p192() - - def test_export_public_der_uncompressed(self): - key_file = load_file("ecc_p192_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(False) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_der_compressed(self): - key_file = load_file("ecc_p192_public.der") - pub_key = ECC.import_key(key_file) - key_file_compressed = pub_key.export_key(format="DER", compress=True) - - key_file_compressed_ref = load_file("ecc_p192_public_compressed.der") - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_public_sec1_uncompressed(self): - key_file = load_file("ecc_p192_public.der") - value = extract_bitstring_from_spki(key_file) - - encoded = self.ref_public.export_key(format="SEC1") - self.assertEqual(value, 
encoded) - - def test_export_public_sec1_compressed(self): - key_file = load_file("ecc_p192_public.der") - encoded = self.ref_public.export_key(format="SEC1", compress=True) - - key_file_compressed_ref = load_file("ecc_p192_public_compressed.der") - value = extract_bitstring_from_spki(key_file_compressed_ref) - self.assertEqual(value, encoded) - - def test_export_rfc5915_private_der(self): - key_file = load_file("ecc_p192_private.der") - - encoded = self.ref_private._export_rfc5915_private_der() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_p192_private_p8_clear.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA224AndAES192-CBC", - prot_params={'iteration_count':123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem_uncompressed(self): - key_file = load_file("ecc_p192_public.pem", "rt").strip() - - encoded = self.ref_private._export_public_pem(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="PEM", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_pem_compressed(self): - key_file = load_file("ecc_p192_public.pem", "rt").strip() - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="PEM", compress=True) - key_file_compressed_ref = load_file("ecc_p192_public_compressed.pem", "rt").strip() - - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_p192_private.pem", "rt").strip() - - encoded = self.ref_private._export_private_pem(None) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private._export_private_pem(passphrase=b"secret") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "EC PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - use_pkcs8=False) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def 
test_export_private_pkcs8_and_pem_1(self): - # PKCS8 inside PEM with both unencrypted - key_file = load_file("ecc_p192_private_p8_clear.pem", "rt").strip() - - encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_and_pem_2(self): - # PKCS8 inside PEM with PKCS8 encryption - encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - # --- - - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase=b"secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.ref_private.export_key(format="PEM", passphrase="secret", - use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # DER format but no PKCS#8 - self.assertRaises(ValueError, self.ref_private.export_key, format="DER", - passphrase="secret", - use_pkcs8=False, - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # Incorrect parameters for public keys - self.assertRaises(ValueError, self.ref_public.export_key, format="DER", - use_pkcs8=False) - - # Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - def test_compressed_curve(self): - - # Compressed P-192 curve (Y-point is even) - pem1 = """-----BEGIN EC PRIVATE KEY----- - MF8CAQEEGHvhXmIW95JxZYfd4AUPu9BwknjuvS36aqAKBggqhkjOPQMBAaE0AzIA - BLJZCyTu35DQIlqvMlBynn3k1Ig+dWfg/brRhHecxptrbloqFSP8ITw0CwbGF+2X - 5g== - -----END EC PRIVATE KEY-----""" - - # Compressed P-192 curve (Y-point is odd) - pem2 = """-----BEGIN EC PRIVATE KEY----- - 
MF8CAQEEGA3rAotUaWl7d47eX6tz9JmLzOMJwl13XaAKBggqhkjOPQMBAaE0AzIA - BG4tHlTBBBGokcWmGm2xubVB0NvPC/Ou5AYwivs+3iCxmEjsymVAj6iiuX2Lxr6g - /Q== - -----END EC PRIVATE KEY-----""" - - key1 = ECC.import_key(pem1) - low16 = int(key1.pointQ.y % 65536) - self.assertEqual(low16, 0x97E6) - - key2 = ECC.import_key(pem2) - low16 = int(key2.pointQ.y % 65536) - self.assertEqual(low16, 0xA0FD) - - -class TestExport_P224(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_P224, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p224() - - def test_export_public_der_uncompressed(self): - key_file = load_file("ecc_p224_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(False) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_der_compressed(self): - key_file = load_file("ecc_p224_public.der") - pub_key = ECC.import_key(key_file) - key_file_compressed = pub_key.export_key(format="DER", compress=True) - - key_file_compressed_ref = load_file("ecc_p224_public_compressed.der") - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_public_sec1_uncompressed(self): - key_file = load_file("ecc_p224_public.der") - value = extract_bitstring_from_spki(key_file) - - encoded = self.ref_public.export_key(format="SEC1") - self.assertEqual(value, encoded) - - def test_export_public_sec1_compressed(self): - key_file = load_file("ecc_p224_public.der") - encoded = self.ref_public.export_key(format="SEC1", compress=True) - - key_file_compressed_ref = load_file("ecc_p224_public_compressed.der") - value = extract_bitstring_from_spki(key_file_compressed_ref) - self.assertEqual(value, encoded) - - def test_export_rfc5915_private_der(self): - key_file = load_file("ecc_p224_private.der") - - encoded = self.ref_private._export_rfc5915_private_der() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_p224_private_p8_clear.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA512-224AndAES128-CBC", - prot_params={'iteration_count':123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem_uncompressed(self): - key_file = load_file("ecc_p224_public.pem", "rt").strip() - - encoded = self.ref_private._export_public_pem(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="PEM", compress=False) 
- self.assertEqual(key_file, encoded) - - def test_export_public_pem_compressed(self): - key_file = load_file("ecc_p224_public.pem", "rt").strip() - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="PEM", compress=True) - key_file_compressed_ref = load_file("ecc_p224_public_compressed.pem", "rt").strip() - - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_p224_private.pem", "rt").strip() - - encoded = self.ref_private._export_private_pem(None) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private._export_private_pem(passphrase=b"secret") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "EC PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - use_pkcs8=False) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_private_pkcs8_and_pem_1(self): - # PKCS8 inside PEM with both unencrypted - key_file = load_file("ecc_p224_private_p8_clear.pem", "rt").strip() - - encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_and_pem_2(self): - # PKCS8 inside PEM with PKCS8 encryption - encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - # --- - - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase=b"secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - 
self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.ref_private.export_key(format="PEM", passphrase="secret", - use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # DER format but no PKCS#8 - self.assertRaises(ValueError, self.ref_private.export_key, format="DER", - passphrase="secret", - use_pkcs8=False, - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # Incorrect parameters for public keys - self.assertRaises(ValueError, self.ref_public.export_key, format="DER", - use_pkcs8=False) - - # Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - def test_compressed_curve(self): - - # Compressed P-224 curve (Y-point is even) - pem1 = """-----BEGIN EC PRIVATE KEY----- - MGgCAQEEHPYicBNI9nd6wDKAX2l+f3A0Q+KWUQeMqSt5GoOgBwYFK4EEACGhPAM6 - AATCL6rUIDT14zXKoS5GQUMDP/tpc+1iI/FyEZikt2roKDkhU5q08srmqaysbfJN - eUr7Xf1lnCVGag== - -----END EC PRIVATE KEY-----""" - - # Compressed P-224 curve (Y-point is odd) - pem2 = """-----BEGIN EC PRIVATE KEY----- - MGgCAQEEHEFjbaVPLJ3ngZyCibCvT0RLUqSlHjC5Z3e0FtugBwYFK4EEACGhPAM6 - AAT5IvL2V6m48y1JLMGr6ZbnOqNKP9hMf9mxyVkk6/SaRoBoJVkXrNIpYL0P7DS7 - QF8E/OGeZRwvow== - -----END EC PRIVATE KEY-----""" - - key1 = ECC.import_key(pem1) - low16 = int(key1.pointQ.y % 65536) - self.assertEqual(low16, 0x466A) - - key2 = ECC.import_key(pem2) - low16 = int(key2.pointQ.y % 65536) - self.assertEqual(low16, 0x2FA3) - - -class TestExport_P256(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_P256, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p256() - - def test_export_public_der_uncompressed(self): - key_file = load_file("ecc_p256_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(False) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_der_compressed(self): - key_file = load_file("ecc_p256_public.der") - pub_key = ECC.import_key(key_file) - key_file_compressed = pub_key.export_key(format="DER", compress=True) - - key_file_compressed_ref = load_file("ecc_p256_public_compressed.der") - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_public_sec1_uncompressed(self): - key_file = load_file("ecc_p256_public.der") - value = extract_bitstring_from_spki(key_file) - - encoded = self.ref_public.export_key(format="SEC1") - self.assertEqual(value, encoded) - - def test_export_public_sec1_compressed(self): - key_file = load_file("ecc_p256_public.der") - encoded = self.ref_public.export_key(format="SEC1", compress=True) - - key_file_compressed_ref = load_file("ecc_p256_public_compressed.der") - value = extract_bitstring_from_spki(key_file_compressed_ref) - self.assertEqual(value, encoded) - - def test_export_rfc5915_private_der(self): - key_file = load_file("ecc_p256_private.der") - - encoded = self.ref_private._export_rfc5915_private_der() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) - self.assertEqual(key_file, encoded) - 
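# A minimal sketch of the public-API calls that the removed export tests above
# exercise (EccKey.export_key round-tripped through ECC.import_key). ECC.generate()
# is assumed here, since it does not appear in this hunk; the other arguments
# mirror the calls used in the tests.
from Crypto.PublicKey import ECC

key = ECC.generate(curve="P-256")

# Clear PKCS#8 DER is the default private-key container.
p8_der = key.export_key(format="DER")

# use_pkcs8=False selects the traditional RFC 5915 "EC PRIVATE KEY" structure.
ec_der = key.export_key(format="DER", use_pkcs8=False)

# Encrypted PKCS#8 needs both a passphrase and a 'protection' scheme.
enc_der = key.export_key(format="DER",
                         passphrase="secret",
                         protection="PBKDF2WithHMAC-SHA1AndAES128-CBC")
assert ECC.import_key(enc_der, "secret") == key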
- def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_p256_private_p8_clear.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA512-256AndAES128-CBC", - prot_params={'iteration_count':123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem_uncompressed(self): - key_file = load_file("ecc_p256_public.pem", "rt").strip() - - encoded = self.ref_private._export_public_pem(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="PEM", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_pem_compressed(self): - key_file = load_file("ecc_p256_public.pem", "rt").strip() - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="PEM", compress=True) - key_file_compressed_ref = load_file("ecc_p256_public_compressed.pem", "rt").strip() - - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_p256_private.pem", "rt").strip() - - encoded = self.ref_private._export_private_pem(None) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private._export_private_pem(passphrase=b"secret") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "EC PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - use_pkcs8=False) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_private_pkcs8_and_pem_1(self): - # PKCS8 inside PEM with both unencrypted - key_file = load_file("ecc_p256_private_p8_clear.pem", "rt").strip() - - encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_and_pem_2(self): - # PKCS8 inside PEM with PKCS8 encryption - encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - 
self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_openssh_uncompressed(self): - key_file = load_file("ecc_p256_public_openssh.txt", "rt") - - encoded = self.ref_public._export_openssh(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="OpenSSH") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="OpenSSH", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_openssh_compressed(self): - key_file = load_file("ecc_p256_public_openssh.txt", "rt") - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) - assert len(key_file) > len(key_file_compressed) - self.assertEqual(pub_key, ECC.import_key(key_file_compressed)) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - # --- - - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase=b"secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.ref_private.export_key(format="PEM", passphrase="secret", - use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # DER format but no PKCS#8 - self.assertRaises(ValueError, self.ref_private.export_key, format="DER", - passphrase="secret", - use_pkcs8=False, - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # Incorrect parameters for public keys - self.assertRaises(ValueError, self.ref_public.export_key, format="DER", - use_pkcs8=False) - - # Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", - passphrase="secret") - - - def test_compressed_curve(self): - - # Compressed P-256 curve (Y-point is even) - pem1 = """-----BEGIN EC PRIVATE KEY----- - 
MFcCAQEEIHTuc09jC51xXomV6MVCDN+DpAAvSmaJWZPTEHM6D5H1oAoGCCqGSM49 - AwEHoSQDIgACWFuGbHe8yJ43rir7PMTE9w8vHz0BSpXHq90Xi7/s+a0= - -----END EC PRIVATE KEY-----""" - - # Compressed P-256 curve (Y-point is odd) - pem2 = """-----BEGIN EC PRIVATE KEY----- - MFcCAQEEIFggiPN9SQP+FAPTCPp08fRUz7rHp2qNBRcBJ1DXhb3ZoAoGCCqGSM49 - AwEHoSQDIgADLpph1trTIlVfa8NJvlMUPyWvL+wP+pW3BJITUL/wj9A= - -----END EC PRIVATE KEY-----""" - - key1 = ECC.import_key(pem1) - low16 = int(key1.pointQ.y % 65536) - self.assertEqual(low16, 0xA6FC) - - key2 = ECC.import_key(pem2) - low16 = int(key2.pointQ.y % 65536) - self.assertEqual(low16, 0x6E57) - - -class TestExport_P384(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_P384, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p384() - - def test_export_public_der_uncompressed(self): - key_file = load_file("ecc_p384_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(False) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_der_compressed(self): - key_file = load_file("ecc_p384_public.der") - pub_key = ECC.import_key(key_file) - key_file_compressed = pub_key.export_key(format="DER", compress=True) - - key_file_compressed_ref = load_file("ecc_p384_public_compressed.der") - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_public_sec1_uncompressed(self): - key_file = load_file("ecc_p384_public.der") - value = extract_bitstring_from_spki(key_file) - - encoded = self.ref_public.export_key(format="SEC1") - self.assertEqual(value, encoded) - - def test_export_public_sec1_compressed(self): - key_file = load_file("ecc_p384_public.der") - encoded = self.ref_public.export_key(format="SEC1", compress=True) - - key_file_compressed_ref = load_file("ecc_p384_public_compressed.der") - value = extract_bitstring_from_spki(key_file_compressed_ref) - self.assertEqual(value, encoded) - - def test_export_rfc5915_private_der(self): - key_file = load_file("ecc_p384_private.der") - - encoded = self.ref_private._export_rfc5915_private_der() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_p384_private_p8_clear.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA384AndAES128-CBC", - prot_params={'iteration_count':123}) - decoded = 
ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem_uncompressed(self): - key_file = load_file("ecc_p384_public.pem", "rt").strip() - - encoded = self.ref_private._export_public_pem(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="PEM", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_pem_compressed(self): - key_file = load_file("ecc_p384_public.pem", "rt").strip() - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="PEM", compress=True) - key_file_compressed_ref = load_file("ecc_p384_public_compressed.pem", "rt").strip() - - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_p384_private.pem", "rt").strip() - - encoded = self.ref_private._export_private_pem(None) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private._export_private_pem(passphrase=b"secret") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "EC PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - use_pkcs8=False) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_private_pkcs8_and_pem_1(self): - # PKCS8 inside PEM with both unencrypted - key_file = load_file("ecc_p384_private_p8_clear.pem", "rt").strip() - - encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_and_pem_2(self): - # PKCS8 inside PEM with PKCS8 encryption - encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_openssh_uncompressed(self): - key_file = load_file("ecc_p384_public_openssh.txt", "rt") - - encoded = self.ref_public._export_openssh(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="OpenSSH") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="OpenSSH", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_openssh_compressed(self): - key_file = load_file("ecc_p384_public_openssh.txt", "rt") - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) - assert len(key_file) > len(key_file_compressed) - 
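# A minimal sketch of the compressed-vs-uncompressed behaviour checked in the
# surrounding tests, using only the documented API; ECC.generate() is assumed,
# as it does not appear in this hunk.
from Crypto.PublicKey import ECC

pub = ECC.generate(curve="P-384").public_key()

# compress=True keeps only the X coordinate plus the parity of Y, so the
# encoding is shorter for SEC1, DER, PEM and OpenSSH alike.
assert len(pub.export_key(format="SEC1", compress=True)) < \
       len(pub.export_key(format="SEC1"))

# Either form imports back to the same public key.
assert ECC.import_key(pub.export_key(format="OpenSSH", compress=True)) == pub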
self.assertEqual(pub_key, ECC.import_key(key_file_compressed)) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - # --- - - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase=b"secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.ref_private.export_key(format="PEM", passphrase="secret", - use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # DER format but no PKCS#8 - self.assertRaises(ValueError, self.ref_private.export_key, format="DER", - passphrase="secret", - use_pkcs8=False, - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # Incorrect parameters for public keys - self.assertRaises(ValueError, self.ref_public.export_key, format="DER", - use_pkcs8=False) - - # Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", - passphrase="secret") - - def test_compressed_curve(self): - - # Compressed P-384 curve (Y-point is even) - # openssl ecparam -name secp384p1 -genkey -noout -conv_form compressed -out /tmp/a.pem - # openssl ec -in /tmp/a.pem -text -noout - pem1 = """-----BEGIN EC PRIVATE KEY----- -MIGkAgEBBDAM0lEIhvXuekK2SWtdbgOcZtBaxa9TxfpO/GcDFZLCJ3JVXaTgwken -QT+C+XLtD6WgBwYFK4EEACKhZANiAATs0kZMhFDu8DoBC21jrSDPyAUn4aXZ/DM4 -ylhDfWmb4LEbeszXceIzfhIUaaGs5y1xXaqf5KXTiAAYx2pKUzAAM9lcGUHCGKJG -k4AgUmVJON29XoUilcFrzjDmuye3B6Q= ------END EC PRIVATE KEY-----""" - - # Compressed P-384 curve (Y-point is odd) - pem2 = """-----BEGIN EC PRIVATE KEY----- -MIGkAgEBBDDHPFTslYLltE16fHdSDTtE/2HTmd3M8mqy5MttAm4wZ833KXiGS9oe -kFdx9sNV0KygBwYFK4EEACKhZANiAASLIE5RqVMtNhtBH/u/p/ifqOAlKnK/+RrQ -YC46ZRsnKNayw3wATdPjgja7L/DSII3nZK0G6KOOVwJBznT/e+zudUJYhZKaBLRx -/bgXyxUtYClOXxb1Y/5N7txLstYRyP0= ------END EC PRIVATE KEY-----""" - - key1 = ECC.import_key(pem1) - low16 = int(key1.pointQ.y % 65536) - self.assertEqual(low16, 0x07a4) - - key2 = ECC.import_key(pem2) - low16 = int(key2.pointQ.y % 65536) - self.assertEqual(low16, 0xc8fd) - - -class TestExport_P521(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_P521, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_p521() - - def 
test_export_public_der_uncompressed(self): - key_file = load_file("ecc_p521_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(False) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_der_compressed(self): - key_file = load_file("ecc_p521_public.der") - pub_key = ECC.import_key(key_file) - key_file_compressed = pub_key.export_key(format="DER", compress=True) - - key_file_compressed_ref = load_file("ecc_p521_public_compressed.der") - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_public_sec1_uncompressed(self): - key_file = load_file("ecc_p521_public.der") - value = extract_bitstring_from_spki(key_file) - - encoded = self.ref_public.export_key(format="SEC1") - self.assertEqual(value, encoded) - - encoded = self.ref_public.export_key(format="raw") - self.assertEqual(value, encoded) - - def test_export_public_sec1_compressed(self): - key_file = load_file("ecc_p521_public.der") - encoded = self.ref_public.export_key(format="SEC1", compress=True) - - key_file_compressed_ref = load_file("ecc_p521_public_compressed.der") - value = extract_bitstring_from_spki(key_file_compressed_ref) - self.assertEqual(value, encoded) - - encoded = self.ref_public.export_key(format="raw", compress=True) - self.assertEqual(value, encoded) - - def test_export_rfc5915_private_der(self): - key_file = load_file("ecc_p521_private.der") - - encoded = self.ref_private._export_rfc5915_private_der() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_p521_private_p8_clear.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - prot_params={'iteration_count':123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem_uncompressed(self): - key_file = load_file("ecc_p521_public.pem", "rt").strip() - - encoded = self.ref_private._export_public_pem(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="PEM", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_pem_compressed(self): - key_file = load_file("ecc_p521_public.pem", "rt").strip() - pub_key = 
ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="PEM", compress=True) - key_file_compressed_ref = load_file("ecc_p521_public_compressed.pem", "rt").strip() - - self.assertEqual(key_file_compressed, key_file_compressed_ref) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_p521_private.pem", "rt").strip() - - encoded = self.ref_private._export_private_pem(None) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private._export_private_pem(passphrase=b"secret") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "EC PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - use_pkcs8=False) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_private_pkcs8_and_pem_1(self): - # PKCS8 inside PEM with both unencrypted - key_file = load_file("ecc_p521_private_p8_clear.pem", "rt").strip() - - encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM") - self.assertEqual(key_file, encoded) - - def test_export_private_pkcs8_and_pem_2(self): - # PKCS8 inside PEM with PKCS8 encryption - encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_openssh_uncompressed(self): - key_file = load_file("ecc_p521_public_openssh.txt", "rt") - - encoded = self.ref_public._export_openssh(False) - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_public.export_key(format="OpenSSH") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="OpenSSH", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_openssh_compressed(self): - key_file = load_file("ecc_p521_public_openssh.txt", "rt") - pub_key = ECC.import_key(key_file) - - key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) - assert len(key_file) > len(key_file_compressed) - self.assertEqual(pub_key, ECC.import_key(key_file_compressed)) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - # --- - - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - 
randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase="secret", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - use_pkcs8=False, - passphrase=b"secret", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.ref_private.export_key(format="PEM", passphrase="secret", - use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # DER format but no PKCS#8 - self.assertRaises(ValueError, self.ref_private.export_key, format="DER", - passphrase="secret", - use_pkcs8=False, - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # Incorrect parameters for public keys - self.assertRaises(ValueError, self.ref_public.export_key, format="DER", - use_pkcs8=False) - - # Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", - passphrase="secret") - - def test_compressed_curve(self): - - # Compressed P-521 curve (Y-point is even) - # openssl ecparam -name secp521r1 -genkey -noout -conv_form compressed -out /tmp/a.pem - # openssl ec -in /tmp/a.pem -text -noout - pem1 = """-----BEGIN EC PRIVATE KEY----- -MIHcAgEBBEIAnm1CEjVjvNfXEN730p+D6su5l+mOztdc5XmTEoti+s2R4GQ4mAv3 -0zYLvyklvOHw0+yy8d0cyGEJGb8T3ZVKmg2gBwYFK4EEACOhgYkDgYYABAHzjTI1 -ckxQ3Togi0LAxiG0PucdBBBs5oIy3df95xv6SInp70z+4qQ2EltEmdNMssH8eOrl -M5CYdZ6nbcHMVaJUvQEzTrYxvFjOgJiOd+E9eBWbLkbMNqsh1UKVO6HbMbW0ohCI -uGxO8tM6r3w89/qzpG2SvFM/fvv3mIR30wSZDD84qA== ------END EC PRIVATE KEY-----""" - - # Compressed P-521 curve (Y-point is odd) - pem2 = """-----BEGIN EC PRIVATE KEY----- -MIHcAgEBBEIB84OfhJluLBRLn3+cC/RQ37C2SfQVP/t0gQK2tCsTf5avRcWYRrOJ -PmX9lNnkC0Hobd75QFRmdxrB0Wd1/M4jZOWgBwYFK4EEACOhgYkDgYYABAAMZcdJ -1YLCGHt3bHCEzdidVy6+brlJIbv1aQ9fPQLF7WKNv4c8w3H8d5a2+SDZilBOsk5c -6cNJDMz2ExWQvxl4CwDJtJGt1+LHVKFGy73NANqVxMbRu+2F8lOxkNp/ziFTbVyV -vv6oYkMIIi7r5oQWAiQDrR2mlrrFDL9V7GH/r8SWQw== ------END EC PRIVATE KEY-----""" - - key1 = ECC.import_key(pem1) - low16 = int(key1.pointQ.y % 65536) - self.assertEqual(low16, 0x38a8) - - key2 = ECC.import_key(pem2) - low16 = int(key2.pointQ.y % 65536) - self.assertEqual(low16, 0x9643) - - -class TestImport_Ed25519(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_Ed25519, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_ed25519() - - def test_import_public_der(self): - key_file = load_file("ecc_ed25519_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_pkcs8_der(self): - key_file = load_file("ecc_ed25519_private.der") - - key = 
ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_ed25519_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_ed25519_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_ed25519_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_ed25519_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_ed25519_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_ed25519_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256": - key_file = load_file("ecc_ed25519_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_ed25519_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_openssh_public(self): - key_file = load_file("ecc_ed25519_public_openssh.txt") - key = ECC._import_openssh_public(key_file) - self.assertFalse(key.has_private()) - key = ECC.import_key(key_file) - self.assertFalse(key.has_private()) - - def test_import_openssh_private_clear(self): - key_file = load_file("ecc_ed25519_private_openssh.pem") - key = ECC.import_key(key_file) - - def test_import_openssh_private_password(self): - key_file = load_file("ecc_ed25519_private_openssh_pwd.pem") - key = ECC.import_key(key_file, b"password") - - -class TestExport_Ed25519(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_Ed25519, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_ed25519() - - def test_export_public_der(self): - key_file = load_file("ecc_ed25519_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(True) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_sec1(self): - self.assertRaises(ValueError, self.ref_public.export_key, format="SEC1") - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_ed25519_private.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - 
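# A minimal sketch of the Ed25519 handling covered by the tests above and below;
# ECC.generate() is assumed (not part of this hunk), the rest mirrors those tests.
from Crypto.PublicKey import ECC

key = ECC.generate(curve="Ed25519")

# Ed25519 private keys always use a PKCS#8 container; as asserted just below,
# the traditional EC container (use_pkcs8=False) is rejected for this curve.
pem = key.export_key(format="PEM")
assert ECC.import_key(pem) == key

# format="raw" yields just the 32-byte public value (see test_export_raw below).
assert len(key.public_key().export_key(format="raw")) == 32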
self.assertRaises(ValueError, self.ref_private.export_key, - format="DER", use_pkcs8=False) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA256AndAES128-CBC", - prot_params={'iteration_count':123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_public_pem(self): - key_file_ref = load_file("ecc_ed25519_public.pem", "rt").strip() - key_file = self.ref_public.export_key(format="PEM").strip() - self.assertEqual(key_file_ref, key_file) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_ed25519_private.pem", "rt").strip() - encoded = self.ref_private.export_key(format="PEM").strip() - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_openssh(self): - key_file = load_file("ecc_ed25519_public_openssh.txt", "rt") - public_key = ECC.import_key(key_file) - key_file = " ".join(key_file.split(' ')[:2]) # remove comment - - encoded = public_key._export_openssh(False) - self.assertEqual(key_file, encoded.strip()) - - encoded = public_key.export_key(format="OpenSSH") - self.assertEqual(key_file, encoded.strip()) - - def test_export_raw(self): - encoded = self.ref_public.export_key(format='raw') - self.assertEqual(encoded, unhexlify(b'bc85b8cf585d20a4de47e84d1cb6183f63d9ba96223fcbc886e363ffdea20cff')) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # 
Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", - passphrase="secret") - - -class TestImport_Ed448(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestImport_Ed448, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_ed448() - - def test_import_public_der(self): - key_file = load_file("ecc_ed448_public.der") - - key = ECC._import_subjectPublicKeyInfo(key_file) - self.assertEqual(self.ref_public, key) - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_pkcs8_der(self): - key_file = load_file("ecc_ed448_private.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_1(self): - key_file = load_file("ecc_ed448_private_p8.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_2(self): - key_file = load_file("ecc_ed448_private_p8.pem") - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_private_pkcs8_encrypted_3(self): - key_file = load_file("ecc_ed448_private_p8_2.der") - - key = ECC._import_der(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_der(self): - key_file = load_file("ecc_ed448_x509.der") - - key = ECC._import_der(key_file, None) - self.assertEqual(self.ref_public, key) - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_public_pem(self): - key_file = load_file("ecc_ed448_public.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - def test_import_private_pem(self): - key_file = load_file("ecc_ed448_private.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_private, key) - - def test_import_private_pem_encrypted(self): - for algo in "des3", "aes128", "aes192", "aes256": - key_file = load_file("ecc_ed448_private_enc_%s.pem" % algo) - - key = ECC.import_key(key_file, "secret") - self.assertEqual(self.ref_private, key) - - key = ECC.import_key(tostr(key_file), b"secret") - self.assertEqual(self.ref_private, key) - - def test_import_x509_pem(self): - key_file = load_file("ecc_ed448_x509.pem") - - key = ECC.import_key(key_file) - self.assertEqual(self.ref_public, key) - - -class TestExport_Ed448(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestExport_Ed448, self).__init__(*args, **kwargs) - self.ref_private, self.ref_public = create_ref_keys_ed448() - - def test_export_public_der(self): - key_file = load_file("ecc_ed448_public.der") - - encoded = self.ref_public._export_subjectPublicKeyInfo(True) - self.assertEqual(key_file, encoded) - - encoded = self.ref_public.export_key(format="DER") - self.assertEqual(key_file, encoded) - - encoded = 
self.ref_public.export_key(format="DER", compress=False) - self.assertEqual(key_file, encoded) - - def test_export_public_sec1(self): - self.assertRaises(ValueError, self.ref_public.export_key, format="SEC1") - - def test_export_private_pkcs8_clear(self): - key_file = load_file("ecc_ed448_private.der") - - encoded = self.ref_private._export_pkcs8() - self.assertEqual(key_file, encoded) - - # --- - - encoded = self.ref_private.export_key(format="DER") - self.assertEqual(key_file, encoded) - - self.assertRaises(ValueError, self.ref_private.export_key, - format="DER", use_pkcs8=False) - - def test_export_private_pkcs8_encrypted(self): - encoded = self.ref_private._export_pkcs8(passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) - - decoded = ECC._import_pkcs8(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - # --- - - encoded = self.ref_private.export_key(format="DER", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA384AndAES128-CBC", - prot_params={'iteration_count':123}) - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - - def test_export_public_pem(self): - key_file_ref = load_file("ecc_ed448_public.pem", "rt").strip() - key_file = self.ref_public.export_key(format="PEM").strip() - self.assertEqual(key_file_ref, key_file) - - def test_export_private_pem_clear(self): - key_file = load_file("ecc_ed448_private.pem", "rt").strip() - encoded = self.ref_private.export_key(format="PEM").strip() - self.assertEqual(key_file, encoded) - - def test_export_private_pem_encrypted(self): - encoded = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # This should prove that the output is password-protected - self.assertRaises(ValueError, ECC.import_key, encoded) - - assert "ENCRYPTED PRIVATE KEY" in encoded - - decoded = ECC.import_key(encoded, "secret") - self.assertEqual(self.ref_private, decoded) - - def test_export_openssh(self): - # Not supported - self.assertRaises(ValueError, self.ref_public.export_key, format="OpenSSH") - - def test_export_raw(self): - encoded = self.ref_public.export_key(format='raw') - self.assertEqual(encoded, unhexlify(b'899014ddc0a0e1260cfc1085afdf952019e9fd63372e3e366e26dad32b176624884330a14617237e3081febd9d1a15069e7499433d2f55dd80')) - - def test_prng(self): - # Test that password-protected containers use the provided PRNG - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def test_byte_or_string_passphrase(self): - encoded1 = self.ref_private.export_key(format="PEM", - passphrase="secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - encoded2 = self.ref_private.export_key(format="PEM", - passphrase=b"secret", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", - randfunc=get_fixed_prng()) - self.assertEqual(encoded1, encoded2) - - def 
test_error_params1(self): - # Unknown format - self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") - - # Missing 'protection' parameter when PKCS#8 is used - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="secret") - - # Empty password - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", use_pkcs8=False) - self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", - passphrase="", - protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") - - # No private keys with OpenSSH - self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", - passphrase="secret") - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(TestImport) - try: - tests += list_test_cases(TestImport_P192) - tests += list_test_cases(TestImport_P224) - tests += list_test_cases(TestImport_P256) - tests += list_test_cases(TestImport_P384) - tests += list_test_cases(TestImport_P521) - tests += list_test_cases(TestImport_Ed25519) - tests += list_test_cases(TestImport_Ed448) - - tests += list_test_cases(TestExport_P192) - tests += list_test_cases(TestExport_P224) - tests += list_test_cases(TestExport_P256) - tests += list_test_cases(TestExport_P384) - tests += list_test_cases(TestExport_P521) - tests += list_test_cases(TestExport_Ed25519) - tests += list_test_cases(TestExport_Ed448) - - except MissingTestVectorException: - pass - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_RSA.py b/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_RSA.py deleted file mode 100644 index b57676d..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/PublicKey/test_import_RSA.py +++ /dev/null @@ -1,636 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/test_importKey.py: Self-test for importing RSA keys -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -import os -import re -import errno -import warnings -import unittest -from unittest import SkipTest - -from Cryptodome.PublicKey import RSA -from Cryptodome.SelfTest.st_common import a2b_hex, list_test_cases -from Cryptodome.IO import PEM -from Cryptodome.Util.py3compat import b, tostr, FileNotFoundError -from Cryptodome.Util.number import inverse -from Cryptodome.Util import asn1 - -try: - import pycryptodome_test_vectors # type: ignore - test_vectors_available = True -except ImportError: - test_vectors_available = False - - -def load_file(file_name, mode="rb"): - results = None - - try: - if not test_vectors_available: - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), - file_name) - - dir_comps = ("PublicKey", "RSA") - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - with open(full_file_name, mode) as file_in: - results = file_in.read() - - except FileNotFoundError: - warnings.warn("Skipping tests for RSA based on %s" % file_name, - UserWarning, - stacklevel=2) - - if results is None: - raise SkipTest("Missing %s" % file_name) - - return results - - -def der2pem(der, text='PUBLIC'): - import binascii - chunks = [binascii.b2a_base64(der[i:i+48]) for i in range(0, len(der), 48)] - pem = b('-----BEGIN %s KEY-----\n' % text) - pem += b('').join(chunks) - pem += b('-----END %s KEY-----' % text) - return pem - - -class ImportKeyTests(unittest.TestCase): - # 512-bit RSA key generated with openssl - rsaKeyPEM = u'''-----BEGIN RSA PRIVATE KEY----- -MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII -q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 -Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI -OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr -+rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK -JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 -n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== ------END RSA PRIVATE KEY-----''' - - # As above, but this is actually an unencrypted PKCS#8 key - rsaKeyPEM8 = u'''-----BEGIN PRIVATE KEY----- -MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAvx4nkAqgiyNRGlwS -ga5tkzEsPv6RP5MuvtSS8S0WtGEMMoy24girX0WsvilQgzKY8xIsGfeEkt7fQPDj -wZAzhQIDAQABAkAJRIMSnxFN7fZ+2rwjAbxaiOXmYB3XAWIg6tn9S/xv3rdYk4mK -5BxU3b2/FTn4zL0Y9ntEDeGsMEQCgdQM+sg5AiEA8g8vPh2mGIP2KYCSK9jfVFzk -B8cmJBEDteLFNyMSSiMCIQDKH+kkeSz8yWv6t080Smi0GN9XgzgGSAYAD+KlyZoC -NwIhAIe+HDApUEvPNOxxPYd5R0R4EyiJdcokAICvewlAkbEhAiBqtGn6bVZIpXUx -yLAxpM6dtTvDEWz0M/Wm9rvqVgHOBQIhAL2fQKdkInohlipK3Qfk3v5D7ZGjrie7 -BX85JB8zqwHB ------END PRIVATE KEY-----''' - - # The same RSA private key as in rsaKeyPEM, but now encrypted - rsaKeyEncryptedPEM = ( - - # PEM encryption - # With DES and passphrase 'test' - ('test', u'''-----BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-CBC,AF8F9A40BD2FA2FC - -Ckl9ex1kaVEWhYC2QBmfaF+YPiR4NFkRXA7nj3dcnuFEzBnY5XULupqQpQI3qbfA -u8GYS7+b3toWWiHZivHbAAUBPDIZG9hKDyB9Sq2VMARGsX1yW1zhNvZLIiVJzUHs -C6NxQ1IJWOXzTew/xM2I26kPwHIvadq+/VaT8gLQdjdH0jOiVNaevjWnLgrn1mLP -BCNRMdcexozWtAFNNqSzfW58MJL2OdMi21ED184EFytIc1BlB+FZiGZduwKGuaKy -9bMbdb/1PSvsSzPsqW7KSSrTw6MgJAFJg6lzIYvR5F4poTVBxwBX3+EyEmShiaNY -IRX3TgQI0IjrVuLmvlZKbGWP18FXj7I7k9tSsNOOzllTTdq3ny5vgM3A+ynfAaxp -dysKznQ6P+IoqML1WxAID4aGRMWka+uArOJ148Rbj9s= ------END RSA PRIVATE KEY-----'''), - - # PKCS8 encryption - ('winter', u'''-----BEGIN ENCRYPTED PRIVATE 
KEY----- -MIIBpjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIeZIsbW3O+JcCAggA -MBQGCCqGSIb3DQMHBAgSM2p0D8FilgSCAWBhFyP2tiGKVpGj3mO8qIBzinU60ApR -3unvP+N6j7LVgnV2lFGaXbJ6a1PbQXe+2D6DUyBLo8EMXrKKVLqOMGkFMHc0UaV6 -R6MmrsRDrbOqdpTuVRW+NVd5J9kQQh4xnfU/QrcPPt7vpJvSf4GzG0n666Ki50OV -M/feuVlIiyGXY6UWdVDpcOV72cq02eNUs/1JWdh2uEBvA9fCL0c07RnMrdT+CbJQ -NjJ7f8ULtp7xvR9O3Al/yJ4Wv3i4VxF1f3MCXzhlUD4I0ONlr0kJWgeQ80q/cWhw -ntvgJwnCn2XR1h6LA8Wp+0ghDTsL2NhJpWd78zClGhyU4r3hqu1XDjoXa7YCXCix -jCV15+ViDJzlNCwg+W6lRg18sSLkCT7alviIE0U5tHc6UPbbHwT5QqAxAABaP+nZ -CGqJGyiwBzrKebjgSm/KRd4C91XqcsysyH2kKPfT51MLAoD4xelOURBP ------END ENCRYPTED PRIVATE KEY-----''' - ), - ) - - rsaPublicKeyPEM = u'''-----BEGIN PUBLIC KEY----- -MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+T -Lr7UkvEtFrRhDDKMtuIIq19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQ== ------END PUBLIC KEY-----''' - - # Obtained using 'ssh-keygen -i -m PKCS8 -f rsaPublicKeyPEM' - rsaPublicKeyOpenSSH = b('''ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQC/HieQCqCLI1EaXBKBrm2TMSw+/pE/ky6+1JLxLRa0YQwyjLbiCKtfRay+KVCDMpjzEiwZ94SS3t9A8OPBkDOF comment\n''') - - # The private key, in PKCS#1 format encoded with DER - rsaKeyDER = a2b_hex( - '''3082013b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe - 913f932ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f312 - 2c19f78492dedf40f0e3c190338502030100010240094483129f114dedf6 - 7edabc2301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c - 54ddbdbf1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f - 2f3e1da61883f62980922bd8df545ce407c726241103b5e2c53723124a23 - 022100ca1fe924792cfcc96bfab74f344a68b418df578338064806000fe2 - a5c99a023702210087be1c3029504bcf34ec713d877947447813288975ca - 240080af7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53b - c3116cf433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07 - e4defe43ed91a3ae27bb057f39241f33ab01c1 - '''.replace(" ","")) - - # The private key, in unencrypted PKCS#8 format encoded with DER - rsaKeyDER8 = a2b_hex( - '''30820155020100300d06092a864886f70d01010105000482013f3082013 - b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe913f932 - ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f3122c19f78 - 492dedf40f0e3c190338502030100010240094483129f114dedf67edabc2 - 301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c54ddbdb - f1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f2f3e1da - 61883f62980922bd8df545ce407c726241103b5e2c53723124a23022100c - a1fe924792cfcc96bfab74f344a68b418df578338064806000fe2a5c99a0 - 23702210087be1c3029504bcf34ec713d877947447813288975ca240080a - f7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53bc3116cf - 433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07e4defe4 - 3ed91a3ae27bb057f39241f33ab01c1 - '''.replace(" ","")) - - rsaPublicKeyDER = a2b_hex( - '''305c300d06092a864886f70d0101010500034b003048024100bf1e27900a - a08b23511a5c1281ae6d93312c3efe913f932ebed492f12d16b4610c328c - b6e208ab5f45acbe2950833298f3122c19f78492dedf40f0e3c190338502 - 03010001 - '''.replace(" ","")) - - n = int('BF 1E 27 90 0A A0 8B 23 51 1A 5C 12 81 AE 6D 93 31 2C 3E FE 91 3F 93 2E BE D4 92 F1 2D 16 B4 61 0C 32 8C B6 E2 08 AB 5F 45 AC BE 29 50 83 32 98 F3 12 2C 19 F7 84 92 DE DF 40 F0 E3 C1 90 33 85'.replace(" ",""),16) - e = 65537 - d = int('09 44 83 12 9F 11 4D ED F6 7E DA BC 23 01 BC 5A 88 E5 E6 60 1D D7 01 62 20 EA D9 FD 4B FC 6F DE B7 58 93 89 8A E4 1C 54 DD BD BF 15 39 F8 CC BD 18 F6 7B 44 0D E1 AC 30 44 02 81 D4 0C FA C8 39'.replace(" ",""),16) - p = int('00 F2 0F 2F 3E 1D A6 18 83 F6 29 80 92 2B D8 DF 54 5C E4 07 C7 26 24 11 03 B5 E2 C5 37 23 12 4A 23'.replace(" 
",""),16) - q = int('00 CA 1F E9 24 79 2C FC C9 6B FA B7 4F 34 4A 68 B4 18 DF 57 83 38 06 48 06 00 0F E2 A5 C9 9A 02 37'.replace(" ",""),16) - - # This is q^{-1} mod p). fastmath and slowmath use pInv (p^{-1} - # mod q) instead! - qInv = int('00 BD 9F 40 A7 64 22 7A 21 96 2A 4A DD 07 E4 DE FE 43 ED 91 A3 AE 27 BB 05 7F 39 24 1F 33 AB 01 C1'.replace(" ",""),16) - pInv = inverse(p,q) - - def testImportKey1(self): - """Verify import of RSAPrivateKey DER SEQUENCE""" - key = RSA.importKey(self.rsaKeyDER) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey2(self): - """Verify import of SubjectPublicKeyInfo DER SEQUENCE""" - key = RSA.importKey(self.rsaPublicKeyDER) - self.assertFalse(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey3unicode(self): - """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" - key = RSA.importKey(self.rsaKeyPEM) - self.assertEqual(key.has_private(),True) # assert_ - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey3bytes(self): - """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte string""" - key = RSA.importKey(b(self.rsaKeyPEM)) - self.assertEqual(key.has_private(),True) # assert_ - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey4unicode(self): - """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" - key = RSA.importKey(self.rsaPublicKeyPEM) - self.assertEqual(key.has_private(),False) # assertFalse - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey4bytes(self): - """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as byte string""" - key = RSA.importKey(b(self.rsaPublicKeyPEM)) - self.assertEqual(key.has_private(),False) # assertFalse - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey5(self): - """Verifies that the imported key is still a valid RSA pair""" - key = RSA.importKey(self.rsaKeyPEM) - idem = key._encrypt(key._decrypt(89)) - self.assertEqual(idem, 89) - - def testImportKey6(self): - """Verifies that the imported key is still a valid RSA pair""" - key = RSA.importKey(self.rsaKeyDER) - idem = key._encrypt(key._decrypt(65)) - self.assertEqual(idem, 65) - - def testImportKey7(self): - """Verify import of OpenSSH public key""" - key = RSA.importKey(self.rsaPublicKeyOpenSSH) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey8(self): - """Verify import of encrypted PrivateKeyInfo DER SEQUENCE""" - for t in self.rsaKeyEncryptedPEM: - key = RSA.importKey(t[1], t[0]) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey9(self): - """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE""" - key = RSA.importKey(self.rsaKeyDER8) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - 
self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey10(self): - """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with PEM""" - key = RSA.importKey(self.rsaKeyPEM8) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey11(self): - """Verify import of RSAPublicKey DER SEQUENCE""" - der = asn1.DerSequence([17, 3]).encode() - key = RSA.importKey(der) - self.assertEqual(key.n, 17) - self.assertEqual(key.e, 3) - - def testImportKey12(self): - """Verify import of RSAPublicKey DER SEQUENCE, encoded with PEM""" - der = asn1.DerSequence([17, 3]).encode() - pem = der2pem(der) - key = RSA.importKey(pem) - self.assertEqual(key.n, 17) - self.assertEqual(key.e, 3) - - def test_import_key_windows_cr_lf(self): - pem_cr_lf = "\r\n".join(self.rsaKeyPEM.splitlines()) - key = RSA.importKey(pem_cr_lf) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def test_import_empty(self): - self.assertRaises(ValueError, RSA.import_key, b"") - - ### - def testExportKey1(self): - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - derKey = key.export_key("DER") - self.assertEqual(derKey, self.rsaKeyDER) - - def testExportKey2(self): - key = RSA.construct([self.n, self.e]) - derKey = key.export_key("DER") - self.assertEqual(derKey, self.rsaPublicKeyDER) - - def testExportKey3(self): - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - pemKey = key.export_key("PEM") - self.assertEqual(pemKey, b(self.rsaKeyPEM)) - - def testExportKey4(self): - key = RSA.construct([self.n, self.e]) - pemKey = key.export_key("PEM") - self.assertEqual(pemKey, b(self.rsaPublicKeyPEM)) - - def testExportKey5(self): - key = RSA.construct([self.n, self.e]) - openssh_1 = key.export_key("OpenSSH").split() - openssh_2 = self.rsaPublicKeyOpenSSH.split() - self.assertEqual(openssh_1[0], openssh_2[0]) - self.assertEqual(openssh_1[1], openssh_2[1]) - - def testExportKey7(self): - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - derKey = key.export_key("DER", pkcs=8) - self.assertEqual(derKey, self.rsaKeyDER8) - - def testExportKey8(self): - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - pemKey = key.export_key("PEM", pkcs=8) - self.assertEqual(pemKey, b(self.rsaKeyPEM8)) - - def testExportKey9(self): - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - self.assertRaises(ValueError, key.export_key, "invalid-format") - - def testExportKey10(self): - # Export and re-import the encrypted key. It must match. - # PEM envelope, PKCS#1, old PEM encryption - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - outkey = key.export_key('PEM', 'test') - self.assertTrue(tostr(outkey).find('4,ENCRYPTED')!=-1) - self.assertTrue(tostr(outkey).find('BEGIN RSA PRIVATE KEY')!=-1) - inkey = RSA.importKey(outkey, 'test') - self.assertEqual(key.n, inkey.n) - self.assertEqual(key.e, inkey.e) - self.assertEqual(key.d, inkey.d) - - def testExportKey11(self): - # Export and re-import the encrypted key. It must match. 
- # PEM envelope, PKCS#1, old PEM encryption - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - outkey = key.export_key('PEM', 'test', pkcs=1) - self.assertTrue(tostr(outkey).find('4,ENCRYPTED')!=-1) - self.assertTrue(tostr(outkey).find('BEGIN RSA PRIVATE KEY')!=-1) - inkey = RSA.importKey(outkey, 'test') - self.assertEqual(key.n, inkey.n) - self.assertEqual(key.e, inkey.e) - self.assertEqual(key.d, inkey.d) - - def testExportKey12(self): - # Export and re-import the encrypted key. It must match. - # PEM envelope, PKCS#8, old PEM encryption - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - outkey = key.export_key('PEM', 'test', pkcs=8) - self.assertTrue(tostr(outkey).find('4,ENCRYPTED')!=-1) - self.assertTrue(tostr(outkey).find('BEGIN PRIVATE KEY')!=-1) - inkey = RSA.importKey(outkey, 'test') - self.assertEqual(key.n, inkey.n) - self.assertEqual(key.e, inkey.e) - self.assertEqual(key.d, inkey.d) - - def testExportKey13(self): - # Export and re-import the encrypted key. It must match. - # PEM envelope, PKCS#8, PKCS#8 encryption - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - outkey = key.export_key('PEM', 'test', pkcs=8, - protection='PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC') - self.assertTrue(tostr(outkey).find('4,ENCRYPTED')==-1) - self.assertTrue(tostr(outkey).find('BEGIN ENCRYPTED PRIVATE KEY')!=-1) - inkey = RSA.importKey(outkey, 'test') - self.assertEqual(key.n, inkey.n) - self.assertEqual(key.e, inkey.e) - self.assertEqual(key.d, inkey.d) - - def testExportKey14(self): - # Export and re-import the encrypted key. It must match. - # DER envelope, PKCS#8, PKCS#8 encryption - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - outkey = key.export_key('DER', 'test', pkcs=8) - inkey = RSA.importKey(outkey, 'test') - self.assertEqual(key.n, inkey.n) - self.assertEqual(key.e, inkey.e) - self.assertEqual(key.d, inkey.d) - - def testExportKey15(self): - # Verify that that error an condition is detected when trying to - # use a password with DER encoding and PKCS#1. - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - self.assertRaises(ValueError, key.export_key, 'DER', 'test', 1) - - def testExportKey16(self): - # Export and re-import the encrypted key. It must match. 
- # PEM envelope, PKCS#8, PKCS#8 encryption with parameters - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - outkey = key.export_key('PEM', 'test', pkcs=8, - protection='PBKDF2WithHMAC-SHA512AndAES256-CBC', - prot_params={'iteration_count':123} - ) - self.assertTrue(tostr(outkey).find('4,ENCRYPTED')==-1) - self.assertTrue(tostr(outkey).find('BEGIN ENCRYPTED PRIVATE KEY')!=-1) - - # Verify the iteration count - der = PEM.decode(tostr(outkey))[0] - seq1 = asn1.DerSequence().decode(der) - seq2 = asn1.DerSequence().decode(seq1[0]) - seq3 = asn1.DerSequence().decode(seq2[1]) - seq4 = asn1.DerSequence().decode(seq3[0]) - seq5 = asn1.DerSequence().decode(seq4[1]) - self.assertEqual(seq5[1], 123) - - inkey = RSA.importKey(outkey, 'test') - self.assertEqual(key.n, inkey.n) - self.assertEqual(key.e, inkey.e) - self.assertEqual(key.d, inkey.d) - - def test_import_key(self): - """Verify that import_key is an alias to importKey""" - key = RSA.import_key(self.rsaPublicKeyDER) - self.assertFalse(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def test_import_key_ba_mv(self): - """Verify that import_key can be used on bytearrays and memoryviews""" - key = RSA.import_key(bytearray(self.rsaPublicKeyDER)) - key = RSA.import_key(memoryview(self.rsaPublicKeyDER)) - - def test_exportKey(self): - key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - self.assertEqual(key.export_key(), key.exportKey()) - - -class ImportKeyFromX509Cert(unittest.TestCase): - - def test_x509v1(self): - - # Sample V1 certificate with a 1024 bit RSA key - x509_v1_cert = """ ------BEGIN CERTIFICATE----- -MIICOjCCAaMCAQEwDQYJKoZIhvcNAQEEBQAwfjENMAsGA1UEChMEQWNtZTELMAkG -A1UECxMCUkQxHDAaBgkqhkiG9w0BCQEWDXNwYW1AYWNtZS5vcmcxEzARBgNVBAcT -Ck1ldHJvcG9saXMxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzENMAsG -A1UEAxMEdGVzdDAeFw0xNDA3MTExOTU3MjRaFw0xNzA0MDYxOTU3MjRaME0xCzAJ -BgNVBAYTAlVTMREwDwYDVQQIEwhOZXcgWW9yazENMAsGA1UEChMEQWNtZTELMAkG -A1UECxMCUkQxDzANBgNVBAMTBmxhdHZpYTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw -gYkCgYEAyG+kytdRj3TFbRmHDYp3TXugVQ81chew0qeOxZWOz80IjtWpgdOaCvKW -NCuc8wUR9BWrEQW+39SaRMLiQfQtyFSQZijc3nsEBu/Lo4uWZ0W/FHDRVSvkJA/V -Ex5NL5ikI+wbUeCV5KajGNDalZ8F1pk32+CBs8h1xNx5DyxuEHUCAwEAATANBgkq -hkiG9w0BAQQFAAOBgQCVQF9Y//Q4Psy+umEM38pIlbZ2hxC5xNz/MbVPwuCkNcGn -KYNpQJP+JyVTsPpO8RLZsAQDzRueMI3S7fbbwTzAflN0z19wvblvu93xkaBytVok -9VBAH28olVhy9b1MMeg2WOt5sUEQaFNPnwwsyiY9+HsRpvpRnPSQF+kyYVsshQ== ------END CERTIFICATE----- - """.strip() - - # RSA public key as dumped by openssl - exponent = 65537 - modulus_str = """ -00:c8:6f:a4:ca:d7:51:8f:74:c5:6d:19:87:0d:8a: -77:4d:7b:a0:55:0f:35:72:17:b0:d2:a7:8e:c5:95: -8e:cf:cd:08:8e:d5:a9:81:d3:9a:0a:f2:96:34:2b: -9c:f3:05:11:f4:15:ab:11:05:be:df:d4:9a:44:c2: -e2:41:f4:2d:c8:54:90:66:28:dc:de:7b:04:06:ef: -cb:a3:8b:96:67:45:bf:14:70:d1:55:2b:e4:24:0f: -d5:13:1e:4d:2f:98:a4:23:ec:1b:51:e0:95:e4:a6: -a3:18:d0:da:95:9f:05:d6:99:37:db:e0:81:b3:c8: -75:c4:dc:79:0f:2c:6e:10:75 - """ - modulus = int(re.sub("[^0-9a-f]","", modulus_str), 16) - - key = RSA.importKey(x509_v1_cert) - self.assertEqual(key.e, exponent) - self.assertEqual(key.n, modulus) - self.assertFalse(key.has_private()) - - def test_x509v3(self): - - # Sample V3 certificate with a 1024 bit RSA key - x509_v3_cert = """ ------BEGIN CERTIFICATE----- -MIIEcjCCAlqgAwIBAgIBATANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEL -MAkGA1UECAwCTUQxEjAQBgNVBAcMCUJhbHRpbW9yZTEQMA4GA1UEAwwHVGVzdCBD -QTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0xNDA3MTIwOTM1 
-MTJaFw0xNzA0MDcwOTM1MTJaMEQxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJNRDES -MBAGA1UEBwwJQmFsdGltb3JlMRQwEgYDVQQDDAtUZXN0IFNlcnZlcjCBnzANBgkq -hkiG9w0BAQEFAAOBjQAwgYkCgYEA/S7GJV2OcFdyNMQ4K75KrYFtMEn3VnEFdPHa -jyS37XlMxSh0oS4GeTGVUCJInl5Cpsv8WQdh03FfeOdvzp5IZ46OcjeOPiWnmjgl -2G5j7e2bDH7RSchGV+OD6Fb1Agvuu2/9iy8fdf3rPQ/7eAddzKUrzwacVbnW+tg2 -QtSXKRcCAwEAAaOB1TCB0jAdBgNVHQ4EFgQU/WwCX7FfWMIPDFfJ+I8a2COG+l8w -HwYDVR0jBBgwFoAUa0hkif3RMaraiWtsOOZZlLu9wJwwCQYDVR0TBAIwADALBgNV -HQ8EBAMCBeAwSgYDVR0RBEMwQYILZXhhbXBsZS5jb22CD3d3dy5leGFtcGxlLmNv -bYIQbWFpbC5leGFtcGxlLmNvbYIPZnRwLmV4YW1wbGUuY29tMCwGCWCGSAGG+EIB -DQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsF -AAOCAgEAvO6xfdsGbnoK4My3eJthodTAjMjPwFVY133LH04QLcCv54TxKhtUg1fi -PgdjVe1HpTytPBfXy2bSZbXAN0abZCtw1rYrnn7o1g2pN8iypVq3zVn0iMTzQzxs -zEPO3bpR/UhNSf90PmCsS5rqZpAAnXSaAy1ClwHWk/0eG2pYkhE1m1ABVMN2lsAW -e9WxGk6IFqaI9O37NYQwmEypMs4DC+ECJEvbPFiqi3n0gbXCZJJ6omDA5xJldaYK -Oa7KR3s/qjBsu9UAiWpLBuFoSTHIF2aeRKRFmUdmzwo43eVPep65pY6eQ4AdL2RF -rqEuINbGlzI5oQyYhu71IwB+iPZXaZZPlwjLgOsuad/p2hOgDb5WxUi8FnDPursQ -ujfpIpmrOP/zpvvQWnwePI3lI+5n41kTBSbefXEdv6rXpHk3QRzB90uPxnXPdxSC -16ASA8bQT5an/1AgoE3k9CrcD2K0EmgaX0YI0HUhkyzbkg34EhpWJ6vvRUbRiNRo -9cIbt/ya9Y9u0Ja8GLXv6dwX0l0IdJMkL8KifXUFAVCujp1FBrr/gdmwQn8itANy -+qbnWSxmOvtaY0zcaFAcONuHva0h51/WqXOMO1eb8PhR4HIIYU8p1oBwQp7dSni8 -THDi1F+GG5PsymMDj5cWK42f+QzjVw5PrVmFqqrrEoMlx8DWh5Y= ------END CERTIFICATE----- -""".strip() - - # RSA public key as dumped by openssl - exponent = 65537 - modulus_str = """ -00:fd:2e:c6:25:5d:8e:70:57:72:34:c4:38:2b:be: -4a:ad:81:6d:30:49:f7:56:71:05:74:f1:da:8f:24: -b7:ed:79:4c:c5:28:74:a1:2e:06:79:31:95:50:22: -48:9e:5e:42:a6:cb:fc:59:07:61:d3:71:5f:78:e7: -6f:ce:9e:48:67:8e:8e:72:37:8e:3e:25:a7:9a:38: -25:d8:6e:63:ed:ed:9b:0c:7e:d1:49:c8:46:57:e3: -83:e8:56:f5:02:0b:ee:bb:6f:fd:8b:2f:1f:75:fd: -eb:3d:0f:fb:78:07:5d:cc:a5:2b:cf:06:9c:55:b9: -d6:fa:d8:36:42:d4:97:29:17 - """ - modulus = int(re.sub("[^0-9a-f]","", modulus_str), 16) - - key = RSA.importKey(x509_v3_cert) - self.assertEqual(key.e, exponent) - self.assertEqual(key.n, modulus) - self.assertFalse(key.has_private()) - - -class TestImport_2048(unittest.TestCase): - - def test_import_pss(self): - pub_key_file = load_file("rsa2048_pss_public.pem") - pub_key = RSA.import_key(pub_key_file) - - priv_key_file = load_file("rsa2048_pss_private.pem") - priv_key = RSA.import_key(priv_key_file) - - self.assertEqual(pub_key.n, priv_key.n) - - def test_import_openssh_public(self): - key_file_ref = load_file("rsa2048_private.pem") - key_file = load_file("rsa2048_public_openssh.txt") - - # Skip test if test vectors are not installed - if None in (key_file_ref, key_file): - return - - key_ref = RSA.import_key(key_file_ref).public_key() - key = RSA.import_key(key_file) - self.assertEqual(key_ref, key) - - def test_import_openssh_private_clear(self): - key_file = load_file("rsa2048_private_openssh.pem") - key_file_old = load_file("rsa2048_private_openssh_old.pem") - - # Skip test if test vectors are not installed - if None in (key_file_old, key_file): - return - - key = RSA.import_key(key_file) - key_old = RSA.import_key(key_file_old) - - self.assertEqual(key, key_old) - - def test_import_openssh_private_password(self): - key_file = load_file("rsa2048_private_openssh_pwd.pem") - key_file_old = load_file("rsa2048_private_openssh_pwd_old.pem") - - # Skip test if test vectors are not installed - if None in (key_file_old, key_file): - return - - key = RSA.import_key(key_file, b"password") - key_old = RSA.import_key(key_file_old) - self.assertEqual(key, 
key_old) - - def test_import_pkcs8_private(self): - key_file_ref = load_file("rsa2048_private.pem") - key_file = load_file("rsa2048_private_p8.der") - - # Skip test if test vectors are not installed - if None in (key_file_ref, key_file): - return - - key_ref = RSA.import_key(key_file_ref) - key = RSA.import_key(key_file, b'secret') - self.assertEqual(key_ref, key) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(ImportKeyTests) - tests += list_test_cases(ImportKeyFromX509Cert) - tests += list_test_cases(TestImport_2048) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__init__.py deleted file mode 100644 index 763ee9c..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/__init__.py: Self-test for random number generation modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for random number generators""" - -__revision__ = "$Id$" - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.Random import test_random; tests += test_random.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index cfc8d30..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__pycache__/test_random.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__pycache__/test_random.cpython-312.pyc deleted file mode 100644 index 62610d2..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/__pycache__/test_random.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/test_random.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Random/test_random.py deleted file mode 100644 index 30e9194..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Random/test_random.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_generic.py: Self-test for the Cryptodome.Random.new() function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Cryptodome.Random.new()""" - -import sys -import unittest -from Cryptodome.Util.py3compat import b - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Cryptodome.Random.new()""" - # Import the Random module and try to use it - from Cryptodome import Random - randobj = Random.new() - x = randobj.read(16) - y = randobj.read(16) - self.assertNotEqual(x, y) - z = Random.get_random_bytes(16) - self.assertNotEqual(x, z) - self.assertNotEqual(y, z) - # Test the Random.random module, which - # implements a subset of Python's random API - # Not implemented: - # seed(), getstate(), setstate(), jumpahead() - # random(), uniform(), triangular(), betavariate() - # expovariate(), gammavariate(), gauss(), - # longnormvariate(), normalvariate(), - # vonmisesvariate(), paretovariate() - # weibullvariate() - # WichmannHill(), whseed(), SystemRandom() - from Cryptodome.Random import random - x = random.getrandbits(16*8) - y = random.getrandbits(16*8) - self.assertNotEqual(x, y) - # Test randrange - if x>y: - start = y - stop = x - else: - start = x - stop = y - for step in range(1,10): - x = random.randrange(start,stop,step) - y = random.randrange(start,stop,step) - self.assertNotEqual(x, y) - self.assertEqual(start <= x < stop, True) - self.assertEqual(start <= y < stop, True) - self.assertEqual((x - start) % step, 0) - self.assertEqual((y - start) % step, 0) - for i in range(10): - self.assertEqual(random.randrange(1,2), 1) - self.assertRaises(ValueError, random.randrange, start, start) - self.assertRaises(ValueError, random.randrange, stop, start, step) - self.assertRaises(TypeError, random.randrange, start, stop, step, step) - self.assertRaises(TypeError, random.randrange, start, stop, "1") - self.assertRaises(TypeError, random.randrange, "1", stop, step) - self.assertRaises(TypeError, random.randrange, 1, "2", step) - self.assertRaises(ValueError, random.randrange, start, stop, 0) - # Test randint - x = random.randint(start,stop) - y = random.randint(start,stop) - self.assertNotEqual(x, y) - self.assertEqual(start <= x <= stop, True) - self.assertEqual(start <= y <= stop, True) - for i in range(10): - self.assertEqual(random.randint(1,1), 1) - self.assertRaises(ValueError, random.randint, stop, start) - self.assertRaises(TypeError, random.randint, start, stop, step) - self.assertRaises(TypeError, random.randint, "1", stop) - self.assertRaises(TypeError, random.randint, 1, "2") - # Test choice - seq = range(10000) - x = random.choice(seq) - y = random.choice(seq) - self.assertNotEqual(x, y) - self.assertEqual(x in seq, True) - self.assertEqual(y in seq, True) - for i in range(10): - self.assertEqual(random.choice((1,2,3)) in (1,2,3), True) - self.assertEqual(random.choice([1,2,3]) in [1,2,3], True) - if sys.version_info[0] == 3: - self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True) - self.assertEqual(1, random.choice([1])) - self.assertRaises(IndexError, random.choice, []) - self.assertRaises(TypeError, random.choice, 1) - # Test shuffle. Lacks random parameter to specify function. 
- # Make copies of seq - seq = range(500) - x = list(seq) - y = list(seq) - random.shuffle(x) - random.shuffle(y) - self.assertNotEqual(x, y) - self.assertEqual(len(seq), len(x)) - self.assertEqual(len(seq), len(y)) - for i in range(len(seq)): - self.assertEqual(x[i] in seq, True) - self.assertEqual(y[i] in seq, True) - self.assertEqual(seq[i] in x, True) - self.assertEqual(seq[i] in y, True) - z = [1] - random.shuffle(z) - self.assertEqual(z, [1]) - if sys.version_info[0] == 3: - z = bytearray(b('12')) - random.shuffle(z) - self.assertEqual(b('1') in z, True) - self.assertRaises(TypeError, random.shuffle, b('12')) - self.assertRaises(TypeError, random.shuffle, 1) - self.assertRaises(TypeError, random.shuffle, "11") - self.assertRaises(TypeError, random.shuffle, (1,2)) - # 2to3 wraps a list() around it, alas - but I want to shoot - # myself in the foot here! :D - # if sys.version_info[0] == 3: - # self.assertRaises(TypeError, random.shuffle, range(3)) - # Test sample - x = random.sample(seq, 20) - y = random.sample(seq, 20) - self.assertNotEqual(x, y) - for i in range(20): - self.assertEqual(x[i] in seq, True) - self.assertEqual(y[i] in seq, True) - z = random.sample([1], 1) - self.assertEqual(z, [1]) - z = random.sample((1,2,3), 1) - self.assertEqual(z[0] in (1,2,3), True) - z = random.sample("123", 1) - self.assertEqual(z[0] in "123", True) - z = random.sample(range(3), 1) - self.assertEqual(z[0] in range(3), True) - if sys.version_info[0] == 3: - z = random.sample(b("123"), 1) - self.assertEqual(z[0] in b("123"), True) - z = random.sample(bytearray(b("123")), 1) - self.assertEqual(z[0] in bytearray(b("123")), True) - self.assertRaises(TypeError, random.sample, 1) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__init__.py deleted file mode 100644 index 83cf0f3..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Signature/__init__.py: Self-test for signature modules -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test for signature modules""" - -import unittest -from . 
import test_pkcs1_15, test_pss, test_dss, test_eddsa - - -def get_tests(config={}): - tests = [] - tests += test_pkcs1_15.get_tests(config=config) - tests += test_pss.get_tests(config=config) - tests += test_dss.get_tests(config=config) - tests += test_eddsa.get_tests(config=config) - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 2f95601..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_dss.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_dss.cpython-312.pyc deleted file mode 100644 index 20c4d39..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_dss.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_eddsa.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_eddsa.cpython-312.pyc deleted file mode 100644 index f6e9962..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_eddsa.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_pkcs1_15.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_pkcs1_15.cpython-312.pyc deleted file mode 100644 index 7b1457c..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_pkcs1_15.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_pss.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_pss.cpython-312.pyc deleted file mode 100644 index 04ba461..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/__pycache__/test_pss.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_dss.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_dss.py deleted file mode 100644 index 156ee67..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_dss.py +++ /dev/null @@ -1,1369 +0,0 @@ -# -# SelfTest/Signature/test_dss.py: Self-test for DSS signatures -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import re -import unittest -from binascii import hexlify, unhexlify - -from Cryptodome.Util.py3compat import tobytes, bord, bchr - -from Cryptodome.Hash import (SHA1, SHA224, SHA256, SHA384, SHA512, - SHA3_224, SHA3_256, SHA3_384, SHA3_512) -from Cryptodome.Signature import DSS -from Cryptodome.PublicKey import DSA, ECC -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof -from Cryptodome.Util.number import bytes_to_long, long_to_bytes - - -def t2b(hexstring): - ws = hexstring.replace(" ", "").replace("\n", "") - return unhexlify(tobytes(ws)) - - -def t2l(hexstring): - ws = hexstring.replace(" ", "").replace("\n", "") - return int(ws, 16) - - -def load_hash_by_name(hash_name): - return __import__("Cryptodome.Hash." + hash_name, globals(), locals(), ["new"]) - - -class StrRNG: - - def __init__(self, randomness): - length = len(randomness) - self._idx = 0 - # Fix required to get the right K (see how randint() works!) - self._randomness = long_to_bytes(bytes_to_long(randomness) - 1, length) - - def __call__(self, n): - out = self._randomness[self._idx:self._idx + n] - self._idx += n - return out - - -class FIPS_DSA_Tests(unittest.TestCase): - - # 1st 1024 bit key from SigGen.txt - P = 0xa8f9cd201e5e35d892f85f80e4db2599a5676a3b1d4f190330ed3256b26d0e80a0e49a8fffaaad2a24f472d2573241d4d6d6c7480c80b4c67bb4479c15ada7ea8424d2502fa01472e760241713dab025ae1b02e1703a1435f62ddf4ee4c1b664066eb22f2e3bf28bb70a2a76e4fd5ebe2d1229681b5b06439ac9c7e9d8bde283 - Q = 0xf85f0f83ac4df7ea0cdf8f469bfeeaea14156495 - G = 0x2b3152ff6c62f14622b8f48e59f8af46883b38e79b8c74deeae9df131f8b856e3ad6c8455dab87cc0da8ac973417ce4f7878557d6cdf40b35b4a0ca3eb310c6a95d68ce284ad4e25ea28591611ee08b8444bd64b25f3f7c572410ddfb39cc728b9c936f85f419129869929cdb909a6a3a99bbe089216368171bd0ba81de4fe33 - X = 0xc53eae6d45323164c7d07af5715703744a63fc3a - Y = 0x313fd9ebca91574e1c2eebe1517c57e0c21b0209872140c5328761bbb2450b33f1b18b409ce9ab7c4cd8fda3391e8e34868357c199e16a6b2eba06d6749def791d79e95d3a4d09b24c392ad89dbf100995ae19c01062056bb14bce005e8731efde175f95b975089bdcdaea562b32786d96f5a31aedf75364008ad4fffebb970b - - key_pub = DSA.construct((Y, G, P, Q)) - key_priv = DSA.construct((Y, G, P, Q, X)) - - def shortDescription(self): - return "FIPS DSA Tests" - - def test_loopback(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv, 'fips-186-3') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub, 'fips-186-3') - verifier.verify(hashed_msg, signature) - - def test_negative_unapproved_hashes(self): - """Verify that unapproved hashes are rejected""" - - from Cryptodome.Hash import RIPEMD160 - - self.description = "Unapproved hash (RIPEMD160) test" - hash_obj = RIPEMD160.new() - signer = DSS.new(self.key_priv, 'fips-186-3') - self.assertRaises(ValueError, signer.sign, hash_obj) - self.assertRaises(ValueError, 
signer.verify, hash_obj, b"\x00" * 40) - - def test_negative_unknown_modes_encodings(self): - """Verify that unknown modes/encodings are rejected""" - - self.description = "Unknown mode test" - self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-0') - - self.description = "Unknown encoding test" - self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-3', 'xml') - - def test_asn1_encoding(self): - """Verify ASN.1 encoding""" - - self.description = "ASN.1 encoding test" - hash_obj = SHA1.new() - signer = DSS.new(self.key_priv, 'fips-186-3', 'der') - signature = signer.sign(hash_obj) - - # Verify that output looks like a DER SEQUENCE - self.assertEqual(bord(signature[0]), 48) - signer.verify(hash_obj, signature) - - # Verify that ASN.1 parsing fails as expected - signature = bchr(7) + signature[1:] - self.assertRaises(ValueError, signer.verify, hash_obj, signature) - - def test_sign_verify(self): - """Verify public/private method""" - - self.description = "can_sign() test" - signer = DSS.new(self.key_priv, 'fips-186-3') - self.assertTrue(signer.can_sign()) - - signer = DSS.new(self.key_pub, 'fips-186-3') - self.assertFalse(signer.can_sign()) - - try: - signer.sign(SHA256.new(b'xyz')) - except TypeError as e: - msg = str(e) - else: - msg = "" - self.assertTrue("Private key is needed" in msg) - - -class FIPS_DSA_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_verify = load_test_vectors(("Signature", "DSA"), - "FIPS_186_3_SigVer.rsp", - "Signature Verification 186-3", - {'result': lambda x: x}) or [] - -for idx, tv in enumerate(test_vectors_verify): - - if isinstance(tv, str): - res = re.match(r"\[mod = L=([0-9]+), N=([0-9]+), ([a-zA-Z0-9-]+)\]", tv) - assert(res) - hash_name = res.group(3).replace("-", "") - hash_module = load_hash_by_name(hash_name) - continue - - if hasattr(tv, "p"): - modulus = tv.p - generator = tv.g - suborder = tv.q - continue - - hash_obj = hash_module.new(tv.msg) - - comps = [bytes_to_long(x) for x in (tv.y, generator, modulus, suborder)] - key = DSA.construct(comps, False) # type: ignore - verifier = DSS.new(key, 'fips-186-3') - - def positive_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): - verifier.verify(hash_obj, signature) - - def negative_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): - self.assertRaises(ValueError, verifier.verify, hash_obj, signature) - - if tv.result == 'p': - setattr(FIPS_DSA_Tests_KAT, "test_verify_positive_%d" % idx, positive_test) - else: - setattr(FIPS_DSA_Tests_KAT, "test_verify_negative_%d" % idx, negative_test) - - -test_vectors_sign = load_test_vectors(("Signature", "DSA"), - "FIPS_186_3_SigGen.txt", - "Signature Creation 186-3", - {}) or [] - -for idx, tv in enumerate(test_vectors_sign): - - if isinstance(tv, str): - res = re.match(r"\[mod = L=([0-9]+), N=([0-9]+), ([a-zA-Z0-9-]+)\]", tv) - assert(res) - hash_name = res.group(3).replace("-", "") - hash_module = load_hash_by_name(hash_name) - continue - - if hasattr(tv, "p"): - modulus = tv.p - generator = tv.g - suborder = tv.q - continue - - hash_obj = hash_module.new(tv.msg) - comps_dsa = [bytes_to_long(x) for x in (tv.y, generator, modulus, suborder, tv.x)] - key = DSA.construct(comps_dsa, False) # type: ignore - signer = DSS.new(key, 'fips-186-3', randfunc=StrRNG(tv.k)) - - def new_test(self, signer=signer, hash_obj=hash_obj, signature=tv.r+tv.s): - self.assertEqual(signer.sign(hash_obj), signature) - setattr(FIPS_DSA_Tests_KAT, "test_sign_%d" % idx, new_test) - - -class 
FIPS_ECDSA_Tests(unittest.TestCase): - - key_priv = ECC.generate(curve="P-256") - key_pub = key_priv.public_key() - - def shortDescription(self): - return "FIPS ECDSA Tests" - - def test_loopback(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv, 'fips-186-3') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub, 'fips-186-3') - verifier.verify(hashed_msg, signature) - - def test_negative_unapproved_hashes(self): - """Verify that unapproved hashes are rejected""" - - from Cryptodome.Hash import SHA1 - - self.description = "Unapproved hash (SHA-1) test" - hash_obj = SHA1.new() - signer = DSS.new(self.key_priv, 'fips-186-3') - self.assertRaises(ValueError, signer.sign, hash_obj) - self.assertRaises(ValueError, signer.verify, hash_obj, b"\x00" * 40) - - def test_negative_eddsa_key(self): - key = ECC.generate(curve="ed25519") - self.assertRaises(ValueError, DSS.new, key, 'fips-186-3') - - def test_sign_verify(self): - """Verify public/private method""" - - self.description = "can_sign() test" - signer = DSS.new(self.key_priv, 'fips-186-3') - self.assertTrue(signer.can_sign()) - - signer = DSS.new(self.key_pub, 'fips-186-3') - self.assertFalse(signer.can_sign()) - self.assertRaises(TypeError, signer.sign, SHA256.new(b'xyz')) - - try: - signer.sign(SHA256.new(b'xyz')) - except TypeError as e: - msg = str(e) - else: - msg = "" - self.assertTrue("Private key is needed" in msg) - - def test_negative_unknown_modes_encodings(self): - """Verify that unknown modes/encodings are rejected""" - - self.description = "Unknown mode test" - self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-0') - - self.description = "Unknown encoding test" - self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-3', 'xml') - - def test_asn1_encoding(self): - """Verify ASN.1 encoding""" - - self.description = "ASN.1 encoding test" - hash_obj = SHA256.new() - signer = DSS.new(self.key_priv, 'fips-186-3', 'der') - signature = signer.sign(hash_obj) - - # Verify that output looks like a DER SEQUENCE - self.assertEqual(bord(signature[0]), 48) - signer.verify(hash_obj, signature) - - # Verify that ASN.1 parsing fails as expected - signature = bchr(7) + signature[1:] - self.assertRaises(ValueError, signer.verify, hash_obj, signature) - - -class FIPS_ECDSA_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_verify = load_test_vectors(("Signature", "ECDSA"), - "SigVer.rsp", - "ECDSA Signature Verification 186-3", - {'result': lambda x: x, - 'qx': lambda x: int(x, 16), - 'qy': lambda x: int(x, 16), - }) or [] -test_vectors_verify += load_test_vectors(("Signature", "ECDSA"), - "SigVer_TruncatedSHAs.rsp", - "ECDSA Signature Verification 186-3", - {'result': lambda x: x, - 'qx': lambda x: int(x, 16), - 'qy': lambda x: int(x, 16), - }) or [] - - -for idx, tv in enumerate(test_vectors_verify): - - if isinstance(tv, str): - res = re.match(r"\[(P-[0-9]+),(SHA-[0-9]+)\]", tv) - assert res - curve_name = res.group(1) - hash_name = res.group(2).replace("-", "") - if hash_name in ("SHA512224", "SHA512256"): - truncate = hash_name[-3:] - hash_name = hash_name[:-3] - else: - truncate = None - hash_module = load_hash_by_name(hash_name) - continue - - if truncate is None: - hash_obj = hash_module.new(tv.msg) - else: - hash_obj = hash_module.new(tv.msg, truncate=truncate) - ecc_key = ECC.construct(curve=curve_name, point_x=tv.qx, point_y=tv.qy) - verifier = DSS.new(ecc_key, 'fips-186-3') - - def positive_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): - 
verifier.verify(hash_obj, signature) - - def negative_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): - self.assertRaises(ValueError, verifier.verify, hash_obj, signature) - - if tv.result.startswith('p'): - setattr(FIPS_ECDSA_Tests_KAT, "test_verify_positive_%d" % idx, positive_test) - else: - setattr(FIPS_ECDSA_Tests_KAT, "test_verify_negative_%d" % idx, negative_test) - - -test_vectors_sign = load_test_vectors(("Signature", "ECDSA"), - "SigGen.txt", - "ECDSA Signature Verification 186-3", - {'d': lambda x: int(x, 16)}) or [] - -for idx, tv in enumerate(test_vectors_sign): - - if isinstance(tv, str): - res = re.match(r"\[(P-[0-9]+),(SHA-[0-9]+)\]", tv) - assert res - curve_name = res.group(1) - hash_name = res.group(2).replace("-", "") - hash_module = load_hash_by_name(hash_name) - continue - - hash_obj = hash_module.new(tv.msg) - ecc_key = ECC.construct(curve=curve_name, d=tv.d) - signer = DSS.new(ecc_key, 'fips-186-3', randfunc=StrRNG(tv.k)) - - def sign_test(self, signer=signer, hash_obj=hash_obj, signature=tv.r+tv.s): - self.assertEqual(signer.sign(hash_obj), signature) - setattr(FIPS_ECDSA_Tests_KAT, "test_sign_%d" % idx, sign_test) - - -class Det_DSA_Tests(unittest.TestCase): - """Tests from rfc6979""" - - # Each key is (p, q, g, x, y, desc) - keys = [ - ( - """ - 86F5CA03DCFEB225063FF830A0C769B9DD9D6153AD91D7CE27F787C43278B447 - E6533B86B18BED6E8A48B784A14C252C5BE0DBF60B86D6385BD2F12FB763ED88 - 73ABFD3F5BA2E0A8C0A59082EAC056935E529DAF7C610467899C77ADEDFC846C - 881870B7B19B2B58F9BE0521A17002E3BDD6B86685EE90B3D9A1B02B782B1779""", - "996F967F6C8E388D9E28D01E205FBA957A5698B1", - """ - 07B0F92546150B62514BB771E2A0C0CE387F03BDA6C56B505209FF25FD3C133D - 89BBCD97E904E09114D9A7DEFDEADFC9078EA544D2E401AEECC40BB9FBBF78FD - 87995A10A1C27CB7789B594BA7EFB5C4326A9FE59A070E136DB77175464ADCA4 - 17BE5DCE2F40D10A46A3A3943F26AB7FD9C0398FF8C76EE0A56826A8A88F1DBD""", - "411602CB19A6CCC34494D79D98EF1E7ED5AF25F7", - """ - 5DF5E01DED31D0297E274E1691C192FE5868FEF9E19A84776454B100CF16F653 - 92195A38B90523E2542EE61871C0440CB87C322FC4B4D2EC5E1E7EC766E1BE8D - 4CE935437DC11C3C8FD426338933EBFE739CB3465F4D3668C5E473508253B1E6 - 82F65CBDC4FAE93C2EA212390E54905A86E2223170B44EAA7DA5DD9FFCFB7F3B""", - "DSA1024" - ), - ( - """ - 9DB6FB5951B66BB6FE1E140F1D2CE5502374161FD6538DF1648218642F0B5C48 - C8F7A41AADFA187324B87674FA1822B00F1ECF8136943D7C55757264E5A1A44F - FE012E9936E00C1D3E9310B01C7D179805D3058B2A9F4BB6F9716BFE6117C6B5 - B3CC4D9BE341104AD4A80AD6C94E005F4B993E14F091EB51743BF33050C38DE2 - 35567E1B34C3D6A5C0CEAA1A0F368213C3D19843D0B4B09DCB9FC72D39C8DE41 - F1BF14D4BB4563CA28371621CAD3324B6A2D392145BEBFAC748805236F5CA2FE - 92B871CD8F9C36D3292B5509CA8CAA77A2ADFC7BFD77DDA6F71125A7456FEA15 - 3E433256A2261C6A06ED3693797E7995FAD5AABBCFBE3EDA2741E375404AE25B""", - "F2C3119374CE76C9356990B465374A17F23F9ED35089BD969F61C6DDE9998C1F", - """ - 5C7FF6B06F8F143FE8288433493E4769C4D988ACE5BE25A0E24809670716C613 - D7B0CEE6932F8FAA7C44D2CB24523DA53FBE4F6EC3595892D1AA58C4328A06C4 - 6A15662E7EAA703A1DECF8BBB2D05DBE2EB956C142A338661D10461C0D135472 - 085057F3494309FFA73C611F78B32ADBB5740C361C9F35BE90997DB2014E2EF5 - AA61782F52ABEB8BD6432C4DD097BC5423B285DAFB60DC364E8161F4A2A35ACA - 3A10B1C4D203CC76A470A33AFDCBDD92959859ABD8B56E1725252D78EAC66E71 - BA9AE3F1DD2487199874393CD4D832186800654760E1E34C09E4D155179F9EC0 - DC4473F996BDCE6EED1CABED8B6F116F7AD9CF505DF0F998E34AB27514B0FFE7""", - "69C7548C21D0DFEA6B9A51C9EAD4E27C33D3B3F180316E5BCAB92C933F0E4DBC", - """ - 
667098C654426C78D7F8201EAC6C203EF030D43605032C2F1FA937E5237DBD94 - 9F34A0A2564FE126DC8B715C5141802CE0979C8246463C40E6B6BDAA2513FA61 - 1728716C2E4FD53BC95B89E69949D96512E873B9C8F8DFD499CC312882561ADE - CB31F658E934C0C197F2C4D96B05CBAD67381E7B768891E4DA3843D24D94CDFB - 5126E9B8BF21E8358EE0E0A30EF13FD6A664C0DCE3731F7FB49A4845A4FD8254 - 687972A2D382599C9BAC4E0ED7998193078913032558134976410B89D2C171D1 - 23AC35FD977219597AA7D15C1A9A428E59194F75C721EBCBCFAE44696A499AFA - 74E04299F132026601638CB87AB79190D4A0986315DA8EEC6561C938996BEADF""", - "DSA2048" - ), - ] - - # This is a sequence of items: - # message, k, r, s, hash module - signatures = [ - ( - "sample", - "7BDB6B0FF756E1BB5D53583EF979082F9AD5BD5B", - "2E1A0C2562B2912CAAF89186FB0F42001585DA55", - "29EFB6B0AFF2D7A68EB70CA313022253B9A88DF5", - SHA1, - 'DSA1024' - ), - ( - "sample", - "562097C06782D60C3037BA7BE104774344687649", - "4BC3B686AEA70145856814A6F1BB53346F02101E", - "410697B92295D994D21EDD2F4ADA85566F6F94C1", - SHA224, - 'DSA1024' - ), - ( - "sample", - "519BA0546D0C39202A7D34D7DFA5E760B318BCFB", - "81F2F5850BE5BC123C43F71A3033E9384611C545", - "4CDD914B65EB6C66A8AAAD27299BEE6B035F5E89", - SHA256, - 'DSA1024' - ), - ( - "sample", - "95897CD7BBB944AA932DBC579C1C09EB6FCFC595", - "07F2108557EE0E3921BC1774F1CA9B410B4CE65A", - "54DF70456C86FAC10FAB47C1949AB83F2C6F7595", - SHA384, - 'DSA1024' - ), - ( - "sample", - "09ECE7CA27D0F5A4DD4E556C9DF1D21D28104F8B", - "16C3491F9B8C3FBBDD5E7A7B667057F0D8EE8E1B", - "02C36A127A7B89EDBB72E4FFBC71DABC7D4FC69C", - SHA512, - 'DSA1024' - ), - ( - "test", - "5C842DF4F9E344EE09F056838B42C7A17F4A6433", - "42AB2052FD43E123F0607F115052A67DCD9C5C77", - "183916B0230D45B9931491D4C6B0BD2FB4AAF088", - SHA1, - 'DSA1024' - ), - ( - "test", - "4598B8EFC1A53BC8AECD58D1ABBB0C0C71E67297", - "6868E9964E36C1689F6037F91F28D5F2C30610F2", - "49CEC3ACDC83018C5BD2674ECAAD35B8CD22940F", - SHA224, - 'DSA1024' - ), - ( - "test", - "5A67592E8128E03A417B0484410FB72C0B630E1A", - "22518C127299B0F6FDC9872B282B9E70D0790812", - "6837EC18F150D55DE95B5E29BE7AF5D01E4FE160", - SHA256, - 'DSA1024' - ), - ( - "test", - "220156B761F6CA5E6C9F1B9CF9C24BE25F98CD89", - "854CF929B58D73C3CBFDC421E8D5430CD6DB5E66", - "91D0E0F53E22F898D158380676A871A157CDA622", - SHA384, - 'DSA1024' - ), - ( - "test", - "65D2C2EEB175E370F28C75BFCDC028D22C7DBE9C", - "8EA47E475BA8AC6F2D821DA3BD212D11A3DEB9A0", - "7C670C7AD72B6C050C109E1790008097125433E8", - SHA512, - 'DSA1024' - ), - ( - "sample", - "888FA6F7738A41BDC9846466ABDB8174C0338250AE50CE955CA16230F9CBD53E", - "3A1B2DBD7489D6ED7E608FD036C83AF396E290DBD602408E8677DAABD6E7445A", - "D26FCBA19FA3E3058FFC02CA1596CDBB6E0D20CB37B06054F7E36DED0CDBBCCF", - SHA1, - 'DSA2048' - ), - ( - "sample", - "BC372967702082E1AA4FCE892209F71AE4AD25A6DFD869334E6F153BD0C4D806", - "DC9F4DEADA8D8FF588E98FED0AB690FFCE858DC8C79376450EB6B76C24537E2C", - "A65A9C3BC7BABE286B195D5DA68616DA8D47FA0097F36DD19F517327DC848CEC", - SHA224, - 'DSA2048' - ), - ( - "sample", - "8926A27C40484216F052F4427CFD5647338B7B3939BC6573AF4333569D597C52", - "EACE8BDBBE353C432A795D9EC556C6D021F7A03F42C36E9BC87E4AC7932CC809", - "7081E175455F9247B812B74583E9E94F9EA79BD640DC962533B0680793A38D53", - SHA256, - 'DSA2048' - ), - ( - "sample", - "C345D5AB3DA0A5BCB7EC8F8FB7A7E96069E03B206371EF7D83E39068EC564920", - "B2DA945E91858834FD9BF616EBAC151EDBC4B45D27D0DD4A7F6A22739F45C00B", - "19048B63D9FD6BCA1D9BAE3664E1BCB97F7276C306130969F63F38FA8319021B", - SHA384, - 'DSA2048' - ), - ( - "sample", - "5A12994431785485B3F5F067221517791B85A597B7A9436995C89ED0374668FC", - 
"2016ED092DC5FB669B8EFB3D1F31A91EECB199879BE0CF78F02BA062CB4C942E", - "D0C76F84B5F091E141572A639A4FB8C230807EEA7D55C8A154A224400AFF2351", - SHA512, - 'DSA2048' - ), - ( - "test", - "6EEA486F9D41A037B2C640BC5645694FF8FF4B98D066A25F76BE641CCB24BA4F", - "C18270A93CFC6063F57A4DFA86024F700D980E4CF4E2CB65A504397273D98EA0", - "414F22E5F31A8B6D33295C7539C1C1BA3A6160D7D68D50AC0D3A5BEAC2884FAA", - SHA1, - 'DSA2048' - ), - ( - "test", - "06BD4C05ED74719106223BE33F2D95DA6B3B541DAD7BFBD7AC508213B6DA6670", - "272ABA31572F6CC55E30BF616B7A265312018DD325BE031BE0CC82AA17870EA3", - "E9CC286A52CCE201586722D36D1E917EB96A4EBDB47932F9576AC645B3A60806", - SHA224, - 'DSA2048' - ), - ( - "test", - "1D6CE6DDA1C5D37307839CD03AB0A5CBB18E60D800937D67DFB4479AAC8DEAD7", - "8190012A1969F9957D56FCCAAD223186F423398D58EF5B3CEFD5A4146A4476F0", - "7452A53F7075D417B4B013B278D1BB8BBD21863F5E7B1CEE679CF2188E1AB19E", - SHA256, - 'DSA2048' - ), - ( - "test", - "206E61F73DBE1B2DC8BE736B22B079E9DACD974DB00EEBBC5B64CAD39CF9F91C", - "239E66DDBE8F8C230A3D071D601B6FFBDFB5901F94D444C6AF56F732BEB954BE", - "6BD737513D5E72FE85D1C750E0F73921FE299B945AAD1C802F15C26A43D34961", - SHA384, - 'DSA2048' - ), - ( - "test", - "AFF1651E4CD6036D57AA8B2A05CCF1A9D5A40166340ECBBDC55BE10B568AA0AA", - "89EC4BB1400ECCFF8E7D9AA515CD1DE7803F2DAFF09693EE7FD1353E90A68307", - "C9F0BDABCC0D880BB137A994CC7F3980CE91CC10FAF529FC46565B15CEA854E1", - SHA512, - 'DSA2048' - ) - ] - - def setUp(self): - # Convert DSA key components from hex strings to integers - # Each key is (p, q, g, x, y, desc) - - from collections import namedtuple - - TestKey = namedtuple('TestKey', 'p q g x y') - new_keys = {} - for k in self.keys: - tk = TestKey(*[t2l(y) for y in k[:-1]]) - new_keys[k[-1]] = tk - self.keys = new_keys - - # Convert signature encoding - TestSig = namedtuple('TestSig', 'message nonce result module test_key') - new_signatures = [] - for message, nonce, r, s, module, test_key in self.signatures: - tsig = TestSig( - tobytes(message), - t2l(nonce), - t2b(r) + t2b(s), - module, - self.keys[test_key] - ) - new_signatures.append(tsig) - self.signatures = new_signatures - - def test1(self): - q = 0x4000000000000000000020108A2E0CC0D99F8A5EF - x = 0x09A4D6792295A7F730FC3F2B49CBC0F62E862272F - p = 2 * q + 1 - y = pow(2, x, p) - key = DSA.construct([pow(y, 2, p), 2, p, q, x], False) - signer = DSS.new(key, 'deterministic-rfc6979') - - # Test _int2octets - self.assertEqual(hexlify(signer._int2octets(x)), - b'009a4d6792295a7f730fc3f2b49cbc0f62e862272f') - - # Test _bits2octets - h1 = SHA256.new(b"sample").digest() - self.assertEqual(hexlify(signer._bits2octets(h1)), - b'01795edf0d54db760f156d0dac04c0322b3a204224') - - def test2(self): - - for sig in self.signatures: - tk = sig.test_key - key = DSA.construct([tk.y, tk.g, tk.p, tk.q, tk.x], False) - signer = DSS.new(key, 'deterministic-rfc6979') - - hash_obj = sig.module.new(sig.message) - result = signer.sign(hash_obj) - self.assertEqual(sig.result, result) - - -class Det_ECDSA_Tests(unittest.TestCase): - - key_priv_p192 = ECC.construct(curve="P-192", d=0x6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4) - key_pub_p192 = key_priv_p192.public_key() - - key_priv_p224 = ECC.construct(curve="P-224", d=0xF220266E1105BFE3083E03EC7A3A654651F45E37167E88600BF257C1) - key_pub_p224 = key_priv_p224.public_key() - - key_priv_p256 = ECC.construct(curve="P-256", d=0xC9AFA9D845BA75166B5C215767B1D6934E50C3DB36E89B127B8A622B120F6721) - key_pub_p256 = key_priv_p256.public_key() - - key_priv_p384 = ECC.construct(curve="P-384", 
d=0x6B9D3DAD2E1B8C1C05B19875B6659F4DE23C3B667BF297BA9AA47740787137D896D5724E4C70A825F872C9EA60D2EDF5) - key_pub_p384 = key_priv_p384.public_key() - - key_priv_p521 = ECC.construct(curve="P-521", d=0x0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538) - key_pub_p521 = key_priv_p521.public_key() - - # This is a sequence of items: - # message, k, r, s, hash module - # taken from RFC6979 - signatures_p192_ = ( - ( - "sample", - "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", - "98C6BD12B23EAF5E2A2045132086BE3EB8EBD62ABF6698FF", - "57A22B07DEA9530F8DE9471B1DC6624472E8E2844BC25B64", - SHA1 - ), - ( - "sample", - "4381526B3FC1E7128F202E194505592F01D5FF4C5AF015D8", - "A1F00DAD97AEEC91C95585F36200C65F3C01812AA60378F5", - "E07EC1304C7C6C9DEBBE980B9692668F81D4DE7922A0F97A", - SHA224 - ), - ( - "sample", - "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", - "4B0B8CE98A92866A2820E20AA6B75B56382E0F9BFD5ECB55", - "CCDB006926EA9565CBADC840829D8C384E06DE1F1E381B85", - SHA256 - ), - ( - "sample", - "4730005C4FCB01834C063A7B6760096DBE284B8252EF4311", - "DA63BF0B9ABCF948FBB1E9167F136145F7A20426DCC287D5", - "C3AA2C960972BD7A2003A57E1C4C77F0578F8AE95E31EC5E", - SHA384 - ), - ( - "sample", - "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", - "4D60C5AB1996BD848343B31C00850205E2EA6922DAC2E4B8", - "3F6E837448F027A1BF4B34E796E32A811CBB4050908D8F67", - SHA512 - ), - ( - "test", - "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", - "0F2141A0EBBC44D2E1AF90A50EBCFCE5E197B3B7D4DE036D", - "EB18BC9E1F3D7387500CB99CF5F7C157070A8961E38700B7", - SHA1 - ), - ( - "test", - "F5DC805F76EF851800700CCE82E7B98D8911B7D510059FBE", - "6945A1C1D1B2206B8145548F633BB61CEF04891BAF26ED34", - "B7FB7FDFC339C0B9BD61A9F5A8EAF9BE58FC5CBA2CB15293", - SHA224 - ), - ( - "test", - "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", - "3A718BD8B4926C3B52EE6BBE67EF79B18CB6EB62B1AD97AE", - "5662E6848A4A19B1F1AE2F72ACD4B8BBE50F1EAC65D9124F", - SHA256 - ), - ( - "test", - "5AFEFB5D3393261B828DB6C91FBC68C230727B030C975693", - "B234B60B4DB75A733E19280A7A6034BD6B1EE88AF5332367", - "7994090B2D59BB782BE57E74A44C9A1C700413F8ABEFE77A", - SHA384 - ), - ( - "test", - "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", - "FE4F4AE86A58B6507946715934FE2D8FF9D95B6B098FE739", - "74CF5605C98FBA0E1EF34D4B5A1577A7DCF59457CAE52290", - SHA512 - ) - ) - - signatures_p224_ = ( - ( - "sample", - "7EEFADD91110D8DE6C2C470831387C50D3357F7F4D477054B8B426BC", - "22226F9D40A96E19C4A301CE5B74B115303C0F3A4FD30FC257FB57AC", - "66D1CDD83E3AF75605DD6E2FEFF196D30AA7ED7A2EDF7AF475403D69", - SHA1 - ), - ( - "sample", - "C1D1F2F10881088301880506805FEB4825FE09ACB6816C36991AA06D", - "1CDFE6662DDE1E4A1EC4CDEDF6A1F5A2FB7FBD9145C12113E6ABFD3E", - "A6694FD7718A21053F225D3F46197CA699D45006C06F871808F43EBC", - SHA224 - ), - ( - "sample", - "AD3029E0278F80643DE33917CE6908C70A8FF50A411F06E41DEDFCDC", - "61AA3DA010E8E8406C656BC477A7A7189895E7E840CDFE8FF42307BA", - "BC814050DAB5D23770879494F9E0A680DC1AF7161991BDE692B10101", - SHA256 - ), - ( - "sample", - "52B40F5A9D3D13040F494E83D3906C6079F29981035C7BD51E5CAC40", - "0B115E5E36F0F9EC81F1325A5952878D745E19D7BB3EABFABA77E953", - "830F34CCDFE826CCFDC81EB4129772E20E122348A2BBD889A1B1AF1D", - SHA384 - ), - ( - "sample", - "9DB103FFEDEDF9CFDBA05184F925400C1653B8501BAB89CEA0FBEC14", - "074BD1D979D5F32BF958DDC61E4FB4872ADCAFEB2256497CDAC30397", - "A4CECA196C3D5A1FF31027B33185DC8EE43F288B21AB342E5D8EB084", - SHA512 - ), - ( - "test", - 
"2519178F82C3F0E4F87ED5883A4E114E5B7A6E374043D8EFD329C253", - "DEAA646EC2AF2EA8AD53ED66B2E2DDAA49A12EFD8356561451F3E21C", - "95987796F6CF2062AB8135271DE56AE55366C045F6D9593F53787BD2", - SHA1 - ), - ( - "test", - "DF8B38D40DCA3E077D0AC520BF56B6D565134D9B5F2EAE0D34900524", - "C441CE8E261DED634E4CF84910E4C5D1D22C5CF3B732BB204DBEF019", - "902F42847A63BDC5F6046ADA114953120F99442D76510150F372A3F4", - SHA224 - ), - ( - "test", - "FF86F57924DA248D6E44E8154EB69F0AE2AEBAEE9931D0B5A969F904", - "AD04DDE87B84747A243A631EA47A1BA6D1FAA059149AD2440DE6FBA6", - "178D49B1AE90E3D8B629BE3DB5683915F4E8C99FDF6E666CF37ADCFD", - SHA256 - ), - ( - "test", - "7046742B839478C1B5BD31DB2E862AD868E1A45C863585B5F22BDC2D", - "389B92682E399B26518A95506B52C03BC9379A9DADF3391A21FB0EA4", - "414A718ED3249FF6DBC5B50C27F71F01F070944DA22AB1F78F559AAB", - SHA384 - ), - ( - "test", - "E39C2AA4EA6BE2306C72126D40ED77BF9739BB4D6EF2BBB1DCB6169D", - "049F050477C5ADD858CAC56208394B5A55BAEBBE887FDF765047C17C", - "077EB13E7005929CEFA3CD0403C7CDCC077ADF4E44F3C41B2F60ECFF", - SHA512 - ) - ) - - signatures_p256_ = ( - ( - "sample", - "882905F1227FD620FBF2ABF21244F0BA83D0DC3A9103DBBEE43A1FB858109DB4", - "61340C88C3AAEBEB4F6D667F672CA9759A6CCAA9FA8811313039EE4A35471D32", - "6D7F147DAC089441BB2E2FE8F7A3FA264B9C475098FDCF6E00D7C996E1B8B7EB", - SHA1 - ), - ( - "sample", - "103F90EE9DC52E5E7FB5132B7033C63066D194321491862059967C715985D473", - "53B2FFF5D1752B2C689DF257C04C40A587FABABB3F6FC2702F1343AF7CA9AA3F", - "B9AFB64FDC03DC1A131C7D2386D11E349F070AA432A4ACC918BEA988BF75C74C", - SHA224 - ), - ( - "sample", - "A6E3C57DD01ABE90086538398355DD4C3B17AA873382B0F24D6129493D8AAD60", - "EFD48B2AACB6A8FD1140DD9CD45E81D69D2C877B56AAF991C34D0EA84EAF3716", - "F7CB1C942D657C41D436C7A1B6E29F65F3E900DBB9AFF4064DC4AB2F843ACDA8", - SHA256 - ), - ( - "sample", - "09F634B188CEFD98E7EC88B1AA9852D734D0BC272F7D2A47DECC6EBEB375AAD4", - "0EAFEA039B20E9B42309FB1D89E213057CBF973DC0CFC8F129EDDDC800EF7719", - "4861F0491E6998B9455193E34E7B0D284DDD7149A74B95B9261F13ABDE940954", - SHA384 - ), - ( - "sample", - "5FA81C63109BADB88C1F367B47DA606DA28CAD69AA22C4FE6AD7DF73A7173AA5", - "8496A60B5E9B47C825488827E0495B0E3FA109EC4568FD3F8D1097678EB97F00", - "2362AB1ADBE2B8ADF9CB9EDAB740EA6049C028114F2460F96554F61FAE3302FE", - SHA512 - ), - ( - "test", - "8C9520267C55D6B980DF741E56B4ADEE114D84FBFA2E62137954164028632A2E", - "0CBCC86FD6ABD1D99E703E1EC50069EE5C0B4BA4B9AC60E409E8EC5910D81A89", - "01B9D7B73DFAA60D5651EC4591A0136F87653E0FD780C3B1BC872FFDEAE479B1", - SHA1 - ), - ( - "test", - "669F4426F2688B8BE0DB3A6BD1989BDAEFFF84B649EEB84F3DD26080F667FAA7", - "C37EDB6F0AE79D47C3C27E962FA269BB4F441770357E114EE511F662EC34A692", - "C820053A05791E521FCAAD6042D40AEA1D6B1A540138558F47D0719800E18F2D", - SHA224 - ), - ( - "test", - "D16B6AE827F17175E040871A1C7EC3500192C4C92677336EC2537ACAEE0008E0", - "F1ABB023518351CD71D881567B1EA663ED3EFCF6C5132B354F28D3B0B7D38367", - "019F4113742A2B14BD25926B49C649155F267E60D3814B4C0CC84250E46F0083", - SHA256 - ), - ( - "test", - "16AEFFA357260B04B1DD199693960740066C1A8F3E8EDD79070AA914D361B3B8", - "83910E8B48BB0C74244EBDF7F07A1C5413D61472BD941EF3920E623FBCCEBEB6", - "8DDBEC54CF8CD5874883841D712142A56A8D0F218F5003CB0296B6B509619F2C", - SHA384 - ), - ( - "test", - "6915D11632ACA3C40D5D51C08DAF9C555933819548784480E93499000D9F0B7F", - "461D93F31B6540894788FD206C07CFA0CC35F46FA3C91816FFF1040AD1581A04", - "39AF9F15DE0DB8D97E72719C74820D304CE5226E32DEDAE67519E840D1194E55", - SHA512 - ) - ) - - signatures_p384_ = ( - ( - "sample", - 
"4471EF7518BB2C7C20F62EAE1C387AD0C5E8E470995DB4ACF694466E6AB096630F29E5938D25106C3C340045A2DB01A7", - "EC748D839243D6FBEF4FC5C4859A7DFFD7F3ABDDF72014540C16D73309834FA37B9BA002899F6FDA3A4A9386790D4EB2", - "A3BCFA947BEEF4732BF247AC17F71676CB31A847B9FF0CBC9C9ED4C1A5B3FACF26F49CA031D4857570CCB5CA4424A443", - SHA1 - ), - ( - "sample", - "A4E4D2F0E729EB786B31FC20AD5D849E304450E0AE8E3E341134A5C1AFA03CAB8083EE4E3C45B06A5899EA56C51B5879", - "42356E76B55A6D9B4631C865445DBE54E056D3B3431766D0509244793C3F9366450F76EE3DE43F5A125333A6BE060122", - "9DA0C81787064021E78DF658F2FBB0B042BF304665DB721F077A4298B095E4834C082C03D83028EFBF93A3C23940CA8D", - SHA224 - ), - ( - "sample", - "180AE9F9AEC5438A44BC159A1FCB277C7BE54FA20E7CF404B490650A8ACC414E375572342863C899F9F2EDF9747A9B60", - "21B13D1E013C7FA1392D03C5F99AF8B30C570C6F98D4EA8E354B63A21D3DAA33BDE1E888E63355D92FA2B3C36D8FB2CD", - "F3AA443FB107745BF4BD77CB3891674632068A10CA67E3D45DB2266FA7D1FEEBEFDC63ECCD1AC42EC0CB8668A4FA0AB0", - SHA256 - ), - ( - "sample", - "94ED910D1A099DAD3254E9242AE85ABDE4BA15168EAF0CA87A555FD56D10FBCA2907E3E83BA95368623B8C4686915CF9", - "94EDBB92A5ECB8AAD4736E56C691916B3F88140666CE9FA73D64C4EA95AD133C81A648152E44ACF96E36DD1E80FABE46", - "99EF4AEB15F178CEA1FE40DB2603138F130E740A19624526203B6351D0A3A94FA329C145786E679E7B82C71A38628AC8", - SHA384 - ), - ( - "sample", - "92FC3C7183A883E24216D1141F1A8976C5B0DD797DFA597E3D7B32198BD35331A4E966532593A52980D0E3AAA5E10EC3", - "ED0959D5880AB2D869AE7F6C2915C6D60F96507F9CB3E047C0046861DA4A799CFE30F35CC900056D7C99CD7882433709", - "512C8CCEEE3890A84058CE1E22DBC2198F42323CE8ACA9135329F03C068E5112DC7CC3EF3446DEFCEB01A45C2667FDD5", - SHA512 - ), - ( - "test", - "66CC2C8F4D303FC962E5FF6A27BD79F84EC812DDAE58CF5243B64A4AD8094D47EC3727F3A3C186C15054492E30698497", - "4BC35D3A50EF4E30576F58CD96CE6BF638025EE624004A1F7789A8B8E43D0678ACD9D29876DAF46638645F7F404B11C7", - "D5A6326C494ED3FF614703878961C0FDE7B2C278F9A65FD8C4B7186201A2991695BA1C84541327E966FA7B50F7382282", - SHA1 - ), - ( - "test", - "18FA39DB95AA5F561F30FA3591DC59C0FA3653A80DAFFA0B48D1A4C6DFCBFF6E3D33BE4DC5EB8886A8ECD093F2935726", - "E8C9D0B6EA72A0E7837FEA1D14A1A9557F29FAA45D3E7EE888FC5BF954B5E62464A9A817C47FF78B8C11066B24080E72", - "07041D4A7A0379AC7232FF72E6F77B6DDB8F09B16CCE0EC3286B2BD43FA8C6141C53EA5ABEF0D8231077A04540A96B66", - SHA224 - ), - ( - "test", - "0CFAC37587532347DC3389FDC98286BBA8C73807285B184C83E62E26C401C0FAA48DD070BA79921A3457ABFF2D630AD7", - "6D6DEFAC9AB64DABAFE36C6BF510352A4CC27001263638E5B16D9BB51D451559F918EEDAF2293BE5B475CC8F0188636B", - "2D46F3BECBCC523D5F1A1256BF0C9B024D879BA9E838144C8BA6BAEB4B53B47D51AB373F9845C0514EEFB14024787265", - SHA256 - ), - ( - "test", - "015EE46A5BF88773ED9123A5AB0807962D193719503C527B031B4C2D225092ADA71F4A459BC0DA98ADB95837DB8312EA", - "8203B63D3C853E8D77227FB377BCF7B7B772E97892A80F36AB775D509D7A5FEB0542A7F0812998DA8F1DD3CA3CF023DB", - "DDD0760448D42D8A43AF45AF836FCE4DE8BE06B485E9B61B827C2F13173923E06A739F040649A667BF3B828246BAA5A5", - SHA384 - ), - ( - "test", - "3780C4F67CB15518B6ACAE34C9F83568D2E12E47DEAB6C50A4E4EE5319D1E8CE0E2CC8A136036DC4B9C00E6888F66B6C", - "A0D5D090C9980FAF3C2CE57B7AE951D31977DD11C775D314AF55F76C676447D06FB6495CD21B4B6E340FC236584FB277", - "976984E59B4C77B0E8E4460DCA3D9F20E07B9BB1F63BEEFAF576F6B2E8B224634A2092CD3792E0159AD9CEE37659C736", - SHA512 - ), - ) - - signatures_p521_ = ( - ( - "sample", - "0089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", - 
"00343B6EC45728975EA5CBA6659BBB6062A5FF89EEA58BE3C80B619F322C87910FE092F7D45BB0F8EEE01ED3F20BABEC079D202AE677B243AB40B5431D497C55D75D", - "00E7B0E675A9B24413D448B8CC119D2BF7B2D2DF032741C096634D6D65D0DBE3D5694625FB9E8104D3B842C1B0E2D0B98BEA19341E8676AEF66AE4EBA3D5475D5D16", - SHA1 - ), - ( - "sample", - "0121415EC2CD7726330A61F7F3FA5DE14BE9436019C4DB8CB4041F3B54CF31BE0493EE3F427FB906393D895A19C9523F3A1D54BB8702BD4AA9C99DAB2597B92113F3", - "01776331CFCDF927D666E032E00CF776187BC9FDD8E69D0DABB4109FFE1B5E2A30715F4CC923A4A5E94D2503E9ACFED92857B7F31D7152E0F8C00C15FF3D87E2ED2E", - "0050CB5265417FE2320BBB5A122B8E1A32BD699089851128E360E620A30C7E17BA41A666AF126CE100E5799B153B60528D5300D08489CA9178FB610A2006C254B41F", - SHA224 - ), - ( - "sample", - "00EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", - "01511BB4D675114FE266FC4372B87682BAECC01D3CC62CF2303C92B3526012659D16876E25C7C1E57648F23B73564D67F61C6F14D527D54972810421E7D87589E1A7", - "004A171143A83163D6DF460AAF61522695F207A58B95C0644D87E52AA1A347916E4F7A72930B1BC06DBE22CE3F58264AFD23704CBB63B29B931F7DE6C9D949A7ECFC", - SHA256 - ), - ( - "sample", - "01546A108BC23A15D6F21872F7DED661FA8431DDBD922D0DCDB77CC878C8553FFAD064C95A920A750AC9137E527390D2D92F153E66196966EA554D9ADFCB109C4211", - "01EA842A0E17D2DE4F92C15315C63DDF72685C18195C2BB95E572B9C5136CA4B4B576AD712A52BE9730627D16054BA40CC0B8D3FF035B12AE75168397F5D50C67451", - "01F21A3CEE066E1961025FB048BD5FE2B7924D0CD797BABE0A83B66F1E35EEAF5FDE143FA85DC394A7DEE766523393784484BDF3E00114A1C857CDE1AA203DB65D61", - SHA384 - ), - ( - "sample", - "01DAE2EA071F8110DC26882D4D5EAE0621A3256FC8847FB9022E2B7D28E6F10198B1574FDD03A9053C08A1854A168AA5A57470EC97DD5CE090124EF52A2F7ECBFFD3", - "00C328FAFCBD79DD77850370C46325D987CB525569FB63C5D3BC53950E6D4C5F174E25A1EE9017B5D450606ADD152B534931D7D4E8455CC91F9B15BF05EC36E377FA", - "00617CCE7CF5064806C467F678D3B4080D6F1CC50AF26CA209417308281B68AF282623EAA63E5B5C0723D8B8C37FF0777B1A20F8CCB1DCCC43997F1EE0E44DA4A67A", - SHA512 - ), - ( - "test", - "00BB9F2BF4FE1038CCF4DABD7139A56F6FD8BB1386561BD3C6A4FC818B20DF5DDBA80795A947107A1AB9D12DAA615B1ADE4F7A9DC05E8E6311150F47F5C57CE8B222", - "013BAD9F29ABE20DE37EBEB823C252CA0F63361284015A3BF430A46AAA80B87B0693F0694BD88AFE4E661FC33B094CD3B7963BED5A727ED8BD6A3A202ABE009D0367", - "01E9BB81FF7944CA409AD138DBBEE228E1AFCC0C890FC78EC8604639CB0DBDC90F717A99EAD9D272855D00162EE9527567DD6A92CBD629805C0445282BBC916797FF", - SHA1 - ), - ( - "test", - "0040D09FCF3C8A5F62CF4FB223CBBB2B9937F6B0577C27020A99602C25A01136987E452988781484EDBBCF1C47E554E7FC901BC3085E5206D9F619CFF07E73D6F706", - "01C7ED902E123E6815546065A2C4AF977B22AA8EADDB68B2C1110E7EA44D42086BFE4A34B67DDC0E17E96536E358219B23A706C6A6E16BA77B65E1C595D43CAE17FB", - "0177336676304FCB343CE028B38E7B4FBA76C1C1B277DA18CAD2A8478B2A9A9F5BEC0F3BA04F35DB3E4263569EC6AADE8C92746E4C82F8299AE1B8F1739F8FD519A4", - SHA224 - ), - ( - "test", - "001DE74955EFAABC4C4F17F8E84D881D1310B5392D7700275F82F145C61E843841AF09035BF7A6210F5A431A6A9E81C9323354A9E69135D44EBD2FCAA7731B909258", - "000E871C4A14F993C6C7369501900C4BC1E9C7B0B4BA44E04868B30B41D8071042EB28C4C250411D0CE08CD197E4188EA4876F279F90B3D8D74A3C76E6F1E4656AA8", - "00CD52DBAA33B063C3A6CD8058A1FB0A46A4754B034FCC644766CA14DA8CA5CA9FDE00E88C1AD60CCBA759025299079D7A427EC3CC5B619BFBC828E7769BCD694E86", - SHA256 - ), - ( - "test", - "01F1FC4A349A7DA9A9E116BFDD055DC08E78252FF8E23AC276AC88B1770AE0B5DCEB1ED14A4916B769A523CE1E90BA22846AF11DF8B300C38818F713DADD85DE0C88", - 
"014BEE21A18B6D8B3C93FAB08D43E739707953244FDBE924FA926D76669E7AC8C89DF62ED8975C2D8397A65A49DCC09F6B0AC62272741924D479354D74FF6075578C", - "0133330865C067A0EAF72362A65E2D7BC4E461E8C8995C3B6226A21BD1AA78F0ED94FE536A0DCA35534F0CD1510C41525D163FE9D74D134881E35141ED5E8E95B979", - SHA384 - ), - ( - "test", - "016200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", - "013E99020ABF5CEE7525D16B69B229652AB6BDF2AFFCAEF38773B4B7D08725F10CDB93482FDCC54EDCEE91ECA4166B2A7C6265EF0CE2BD7051B7CEF945BABD47EE6D", - "01FBD0013C674AA79CB39849527916CE301C66EA7CE8B80682786AD60F98F7E78A19CA69EFF5C57400E3B3A0AD66CE0978214D13BAF4E9AC60752F7B155E2DE4DCE3", - SHA512 - ), - ) - - signatures_p192 = [] - for a, b, c, d, e in signatures_p192_: - new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) - signatures_p192.append(new_tv) - - signatures_p224 = [] - for a, b, c, d, e in signatures_p224_: - new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) - signatures_p224.append(new_tv) - - signatures_p256 = [] - for a, b, c, d, e in signatures_p256_: - new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) - signatures_p256.append(new_tv) - - signatures_p384 = [] - for a, b, c, d, e in signatures_p384_: - new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) - signatures_p384.append(new_tv) - - signatures_p521 = [] - for a, b, c, d, e in signatures_p521_: - new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) - signatures_p521.append(new_tv) - - def shortDescription(self): - return "Deterministic ECDSA Tests" - - def test_loopback_p192(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv_p192, 'deterministic-rfc6979') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub_p192, 'deterministic-rfc6979') - verifier.verify(hashed_msg, signature) - - def test_loopback_p224(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv_p224, 'deterministic-rfc6979') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub_p224, 'deterministic-rfc6979') - verifier.verify(hashed_msg, signature) - - def test_loopback_p256(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv_p256, 'deterministic-rfc6979') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub_p256, 'deterministic-rfc6979') - verifier.verify(hashed_msg, signature) - - def test_loopback_p384(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv_p384, 'deterministic-rfc6979') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub_p384, 'deterministic-rfc6979') - verifier.verify(hashed_msg, signature) - - def test_loopback_p521(self): - hashed_msg = SHA512.new(b"test") - signer = DSS.new(self.key_priv_p521, 'deterministic-rfc6979') - signature = signer.sign(hashed_msg) - - verifier = DSS.new(self.key_pub_p521, 'deterministic-rfc6979') - verifier.verify(hashed_msg, signature) - - def test_data_rfc6979_p192(self): - signer = DSS.new(self.key_priv_p192, 'deterministic-rfc6979') - for message, k, r, s, module in self.signatures_p192: - hash_obj = module.new(message) - result = signer.sign(hash_obj) - self.assertEqual(r + s, result) - - def test_data_rfc6979_p224(self): - signer = DSS.new(self.key_priv_p224, 'deterministic-rfc6979') - for message, k, r, s, module in self.signatures_p224: - hash_obj = module.new(message) - result = signer.sign(hash_obj) - self.assertEqual(r + s, result) 
- - def test_data_rfc6979_p256(self): - signer = DSS.new(self.key_priv_p256, 'deterministic-rfc6979') - for message, k, r, s, module in self.signatures_p256: - hash_obj = module.new(message) - result = signer.sign(hash_obj) - self.assertEqual(r + s, result) - - def test_data_rfc6979_p384(self): - signer = DSS.new(self.key_priv_p384, 'deterministic-rfc6979') - for message, k, r, s, module in self.signatures_p384: - hash_obj = module.new(message) - result = signer.sign(hash_obj) - self.assertEqual(r + s, result) - - def test_data_rfc6979_p521(self): - signer = DSS.new(self.key_priv_p521, 'deterministic-rfc6979') - for message, k, r, s, module in self.signatures_p521: - hash_obj = module.new(message) - result = signer.sign(hash_obj) - self.assertEqual(r + s, result) - - -def get_hash_module(hash_name): - if hash_name == "SHA-512": - hash_module = SHA512 - elif hash_name == "SHA-512/224": - hash_module = SHA512.new(truncate="224") - elif hash_name == "SHA-512/256": - hash_module = SHA512.new(truncate="256") - elif hash_name == "SHA-384": - hash_module = SHA384 - elif hash_name == "SHA-256": - hash_module = SHA256 - elif hash_name == "SHA-224": - hash_module = SHA224 - elif hash_name == "SHA-1": - hash_module = SHA1 - elif hash_name == "SHA3-224": - hash_module = SHA3_224 - elif hash_name == "SHA3-256": - hash_module = SHA3_256 - elif hash_name == "SHA3-384": - hash_module = SHA3_384 - elif hash_name == "SHA3-512": - hash_module = SHA3_512 - else: - raise ValueError("Unknown hash algorithm: " + hash_name) - return hash_module - - -class TestVectorsDSAWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings, slow_tests): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._slow_tests = slow_tests - self._id = "None" - self.tv = [] - - def setUp(self): - - def filter_dsa(group): - return DSA.import_key(group['keyPem']) - - def filter_sha(group): - return get_hash_module(group['sha']) - - def filter_type(group): - sig_type = group['type'] - if sig_type != 'DsaVerify': - raise ValueError("Unknown signature type " + sig_type) - return sig_type - - result = load_test_vectors_wycheproof(("Signature", "wycheproof"), - "dsa_test.json", - "Wycheproof DSA signature", - group_tag={'key': filter_dsa, - 'hash_module': filter_sha, - 'sig_type': filter_type}) - self.tv += result - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_verify(self, tv): - self._id = "Wycheproof DSA Test #" + str(tv.id) - - hashed_msg = tv.hash_module.new(tv.msg) - signer = DSS.new(tv.key, 'fips-186-3', encoding='der') - try: - signature = signer.verify(hashed_msg, tv.sig) - except ValueError as e: - if tv.warning: - return - assert not tv.valid - else: - assert tv.valid - self.warn(tv) - - def runTest(self): - for tv in self.tv: - self.test_verify(tv) - - -class TestVectorsECDSAWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings, slow_tests): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._slow_tests = slow_tests - self._id = "None" - - def add_tests(self, filename): - - def filter_ecc(group): - # These are the only curves we accept to skip - if group['key']['curve'] in ('secp224k1', 'secp256k1', - 'brainpoolP224r1', 'brainpoolP224t1', - 'brainpoolP256r1', 'brainpoolP256t1', - 'brainpoolP320r1', 'brainpoolP320t1', - 'brainpoolP384r1', 
'brainpoolP384t1', - 'brainpoolP512r1', 'brainpoolP512t1', - ): - return None - return ECC.import_key(group['keyPem']) - - def filter_sha(group): - return get_hash_module(group['sha']) - - def filter_encoding(group): - encoding_name = group['type'] - if encoding_name == "EcdsaVerify": - return "der" - elif encoding_name == "EcdsaP1363Verify": - return "binary" - else: - raise ValueError("Unknown signature type " + encoding_name) - - result = load_test_vectors_wycheproof(("Signature", "wycheproof"), - filename, - "Wycheproof ECDSA signature (%s)" % filename, - group_tag={'key': filter_ecc, - 'hash_module': filter_sha, - 'encoding': filter_encoding, - }) - self.tv += result - - def setUp(self): - self.tv = [] - self.add_tests("ecdsa_secp224r1_sha224_p1363_test.json") - self.add_tests("ecdsa_secp224r1_sha224_test.json") - if self._slow_tests: - self.add_tests("ecdsa_secp224r1_sha256_p1363_test.json") - self.add_tests("ecdsa_secp224r1_sha256_test.json") - self.add_tests("ecdsa_secp224r1_sha3_224_test.json") - self.add_tests("ecdsa_secp224r1_sha3_256_test.json") - self.add_tests("ecdsa_secp224r1_sha3_512_test.json") - self.add_tests("ecdsa_secp224r1_sha512_p1363_test.json") - self.add_tests("ecdsa_secp224r1_sha512_test.json") - self.add_tests("ecdsa_secp256r1_sha256_p1363_test.json") - self.add_tests("ecdsa_secp256r1_sha256_test.json") - self.add_tests("ecdsa_secp256r1_sha3_256_test.json") - self.add_tests("ecdsa_secp256r1_sha3_512_test.json") - self.add_tests("ecdsa_secp256r1_sha512_p1363_test.json") - self.add_tests("ecdsa_secp256r1_sha512_test.json") - if self._slow_tests: - self.add_tests("ecdsa_secp384r1_sha3_384_test.json") - self.add_tests("ecdsa_secp384r1_sha3_512_test.json") - self.add_tests("ecdsa_secp384r1_sha384_p1363_test.json") - self.add_tests("ecdsa_secp384r1_sha384_test.json") - self.add_tests("ecdsa_secp384r1_sha512_p1363_test.json") - self.add_tests("ecdsa_secp384r1_sha512_test.json") - if self._slow_tests: - self.add_tests("ecdsa_secp521r1_sha3_512_test.json") - self.add_tests("ecdsa_secp521r1_sha512_p1363_test.json") - self.add_tests("ecdsa_secp521r1_sha512_test.json") - self.add_tests("ecdsa_test.json") - self.add_tests("ecdsa_webcrypto_test.json") - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_verify(self, tv): - self._id = "Wycheproof ECDSA Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - - # Skip tests with unsupported curves - if tv.key is None: - return - - hashed_msg = tv.hash_module.new(tv.msg) - signer = DSS.new(tv.key, 'fips-186-3', encoding=tv.encoding) - try: - signature = signer.verify(hashed_msg, tv.sig) - except ValueError as e: - if tv.warning: - return - if tv.comment == "k*G has a large x-coordinate": - return - assert not tv.valid - else: - assert tv.valid - self.warn(tv) - - def runTest(self): - for tv in self.tv: - self.test_verify(tv) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(FIPS_DSA_Tests) - tests += list_test_cases(FIPS_ECDSA_Tests) - tests += list_test_cases(Det_DSA_Tests) - tests += list_test_cases(Det_ECDSA_Tests) - - slow_tests = config.get('slow_tests') - if slow_tests: - tests += list_test_cases(FIPS_DSA_Tests_KAT) - tests += list_test_cases(FIPS_ECDSA_Tests_KAT) - - tests += [TestVectorsDSAWycheproof(wycheproof_warnings, slow_tests)] - tests += 
[TestVectorsECDSAWycheproof(wycheproof_warnings, slow_tests)] - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_eddsa.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_eddsa.py deleted file mode 100644 index 3215bbc..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_eddsa.py +++ /dev/null @@ -1,604 +0,0 @@ -# -# Copyright (c) 2022, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
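The test_eddsa.py module being removed here drives Cryptodome.Signature.eddsa against the RFC 8032 vectors; stripped of the vector plumbing, PureEdDSA signing reduces to a few calls. A minimal sketch, assuming the same Cryptodome package:

from Cryptodome.PublicKey import ECC
from Cryptodome.Signature import eddsa

# PureEdDSA (Ed25519): the raw message bytes are passed directly,
# no separate hash object is required.
key = ECC.generate(curve='ed25519')

signer = eddsa.new(key, 'rfc8032')
signature = signer.sign(b'message to sign')

verifier = eddsa.new(key.public_key(), 'rfc8032')
verifier.verify(b'message to sign', signature)   # raises ValueError on a bad signature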
-# =================================================================== - -import unittest -from binascii import unhexlify - -from Cryptodome.PublicKey import ECC -from Cryptodome.Signature import eddsa -from Cryptodome.Hash import SHA512, SHAKE256 -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors_wycheproof -from Cryptodome.Util.number import bytes_to_long - -rfc8032_tv_str = ( - # 7.1 Ed25519 - ( - "9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60", - "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a", - "", - None, - "", - "e5564300c360ac729086e2cc806e828a" - "84877f1eb8e5d974d873e06522490155" - "5fb8821590a33bacc61e39701cf9b46b" - "d25bf5f0595bbe24655141438e7a100b" - ), - ( - "4ccd089b28ff96da9db6c346ec114e0f5b8a319f35aba624da8cf6ed4fb8a6fb", - "3d4017c3e843895a92b70aa74d1b7ebc9c982ccf2ec4968cc0cd55f12af4660c", - "72", - None, - "", - "92a009a9f0d4cab8720e820b5f642540" - "a2b27b5416503f8fb3762223ebdb69da" - "085ac1e43e15996e458f3613d0f11d8c" - "387b2eaeb4302aeeb00d291612bb0c00" - ), - ( - "c5aa8df43f9f837bedb7442f31dcb7b166d38535076f094b85ce3a2e0b4458f7", - "fc51cd8e6218a1a38da47ed00230f0580816ed13ba3303ac5deb911548908025", - "af82", - None, - "", - "6291d657deec24024827e69c3abe01a3" - "0ce548a284743a445e3680d7db5ac3ac" - "18ff9b538d16f290ae67f760984dc659" - "4a7c15e9716ed28dc027beceea1ec40a" - ), - ( - "f5e5767cf153319517630f226876b86c8160cc583bc013744c6bf255f5cc0ee5", - "278117fc144c72340f67d0f2316e8386ceffbf2b2428c9c51fef7c597f1d426e", - "08b8b2b733424243760fe426a4b54908" - "632110a66c2f6591eabd3345e3e4eb98" - "fa6e264bf09efe12ee50f8f54e9f77b1" - "e355f6c50544e23fb1433ddf73be84d8" - "79de7c0046dc4996d9e773f4bc9efe57" - "38829adb26c81b37c93a1b270b20329d" - "658675fc6ea534e0810a4432826bf58c" - "941efb65d57a338bbd2e26640f89ffbc" - "1a858efcb8550ee3a5e1998bd177e93a" - "7363c344fe6b199ee5d02e82d522c4fe" - "ba15452f80288a821a579116ec6dad2b" - "3b310da903401aa62100ab5d1a36553e" - "06203b33890cc9b832f79ef80560ccb9" - "a39ce767967ed628c6ad573cb116dbef" - "efd75499da96bd68a8a97b928a8bbc10" - "3b6621fcde2beca1231d206be6cd9ec7" - "aff6f6c94fcd7204ed3455c68c83f4a4" - "1da4af2b74ef5c53f1d8ac70bdcb7ed1" - "85ce81bd84359d44254d95629e9855a9" - "4a7c1958d1f8ada5d0532ed8a5aa3fb2" - "d17ba70eb6248e594e1a2297acbbb39d" - "502f1a8c6eb6f1ce22b3de1a1f40cc24" - "554119a831a9aad6079cad88425de6bd" - "e1a9187ebb6092cf67bf2b13fd65f270" - "88d78b7e883c8759d2c4f5c65adb7553" - "878ad575f9fad878e80a0c9ba63bcbcc" - "2732e69485bbc9c90bfbd62481d9089b" - "eccf80cfe2df16a2cf65bd92dd597b07" - "07e0917af48bbb75fed413d238f5555a" - "7a569d80c3414a8d0859dc65a46128ba" - "b27af87a71314f318c782b23ebfe808b" - "82b0ce26401d2e22f04d83d1255dc51a" - "ddd3b75a2b1ae0784504df543af8969b" - "e3ea7082ff7fc9888c144da2af58429e" - "c96031dbcad3dad9af0dcbaaaf268cb8" - "fcffead94f3c7ca495e056a9b47acdb7" - "51fb73e666c6c655ade8297297d07ad1" - "ba5e43f1bca32301651339e22904cc8c" - "42f58c30c04aafdb038dda0847dd988d" - "cda6f3bfd15c4b4c4525004aa06eeff8" - "ca61783aacec57fb3d1f92b0fe2fd1a8" - "5f6724517b65e614ad6808d6f6ee34df" - "f7310fdc82aebfd904b01e1dc54b2927" - "094b2db68d6f903b68401adebf5a7e08" - "d78ff4ef5d63653a65040cf9bfd4aca7" - "984a74d37145986780fc0b16ac451649" - "de6188a7dbdf191f64b5fc5e2ab47b57" - "f7f7276cd419c17a3ca8e1b939ae49e4" - "88acba6b965610b5480109c8b17b80e1" - "b7b750dfc7598d5d5011fd2dcc5600a3" - "2ef5b52a1ecc820e308aa342721aac09" - "43bf6686b64b2579376504ccc493d97e" - "6aed3fb0f9cd71a43dd497f01f17c0e2" - 
"cb3797aa2a2f256656168e6c496afc5f" - "b93246f6b1116398a346f1a641f3b041" - "e989f7914f90cc2c7fff357876e506b5" - "0d334ba77c225bc307ba537152f3f161" - "0e4eafe595f6d9d90d11faa933a15ef1" - "369546868a7f3a45a96768d40fd9d034" - "12c091c6315cf4fde7cb68606937380d" - "b2eaaa707b4c4185c32eddcdd306705e" - "4dc1ffc872eeee475a64dfac86aba41c" - "0618983f8741c5ef68d3a101e8a3b8ca" - "c60c905c15fc910840b94c00a0b9d0", - None, - "", - "0aab4c900501b3e24d7cdf4663326a3a" - "87df5e4843b2cbdb67cbf6e460fec350" - "aa5371b1508f9f4528ecea23c436d94b" - "5e8fcd4f681e30a6ac00a9704a188a03" - ), - # 7.2 Ed25519ctx - ( - "0305334e381af78f141cb666f6199f57" - "bc3495335a256a95bd2a55bf546663f6", - "dfc9425e4f968f7f0c29f0259cf5f9ae" - "d6851c2bb4ad8bfb860cfee0ab248292", - "f726936d19c800494e3fdaff20b276a8", - None, - "666f6f", - "55a4cc2f70a54e04288c5f4cd1e45a7b" - "b520b36292911876cada7323198dd87a" - "8b36950b95130022907a7fb7c4e9b2d5" - "f6cca685a587b4b21f4b888e4e7edb0d" - ), - ( - "0305334e381af78f141cb666f6199f57" - "bc3495335a256a95bd2a55bf546663f6", - "dfc9425e4f968f7f0c29f0259cf5f9ae" - "d6851c2bb4ad8bfb860cfee0ab248292", - "f726936d19c800494e3fdaff20b276a8", - None, - "626172", - "fc60d5872fc46b3aa69f8b5b4351d580" - "8f92bcc044606db097abab6dbcb1aee3" - "216c48e8b3b66431b5b186d1d28f8ee1" - "5a5ca2df6668346291c2043d4eb3e90d" - ), - ( - "0305334e381af78f141cb666f6199f57" - "bc3495335a256a95bd2a55bf546663f6", - "dfc9425e4f968f7f0c29f0259cf5f9ae" - "d6851c2bb4ad8bfb860cfee0ab248292", - "508e9e6882b979fea900f62adceaca35", - None, - "666f6f", - "8b70c1cc8310e1de20ac53ce28ae6e72" - "07f33c3295e03bb5c0732a1d20dc6490" - "8922a8b052cf99b7c4fe107a5abb5b2c" - "4085ae75890d02df26269d8945f84b0b" - ), - ( - "ab9c2853ce297ddab85c993b3ae14bca" - "d39b2c682beabc27d6d4eb20711d6560", - "0f1d1274943b91415889152e893d80e9" - "3275a1fc0b65fd71b4b0dda10ad7d772", - "f726936d19c800494e3fdaff20b276a8", - None, - "666f6f", - "21655b5f1aa965996b3f97b3c849eafb" - "a922a0a62992f73b3d1b73106a84ad85" - "e9b86a7b6005ea868337ff2d20a7f5fb" - "d4cd10b0be49a68da2b2e0dc0ad8960f" - ), - # 7.3 Ed25519ph - ( - "833fe62409237b9d62ec77587520911e" - "9a759cec1d19755b7da901b96dca3d42", - "ec172b93ad5e563bf4932c70e1245034" - "c35467ef2efd4d64ebf819683467e2bf", - "616263", - SHA512, - "", - "98a70222f0b8121aa9d30f813d683f80" - "9e462b469c7ff87639499bb94e6dae41" - "31f85042463c2a355a2003d062adf5aa" - "a10b8c61e636062aaad11c2a26083406" - ), - # 7.4 Ed448 - ( - "6c82a562cb808d10d632be89c8513ebf6c929f34ddfa8c9f63c9960ef6e348a3" - "528c8a3fcc2f044e39a3fc5b94492f8f032e7549a20098f95b", - "5fd7449b59b461fd2ce787ec616ad46a1da1342485a70e1f8a0ea75d80e96778" - "edf124769b46c7061bd6783df1e50f6cd1fa1abeafe8256180", - "", - None, - "", - "533a37f6bbe457251f023c0d88f976ae2dfb504a843e34d2074fd823d41a591f" - "2b233f034f628281f2fd7a22ddd47d7828c59bd0a21bfd3980ff0d2028d4b18a" - "9df63e006c5d1c2d345b925d8dc00b4104852db99ac5c7cdda8530a113a0f4db" - "b61149f05a7363268c71d95808ff2e652600" - ), - ( - "c4eab05d357007c632f3dbb48489924d552b08fe0c353a0d4a1f00acda2c463a" - "fbea67c5e8d2877c5e3bc397a659949ef8021e954e0a12274e", - "43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c6798c086" - "6aea01eb00742802b8438ea4cb82169c235160627b4c3a9480", - "03", - None, - "", - "26b8f91727bd62897af15e41eb43c377efb9c610d48f2335cb0bd0087810f435" - "2541b143c4b981b7e18f62de8ccdf633fc1bf037ab7cd779805e0dbcc0aae1cb" - "cee1afb2e027df36bc04dcecbf154336c19f0af7e0a6472905e799f1953d2a0f" - "f3348ab21aa4adafd1d234441cf807c03a00", - ), - ( - "c4eab05d357007c632f3dbb48489924d552b08fe0c353a0d4a1f00acda2c463a" - 
"fbea67c5e8d2877c5e3bc397a659949ef8021e954e0a12274e", - "43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c6798c086" - "6aea01eb00742802b8438ea4cb82169c235160627b4c3a9480", - "03", - None, - "666f6f", - "d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b32a89f7d2" - "151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea000c85741de5c8da" - "1144a6a1aba7f96de42505d7a7298524fda538fccbbb754f578c1cad10d54d0d" - "5428407e85dcbc98a49155c13764e66c3c00", - ), - ( - "cd23d24f714274e744343237b93290f511f6425f98e64459ff203e8985083ffd" - "f60500553abc0e05cd02184bdb89c4ccd67e187951267eb328", - "dcea9e78f35a1bf3499a831b10b86c90aac01cd84b67a0109b55a36e9328b1e3" - "65fce161d71ce7131a543ea4cb5f7e9f1d8b00696447001400", - "0c3e544074ec63b0265e0c", - None, - "", - "1f0a8888ce25e8d458a21130879b840a9089d999aaba039eaf3e3afa090a09d3" - "89dba82c4ff2ae8ac5cdfb7c55e94d5d961a29fe0109941e00b8dbdeea6d3b05" - "1068df7254c0cdc129cbe62db2dc957dbb47b51fd3f213fb8698f064774250a5" - "028961c9bf8ffd973fe5d5c206492b140e00", - ), - ( - "258cdd4ada32ed9c9ff54e63756ae582fb8fab2ac721f2c8e676a72768513d93" - "9f63dddb55609133f29adf86ec9929dccb52c1c5fd2ff7e21b", - "3ba16da0c6f2cc1f30187740756f5e798d6bc5fc015d7c63cc9510ee3fd44adc" - "24d8e968b6e46e6f94d19b945361726bd75e149ef09817f580", - "64a65f3cdedcdd66811e2915", - None, - "", - "7eeeab7c4e50fb799b418ee5e3197ff6bf15d43a14c34389b59dd1a7b1b85b4a" - "e90438aca634bea45e3a2695f1270f07fdcdf7c62b8efeaf00b45c2c96ba457e" - "b1a8bf075a3db28e5c24f6b923ed4ad747c3c9e03c7079efb87cb110d3a99861" - "e72003cbae6d6b8b827e4e6c143064ff3c00", - ), - ( - "7ef4e84544236752fbb56b8f31a23a10e42814f5f55ca037cdcc11c64c9a3b29" - "49c1bb60700314611732a6c2fea98eebc0266a11a93970100e", - "b3da079b0aa493a5772029f0467baebee5a8112d9d3a22532361da294f7bb381" - "5c5dc59e176b4d9f381ca0938e13c6c07b174be65dfa578e80", - "64a65f3cdedcdd66811e2915e7", - None, - "", - "6a12066f55331b6c22acd5d5bfc5d71228fbda80ae8dec26bdd306743c5027cb" - "4890810c162c027468675ecf645a83176c0d7323a2ccde2d80efe5a1268e8aca" - "1d6fbc194d3f77c44986eb4ab4177919ad8bec33eb47bbb5fc6e28196fd1caf5" - "6b4e7e0ba5519234d047155ac727a1053100", - ), - ( - "d65df341ad13e008567688baedda8e9dcdc17dc024974ea5b4227b6530e339bf" - "f21f99e68ca6968f3cca6dfe0fb9f4fab4fa135d5542ea3f01", - "df9705f58edbab802c7f8363cfe5560ab1c6132c20a9f1dd163483a26f8ac53a" - "39d6808bf4a1dfbd261b099bb03b3fb50906cb28bd8a081f00", - "bd0f6a3747cd561bdddf4640a332461a4a30a12a434cd0bf40d766d9c6d458e5" - "512204a30c17d1f50b5079631f64eb3112182da3005835461113718d1a5ef944", - None, - "", - "554bc2480860b49eab8532d2a533b7d578ef473eeb58c98bb2d0e1ce488a98b1" - "8dfde9b9b90775e67f47d4a1c3482058efc9f40d2ca033a0801b63d45b3b722e" - "f552bad3b4ccb667da350192b61c508cf7b6b5adadc2c8d9a446ef003fb05cba" - "5f30e88e36ec2703b349ca229c2670833900", - ), - ( - "2ec5fe3c17045abdb136a5e6a913e32ab75ae68b53d2fc149b77e504132d3756" - "9b7e766ba74a19bd6162343a21c8590aa9cebca9014c636df5", - "79756f014dcfe2079f5dd9e718be4171e2ef2486a08f25186f6bff43a9936b9b" - "fe12402b08ae65798a3d81e22e9ec80e7690862ef3d4ed3a00", - "15777532b0bdd0d1389f636c5f6b9ba734c90af572877e2d272dd078aa1e567c" - "fa80e12928bb542330e8409f3174504107ecd5efac61ae7504dabe2a602ede89" - "e5cca6257a7c77e27a702b3ae39fc769fc54f2395ae6a1178cab4738e543072f" - "c1c177fe71e92e25bf03e4ecb72f47b64d0465aaea4c7fad372536c8ba516a60" - "39c3c2a39f0e4d832be432dfa9a706a6e5c7e19f397964ca4258002f7c0541b5" - "90316dbc5622b6b2a6fe7a4abffd96105eca76ea7b98816af0748c10df048ce0" - "12d901015a51f189f3888145c03650aa23ce894c3bd889e030d565071c59f409" - 
"a9981b51878fd6fc110624dcbcde0bf7a69ccce38fabdf86f3bef6044819de11", - None, - "", - "c650ddbb0601c19ca11439e1640dd931f43c518ea5bea70d3dcde5f4191fe53f" - "00cf966546b72bcc7d58be2b9badef28743954e3a44a23f880e8d4f1cfce2d7a" - "61452d26da05896f0a50da66a239a8a188b6d825b3305ad77b73fbac0836ecc6" - "0987fd08527c1a8e80d5823e65cafe2a3d00", - ), - ( - "872d093780f5d3730df7c212664b37b8a0f24f56810daa8382cd4fa3f77634ec" - "44dc54f1c2ed9bea86fafb7632d8be199ea165f5ad55dd9ce8", - "a81b2e8a70a5ac94ffdbcc9badfc3feb0801f258578bb114ad44ece1ec0e799d" - "a08effb81c5d685c0c56f64eecaef8cdf11cc38737838cf400", - "6ddf802e1aae4986935f7f981ba3f0351d6273c0a0c22c9c0e8339168e675412" - "a3debfaf435ed651558007db4384b650fcc07e3b586a27a4f7a00ac8a6fec2cd" - "86ae4bf1570c41e6a40c931db27b2faa15a8cedd52cff7362c4e6e23daec0fbc" - "3a79b6806e316efcc7b68119bf46bc76a26067a53f296dafdbdc11c77f7777e9" - "72660cf4b6a9b369a6665f02e0cc9b6edfad136b4fabe723d2813db3136cfde9" - "b6d044322fee2947952e031b73ab5c603349b307bdc27bc6cb8b8bbd7bd32321" - "9b8033a581b59eadebb09b3c4f3d2277d4f0343624acc817804728b25ab79717" - "2b4c5c21a22f9c7839d64300232eb66e53f31c723fa37fe387c7d3e50bdf9813" - "a30e5bb12cf4cd930c40cfb4e1fc622592a49588794494d56d24ea4b40c89fc0" - "596cc9ebb961c8cb10adde976a5d602b1c3f85b9b9a001ed3c6a4d3b1437f520" - "96cd1956d042a597d561a596ecd3d1735a8d570ea0ec27225a2c4aaff26306d1" - "526c1af3ca6d9cf5a2c98f47e1c46db9a33234cfd4d81f2c98538a09ebe76998" - "d0d8fd25997c7d255c6d66ece6fa56f11144950f027795e653008f4bd7ca2dee" - "85d8e90f3dc315130ce2a00375a318c7c3d97be2c8ce5b6db41a6254ff264fa6" - "155baee3b0773c0f497c573f19bb4f4240281f0b1f4f7be857a4e59d416c06b4" - "c50fa09e1810ddc6b1467baeac5a3668d11b6ecaa901440016f389f80acc4db9" - "77025e7f5924388c7e340a732e554440e76570f8dd71b7d640b3450d1fd5f041" - "0a18f9a3494f707c717b79b4bf75c98400b096b21653b5d217cf3565c9597456" - "f70703497a078763829bc01bb1cbc8fa04eadc9a6e3f6699587a9e75c94e5bab" - "0036e0b2e711392cff0047d0d6b05bd2a588bc109718954259f1d86678a579a3" - "120f19cfb2963f177aeb70f2d4844826262e51b80271272068ef5b3856fa8535" - "aa2a88b2d41f2a0e2fda7624c2850272ac4a2f561f8f2f7a318bfd5caf969614" - "9e4ac824ad3460538fdc25421beec2cc6818162d06bbed0c40a387192349db67" - "a118bada6cd5ab0140ee273204f628aad1c135f770279a651e24d8c14d75a605" - "9d76b96a6fd857def5e0b354b27ab937a5815d16b5fae407ff18222c6d1ed263" - "be68c95f32d908bd895cd76207ae726487567f9a67dad79abec316f683b17f2d" - "02bf07e0ac8b5bc6162cf94697b3c27cd1fea49b27f23ba2901871962506520c" - "392da8b6ad0d99f7013fbc06c2c17a569500c8a7696481c1cd33e9b14e40b82e" - "79a5f5db82571ba97bae3ad3e0479515bb0e2b0f3bfcd1fd33034efc6245eddd" - "7ee2086ddae2600d8ca73e214e8c2b0bdb2b047c6a464a562ed77b73d2d841c4" - "b34973551257713b753632efba348169abc90a68f42611a40126d7cb21b58695" - "568186f7e569d2ff0f9e745d0487dd2eb997cafc5abf9dd102e62ff66cba87", - None, - "", - "e301345a41a39a4d72fff8df69c98075a0cc082b802fc9b2b6bc503f926b65bd" - "df7f4c8f1cb49f6396afc8a70abe6d8aef0db478d4c6b2970076c6a0484fe76d" - "76b3a97625d79f1ce240e7c576750d295528286f719b413de9ada3e8eb78ed57" - "3603ce30d8bb761785dc30dbc320869e1a00" - ), - # 7.5 Ed448ph - ( - "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42" - "ef7822e0d5104127dc05d6dbefde69e3ab2cec7c867c6e2c49", - "259b71c19f83ef77a7abd26524cbdb3161b590a48f7d17de3ee0ba9c52beb743" - "c09428a131d6b1b57303d90d8132c276d5ed3d5d01c0f53880", - "616263", - SHAKE256, - "", - "822f6901f7480f3d5f562c592994d9693602875614483256505600bbc281ae38" - "1f54d6bce2ea911574932f52a4e6cadd78769375ec3ffd1b801a0d9b3f4030cd" - 
"433964b6457ea39476511214f97469b57dd32dbc560a9a94d00bff07620464a3" - "ad203df7dc7ce360c3cd3696d9d9fab90f00" - ), - ( - "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42" - "ef7822e0d5104127dc05d6dbefde69e3ab2cec7c867c6e2c49", - "259b71c19f83ef77a7abd26524cbdb3161b590a48f7d17de3ee0ba9c52beb743" - "c09428a131d6b1b57303d90d8132c276d5ed3d5d01c0f53880", - "616263", - SHAKE256, - "666f6f", - "c32299d46ec8ff02b54540982814dce9a05812f81962b649d528095916a2aa48" - "1065b1580423ef927ecf0af5888f90da0f6a9a85ad5dc3f280d91224ba9911a3" - "653d00e484e2ce232521481c8658df304bb7745a73514cdb9bf3e15784ab7128" - "4f8d0704a608c54a6b62d97beb511d132100", - ), -) - - -rfc8032_tv_bytes = [] -for tv_str in rfc8032_tv_str: - rfc8032_tv_bytes.append([unhexlify(i) if isinstance(i, str) else i for i in tv_str]) - - -class TestEdDSA(unittest.TestCase): - - def test_sign(self): - for sk, _, msg, hashmod, ctx, exp_signature in rfc8032_tv_bytes: - key = eddsa.import_private_key(sk) - signer = eddsa.new(key, 'rfc8032', context=ctx) - if hashmod is None: - # PureEdDSA - signature = signer.sign(msg) - else: - # HashEdDSA - hashobj = hashmod.new(msg) - signature = signer.sign(hashobj) - self.assertEqual(exp_signature, signature) - - def test_verify(self): - for _, pk, msg, hashmod, ctx, exp_signature in rfc8032_tv_bytes: - key = eddsa.import_public_key(pk) - verifier = eddsa.new(key, 'rfc8032', context=ctx) - if hashmod is None: - # PureEdDSA - verifier.verify(msg, exp_signature) - else: - # HashEdDSA - hashobj = hashmod.new(msg) - verifier.verify(hashobj, exp_signature) - - def test_double_sign_verify_ed25519(self): - msg_hash = SHA512.new(b'abc') - key = ECC.generate(curve='ed25519') - signer = eddsa.new(key, 'rfc8032') - verifier = eddsa.new(key, 'rfc8032') - - signature = signer.sign(msg_hash) - signature2 = signer.sign(msg_hash) - self.assertEqual(signature, signature2) - - verifier.verify(msg_hash, signature) - verifier.verify(msg_hash, signature) - - def test_double_sign_verify_ed448(self): - msg_hash = SHAKE256.new(b'abc') - key = ECC.generate(curve='ed448') - signer = eddsa.new(key, 'rfc8032') - verifier = eddsa.new(key, 'rfc8032') - - signature = signer.sign(msg_hash) - signature2 = signer.sign(msg_hash) - self.assertEqual(signature, signature2) - - verifier.verify(msg_hash, signature) - verifier.verify(msg_hash, signature) - - def test_negative(self): - key = ECC.generate(curve="ed25519") - self.assertRaises(ValueError, eddsa.new, key, 'rfc9999') - - nist_key = ECC.generate(curve="p256") - self.assertRaises(ValueError, eddsa.new, nist_key, 'rfc8032') - - -class TestExport_Ed25519(unittest.TestCase): - - def test_raw(self): - key = ECC.generate(curve="Ed25519") - x, y = key.pointQ.xy - raw = bytearray(key._export_eddsa_public()) - sign_x = raw[31] >> 7 - raw[31] &= 0x7F - yt = bytes_to_long(raw[::-1]) - self.assertEqual(y, yt) - self.assertEqual(x & 1, sign_x) - - key = ECC.construct(point_x=0, point_y=1, curve="Ed25519") - out = key._export_eddsa_public() - self.assertEqual(b'\x01' + b'\x00' * 31, out) - - -class TestExport_Ed448(unittest.TestCase): - - def test_raw(self): - key = ECC.generate(curve="Ed448") - x, y = key.pointQ.xy - raw = bytearray(key._export_eddsa_public()) - sign_x = raw[56] >> 7 - raw[56] &= 0x7F - yt = bytes_to_long(raw[::-1]) - self.assertEqual(y, yt) - self.assertEqual(x & 1, sign_x) - - key = ECC.construct(point_x=0, point_y=1, curve="Ed448") - out = key._export_eddsa_public() - self.assertEqual(b'\x01' + b'\x00' * 56, out) - - -class TestImport_Ed25519(unittest.TestCase): - - 
def test_raw(self): - Px = 24407857220263921307776619664228778204996144802740950419837658238229122415920 - Py = 56480760040633817885061096979765646085062883740629155052073094891081309750690 - encoded = b'\xa2\x05\xd6\x00\xe1 \xe1\xc0\xff\x96\xee?V\x8e\xba/\xd3\x89\x06\xd7\xc4c\xe8$\xc2d\xd7a1\xfa\xde|' - key = eddsa.import_public_key(encoded) - self.assertEqual(Py, key.pointQ.y) - self.assertEqual(Px, key.pointQ.x) - - encoded = b'\x01' + b'\x00' * 31 - key = eddsa.import_public_key(encoded) - self.assertEqual(1, key.pointQ.y) - self.assertEqual(0, key.pointQ.x) - - -class TestImport_Ed448(unittest.TestCase): - - def test_raw(self): - Px = 0x153f42025aba3b0daecaa5cd79458b3146c7c9378c16c17b4a59bc3561113d90c169045bc12966c3f93e140c2ca0a3acc33d9205b9daf9b1 - Py = 0x38f5c0015d3dedd576c232810dd90373b5b1d631a12894c043b7be529cbae03ede177d8fa490b56131dbcb2465d2aba777ef839fc1719b25 - encoded = unhexlify("259b71c19f83ef77a7abd26524cbdb31" - "61b590a48f7d17de3ee0ba9c52beb743" - "c09428a131d6b1b57303d90d8132c276" - "d5ed3d5d01c0f53880") - key = eddsa.import_public_key(encoded) - self.assertEqual(Py, key.pointQ.y) - self.assertEqual(Px, key.pointQ.x) - - encoded = b'\x01' + b'\x00' * 56 - key = eddsa.import_public_key(encoded) - self.assertEqual(1, key.pointQ.y) - self.assertEqual(0, key.pointQ.x) - - -class TestVectorsEdDSAWycheproof(unittest.TestCase): - - def add_tests(self, filename): - - def pk(group): - elem = group['key']['pk'] - return unhexlify(elem) - - def sk(group): - elem = group['key']['sk'] - return unhexlify(elem) - - result = load_test_vectors_wycheproof(("Signature", "wycheproof"), - filename, - "Wycheproof ECDSA signature (%s)" - % filename, - group_tag={'pk': pk, 'sk': sk}) - self.tv += result - - def setUp(self): - self.tv = [] - self.add_tests("eddsa_test.json") - self.add_tests("ed448_test.json") - - def test_sign(self, tv): - if not tv.valid: - return - - self._id = "Wycheproof EdDSA Sign Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - key = eddsa.import_private_key(tv.sk) - signer = eddsa.new(key, 'rfc8032') - signature = signer.sign(tv.msg) - self.assertEqual(signature, tv.sig) - - def test_verify(self, tv): - self._id = "Wycheproof EdDSA Verify Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) - key = eddsa.import_public_key(tv.pk) - verifier = eddsa.new(key, 'rfc8032') - try: - verifier.verify(tv.msg, tv.sig) - except ValueError: - assert not tv.valid - else: - assert tv.valid - - def runTest(self): - for tv in self.tv: - self.test_sign(tv) - self.test_verify(tv) - - -def get_tests(config={}): - - tests = [] - tests += list_test_cases(TestExport_Ed25519) - tests += list_test_cases(TestExport_Ed448) - tests += list_test_cases(TestImport_Ed25519) - tests += list_test_cases(TestImport_Ed448) - tests += list_test_cases(TestEdDSA) - tests += [TestVectorsEdDSAWycheproof()] - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_pkcs1_15.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_pkcs1_15.py deleted file mode 100644 index 3a3e30b..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_pkcs1_15.py +++ /dev/null @@ -1,348 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. 
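Note on the test_eddsa.py module deleted above: it exercises the RFC 8032 EdDSA API (eddsa.new in 'rfc8032' mode) and the raw point encoding used by the import/export tests. A minimal sketch of the same calls, assuming pycryptodomex is installed; PureEdDSA signs the raw message bytes, while the Ed25519ph/Ed448ph cases in the deleted tests pass a SHA512/SHAKE256 hash object instead:

    from Cryptodome.PublicKey import ECC
    from Cryptodome.Signature import eddsa

    key = ECC.generate(curve='ed25519')
    signer = eddsa.new(key, 'rfc8032')                 # deterministic RFC 8032 signatures
    signature = signer.sign(b'message')                # PureEdDSA: raw bytes in, 64-byte signature out

    verifier = eddsa.new(key.public_key(), 'rfc8032')
    verifier.verify(b'message', signature)             # raises ValueError if the signature is bad

    # Raw public-key encoding as in the deleted import tests:
    # 32 bytes selects Ed25519, 57 bytes Ed448; this value is y = 1 with the x sign bit clear.
    neutral = eddsa.import_public_key(b'\x01' + b'\x00' * 31)
    assert neutral.pointQ.x == 0 and neutral.pointQ.y == 1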
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import json -import unittest -from binascii import unhexlify - -from Cryptodome.Util.py3compat import bchr -from Cryptodome.Util.number import bytes_to_long -from Cryptodome.Util.strxor import strxor -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof - -from Cryptodome.Hash import (SHA1, SHA224, SHA256, SHA384, SHA512, SHA3_384, - SHA3_224, SHA3_256, SHA3_512) -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import pkcs1_15 -from Cryptodome.Signature import PKCS1_v1_5 - -from Cryptodome.Util._file_system import pycryptodome_filename -from Cryptodome.Util.strxor import strxor - - -def load_hash_by_name(hash_name): - return __import__("Cryptodome.Hash." 
+ hash_name, globals(), locals(), ["new"]) - - -class FIPS_PKCS1_Verify_Tests(unittest.TestCase): - - def shortDescription(self): - return "FIPS PKCS1 Tests (Verify)" - - def test_can_sign(self): - test_public_key = RSA.generate(1024).public_key() - verifier = pkcs1_15.new(test_public_key) - self.assertEqual(verifier.can_sign(), False) - - -class FIPS_PKCS1_Verify_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_verify = load_test_vectors(("Signature", "PKCS1-v1.5"), - "SigVer15_186-3.rsp", - "Signature Verification 186-3", - {'shaalg': lambda x: x, - 'd': lambda x: int(x), - 'result': lambda x: x}) or [] - - -for count, tv in enumerate(test_vectors_verify): - if isinstance(tv, str): - continue - if hasattr(tv, "n"): - modulus = tv.n - continue - - hash_module = load_hash_by_name(tv.shaalg.upper()) - hash_obj = hash_module.new(tv.msg) - public_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e)]) # type: ignore - verifier = pkcs1_15.new(public_key) - - def positive_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): - verifier.verify(hash_obj, signature) - - def negative_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): - self.assertRaises(ValueError, verifier.verify, hash_obj, signature) - - if tv.result == 'f': - setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_negative_%d" % count, negative_test) - else: - setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_positive_%d" % count, positive_test) - - -class FIPS_PKCS1_Sign_Tests(unittest.TestCase): - - def shortDescription(self): - return "FIPS PKCS1 Tests (Sign)" - - def test_can_sign(self): - test_private_key = RSA.generate(1024) - signer = pkcs1_15.new(test_private_key) - self.assertEqual(signer.can_sign(), True) - - -class FIPS_PKCS1_Sign_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_sign = load_test_vectors(("Signature", "PKCS1-v1.5"), - "SigGen15_186-2.txt", - "Signature Generation 186-2", - {'shaalg': lambda x: x}) or [] - -test_vectors_sign += load_test_vectors(("Signature", "PKCS1-v1.5"), - "SigGen15_186-3.txt", - "Signature Generation 186-3", - {'shaalg': lambda x: x}) or [] - -for count, tv in enumerate(test_vectors_sign): - if isinstance(tv, str): - continue - if hasattr(tv, "n"): - modulus = tv.n - continue - if hasattr(tv, "e"): - private_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e, tv.d)]) # type: ignore - signer = pkcs1_15.new(private_key) - continue - - hash_module = load_hash_by_name(tv.shaalg.upper()) - hash_obj = hash_module.new(tv.msg) - - def new_test(self, hash_obj=hash_obj, signer=signer, result=tv.s): - signature = signer.sign(hash_obj) - self.assertEqual(signature, result) - - setattr(FIPS_PKCS1_Sign_Tests_KAT, "test_%d" % count, new_test) - - -class PKCS1_15_NoParams(unittest.TestCase): - """Verify that PKCS#1 v1.5 signatures pass even without NULL parameters in - the algorithm identifier (PyCrypto/LP bug #1119552).""" - - rsakey = """-----BEGIN RSA PRIVATE KEY----- - MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII - q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 - Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI - OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr - +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK - JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 - n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== - -----END RSA PRIVATE KEY-----""" - - msg = b"This is a test\x0a" - - # PKCS1 v1.5 signature of the message computed using SHA-1. 
- # The digestAlgorithm SEQUENCE does NOT contain the NULL parameter. - sig_str = "a287a13517f716e72fb14eea8e33a8db4a4643314607e7ca3e3e28"\ - "1893db74013dda8b855fd99f6fecedcb25fcb7a434f35cd0a101f8"\ - "b19348e0bd7b6f152dfc" - signature = unhexlify(sig_str) - - def runTest(self): - verifier = pkcs1_15.new(RSA.importKey(self.rsakey)) - hashed = SHA1.new(self.msg) - verifier.verify(hashed, self.signature) - - -class PKCS1_Legacy_Module_Tests(unittest.TestCase): - """Verify that the legacy module Cryptodome.Signature.PKCS1_v1_5 - behaves as expected. The only difference is that the verify() - method returns True/False and does not raise exceptions.""" - - def shortDescription(self): - return "Test legacy Cryptodome.Signature.PKCS1_v1_5" - - def runTest(self): - key = RSA.importKey(PKCS1_15_NoParams.rsakey) - hashed = SHA1.new(b"Test") - good_signature = PKCS1_v1_5.new(key).sign(hashed) - verifier = PKCS1_v1_5.new(key.public_key()) - - self.assertEqual(verifier.verify(hashed, good_signature), True) - - # Flip a few bits in the signature - bad_signature = strxor(good_signature, bchr(1) * len(good_signature)) - self.assertEqual(verifier.verify(hashed, bad_signature), False) - - -class PKCS1_All_Hashes_Tests(unittest.TestCase): - - def shortDescription(self): - return "Test PKCS#1v1.5 signature in combination with all hashes" - - def runTest(self): - - key = RSA.generate(1024) - signer = pkcs1_15.new(key) - hash_names = ("MD2", "MD4", "MD5", "RIPEMD160", "SHA1", - "SHA224", "SHA256", "SHA384", "SHA512", - "SHA3_224", "SHA3_256", "SHA3_384", "SHA3_512") - - for name in hash_names: - hashed = load_hash_by_name(name).new(b"Test") - signer.sign(hashed) - - from Cryptodome.Hash import BLAKE2b, BLAKE2s - for hash_size in (20, 32, 48, 64): - hashed_b = BLAKE2b.new(digest_bytes=hash_size, data=b"Test") - signer.sign(hashed_b) - for hash_size in (16, 20, 28, 32): - hashed_s = BLAKE2s.new(digest_bytes=hash_size, data=b"Test") - signer.sign(hashed_s) - - -class TestVectorsWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._id = "None" - - def setUp(self): - self.tv = [] - self.add_tests("rsa_sig_gen_misc_test.json") - self.add_tests("rsa_signature_2048_sha224_test.json") - self.add_tests("rsa_signature_2048_sha256_test.json") - self.add_tests("rsa_signature_2048_sha384_test.json") - self.add_tests("rsa_signature_2048_sha3_224_test.json") - self.add_tests("rsa_signature_2048_sha3_256_test.json") - self.add_tests("rsa_signature_2048_sha3_384_test.json") - self.add_tests("rsa_signature_2048_sha3_512_test.json") - self.add_tests("rsa_signature_2048_sha512_test.json") - self.add_tests("rsa_signature_2048_sha512_224_test.json") - self.add_tests("rsa_signature_2048_sha512_256_test.json") - self.add_tests("rsa_signature_3072_sha256_test.json") - self.add_tests("rsa_signature_3072_sha384_test.json") - self.add_tests("rsa_signature_3072_sha3_256_test.json") - self.add_tests("rsa_signature_3072_sha3_384_test.json") - self.add_tests("rsa_signature_3072_sha3_512_test.json") - self.add_tests("rsa_signature_3072_sha512_test.json") - self.add_tests("rsa_signature_3072_sha512_256_test.json") - self.add_tests("rsa_signature_4096_sha384_test.json") - self.add_tests("rsa_signature_4096_sha512_test.json") - self.add_tests("rsa_signature_4096_sha512_256_test.json") - self.add_tests("rsa_signature_test.json") - - def add_tests(self, filename): - - def filter_rsa(group): - return RSA.import_key(group['keyPem']) - - 
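Note on the PKCS#1 v1.5 tests deleted above: they revolve around the pkcs1_15 signature scheme, with the legacy Cryptodome.Signature.PKCS1_v1_5 wrapper differing only in that verify() returns True/False instead of raising. A minimal sign/verify sketch with the same API (the 2048-bit key size here is only for illustration):

    from Cryptodome.PublicKey import RSA
    from Cryptodome.Hash import SHA256
    from Cryptodome.Signature import pkcs1_15

    key = RSA.generate(2048)
    h = SHA256.new(b'This is a test\n')
    signature = pkcs1_15.new(key).sign(h)                    # the private key signs the hash object
    pkcs1_15.new(key.public_key()).verify(h, signature)      # raises ValueError on mismatch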
def filter_sha(group): - hash_name = group['sha'] - if hash_name == "SHA-512": - return SHA512 - elif hash_name == "SHA-512/224": - return SHA512.new(truncate="224") - elif hash_name == "SHA-512/256": - return SHA512.new(truncate="256") - elif hash_name == "SHA3-512": - return SHA3_512 - elif hash_name == "SHA-384": - return SHA384 - elif hash_name == "SHA3-384": - return SHA3_384 - elif hash_name == "SHA-256": - return SHA256 - elif hash_name == "SHA3-256": - return SHA3_256 - elif hash_name == "SHA-224": - return SHA224 - elif hash_name == "SHA3-224": - return SHA3_224 - elif hash_name == "SHA-1": - return SHA1 - else: - raise ValueError("Unknown hash algorithm: " + hash_name) - - def filter_type(group): - type_name = group['type'] - if type_name not in ("RsassaPkcs1Verify", "RsassaPkcs1Generate"): - raise ValueError("Unknown type name " + type_name) - - result = load_test_vectors_wycheproof(("Signature", "wycheproof"), - filename, - "Wycheproof PKCS#1v1.5 signature (%s)" % filename, - group_tag={'rsa_key': filter_rsa, - 'hash_mod': filter_sha, - 'type': filter_type}) - return result - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_verify(self, tv): - self._id = "Wycheproof RSA PKCS$#1 Test #" + str(tv.id) - - hashed_msg = tv.hash_module.new(tv.msg) - signer = pkcs1_15.new(tv.key) - try: - signature = signer.verify(hashed_msg, tv.sig) - except ValueError as e: - if tv.warning: - return - assert not tv.valid - else: - assert tv.valid - self.warn(tv) - - def runTest(self): - for tv in self.tv: - self.test_verify(tv) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(FIPS_PKCS1_Verify_Tests) - tests += list_test_cases(FIPS_PKCS1_Sign_Tests) - tests += list_test_cases(PKCS1_15_NoParams) - tests += list_test_cases(PKCS1_Legacy_Module_Tests) - tests += list_test_cases(PKCS1_All_Hashes_Tests) - tests += [ TestVectorsWycheproof(wycheproof_warnings) ] - - if config.get('slow_tests'): - tests += list_test_cases(FIPS_PKCS1_Verify_Tests_KAT) - tests += list_test_cases(FIPS_PKCS1_Sign_Tests_KAT) - - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_pss.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_pss.py deleted file mode 100644 index c3b1ce5..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Signature/test_pss.py +++ /dev/null @@ -1,377 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest - -from Cryptodome.Util.py3compat import b, bchr -from Cryptodome.Util.number import bytes_to_long -from Cryptodome.Util.strxor import strxor -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof - -from Cryptodome.Hash import SHA1, SHA224, SHA256, SHA384, SHA512 -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import pss -from Cryptodome.Signature import PKCS1_PSS - -from Cryptodome.Signature.pss import MGF1 - - -def load_hash_by_name(hash_name): - return __import__("Cryptodome.Hash." + hash_name, globals(), locals(), ["new"]) - - -class PRNG(object): - - def __init__(self, stream): - self.stream = stream - self.idx = 0 - - def __call__(self, rnd_size): - result = self.stream[self.idx:self.idx + rnd_size] - self.idx += rnd_size - return result - - -class PSS_Tests(unittest.TestCase): - - rsa_key = b'-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAsvI34FgiTK8+txBvmooNGpNwk23YTU51dwNZi5yha3W4lA/Q\nvcZrDalkmD7ekWQwnduxVKa6pRSI13KBgeUOIqJoGXSWhntEtY3FEwvWOHW5AE7Q\njUzTzCiYT6TVaCcpa/7YLai+p6ai2g5f5Zfh4jSawa9uYeuggFygQq4IVW796MgV\nyqxYMM/arEj+/sKz3Viua9Rp9fFosertCYCX4DUTgW0mX9bwEnEOgjSI3pLOPXz1\n8vx+DRZS5wMCmwCUa0sKonLn3cAUPq+sGix7+eo7T0Z12MU8ud7IYVX/75r3cXiF\nPaYE2q8Le0kgOApIXbb+x74x0rNgyIh1yGygkwIDAQABAoIBABz4t1A0pLT6qHI2\nEIOaNz3mwhK0dZEqkz0GB1Dhtoax5ATgvKCFB98J3lYB08IBURe1snOsnMpOVUtg\naBRSM+QqnCUG6bnzKjAkuFP5liDE+oNQv1YpKp9CsUovuzdmI8Au3ewihl+ZTIN2\nUVNYMEOR1b5m+z2SSwWNOYsiJwpBrT7zkpdlDyjat7FiiPhMMIMXjhQFVxURMIcB\njUBtPzGvV/PG90cVDWi1wRGeeP1dDqti/jsnvykQ15KW1MqGrpeNKRmDdTy/Ucl1\nWIoYklKw3U456lgZ/rDTDB818+Tlnk35z4yF7d5ANPM8CKfqOPcnO1BCKVFzf4eq\n54wvUtkCgYEA1Zv2lp06l7rXMsvNtyYQjbFChezRDRnPwZmN4NCdRtTgGG1G0Ryd\nYz6WWoPGqZp0b4LAaaHd3W2GTcpXF8WXMKfMX1W+tMAxMozfsXRKMcHoypwuS5wT\nfJRXJCG4pvd57AB0iVUEJW2we+uGKU5Zxcx//id2nXGCpoRyViIplQsCgYEA1nVC\neHupHChht0Fh4N09cGqZHZzuwXjOUMzR3Vsfz+4WzVS3NvIgN4g5YgmQFOeKwo5y\niRq5yvubcNdFvf85eHWClg0zPAyxJCVUWigCrrOanGEhJo6re4idJvNVzu4Ucg0v\n6B3SJ1HsCda+ZSNz24bSyqRep8A+RoAaoVSFx5kCgYEAn3RvXPs9s+obnqWYiPF3\nRe5etE6Vt2vfNKwFxx6zaR6bsmBQjuUHcABWiHb6I71S0bMPI0tbrWGG8ibrYKl1\nNTLtUvVVCOS3VP7oNTWT9RTFTAnOXU7DFSo+6o/poWn3r36ff6zhDXeWWMr2OXtt\ndEQ1/2lCGEGVv+v61eVmmQUCgYABFHITPTwqwiFL1O5zPWnzyPWgaovhOYSAb6eW\n38CXQXGn8wdBJZL39J2lWrr4//l45VK6UgIhfYbY2JynSkO10ZGow8RARygVMILu\nOUlaK9lZdDvAf/NpGdUAvzTtZ9F+iYZ2OsA2JnlzyzsGM1l//3vMPWukmJk3ral0\nqoJJ8QKBgGRG3eVHnIegBbFVuMDp2NTcfuSuDVUQ1fGAwtPiFa8u81IodJnMk2pq\niXu2+0ytNA/M+SVrAnE2AgIzcaJbtr0p2srkuVM7KMWnG1vWFNjtXN8fAhf/joOv\nD+NmPL/N4uE57e40tbiU/H7KdyZaDt+5QiTmdhuyAe6CBjKsF2jy\n-----END RSA PRIVATE KEY-----' - msg = b'AAA' - tag = 
b'\x00[c5\xd8\xb0\x8b!D\x81\x83\x07\xc0\xdd\xb9\xb4\xb2`\x92\xe7\x02\xf1\xe1P\xea\xc3\xf0\xe3>\xddX5\xdd\x8e\xc5\x89\xef\xf3\xc2\xdc\xfeP\x02\x7f\x12+\xc9\xaf\xbb\xec\xfe\xb0\xa5\xb9\x08\x11P\x8fL\xee5\x9b\xb0k{=_\xd2\x14\xfb\x01R\xb7\xfe\x14}b\x03\x8d5Y\x89~}\xfc\xf2l\xd01-\xbd\xeb\x11\xcdV\x11\xe9l\x19k/o5\xa2\x0f\x15\xe7Q$\t=\xec\x1dAB\x19\xa5P\x9a\xaf\xa3G\x86"\xd6~\xf0j\xfcqkbs\x13\x84b\xe4\xbdm(\xed`\xa4F\xfb\x8f.\xe1\x8c)/_\x9eS\x98\xa4v\xb8\xdc\xfe\xf7/D\x18\x19\xb3T\x97:\xe2\x96s\xe8<\xa2\xb4\xb9\xf8/' - - def test_positive_1(self): - key = RSA.import_key(self.rsa_key) - h = SHA256.new(self.msg) - verifier = pss.new(key) - verifier.verify(h, self.tag) - - def test_negative_1(self): - key = RSA.import_key(self.rsa_key) - h = SHA256.new(self.msg + b'A') - verifier = pss.new(key) - tag = bytearray(self.tag) - self.assertRaises(ValueError, verifier.verify, h, tag) - - def test_negative_2(self): - key = RSA.import_key(self.rsa_key) - h = SHA256.new(self.msg) - verifier = pss.new(key, salt_bytes=1000) - tag = bytearray(self.tag) - self.assertRaises(ValueError, verifier.verify, h, tag) - - -class FIPS_PKCS1_Verify_Tests(unittest.TestCase): - - def shortDescription(self): - return "FIPS PKCS1 Tests (Verify)" - - def verify_positive(self, hashmod, message, public_key, salt, signature): - prng = PRNG(salt) - hashed = hashmod.new(message) - verifier = pss.new(public_key, salt_bytes=len(salt), rand_func=prng) - verifier.verify(hashed, signature) - - def verify_negative(self, hashmod, message, public_key, salt, signature): - prng = PRNG(salt) - hashed = hashmod.new(message) - verifier = pss.new(public_key, salt_bytes=len(salt), rand_func=prng) - self.assertRaises(ValueError, verifier.verify, hashed, signature) - - def test_can_sign(self): - test_public_key = RSA.generate(1024).public_key() - verifier = pss.new(test_public_key) - self.assertEqual(verifier.can_sign(), False) - - -class FIPS_PKCS1_Verify_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_verify = load_test_vectors(("Signature", "PKCS1-PSS"), - "SigVerPSS_186-3.rsp", - "Signature Verification 186-3", - {'shaalg': lambda x: x, - 'result': lambda x: x}) or [] - - -for count, tv in enumerate(test_vectors_verify): - if isinstance(tv, str): - continue - if hasattr(tv, "n"): - modulus = tv.n - continue - if hasattr(tv, "p"): - continue - - hash_module = load_hash_by_name(tv.shaalg.upper()) - hash_obj = hash_module.new(tv.msg) - public_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e)]) # type: ignore - if tv.saltval != b("\x00"): - prng = PRNG(tv.saltval) - verifier = pss.new(public_key, salt_bytes=len(tv.saltval), rand_func=prng) - else: - verifier = pss.new(public_key, salt_bytes=0) - - def positive_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): - verifier.verify(hash_obj, signature) - - def negative_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): - self.assertRaises(ValueError, verifier.verify, hash_obj, signature) - - if tv.result == 'p': - setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_positive_%d" % count, positive_test) - else: - setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_negative_%d" % count, negative_test) - - -class FIPS_PKCS1_Sign_Tests(unittest.TestCase): - - def shortDescription(self): - return "FIPS PKCS1 Tests (Sign)" - - def test_can_sign(self): - test_private_key = RSA.generate(1024) - signer = pss.new(test_private_key) - self.assertEqual(signer.can_sign(), True) - - -class FIPS_PKCS1_Sign_Tests_KAT(unittest.TestCase): - pass - - -test_vectors_sign = 
load_test_vectors(("Signature", "PKCS1-PSS"), - "SigGenPSS_186-2.txt", - "Signature Generation 186-2", - {'shaalg': lambda x: x}) or [] - -test_vectors_sign += load_test_vectors(("Signature", "PKCS1-PSS"), - "SigGenPSS_186-3.txt", - "Signature Generation 186-3", - {'shaalg': lambda x: x}) or [] - -for count, tv in enumerate(test_vectors_sign): - if isinstance(tv, str): - continue - if hasattr(tv, "n"): - modulus = tv.n - continue - if hasattr(tv, "e"): - private_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e, tv.d)]) # type: ignore - continue - - hash_module = load_hash_by_name(tv.shaalg.upper()) - hash_obj = hash_module.new(tv.msg) - if tv.saltval != b("\x00"): - prng = PRNG(tv.saltval) - signer = pss.new(private_key, salt_bytes=len(tv.saltval), rand_func=prng) - else: - signer = pss.new(private_key, salt_bytes=0) - - def new_test(self, hash_obj=hash_obj, signer=signer, result=tv.s): - signature = signer.sign(hash_obj) - self.assertEqual(signature, result) - - setattr(FIPS_PKCS1_Sign_Tests_KAT, "test_%d" % count, new_test) - - -class PKCS1_Legacy_Module_Tests(unittest.TestCase): - """Verify that the legacy module Cryptodome.Signature.PKCS1_PSS - behaves as expected. The only difference is that the verify() - method returns True/False and does not raise exceptions.""" - - def shortDescription(self): - return "Test legacy Cryptodome.Signature.PKCS1_PSS" - - def runTest(self): - key = RSA.generate(1024) - hashed = SHA1.new(b("Test")) - good_signature = PKCS1_PSS.new(key).sign(hashed) - verifier = PKCS1_PSS.new(key.public_key()) - - self.assertEqual(verifier.verify(hashed, good_signature), True) - - # Flip a few bits in the signature - bad_signature = strxor(good_signature, bchr(1) * len(good_signature)) - self.assertEqual(verifier.verify(hashed, bad_signature), False) - - -class PKCS1_All_Hashes_Tests(unittest.TestCase): - - def shortDescription(self): - return "Test PKCS#1 PSS signature in combination with all hashes" - - def runTest(self): - - key = RSA.generate(1280) - signer = pss.new(key) - hash_names = ("MD2", "MD4", "MD5", "RIPEMD160", "SHA1", - "SHA224", "SHA256", "SHA384", "SHA512", - "SHA3_224", "SHA3_256", "SHA3_384", "SHA3_512") - - for name in hash_names: - hashed = load_hash_by_name(name).new(b("Test")) - signer.sign(hashed) - - from Cryptodome.Hash import BLAKE2b, BLAKE2s - for hash_size in (20, 32, 48, 64): - hashed_b = BLAKE2b.new(digest_bytes=hash_size, data=b("Test")) - signer.sign(hashed_b) - for hash_size in (16, 20, 28, 32): - hashed_s = BLAKE2s.new(digest_bytes=hash_size, data=b("Test")) - signer.sign(hashed_s) - - -def get_hash_module(hash_name): - if hash_name == "SHA-512": - hash_module = SHA512 - elif hash_name == "SHA-512/224": - hash_module = SHA512.new(truncate="224") - elif hash_name == "SHA-512/256": - hash_module = SHA512.new(truncate="256") - elif hash_name == "SHA-384": - hash_module = SHA384 - elif hash_name == "SHA-256": - hash_module = SHA256 - elif hash_name == "SHA-224": - hash_module = SHA224 - elif hash_name == "SHA-1": - hash_module = SHA1 - else: - raise ValueError("Unknown hash algorithm: " + hash_name) - return hash_module - - -class TestVectorsPSSWycheproof(unittest.TestCase): - - def __init__(self, wycheproof_warnings): - unittest.TestCase.__init__(self) - self._wycheproof_warnings = wycheproof_warnings - self._id = "None" - - def add_tests(self, filename): - - def filter_rsa(group): - return RSA.import_key(group['keyPem']) - - def filter_sha(group): - return get_hash_module(group['sha']) - - def filter_type(group): - type_name = 
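Note on the PSS tests deleted above: they drive pss.new(), optionally with salt_bytes and a custom rand_func so the known-answer vectors reproduce deterministically. A minimal sketch with the default random salt (sized like the digest):

    from Cryptodome.PublicKey import RSA
    from Cryptodome.Hash import SHA256
    from Cryptodome.Signature import pss

    key = RSA.generate(2048)
    h = SHA256.new(b'AAA')
    signature = pss.new(key).sign(h)                     # random salt by default
    pss.new(key.public_key()).verify(h, signature)       # raises ValueError if invalid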
group['type'] - if type_name not in ("RsassaPssVerify", ): - raise ValueError("Unknown type name " + type_name) - - def filter_slen(group): - return group['sLen'] - - def filter_mgf(group): - mgf = group['mgf'] - if mgf not in ("MGF1", ): - raise ValueError("Unknown MGF " + mgf) - mgf1_hash = get_hash_module(group['mgfSha']) - - def mgf(x, y, mh=mgf1_hash): - return MGF1(x, y, mh) - - return mgf - - result = load_test_vectors_wycheproof(("Signature", "wycheproof"), - filename, - "Wycheproof PSS signature (%s)" % filename, - group_tag={'key': filter_rsa, - 'hash_module': filter_sha, - 'sLen': filter_slen, - 'mgf': filter_mgf, - 'type': filter_type}) - return result - - def setUp(self): - self.tv = [] - self.add_tests("rsa_pss_2048_sha1_mgf1_20_test.json") - self.add_tests("rsa_pss_2048_sha256_mgf1_0_test.json") - self.add_tests("rsa_pss_2048_sha256_mgf1_32_test.json") - self.add_tests("rsa_pss_2048_sha512_256_mgf1_28_test.json") - self.add_tests("rsa_pss_2048_sha512_256_mgf1_32_test.json") - self.add_tests("rsa_pss_3072_sha256_mgf1_32_test.json") - self.add_tests("rsa_pss_4096_sha256_mgf1_32_test.json") - self.add_tests("rsa_pss_4096_sha512_mgf1_32_test.json") - self.add_tests("rsa_pss_misc_test.json") - - def shortDescription(self): - return self._id - - def warn(self, tv): - if tv.warning and self._wycheproof_warnings: - import warnings - warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) - - def test_verify(self, tv): - self._id = "Wycheproof RSA PSS Test #%d (%s)" % (tv.id, tv.comment) - - hashed_msg = tv.hash_module.new(tv.msg) - signer = pss.new(tv.key, mask_func=tv.mgf, salt_bytes=tv.sLen) - try: - signature = signer.verify(hashed_msg, tv.sig) - except ValueError as e: - if tv.warning: - return - assert not tv.valid - else: - assert tv.valid - self.warn(tv) - - def runTest(self): - for tv in self.tv: - self.test_verify(tv) - - -def get_tests(config={}): - wycheproof_warnings = config.get('wycheproof_warnings') - - tests = [] - tests += list_test_cases(PSS_Tests) - tests += list_test_cases(FIPS_PKCS1_Verify_Tests) - tests += list_test_cases(FIPS_PKCS1_Sign_Tests) - tests += list_test_cases(PKCS1_Legacy_Module_Tests) - tests += list_test_cases(PKCS1_All_Hashes_Tests) - - if config.get('slow_tests'): - tests += list_test_cases(FIPS_PKCS1_Verify_Tests_KAT) - tests += list_test_cases(FIPS_PKCS1_Sign_Tests_KAT) - - tests += [TestVectorsPSSWycheproof(wycheproof_warnings)] - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__init__.py deleted file mode 100644 index e52c490..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/__init__.py: Self-test for utility modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test for utility modules""" - -__revision__ = "$Id$" - -import os - -def get_tests(config={}): - tests = [] - from Cryptodome.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) - from Cryptodome.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) - from Cryptodome.SelfTest.Util import test_Padding; tests += test_Padding.get_tests(config=config) - from Cryptodome.SelfTest.Util import test_strxor; tests += test_strxor.get_tests(config=config) - from Cryptodome.SelfTest.Util import test_asn1; tests += test_asn1.get_tests(config=config) - from Cryptodome.SelfTest.Util import test_rfc1751; tests += test_rfc1751.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index e7bb4c9..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_Counter.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_Counter.cpython-312.pyc deleted file mode 100644 index 526bf71..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_Counter.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_Padding.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_Padding.cpython-312.pyc deleted file mode 100644 index 4309d13..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_Padding.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_asn1.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_asn1.cpython-312.pyc deleted file mode 100644 index a1479bb..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_asn1.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_number.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_number.cpython-312.pyc deleted file mode 100644 index 51f184a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_number.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_rfc1751.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_rfc1751.cpython-312.pyc deleted file mode 100644 index 5be9f96..0000000 Binary files 
a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_rfc1751.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_strxor.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_strxor.cpython-312.pyc deleted file mode 100644 index c5dc1ad..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/__pycache__/test_strxor.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_Counter.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_Counter.py deleted file mode 100644 index 0d1e089..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_Counter.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_Counter: Self-test for the Cryptodome.Util.Counter module -# -# Written in 2009 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-tests for Cryptodome.Util.Counter""" - -from Cryptodome.Util.py3compat import * - -import unittest - -class CounterTests(unittest.TestCase): - def setUp(self): - global Counter - from Cryptodome.Util import Counter - - def test_BE(self): - """Big endian""" - c = Counter.new(128) - c = Counter.new(128, little_endian=False) - - def test_LE(self): - """Little endian""" - c = Counter.new(128, little_endian=True) - - def test_nbits(self): - c = Counter.new(nbits=128) - self.assertRaises(ValueError, Counter.new, 129) - - def test_prefix(self): - c = Counter.new(128, prefix=b("xx")) - - def test_suffix(self): - c = Counter.new(128, suffix=b("xx")) - - def test_iv(self): - c = Counter.new(128, initial_value=2) - self.assertRaises(ValueError, Counter.new, 16, initial_value=0x1FFFF) - -def get_tests(config={}): - from Cryptodome.SelfTest.st_common import list_test_cases - return list_test_cases(CounterTests) - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_Padding.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_Padding.py deleted file mode 100644 index a9c3eb6..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_Padding.py +++ /dev/null @@ -1,153 +0,0 @@ -# -# SelfTest/Util/test_Padding.py: Self-test for padding functions -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. 
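Note on the Counter self-test deleted above: it only checks the constructor arguments (nbits, endianness, prefix/suffix, initial_value). In practice the counter object is handed to a CTR-mode cipher; a small sketch, assuming the usual AES CTR pairing (prefix length plus counter width must fill the 16-byte block):

    from Cryptodome.Cipher import AES
    from Cryptodome.Util import Counter

    key = b'\x00' * 16
    ctr = Counter.new(64, prefix=b'\x00' * 8, initial_value=5)   # 64-bit counter after an 8-byte prefix
    cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
    ciphertext = cipher.encrypt(b'some plaintext')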
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import unittest -from binascii import unhexlify as uh - -from Cryptodome.Util.py3compat import * -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.Padding import pad, unpad - -class PKCS7_Tests(unittest.TestCase): - - def test1(self): - padded = pad(b(""), 4) - self.assertTrue(padded == uh(b("04040404"))) - padded = pad(b(""), 4, 'pkcs7') - self.assertTrue(padded == uh(b("04040404"))) - back = unpad(padded, 4) - self.assertTrue(back == b("")) - - def test2(self): - padded = pad(uh(b("12345678")), 4) - self.assertTrue(padded == uh(b("1234567804040404"))) - back = unpad(padded, 4) - self.assertTrue(back == uh(b("12345678"))) - - def test3(self): - padded = pad(uh(b("123456")), 4) - self.assertTrue(padded == uh(b("12345601"))) - back = unpad(padded, 4) - self.assertTrue(back == uh(b("123456"))) - - def test4(self): - padded = pad(uh(b("1234567890")), 4) - self.assertTrue(padded == uh(b("1234567890030303"))) - back = unpad(padded, 4) - self.assertTrue(back == uh(b("1234567890"))) - - def testn1(self): - self.assertRaises(ValueError, pad, uh(b("12")), 4, 'pkcs8') - - def testn2(self): - self.assertRaises(ValueError, unpad, b("\0\0\0"), 4) - self.assertRaises(ValueError, unpad, b(""), 4) - - def testn3(self): - self.assertRaises(ValueError, unpad, b("123456\x02"), 4) - self.assertRaises(ValueError, unpad, b("123456\x00"), 4) - self.assertRaises(ValueError, unpad, b("123456\x05\x05\x05\x05\x05"), 4) - -class X923_Tests(unittest.TestCase): - - def test1(self): - padded = pad(b(""), 4, 'x923') - self.assertTrue(padded == uh(b("00000004"))) - back = unpad(padded, 4, 'x923') - self.assertTrue(back == b("")) - - def test2(self): - padded = pad(uh(b("12345678")), 4, 'x923') - self.assertTrue(padded == uh(b("1234567800000004"))) - back = unpad(padded, 4, 'x923') - self.assertTrue(back == uh(b("12345678"))) - - def test3(self): - padded = pad(uh(b("123456")), 4, 'x923') - self.assertTrue(padded == uh(b("12345601"))) - back = unpad(padded, 4, 'x923') - self.assertTrue(back == uh(b("123456"))) - - def test4(self): - padded = pad(uh(b("1234567890")), 4, 'x923') - self.assertTrue(padded == uh(b("1234567890000003"))) - back = 
unpad(padded, 4, 'x923') - self.assertTrue(back == uh(b("1234567890"))) - - def testn1(self): - self.assertRaises(ValueError, unpad, b("123456\x02"), 4, 'x923') - self.assertRaises(ValueError, unpad, b("123456\x00"), 4, 'x923') - self.assertRaises(ValueError, unpad, b("123456\x00\x00\x00\x00\x05"), 4, 'x923') - self.assertRaises(ValueError, unpad, b(""), 4, 'x923') - -class ISO7816_Tests(unittest.TestCase): - - def test1(self): - padded = pad(b(""), 4, 'iso7816') - self.assertTrue(padded == uh(b("80000000"))) - back = unpad(padded, 4, 'iso7816') - self.assertTrue(back == b("")) - - def test2(self): - padded = pad(uh(b("12345678")), 4, 'iso7816') - self.assertTrue(padded == uh(b("1234567880000000"))) - back = unpad(padded, 4, 'iso7816') - self.assertTrue(back == uh(b("12345678"))) - - def test3(self): - padded = pad(uh(b("123456")), 4, 'iso7816') - self.assertTrue(padded == uh(b("12345680"))) - back = unpad(padded, 4, 'iso7816') - self.assertTrue(back == uh(b("123456"))) - - def test4(self): - padded = pad(uh(b("1234567890")), 4, 'iso7816') - self.assertTrue(padded == uh(b("1234567890800000"))) - back = unpad(padded, 4, 'iso7816') - self.assertTrue(back == uh(b("1234567890"))) - - def testn1(self): - self.assertRaises(ValueError, unpad, b("123456\x81"), 4, 'iso7816') - self.assertRaises(ValueError, unpad, b(""), 4, 'iso7816') - -def get_tests(config={}): - tests = [] - tests += list_test_cases(PKCS7_Tests) - tests += list_test_cases(X923_Tests) - tests += list_test_cases(ISO7816_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_asn1.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_asn1.py deleted file mode 100644 index 811ac84..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_asn1.py +++ /dev/null @@ -1,851 +0,0 @@ -# -# SelfTest/Util/test_asn.py: Self-test for the Cryptodome.Util.asn1 module -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
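Note on the padding tests deleted above: they cover the three supported styles ('pkcs7' by default, 'x923', 'iso7816'). A minimal round trip with the same helpers, using the 3-byte input from those tests:

    from Cryptodome.Util.Padding import pad, unpad

    data = bytes.fromhex('123456')
    assert pad(data, 4) == bytes.fromhex('12345601')              # PKCS#7 (default)
    assert pad(data, 4, 'x923') == bytes.fromhex('12345601')      # ANSI X9.23
    assert pad(data, 4, 'iso7816') == bytes.fromhex('12345680')   # ISO 7816-4
    assert unpad(pad(data, 4), 4) == data                         # unpad raises ValueError on bad padding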
-# =================================================================== - -"""Self-tests for Cryptodome.Util.asn1""" - -import unittest - -from Cryptodome.Util.py3compat import * -from Cryptodome.Util.asn1 import (DerObject, DerSetOf, DerInteger, - DerBitString, - DerObjectId, DerNull, DerOctetString, - DerSequence, DerBoolean) - -class DerObjectTests(unittest.TestCase): - - def testObjInit1(self): - # Fail with invalid tag format (must be 1 byte) - self.assertRaises(ValueError, DerObject, b('\x00\x99')) - # Fail with invalid implicit tag (must be <0x1F) - self.assertRaises(ValueError, DerObject, 0x1F) - - # ------ - - def testObjEncode1(self): - # No payload - der = DerObject(b('\x02')) - self.assertEqual(der.encode(), b('\x02\x00')) - # Small payload (primitive) - der.payload = b('\x45') - self.assertEqual(der.encode(), b('\x02\x01\x45')) - # Invariant - self.assertEqual(der.encode(), b('\x02\x01\x45')) - # Initialize with numerical tag - der = DerObject(0x04) - der.payload = b('\x45') - self.assertEqual(der.encode(), b('\x04\x01\x45')) - # Initialize with constructed type - der = DerObject(b('\x10'), constructed=True) - self.assertEqual(der.encode(), b('\x30\x00')) - - def testObjEncode2(self): - # Initialize with payload - der = DerObject(0x03, b('\x12\x12')) - self.assertEqual(der.encode(), b('\x03\x02\x12\x12')) - - def testObjEncode3(self): - # Long payload - der = DerObject(b('\x10')) - der.payload = b("0")*128 - self.assertEqual(der.encode(), b('\x10\x81\x80' + "0"*128)) - - def testObjEncode4(self): - # Implicit tags (constructed) - der = DerObject(0x10, implicit=1, constructed=True) - der.payload = b('ppll') - self.assertEqual(der.encode(), b('\xa1\x04ppll')) - # Implicit tags (primitive) - der = DerObject(0x02, implicit=0x1E, constructed=False) - der.payload = b('ppll') - self.assertEqual(der.encode(), b('\x9E\x04ppll')) - - def testObjEncode5(self): - # Encode type with explicit tag - der = DerObject(0x10, explicit=5) - der.payload = b("xxll") - self.assertEqual(der.encode(), b("\xa5\x06\x10\x04xxll")) - - # ----- - - def testObjDecode1(self): - # Decode short payload - der = DerObject(0x02) - der.decode(b('\x02\x02\x01\x02')) - self.assertEqual(der.payload, b("\x01\x02")) - self.assertEqual(der._tag_octet, 0x02) - - def testObjDecode2(self): - # Decode long payload - der = DerObject(0x02) - der.decode(b('\x02\x81\x80' + "1"*128)) - self.assertEqual(der.payload, b("1")*128) - self.assertEqual(der._tag_octet, 0x02) - - def testObjDecode3(self): - # Decode payload with too much data gives error - der = DerObject(0x02) - self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02\xFF')) - # Decode payload with too little data gives error - der = DerObject(0x02) - self.assertRaises(ValueError, der.decode, b('\x02\x02\x01')) - - def testObjDecode4(self): - # Decode implicit tag (primitive) - der = DerObject(0x02, constructed=False, implicit=0xF) - self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02')) - der.decode(b('\x8F\x01\x00')) - self.assertEqual(der.payload, b('\x00')) - # Decode implicit tag (constructed) - der = DerObject(0x02, constructed=True, implicit=0xF) - self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02')) - der.decode(b('\xAF\x01\x00')) - self.assertEqual(der.payload, b('\x00')) - - def testObjDecode5(self): - # Decode payload with unexpected tag gives error - der = DerObject(0x02) - self.assertRaises(ValueError, der.decode, b('\x03\x02\x01\x02')) - - def testObjDecode6(self): - # Arbitrary DER object - der = DerObject() - 
der.decode(b('\x65\x01\x88')) - self.assertEqual(der._tag_octet, 0x65) - self.assertEqual(der.payload, b('\x88')) - - def testObjDecode7(self): - # Decode explicit tag - der = DerObject(0x10, explicit=5) - der.decode(b("\xa5\x06\x10\x04xxll")) - self.assertEqual(der._inner_tag_octet, 0x10) - self.assertEqual(der.payload, b('xxll')) - - # Explicit tag may be 0 - der = DerObject(0x10, explicit=0) - der.decode(b("\xa0\x06\x10\x04xxll")) - self.assertEqual(der._inner_tag_octet, 0x10) - self.assertEqual(der.payload, b('xxll')) - - def testObjDecode8(self): - # Verify that decode returns the object - der = DerObject(0x02) - self.assertEqual(der, der.decode(b('\x02\x02\x01\x02'))) - -class DerIntegerTests(unittest.TestCase): - - def testInit1(self): - der = DerInteger(1) - self.assertEqual(der.encode(), b('\x02\x01\x01')) - - def testEncode1(self): - # Single-byte integers - # Value 0 - der = DerInteger(0) - self.assertEqual(der.encode(), b('\x02\x01\x00')) - # Value 1 - der = DerInteger(1) - self.assertEqual(der.encode(), b('\x02\x01\x01')) - # Value 127 - der = DerInteger(127) - self.assertEqual(der.encode(), b('\x02\x01\x7F')) - - def testEncode2(self): - # Multi-byte integers - # Value 128 - der = DerInteger(128) - self.assertEqual(der.encode(), b('\x02\x02\x00\x80')) - # Value 0x180 - der = DerInteger(0x180) - self.assertEqual(der.encode(), b('\x02\x02\x01\x80')) - # One very long integer - der = DerInteger(2**2048) - self.assertEqual(der.encode(), - b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) - - def testEncode3(self): - # Negative integers - # Value -1 - der = DerInteger(-1) - self.assertEqual(der.encode(), b('\x02\x01\xFF')) - # Value -128 - der = DerInteger(-128) - self.assertEqual(der.encode(), b('\x02\x01\x80')) - # Value - der = DerInteger(-87873) - self.assertEqual(der.encode(), b('\x02\x03\xFE\xA8\xBF')) - - def testEncode4(self): - # Explicit encoding - number = DerInteger(0x34, explicit=3) - self.assertEqual(number.encode(), b('\xa3\x03\x02\x01\x34')) - - # ----- - - def testDecode1(self): - # Single-byte integer - der = DerInteger() - # Value 0 - der.decode(b('\x02\x01\x00')) - self.assertEqual(der.value, 0) - # Value 1 - der.decode(b('\x02\x01\x01')) - self.assertEqual(der.value, 1) - # Value 127 - der.decode(b('\x02\x01\x7F')) - self.assertEqual(der.value, 127) - - def testDecode2(self): - # Multi-byte integer - der = DerInteger() 
- # Value 0x180L - der.decode(b('\x02\x02\x01\x80')) - self.assertEqual(der.value,0x180) - # One very long integer - der.decode( - b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) - self.assertEqual(der.value,2**2048) - - def testDecode3(self): - # Negative integer - der = DerInteger() - # Value -1 - der.decode(b('\x02\x01\xFF')) - self.assertEqual(der.value, -1) - # Value -32768 - der.decode(b('\x02\x02\x80\x00')) - self.assertEqual(der.value, -32768) - - def testDecode5(self): - # We still accept BER integer format - der = DerInteger() - # Redundant leading zeroes - der.decode(b('\x02\x02\x00\x01')) - self.assertEqual(der.value, 1) - # Redundant leading 0xFF - der.decode(b('\x02\x02\xFF\xFF')) - self.assertEqual(der.value, -1) - # Empty payload - der.decode(b('\x02\x00')) - self.assertEqual(der.value, 0) - - def testDecode6(self): - # Explicit encoding - number = DerInteger(explicit=3) - number.decode(b('\xa3\x03\x02\x01\x34')) - self.assertEqual(number.value, 0x34) - - def testDecode7(self): - # Verify decode returns the DerInteger - der = DerInteger() - self.assertEqual(der, der.decode(b('\x02\x01\x7F'))) - - ### - - def testStrict1(self): - number = DerInteger() - - number.decode(b'\x02\x02\x00\x01') - number.decode(b'\x02\x02\x00\x7F') - self.assertRaises(ValueError, number.decode, b'\x02\x02\x00\x01', strict=True) - self.assertRaises(ValueError, number.decode, b'\x02\x02\x00\x7F', strict=True) - - ### - - def testErrDecode1(self): - # Wide length field - der = DerInteger() - self.assertRaises(ValueError, der.decode, b('\x02\x81\x01\x01')) - - -class DerSequenceTests(unittest.TestCase): - - def testInit1(self): - der = DerSequence([1, DerInteger(2), b('0\x00')]) - self.assertEqual(der.encode(), b('0\x08\x02\x01\x01\x02\x01\x020\x00')) - - def testEncode1(self): - # Empty sequence - der = DerSequence() - self.assertEqual(der.encode(), b('0\x00')) - self.assertFalse(der.hasOnlyInts()) - # One single-byte integer (zero) - der.append(0) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) - self.assertEqual(der.hasInts(),1) - self.assertEqual(der.hasInts(False),1) - self.assertTrue(der.hasOnlyInts()) - self.assertTrue(der.hasOnlyInts(False)) - # Invariant - self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) - - def testEncode2(self): - # Indexing - der = DerSequence() - der.append(0) - der[0] = 1 - self.assertEqual(len(der),1) - 
self.assertEqual(der[0],1) - self.assertEqual(der[-1],1) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) - # - der[:] = [1] - self.assertEqual(len(der),1) - self.assertEqual(der[0],1) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) - - def testEncode3(self): - # One multi-byte integer (non-zero) - der = DerSequence() - der.append(0x180) - self.assertEqual(der.encode(), b('0\x04\x02\x02\x01\x80')) - - def testEncode4(self): - # One very long integer - der = DerSequence() - der.append(2**2048) - self.assertEqual(der.encode(), b('0\x82\x01\x05')+ - b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) - - def testEncode5(self): - der = DerSequence() - der += 1 - der += b('\x30\x00') - self.assertEqual(der.encode(), b('\x30\x05\x02\x01\x01\x30\x00')) - - def testEncode6(self): - # Two positive integers - der = DerSequence() - der.append(0x180) - der.append(0xFF) - self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) - self.assertTrue(der.hasOnlyInts()) - self.assertTrue(der.hasOnlyInts(False)) - # Two mixed integers - der = DerSequence() - der.append(2) - der.append(-2) - self.assertEqual(der.encode(), b('0\x06\x02\x01\x02\x02\x01\xFE')) - self.assertEqual(der.hasInts(), 1) - self.assertEqual(der.hasInts(False), 2) - self.assertFalse(der.hasOnlyInts()) - self.assertTrue(der.hasOnlyInts(False)) - # - der.append(0x01) - der[1:] = [9,8] - self.assertEqual(len(der),3) - self.assertEqual(der[1:],[9,8]) - self.assertEqual(der[1:-1],[9]) - self.assertEqual(der.encode(), b('0\x09\x02\x01\x02\x02\x01\x09\x02\x01\x08')) - - def testEncode7(self): - # One integer and another type (already encoded) - der = DerSequence() - der.append(0x180) - der.append(b('0\x03\x02\x01\x05')) - self.assertEqual(der.encode(), b('0\x09\x02\x02\x01\x800\x03\x02\x01\x05')) - self.assertFalse(der.hasOnlyInts()) - - def testEncode8(self): - # One integer and another type (yet to encode) - der = DerSequence() - der.append(0x180) - der.append(DerSequence([5])) - self.assertEqual(der.encode(), b('0\x09\x02\x02\x01\x800\x03\x02\x01\x05')) - self.assertFalse(der.hasOnlyInts()) - - #### - - def testDecode1(self): - # Empty sequence - der = DerSequence() - der.decode(b('0\x00')) - self.assertEqual(len(der),0) - # One single-byte integer (zero) - der.decode(b('0\x03\x02\x01\x00')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],0) - # Invariant - 
der.decode(b('0\x03\x02\x01\x00')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],0) - - def testDecode2(self): - # One single-byte integer (non-zero) - der = DerSequence() - der.decode(b('0\x03\x02\x01\x7f')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],127) - - def testDecode4(self): - # One very long integer - der = DerSequence() - der.decode(b('0\x82\x01\x05')+ - b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],2**2048) - - def testDecode6(self): - # Two integers - der = DerSequence() - der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) - self.assertEqual(len(der),2) - self.assertEqual(der[0],0x180) - self.assertEqual(der[1],0xFF) - - def testDecode7(self): - # One integer and 2 other types - der = DerSequence() - der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) - self.assertEqual(len(der),3) - self.assertEqual(der[0],0x180) - self.assertEqual(der[1],b('\x24\x02\xb6\x63')) - self.assertEqual(der[2],b('\x12\x00')) - - def testDecode8(self): - # Only 2 other types - der = DerSequence() - der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00')) - self.assertEqual(len(der),2) - self.assertEqual(der[0],b('\x24\x02\xb6\x63')) - self.assertEqual(der[1],b('\x12\x00')) - self.assertEqual(der.hasInts(), 0) - self.assertEqual(der.hasInts(False), 0) - self.assertFalse(der.hasOnlyInts()) - self.assertFalse(der.hasOnlyInts(False)) - - def testDecode9(self): - # Verify that decode returns itself - der = DerSequence() - self.assertEqual(der, der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00'))) - - ### - - def testErrDecode1(self): - # Not a sequence - der = DerSequence() - self.assertRaises(ValueError, der.decode, b('')) - self.assertRaises(ValueError, der.decode, b('\x00')) - self.assertRaises(ValueError, der.decode, b('\x30')) - - def testErrDecode2(self): - der = DerSequence() - # Too much data - self.assertRaises(ValueError, der.decode, b('\x30\x00\x00')) - - def testErrDecode3(self): - # Wrong length format - der = DerSequence() - # Missing length in sub-item - self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00')) - # Valid BER, but invalid DER length - self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01')) - self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01')) - - def test_expected_nr_elements(self): - der_bin = 
DerSequence([1, 2, 3]).encode() - - DerSequence().decode(der_bin, nr_elements=3) - DerSequence().decode(der_bin, nr_elements=(2,3)) - self.assertRaises(ValueError, DerSequence().decode, der_bin, nr_elements=1) - self.assertRaises(ValueError, DerSequence().decode, der_bin, nr_elements=(4,5)) - - def test_expected_only_integers(self): - - der_bin1 = DerSequence([1, 2, 3]).encode() - der_bin2 = DerSequence([1, 2, DerSequence([3, 4])]).encode() - - DerSequence().decode(der_bin1, only_ints_expected=True) - DerSequence().decode(der_bin1, only_ints_expected=False) - DerSequence().decode(der_bin2, only_ints_expected=False) - self.assertRaises(ValueError, DerSequence().decode, der_bin2, only_ints_expected=True) - - -class DerOctetStringTests(unittest.TestCase): - - def testInit1(self): - der = DerOctetString(b('\xFF')) - self.assertEqual(der.encode(), b('\x04\x01\xFF')) - - def testEncode1(self): - # Empty sequence - der = DerOctetString() - self.assertEqual(der.encode(), b('\x04\x00')) - # Small payload - der.payload = b('\x01\x02') - self.assertEqual(der.encode(), b('\x04\x02\x01\x02')) - - #### - - def testDecode1(self): - # Empty sequence - der = DerOctetString() - der.decode(b('\x04\x00')) - self.assertEqual(der.payload, b('')) - # Small payload - der.decode(b('\x04\x02\x01\x02')) - self.assertEqual(der.payload, b('\x01\x02')) - - def testDecode2(self): - # Verify that decode returns the object - der = DerOctetString() - self.assertEqual(der, der.decode(b('\x04\x00'))) - - def testErrDecode1(self): - # No leftovers allowed - der = DerOctetString() - self.assertRaises(ValueError, der.decode, b('\x04\x01\x01\xff')) - -class DerNullTests(unittest.TestCase): - - def testEncode1(self): - der = DerNull() - self.assertEqual(der.encode(), b('\x05\x00')) - - #### - - def testDecode1(self): - # Empty sequence - der = DerNull() - self.assertEqual(der, der.decode(b('\x05\x00'))) - -class DerObjectIdTests(unittest.TestCase): - - def testInit1(self): - der = DerObjectId("1.1") - self.assertEqual(der.encode(), b'\x06\x01)') - - def testEncode1(self): - der = DerObjectId('1.2.840.113549.1.1.1') - self.assertEqual(der.encode(), b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01') - - der = DerObjectId() - der.value = '1.2.840.113549.1.1.1' - self.assertEqual(der.encode(), b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01') - - der = DerObjectId('2.999.1234') - self.assertEqual(der.encode(), b'\x06\x04\x88\x37\x89\x52') - - def testEncode2(self): - der = DerObjectId('3.4') - self.assertRaises(ValueError, der.encode) - - der = DerObjectId('1.40') - self.assertRaises(ValueError, der.encode) - - #### - - def testDecode1(self): - # Empty sequence - der = DerObjectId() - der.decode(b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01') - self.assertEqual(der.value, '1.2.840.113549.1.1.1') - - def testDecode2(self): - # Verify that decode returns the object - der = DerObjectId() - self.assertEqual(der, - der.decode(b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01')) - - def testDecode3(self): - der = DerObjectId() - der.decode(b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x00\x01') - self.assertEqual(der.value, '1.2.840.113549.1.0.1') - - def testDecode4(self): - der = DerObjectId() - der.decode(b'\x06\x04\x88\x37\x89\x52') - self.assertEqual(der.value, '2.999.1234') - - -class DerBitStringTests(unittest.TestCase): - - def testInit1(self): - der = DerBitString(b("\xFF")) - self.assertEqual(der.encode(), b('\x03\x02\x00\xFF')) - - def testInit2(self): - der = DerBitString(DerInteger(1)) - self.assertEqual(der.encode(), 
b('\x03\x04\x00\x02\x01\x01')) - - def testEncode1(self): - # Empty sequence - der = DerBitString() - self.assertEqual(der.encode(), b('\x03\x01\x00')) - # Small payload - der = DerBitString(b('\x01\x02')) - self.assertEqual(der.encode(), b('\x03\x03\x00\x01\x02')) - # Small payload - der = DerBitString() - der.value = b('\x01\x02') - self.assertEqual(der.encode(), b('\x03\x03\x00\x01\x02')) - - #### - - def testDecode1(self): - # Empty sequence - der = DerBitString() - der.decode(b('\x03\x00')) - self.assertEqual(der.value, b('')) - # Small payload - der.decode(b('\x03\x03\x00\x01\x02')) - self.assertEqual(der.value, b('\x01\x02')) - - def testDecode2(self): - # Verify that decode returns the object - der = DerBitString() - self.assertEqual(der, der.decode(b('\x03\x00'))) - - -class DerSetOfTests(unittest.TestCase): - - def testInit1(self): - der = DerSetOf([DerInteger(1), DerInteger(2)]) - self.assertEqual(der.encode(), b('1\x06\x02\x01\x01\x02\x01\x02')) - - def testEncode1(self): - # Empty set - der = DerSetOf() - self.assertEqual(der.encode(), b('1\x00')) - # One single-byte integer (zero) - der.add(0) - self.assertEqual(der.encode(), b('1\x03\x02\x01\x00')) - # Invariant - self.assertEqual(der.encode(), b('1\x03\x02\x01\x00')) - - def testEncode2(self): - # Two integers - der = DerSetOf() - der.add(0x180) - der.add(0xFF) - self.assertEqual(der.encode(), b('1\x08\x02\x02\x00\xff\x02\x02\x01\x80')) - # Initialize with integers - der = DerSetOf([0x180, 0xFF]) - self.assertEqual(der.encode(), b('1\x08\x02\x02\x00\xff\x02\x02\x01\x80')) - - def testEncode3(self): - # One integer and another type (no matter what it is) - der = DerSetOf() - der.add(0x180) - self.assertRaises(ValueError, der.add, b('\x00\x02\x00\x00')) - - def testEncode4(self): - # Only non integers - der = DerSetOf() - der.add(b('\x01\x00')) - der.add(b('\x01\x01\x01')) - self.assertEqual(der.encode(), b('1\x05\x01\x00\x01\x01\x01')) - - #### - - def testDecode1(self): - # Empty sequence - der = DerSetOf() - der.decode(b('1\x00')) - self.assertEqual(len(der),0) - # One single-byte integer (zero) - der.decode(b('1\x03\x02\x01\x00')) - self.assertEqual(len(der),1) - self.assertEqual(list(der),[0]) - - def testDecode2(self): - # Two integers - der = DerSetOf() - der.decode(b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff')) - self.assertEqual(len(der),2) - l = list(der) - self.assertTrue(0x180 in l) - self.assertTrue(0xFF in l) - - def testDecode3(self): - # One integer and 2 other types - der = DerSetOf() - #import pdb; pdb.set_trace() - self.assertRaises(ValueError, der.decode, - b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) - - def testDecode4(self): - # Verify that decode returns the object - der = DerSetOf() - self.assertEqual(der, - der.decode(b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff'))) - - ### - - def testErrDecode1(self): - # No leftovers allowed - der = DerSetOf() - self.assertRaises(ValueError, der.decode, - b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff\xAA')) - - -class DerBooleanTests(unittest.TestCase): - - def testEncode1(self): - der = DerBoolean(False) - self.assertEqual(der.encode(), b'\x01\x01\x00') - - def testEncode2(self): - der = DerBoolean(True) - self.assertEqual(der.encode(), b'\x01\x01\xFF') - - def testEncode3(self): - der = DerBoolean(False, implicit=0x12) - self.assertEqual(der.encode(), b'\x92\x01\x00') - - def testEncode4(self): - der = DerBoolean(False, explicit=0x05) - self.assertEqual(der.encode(), b'\xA5\x03\x01\x01\x00') - #### - - def testDecode1(self): - der = DerBoolean() - 
der.decode(b'\x01\x01\x00') - self.assertEqual(der.value, False) - - def testDecode2(self): - der = DerBoolean() - der.decode(b'\x01\x01\xFF') - self.assertEqual(der.value, True) - - def testDecode3(self): - der = DerBoolean(implicit=0x12) - der.decode(b'\x92\x01\x00') - self.assertEqual(der.value, False) - - def testDecode4(self): - der = DerBoolean(explicit=0x05) - der.decode(b'\xA5\x03\x01\x01\x00') - self.assertEqual(der.value, False) - - def testErrorDecode1(self): - der = DerBoolean() - # Wrong tag - self.assertRaises(ValueError, der.decode, b'\x02\x01\x00') - - def testErrorDecode2(self): - der = DerBoolean() - # Payload too long - self.assertRaises(ValueError, der.decode, b'\x01\x01\x00\xFF') - - -def get_tests(config={}): - from Cryptodome.SelfTest.st_common import list_test_cases - listTests = [] - listTests += list_test_cases(DerObjectTests) - listTests += list_test_cases(DerIntegerTests) - listTests += list_test_cases(DerSequenceTests) - listTests += list_test_cases(DerOctetStringTests) - listTests += list_test_cases(DerNullTests) - listTests += list_test_cases(DerObjectIdTests) - listTests += list_test_cases(DerBitStringTests) - listTests += list_test_cases(DerSetOfTests) - listTests += list_test_cases(DerBooleanTests) - return listTests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_number.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_number.py deleted file mode 100644 index 8221443..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_number.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_number.py: Self-test for parts of the Cryptodome.Util.number module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-tests for (some of) Cryptodome.Util.number""" - -import math -import unittest - -from Cryptodome.Util.py3compat import * -from Cryptodome.SelfTest.st_common import list_test_cases - -from Cryptodome.Util import number -from Cryptodome.Util.number import long_to_bytes - - -class MyError(Exception): - """Dummy exception used for tests""" - -# NB: In some places, we compare tuples instead of just output values so that -# if any inputs cause a test failure, we'll be able to tell which ones. 
- -class MiscTests(unittest.TestCase): - - def test_ceil_div(self): - """Util.number.ceil_div""" - self.assertRaises(TypeError, number.ceil_div, "1", 1) - self.assertRaises(ZeroDivisionError, number.ceil_div, 1, 0) - self.assertRaises(ZeroDivisionError, number.ceil_div, -1, 0) - - # b = 1 - self.assertEqual(0, number.ceil_div(0, 1)) - self.assertEqual(1, number.ceil_div(1, 1)) - self.assertEqual(2, number.ceil_div(2, 1)) - self.assertEqual(3, number.ceil_div(3, 1)) - - # b = 2 - self.assertEqual(0, number.ceil_div(0, 2)) - self.assertEqual(1, number.ceil_div(1, 2)) - self.assertEqual(1, number.ceil_div(2, 2)) - self.assertEqual(2, number.ceil_div(3, 2)) - self.assertEqual(2, number.ceil_div(4, 2)) - self.assertEqual(3, number.ceil_div(5, 2)) - - # b = 3 - self.assertEqual(0, number.ceil_div(0, 3)) - self.assertEqual(1, number.ceil_div(1, 3)) - self.assertEqual(1, number.ceil_div(2, 3)) - self.assertEqual(1, number.ceil_div(3, 3)) - self.assertEqual(2, number.ceil_div(4, 3)) - self.assertEqual(2, number.ceil_div(5, 3)) - self.assertEqual(2, number.ceil_div(6, 3)) - self.assertEqual(3, number.ceil_div(7, 3)) - - # b = 4 - self.assertEqual(0, number.ceil_div(0, 4)) - self.assertEqual(1, number.ceil_div(1, 4)) - self.assertEqual(1, number.ceil_div(2, 4)) - self.assertEqual(1, number.ceil_div(3, 4)) - self.assertEqual(1, number.ceil_div(4, 4)) - self.assertEqual(2, number.ceil_div(5, 4)) - self.assertEqual(2, number.ceil_div(6, 4)) - self.assertEqual(2, number.ceil_div(7, 4)) - self.assertEqual(2, number.ceil_div(8, 4)) - self.assertEqual(3, number.ceil_div(9, 4)) - - def test_getPrime(self): - """Util.number.getPrime""" - self.assertRaises(ValueError, number.getPrime, -100) - self.assertRaises(ValueError, number.getPrime, 0) - self.assertRaises(ValueError, number.getPrime, 1) - - bits = 4 - for i in range(100): - x = number.getPrime(bits) - self.assertEqual(x >= (1 << bits - 1), 1) - self.assertEqual(x < (1 << bits), 1) - - bits = 512 - x = number.getPrime(bits) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x >= (1 << bits - 1), 1) - self.assertEqual(x < (1 << bits), 1) - - def test_getStrongPrime(self): - """Util.number.getStrongPrime""" - self.assertRaises(ValueError, number.getStrongPrime, 256) - self.assertRaises(ValueError, number.getStrongPrime, 513) - bits = 512 - x = number.getStrongPrime(bits) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x > (1 << bits-1)-1, 1) - self.assertEqual(x < (1 << bits), 1) - e = 2**16+1 - x = number.getStrongPrime(bits, e) - self.assertEqual(number.GCD(x-1, e), 1) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x > (1 << bits-1)-1, 1) - self.assertEqual(x < (1 << bits), 1) - e = 2**16+2 - x = number.getStrongPrime(bits, e) - self.assertEqual(number.GCD((x-1)>>1, e), 1) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x > (1 << bits-1)-1, 1) - self.assertEqual(x < (1 << bits), 1) - - def test_isPrime(self): - """Util.number.isPrime""" - self.assertEqual(number.isPrime(-3), False) # Regression test: negative numbers should not be prime - self.assertEqual(number.isPrime(-2), False) # Regression test: negative numbers should not be prime - self.assertEqual(number.isPrime(1), False) # Regression test: isPrime(1) caused some versions of PyCryptodome to crash. 
- self.assertEqual(number.isPrime(2), True) - self.assertEqual(number.isPrime(3), True) - self.assertEqual(number.isPrime(4), False) - self.assertEqual(number.isPrime(2**1279-1), True) - self.assertEqual(number.isPrime(-(2**1279-1)), False) # Regression test: negative numbers should not be prime - # test some known gmp pseudo-primes taken from - # http://www.trnicely.net/misc/mpzspsp.html - for composite in (43 * 127 * 211, 61 * 151 * 211, 15259 * 30517, - 346141 * 692281, 1007119 * 2014237, 3589477 * 7178953, - 4859419 * 9718837, 2730439 * 5460877, - 245127919 * 490255837, 963939391 * 1927878781, - 4186358431 * 8372716861, 1576820467 * 3153640933): - self.assertEqual(number.isPrime(int(composite)), False) - - def test_size(self): - self.assertEqual(number.size(2),2) - self.assertEqual(number.size(3),2) - self.assertEqual(number.size(0xa2),8) - self.assertEqual(number.size(0xa2ba40),8*3) - self.assertEqual(number.size(0xa2ba40ee07e3b2bd2f02ce227f36a195024486e49c19cb41bbbdfbba98b22b0e577c2eeaffa20d883a76e65e394c69d4b3c05a1e8fadda27edb2a42bc000fe888b9b32c22d15add0cd76b3e7936e19955b220dd17d4ea904b1ec102b2e4de7751222aa99151024c7cb41cc5ea21d00eeb41f7c800834d2c6e06bce3bce7ea9a5), 1024) - self.assertRaises(ValueError, number.size, -1) - - -class LongTests(unittest.TestCase): - - def test1(self): - self.assertEqual(long_to_bytes(0), b'\x00') - self.assertEqual(long_to_bytes(1), b'\x01') - self.assertEqual(long_to_bytes(0x100), b'\x01\x00') - self.assertEqual(long_to_bytes(0xFF00000000), b'\xFF\x00\x00\x00\x00') - self.assertEqual(long_to_bytes(0xFF00000000), b'\xFF\x00\x00\x00\x00') - self.assertEqual(long_to_bytes(0x1122334455667788), b'\x11\x22\x33\x44\x55\x66\x77\x88') - self.assertEqual(long_to_bytes(0x112233445566778899), b'\x11\x22\x33\x44\x55\x66\x77\x88\x99') - - def test2(self): - self.assertEqual(long_to_bytes(0, 1), b'\x00') - self.assertEqual(long_to_bytes(0, 2), b'\x00\x00') - self.assertEqual(long_to_bytes(1, 3), b'\x00\x00\x01') - self.assertEqual(long_to_bytes(65535, 2), b'\xFF\xFF') - self.assertEqual(long_to_bytes(65536, 2), b'\x00\x01\x00\x00') - self.assertEqual(long_to_bytes(0x100, 1), b'\x01\x00') - self.assertEqual(long_to_bytes(0xFF00000001, 6), b'\x00\xFF\x00\x00\x00\x01') - self.assertEqual(long_to_bytes(0xFF00000001, 8), b'\x00\x00\x00\xFF\x00\x00\x00\x01') - self.assertEqual(long_to_bytes(0xFF00000001, 10), b'\x00\x00\x00\x00\x00\xFF\x00\x00\x00\x01') - self.assertEqual(long_to_bytes(0xFF00000001, 11), b'\x00\x00\x00\x00\x00\x00\xFF\x00\x00\x00\x01') - - def test_err1(self): - self.assertRaises(ValueError, long_to_bytes, -1) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(MiscTests) - tests += list_test_cases(LongTests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_rfc1751.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_rfc1751.py deleted file mode 100644 index 43b137d..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_rfc1751.py +++ /dev/null @@ -1,38 +0,0 @@ -import unittest - -import binascii -from Cryptodome.Util.RFC1751 import key_to_english, english_to_key - - -class RFC1751_Tests(unittest.TestCase): - - def test1(self): - data = [ - ('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), - ('CCAC2AED591056BE4F90FD441C534766', 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), - ('EFF81F9BFBC65350920CDD7416DE8009', 
'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') - ] - - for key_hex, words in data: - key_bin = binascii.a2b_hex(key_hex) - - w2 = key_to_english(key_bin) - self.assertEqual(w2, words) - - k2 = english_to_key(words) - self.assertEqual(k2, key_bin) - - def test_error_key_to_english(self): - - self.assertRaises(ValueError, key_to_english, b'0' * 7) - - -def get_tests(config={}): - from Cryptodome.SelfTest.st_common import list_test_cases - tests = list_test_cases(RFC1751_Tests) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_strxor.py b/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_strxor.py deleted file mode 100644 index 6a96129..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/Util/test_strxor.py +++ /dev/null @@ -1,280 +0,0 @@ -# -# SelfTest/Util/test_strxor.py: Self-test for XORing -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -import unittest -from binascii import unhexlify, hexlify - -from Cryptodome.SelfTest.st_common import list_test_cases -from Cryptodome.Util.strxor import strxor, strxor_c - - -class StrxorTests(unittest.TestCase): - - def test1(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term2 = unhexlify(b"383d4ba020573314395b") - result = unhexlify(b"c70ed123c59a7fcb6f12") - self.assertEqual(strxor(term1, term2), result) - self.assertEqual(strxor(term2, term1), result) - - def test2(self): - es = b"" - self.assertEqual(strxor(es, es), es) - - def test3(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - all_zeros = b"\x00" * len(term1) - self.assertEqual(strxor(term1, term1), all_zeros) - - def test_wrong_length(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term2 = unhexlify(b"ff339a83e5cd4cdf564990") - self.assertRaises(ValueError, strxor, term1, term2) - - def test_bytearray(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term1_ba = bytearray(term1) - term2 = unhexlify(b"383d4ba020573314395b") - result = unhexlify(b"c70ed123c59a7fcb6f12") - - self.assertEqual(strxor(term1_ba, term2), result) - - def test_memoryview(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term1_mv = memoryview(term1) - term2 = unhexlify(b"383d4ba020573314395b") - result = unhexlify(b"c70ed123c59a7fcb6f12") - - self.assertEqual(strxor(term1_mv, term2), result) - - def test_output_bytearray(self): - """Verify result can be stored in pre-allocated memory""" - - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term2 = unhexlify(b"383d4ba020573314395b") - original_term1 = term1[:] - original_term2 = term2[:] - expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") - output = bytearray(len(term1)) - - result = strxor(term1, term2, output=output) - - self.assertEqual(result, None) - self.assertEqual(output, expected_xor) - self.assertEqual(term1, original_term1) - self.assertEqual(term2, original_term2) - - def test_output_memoryview(self): - """Verify result can be stored in pre-allocated memory""" - - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term2 = unhexlify(b"383d4ba020573314395b") - original_term1 = term1[:] - original_term2 = term2[:] - expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") - output = memoryview(bytearray(len(term1))) - - result = strxor(term1, term2, output=output) - - self.assertEqual(result, None) - self.assertEqual(output, expected_xor) - self.assertEqual(term1, original_term1) - self.assertEqual(term2, original_term2) - - def test_output_overlapping_bytearray(self): - """Verify result can be stored in overlapping memory""" - - term1 = bytearray(unhexlify(b"ff339a83e5cd4cdf5649")) - term2 = unhexlify(b"383d4ba020573314395b") - original_term2 = term2[:] - expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") - - result = strxor(term1, term2, output=term1) - - self.assertEqual(result, None) - self.assertEqual(term1, expected_xor) - self.assertEqual(term2, original_term2) - - def test_output_overlapping_memoryview(self): - """Verify result can be stored in overlapping memory""" - - term1 = memoryview(bytearray(unhexlify(b"ff339a83e5cd4cdf5649"))) - term2 = unhexlify(b"383d4ba020573314395b") - original_term2 = term2[:] - expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") - - result = strxor(term1, term2, output=term1) - - self.assertEqual(result, None) - self.assertEqual(term1, expected_xor) - self.assertEqual(term2, original_term2) - - def test_output_ro_bytes(self): - """Verify result cannot be stored in read-only 
memory""" - - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term2 = unhexlify(b"383d4ba020573314395b") - - self.assertRaises(TypeError, strxor, term1, term2, output=term1) - - def test_output_ro_memoryview(self): - """Verify result cannot be stored in read-only memory""" - - term1 = memoryview(unhexlify(b"ff339a83e5cd4cdf5649")) - term2 = unhexlify(b"383d4ba020573314395b") - - self.assertRaises(TypeError, strxor, term1, term2, output=term1) - - def test_output_incorrect_length(self): - """Verify result cannot be stored in memory of incorrect length""" - - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term2 = unhexlify(b"383d4ba020573314395b") - output = bytearray(len(term1) - 1) - - self.assertRaises(ValueError, strxor, term1, term2, output=output) - - -class Strxor_cTests(unittest.TestCase): - - def test1(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - result = unhexlify(b"be72dbc2a48c0d9e1708") - self.assertEqual(strxor_c(term1, 65), result) - - def test2(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - self.assertEqual(strxor_c(term1, 0), term1) - - def test3(self): - self.assertEqual(strxor_c(b"", 90), b"") - - def test_wrong_range(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - self.assertRaises(ValueError, strxor_c, term1, -1) - self.assertRaises(ValueError, strxor_c, term1, 256) - - def test_bytearray(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term1_ba = bytearray(term1) - result = unhexlify(b"be72dbc2a48c0d9e1708") - - self.assertEqual(strxor_c(term1_ba, 65), result) - - def test_memoryview(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - term1_mv = memoryview(term1) - result = unhexlify(b"be72dbc2a48c0d9e1708") - - self.assertEqual(strxor_c(term1_mv, 65), result) - - def test_output_bytearray(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - original_term1 = term1[:] - expected_result = unhexlify(b"be72dbc2a48c0d9e1708") - output = bytearray(len(term1)) - - result = strxor_c(term1, 65, output=output) - - self.assertEqual(result, None) - self.assertEqual(output, expected_result) - self.assertEqual(term1, original_term1) - - def test_output_memoryview(self): - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - original_term1 = term1[:] - expected_result = unhexlify(b"be72dbc2a48c0d9e1708") - output = memoryview(bytearray(len(term1))) - - result = strxor_c(term1, 65, output=output) - - self.assertEqual(result, None) - self.assertEqual(output, expected_result) - self.assertEqual(term1, original_term1) - - def test_output_overlapping_bytearray(self): - """Verify result can be stored in overlapping memory""" - - term1 = bytearray(unhexlify(b"ff339a83e5cd4cdf5649")) - expected_xor = unhexlify(b"be72dbc2a48c0d9e1708") - - result = strxor_c(term1, 65, output=term1) - - self.assertEqual(result, None) - self.assertEqual(term1, expected_xor) - - def test_output_overlapping_memoryview(self): - """Verify result can be stored in overlapping memory""" - - term1 = memoryview(bytearray(unhexlify(b"ff339a83e5cd4cdf5649"))) - expected_xor = unhexlify(b"be72dbc2a48c0d9e1708") - - result = strxor_c(term1, 65, output=term1) - - self.assertEqual(result, None) - self.assertEqual(term1, expected_xor) - - def test_output_ro_bytes(self): - """Verify result cannot be stored in read-only memory""" - - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - - self.assertRaises(TypeError, strxor_c, term1, 65, output=term1) - - def test_output_ro_memoryview(self): - """Verify result cannot be stored in read-only memory""" - - term1 = memoryview(unhexlify(b"ff339a83e5cd4cdf5649")) - term2 = 
unhexlify(b"383d4ba020573314395b") - - self.assertRaises(TypeError, strxor_c, term1, 65, output=term1) - - def test_output_incorrect_length(self): - """Verify result cannot be stored in memory of incorrect length""" - - term1 = unhexlify(b"ff339a83e5cd4cdf5649") - output = bytearray(len(term1) - 1) - - self.assertRaises(ValueError, strxor_c, term1, 65, output=output) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(StrxorTests) - tests += list_test_cases(Strxor_cTests) - return tests - - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/__init__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/__init__.py deleted file mode 100644 index 09bb48c..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/__init__.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/__init__.py: Self-test for PyCrypto -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self tests - -These tests should perform quickly and can ideally be used every time an -application runs. -""" - -import sys -import unittest -from importlib import import_module -from Cryptodome.Util.py3compat import StringIO - - -class SelfTestError(Exception): - def __init__(self, message, result): - Exception.__init__(self, message, result) - self.message = message - self.result = result - - -def run(module=None, verbosity=0, stream=None, tests=None, config=None, **kwargs): - """Execute self-tests. - - This raises SelfTestError if any test is unsuccessful. - - You may optionally pass in a sub-module of SelfTest if you only want to - perform some of the tests. 
For example, the following would test only the - hash modules: - - Cryptodome.SelfTest.run(Cryptodome.SelfTest.Hash) - - """ - - if config is None: - config = {} - suite = unittest.TestSuite() - if module is None: - if tests is None: - tests = get_tests(config=config) - suite.addTests(tests) - else: - if tests is None: - suite.addTests(module.get_tests(config=config)) - else: - raise ValueError("'module' and 'tests' arguments are mutually exclusive") - if stream is None: - kwargs['stream'] = StringIO() - else: - kwargs['stream'] = stream - runner = unittest.TextTestRunner(verbosity=verbosity, **kwargs) - result = runner.run(suite) - if not result.wasSuccessful(): - if stream is None: - sys.stderr.write(kwargs['stream'].getvalue()) - raise SelfTestError("Self-test failed", result) - return result - - -def get_tests(config={}): - tests = [] - - module_names = [ - "Cipher", "Hash", "Protocol", "PublicKey", "Random", - "Util", "Signature", "IO", "Math", - ] - - for name in module_names: - module = import_module("Cryptodome.SelfTest." + name) - tests += module.get_tests(config=config) - - return tests - - -if __name__ == '__main__': - def suite(): - return unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/__main__.py b/venv/Lib/site-packages/Cryptodome/SelfTest/__main__.py deleted file mode 100644 index 242d781..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/__main__.py +++ /dev/null @@ -1,43 +0,0 @@ -#! /usr/bin/env python -# -# __main__.py : Stand-along loader for PyCryptodome test suite -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -from __future__ import print_function - -import sys - -from Cryptodome import SelfTest - -slow_tests = not ("--skip-slow-tests" in sys.argv) -if not slow_tests: - print("Skipping slow tests") - -wycheproof_warnings = "--wycheproof-warnings" in sys.argv -if wycheproof_warnings: - print("Printing Wycheproof warnings") - -if "-v" in sys.argv: - verbosity=2 -else: - verbosity=1 - -config = {'slow_tests': slow_tests, 'wycheproof_warnings': wycheproof_warnings} -SelfTest.run(stream=sys.stdout, verbosity=verbosity, config=config) diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 8946973..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/__main__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index 6c4efdd..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/loader.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/loader.cpython-312.pyc deleted file mode 100644 index 1413447..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/loader.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/st_common.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/st_common.cpython-312.pyc deleted file mode 100644 index 77f785e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/SelfTest/__pycache__/st_common.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/loader.py b/venv/Lib/site-packages/Cryptodome/SelfTest/loader.py deleted file mode 100644 index 8699c3d..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/loader.py +++ /dev/null @@ -1,250 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2016, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import os -import re -import json -import errno -import binascii -import warnings -from binascii import unhexlify -from Cryptodome.Util.py3compat import FileNotFoundError - - -try: - import pycryptodome_test_vectors # type: ignore - test_vectors_available = True -except ImportError: - test_vectors_available = False - - -def _load_tests(dir_comps, file_in, description, conversions): - """Load and parse a test vector file - - Return a list of objects, one per group of adjacent - KV lines or for a single line in the form "[.*]". - - For a group of lines, the object has one attribute per line. - """ - - line_number = 0 - results = [] - - class TestVector(object): - def __init__(self, description, count): - self.desc = description - self.count = count - self.others = [] - - test_vector = None - count = 0 - new_group = True - - while True: - line_number += 1 - line = file_in.readline() - if not line: - if test_vector is not None: - results.append(test_vector) - break - line = line.strip() - - # Skip comments and empty lines - if line.startswith('#') or not line: - new_group = True - continue - - if line.startswith("["): - if test_vector is not None: - results.append(test_vector) - test_vector = None - results.append(line) - continue - - if new_group: - count += 1 - new_group = False - if test_vector is not None: - results.append(test_vector) - test_vector = TestVector("%s (#%d)" % (description, count), count) - - res = re.match("([A-Za-z0-9]+) = ?(.*)", line) - if not res: - test_vector.others += [line] - else: - token = res.group(1).lower() - data = res.group(2).lower() - - conversion = conversions.get(token, None) - if conversion is None: - if len(data) % 2 != 0: - data = "0" + data - setattr(test_vector, token, binascii.unhexlify(data)) - else: - setattr(test_vector, token, conversion(data)) - - # This line is ignored - return results - - -def load_test_vectors(dir_comps, file_name, description, conversions): - """Load and parse a test vector file, formatted using the NIST style. - - Args: - dir_comps (list of strings): - The path components under the ``pycryptodome_test_vectors`` package. - For instance ``("Cipher", "AES")``. - file_name (string): - The name of the file with the test vectors. - description (string): - A description applicable to the test vectors in the file. - conversions (dictionary): - The dictionary contains functions. - Values in the file that have an entry in this dictionary - will be converted usign the matching function. - Otherwise, values will be considered as hexadecimal and - converted to binary. - - Returns: - A list of test vector objects. - - The file is formatted in the following way: - - - Lines starting with "#" are comments and will be ignored. - - Each test vector is a sequence of 1 or more adjacent lines, where - each lines is an assignement. - - Test vectors are separated by an empty line, a comment, or - a line starting with "[". 
- - A test vector object has the following attributes: - - - desc (string): description - - counter (int): the order of the test vector in the file (from 1) - - others (list): zero or more lines of the test vector that were not assignments - - left-hand side of each assignment (lowercase): the value of the - assignement, either converted or bytes. - """ - - results = None - - try: - if not test_vectors_available: - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), - file_name) - - description = "%s test (%s)" % (description, file_name) - - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - with open(full_file_name) as file_in: - results = _load_tests(dir_comps, file_in, description, conversions) - - except FileNotFoundError: - warnings.warn("Warning: skipping extended tests for " + description, - UserWarning, - stacklevel=2) - - return results - - -def load_test_vectors_wycheproof(dir_comps, file_name, description, - root_tag={}, group_tag={}, unit_tag={}): - - result = [] - try: - if not test_vectors_available: - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), - file_name) - - init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) - full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) - with open(full_file_name) as file_in: - tv_tree = json.load(file_in) - - except FileNotFoundError: - warnings.warn("Warning: skipping extended tests for " + description, - UserWarning, - stacklevel=2) - return result - - class TestVector(object): - pass - - # Unique attributes that will be converted from - # hexadecimal to binary, unless the attribute is - # listed in the unit_tag dict - unit_attr_hex = {'key', 'iv', 'aad', 'msg', 'ct', 'tag', 'label', - 'ikm', 'salt', 'info', 'okm', 'sig', 'public', - 'shared'} - unit_attr_hex -= set(unit_tag.keys()) - - common_root = {} - for k, v in root_tag.items(): - common_root[k] = v(tv_tree) - - for group in tv_tree['testGroups']: - - common_group = {} - for k, v in group_tag.items(): - common_group[k] = v(group) - - for test in group['tests']: - tv = TestVector() - - for k, v in common_root.items(): - setattr(tv, k, v) - for k, v in common_group.items(): - setattr(tv, k, v) - - tv.id = test['tcId'] - tv.comment = test['comment'] - for attr in unit_attr_hex: - if attr in test: - try: - setattr(tv, attr, unhexlify(test[attr])) - except binascii.Error: - raise ValueError("Error decoding attribute '%s' (tcId=%s, file %s)" % (attr, tv.id, file_name)) - tv.filename = file_name - - for k, v in unit_tag.items(): - setattr(tv, k, v(test)) - - tv.valid = test['result'] != "invalid" - tv.warning = test['result'] == "acceptable" - tv.flags = test.get('flags') - - tv.filename = file_name - - result.append(tv) - - return result - diff --git a/venv/Lib/site-packages/Cryptodome/SelfTest/st_common.py b/venv/Lib/site-packages/Cryptodome/SelfTest/st_common.py deleted file mode 100644 index 3565251..0000000 --- a/venv/Lib/site-packages/Cryptodome/SelfTest/st_common.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/st_common.py: Common functions for SelfTest modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Common functions for SelfTest modules""" - -import unittest -import binascii -from Cryptodome.Util.py3compat import b - - -def list_test_cases(class_): - """Return a list of TestCase instances given a TestCase class - - This is useful when you have defined test* methods on your TestCase class. - """ - return unittest.TestLoader().loadTestsFromTestCase(class_) - -def strip_whitespace(s): - """Remove whitespace from a text or byte string""" - if isinstance(s,str): - return b("".join(s.split())) - else: - return b("").join(s.split()) - -def a2b_hex(s): - """Convert hexadecimal to binary, ignoring whitespace""" - return binascii.a2b_hex(strip_whitespace(s)) - -def b2a_hex(s): - """Convert binary to hexadecimal""" - # For completeness - return binascii.b2a_hex(s) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/venv/Lib/site-packages/Cryptodome/Signature/DSS.py b/venv/Lib/site-packages/Cryptodome/Signature/DSS.py deleted file mode 100644 index 97d9c85..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/DSS.py +++ /dev/null @@ -1,403 +0,0 @@ -# -# Signature/DSS.py : DSS.py -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
-# =================================================================== - -from Cryptodome.Util.asn1 import DerSequence -from Cryptodome.Util.number import long_to_bytes -from Cryptodome.Math.Numbers import Integer - -from Cryptodome.Hash import HMAC -from Cryptodome.PublicKey.ECC import EccKey -from Cryptodome.PublicKey.DSA import DsaKey - -__all__ = ['DssSigScheme', 'new'] - - -class DssSigScheme(object): - """A (EC)DSA signature object. - Do not instantiate directly. - Use :func:`Cryptodome.Signature.DSS.new`. - """ - - def __init__(self, key, encoding, order): - """Create a new Digital Signature Standard (DSS) object. - - Do not instantiate this object directly, - use `Cryptodome.Signature.DSS.new` instead. - """ - - self._key = key - self._encoding = encoding - self._order = order - - self._order_bits = self._order.size_in_bits() - self._order_bytes = (self._order_bits - 1) // 8 + 1 - - def can_sign(self): - """Return ``True`` if this signature object can be used - for signing messages.""" - - return self._key.has_private() - - def _compute_nonce(self, msg_hash): - raise NotImplementedError("To be provided by subclasses") - - def _valid_hash(self, msg_hash): - raise NotImplementedError("To be provided by subclasses") - - def sign(self, msg_hash): - """Compute the DSA/ECDSA signature of a message. - - Args: - msg_hash (hash object): - The hash that was carried out over the message. - The object belongs to the :mod:`Cryptodome.Hash` package. - Under mode ``'fips-186-3'``, the hash must be a FIPS - approved secure hash (SHA-2 or SHA-3). - - :return: The signature as ``bytes`` - :raise ValueError: if the hash algorithm is incompatible to the (EC)DSA key - :raise TypeError: if the (EC)DSA key has no private half - """ - - if not self._key.has_private(): - raise TypeError("Private key is needed to sign") - - if not self._valid_hash(msg_hash): - raise ValueError("Hash is not sufficiently strong") - - # Generate the nonce k (critical!) - nonce = self._compute_nonce(msg_hash) - - # Perform signature using the raw API - z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes]) - sig_pair = self._key._sign(z, nonce) - - # Encode the signature into a single byte string - if self._encoding == 'binary': - output = b"".join([long_to_bytes(x, self._order_bytes) - for x in sig_pair]) - else: - # Dss-sig ::= SEQUENCE { - # r INTEGER, - # s INTEGER - # } - # Ecdsa-Sig-Value ::= SEQUENCE { - # r INTEGER, - # s INTEGER - # } - output = DerSequence(sig_pair).encode() - - return output - - def verify(self, msg_hash, signature): - """Check if a certain (EC)DSA signature is authentic. - - Args: - msg_hash (hash object): - The hash that was carried out over the message. - This is an object belonging to the :mod:`Cryptodome.Hash` module. - Under mode ``'fips-186-3'``, the hash must be a FIPS - approved secure hash (SHA-2 or SHA-3). - - signature (``bytes``): - The signature that needs to be validated. 
- - :raise ValueError: if the signature is not authentic - """ - - if not self._valid_hash(msg_hash): - raise ValueError("Hash is not sufficiently strong") - - if self._encoding == 'binary': - if len(signature) != (2 * self._order_bytes): - raise ValueError("The signature is not authentic (length)") - r_prime, s_prime = [Integer.from_bytes(x) - for x in (signature[:self._order_bytes], - signature[self._order_bytes:])] - else: - try: - der_seq = DerSequence().decode(signature, strict=True) - except (ValueError, IndexError): - raise ValueError("The signature is not authentic (DER)") - if len(der_seq) != 2 or not der_seq.hasOnlyInts(): - raise ValueError("The signature is not authentic (DER content)") - r_prime, s_prime = Integer(der_seq[0]), Integer(der_seq[1]) - - if not (0 < r_prime < self._order) or not (0 < s_prime < self._order): - raise ValueError("The signature is not authentic (d)") - - z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes]) - result = self._key._verify(z, (r_prime, s_prime)) - if not result: - raise ValueError("The signature is not authentic") - # Make PyCryptodome code to fail - return False - - -class DeterministicDsaSigScheme(DssSigScheme): - # Also applicable to ECDSA - - def __init__(self, key, encoding, order, private_key): - super(DeterministicDsaSigScheme, self).__init__(key, encoding, order) - self._private_key = private_key - - def _bits2int(self, bstr): - """See 2.3.2 in RFC6979""" - - result = Integer.from_bytes(bstr) - q_len = self._order.size_in_bits() - b_len = len(bstr) * 8 - if b_len > q_len: - # Only keep leftmost q_len bits - result >>= (b_len - q_len) - return result - - def _int2octets(self, int_mod_q): - """See 2.3.3 in RFC6979""" - - assert 0 < int_mod_q < self._order - return long_to_bytes(int_mod_q, self._order_bytes) - - def _bits2octets(self, bstr): - """See 2.3.4 in RFC6979""" - - z1 = self._bits2int(bstr) - if z1 < self._order: - z2 = z1 - else: - z2 = z1 - self._order - return self._int2octets(z2) - - def _compute_nonce(self, mhash): - """Generate k in a deterministic way""" - - # See section 3.2 in RFC6979.txt - # Step a - h1 = mhash.digest() - # Step b - mask_v = b'\x01' * mhash.digest_size - # Step c - nonce_k = b'\x00' * mhash.digest_size - - for int_oct in (b'\x00', b'\x01'): - # Step d/f - nonce_k = HMAC.new(nonce_k, - mask_v + int_oct + - self._int2octets(self._private_key) + - self._bits2octets(h1), mhash).digest() - # Step e/g - mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() - - nonce = -1 - while not (0 < nonce < self._order): - # Step h.C (second part) - if nonce != -1: - nonce_k = HMAC.new(nonce_k, mask_v + b'\x00', - mhash).digest() - mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() - - # Step h.A - mask_t = b"" - - # Step h.B - while len(mask_t) < self._order_bytes: - mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() - mask_t += mask_v - - # Step h.C (first part) - nonce = self._bits2int(mask_t) - return nonce - - def _valid_hash(self, msg_hash): - return True - - -class FipsDsaSigScheme(DssSigScheme): - - #: List of L (bit length of p) and N (bit length of q) combinations - #: that are allowed by FIPS 186-3. The security level is provided in - #: Table 2 of FIPS 800-57 (rev3). 
- _fips_186_3_L_N = ( - (1024, 160), # 80 bits (SHA-1 or stronger) - (2048, 224), # 112 bits (SHA-224 or stronger) - (2048, 256), # 128 bits (SHA-256 or stronger) - (3072, 256) # 256 bits (SHA-512) - ) - - def __init__(self, key, encoding, order, randfunc): - super(FipsDsaSigScheme, self).__init__(key, encoding, order) - self._randfunc = randfunc - - L = Integer(key.p).size_in_bits() - if (L, self._order_bits) not in self._fips_186_3_L_N: - error = ("L/N (%d, %d) is not compliant to FIPS 186-3" - % (L, self._order_bits)) - raise ValueError(error) - - def _compute_nonce(self, msg_hash): - # hash is not used - return Integer.random_range(min_inclusive=1, - max_exclusive=self._order, - randfunc=self._randfunc) - - def _valid_hash(self, msg_hash): - """Verify that SHA-1, SHA-2 or SHA-3 are used""" - return (msg_hash.oid == "1.3.14.3.2.26" or - msg_hash.oid.startswith("2.16.840.1.101.3.4.2.")) - - -class FipsEcDsaSigScheme(DssSigScheme): - - def __init__(self, key, encoding, order, randfunc): - super(FipsEcDsaSigScheme, self).__init__(key, encoding, order) - self._randfunc = randfunc - - def _compute_nonce(self, msg_hash): - return Integer.random_range(min_inclusive=1, - max_exclusive=self._key._curve.order, - randfunc=self._randfunc) - - def _valid_hash(self, msg_hash): - """Verify that the strength of the hash matches or exceeds - the strength of the EC. We fail if the hash is too weak.""" - - modulus_bits = self._key.pointQ.size_in_bits() - - # SHS: SHA-2, SHA-3, truncated SHA-512 - sha224 = ("2.16.840.1.101.3.4.2.4", "2.16.840.1.101.3.4.2.7", "2.16.840.1.101.3.4.2.5") - sha256 = ("2.16.840.1.101.3.4.2.1", "2.16.840.1.101.3.4.2.8", "2.16.840.1.101.3.4.2.6") - sha384 = ("2.16.840.1.101.3.4.2.2", "2.16.840.1.101.3.4.2.9") - sha512 = ("2.16.840.1.101.3.4.2.3", "2.16.840.1.101.3.4.2.10") - shs = sha224 + sha256 + sha384 + sha512 - - try: - result = msg_hash.oid in shs - except AttributeError: - result = False - return result - - -def new(key, mode, encoding='binary', randfunc=None): - """Create a signature object :class:`DssSigScheme` that - can perform (EC)DSA signature or verification. - - .. note:: - Refer to `NIST SP 800 Part 1 Rev 4`_ (or newer release) for an - overview of the recommended key lengths. - - Args: - key (:class:`Cryptodome.PublicKey.DSA` or :class:`Cryptodome.PublicKey.ECC`): - The key to use for computing the signature (*private* keys only) - or for verifying one. - For DSA keys, let ``L`` and ``N`` be the bit lengths of the modulus ``p`` - and of ``q``: the pair ``(L,N)`` must appear in the following list, - in compliance to section 4.2 of `FIPS 186-4`_: - - - (1024, 160) *legacy only; do not create new signatures with this* - - (2048, 224) *deprecated; do not create new signatures with this* - - (2048, 256) - - (3072, 256) - - For ECC, only keys over P-224, P-256, P-384, and P-521 are accepted. - - mode (string): - The parameter can take these values: - - - ``'fips-186-3'``. The signature generation is randomized and carried out - according to `FIPS 186-3`_: the nonce ``k`` is taken from the RNG. - - ``'deterministic-rfc6979'``. The signature generation is not - randomized. See RFC6979_. - - encoding (string): - How the signature is encoded. This value determines the output of - :meth:`sign` and the input to :meth:`verify`. - - The following values are accepted: - - - ``'binary'`` (default), the signature is the raw concatenation - of ``r`` and ``s``. It is defined in the IEEE P.1363 standard. - For DSA, the size in bytes of the signature is ``N/4`` bytes - (e.g. 
64 for ``N=256``). - For ECDSA, the signature is always twice the length of a point - coordinate (e.g. 64 bytes for P-256). - - - ``'der'``, the signature is a ASN.1 DER SEQUENCE - with two INTEGERs (``r`` and ``s``). It is defined in RFC3279_. - The size of the signature is variable. - - randfunc (callable): - A function that returns random ``bytes``, of a given length. - If omitted, the internal RNG is used. - Only applicable for the *'fips-186-3'* mode. - - .. _FIPS 186-3: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf - .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf - .. _NIST SP 800 Part 1 Rev 4: http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-57pt1r4.pdf - .. _RFC6979: http://tools.ietf.org/html/rfc6979 - .. _RFC3279: https://tools.ietf.org/html/rfc3279#section-2.2.2 - """ - - # The goal of the 'mode' parameter is to avoid to - # have the current version of the standard as default. - # - # Over time, such version will be superseded by (for instance) - # FIPS 186-4 and it will be odd to have -3 as default. - - if encoding not in ('binary', 'der'): - raise ValueError("Unknown encoding '%s'" % encoding) - - if isinstance(key, EccKey): - order = key._curve.order - private_key_attr = 'd' - if not key.curve.startswith("NIST"): - raise ValueError("ECC key is not on a NIST P curve") - elif isinstance(key, DsaKey): - order = Integer(key.q) - private_key_attr = 'x' - else: - raise ValueError("Unsupported key type " + str(type(key))) - - if key.has_private(): - private_key = getattr(key, private_key_attr) - else: - private_key = None - - if mode == 'deterministic-rfc6979': - return DeterministicDsaSigScheme(key, encoding, order, private_key) - elif mode == 'fips-186-3': - if isinstance(key, EccKey): - return FipsEcDsaSigScheme(key, encoding, order, randfunc) - else: - return FipsDsaSigScheme(key, encoding, order, randfunc) - else: - raise ValueError("Unknown DSS mode '%s'" % mode) diff --git a/venv/Lib/site-packages/Cryptodome/Signature/DSS.pyi b/venv/Lib/site-packages/Cryptodome/Signature/DSS.pyi deleted file mode 100644 index 52ecc8f..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/DSS.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Union, Optional, Callable -from typing_extensions import Protocol - -from Cryptodome.PublicKey.DSA import DsaKey -from Cryptodome.PublicKey.ECC import EccKey - -class Hash(Protocol): - def digest(self) -> bytes: ... - -__all__ = ['new'] - -class DssSigScheme: - def __init__(self, key: Union[DsaKey, EccKey], encoding: str, order: int) -> None: ... - def can_sign(self) -> bool: ... - def sign(self, msg_hash: Hash) -> bytes: ... - def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... - -class DeterministicDsaSigScheme(DssSigScheme): - def __init__(self, key, encoding, order, private_key) -> None: ... - -class FipsDsaSigScheme(DssSigScheme): - def __init__(self, key: DsaKey, encoding: str, order: int, randfunc: Callable) -> None: ... - -class FipsEcDsaSigScheme(DssSigScheme): - def __init__(self, key: EccKey, encoding: str, order: int, randfunc: Callable) -> None: ... - -def new(key: Union[DsaKey, EccKey], mode: str, encoding: Optional[str]='binary', randfunc: Optional[Callable]=None) -> Union[DeterministicDsaSigScheme, FipsDsaSigScheme, FipsEcDsaSigScheme]: ... 
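
A minimal sketch of how the DSS API documented above is typically used, in case anything in this repository still imports it after the vendored copy is removed. It assumes pycryptodome/pycryptodomex is installed from requirements; the curve, message, and mode below are illustrative and not taken from this project.

# Sketch: sign and verify with Cryptodome.Signature.DSS (illustrative values).
from Cryptodome.PublicKey import ECC
from Cryptodome.Hash import SHA256
from Cryptodome.Signature import DSS

key = ECC.generate(curve='P-256')            # private key; P-256 chosen for illustration
h = SHA256.new(b'message to sign')

signer = DSS.new(key, 'deterministic-rfc6979')   # or 'fips-186-3' for randomized nonces
signature = signer.sign(h)                        # 64 bytes for P-256 with the default 'binary' encoding

verifier = DSS.new(key.public_key(), 'deterministic-rfc6979')
try:
    verifier.verify(SHA256.new(b'message to sign'), signature)
    print("The signature is valid.")
except ValueError:
    print("The signature is not valid.")
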
diff --git a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_PSS.py b/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_PSS.py deleted file mode 100644 index 1e7e5b5..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_PSS.py +++ /dev/null @@ -1,55 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Legacy module for PKCS#1 PSS signatures. - -:undocumented: __package__ -""" - -import types - -from Cryptodome.Signature import pss - - -def _pycrypto_verify(self, hash_object, signature): - try: - self._verify(hash_object, signature) - except (ValueError, TypeError): - return False - return True - - -def new(rsa_key, mgfunc=None, saltLen=None, randfunc=None): - pkcs1 = pss.new(rsa_key, mask_func=mgfunc, - salt_bytes=saltLen, rand_func=randfunc) - pkcs1._verify = pkcs1.verify - pkcs1.verify = types.MethodType(_pycrypto_verify, pkcs1) - return pkcs1 diff --git a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_PSS.pyi b/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_PSS.pyi deleted file mode 100644 index e7424f5..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_PSS.pyi +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Union, Callable, Optional -from typing_extensions import Protocol - -from Cryptodome.PublicKey.RSA import RsaKey - - -class Hash(Protocol): - def digest(self) -> bytes: ... - def update(self, bytes) -> None: ... - - -class HashModule(Protocol): - @staticmethod - def new(data: Optional[bytes]) -> Hash: ... - - -MaskFunction = Callable[[bytes, int, Union[Hash, HashModule]], bytes] -RndFunction = Callable[[int], bytes] - -class PSS_SigScheme: - def __init__(self, key: RsaKey, mgfunc: MaskFunction, saltLen: int, randfunc: RndFunction) -> None: ... - def can_sign(self) -> bool: ... - def sign(self, msg_hash: Hash) -> bytes: ... - def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... - - - -def new(rsa_key: RsaKey, mgfunc: Optional[MaskFunction]=None, saltLen: Optional[int]=None, randfunc: Optional[RndFunction]=None) -> PSS_SigScheme: ... 
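
The legacy PKCS1_PSS wrapper removed above exists only to keep the old PyCrypto behaviour where verify() returns a bool instead of raising. A small sketch of that difference, assuming pycryptodome/pycryptodomex is installed; the key size and messages are illustrative.

# Sketch: legacy PKCS1_PSS.verify() returns True/False, modern pss.verify() raises ValueError.
from Cryptodome.PublicKey import RSA
from Cryptodome.Hash import SHA256
from Cryptodome.Signature import PKCS1_PSS, pss

key = RSA.generate(2048)
h = SHA256.new(b'data')
sig = pss.new(key).sign(h)

legacy = PKCS1_PSS.new(key.publickey())
print(legacy.verify(SHA256.new(b'data'), sig))    # prints True or False, never raises

try:
    pss.new(key.publickey()).verify(SHA256.new(b'tampered'), sig)
except ValueError:
    print("modern pss API raises on an invalid signature")
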
diff --git a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_v1_5.py b/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_v1_5.py deleted file mode 100644 index d560663..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_v1_5.py +++ /dev/null @@ -1,53 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -""" -Legacy module for PKCS#1 v1.5 signatures. - -:undocumented: __package__ -""" - -import types - -from Cryptodome.Signature import pkcs1_15 - -def _pycrypto_verify(self, hash_object, signature): - try: - self._verify(hash_object, signature) - except (ValueError, TypeError): - return False - return True - -def new(rsa_key): - pkcs1 = pkcs1_15.new(rsa_key) - pkcs1._verify = pkcs1.verify - pkcs1.verify = types.MethodType(_pycrypto_verify, pkcs1) - return pkcs1 - diff --git a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_v1_5.pyi b/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_v1_5.pyi deleted file mode 100644 index d02555c..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/PKCS1_v1_5.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Optional -from typing_extensions import Protocol - -from Cryptodome.PublicKey.RSA import RsaKey - -class Hash(Protocol): - def digest(self) -> bytes: ... - -class PKCS115_SigScheme: - def __init__(self, rsa_key: RsaKey) -> None: ... - def can_sign(self) -> bool: ... - def sign(self, msg_hash: Hash) -> bytes: ... - def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... - - -def new(rsa_key: RsaKey) -> PKCS115_SigScheme: ... diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__init__.py b/venv/Lib/site-packages/Cryptodome/Signature/__init__.py deleted file mode 100644 index 11ca64c..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. 
Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -"""Digital signature protocols - -A collection of standardized protocols to carry out digital signatures. -""" - -__all__ = ['PKCS1_v1_5', 'PKCS1_PSS', 'DSS', 'pkcs1_15', 'pss', 'eddsa'] diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/DSS.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/DSS.cpython-312.pyc deleted file mode 100644 index 9c1befc..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/DSS.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/PKCS1_PSS.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/PKCS1_PSS.cpython-312.pyc deleted file mode 100644 index f114277..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/PKCS1_PSS.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/PKCS1_v1_5.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/PKCS1_v1_5.cpython-312.pyc deleted file mode 100644 index 259d73b..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/PKCS1_v1_5.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 9c1e73a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/eddsa.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/eddsa.cpython-312.pyc deleted file mode 100644 index ef0d3aa..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/eddsa.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/pkcs1_15.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/pkcs1_15.cpython-312.pyc deleted file mode 100644 index 2aba546..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/pkcs1_15.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/pss.cpython-312.pyc 
b/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/pss.cpython-312.pyc deleted file mode 100644 index 7a68b21..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Signature/__pycache__/pss.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Signature/eddsa.py b/venv/Lib/site-packages/Cryptodome/Signature/eddsa.py deleted file mode 100644 index 02e1e3e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/eddsa.py +++ /dev/null @@ -1,343 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2022, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Math.Numbers import Integer - -from Cryptodome.Hash import SHA512, SHAKE256 -from Cryptodome.Util.py3compat import bchr, is_bytes -from Cryptodome.PublicKey.ECC import (EccKey, - construct, - _import_ed25519_public_key, - _import_ed448_public_key) - - -def import_public_key(encoded): - """Create a new Ed25519 or Ed448 public key object, - starting from the key encoded as raw ``bytes``, - in the format described in RFC8032. - - Args: - encoded (bytes): - The EdDSA public key to import. - It must be 32 bytes for Ed25519, and 57 bytes for Ed448. - - Returns: - :class:`Cryptodome.PublicKey.EccKey` : a new ECC key object. - - Raises: - ValueError: when the given key cannot be parsed. - """ - - if len(encoded) == 32: - x, y = _import_ed25519_public_key(encoded) - curve_name = "Ed25519" - elif len(encoded) == 57: - x, y = _import_ed448_public_key(encoded) - curve_name = "Ed448" - else: - raise ValueError("Not an EdDSA key (%d bytes)" % len(encoded)) - return construct(curve=curve_name, point_x=x, point_y=y) - - -def import_private_key(encoded): - """Create a new Ed25519 or Ed448 private key object, - starting from the key encoded as raw ``bytes``, - in the format described in RFC8032. - - Args: - encoded (bytes): - The EdDSA private key to import. - It must be 32 bytes for Ed25519, and 57 bytes for Ed448. - - Returns: - :class:`Cryptodome.PublicKey.EccKey` : a new ECC key object. - - Raises: - ValueError: when the given key cannot be parsed. 
- """ - - if len(encoded) == 32: - curve_name = "ed25519" - elif len(encoded) == 57: - curve_name = "ed448" - else: - raise ValueError("Incorrect length. Only EdDSA private keys are supported.") - - # Note that the private key is truly a sequence of random bytes, - # so we cannot check its correctness in any way. - - return construct(seed=encoded, curve=curve_name) - - -class EdDSASigScheme(object): - """An EdDSA signature object. - Do not instantiate directly. - Use :func:`Cryptodome.Signature.eddsa.new`. - """ - - def __init__(self, key, context): - """Create a new EdDSA object. - - Do not instantiate this object directly, - use `Cryptodome.Signature.DSS.new` instead. - """ - - self._key = key - self._context = context - self._A = key._export_eddsa_public() - self._order = key._curve.order - - def can_sign(self): - """Return ``True`` if this signature object can be used - for signing messages.""" - - return self._key.has_private() - - def sign(self, msg_or_hash): - """Compute the EdDSA signature of a message. - - Args: - msg_or_hash (bytes or a hash object): - The message to sign (``bytes``, in case of *PureEdDSA*) or - the hash that was carried out over the message (hash object, for *HashEdDSA*). - - The hash object must be :class:`Cryptodome.Hash.SHA512` for Ed25519, - and :class:`Cryptodome.Hash.SHAKE256` object for Ed448. - - :return: The signature as ``bytes``. It is always 64 bytes for Ed25519, and 114 bytes for Ed448. - :raise TypeError: if the EdDSA key has no private half - """ - - if not self._key.has_private(): - raise TypeError("Private key is needed to sign") - - if self._key.curve == "Ed25519": - ph = isinstance(msg_or_hash, SHA512.SHA512Hash) - if not (ph or is_bytes(msg_or_hash)): - raise TypeError("'msg_or_hash' must be bytes of a SHA-512 hash") - eddsa_sign_method = self._sign_ed25519 - - elif self._key.curve == "Ed448": - ph = isinstance(msg_or_hash, SHAKE256.SHAKE256_XOF) - if not (ph or is_bytes(msg_or_hash)): - raise TypeError("'msg_or_hash' must be bytes of a SHAKE256 hash") - eddsa_sign_method = self._sign_ed448 - - else: - raise ValueError("Incorrect curve for EdDSA") - - return eddsa_sign_method(msg_or_hash, ph) - - def _sign_ed25519(self, msg_or_hash, ph): - - if self._context or ph: - flag = int(ph) - # dom2(flag, self._context) - dom2 = b'SigEd25519 no Ed25519 collisions' + bchr(flag) + \ - bchr(len(self._context)) + self._context - else: - dom2 = b'' - - PHM = msg_or_hash.digest() if ph else msg_or_hash - - # See RFC 8032, section 5.1.6 - - # Step 2 - r_hash = SHA512.new(dom2 + self._key._prefix + PHM).digest() - r = Integer.from_bytes(r_hash, 'little') % self._order - # Step 3 - R_pk = EccKey(point=r * self._key._curve.G)._export_eddsa_public() - # Step 4 - k_hash = SHA512.new(dom2 + R_pk + self._A + PHM).digest() - k = Integer.from_bytes(k_hash, 'little') % self._order - # Step 5 - s = (r + k * self._key.d) % self._order - - return R_pk + s.to_bytes(32, 'little') - - def _sign_ed448(self, msg_or_hash, ph): - - flag = int(ph) - # dom4(flag, self._context) - dom4 = b'SigEd448' + bchr(flag) + \ - bchr(len(self._context)) + self._context - - PHM = msg_or_hash.copy().read(64) if ph else msg_or_hash - - # See RFC 8032, section 5.2.6 - - # Step 2 - r_hash = SHAKE256.new(dom4 + self._key._prefix + PHM).read(114) - r = Integer.from_bytes(r_hash, 'little') % self._order - # Step 3 - R_pk = EccKey(point=r * self._key._curve.G)._export_eddsa_public() - # Step 4 - k_hash = SHAKE256.new(dom4 + R_pk + self._A + PHM).read(114) - k = Integer.from_bytes(k_hash, 'little') % 
self._order - # Step 5 - s = (r + k * self._key.d) % self._order - - return R_pk + s.to_bytes(57, 'little') - - def verify(self, msg_or_hash, signature): - """Check if an EdDSA signature is authentic. - - Args: - msg_or_hash (bytes or a hash object): - The message to verify (``bytes``, in case of *PureEdDSA*) or - the hash that was carried out over the message (hash object, for *HashEdDSA*). - - The hash object must be :class:`Cryptodome.Hash.SHA512` object for Ed25519, - and :class:`Cryptodome.Hash.SHAKE256` for Ed448. - - signature (``bytes``): - The signature that needs to be validated. - It must be 64 bytes for Ed25519, and 114 bytes for Ed448. - - :raise ValueError: if the signature is not authentic - """ - - if self._key.curve == "Ed25519": - ph = isinstance(msg_or_hash, SHA512.SHA512Hash) - if not (ph or is_bytes(msg_or_hash)): - raise TypeError("'msg_or_hash' must be bytes of a SHA-512 hash") - eddsa_verify_method = self._verify_ed25519 - - elif self._key.curve == "Ed448": - ph = isinstance(msg_or_hash, SHAKE256.SHAKE256_XOF) - if not (ph or is_bytes(msg_or_hash)): - raise TypeError("'msg_or_hash' must be bytes of a SHAKE256 hash") - eddsa_verify_method = self._verify_ed448 - - else: - raise ValueError("Incorrect curve for EdDSA") - - return eddsa_verify_method(msg_or_hash, signature, ph) - - def _verify_ed25519(self, msg_or_hash, signature, ph): - - if len(signature) != 64: - raise ValueError("The signature is not authentic (length)") - - if self._context or ph: - flag = int(ph) - dom2 = b'SigEd25519 no Ed25519 collisions' + bchr(flag) + \ - bchr(len(self._context)) + self._context - else: - dom2 = b'' - - PHM = msg_or_hash.digest() if ph else msg_or_hash - - # Section 5.1.7 - - # Step 1 - try: - R = import_public_key(signature[:32]).pointQ - except ValueError: - raise ValueError("The signature is not authentic (R)") - s = Integer.from_bytes(signature[32:], 'little') - if s > self._order: - raise ValueError("The signature is not authentic (S)") - # Step 2 - k_hash = SHA512.new(dom2 + signature[:32] + self._A + PHM).digest() - k = Integer.from_bytes(k_hash, 'little') % self._order - # Step 3 - point1 = s * 8 * self._key._curve.G - # OPTIMIZE: with double-scalar multiplication, with no SCA - # countermeasures because it is public values - point2 = 8 * R + k * 8 * self._key.pointQ - if point1 != point2: - raise ValueError("The signature is not authentic") - - def _verify_ed448(self, msg_or_hash, signature, ph): - - if len(signature) != 114: - raise ValueError("The signature is not authentic (length)") - - flag = int(ph) - # dom4(flag, self._context) - dom4 = b'SigEd448' + bchr(flag) + \ - bchr(len(self._context)) + self._context - - PHM = msg_or_hash.copy().read(64) if ph else msg_or_hash - - # Section 5.2.7 - - # Step 1 - try: - R = import_public_key(signature[:57]).pointQ - except ValueError: - raise ValueError("The signature is not authentic (R)") - s = Integer.from_bytes(signature[57:], 'little') - if s > self._order: - raise ValueError("The signature is not authentic (S)") - # Step 2 - k_hash = SHAKE256.new(dom4 + signature[:57] + self._A + PHM).read(114) - k = Integer.from_bytes(k_hash, 'little') % self._order - # Step 3 - point1 = s * 8 * self._key._curve.G - # OPTIMIZE: with double-scalar multiplication, with no SCA - # countermeasures because it is public values - point2 = 8 * R + k * 8 * self._key.pointQ - if point1 != point2: - raise ValueError("The signature is not authentic") - - -def new(key, mode, context=None): - """Create a signature object :class:`EdDSASigScheme` 
that - can perform or verify an EdDSA signature. - - Args: - key (:class:`Cryptodome.PublicKey.ECC` object): - The key to use for computing the signature (*private* keys only) - or for verifying one. - The key must be on the curve ``Ed25519`` or ``Ed448``. - - mode (string): - This parameter must be ``'rfc8032'``. - - context (bytes): - Up to 255 bytes of `context `_, - which is a constant byte string to segregate different protocols or - different applications of the same key. - """ - - if not isinstance(key, EccKey) or key.curve not in ("Ed25519", "Ed448"): - raise ValueError("EdDSA can only be used with EdDSA keys") - - if mode != 'rfc8032': - raise ValueError("Mode must be 'rfc8032'") - - if context is None: - context = b'' - elif len(context) > 255: - raise ValueError("Context for EdDSA must not be longer than 255 bytes") - - return EdDSASigScheme(key, context) diff --git a/venv/Lib/site-packages/Cryptodome/Signature/eddsa.pyi b/venv/Lib/site-packages/Cryptodome/Signature/eddsa.pyi deleted file mode 100644 index 809a7ad..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/eddsa.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Union, Optional -from typing_extensions import Protocol -from Cryptodome.PublicKey.ECC import EccKey - -class Hash(Protocol): - def digest(self) -> bytes: ... - -class XOF(Protocol): - def read(self, len: int) -> bytes: ... - -def import_public_key(encoded: bytes) -> EccKey: ... -def import_private_key(encoded: bytes) -> EccKey: ... - -class EdDSASigScheme(object): - - def __init__(self, key: EccKey, context: bytes) -> None: ... - def can_sign(self) -> bool: ... - def sign(self, msg_or_hash: Union[bytes, Hash, XOF]) -> bytes: ... - def verify(self, msg_or_hash: Union[bytes, Hash, XOF], signature: bytes) -> None: ... - -def new(key: EccKey, mode: str, context: Optional[bytes]=None) -> EdDSASigScheme: ... diff --git a/venv/Lib/site-packages/Cryptodome/Signature/pkcs1_15.py b/venv/Lib/site-packages/Cryptodome/Signature/pkcs1_15.py deleted file mode 100644 index bdde78a..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/pkcs1_15.py +++ /dev/null @@ -1,223 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import Cryptodome.Util.number -from Cryptodome.Util.number import ceil_div, bytes_to_long, long_to_bytes -from Cryptodome.Util.asn1 import DerSequence, DerNull, DerOctetString, DerObjectId - -class PKCS115_SigScheme: - """A signature object for ``RSASSA-PKCS1-v1_5``. - Do not instantiate directly. - Use :func:`Cryptodome.Signature.pkcs1_15.new`. - """ - - def __init__(self, rsa_key): - """Initialize this PKCS#1 v1.5 signature scheme object. - - :Parameters: - rsa_key : an RSA key object - Creation of signatures is only possible if this is a *private* - RSA key. Verification of signatures is always possible. - """ - self._key = rsa_key - - def can_sign(self): - """Return ``True`` if this object can be used to sign messages.""" - return self._key.has_private() - - def sign(self, msg_hash): - """Create the PKCS#1 v1.5 signature of a message. - - This function is also called ``RSASSA-PKCS1-V1_5-SIGN`` and - it is specified in - `section 8.2.1 of RFC8017 `_. - - :parameter msg_hash: - This is an object from the :mod:`Cryptodome.Hash` package. - It has been used to digest the message to sign. - :type msg_hash: hash object - - :return: the signature encoded as a *byte string*. - :raise ValueError: if the RSA key is not long enough for the given hash algorithm. - :raise TypeError: if the RSA key has no private half. - """ - - # See 8.2.1 in RFC3447 - modBits = Cryptodome.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - - # Step 1 - em = _EMSA_PKCS1_V1_5_ENCODE(msg_hash, k) - # Step 2a (OS2IP) - em_int = bytes_to_long(em) - # Step 2b (RSASP1) and Step 2c (I2OSP) - signature = self._key._decrypt_to_bytes(em_int) - # Verify no faults occurred - if em_int != pow(bytes_to_long(signature), self._key.e, self._key.n): - raise ValueError("Fault detected in RSA private key operation") - return signature - - def verify(self, msg_hash, signature): - """Check if the PKCS#1 v1.5 signature over a message is valid. - - This function is also called ``RSASSA-PKCS1-V1_5-VERIFY`` and - it is specified in - `section 8.2.2 of RFC8037 `_. - - :parameter msg_hash: - The hash that was carried out over the message. This is an object - belonging to the :mod:`Cryptodome.Hash` module. - :type parameter: hash object - - :parameter signature: - The signature that needs to be validated. - :type signature: byte string - - :raise ValueError: if the signature is not valid. 
- """ - - # See 8.2.2 in RFC3447 - modBits = Cryptodome.Util.number.size(self._key.n) - k = ceil_div(modBits, 8) # Convert from bits to bytes - - # Step 1 - if len(signature) != k: - raise ValueError("Invalid signature") - # Step 2a (O2SIP) - signature_int = bytes_to_long(signature) - # Step 2b (RSAVP1) - em_int = self._key._encrypt(signature_int) - # Step 2c (I2OSP) - em1 = long_to_bytes(em_int, k) - # Step 3 - try: - possible_em1 = [ _EMSA_PKCS1_V1_5_ENCODE(msg_hash, k, True) ] - # MD2/4/5 hashes always require NULL params in AlgorithmIdentifier. - # For all others, it is optional. - try: - algorithm_is_md = msg_hash.oid.startswith('1.2.840.113549.2.') - except AttributeError: - algorithm_is_md = False - if not algorithm_is_md: # MD2/MD4/MD5 - possible_em1.append(_EMSA_PKCS1_V1_5_ENCODE(msg_hash, k, False)) - except ValueError: - raise ValueError("Invalid signature") - # Step 4 - # By comparing the full encodings (as opposed to checking each - # of its components one at a time) we avoid attacks to the padding - # scheme like Bleichenbacher's (see http://www.mail-archive.com/cryptography@metzdowd.com/msg06537). - # - if em1 not in possible_em1: - raise ValueError("Invalid signature") - pass - - -def _EMSA_PKCS1_V1_5_ENCODE(msg_hash, emLen, with_hash_parameters=True): - """ - Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined - in PKCS#1 v2.1 (RFC3447, 9.2). - - ``_EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input, - and hash it internally. Here, we expect that the message has already - been hashed instead. - - :Parameters: - msg_hash : hash object - The hash object that holds the digest of the message being signed. - emLen : int - The length the final encoding must have, in bytes. - with_hash_parameters : bool - If True (default), include NULL parameters for the hash - algorithm in the ``digestAlgorithm`` SEQUENCE. - - :attention: the early standard (RFC2313) stated that ``DigestInfo`` - had to be BER-encoded. This means that old signatures - might have length tags in indefinite form, which - is not supported in DER. Such encoding cannot be - reproduced by this function. - - :Return: An ``emLen`` byte long string that encodes the hash. - """ - - # First, build the ASN.1 DER object DigestInfo: - # - # DigestInfo ::= SEQUENCE { - # digestAlgorithm AlgorithmIdentifier, - # digest OCTET STRING - # } - # - # where digestAlgorithm identifies the hash function and shall be an - # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms. - # - # PKCS1-v1-5DigestAlgorithms ALGORITHM-IDENTIFIER ::= { - # { OID id-md2 PARAMETERS NULL }| - # { OID id-md5 PARAMETERS NULL }| - # { OID id-sha1 PARAMETERS NULL }| - # { OID id-sha256 PARAMETERS NULL }| - # { OID id-sha384 PARAMETERS NULL }| - # { OID id-sha512 PARAMETERS NULL } - # } - # - # Appendix B.1 also says that for SHA-1/-2 algorithms, the parameters - # should be omitted. They may be present, but when they are, they shall - # have NULL value. - - digestAlgo = DerSequence([ DerObjectId(msg_hash.oid).encode() ]) - - if with_hash_parameters: - digestAlgo.append(DerNull().encode()) - - digest = DerOctetString(msg_hash.digest()) - digestInfo = DerSequence([ - digestAlgo.encode(), - digest.encode() - ]).encode() - - # We need at least 11 bytes for the remaining data: 3 fixed bytes and - # at least 8 bytes of padding). - if emLen bytes: ... - -class PKCS115_SigScheme: - def __init__(self, rsa_key: RsaKey) -> None: ... - def can_sign(self) -> bool: ... - def sign(self, msg_hash: Hash) -> bytes: ... 
- def verify(self, msg_hash: Hash, signature: bytes) -> None: ... - -def _EMSA_PKCS1_V1_5_ENCODE(msg_hash: Hash, emLen: int, with_hash_parameters: Optional[bool]=True) -> bytes: ... - -def new(rsa_key: RsaKey) -> PKCS115_SigScheme: ... diff --git a/venv/Lib/site-packages/Cryptodome/Signature/pss.py b/venv/Lib/site-packages/Cryptodome/Signature/pss.py deleted file mode 100644 index b929e26..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/pss.py +++ /dev/null @@ -1,387 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util.py3compat import bchr, bord, iter_range -import Cryptodome.Util.number -from Cryptodome.Util.number import (ceil_div, - long_to_bytes, - bytes_to_long - ) -from Cryptodome.Util.strxor import strxor -from Cryptodome import Random - - -class PSS_SigScheme: - """A signature object for ``RSASSA-PSS``. - Do not instantiate directly. - Use :func:`Cryptodome.Signature.pss.new`. - """ - - def __init__(self, key, mgfunc, saltLen, randfunc): - """Initialize this PKCS#1 PSS signature scheme object. - - :Parameters: - key : an RSA key object - If a private half is given, both signature and - verification are possible. - If a public half is given, only verification is possible. - mgfunc : callable - A mask generation function that accepts two parameters: - a string to use as seed, and the lenth of the mask to - generate, in bytes. - saltLen : integer - Length of the salt, in bytes. - randfunc : callable - A function that returns random bytes. - """ - - self._key = key - self._saltLen = saltLen - self._mgfunc = mgfunc - self._randfunc = randfunc - - def can_sign(self): - """Return ``True`` if this object can be used to sign messages.""" - return self._key.has_private() - - def sign(self, msg_hash): - """Create the PKCS#1 PSS signature of a message. - - This function is also called ``RSASSA-PSS-SIGN`` and - it is specified in - `section 8.1.1 of RFC8017 `_. - - :parameter msg_hash: - This is an object from the :mod:`Cryptodome.Hash` package. - It has been used to digest the message to sign. 
- :type msg_hash: hash object - - :return: the signature encoded as a *byte string*. - :raise ValueError: if the RSA key is not long enough for the given hash algorithm. - :raise TypeError: if the RSA key has no private half. - """ - - # Set defaults for salt length and mask generation function - if self._saltLen is None: - sLen = msg_hash.digest_size - else: - sLen = self._saltLen - - if self._mgfunc is None: - mgf = lambda x, y: MGF1(x, y, msg_hash) - else: - mgf = self._mgfunc - - modBits = Cryptodome.Util.number.size(self._key.n) - - # See 8.1.1 in RFC3447 - k = ceil_div(modBits, 8) # k is length in bytes of the modulus - # Step 1 - em = _EMSA_PSS_ENCODE(msg_hash, modBits-1, self._randfunc, mgf, sLen) - # Step 2a (OS2IP) - em_int = bytes_to_long(em) - # Step 2b (RSASP1) and Step 2c (I2OSP) - signature = self._key._decrypt_to_bytes(em_int) - # Verify no faults occurred - if em_int != pow(bytes_to_long(signature), self._key.e, self._key.n): - raise ValueError("Fault detected in RSA private key operation") - return signature - - def verify(self, msg_hash, signature): - """Check if the PKCS#1 PSS signature over a message is valid. - - This function is also called ``RSASSA-PSS-VERIFY`` and - it is specified in - `section 8.1.2 of RFC8037 `_. - - :parameter msg_hash: - The hash that was carried out over the message. This is an object - belonging to the :mod:`Cryptodome.Hash` module. - :type parameter: hash object - - :parameter signature: - The signature that needs to be validated. - :type signature: bytes - - :raise ValueError: if the signature is not valid. - """ - - # Set defaults for salt length and mask generation function - if self._saltLen is None: - sLen = msg_hash.digest_size - else: - sLen = self._saltLen - if self._mgfunc: - mgf = self._mgfunc - else: - mgf = lambda x, y: MGF1(x, y, msg_hash) - - modBits = Cryptodome.Util.number.size(self._key.n) - - # See 8.1.2 in RFC3447 - k = ceil_div(modBits, 8) # Convert from bits to bytes - # Step 1 - if len(signature) != k: - raise ValueError("Incorrect signature") - # Step 2a (O2SIP) - signature_int = bytes_to_long(signature) - # Step 2b (RSAVP1) - em_int = self._key._encrypt(signature_int) - # Step 2c (I2OSP) - emLen = ceil_div(modBits - 1, 8) - em = long_to_bytes(em_int, emLen) - # Step 3/4 - _EMSA_PSS_VERIFY(msg_hash, em, modBits-1, mgf, sLen) - - -def MGF1(mgfSeed, maskLen, hash_gen): - """Mask Generation Function, described in `B.2.1 of RFC8017 - `_. - - :param mfgSeed: - seed from which the mask is generated - :type mfgSeed: byte string - - :param maskLen: - intended length in bytes of the mask - :type maskLen: integer - - :param hash_gen: - A module or a hash object from :mod:`Cryptodome.Hash` - :type hash_object: - - :return: the mask, as a *byte string* - """ - - T = b"" - for counter in iter_range(ceil_div(maskLen, hash_gen.digest_size)): - c = long_to_bytes(counter, 4) - hobj = hash_gen.new() - hobj.update(mgfSeed + c) - T = T + hobj.digest() - assert(len(T) >= maskLen) - return T[:maskLen] - - -def _EMSA_PSS_ENCODE(mhash, emBits, randFunc, mgf, sLen): - r""" - Implement the ``EMSA-PSS-ENCODE`` function, as defined - in PKCS#1 v2.1 (RFC3447, 9.1.1). - - The original ``EMSA-PSS-ENCODE`` actually accepts the message ``M`` - as input, and hash it internally. Here, we expect that the message - has already been hashed instead. - - :Parameters: - mhash : hash object - The hash object that holds the digest of the message being signed. - emBits : int - Maximum length of the final encoding, in bits. 
- randFunc : callable - An RNG function that accepts as only parameter an int, and returns - a string of random bytes, to be used as salt. - mgf : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - sLen : int - Length of the salt, in bytes. - - :Return: An ``emLen`` byte long string that encodes the hash - (with ``emLen = \ceil(emBits/8)``). - - :Raise ValueError: - When digest or salt length are too big. - """ - - emLen = ceil_div(emBits, 8) - - # Bitmask of digits that fill up - lmask = 0 - for i in iter_range(8*emLen-emBits): - lmask = lmask >> 1 | 0x80 - - # Step 1 and 2 have been already done - # Step 3 - if emLen < mhash.digest_size+sLen+2: - raise ValueError("Digest or salt length are too long" - " for given key size.") - # Step 4 - salt = randFunc(sLen) - # Step 5 - m_prime = bchr(0)*8 + mhash.digest() + salt - # Step 6 - h = mhash.new() - h.update(m_prime) - # Step 7 - ps = bchr(0)*(emLen-sLen-mhash.digest_size-2) - # Step 8 - db = ps + bchr(1) + salt - # Step 9 - dbMask = mgf(h.digest(), emLen-mhash.digest_size-1) - # Step 10 - maskedDB = strxor(db, dbMask) - # Step 11 - maskedDB = bchr(bord(maskedDB[0]) & ~lmask) + maskedDB[1:] - # Step 12 - em = maskedDB + h.digest() + bchr(0xBC) - return em - - -def _EMSA_PSS_VERIFY(mhash, em, emBits, mgf, sLen): - """ - Implement the ``EMSA-PSS-VERIFY`` function, as defined - in PKCS#1 v2.1 (RFC3447, 9.1.2). - - ``EMSA-PSS-VERIFY`` actually accepts the message ``M`` as input, - and hash it internally. Here, we expect that the message has already - been hashed instead. - - :Parameters: - mhash : hash object - The hash object that holds the digest of the message to be verified. - em : string - The signature to verify, therefore proving that the sender really - signed the message that was received. - emBits : int - Length of the final encoding (em), in bits. - mgf : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - sLen : int - Length of the salt, in bytes. - - :Raise ValueError: - When the encoding is inconsistent, or the digest or salt lengths - are too big. - """ - - emLen = ceil_div(emBits, 8) - - # Bitmask of digits that fill up - lmask = 0 - for i in iter_range(8*emLen-emBits): - lmask = lmask >> 1 | 0x80 - - # Step 1 and 2 have been already done - # Step 3 - if emLen < mhash.digest_size+sLen+2: - raise ValueError("Incorrect signature") - # Step 4 - if ord(em[-1:]) != 0xBC: - raise ValueError("Incorrect signature") - # Step 5 - maskedDB = em[:emLen-mhash.digest_size-1] - h = em[emLen-mhash.digest_size-1:-1] - # Step 6 - if lmask & bord(em[0]): - raise ValueError("Incorrect signature") - # Step 7 - dbMask = mgf(h, emLen-mhash.digest_size-1) - # Step 8 - db = strxor(maskedDB, dbMask) - # Step 9 - db = bchr(bord(db[0]) & ~lmask) + db[1:] - # Step 10 - if not db.startswith(bchr(0)*(emLen-mhash.digest_size-sLen-2) + bchr(1)): - raise ValueError("Incorrect signature") - # Step 11 - if sLen > 0: - salt = db[-sLen:] - else: - salt = b"" - # Step 12 - m_prime = bchr(0)*8 + mhash.digest() + salt - # Step 13 - hobj = mhash.new() - hobj.update(m_prime) - hp = hobj.digest() - # Step 14 - if h != hp: - raise ValueError("Incorrect signature") - - -def new(rsa_key, **kwargs): - """Create an object for making or verifying PKCS#1 PSS signatures. - - :parameter rsa_key: - The RSA key to use for signing or verifying the message. 
- This is a :class:`Cryptodome.PublicKey.RSA` object. - Signing is only possible when ``rsa_key`` is a **private** RSA key. - :type rsa_key: RSA object - - :Keyword Arguments: - - * *mask_func* (``callable``) -- - A function that returns the mask (as `bytes`). - It must accept two parameters: a seed (as `bytes`) - and the length of the data to return. - - If not specified, it will be the function :func:`MGF1` defined in - `RFC8017 `_ and - combined with the same hash algorithm applied to the - message to sign or verify. - - If you want to use a different function, for instance still :func:`MGF1` - but together with another hash, you can do:: - - from Cryptodome.Hash import SHA256 - from Cryptodome.Signature.pss import MGF1 - mgf = lambda x, y: MGF1(x, y, SHA256) - - * *salt_bytes* (``integer``) -- - Length of the salt, in bytes. - It is a value between 0 and ``emLen - hLen - 2``, where ``emLen`` - is the size of the RSA modulus and ``hLen`` is the size of the digest - applied to the message to sign or verify. - - The salt is generated internally, you don't need to provide it. - - If not specified, the salt length will be ``hLen``. - If it is zero, the signature scheme becomes deterministic. - - Note that in some implementations such as OpenSSL the default - salt length is ``emLen - hLen - 2`` (even though it is not more - secure than ``hLen``). - - * *rand_func* (``callable``) -- - A function that returns random ``bytes``, of the desired length. - The default is :func:`Cryptodome.Random.get_random_bytes`. - - :return: a :class:`PSS_SigScheme` signature object - """ - - mask_func = kwargs.pop("mask_func", None) - salt_len = kwargs.pop("salt_bytes", None) - rand_func = kwargs.pop("rand_func", None) - if rand_func is None: - rand_func = Random.get_random_bytes - if kwargs: - raise ValueError("Unknown keywords: " + str(kwargs.keys())) - return PSS_SigScheme(rsa_key, mask_func, salt_len, rand_func) diff --git a/venv/Lib/site-packages/Cryptodome/Signature/pss.pyi b/venv/Lib/site-packages/Cryptodome/Signature/pss.pyi deleted file mode 100644 index 84a960e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Signature/pss.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Union, Callable, Optional -from typing_extensions import Protocol - -from Cryptodome.PublicKey.RSA import RsaKey - - -class Hash(Protocol): - def digest(self) -> bytes: ... - def update(self, bytes) -> None: ... - - -class HashModule(Protocol): - @staticmethod - def new(data: Optional[bytes]) -> Hash: ... - - -MaskFunction = Callable[[bytes, int, Union[Hash, HashModule]], bytes] -RndFunction = Callable[[int], bytes] - -class PSS_SigScheme: - def __init__(self, key: RsaKey, mgfunc: MaskFunction, saltLen: int, randfunc: RndFunction) -> None: ... - def can_sign(self) -> bool: ... - def sign(self, msg_hash: Hash) -> bytes: ... - def verify(self, msg_hash: Hash, signature: bytes) -> None: ... - - -MGF1 : MaskFunction -def _EMSA_PSS_ENCODE(mhash: Hash, emBits: int, randFunc: RndFunction, mgf:MaskFunction, sLen: int) -> str: ... -def _EMSA_PSS_VERIFY(mhash: Hash, em: str, emBits: int, mgf: MaskFunction, sLen: int) -> None: ... -def new(rsa_key: RsaKey, **kwargs: Union[MaskFunction, RndFunction, int]) -> PSS_SigScheme: ... 
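
A minimal sketch of pss.new() with the keyword arguments documented above (mask_func and salt_bytes), assuming pycryptodome/pycryptodomex is installed; the key size, hash choice, and zero-length salt are illustrative, not project settings.

# Sketch: deterministic RSASSA-PSS by fixing salt_bytes=0 and supplying MGF1 over SHA-256.
from Cryptodome.PublicKey import RSA
from Cryptodome.Hash import SHA256
from Cryptodome.Signature import pss
from Cryptodome.Signature.pss import MGF1

key = RSA.generate(2048)
h = SHA256.new(b'payload')

# Zero-length salt makes the signature deterministic, as the docstring above notes.
signer = pss.new(key, mask_func=lambda x, y: MGF1(x, y, SHA256), salt_bytes=0)
signature = signer.sign(h)

verifier = pss.new(key.publickey(), mask_func=lambda x, y: MGF1(x, y, SHA256), salt_bytes=0)
try:
    verifier.verify(SHA256.new(b'payload'), signature)
    print("valid")
except ValueError:
    print("invalid")
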
diff --git a/venv/Lib/site-packages/Cryptodome/Util/Counter.py b/venv/Lib/site-packages/Cryptodome/Util/Counter.py deleted file mode 100644 index e3bdcbe..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/Counter.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Util/Counter.py : Fast counter for use with CTR-mode ciphers -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -def new(nbits, prefix=b"", suffix=b"", initial_value=1, little_endian=False, allow_wraparound=False): - """Create a stateful counter block function suitable for CTR encryption modes. - - Each call to the function returns the next counter block. - Each counter block is made up by three parts: - - +------+--------------+-------+ - |prefix| counter value|postfix| - +------+--------------+-------+ - - The counter value is incremented by 1 at each call. - - Args: - nbits (integer): - Length of the desired counter value, in bits. It must be a multiple of 8. - prefix (byte string): - The constant prefix of the counter block. By default, no prefix is - used. - suffix (byte string): - The constant postfix of the counter block. By default, no suffix is - used. - initial_value (integer): - The initial value of the counter. Default value is 1. - Its length in bits must not exceed the argument ``nbits``. - little_endian (boolean): - If ``True``, the counter number will be encoded in little endian format. - If ``False`` (default), in big endian format. - allow_wraparound (boolean): - This parameter is ignored. - An ``OverflowError`` exception is always raised when the counter wraps - around to zero. - Returns: - An object that can be passed with the :data:`counter` parameter to a CTR mode - cipher. - - It must hold that *len(prefix) + nbits//8 + len(suffix)* matches the - block size of the underlying block cipher. 
- """ - - if (nbits % 8) != 0: - raise ValueError("'nbits' must be a multiple of 8") - - iv_bl = initial_value.bit_length() - if iv_bl > nbits: - raise ValueError("Initial value takes %d bits but it is longer than " - "the counter (%d bits)" % - (iv_bl, nbits)) - - # Ignore wraparound - return {"counter_len": nbits // 8, - "prefix": prefix, - "suffix": suffix, - "initial_value": initial_value, - "little_endian": little_endian - } diff --git a/venv/Lib/site-packages/Cryptodome/Util/Counter.pyi b/venv/Lib/site-packages/Cryptodome/Util/Counter.pyi deleted file mode 100644 index fa2ffdd..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/Counter.pyi +++ /dev/null @@ -1,5 +0,0 @@ -from typing import Optional, Union, Dict - -def new(nbits: int, prefix: Optional[bytes]=..., suffix: Optional[bytes]=..., initial_value: Optional[int]=1, - little_endian: Optional[bool]=False, allow_wraparound: Optional[bool]=False) -> \ - Dict[str, Union[int, bytes, bool]]: ... diff --git a/venv/Lib/site-packages/Cryptodome/Util/Padding.py b/venv/Lib/site-packages/Cryptodome/Util/Padding.py deleted file mode 100644 index 1016568..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/Padding.py +++ /dev/null @@ -1,119 +0,0 @@ -# -# Util/Padding.py : Functions to manage padding -# -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -__all__ = [ 'pad', 'unpad' ] - -from Cryptodome.Util.py3compat import * - - -def pad(data_to_pad, block_size, style='pkcs7'): - """Apply standard padding. - - Args: - data_to_pad (byte string): - The data that needs to be padded. - block_size (integer): - The block boundary to use for padding. The output length is guaranteed - to be a multiple of :data:`block_size`. - style (string): - Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. - - Return: - byte string : the original data with the appropriate padding added at the end. 
- """ - - padding_len = block_size - len(data_to_pad) % block_size - - if style == 'pkcs7': - padding = bchr(padding_len) * padding_len - elif style == 'x923': - padding = bchr(0)*(padding_len-1) + bchr(padding_len) - elif style == 'iso7816': - padding = bchr(128) + bchr(0) * (padding_len-1) - else: - raise ValueError("Unknown padding style") - - return data_to_pad + padding - - -def unpad(padded_data, block_size, style='pkcs7'): - """Remove standard padding. - - Args: - padded_data (byte string): - A piece of data with padding that needs to be stripped. - block_size (integer): - The block boundary to use for padding. The input length - must be a multiple of :data:`block_size`. - style (string): - Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. - Return: - byte string : data without padding. - Raises: - ValueError: if the padding is incorrect. - """ - - pdata_len = len(padded_data) - - if pdata_len == 0: - raise ValueError("Zero-length input cannot be unpadded") - - if pdata_len % block_size: - raise ValueError("Input data is not padded") - - if style in ('pkcs7', 'x923'): - padding_len = bord(padded_data[-1]) - - if padding_len < 1 or padding_len > min(block_size, pdata_len): - raise ValueError("Padding is incorrect.") - - if style == 'pkcs7': - if padded_data[-padding_len:] != bchr(padding_len)*padding_len: - raise ValueError("PKCS#7 padding is incorrect.") - else: - if padded_data[-padding_len:-1] != bchr(0)*(padding_len-1): - raise ValueError("ANSI X.923 padding is incorrect.") - - elif style == 'iso7816': - padding_len = pdata_len - padded_data.rfind(bchr(128)) - - if padding_len < 1 or padding_len > min(block_size, pdata_len): - raise ValueError("Padding is incorrect.") - - if padding_len > 1 and padded_data[1-padding_len:] != bchr(0)*(padding_len-1): - raise ValueError("ISO 7816-4 padding is incorrect.") - else: - raise ValueError("Unknown padding style") - - return padded_data[:-padding_len] - diff --git a/venv/Lib/site-packages/Cryptodome/Util/Padding.pyi b/venv/Lib/site-packages/Cryptodome/Util/Padding.pyi deleted file mode 100644 index 4d8d30d..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/Padding.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Optional - -__all__ = [ 'pad', 'unpad' ] - -def pad(data_to_pad: bytes, block_size: int, style: Optional[str]='pkcs7') -> bytes: ... -def unpad(padded_data: bytes, block_size: int, style: Optional[str]='pkcs7') -> bytes: ... \ No newline at end of file diff --git a/venv/Lib/site-packages/Cryptodome/Util/RFC1751.py b/venv/Lib/site-packages/Cryptodome/Util/RFC1751.py deleted file mode 100644 index 10859c3..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/RFC1751.py +++ /dev/null @@ -1,386 +0,0 @@ -# rfc1751.py : Converts between 128-bit strings and a human-readable -# sequence of words, as defined in RFC1751: "A Convention for -# Human-Readable 128-bit Keys", by Daniel L. McDonald. -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew M. Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from __future__ import print_function - -import binascii - -from Cryptodome.Util.py3compat import bord, bchr - -binary = {0: '0000', 1: '0001', 2: '0010', 3: '0011', 4: '0100', 5: '0101', - 6: '0110', 7: '0111', 8: '1000', 9: '1001', 10: '1010', 11: '1011', - 12: '1100', 13: '1101', 14: '1110', 15: '1111'} - - -def _key2bin(s): - "Convert a key into a string of binary digits" - kl = map(lambda x: bord(x), s) - kl = map(lambda x: binary[x >> 4] + binary[x & 15], kl) - return ''.join(kl) - - -def _extract(key, start, length): - """Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its - numeric value.""" - - result = 0 - for y in key[start:start+length]: - result = result * 2 + ord(y) - 48 - return result - - -def key_to_english(key): - """Transform an arbitrary key into a string containing English words. - - Example:: - - >>> from Cryptodome.Util.RFC1751 import key_to_english - >>> key_to_english(b'66666666') - 'RAM LOIS GOAD CREW CARE HIT' - - Args: - key (byte string): - The key to convert. Its length must be a multiple of 8. - Return: - A string of English words. - """ - - if len(key) % 8 != 0: - raise ValueError('The length of the key must be a multiple of 8.') - - english = '' - for index in range(0, len(key), 8): # Loop over 8-byte subkeys - subkey = key[index:index + 8] - # Compute the parity of the key - skbin = _key2bin(subkey) - p = 0 - for i in range(0, 64, 2): - p = p + _extract(skbin, i, 2) - # Append parity bits to the subkey - skbin = _key2bin(subkey + bchr((p << 6) & 255)) - for i in range(0, 64, 11): - english = english + wordlist[_extract(skbin, i, 11)] + ' ' - - return english.strip() - - -def english_to_key(s): - """Transform a string into a corresponding key. - - Example:: - - >>> from Cryptodome.Util.RFC1751 import english_to_key - >>> english_to_key('RAM LOIS GOAD CREW CARE HIT') - b'66666666' - - Args: - s (string): the string with the words separated by whitespace; - the number of words must be a multiple of 6. - Return: - A byte string. 
- """ - - L = s.upper().split() - key = b'' - for index in range(0, len(L), 6): - sublist = L[index:index + 6] - char = 9 * [0] - bits = 0 - for i in sublist: - index = wordlist.index(i) - shift = (8 - (bits + 11) % 8) % 8 - y = index << shift - cl, cc, cr = (y >> 16), (y >> 8) & 0xff, y & 0xff - if (shift > 5): - char[bits >> 3] = char[bits >> 3] | cl - char[(bits >> 3) + 1] = char[(bits >> 3) + 1] | cc - char[(bits >> 3) + 2] = char[(bits >> 3) + 2] | cr - elif shift > -3: - char[bits >> 3] = char[bits >> 3] | cc - char[(bits >> 3) + 1] = char[(bits >> 3) + 1] | cr - else: - char[bits >> 3] = char[bits >> 3] | cr - bits = bits + 11 - - subkey = b'' - for y in char: - subkey = subkey + bchr(y) - - # Check the parity of the resulting key - skbin = _key2bin(subkey) - p = 0 - for i in range(0, 64, 2): - p = p + _extract(skbin, i, 2) - if (p & 3) != _extract(skbin, 64, 2): - raise ValueError("Parity error in resulting key") - key = key + subkey[0:8] - return key - - -wordlist = [ - "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD", - "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA", - "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK", - "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE", - "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM", - "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET", - "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO", - "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT", - "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT", - "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY", - "CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN", - "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG", - "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB", - "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO", - "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE", - "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW", - "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR", - "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP", - "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO", - "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD", - "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM", - "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT", - "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE", - "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL", - "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT", - "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET", - "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT", - "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB", - "LAC", "LAD", "LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE", - "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT", - "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG", - "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW", - "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT", - "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG", - "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED", - "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD", - "NON", "NOR", "NOT", 
"NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF", - "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL", - "OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT", - "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD", - "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG", - "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT", - "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB", - "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT", - "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM", - "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB", - "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM", - "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET", - "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY", - "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY", - "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN", - "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE", - "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP", - "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP", - "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS", - "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT", - "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE", - "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT", - "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS", - "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE", - "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA", - "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN", - "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW", - "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA", - "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM", - "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW", - "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL", - "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM", - "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK", - "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH", - "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT", - "BEAU", "BECK", "BEEF", "BEEN", "BEER", - "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN", - "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", - "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE", - "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", - "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", - "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", - "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", - "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", - "BOTH", "BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", - "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", - "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", - "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", - "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", - "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL", - "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", - "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", - "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", 
"CHUG", - "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", - "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", - "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN", - "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK", - "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST", - "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", - "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", - "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", - "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN", - "DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", - "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", - "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", - "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT", - "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", - "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", - "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", - "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", - "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", - "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", - "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", - "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", - "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", - "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", - "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", - "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST", - "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", - "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", - "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", - "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", - "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", - "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", - "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", - "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", - "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", - "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH", - "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", - "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", - "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", - "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", - "GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", - "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", - "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", - "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", - "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", - "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK", - "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE", - "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", - "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", - "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", - "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", - "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", - "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", - "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", - "HOYT", "HUCK", "HUED", "HUFF", "HUGE", 
"HUGH", "HUGO", "HULK", - "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", - "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", - "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", - "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", - "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", - "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", - "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", - "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", - "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", - "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", - "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", - "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", - "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", - "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", - "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", - "LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER", - "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST", - "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", - "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", - "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST", - "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", - "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", - "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", - "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", - "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", - "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", - "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK", - "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE", - "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", - "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", - "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", - "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", - "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", - "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", - "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", - "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", - "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", - "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", - "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", - "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", - "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", - "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", - "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", - "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", - "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", "OSLO", "OTIS", - "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY", - "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", - "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", - "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", - "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA", - "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", - "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD", - "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", - "ROOD", "ROOF", "ROOK", 
"ROOM", "ROOT", "ROSA", "ROSE", "ROSS", - "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", - "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", - "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", - "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE", - "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", - "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", - "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS", - "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", - "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", - "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", - "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", - "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", - "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", - "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", - "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", - "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", - "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", - "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", - "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", - "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", - "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", - "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", - "TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS", - "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN", - "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", - "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY", - "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", - "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", - "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", - "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", - "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", - "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", - "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", - "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", - "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", - "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", - "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM", - "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", - "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", - "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", - "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", - "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE", - "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", - "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", - "YANG", "YANK", "YARD", "YARN", "YAWL", "YAWN", "YEAH", "YEAR", - "YELL", "YOGA", "YOKE" ] diff --git a/venv/Lib/site-packages/Cryptodome/Util/RFC1751.pyi b/venv/Lib/site-packages/Cryptodome/Util/RFC1751.pyi deleted file mode 100644 index 6ad07ff..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/RFC1751.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Dict, List - -binary: Dict[int, str] -wordlist: List[str] - -def key_to_english(key: bytes) -> str: ... -def english_to_key(s: str) -> bytes: ... 
diff --git a/venv/Lib/site-packages/Cryptodome/Util/__init__.py b/venv/Lib/site-packages/Cryptodome/Util/__init__.py deleted file mode 100644 index 1862b82..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Miscellaneous modules - -Contains useful modules that don't belong into any of the -other Cryptodome.* subpackages. - -======================== ============================================= -Module Description -======================== ============================================= -`Cryptodome.Util.number` Number-theoretic functions (primality testing, etc.) -`Cryptodome.Util.Counter` Fast counter functions for CTR cipher modes. -`Cryptodome.Util.RFC1751` Converts between 128-bit keys and human-readable - strings of words. -`Cryptodome.Util.asn1` Minimal support for ASN.1 DER encoding -`Cryptodome.Util.Padding` Set of functions for adding and removing padding. 
-======================== ============================================= - -:undocumented: _galois, _number_new, cpuid, py3compat, _raw_api -""" - -__all__ = ['RFC1751', 'number', 'strxor', 'asn1', 'Counter', 'Padding'] - diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/Counter.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/Counter.cpython-312.pyc deleted file mode 100644 index 681505d..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/Counter.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/Padding.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/Padding.cpython-312.pyc deleted file mode 100644 index 36a61bd..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/Padding.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/RFC1751.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/RFC1751.cpython-312.pyc deleted file mode 100644 index b32a11a..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/RFC1751.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b67a6fc..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_cpu_features.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_cpu_features.cpython-312.pyc deleted file mode 100644 index 67a10c7..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_cpu_features.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_file_system.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_file_system.cpython-312.pyc deleted file mode 100644 index b8fa7ea..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_file_system.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_raw_api.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_raw_api.cpython-312.pyc deleted file mode 100644 index db9c8b0..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/_raw_api.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/asn1.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/asn1.cpython-312.pyc deleted file mode 100644 index 7106f19..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/asn1.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/number.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/number.cpython-312.pyc deleted file mode 100644 index 7b8ea49..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/number.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/py3compat.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/py3compat.cpython-312.pyc deleted file mode 100644 index bbdfad5..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/py3compat.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/strxor.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/Util/__pycache__/strxor.cpython-312.pyc deleted file mode 100644 index f1ff3fd..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/__pycache__/strxor.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/_cpu_features.py b/venv/Lib/site-packages/Cryptodome/Util/_cpu_features.py deleted file mode 100644 index 4794a02..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/_cpu_features.py +++ /dev/null @@ -1,46 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2018, Helder Eijs -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util._raw_api import load_pycryptodome_raw_lib - - -_raw_cpuid_lib = load_pycryptodome_raw_lib("Cryptodome.Util._cpuid_c", - """ - int have_aes_ni(void); - int have_clmul(void); - """) - - -def have_aes_ni(): - return _raw_cpuid_lib.have_aes_ni() - - -def have_clmul(): - return _raw_cpuid_lib.have_clmul() diff --git a/venv/Lib/site-packages/Cryptodome/Util/_cpu_features.pyi b/venv/Lib/site-packages/Cryptodome/Util/_cpu_features.pyi deleted file mode 100644 index 10e669e..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/_cpu_features.pyi +++ /dev/null @@ -1,2 +0,0 @@ -def have_aes_ni() -> int: ... -def have_clmul() -> int: ... diff --git a/venv/Lib/site-packages/Cryptodome/Util/_cpuid_c.pyd b/venv/Lib/site-packages/Cryptodome/Util/_cpuid_c.pyd deleted file mode 100644 index e197508..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/_cpuid_c.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/_file_system.py b/venv/Lib/site-packages/Cryptodome/Util/_file_system.py deleted file mode 100644 index 282f0dc..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/_file_system.py +++ /dev/null @@ -1,54 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2016, Legrandin -# All rights reserved. 
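The _cpu_features module deleted above only wraps two predicates from the compiled _cpuid_c extension; a minimal usage sketch (again relying on the installed pycryptodomex package rather than the removed venv copy):

from Cryptodome.Util._cpu_features import have_aes_ni, have_clmul

# Both return a C int flag reporting CPU support for the AES-NI and CLMUL instruction sets.
print(bool(have_aes_ni()), bool(have_clmul()))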
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import os - - -def pycryptodome_filename(dir_comps, filename): - """Return the complete file name for the module - - dir_comps : list of string - The list of directory names in the PyCryptodome package. - The first element must be "Cryptodome". - - filename : string - The filename (inclusing extension) in the target directory. - """ - - if dir_comps[0] != "Cryptodome": - raise ValueError("Only available for modules under 'Cryptodome'") - - dir_comps = list(dir_comps[1:]) + [filename] - - util_lib, _ = os.path.split(os.path.abspath(__file__)) - root_lib = os.path.join(util_lib, "..") - - return os.path.join(root_lib, *dir_comps) - diff --git a/venv/Lib/site-packages/Cryptodome/Util/_file_system.pyi b/venv/Lib/site-packages/Cryptodome/Util/_file_system.pyi deleted file mode 100644 index d54a126..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/_file_system.pyi +++ /dev/null @@ -1,4 +0,0 @@ -from typing import List - - -def pycryptodome_filename(dir_comps: List[str], filename: str) -> str: ... \ No newline at end of file diff --git a/venv/Lib/site-packages/Cryptodome/Util/_raw_api.py b/venv/Lib/site-packages/Cryptodome/Util/_raw_api.py deleted file mode 100644 index cd64ac8..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/_raw_api.py +++ /dev/null @@ -1,325 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -import os -import abc -import sys -from Cryptodome.Util.py3compat import byte_string -from Cryptodome.Util._file_system import pycryptodome_filename - -# -# List of file suffixes for Python extensions -# -if sys.version_info[0] < 3: - - import imp - extension_suffixes = [] - for ext, mod, typ in imp.get_suffixes(): - if typ == imp.C_EXTENSION: - extension_suffixes.append(ext) - -else: - - from importlib import machinery - extension_suffixes = machinery.EXTENSION_SUFFIXES - -# Which types with buffer interface we support (apart from byte strings) -_buffer_type = (bytearray, memoryview) - - -class _VoidPointer(object): - @abc.abstractmethod - def get(self): - """Return the memory location we point to""" - return - - @abc.abstractmethod - def address_of(self): - """Return a raw pointer to this pointer""" - return - - -try: - # Starting from v2.18, pycparser (used by cffi for in-line ABI mode) - # stops working correctly when PYOPTIMIZE==2 or the parameter -OO is - # passed. In that case, we fall back to ctypes. - # Note that PyPy ships with an old version of pycparser so we can keep - # using cffi there. - # See https://github.com/Legrandin/pycryptodome/issues/228 - if '__pypy__' not in sys.builtin_module_names and sys.flags.optimize == 2: - raise ImportError("CFFI with optimize=2 fails due to pycparser bug.") - - # cffi still uses PyUnicode_GetSize, which was removed in Python 3.12 - # thus leading to a crash on cffi.dlopen() - # See https://groups.google.com/u/1/g/python-cffi/c/oZkOIZ_zi5k - if sys.version_info >= (3, 12) and os.name == "nt": - raise ImportError("CFFI is not compatible with Python 3.12 on Windows") - - from cffi import FFI - - ffi = FFI() - null_pointer = ffi.NULL - uint8_t_type = ffi.typeof(ffi.new("const uint8_t*")) - - _Array = ffi.new("uint8_t[1]").__class__.__bases__ - - def load_lib(name, cdecl): - """Load a shared library and return a handle to it. - - @name, either an absolute path or the name of a library - in the system search path. - - @cdecl, the C function declarations. 
- """ - - if hasattr(ffi, "RTLD_DEEPBIND") and not os.getenv('PYCRYPTODOME_DISABLE_DEEPBIND'): - lib = ffi.dlopen(name, ffi.RTLD_DEEPBIND) - else: - lib = ffi.dlopen(name) - ffi.cdef(cdecl) - return lib - - def c_ulong(x): - """Convert a Python integer to unsigned long""" - return x - - c_ulonglong = c_ulong - c_uint = c_ulong - c_ubyte = c_ulong - - def c_size_t(x): - """Convert a Python integer to size_t""" - return x - - def create_string_buffer(init_or_size, size=None): - """Allocate the given amount of bytes (initially set to 0)""" - - if isinstance(init_or_size, bytes): - size = max(len(init_or_size) + 1, size) - result = ffi.new("uint8_t[]", size) - result[:] = init_or_size - else: - if size: - raise ValueError("Size must be specified once only") - result = ffi.new("uint8_t[]", init_or_size) - return result - - def get_c_string(c_string): - """Convert a C string into a Python byte sequence""" - return ffi.string(c_string) - - def get_raw_buffer(buf): - """Convert a C buffer into a Python byte sequence""" - return ffi.buffer(buf)[:] - - def c_uint8_ptr(data): - if isinstance(data, _buffer_type): - # This only works for cffi >= 1.7 - return ffi.cast(uint8_t_type, ffi.from_buffer(data)) - elif byte_string(data) or isinstance(data, _Array): - return data - else: - raise TypeError("Object type %s cannot be passed to C code" % type(data)) - - class VoidPointer_cffi(_VoidPointer): - """Model a newly allocated pointer to void""" - - def __init__(self): - self._pp = ffi.new("void *[1]") - - def get(self): - return self._pp[0] - - def address_of(self): - return self._pp - - def VoidPointer(): - return VoidPointer_cffi() - - backend = "cffi" - -except ImportError: - - import ctypes - from ctypes import (CDLL, c_void_p, byref, c_ulong, c_ulonglong, c_size_t, - create_string_buffer, c_ubyte, c_uint) - from ctypes.util import find_library - from ctypes import Array as _Array - - null_pointer = None - cached_architecture = [] - - def c_ubyte(c): - if not (0 <= c < 256): - raise OverflowError() - return ctypes.c_ubyte(c) - - def load_lib(name, cdecl): - if not cached_architecture: - # platform.architecture() creates a subprocess, so caching the - # result makes successive imports faster. - import platform - cached_architecture[:] = platform.architecture() - bits, linkage = cached_architecture - if "." 
not in name and not linkage.startswith("Win"): - full_name = find_library(name) - if full_name is None: - raise OSError("Cannot load library '%s'" % name) - name = full_name - return CDLL(name) - - def get_c_string(c_string): - return c_string.value - - def get_raw_buffer(buf): - return buf.raw - - # ---- Get raw pointer --- - - _c_ssize_t = ctypes.c_ssize_t - - _PyBUF_SIMPLE = 0 - _PyObject_GetBuffer = ctypes.pythonapi.PyObject_GetBuffer - _PyBuffer_Release = ctypes.pythonapi.PyBuffer_Release - _py_object = ctypes.py_object - _c_ssize_p = ctypes.POINTER(_c_ssize_t) - - # See Include/object.h for CPython - # and https://github.com/pallets/click/blob/master/src/click/_winconsole.py - class _Py_buffer(ctypes.Structure): - _fields_ = [ - ('buf', c_void_p), - ('obj', ctypes.py_object), - ('len', _c_ssize_t), - ('itemsize', _c_ssize_t), - ('readonly', ctypes.c_int), - ('ndim', ctypes.c_int), - ('format', ctypes.c_char_p), - ('shape', _c_ssize_p), - ('strides', _c_ssize_p), - ('suboffsets', _c_ssize_p), - ('internal', c_void_p) - ] - - # Extra field for CPython 2.6/2.7 - if sys.version_info[0] == 2: - _fields_.insert(-1, ('smalltable', _c_ssize_t * 2)) - - def c_uint8_ptr(data): - if byte_string(data) or isinstance(data, _Array): - return data - elif isinstance(data, _buffer_type): - obj = _py_object(data) - buf = _Py_buffer() - _PyObject_GetBuffer(obj, byref(buf), _PyBUF_SIMPLE) - try: - buffer_type = ctypes.c_ubyte * buf.len - return buffer_type.from_address(buf.buf) - finally: - _PyBuffer_Release(byref(buf)) - else: - raise TypeError("Object type %s cannot be passed to C code" % type(data)) - - # --- - - class VoidPointer_ctypes(_VoidPointer): - """Model a newly allocated pointer to void""" - - def __init__(self): - self._p = c_void_p() - - def get(self): - return self._p - - def address_of(self): - return byref(self._p) - - def VoidPointer(): - return VoidPointer_ctypes() - - backend = "ctypes" - - -class SmartPointer(object): - """Class to hold a non-managed piece of memory""" - - def __init__(self, raw_pointer, destructor): - self._raw_pointer = raw_pointer - self._destructor = destructor - - def get(self): - return self._raw_pointer - - def release(self): - rp, self._raw_pointer = self._raw_pointer, None - return rp - - def __del__(self): - try: - if self._raw_pointer is not None: - self._destructor(self._raw_pointer) - self._raw_pointer = None - except AttributeError: - pass - - -def load_pycryptodome_raw_lib(name, cdecl): - """Load a shared library and return a handle to it. - - @name, the name of the library expressed as a PyCryptodome module, - for instance Cryptodome.Cipher._raw_cbc. - - @cdecl, the C function declarations. 
- """ - - split = name.split(".") - dir_comps, basename = split[:-1], split[-1] - attempts = [] - for ext in extension_suffixes: - try: - filename = basename + ext - full_name = pycryptodome_filename(dir_comps, filename) - if not os.path.isfile(full_name): - attempts.append("Not found '%s'" % filename) - continue - return load_lib(full_name, cdecl) - except OSError as exp: - attempts.append("Cannot load '%s': %s" % (filename, str(exp))) - raise OSError("Cannot load native module '%s': %s" % (name, ", ".join(attempts))) - - -def is_buffer(x): - """Return True if object x supports the buffer interface""" - return isinstance(x, (bytes, bytearray, memoryview)) - - -def is_writeable_buffer(x): - return (isinstance(x, bytearray) or - (isinstance(x, memoryview) and not x.readonly)) diff --git a/venv/Lib/site-packages/Cryptodome/Util/_raw_api.pyi b/venv/Lib/site-packages/Cryptodome/Util/_raw_api.pyi deleted file mode 100644 index 2bc5301..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/_raw_api.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Any, Optional, Union - -def load_lib(name: str, cdecl: str) -> Any : ... -def c_ulong(x: int ) -> Any : ... -def c_ulonglong(x: int ) -> Any : ... -def c_size_t(x: int) -> Any : ... -def create_string_buffer(init_or_size: Union[bytes,int], size: Optional[int]) -> Any : ... -def get_c_string(c_string: Any) -> bytes : ... -def get_raw_buffer(buf: Any) -> bytes : ... -def c_uint8_ptr(data: Union[bytes, memoryview, bytearray]) -> Any : ... - -class VoidPointer(object): - def get(self) -> Any : ... - def address_of(self) -> Any : ... - -class SmartPointer(object): - def __init__(self, raw_pointer: Any, destructor: Any) -> None : ... - def get(self) -> Any : ... - def release(self) -> Any : ... - -backend : str -null_pointer : Any -ffi: Any - -def load_pycryptodome_raw_lib(name: str, cdecl: str) -> Any : ... -def is_buffer(x: Any) -> bool : ... -def is_writeable_buffer(x: Any) -> bool : ... diff --git a/venv/Lib/site-packages/Cryptodome/Util/_strxor.pyd b/venv/Lib/site-packages/Cryptodome/Util/_strxor.pyd deleted file mode 100644 index 17eeb27..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/Util/_strxor.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/Util/asn1.py b/venv/Lib/site-packages/Cryptodome/Util/asn1.py deleted file mode 100644 index 9987fda..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/asn1.py +++ /dev/null @@ -1,1064 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Util/asn1.py : Minimal support for ASN.1 DER binary encoding. -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -import struct - -from Cryptodome.Util.py3compat import byte_string, bchr, bord - -from Cryptodome.Util.number import long_to_bytes, bytes_to_long - -__all__ = ['DerObject', 'DerInteger', 'DerBoolean', 'DerOctetString', - 'DerNull', 'DerSequence', 'DerObjectId', 'DerBitString', 'DerSetOf'] - -# Useful references: -# - https://luca.ntop.org/Teaching/Appunti/asn1.html -# - https://letsencrypt.org/docs/a-warm-welcome-to-asn1-and-der/ -# - https://www.zytrax.com/tech/survival/asn1.html -# - https://www.oss.com/asn1/resources/books-whitepapers-pubs/larmouth-asn1-book.pdf -# - https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf -# - https://misc.daniel-marschall.de/asn.1/oid-converter/online.php - -def _is_number(x, only_non_negative=False): - test = 0 - try: - test = x + test - except TypeError: - return False - return not only_non_negative or x >= 0 - - -class BytesIO_EOF(object): - """This class differs from BytesIO in that a ValueError exception is - raised whenever EOF is reached.""" - - def __init__(self, initial_bytes): - self._buffer = initial_bytes - self._index = 0 - self._bookmark = None - - def set_bookmark(self): - self._bookmark = self._index - - def data_since_bookmark(self): - assert self._bookmark is not None - return self._buffer[self._bookmark:self._index] - - def remaining_data(self): - return len(self._buffer) - self._index - - def read(self, length): - new_index = self._index + length - if new_index > len(self._buffer): - raise ValueError("Not enough data for DER decoding: expected %d bytes and found %d" % (new_index, len(self._buffer))) - - result = self._buffer[self._index:new_index] - self._index = new_index - return result - - def read_byte(self): - return bord(self.read(1)[0]) - - -class DerObject(object): - """Base class for defining a single DER object. - - This class should never be directly instantiated. - """ - - def __init__(self, asn1Id=None, payload=b'', implicit=None, - constructed=False, explicit=None): - """Initialize the DER object according to a specific ASN.1 type. - - :Parameters: - asn1Id : integer or byte - The universal DER tag number for this object - (e.g. 0x10 for a SEQUENCE). - If None, the tag is not known yet. - - payload : byte string - The initial payload of the object (that it, - the content octets). - If not specified, the payload is empty. - - implicit : integer or byte - The IMPLICIT tag number (< 0x1F) to use for the encoded object. - It overrides the universal tag *asn1Id*. - It cannot be combined with the ``explicit`` parameter. - By default, there is no IMPLICIT tag. - - constructed : bool - True when the ASN.1 type is *constructed*. - False when it is *primitive* (default). - - explicit : integer or byte - The EXPLICIT tag number (< 0x1F) to use for the encoded object. - It cannot be combined with the ``implicit`` parameter. - By default, there is no EXPLICIT tag. 
- """ - - if asn1Id is None: - # The tag octet will be read in with ``decode`` - self._tag_octet = None - return - asn1Id = self._convertTag(asn1Id) - - self.payload = payload - - # In a BER/DER identifier octet: - # * bits 4-0 contain the tag value - # * bit 5 is set if the type is 'constructed' - # and unset if 'primitive' - # * bits 7-6 depend on the encoding class - # - # Class | Bit 7, Bit 6 - # ---------------------------------- - # universal | 0 0 - # application | 0 1 - # context-spec | 1 0 (default for IMPLICIT/EXPLICIT) - # private | 1 1 - # - - constructed_bit = 0x20 if constructed else 0x00 - - if None not in (explicit, implicit): - raise ValueError("Explicit and implicit tags are" - " mutually exclusive") - - if implicit is not None: - # IMPLICIT tag overrides asn1Id - self._tag_octet = 0x80 | constructed_bit | self._convertTag(implicit) - elif explicit is not None: - # 'constructed bit' is always asserted for an EXPLICIT tag - self._tag_octet = 0x80 | 0x20 | self._convertTag(explicit) - self._inner_tag_octet = constructed_bit | asn1Id - else: - # Neither IMPLICIT nor EXPLICIT - self._tag_octet = constructed_bit | asn1Id - - def _convertTag(self, tag): - """Check if *tag* is a real DER tag (5 bits). - Convert it from a character to number if necessary. - """ - if not _is_number(tag): - if len(tag) == 1: - tag = bord(tag[0]) - # Ensure that tag is a low tag - if not (_is_number(tag) and 0 <= tag < 0x1F): - raise ValueError("Wrong DER tag") - return tag - - @staticmethod - def _definite_form(length): - """Build length octets according to BER/DER - definite form. - """ - if length > 127: - encoding = long_to_bytes(length) - return bchr(len(encoding) + 128) + encoding - return bchr(length) - - def encode(self): - """Return this DER element, fully encoded as a binary byte string.""" - - # Concatenate identifier octets, length octets, - # and contents octets - - output_payload = self.payload - - # In case of an EXTERNAL tag, first encode the inner - # element. - if hasattr(self, "_inner_tag_octet"): - output_payload = (bchr(self._inner_tag_octet) + - self._definite_form(len(self.payload)) + - self.payload) - - return (bchr(self._tag_octet) + - self._definite_form(len(output_payload)) + - output_payload) - - def _decodeLen(self, s): - """Decode DER length octets from a file.""" - - length = s.read_byte() - - if length > 127: - encoded_length = s.read(length & 0x7F) - if bord(encoded_length[0]) == 0: - raise ValueError("Invalid DER: length has leading zero") - length = bytes_to_long(encoded_length) - if length <= 127: - raise ValueError("Invalid DER: length in long form but smaller than 128") - - return length - - def decode(self, der_encoded, strict=False): - """Decode a complete DER element, and re-initializes this - object with it. - - Args: - der_encoded (byte string): A complete DER element. - - Raises: - ValueError: in case of parsing errors. 
- """ - - if not byte_string(der_encoded): - raise ValueError("Input is not a byte string") - - s = BytesIO_EOF(der_encoded) - self._decodeFromStream(s, strict) - - # There shouldn't be other bytes left - if s.remaining_data() > 0: - raise ValueError("Unexpected extra data after the DER structure") - - return self - - def _decodeFromStream(self, s, strict): - """Decode a complete DER element from a file.""" - - idOctet = s.read_byte() - if self._tag_octet is not None: - if idOctet != self._tag_octet: - raise ValueError("Unexpected DER tag") - else: - self._tag_octet = idOctet - length = self._decodeLen(s) - self.payload = s.read(length) - - # In case of an EXTERNAL tag, further decode the inner - # element. - if hasattr(self, "_inner_tag_octet"): - p = BytesIO_EOF(self.payload) - inner_octet = p.read_byte() - if inner_octet != self._inner_tag_octet: - raise ValueError("Unexpected internal DER tag") - length = self._decodeLen(p) - self.payload = p.read(length) - - # There shouldn't be other bytes left - if p.remaining_data() > 0: - raise ValueError("Unexpected extra data after the DER structure") - - -class DerInteger(DerObject): - """Class to model a DER INTEGER. - - An example of encoding is:: - - >>> from Cryptodome.Util.asn1 import DerInteger - >>> from binascii import hexlify, unhexlify - >>> int_der = DerInteger(9) - >>> print hexlify(int_der.encode()) - - which will show ``020109``, the DER encoding of 9. - - And for decoding:: - - >>> s = unhexlify(b'020109') - >>> try: - >>> int_der = DerInteger() - >>> int_der.decode(s) - >>> print int_der.value - >>> except ValueError: - >>> print "Not a valid DER INTEGER" - - the output will be ``9``. - - :ivar value: The integer value - :vartype value: integer - """ - - def __init__(self, value=0, implicit=None, explicit=None): - """Initialize the DER object as an INTEGER. - - :Parameters: - value : integer - The value of the integer. - - implicit : integer - The IMPLICIT tag to use for the encoded object. - It overrides the universal tag for INTEGER (2). - """ - - DerObject.__init__(self, 0x02, b'', implicit, - False, explicit) - self.value = value # The integer value - - def encode(self): - """Return the DER INTEGER, fully encoded as a - binary string.""" - - number = self.value - self.payload = b'' - while True: - self.payload = bchr(int(number & 255)) + self.payload - if 128 <= number <= 255: - self.payload = bchr(0x00) + self.payload - if -128 <= number <= 255: - break - number >>= 8 - return DerObject.encode(self) - - def decode(self, der_encoded, strict=False): - """Decode a DER-encoded INTEGER, and re-initializes this - object with it. - - Args: - der_encoded (byte string): A complete INTEGER DER element. - - Raises: - ValueError: in case of parsing errors. 
- """ - - return DerObject.decode(self, der_encoded, strict=strict) - - def _decodeFromStream(self, s, strict): - """Decode a complete DER INTEGER from a file.""" - - # Fill up self.payload - DerObject._decodeFromStream(self, s, strict) - - if strict: - if len(self.payload) == 0: - raise ValueError("Invalid encoding for DER INTEGER: empty payload") - if len(self.payload) >= 2 and struct.unpack('>H', self.payload[:2])[0] < 0x80: - raise ValueError("Invalid encoding for DER INTEGER: leading zero") - - # Derive self.value from self.payload - self.value = 0 - bits = 1 - for i in self.payload: - self.value *= 256 - self.value += bord(i) - bits <<= 8 - if self.payload and bord(self.payload[0]) & 0x80: - self.value -= bits - - -class DerBoolean(DerObject): - """Class to model a DER-encoded BOOLEAN. - - An example of encoding is:: - - >>> from Cryptodome.Util.asn1 import DerBoolean - >>> bool_der = DerBoolean(True) - >>> print(bool_der.encode().hex()) - - which will show ``0101ff``, the DER encoding of True. - - And for decoding:: - - >>> s = bytes.fromhex('0101ff') - >>> try: - >>> bool_der = DerBoolean() - >>> bool_der.decode(s) - >>> print(bool_der.value) - >>> except ValueError: - >>> print "Not a valid DER BOOLEAN" - - the output will be ``True``. - - :ivar value: The boolean value - :vartype value: boolean - """ - def __init__(self, value=False, implicit=None, explicit=None): - """Initialize the DER object as a BOOLEAN. - - Args: - value (boolean): - The value of the boolean. Default is False. - - implicit (integer or byte): - The IMPLICIT tag number (< 0x1F) to use for the encoded object. - It overrides the universal tag for BOOLEAN (1). - It cannot be combined with the ``explicit`` parameter. - By default, there is no IMPLICIT tag. - - explicit (integer or byte): - The EXPLICIT tag number (< 0x1F) to use for the encoded object. - It cannot be combined with the ``implicit`` parameter. - By default, there is no EXPLICIT tag. - """ - - DerObject.__init__(self, 0x01, b'', implicit, False, explicit) - self.value = value # The boolean value - - def encode(self): - """Return the DER BOOLEAN, fully encoded as a binary string.""" - - self.payload = b'\xFF' if self.value else b'\x00' - return DerObject.encode(self) - - def decode(self, der_encoded, strict=False): - """Decode a DER-encoded BOOLEAN, and re-initializes this object with it. - - Args: - der_encoded (byte string): A DER-encoded BOOLEAN. - - Raises: - ValueError: in case of parsing errors. - """ - - return DerObject.decode(self, der_encoded, strict) - - def _decodeFromStream(self, s, strict): - """Decode a DER-encoded BOOLEAN from a file.""" - - # Fill up self.payload - DerObject._decodeFromStream(self, s, strict) - - if len(self.payload) != 1: - raise ValueError("Invalid encoding for DER BOOLEAN: payload is not 1 byte") - - if bord(self.payload[0]) == 0: - self.value = False - elif bord(self.payload[0]) == 0xFF: - self.value = True - else: - raise ValueError("Invalid payload for DER BOOLEAN") - - -class DerSequence(DerObject): - """Class to model a DER SEQUENCE. - - This object behaves like a dynamic Python sequence. - - Sub-elements that are INTEGERs behave like Python integers. - - Any other sub-element is a binary string encoded as a complete DER - sub-element (TLV). 
- - An example of encoding is: - - >>> from Cryptodome.Util.asn1 import DerSequence, DerInteger - >>> from binascii import hexlify, unhexlify - >>> obj_der = unhexlify('070102') - >>> seq_der = DerSequence([4]) - >>> seq_der.append(9) - >>> seq_der.append(obj_der.encode()) - >>> print hexlify(seq_der.encode()) - - which will show ``3009020104020109070102``, the DER encoding of the - sequence containing ``4``, ``9``, and the object with payload ``02``. - - For decoding: - - >>> s = unhexlify(b'3009020104020109070102') - >>> try: - >>> seq_der = DerSequence() - >>> seq_der.decode(s) - >>> print len(seq_der) - >>> print seq_der[0] - >>> print seq_der[:] - >>> except ValueError: - >>> print "Not a valid DER SEQUENCE" - - the output will be:: - - 3 - 4 - [4, 9, b'\x07\x01\x02'] - - """ - - def __init__(self, startSeq=None, implicit=None, explicit=None): - """Initialize the DER object as a SEQUENCE. - - :Parameters: - startSeq : Python sequence - A sequence whose element are either integers or - other DER objects. - - implicit : integer or byte - The IMPLICIT tag number (< 0x1F) to use for the encoded object. - It overrides the universal tag for SEQUENCE (16). - It cannot be combined with the ``explicit`` parameter. - By default, there is no IMPLICIT tag. - - explicit : integer or byte - The EXPLICIT tag number (< 0x1F) to use for the encoded object. - It cannot be combined with the ``implicit`` parameter. - By default, there is no EXPLICIT tag. - """ - - DerObject.__init__(self, 0x10, b'', implicit, True, explicit) - if startSeq is None: - self._seq = [] - else: - self._seq = startSeq - - # A few methods to make it behave like a python sequence - - def __delitem__(self, n): - del self._seq[n] - - def __getitem__(self, n): - return self._seq[n] - - def __setitem__(self, key, value): - self._seq[key] = value - - def __setslice__(self, i, j, sequence): - self._seq[i:j] = sequence - - def __delslice__(self, i, j): - del self._seq[i:j] - - def __getslice__(self, i, j): - return self._seq[max(0, i):max(0, j)] - - def __len__(self): - return len(self._seq) - - def __iadd__(self, item): - self._seq.append(item) - return self - - def append(self, item): - self._seq.append(item) - return self - - def insert(self, index, item): - self._seq.insert(index, item) - return self - - def hasInts(self, only_non_negative=True): - """Return the number of items in this sequence that are - integers. - - Args: - only_non_negative (boolean): - If ``True``, negative integers are not counted in. - """ - - items = [x for x in self._seq if _is_number(x, only_non_negative)] - return len(items) - - def hasOnlyInts(self, only_non_negative=True): - """Return ``True`` if all items in this sequence are integers - or non-negative integers. - - This function returns False is the sequence is empty, - or at least one member is not an integer. - - Args: - only_non_negative (boolean): - If ``True``, the presence of negative integers - causes the method to return ``False``.""" - return self._seq and self.hasInts(only_non_negative) == len(self._seq) - - def encode(self): - """Return this DER SEQUENCE, fully encoded as a - binary string. - - Raises: - ValueError: if some elements in the sequence are neither integers - nor byte strings. 
- """ - self.payload = b'' - for item in self._seq: - if byte_string(item): - self.payload += item - elif _is_number(item): - self.payload += DerInteger(item).encode() - else: - self.payload += item.encode() - return DerObject.encode(self) - - def decode(self, der_encoded, strict=False, nr_elements=None, only_ints_expected=False): - """Decode a complete DER SEQUENCE, and re-initializes this - object with it. - - Args: - der_encoded (byte string): - A complete SEQUENCE DER element. - nr_elements (None or integer or list of integers): - The number of members the SEQUENCE can have - only_ints_expected (boolean): - Whether the SEQUENCE is expected to contain only integers. - strict (boolean): - Whether decoding must check for strict DER compliancy. - - Raises: - ValueError: in case of parsing errors. - - DER INTEGERs are decoded into Python integers. Any other DER - element is not decoded. Its validity is not checked. - """ - - self._nr_elements = nr_elements - result = DerObject.decode(self, der_encoded, strict=strict) - - if only_ints_expected and not self.hasOnlyInts(): - raise ValueError("Some members are not INTEGERs") - - return result - - def _decodeFromStream(self, s, strict): - """Decode a complete DER SEQUENCE from a file.""" - - self._seq = [] - - # Fill up self.payload - DerObject._decodeFromStream(self, s, strict) - - # Add one item at a time to self.seq, by scanning self.payload - p = BytesIO_EOF(self.payload) - while p.remaining_data() > 0: - p.set_bookmark() - - der = DerObject() - der._decodeFromStream(p, strict) - - # Parse INTEGERs differently - if der._tag_octet != 0x02: - self._seq.append(p.data_since_bookmark()) - else: - derInt = DerInteger() - data = p.data_since_bookmark() - derInt.decode(data, strict=strict) - self._seq.append(derInt.value) - - ok = True - if self._nr_elements is not None: - try: - ok = len(self._seq) in self._nr_elements - except TypeError: - ok = len(self._seq) == self._nr_elements - - if not ok: - raise ValueError("Unexpected number of members (%d)" - " in the sequence" % len(self._seq)) - - -class DerOctetString(DerObject): - """Class to model a DER OCTET STRING. - - An example of encoding is: - - >>> from Cryptodome.Util.asn1 import DerOctetString - >>> from binascii import hexlify, unhexlify - >>> os_der = DerOctetString(b'\\xaa') - >>> os_der.payload += b'\\xbb' - >>> print hexlify(os_der.encode()) - - which will show ``0402aabb``, the DER encoding for the byte string - ``b'\\xAA\\xBB'``. - - For decoding: - - >>> s = unhexlify(b'0402aabb') - >>> try: - >>> os_der = DerOctetString() - >>> os_der.decode(s) - >>> print hexlify(os_der.payload) - >>> except ValueError: - >>> print "Not a valid DER OCTET STRING" - - the output will be ``aabb``. - - :ivar payload: The content of the string - :vartype payload: byte string - """ - - def __init__(self, value=b'', implicit=None): - """Initialize the DER object as an OCTET STRING. - - :Parameters: - value : byte string - The initial payload of the object. - If not specified, the payload is empty. - - implicit : integer - The IMPLICIT tag to use for the encoded object. - It overrides the universal tag for OCTET STRING (4). - """ - DerObject.__init__(self, 0x04, value, implicit, False) - - -class DerNull(DerObject): - """Class to model a DER NULL element.""" - - def __init__(self): - """Initialize the DER object as a NULL.""" - - DerObject.__init__(self, 0x05, b'', None, False) - - -class DerObjectId(DerObject): - """Class to model a DER OBJECT ID. 
- - An example of encoding is: - - >>> from Cryptodome.Util.asn1 import DerObjectId - >>> from binascii import hexlify, unhexlify - >>> oid_der = DerObjectId("1.2") - >>> oid_der.value += ".840.113549.1.1.1" - >>> print hexlify(oid_der.encode()) - - which will show ``06092a864886f70d010101``, the DER encoding for the - RSA Object Identifier ``1.2.840.113549.1.1.1``. - - For decoding: - - >>> s = unhexlify(b'06092a864886f70d010101') - >>> try: - >>> oid_der = DerObjectId() - >>> oid_der.decode(s) - >>> print oid_der.value - >>> except ValueError: - >>> print "Not a valid DER OBJECT ID" - - the output will be ``1.2.840.113549.1.1.1``. - - :ivar value: The Object ID (OID), a dot separated list of integers - :vartype value: string - """ - - def __init__(self, value='', implicit=None, explicit=None): - """Initialize the DER object as an OBJECT ID. - - :Parameters: - value : string - The initial Object Identifier (e.g. "1.2.0.0.6.2"). - implicit : integer - The IMPLICIT tag to use for the encoded object. - It overrides the universal tag for OBJECT ID (6). - explicit : integer - The EXPLICIT tag to use for the encoded object. - """ - DerObject.__init__(self, 0x06, b'', implicit, False, explicit) - self.value = value - - def encode(self): - """Return the DER OBJECT ID, fully encoded as a - binary string.""" - - comps = [int(x) for x in self.value.split(".")] - - if len(comps) < 2: - raise ValueError("Not a valid Object Identifier string") - if comps[0] > 2: - raise ValueError("First component must be 0, 1 or 2") - if comps[0] < 2 and comps[1] > 39: - raise ValueError("Second component must be 39 at most") - - subcomps = [40 * comps[0] + comps[1]] + comps[2:] - - encoding = [] - for v in reversed(subcomps): - encoding.append(v & 0x7F) - v >>= 7 - while v: - encoding.append((v & 0x7F) | 0x80) - v >>= 7 - - self.payload = b''.join([bchr(x) for x in reversed(encoding)]) - return DerObject.encode(self) - - def decode(self, der_encoded, strict=False): - """Decode a complete DER OBJECT ID, and re-initializes this - object with it. - - Args: - der_encoded (byte string): - A complete DER OBJECT ID. - strict (boolean): - Whether decoding must check for strict DER compliancy. - - Raises: - ValueError: in case of parsing errors. - """ - - return DerObject.decode(self, der_encoded, strict) - - def _decodeFromStream(self, s, strict): - """Decode a complete DER OBJECT ID from a file.""" - - # Fill up self.payload - DerObject._decodeFromStream(self, s, strict) - - # Derive self.value from self.payload - p = BytesIO_EOF(self.payload) - - subcomps = [] - v = 0 - while p.remaining_data(): - c = p.read_byte() - v = (v << 7) + (c & 0x7F) - if not (c & 0x80): - subcomps.append(v) - v = 0 - - if len(subcomps) == 0: - raise ValueError("Empty payload") - - if subcomps[0] < 40: - subcomps[:1] = [0, subcomps[0]] - elif subcomps[0] < 80: - subcomps[:1] = [1, subcomps[0] - 40] - else: - subcomps[:1] = [2, subcomps[0] - 80] - - self.value = ".".join([str(x) for x in subcomps]) - - -class DerBitString(DerObject): - """Class to model a DER BIT STRING. - - An example of encoding is: - - >>> from Cryptodome.Util.asn1 import DerBitString - >>> bs_der = DerBitString(b'\\xAA') - >>> bs_der.value += b'\\xBB' - >>> print(bs_der.encode().hex()) - - which will show ``030300aabb``, the DER encoding for the bit string - ``b'\\xAA\\xBB'``. 
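# A worked sketch (assuming pycryptodome's Cryptodome.Util.asn1 is installed) of the
# OBJECT ID packing that DerObjectId.encode() above performs, for 1.2.840.113549.1.1.1:
#   the first two components collapse into 40*1 + 2 = 42             -> 0x2A
#   840    splits into 7-bit groups 0b0000110 0b1001000              -> 0x86 0x48
#   113549 splits into 0b0000110 0b1110111 0b0001101                 -> 0x86 0xF7 0x0D
#   1, 1, 1 each fit in a single octet                               -> 0x01 0x01 0x01
# (the high bit of every group except the last marks continuation), giving the
# 9-byte payload behind the TLV 06 09 2A 86 48 86 F7 0D 01 01 01.
from Cryptodome.Util.asn1 import DerObjectId

assert DerObjectId('1.2.840.113549.1.1.1').encode() == bytes.fromhex('06092a864886f70d010101')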
- - For decoding: - - >>> s = bytes.fromhex('030300aabb') - >>> try: - >>> bs_der = DerBitString() - >>> bs_der.decode(s) - >>> print(bs_der.value.hex()) - >>> except ValueError: - >>> print "Not a valid DER BIT STRING" - - the output will be ``aabb``. - - :ivar value: The content of the string - :vartype value: byte string - """ - - def __init__(self, value=b'', implicit=None, explicit=None): - """Initialize the DER object as a BIT STRING. - - :Parameters: - value : byte string or DER object - The initial, packed bit string. - If not specified, the bit string is empty. - implicit : integer - The IMPLICIT tag to use for the encoded object. - It overrides the universal tag for BIT STRING (3). - explicit : integer - The EXPLICIT tag to use for the encoded object. - """ - DerObject.__init__(self, 0x03, b'', implicit, False, explicit) - - # The bitstring value (packed) - if isinstance(value, DerObject): - self.value = value.encode() - else: - self.value = value - - def encode(self): - """Return the DER BIT STRING, fully encoded as a - byte string.""" - - # Add padding count byte - self.payload = b'\x00' + self.value - return DerObject.encode(self) - - def decode(self, der_encoded, strict=False): - """Decode a complete DER BIT STRING, and re-initializes this - object with it. - - Args: - der_encoded (byte string): a complete DER BIT STRING. - strict (boolean): - Whether decoding must check for strict DER compliancy. - - Raises: - ValueError: in case of parsing errors. - """ - - return DerObject.decode(self, der_encoded, strict) - - def _decodeFromStream(self, s, strict): - """Decode a complete DER BIT STRING DER from a file.""" - - # Fill-up self.payload - DerObject._decodeFromStream(self, s, strict) - - if self.payload and bord(self.payload[0]) != 0: - raise ValueError("Not a valid BIT STRING") - - # Fill-up self.value - self.value = b'' - # Remove padding count byte - if self.payload: - self.value = self.payload[1:] - - -class DerSetOf(DerObject): - """Class to model a DER SET OF. - - An example of encoding is: - - >>> from Cryptodome.Util.asn1 import DerBitString - >>> from binascii import hexlify, unhexlify - >>> so_der = DerSetOf([4,5]) - >>> so_der.add(6) - >>> print hexlify(so_der.encode()) - - which will show ``3109020104020105020106``, the DER encoding - of a SET OF with items 4,5, and 6. - - For decoding: - - >>> s = unhexlify(b'3109020104020105020106') - >>> try: - >>> so_der = DerSetOf() - >>> so_der.decode(s) - >>> print [x for x in so_der] - >>> except ValueError: - >>> print "Not a valid DER SET OF" - - the output will be ``[4, 5, 6]``. - """ - - def __init__(self, startSet=None, implicit=None): - """Initialize the DER object as a SET OF. - - :Parameters: - startSet : container - The initial set of integers or DER encoded objects. - implicit : integer - The IMPLICIT tag to use for the encoded object. - It overrides the universal tag for SET OF (17). - """ - DerObject.__init__(self, 0x11, b'', implicit, True) - self._seq = [] - - # All elements must be of the same type (and therefore have the - # same leading octet) - self._elemOctet = None - - if startSet: - for e in startSet: - self.add(e) - - def __getitem__(self, n): - return self._seq[n] - - def __iter__(self): - return iter(self._seq) - - def __len__(self): - return len(self._seq) - - def add(self, elem): - """Add an element to the set. - - Args: - elem (byte string or integer): - An element of the same type of objects already in the set. - It can be an integer or a DER encoded object. 
- """ - - if _is_number(elem): - eo = 0x02 - elif isinstance(elem, DerObject): - eo = self._tag_octet - else: - eo = bord(elem[0]) - - if self._elemOctet != eo: - if self._elemOctet is not None: - raise ValueError("New element does not belong to the set") - self._elemOctet = eo - - if elem not in self._seq: - self._seq.append(elem) - - def decode(self, der_encoded, strict=False): - """Decode a complete SET OF DER element, and re-initializes this - object with it. - - DER INTEGERs are decoded into Python integers. Any other DER - element is left undecoded; its validity is not checked. - - Args: - der_encoded (byte string): a complete DER BIT SET OF. - strict (boolean): - Whether decoding must check for strict DER compliancy. - - Raises: - ValueError: in case of parsing errors. - """ - - return DerObject.decode(self, der_encoded, strict) - - def _decodeFromStream(self, s, strict): - """Decode a complete DER SET OF from a file.""" - - self._seq = [] - - # Fill up self.payload - DerObject._decodeFromStream(self, s, strict) - - # Add one item at a time to self.seq, by scanning self.payload - p = BytesIO_EOF(self.payload) - setIdOctet = -1 - while p.remaining_data() > 0: - p.set_bookmark() - - der = DerObject() - der._decodeFromStream(p, strict) - - # Verify that all members are of the same type - if setIdOctet < 0: - setIdOctet = der._tag_octet - else: - if setIdOctet != der._tag_octet: - raise ValueError("Not all elements are of the same DER type") - - # Parse INTEGERs differently - if setIdOctet != 0x02: - self._seq.append(p.data_since_bookmark()) - else: - derInt = DerInteger() - derInt.decode(p.data_since_bookmark(), strict) - self._seq.append(derInt.value) - # end - - def encode(self): - """Return this SET OF DER element, fully encoded as a - binary string. - """ - - # Elements in the set must be ordered in lexicographic order - ordered = [] - for item in self._seq: - if _is_number(item): - bys = DerInteger(item).encode() - elif isinstance(item, DerObject): - bys = item.encode() - else: - bys = item - ordered.append(bys) - ordered.sort() - self.payload = b''.join(ordered) - return DerObject.encode(self) diff --git a/venv/Lib/site-packages/Cryptodome/Util/asn1.pyi b/venv/Lib/site-packages/Cryptodome/Util/asn1.pyi deleted file mode 100644 index ee4891c..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/asn1.pyi +++ /dev/null @@ -1,80 +0,0 @@ -from typing import Optional, Sequence, Union, Set, Iterable - -__all__ = ['DerObject', 'DerInteger', 'DerOctetString', 'DerNull', - 'DerSequence', 'DerObjectId', 'DerBitString', 'DerSetOf'] - -# TODO: Make the encoded DerObjects their own type, so that DerSequence and -# DerSetOf can check their contents better - -class BytesIO_EOF: - def __init__(self, initial_bytes: bytes) -> None: ... - def set_bookmark(self) -> None: ... - def data_since_bookmark(self) -> bytes: ... - def remaining_data(self) -> int: ... - def read(self, length: int) -> bytes: ... - def read_byte(self) -> bytes: ... - -class DerObject: - payload: bytes - def __init__(self, asn1Id: Optional[int]=None, payload: Optional[bytes]=..., implicit: Optional[int]=None, - constructed: Optional[bool]=False, explicit: Optional[int]=None) -> None: ... - def encode(self) -> bytes: ... - def decode(self, der_encoded: bytes, strict: bool=...) -> DerObject: ... - -class DerInteger(DerObject): - value: int - def __init__(self, value: Optional[int]= 0, implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... - def encode(self) -> bytes: ... 
- def decode(self, der_encoded: bytes, strict: bool=...) -> DerInteger: ... - -class DerBoolean(DerObject): - value: bool - def __init__(self, value: bool=..., implicit: Optional[Union[int, bytes]]=..., explicit: Optional[Union[int, bytes]]=...) -> None: ... - def encode(self) -> bytes: ... - def decode(self, der_encoded: bytes, strict: bool=...) -> DerBoolean: ... - -class DerSequence(DerObject): - def __init__(self, startSeq: Optional[Sequence[Union[int, DerInteger, DerObject]]]=None, implicit: Optional[int]=None) -> None: ... - def __delitem__(self, n: int) -> None: ... - def __getitem__(self, n: int) -> None: ... - def __setitem__(self, key: int, value: DerObject) -> None: ... - def __setslice__(self, i: int, j: int, sequence: Sequence) -> None: ... - def __delslice__(self, i: int, j: int) -> None: ... - def __getslice__(self, i: int, j: int) -> DerSequence: ... - def __len__(self) -> int: ... - def __iadd__(self, item: DerObject) -> DerSequence: ... - def append(self, item: DerObject) -> DerSequence: ... - def hasInts(self, only_non_negative: Optional[bool]=True) -> int: ... - def hasOnlyInts(self, only_non_negative: Optional[bool]=True) -> bool: ... - def encode(self) -> bytes: ... - def decode(self, der_encoded: bytes, strict: bool=..., nr_elements: Optional[int]=None, only_ints_expected: Optional[bool]=False) -> DerSequence: ... - -class DerOctetString(DerObject): - payload: bytes - def __init__(self, value: Optional[bytes]=..., implicit: Optional[int]=None) -> None: ... - -class DerNull(DerObject): - def __init__(self) -> None: ... - -class DerObjectId(DerObject): - value: str - def __init__(self, value: Optional[str]=..., implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... - def encode(self) -> bytes: ... - def decode(self, der_encoded: bytes, strict: bool=...) -> DerObjectId: ... - -class DerBitString(DerObject): - value: bytes - def __init__(self, value: Optional[bytes]=..., implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... - def encode(self) -> bytes: ... - def decode(self, der_encoded: bytes, strict: bool=...) -> DerBitString: ... - -DerSetElement = Union[bytes, int] - -class DerSetOf(DerObject): - def __init__(self, startSet: Optional[Set[DerSetElement]]=None, implicit: Optional[int]=None) -> None: ... - def __getitem__(self, n: int) -> DerSetElement: ... - def __iter__(self) -> Iterable: ... - def __len__(self) -> int: ... - def add(self, elem: DerSetElement) -> None: ... - def decode(self, der_encoded: bytes, strict: bool=...) -> DerObject: ... - def encode(self) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/Util/number.py b/venv/Lib/site-packages/Cryptodome/Util/number.py deleted file mode 100644 index 701c21c..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/number.py +++ /dev/null @@ -1,1525 +0,0 @@ -# -# number.py : Number-theoretic functions -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew M. Kuchling, Barry A. Warsaw, and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -# - -import math -import sys -import struct -from Cryptodome import Random -from Cryptodome.Util.py3compat import iter_range - -# Backward compatibility -_fastmath = None - - -def ceil_div(n, d): - """Return ceil(n/d), that is, the smallest integer r such that r*d >= n""" - - if d == 0: - raise ZeroDivisionError() - if (n < 0) or (d < 0): - raise ValueError("Non positive values") - r, q = divmod(n, d) - if (n != 0) and (q != 0): - r += 1 - return r - - -def size (N): - """Returns the size of the number N in bits.""" - - if N < 0: - raise ValueError("Size in bits only available for non-negative numbers") - return N.bit_length() - - -def getRandomInteger(N, randfunc=None): - """Return a random number at most N bits long. - - If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. - - .. deprecated:: 3.0 - This function is for internal use only and may be renamed or removed in - the future. Use :func:`Cryptodome.Random.random.getrandbits` instead. - """ - - if randfunc is None: - randfunc = Random.get_random_bytes - - S = randfunc(N>>3) - odd_bits = N % 8 - if odd_bits != 0: - rand_bits = ord(randfunc(1)) >> (8-odd_bits) - S = struct.pack('B', rand_bits) + S - value = bytes_to_long(S) - return value - -def getRandomRange(a, b, randfunc=None): - """Return a random number *n* so that *a <= n < b*. - - If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. - - .. deprecated:: 3.0 - This function is for internal use only and may be renamed or removed in - the future. Use :func:`Cryptodome.Random.random.randrange` instead. - """ - - range_ = b - a - 1 - bits = size(range_) - value = getRandomInteger(bits, randfunc) - while value > range_: - value = getRandomInteger(bits, randfunc) - return a + value - -def getRandomNBitInteger(N, randfunc=None): - """Return a random number with exactly N-bits, - i.e. a random number between 2**(N-1) and (2**N)-1. - - If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. - - .. deprecated:: 3.0 - This function is for internal use only and may be renamed or removed in - the future. - """ - - value = getRandomInteger (N-1, randfunc) - value |= 2 ** (N-1) # Ensure high bit is set - assert size(value) >= N - return value - - -if sys.version_info[:2] >= (3, 5): - - GCD = math.gcd - -else: - - def GCD(x,y): - """Greatest Common Denominator of :data:`x` and :data:`y`. 
- """ - - x = abs(x) ; y = abs(y) - while x > 0: - x, y = y % x, x - return y - - -if sys.version_info[:2] >= (3, 8): - - def inverse(u, v): - """The inverse of :data:`u` *mod* :data:`v`.""" - - if v == 0: - raise ZeroDivisionError("Modulus cannot be zero") - if v < 0: - raise ValueError("Modulus cannot be negative") - - return pow(u, -1, v) - -else: - - def inverse(u, v): - """The inverse of :data:`u` *mod* :data:`v`.""" - - if v == 0: - raise ZeroDivisionError("Modulus cannot be zero") - if v < 0: - raise ValueError("Modulus cannot be negative") - - u3, v3 = u, v - u1, v1 = 1, 0 - while v3 > 0: - q = u3 // v3 - u1, v1 = v1, u1 - v1*q - u3, v3 = v3, u3 - v3*q - if u3 != 1: - raise ValueError("No inverse value can be computed") - while u1<0: - u1 = u1 + v - return u1 - -# Given a number of bits to generate and a random generation function, -# find a prime number of the appropriate size. - -def getPrime(N, randfunc=None): - """Return a random N-bit prime number. - - N must be an integer larger than 1. - If randfunc is omitted, then :meth:`Random.get_random_bytes` is used. - """ - if randfunc is None: - randfunc = Random.get_random_bytes - - if N < 2: - raise ValueError("N must be larger than 1") - - while True: - number = getRandomNBitInteger(N, randfunc) | 1 - if isPrime(number, randfunc=randfunc): - break - return number - - -def _rabinMillerTest(n, rounds, randfunc=None): - """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int - Tests if n is prime. - Returns 0 when n is definitely composite. - Returns 1 when n is probably prime. - Returns 2 when n is definitely prime. - - If randfunc is omitted, then Random.new().read is used. - - This function is for internal use only and may be renamed or removed in - the future. - """ - # check special cases (n==2, n even, n < 2) - if n < 3 or (n & 1) == 0: - return n == 2 - # n might be very large so it might be beneficial to precalculate n-1 - n_1 = n - 1 - # determine m and b so that 2**b * m = n - 1 and b maximal - b = 0 - m = n_1 - while (m & 1) == 0: - b += 1 - m >>= 1 - - tested = [] - # we need to do at most n-2 rounds. - for i in iter_range (min (rounds, n-2)): - # randomly choose a < n and make sure it hasn't been tested yet - a = getRandomRange (2, n, randfunc) - while a in tested: - a = getRandomRange (2, n, randfunc) - tested.append (a) - # do the rabin-miller test - z = pow (a, m, n) # (a**m) % n - if z == 1 or z == n_1: - continue - composite = 1 - for r in iter_range(b): - z = (z * z) % n - if z == 1: - return 0 - elif z == n_1: - composite = 0 - break - if composite: - return 0 - return 1 - -def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None): - r""" - Return a random strong *N*-bit prime number. - In this context, *p* is a strong prime if *p-1* and *p+1* have at - least one large prime factor. - - Args: - N (integer): the exact length of the strong prime. - It must be a multiple of 128 and > 512. - e (integer): if provided, the returned prime (minus 1) - will be coprime to *e* and thus suitable for RSA where - *e* is the public exponent. - false_positive_prob (float): - The statistical probability for the result not to be actually a - prime. It defaults to 10\ :sup:`-6`. - Note that the real probability of a false-positive is far less. This is - just the mathematically provable limit. - randfunc (callable): - A function that takes a parameter *N* and that returns - a random byte string of such length. - If omitted, :func:`Cryptodome.Random.get_random_bytes` is used. - Return: - The new strong prime. - - .. 
deprecated:: 3.0 - This function is for internal use only and may be renamed or removed in - the future. - """ - - # This function was implemented following the - # instructions found in the paper: - # "FAST GENERATION OF RANDOM, STRONG RSA PRIMES" - # by Robert D. Silverman - # RSA Laboratories - # May 17, 1997 - # which by the time of writing could be freely downloaded here: - # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf - - if randfunc is None: - randfunc = Random.get_random_bytes - - # Use the accelerator if available - if _fastmath is not None: - return _fastmath.getStrongPrime(long(N), long(e), false_positive_prob, - randfunc) - - if (N < 512) or ((N % 128) != 0): - raise ValueError ("bits must be multiple of 128 and > 512") - - rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) - - # calculate range for X - # lower_bound = sqrt(2) * 2^{511 + 128*x} - # upper_bound = 2^{512 + 128*x} - 1 - x = (N - 512) >> 7 - # We need to approximate the sqrt(2) in the lower_bound by an integer - # expression because floating point math overflows with these numbers - lower_bound = (14142135623730950489 * (2 ** (511 + 128*x))) // 10000000000000000000 - upper_bound = (1 << (512 + 128*x)) - 1 - # Randomly choose X in calculated range - X = getRandomRange (lower_bound, upper_bound, randfunc) - - # generate p1 and p2 - p = [0, 0] - for i in (0, 1): - # randomly choose 101-bit y - y = getRandomNBitInteger (101, randfunc) - # initialize the field for sieving - field = [0] * 5 * len (sieve_base) - # sieve the field - for prime in sieve_base: - offset = y % prime - for j in iter_range((prime - offset) % prime, len (field), prime): - field[j] = 1 - - # look for suitable p[i] starting at y - result = 0 - for j in range(len(field)): - composite = field[j] - # look for next canidate - if composite: - continue - tmp = y + j - result = _rabinMillerTest (tmp, rabin_miller_rounds) - if result > 0: - p[i] = tmp - break - if result == 0: - raise RuntimeError ("Couln't find prime in field. " - "Developer: Increase field_size") - - # Calculate R - # R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1 - tmp1 = inverse (p[1], p[0]) * p[1] # (p2^-1 mod p1)*p2 - tmp2 = inverse (p[0], p[1]) * p[0] # (p1^-1 mod p2)*p1 - R = tmp1 - tmp2 # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1 - - # search for final prime number starting by Y0 - # Y0 = X + (R - X mod p1p2) - increment = p[0] * p[1] - X = X + (R - (X % increment)) - while 1: - is_possible_prime = 1 - # first check candidate against sieve_base - for prime in sieve_base: - if (X % prime) == 0: - is_possible_prime = 0 - break - # if e is given make sure that e and X-1 are coprime - # this is not necessarily a strong prime criterion but useful when - # creating them for RSA where the p-1 and q-1 should be coprime to - # the public exponent e - if e and is_possible_prime: - if e & 1: - if GCD(e, X-1) != 1: - is_possible_prime = 0 - else: - if GCD(e, (X-1) // 2) != 1: - is_possible_prime = 0 - - # do some Rabin-Miller-Tests - if is_possible_prime: - result = _rabinMillerTest (X, rabin_miller_rounds) - if result > 0: - break - X += increment - # abort when X has more bits than requested - # TODO: maybe we shouldn't abort but rather start over. - if X >= 1 << N: - raise RuntimeError ("Couln't find prime in field. " - "Developer: Increase field_size") - return X - -def isPrime(N, false_positive_prob=1e-6, randfunc=None): - r"""Test if a number *N* is a prime. 
- - Args: - false_positive_prob (float): - The statistical probability for the result not to be actually a - prime. It defaults to 10\ :sup:`-6`. - Note that the real probability of a false-positive is far less. - This is just the mathematically provable limit. - randfunc (callable): - A function that takes a parameter *N* and that returns - a random byte string of such length. - If omitted, :func:`Cryptodome.Random.get_random_bytes` is used. - - Return: - `True` if the input is indeed prime. - """ - - if randfunc is None: - randfunc = Random.get_random_bytes - - if _fastmath is not None: - return _fastmath.isPrime(long(N), false_positive_prob, randfunc) - - if N < 3 or N & 1 == 0: - return N == 2 - for p in sieve_base: - if N == p: - return True - if N % p == 0: - return False - - rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) - return bool(_rabinMillerTest(N, rounds, randfunc)) - - -# Improved conversion functions contributed by Barry Warsaw, after -# careful benchmarking - -import struct - -def long_to_bytes(n, blocksize=0): - """Convert a positive integer to a byte string using big endian encoding. - - If :data:`blocksize` is absent or zero, the byte string will - be of minimal length. - - Otherwise, the length of the byte string is guaranteed to be a multiple - of :data:`blocksize`. If necessary, zeroes (``\\x00``) are added at the left. - - .. note:: - In Python 3, if you are sure that :data:`n` can fit into - :data:`blocksize` bytes, you can simply use the native method instead:: - - >>> n.to_bytes(blocksize, 'big') - - For instance:: - - >>> n = 80 - >>> n.to_bytes(2, 'big') - b'\\x00P' - - However, and unlike this ``long_to_bytes()`` function, - an ``OverflowError`` exception is raised if :data:`n` does not fit. - """ - - if n < 0 or blocksize < 0: - raise ValueError("Values must be non-negative") - - result = [] - pack = struct.pack - - # Fill the first block independently from the value of n - bsr = blocksize - while bsr >= 8: - result.insert(0, pack('>Q', n & 0xFFFFFFFFFFFFFFFF)) - n = n >> 64 - bsr -= 8 - - while bsr >= 4: - result.insert(0, pack('>I', n & 0xFFFFFFFF)) - n = n >> 32 - bsr -= 4 - - while bsr > 0: - result.insert(0, pack('>B', n & 0xFF)) - n = n >> 8 - bsr -= 1 - - if n == 0: - if len(result) == 0: - bresult = b'\x00' - else: - bresult = b''.join(result) - else: - # The encoded number exceeds the block size - while n > 0: - result.insert(0, pack('>Q', n & 0xFFFFFFFFFFFFFFFF)) - n = n >> 64 - result[0] = result[0].lstrip(b'\x00') - bresult = b''.join(result) - # bresult has minimum length here - if blocksize > 0: - target_len = ((len(bresult) - 1) // blocksize + 1) * blocksize - bresult = b'\x00' * (target_len - len(bresult)) + bresult - - return bresult - - -def bytes_to_long(s): - """Convert a byte string to a long integer (big endian). - - In Python 3.2+, use the native method instead:: - - >>> int.from_bytes(s, 'big') - - For instance:: - - >>> int.from_bytes(b'\x00P', 'big') - 80 - - This is (essentially) the inverse of :func:`long_to_bytes`. 
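# A minimal round-trip sketch (assuming pycryptodome's Cryptodome.Util.number is
# installed), mirroring the two docstrings above: long_to_bytes() is the big-endian
# encoder, bytes_to_long() is its inverse, and blocksize only controls left zero-padding.
from Cryptodome.Util.number import long_to_bytes, bytes_to_long

assert long_to_bytes(80) == b'P'             # minimal length when blocksize is omitted
assert long_to_bytes(80, 2) == b'\x00P'      # padded up to a multiple of blocksize
assert bytes_to_long(b'\x00P') == 80         # leading zeroes do not change the value
assert bytes_to_long(long_to_bytes(2**64 + 1)) == 2**64 + 1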
- """ - acc = 0 - - unpack = struct.unpack - - # Up to Python 2.7.4, struct.unpack can't work with bytearrays nor - # memoryviews - if sys.version_info[0:3] < (2, 7, 4): - if isinstance(s, bytearray): - s = bytes(s) - elif isinstance(s, memoryview): - s = s.tobytes() - - length = len(s) - if length % 4: - extra = (4 - length % 4) - s = b'\x00' * extra + s - length = length + extra - for i in range(0, length, 4): - acc = (acc << 32) + unpack('>I', s[i:i+4])[0] - return acc - - -# For backwards compatibility... -import warnings -def long2str(n, blocksize=0): - warnings.warn("long2str() has been replaced by long_to_bytes()") - return long_to_bytes(n, blocksize) -def str2long(s): - warnings.warn("str2long() has been replaced by bytes_to_long()") - return bytes_to_long(s) - - -# The first 10000 primes used for checking primality. -# This should be enough to eliminate most of the odd -# numbers before needing to do a Rabin-Miller test at all. -sieve_base = ( - 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, - 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, - 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, - 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, - 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, - 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, - 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, - 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, - 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, - 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, - 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, - 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, - 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, - 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, - 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, - 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, - 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, - 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, - 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, - 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, - 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, - 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, - 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, - 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, - 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, - 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, - 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, - 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, - 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, - 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, - 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, - 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, - 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, - 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, - 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, - 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, - 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, - 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, - 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, - 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, - 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, - 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, - 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, - 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, - 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 
3181, - 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, - 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, - 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, - 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, - 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, - 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, - 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, - 3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, - 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, - 3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, - 4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, - 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, - 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, - 4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, - 4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, - 4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, - 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, - 4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, - 4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, - 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, - 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, - 4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, - 5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, - 5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, - 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, - 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, - 5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, - 5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, - 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, - 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, - 5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, - 5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, - 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, - 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, - 6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, - 6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, - 6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, - 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, - 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, - 6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, - 6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, - 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, - 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, - 6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, - 6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, - 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, - 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, - 7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, - 7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, - 7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, - 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, - 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, - 7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, - 7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, - 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, - 7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, - 8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111, - 8117, 8123, 
8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, - 8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, - 8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, - 8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, - 8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, - 8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, - 8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, - 8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, - 8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, - 8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, - 9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, - 9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, - 9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, - 9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, - 9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439, - 9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533, - 9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, - 9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, - 9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, - 9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, - 9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, - 10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, - 10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, - 10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, - 10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, - 10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, - 10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, - 10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, - 10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, - 10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, - 10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, - 10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, - 11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, - 11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, - 11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, - 11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, - 11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, - 11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, - 11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, - 11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, - 11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, - 11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, - 12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, - 12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, - 12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, - 12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, - 12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, - 12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553, - 12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641, - 12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739, - 12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829, - 12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923, - 12941, 12953, 12959, 12967, 
12973, 12979, 12983, 13001, 13003, 13007, - 13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109, - 13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187, - 13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309, - 13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411, - 13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499, - 13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619, - 13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697, - 13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781, - 13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879, - 13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967, - 13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081, - 14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197, - 14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323, - 14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419, - 14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519, - 14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593, - 14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699, - 14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767, - 14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851, - 14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947, - 14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073, - 15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149, - 15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259, - 15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319, - 15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401, - 15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497, - 15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607, - 15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679, - 15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773, - 15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881, - 15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971, - 15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069, - 16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183, - 16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267, - 16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381, - 16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481, - 16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603, - 16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691, - 16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811, - 16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903, - 16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993, - 17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093, - 17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191, - 17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317, - 17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389, - 17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477, - 17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573, - 17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669, - 17681, 17683, 17707, 17713, 17729, 17737, 17747, 
17749, 17761, 17783, - 17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891, - 17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971, - 17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059, - 18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143, - 18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233, - 18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313, - 18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427, - 18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517, - 18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637, - 18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749, - 18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899, - 18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009, - 19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121, - 19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219, - 19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319, - 19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423, - 19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477, - 19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571, - 19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699, - 19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793, - 19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891, - 19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991, - 19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071, - 20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149, - 20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261, - 20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357, - 20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443, - 20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551, - 20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693, - 20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771, - 20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897, - 20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983, - 21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067, - 21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169, - 21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277, - 21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383, - 21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491, - 21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563, - 21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647, - 21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751, - 21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841, - 21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943, - 21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039, - 22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123, - 22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229, - 22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307, - 22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441, - 22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543, - 22549, 22567, 22571, 22573, 22613, 22619, 22621, 22637, 22639, 22643, - 
22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727, - 22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817, - 22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943, - 22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029, - 23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099, - 23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203, - 23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321, - 23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447, - 23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561, - 23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629, - 23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743, - 23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827, - 23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909, - 23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007, - 24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091, - 24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169, - 24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281, - 24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413, - 24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517, - 24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659, - 24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767, - 24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877, - 24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977, - 24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097, - 25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183, - 25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303, - 25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391, - 25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471, - 25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603, - 25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693, - 25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799, - 25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913, - 25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999, - 26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111, - 26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203, - 26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297, - 26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399, - 26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497, - 26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633, - 26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711, - 26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801, - 26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891, - 26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987, - 26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077, - 27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211, - 27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329, - 27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449, - 27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551, - 27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 27691, - 27697, 27701, 27733, 
27737, 27739, 27743, 27749, 27751, 27763, 27767, - 27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827, - 27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947, - 27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051, - 28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151, - 28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283, - 28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403, - 28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499, - 28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579, - 28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649, - 28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729, - 28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837, - 28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933, - 28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059, - 29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167, - 29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251, - 29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363, - 29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443, - 29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573, - 29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671, - 29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819, - 29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921, - 29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059, - 30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137, - 30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241, - 30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341, - 30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469, - 30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559, - 30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689, - 30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803, - 30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871, - 30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983, - 31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091, - 31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183, - 31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259, - 31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357, - 31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511, - 31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601, - 31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721, - 31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817, - 31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973, - 31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063, - 32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159, - 32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257, - 32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353, - 32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423, - 32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531, - 32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609, - 32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717, - 32719, 32749, 32771, 32779, 32783, 32789, 
32797, 32801, 32803, 32831, - 32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939, - 32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023, - 33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113, - 33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211, - 33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343, - 33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427, - 33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533, - 33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613, - 33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713, - 33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797, - 33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893, - 33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031, - 34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157, - 34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261, - 34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337, - 34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457, - 34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537, - 34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649, - 34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739, - 34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847, - 34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961, - 34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083, - 35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159, - 35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291, - 35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401, - 35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509, - 35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593, - 35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759, - 35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863, - 35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969, - 35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061, - 36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161, - 36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277, - 36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383, - 36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497, - 36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587, - 36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691, - 36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781, - 36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877, - 36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947, - 36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057, - 37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189, - 37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309, - 37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397, - 37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507, - 37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573, - 37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663, - 37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813, - 37831, 37847, 37853, 37861, 37871, 37879, 37889, 37897, 37907, 
37951, - 37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047, - 38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183, - 38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281, - 38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371, - 38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543, - 38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639, - 38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713, - 38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821, - 38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921, - 38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041, - 39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133, - 39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227, - 39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323, - 39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419, - 39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541, - 39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667, - 39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769, - 39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857, - 39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971, - 39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087, - 40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169, - 40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283, - 40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433, - 40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531, - 40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639, - 40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787, - 40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867, - 40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973, - 40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081, - 41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183, - 41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257, - 41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387, - 41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507, - 41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603, - 41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669, - 41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801, - 41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897, - 41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981, - 41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073, - 42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187, - 42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283, - 42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379, - 42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457, - 42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557, - 42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677, - 42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743, - 42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841, - 42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953, - 42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 43049, 43051, - 43063, 43067, 
43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189, - 43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319, - 43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451, - 43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579, - 43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661, - 43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783, - 43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933, - 43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017, - 44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101, - 44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201, - 44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279, - 44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449, - 44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537, - 44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641, - 44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741, - 44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843, - 44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953, - 44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077, - 45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181, - 45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307, - 45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403, - 45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533, - 45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641, - 45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757, - 45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853, - 45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979, - 45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099, - 46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199, - 46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309, - 46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447, - 46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549, - 46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643, - 46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747, - 46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831, - 46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993, - 46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119, - 47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221, - 47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317, - 47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419, - 47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527, - 47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629, - 47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717, - 47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819, - 47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939, - 47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049, - 48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179, - 48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299, - 48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409, - 48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497, - 48523, 48527, 48533, 48539, 48541, 
48563, 48571, 48589, 48593, 48611, - 48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733, - 48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817, - 48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907, - 48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033, - 49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123, - 49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211, - 49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339, - 49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433, - 49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537, - 49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663, - 49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757, - 49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853, - 49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957, - 49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069, - 50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147, - 50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273, - 50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377, - 50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503, - 50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593, - 50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753, - 50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867, - 50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971, - 50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109, - 51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203, - 51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329, - 51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421, - 51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487, - 51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593, - 51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683, - 51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803, - 51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899, - 51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009, - 52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127, - 52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237, - 52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361, - 52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501, - 52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579, - 52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709, - 52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813, - 52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919, - 52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017, - 53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117, - 53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231, - 53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327, - 53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441, - 53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593, - 53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657, - 53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783, - 53791, 53813, 53819, 53831, 53849, 53857, 53861, 53881, 
53887, 53891, - 53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993, - 54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121, - 54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269, - 54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367, - 54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443, - 54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541, - 54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629, - 54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751, - 54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877, - 54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983, - 55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103, - 55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217, - 55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337, - 55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457, - 55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603, - 55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673, - 55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793, - 55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849, - 55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949, - 55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087, - 56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179, - 56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299, - 56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431, - 56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503, - 56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599, - 56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711, - 56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809, - 56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909, - 56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989, - 56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097, - 57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191, - 57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283, - 57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389, - 57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529, - 57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653, - 57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737, - 57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839, - 57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947, - 57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061, - 58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169, - 58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237, - 58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379, - 58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453, - 58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601, - 58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711, - 58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889, - 58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967, - 58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053, - 59063, 59069, 59077, 59083, 59093, 59107, 59113, 59119, 59123, 59141, - 59149, 
59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233, - 59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359, - 59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443, - 59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557, - 59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659, - 59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747, - 59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887, - 59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029, - 60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127, - 60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251, - 60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353, - 60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497, - 60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623, - 60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719, - 60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811, - 60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919, - 60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043, - 61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169, - 61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333, - 61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441, - 61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543, - 61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631, - 61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717, - 61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861, - 61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981, - 61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071, - 62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189, - 62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303, - 62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459, - 62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549, - 62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653, - 62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773, - 62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903, - 62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989, - 63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127, - 63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281, - 63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367, - 63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463, - 63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559, - 63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647, - 63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719, - 63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809, - 63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929, - 63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067, - 64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189, - 64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319, - 64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453, - 64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601, - 64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693, - 64709, 64717, 64747, 64763, 
64781, 64783, 64793, 64811, 64817, 64849, - 64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937, - 64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063, - 65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147, - 65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267, - 65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381, - 65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497, - 65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581, - 65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677, - 65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761, - 65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867, - 65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983, - 65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103, - 66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239, - 66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377, - 66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499, - 66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593, - 66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721, - 66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841, - 66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943, - 66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049, - 67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153, - 67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231, - 67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369, - 67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453, - 67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547, - 67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651, - 67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763, - 67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867, - 67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961, - 67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087, - 68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213, - 68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371, - 68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489, - 68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597, - 68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713, - 68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819, - 68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917, - 68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061, - 69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191, - 69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263, - 69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403, - 69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493, - 69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691, - 69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821, - 69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929, - 69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039, - 70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139, - 70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223, - 70229, 70237, 70241, 70249, 70271, 70289, 70297, 
70309, 70313, 70321, - 70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451, - 70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549, - 70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657, - 70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793, - 70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901, - 70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981, - 70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089, - 71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209, - 71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327, - 71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389, - 71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473, - 71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593, - 71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713, - 71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843, - 71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933, - 71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031, - 72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109, - 72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229, - 72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341, - 72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481, - 72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617, - 72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701, - 72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823, - 72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923, - 72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013, - 73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127, - 73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303, - 73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417, - 73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529, - 73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613, - 73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721, - 73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859, - 73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973, - 73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099, - 74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197, - 74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297, - 74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411, - 74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521, - 74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611, - 74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731, - 74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843, - 74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929, - 74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079, - 75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209, - 75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307, - 75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401, - 75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533, - 75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619, - 75629, 75641, 75653, 75659, 75679, 75683, 75689, 75703, 75707, 75709, - 
75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821, - 75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979, - 75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081, - 76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207, - 76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303, - 76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441, - 76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541, - 76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667, - 76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781, - 76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907, - 76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023, - 77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153, - 77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261, - 77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351, - 77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477, - 77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551, - 77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641, - 77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723, - 77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839, - 77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977, - 77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101, - 78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193, - 78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311, - 78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479, - 78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571, - 78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697, - 78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803, - 78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901, - 78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063, - 79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181, - 79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283, - 79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393, - 79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531, - 79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621, - 79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699, - 79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841, - 79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939, - 79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051, - 80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173, - 80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263, - 80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369, - 80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513, - 80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629, - 80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713, - 80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809, - 80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923, - 80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023, - 81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101, - 81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 81223, - 81233, 81239, 81281, 
81283, 81293, 81299, 81307, 81331, 81343, 81349, - 81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463, - 81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569, - 81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689, - 81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799, - 81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929, - 81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009, - 82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139, - 82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219, - 82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339, - 82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469, - 82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559, - 82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651, - 82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781, - 82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891, - 82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047, - 83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177, - 83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267, - 83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399, - 83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471, - 83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609, - 83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719, - 83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869, - 83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987, - 84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127, - 84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221, - 84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319, - 84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437, - 84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521, - 84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659, - 84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761, - 84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913, - 84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027, - 85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121, - 85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237, - 85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363, - 85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469, - 85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601, - 85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691, - 85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829, - 85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933, - 85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111, - 86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197, - 86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291, - 86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371, - 86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477, - 86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587, - 86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743, - 86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861, - 86869, 86923, 86927, 86929, 86939, 86951, 
86959, 86969, 86981, 86993, - 87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119, - 87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223, - 87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323, - 87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473, - 87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553, - 87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641, - 87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721, - 87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853, - 87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961, - 87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079, - 88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259, - 88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411, - 88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547, - 88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667, - 88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801, - 88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873, - 88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003, - 89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087, - 89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209, - 89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317, - 89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431, - 89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527, - 89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627, - 89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759, - 89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849, - 89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963, - 89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031, - 90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149, - 90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239, - 90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373, - 90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481, - 90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617, - 90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709, - 90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847, - 90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977, - 90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121, - 91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193, - 91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303, - 91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411, - 91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529, - 91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673, - 91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807, - 91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939, - 91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033, - 92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173, - 92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243, - 92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363, - 92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431, - 92459, 92461, 92467, 92479, 92489, 92503, 92507, 92551, 92557, 
92567, - 92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669, - 92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753, - 92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849, - 92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951, - 92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083, - 93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179, - 93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281, - 93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383, - 93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497, - 93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607, - 93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787, - 93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911, - 93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997, - 94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111, - 94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253, - 94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349, - 94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447, - 94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559, - 94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687, - 94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793, - 94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903, - 94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021, - 95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111, - 95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231, - 95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317, - 95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441, - 95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539, - 95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633, - 95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773, - 95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873, - 95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971, - 95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097, - 96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223, - 96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331, - 96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461, - 96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581, - 96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731, - 96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799, - 96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931, - 96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021, - 97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169, - 97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303, - 97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429, - 97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549, - 97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649, - 97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813, - 97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883, - 97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011, - 98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 98143, 98179, - 98207, 98213, 
98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317, - 98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411, - 98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507, - 98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639, - 98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737, - 98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873, - 98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947, - 98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053, - 99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139, - 99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259, - 99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397, - 99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529, - 99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643, - 99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733, - 99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839, - 99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971, - 99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109, -100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237, -100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361, -100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469, -100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549, -100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699, -100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823, -100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957, -100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089, -101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173, -101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281, -101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383, -101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501, -101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599, -101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719, -101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833, -101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929, -101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023, -102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107, -102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217, -102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317, -102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451, -102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551, -102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667, -102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811, -102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931, -102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079, -103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217, -103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387, -103391, 103393, 103399, 103409, 103421, 103423, 103451, 103457, 
103471, 103483, -103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591, -103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703, -103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867, -103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981, -103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059, -104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173, -104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297, -104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399, -104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, -104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, -104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729, -) diff --git a/venv/Lib/site-packages/Cryptodome/Util/number.pyi b/venv/Lib/site-packages/Cryptodome/Util/number.pyi deleted file mode 100644 index f8680bf..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/number.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import List, Optional, Callable - - -def ceil_div(n: int, d: int) -> int: ... -def size (N: int) -> int: ... -def getRandomInteger(N: int, randfunc: Optional[Callable]=None) -> int: ... -def getRandomRange(a: int, b: int, randfunc: Optional[Callable]=None) -> int: ... -def getRandomNBitInteger(N: int, randfunc: Optional[Callable]=None) -> int: ... -def GCD(x: int,y: int) -> int: ... -def inverse(u: int, v: int) -> int: ... -def getPrime(N: int, randfunc: Optional[Callable]=None) -> int: ... -def getStrongPrime(N: int, e: Optional[int]=0, false_positive_prob: Optional[float]=1e-6, randfunc: Optional[Callable]=None) -> int: ... -def isPrime(N: int, false_positive_prob: Optional[float]=1e-6, randfunc: Optional[Callable]=None) -> bool: ... -def long_to_bytes(n: int, blocksize: Optional[int]=0) -> bytes: ... -def bytes_to_long(s: bytes) -> int: ... -def long2str(n: int, blocksize: Optional[int]=0) -> bytes: ... -def str2long(s: bytes) -> int: ... - -sieve_base: List[int] diff --git a/venv/Lib/site-packages/Cryptodome/Util/py3compat.py b/venv/Lib/site-packages/Cryptodome/Util/py3compat.py deleted file mode 100644 index 3294b66..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/py3compat.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Util/py3compat.py : Compatibility code for handling Py3k / Python 2.x -# -# Written in 2010 by Thorsten Behrens -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Compatibility code for handling string/bytes changes from Python 2.x to Py3k - -In Python 2.x, strings (of type ''str'') contain binary data, including encoded -Unicode text (e.g. UTF-8). The separate type ''unicode'' holds Unicode text. -Unicode literals are specified via the u'...' prefix. Indexing or slicing -either type always produces a string of the same type as the original. -Data read from a file is always of '''str'' type. - -In Python 3.x, strings (type ''str'') may only contain Unicode text. The u'...' -prefix and the ''unicode'' type are now redundant. A new type (called -''bytes'') has to be used for binary data (including any particular -''encoding'' of a string). The b'...' prefix allows one to specify a binary -literal. Indexing or slicing a string produces another string. Slicing a byte -string produces another byte string, but the indexing operation produces an -integer. Data read from a file is of '''str'' type if the file was opened in -text mode, or of ''bytes'' type otherwise. - -Since PyCryptodome aims at supporting both Python 2.x and 3.x, the following helper -functions are used to keep the rest of the library as independent as possible -from the actual Python version. - -In general, the code should always deal with binary strings, and use integers -instead of 1-byte character strings. - -b(s) - Take a text string literal (with no prefix or with u'...' prefix) and - make a byte string. -bchr(c) - Take an integer and make a 1-character byte string. -bord(c) - Take the result of indexing on a byte string and make an integer. -tobytes(s) - Take a text string, a byte string, or a sequence of character taken from - a byte string and make a byte string. -""" - -import sys -import abc - - -if sys.version_info[0] == 2: - def b(s): - return s - def bchr(s): - return chr(s) - def bstr(s): - return str(s) - def bord(s): - return ord(s) - def tobytes(s, encoding="latin-1"): - if isinstance(s, unicode): - return s.encode(encoding) - elif isinstance(s, str): - return s - elif isinstance(s, bytearray): - return bytes(s) - elif isinstance(s, memoryview): - return s.tobytes() - else: - return ''.join(s) - def tostr(bs): - return bs - def byte_string(s): - return isinstance(s, str) - - # In Python 2, a memoryview does not support concatenation - def concat_buffers(a, b): - if isinstance(a, memoryview): - a = a.tobytes() - if isinstance(b, memoryview): - b = b.tobytes() - return a + b - - from StringIO import StringIO - BytesIO = StringIO - - from sys import maxint - - iter_range = xrange - - def is_native_int(x): - return isinstance(x, (int, long)) - - def is_string(x): - return isinstance(x, basestring) - - def is_bytes(x): - return isinstance(x, str) or \ - isinstance(x, bytearray) or \ - isinstance(x, memoryview) - - ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) - - FileNotFoundError = IOError - -else: - def b(s): - return s.encode("latin-1") # utf-8 would cause some side-effects we don't want - def bchr(s): - return bytes([s]) - def bstr(s): - if isinstance(s,str): - return bytes(s,"latin-1") - else: - return bytes(s) - def bord(s): - return s - def tobytes(s, encoding="latin-1"): - if isinstance(s, bytes): - return s - elif isinstance(s, bytearray): - return bytes(s) - elif isinstance(s,str): - return s.encode(encoding) - elif isinstance(s, memoryview): - return s.tobytes() - else: - return bytes([s]) - def tostr(bs): - return bs.decode("latin-1") - def byte_string(s): - return isinstance(s, 
bytes) - - def concat_buffers(a, b): - return a + b - - from io import BytesIO - from io import StringIO - from sys import maxsize as maxint - - iter_range = range - - def is_native_int(x): - return isinstance(x, int) - - def is_string(x): - return isinstance(x, str) - - def is_bytes(x): - return isinstance(x, bytes) or \ - isinstance(x, bytearray) or \ - isinstance(x, memoryview) - - from abc import ABC - - FileNotFoundError = FileNotFoundError - - -def _copy_bytes(start, end, seq): - """Return an immutable copy of a sequence (byte string, byte array, memoryview) - in a certain interval [start:seq]""" - - if isinstance(seq, memoryview): - return seq[start:end].tobytes() - elif isinstance(seq, bytearray): - return bytes(seq[start:end]) - else: - return seq[start:end] - -del sys -del abc diff --git a/venv/Lib/site-packages/Cryptodome/Util/py3compat.pyi b/venv/Lib/site-packages/Cryptodome/Util/py3compat.pyi deleted file mode 100644 index 74e04a2..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/py3compat.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Union, Any, Optional, IO - -Buffer = Union[bytes, bytearray, memoryview] - -import sys - -def b(s: str) -> bytes: ... -def bchr(s: int) -> bytes: ... -def bord(s: bytes) -> int: ... -def tobytes(s: Union[bytes, str]) -> bytes: ... -def tostr(b: bytes) -> str: ... -def bytestring(x: Any) -> bool: ... - -def is_native_int(s: Any) -> bool: ... -def is_string(x: Any) -> bool: ... -def is_bytes(x: Any) -> bool: ... - -def BytesIO(b: bytes) -> IO[bytes]: ... -def StringIO(s: str) -> IO[str]: ... - -if sys.version_info[0] == 2: - from sys import maxint - iter_range = xrange - -else: - from sys import maxsize as maxint - iter_range = range - -class FileNotFoundError: - def __init__(self, err: int, msg: str, filename: str) -> None: - pass - -def _copy_bytes(start: Optional[int], end: Optional[int], seq: Buffer) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/Util/strxor.py b/venv/Lib/site-packages/Cryptodome/Util/strxor.py deleted file mode 100644 index 6b16155..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/strxor.py +++ /dev/null @@ -1,146 +0,0 @@ -# =================================================================== -# -# Copyright (c) 2014, Legrandin -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# =================================================================== - -from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, c_size_t, - create_string_buffer, get_raw_buffer, - c_uint8_ptr, is_writeable_buffer) - -_raw_strxor = load_pycryptodome_raw_lib( - "Cryptodome.Util._strxor", - """ - void strxor(const uint8_t *in1, - const uint8_t *in2, - uint8_t *out, size_t len); - void strxor_c(const uint8_t *in, - uint8_t c, - uint8_t *out, - size_t len); - """) - - -def strxor(term1, term2, output=None): - """From two byte strings of equal length, - create a third one which is the byte-by-byte XOR of the two. - - Args: - term1 (bytes/bytearray/memoryview): - The first byte string to XOR. - term2 (bytes/bytearray/memoryview): - The second byte string to XOR. - output (bytearray/memoryview): - The location where the result will be written to. - It must have the same length as ``term1`` and ``term2``. - If ``None``, the result is returned. - :Return: - If ``output`` is ``None``, a new byte string with the result. - Otherwise ``None``. - - .. note:: - ``term1`` and ``term2`` must have the same length. - """ - - if len(term1) != len(term2): - raise ValueError("Only byte strings of equal length can be xored") - - if output is None: - result = create_string_buffer(len(term1)) - else: - # Note: output may overlap with either input - result = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(term1) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(term1)) - - _raw_strxor.strxor(c_uint8_ptr(term1), - c_uint8_ptr(term2), - c_uint8_ptr(result), - c_size_t(len(term1))) - - if output is None: - return get_raw_buffer(result) - else: - return None - - -def strxor_c(term, c, output=None): - """From a byte string, create a second one of equal length - where each byte is XOR-red with the same value. - - Args: - term(bytes/bytearray/memoryview): - The byte string to XOR. - c (int): - Every byte in the string will be XOR-ed with this value. - It must be between 0 and 255 (included). - output (None or bytearray/memoryview): - The location where the result will be written to. - It must have the same length as ``term``. - If ``None``, the result is returned. - - Return: - If ``output`` is ``None``, a new ``bytes`` string with the result. - Otherwise ``None``. 
- """ - - if not 0 <= c < 256: - raise ValueError("c must be in range(256)") - - if output is None: - result = create_string_buffer(len(term)) - else: - # Note: output may overlap with either input - result = output - - if not is_writeable_buffer(output): - raise TypeError("output must be a bytearray or a writeable memoryview") - - if len(term) != len(output): - raise ValueError("output must have the same length as the input" - " (%d bytes)" % len(term)) - - _raw_strxor.strxor_c(c_uint8_ptr(term), - c, - c_uint8_ptr(result), - c_size_t(len(term)) - ) - - if output is None: - return get_raw_buffer(result) - else: - return None - - -def _strxor_direct(term1, term2, result): - """Very fast XOR - check conditions!""" - _raw_strxor.strxor(term1, term2, result, c_size_t(len(term1))) diff --git a/venv/Lib/site-packages/Cryptodome/Util/strxor.pyi b/venv/Lib/site-packages/Cryptodome/Util/strxor.pyi deleted file mode 100644 index ca896f3..0000000 --- a/venv/Lib/site-packages/Cryptodome/Util/strxor.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Union, Optional - -Buffer = Union[bytes, bytearray, memoryview] - -def strxor(term1: bytes, term2: bytes, output: Optional[Buffer]=...) -> bytes: ... -def strxor_c(term: bytes, c: int, output: Optional[Buffer]=...) -> bytes: ... diff --git a/venv/Lib/site-packages/Cryptodome/__init__.py b/venv/Lib/site-packages/Cryptodome/__init__.py deleted file mode 100644 index 1fc2db9..0000000 --- a/venv/Lib/site-packages/Cryptodome/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util', 'Signature', - 'IO', 'Math'] - -version_info = (3, 23, '0') - -__version__ = ".".join([str(x) for x in version_info]) diff --git a/venv/Lib/site-packages/Cryptodome/__init__.pyi b/venv/Lib/site-packages/Cryptodome/__init__.pyi deleted file mode 100644 index bc73446..0000000 --- a/venv/Lib/site-packages/Cryptodome/__init__.pyi +++ /dev/null @@ -1,4 +0,0 @@ -from typing import Tuple, Union - -version_info : Tuple[int, int, Union[int, str]] -__version__ : str diff --git a/venv/Lib/site-packages/Cryptodome/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Cryptodome/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 950424e..0000000 Binary files a/venv/Lib/site-packages/Cryptodome/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Cryptodome/py.typed b/venv/Lib/site-packages/Cryptodome/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/OpenSSL/SSL.py b/venv/Lib/site-packages/OpenSSL/SSL.py deleted file mode 100644 index 51c60d5..0000000 --- a/venv/Lib/site-packages/OpenSSL/SSL.py +++ /dev/null @@ -1,3239 +0,0 @@ -from __future__ import annotations - -import os -import socket -import typing -import warnings -from collections.abc import Sequence -from errno import errorcode -from functools import partial, wraps -from itertools import chain, count -from sys import platform -from typing import Any, Callable, Optional, TypeVar -from weakref import WeakValueDictionary - -from cryptography import x509 -from cryptography.hazmat.primitives.asymmetric import ec - -from OpenSSL._util import ( - StrOrBytesPath as _StrOrBytesPath, -) -from OpenSSL._util import ( - exception_from_error_queue as _exception_from_error_queue, -) -from OpenSSL._util import ( - ffi as _ffi, -) -from OpenSSL._util import ( - lib as _lib, -) -from OpenSSL._util import ( - make_assert as _make_assert, -) -from OpenSSL._util import ( - no_zero_allocator as 
_no_zero_allocator, -) -from OpenSSL._util import ( - path_bytes as _path_bytes, -) -from OpenSSL._util import ( - text_to_bytes_and_warn as _text_to_bytes_and_warn, -) -from OpenSSL.crypto import ( - FILETYPE_PEM, - X509, - PKey, - X509Name, - X509Store, - _EllipticCurve, - _PassphraseHelper, - _PrivateKey, -) - -__all__ = [ - "DTLS_CLIENT_METHOD", - "DTLS_METHOD", - "DTLS_SERVER_METHOD", - "MODE_RELEASE_BUFFERS", - "NO_OVERLAPPING_PROTOCOLS", - "OPENSSL_BUILT_ON", - "OPENSSL_CFLAGS", - "OPENSSL_DIR", - "OPENSSL_PLATFORM", - "OPENSSL_VERSION", - "OPENSSL_VERSION_NUMBER", - "OP_ALL", - "OP_CIPHER_SERVER_PREFERENCE", - "OP_COOKIE_EXCHANGE", - "OP_DONT_INSERT_EMPTY_FRAGMENTS", - "OP_EPHEMERAL_RSA", - "OP_MICROSOFT_BIG_SSLV3_BUFFER", - "OP_MICROSOFT_SESS_ID_BUG", - "OP_MSIE_SSLV2_RSA_PADDING", - "OP_NETSCAPE_CA_DN_BUG", - "OP_NETSCAPE_CHALLENGE_BUG", - "OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG", - "OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG", - "OP_NO_COMPRESSION", - "OP_NO_QUERY_MTU", - "OP_NO_TICKET", - "OP_PKCS1_CHECK_1", - "OP_PKCS1_CHECK_2", - "OP_SINGLE_DH_USE", - "OP_SINGLE_ECDH_USE", - "OP_SSLEAY_080_CLIENT_DH_BUG", - "OP_SSLREF2_REUSE_CERT_TYPE_BUG", - "OP_TLS_BLOCK_PADDING_BUG", - "OP_TLS_D5_BUG", - "OP_TLS_ROLLBACK_BUG", - "RECEIVED_SHUTDOWN", - "SENT_SHUTDOWN", - "SESS_CACHE_BOTH", - "SESS_CACHE_CLIENT", - "SESS_CACHE_NO_AUTO_CLEAR", - "SESS_CACHE_NO_INTERNAL", - "SESS_CACHE_NO_INTERNAL_LOOKUP", - "SESS_CACHE_NO_INTERNAL_STORE", - "SESS_CACHE_OFF", - "SESS_CACHE_SERVER", - "SSL3_VERSION", - "SSLEAY_BUILT_ON", - "SSLEAY_CFLAGS", - "SSLEAY_DIR", - "SSLEAY_PLATFORM", - "SSLEAY_VERSION", - "SSL_CB_ACCEPT_EXIT", - "SSL_CB_ACCEPT_LOOP", - "SSL_CB_ALERT", - "SSL_CB_CONNECT_EXIT", - "SSL_CB_CONNECT_LOOP", - "SSL_CB_EXIT", - "SSL_CB_HANDSHAKE_DONE", - "SSL_CB_HANDSHAKE_START", - "SSL_CB_LOOP", - "SSL_CB_READ", - "SSL_CB_READ_ALERT", - "SSL_CB_WRITE", - "SSL_CB_WRITE_ALERT", - "SSL_ST_ACCEPT", - "SSL_ST_CONNECT", - "SSL_ST_MASK", - "TLS1_1_VERSION", - "TLS1_2_VERSION", - "TLS1_3_VERSION", - "TLS1_VERSION", - "TLS_CLIENT_METHOD", - "TLS_METHOD", - "TLS_SERVER_METHOD", - "VERIFY_CLIENT_ONCE", - "VERIFY_FAIL_IF_NO_PEER_CERT", - "VERIFY_NONE", - "VERIFY_PEER", - "Connection", - "Context", - "Error", - "OP_NO_SSLv2", - "OP_NO_SSLv3", - "OP_NO_TLSv1", - "OP_NO_TLSv1_1", - "OP_NO_TLSv1_2", - "OP_NO_TLSv1_3", - "SSLeay_version", - "SSLv23_METHOD", - "Session", - "SysCallError", - "TLSv1_1_METHOD", - "TLSv1_2_METHOD", - "TLSv1_METHOD", - "WantReadError", - "WantWriteError", - "WantX509LookupError", - "X509VerificationCodes", - "ZeroReturnError", -] - - -OPENSSL_VERSION_NUMBER: int = _lib.OPENSSL_VERSION_NUMBER -OPENSSL_VERSION: int = _lib.OPENSSL_VERSION -OPENSSL_CFLAGS: int = _lib.OPENSSL_CFLAGS -OPENSSL_PLATFORM: int = _lib.OPENSSL_PLATFORM -OPENSSL_DIR: int = _lib.OPENSSL_DIR -OPENSSL_BUILT_ON: int = _lib.OPENSSL_BUILT_ON - -SSLEAY_VERSION = OPENSSL_VERSION -SSLEAY_CFLAGS = OPENSSL_CFLAGS -SSLEAY_PLATFORM = OPENSSL_PLATFORM -SSLEAY_DIR = OPENSSL_DIR -SSLEAY_BUILT_ON = OPENSSL_BUILT_ON - -SENT_SHUTDOWN = _lib.SSL_SENT_SHUTDOWN -RECEIVED_SHUTDOWN = _lib.SSL_RECEIVED_SHUTDOWN - -SSLv23_METHOD = 3 -TLSv1_METHOD = 4 -TLSv1_1_METHOD = 5 -TLSv1_2_METHOD = 6 -TLS_METHOD = 7 -TLS_SERVER_METHOD = 8 -TLS_CLIENT_METHOD = 9 -DTLS_METHOD = 10 -DTLS_SERVER_METHOD = 11 -DTLS_CLIENT_METHOD = 12 - -SSL3_VERSION: int = _lib.SSL3_VERSION -TLS1_VERSION: int = _lib.TLS1_VERSION -TLS1_1_VERSION: int = _lib.TLS1_1_VERSION -TLS1_2_VERSION: int = _lib.TLS1_2_VERSION -TLS1_3_VERSION: int = _lib.TLS1_3_VERSION - -OP_NO_SSLv2: int = 
_lib.SSL_OP_NO_SSLv2 -OP_NO_SSLv3: int = _lib.SSL_OP_NO_SSLv3 -OP_NO_TLSv1: int = _lib.SSL_OP_NO_TLSv1 -OP_NO_TLSv1_1: int = _lib.SSL_OP_NO_TLSv1_1 -OP_NO_TLSv1_2: int = _lib.SSL_OP_NO_TLSv1_2 -OP_NO_TLSv1_3: int = _lib.SSL_OP_NO_TLSv1_3 - -MODE_RELEASE_BUFFERS: int = _lib.SSL_MODE_RELEASE_BUFFERS - -OP_SINGLE_DH_USE: int = _lib.SSL_OP_SINGLE_DH_USE -OP_SINGLE_ECDH_USE: int = _lib.SSL_OP_SINGLE_ECDH_USE -OP_EPHEMERAL_RSA: int = _lib.SSL_OP_EPHEMERAL_RSA -OP_MICROSOFT_SESS_ID_BUG: int = _lib.SSL_OP_MICROSOFT_SESS_ID_BUG -OP_NETSCAPE_CHALLENGE_BUG: int = _lib.SSL_OP_NETSCAPE_CHALLENGE_BUG -OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: int = ( - _lib.SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG -) -OP_SSLREF2_REUSE_CERT_TYPE_BUG: int = _lib.SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG -OP_MICROSOFT_BIG_SSLV3_BUFFER: int = _lib.SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER -OP_MSIE_SSLV2_RSA_PADDING: int = _lib.SSL_OP_MSIE_SSLV2_RSA_PADDING -OP_SSLEAY_080_CLIENT_DH_BUG: int = _lib.SSL_OP_SSLEAY_080_CLIENT_DH_BUG -OP_TLS_D5_BUG: int = _lib.SSL_OP_TLS_D5_BUG -OP_TLS_BLOCK_PADDING_BUG: int = _lib.SSL_OP_TLS_BLOCK_PADDING_BUG -OP_DONT_INSERT_EMPTY_FRAGMENTS: int = _lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS -OP_CIPHER_SERVER_PREFERENCE: int = _lib.SSL_OP_CIPHER_SERVER_PREFERENCE -OP_TLS_ROLLBACK_BUG: int = _lib.SSL_OP_TLS_ROLLBACK_BUG -OP_PKCS1_CHECK_1 = _lib.SSL_OP_PKCS1_CHECK_1 -OP_PKCS1_CHECK_2: int = _lib.SSL_OP_PKCS1_CHECK_2 -OP_NETSCAPE_CA_DN_BUG: int = _lib.SSL_OP_NETSCAPE_CA_DN_BUG -OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: int = ( - _lib.SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG -) -OP_NO_COMPRESSION: int = _lib.SSL_OP_NO_COMPRESSION - -OP_NO_QUERY_MTU: int = _lib.SSL_OP_NO_QUERY_MTU -OP_COOKIE_EXCHANGE: int = _lib.SSL_OP_COOKIE_EXCHANGE -OP_NO_TICKET: int = _lib.SSL_OP_NO_TICKET - -try: - OP_NO_RENEGOTIATION: int = _lib.SSL_OP_NO_RENEGOTIATION - __all__.append("OP_NO_RENEGOTIATION") -except AttributeError: - pass - -try: - OP_IGNORE_UNEXPECTED_EOF: int = _lib.SSL_OP_IGNORE_UNEXPECTED_EOF - __all__.append("OP_IGNORE_UNEXPECTED_EOF") -except AttributeError: - pass - -try: - OP_LEGACY_SERVER_CONNECT: int = _lib.SSL_OP_LEGACY_SERVER_CONNECT - __all__.append("OP_LEGACY_SERVER_CONNECT") -except AttributeError: - pass - -OP_ALL: int = _lib.SSL_OP_ALL - -VERIFY_PEER: int = _lib.SSL_VERIFY_PEER -VERIFY_FAIL_IF_NO_PEER_CERT: int = _lib.SSL_VERIFY_FAIL_IF_NO_PEER_CERT -VERIFY_CLIENT_ONCE: int = _lib.SSL_VERIFY_CLIENT_ONCE -VERIFY_NONE: int = _lib.SSL_VERIFY_NONE - -SESS_CACHE_OFF: int = _lib.SSL_SESS_CACHE_OFF -SESS_CACHE_CLIENT: int = _lib.SSL_SESS_CACHE_CLIENT -SESS_CACHE_SERVER: int = _lib.SSL_SESS_CACHE_SERVER -SESS_CACHE_BOTH: int = _lib.SSL_SESS_CACHE_BOTH -SESS_CACHE_NO_AUTO_CLEAR: int = _lib.SSL_SESS_CACHE_NO_AUTO_CLEAR -SESS_CACHE_NO_INTERNAL_LOOKUP: int = _lib.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP -SESS_CACHE_NO_INTERNAL_STORE: int = _lib.SSL_SESS_CACHE_NO_INTERNAL_STORE -SESS_CACHE_NO_INTERNAL: int = _lib.SSL_SESS_CACHE_NO_INTERNAL - -SSL_ST_CONNECT: int = _lib.SSL_ST_CONNECT -SSL_ST_ACCEPT: int = _lib.SSL_ST_ACCEPT -SSL_ST_MASK: int = _lib.SSL_ST_MASK - -SSL_CB_LOOP: int = _lib.SSL_CB_LOOP -SSL_CB_EXIT: int = _lib.SSL_CB_EXIT -SSL_CB_READ: int = _lib.SSL_CB_READ -SSL_CB_WRITE: int = _lib.SSL_CB_WRITE -SSL_CB_ALERT: int = _lib.SSL_CB_ALERT -SSL_CB_READ_ALERT: int = _lib.SSL_CB_READ_ALERT -SSL_CB_WRITE_ALERT: int = _lib.SSL_CB_WRITE_ALERT -SSL_CB_ACCEPT_LOOP: int = _lib.SSL_CB_ACCEPT_LOOP -SSL_CB_ACCEPT_EXIT: int = _lib.SSL_CB_ACCEPT_EXIT -SSL_CB_CONNECT_LOOP: int = _lib.SSL_CB_CONNECT_LOOP -SSL_CB_CONNECT_EXIT: int = 
_lib.SSL_CB_CONNECT_EXIT -SSL_CB_HANDSHAKE_START: int = _lib.SSL_CB_HANDSHAKE_START -SSL_CB_HANDSHAKE_DONE: int = _lib.SSL_CB_HANDSHAKE_DONE - -_Buffer = typing.Union[bytes, bytearray, memoryview] -_T = TypeVar("_T") - - -class _NoOverlappingProtocols: - pass - - -NO_OVERLAPPING_PROTOCOLS = _NoOverlappingProtocols() - -# Callback types. -_ALPNSelectCallback = Callable[ - [ - "Connection", - typing.List[bytes], - ], - typing.Union[bytes, _NoOverlappingProtocols], -] -_CookieGenerateCallback = Callable[["Connection"], bytes] -_CookieVerifyCallback = Callable[["Connection", bytes], bool] -_OCSPClientCallback = Callable[["Connection", bytes, Optional[_T]], bool] -_OCSPServerCallback = Callable[["Connection", Optional[_T]], bytes] -_PassphraseCallback = Callable[[int, bool, Optional[_T]], bytes] -_VerifyCallback = Callable[["Connection", X509, int, int, int], bool] - - -class X509VerificationCodes: - """ - Success and error codes for X509 verification, as returned by the - underlying ``X509_STORE_CTX_get_error()`` function and passed by pyOpenSSL - to verification callback functions. - - See `OpenSSL Verification Errors - `_ - for details. - """ - - OK = _lib.X509_V_OK - ERR_UNABLE_TO_GET_ISSUER_CERT = _lib.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT - ERR_UNABLE_TO_GET_CRL = _lib.X509_V_ERR_UNABLE_TO_GET_CRL - ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE = ( - _lib.X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE - ) - ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE = ( - _lib.X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE - ) - ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY = ( - _lib.X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY - ) - ERR_CERT_SIGNATURE_FAILURE = _lib.X509_V_ERR_CERT_SIGNATURE_FAILURE - ERR_CRL_SIGNATURE_FAILURE = _lib.X509_V_ERR_CRL_SIGNATURE_FAILURE - ERR_CERT_NOT_YET_VALID = _lib.X509_V_ERR_CERT_NOT_YET_VALID - ERR_CERT_HAS_EXPIRED = _lib.X509_V_ERR_CERT_HAS_EXPIRED - ERR_CRL_NOT_YET_VALID = _lib.X509_V_ERR_CRL_NOT_YET_VALID - ERR_CRL_HAS_EXPIRED = _lib.X509_V_ERR_CRL_HAS_EXPIRED - ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD = ( - _lib.X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD - ) - ERR_ERROR_IN_CERT_NOT_AFTER_FIELD = ( - _lib.X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD - ) - ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD = ( - _lib.X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD - ) - ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD = ( - _lib.X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD - ) - ERR_OUT_OF_MEM = _lib.X509_V_ERR_OUT_OF_MEM - ERR_DEPTH_ZERO_SELF_SIGNED_CERT = ( - _lib.X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT - ) - ERR_SELF_SIGNED_CERT_IN_CHAIN = _lib.X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN - ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY = ( - _lib.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY - ) - ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE = ( - _lib.X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE - ) - ERR_CERT_CHAIN_TOO_LONG = _lib.X509_V_ERR_CERT_CHAIN_TOO_LONG - ERR_CERT_REVOKED = _lib.X509_V_ERR_CERT_REVOKED - ERR_INVALID_CA = _lib.X509_V_ERR_INVALID_CA - ERR_PATH_LENGTH_EXCEEDED = _lib.X509_V_ERR_PATH_LENGTH_EXCEEDED - ERR_INVALID_PURPOSE = _lib.X509_V_ERR_INVALID_PURPOSE - ERR_CERT_UNTRUSTED = _lib.X509_V_ERR_CERT_UNTRUSTED - ERR_CERT_REJECTED = _lib.X509_V_ERR_CERT_REJECTED - ERR_SUBJECT_ISSUER_MISMATCH = _lib.X509_V_ERR_SUBJECT_ISSUER_MISMATCH - ERR_AKID_SKID_MISMATCH = _lib.X509_V_ERR_AKID_SKID_MISMATCH - ERR_AKID_ISSUER_SERIAL_MISMATCH = ( - _lib.X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH - ) - ERR_KEYUSAGE_NO_CERTSIGN = _lib.X509_V_ERR_KEYUSAGE_NO_CERTSIGN - ERR_UNABLE_TO_GET_CRL_ISSUER = _lib.X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER - ERR_UNHANDLED_CRITICAL_EXTENSION 
= ( - _lib.X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION - ) - ERR_KEYUSAGE_NO_CRL_SIGN = _lib.X509_V_ERR_KEYUSAGE_NO_CRL_SIGN - ERR_UNHANDLED_CRITICAL_CRL_EXTENSION = ( - _lib.X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION - ) - ERR_INVALID_NON_CA = _lib.X509_V_ERR_INVALID_NON_CA - ERR_PROXY_PATH_LENGTH_EXCEEDED = _lib.X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED - ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE = ( - _lib.X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE - ) - ERR_PROXY_CERTIFICATES_NOT_ALLOWED = ( - _lib.X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED - ) - ERR_INVALID_EXTENSION = _lib.X509_V_ERR_INVALID_EXTENSION - ERR_INVALID_POLICY_EXTENSION = _lib.X509_V_ERR_INVALID_POLICY_EXTENSION - ERR_NO_EXPLICIT_POLICY = _lib.X509_V_ERR_NO_EXPLICIT_POLICY - ERR_DIFFERENT_CRL_SCOPE = _lib.X509_V_ERR_DIFFERENT_CRL_SCOPE - ERR_UNSUPPORTED_EXTENSION_FEATURE = ( - _lib.X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE - ) - ERR_UNNESTED_RESOURCE = _lib.X509_V_ERR_UNNESTED_RESOURCE - ERR_PERMITTED_VIOLATION = _lib.X509_V_ERR_PERMITTED_VIOLATION - ERR_EXCLUDED_VIOLATION = _lib.X509_V_ERR_EXCLUDED_VIOLATION - ERR_SUBTREE_MINMAX = _lib.X509_V_ERR_SUBTREE_MINMAX - ERR_UNSUPPORTED_CONSTRAINT_TYPE = ( - _lib.X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE - ) - ERR_UNSUPPORTED_CONSTRAINT_SYNTAX = ( - _lib.X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX - ) - ERR_UNSUPPORTED_NAME_SYNTAX = _lib.X509_V_ERR_UNSUPPORTED_NAME_SYNTAX - ERR_CRL_PATH_VALIDATION_ERROR = _lib.X509_V_ERR_CRL_PATH_VALIDATION_ERROR - ERR_HOSTNAME_MISMATCH = _lib.X509_V_ERR_HOSTNAME_MISMATCH - ERR_EMAIL_MISMATCH = _lib.X509_V_ERR_EMAIL_MISMATCH - ERR_IP_ADDRESS_MISMATCH = _lib.X509_V_ERR_IP_ADDRESS_MISMATCH - ERR_APPLICATION_VERIFICATION = _lib.X509_V_ERR_APPLICATION_VERIFICATION - - -# Taken from https://golang.org/src/crypto/x509/root_linux.go -_CERTIFICATE_FILE_LOCATIONS = [ - "/etc/ssl/certs/ca-certificates.crt", # Debian/Ubuntu/Gentoo etc. - "/etc/pki/tls/certs/ca-bundle.crt", # Fedora/RHEL 6 - "/etc/ssl/ca-bundle.pem", # OpenSUSE - "/etc/pki/tls/cacert.pem", # OpenELEC - "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", # CentOS/RHEL 7 -] - -_CERTIFICATE_PATH_LOCATIONS = [ - "/etc/ssl/certs", # SLES10/SLES11 -] - -# These values are compared to output from cffi's ffi.string so they must be -# byte strings. -_CRYPTOGRAPHY_MANYLINUX_CA_DIR = b"/opt/pyca/cryptography/openssl/certs" -_CRYPTOGRAPHY_MANYLINUX_CA_FILE = b"/opt/pyca/cryptography/openssl/cert.pem" - - -class Error(Exception): - """ - An error occurred in an `OpenSSL.SSL` API. - """ - - -_raise_current_error = partial(_exception_from_error_queue, Error) -_openssl_assert = _make_assert(Error) - - -class WantReadError(Error): - pass - - -class WantWriteError(Error): - pass - - -class WantX509LookupError(Error): - pass - - -class ZeroReturnError(Error): - pass - - -class SysCallError(Error): - pass - - -class _CallbackExceptionHelper: - """ - A base class for wrapper classes that allow for intelligent exception - handling in OpenSSL callbacks. - - :ivar list _problems: Any exceptions that occurred while executing in a - context where they could not be raised in the normal way. Typically - this is because OpenSSL has called into some Python code and requires a - return value. The exceptions are saved to be raised later when it is - possible to do so. - """ - - def __init__(self) -> None: - self._problems: list[Exception] = [] - - def raise_if_problem(self) -> None: - """ - Raise an exception from the OpenSSL error queue or that was previously - captured whe running a callback. 
- """ - if self._problems: - try: - _raise_current_error() - except Error: - pass - raise self._problems.pop(0) - - -class _VerifyHelper(_CallbackExceptionHelper): - """ - Wrap a callback such that it can be used as a certificate verification - callback. - """ - - def __init__(self, callback: _VerifyCallback) -> None: - _CallbackExceptionHelper.__init__(self) - - @wraps(callback) - def wrapper(ok, store_ctx): # type: ignore[no-untyped-def] - x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) - _lib.X509_up_ref(x509) - cert = X509._from_raw_x509_ptr(x509) - error_number = _lib.X509_STORE_CTX_get_error(store_ctx) - error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) - - index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx() - ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index) - connection = Connection._reverse_mapping[ssl] - - try: - result = callback( - connection, cert, error_number, error_depth, ok - ) - except Exception as e: - self._problems.append(e) - return 0 - else: - if result: - _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK) - return 1 - else: - return 0 - - self.callback = _ffi.callback( - "int (*)(int, X509_STORE_CTX *)", wrapper - ) - - -class _ALPNSelectHelper(_CallbackExceptionHelper): - """ - Wrap a callback such that it can be used as an ALPN selection callback. - """ - - def __init__(self, callback: _ALPNSelectCallback) -> None: - _CallbackExceptionHelper.__init__(self) - - @wraps(callback) - def wrapper(ssl, out, outlen, in_, inlen, arg): # type: ignore[no-untyped-def] - try: - conn = Connection._reverse_mapping[ssl] - - # The string passed to us is made up of multiple - # length-prefixed bytestrings. We need to split that into a - # list. - instr = _ffi.buffer(in_, inlen)[:] - protolist = [] - while instr: - encoded_len = instr[0] - proto = instr[1 : encoded_len + 1] - protolist.append(proto) - instr = instr[encoded_len + 1 :] - - # Call the callback - outbytes = callback(conn, protolist) - any_accepted = True - if outbytes is NO_OVERLAPPING_PROTOCOLS: - outbytes = b"" - any_accepted = False - elif not isinstance(outbytes, bytes): - raise TypeError( - "ALPN callback must return a bytestring or the " - "special NO_OVERLAPPING_PROTOCOLS sentinel value." - ) - - # Save our callback arguments on the connection object to make - # sure that they don't get freed before OpenSSL can use them. - # Then, return them in the appropriate output parameters. - conn._alpn_select_callback_args = [ - _ffi.new("unsigned char *", len(outbytes)), - _ffi.new("unsigned char[]", outbytes), - ] - outlen[0] = conn._alpn_select_callback_args[0][0] - out[0] = conn._alpn_select_callback_args[1] - if not any_accepted: - return _lib.SSL_TLSEXT_ERR_NOACK - return _lib.SSL_TLSEXT_ERR_OK - except Exception as e: - self._problems.append(e) - return _lib.SSL_TLSEXT_ERR_ALERT_FATAL - - self.callback = _ffi.callback( - ( - "int (*)(SSL *, unsigned char **, unsigned char *, " - "const unsigned char *, unsigned int, void *)" - ), - wrapper, - ) - - -class _OCSPServerCallbackHelper(_CallbackExceptionHelper): - """ - Wrap a callback such that it can be used as an OCSP callback for the server - side. - - Annoyingly, OpenSSL defines one OCSP callback but uses it in two different - ways. For servers, that callback is expected to retrieve some OCSP data and - hand it to OpenSSL, and may return only SSL_TLSEXT_ERR_OK, - SSL_TLSEXT_ERR_FATAL, and SSL_TLSEXT_ERR_NOACK. 
For clients, that callback - is expected to check the OCSP data, and returns a negative value on error, - 0 if the response is not acceptable, or positive if it is. These are - mutually exclusive return code behaviours, and they mean that we need two - helpers so that we always return an appropriate error code if the user's - code throws an exception. - - Given that we have to have two helpers anyway, these helpers are a bit more - helpery than most: specifically, they hide a few more of the OpenSSL - functions so that the user has an easier time writing these callbacks. - - This helper implements the server side. - """ - - def __init__(self, callback: _OCSPServerCallback[Any]) -> None: - _CallbackExceptionHelper.__init__(self) - - @wraps(callback) - def wrapper(ssl, cdata): # type: ignore[no-untyped-def] - try: - conn = Connection._reverse_mapping[ssl] - - # Extract the data if any was provided. - if cdata != _ffi.NULL: - data = _ffi.from_handle(cdata) - else: - data = None - - # Call the callback. - ocsp_data = callback(conn, data) - - if not isinstance(ocsp_data, bytes): - raise TypeError("OCSP callback must return a bytestring.") - - # If the OCSP data was provided, we will pass it to OpenSSL. - # However, we have an early exit here: if no OCSP data was - # provided we will just exit out and tell OpenSSL that there - # is nothing to do. - if not ocsp_data: - return 3 # SSL_TLSEXT_ERR_NOACK - - # OpenSSL takes ownership of this data and expects it to have - # been allocated by OPENSSL_malloc. - ocsp_data_length = len(ocsp_data) - data_ptr = _lib.OPENSSL_malloc(ocsp_data_length) - _ffi.buffer(data_ptr, ocsp_data_length)[:] = ocsp_data - - _lib.SSL_set_tlsext_status_ocsp_resp( - ssl, data_ptr, ocsp_data_length - ) - - return 0 - except Exception as e: - self._problems.append(e) - return 2 # SSL_TLSEXT_ERR_ALERT_FATAL - - self.callback = _ffi.callback("int (*)(SSL *, void *)", wrapper) - - -class _OCSPClientCallbackHelper(_CallbackExceptionHelper): - """ - Wrap a callback such that it can be used as an OCSP callback for the client - side. - - Annoyingly, OpenSSL defines one OCSP callback but uses it in two different - ways. For servers, that callback is expected to retrieve some OCSP data and - hand it to OpenSSL, and may return only SSL_TLSEXT_ERR_OK, - SSL_TLSEXT_ERR_FATAL, and SSL_TLSEXT_ERR_NOACK. For clients, that callback - is expected to check the OCSP data, and returns a negative value on error, - 0 if the response is not acceptable, or positive if it is. These are - mutually exclusive return code behaviours, and they mean that we need two - helpers so that we always return an appropriate error code if the user's - code throws an exception. - - Given that we have to have two helpers anyway, these helpers are a bit more - helpery than most: specifically, they hide a few more of the OpenSSL - functions so that the user has an easier time writing these callbacks. - - This helper implements the client side. - """ - - def __init__(self, callback: _OCSPClientCallback[Any]) -> None: - _CallbackExceptionHelper.__init__(self) - - @wraps(callback) - def wrapper(ssl, cdata): # type: ignore[no-untyped-def] - try: - conn = Connection._reverse_mapping[ssl] - - # Extract the data if any was provided. - if cdata != _ffi.NULL: - data = _ffi.from_handle(cdata) - else: - data = None - - # Get the OCSP data. - ocsp_ptr = _ffi.new("unsigned char **") - ocsp_len = _lib.SSL_get_tlsext_status_ocsp_resp(ssl, ocsp_ptr) - if ocsp_len < 0: - # No OCSP data. 
- ocsp_data = b"" - else: - # Copy the OCSP data, then pass it to the callback. - ocsp_data = _ffi.buffer(ocsp_ptr[0], ocsp_len)[:] - - valid = callback(conn, ocsp_data, data) - - # Return 1 on success or 0 on error. - return int(bool(valid)) - - except Exception as e: - self._problems.append(e) - # Return negative value if an exception is hit. - return -1 - - self.callback = _ffi.callback("int (*)(SSL *, void *)", wrapper) - - -class _CookieGenerateCallbackHelper(_CallbackExceptionHelper): - def __init__(self, callback: _CookieGenerateCallback) -> None: - _CallbackExceptionHelper.__init__(self) - - @wraps(callback) - def wrapper(ssl, out, outlen): # type: ignore[no-untyped-def] - try: - conn = Connection._reverse_mapping[ssl] - cookie = callback(conn) - out[0 : len(cookie)] = cookie - outlen[0] = len(cookie) - return 1 - except Exception as e: - self._problems.append(e) - # "a zero return value can be used to abort the handshake" - return 0 - - self.callback = _ffi.callback( - "int (*)(SSL *, unsigned char *, unsigned int *)", - wrapper, - ) - - -class _CookieVerifyCallbackHelper(_CallbackExceptionHelper): - def __init__(self, callback: _CookieVerifyCallback) -> None: - _CallbackExceptionHelper.__init__(self) - - @wraps(callback) - def wrapper(ssl, c_cookie, cookie_len): # type: ignore[no-untyped-def] - try: - conn = Connection._reverse_mapping[ssl] - return callback(conn, bytes(c_cookie[0:cookie_len])) - except Exception as e: - self._problems.append(e) - return 0 - - self.callback = _ffi.callback( - "int (*)(SSL *, unsigned char *, unsigned int)", - wrapper, - ) - - -def _asFileDescriptor(obj: Any) -> int: - fd = None - if not isinstance(obj, int): - meth = getattr(obj, "fileno", None) - if meth is not None: - obj = meth() - - if isinstance(obj, int): - fd = obj - - if not isinstance(fd, int): - raise TypeError("argument must be an int, or have a fileno() method.") - elif fd < 0: - raise ValueError( - f"file descriptor cannot be a negative integer ({fd:i})" - ) - - return fd - - -def OpenSSL_version(type: int) -> bytes: - """ - Return a string describing the version of OpenSSL in use. - - :param type: One of the :const:`OPENSSL_` constants defined in this module. - """ - return _ffi.string(_lib.OpenSSL_version(type)) - - -SSLeay_version = OpenSSL_version - - -def _make_requires(flag: int, error: str) -> Callable[[_T], _T]: - """ - Builds a decorator that ensures that functions that rely on OpenSSL - functions that are not present in this build raise NotImplementedError, - rather than AttributeError coming out of cryptography. - - :param flag: A cryptography flag that guards the functions, e.g. - ``Cryptography_HAS_NEXTPROTONEG``. - :param error: The string to be used in the exception if the flag is false. - """ - - def _requires_decorator(func): # type: ignore[no-untyped-def] - if not flag: - - @wraps(func) - def explode(*args, **kwargs): # type: ignore[no-untyped-def] - raise NotImplementedError(error) - - return explode - else: - return func - - return _requires_decorator - - -_requires_keylog = _make_requires( - getattr(_lib, "Cryptography_HAS_KEYLOG", 0), "Key logging not available" -) - - -class Session: - """ - A class representing an SSL session. A session defines certain connection - parameters which may be re-used to speed up the setup of subsequent - connections. - - .. 
versionadded:: 0.14 - """ - - _session: Any - - -F = TypeVar("F", bound=Callable[..., Any]) - - -def _require_not_used(f: F) -> F: - @wraps(f) - def inner(self: Context, *args: Any, **kwargs: Any) -> Any: - if self._used: - warnings.warn( - ( - "Attempting to mutate a Context after a Connection was " - "created. In the future, this will raise an exception" - ), - DeprecationWarning, - stacklevel=2, - ) - return f(self, *args, **kwargs) - - return typing.cast(F, inner) - - -class Context: - """ - :class:`OpenSSL.SSL.Context` instances define the parameters for setting - up new SSL connections. - - :param method: One of TLS_METHOD, TLS_CLIENT_METHOD, TLS_SERVER_METHOD, - DTLS_METHOD, DTLS_CLIENT_METHOD, or DTLS_SERVER_METHOD. - SSLv23_METHOD, TLSv1_METHOD, etc. are deprecated and should - not be used. - """ - - _methods: typing.ClassVar[ - dict[int, tuple[Callable[[], Any], int | None]] - ] = { - SSLv23_METHOD: (_lib.TLS_method, None), - TLSv1_METHOD: (_lib.TLS_method, TLS1_VERSION), - TLSv1_1_METHOD: (_lib.TLS_method, TLS1_1_VERSION), - TLSv1_2_METHOD: (_lib.TLS_method, TLS1_2_VERSION), - TLS_METHOD: (_lib.TLS_method, None), - TLS_SERVER_METHOD: (_lib.TLS_server_method, None), - TLS_CLIENT_METHOD: (_lib.TLS_client_method, None), - DTLS_METHOD: (_lib.DTLS_method, None), - DTLS_SERVER_METHOD: (_lib.DTLS_server_method, None), - DTLS_CLIENT_METHOD: (_lib.DTLS_client_method, None), - } - - def __init__(self, method: int) -> None: - if not isinstance(method, int): - raise TypeError("method must be an integer") - - try: - method_func, version = self._methods[method] - except KeyError: - raise ValueError("No such protocol") - - method_obj = method_func() - _openssl_assert(method_obj != _ffi.NULL) - - context = _lib.SSL_CTX_new(method_obj) - _openssl_assert(context != _ffi.NULL) - context = _ffi.gc(context, _lib.SSL_CTX_free) - - self._context = context - self._used = False - self._passphrase_helper: _PassphraseHelper | None = None - self._passphrase_callback: _PassphraseCallback[Any] | None = None - self._passphrase_userdata: Any | None = None - self._verify_helper: _VerifyHelper | None = None - self._verify_callback: _VerifyCallback | None = None - self._info_callback = None - self._keylog_callback = None - self._tlsext_servername_callback = None - self._app_data = None - self._alpn_select_helper: _ALPNSelectHelper | None = None - self._alpn_select_callback: _ALPNSelectCallback | None = None - self._ocsp_helper: ( - _OCSPClientCallbackHelper | _OCSPServerCallbackHelper | None - ) = None - self._ocsp_callback: ( - _OCSPClientCallback[Any] | _OCSPServerCallback[Any] | None - ) = None - self._ocsp_data: Any | None = None - self._cookie_generate_helper: _CookieGenerateCallbackHelper | None = ( - None - ) - self._cookie_verify_helper: _CookieVerifyCallbackHelper | None = None - - self.set_mode( - _lib.SSL_MODE_ENABLE_PARTIAL_WRITE - | _lib.SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER - ) - if version is not None: - self.set_min_proto_version(version) - self.set_max_proto_version(version) - - @_require_not_used - def set_min_proto_version(self, version: int) -> None: - """ - Set the minimum supported protocol version. Setting the minimum - version to 0 will enable protocol versions down to the lowest version - supported by the library. - - If the underlying OpenSSL build is missing support for the selected - version, this method will raise an exception. 
- """ - _openssl_assert( - _lib.SSL_CTX_set_min_proto_version(self._context, version) == 1 - ) - - @_require_not_used - def set_max_proto_version(self, version: int) -> None: - """ - Set the maximum supported protocol version. Setting the maximum - version to 0 will enable protocol versions up to the highest version - supported by the library. - - If the underlying OpenSSL build is missing support for the selected - version, this method will raise an exception. - """ - _openssl_assert( - _lib.SSL_CTX_set_max_proto_version(self._context, version) == 1 - ) - - @_require_not_used - def load_verify_locations( - self, - cafile: _StrOrBytesPath | None, - capath: _StrOrBytesPath | None = None, - ) -> None: - """ - Let SSL know where we can find trusted certificates for the certificate - chain. Note that the certificates have to be in PEM format. - - If capath is passed, it must be a directory prepared using the - ``c_rehash`` tool included with OpenSSL. Either, but not both, of - *pemfile* or *capath* may be :data:`None`. - - :param cafile: In which file we can find the certificates (``bytes`` or - ``str``). - :param capath: In which directory we can find the certificates - (``bytes`` or ``str``). - - :return: None - """ - if cafile is None: - cafile = _ffi.NULL - else: - cafile = _path_bytes(cafile) - - if capath is None: - capath = _ffi.NULL - else: - capath = _path_bytes(capath) - - load_result = _lib.SSL_CTX_load_verify_locations( - self._context, cafile, capath - ) - if not load_result: - _raise_current_error() - - def _wrap_callback( - self, callback: _PassphraseCallback[_T] - ) -> _PassphraseHelper: - @wraps(callback) - def wrapper(size: int, verify: bool, userdata: Any) -> bytes: - return callback(size, verify, self._passphrase_userdata) - - return _PassphraseHelper( - FILETYPE_PEM, wrapper, more_args=True, truncate=True - ) - - @_require_not_used - def set_passwd_cb( - self, - callback: _PassphraseCallback[_T], - userdata: _T | None = None, - ) -> None: - """ - Set the passphrase callback. This function will be called - when a private key with a passphrase is loaded. - - :param callback: The Python callback to use. This must accept three - positional arguments. First, an integer giving the maximum length - of the passphrase it may return. If the returned passphrase is - longer than this, it will be truncated. Second, a boolean value - which will be true if the user should be prompted for the - passphrase twice and the callback should verify that the two values - supplied are equal. Third, the value given as the *userdata* - parameter to :meth:`set_passwd_cb`. The *callback* must return - a byte string. If an error occurs, *callback* should return a false - value (e.g. an empty string). - :param userdata: (optional) A Python object which will be given as - argument to the callback - :return: None - """ - if not callable(callback): - raise TypeError("callback must be callable") - - self._passphrase_helper = self._wrap_callback(callback) - self._passphrase_callback = self._passphrase_helper.callback - _lib.SSL_CTX_set_default_passwd_cb( - self._context, self._passphrase_callback - ) - self._passphrase_userdata = userdata - - @_require_not_used - def set_default_verify_paths(self) -> None: - """ - Specify that the platform provided CA certificates are to be used for - verification purposes. 
This method has some caveats related to the - binary wheels that cryptography (pyOpenSSL's primary dependency) ships: - - * macOS will only load certificates using this method if the user has - the ``openssl@1.1`` `Homebrew `_ formula installed - in the default location. - * Windows will not work. - * manylinux cryptography wheels will work on most common Linux - distributions in pyOpenSSL 17.1.0 and above. pyOpenSSL detects the - manylinux wheel and attempts to load roots via a fallback path. - - :return: None - """ - # SSL_CTX_set_default_verify_paths will attempt to load certs from - # both a cafile and capath that are set at compile time. However, - # it will first check environment variables and, if present, load - # those paths instead - set_result = _lib.SSL_CTX_set_default_verify_paths(self._context) - _openssl_assert(set_result == 1) - # After attempting to set default_verify_paths we need to know whether - # to go down the fallback path. - # First we'll check to see if any env vars have been set. If so, - # we won't try to do anything else because the user has set the path - # themselves. - if not self._check_env_vars_set("SSL_CERT_DIR", "SSL_CERT_FILE"): - default_dir = _ffi.string(_lib.X509_get_default_cert_dir()) - default_file = _ffi.string(_lib.X509_get_default_cert_file()) - # Now we check to see if the default_dir and default_file are set - # to the exact values we use in our manylinux builds. If they are - # then we know to load the fallbacks - if ( - default_dir == _CRYPTOGRAPHY_MANYLINUX_CA_DIR - and default_file == _CRYPTOGRAPHY_MANYLINUX_CA_FILE - ): - # This is manylinux, let's load our fallback paths - self._fallback_default_verify_paths( - _CERTIFICATE_FILE_LOCATIONS, _CERTIFICATE_PATH_LOCATIONS - ) - - def _check_env_vars_set(self, dir_env_var: str, file_env_var: str) -> bool: - """ - Check to see if the default cert dir/file environment vars are present. - - :return: bool - """ - return ( - os.environ.get(file_env_var) is not None - or os.environ.get(dir_env_var) is not None - ) - - def _fallback_default_verify_paths( - self, file_path: list[str], dir_path: list[str] - ) -> None: - """ - Default verify paths are based on the compiled version of OpenSSL. - However, when pyca/cryptography is compiled as a manylinux wheel - that compiled location can potentially be wrong. So, like Go, we - will try a predefined set of paths and attempt to load roots - from there. - - :return: None - """ - for cafile in file_path: - if os.path.isfile(cafile): - self.load_verify_locations(cafile) - break - - for capath in dir_path: - if os.path.isdir(capath): - self.load_verify_locations(None, capath) - break - - @_require_not_used - def use_certificate_chain_file(self, certfile: _StrOrBytesPath) -> None: - """ - Load a certificate chain from a file. - - :param certfile: The name of the certificate chain file (``bytes`` or - ``str``). Must be PEM encoded. - - :return: None - """ - certfile = _path_bytes(certfile) - - result = _lib.SSL_CTX_use_certificate_chain_file( - self._context, certfile - ) - if not result: - _raise_current_error() - - @_require_not_used - def use_certificate_file( - self, certfile: _StrOrBytesPath, filetype: int = FILETYPE_PEM - ) -> None: - """ - Load a certificate from a file - - :param certfile: The name of the certificate file (``bytes`` or - ``str``). - :param filetype: (optional) The encoding of the file, which is either - :const:`FILETYPE_PEM` or :const:`FILETYPE_ASN1`. The default is - :const:`FILETYPE_PEM`. 
- - :return: None - """ - certfile = _path_bytes(certfile) - if not isinstance(filetype, int): - raise TypeError("filetype must be an integer") - - use_result = _lib.SSL_CTX_use_certificate_file( - self._context, certfile, filetype - ) - if not use_result: - _raise_current_error() - - @_require_not_used - def use_certificate(self, cert: X509 | x509.Certificate) -> None: - """ - Load a certificate from a X509 object - - :param cert: The X509 object - :return: None - """ - # Mirrored at Connection.use_certificate - if not isinstance(cert, X509): - cert = X509.from_cryptography(cert) - else: - warnings.warn( - ( - "Passing pyOpenSSL X509 objects is deprecated. You " - "should use a cryptography.x509.Certificate instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - use_result = _lib.SSL_CTX_use_certificate(self._context, cert._x509) - if not use_result: - _raise_current_error() - - @_require_not_used - def add_extra_chain_cert(self, certobj: X509 | x509.Certificate) -> None: - """ - Add certificate to chain - - :param certobj: The X509 certificate object to add to the chain - :return: None - """ - if not isinstance(certobj, X509): - certobj = X509.from_cryptography(certobj) - else: - warnings.warn( - ( - "Passing pyOpenSSL X509 objects is deprecated. You " - "should use a cryptography.x509.Certificate instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - copy = _lib.X509_dup(certobj._x509) - add_result = _lib.SSL_CTX_add_extra_chain_cert(self._context, copy) - if not add_result: - # TODO: This is untested. - _lib.X509_free(copy) - _raise_current_error() - - def _raise_passphrase_exception(self) -> None: - if self._passphrase_helper is not None: - self._passphrase_helper.raise_if_problem(Error) - - _raise_current_error() - - @_require_not_used - def use_privatekey_file( - self, keyfile: _StrOrBytesPath, filetype: int = FILETYPE_PEM - ) -> None: - """ - Load a private key from a file - - :param keyfile: The name of the key file (``bytes`` or ``str``) - :param filetype: (optional) The encoding of the file, which is either - :const:`FILETYPE_PEM` or :const:`FILETYPE_ASN1`. The default is - :const:`FILETYPE_PEM`. - - :return: None - """ - keyfile = _path_bytes(keyfile) - - if not isinstance(filetype, int): - raise TypeError("filetype must be an integer") - - use_result = _lib.SSL_CTX_use_PrivateKey_file( - self._context, keyfile, filetype - ) - if not use_result: - self._raise_passphrase_exception() - - @_require_not_used - def use_privatekey(self, pkey: _PrivateKey | PKey) -> None: - """ - Load a private key from a PKey object - - :param pkey: The PKey object - :return: None - """ - # Mirrored at Connection.use_privatekey - if not isinstance(pkey, PKey): - pkey = PKey.from_cryptography_key(pkey) - else: - warnings.warn( - ( - "Passing pyOpenSSL PKey objects is deprecated. You " - "should use a cryptography private key instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - use_result = _lib.SSL_CTX_use_PrivateKey(self._context, pkey._pkey) - if not use_result: - self._raise_passphrase_exception() - - def check_privatekey(self) -> None: - """ - Check if the private key (loaded with :meth:`use_privatekey`) matches - the certificate (loaded with :meth:`use_certificate`) - - :return: :data:`None` (raises :exc:`Error` if something's wrong) - """ - if not _lib.SSL_CTX_check_private_key(self._context): - _raise_current_error() - - @_require_not_used - def load_client_ca(self, cafile: bytes) -> None: - """ - Load the trusted certificates that will be sent to the client. 
Does - not actually imply any of the certificates are trusted; that must be - configured separately. - - :param bytes cafile: The path to a certificates file in PEM format. - :return: None - """ - ca_list = _lib.SSL_load_client_CA_file( - _text_to_bytes_and_warn("cafile", cafile) - ) - _openssl_assert(ca_list != _ffi.NULL) - _lib.SSL_CTX_set_client_CA_list(self._context, ca_list) - - @_require_not_used - def set_session_id(self, buf: bytes) -> None: - """ - Set the session id to *buf* within which a session can be reused for - this Context object. This is needed when doing session resumption, - because there is no way for a stored session to know which Context - object it is associated with. - - :param bytes buf: The session id. - - :returns: None - """ - buf = _text_to_bytes_and_warn("buf", buf) - _openssl_assert( - _lib.SSL_CTX_set_session_id_context(self._context, buf, len(buf)) - == 1 - ) - - @_require_not_used - def set_session_cache_mode(self, mode: int) -> int: - """ - Set the behavior of the session cache used by all connections using - this Context. The previously set mode is returned. See - :const:`SESS_CACHE_*` for details about particular modes. - - :param mode: One or more of the SESS_CACHE_* flags (combine using - bitwise or) - :returns: The previously set caching mode. - - .. versionadded:: 0.14 - """ - if not isinstance(mode, int): - raise TypeError("mode must be an integer") - - return _lib.SSL_CTX_set_session_cache_mode(self._context, mode) - - def get_session_cache_mode(self) -> int: - """ - Get the current session cache mode. - - :returns: The currently used cache mode. - - .. versionadded:: 0.14 - """ - return _lib.SSL_CTX_get_session_cache_mode(self._context) - - @_require_not_used - def set_verify( - self, mode: int, callback: _VerifyCallback | None = None - ) -> None: - """ - Set the verification flags for this Context object to *mode* and - specify that *callback* should be used for verification callbacks. - - :param mode: The verify mode, this should be one of - :const:`VERIFY_NONE` and :const:`VERIFY_PEER`. If - :const:`VERIFY_PEER` is used, *mode* can be OR:ed with - :const:`VERIFY_FAIL_IF_NO_PEER_CERT` and - :const:`VERIFY_CLIENT_ONCE` to further control the behaviour. - :param callback: The optional Python verification callback to use. - This should take five arguments: A Connection object, an X509 - object, and three integer variables, which are in turn potential - error number, error depth and return code. *callback* should - return True if verification passes and False otherwise. - If omitted, OpenSSL's default verification is used. - :return: None - - See SSL_CTX_set_verify(3SSL) for further details. - """ - if not isinstance(mode, int): - raise TypeError("mode must be an integer") - - if callback is None: - self._verify_helper = None - self._verify_callback = None - _lib.SSL_CTX_set_verify(self._context, mode, _ffi.NULL) - else: - if not callable(callback): - raise TypeError("callback must be callable") - - self._verify_helper = _VerifyHelper(callback) - self._verify_callback = self._verify_helper.callback - _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) - - @_require_not_used - def set_verify_depth(self, depth: int) -> None: - """ - Set the maximum depth for the certificate chain verification that shall - be allowed for this Context object. 
- - :param depth: An integer specifying the verify depth - :return: None - """ - if not isinstance(depth, int): - raise TypeError("depth must be an integer") - - _lib.SSL_CTX_set_verify_depth(self._context, depth) - - def get_verify_mode(self) -> int: - """ - Retrieve the Context object's verify mode, as set by - :meth:`set_verify`. - - :return: The verify mode - """ - return _lib.SSL_CTX_get_verify_mode(self._context) - - def get_verify_depth(self) -> int: - """ - Retrieve the Context object's verify depth, as set by - :meth:`set_verify_depth`. - - :return: The verify depth - """ - return _lib.SSL_CTX_get_verify_depth(self._context) - - @_require_not_used - def load_tmp_dh(self, dhfile: _StrOrBytesPath) -> None: - """ - Load parameters for Ephemeral Diffie-Hellman - - :param dhfile: The file to load EDH parameters from (``bytes`` or - ``str``). - - :return: None - """ - dhfile = _path_bytes(dhfile) - - bio = _lib.BIO_new_file(dhfile, b"r") - if bio == _ffi.NULL: - _raise_current_error() - bio = _ffi.gc(bio, _lib.BIO_free) - - dh = _lib.PEM_read_bio_DHparams(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) - dh = _ffi.gc(dh, _lib.DH_free) - res = _lib.SSL_CTX_set_tmp_dh(self._context, dh) - _openssl_assert(res == 1) - - @_require_not_used - def set_tmp_ecdh(self, curve: _EllipticCurve | ec.EllipticCurve) -> None: - """ - Select a curve to use for ECDHE key exchange. - - :param curve: A curve instance from cryptography - (:class:`~cryptogragraphy.hazmat.primitives.asymmetric.ec.EllipticCurve`). - Alternatively (deprecated) a curve object from either - :meth:`OpenSSL.crypto.get_elliptic_curve` or - :meth:`OpenSSL.crypto.get_elliptic_curves`. - - :return: None - """ - - if isinstance(curve, _EllipticCurve): - warnings.warn( - ( - "Passing pyOpenSSL elliptic curves to set_tmp_ecdh is " - "deprecated. You should use cryptography's elliptic curve " - "types instead." - ), - DeprecationWarning, - stacklevel=2, - ) - _lib.SSL_CTX_set_tmp_ecdh(self._context, curve._to_EC_KEY()) - else: - name = curve.name - if name == "secp192r1": - name = "prime192v1" - elif name == "secp256r1": - name = "prime256v1" - nid = _lib.OBJ_txt2nid(name.encode()) - if nid == _lib.NID_undef: - _raise_current_error() - - ec = _lib.EC_KEY_new_by_curve_name(nid) - _openssl_assert(ec != _ffi.NULL) - ec = _ffi.gc(ec, _lib.EC_KEY_free) - _lib.SSL_CTX_set_tmp_ecdh(self._context, ec) - - @_require_not_used - def set_cipher_list(self, cipher_list: bytes) -> None: - """ - Set the list of ciphers to be used in this context. - - See the OpenSSL manual for more information (e.g. - :manpage:`ciphers(1)`). - - Note this API does not change the cipher suites used in TLS 1.3 - Use `set_tls13_ciphersuites` for that. - - :param bytes cipher_list: An OpenSSL cipher string. - :return: None - """ - cipher_list = _text_to_bytes_and_warn("cipher_list", cipher_list) - - if not isinstance(cipher_list, bytes): - raise TypeError("cipher_list must be a byte string.") - - _openssl_assert( - _lib.SSL_CTX_set_cipher_list(self._context, cipher_list) == 1 - ) - - @_require_not_used - def set_tls13_ciphersuites(self, ciphersuites: bytes) -> None: - """ - Set the list of TLS 1.3 ciphers to be used in this context. - OpenSSL maintains a separate list of TLS 1.3+ ciphers to - ciphers for TLS 1.2 and lowers. - - See the OpenSSL manual for more information (e.g. - :manpage:`ciphers(1)`). - - :param bytes ciphersuites: An OpenSSL cipher string containing - TLS 1.3+ ciphersuites. - :return: None - - .. 
versionadded:: 25.2.0 - """ - if not isinstance(ciphersuites, bytes): - raise TypeError("ciphersuites must be a byte string.") - - _openssl_assert( - _lib.SSL_CTX_set_ciphersuites(self._context, ciphersuites) == 1 - ) - - @_require_not_used - def set_client_ca_list( - self, certificate_authorities: Sequence[X509Name] - ) -> None: - """ - Set the list of preferred client certificate signers for this server - context. - - This list of certificate authorities will be sent to the client when - the server requests a client certificate. - - :param certificate_authorities: a sequence of X509Names. - :return: None - - .. versionadded:: 0.10 - """ - name_stack = _lib.sk_X509_NAME_new_null() - _openssl_assert(name_stack != _ffi.NULL) - - try: - for ca_name in certificate_authorities: - if not isinstance(ca_name, X509Name): - raise TypeError( - f"client CAs must be X509Name objects, not " - f"{type(ca_name).__name__} objects" - ) - copy = _lib.X509_NAME_dup(ca_name._name) - _openssl_assert(copy != _ffi.NULL) - push_result = _lib.sk_X509_NAME_push(name_stack, copy) - if not push_result: - _lib.X509_NAME_free(copy) - _raise_current_error() - except Exception: - _lib.sk_X509_NAME_free(name_stack) - raise - - _lib.SSL_CTX_set_client_CA_list(self._context, name_stack) - - @_require_not_used - def add_client_ca( - self, certificate_authority: X509 | x509.Certificate - ) -> None: - """ - Add the CA certificate to the list of preferred signers for this - context. - - The list of certificate authorities will be sent to the client when the - server requests a client certificate. - - :param certificate_authority: certificate authority's X509 certificate. - :return: None - - .. versionadded:: 0.10 - """ - if not isinstance(certificate_authority, X509): - certificate_authority = X509.from_cryptography( - certificate_authority - ) - else: - warnings.warn( - ( - "Passing pyOpenSSL X509 objects is deprecated. You " - "should use a cryptography.x509.Certificate instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - add_result = _lib.SSL_CTX_add_client_CA( - self._context, certificate_authority._x509 - ) - _openssl_assert(add_result == 1) - - @_require_not_used - def set_timeout(self, timeout: int) -> None: - """ - Set the timeout for newly created sessions for this Context object to - *timeout*. The default value is 300 seconds. See the OpenSSL manual - for more information (e.g. :manpage:`SSL_CTX_set_timeout(3)`). - - :param timeout: The timeout in (whole) seconds - :return: The previous session timeout - """ - if not isinstance(timeout, int): - raise TypeError("timeout must be an integer") - - return _lib.SSL_CTX_set_timeout(self._context, timeout) - - def get_timeout(self) -> int: - """ - Retrieve session timeout, as set by :meth:`set_timeout`. The default - is 300 seconds. - - :return: The session timeout - """ - return _lib.SSL_CTX_get_timeout(self._context) - - @_require_not_used - def set_info_callback( - self, callback: Callable[[Connection, int, int], None] - ) -> None: - """ - Set the information callback to *callback*. This function will be - called from time to time during SSL handshakes. - - :param callback: The Python callback to use. This should take three - arguments: a Connection object and two integers. The first integer - specifies where in the SSL handshake the function was called, and - the other the return code from a (possibly failed) internal - function call. 
- :return: None - """ - - @wraps(callback) - def wrapper(ssl, where, return_code): # type: ignore[no-untyped-def] - callback(Connection._reverse_mapping[ssl], where, return_code) - - self._info_callback = _ffi.callback( - "void (*)(const SSL *, int, int)", wrapper - ) - _lib.SSL_CTX_set_info_callback(self._context, self._info_callback) - - @_requires_keylog - @_require_not_used - def set_keylog_callback( - self, callback: Callable[[Connection, bytes], None] - ) -> None: - """ - Set the TLS key logging callback to *callback*. This function will be - called whenever TLS key material is generated or received, in order - to allow applications to store this keying material for debugging - purposes. - - :param callback: The Python callback to use. This should take two - arguments: a Connection object and a bytestring that contains - the key material in the format used by NSS for its SSLKEYLOGFILE - debugging output. - :return: None - """ - - @wraps(callback) - def wrapper(ssl, line): # type: ignore[no-untyped-def] - line = _ffi.string(line) - callback(Connection._reverse_mapping[ssl], line) - - self._keylog_callback = _ffi.callback( - "void (*)(const SSL *, const char *)", wrapper - ) - _lib.SSL_CTX_set_keylog_callback(self._context, self._keylog_callback) - - def get_app_data(self) -> Any: - """ - Get the application data (supplied via :meth:`set_app_data()`) - - :return: The application data - """ - return self._app_data - - @_require_not_used - def set_app_data(self, data: Any) -> None: - """ - Set the application data (will be returned from get_app_data()) - - :param data: Any Python object - :return: None - """ - self._app_data = data - - def get_cert_store(self) -> X509Store | None: - """ - Get the certificate store for the context. This can be used to add - "trusted" certificates without using the - :meth:`load_verify_locations` method. - - :return: A X509Store object or None if it does not have one. - """ - store = _lib.SSL_CTX_get_cert_store(self._context) - if store == _ffi.NULL: - # TODO: This is untested. - return None - - pystore = X509Store.__new__(X509Store) - pystore._store = store - return pystore - - @_require_not_used - def set_options(self, options: int) -> int: - """ - Add options. Options set before are not cleared! - This method should be used with the :const:`OP_*` constants. - - :param options: The options to add. - :return: The new option bitmask. - """ - if not isinstance(options, int): - raise TypeError("options must be an integer") - - return _lib.SSL_CTX_set_options(self._context, options) - - @_require_not_used - def set_mode(self, mode: int) -> int: - """ - Add modes via bitmask. Modes set before are not cleared! This method - should be used with the :const:`MODE_*` constants. - - :param mode: The mode to add. - :return: The new mode bitmask. - """ - if not isinstance(mode, int): - raise TypeError("mode must be an integer") - - return _lib.SSL_CTX_set_mode(self._context, mode) - - @_require_not_used - def clear_mode(self, mode_to_clear: int) -> int: - """ - Modes previously set cannot be overwritten without being - cleared first. This method should be used to clear existing modes. - """ - return _lib.SSL_CTX_clear_mode(self._context, mode_to_clear) - - @_require_not_used - def set_tlsext_servername_callback( - self, callback: Callable[[Connection], None] - ) -> None: - """ - Specify a callback function to be called when clients specify a server - name. - - :param callback: The callback function. It will be invoked with one - argument, the Connection instance. 
- - .. versionadded:: 0.13 - """ - - @wraps(callback) - def wrapper(ssl, alert, arg): # type: ignore[no-untyped-def] - callback(Connection._reverse_mapping[ssl]) - return 0 - - self._tlsext_servername_callback = _ffi.callback( - "int (*)(SSL *, int *, void *)", wrapper - ) - _lib.SSL_CTX_set_tlsext_servername_callback( - self._context, self._tlsext_servername_callback - ) - - @_require_not_used - def set_tlsext_use_srtp(self, profiles: bytes) -> None: - """ - Enable support for negotiating SRTP keying material. - - :param bytes profiles: A colon delimited list of protection profile - names, like ``b'SRTP_AES128_CM_SHA1_80:SRTP_AES128_CM_SHA1_32'``. - :return: None - """ - if not isinstance(profiles, bytes): - raise TypeError("profiles must be a byte string.") - - _openssl_assert( - _lib.SSL_CTX_set_tlsext_use_srtp(self._context, profiles) == 0 - ) - - @_require_not_used - def set_alpn_protos(self, protos: list[bytes]) -> None: - """ - Specify the protocols that the client is prepared to speak after the - TLS connection has been negotiated using Application Layer Protocol - Negotiation. - - :param protos: A list of the protocols to be offered to the server. - This list should be a Python list of bytestrings representing the - protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. - """ - # Different versions of OpenSSL are inconsistent about how they handle - # empty proto lists (see #1043), so we avoid the problem entirely by - # rejecting them ourselves. - if not protos: - raise ValueError("at least one protocol must be specified") - - # Take the list of protocols and join them together, prefixing them - # with their lengths. - protostr = b"".join( - chain.from_iterable((bytes((len(p),)), p) for p in protos) - ) - - # Build a C string from the list. We don't need to save this off - # because OpenSSL immediately copies the data out. - input_str = _ffi.new("unsigned char[]", protostr) - - # https://www.openssl.org/docs/man1.1.0/man3/SSL_CTX_set_alpn_protos.html: - # SSL_CTX_set_alpn_protos() and SSL_set_alpn_protos() - # return 0 on success, and non-0 on failure. - # WARNING: these functions reverse the return value convention. - _openssl_assert( - _lib.SSL_CTX_set_alpn_protos( - self._context, input_str, len(protostr) - ) - == 0 - ) - - @_require_not_used - def set_alpn_select_callback(self, callback: _ALPNSelectCallback) -> None: - """ - Specify a callback function that will be called on the server when a - client offers protocols using ALPN. - - :param callback: The callback function. It will be invoked with two - arguments: the Connection, and a list of offered protocols as - bytestrings, e.g ``[b'http/1.1', b'spdy/2']``. It can return - one of those bytestrings to indicate the chosen protocol, the - empty bytestring to terminate the TLS connection, or the - :py:obj:`NO_OVERLAPPING_PROTOCOLS` to indicate that no offered - protocol was selected, but that the connection should not be - aborted. - """ - self._alpn_select_helper = _ALPNSelectHelper(callback) - self._alpn_select_callback = self._alpn_select_helper.callback - _lib.SSL_CTX_set_alpn_select_cb( - self._context, self._alpn_select_callback, _ffi.NULL - ) - - def _set_ocsp_callback( - self, - helper: _OCSPClientCallbackHelper | _OCSPServerCallbackHelper, - data: Any | None, - ) -> None: - """ - This internal helper does the common work for - ``set_ocsp_server_callback`` and ``set_ocsp_client_callback``, which is - almost all of it. 
- """ - self._ocsp_helper = helper - self._ocsp_callback = helper.callback - if data is None: - self._ocsp_data = _ffi.NULL - else: - self._ocsp_data = _ffi.new_handle(data) - - rc = _lib.SSL_CTX_set_tlsext_status_cb( - self._context, self._ocsp_callback - ) - _openssl_assert(rc == 1) - rc = _lib.SSL_CTX_set_tlsext_status_arg(self._context, self._ocsp_data) - _openssl_assert(rc == 1) - - @_require_not_used - def set_ocsp_server_callback( - self, - callback: _OCSPServerCallback[_T], - data: _T | None = None, - ) -> None: - """ - Set a callback to provide OCSP data to be stapled to the TLS handshake - on the server side. - - :param callback: The callback function. It will be invoked with two - arguments: the Connection, and the optional arbitrary data you have - provided. The callback must return a bytestring that contains the - OCSP data to staple to the handshake. If no OCSP data is available - for this connection, return the empty bytestring. - :param data: Some opaque data that will be passed into the callback - function when called. This can be used to avoid needing to do - complex data lookups or to keep track of what context is being - used. This parameter is optional. - """ - helper = _OCSPServerCallbackHelper(callback) - self._set_ocsp_callback(helper, data) - - @_require_not_used - def set_ocsp_client_callback( - self, - callback: _OCSPClientCallback[_T], - data: _T | None = None, - ) -> None: - """ - Set a callback to validate OCSP data stapled to the TLS handshake on - the client side. - - :param callback: The callback function. It will be invoked with three - arguments: the Connection, a bytestring containing the stapled OCSP - assertion, and the optional arbitrary data you have provided. The - callback must return a boolean that indicates the result of - validating the OCSP data: ``True`` if the OCSP data is valid and - the certificate can be trusted, or ``False`` if either the OCSP - data is invalid or the certificate has been revoked. - :param data: Some opaque data that will be passed into the callback - function when called. This can be used to avoid needing to do - complex data lookups or to keep track of what context is being - used. This parameter is optional. - """ - helper = _OCSPClientCallbackHelper(callback) - self._set_ocsp_callback(helper, data) - - @_require_not_used - def set_cookie_generate_callback( - self, callback: _CookieGenerateCallback - ) -> None: - self._cookie_generate_helper = _CookieGenerateCallbackHelper(callback) - _lib.SSL_CTX_set_cookie_generate_cb( - self._context, - self._cookie_generate_helper.callback, - ) - - @_require_not_used - def set_cookie_verify_callback( - self, callback: _CookieVerifyCallback - ) -> None: - self._cookie_verify_helper = _CookieVerifyCallbackHelper(callback) - _lib.SSL_CTX_set_cookie_verify_cb( - self._context, - self._cookie_verify_helper.callback, - ) - - -class Connection: - _reverse_mapping: typing.MutableMapping[Any, Connection] = ( - WeakValueDictionary() - ) - - def __init__( - self, context: Context, socket: socket.socket | None = None - ) -> None: - """ - Create a new Connection object, using the given OpenSSL.SSL.Context - instance and socket. 
- - :param context: An SSL Context to use for this connection - :param socket: The socket to use for transport layer - """ - if not isinstance(context, Context): - raise TypeError("context must be a Context instance") - - context._used = True - - ssl = _lib.SSL_new(context._context) - self._ssl = _ffi.gc(ssl, _lib.SSL_free) - # We set SSL_MODE_AUTO_RETRY to handle situations where OpenSSL returns - # an SSL_ERROR_WANT_READ when processing a non-application data packet - # even though there is still data on the underlying transport. - # See https://github.com/openssl/openssl/issues/6234 for more details. - _lib.SSL_set_mode(self._ssl, _lib.SSL_MODE_AUTO_RETRY) - self._context = context - self._app_data = None - - # References to strings used for Application Layer Protocol - # Negotiation. These strings get copied at some point but it's well - # after the callback returns, so we have to hang them somewhere to - # avoid them getting freed. - self._alpn_select_callback_args: Any = None - - # Reference the verify_callback of the Context. This ensures that if - # set_verify is called again after the SSL object has been created we - # do not point to a dangling reference - self._verify_helper = context._verify_helper - self._verify_callback = context._verify_callback - - # And likewise for the cookie callbacks - self._cookie_generate_helper = context._cookie_generate_helper - self._cookie_verify_helper = context._cookie_verify_helper - - self._reverse_mapping[self._ssl] = self - - if socket is None: - self._socket = None - # Don't set up any gc for these, SSL_free will take care of them. - self._into_ssl = _lib.BIO_new(_lib.BIO_s_mem()) - _openssl_assert(self._into_ssl != _ffi.NULL) - - self._from_ssl = _lib.BIO_new(_lib.BIO_s_mem()) - _openssl_assert(self._from_ssl != _ffi.NULL) - - _lib.SSL_set_bio(self._ssl, self._into_ssl, self._from_ssl) - else: - self._into_ssl = None - self._from_ssl = None - self._socket = socket - set_result = _lib.SSL_set_fd( - self._ssl, _asFileDescriptor(self._socket) - ) - _openssl_assert(set_result == 1) - - def __getattr__(self, name: str) -> Any: - """ - Look up attributes on the wrapped socket object if they are not found - on the Connection object. - """ - if self._socket is None: - raise AttributeError( - f"'{self.__class__.__name__}' object has no attribute '{name}'" - ) - else: - return getattr(self._socket, name) - - def _raise_ssl_error(self, ssl: Any, result: int) -> None: - if self._context._verify_helper is not None: - self._context._verify_helper.raise_if_problem() - if self._context._alpn_select_helper is not None: - self._context._alpn_select_helper.raise_if_problem() - if self._context._ocsp_helper is not None: - self._context._ocsp_helper.raise_if_problem() - - error = _lib.SSL_get_error(ssl, result) - if error == _lib.SSL_ERROR_WANT_READ: - raise WantReadError() - elif error == _lib.SSL_ERROR_WANT_WRITE: - raise WantWriteError() - elif error == _lib.SSL_ERROR_ZERO_RETURN: - raise ZeroReturnError() - elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP: - # TODO: This is untested. - raise WantX509LookupError() - elif error == _lib.SSL_ERROR_SYSCALL: - if platform == "win32": - errno = _ffi.getwinerror()[0] - else: - errno = _ffi.errno - if _lib.ERR_peek_error() == 0 or errno != 0: - if result < 0 and errno != 0: - raise SysCallError(errno, errorcode.get(errno)) - raise SysCallError(-1, "Unexpected EOF") - else: - # TODO: This is untested, but I think twisted hits it? 
- _raise_current_error() - elif error == _lib.SSL_ERROR_SSL and _lib.ERR_peek_error() != 0: - # In 3.0.x an unexpected EOF no longer triggers syscall error - # but we want to maintain compatibility so we check here and - # raise syscall if it is an EOF. Since we're not actually sure - # what else could raise SSL_ERROR_SSL we check for the presence - # of the OpenSSL 3 constant SSL_R_UNEXPECTED_EOF_WHILE_READING - # and if it's not present we just raise an error, which matches - # the behavior before we added this elif section - peeked_error = _lib.ERR_peek_error() - reason = _lib.ERR_GET_REASON(peeked_error) - if _lib.Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING: - _openssl_assert( - reason == _lib.SSL_R_UNEXPECTED_EOF_WHILE_READING - ) - _lib.ERR_clear_error() - raise SysCallError(-1, "Unexpected EOF") - else: - _raise_current_error() - elif error == _lib.SSL_ERROR_NONE: - pass - else: - _raise_current_error() - - def get_context(self) -> Context: - """ - Retrieve the :class:`Context` object associated with this - :class:`Connection`. - """ - return self._context - - def set_context(self, context: Context) -> None: - """ - Switch this connection to a new session context. - - :param context: A :class:`Context` instance giving the new session - context to use. - """ - if not isinstance(context, Context): - raise TypeError("context must be a Context instance") - - _lib.SSL_set_SSL_CTX(self._ssl, context._context) - self._context = context - self._context._used = True - - def get_servername(self) -> bytes | None: - """ - Retrieve the servername extension value if provided in the client hello - message, or None if there wasn't one. - - :return: A byte string giving the server name or :data:`None`. - - .. versionadded:: 0.13 - """ - name = _lib.SSL_get_servername( - self._ssl, _lib.TLSEXT_NAMETYPE_host_name - ) - if name == _ffi.NULL: - return None - - return _ffi.string(name) - - def set_verify( - self, mode: int, callback: _VerifyCallback | None = None - ) -> None: - """ - Override the Context object's verification flags for this specific - connection. See :py:meth:`Context.set_verify` for details. - """ - if not isinstance(mode, int): - raise TypeError("mode must be an integer") - - if callback is None: - self._verify_helper = None - self._verify_callback = None - _lib.SSL_set_verify(self._ssl, mode, _ffi.NULL) - else: - if not callable(callback): - raise TypeError("callback must be callable") - - self._verify_helper = _VerifyHelper(callback) - self._verify_callback = self._verify_helper.callback - _lib.SSL_set_verify(self._ssl, mode, self._verify_callback) - - def get_verify_mode(self) -> int: - """ - Retrieve the Connection object's verify mode, as set by - :meth:`set_verify`. - - :return: The verify mode - """ - return _lib.SSL_get_verify_mode(self._ssl) - - def use_certificate(self, cert: X509 | x509.Certificate) -> None: - """ - Load a certificate from a X509 object - - :param cert: The X509 object - :return: None - """ - # Mirrored from Context.use_certificate - if not isinstance(cert, X509): - cert = X509.from_cryptography(cert) - else: - warnings.warn( - ( - "Passing pyOpenSSL X509 objects is deprecated. You " - "should use a cryptography.x509.Certificate instead." 
- ), - DeprecationWarning, - stacklevel=2, - ) - - use_result = _lib.SSL_use_certificate(self._ssl, cert._x509) - if not use_result: - _raise_current_error() - - def use_privatekey(self, pkey: _PrivateKey | PKey) -> None: - """ - Load a private key from a PKey object - - :param pkey: The PKey object - :return: None - """ - # Mirrored from Context.use_privatekey - if not isinstance(pkey, PKey): - pkey = PKey.from_cryptography_key(pkey) - else: - warnings.warn( - ( - "Passing pyOpenSSL PKey objects is deprecated. You " - "should use a cryptography private key instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - use_result = _lib.SSL_use_PrivateKey(self._ssl, pkey._pkey) - if not use_result: - self._context._raise_passphrase_exception() - - def set_ciphertext_mtu(self, mtu: int) -> None: - """ - For DTLS, set the maximum UDP payload size (*not* including IP/UDP - overhead). - - Note that you might have to set :data:`OP_NO_QUERY_MTU` to prevent - OpenSSL from spontaneously clearing this. - - :param mtu: An integer giving the maximum transmission unit. - - .. versionadded:: 21.1 - """ - _lib.SSL_set_mtu(self._ssl, mtu) - - def get_cleartext_mtu(self) -> int: - """ - For DTLS, get the maximum size of unencrypted data you can pass to - :meth:`write` without exceeding the MTU (as passed to - :meth:`set_ciphertext_mtu`). - - :return: The effective MTU as an integer. - - .. versionadded:: 21.1 - """ - - if not hasattr(_lib, "DTLS_get_data_mtu"): - raise NotImplementedError("requires OpenSSL 1.1.1 or better") - return _lib.DTLS_get_data_mtu(self._ssl) - - def set_tlsext_host_name(self, name: bytes) -> None: - """ - Set the value of the servername extension to send in the client hello. - - :param name: A byte string giving the name. - - .. versionadded:: 0.13 - """ - if not isinstance(name, bytes): - raise TypeError("name must be a byte string") - elif b"\0" in name: - raise TypeError("name must not contain NUL byte") - - # XXX I guess this can fail sometimes? - _lib.SSL_set_tlsext_host_name(self._ssl, name) - - def pending(self) -> int: - """ - Get the number of bytes that can be safely read from the SSL buffer - (**not** the underlying transport buffer). - - :return: The number of bytes available in the receive buffer. - """ - return _lib.SSL_pending(self._ssl) - - def send(self, buf: _Buffer, flags: int = 0) -> int: - """ - Send data on the connection. NOTE: If you get one of the WantRead, - WantWrite or WantX509Lookup exceptions on this, you have to call the - method again with the SAME buffer. - - :param buf: The string, buffer or memoryview to send - :param flags: (optional) Included for compatibility with the socket - API, the value is ignored - :return: The number of bytes written - """ - # Backward compatibility - buf = _text_to_bytes_and_warn("buf", buf) - - with _ffi.from_buffer(buf) as data: - # check len(buf) instead of len(data) for testability - if len(buf) > 2147483647: - raise ValueError( - "Cannot send more than 2**31-1 bytes at once." - ) - - result = _lib.SSL_write(self._ssl, data, len(data)) - self._raise_ssl_error(self._ssl, result) - - return result - - write = send - - def sendall(self, buf: _Buffer, flags: int = 0) -> int: - """ - Send "all" data on the connection. This calls send() repeatedly until - all data is sent. If an error occurs, it's impossible to tell how much - data has been sent. 
- - :param buf: The string, buffer or memoryview to send - :param flags: (optional) Included for compatibility with the socket - API, the value is ignored - :return: The number of bytes written - """ - buf = _text_to_bytes_and_warn("buf", buf) - - with _ffi.from_buffer(buf) as data: - left_to_send = len(buf) - total_sent = 0 - - while left_to_send: - # SSL_write's num arg is an int, - # so we cannot send more than 2**31-1 bytes at once. - result = _lib.SSL_write( - self._ssl, data + total_sent, min(left_to_send, 2147483647) - ) - self._raise_ssl_error(self._ssl, result) - total_sent += result - left_to_send -= result - - return total_sent - - def recv(self, bufsiz: int, flags: int | None = None) -> bytes: - """ - Receive data on the connection. - - :param bufsiz: The maximum number of bytes to read - :param flags: (optional) The only supported flag is ``MSG_PEEK``, - all other flags are ignored. - :return: The string read from the Connection - """ - buf = _no_zero_allocator("char[]", bufsiz) - if flags is not None and flags & socket.MSG_PEEK: - result = _lib.SSL_peek(self._ssl, buf, bufsiz) - else: - result = _lib.SSL_read(self._ssl, buf, bufsiz) - self._raise_ssl_error(self._ssl, result) - return _ffi.buffer(buf, result)[:] - - read = recv - - def recv_into( - self, - buffer: Any, # collections.abc.Buffer once we use Python 3.12+ - nbytes: int | None = None, - flags: int | None = None, - ) -> int: - """ - Receive data on the connection and copy it directly into the provided - buffer, rather than creating a new string. - - :param buffer: The buffer to copy into. - :param nbytes: (optional) The maximum number of bytes to read into the - buffer. If not present, defaults to the size of the buffer. If - larger than the size of the buffer, is reduced to the size of the - buffer. - :param flags: (optional) The only supported flag is ``MSG_PEEK``, - all other flags are ignored. - :return: The number of bytes read into the buffer. - """ - if nbytes is None: - nbytes = len(buffer) - else: - nbytes = min(nbytes, len(buffer)) - - # We need to create a temporary buffer. This is annoying, it would be - # better if we could pass memoryviews straight into the SSL_read call, - # but right now we can't. Revisit this if CFFI gets that ability. - buf = _no_zero_allocator("char[]", nbytes) - if flags is not None and flags & socket.MSG_PEEK: - result = _lib.SSL_peek(self._ssl, buf, nbytes) - else: - result = _lib.SSL_read(self._ssl, buf, nbytes) - self._raise_ssl_error(self._ssl, result) - - # This strange line is all to avoid a memory copy. The buffer protocol - # should allow us to assign a CFFI buffer to the LHS of this line, but - # on CPython 3.3+ that segfaults. As a workaround, we can temporarily - # wrap it in a memoryview. - buffer[:result] = memoryview(_ffi.buffer(buf, result)) - - return result - - def _handle_bio_errors(self, bio: Any, result: int) -> typing.NoReturn: - if _lib.BIO_should_retry(bio): - if _lib.BIO_should_read(bio): - raise WantReadError() - elif _lib.BIO_should_write(bio): - # TODO: This is untested. - raise WantWriteError() - elif _lib.BIO_should_io_special(bio): - # TODO: This is untested. I think io_special means the socket - # BIO has a not-yet connected socket. - raise ValueError("BIO_should_io_special") - else: - # TODO: This is untested. - raise ValueError("unknown bio failure") - else: - # TODO: This is untested. 
- _raise_current_error() - - def bio_read(self, bufsiz: int) -> bytes: - """ - If the Connection was created with a memory BIO, this method can be - used to read bytes from the write end of that memory BIO. Many - Connection methods will add bytes which must be read in this manner or - the buffer will eventually fill up and the Connection will be able to - take no further actions. - - :param bufsiz: The maximum number of bytes to read - :return: The string read. - """ - if self._from_ssl is None: - raise TypeError("Connection sock was not None") - - if not isinstance(bufsiz, int): - raise TypeError("bufsiz must be an integer") - - buf = _no_zero_allocator("char[]", bufsiz) - result = _lib.BIO_read(self._from_ssl, buf, bufsiz) - if result <= 0: - self._handle_bio_errors(self._from_ssl, result) - - return _ffi.buffer(buf, result)[:] - - def bio_write(self, buf: _Buffer) -> int: - """ - If the Connection was created with a memory BIO, this method can be - used to add bytes to the read end of that memory BIO. The Connection - can then read the bytes (for example, in response to a call to - :meth:`recv`). - - :param buf: The string to put into the memory BIO. - :return: The number of bytes written - """ - buf = _text_to_bytes_and_warn("buf", buf) - - if self._into_ssl is None: - raise TypeError("Connection sock was not None") - - with _ffi.from_buffer(buf) as data: - result = _lib.BIO_write(self._into_ssl, data, len(data)) - if result <= 0: - self._handle_bio_errors(self._into_ssl, result) - return result - - def renegotiate(self) -> bool: - """ - Renegotiate the session. - - :return: True if the renegotiation can be started, False otherwise - """ - if not self.renegotiate_pending(): - _openssl_assert(_lib.SSL_renegotiate(self._ssl) == 1) - return True - return False - - def do_handshake(self) -> None: - """ - Perform an SSL handshake (usually called after :meth:`renegotiate` or - one of :meth:`set_accept_state` or :meth:`set_connect_state`). This can - raise the same exceptions as :meth:`send` and :meth:`recv`. - - :return: None. - """ - result = _lib.SSL_do_handshake(self._ssl) - self._raise_ssl_error(self._ssl, result) - - def renegotiate_pending(self) -> bool: - """ - Check if there's a renegotiation in progress, it will return False once - a renegotiation is finished. - - :return: Whether there's a renegotiation in progress - """ - return _lib.SSL_renegotiate_pending(self._ssl) == 1 - - def total_renegotiations(self) -> int: - """ - Find out the total number of renegotiations. - - :return: The number of renegotiations. - """ - return _lib.SSL_total_renegotiations(self._ssl) - - def connect(self, addr: Any) -> None: - """ - Call the :meth:`connect` method of the underlying socket and set up SSL - on the socket, using the :class:`Context` object supplied to this - :class:`Connection` object at creation. - - :param addr: A remote address - :return: What the socket's connect method returns - """ - _lib.SSL_set_connect_state(self._ssl) - return self._socket.connect(addr) # type: ignore[return-value, union-attr] - - def connect_ex(self, addr: Any) -> int: - """ - Call the :meth:`connect_ex` method of the underlying socket and set up - SSL on the socket, using the Context object supplied to this Connection - object at creation. Note that if the :meth:`connect_ex` method of the - socket doesn't return 0, SSL won't be initialized. 
- - :param addr: A remove address - :return: What the socket's connect_ex method returns - """ - connect_ex = self._socket.connect_ex # type: ignore[union-attr] - self.set_connect_state() - return connect_ex(addr) - - def accept(self) -> tuple[Connection, Any]: - """ - Call the :meth:`accept` method of the underlying socket and set up SSL - on the returned socket, using the Context object supplied to this - :class:`Connection` object at creation. - - :return: A *(conn, addr)* pair where *conn* is the new - :class:`Connection` object created, and *address* is as returned by - the socket's :meth:`accept`. - """ - client, addr = self._socket.accept() # type: ignore[union-attr] - conn = Connection(self._context, client) - conn.set_accept_state() - return (conn, addr) - - def DTLSv1_listen(self) -> None: - """ - Call the OpenSSL function DTLSv1_listen on this connection. See the - OpenSSL manual for more details. - - :return: None - """ - # Possible future extension: return the BIO_ADDR in some form. - bio_addr = _lib.BIO_ADDR_new() - try: - result = _lib.DTLSv1_listen(self._ssl, bio_addr) - finally: - _lib.BIO_ADDR_free(bio_addr) - # DTLSv1_listen is weird. A zero return value means 'didn't find a - # ClientHello with valid cookie, but keep trying'. So basically - # WantReadError. But it doesn't work correctly with _raise_ssl_error. - # So we raise it manually instead. - if self._cookie_generate_helper is not None: - self._cookie_generate_helper.raise_if_problem() - if self._cookie_verify_helper is not None: - self._cookie_verify_helper.raise_if_problem() - if result == 0: - raise WantReadError() - if result < 0: - self._raise_ssl_error(self._ssl, result) - - def DTLSv1_get_timeout(self) -> int | None: - """ - Determine when the DTLS SSL object next needs to perform internal - processing due to the passage of time. - - When the returned number of seconds have passed, the - :meth:`DTLSv1_handle_timeout` method needs to be called. - - :return: The time left in seconds before the next timeout or `None` - if no timeout is currently active. - """ - ptv_sec = _ffi.new("time_t *") - ptv_usec = _ffi.new("long *") - if _lib.Cryptography_DTLSv1_get_timeout(self._ssl, ptv_sec, ptv_usec): - return ptv_sec[0] + (ptv_usec[0] / 1000000) - else: - return None - - def DTLSv1_handle_timeout(self) -> bool: - """ - Handles any timeout events which have become pending on a DTLS SSL - object. - - :return: `True` if there was a pending timeout, `False` otherwise. - """ - result = _lib.DTLSv1_handle_timeout(self._ssl) - if result < 0: - self._raise_ssl_error(self._ssl, result) - assert False, "unreachable" - else: - return bool(result) - - def bio_shutdown(self) -> None: - """ - If the Connection was created with a memory BIO, this method can be - used to indicate that *end of file* has been reached on the read end of - that memory BIO. - - :return: None - """ - if self._from_ssl is None: - raise TypeError("Connection sock was not None") - - _lib.BIO_set_mem_eof_return(self._into_ssl, 0) - - def shutdown(self) -> bool: - """ - Send the shutdown message to the Connection. - - :return: True if the shutdown completed successfully (i.e. both sides - have sent closure alerts), False otherwise (in which case you - call :meth:`recv` or :meth:`send` when the connection becomes - readable/writeable). 
- """ - result = _lib.SSL_shutdown(self._ssl) - if result < 0: - self._raise_ssl_error(self._ssl, result) - assert False, "unreachable" - elif result > 0: - return True - else: - return False - - def get_cipher_list(self) -> list[str]: - """ - Retrieve the list of ciphers used by the Connection object. - - :return: A list of native cipher strings. - """ - ciphers = [] - for i in count(): - result = _lib.SSL_get_cipher_list(self._ssl, i) - if result == _ffi.NULL: - break - ciphers.append(_ffi.string(result).decode("utf-8")) - return ciphers - - def get_client_ca_list(self) -> list[X509Name]: - """ - Get CAs whose certificates are suggested for client authentication. - - :return: If this is a server connection, the list of certificate - authorities that will be sent or has been sent to the client, as - controlled by this :class:`Connection`'s :class:`Context`. - - If this is a client connection, the list will be empty until the - connection with the server is established. - - .. versionadded:: 0.10 - """ - ca_names = _lib.SSL_get_client_CA_list(self._ssl) - if ca_names == _ffi.NULL: - # TODO: This is untested. - return [] - - result = [] - for i in range(_lib.sk_X509_NAME_num(ca_names)): - name = _lib.sk_X509_NAME_value(ca_names, i) - copy = _lib.X509_NAME_dup(name) - _openssl_assert(copy != _ffi.NULL) - - pyname = X509Name.__new__(X509Name) - pyname._name = _ffi.gc(copy, _lib.X509_NAME_free) - result.append(pyname) - return result - - def makefile(self, *args: Any, **kwargs: Any) -> typing.NoReturn: - """ - The makefile() method is not implemented, since there is no dup - semantics for SSL connections - - :raise: NotImplementedError - """ - raise NotImplementedError( - "Cannot make file object of OpenSSL.SSL.Connection" - ) - - def get_app_data(self) -> Any: - """ - Retrieve application data as set by :meth:`set_app_data`. - - :return: The application data - """ - return self._app_data - - def set_app_data(self, data: Any) -> None: - """ - Set application data - - :param data: The application data - :return: None - """ - self._app_data = data - - def get_shutdown(self) -> int: - """ - Get the shutdown state of the Connection. - - :return: The shutdown state, a bitvector of SENT_SHUTDOWN, - RECEIVED_SHUTDOWN. - """ - return _lib.SSL_get_shutdown(self._ssl) - - def set_shutdown(self, state: int) -> None: - """ - Set the shutdown state of the Connection. - - :param state: bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. - :return: None - """ - if not isinstance(state, int): - raise TypeError("state must be an integer") - - _lib.SSL_set_shutdown(self._ssl, state) - - def get_state_string(self) -> bytes: - """ - Retrieve a verbose string detailing the state of the Connection. - - :return: A string representing the state - """ - return _ffi.string(_lib.SSL_state_string_long(self._ssl)) - - def server_random(self) -> bytes | None: - """ - Retrieve the random value used with the server hello message. - - :return: A string representing the state - """ - session = _lib.SSL_get_session(self._ssl) - if session == _ffi.NULL: - return None - length = _lib.SSL_get_server_random(self._ssl, _ffi.NULL, 0) - _openssl_assert(length > 0) - outp = _no_zero_allocator("unsigned char[]", length) - _lib.SSL_get_server_random(self._ssl, outp, length) - return _ffi.buffer(outp, length)[:] - - def client_random(self) -> bytes | None: - """ - Retrieve the random value used with the client hello message. 
- - :return: A string representing the state - """ - session = _lib.SSL_get_session(self._ssl) - if session == _ffi.NULL: - return None - - length = _lib.SSL_get_client_random(self._ssl, _ffi.NULL, 0) - _openssl_assert(length > 0) - outp = _no_zero_allocator("unsigned char[]", length) - _lib.SSL_get_client_random(self._ssl, outp, length) - return _ffi.buffer(outp, length)[:] - - def master_key(self) -> bytes | None: - """ - Retrieve the value of the master key for this session. - - :return: A string representing the state - """ - session = _lib.SSL_get_session(self._ssl) - if session == _ffi.NULL: - return None - - length = _lib.SSL_SESSION_get_master_key(session, _ffi.NULL, 0) - _openssl_assert(length > 0) - outp = _no_zero_allocator("unsigned char[]", length) - _lib.SSL_SESSION_get_master_key(session, outp, length) - return _ffi.buffer(outp, length)[:] - - def export_keying_material( - self, label: bytes, olen: int, context: bytes | None = None - ) -> bytes: - """ - Obtain keying material for application use. - - :param: label - a disambiguating label string as described in RFC 5705 - :param: olen - the length of the exported key material in bytes - :param: context - a per-association context value - :return: the exported key material bytes or None - """ - outp = _no_zero_allocator("unsigned char[]", olen) - context_buf = _ffi.NULL - context_len = 0 - use_context = 0 - if context is not None: - context_buf = context - context_len = len(context) - use_context = 1 - success = _lib.SSL_export_keying_material( - self._ssl, - outp, - olen, - label, - len(label), - context_buf, - context_len, - use_context, - ) - _openssl_assert(success == 1) - return _ffi.buffer(outp, olen)[:] - - def sock_shutdown(self, *args: Any, **kwargs: Any) -> None: - """ - Call the :meth:`shutdown` method of the underlying socket. - See :manpage:`shutdown(2)`. - - :return: What the socket's shutdown() method returns - """ - return self._socket.shutdown(*args, **kwargs) # type: ignore[return-value, union-attr] - - @typing.overload - def get_certificate( - self, *, as_cryptography: typing.Literal[True] - ) -> x509.Certificate | None: - pass - - @typing.overload - def get_certificate( - self, *, as_cryptography: typing.Literal[False] = False - ) -> X509 | None: - pass - - def get_certificate( - self, - *, - as_cryptography: typing.Literal[True] | typing.Literal[False] = False, - ) -> X509 | x509.Certificate | None: - """ - Retrieve the local certificate (if any) - - :param bool as_cryptography: Controls whether a - ``cryptography.x509.Certificate`` or an ``OpenSSL.crypto.X509`` - object should be returned. - - :return: The local certificate - """ - cert = _lib.SSL_get_certificate(self._ssl) - if cert != _ffi.NULL: - _lib.X509_up_ref(cert) - pycert = X509._from_raw_x509_ptr(cert) - if as_cryptography: - return pycert.to_cryptography() - return pycert - return None - - @typing.overload - def get_peer_certificate( - self, *, as_cryptography: typing.Literal[True] - ) -> x509.Certificate | None: - pass - - @typing.overload - def get_peer_certificate( - self, *, as_cryptography: typing.Literal[False] = False - ) -> X509 | None: - pass - - def get_peer_certificate( - self, - *, - as_cryptography: typing.Literal[True] | typing.Literal[False] = False, - ) -> X509 | x509.Certificate | None: - """ - Retrieve the other side's certificate (if any) - - :param bool as_cryptography: Controls whether a - ``cryptography.x509.Certificate`` or an ``OpenSSL.crypto.X509`` - object should be returned. 
- - :return: The peer's certificate - """ - cert = _lib.SSL_get_peer_certificate(self._ssl) - if cert != _ffi.NULL: - pycert = X509._from_raw_x509_ptr(cert) - if as_cryptography: - return pycert.to_cryptography() - return pycert - return None - - @staticmethod - def _cert_stack_to_list(cert_stack: Any) -> list[X509]: - """ - Internal helper to convert a STACK_OF(X509) to a list of X509 - instances. - """ - result = [] - for i in range(_lib.sk_X509_num(cert_stack)): - cert = _lib.sk_X509_value(cert_stack, i) - _openssl_assert(cert != _ffi.NULL) - res = _lib.X509_up_ref(cert) - _openssl_assert(res >= 1) - pycert = X509._from_raw_x509_ptr(cert) - result.append(pycert) - return result - - @staticmethod - def _cert_stack_to_cryptography_list( - cert_stack: Any, - ) -> list[x509.Certificate]: - """ - Internal helper to convert a STACK_OF(X509) to a list of X509 - instances. - """ - result = [] - for i in range(_lib.sk_X509_num(cert_stack)): - cert = _lib.sk_X509_value(cert_stack, i) - _openssl_assert(cert != _ffi.NULL) - res = _lib.X509_up_ref(cert) - _openssl_assert(res >= 1) - pycert = X509._from_raw_x509_ptr(cert) - result.append(pycert.to_cryptography()) - return result - - @typing.overload - def get_peer_cert_chain( - self, *, as_cryptography: typing.Literal[True] - ) -> list[x509.Certificate] | None: - pass - - @typing.overload - def get_peer_cert_chain( - self, *, as_cryptography: typing.Literal[False] = False - ) -> list[X509] | None: - pass - - def get_peer_cert_chain( - self, - *, - as_cryptography: typing.Literal[True] | typing.Literal[False] = False, - ) -> list[X509] | list[x509.Certificate] | None: - """ - Retrieve the other side's certificate (if any) - - :param bool as_cryptography: Controls whether a list of - ``cryptography.x509.Certificate`` or ``OpenSSL.crypto.X509`` - object should be returned. - - :return: A list of X509 instances giving the peer's certificate chain, - or None if it does not have one. - """ - cert_stack = _lib.SSL_get_peer_cert_chain(self._ssl) - if cert_stack == _ffi.NULL: - return None - - if as_cryptography: - return self._cert_stack_to_cryptography_list(cert_stack) - return self._cert_stack_to_list(cert_stack) - - @typing.overload - def get_verified_chain( - self, *, as_cryptography: typing.Literal[True] - ) -> list[x509.Certificate] | None: - pass - - @typing.overload - def get_verified_chain( - self, *, as_cryptography: typing.Literal[False] = False - ) -> list[X509] | None: - pass - - def get_verified_chain( - self, - *, - as_cryptography: typing.Literal[True] | typing.Literal[False] = False, - ) -> list[X509] | list[x509.Certificate] | None: - """ - Retrieve the verified certificate chain of the peer including the - peer's end entity certificate. It must be called after a session has - been successfully established. If peer verification was not successful - the chain may be incomplete, invalid, or None. - - :param bool as_cryptography: Controls whether a list of - ``cryptography.x509.Certificate`` or ``OpenSSL.crypto.X509`` - object should be returned. - - :return: A list of X509 instances giving the peer's verified - certificate chain, or None if it does not have one. - - .. 
versionadded:: 20.0 - """ - # OpenSSL 1.1+ - cert_stack = _lib.SSL_get0_verified_chain(self._ssl) - if cert_stack == _ffi.NULL: - return None - - if as_cryptography: - return self._cert_stack_to_cryptography_list(cert_stack) - return self._cert_stack_to_list(cert_stack) - - def want_read(self) -> bool: - """ - Checks if more data has to be read from the transport layer to complete - an operation. - - :return: True iff more data has to be read - """ - return _lib.SSL_want_read(self._ssl) - - def want_write(self) -> bool: - """ - Checks if there is data to write to the transport layer to complete an - operation. - - :return: True iff there is data to write - """ - return _lib.SSL_want_write(self._ssl) - - def set_accept_state(self) -> None: - """ - Set the connection to work in server mode. The handshake will be - handled automatically by read/write. - - :return: None - """ - _lib.SSL_set_accept_state(self._ssl) - - def set_connect_state(self) -> None: - """ - Set the connection to work in client mode. The handshake will be - handled automatically by read/write. - - :return: None - """ - _lib.SSL_set_connect_state(self._ssl) - - def get_session(self) -> Session | None: - """ - Returns the Session currently used. - - :return: An instance of :class:`OpenSSL.SSL.Session` or - :obj:`None` if no session exists. - - .. versionadded:: 0.14 - """ - session = _lib.SSL_get1_session(self._ssl) - if session == _ffi.NULL: - return None - - pysession = Session.__new__(Session) - pysession._session = _ffi.gc(session, _lib.SSL_SESSION_free) - return pysession - - def set_session(self, session: Session) -> None: - """ - Set the session to be used when the TLS/SSL connection is established. - - :param session: A Session instance representing the session to use. - :returns: None - - .. versionadded:: 0.14 - """ - if not isinstance(session, Session): - raise TypeError("session must be a Session instance") - - result = _lib.SSL_set_session(self._ssl, session._session) - _openssl_assert(result == 1) - - def _get_finished_message( - self, function: Callable[[Any, Any, int], int] - ) -> bytes | None: - """ - Helper to implement :meth:`get_finished` and - :meth:`get_peer_finished`. - - :param function: Either :data:`SSL_get_finished`: or - :data:`SSL_get_peer_finished`. - - :return: :data:`None` if the desired message has not yet been - received, otherwise the contents of the message. - """ - # The OpenSSL documentation says nothing about what might happen if the - # count argument given is zero. Specifically, it doesn't say whether - # the output buffer may be NULL in that case or not. Inspection of the - # implementation reveals that it calls memcpy() unconditionally. - # Section 7.1.4, paragraph 1 of the C standard suggests that - # memcpy(NULL, source, 0) is not guaranteed to produce defined (let - # alone desirable) behavior (though it probably does on just about - # every implementation...) - # - # Allocate a tiny buffer to pass in (instead of just passing NULL as - # one might expect) for the initial call so as to be safe against this - # potentially undefined behavior. - empty = _ffi.new("char[]", 0) - size = function(self._ssl, empty, 0) - if size == 0: - # No Finished message so far. - return None - - buf = _no_zero_allocator("char[]", size) - function(self._ssl, buf, size) - return _ffi.buffer(buf, size)[:] - - def get_finished(self) -> bytes | None: - """ - Obtain the latest TLS Finished message that we sent. 
- - :return: The contents of the message or :obj:`None` if the TLS - handshake has not yet completed. - - .. versionadded:: 0.15 - """ - return self._get_finished_message(_lib.SSL_get_finished) - - def get_peer_finished(self) -> bytes | None: - """ - Obtain the latest TLS Finished message that we received from the peer. - - :return: The contents of the message or :obj:`None` if the TLS - handshake has not yet completed. - - .. versionadded:: 0.15 - """ - return self._get_finished_message(_lib.SSL_get_peer_finished) - - def get_cipher_name(self) -> str | None: - """ - Obtain the name of the currently used cipher. - - :returns: The name of the currently used cipher or :obj:`None` - if no connection has been established. - - .. versionadded:: 0.15 - """ - cipher = _lib.SSL_get_current_cipher(self._ssl) - if cipher == _ffi.NULL: - return None - else: - name = _ffi.string(_lib.SSL_CIPHER_get_name(cipher)) - return name.decode("utf-8") - - def get_cipher_bits(self) -> int | None: - """ - Obtain the number of secret bits of the currently used cipher. - - :returns: The number of secret bits of the currently used cipher - or :obj:`None` if no connection has been established. - - .. versionadded:: 0.15 - """ - cipher = _lib.SSL_get_current_cipher(self._ssl) - if cipher == _ffi.NULL: - return None - else: - return _lib.SSL_CIPHER_get_bits(cipher, _ffi.NULL) - - def get_cipher_version(self) -> str | None: - """ - Obtain the protocol version of the currently used cipher. - - :returns: The protocol name of the currently used cipher - or :obj:`None` if no connection has been established. - - .. versionadded:: 0.15 - """ - cipher = _lib.SSL_get_current_cipher(self._ssl) - if cipher == _ffi.NULL: - return None - else: - version = _ffi.string(_lib.SSL_CIPHER_get_version(cipher)) - return version.decode("utf-8") - - def get_protocol_version_name(self) -> str: - """ - Retrieve the protocol version of the current connection. - - :returns: The TLS version of the current connection, for example - the value for TLS 1.2 would be ``TLSv1.2``or ``Unknown`` - for connections that were not successfully established. - """ - version = _ffi.string(_lib.SSL_get_version(self._ssl)) - return version.decode("utf-8") - - def get_protocol_version(self) -> int: - """ - Retrieve the SSL or TLS protocol version of the current connection. - - :returns: The TLS version of the current connection. For example, - it will return ``0x769`` for connections made over TLS version 1. - """ - version = _lib.SSL_version(self._ssl) - return version - - def set_alpn_protos(self, protos: list[bytes]) -> None: - """ - Specify the client's ALPN protocol list. - - These protocols are offered to the server during protocol negotiation. - - :param protos: A list of the protocols to be offered to the server. - This list should be a Python list of bytestrings representing the - protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. - """ - # Different versions of OpenSSL are inconsistent about how they handle - # empty proto lists (see #1043), so we avoid the problem entirely by - # rejecting them ourselves. - if not protos: - raise ValueError("at least one protocol must be specified") - - # Take the list of protocols and join them together, prefixing them - # with their lengths. - protostr = b"".join( - chain.from_iterable((bytes((len(p),)), p) for p in protos) - ) - - # Build a C string from the list. We don't need to save this off - # because OpenSSL immediately copies the data out. 
- input_str = _ffi.new("unsigned char[]", protostr) - - # https://www.openssl.org/docs/man1.1.0/man3/SSL_CTX_set_alpn_protos.html: - # SSL_CTX_set_alpn_protos() and SSL_set_alpn_protos() - # return 0 on success, and non-0 on failure. - # WARNING: these functions reverse the return value convention. - _openssl_assert( - _lib.SSL_set_alpn_protos(self._ssl, input_str, len(protostr)) == 0 - ) - - def get_alpn_proto_negotiated(self) -> bytes: - """ - Get the protocol that was negotiated by ALPN. - - :returns: A bytestring of the protocol name. If no protocol has been - negotiated yet, returns an empty bytestring. - """ - data = _ffi.new("unsigned char **") - data_len = _ffi.new("unsigned int *") - - _lib.SSL_get0_alpn_selected(self._ssl, data, data_len) - - if not data_len: - return b"" - - return _ffi.buffer(data[0], data_len[0])[:] - - def get_selected_srtp_profile(self) -> bytes: - """ - Get the SRTP protocol which was negotiated. - - :returns: A bytestring of the SRTP profile name. If no profile has been - negotiated yet, returns an empty bytestring. - """ - profile = _lib.SSL_get_selected_srtp_profile(self._ssl) - if not profile: - return b"" - - return _ffi.string(profile.name) - - def request_ocsp(self) -> None: - """ - Called to request that the server sends stapled OCSP data, if - available. If this is not called on the client side then the server - will not send OCSP data. Should be used in conjunction with - :meth:`Context.set_ocsp_client_callback`. - """ - rc = _lib.SSL_set_tlsext_status_type( - self._ssl, _lib.TLSEXT_STATUSTYPE_ocsp - ) - _openssl_assert(rc == 1) - - def set_info_callback( - self, callback: Callable[[Connection, int, int], None] - ) -> None: - """ - Set the information callback to *callback*. This function will be - called from time to time during SSL handshakes. - - :param callback: The Python callback to use. This should take three - arguments: a Connection object and two integers. The first integer - specifies where in the SSL handshake the function was called, and - the other the return code from a (possibly failed) internal - function call. - :return: None - """ - - @wraps(callback) - def wrapper(ssl, where, return_code): # type: ignore[no-untyped-def] - callback(Connection._reverse_mapping[ssl], where, return_code) - - self._info_callback = _ffi.callback( - "void (*)(const SSL *, int, int)", wrapper - ) - _lib.SSL_set_info_callback(self._ssl, self._info_callback) diff --git a/venv/Lib/site-packages/OpenSSL/__init__.py b/venv/Lib/site-packages/OpenSSL/__init__.py deleted file mode 100644 index 7b077cf..0000000 --- a/venv/Lib/site-packages/OpenSSL/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) AB Strakt -# See LICENSE for details. 
- -""" -pyOpenSSL - A simple wrapper around the OpenSSL library -""" - -from OpenSSL import SSL, crypto -from OpenSSL.version import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) - -__all__ = [ - "SSL", - "__author__", - "__copyright__", - "__email__", - "__license__", - "__summary__", - "__title__", - "__uri__", - "__version__", - "crypto", -] diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/SSL.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/SSL.cpython-312.pyc deleted file mode 100644 index 8295c5d..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/SSL.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 94edd2b..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/_util.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/_util.cpython-312.pyc deleted file mode 100644 index 780c1e0..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/_util.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/crypto.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/crypto.cpython-312.pyc deleted file mode 100644 index 685f8b9..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/crypto.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/debug.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/debug.cpython-312.pyc deleted file mode 100644 index debdb08..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/debug.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/rand.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/rand.cpython-312.pyc deleted file mode 100644 index 026708f..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/rand.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/__pycache__/version.cpython-312.pyc b/venv/Lib/site-packages/OpenSSL/__pycache__/version.cpython-312.pyc deleted file mode 100644 index 4528b3c..0000000 Binary files a/venv/Lib/site-packages/OpenSSL/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/OpenSSL/_util.py b/venv/Lib/site-packages/OpenSSL/_util.py deleted file mode 100644 index 41a6452..0000000 --- a/venv/Lib/site-packages/OpenSSL/_util.py +++ /dev/null @@ -1,129 +0,0 @@ -from __future__ import annotations - -import os -import sys -import warnings -from typing import Any, Callable, NoReturn, Union - -from cryptography.hazmat.bindings.openssl.binding import Binding - -if sys.version_info >= (3, 9): - StrOrBytesPath = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]] -else: - StrOrBytesPath = Union[str, bytes, os.PathLike] - -binding = Binding() -ffi = binding.ffi -lib: Any = binding.lib - - -# This is a special CFFI allocator that does not bother to zero its memory -# after allocation. This has vastly better performance on large allocations and -# so should be used whenever we don't need the memory zeroed out. -no_zero_allocator = ffi.new_allocator(should_clear_after_alloc=False) - - -def text(charp: Any) -> str: - """ - Get a native string type representing of the given CFFI ``char*`` object. 
- - :param charp: A C-style string represented using CFFI. - - :return: :class:`str` - """ - if not charp: - return "" - return ffi.string(charp).decode("utf-8") - - -def exception_from_error_queue(exception_type: type[Exception]) -> NoReturn: - """ - Convert an OpenSSL library failure into a Python exception. - - When a call to the native OpenSSL library fails, this is usually signalled - by the return value, and an error code is stored in an error queue - associated with the current thread. The err library provides functions to - obtain these error codes and textual error messages. - """ - errors = [] - - while True: - error = lib.ERR_get_error() - if error == 0: - break - errors.append( - ( - text(lib.ERR_lib_error_string(error)), - text(lib.ERR_func_error_string(error)), - text(lib.ERR_reason_error_string(error)), - ) - ) - - raise exception_type(errors) - - -def make_assert(error: type[Exception]) -> Callable[[bool], Any]: - """ - Create an assert function that uses :func:`exception_from_error_queue` to - raise an exception wrapped by *error*. - """ - - def openssl_assert(ok: bool) -> None: - """ - If *ok* is not True, retrieve the error from OpenSSL and raise it. - """ - if ok is not True: - exception_from_error_queue(error) - - return openssl_assert - - -def path_bytes(s: StrOrBytesPath) -> bytes: - """ - Convert a Python path to a :py:class:`bytes` for the path which can be - passed into an OpenSSL API accepting a filename. - - :param s: A path (valid for os.fspath). - - :return: An instance of :py:class:`bytes`. - """ - b = os.fspath(s) - - if isinstance(b, str): - return b.encode(sys.getfilesystemencoding()) - else: - return b - - -def byte_string(s: str) -> bytes: - return s.encode("charmap") - - -# A marker object to observe whether some optional arguments are passed any -# value or not. -UNSPECIFIED = object() - -_TEXT_WARNING = "str for {0} is no longer accepted, use bytes" - - -def text_to_bytes_and_warn(label: str, obj: Any) -> Any: - """ - If ``obj`` is text, emit a warning that it should be bytes instead and try - to convert it to bytes automatically. - - :param str label: The name of the parameter from which ``obj`` was taken - (so a developer can easily find the source of the problem and correct - it). - - :return: If ``obj`` is the text string type, a ``bytes`` object giving the - UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is - returned. 
- """ - if isinstance(obj, str): - warnings.warn( - _TEXT_WARNING.format(label), - category=DeprecationWarning, - stacklevel=3, - ) - return obj.encode("utf-8") - return obj diff --git a/venv/Lib/site-packages/OpenSSL/crypto.py b/venv/Lib/site-packages/OpenSSL/crypto.py deleted file mode 100644 index 366007e..0000000 --- a/venv/Lib/site-packages/OpenSSL/crypto.py +++ /dev/null @@ -1,2450 +0,0 @@ -from __future__ import annotations - -import calendar -import datetime -import functools -import sys -import typing -import warnings -from base64 import b16encode -from collections.abc import Iterable, Sequence -from functools import partial -from typing import ( - Any, - Callable, - Union, -) - -if sys.version_info >= (3, 13): - from warnings import deprecated -elif sys.version_info < (3, 8): - _T = typing.TypeVar("T") - - def deprecated(msg: str, **kwargs: object) -> Callable[[_T], _T]: - return lambda f: f -else: - from typing_extensions import deprecated - -from cryptography import utils, x509 -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - rsa, -) - -from OpenSSL._util import StrOrBytesPath -from OpenSSL._util import ( - byte_string as _byte_string, -) -from OpenSSL._util import ( - exception_from_error_queue as _exception_from_error_queue, -) -from OpenSSL._util import ( - ffi as _ffi, -) -from OpenSSL._util import ( - lib as _lib, -) -from OpenSSL._util import ( - make_assert as _make_assert, -) -from OpenSSL._util import ( - path_bytes as _path_bytes, -) - -__all__ = [ - "FILETYPE_ASN1", - "FILETYPE_PEM", - "FILETYPE_TEXT", - "TYPE_DSA", - "TYPE_RSA", - "X509", - "Error", - "PKey", - "X509Extension", - "X509Name", - "X509Req", - "X509Store", - "X509StoreContext", - "X509StoreContextError", - "X509StoreFlags", - "dump_certificate", - "dump_certificate_request", - "dump_privatekey", - "dump_publickey", - "get_elliptic_curve", - "get_elliptic_curves", - "load_certificate", - "load_certificate_request", - "load_privatekey", - "load_publickey", -] - - -_PrivateKey = Union[ - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, -] -_PublicKey = Union[ - dsa.DSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - rsa.RSAPublicKey, -] -_Key = Union[_PrivateKey, _PublicKey] -PassphraseCallableT = Union[bytes, Callable[..., bytes]] - - -FILETYPE_PEM: int = _lib.SSL_FILETYPE_PEM -FILETYPE_ASN1: int = _lib.SSL_FILETYPE_ASN1 - -# TODO This was an API mistake. OpenSSL has no such constant. -FILETYPE_TEXT = 2**16 - 1 - -TYPE_RSA: int = _lib.EVP_PKEY_RSA -TYPE_DSA: int = _lib.EVP_PKEY_DSA -TYPE_DH: int = _lib.EVP_PKEY_DH -TYPE_EC: int = _lib.EVP_PKEY_EC - - -class Error(Exception): - """ - An error occurred in an `OpenSSL.crypto` API. - """ - - -_raise_current_error = partial(_exception_from_error_queue, Error) -_openssl_assert = _make_assert(Error) - - -def _new_mem_buf(buffer: bytes | None = None) -> Any: - """ - Allocate a new OpenSSL memory BIO. - - Arrange for the garbage collector to clean it up automatically. - - :param buffer: None or some bytes to use to put into the BIO so that they - can be read out. - """ - if buffer is None: - bio = _lib.BIO_new(_lib.BIO_s_mem()) - free = _lib.BIO_free - else: - data = _ffi.new("char[]", buffer) - bio = _lib.BIO_new_mem_buf(data, len(buffer)) - - # Keep the memory alive as long as the bio is alive! 
- def free(bio: Any, ref: Any = data) -> Any: - return _lib.BIO_free(bio) - - _openssl_assert(bio != _ffi.NULL) - - bio = _ffi.gc(bio, free) - return bio - - -def _bio_to_string(bio: Any) -> bytes: - """ - Copy the contents of an OpenSSL BIO object into a Python byte string. - """ - result_buffer = _ffi.new("char**") - buffer_length = _lib.BIO_get_mem_data(bio, result_buffer) - return _ffi.buffer(result_buffer[0], buffer_length)[:] - - -def _set_asn1_time(boundary: Any, when: bytes) -> None: - """ - The the time value of an ASN1 time object. - - @param boundary: An ASN1_TIME pointer (or an object safely - castable to that type) which will have its value set. - @param when: A string representation of the desired time value. - - @raise TypeError: If C{when} is not a L{bytes} string. - @raise ValueError: If C{when} does not represent a time in the required - format. - @raise RuntimeError: If the time value cannot be set for some other - (unspecified) reason. - """ - if not isinstance(when, bytes): - raise TypeError("when must be a byte string") - # ASN1_TIME_set_string validates the string without writing anything - # when the destination is NULL. - _openssl_assert(boundary != _ffi.NULL) - - set_result = _lib.ASN1_TIME_set_string(boundary, when) - if set_result == 0: - raise ValueError("Invalid string") - - -def _new_asn1_time(when: bytes) -> Any: - """ - Behaves like _set_asn1_time but returns a new ASN1_TIME object. - - @param when: A string representation of the desired time value. - - @raise TypeError: If C{when} is not a L{bytes} string. - @raise ValueError: If C{when} does not represent a time in the required - format. - @raise RuntimeError: If the time value cannot be set for some other - (unspecified) reason. - """ - ret = _lib.ASN1_TIME_new() - _openssl_assert(ret != _ffi.NULL) - ret = _ffi.gc(ret, _lib.ASN1_TIME_free) - _set_asn1_time(ret, when) - return ret - - -def _get_asn1_time(timestamp: Any) -> bytes | None: - """ - Retrieve the time value of an ASN1 time object. - - @param timestamp: An ASN1_GENERALIZEDTIME* (or an object safely castable to - that type) from which the time value will be retrieved. - - @return: The time value from C{timestamp} as a L{bytes} string in a certain - format. Or C{None} if the object contains no time value. - """ - string_timestamp = _ffi.cast("ASN1_STRING*", timestamp) - if _lib.ASN1_STRING_length(string_timestamp) == 0: - return None - elif ( - _lib.ASN1_STRING_type(string_timestamp) == _lib.V_ASN1_GENERALIZEDTIME - ): - return _ffi.string(_lib.ASN1_STRING_get0_data(string_timestamp)) - else: - generalized_timestamp = _ffi.new("ASN1_GENERALIZEDTIME**") - _lib.ASN1_TIME_to_generalizedtime(timestamp, generalized_timestamp) - _openssl_assert(generalized_timestamp[0] != _ffi.NULL) - - string_timestamp = _ffi.cast("ASN1_STRING*", generalized_timestamp[0]) - string_data = _lib.ASN1_STRING_get0_data(string_timestamp) - string_result = _ffi.string(string_data) - _lib.ASN1_GENERALIZEDTIME_free(generalized_timestamp[0]) - return string_result - - -class _X509NameInvalidator: - def __init__(self) -> None: - self._names: list[X509Name] = [] - - def add(self, name: X509Name) -> None: - self._names.append(name) - - def clear(self) -> None: - for name in self._names: - # Breaks the object, but also prevents UAF! - del name._name - - -class PKey: - """ - A class representing an DSA or RSA public key or key pair. 
- """ - - _only_public = False - _initialized = True - - def __init__(self) -> None: - pkey = _lib.EVP_PKEY_new() - self._pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) - self._initialized = False - - def to_cryptography_key(self) -> _Key: - """ - Export as a ``cryptography`` key. - - :rtype: One of ``cryptography``'s `key interfaces`_. - - .. _key interfaces: https://cryptography.io/en/latest/hazmat/\ - primitives/asymmetric/rsa/#key-interfaces - - .. versionadded:: 16.1.0 - """ - from cryptography.hazmat.primitives.serialization import ( - load_der_private_key, - load_der_public_key, - ) - - if self._only_public: - der = dump_publickey(FILETYPE_ASN1, self) - return typing.cast(_Key, load_der_public_key(der)) - else: - der = dump_privatekey(FILETYPE_ASN1, self) - return typing.cast(_Key, load_der_private_key(der, password=None)) - - @classmethod - def from_cryptography_key(cls, crypto_key: _Key) -> PKey: - """ - Construct based on a ``cryptography`` *crypto_key*. - - :param crypto_key: A ``cryptography`` key. - :type crypto_key: One of ``cryptography``'s `key interfaces`_. - - :rtype: PKey - - .. versionadded:: 16.1.0 - """ - if not isinstance( - crypto_key, - ( - dsa.DSAPrivateKey, - dsa.DSAPublicKey, - ec.EllipticCurvePrivateKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PrivateKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PrivateKey, - ed448.Ed448PublicKey, - rsa.RSAPrivateKey, - rsa.RSAPublicKey, - ), - ): - raise TypeError("Unsupported key type") - - from cryptography.hazmat.primitives.serialization import ( - Encoding, - NoEncryption, - PrivateFormat, - PublicFormat, - ) - - if isinstance( - crypto_key, - ( - dsa.DSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - rsa.RSAPublicKey, - ), - ): - return load_publickey( - FILETYPE_ASN1, - crypto_key.public_bytes( - Encoding.DER, PublicFormat.SubjectPublicKeyInfo - ), - ) - else: - der = crypto_key.private_bytes( - Encoding.DER, PrivateFormat.PKCS8, NoEncryption() - ) - return load_privatekey(FILETYPE_ASN1, der) - - def generate_key(self, type: int, bits: int) -> None: - """ - Generate a key pair of the given type, with the given number of bits. - - This generates a key "into" the this object. - - :param type: The key type. - :type type: :py:data:`TYPE_RSA` or :py:data:`TYPE_DSA` - :param bits: The number of bits. - :type bits: :py:data:`int` ``>= 0`` - :raises TypeError: If :py:data:`type` or :py:data:`bits` isn't - of the appropriate type. - :raises ValueError: If the number of bits isn't an integer of - the appropriate size. 
- :return: ``None`` - """ - if not isinstance(type, int): - raise TypeError("type must be an integer") - - if not isinstance(bits, int): - raise TypeError("bits must be an integer") - - if type == TYPE_RSA: - if bits <= 0: - raise ValueError("Invalid number of bits") - - # TODO Check error return - exponent = _lib.BN_new() - exponent = _ffi.gc(exponent, _lib.BN_free) - _lib.BN_set_word(exponent, _lib.RSA_F4) - - rsa = _lib.RSA_new() - - result = _lib.RSA_generate_key_ex(rsa, bits, exponent, _ffi.NULL) - _openssl_assert(result == 1) - - result = _lib.EVP_PKEY_assign_RSA(self._pkey, rsa) - _openssl_assert(result == 1) - - elif type == TYPE_DSA: - dsa = _lib.DSA_new() - _openssl_assert(dsa != _ffi.NULL) - - dsa = _ffi.gc(dsa, _lib.DSA_free) - res = _lib.DSA_generate_parameters_ex( - dsa, bits, _ffi.NULL, 0, _ffi.NULL, _ffi.NULL, _ffi.NULL - ) - _openssl_assert(res == 1) - - _openssl_assert(_lib.DSA_generate_key(dsa) == 1) - _openssl_assert(_lib.EVP_PKEY_set1_DSA(self._pkey, dsa) == 1) - else: - raise Error("No such key type") - - self._initialized = True - - def check(self) -> bool: - """ - Check the consistency of an RSA private key. - - This is the Python equivalent of OpenSSL's ``RSA_check_key``. - - :return: ``True`` if key is consistent. - - :raise OpenSSL.crypto.Error: if the key is inconsistent. - - :raise TypeError: if the key is of a type which cannot be checked. - Only RSA keys can currently be checked. - """ - if self._only_public: - raise TypeError("public key only") - - if _lib.EVP_PKEY_type(self.type()) != _lib.EVP_PKEY_RSA: - raise TypeError("Only RSA keys can currently be checked.") - - rsa = _lib.EVP_PKEY_get1_RSA(self._pkey) - rsa = _ffi.gc(rsa, _lib.RSA_free) - result = _lib.RSA_check_key(rsa) - if result == 1: - return True - _raise_current_error() - - def type(self) -> int: - """ - Returns the type of the key - - :return: The type of the key. - """ - return _lib.EVP_PKEY_id(self._pkey) - - def bits(self) -> int: - """ - Returns the number of bits of the key - - :return: The number of bits of the key. - """ - return _lib.EVP_PKEY_bits(self._pkey) - - -class _EllipticCurve: - """ - A representation of a supported elliptic curve. - - @cvar _curves: :py:obj:`None` until an attempt is made to load the curves. - Thereafter, a :py:type:`set` containing :py:type:`_EllipticCurve` - instances each of which represents one curve supported by the system. - @type _curves: :py:type:`NoneType` or :py:type:`set` - """ - - _curves = None - - def __ne__(self, other: Any) -> bool: - """ - Implement cooperation with the right-hand side argument of ``!=``. - - Python 3 seems to have dropped this cooperation in this very narrow - circumstance. - """ - if isinstance(other, _EllipticCurve): - return super().__ne__(other) - return NotImplemented - - @classmethod - def _load_elliptic_curves(cls, lib: Any) -> set[_EllipticCurve]: - """ - Get the curves supported by OpenSSL. - - :param lib: The OpenSSL library binding object. - - :return: A :py:type:`set` of ``cls`` instances giving the names of the - elliptic curves the underlying library supports. - """ - num_curves = lib.EC_get_builtin_curves(_ffi.NULL, 0) - builtin_curves = _ffi.new("EC_builtin_curve[]", num_curves) - # The return value on this call should be num_curves again. We - # could check it to make sure but if it *isn't* then.. what could - # we do? Abort the whole process, I suppose...? 
-exarkun - lib.EC_get_builtin_curves(builtin_curves, num_curves) - return set(cls.from_nid(lib, c.nid) for c in builtin_curves) - - @classmethod - def _get_elliptic_curves(cls, lib: Any) -> set[_EllipticCurve]: - """ - Get, cache, and return the curves supported by OpenSSL. - - :param lib: The OpenSSL library binding object. - - :return: A :py:type:`set` of ``cls`` instances giving the names of the - elliptic curves the underlying library supports. - """ - if cls._curves is None: - cls._curves = cls._load_elliptic_curves(lib) - return cls._curves - - @classmethod - def from_nid(cls, lib: Any, nid: int) -> _EllipticCurve: - """ - Instantiate a new :py:class:`_EllipticCurve` associated with the given - OpenSSL NID. - - :param lib: The OpenSSL library binding object. - - :param nid: The OpenSSL NID the resulting curve object will represent. - This must be a curve NID (and not, for example, a hash NID) or - subsequent operations will fail in unpredictable ways. - :type nid: :py:class:`int` - - :return: The curve object. - """ - return cls(lib, nid, _ffi.string(lib.OBJ_nid2sn(nid)).decode("ascii")) - - def __init__(self, lib: Any, nid: int, name: str) -> None: - """ - :param _lib: The :py:mod:`cryptography` binding instance used to - interface with OpenSSL. - - :param _nid: The OpenSSL NID identifying the curve this object - represents. - :type _nid: :py:class:`int` - - :param name: The OpenSSL short name identifying the curve this object - represents. - :type name: :py:class:`unicode` - """ - self._lib = lib - self._nid = nid - self.name = name - - def __repr__(self) -> str: - return f"<Curve {self.name!r}>" - - def _to_EC_KEY(self) -> Any: - """ - Create a new OpenSSL EC_KEY structure initialized to use this curve. - - The structure is automatically garbage collected when the Python object - is garbage collected. - """ - key = self._lib.EC_KEY_new_by_curve_name(self._nid) - return _ffi.gc(key, _lib.EC_KEY_free) - - -@deprecated( - "get_elliptic_curves is deprecated. You should use the APIs in " - "cryptography instead." -) -def get_elliptic_curves() -> set[_EllipticCurve]: - """ - Return a set of objects representing the elliptic curves supported in the - OpenSSL build in use. - - The curve objects have a :py:class:`unicode` ``name`` attribute by which - they identify themselves. - - The curve objects are useful as values for the argument accepted by - :py:meth:`Context.set_tmp_ecdh` to specify which elliptical curve should be - used for ECDHE key exchange. - """ - return _EllipticCurve._get_elliptic_curves(_lib) - - -@deprecated( - "get_elliptic_curve is deprecated. You should use the APIs in " - "cryptography instead." -) -def get_elliptic_curve(name: str) -> _EllipticCurve: - """ - Return a single curve object selected by name. - - See :py:func:`get_elliptic_curves` for information about curve objects. - - :param name: The OpenSSL short name identifying the curve object to - retrieve. - :type name: :py:class:`unicode` - - If the named curve is not supported then :py:class:`ValueError` is raised. - """ - for curve in get_elliptic_curves(): - if curve.name == name: - return curve - raise ValueError("unknown curve name", name) - - -@functools.total_ordering -class X509Name: - """ - An X.509 Distinguished Name. - - :ivar countryName: The country of the entity. - :ivar C: Alias for :py:attr:`countryName`. - - :ivar stateOrProvinceName: The state or province of the entity. - :ivar ST: Alias for :py:attr:`stateOrProvinceName`. - - :ivar localityName: The locality of the entity.
- :ivar L: Alias for :py:attr:`localityName`. - - :ivar organizationName: The organization name of the entity. - :ivar O: Alias for :py:attr:`organizationName`. - - :ivar organizationalUnitName: The organizational unit of the entity. - :ivar OU: Alias for :py:attr:`organizationalUnitName` - - :ivar commonName: The common name of the entity. - :ivar CN: Alias for :py:attr:`commonName`. - - :ivar emailAddress: The e-mail address of the entity. - """ - - def __init__(self, name: X509Name) -> None: - """ - Create a new X509Name, copying the given X509Name instance. - - :param name: The name to copy. - :type name: :py:class:`X509Name` - """ - name = _lib.X509_NAME_dup(name._name) - self._name: Any = _ffi.gc(name, _lib.X509_NAME_free) - - def __setattr__(self, name: str, value: Any) -> None: - if name.startswith("_"): - return super().__setattr__(name, value) - - # Note: we really do not want str subclasses here, so we do not use - # isinstance. - if type(name) is not str: - raise TypeError( - f"attribute name must be string, not " - f"'{type(value).__name__:.200}'" - ) - - nid = _lib.OBJ_txt2nid(_byte_string(name)) - if nid == _lib.NID_undef: - try: - _raise_current_error() - except Error: - pass - raise AttributeError("No such attribute") - - # If there's an old entry for this NID, remove it - for i in range(_lib.X509_NAME_entry_count(self._name)): - ent = _lib.X509_NAME_get_entry(self._name, i) - ent_obj = _lib.X509_NAME_ENTRY_get_object(ent) - ent_nid = _lib.OBJ_obj2nid(ent_obj) - if nid == ent_nid: - ent = _lib.X509_NAME_delete_entry(self._name, i) - _lib.X509_NAME_ENTRY_free(ent) - break - - if isinstance(value, str): - value = value.encode("utf-8") - - add_result = _lib.X509_NAME_add_entry_by_NID( - self._name, nid, _lib.MBSTRING_UTF8, value, -1, -1, 0 - ) - if not add_result: - _raise_current_error() - - def __getattr__(self, name: str) -> str | None: - """ - Find attribute. An X509Name object has the following attributes: - countryName (alias C), stateOrProvince (alias ST), locality (alias L), - organization (alias O), organizationalUnit (alias OU), commonName - (alias CN) and more... - """ - nid = _lib.OBJ_txt2nid(_byte_string(name)) - if nid == _lib.NID_undef: - # This is a bit weird. OBJ_txt2nid indicated failure, but it seems - # a lower level function, a2d_ASN1_OBJECT, also feels the need to - # push something onto the error queue. If we don't clean that up - # now, someone else will bump into it later and be quite confused. - # See lp#314814. 
- try: - _raise_current_error() - except Error: - pass - raise AttributeError("No such attribute") - - entry_index = _lib.X509_NAME_get_index_by_NID(self._name, nid, -1) - if entry_index == -1: - return None - - entry = _lib.X509_NAME_get_entry(self._name, entry_index) - data = _lib.X509_NAME_ENTRY_get_data(entry) - - result_buffer = _ffi.new("unsigned char**") - data_length = _lib.ASN1_STRING_to_UTF8(result_buffer, data) - _openssl_assert(data_length >= 0) - - try: - result = _ffi.buffer(result_buffer[0], data_length)[:].decode( - "utf-8" - ) - finally: - # XXX untested - _lib.OPENSSL_free(result_buffer[0]) - return result - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, X509Name): - return NotImplemented - - return _lib.X509_NAME_cmp(self._name, other._name) == 0 - - def __lt__(self, other: Any) -> bool: - if not isinstance(other, X509Name): - return NotImplemented - - return _lib.X509_NAME_cmp(self._name, other._name) < 0 - - def __repr__(self) -> str: - """ - String representation of an X509Name - """ - result_buffer = _ffi.new("char[]", 512) - format_result = _lib.X509_NAME_oneline( - self._name, result_buffer, len(result_buffer) - ) - _openssl_assert(format_result != _ffi.NULL) - - return "<X509Name object '{}'>".format( - _ffi.string(result_buffer).decode("utf-8"), - ) - - def hash(self) -> int: - """ - Return an integer representation of the first four bytes of the - MD5 digest of the DER representation of the name. - - This is the Python equivalent of OpenSSL's ``X509_NAME_hash``. - - :return: The (integer) hash of this name. - :rtype: :py:class:`int` - """ - return _lib.X509_NAME_hash(self._name) - - def der(self) -> bytes: - """ - Return the DER encoding of this name. - - :return: The DER encoded form of this name. - :rtype: :py:class:`bytes` - """ - result_buffer = _ffi.new("unsigned char**") - encode_result = _lib.i2d_X509_NAME(self._name, result_buffer) - _openssl_assert(encode_result >= 0) - - string_result = _ffi.buffer(result_buffer[0], encode_result)[:] - _lib.OPENSSL_free(result_buffer[0]) - return string_result - - def get_components(self) -> list[tuple[bytes, bytes]]: - """ - Returns the components of this name, as a sequence of 2-tuples. - - :return: The components of this name. - :rtype: :py:class:`list` of ``name, value`` tuples. - """ - result = [] - for i in range(_lib.X509_NAME_entry_count(self._name)): - ent = _lib.X509_NAME_get_entry(self._name, i) - - fname = _lib.X509_NAME_ENTRY_get_object(ent) - fval = _lib.X509_NAME_ENTRY_get_data(ent) - - nid = _lib.OBJ_obj2nid(fname) - name = _lib.OBJ_nid2sn(nid) - - # ffi.string does not handle strings containing NULL bytes - # (which may have been generated by old, broken software) - value = _ffi.buffer( - _lib.ASN1_STRING_get0_data(fval), _lib.ASN1_STRING_length(fval) - )[:] - result.append((_ffi.string(name), value)) - - return result - - -@deprecated( - "X509Extension support in pyOpenSSL is deprecated. You should use the " - "APIs in cryptography." -) -class X509Extension: - """ - An X.509 v3 certificate extension. - - .. deprecated:: 23.3.0 - Use cryptography's X509 APIs instead. - """ - - def __init__( - self, - type_name: bytes, - critical: bool, - value: bytes, - subject: X509 | None = None, - issuer: X509 | None = None, - ) -> None: - """ - Initializes an X509 extension. - - :param type_name: The name of the type of extension_ to create. - :type type_name: :py:data:`bytes` - - :param bool critical: A flag indicating whether this is a critical - extension.
- - :param value: The OpenSSL textual representation of the extension's - value. - :type value: :py:data:`bytes` - - :param subject: Optional X509 certificate to use as subject. - :type subject: :py:class:`X509` - - :param issuer: Optional X509 certificate to use as issuer. - :type issuer: :py:class:`X509` - - .. _extension: https://www.openssl.org/docs/manmaster/man5/ - x509v3_config.html#STANDARD-EXTENSIONS - """ - ctx = _ffi.new("X509V3_CTX*") - - # A context is necessary for any extension which uses the r2i - # conversion method. That is, X509V3_EXT_nconf may segfault if passed - # a NULL ctx. Start off by initializing most of the fields to NULL. - _lib.X509V3_set_ctx(ctx, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL, 0) - - # We have no configuration database - but perhaps we should (some - # extensions may require it). - _lib.X509V3_set_ctx_nodb(ctx) - - # Initialize the subject and issuer, if appropriate. ctx is a local, - # and as far as I can tell none of the X509V3_* APIs invoked here steal - # any references, so no need to mess with reference counts or - # duplicates. - if issuer is not None: - if not isinstance(issuer, X509): - raise TypeError("issuer must be an X509 instance") - ctx.issuer_cert = issuer._x509 - if subject is not None: - if not isinstance(subject, X509): - raise TypeError("subject must be an X509 instance") - ctx.subject_cert = subject._x509 - - if critical: - # There are other OpenSSL APIs which would let us pass in critical - # separately, but they're harder to use, and since value is already - # a pile of crappy junk smuggling a ton of utterly important - # structured data, what's the point of trying to avoid nasty stuff - # with strings? (However, X509V3_EXT_i2d in particular seems like - # it would be a better API to invoke. I do not know where to get - # the ext_struc it desires for its last parameter, though.) - value = b"critical," + value - - extension = _lib.X509V3_EXT_nconf(_ffi.NULL, ctx, type_name, value) - if extension == _ffi.NULL: - _raise_current_error() - self._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) - - @property - def _nid(self) -> Any: - return _lib.OBJ_obj2nid( - _lib.X509_EXTENSION_get_object(self._extension) - ) - - _prefixes: typing.ClassVar[dict[int, str]] = { - _lib.GEN_EMAIL: "email", - _lib.GEN_DNS: "DNS", - _lib.GEN_URI: "URI", - } - - def _subjectAltNameString(self) -> str: - names = _ffi.cast( - "GENERAL_NAMES*", _lib.X509V3_EXT_d2i(self._extension) - ) - - names = _ffi.gc(names, _lib.GENERAL_NAMES_free) - parts = [] - for i in range(_lib.sk_GENERAL_NAME_num(names)): - name = _lib.sk_GENERAL_NAME_value(names, i) - try: - label = self._prefixes[name.type] - except KeyError: - bio = _new_mem_buf() - _lib.GENERAL_NAME_print(bio, name) - parts.append(_bio_to_string(bio).decode("utf-8")) - else: - value = _ffi.buffer(name.d.ia5.data, name.d.ia5.length)[ - : - ].decode("utf-8") - parts.append(label + ":" + value) - return ", ".join(parts) - - def __str__(self) -> str: - """ - :return: a nice text representation of the extension - """ - if _lib.NID_subject_alt_name == self._nid: - return self._subjectAltNameString() - - bio = _new_mem_buf() - print_result = _lib.X509V3_EXT_print(bio, self._extension, 0, 0) - _openssl_assert(print_result != 0) - - return _bio_to_string(bio).decode("utf-8") - - def get_critical(self) -> bool: - """ - Returns the critical field of this X.509 extension. - - :return: The critical field. 
- """ - return _lib.X509_EXTENSION_get_critical(self._extension) - - def get_short_name(self) -> bytes: - """ - Returns the short type name of this X.509 extension. - - The result is a byte string such as :py:const:`b"basicConstraints"`. - - :return: The short type name. - :rtype: :py:data:`bytes` - - .. versionadded:: 0.12 - """ - obj = _lib.X509_EXTENSION_get_object(self._extension) - nid = _lib.OBJ_obj2nid(obj) - # OpenSSL 3.1.0 has a bug where nid2sn returns NULL for NIDs that - # previously returned UNDEF. This is a workaround for that issue. - # https://github.com/openssl/openssl/commit/908ba3ed9adbb3df90f76 - buf = _lib.OBJ_nid2sn(nid) - if buf != _ffi.NULL: - return _ffi.string(buf) - else: - return b"UNDEF" - - def get_data(self) -> bytes: - """ - Returns the data of the X509 extension, encoded as ASN.1. - - :return: The ASN.1 encoded data of this X509 extension. - :rtype: :py:data:`bytes` - - .. versionadded:: 0.12 - """ - octet_result = _lib.X509_EXTENSION_get_data(self._extension) - string_result = _ffi.cast("ASN1_STRING*", octet_result) - char_result = _lib.ASN1_STRING_get0_data(string_result) - result_length = _lib.ASN1_STRING_length(string_result) - return _ffi.buffer(char_result, result_length)[:] - - -@deprecated( - "CSR support in pyOpenSSL is deprecated. You should use the APIs " - "in cryptography." -) -class X509Req: - """ - An X.509 certificate signing requests. - - .. deprecated:: 24.2.0 - Use `cryptography.x509.CertificateSigningRequest` instead. - """ - - def __init__(self) -> None: - req = _lib.X509_REQ_new() - self._req = _ffi.gc(req, _lib.X509_REQ_free) - # Default to version 0. - self.set_version(0) - - def to_cryptography(self) -> x509.CertificateSigningRequest: - """ - Export as a ``cryptography`` certificate signing request. - - :rtype: ``cryptography.x509.CertificateSigningRequest`` - - .. versionadded:: 17.1.0 - """ - from cryptography.x509 import load_der_x509_csr - - der = _dump_certificate_request_internal(FILETYPE_ASN1, self) - - return load_der_x509_csr(der) - - @classmethod - def from_cryptography( - cls, crypto_req: x509.CertificateSigningRequest - ) -> X509Req: - """ - Construct based on a ``cryptography`` *crypto_req*. - - :param crypto_req: A ``cryptography`` X.509 certificate signing request - :type crypto_req: ``cryptography.x509.CertificateSigningRequest`` - - :rtype: X509Req - - .. versionadded:: 17.1.0 - """ - if not isinstance(crypto_req, x509.CertificateSigningRequest): - raise TypeError("Must be a certificate signing request") - - from cryptography.hazmat.primitives.serialization import Encoding - - der = crypto_req.public_bytes(Encoding.DER) - return _load_certificate_request_internal(FILETYPE_ASN1, der) - - def set_pubkey(self, pkey: PKey) -> None: - """ - Set the public key of the certificate signing request. - - :param pkey: The public key to use. - :type pkey: :py:class:`PKey` - - :return: ``None`` - """ - set_result = _lib.X509_REQ_set_pubkey(self._req, pkey._pkey) - _openssl_assert(set_result == 1) - - def get_pubkey(self) -> PKey: - """ - Get the public key of the certificate signing request. - - :return: The public key. - :rtype: :py:class:`PKey` - """ - pkey = PKey.__new__(PKey) - pkey._pkey = _lib.X509_REQ_get_pubkey(self._req) - _openssl_assert(pkey._pkey != _ffi.NULL) - pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) - pkey._only_public = True - return pkey - - def set_version(self, version: int) -> None: - """ - Set the version subfield (RFC 2986, section 4.1) of the certificate - request. 
- - :param int version: The version number. - :return: ``None`` - """ - if not isinstance(version, int): - raise TypeError("version must be an int") - if version != 0: - raise ValueError( - "Invalid version. The only valid version for X509Req is 0." - ) - set_result = _lib.X509_REQ_set_version(self._req, version) - _openssl_assert(set_result == 1) - - def get_version(self) -> int: - """ - Get the version subfield (RFC 2459, section 4.1.2.1) of the certificate - request. - - :return: The value of the version subfield. - :rtype: :py:class:`int` - """ - return _lib.X509_REQ_get_version(self._req) - - def get_subject(self) -> X509Name: - """ - Return the subject of this certificate signing request. - - This creates a new :class:`X509Name` that wraps the underlying subject - name field on the certificate signing request. Modifying it will modify - the underlying signing request, and will have the effect of modifying - any other :class:`X509Name` that refers to this subject. - - :return: The subject of this certificate signing request. - :rtype: :class:`X509Name` - """ - name = X509Name.__new__(X509Name) - name._name = _lib.X509_REQ_get_subject_name(self._req) - _openssl_assert(name._name != _ffi.NULL) - - # The name is owned by the X509Req structure. As long as the X509Name - # Python object is alive, keep the X509Req Python object alive. - name._owner = self - - return name - - def add_extensions(self, extensions: Iterable[X509Extension]) -> None: - """ - Add extensions to the certificate signing request. - - :param extensions: The X.509 extensions to add. - :type extensions: iterable of :py:class:`X509Extension` - :return: ``None`` - """ - warnings.warn( - ( - "This API is deprecated and will be removed in a future " - "version of pyOpenSSL. You should use pyca/cryptography's " - "X.509 APIs instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - stack = _lib.sk_X509_EXTENSION_new_null() - _openssl_assert(stack != _ffi.NULL) - - stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free) - - for ext in extensions: - if not isinstance(ext, X509Extension): - raise ValueError("One of the elements is not an X509Extension") - - # TODO push can fail (here and elsewhere) - _lib.sk_X509_EXTENSION_push(stack, ext._extension) - - add_result = _lib.X509_REQ_add_extensions(self._req, stack) - _openssl_assert(add_result == 1) - - def get_extensions(self) -> list[X509Extension]: - """ - Get X.509 extensions in the certificate signing request. - - :return: The X.509 extensions in this request. - :rtype: :py:class:`list` of :py:class:`X509Extension` objects. - - .. versionadded:: 0.15 - """ - warnings.warn( - ( - "This API is deprecated and will be removed in a future " - "version of pyOpenSSL. You should use pyca/cryptography's " - "X.509 APIs instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - exts = [] - native_exts_obj = _lib.X509_REQ_get_extensions(self._req) - native_exts_obj = _ffi.gc( - native_exts_obj, - lambda x: _lib.sk_X509_EXTENSION_pop_free( - x, - _ffi.addressof(_lib._original_lib, "X509_EXTENSION_free"), - ), - ) - - for i in range(_lib.sk_X509_EXTENSION_num(native_exts_obj)): - ext = X509Extension.__new__(X509Extension) - extension = _lib.X509_EXTENSION_dup( - _lib.sk_X509_EXTENSION_value(native_exts_obj, i) - ) - ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) - exts.append(ext) - return exts - - def sign(self, pkey: PKey, digest: str) -> None: - """ - Sign the certificate signing request with this key and digest type. - - :param pkey: The key pair to sign with. 
- :type pkey: :py:class:`PKey` - :param digest: The name of the message digest to use for the signature, - e.g. :py:data:`"sha256"`. - :type digest: :py:class:`str` - :return: ``None`` - """ - if pkey._only_public: - raise ValueError("Key has only public part") - - if not pkey._initialized: - raise ValueError("Key is uninitialized") - - digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) - if digest_obj == _ffi.NULL: - raise ValueError("No such digest method") - - sign_result = _lib.X509_REQ_sign(self._req, pkey._pkey, digest_obj) - _openssl_assert(sign_result > 0) - - def verify(self, pkey: PKey) -> bool: - """ - Verifies the signature on this certificate signing request. - - :param PKey key: A public key. - - :return: ``True`` if the signature is correct. - :rtype: bool - - :raises OpenSSL.crypto.Error: If the signature is invalid or there is a - problem verifying the signature. - """ - if not isinstance(pkey, PKey): - raise TypeError("pkey must be a PKey instance") - - result = _lib.X509_REQ_verify(self._req, pkey._pkey) - if result <= 0: - _raise_current_error() - - return result - - -class X509: - """ - An X.509 certificate. - """ - - def __init__(self) -> None: - x509 = _lib.X509_new() - _openssl_assert(x509 != _ffi.NULL) - self._x509 = _ffi.gc(x509, _lib.X509_free) - - self._issuer_invalidator = _X509NameInvalidator() - self._subject_invalidator = _X509NameInvalidator() - - @classmethod - def _from_raw_x509_ptr(cls, x509: Any) -> X509: - cert = cls.__new__(cls) - cert._x509 = _ffi.gc(x509, _lib.X509_free) - cert._issuer_invalidator = _X509NameInvalidator() - cert._subject_invalidator = _X509NameInvalidator() - return cert - - def to_cryptography(self) -> x509.Certificate: - """ - Export as a ``cryptography`` certificate. - - :rtype: ``cryptography.x509.Certificate`` - - .. versionadded:: 17.1.0 - """ - from cryptography.x509 import load_der_x509_certificate - - der = dump_certificate(FILETYPE_ASN1, self) - return load_der_x509_certificate(der) - - @classmethod - def from_cryptography(cls, crypto_cert: x509.Certificate) -> X509: - """ - Construct based on a ``cryptography`` *crypto_cert*. - - :param crypto_key: A ``cryptography`` X.509 certificate. - :type crypto_key: ``cryptography.x509.Certificate`` - - :rtype: X509 - - .. versionadded:: 17.1.0 - """ - if not isinstance(crypto_cert, x509.Certificate): - raise TypeError("Must be a certificate") - - from cryptography.hazmat.primitives.serialization import Encoding - - der = crypto_cert.public_bytes(Encoding.DER) - return load_certificate(FILETYPE_ASN1, der) - - def set_version(self, version: int) -> None: - """ - Set the version number of the certificate. Note that the - version value is zero-based, eg. a value of 0 is V1. - - :param version: The version number of the certificate. - :type version: :py:class:`int` - - :return: ``None`` - """ - if not isinstance(version, int): - raise TypeError("version must be an integer") - - _openssl_assert(_lib.X509_set_version(self._x509, version) == 1) - - def get_version(self) -> int: - """ - Return the version number of the certificate. - - :return: The version number of the certificate. - :rtype: :py:class:`int` - """ - return _lib.X509_get_version(self._x509) - - def get_pubkey(self) -> PKey: - """ - Get the public key of the certificate. - - :return: The public key. 
- :rtype: :py:class:`PKey` - """ - pkey = PKey.__new__(PKey) - pkey._pkey = _lib.X509_get_pubkey(self._x509) - if pkey._pkey == _ffi.NULL: - _raise_current_error() - pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) - pkey._only_public = True - return pkey - - def set_pubkey(self, pkey: PKey) -> None: - """ - Set the public key of the certificate. - - :param pkey: The public key. - :type pkey: :py:class:`PKey` - - :return: :py:data:`None` - """ - if not isinstance(pkey, PKey): - raise TypeError("pkey must be a PKey instance") - - set_result = _lib.X509_set_pubkey(self._x509, pkey._pkey) - _openssl_assert(set_result == 1) - - def sign(self, pkey: PKey, digest: str) -> None: - """ - Sign the certificate with this key and digest type. - - :param pkey: The key to sign with. - :type pkey: :py:class:`PKey` - - :param digest: The name of the message digest to use. - :type digest: :py:class:`str` - - :return: :py:data:`None` - """ - if not isinstance(pkey, PKey): - raise TypeError("pkey must be a PKey instance") - - if pkey._only_public: - raise ValueError("Key only has public part") - - if not pkey._initialized: - raise ValueError("Key is uninitialized") - - evp_md = _lib.EVP_get_digestbyname(_byte_string(digest)) - if evp_md == _ffi.NULL: - raise ValueError("No such digest method") - - sign_result = _lib.X509_sign(self._x509, pkey._pkey, evp_md) - _openssl_assert(sign_result > 0) - - def get_signature_algorithm(self) -> bytes: - """ - Return the signature algorithm used in the certificate. - - :return: The name of the algorithm. - :rtype: :py:class:`bytes` - - :raises ValueError: If the signature algorithm is undefined. - - .. versionadded:: 0.13 - """ - sig_alg = _lib.X509_get0_tbs_sigalg(self._x509) - alg = _ffi.new("ASN1_OBJECT **") - _lib.X509_ALGOR_get0(alg, _ffi.NULL, _ffi.NULL, sig_alg) - nid = _lib.OBJ_obj2nid(alg[0]) - if nid == _lib.NID_undef: - raise ValueError("Undefined signature algorithm") - return _ffi.string(_lib.OBJ_nid2ln(nid)) - - def digest(self, digest_name: str) -> bytes: - """ - Return the digest of the X509 object. - - :param digest_name: The name of the digest algorithm to use. - :type digest_name: :py:class:`str` - - :return: The digest of the object, formatted as - :py:const:`b":"`-delimited hex pairs. - :rtype: :py:class:`bytes` - """ - digest = _lib.EVP_get_digestbyname(_byte_string(digest_name)) - if digest == _ffi.NULL: - raise ValueError("No such digest method") - - result_buffer = _ffi.new("unsigned char[]", _lib.EVP_MAX_MD_SIZE) - result_length = _ffi.new("unsigned int[]", 1) - result_length[0] = len(result_buffer) - - digest_result = _lib.X509_digest( - self._x509, digest, result_buffer, result_length - ) - _openssl_assert(digest_result == 1) - - return b":".join( - [ - b16encode(ch).upper() - for ch in _ffi.buffer(result_buffer, result_length[0]) - ] - ) - - def subject_name_hash(self) -> int: - """ - Return the hash of the X509 subject. - - :return: The hash of the subject. - :rtype: :py:class:`int` - """ - return _lib.X509_subject_name_hash(self._x509) - - def set_serial_number(self, serial: int) -> None: - """ - Set the serial number of the certificate. - - :param serial: The new serial number. - :type serial: :py:class:`int` - - :return: :py:data`None` - """ - if not isinstance(serial, int): - raise TypeError("serial must be an integer") - - hex_serial = hex(serial)[2:] - hex_serial_bytes = hex_serial.encode("ascii") - - bignum_serial = _ffi.new("BIGNUM**") - - # BN_hex2bn stores the result in &bignum. 
- result = _lib.BN_hex2bn(bignum_serial, hex_serial_bytes) - _openssl_assert(result != _ffi.NULL) - - asn1_serial = _lib.BN_to_ASN1_INTEGER(bignum_serial[0], _ffi.NULL) - _lib.BN_free(bignum_serial[0]) - _openssl_assert(asn1_serial != _ffi.NULL) - asn1_serial = _ffi.gc(asn1_serial, _lib.ASN1_INTEGER_free) - set_result = _lib.X509_set_serialNumber(self._x509, asn1_serial) - _openssl_assert(set_result == 1) - - def get_serial_number(self) -> int: - """ - Return the serial number of this certificate. - - :return: The serial number. - :rtype: int - """ - asn1_serial = _lib.X509_get_serialNumber(self._x509) - bignum_serial = _lib.ASN1_INTEGER_to_BN(asn1_serial, _ffi.NULL) - try: - hex_serial = _lib.BN_bn2hex(bignum_serial) - try: - hexstring_serial = _ffi.string(hex_serial) - serial = int(hexstring_serial, 16) - return serial - finally: - _lib.OPENSSL_free(hex_serial) - finally: - _lib.BN_free(bignum_serial) - - def gmtime_adj_notAfter(self, amount: int) -> None: - """ - Adjust the time stamp on which the certificate stops being valid. - - :param int amount: The number of seconds by which to adjust the - timestamp. - :return: ``None`` - """ - if not isinstance(amount, int): - raise TypeError("amount must be an integer") - - notAfter = _lib.X509_getm_notAfter(self._x509) - _lib.X509_gmtime_adj(notAfter, amount) - - def gmtime_adj_notBefore(self, amount: int) -> None: - """ - Adjust the timestamp on which the certificate starts being valid. - - :param amount: The number of seconds by which to adjust the timestamp. - :return: ``None`` - """ - if not isinstance(amount, int): - raise TypeError("amount must be an integer") - - notBefore = _lib.X509_getm_notBefore(self._x509) - _lib.X509_gmtime_adj(notBefore, amount) - - def has_expired(self) -> bool: - """ - Check whether the certificate has expired. - - :return: ``True`` if the certificate has expired, ``False`` otherwise. - :rtype: bool - """ - time_bytes = self.get_notAfter() - if time_bytes is None: - raise ValueError("Unable to determine notAfter") - time_string = time_bytes.decode("utf-8") - not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ") - - UTC = datetime.timezone.utc - utcnow = datetime.datetime.now(UTC).replace(tzinfo=None) - return not_after < utcnow - - def _get_boundary_time(self, which: Any) -> bytes | None: - return _get_asn1_time(which(self._x509)) - - def get_notBefore(self) -> bytes | None: - """ - Get the timestamp at which the certificate starts being valid. - - The timestamp is formatted as an ASN.1 TIME:: - - YYYYMMDDhhmmssZ - - :return: A timestamp string, or ``None`` if there is none. - :rtype: bytes or NoneType - """ - return self._get_boundary_time(_lib.X509_getm_notBefore) - - def _set_boundary_time( - self, which: Callable[..., Any], when: bytes - ) -> None: - return _set_asn1_time(which(self._x509), when) - - def set_notBefore(self, when: bytes) -> None: - """ - Set the timestamp at which the certificate starts being valid. - - The timestamp is formatted as an ASN.1 TIME:: - - YYYYMMDDhhmmssZ - - :param bytes when: A timestamp string. - :return: ``None`` - """ - return self._set_boundary_time(_lib.X509_getm_notBefore, when) - - def get_notAfter(self) -> bytes | None: - """ - Get the timestamp at which the certificate stops being valid. - - The timestamp is formatted as an ASN.1 TIME:: - - YYYYMMDDhhmmssZ - - :return: A timestamp string, or ``None`` if there is none. 
- :rtype: bytes or NoneType - """ - return self._get_boundary_time(_lib.X509_getm_notAfter) - - def set_notAfter(self, when: bytes) -> None: - """ - Set the timestamp at which the certificate stops being valid. - - The timestamp is formatted as an ASN.1 TIME:: - - YYYYMMDDhhmmssZ - - :param bytes when: A timestamp string. - :return: ``None`` - """ - return self._set_boundary_time(_lib.X509_getm_notAfter, when) - - def _get_name(self, which: Any) -> X509Name: - name = X509Name.__new__(X509Name) - name._name = which(self._x509) - _openssl_assert(name._name != _ffi.NULL) - - # The name is owned by the X509 structure. As long as the X509Name - # Python object is alive, keep the X509 Python object alive. - name._owner = self - - return name - - def _set_name(self, which: Any, name: X509Name) -> None: - if not isinstance(name, X509Name): - raise TypeError("name must be an X509Name") - set_result = which(self._x509, name._name) - _openssl_assert(set_result == 1) - - def get_issuer(self) -> X509Name: - """ - Return the issuer of this certificate. - - This creates a new :class:`X509Name` that wraps the underlying issuer - name field on the certificate. Modifying it will modify the underlying - certificate, and will have the effect of modifying any other - :class:`X509Name` that refers to this issuer. - - :return: The issuer of this certificate. - :rtype: :class:`X509Name` - """ - name = self._get_name(_lib.X509_get_issuer_name) - self._issuer_invalidator.add(name) - return name - - def set_issuer(self, issuer: X509Name) -> None: - """ - Set the issuer of this certificate. - - :param issuer: The issuer. - :type issuer: :py:class:`X509Name` - - :return: ``None`` - """ - self._set_name(_lib.X509_set_issuer_name, issuer) - self._issuer_invalidator.clear() - - def get_subject(self) -> X509Name: - """ - Return the subject of this certificate. - - This creates a new :class:`X509Name` that wraps the underlying subject - name field on the certificate. Modifying it will modify the underlying - certificate, and will have the effect of modifying any other - :class:`X509Name` that refers to this subject. - - :return: The subject of this certificate. - :rtype: :class:`X509Name` - """ - name = self._get_name(_lib.X509_get_subject_name) - self._subject_invalidator.add(name) - return name - - def set_subject(self, subject: X509Name) -> None: - """ - Set the subject of this certificate. - - :param subject: The subject. - :type subject: :py:class:`X509Name` - - :return: ``None`` - """ - self._set_name(_lib.X509_set_subject_name, subject) - self._subject_invalidator.clear() - - def get_extension_count(self) -> int: - """ - Get the number of extensions on this certificate. - - :return: The number of extensions. - :rtype: :py:class:`int` - - .. versionadded:: 0.12 - """ - return _lib.X509_get_ext_count(self._x509) - - def add_extensions(self, extensions: Iterable[X509Extension]) -> None: - """ - Add extensions to the certificate. - - :param extensions: The extensions to add. - :type extensions: An iterable of :py:class:`X509Extension` objects. - :return: ``None`` - """ - warnings.warn( - ( - "This API is deprecated and will be removed in a future " - "version of pyOpenSSL. You should use pyca/cryptography's " - "X.509 APIs instead." 
- ), - DeprecationWarning, - stacklevel=2, - ) - - for ext in extensions: - if not isinstance(ext, X509Extension): - raise ValueError("One of the elements is not an X509Extension") - - add_result = _lib.X509_add_ext(self._x509, ext._extension, -1) - _openssl_assert(add_result == 1) - - def get_extension(self, index: int) -> X509Extension: - """ - Get a specific extension of the certificate by index. - - Extensions on a certificate are kept in order. The index - parameter selects which extension will be returned. - - :param int index: The index of the extension to retrieve. - :return: The extension at the specified index. - :rtype: :py:class:`X509Extension` - :raises IndexError: If the extension index was out of bounds. - - .. versionadded:: 0.12 - """ - warnings.warn( - ( - "This API is deprecated and will be removed in a future " - "version of pyOpenSSL. You should use pyca/cryptography's " - "X.509 APIs instead." - ), - DeprecationWarning, - stacklevel=2, - ) - - ext = X509Extension.__new__(X509Extension) - ext._extension = _lib.X509_get_ext(self._x509, index) - if ext._extension == _ffi.NULL: - raise IndexError("extension index out of bounds") - - extension = _lib.X509_EXTENSION_dup(ext._extension) - ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) - return ext - - -class X509StoreFlags: - """ - Flags for X509 verification, used to change the behavior of - :class:`X509Store`. - - See `OpenSSL Verification Flags`_ for details. - - .. _OpenSSL Verification Flags: - https://www.openssl.org/docs/manmaster/man3/X509_VERIFY_PARAM_set_flags.html - """ - - CRL_CHECK: int = _lib.X509_V_FLAG_CRL_CHECK - CRL_CHECK_ALL: int = _lib.X509_V_FLAG_CRL_CHECK_ALL - IGNORE_CRITICAL: int = _lib.X509_V_FLAG_IGNORE_CRITICAL - X509_STRICT: int = _lib.X509_V_FLAG_X509_STRICT - ALLOW_PROXY_CERTS: int = _lib.X509_V_FLAG_ALLOW_PROXY_CERTS - POLICY_CHECK: int = _lib.X509_V_FLAG_POLICY_CHECK - EXPLICIT_POLICY: int = _lib.X509_V_FLAG_EXPLICIT_POLICY - INHIBIT_MAP: int = _lib.X509_V_FLAG_INHIBIT_MAP - CHECK_SS_SIGNATURE: int = _lib.X509_V_FLAG_CHECK_SS_SIGNATURE - PARTIAL_CHAIN: int = _lib.X509_V_FLAG_PARTIAL_CHAIN - - -class X509Store: - """ - An X.509 store. - - An X.509 store is used to describe a context in which to verify a - certificate. A description of a context may include a set of certificates - to trust, a set of certificate revocation lists, verification flags and - more. - - An X.509 store, being only a description, cannot be used by itself to - verify a certificate. To carry out the actual verification process, see - :class:`X509StoreContext`. - """ - - def __init__(self) -> None: - store = _lib.X509_STORE_new() - self._store = _ffi.gc(store, _lib.X509_STORE_free) - - def add_cert(self, cert: X509) -> None: - """ - Adds a trusted certificate to this store. - - Adding a certificate with this method adds this certificate as a - *trusted* certificate. - - :param X509 cert: The certificate to add to this store. - - :raises TypeError: If the certificate is not an :class:`X509`. - - :raises OpenSSL.crypto.Error: If OpenSSL was unhappy with your - certificate. - - :return: ``None`` if the certificate was added successfully. - """ - if not isinstance(cert, X509): - raise TypeError() - - res = _lib.X509_STORE_add_cert(self._store, cert._x509) - _openssl_assert(res == 1) - - def add_crl(self, crl: x509.CertificateRevocationList) -> None: - """ - Add a certificate revocation list to this store. 
- - The certificate revocation lists added to a store will only be used if - the associated flags are configured to check certificate revocation - lists. - - .. versionadded:: 16.1.0 - - :param crl: The certificate revocation list to add to this store. - :type crl: ``cryptography.x509.CertificateRevocationList`` - :return: ``None`` if the certificate revocation list was added - successfully. - """ - if isinstance(crl, x509.CertificateRevocationList): - from cryptography.hazmat.primitives.serialization import Encoding - - bio = _new_mem_buf(crl.public_bytes(Encoding.DER)) - openssl_crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL) - _openssl_assert(openssl_crl != _ffi.NULL) - crl = _ffi.gc(openssl_crl, _lib.X509_CRL_free) - else: - raise TypeError( - "CRL must be of type " - "cryptography.x509.CertificateRevocationList" - ) - - _openssl_assert(_lib.X509_STORE_add_crl(self._store, crl) != 0) - - def set_flags(self, flags: int) -> None: - """ - Set verification flags to this store. - - Verification flags can be combined by oring them together. - - .. note:: - - Setting a verification flag sometimes requires clients to add - additional information to the store, otherwise a suitable error will - be raised. - - For example, in setting flags to enable CRL checking a - suitable CRL must be added to the store otherwise an error will be - raised. - - .. versionadded:: 16.1.0 - - :param int flags: The verification flags to set on this store. - See :class:`X509StoreFlags` for available constants. - :return: ``None`` if the verification flags were successfully set. - """ - _openssl_assert(_lib.X509_STORE_set_flags(self._store, flags) != 0) - - def set_time(self, vfy_time: datetime.datetime) -> None: - """ - Set the time against which the certificates are verified. - - Normally the current time is used. - - .. note:: - - For example, you can determine if a certificate was valid at a given - time. - - .. versionadded:: 17.0.0 - - :param datetime vfy_time: The verification time to set on this store. - :return: ``None`` if the verification time was successfully set. - """ - param = _lib.X509_VERIFY_PARAM_new() - param = _ffi.gc(param, _lib.X509_VERIFY_PARAM_free) - - _lib.X509_VERIFY_PARAM_set_time( - param, calendar.timegm(vfy_time.timetuple()) - ) - _openssl_assert(_lib.X509_STORE_set1_param(self._store, param) != 0) - - def load_locations( - self, - cafile: StrOrBytesPath | None, - capath: StrOrBytesPath | None = None, - ) -> None: - """ - Let X509Store know where we can find trusted certificates for the - certificate chain. Note that the certificates have to be in PEM - format. - - If *capath* is passed, it must be a directory prepared using the - ``c_rehash`` tool included with OpenSSL. Either, but not both, of - *cafile* or *capath* may be ``None``. - - .. note:: - - Both *cafile* and *capath* may be set simultaneously. - - Call this method multiple times to add more than one location. - For example, CA certificates, and certificate revocation list bundles - may be passed in *cafile* in subsequent calls to this method. - - .. versionadded:: 20.0 - - :param cafile: In which file we can find the certificates (``bytes`` or - ``unicode``). - :param capath: In which directory we can find the certificates - (``bytes`` or ``unicode``). - - :return: ``None`` if the locations were set successfully. - - :raises OpenSSL.crypto.Error: If both *cafile* and *capath* is ``None`` - or the locations could not be set for any reason. 
- - """ - if cafile is None: - cafile = _ffi.NULL - else: - cafile = _path_bytes(cafile) - - if capath is None: - capath = _ffi.NULL - else: - capath = _path_bytes(capath) - - load_result = _lib.X509_STORE_load_locations( - self._store, cafile, capath - ) - if not load_result: - _raise_current_error() - - -class X509StoreContextError(Exception): - """ - An exception raised when an error occurred while verifying a certificate - using `OpenSSL.X509StoreContext.verify_certificate`. - - :ivar certificate: The certificate which caused verificate failure. - :type certificate: :class:`X509` - """ - - def __init__( - self, message: str, errors: list[Any], certificate: X509 - ) -> None: - super().__init__(message) - self.errors = errors - self.certificate = certificate - - -class X509StoreContext: - """ - An X.509 store context. - - An X.509 store context is used to carry out the actual verification process - of a certificate in a described context. For describing such a context, see - :class:`X509Store`. - - :param X509Store store: The certificates which will be trusted for the - purposes of any verifications. - :param X509 certificate: The certificate to be verified. - :param chain: List of untrusted certificates that may be used for building - the certificate chain. May be ``None``. - :type chain: :class:`list` of :class:`X509` - """ - - def __init__( - self, - store: X509Store, - certificate: X509, - chain: Sequence[X509] | None = None, - ) -> None: - self._store = store - self._cert = certificate - self._chain = self._build_certificate_stack(chain) - - @staticmethod - def _build_certificate_stack( - certificates: Sequence[X509] | None, - ) -> None: - def cleanup(s: Any) -> None: - # Equivalent to sk_X509_pop_free, but we don't - # currently have a CFFI binding for that available - for i in range(_lib.sk_X509_num(s)): - x = _lib.sk_X509_value(s, i) - _lib.X509_free(x) - _lib.sk_X509_free(s) - - if certificates is None or len(certificates) == 0: - return _ffi.NULL - - stack = _lib.sk_X509_new_null() - _openssl_assert(stack != _ffi.NULL) - stack = _ffi.gc(stack, cleanup) - - for cert in certificates: - if not isinstance(cert, X509): - raise TypeError("One of the elements is not an X509 instance") - - _openssl_assert(_lib.X509_up_ref(cert._x509) > 0) - if _lib.sk_X509_push(stack, cert._x509) <= 0: - _lib.X509_free(cert._x509) - _raise_current_error() - - return stack - - @staticmethod - def _exception_from_context(store_ctx: Any) -> X509StoreContextError: - """ - Convert an OpenSSL native context error failure into a Python - exception. - - When a call to native OpenSSL X509_verify_cert fails, additional - information about the failure can be obtained from the store context. - """ - message = _ffi.string( - _lib.X509_verify_cert_error_string( - _lib.X509_STORE_CTX_get_error(store_ctx) - ) - ).decode("utf-8") - errors = [ - _lib.X509_STORE_CTX_get_error(store_ctx), - _lib.X509_STORE_CTX_get_error_depth(store_ctx), - message, - ] - # A context error should always be associated with a certificate, so we - # expect this call to never return :class:`None`. - _x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) - _cert = _lib.X509_dup(_x509) - pycert = X509._from_raw_x509_ptr(_cert) - return X509StoreContextError(message, errors, pycert) - - def _verify_certificate(self) -> Any: - """ - Verifies the certificate and runs an X509_STORE_CTX containing the - results. - - :raises X509StoreContextError: If an error occurred when validating a - certificate in the context. 
Sets ``certificate`` attribute to - indicate which certificate caused the error. - """ - store_ctx = _lib.X509_STORE_CTX_new() - _openssl_assert(store_ctx != _ffi.NULL) - store_ctx = _ffi.gc(store_ctx, _lib.X509_STORE_CTX_free) - - ret = _lib.X509_STORE_CTX_init( - store_ctx, self._store._store, self._cert._x509, self._chain - ) - _openssl_assert(ret == 1) - - ret = _lib.X509_verify_cert(store_ctx) - if ret <= 0: - raise self._exception_from_context(store_ctx) - - return store_ctx - - def set_store(self, store: X509Store) -> None: - """ - Set the context's X.509 store. - - .. versionadded:: 0.15 - - :param X509Store store: The store description which will be used for - the purposes of any *future* verifications. - """ - self._store = store - - def verify_certificate(self) -> None: - """ - Verify a certificate in a context. - - .. versionadded:: 0.15 - - :raises X509StoreContextError: If an error occurred when validating a - certificate in the context. Sets ``certificate`` attribute to - indicate which certificate caused the error. - """ - self._verify_certificate() - - def get_verified_chain(self) -> list[X509]: - """ - Verify a certificate in a context and return the complete validated - chain. - - :raises X509StoreContextError: If an error occurred when validating a - certificate in the context. Sets ``certificate`` attribute to - indicate which certificate caused the error. - - .. versionadded:: 20.0 - """ - store_ctx = self._verify_certificate() - - # Note: X509_STORE_CTX_get1_chain returns a deep copy of the chain. - cert_stack = _lib.X509_STORE_CTX_get1_chain(store_ctx) - _openssl_assert(cert_stack != _ffi.NULL) - - result = [] - for i in range(_lib.sk_X509_num(cert_stack)): - cert = _lib.sk_X509_value(cert_stack, i) - _openssl_assert(cert != _ffi.NULL) - pycert = X509._from_raw_x509_ptr(cert) - result.append(pycert) - - # Free the stack but not the members which are freed by the X509 class. - _lib.sk_X509_free(cert_stack) - return result - - -def load_certificate(type: int, buffer: bytes) -> X509: - """ - Load a certificate (X509) from the string *buffer* encoded with the - type *type*. - - :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) - - :param bytes buffer: The buffer the certificate is stored in - - :return: The X509 object - """ - if isinstance(buffer, str): - buffer = buffer.encode("ascii") - - bio = _new_mem_buf(buffer) - - if type == FILETYPE_PEM: - x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) - elif type == FILETYPE_ASN1: - x509 = _lib.d2i_X509_bio(bio, _ffi.NULL) - else: - raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") - - if x509 == _ffi.NULL: - _raise_current_error() - - return X509._from_raw_x509_ptr(x509) - - -def dump_certificate(type: int, cert: X509) -> bytes: - """ - Dump the certificate *cert* into a buffer string encoded with the type - *type*. 
- - :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or - FILETYPE_TEXT) - :param cert: The certificate to dump - :return: The buffer with the dumped certificate in - """ - bio = _new_mem_buf() - - if type == FILETYPE_PEM: - result_code = _lib.PEM_write_bio_X509(bio, cert._x509) - elif type == FILETYPE_ASN1: - result_code = _lib.i2d_X509_bio(bio, cert._x509) - elif type == FILETYPE_TEXT: - result_code = _lib.X509_print_ex(bio, cert._x509, 0, 0) - else: - raise ValueError( - "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " - "FILETYPE_TEXT" - ) - - _openssl_assert(result_code == 1) - return _bio_to_string(bio) - - -def dump_publickey(type: int, pkey: PKey) -> bytes: - """ - Dump a public key to a buffer. - - :param type: The file type (one of :data:`FILETYPE_PEM` or - :data:`FILETYPE_ASN1`). - :param PKey pkey: The public key to dump - :return: The buffer with the dumped key in it. - :rtype: bytes - """ - bio = _new_mem_buf() - if type == FILETYPE_PEM: - write_bio = _lib.PEM_write_bio_PUBKEY - elif type == FILETYPE_ASN1: - write_bio = _lib.i2d_PUBKEY_bio - else: - raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") - - result_code = write_bio(bio, pkey._pkey) - if result_code != 1: # pragma: no cover - _raise_current_error() - - return _bio_to_string(bio) - - -def dump_privatekey( - type: int, - pkey: PKey, - cipher: str | None = None, - passphrase: PassphraseCallableT | None = None, -) -> bytes: - """ - Dump the private key *pkey* into a buffer string encoded with the type - *type*. Optionally (if *type* is :const:`FILETYPE_PEM`) encrypting it - using *cipher* and *passphrase*. - - :param type: The file type (one of :const:`FILETYPE_PEM`, - :const:`FILETYPE_ASN1`, or :const:`FILETYPE_TEXT`) - :param PKey pkey: The PKey to dump - :param cipher: (optional) if encrypted PEM format, the cipher to use - :param passphrase: (optional) if encrypted PEM format, this can be either - the passphrase to use, or a callback for providing the passphrase. 
- - :return: The buffer with the dumped key in - :rtype: bytes - """ - bio = _new_mem_buf() - - if not isinstance(pkey, PKey): - raise TypeError("pkey must be a PKey") - - if cipher is not None: - if passphrase is None: - raise TypeError( - "if a value is given for cipher " - "one must also be given for passphrase" - ) - cipher_obj = _lib.EVP_get_cipherbyname(_byte_string(cipher)) - if cipher_obj == _ffi.NULL: - raise ValueError("Invalid cipher name") - else: - cipher_obj = _ffi.NULL - - helper = _PassphraseHelper(type, passphrase) - if type == FILETYPE_PEM: - result_code = _lib.PEM_write_bio_PrivateKey( - bio, - pkey._pkey, - cipher_obj, - _ffi.NULL, - 0, - helper.callback, - helper.callback_args, - ) - helper.raise_if_problem() - elif type == FILETYPE_ASN1: - result_code = _lib.i2d_PrivateKey_bio(bio, pkey._pkey) - elif type == FILETYPE_TEXT: - if _lib.EVP_PKEY_id(pkey._pkey) != _lib.EVP_PKEY_RSA: - raise TypeError("Only RSA keys are supported for FILETYPE_TEXT") - - rsa = _ffi.gc(_lib.EVP_PKEY_get1_RSA(pkey._pkey), _lib.RSA_free) - result_code = _lib.RSA_print(bio, rsa, 0) - else: - raise ValueError( - "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " - "FILETYPE_TEXT" - ) - - _openssl_assert(result_code != 0) - - return _bio_to_string(bio) - - -class _PassphraseHelper: - def __init__( - self, - type: int, - passphrase: PassphraseCallableT | None, - more_args: bool = False, - truncate: bool = False, - ) -> None: - if type != FILETYPE_PEM and passphrase is not None: - raise ValueError( - "only FILETYPE_PEM key format supports encryption" - ) - self._passphrase = passphrase - self._more_args = more_args - self._truncate = truncate - self._problems: list[Exception] = [] - - @property - def callback(self) -> Any: - if self._passphrase is None: - return _ffi.NULL - elif isinstance(self._passphrase, bytes) or callable(self._passphrase): - return _ffi.callback("pem_password_cb", self._read_passphrase) - else: - raise TypeError( - "Last argument must be a byte string or a callable." - ) - - @property - def callback_args(self) -> Any: - if self._passphrase is None: - return _ffi.NULL - elif isinstance(self._passphrase, bytes) or callable(self._passphrase): - return _ffi.NULL - else: - raise TypeError( - "Last argument must be a byte string or a callable." - ) - - def raise_if_problem(self, exceptionType: type[Exception] = Error) -> None: - if self._problems: - # Flush the OpenSSL error queue - try: - _exception_from_error_queue(exceptionType) - except exceptionType: - pass - - raise self._problems.pop(0) - - def _read_passphrase( - self, buf: Any, size: int, rwflag: Any, userdata: Any - ) -> int: - try: - if callable(self._passphrase): - if self._more_args: - result = self._passphrase(size, rwflag, userdata) - else: - result = self._passphrase(rwflag) - else: - assert self._passphrase is not None - result = self._passphrase - if not isinstance(result, bytes): - raise ValueError("Bytes expected") - if len(result) > size: - if self._truncate: - result = result[:size] - else: - raise ValueError( - "passphrase returned by callback is too long" - ) - for i in range(len(result)): - buf[i] = result[i : i + 1] - return len(result) - except Exception as e: - self._problems.append(e) - return 0 - - -def load_publickey(type: int, buffer: str | bytes) -> PKey: - """ - Load a public key from a buffer. - - :param type: The file type (one of :data:`FILETYPE_PEM`, - :data:`FILETYPE_ASN1`). - :param buffer: The buffer the key is stored in. 
- :type buffer: A Python string object, either unicode or bytestring. - :return: The PKey object. - :rtype: :class:`PKey` - """ - if isinstance(buffer, str): - buffer = buffer.encode("ascii") - - bio = _new_mem_buf(buffer) - - if type == FILETYPE_PEM: - evp_pkey = _lib.PEM_read_bio_PUBKEY( - bio, _ffi.NULL, _ffi.NULL, _ffi.NULL - ) - elif type == FILETYPE_ASN1: - evp_pkey = _lib.d2i_PUBKEY_bio(bio, _ffi.NULL) - else: - raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") - - if evp_pkey == _ffi.NULL: - _raise_current_error() - - pkey = PKey.__new__(PKey) - pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) - pkey._only_public = True - return pkey - - -def load_privatekey( - type: int, - buffer: str | bytes, - passphrase: PassphraseCallableT | None = None, -) -> PKey: - """ - Load a private key (PKey) from the string *buffer* encoded with the type - *type*. - - :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) - :param buffer: The buffer the key is stored in - :param passphrase: (optional) if encrypted PEM format, this can be - either the passphrase to use, or a callback for - providing the passphrase. - - :return: The PKey object - """ - if isinstance(buffer, str): - buffer = buffer.encode("ascii") - - bio = _new_mem_buf(buffer) - - helper = _PassphraseHelper(type, passphrase) - if type == FILETYPE_PEM: - evp_pkey = _lib.PEM_read_bio_PrivateKey( - bio, _ffi.NULL, helper.callback, helper.callback_args - ) - helper.raise_if_problem() - elif type == FILETYPE_ASN1: - evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL) - else: - raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") - - if evp_pkey == _ffi.NULL: - _raise_current_error() - - pkey = PKey.__new__(PKey) - pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) - return pkey - - -def dump_certificate_request(type: int, req: X509Req) -> bytes: - """ - Dump the certificate request *req* into a buffer string encoded with the - type *type*. - - :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) - :param req: The certificate request to dump - :return: The buffer with the dumped certificate request in - - - .. deprecated:: 24.2.0 - Use `cryptography.x509.CertificateSigningRequest` instead. - """ - bio = _new_mem_buf() - - if type == FILETYPE_PEM: - result_code = _lib.PEM_write_bio_X509_REQ(bio, req._req) - elif type == FILETYPE_ASN1: - result_code = _lib.i2d_X509_REQ_bio(bio, req._req) - elif type == FILETYPE_TEXT: - result_code = _lib.X509_REQ_print_ex(bio, req._req, 0, 0) - else: - raise ValueError( - "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " - "FILETYPE_TEXT" - ) - - _openssl_assert(result_code != 0) - - return _bio_to_string(bio) - - -_dump_certificate_request_internal = dump_certificate_request - -utils.deprecated( - dump_certificate_request, - __name__, - ( - "CSR support in pyOpenSSL is deprecated. You should use the APIs " - "in cryptography." - ), - DeprecationWarning, - name="dump_certificate_request", -) - - -def load_certificate_request(type: int, buffer: bytes) -> X509Req: - """ - Load a certificate request (X509Req) from the string *buffer* encoded with - the type *type*. - - :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) - :param buffer: The buffer the certificate request is stored in - :return: The X509Req object - - .. deprecated:: 24.2.0 - Use `cryptography.x509.load_der_x509_csr` or - `cryptography.x509.load_pem_x509_csr` instead. 
- """ - if isinstance(buffer, str): - buffer = buffer.encode("ascii") - - bio = _new_mem_buf(buffer) - - if type == FILETYPE_PEM: - req = _lib.PEM_read_bio_X509_REQ(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) - elif type == FILETYPE_ASN1: - req = _lib.d2i_X509_REQ_bio(bio, _ffi.NULL) - else: - raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") - - _openssl_assert(req != _ffi.NULL) - - x509req = X509Req.__new__(X509Req) - x509req._req = _ffi.gc(req, _lib.X509_REQ_free) - return x509req - - -_load_certificate_request_internal = load_certificate_request - -utils.deprecated( - load_certificate_request, - __name__, - ( - "CSR support in pyOpenSSL is deprecated. You should use the APIs " - "in cryptography." - ), - DeprecationWarning, - name="load_certificate_request", -) diff --git a/venv/Lib/site-packages/OpenSSL/debug.py b/venv/Lib/site-packages/OpenSSL/debug.py deleted file mode 100644 index e0ed3f8..0000000 --- a/venv/Lib/site-packages/OpenSSL/debug.py +++ /dev/null @@ -1,40 +0,0 @@ -import ssl -import sys - -import cffi -import cryptography - -import OpenSSL.SSL - -from . import version - -_env_info = """\ -pyOpenSSL: {pyopenssl} -cryptography: {cryptography} -cffi: {cffi} -cryptography's compiled against OpenSSL: {crypto_openssl_compile} -cryptography's linked OpenSSL: {crypto_openssl_link} -Python's OpenSSL: {python_openssl} -Python executable: {python} -Python version: {python_version} -Platform: {platform} -sys.path: {sys_path}""".format( - pyopenssl=version.__version__, - crypto_openssl_compile=OpenSSL._util.ffi.string( - OpenSSL._util.lib.OPENSSL_VERSION_TEXT, - ).decode("ascii"), - crypto_openssl_link=OpenSSL.SSL.SSLeay_version( - OpenSSL.SSL.SSLEAY_VERSION - ).decode("ascii"), - python_openssl=getattr(ssl, "OPENSSL_VERSION", "n/a"), - cryptography=cryptography.__version__, - cffi=cffi.__version__, - python=sys.executable, - python_version=sys.version, - platform=sys.platform, - sys_path=sys.path, -) - - -if __name__ == "__main__": - print(_env_info) diff --git a/venv/Lib/site-packages/OpenSSL/py.typed b/venv/Lib/site-packages/OpenSSL/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/OpenSSL/rand.py b/venv/Lib/site-packages/OpenSSL/rand.py deleted file mode 100644 index e57425f..0000000 --- a/venv/Lib/site-packages/OpenSSL/rand.py +++ /dev/null @@ -1,50 +0,0 @@ -""" -PRNG management routines, thin wrappers. -""" - -from __future__ import annotations - -import warnings - -from OpenSSL._util import lib as _lib - -warnings.warn( - "OpenSSL.rand is deprecated - you should use os.urandom instead", - DeprecationWarning, - stacklevel=3, -) - - -def add(buffer: bytes, entropy: int) -> None: - """ - Mix bytes from *string* into the PRNG state. - - The *entropy* argument is (the lower bound of) an estimate of how much - randomness is contained in *string*, measured in bytes. - - For more information, see e.g. :rfc:`1750`. - - This function is only relevant if you are forking Python processes and - need to reseed the CSPRNG after fork. - - :param buffer: Buffer with random data. - :param entropy: The entropy (in bytes) measurement of the buffer. - - :return: :obj:`None` - """ - if not isinstance(buffer, bytes): - raise TypeError("buffer must be a byte string") - - if not isinstance(entropy, int): - raise TypeError("entropy must be an integer") - - _lib.RAND_add(buffer, len(buffer), entropy) - - -def status() -> int: - """ - Check whether the PRNG has been seeded with enough data. - - :return: 1 if the PRNG is seeded enough, 0 otherwise. 
- """ - return _lib.RAND_status() diff --git a/venv/Lib/site-packages/OpenSSL/version.py b/venv/Lib/site-packages/OpenSSL/version.py deleted file mode 100644 index c49055e..0000000 --- a/venv/Lib/site-packages/OpenSSL/version.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) AB Strakt -# Copyright (C) Jean-Paul Calderone -# See LICENSE for details. - -""" -pyOpenSSL - A simple wrapper around the OpenSSL library -""" - -__all__ = [ - "__author__", - "__copyright__", - "__email__", - "__license__", - "__summary__", - "__title__", - "__uri__", - "__version__", -] - -__version__ = "25.3.0" - -__title__ = "pyOpenSSL" -__uri__ = "https://pyopenssl.org/" -__summary__ = "Python wrapper module around the OpenSSL library" -__author__ = "The pyOpenSSL developers" -__email__ = "cryptography-dev@python.org" -__license__ = "Apache License, Version 2.0" -__copyright__ = f"Copyright 2001-2025 {__author__}" diff --git a/venv/Lib/site-packages/PIL/BdfFontFile.py b/venv/Lib/site-packages/PIL/BdfFontFile.py deleted file mode 100644 index bc1416c..0000000 --- a/venv/Lib/site-packages/PIL/BdfFontFile.py +++ /dev/null @@ -1,133 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# bitmap distribution font (bdf) file parser -# -# history: -# 1996-05-16 fl created (as bdf2pil) -# 1997-08-25 fl converted to FontFile driver -# 2001-05-25 fl removed bogus __init__ call -# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) -# 2003-04-22 fl more robustification (from Graham Dumpleton) -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1997-2003 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# - -""" -Parse X Bitmap Distribution Format (BDF) -""" -from __future__ import annotations - -from typing import BinaryIO - -from . import FontFile, Image - -bdf_slant = { - "R": "Roman", - "I": "Italic", - "O": "Oblique", - "RI": "Reverse Italic", - "RO": "Reverse Oblique", - "OT": "Other", -} - -bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"} - - -def bdf_char( - f: BinaryIO, -) -> ( - tuple[ - str, - int, - tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]], - Image.Image, - ] - | None -): - # skip to STARTCHAR - while True: - s = f.readline() - if not s: - return None - if s[:9] == b"STARTCHAR": - break - id = s[9:].strip().decode("ascii") - - # load symbol properties - props = {} - while True: - s = f.readline() - if not s or s[:6] == b"BITMAP": - break - i = s.find(b" ") - props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") - - # load bitmap - bitmap = bytearray() - while True: - s = f.readline() - if not s or s[:7] == b"ENDCHAR": - break - bitmap += s[:-1] - - # The word BBX - # followed by the width in x (BBw), height in y (BBh), - # and x and y displacement (BBxoff0, BByoff0) - # of the lower left corner from the origin of the character. - width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split()) - - # The word DWIDTH - # followed by the width in x and y of the character in device pixels. 
- dwx, dwy = (int(p) for p in props["DWIDTH"].split()) - - bbox = ( - (dwx, dwy), - (x_disp, -y_disp - height, width + x_disp, -y_disp), - (0, 0, width, height), - ) - - try: - im = Image.frombytes("1", (width, height), bitmap, "hex", "1") - except ValueError: - # deal with zero-width characters - im = Image.new("1", (width, height)) - - return id, int(props["ENCODING"]), bbox, im - - -class BdfFontFile(FontFile.FontFile): - """Font file plugin for the X11 BDF format.""" - - def __init__(self, fp: BinaryIO) -> None: - super().__init__() - - s = fp.readline() - if s[:13] != b"STARTFONT 2.1": - msg = "not a valid BDF file" - raise SyntaxError(msg) - - props = {} - comments = [] - - while True: - s = fp.readline() - if not s or s[:13] == b"ENDPROPERTIES": - break - i = s.find(b" ") - props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") - if s[:i] in [b"COMMENT", b"COPYRIGHT"]: - if s.find(b"LogicalFontDescription") < 0: - comments.append(s[i + 1 : -1].decode("ascii")) - - while True: - c = bdf_char(fp) - if not c: - break - id, ch, (xy, dst, src), im = c - if 0 <= ch < len(self.glyph): - self.glyph[ch] = xy, dst, src, im diff --git a/venv/Lib/site-packages/PIL/BlpImagePlugin.py b/venv/Lib/site-packages/PIL/BlpImagePlugin.py deleted file mode 100644 index b9cefaf..0000000 --- a/venv/Lib/site-packages/PIL/BlpImagePlugin.py +++ /dev/null @@ -1,488 +0,0 @@ -""" -Blizzard Mipmap Format (.blp) -Jerome Leclanche - -The contents of this file are hereby released in the public domain (CC0) -Full text of the CC0 license: - https://creativecommons.org/publicdomain/zero/1.0/ - -BLP1 files, used mostly in Warcraft III, are not fully supported. -All types of BLP2 files used in World of Warcraft are supported. - -The BLP file structure consists of a header, up to 16 mipmaps of the -texture - -Texture sizes must be powers of two, though the two dimensions do -not have to be equal; 512x256 is valid, but 512x200 is not. -The first mipmap (mipmap #0) is the full size image; each subsequent -mipmap halves both dimensions. The final mipmap should be 1x1. - -BLP files come in many different flavours: -* JPEG-compressed (type == 0) - only supported for BLP1. -* RAW images (type == 1, encoding == 1). Each mipmap is stored as an - array of 8-bit values, one per pixel, left to right, top to bottom. - Each value is an index to the palette. -* DXT-compressed (type == 1, encoding == 2): -- DXT1 compression is used if alpha_encoding == 0. - - An additional alpha bit is used if alpha_depth == 1. - - DXT3 compression is used if alpha_encoding == 1. - - DXT5 compression is used if alpha_encoding == 7. -""" - -from __future__ import annotations - -import abc -import os -import struct -from enum import IntEnum -from io import BytesIO -from typing import IO - -from . import Image, ImageFile - - -class Format(IntEnum): - JPEG = 0 - - -class Encoding(IntEnum): - UNCOMPRESSED = 1 - DXT = 2 - UNCOMPRESSED_RAW_BGRA = 3 - - -class AlphaEncoding(IntEnum): - DXT1 = 0 - DXT3 = 1 - DXT5 = 7 - - -def unpack_565(i: int) -> tuple[int, int, int]: - return ((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3 - - -def decode_dxt1( - data: bytes, alpha: bool = False -) -> tuple[bytearray, bytearray, bytearray, bytearray]: - """ - input: one "row" of data (i.e. will produce 4*width pixels) - """ - - blocks = len(data) // 8 # number of blocks in row - ret = (bytearray(), bytearray(), bytearray(), bytearray()) - - for block_index in range(blocks): - # Decode next 8-byte block. 
- idx = block_index * 8 - color0, color1, bits = struct.unpack_from("> 2 - - a = 0xFF - if control == 0: - r, g, b = r0, g0, b0 - elif control == 1: - r, g, b = r1, g1, b1 - elif control == 2: - if color0 > color1: - r = (2 * r0 + r1) // 3 - g = (2 * g0 + g1) // 3 - b = (2 * b0 + b1) // 3 - else: - r = (r0 + r1) // 2 - g = (g0 + g1) // 2 - b = (b0 + b1) // 2 - elif control == 3: - if color0 > color1: - r = (2 * r1 + r0) // 3 - g = (2 * g1 + g0) // 3 - b = (2 * b1 + b0) // 3 - else: - r, g, b, a = 0, 0, 0, 0 - - if alpha: - ret[j].extend([r, g, b, a]) - else: - ret[j].extend([r, g, b]) - - return ret - - -def decode_dxt3(data: bytes) -> tuple[bytearray, bytearray, bytearray, bytearray]: - """ - input: one "row" of data (i.e. will produce 4*width pixels) - """ - - blocks = len(data) // 16 # number of blocks in row - ret = (bytearray(), bytearray(), bytearray(), bytearray()) - - for block_index in range(blocks): - idx = block_index * 16 - block = data[idx : idx + 16] - # Decode next 16-byte block. - bits = struct.unpack_from("<8B", block) - color0, color1 = struct.unpack_from(">= 4 - else: - high = True - a &= 0xF - a *= 17 # We get a value between 0 and 15 - - color_code = (code >> 2 * (4 * j + i)) & 0x03 - - if color_code == 0: - r, g, b = r0, g0, b0 - elif color_code == 1: - r, g, b = r1, g1, b1 - elif color_code == 2: - r = (2 * r0 + r1) // 3 - g = (2 * g0 + g1) // 3 - b = (2 * b0 + b1) // 3 - elif color_code == 3: - r = (2 * r1 + r0) // 3 - g = (2 * g1 + g0) // 3 - b = (2 * b1 + b0) // 3 - - ret[j].extend([r, g, b, a]) - - return ret - - -def decode_dxt5(data: bytes) -> tuple[bytearray, bytearray, bytearray, bytearray]: - """ - input: one "row" of data (i.e. will produce 4 * width pixels) - """ - - blocks = len(data) // 16 # number of blocks in row - ret = (bytearray(), bytearray(), bytearray(), bytearray()) - - for block_index in range(blocks): - idx = block_index * 16 - block = data[idx : idx + 16] - # Decode next 16-byte block. 
- a0, a1 = struct.unpack_from("> alphacode_index) & 0x07 - elif alphacode_index == 15: - alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06) - else: # alphacode_index >= 18 and alphacode_index <= 45 - alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07 - - if alphacode == 0: - a = a0 - elif alphacode == 1: - a = a1 - elif a0 > a1: - a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7 - elif alphacode == 6: - a = 0 - elif alphacode == 7: - a = 255 - else: - a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5 - - color_code = (code >> 2 * (4 * j + i)) & 0x03 - - if color_code == 0: - r, g, b = r0, g0, b0 - elif color_code == 1: - r, g, b = r1, g1, b1 - elif color_code == 2: - r = (2 * r0 + r1) // 3 - g = (2 * g0 + g1) // 3 - b = (2 * b0 + b1) // 3 - elif color_code == 3: - r = (2 * r1 + r0) // 3 - g = (2 * g1 + g0) // 3 - b = (2 * b1 + b0) // 3 - - ret[j].extend([r, g, b, a]) - - return ret - - -class BLPFormatError(NotImplementedError): - pass - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] in (b"BLP1", b"BLP2") - - -class BlpImageFile(ImageFile.ImageFile): - """ - Blizzard Mipmap Format - """ - - format = "BLP" - format_description = "Blizzard Mipmap Format" - - def _open(self) -> None: - self.magic = self.fp.read(4) - - self.fp.seek(5, os.SEEK_CUR) - (self._blp_alpha_depth,) = struct.unpack(" tuple[int, int]: - try: - self._read_blp_header() - self._load() - except struct.error as e: - msg = "Truncated BLP file" - raise OSError(msg) from e - return -1, 0 - - @abc.abstractmethod - def _load(self) -> None: - pass - - def _read_blp_header(self) -> None: - assert self.fd is not None - self.fd.seek(4) - (self._blp_compression,) = struct.unpack(" bytes: - return ImageFile._safe_read(self.fd, length) - - def _read_palette(self) -> list[tuple[int, int, int, int]]: - ret = [] - for i in range(256): - try: - b, g, r, a = struct.unpack("<4B", self._safe_read(4)) - except struct.error: - break - ret.append((b, g, r, a)) - return ret - - def _read_bgra(self, palette: list[tuple[int, int, int, int]]) -> bytearray: - data = bytearray() - _data = BytesIO(self._safe_read(self._blp_lengths[0])) - while True: - try: - (offset,) = struct.unpack(" None: - if self._blp_compression == Format.JPEG: - self._decode_jpeg_stream() - - elif self._blp_compression == 1: - if self._blp_encoding in (4, 5): - palette = self._read_palette() - data = self._read_bgra(palette) - self.set_as_raw(data) - else: - msg = f"Unsupported BLP encoding {repr(self._blp_encoding)}" - raise BLPFormatError(msg) - else: - msg = f"Unsupported BLP compression {repr(self._blp_encoding)}" - raise BLPFormatError(msg) - - def _decode_jpeg_stream(self) -> None: - from .JpegImagePlugin import JpegImageFile - - (jpeg_header_size,) = struct.unpack(" None: - palette = self._read_palette() - - assert self.fd is not None - self.fd.seek(self._blp_offsets[0]) - - if self._blp_compression == 1: - # Uncompressed or DirectX compression - - if self._blp_encoding == Encoding.UNCOMPRESSED: - data = self._read_bgra(palette) - - elif self._blp_encoding == Encoding.DXT: - data = bytearray() - if self._blp_alpha_encoding == AlphaEncoding.DXT1: - linesize = (self.size[0] + 3) // 4 * 8 - for yb in range((self.size[1] + 3) // 4): - for d in decode_dxt1( - self._safe_read(linesize), alpha=bool(self._blp_alpha_depth) - ): - data += d - - elif self._blp_alpha_encoding == AlphaEncoding.DXT3: - linesize = (self.size[0] + 3) // 4 * 16 - for yb in range((self.size[1] + 3) // 4): - for d in decode_dxt3(self._safe_read(linesize)): - data += d - 
- elif self._blp_alpha_encoding == AlphaEncoding.DXT5: - linesize = (self.size[0] + 3) // 4 * 16 - for yb in range((self.size[1] + 3) // 4): - for d in decode_dxt5(self._safe_read(linesize)): - data += d - else: - msg = f"Unsupported alpha encoding {repr(self._blp_alpha_encoding)}" - raise BLPFormatError(msg) - else: - msg = f"Unknown BLP encoding {repr(self._blp_encoding)}" - raise BLPFormatError(msg) - - else: - msg = f"Unknown BLP compression {repr(self._blp_compression)}" - raise BLPFormatError(msg) - - self.set_as_raw(data) - - -class BLPEncoder(ImageFile.PyEncoder): - _pushes_fd = True - - def _write_palette(self) -> bytes: - data = b"" - assert self.im is not None - palette = self.im.getpalette("RGBA", "RGBA") - for i in range(len(palette) // 4): - r, g, b, a = palette[i * 4 : (i + 1) * 4] - data += struct.pack("<4B", b, g, r, a) - while len(data) < 256 * 4: - data += b"\x00" * 4 - return data - - def encode(self, bufsize: int) -> tuple[int, int, bytes]: - palette_data = self._write_palette() - - offset = 20 + 16 * 4 * 2 + len(palette_data) - data = struct.pack("<16I", offset, *((0,) * 15)) - - assert self.im is not None - w, h = self.im.size - data += struct.pack("<16I", w * h, *((0,) * 15)) - - data += palette_data - - for y in range(h): - for x in range(w): - data += struct.pack(" None: - if im.mode != "P": - msg = "Unsupported BLP image mode" - raise ValueError(msg) - - magic = b"BLP1" if im.encoderinfo.get("blp_version") == "BLP1" else b"BLP2" - fp.write(magic) - - fp.write(struct.pack(" mode, rawmode - 1: ("P", "P;1"), - 4: ("P", "P;4"), - 8: ("P", "P"), - 16: ("RGB", "BGR;15"), - 24: ("RGB", "BGR"), - 32: ("RGB", "BGRX"), -} - - -def _accept(prefix: bytes) -> bool: - return prefix[:2] == b"BM" - - -def _dib_accept(prefix: bytes) -> bool: - return i32(prefix) in [12, 40, 52, 56, 64, 108, 124] - - -# ============================================================================= -# Image plugin for the Windows BMP format. 
-# ============================================================================= -class BmpImageFile(ImageFile.ImageFile): - """Image plugin for the Windows Bitmap format (BMP)""" - - # ------------------------------------------------------------- Description - format_description = "Windows Bitmap" - format = "BMP" - - # -------------------------------------------------- BMP Compression values - COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5} - for k, v in COMPRESSIONS.items(): - vars()[k] = v - - def _bitmap(self, header=0, offset=0): - """Read relevant info about the BMP""" - read, seek = self.fp.read, self.fp.seek - if header: - seek(header) - # read bmp header size @offset 14 (this is part of the header size) - file_info = {"header_size": i32(read(4)), "direction": -1} - - # -------------------- If requested, read header at a specific position - # read the rest of the bmp header, without its size - header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4) - - # ------------------------------- Windows Bitmap v2, IBM OS/2 Bitmap v1 - # ----- This format has different offsets because of width/height types - # 12: BITMAPCOREHEADER/OS21XBITMAPHEADER - if file_info["header_size"] == 12: - file_info["width"] = i16(header_data, 0) - file_info["height"] = i16(header_data, 2) - file_info["planes"] = i16(header_data, 4) - file_info["bits"] = i16(header_data, 6) - file_info["compression"] = self.RAW - file_info["palette_padding"] = 3 - - # --------------------------------------------- Windows Bitmap v3 to v5 - # 40: BITMAPINFOHEADER - # 52: BITMAPV2HEADER - # 56: BITMAPV3HEADER - # 64: BITMAPCOREHEADER2/OS22XBITMAPHEADER - # 108: BITMAPV4HEADER - # 124: BITMAPV5HEADER - elif file_info["header_size"] in (40, 52, 56, 64, 108, 124): - file_info["y_flip"] = header_data[7] == 0xFF - file_info["direction"] = 1 if file_info["y_flip"] else -1 - file_info["width"] = i32(header_data, 0) - file_info["height"] = ( - i32(header_data, 4) - if not file_info["y_flip"] - else 2**32 - i32(header_data, 4) - ) - file_info["planes"] = i16(header_data, 8) - file_info["bits"] = i16(header_data, 10) - file_info["compression"] = i32(header_data, 12) - # byte size of pixel data - file_info["data_size"] = i32(header_data, 16) - file_info["pixels_per_meter"] = ( - i32(header_data, 20), - i32(header_data, 24), - ) - file_info["colors"] = i32(header_data, 28) - file_info["palette_padding"] = 4 - self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"]) - if file_info["compression"] == self.BITFIELDS: - masks = ["r_mask", "g_mask", "b_mask"] - if len(header_data) >= 48: - if len(header_data) >= 52: - masks.append("a_mask") - else: - file_info["a_mask"] = 0x0 - for idx, mask in enumerate(masks): - file_info[mask] = i32(header_data, 36 + idx * 4) - else: - # 40 byte headers only have the three components in the - # bitfields masks, ref: - # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx - # See also - # https://github.com/python-pillow/Pillow/issues/1293 - # There is a 4th component in the RGBQuad, in the alpha - # location, but it is listed as a reserved component, - # and it is not generally an alpha channel - file_info["a_mask"] = 0x0 - for mask in masks: - file_info[mask] = i32(read(4)) - file_info["rgb_mask"] = ( - file_info["r_mask"], - file_info["g_mask"], - file_info["b_mask"], - ) - file_info["rgba_mask"] = ( - file_info["r_mask"], - file_info["g_mask"], - file_info["b_mask"], - file_info["a_mask"], - ) - else: - msg 
= f"Unsupported BMP header type ({file_info['header_size']})" - raise OSError(msg) - - # ------------------ Special case : header is reported 40, which - # ---------------------- is shorter than real size for bpp >= 16 - self._size = file_info["width"], file_info["height"] - - # ------- If color count was not found in the header, compute from bits - file_info["colors"] = ( - file_info["colors"] - if file_info.get("colors", 0) - else (1 << file_info["bits"]) - ) - if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8: - offset += 4 * file_info["colors"] - - # ---------------------- Check bit depth for unusual unsupported values - self._mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None)) - if self.mode is None: - msg = f"Unsupported BMP pixel depth ({file_info['bits']})" - raise OSError(msg) - - # ---------------- Process BMP with Bitfields compression (not palette) - decoder_name = "raw" - if file_info["compression"] == self.BITFIELDS: - SUPPORTED = { - 32: [ - (0xFF0000, 0xFF00, 0xFF, 0x0), - (0xFF000000, 0xFF0000, 0xFF00, 0x0), - (0xFF000000, 0xFF00, 0xFF, 0x0), - (0xFF000000, 0xFF0000, 0xFF00, 0xFF), - (0xFF, 0xFF00, 0xFF0000, 0xFF000000), - (0xFF0000, 0xFF00, 0xFF, 0xFF000000), - (0xFF000000, 0xFF00, 0xFF, 0xFF0000), - (0x0, 0x0, 0x0, 0x0), - ], - 24: [(0xFF0000, 0xFF00, 0xFF)], - 16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)], - } - MASK_MODES = { - (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX", - (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR", - (32, (0xFF000000, 0xFF00, 0xFF, 0x0)): "BGXR", - (32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR", - (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA", - (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA", - (32, (0xFF000000, 0xFF00, 0xFF, 0xFF0000)): "BGAR", - (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", - (24, (0xFF0000, 0xFF00, 0xFF)): "BGR", - (16, (0xF800, 0x7E0, 0x1F)): "BGR;16", - (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15", - } - if file_info["bits"] in SUPPORTED: - if ( - file_info["bits"] == 32 - and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]] - ): - raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])] - self._mode = "RGBA" if "A" in raw_mode else self.mode - elif ( - file_info["bits"] in (24, 16) - and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]] - ): - raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])] - else: - msg = "Unsupported BMP bitfields layout" - raise OSError(msg) - else: - msg = "Unsupported BMP bitfields layout" - raise OSError(msg) - elif file_info["compression"] == self.RAW: - if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset - raw_mode, self._mode = "BGRA", "RGBA" - elif file_info["compression"] in (self.RLE8, self.RLE4): - decoder_name = "bmp_rle" - else: - msg = f"Unsupported BMP compression ({file_info['compression']})" - raise OSError(msg) - - # --------------- Once the header is processed, process the palette/LUT - if self.mode == "P": # Paletted for 1, 4 and 8 bit images - # ---------------------------------------------------- 1-bit images - if not (0 < file_info["colors"] <= 65536): - msg = f"Unsupported BMP Palette size ({file_info['colors']})" - raise OSError(msg) - else: - padding = file_info["palette_padding"] - palette = read(padding * file_info["colors"]) - grayscale = True - indices = ( - (0, 255) - if file_info["colors"] == 2 - else list(range(file_info["colors"])) - ) - - # ----------------- Check if grayscale and ignore palette if so - for ind, val in enumerate(indices): - rgb = palette[ind * padding : ind * padding + 
3] - if rgb != o8(val) * 3: - grayscale = False - - # ------- If all colors are gray, white or black, ditch palette - if grayscale: - self._mode = "1" if file_info["colors"] == 2 else "L" - raw_mode = self.mode - else: - self._mode = "P" - self.palette = ImagePalette.raw( - "BGRX" if padding == 4 else "BGR", palette - ) - - # ---------------------------- Finally set the tile data for the plugin - self.info["compression"] = file_info["compression"] - args = [raw_mode] - if decoder_name == "bmp_rle": - args.append(file_info["compression"] == self.RLE4) - else: - args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3)) - args.append(file_info["direction"]) - self.tile = [ - ( - decoder_name, - (0, 0, file_info["width"], file_info["height"]), - offset or self.fp.tell(), - tuple(args), - ) - ] - - def _open(self) -> None: - """Open file, check magic number and read header""" - # read 14 bytes: magic number, filesize, reserved, header final offset - head_data = self.fp.read(14) - # choke if the file does not have the required magic bytes - if not _accept(head_data): - msg = "Not a BMP file" - raise SyntaxError(msg) - # read the start position of the BMP image data (u32) - offset = i32(head_data, 10) - # load bitmap information (offset=raster info) - self._bitmap(offset=offset) - - -class BmpRleDecoder(ImageFile.PyDecoder): - _pulls_fd = True - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - rle4 = self.args[1] - data = bytearray() - x = 0 - dest_length = self.state.xsize * self.state.ysize - while len(data) < dest_length: - pixels = self.fd.read(1) - byte = self.fd.read(1) - if not pixels or not byte: - break - num_pixels = pixels[0] - if num_pixels: - # encoded mode - if x + num_pixels > self.state.xsize: - # Too much data for row - num_pixels = max(0, self.state.xsize - x) - if rle4: - first_pixel = o8(byte[0] >> 4) - second_pixel = o8(byte[0] & 0x0F) - for index in range(num_pixels): - if index % 2 == 0: - data += first_pixel - else: - data += second_pixel - else: - data += byte * num_pixels - x += num_pixels - else: - if byte[0] == 0: - # end of line - while len(data) % self.state.xsize != 0: - data += b"\x00" - x = 0 - elif byte[0] == 1: - # end of bitmap - break - elif byte[0] == 2: - # delta - bytes_read = self.fd.read(2) - if len(bytes_read) < 2: - break - right, up = self.fd.read(2) - data += b"\x00" * (right + up * self.state.xsize) - x = len(data) % self.state.xsize - else: - # absolute mode - if rle4: - # 2 pixels per byte - byte_count = byte[0] // 2 - bytes_read = self.fd.read(byte_count) - for byte_read in bytes_read: - data += o8(byte_read >> 4) - data += o8(byte_read & 0x0F) - else: - byte_count = byte[0] - bytes_read = self.fd.read(byte_count) - data += bytes_read - if len(bytes_read) < byte_count: - break - x += byte[0] - - # align to 16-bit word boundary - if self.fd.tell() % 2 != 0: - self.fd.seek(1, os.SEEK_CUR) - rawmode = "L" if self.mode == "L" else "P" - self.set_as_raw(bytes(data), (rawmode, 0, self.args[-1])) - return -1, 0 - - -# ============================================================================= -# Image plugin for the DIB format (BMP alias) -# ============================================================================= -class DibImageFile(BmpImageFile): - format = "DIB" - format_description = "Windows Bitmap" - - def _open(self) -> None: - self._bitmap() - - -# -# -------------------------------------------------------------------- -# Write BMP file - - -SAVE = { - "1": ("1", 1, 2), - "L": ("L", 8, 
256), - "P": ("P", 8, 256), - "RGB": ("BGR", 24, 0), - "RGBA": ("BGRA", 32, 0), -} - - -def _dib_save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - _save(im, fp, filename, False) - - -def _save( - im: Image.Image, fp: IO[bytes], filename: str | bytes, bitmap_header: bool = True -) -> None: - try: - rawmode, bits, colors = SAVE[im.mode] - except KeyError as e: - msg = f"cannot write mode {im.mode} as BMP" - raise OSError(msg) from e - - info = im.encoderinfo - - dpi = info.get("dpi", (96, 96)) - - # 1 meter == 39.3701 inches - ppm = tuple(int(x * 39.3701 + 0.5) for x in dpi) - - stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3) - header = 40 # or 64 for OS/2 version 2 - image = stride * im.size[1] - - if im.mode == "1": - palette = b"".join(o8(i) * 4 for i in (0, 255)) - elif im.mode == "L": - palette = b"".join(o8(i) * 4 for i in range(256)) - elif im.mode == "P": - palette = im.im.getpalette("RGB", "BGRX") - colors = len(palette) // 4 - else: - palette = None - - # bitmap header - if bitmap_header: - offset = 14 + header + colors * 4 - file_size = offset + image - if file_size > 2**32 - 1: - msg = "File size is too large for the BMP format" - raise ValueError(msg) - fp.write( - b"BM" # file type (magic) - + o32(file_size) # file size - + o32(0) # reserved - + o32(offset) # image data offset - ) - - # bitmap info header - fp.write( - o32(header) # info header size - + o32(im.size[0]) # width - + o32(im.size[1]) # height - + o16(1) # planes - + o16(bits) # depth - + o32(0) # compression (0=uncompressed) - + o32(image) # size of bitmap - + o32(ppm[0]) # resolution - + o32(ppm[1]) # resolution - + o32(colors) # colors used - + o32(colors) # colors important - ) - - fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) - - if palette: - fp.write(palette) - - ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))]) - - -# -# -------------------------------------------------------------------- -# Registry - - -Image.register_open(BmpImageFile.format, BmpImageFile, _accept) -Image.register_save(BmpImageFile.format, _save) - -Image.register_extension(BmpImageFile.format, ".bmp") - -Image.register_mime(BmpImageFile.format, "image/bmp") - -Image.register_decoder("bmp_rle", BmpRleDecoder) - -Image.register_open(DibImageFile.format, DibImageFile, _dib_accept) -Image.register_save(DibImageFile.format, _dib_save) - -Image.register_extension(DibImageFile.format, ".dib") - -Image.register_mime(DibImageFile.format, "image/bmp") diff --git a/venv/Lib/site-packages/PIL/BufrStubImagePlugin.py b/venv/Lib/site-packages/PIL/BufrStubImagePlugin.py deleted file mode 100644 index 0ee2f65..0000000 --- a/venv/Lib/site-packages/PIL/BufrStubImagePlugin.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# BUFR stub adapter -# -# Copyright (c) 1996-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from typing import IO - -from . import Image, ImageFile - -_handler = None - - -def register_handler(handler: ImageFile.StubHandler | None) -> None: - """ - Install application-specific BUFR image handler. - - :param handler: Handler object. 
- """ - global _handler - _handler = handler - - -# -------------------------------------------------------------------- -# Image adapter - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" - - -class BufrStubImageFile(ImageFile.StubImageFile): - format = "BUFR" - format_description = "BUFR" - - def _open(self) -> None: - offset = self.fp.tell() - - if not _accept(self.fp.read(4)): - msg = "Not a BUFR file" - raise SyntaxError(msg) - - self.fp.seek(offset) - - # make something up - self._mode = "F" - self._size = 1, 1 - - loader = self._load() - if loader: - loader.open(self) - - def _load(self) -> ImageFile.StubHandler | None: - return _handler - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if _handler is None or not hasattr(_handler, "save"): - msg = "BUFR save handler not installed" - raise OSError(msg) - _handler.save(im, fp, filename) - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) -Image.register_save(BufrStubImageFile.format, _save) - -Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/venv/Lib/site-packages/PIL/ContainerIO.py b/venv/Lib/site-packages/PIL/ContainerIO.py deleted file mode 100644 index 0035296..0000000 --- a/venv/Lib/site-packages/PIL/ContainerIO.py +++ /dev/null @@ -1,121 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# a class to read from a container file -# -# History: -# 1995-06-18 fl Created -# 1995-09-07 fl Added readline(), readlines() -# -# Copyright (c) 1997-2001 by Secret Labs AB -# Copyright (c) 1995 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -from typing import IO, AnyStr, Generic, Literal - - -class ContainerIO(Generic[AnyStr]): - """ - A file object that provides read access to a part of an existing - file (for example a TAR file). - """ - - def __init__(self, file: IO[AnyStr], offset: int, length: int) -> None: - """ - Create file object. - - :param file: Existing file. - :param offset: Start of region, in bytes. - :param length: Size of region, in bytes. - """ - self.fh: IO[AnyStr] = file - self.pos = 0 - self.offset = offset - self.length = length - self.fh.seek(offset) - - ## - # Always false. - - def isatty(self) -> bool: - return False - - def seek(self, offset: int, mode: Literal[0, 1, 2] = io.SEEK_SET) -> None: - """ - Move file pointer. - - :param offset: Offset in bytes. - :param mode: Starting position. Use 0 for beginning of region, 1 - for current offset, and 2 for end of region. You cannot move - the pointer outside the defined region. - """ - if mode == 1: - self.pos = self.pos + offset - elif mode == 2: - self.pos = self.length + offset - else: - self.pos = offset - # clamp - self.pos = max(0, min(self.pos, self.length)) - self.fh.seek(self.offset + self.pos) - - def tell(self) -> int: - """ - Get current file pointer. - - :returns: Offset from start of region, in bytes. - """ - return self.pos - - def read(self, n: int = 0) -> AnyStr: - """ - Read data. - - :param n: Number of bytes to read. If omitted or zero, - read until end of region. - :returns: An 8-bit string. 
- """ - if n: - n = min(n, self.length - self.pos) - else: - n = self.length - self.pos - if not n: # EOF - return b"" if "b" in self.fh.mode else "" # type: ignore[return-value] - self.pos = self.pos + n - return self.fh.read(n) - - def readline(self) -> AnyStr: - """ - Read a line of text. - - :returns: An 8-bit string. - """ - s: AnyStr = b"" if "b" in self.fh.mode else "" # type: ignore[assignment] - newline_character = b"\n" if "b" in self.fh.mode else "\n" - while True: - c = self.read(1) - if not c: - break - s = s + c - if c == newline_character: - break - return s - - def readlines(self) -> list[AnyStr]: - """ - Read multiple lines of text. - - :returns: A list of 8-bit strings. - """ - lines = [] - while True: - s = self.readline() - if not s: - break - lines.append(s) - return lines diff --git a/venv/Lib/site-packages/PIL/CurImagePlugin.py b/venv/Lib/site-packages/PIL/CurImagePlugin.py deleted file mode 100644 index 85e2145..0000000 --- a/venv/Lib/site-packages/PIL/CurImagePlugin.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# Windows Cursor support for PIL -# -# notes: -# uses BmpImagePlugin.py to read the bitmap data. -# -# history: -# 96-05-27 fl Created -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import BmpImagePlugin, Image -from ._binary import i16le as i16 -from ._binary import i32le as i32 - -# -# -------------------------------------------------------------------- - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"\0\0\2\0" - - -## -# Image plugin for Windows Cursor files. - - -class CurImageFile(BmpImagePlugin.BmpImageFile): - format = "CUR" - format_description = "Windows Cursor" - - def _open(self) -> None: - offset = self.fp.tell() - - # check magic - s = self.fp.read(6) - if not _accept(s): - msg = "not a CUR file" - raise SyntaxError(msg) - - # pick the largest cursor in the file - m = b"" - for i in range(i16(s, 4)): - s = self.fp.read(16) - if not m: - m = s - elif s[0] > m[0] and s[1] > m[1]: - m = s - if not m: - msg = "No cursors were found" - raise TypeError(msg) - - # load as bitmap - self._bitmap(i32(m, 12) + offset) - - # patch up the bitmap height - self._size = self.size[0], self.size[1] // 2 - d, e, o, a = self.tile[0] - self.tile[0] = d, (0, 0) + self.size, o, a - - -# -# -------------------------------------------------------------------- - -Image.register_open(CurImageFile.format, CurImageFile, _accept) - -Image.register_extension(CurImageFile.format, ".cur") diff --git a/venv/Lib/site-packages/PIL/DcxImagePlugin.py b/venv/Lib/site-packages/PIL/DcxImagePlugin.py deleted file mode 100644 index f67f27d..0000000 --- a/venv/Lib/site-packages/PIL/DcxImagePlugin.py +++ /dev/null @@ -1,80 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# DCX file handling -# -# DCX is a container file format defined by Intel, commonly used -# for fax applications. Each DCX file consists of a directory -# (a list of file offsets) followed by a set of (usually 1-bit) -# PCX files. -# -# History: -# 1995-09-09 fl Created -# 1996-03-20 fl Properly derived from PcxImageFile. -# 1998-07-15 fl Renamed offset attribute to avoid name clash -# 2002-07-30 fl Fixed file handling -# -# Copyright (c) 1997-98 by Secret Labs AB. -# Copyright (c) 1995-96 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. 
-# -from __future__ import annotations - -from . import Image -from ._binary import i32le as i32 -from .PcxImagePlugin import PcxImageFile - -MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? - - -def _accept(prefix: bytes) -> bool: - return len(prefix) >= 4 and i32(prefix) == MAGIC - - -## -# Image plugin for the Intel DCX format. - - -class DcxImageFile(PcxImageFile): - format = "DCX" - format_description = "Intel DCX" - _close_exclusive_fp_after_loading = False - - def _open(self) -> None: - # Header - s = self.fp.read(4) - if not _accept(s): - msg = "not a DCX file" - raise SyntaxError(msg) - - # Component directory - self._offset = [] - for i in range(1024): - offset = i32(self.fp.read(4)) - if not offset: - break - self._offset.append(offset) - - self._fp = self.fp - self.frame = -1 - self.n_frames = len(self._offset) - self.is_animated = self.n_frames > 1 - self.seek(0) - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - self.frame = frame - self.fp = self._fp - self.fp.seek(self._offset[frame]) - PcxImageFile._open(self) - - def tell(self) -> int: - return self.frame - - -Image.register_open(DcxImageFile.format, DcxImageFile, _accept) - -Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/venv/Lib/site-packages/PIL/DdsImagePlugin.py b/venv/Lib/site-packages/PIL/DdsImagePlugin.py deleted file mode 100644 index a57e4ae..0000000 --- a/venv/Lib/site-packages/PIL/DdsImagePlugin.py +++ /dev/null @@ -1,575 +0,0 @@ -""" -A Pillow loader for .dds files (S3TC-compressed aka DXTC) -Jerome Leclanche - -Documentation: -https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt - -The contents of this file are hereby released in the public domain (CC0) -Full text of the CC0 license: -https://creativecommons.org/publicdomain/zero/1.0/ -""" - -from __future__ import annotations - -import io -import struct -import sys -from enum import IntEnum, IntFlag -from typing import IO - -from . 
import Image, ImageFile, ImagePalette -from ._binary import i32le as i32 -from ._binary import o8 -from ._binary import o32le as o32 - -# Magic ("DDS ") -DDS_MAGIC = 0x20534444 - - -# DDS flags -class DDSD(IntFlag): - CAPS = 0x1 - HEIGHT = 0x2 - WIDTH = 0x4 - PITCH = 0x8 - PIXELFORMAT = 0x1000 - MIPMAPCOUNT = 0x20000 - LINEARSIZE = 0x80000 - DEPTH = 0x800000 - - -# DDS caps -class DDSCAPS(IntFlag): - COMPLEX = 0x8 - TEXTURE = 0x1000 - MIPMAP = 0x400000 - - -class DDSCAPS2(IntFlag): - CUBEMAP = 0x200 - CUBEMAP_POSITIVEX = 0x400 - CUBEMAP_NEGATIVEX = 0x800 - CUBEMAP_POSITIVEY = 0x1000 - CUBEMAP_NEGATIVEY = 0x2000 - CUBEMAP_POSITIVEZ = 0x4000 - CUBEMAP_NEGATIVEZ = 0x8000 - VOLUME = 0x200000 - - -# Pixel Format -class DDPF(IntFlag): - ALPHAPIXELS = 0x1 - ALPHA = 0x2 - FOURCC = 0x4 - PALETTEINDEXED8 = 0x20 - RGB = 0x40 - LUMINANCE = 0x20000 - - -# dxgiformat.h -class DXGI_FORMAT(IntEnum): - UNKNOWN = 0 - R32G32B32A32_TYPELESS = 1 - R32G32B32A32_FLOAT = 2 - R32G32B32A32_UINT = 3 - R32G32B32A32_SINT = 4 - R32G32B32_TYPELESS = 5 - R32G32B32_FLOAT = 6 - R32G32B32_UINT = 7 - R32G32B32_SINT = 8 - R16G16B16A16_TYPELESS = 9 - R16G16B16A16_FLOAT = 10 - R16G16B16A16_UNORM = 11 - R16G16B16A16_UINT = 12 - R16G16B16A16_SNORM = 13 - R16G16B16A16_SINT = 14 - R32G32_TYPELESS = 15 - R32G32_FLOAT = 16 - R32G32_UINT = 17 - R32G32_SINT = 18 - R32G8X24_TYPELESS = 19 - D32_FLOAT_S8X24_UINT = 20 - R32_FLOAT_X8X24_TYPELESS = 21 - X32_TYPELESS_G8X24_UINT = 22 - R10G10B10A2_TYPELESS = 23 - R10G10B10A2_UNORM = 24 - R10G10B10A2_UINT = 25 - R11G11B10_FLOAT = 26 - R8G8B8A8_TYPELESS = 27 - R8G8B8A8_UNORM = 28 - R8G8B8A8_UNORM_SRGB = 29 - R8G8B8A8_UINT = 30 - R8G8B8A8_SNORM = 31 - R8G8B8A8_SINT = 32 - R16G16_TYPELESS = 33 - R16G16_FLOAT = 34 - R16G16_UNORM = 35 - R16G16_UINT = 36 - R16G16_SNORM = 37 - R16G16_SINT = 38 - R32_TYPELESS = 39 - D32_FLOAT = 40 - R32_FLOAT = 41 - R32_UINT = 42 - R32_SINT = 43 - R24G8_TYPELESS = 44 - D24_UNORM_S8_UINT = 45 - R24_UNORM_X8_TYPELESS = 46 - X24_TYPELESS_G8_UINT = 47 - R8G8_TYPELESS = 48 - R8G8_UNORM = 49 - R8G8_UINT = 50 - R8G8_SNORM = 51 - R8G8_SINT = 52 - R16_TYPELESS = 53 - R16_FLOAT = 54 - D16_UNORM = 55 - R16_UNORM = 56 - R16_UINT = 57 - R16_SNORM = 58 - R16_SINT = 59 - R8_TYPELESS = 60 - R8_UNORM = 61 - R8_UINT = 62 - R8_SNORM = 63 - R8_SINT = 64 - A8_UNORM = 65 - R1_UNORM = 66 - R9G9B9E5_SHAREDEXP = 67 - R8G8_B8G8_UNORM = 68 - G8R8_G8B8_UNORM = 69 - BC1_TYPELESS = 70 - BC1_UNORM = 71 - BC1_UNORM_SRGB = 72 - BC2_TYPELESS = 73 - BC2_UNORM = 74 - BC2_UNORM_SRGB = 75 - BC3_TYPELESS = 76 - BC3_UNORM = 77 - BC3_UNORM_SRGB = 78 - BC4_TYPELESS = 79 - BC4_UNORM = 80 - BC4_SNORM = 81 - BC5_TYPELESS = 82 - BC5_UNORM = 83 - BC5_SNORM = 84 - B5G6R5_UNORM = 85 - B5G5R5A1_UNORM = 86 - B8G8R8A8_UNORM = 87 - B8G8R8X8_UNORM = 88 - R10G10B10_XR_BIAS_A2_UNORM = 89 - B8G8R8A8_TYPELESS = 90 - B8G8R8A8_UNORM_SRGB = 91 - B8G8R8X8_TYPELESS = 92 - B8G8R8X8_UNORM_SRGB = 93 - BC6H_TYPELESS = 94 - BC6H_UF16 = 95 - BC6H_SF16 = 96 - BC7_TYPELESS = 97 - BC7_UNORM = 98 - BC7_UNORM_SRGB = 99 - AYUV = 100 - Y410 = 101 - Y416 = 102 - NV12 = 103 - P010 = 104 - P016 = 105 - OPAQUE_420 = 106 - YUY2 = 107 - Y210 = 108 - Y216 = 109 - NV11 = 110 - AI44 = 111 - IA44 = 112 - P8 = 113 - A8P8 = 114 - B4G4R4A4_UNORM = 115 - P208 = 130 - V208 = 131 - V408 = 132 - SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 189 - SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 190 - - -class D3DFMT(IntEnum): - UNKNOWN = 0 - R8G8B8 = 20 - A8R8G8B8 = 21 - X8R8G8B8 = 22 - R5G6B5 = 23 - X1R5G5B5 = 24 - A1R5G5B5 = 25 - A4R4G4B4 = 26 - R3G3B2 = 27 - A8 = 28 - 
A8R3G3B2 = 29 - X4R4G4B4 = 30 - A2B10G10R10 = 31 - A8B8G8R8 = 32 - X8B8G8R8 = 33 - G16R16 = 34 - A2R10G10B10 = 35 - A16B16G16R16 = 36 - A8P8 = 40 - P8 = 41 - L8 = 50 - A8L8 = 51 - A4L4 = 52 - V8U8 = 60 - L6V5U5 = 61 - X8L8V8U8 = 62 - Q8W8V8U8 = 63 - V16U16 = 64 - A2W10V10U10 = 67 - D16_LOCKABLE = 70 - D32 = 71 - D15S1 = 73 - D24S8 = 75 - D24X8 = 77 - D24X4S4 = 79 - D16 = 80 - D32F_LOCKABLE = 82 - D24FS8 = 83 - D32_LOCKABLE = 84 - S8_LOCKABLE = 85 - L16 = 81 - VERTEXDATA = 100 - INDEX16 = 101 - INDEX32 = 102 - Q16W16V16U16 = 110 - R16F = 111 - G16R16F = 112 - A16B16G16R16F = 113 - R32F = 114 - G32R32F = 115 - A32B32G32R32F = 116 - CxV8U8 = 117 - A1 = 118 - A2B10G10R10_XR_BIAS = 119 - BINARYBUFFER = 199 - - UYVY = i32(b"UYVY") - R8G8_B8G8 = i32(b"RGBG") - YUY2 = i32(b"YUY2") - G8R8_G8B8 = i32(b"GRGB") - DXT1 = i32(b"DXT1") - DXT2 = i32(b"DXT2") - DXT3 = i32(b"DXT3") - DXT4 = i32(b"DXT4") - DXT5 = i32(b"DXT5") - DX10 = i32(b"DX10") - BC4S = i32(b"BC4S") - BC4U = i32(b"BC4U") - BC5S = i32(b"BC5S") - BC5U = i32(b"BC5U") - ATI1 = i32(b"ATI1") - ATI2 = i32(b"ATI2") - MULTI2_ARGB8 = i32(b"MET1") - - -# Backward compatibility layer -module = sys.modules[__name__] -for item in DDSD: - assert item.name is not None - setattr(module, f"DDSD_{item.name}", item.value) -for item1 in DDSCAPS: - assert item1.name is not None - setattr(module, f"DDSCAPS_{item1.name}", item1.value) -for item2 in DDSCAPS2: - assert item2.name is not None - setattr(module, f"DDSCAPS2_{item2.name}", item2.value) -for item3 in DDPF: - assert item3.name is not None - setattr(module, f"DDPF_{item3.name}", item3.value) - -DDS_FOURCC = DDPF.FOURCC -DDS_RGB = DDPF.RGB -DDS_RGBA = DDPF.RGB | DDPF.ALPHAPIXELS -DDS_LUMINANCE = DDPF.LUMINANCE -DDS_LUMINANCEA = DDPF.LUMINANCE | DDPF.ALPHAPIXELS -DDS_ALPHA = DDPF.ALPHA -DDS_PAL8 = DDPF.PALETTEINDEXED8 - -DDS_HEADER_FLAGS_TEXTURE = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PIXELFORMAT -DDS_HEADER_FLAGS_MIPMAP = DDSD.MIPMAPCOUNT -DDS_HEADER_FLAGS_VOLUME = DDSD.DEPTH -DDS_HEADER_FLAGS_PITCH = DDSD.PITCH -DDS_HEADER_FLAGS_LINEARSIZE = DDSD.LINEARSIZE - -DDS_HEIGHT = DDSD.HEIGHT -DDS_WIDTH = DDSD.WIDTH - -DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS.TEXTURE -DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS.COMPLEX | DDSCAPS.MIPMAP -DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS.COMPLEX - -DDS_CUBEMAP_POSITIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEX -DDS_CUBEMAP_NEGATIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEX -DDS_CUBEMAP_POSITIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEY -DDS_CUBEMAP_NEGATIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEY -DDS_CUBEMAP_POSITIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEZ -DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEZ - -DXT1_FOURCC = D3DFMT.DXT1 -DXT3_FOURCC = D3DFMT.DXT3 -DXT5_FOURCC = D3DFMT.DXT5 - -DXGI_FORMAT_R8G8B8A8_TYPELESS = DXGI_FORMAT.R8G8B8A8_TYPELESS -DXGI_FORMAT_R8G8B8A8_UNORM = DXGI_FORMAT.R8G8B8A8_UNORM -DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = DXGI_FORMAT.R8G8B8A8_UNORM_SRGB -DXGI_FORMAT_BC5_TYPELESS = DXGI_FORMAT.BC5_TYPELESS -DXGI_FORMAT_BC5_UNORM = DXGI_FORMAT.BC5_UNORM -DXGI_FORMAT_BC5_SNORM = DXGI_FORMAT.BC5_SNORM -DXGI_FORMAT_BC6H_UF16 = DXGI_FORMAT.BC6H_UF16 -DXGI_FORMAT_BC6H_SF16 = DXGI_FORMAT.BC6H_SF16 -DXGI_FORMAT_BC7_TYPELESS = DXGI_FORMAT.BC7_TYPELESS -DXGI_FORMAT_BC7_UNORM = DXGI_FORMAT.BC7_UNORM -DXGI_FORMAT_BC7_UNORM_SRGB = DXGI_FORMAT.BC7_UNORM_SRGB - - -class DdsImageFile(ImageFile.ImageFile): - format = "DDS" - format_description = "DirectDraw Surface" - - def _open(self) -> None: - if not 
_accept(self.fp.read(4)): - msg = "not a DDS file" - raise SyntaxError(msg) - (header_size,) = struct.unpack(" None: - pass - - -class DdsRgbDecoder(ImageFile.PyDecoder): - _pulls_fd = True - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - bitcount, masks = self.args - - # Some masks will be padded with zeros, e.g. R 0b11 G 0b1100 - # Calculate how many zeros each mask is padded with - mask_offsets = [] - # And the maximum value of each channel without the padding - mask_totals = [] - for mask in masks: - offset = 0 - if mask != 0: - while mask >> (offset + 1) << (offset + 1) == mask: - offset += 1 - mask_offsets.append(offset) - mask_totals.append(mask >> offset) - - data = bytearray() - bytecount = bitcount // 8 - dest_length = self.state.xsize * self.state.ysize * len(masks) - while len(data) < dest_length: - value = int.from_bytes(self.fd.read(bytecount), "little") - for i, mask in enumerate(masks): - masked_value = value & mask - # Remove the zero padding, and scale it to 8 bits - data += o8( - int(((masked_value >> mask_offsets[i]) / mask_totals[i]) * 255) - ) - self.set_as_raw(data) - return -1, 0 - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode not in ("RGB", "RGBA", "L", "LA"): - msg = f"cannot write mode {im.mode} as DDS" - raise OSError(msg) - - alpha = im.mode[-1] == "A" - if im.mode[0] == "L": - pixel_flags = DDPF.LUMINANCE - rawmode = im.mode - if alpha: - rgba_mask = [0x000000FF, 0x000000FF, 0x000000FF] - else: - rgba_mask = [0xFF000000, 0xFF000000, 0xFF000000] - else: - pixel_flags = DDPF.RGB - rawmode = im.mode[::-1] - rgba_mask = [0x00FF0000, 0x0000FF00, 0x000000FF] - - if alpha: - r, g, b, a = im.split() - im = Image.merge("RGBA", (a, r, g, b)) - if alpha: - pixel_flags |= DDPF.ALPHAPIXELS - rgba_mask.append(0xFF000000 if alpha else 0) - - flags = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PITCH | DDSD.PIXELFORMAT - bitcount = len(im.getbands()) * 8 - pitch = (im.width * bitcount + 7) // 8 - - fp.write( - o32(DDS_MAGIC) - + struct.pack( - "<7I", - 124, # header size - flags, # flags - im.height, - im.width, - pitch, - 0, # depth - 0, # mipmaps - ) - + struct.pack("11I", *((0,) * 11)) # reserved - # pfsize, pfflags, fourcc, bitcount - + struct.pack("<4I", 32, pixel_flags, 0, bitcount) - + struct.pack("<4I", *rgba_mask) # dwRGBABitMask - + struct.pack("<5I", DDSCAPS.TEXTURE, 0, 0, 0, 0) - ) - ImageFile._save( - im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))] - ) - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"DDS " - - -Image.register_open(DdsImageFile.format, DdsImageFile, _accept) -Image.register_decoder("dds_rgb", DdsRgbDecoder) -Image.register_save(DdsImageFile.format, _save) -Image.register_extension(DdsImageFile.format, ".dds") diff --git a/venv/Lib/site-packages/PIL/EpsImagePlugin.py b/venv/Lib/site-packages/PIL/EpsImagePlugin.py deleted file mode 100644 index f31b1c1..0000000 --- a/venv/Lib/site-packages/PIL/EpsImagePlugin.py +++ /dev/null @@ -1,478 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# EPS file handling -# -# History: -# 1995-09-01 fl Created (0.1) -# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2) -# 1996-08-22 fl Don't choke on floating point BoundingBox values -# 1996-08-23 fl Handle files from Macintosh (0.3) -# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) -# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5) -# 2014-05-07 e Handling of EPS with binary preview and fixed resolution -# resizing -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1995-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -import os -import re -import subprocess -import sys -import tempfile -from typing import IO - -from . import Image, ImageFile -from ._binary import i32le as i32 -from ._deprecate import deprecate - -# -------------------------------------------------------------------- - - -split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$") -field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$") - -gs_binary: str | bool | None = None -gs_windows_binary = None - - -def has_ghostscript() -> bool: - global gs_binary, gs_windows_binary - if gs_binary is None: - if sys.platform.startswith("win"): - if gs_windows_binary is None: - import shutil - - for binary in ("gswin32c", "gswin64c", "gs"): - if shutil.which(binary) is not None: - gs_windows_binary = binary - break - else: - gs_windows_binary = False - gs_binary = gs_windows_binary - else: - try: - subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL) - gs_binary = "gs" - except OSError: - gs_binary = False - return gs_binary is not False - - -def Ghostscript(tile, size, fp, scale=1, transparency=False): - """Render an image using Ghostscript""" - global gs_binary - if not has_ghostscript(): - msg = "Unable to locate Ghostscript on paths" - raise OSError(msg) - - # Unpack decoder tile - decoder, tile, offset, data = tile[0] - length, bbox = data - - # Hack to support hi-res rendering - scale = int(scale) or 1 - width = size[0] * scale - height = size[1] * scale - # resolution is dependent on bbox and size - res_x = 72.0 * width / (bbox[2] - bbox[0]) - res_y = 72.0 * height / (bbox[3] - bbox[1]) - - out_fd, outfile = tempfile.mkstemp() - os.close(out_fd) - - infile_temp = None - if hasattr(fp, "name") and os.path.exists(fp.name): - infile = fp.name - else: - in_fd, infile_temp = tempfile.mkstemp() - os.close(in_fd) - infile = infile_temp - - # Ignore length and offset! 
- # Ghostscript can read it - # Copy whole file to read in Ghostscript - with open(infile_temp, "wb") as f: - # fetch length of fp - fp.seek(0, io.SEEK_END) - fsize = fp.tell() - # ensure start position - # go back - fp.seek(0) - lengthfile = fsize - while lengthfile > 0: - s = fp.read(min(lengthfile, 100 * 1024)) - if not s: - break - lengthfile -= len(s) - f.write(s) - - device = "pngalpha" if transparency else "ppmraw" - - # Build Ghostscript command - command = [ - gs_binary, - "-q", # quiet mode - f"-g{width:d}x{height:d}", # set output geometry (pixels) - f"-r{res_x:f}x{res_y:f}", # set input DPI (dots per inch) - "-dBATCH", # exit after processing - "-dNOPAUSE", # don't pause between pages - "-dSAFER", # safe mode - f"-sDEVICE={device}", - f"-sOutputFile={outfile}", # output file - # adjust for image origin - "-c", - f"{-bbox[0]} {-bbox[1]} translate", - "-f", - infile, # input file - # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272) - "-c", - "showpage", - ] - - # push data through Ghostscript - try: - startupinfo = None - if sys.platform.startswith("win"): - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - subprocess.check_call(command, startupinfo=startupinfo) - out_im = Image.open(outfile) - out_im.load() - finally: - try: - os.unlink(outfile) - if infile_temp: - os.unlink(infile_temp) - except OSError: - pass - - im = out_im.im.copy() - out_im.close() - return im - - -class PSFile: - """ - Wrapper for bytesio object that treats either CR or LF as end of line. - This class is no longer used internally, but kept for backwards compatibility. - """ - - def __init__(self, fp): - deprecate( - "PSFile", - 11, - action="If you need the functionality of this class " - "you will need to implement it yourself.", - ) - self.fp = fp - self.char = None - - def seek(self, offset, whence=io.SEEK_SET): - self.char = None - self.fp.seek(offset, whence) - - def readline(self) -> str: - s = [self.char or b""] - self.char = None - - c = self.fp.read(1) - while (c not in b"\r\n") and len(c): - s.append(c) - c = self.fp.read(1) - - self.char = self.fp.read(1) - # line endings can be 1 or 2 of \r \n, in either order - if self.char in b"\r\n": - self.char = None - - return b"".join(s).decode("latin-1") - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) - - -## -# Image plugin for Encapsulated PostScript. This plugin supports only -# a few variants of this format. - - -class EpsImageFile(ImageFile.ImageFile): - """EPS File Parser for the Python Imaging Library""" - - format = "EPS" - format_description = "Encapsulated Postscript" - - mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"} - - def _open(self) -> None: - (length, offset) = self._find_offset(self.fp) - - # go to offset - start of "%!PS" - self.fp.seek(offset) - - self._mode = "RGB" - self._size = None - - byte_arr = bytearray(255) - bytes_mv = memoryview(byte_arr) - bytes_read = 0 - reading_header_comments = True - reading_trailer_comments = False - trailer_reached = False - - def check_required_header_comments() -> None: - """ - The EPS specification requires that some headers exist. - This should be checked when the header comments formally end, - when image data starts, or when the file ends, whichever comes first. 
- """ - if "PS-Adobe" not in self.info: - msg = 'EPS header missing "%!PS-Adobe" comment' - raise SyntaxError(msg) - if "BoundingBox" not in self.info: - msg = 'EPS header missing "%%BoundingBox" comment' - raise SyntaxError(msg) - - def _read_comment(s: str) -> bool: - nonlocal reading_trailer_comments - try: - m = split.match(s) - except re.error as e: - msg = "not an EPS file" - raise SyntaxError(msg) from e - - if not m: - return False - - k, v = m.group(1, 2) - self.info[k] = v - if k == "BoundingBox": - if v == "(atend)": - reading_trailer_comments = True - elif not self._size or (trailer_reached and reading_trailer_comments): - try: - # Note: The DSC spec says that BoundingBox - # fields should be integers, but some drivers - # put floating point values there anyway. - box = [int(float(i)) for i in v.split()] - self._size = box[2] - box[0], box[3] - box[1] - self.tile = [("eps", (0, 0) + self.size, offset, (length, box))] - except Exception: - pass - return True - - while True: - byte = self.fp.read(1) - if byte == b"": - # if we didn't read a byte we must be at the end of the file - if bytes_read == 0: - if reading_header_comments: - check_required_header_comments() - break - elif byte in b"\r\n": - # if we read a line ending character, ignore it and parse what - # we have already read. if we haven't read any other characters, - # continue reading - if bytes_read == 0: - continue - else: - # ASCII/hexadecimal lines in an EPS file must not exceed - # 255 characters, not including line ending characters - if bytes_read >= 255: - # only enforce this for lines starting with a "%", - # otherwise assume it's binary data - if byte_arr[0] == ord("%"): - msg = "not an EPS file" - raise SyntaxError(msg) - else: - if reading_header_comments: - check_required_header_comments() - reading_header_comments = False - # reset bytes_read so we can keep reading - # data until the end of the line - bytes_read = 0 - byte_arr[bytes_read] = byte[0] - bytes_read += 1 - continue - - if reading_header_comments: - # Load EPS header - - # if this line doesn't start with a "%", - # or does start with "%%EndComments", - # then we've reached the end of the header/comments - if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments": - check_required_header_comments() - reading_header_comments = False - continue - - s = str(bytes_mv[:bytes_read], "latin-1") - if not _read_comment(s): - m = field.match(s) - if m: - k = m.group(1) - if k[:8] == "PS-Adobe": - self.info["PS-Adobe"] = k[9:] - else: - self.info[k] = "" - elif s[0] == "%": - # handle non-DSC PostScript comments that some - # tools mistakenly put in the Comments section - pass - else: - msg = "bad EPS header" - raise OSError(msg) - elif bytes_mv[:11] == b"%ImageData:": - # Check for an "ImageData" descriptor - # https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096 - - # Values: - # columns - # rows - # bit depth (1 or 8) - # mode (1: L, 2: LAB, 3: RGB, 4: CMYK) - # number of padding channels - # block size (number of bytes per row per channel) - # binary/ascii (1: binary, 2: ascii) - # data start identifier (the image data follows after a single line - # consisting only of this quoted value) - image_data_values = byte_arr[11:bytes_read].split(None, 7) - columns, rows, bit_depth, mode_id = ( - int(value) for value in image_data_values[:4] - ) - - if bit_depth == 1: - self._mode = "1" - elif bit_depth == 8: - try: - self._mode = self.mode_map[mode_id] - except ValueError: - break - else: - break - - self._size = columns, 
rows - return - elif bytes_mv[:5] == b"%%EOF": - break - elif trailer_reached and reading_trailer_comments: - # Load EPS trailer - s = str(bytes_mv[:bytes_read], "latin-1") - _read_comment(s) - elif bytes_mv[:9] == b"%%Trailer": - trailer_reached = True - bytes_read = 0 - - if not self._size: - msg = "cannot determine EPS bounding box" - raise OSError(msg) - - def _find_offset(self, fp): - s = fp.read(4) - - if s == b"%!PS": - # for HEAD without binary preview - fp.seek(0, io.SEEK_END) - length = fp.tell() - offset = 0 - elif i32(s) == 0xC6D3D0C5: - # FIX for: Some EPS file not handled correctly / issue #302 - # EPS can contain binary data - # or start directly with latin coding - # more info see: - # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf - s = fp.read(8) - offset = i32(s) - length = i32(s, 4) - else: - msg = "not an EPS file" - raise SyntaxError(msg) - - return length, offset - - def load(self, scale=1, transparency=False): - # Load EPS via Ghostscript - if self.tile: - self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency) - self._mode = self.im.mode - self._size = self.im.size - self.tile = [] - return Image.Image.load(self) - - def load_seek(self, pos: int) -> None: - # we can't incrementally load, so force ImageFile.parser to - # use our custom load method by defining this method. - pass - - -# -------------------------------------------------------------------- - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes, eps: int = 1) -> None: - """EPS Writer for the Python Imaging Library.""" - - # make sure image data is available - im.load() - - # determine PostScript image mode - if im.mode == "L": - operator = (8, 1, b"image") - elif im.mode == "RGB": - operator = (8, 3, b"false 3 colorimage") - elif im.mode == "CMYK": - operator = (8, 4, b"false 4 colorimage") - else: - msg = "image mode is not supported" - raise ValueError(msg) - - if eps: - # write EPS header - fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n") - fp.write(b"%%Creator: PIL 0.1 EpsEncode\n") - # fp.write("%%CreationDate: %s"...) - fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size) - fp.write(b"%%Pages: 1\n") - fp.write(b"%%EndComments\n") - fp.write(b"%%Page: 1 1\n") - fp.write(b"%%ImageData: %d %d " % im.size) - fp.write(b'%d %d 0 1 1 "%s"\n' % operator) - - # image header - fp.write(b"gsave\n") - fp.write(b"10 dict begin\n") - fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1])) - fp.write(b"%d %d scale\n" % im.size) - fp.write(b"%d %d 8\n" % im.size) # <= bits - fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) - fp.write(b"{ currentfile buf readhexstring pop } bind\n") - fp.write(operator[2] + b"\n") - if hasattr(fp, "flush"): - fp.flush() - - ImageFile._save(im, fp, [("eps", (0, 0) + im.size, 0, None)]) - - fp.write(b"\n%%%%EndBinary\n") - fp.write(b"grestore end\n") - if hasattr(fp, "flush"): - fp.flush() - - -# -------------------------------------------------------------------- - - -Image.register_open(EpsImageFile.format, EpsImageFile, _accept) - -Image.register_save(EpsImageFile.format, _save) - -Image.register_extensions(EpsImageFile.format, [".ps", ".eps"]) - -Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/venv/Lib/site-packages/PIL/ExifTags.py b/venv/Lib/site-packages/PIL/ExifTags.py deleted file mode 100644 index 39b4aa5..0000000 --- a/venv/Lib/site-packages/PIL/ExifTags.py +++ /dev/null @@ -1,381 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# EXIF tags -# -# Copyright (c) 2003 by Secret Labs AB -# -# See the README file for information on usage and redistribution. -# - -""" -This module provides constants and clear-text names for various -well-known EXIF tags. -""" -from __future__ import annotations - -from enum import IntEnum - - -class Base(IntEnum): - # possibly incomplete - InteropIndex = 0x0001 - ProcessingSoftware = 0x000B - NewSubfileType = 0x00FE - SubfileType = 0x00FF - ImageWidth = 0x0100 - ImageLength = 0x0101 - BitsPerSample = 0x0102 - Compression = 0x0103 - PhotometricInterpretation = 0x0106 - Thresholding = 0x0107 - CellWidth = 0x0108 - CellLength = 0x0109 - FillOrder = 0x010A - DocumentName = 0x010D - ImageDescription = 0x010E - Make = 0x010F - Model = 0x0110 - StripOffsets = 0x0111 - Orientation = 0x0112 - SamplesPerPixel = 0x0115 - RowsPerStrip = 0x0116 - StripByteCounts = 0x0117 - MinSampleValue = 0x0118 - MaxSampleValue = 0x0119 - XResolution = 0x011A - YResolution = 0x011B - PlanarConfiguration = 0x011C - PageName = 0x011D - FreeOffsets = 0x0120 - FreeByteCounts = 0x0121 - GrayResponseUnit = 0x0122 - GrayResponseCurve = 0x0123 - T4Options = 0x0124 - T6Options = 0x0125 - ResolutionUnit = 0x0128 - PageNumber = 0x0129 - TransferFunction = 0x012D - Software = 0x0131 - DateTime = 0x0132 - Artist = 0x013B - HostComputer = 0x013C - Predictor = 0x013D - WhitePoint = 0x013E - PrimaryChromaticities = 0x013F - ColorMap = 0x0140 - HalftoneHints = 0x0141 - TileWidth = 0x0142 - TileLength = 0x0143 - TileOffsets = 0x0144 - TileByteCounts = 0x0145 - SubIFDs = 0x014A - InkSet = 0x014C - InkNames = 0x014D - NumberOfInks = 0x014E - DotRange = 0x0150 - TargetPrinter = 0x0151 - ExtraSamples = 0x0152 - SampleFormat = 0x0153 - SMinSampleValue = 0x0154 - SMaxSampleValue = 0x0155 - TransferRange = 0x0156 - ClipPath = 0x0157 - XClipPathUnits = 0x0158 - YClipPathUnits = 0x0159 - Indexed = 0x015A - JPEGTables = 0x015B - OPIProxy = 0x015F - JPEGProc = 0x0200 - JpegIFOffset = 0x0201 - JpegIFByteCount = 0x0202 - JpegRestartInterval = 0x0203 - JpegLosslessPredictors = 0x0205 - JpegPointTransforms = 0x0206 - JpegQTables = 0x0207 - JpegDCTables = 0x0208 - JpegACTables = 0x0209 - YCbCrCoefficients = 0x0211 - YCbCrSubSampling = 0x0212 - YCbCrPositioning = 0x0213 - ReferenceBlackWhite = 0x0214 - XMLPacket = 0x02BC - RelatedImageFileFormat = 0x1000 - RelatedImageWidth = 0x1001 - RelatedImageLength = 0x1002 - Rating = 0x4746 - RatingPercent = 0x4749 - ImageID = 0x800D - CFARepeatPatternDim = 0x828D - BatteryLevel = 0x828F - Copyright = 0x8298 - ExposureTime = 0x829A - FNumber = 0x829D - IPTCNAA = 0x83BB - ImageResources = 0x8649 - ExifOffset = 0x8769 - InterColorProfile = 0x8773 - ExposureProgram = 0x8822 - SpectralSensitivity = 0x8824 - GPSInfo = 0x8825 - ISOSpeedRatings = 0x8827 - OECF = 0x8828 - Interlace = 0x8829 - TimeZoneOffset = 0x882A - SelfTimerMode = 0x882B - SensitivityType = 0x8830 - StandardOutputSensitivity = 0x8831 - RecommendedExposureIndex = 0x8832 - ISOSpeed = 0x8833 - ISOSpeedLatitudeyyy = 0x8834 - ISOSpeedLatitudezzz = 0x8835 - ExifVersion = 0x9000 - DateTimeOriginal = 0x9003 - DateTimeDigitized = 0x9004 - OffsetTime = 0x9010 - OffsetTimeOriginal = 0x9011 - OffsetTimeDigitized = 0x9012 - ComponentsConfiguration = 0x9101 - CompressedBitsPerPixel = 0x9102 - ShutterSpeedValue = 0x9201 - ApertureValue = 0x9202 - BrightnessValue = 0x9203 - ExposureBiasValue = 0x9204 - MaxApertureValue = 0x9205 - SubjectDistance = 0x9206 - MeteringMode = 0x9207 - LightSource = 0x9208 - Flash = 0x9209 - FocalLength = 0x920A - Noise = 
0x920D - ImageNumber = 0x9211 - SecurityClassification = 0x9212 - ImageHistory = 0x9213 - TIFFEPStandardID = 0x9216 - MakerNote = 0x927C - UserComment = 0x9286 - SubsecTime = 0x9290 - SubsecTimeOriginal = 0x9291 - SubsecTimeDigitized = 0x9292 - AmbientTemperature = 0x9400 - Humidity = 0x9401 - Pressure = 0x9402 - WaterDepth = 0x9403 - Acceleration = 0x9404 - CameraElevationAngle = 0x9405 - XPTitle = 0x9C9B - XPComment = 0x9C9C - XPAuthor = 0x9C9D - XPKeywords = 0x9C9E - XPSubject = 0x9C9F - FlashPixVersion = 0xA000 - ColorSpace = 0xA001 - ExifImageWidth = 0xA002 - ExifImageHeight = 0xA003 - RelatedSoundFile = 0xA004 - ExifInteroperabilityOffset = 0xA005 - FlashEnergy = 0xA20B - SpatialFrequencyResponse = 0xA20C - FocalPlaneXResolution = 0xA20E - FocalPlaneYResolution = 0xA20F - FocalPlaneResolutionUnit = 0xA210 - SubjectLocation = 0xA214 - ExposureIndex = 0xA215 - SensingMethod = 0xA217 - FileSource = 0xA300 - SceneType = 0xA301 - CFAPattern = 0xA302 - CustomRendered = 0xA401 - ExposureMode = 0xA402 - WhiteBalance = 0xA403 - DigitalZoomRatio = 0xA404 - FocalLengthIn35mmFilm = 0xA405 - SceneCaptureType = 0xA406 - GainControl = 0xA407 - Contrast = 0xA408 - Saturation = 0xA409 - Sharpness = 0xA40A - DeviceSettingDescription = 0xA40B - SubjectDistanceRange = 0xA40C - ImageUniqueID = 0xA420 - CameraOwnerName = 0xA430 - BodySerialNumber = 0xA431 - LensSpecification = 0xA432 - LensMake = 0xA433 - LensModel = 0xA434 - LensSerialNumber = 0xA435 - CompositeImage = 0xA460 - CompositeImageCount = 0xA461 - CompositeImageExposureTimes = 0xA462 - Gamma = 0xA500 - PrintImageMatching = 0xC4A5 - DNGVersion = 0xC612 - DNGBackwardVersion = 0xC613 - UniqueCameraModel = 0xC614 - LocalizedCameraModel = 0xC615 - CFAPlaneColor = 0xC616 - CFALayout = 0xC617 - LinearizationTable = 0xC618 - BlackLevelRepeatDim = 0xC619 - BlackLevel = 0xC61A - BlackLevelDeltaH = 0xC61B - BlackLevelDeltaV = 0xC61C - WhiteLevel = 0xC61D - DefaultScale = 0xC61E - DefaultCropOrigin = 0xC61F - DefaultCropSize = 0xC620 - ColorMatrix1 = 0xC621 - ColorMatrix2 = 0xC622 - CameraCalibration1 = 0xC623 - CameraCalibration2 = 0xC624 - ReductionMatrix1 = 0xC625 - ReductionMatrix2 = 0xC626 - AnalogBalance = 0xC627 - AsShotNeutral = 0xC628 - AsShotWhiteXY = 0xC629 - BaselineExposure = 0xC62A - BaselineNoise = 0xC62B - BaselineSharpness = 0xC62C - BayerGreenSplit = 0xC62D - LinearResponseLimit = 0xC62E - CameraSerialNumber = 0xC62F - LensInfo = 0xC630 - ChromaBlurRadius = 0xC631 - AntiAliasStrength = 0xC632 - ShadowScale = 0xC633 - DNGPrivateData = 0xC634 - MakerNoteSafety = 0xC635 - CalibrationIlluminant1 = 0xC65A - CalibrationIlluminant2 = 0xC65B - BestQualityScale = 0xC65C - RawDataUniqueID = 0xC65D - OriginalRawFileName = 0xC68B - OriginalRawFileData = 0xC68C - ActiveArea = 0xC68D - MaskedAreas = 0xC68E - AsShotICCProfile = 0xC68F - AsShotPreProfileMatrix = 0xC690 - CurrentICCProfile = 0xC691 - CurrentPreProfileMatrix = 0xC692 - ColorimetricReference = 0xC6BF - CameraCalibrationSignature = 0xC6F3 - ProfileCalibrationSignature = 0xC6F4 - AsShotProfileName = 0xC6F6 - NoiseReductionApplied = 0xC6F7 - ProfileName = 0xC6F8 - ProfileHueSatMapDims = 0xC6F9 - ProfileHueSatMapData1 = 0xC6FA - ProfileHueSatMapData2 = 0xC6FB - ProfileToneCurve = 0xC6FC - ProfileEmbedPolicy = 0xC6FD - ProfileCopyright = 0xC6FE - ForwardMatrix1 = 0xC714 - ForwardMatrix2 = 0xC715 - PreviewApplicationName = 0xC716 - PreviewApplicationVersion = 0xC717 - PreviewSettingsName = 0xC718 - PreviewSettingsDigest = 0xC719 - PreviewColorSpace = 0xC71A - PreviewDateTime = 0xC71B - 
RawImageDigest = 0xC71C - OriginalRawFileDigest = 0xC71D - SubTileBlockSize = 0xC71E - RowInterleaveFactor = 0xC71F - ProfileLookTableDims = 0xC725 - ProfileLookTableData = 0xC726 - OpcodeList1 = 0xC740 - OpcodeList2 = 0xC741 - OpcodeList3 = 0xC74E - NoiseProfile = 0xC761 - - -"""Maps EXIF tags to tag names.""" -TAGS = { - **{i.value: i.name for i in Base}, - 0x920C: "SpatialFrequencyResponse", - 0x9214: "SubjectLocation", - 0x9215: "ExposureIndex", - 0x828E: "CFAPattern", - 0x920B: "FlashEnergy", - 0x9216: "TIFF/EPStandardID", -} - - -class GPS(IntEnum): - GPSVersionID = 0 - GPSLatitudeRef = 1 - GPSLatitude = 2 - GPSLongitudeRef = 3 - GPSLongitude = 4 - GPSAltitudeRef = 5 - GPSAltitude = 6 - GPSTimeStamp = 7 - GPSSatellites = 8 - GPSStatus = 9 - GPSMeasureMode = 10 - GPSDOP = 11 - GPSSpeedRef = 12 - GPSSpeed = 13 - GPSTrackRef = 14 - GPSTrack = 15 - GPSImgDirectionRef = 16 - GPSImgDirection = 17 - GPSMapDatum = 18 - GPSDestLatitudeRef = 19 - GPSDestLatitude = 20 - GPSDestLongitudeRef = 21 - GPSDestLongitude = 22 - GPSDestBearingRef = 23 - GPSDestBearing = 24 - GPSDestDistanceRef = 25 - GPSDestDistance = 26 - GPSProcessingMethod = 27 - GPSAreaInformation = 28 - GPSDateStamp = 29 - GPSDifferential = 30 - GPSHPositioningError = 31 - - -"""Maps EXIF GPS tags to tag names.""" -GPSTAGS = {i.value: i.name for i in GPS} - - -class Interop(IntEnum): - InteropIndex = 1 - InteropVersion = 2 - RelatedImageFileFormat = 4096 - RelatedImageWidth = 4097 - RelatedImageHeight = 4098 - - -class IFD(IntEnum): - Exif = 34665 - GPSInfo = 34853 - Makernote = 37500 - Interop = 40965 - IFD1 = -1 - - -class LightSource(IntEnum): - Unknown = 0 - Daylight = 1 - Fluorescent = 2 - Tungsten = 3 - Flash = 4 - Fine = 9 - Cloudy = 10 - Shade = 11 - DaylightFluorescent = 12 - DayWhiteFluorescent = 13 - CoolWhiteFluorescent = 14 - WhiteFluorescent = 15 - StandardLightA = 17 - StandardLightB = 18 - StandardLightC = 19 - D55 = 20 - D65 = 21 - D75 = 22 - D50 = 23 - ISO = 24 - Other = 255 diff --git a/venv/Lib/site-packages/PIL/FitsImagePlugin.py b/venv/Lib/site-packages/PIL/FitsImagePlugin.py deleted file mode 100644 index 4846054..0000000 --- a/venv/Lib/site-packages/PIL/FitsImagePlugin.py +++ /dev/null @@ -1,152 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# FITS file handling -# -# Copyright (c) 1998-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import gzip -import math - -from . 
import Image, ImageFile - - -def _accept(prefix: bytes) -> bool: - return prefix[:6] == b"SIMPLE" - - -class FitsImageFile(ImageFile.ImageFile): - format = "FITS" - format_description = "FITS" - - def _open(self) -> None: - assert self.fp is not None - - headers: dict[bytes, bytes] = {} - header_in_progress = False - decoder_name = "" - while True: - header = self.fp.read(80) - if not header: - msg = "Truncated FITS file" - raise OSError(msg) - keyword = header[:8].strip() - if keyword in (b"SIMPLE", b"XTENSION"): - header_in_progress = True - elif headers and not header_in_progress: - # This is now a data unit - break - elif keyword == b"END": - # Seek to the end of the header unit - self.fp.seek(math.ceil(self.fp.tell() / 2880) * 2880) - if not decoder_name: - decoder_name, offset, args = self._parse_headers(headers) - - header_in_progress = False - continue - - if decoder_name: - # Keep going to read past the headers - continue - - value = header[8:].split(b"/")[0].strip() - if value.startswith(b"="): - value = value[1:].strip() - if not headers and (not _accept(keyword) or value != b"T"): - msg = "Not a FITS file" - raise SyntaxError(msg) - headers[keyword] = value - - if not decoder_name: - msg = "No image data" - raise ValueError(msg) - - offset += self.fp.tell() - 80 - self.tile = [(decoder_name, (0, 0) + self.size, offset, args)] - - def _get_size( - self, headers: dict[bytes, bytes], prefix: bytes - ) -> tuple[int, int] | None: - naxis = int(headers[prefix + b"NAXIS"]) - if naxis == 0: - return None - - if naxis == 1: - return 1, int(headers[prefix + b"NAXIS1"]) - else: - return int(headers[prefix + b"NAXIS1"]), int(headers[prefix + b"NAXIS2"]) - - def _parse_headers( - self, headers: dict[bytes, bytes] - ) -> tuple[str, int, tuple[str | int, ...]]: - prefix = b"" - decoder_name = "raw" - offset = 0 - if ( - headers.get(b"XTENSION") == b"'BINTABLE'" - and headers.get(b"ZIMAGE") == b"T" - and headers[b"ZCMPTYPE"] == b"'GZIP_1 '" - ): - no_prefix_size = self._get_size(headers, prefix) or (0, 0) - number_of_bits = int(headers[b"BITPIX"]) - offset = no_prefix_size[0] * no_prefix_size[1] * (number_of_bits // 8) - - prefix = b"Z" - decoder_name = "fits_gzip" - - size = self._get_size(headers, prefix) - if not size: - return "", 0, () - - self._size = size - - number_of_bits = int(headers[prefix + b"BITPIX"]) - if number_of_bits == 8: - self._mode = "L" - elif number_of_bits == 16: - self._mode = "I;16" - elif number_of_bits == 32: - self._mode = "I" - elif number_of_bits in (-32, -64): - self._mode = "F" - - args: tuple[str | int, ...] 
- if decoder_name == "raw": - args = (self.mode, 0, -1) - else: - args = (number_of_bits,) - return decoder_name, offset, args - - -class FitsGzipDecoder(ImageFile.PyDecoder): - _pulls_fd = True - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - value = gzip.decompress(self.fd.read()) - - rows = [] - offset = 0 - number_of_bits = min(self.args[0] // 8, 4) - for y in range(self.state.ysize): - row = bytearray() - for x in range(self.state.xsize): - row += value[offset + (4 - number_of_bits) : offset + 4] - offset += 4 - rows.append(row) - self.set_as_raw(bytes([pixel for row in rows[::-1] for pixel in row])) - return -1, 0 - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(FitsImageFile.format, FitsImageFile, _accept) -Image.register_decoder("fits_gzip", FitsGzipDecoder) - -Image.register_extensions(FitsImageFile.format, [".fit", ".fits"]) diff --git a/venv/Lib/site-packages/PIL/FliImagePlugin.py b/venv/Lib/site-packages/PIL/FliImagePlugin.py deleted file mode 100644 index dceb839..0000000 --- a/venv/Lib/site-packages/PIL/FliImagePlugin.py +++ /dev/null @@ -1,174 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# FLI/FLC file handling. -# -# History: -# 95-09-01 fl Created -# 97-01-03 fl Fixed parser, setup decoder tile -# 98-07-15 fl Renamed offset attribute to avoid name clash -# -# Copyright (c) Secret Labs AB 1997-98. -# Copyright (c) Fredrik Lundh 1995-97. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import os - -from . import Image, ImageFile, ImagePalette -from ._binary import i16le as i16 -from ._binary import i32le as i32 -from ._binary import o8 - -# -# decoder - - -def _accept(prefix: bytes) -> bool: - return ( - len(prefix) >= 6 - and i16(prefix, 4) in [0xAF11, 0xAF12] - and i16(prefix, 14) in [0, 3] # flags - ) - - -## -# Image plugin for the FLI/FLC animation format. Use the seek -# method to load individual frames. 
- - -class FliImageFile(ImageFile.ImageFile): - format = "FLI" - format_description = "Autodesk FLI/FLC Animation" - _close_exclusive_fp_after_loading = False - - def _open(self): - # HEAD - s = self.fp.read(128) - if not (_accept(s) and s[20:22] == b"\x00\x00"): - msg = "not an FLI/FLC file" - raise SyntaxError(msg) - - # frames - self.n_frames = i16(s, 6) - self.is_animated = self.n_frames > 1 - - # image characteristics - self._mode = "P" - self._size = i16(s, 8), i16(s, 10) - - # animation speed - duration = i32(s, 16) - magic = i16(s, 4) - if magic == 0xAF11: - duration = (duration * 1000) // 70 - self.info["duration"] = duration - - # look for palette - palette = [(a, a, a) for a in range(256)] - - s = self.fp.read(16) - - self.__offset = 128 - - if i16(s, 4) == 0xF100: - # prefix chunk; ignore it - self.__offset = self.__offset + i32(s) - self.fp.seek(self.__offset) - s = self.fp.read(16) - - if i16(s, 4) == 0xF1FA: - # look for palette chunk - number_of_subchunks = i16(s, 6) - chunk_size = None - for _ in range(number_of_subchunks): - if chunk_size is not None: - self.fp.seek(chunk_size - 6, os.SEEK_CUR) - s = self.fp.read(6) - chunk_type = i16(s, 4) - if chunk_type in (4, 11): - self._palette(palette, 2 if chunk_type == 11 else 0) - break - chunk_size = i32(s) - if not chunk_size: - break - - palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette] - self.palette = ImagePalette.raw("RGB", b"".join(palette)) - - # set things up to decode first frame - self.__frame = -1 - self._fp = self.fp - self.__rewind = self.fp.tell() - self.seek(0) - - def _palette(self, palette, shift): - # load palette - - i = 0 - for e in range(i16(self.fp.read(2))): - s = self.fp.read(2) - i = i + s[0] - n = s[1] - if n == 0: - n = 256 - s = self.fp.read(n * 3) - for n in range(0, len(s), 3): - r = s[n] << shift - g = s[n + 1] << shift - b = s[n + 2] << shift - palette[i] = (r, g, b) - i += 1 - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - if frame < self.__frame: - self._seek(0) - - for f in range(self.__frame + 1, frame + 1): - self._seek(f) - - def _seek(self, frame: int) -> None: - if frame == 0: - self.__frame = -1 - self._fp.seek(self.__rewind) - self.__offset = 128 - else: - # ensure that the previous frame was loaded - self.load() - - if frame != self.__frame + 1: - msg = f"cannot seek to frame {frame}" - raise ValueError(msg) - self.__frame = frame - - # move to next frame - self.fp = self._fp - self.fp.seek(self.__offset) - - s = self.fp.read(4) - if not s: - msg = "missing frame size" - raise EOFError(msg) - - framesize = i32(s) - - self.decodermaxblock = framesize - self.tile = [("fli", (0, 0) + self.size, self.__offset, None)] - - self.__offset += framesize - - def tell(self) -> int: - return self.__frame - - -# -# registry - -Image.register_open(FliImageFile.format, FliImageFile, _accept) - -Image.register_extensions(FliImageFile.format, [".fli", ".flc"]) diff --git a/venv/Lib/site-packages/PIL/FontFile.py b/venv/Lib/site-packages/PIL/FontFile.py deleted file mode 100644 index 1e0c1c1..0000000 --- a/venv/Lib/site-packages/PIL/FontFile.py +++ /dev/null @@ -1,134 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# base class for raster font file parsers -# -# history: -# 1997-06-05 fl created -# 1997-08-19 fl restrict image width -# -# Copyright (c) 1997-1998 by Secret Labs AB -# Copyright (c) 1997-1998 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. 
-# -from __future__ import annotations - -import os -from typing import BinaryIO - -from . import Image, _binary - -WIDTH = 800 - - -def puti16( - fp: BinaryIO, values: tuple[int, int, int, int, int, int, int, int, int, int] -) -> None: - """Write network order (big-endian) 16-bit sequence""" - for v in values: - if v < 0: - v += 65536 - fp.write(_binary.o16be(v)) - - -class FontFile: - """Base class for raster font file handlers.""" - - bitmap: Image.Image | None = None - - def __init__(self) -> None: - self.info: dict[bytes, bytes | int] = {} - self.glyph: list[ - tuple[ - tuple[int, int], - tuple[int, int, int, int], - tuple[int, int, int, int], - Image.Image, - ] - | None - ] = [None] * 256 - - def __getitem__(self, ix: int) -> ( - tuple[ - tuple[int, int], - tuple[int, int, int, int], - tuple[int, int, int, int], - Image.Image, - ] - | None - ): - return self.glyph[ix] - - def compile(self) -> None: - """Create metrics and bitmap""" - - if self.bitmap: - return - - # create bitmap large enough to hold all data - h = w = maxwidth = 0 - lines = 1 - for glyph in self.glyph: - if glyph: - d, dst, src, im = glyph - h = max(h, src[3] - src[1]) - w = w + (src[2] - src[0]) - if w > WIDTH: - lines += 1 - w = src[2] - src[0] - maxwidth = max(maxwidth, w) - - xsize = maxwidth - ysize = lines * h - - if xsize == 0 and ysize == 0: - return - - self.ysize = h - - # paste glyphs into bitmap - self.bitmap = Image.new("1", (xsize, ysize)) - self.metrics: list[ - tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]] - | None - ] = [None] * 256 - x = y = 0 - for i in range(256): - glyph = self[i] - if glyph: - d, dst, src, im = glyph - xx = src[2] - src[0] - x0, y0 = x, y - x = x + xx - if x > WIDTH: - x, y = 0, y + h - x0, y0 = x, y - x = xx - s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 - self.bitmap.paste(im.crop(src), s) - self.metrics[i] = d, dst, s - - def save(self, filename: str) -> None: - """Save font""" - - self.compile() - - # font data - if not self.bitmap: - msg = "No bitmap created" - raise ValueError(msg) - self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") - - # font metrics - with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp: - fp.write(b"PILfont\n") - fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!! - fp.write(b"DATA\n") - for id in range(256): - m = self.metrics[id] - if not m: - puti16(fp, (0,) * 10) - else: - puti16(fp, m[0] + m[1] + m[2]) diff --git a/venv/Lib/site-packages/PIL/FpxImagePlugin.py b/venv/Lib/site-packages/PIL/FpxImagePlugin.py deleted file mode 100644 index c1927bd..0000000 --- a/venv/Lib/site-packages/PIL/FpxImagePlugin.py +++ /dev/null @@ -1,255 +0,0 @@ -# -# THIS IS WORK IN PROGRESS -# -# The Python Imaging Library. -# $Id$ -# -# FlashPix support for PIL -# -# History: -# 97-01-25 fl Created (reads uncompressed RGB images only) -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import olefile - -from . 
import Image, ImageFile -from ._binary import i32le as i32 - -# we map from colour field tuples to (mode, rawmode) descriptors -MODES = { - # opacity - (0x00007FFE,): ("A", "L"), - # monochrome - (0x00010000,): ("L", "L"), - (0x00018000, 0x00017FFE): ("RGBA", "LA"), - # photo YCC - (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), - (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"), - # standard RGB (NIFRGB) - (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), - (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"), -} - - -# -# -------------------------------------------------------------------- - - -def _accept(prefix: bytes) -> bool: - return prefix[:8] == olefile.MAGIC - - -## -# Image plugin for the FlashPix images. - - -class FpxImageFile(ImageFile.ImageFile): - format = "FPX" - format_description = "FlashPix" - - def _open(self): - # - # read the OLE directory and see if this is a likely - # to be a FlashPix file - - try: - self.ole = olefile.OleFileIO(self.fp) - except OSError as e: - msg = "not an FPX file; invalid OLE file" - raise SyntaxError(msg) from e - - if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": - msg = "not an FPX file; bad root CLSID" - raise SyntaxError(msg) - - self._open_index(1) - - def _open_index(self, index: int = 1) -> None: - # - # get the Image Contents Property Set - - prop = self.ole.getproperties( - [f"Data Object Store {index:06d}", "\005Image Contents"] - ) - - # size (highest resolution) - - self._size = prop[0x1000002], prop[0x1000003] - - size = max(self.size) - i = 1 - while size > 64: - size = size // 2 - i += 1 - self.maxid = i - 1 - - # mode. instead of using a single field for this, flashpix - # requires you to specify the mode for each channel in each - # resolution subimage, and leaves it to the decoder to make - # sure that they all match. for now, we'll cheat and assume - # that this is always the case. 
- - id = self.maxid << 16 - - s = prop[0x2000002 | id] - - bands = i32(s, 4) - if bands > 4: - msg = "Invalid number of bands" - raise OSError(msg) - - # note: for now, we ignore the "uncalibrated" flag - colors = tuple(i32(s, 8 + i * 4) & 0x7FFFFFFF for i in range(bands)) - - self._mode, self.rawmode = MODES[colors] - - # load JPEG tables, if any - self.jpeg = {} - for i in range(256): - id = 0x3000001 | (i << 16) - if id in prop: - self.jpeg[i] = prop[id] - - self._open_subimage(1, self.maxid) - - def _open_subimage(self, index: int = 1, subimage: int = 0) -> None: - # - # setup tile descriptors for a given subimage - - stream = [ - f"Data Object Store {index:06d}", - f"Resolution {subimage:04d}", - "Subimage 0000 Header", - ] - - fp = self.ole.openstream(stream) - - # skip prefix - fp.read(28) - - # header stream - s = fp.read(36) - - size = i32(s, 4), i32(s, 8) - # tilecount = i32(s, 12) - tilesize = i32(s, 16), i32(s, 20) - # channels = i32(s, 24) - offset = i32(s, 28) - length = i32(s, 32) - - if size != self.size: - msg = "subimage mismatch" - raise OSError(msg) - - # get tile descriptors - fp.seek(28 + offset) - s = fp.read(i32(s, 12) * length) - - x = y = 0 - xsize, ysize = size - xtile, ytile = tilesize - self.tile = [] - - for i in range(0, len(s), length): - x1 = min(xsize, x + xtile) - y1 = min(ysize, y + ytile) - - compression = i32(s, i + 8) - - if compression == 0: - self.tile.append( - ( - "raw", - (x, y, x1, y1), - i32(s, i) + 28, - (self.rawmode,), - ) - ) - - elif compression == 1: - # FIXME: the fill decoder is not implemented - self.tile.append( - ( - "fill", - (x, y, x1, y1), - i32(s, i) + 28, - (self.rawmode, s[12:16]), - ) - ) - - elif compression == 2: - internal_color_conversion = s[14] - jpeg_tables = s[15] - rawmode = self.rawmode - - if internal_color_conversion: - # The image is stored as usual (usually YCbCr). - if rawmode == "RGBA": - # For "RGBA", data is stored as YCbCrA based on - # negative RGB. The following trick works around - # this problem : - jpegmode, rawmode = "YCbCrK", "CMYK" - else: - jpegmode = None # let the decoder decide - - else: - # The image is stored as defined by rawmode - jpegmode = rawmode - - self.tile.append( - ( - "jpeg", - (x, y, x1, y1), - i32(s, i) + 28, - (rawmode, jpegmode), - ) - ) - - # FIXME: jpeg tables are tile dependent; the prefix - # data must be placed in the tile descriptor itself! 
- - if jpeg_tables: - self.tile_prefix = self.jpeg[jpeg_tables] - - else: - msg = "unknown/invalid compression" - raise OSError(msg) - - x = x + xtile - if x >= xsize: - x, y = 0, y + ytile - if y >= ysize: - break # isn't really required - - self.stream = stream - self._fp = self.fp - self.fp = None - - def load(self): - if not self.fp: - self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"]) - - return ImageFile.ImageFile.load(self) - - def close(self) -> None: - self.ole.close() - super().close() - - def __exit__(self, *args: object) -> None: - self.ole.close() - super().__exit__() - - -# -# -------------------------------------------------------------------- - - -Image.register_open(FpxImageFile.format, FpxImageFile, _accept) - -Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/venv/Lib/site-packages/PIL/FtexImagePlugin.py b/venv/Lib/site-packages/PIL/FtexImagePlugin.py deleted file mode 100644 index 5acbb49..0000000 --- a/venv/Lib/site-packages/PIL/FtexImagePlugin.py +++ /dev/null @@ -1,115 +0,0 @@ -""" -A Pillow loader for .ftc and .ftu files (FTEX) -Jerome Leclanche - -The contents of this file are hereby released in the public domain (CC0) -Full text of the CC0 license: - https://creativecommons.org/publicdomain/zero/1.0/ - -Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001 - -The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a -packed custom format called FTEX. This file format uses file extensions FTC -and FTU. -* FTC files are compressed textures (using standard texture compression). -* FTU files are not compressed. -Texture File Format -The FTC and FTU texture files both use the same format. This -has the following structure: -{header} -{format_directory} -{data} -Where: -{header} = { - u32:magic, - u32:version, - u32:width, - u32:height, - u32:mipmap_count, - u32:format_count -} - -* The "magic" number is "FTEX". -* "width" and "height" are the dimensions of the texture. -* "mipmap_count" is the number of mipmaps in the texture. -* "format_count" is the number of texture formats (different versions of the -same texture) in this file. - -{format_directory} = format_count * { u32:format, u32:where } - -The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB -uncompressed textures. -The texture data for a format starts at the position "where" in the file. - -Each set of texture data in the file has the following structure: -{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } } -* "mipmap_size" is the number of bytes in that mip level. For compressed -textures this is the size of the texture data compressed with DXT1. For 24 bit -uncompressed textures, this is 3 * width * height. Following this are the image -bytes for that mipmap level. - -Note: All data is stored in little-Endian (Intel) byte order. -""" - -from __future__ import annotations - -import struct -from enum import IntEnum -from io import BytesIO - -from . 
import Image, ImageFile - -MAGIC = b"FTEX" - - -class Format(IntEnum): - DXT1 = 0 - UNCOMPRESSED = 1 - - -class FtexImageFile(ImageFile.ImageFile): - format = "FTEX" - format_description = "Texture File Format (IW2:EOC)" - - def _open(self) -> None: - if not _accept(self.fp.read(4)): - msg = "not an FTEX file" - raise SyntaxError(msg) - struct.unpack(" None: - pass - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == MAGIC - - -Image.register_open(FtexImageFile.format, FtexImageFile, _accept) -Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"]) diff --git a/venv/Lib/site-packages/PIL/GbrImagePlugin.py b/venv/Lib/site-packages/PIL/GbrImagePlugin.py deleted file mode 100644 index 93e89b1..0000000 --- a/venv/Lib/site-packages/PIL/GbrImagePlugin.py +++ /dev/null @@ -1,103 +0,0 @@ -# -# The Python Imaging Library -# -# load a GIMP brush file -# -# History: -# 96-03-14 fl Created -# 16-01-08 es Version 2 -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996. -# Copyright (c) Eric Soroos 2016. -# -# See the README file for information on usage and redistribution. -# -# -# See https://github.com/GNOME/gimp/blob/mainline/devel-docs/gbr.txt for -# format documentation. -# -# This code Interprets version 1 and 2 .gbr files. -# Version 1 files are obsolete, and should not be used for new -# brushes. -# Version 2 files are saved by GIMP v2.8 (at least) -# Version 3 files have a format specifier of 18 for 16bit floats in -# the color depth field. This is currently unsupported by Pillow. -from __future__ import annotations - -from . import Image, ImageFile -from ._binary import i32be as i32 - - -def _accept(prefix: bytes) -> bool: - return len(prefix) >= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2) - - -## -# Image plugin for the GIMP brush format. 
- - -class GbrImageFile(ImageFile.ImageFile): - format = "GBR" - format_description = "GIMP brush file" - - def _open(self) -> None: - header_size = i32(self.fp.read(4)) - if header_size < 20: - msg = "not a GIMP brush" - raise SyntaxError(msg) - version = i32(self.fp.read(4)) - if version not in (1, 2): - msg = f"Unsupported GIMP brush version: {version}" - raise SyntaxError(msg) - - width = i32(self.fp.read(4)) - height = i32(self.fp.read(4)) - color_depth = i32(self.fp.read(4)) - if width <= 0 or height <= 0: - msg = "not a GIMP brush" - raise SyntaxError(msg) - if color_depth not in (1, 4): - msg = f"Unsupported GIMP brush color depth: {color_depth}" - raise SyntaxError(msg) - - if version == 1: - comment_length = header_size - 20 - else: - comment_length = header_size - 28 - magic_number = self.fp.read(4) - if magic_number != b"GIMP": - msg = "not a GIMP brush, bad magic number" - raise SyntaxError(msg) - self.info["spacing"] = i32(self.fp.read(4)) - - comment = self.fp.read(comment_length)[:-1] - - if color_depth == 1: - self._mode = "L" - else: - self._mode = "RGBA" - - self._size = width, height - - self.info["comment"] = comment - - # Image might not be small - Image._decompression_bomb_check(self.size) - - # Data is an uncompressed block of w * h * bytes/pixel - self._data_size = width * height * color_depth - - def load(self): - if not self.im: - self.im = Image.core.new(self.mode, self.size) - self.frombytes(self.fp.read(self._data_size)) - return Image.Image.load(self) - - -# -# registry - - -Image.register_open(GbrImageFile.format, GbrImageFile, _accept) -Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/venv/Lib/site-packages/PIL/GdImageFile.py b/venv/Lib/site-packages/PIL/GdImageFile.py deleted file mode 100644 index 88b87a2..0000000 --- a/venv/Lib/site-packages/PIL/GdImageFile.py +++ /dev/null @@ -1,102 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# GD file handling -# -# History: -# 1996-04-12 fl Created -# -# Copyright (c) 1997 by Secret Labs AB. -# Copyright (c) 1996 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# - - -""" -.. note:: - This format cannot be automatically recognized, so the - class is not registered for use with :py:func:`PIL.Image.open()`. To open a - gd file, use the :py:func:`PIL.GdImageFile.open()` function instead. - -.. warning:: - THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This - implementation is provided for convenience and demonstrational - purposes only. -""" -from __future__ import annotations - -from typing import IO - -from . import ImageFile, ImagePalette, UnidentifiedImageError -from ._binary import i16be as i16 -from ._binary import i32be as i32 -from ._typing import StrOrBytesPath - - -class GdImageFile(ImageFile.ImageFile): - """ - Image plugin for the GD uncompressed format. Note that this format - is not supported by the standard :py:func:`PIL.Image.open()` function. To use - this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and - use the :py:func:`PIL.GdImageFile.open()` function. 
- """ - - format = "GD" - format_description = "GD uncompressed images" - - def _open(self) -> None: - # Header - assert self.fp is not None - - s = self.fp.read(1037) - - if i16(s) not in [65534, 65535]: - msg = "Not a valid GD 2.x .gd file" - raise SyntaxError(msg) - - self._mode = "L" # FIXME: "P" - self._size = i16(s, 2), i16(s, 4) - - true_color = s[6] - true_color_offset = 2 if true_color else 0 - - # transparency index - tindex = i32(s, 7 + true_color_offset) - if tindex < 256: - self.info["transparency"] = tindex - - self.palette = ImagePalette.raw( - "XBGR", s[7 + true_color_offset + 4 : 7 + true_color_offset + 4 + 256 * 4] - ) - - self.tile = [ - ( - "raw", - (0, 0) + self.size, - 7 + true_color_offset + 4 + 256 * 4, - ("L", 0, 1), - ) - ] - - -def open(fp: StrOrBytesPath | IO[bytes], mode: str = "r") -> GdImageFile: - """ - Load texture from a GD image file. - - :param fp: GD file name, or an opened file handle. - :param mode: Optional mode. In this version, if the mode argument - is given, it must be "r". - :returns: An image instance. - :raises OSError: If the image could not be read. - """ - if mode != "r": - msg = "bad mode" - raise ValueError(msg) - - try: - return GdImageFile(fp) - except SyntaxError as e: - msg = "cannot identify this image file" - raise UnidentifiedImageError(msg) from e diff --git a/venv/Lib/site-packages/PIL/GifImagePlugin.py b/venv/Lib/site-packages/PIL/GifImagePlugin.py deleted file mode 100644 index 284128c..0000000 --- a/venv/Lib/site-packages/PIL/GifImagePlugin.py +++ /dev/null @@ -1,1159 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# GIF file handling -# -# History: -# 1995-09-01 fl Created -# 1996-12-14 fl Added interlace support -# 1996-12-30 fl Added animation support -# 1997-01-05 fl Added write support, fixed local colour map bug -# 1997-02-23 fl Make sure to load raster data in getdata() -# 1997-07-05 fl Support external decoder (0.4) -# 1998-07-09 fl Handle all modes when saving (0.5) -# 1998-07-15 fl Renamed offset attribute to avoid name clash -# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) -# 2001-04-17 fl Added palette optimization (0.7) -# 2002-06-06 fl Added transparency support for save (0.8) -# 2004-02-24 fl Disable interlacing for small images -# -# Copyright (c) 1997-2004 by Secret Labs AB -# Copyright (c) 1995-2004 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import itertools -import math -import os -import subprocess -import sys -from enum import IntEnum -from functools import cached_property -from typing import IO, TYPE_CHECKING, Any, List, Literal, NamedTuple, Union - -from . import ( - Image, - ImageChops, - ImageFile, - ImageMath, - ImageOps, - ImagePalette, - ImageSequence, -) -from ._binary import i16le as i16 -from ._binary import o8 -from ._binary import o16le as o16 - -if TYPE_CHECKING: - from . import _imaging - - -class LoadingStrategy(IntEnum): - """.. versionadded:: 9.1.0""" - - RGB_AFTER_FIRST = 0 - RGB_AFTER_DIFFERENT_PALETTE_ONLY = 1 - RGB_ALWAYS = 2 - - -#: .. versionadded:: 9.1.0 -LOADING_STRATEGY = LoadingStrategy.RGB_AFTER_FIRST - -# -------------------------------------------------------------------- -# Identify/read GIF files - - -def _accept(prefix: bytes) -> bool: - return prefix[:6] in [b"GIF87a", b"GIF89a"] - - -## -# Image plugin for GIF images. This plugin supports both GIF87 and -# GIF89 images. 
- - -class GifImageFile(ImageFile.ImageFile): - format = "GIF" - format_description = "Compuserve GIF" - _close_exclusive_fp_after_loading = False - - global_palette = None - - def data(self) -> bytes | None: - s = self.fp.read(1) - if s and s[0]: - return self.fp.read(s[0]) - return None - - def _is_palette_needed(self, p: bytes) -> bool: - for i in range(0, len(p), 3): - if not (i // 3 == p[i] == p[i + 1] == p[i + 2]): - return True - return False - - def _open(self) -> None: - # Screen - s = self.fp.read(13) - if not _accept(s): - msg = "not a GIF file" - raise SyntaxError(msg) - - self.info["version"] = s[:6] - self._size = i16(s, 6), i16(s, 8) - self.tile = [] - flags = s[10] - bits = (flags & 7) + 1 - - if flags & 128: - # get global palette - self.info["background"] = s[11] - # check if palette contains colour indices - p = self.fp.read(3 << bits) - if self._is_palette_needed(p): - p = ImagePalette.raw("RGB", p) - self.global_palette = self.palette = p - - self._fp = self.fp # FIXME: hack - self.__rewind = self.fp.tell() - self._n_frames: int | None = None - self._seek(0) # get ready to read first frame - - @property - def n_frames(self) -> int: - if self._n_frames is None: - current = self.tell() - try: - while True: - self._seek(self.tell() + 1, False) - except EOFError: - self._n_frames = self.tell() + 1 - self.seek(current) - return self._n_frames - - @cached_property - def is_animated(self) -> bool: - if self._n_frames is not None: - return self._n_frames != 1 - - current = self.tell() - if current: - return True - - try: - self._seek(1, False) - is_animated = True - except EOFError: - is_animated = False - - self.seek(current) - return is_animated - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - if frame < self.__frame: - self.im = None - self._seek(0) - - last_frame = self.__frame - for f in range(self.__frame + 1, frame + 1): - try: - self._seek(f) - except EOFError as e: - self.seek(last_frame) - msg = "no more images in GIF file" - raise EOFError(msg) from e - - def _seek(self, frame: int, update_image: bool = True) -> None: - if frame == 0: - # rewind - self.__offset = 0 - self.dispose: _imaging.ImagingCore | None = None - self.__frame = -1 - self._fp.seek(self.__rewind) - self.disposal_method = 0 - if "comment" in self.info: - del self.info["comment"] - else: - # ensure that the previous frame was loaded - if self.tile and update_image: - self.load() - - if frame != self.__frame + 1: - msg = f"cannot seek to frame {frame}" - raise ValueError(msg) - - self.fp = self._fp - if self.__offset: - # backup to last frame - self.fp.seek(self.__offset) - while self.data(): - pass - self.__offset = 0 - - s = self.fp.read(1) - if not s or s == b";": - msg = "no more images in GIF file" - raise EOFError(msg) - - palette: ImagePalette.ImagePalette | Literal[False] | None = None - - info: dict[str, Any] = {} - frame_transparency = None - interlace = None - frame_dispose_extent = None - while True: - if not s: - s = self.fp.read(1) - if not s or s == b";": - break - - elif s == b"!": - # - # extensions - # - s = self.fp.read(1) - block = self.data() - if s[0] == 249 and block is not None: - # - # graphic control extension - # - flags = block[0] - if flags & 1: - frame_transparency = block[3] - info["duration"] = i16(block, 1) * 10 - - # disposal method - find the value of bits 4 - 6 - dispose_bits = 0b00011100 & flags - dispose_bits = dispose_bits >> 2 - if dispose_bits: - # only set the dispose if it is not - # unspecified. 
I'm not sure if this is - # correct, but it seems to prevent the last - # frame from looking odd for some animations - self.disposal_method = dispose_bits - elif s[0] == 254: - # - # comment extension - # - comment = b"" - - # Read this comment block - while block: - comment += block - block = self.data() - - if "comment" in info: - # If multiple comment blocks in frame, separate with \n - info["comment"] += b"\n" + comment - else: - info["comment"] = comment - s = None - continue - elif s[0] == 255 and frame == 0 and block is not None: - # - # application extension - # - info["extension"] = block, self.fp.tell() - if block[:11] == b"NETSCAPE2.0": - block = self.data() - if block and len(block) >= 3 and block[0] == 1: - self.info["loop"] = i16(block, 1) - while self.data(): - pass - - elif s == b",": - # - # local image - # - s = self.fp.read(9) - - # extent - x0, y0 = i16(s, 0), i16(s, 2) - x1, y1 = x0 + i16(s, 4), y0 + i16(s, 6) - if (x1 > self.size[0] or y1 > self.size[1]) and update_image: - self._size = max(x1, self.size[0]), max(y1, self.size[1]) - Image._decompression_bomb_check(self._size) - frame_dispose_extent = x0, y0, x1, y1 - flags = s[8] - - interlace = (flags & 64) != 0 - - if flags & 128: - bits = (flags & 7) + 1 - p = self.fp.read(3 << bits) - if self._is_palette_needed(p): - palette = ImagePalette.raw("RGB", p) - else: - palette = False - - # image data - bits = self.fp.read(1)[0] - self.__offset = self.fp.tell() - break - s = None - - if interlace is None: - msg = "image not found in GIF frame" - raise EOFError(msg) - - self.__frame = frame - if not update_image: - return - - self.tile = [] - - if self.dispose: - self.im.paste(self.dispose, self.dispose_extent) - - self._frame_palette = palette if palette is not None else self.global_palette - self._frame_transparency = frame_transparency - if frame == 0: - if self._frame_palette: - if LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS: - self._mode = "RGBA" if frame_transparency is not None else "RGB" - else: - self._mode = "P" - else: - self._mode = "L" - - if not palette and self.global_palette: - from copy import copy - - palette = copy(self.global_palette) - self.palette = palette - else: - if self.mode == "P": - if ( - LOADING_STRATEGY != LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY - or palette - ): - self.pyaccess = None - if "transparency" in self.info: - self.im.putpalettealpha(self.info["transparency"], 0) - self.im = self.im.convert("RGBA", Image.Dither.FLOYDSTEINBERG) - self._mode = "RGBA" - del self.info["transparency"] - else: - self._mode = "RGB" - self.im = self.im.convert("RGB", Image.Dither.FLOYDSTEINBERG) - - def _rgb(color: int) -> tuple[int, int, int]: - if self._frame_palette: - if color * 3 + 3 > len(self._frame_palette.palette): - color = 0 - return tuple(self._frame_palette.palette[color * 3 : color * 3 + 3]) - else: - return (color, color, color) - - self.dispose = None - self.dispose_extent = frame_dispose_extent - if self.dispose_extent and self.disposal_method >= 2: - try: - if self.disposal_method == 2: - # replace with background colour - - # only dispose the extent in this frame - x0, y0, x1, y1 = self.dispose_extent - dispose_size = (x1 - x0, y1 - y0) - - Image._decompression_bomb_check(dispose_size) - - # by convention, attempt to use transparency first - dispose_mode = "P" - color = self.info.get("transparency", frame_transparency) - if color is not None: - if self.mode in ("RGB", "RGBA"): - dispose_mode = "RGBA" - color = _rgb(color) + (0,) - else: - color = 
self.info.get("background", 0) - if self.mode in ("RGB", "RGBA"): - dispose_mode = "RGB" - color = _rgb(color) - self.dispose = Image.core.fill(dispose_mode, dispose_size, color) - else: - # replace with previous contents - if self.im is not None: - # only dispose the extent in this frame - self.dispose = self._crop(self.im, self.dispose_extent) - elif frame_transparency is not None: - x0, y0, x1, y1 = self.dispose_extent - dispose_size = (x1 - x0, y1 - y0) - - Image._decompression_bomb_check(dispose_size) - dispose_mode = "P" - color = frame_transparency - if self.mode in ("RGB", "RGBA"): - dispose_mode = "RGBA" - color = _rgb(frame_transparency) + (0,) - self.dispose = Image.core.fill( - dispose_mode, dispose_size, color - ) - except AttributeError: - pass - - if interlace is not None: - transparency = -1 - if frame_transparency is not None: - if frame == 0: - if LOADING_STRATEGY != LoadingStrategy.RGB_ALWAYS: - self.info["transparency"] = frame_transparency - elif self.mode not in ("RGB", "RGBA"): - transparency = frame_transparency - self.tile = [ - ( - "gif", - (x0, y0, x1, y1), - self.__offset, - (bits, interlace, transparency), - ) - ] - - if info.get("comment"): - self.info["comment"] = info["comment"] - for k in ["duration", "extension"]: - if k in info: - self.info[k] = info[k] - elif k in self.info: - del self.info[k] - - def load_prepare(self) -> None: - temp_mode = "P" if self._frame_palette else "L" - self._prev_im = None - if self.__frame == 0: - if self._frame_transparency is not None: - self.im = Image.core.fill( - temp_mode, self.size, self._frame_transparency - ) - elif self.mode in ("RGB", "RGBA"): - self._prev_im = self.im - if self._frame_palette: - self.im = Image.core.fill("P", self.size, self._frame_transparency or 0) - self.im.putpalette("RGB", *self._frame_palette.getdata()) - else: - self.im = None - self._mode = temp_mode - self._frame_palette = None - - super().load_prepare() - - def load_end(self) -> None: - if self.__frame == 0: - if self.mode == "P" and LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS: - if self._frame_transparency is not None: - self.im.putpalettealpha(self._frame_transparency, 0) - self._mode = "RGBA" - else: - self._mode = "RGB" - self.im = self.im.convert(self.mode, Image.Dither.FLOYDSTEINBERG) - return - if not self._prev_im: - return - if self._frame_transparency is not None: - self.im.putpalettealpha(self._frame_transparency, 0) - frame_im = self.im.convert("RGBA") - else: - frame_im = self.im.convert("RGB") - - assert self.dispose_extent is not None - frame_im = self._crop(frame_im, self.dispose_extent) - - self.im = self._prev_im - self._mode = self.im.mode - if frame_im.mode == "RGBA": - self.im.paste(frame_im, self.dispose_extent, frame_im) - else: - self.im.paste(frame_im, self.dispose_extent) - - def tell(self) -> int: - return self.__frame - - -# -------------------------------------------------------------------- -# Write GIF files - - -RAWMODE = {"1": "L", "L": "L", "P": "P"} - - -def _normalize_mode(im: Image.Image) -> Image.Image: - """ - Takes an image (or frame), returns an image in a mode that is appropriate - for saving in a Gif. - - It may return the original image, or it may return an image converted to - palette or 'L' mode. 
- - :param im: Image object - :returns: Image object - """ - if im.mode in RAWMODE: - im.load() - return im - if Image.getmodebase(im.mode) == "RGB": - im = im.convert("P", palette=Image.Palette.ADAPTIVE) - if im.palette.mode == "RGBA": - for rgba in im.palette.colors: - if rgba[3] == 0: - im.info["transparency"] = im.palette.colors[rgba] - break - return im - return im.convert("L") - - -_Palette = Union[bytes, bytearray, List[int], ImagePalette.ImagePalette] - - -def _normalize_palette( - im: Image.Image, palette: _Palette | None, info: dict[str, Any] -) -> Image.Image: - """ - Normalizes the palette for image. - - Sets the palette to the incoming palette, if provided. - - Ensures that there's a palette for L mode images - - Optimizes the palette if necessary/desired. - - :param im: Image object - :param palette: bytes object containing the source palette, or .... - :param info: encoderinfo - :returns: Image object - """ - source_palette = None - if palette: - # a bytes palette - if isinstance(palette, (bytes, bytearray, list)): - source_palette = bytearray(palette[:768]) - if isinstance(palette, ImagePalette.ImagePalette): - source_palette = bytearray(palette.palette) - - if im.mode == "P": - if not source_palette: - source_palette = im.im.getpalette("RGB")[:768] - else: # L-mode - if not source_palette: - source_palette = bytearray(i // 3 for i in range(768)) - im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette) - - used_palette_colors: list[int] | None - if palette: - used_palette_colors = [] - assert source_palette is not None - for i in range(0, len(source_palette), 3): - source_color = tuple(source_palette[i : i + 3]) - index = im.palette.colors.get(source_color) - if index in used_palette_colors: - index = None - used_palette_colors.append(index) - for i, index in enumerate(used_palette_colors): - if index is None: - for j in range(len(used_palette_colors)): - if j not in used_palette_colors: - used_palette_colors[i] = j - break - im = im.remap_palette(used_palette_colors) - else: - used_palette_colors = _get_optimize(im, info) - if used_palette_colors is not None: - im = im.remap_palette(used_palette_colors, source_palette) - if "transparency" in info: - try: - info["transparency"] = used_palette_colors.index( - info["transparency"] - ) - except ValueError: - del info["transparency"] - return im - - im.palette.palette = source_palette - return im - - -def _write_single_frame( - im: Image.Image, - fp: IO[bytes], - palette: _Palette | None, -) -> None: - im_out = _normalize_mode(im) - for k, v in im_out.info.items(): - im.encoderinfo.setdefault(k, v) - im_out = _normalize_palette(im_out, palette, im.encoderinfo) - - for s in _get_global_header(im_out, im.encoderinfo): - fp.write(s) - - # local image header - flags = 0 - if get_interlace(im): - flags = flags | 64 - _write_local_header(fp, im, (0, 0), flags) - - im_out.encoderconfig = (8, get_interlace(im)) - ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])]) - - fp.write(b"\0") # end of image data - - -def _getbbox( - base_im: Image.Image, im_frame: Image.Image -) -> tuple[Image.Image, tuple[int, int, int, int] | None]: - if _get_palette_bytes(im_frame) != _get_palette_bytes(base_im): - im_frame = im_frame.convert("RGBA") - base_im = base_im.convert("RGBA") - delta = ImageChops.subtract_modulo(im_frame, base_im) - return delta, delta.getbbox(alpha_only=False) - - -class _Frame(NamedTuple): - im: Image.Image - bbox: tuple[int, int, int, int] | None - encoderinfo: dict[str, Any] - - -def 
_write_multiple_frames( - im: Image.Image, fp: IO[bytes], palette: _Palette | None -) -> bool: - duration = im.encoderinfo.get("duration") - disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) - - im_frames: list[_Frame] = [] - previous_im: Image.Image | None = None - frame_count = 0 - background_im = None - for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])): - for im_frame in ImageSequence.Iterator(imSequence): - # a copy is required here since seek can still mutate the image - im_frame = _normalize_mode(im_frame.copy()) - if frame_count == 0: - for k, v in im_frame.info.items(): - if k == "transparency": - continue - im.encoderinfo.setdefault(k, v) - - encoderinfo = im.encoderinfo.copy() - if "transparency" in im_frame.info: - encoderinfo.setdefault("transparency", im_frame.info["transparency"]) - im_frame = _normalize_palette(im_frame, palette, encoderinfo) - if isinstance(duration, (list, tuple)): - encoderinfo["duration"] = duration[frame_count] - elif duration is None and "duration" in im_frame.info: - encoderinfo["duration"] = im_frame.info["duration"] - if isinstance(disposal, (list, tuple)): - encoderinfo["disposal"] = disposal[frame_count] - frame_count += 1 - - diff_frame = None - if im_frames and previous_im: - # delta frame - delta, bbox = _getbbox(previous_im, im_frame) - if not bbox: - # This frame is identical to the previous frame - if encoderinfo.get("duration"): - im_frames[-1].encoderinfo["duration"] += encoderinfo["duration"] - continue - if im_frames[-1].encoderinfo.get("disposal") == 2: - if background_im is None: - color = im.encoderinfo.get( - "transparency", im.info.get("transparency", (0, 0, 0)) - ) - background = _get_background(im_frame, color) - background_im = Image.new("P", im_frame.size, background) - background_im.putpalette(im_frames[0].im.palette) - bbox = _getbbox(background_im, im_frame)[1] - elif encoderinfo.get("optimize") and im_frame.mode != "1": - if "transparency" not in encoderinfo: - try: - encoderinfo["transparency"] = ( - im_frame.palette._new_color_index(im_frame) - ) - except ValueError: - pass - if "transparency" in encoderinfo: - # When the delta is zero, fill the image with transparency - diff_frame = im_frame.copy() - fill = Image.new("P", delta.size, encoderinfo["transparency"]) - if delta.mode == "RGBA": - r, g, b, a = delta.split() - mask = ImageMath.lambda_eval( - lambda args: args["convert"]( - args["max"]( - args["max"]( - args["max"](args["r"], args["g"]), args["b"] - ), - args["a"], - ) - * 255, - "1", - ), - r=r, - g=g, - b=b, - a=a, - ) - else: - if delta.mode == "P": - # Convert to L without considering palette - delta_l = Image.new("L", delta.size) - delta_l.putdata(delta.getdata()) - delta = delta_l - mask = ImageMath.lambda_eval( - lambda args: args["convert"](args["im"] * 255, "1"), - im=delta, - ) - diff_frame.paste(fill, mask=ImageOps.invert(mask)) - else: - bbox = None - previous_im = im_frame - im_frames.append(_Frame(diff_frame or im_frame, bbox, encoderinfo)) - - if len(im_frames) == 1: - if "duration" in im.encoderinfo: - # Since multiple frames will not be written, use the combined duration - im.encoderinfo["duration"] = im_frames[0].encoderinfo["duration"] - return False - - for frame_data in im_frames: - im_frame = frame_data.im - if not frame_data.bbox: - # global header - for s in _get_global_header(im_frame, frame_data.encoderinfo): - fp.write(s) - offset = (0, 0) - else: - # compress difference - if not palette: - frame_data.encoderinfo["include_color_table"] = 
True - - im_frame = im_frame.crop(frame_data.bbox) - offset = frame_data.bbox[:2] - _write_frame_data(fp, im_frame, offset, frame_data.encoderinfo) - return True - - -def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - _save(im, fp, filename, save_all=True) - - -def _save( - im: Image.Image, fp: IO[bytes], filename: str | bytes, save_all: bool = False -) -> None: - # header - if "palette" in im.encoderinfo or "palette" in im.info: - palette = im.encoderinfo.get("palette", im.info.get("palette")) - else: - palette = None - im.encoderinfo.setdefault("optimize", True) - - if not save_all or not _write_multiple_frames(im, fp, palette): - _write_single_frame(im, fp, palette) - - fp.write(b";") # end of file - - if hasattr(fp, "flush"): - fp.flush() - - -def get_interlace(im: Image.Image) -> int: - interlace = im.encoderinfo.get("interlace", 1) - - # workaround for @PIL153 - if min(im.size) < 16: - interlace = 0 - - return interlace - - -def _write_local_header( - fp: IO[bytes], im: Image.Image, offset: tuple[int, int], flags: int -) -> None: - try: - transparency = im.encoderinfo["transparency"] - except KeyError: - transparency = None - - if "duration" in im.encoderinfo: - duration = int(im.encoderinfo["duration"] / 10) - else: - duration = 0 - - disposal = int(im.encoderinfo.get("disposal", 0)) - - if transparency is not None or duration != 0 or disposal: - packed_flag = 1 if transparency is not None else 0 - packed_flag |= disposal << 2 - - fp.write( - b"!" - + o8(249) # extension intro - + o8(4) # length - + o8(packed_flag) # packed fields - + o16(duration) # duration - + o8(transparency or 0) # transparency index - + o8(0) - ) - - include_color_table = im.encoderinfo.get("include_color_table") - if include_color_table: - palette_bytes = _get_palette_bytes(im) - color_table_size = _get_color_table_size(palette_bytes) - if color_table_size: - flags = flags | 128 # local color table flag - flags = flags | color_table_size - - fp.write( - b"," - + o16(offset[0]) # offset - + o16(offset[1]) - + o16(im.size[0]) # size - + o16(im.size[1]) - + o8(flags) # flags - ) - if include_color_table and color_table_size: - fp.write(_get_header_palette(palette_bytes)) - fp.write(o8(8)) # bits - - -def _save_netpbm(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - # Unused by default. - # To use, uncomment the register_save call at the end of the file. - # - # If you need real GIF compression and/or RGB quantization, you - # can use the external NETPBM/PBMPLUS utilities. See comments - # below for information on how to enable this. 
- tempfile = im._dump() - - try: - with open(filename, "wb") as f: - if im.mode != "RGB": - subprocess.check_call( - ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL - ) - else: - # Pipe ppmquant output into ppmtogif - # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename) - quant_cmd = ["ppmquant", "256", tempfile] - togif_cmd = ["ppmtogif"] - quant_proc = subprocess.Popen( - quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL - ) - togif_proc = subprocess.Popen( - togif_cmd, - stdin=quant_proc.stdout, - stdout=f, - stderr=subprocess.DEVNULL, - ) - - # Allow ppmquant to receive SIGPIPE if ppmtogif exits - assert quant_proc.stdout is not None - quant_proc.stdout.close() - - retcode = quant_proc.wait() - if retcode: - raise subprocess.CalledProcessError(retcode, quant_cmd) - - retcode = togif_proc.wait() - if retcode: - raise subprocess.CalledProcessError(retcode, togif_cmd) - finally: - try: - os.unlink(tempfile) - except OSError: - pass - - -# Force optimization so that we can test performance against -# cases where it took lots of memory and time previously. -_FORCE_OPTIMIZE = False - - -def _get_optimize(im: Image.Image, info: dict[str, Any]) -> list[int] | None: - """ - Palette optimization is a potentially expensive operation. - - This function determines if the palette should be optimized using - some heuristics, then returns the list of palette entries in use. - - :param im: Image object - :param info: encoderinfo - :returns: list of indexes of palette entries in use, or None - """ - if im.mode in ("P", "L") and info and info.get("optimize"): - # Potentially expensive operation. - - # The palette saves 3 bytes per color not used, but palette - # lengths are restricted to 3*(2**N) bytes. Max saving would - # be 768 -> 6 bytes if we went all the way down to 2 colors. - # * If we're over 128 colors, we can't save any space. - # * If there aren't any holes, it's not worth collapsing. - # * If we have a 'large' image, the palette is in the noise. 
- - # create the new palette if not every color is used - optimise = _FORCE_OPTIMIZE or im.mode == "L" - if optimise or im.width * im.height < 512 * 512: - # check which colors are used - used_palette_colors = [] - for i, count in enumerate(im.histogram()): - if count: - used_palette_colors.append(i) - - if optimise or max(used_palette_colors) >= len(used_palette_colors): - return used_palette_colors - - num_palette_colors = len(im.palette.palette) // Image.getmodebands( - im.palette.mode - ) - current_palette_size = 1 << (num_palette_colors - 1).bit_length() - if ( - # check that the palette would become smaller when saved - len(used_palette_colors) <= current_palette_size // 2 - # check that the palette is not already the smallest possible size - and current_palette_size > 2 - ): - return used_palette_colors - return None - - -def _get_color_table_size(palette_bytes: bytes) -> int: - # calculate the palette size for the header - if not palette_bytes: - return 0 - elif len(palette_bytes) < 9: - return 1 - else: - return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1 - - -def _get_header_palette(palette_bytes: bytes) -> bytes: - """ - Returns the palette, null padded to the next power of 2 (*3) bytes - suitable for direct inclusion in the GIF header - - :param palette_bytes: Unpadded palette bytes, in RGBRGB form - :returns: Null padded palette - """ - color_table_size = _get_color_table_size(palette_bytes) - - # add the missing amount of bytes - # the palette has to be 2<<n in size - actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3 - if actual_target_size_diff > 0: - palette_bytes += o8(0) * 3 * actual_target_size_diff - return palette_bytes - - -def _get_palette_bytes(im: Image.Image) -> bytes: - """ - Gets the palette for inclusion in the gif header - - :param im: Image object - :returns: Bytes, len<=768 suitable for inclusion in gif header - """ - return im.palette.palette if im.palette else b"" - - -def _get_background( - im: Image.Image, - info_background: int | tuple[int, int, int] | tuple[int, int, int, int] | None, -) -> int: - background = 0 - if info_background: - if isinstance(info_background, tuple): - # WebPImagePlugin stores an RGBA value in info["background"] - # So it must be converted to the same format as GifImagePlugin's - # info["background"] - a global color table index - try: - background = im.palette.getcolor(info_background, im) - except ValueError as e: - if str(e) not in ( - # If all 256 colors are in use, - # then there is no need for the background color - "cannot allocate more than 256 colors", - # Ignore non-opaque WebP background - "cannot add non-opaque RGBA color to RGB palette", - ): - raise - else: - background = info_background - return background - - -def _get_global_header(im: Image.Image, info: dict[str, Any]) -> list[bytes]: - """Return a list of strings representing a GIF header""" - - # Header Block - # https://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp - - version = b"87a" - if im.info.get("version") == b"89a" or ( - info - and ( - "transparency" in info - or info.get("loop") is not None - or info.get("duration") - or info.get("comment") - ) - ): - version = b"89a" - - background = _get_background(im, info.get("background")) - - palette_bytes = _get_palette_bytes(im) - color_table_size = _get_color_table_size(palette_bytes) - - header = [ - b"GIF" # signature - + version # version - + o16(im.size[0]) # canvas width - + o16(im.size[1]), # canvas height - # Logical Screen Descriptor - # size of global color table + global color table flag - o8(color_table_size + 128), # packed fields - # background + reserved/aspect
- o8(background) + o8(0), - # Global Color Table - _get_header_palette(palette_bytes), - ] - if info.get("loop") is not None: - header.append( - b"!" - + o8(255) # extension intro - + o8(11) - + b"NETSCAPE2.0" - + o8(3) - + o8(1) - + o16(info["loop"]) # number of loops - + o8(0) - ) - if info.get("comment"): - comment_block = b"!" + o8(254) # extension intro - - comment = info["comment"] - if isinstance(comment, str): - comment = comment.encode() - for i in range(0, len(comment), 255): - subblock = comment[i : i + 255] - comment_block += o8(len(subblock)) + subblock - - comment_block += o8(0) - header.append(comment_block) - return header - - -def _write_frame_data( - fp: IO[bytes], - im_frame: Image.Image, - offset: tuple[int, int], - params: dict[str, Any], -) -> None: - try: - im_frame.encoderinfo = params - - # local image header - _write_local_header(fp, im_frame, offset, 0) - - ImageFile._save( - im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])] - ) - - fp.write(b"\0") # end of image data - finally: - del im_frame.encoderinfo - - -# -------------------------------------------------------------------- -# Legacy GIF utilities - - -def getheader( - im: Image.Image, palette: _Palette | None = None, info: dict[str, Any] | None = None -) -> tuple[list[bytes], list[int] | None]: - """ - Legacy Method to get Gif data from image. - - Warning:: May modify image data. - - :param im: Image object - :param palette: bytes object containing the source palette, or .... - :param info: encoderinfo - :returns: tuple of(list of header items, optimized palette) - - """ - if info is None: - info = {} - - used_palette_colors = _get_optimize(im, info) - - if "background" not in info and "background" in im.info: - info["background"] = im.info["background"] - - im_mod = _normalize_palette(im, palette, info) - im.palette = im_mod.palette - im.im = im_mod.im - header = _get_global_header(im, info) - - return header, used_palette_colors - - -def getdata( - im: Image.Image, offset: tuple[int, int] = (0, 0), **params: Any -) -> list[bytes]: - """ - Legacy Method - - Return a list of strings representing this image. - The first string is a local image header, the rest contains - encoded image data. - - To specify duration, add the time in milliseconds, - e.g. ``getdata(im_frame, duration=1000)`` - - :param im: Image object - :param offset: Tuple of (x, y) pixels. Defaults to (0, 0) - :param \\**params: e.g. 
duration or other encoder info parameters - :returns: List of bytes containing GIF encoded frame data - - """ - from io import BytesIO - - class Collector(BytesIO): - data = [] - - if sys.version_info >= (3, 12): - from collections.abc import Buffer - - def write(self, data: Buffer) -> int: - self.data.append(data) - return len(data) - - else: - - def write(self, data: Any) -> int: - self.data.append(data) - return len(data) - - im.load() # make sure raster data is available - - fp = Collector() - - _write_frame_data(fp, im, offset, params) - - return fp.data - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(GifImageFile.format, GifImageFile, _accept) -Image.register_save(GifImageFile.format, _save) -Image.register_save_all(GifImageFile.format, _save_all) -Image.register_extension(GifImageFile.format, ".gif") -Image.register_mime(GifImageFile.format, "image/gif") - -# -# Uncomment the following line if you wish to use NETPBM/PBMPLUS -# instead of the built-in "uncompressed" GIF encoder - -# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/venv/Lib/site-packages/PIL/GimpGradientFile.py b/venv/Lib/site-packages/PIL/GimpGradientFile.py deleted file mode 100644 index 220eac5..0000000 --- a/venv/Lib/site-packages/PIL/GimpGradientFile.py +++ /dev/null @@ -1,149 +0,0 @@ -# -# Python Imaging Library -# $Id$ -# -# stuff to read (and render) GIMP gradient files -# -# History: -# 97-08-23 fl Created -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# - -""" -Stuff to translate curve segments to palette values (derived from -the corresponding code in GIMP, written by Federico Mena Quintero. -See the GIMP distribution for more information.) 
-""" -from __future__ import annotations - -from math import log, pi, sin, sqrt -from typing import IO, Callable - -from ._binary import o8 - -EPSILON = 1e-10 -"""""" # Enable auto-doc for data member - - -def linear(middle: float, pos: float) -> float: - if pos <= middle: - if middle < EPSILON: - return 0.0 - else: - return 0.5 * pos / middle - else: - pos = pos - middle - middle = 1.0 - middle - if middle < EPSILON: - return 1.0 - else: - return 0.5 + 0.5 * pos / middle - - -def curved(middle: float, pos: float) -> float: - return pos ** (log(0.5) / log(max(middle, EPSILON))) - - -def sine(middle: float, pos: float) -> float: - return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 - - -def sphere_increasing(middle: float, pos: float) -> float: - return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) - - -def sphere_decreasing(middle: float, pos: float) -> float: - return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) - - -SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] -"""""" # Enable auto-doc for data member - - -class GradientFile: - gradient: ( - list[ - tuple[ - float, - float, - float, - list[float], - list[float], - Callable[[float, float], float], - ] - ] - | None - ) = None - - def getpalette(self, entries: int = 256) -> tuple[bytes, str]: - assert self.gradient is not None - palette = [] - - ix = 0 - x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] - - for i in range(entries): - x = i / (entries - 1) - - while x1 < x: - ix += 1 - x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] - - w = x1 - x0 - - if w < EPSILON: - scale = segment(0.5, 0.5) - else: - scale = segment((xm - x0) / w, (x - x0) / w) - - # expand to RGBA - r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) - g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) - b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) - a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) - - # add to palette - palette.append(r + g + b + a) - - return b"".join(palette), "RGBA" - - -class GimpGradientFile(GradientFile): - """File handler for GIMP's gradient format.""" - - def __init__(self, fp: IO[bytes]) -> None: - if fp.readline()[:13] != b"GIMP Gradient": - msg = "not a GIMP gradient file" - raise SyntaxError(msg) - - line = fp.readline() - - # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do - if line.startswith(b"Name: "): - line = fp.readline().strip() - - count = int(line) - - self.gradient = [] - - for i in range(count): - s = fp.readline().split() - w = [float(x) for x in s[:11]] - - x0, x1 = w[0], w[2] - xm = w[1] - rgb0 = w[3:7] - rgb1 = w[7:11] - - segment = SEGMENTS[int(s[11])] - cspace = int(s[12]) - - if cspace != 0: - msg = "cannot handle HSV colour space" - raise OSError(msg) - - self.gradient.append((x0, x1, xm, rgb0, rgb1, segment)) diff --git a/venv/Lib/site-packages/PIL/GimpPaletteFile.py b/venv/Lib/site-packages/PIL/GimpPaletteFile.py deleted file mode 100644 index 4cad0eb..0000000 --- a/venv/Lib/site-packages/PIL/GimpPaletteFile.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Python Imaging Library -# $Id$ -# -# stuff to read GIMP palette files -# -# History: -# 1997-08-23 fl Created -# 2004-09-07 fl Support GIMP 2.0 palette files. -# -# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. -# Copyright (c) Fredrik Lundh 1997-2004. -# -# See the README file for information on usage and redistribution. 
-# -from __future__ import annotations - -import re -from typing import IO - -from ._binary import o8 - - -class GimpPaletteFile: - """File handler for GIMP's palette format.""" - - rawmode = "RGB" - - def __init__(self, fp: IO[bytes]) -> None: - palette = [o8(i) * 3 for i in range(256)] - - if fp.readline()[:12] != b"GIMP Palette": - msg = "not a GIMP palette file" - raise SyntaxError(msg) - - for i in range(256): - s = fp.readline() - if not s: - break - - # skip fields and comment lines - if re.match(rb"\w+:|#", s): - continue - if len(s) > 100: - msg = "bad palette file" - raise SyntaxError(msg) - - v = tuple(map(int, s.split()[:3])) - if len(v) != 3: - msg = "bad palette entry" - raise ValueError(msg) - - palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) - - self.palette = b"".join(palette) - - def getpalette(self) -> tuple[bytes, str]: - return self.palette, self.rawmode diff --git a/venv/Lib/site-packages/PIL/GribStubImagePlugin.py b/venv/Lib/site-packages/PIL/GribStubImagePlugin.py deleted file mode 100644 index e9aa084..0000000 --- a/venv/Lib/site-packages/PIL/GribStubImagePlugin.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# GRIB stub adapter -# -# Copyright (c) 1996-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from typing import IO - -from . import Image, ImageFile - -_handler = None - - -def register_handler(handler: ImageFile.StubHandler | None) -> None: - """ - Install application-specific GRIB image handler. - - :param handler: Handler object. - """ - global _handler - _handler = handler - - -# -------------------------------------------------------------------- -# Image adapter - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"GRIB" and prefix[7] == 1 - - -class GribStubImageFile(ImageFile.StubImageFile): - format = "GRIB" - format_description = "GRIB" - - def _open(self) -> None: - offset = self.fp.tell() - - if not _accept(self.fp.read(8)): - msg = "Not a GRIB file" - raise SyntaxError(msg) - - self.fp.seek(offset) - - # make something up - self._mode = "F" - self._size = 1, 1 - - loader = self._load() - if loader: - loader.open(self) - - def _load(self) -> ImageFile.StubHandler | None: - return _handler - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if _handler is None or not hasattr(_handler, "save"): - msg = "GRIB save handler not installed" - raise OSError(msg) - _handler.save(im, fp, filename) - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) -Image.register_save(GribStubImageFile.format, _save) - -Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/venv/Lib/site-packages/PIL/Hdf5StubImagePlugin.py b/venv/Lib/site-packages/PIL/Hdf5StubImagePlugin.py deleted file mode 100644 index cc9e73d..0000000 --- a/venv/Lib/site-packages/PIL/Hdf5StubImagePlugin.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# HDF5 stub adapter -# -# Copyright (c) 2000-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from typing import IO - -from . import Image, ImageFile - -_handler = None - - -def register_handler(handler: ImageFile.StubHandler | None) -> None: - """ - Install application-specific HDF5 image handler. - - :param handler: Handler object. 
- """ - global _handler - _handler = handler - - -# -------------------------------------------------------------------- -# Image adapter - - -def _accept(prefix: bytes) -> bool: - return prefix[:8] == b"\x89HDF\r\n\x1a\n" - - -class HDF5StubImageFile(ImageFile.StubImageFile): - format = "HDF5" - format_description = "HDF5" - - def _open(self) -> None: - offset = self.fp.tell() - - if not _accept(self.fp.read(8)): - msg = "Not an HDF file" - raise SyntaxError(msg) - - self.fp.seek(offset) - - # make something up - self._mode = "F" - self._size = 1, 1 - - loader = self._load() - if loader: - loader.open(self) - - def _load(self) -> ImageFile.StubHandler | None: - return _handler - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if _handler is None or not hasattr(_handler, "save"): - msg = "HDF5 save handler not installed" - raise OSError(msg) - _handler.save(im, fp, filename) - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) -Image.register_save(HDF5StubImageFile.format, _save) - -Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"]) diff --git a/venv/Lib/site-packages/PIL/IcnsImagePlugin.py b/venv/Lib/site-packages/PIL/IcnsImagePlugin.py deleted file mode 100644 index 2a89d49..0000000 --- a/venv/Lib/site-packages/PIL/IcnsImagePlugin.py +++ /dev/null @@ -1,399 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# macOS icns file decoder, based on icns.py by Bob Ippolito. -# -# history: -# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. -# 2020-04-04 Allow saving on all operating systems. -# -# Copyright (c) 2004 by Bob Ippolito. -# Copyright (c) 2004 by Secret Labs. -# Copyright (c) 2004 by Fredrik Lundh. -# Copyright (c) 2014 by Alastair Houghton. -# Copyright (c) 2020 by Pan Jing. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -import os -import struct -import sys -from typing import IO - -from . import Image, ImageFile, PngImagePlugin, features - -enable_jpeg2k = features.check_codec("jpg_2000") -if enable_jpeg2k: - from . import Jpeg2KImagePlugin - -MAGIC = b"icns" -HEADERSIZE = 8 - - -def nextheader(fobj): - return struct.unpack(">4sI", fobj.read(HEADERSIZE)) - - -def read_32t(fobj, start_length, size): - # The 128x128 icon seems to have an extra header for some reason. - (start, length) = start_length - fobj.seek(start) - sig = fobj.read(4) - if sig != b"\x00\x00\x00\x00": - msg = "Unknown signature, expecting 0x00000000" - raise SyntaxError(msg) - return read_32(fobj, (start + 4, length - 4), size) - - -def read_32(fobj, start_length, size): - """ - Read a 32bit RGB icon resource. Seems to be either uncompressed or - an RLE packbits-like scheme. 
- """ - (start, length) = start_length - fobj.seek(start) - pixel_size = (size[0] * size[2], size[1] * size[2]) - sizesq = pixel_size[0] * pixel_size[1] - if length == sizesq * 3: - # uncompressed ("RGBRGBGB") - indata = fobj.read(length) - im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) - else: - # decode image - im = Image.new("RGB", pixel_size, None) - for band_ix in range(3): - data = [] - bytesleft = sizesq - while bytesleft > 0: - byte = fobj.read(1) - if not byte: - break - byte = byte[0] - if byte & 0x80: - blocksize = byte - 125 - byte = fobj.read(1) - for i in range(blocksize): - data.append(byte) - else: - blocksize = byte + 1 - data.append(fobj.read(blocksize)) - bytesleft -= blocksize - if bytesleft <= 0: - break - if bytesleft != 0: - msg = f"Error reading channel [{repr(bytesleft)} left]" - raise SyntaxError(msg) - band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1) - im.im.putband(band.im, band_ix) - return {"RGB": im} - - -def read_mk(fobj, start_length, size): - # Alpha masks seem to be uncompressed - start = start_length[0] - fobj.seek(start) - pixel_size = (size[0] * size[2], size[1] * size[2]) - sizesq = pixel_size[0] * pixel_size[1] - band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1) - return {"A": band} - - -def read_png_or_jpeg2000(fobj, start_length, size): - (start, length) = start_length - fobj.seek(start) - sig = fobj.read(12) - if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a": - fobj.seek(start) - im = PngImagePlugin.PngImageFile(fobj) - Image._decompression_bomb_check(im.size) - return {"RGBA": im} - elif ( - sig[:4] == b"\xff\x4f\xff\x51" - or sig[:4] == b"\x0d\x0a\x87\x0a" - or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" - ): - if not enable_jpeg2k: - msg = ( - "Unsupported icon subimage format (rebuild PIL " - "with JPEG 2000 support to fix this)" - ) - raise ValueError(msg) - # j2k, jpc or j2c - fobj.seek(start) - jp2kstream = fobj.read(length) - f = io.BytesIO(jp2kstream) - im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) - Image._decompression_bomb_check(im.size) - if im.mode != "RGBA": - im = im.convert("RGBA") - return {"RGBA": im} - else: - msg = "Unsupported icon subimage format" - raise ValueError(msg) - - -class IcnsFile: - SIZES = { - (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)], - (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)], - (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)], - (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)], - (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)], - (128, 128, 1): [ - (b"ic07", read_png_or_jpeg2000), - (b"it32", read_32t), - (b"t8mk", read_mk), - ], - (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)], - (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)], - (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)], - (32, 32, 1): [ - (b"icp5", read_png_or_jpeg2000), - (b"il32", read_32), - (b"l8mk", read_mk), - ], - (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)], - (16, 16, 1): [ - (b"icp4", read_png_or_jpeg2000), - (b"is32", read_32), - (b"s8mk", read_mk), - ], - } - - def __init__(self, fobj): - """ - fobj is a file-like object as an icns resource - """ - # signature : (start, length) - self.dct = dct = {} - self.fobj = fobj - sig, filesize = nextheader(fobj) - if not _accept(sig): - msg = "not an icns file" - raise SyntaxError(msg) - i = HEADERSIZE - while i < filesize: - sig, blocksize = nextheader(fobj) - if blocksize <= 0: - msg = "invalid block header" - raise SyntaxError(msg) - i += HEADERSIZE - blocksize -= HEADERSIZE - dct[sig] = (i, blocksize) - 
fobj.seek(blocksize, io.SEEK_CUR) - i += blocksize - - def itersizes(self): - sizes = [] - for size, fmts in self.SIZES.items(): - for fmt, reader in fmts: - if fmt in self.dct: - sizes.append(size) - break - return sizes - - def bestsize(self): - sizes = self.itersizes() - if not sizes: - msg = "No 32bit icon resources found" - raise SyntaxError(msg) - return max(sizes) - - def dataforsize(self, size): - """ - Get an icon resource as {channel: array}. Note that - the arrays are bottom-up like windows bitmaps and will likely - need to be flipped or transposed in some way. - """ - dct = {} - for code, reader in self.SIZES[size]: - desc = self.dct.get(code) - if desc is not None: - dct.update(reader(self.fobj, desc, size)) - return dct - - def getimage(self, size=None): - if size is None: - size = self.bestsize() - if len(size) == 2: - size = (size[0], size[1], 1) - channels = self.dataforsize(size) - - im = channels.get("RGBA", None) - if im: - return im - - im = channels.get("RGB").copy() - try: - im.putalpha(channels["A"]) - except KeyError: - pass - return im - - -## -# Image plugin for Mac OS icons. - - -class IcnsImageFile(ImageFile.ImageFile): - """ - PIL image support for Mac OS .icns files. - Chooses the best resolution, but will possibly load - a different size image if you mutate the size attribute - before calling 'load'. - - The info dictionary has a key 'sizes' that is a list - of sizes that the icns file has. - """ - - format = "ICNS" - format_description = "Mac OS icns resource" - - def _open(self) -> None: - self.icns = IcnsFile(self.fp) - self._mode = "RGBA" - self.info["sizes"] = self.icns.itersizes() - self.best_size = self.icns.bestsize() - self.size = ( - self.best_size[0] * self.best_size[2], - self.best_size[1] * self.best_size[2], - ) - - @property - def size(self): - return self._size - - @size.setter - def size(self, value): - info_size = value - if info_size not in self.info["sizes"] and len(info_size) == 2: - info_size = (info_size[0], info_size[1], 1) - if ( - info_size not in self.info["sizes"] - and len(info_size) == 3 - and info_size[2] == 1 - ): - simple_sizes = [ - (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"] - ] - if value in simple_sizes: - info_size = self.info["sizes"][simple_sizes.index(value)] - if info_size not in self.info["sizes"]: - msg = "This is not one of the allowed sizes of this image" - raise ValueError(msg) - self._size = value - - def load(self): - if len(self.size) == 3: - self.best_size = self.size - self.size = ( - self.best_size[0] * self.best_size[2], - self.best_size[1] * self.best_size[2], - ) - - px = Image.Image.load(self) - if self.im is not None and self.im.size == self.size: - # Already loaded - return px - self.load_prepare() - # This is likely NOT the best way to do it, but whatever. - im = self.icns.getimage(self.best_size) - - # If this is a PNG or JPEG 2000, it won't be loaded yet - px = im.load() - - self.im = im.im - self._mode = im.mode - self.size = im.size - - return px - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - """ - Saves the image as a series of PNG files, - that are then combined into a .icns file. 
- """ - if hasattr(fp, "flush"): - fp.flush() - - sizes = { - b"ic07": 128, - b"ic08": 256, - b"ic09": 512, - b"ic10": 1024, - b"ic11": 32, - b"ic12": 64, - b"ic13": 256, - b"ic14": 512, - } - provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])} - size_streams = {} - for size in set(sizes.values()): - image = ( - provided_images[size] - if size in provided_images - else im.resize((size, size)) - ) - - temp = io.BytesIO() - image.save(temp, "png") - size_streams[size] = temp.getvalue() - - entries = [] - for type, size in sizes.items(): - stream = size_streams[size] - entries.append((type, HEADERSIZE + len(stream), stream)) - - # Header - fp.write(MAGIC) - file_length = HEADERSIZE # Header - file_length += HEADERSIZE + 8 * len(entries) # TOC - file_length += sum(entry[1] for entry in entries) - fp.write(struct.pack(">i", file_length)) - - # TOC - fp.write(b"TOC ") - fp.write(struct.pack(">i", HEADERSIZE + len(entries) * HEADERSIZE)) - for entry in entries: - fp.write(entry[0]) - fp.write(struct.pack(">i", entry[1])) - - # Data - for entry in entries: - fp.write(entry[0]) - fp.write(struct.pack(">i", entry[1])) - fp.write(entry[2]) - - if hasattr(fp, "flush"): - fp.flush() - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == MAGIC - - -Image.register_open(IcnsImageFile.format, IcnsImageFile, _accept) -Image.register_extension(IcnsImageFile.format, ".icns") - -Image.register_save(IcnsImageFile.format, _save) -Image.register_mime(IcnsImageFile.format, "image/icns") - -if __name__ == "__main__": - if len(sys.argv) < 2: - print("Syntax: python3 IcnsImagePlugin.py [file]") - sys.exit() - - with open(sys.argv[1], "rb") as fp: - imf = IcnsImageFile(fp) - for size in imf.info["sizes"]: - width, height, scale = imf.size = size - imf.save(f"out-{width}-{height}-{scale}.png") - with Image.open(sys.argv[1]) as im: - im.save("out.png") - if sys.platform == "windows": - os.startfile("out.png") diff --git a/venv/Lib/site-packages/PIL/IcoImagePlugin.py b/venv/Lib/site-packages/PIL/IcoImagePlugin.py deleted file mode 100644 index 227fcf3..0000000 --- a/venv/Lib/site-packages/PIL/IcoImagePlugin.py +++ /dev/null @@ -1,360 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# Windows Icon support for PIL -# -# History: -# 96-05-27 fl Created -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# - -# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis -# . -# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki -# -# Icon format references: -# * https://en.wikipedia.org/wiki/ICO_(file_format) -# * https://msdn.microsoft.com/en-us/library/ms997538.aspx -from __future__ import annotations - -import warnings -from io import BytesIO -from math import ceil, log -from typing import IO - -from . 
import BmpImagePlugin, Image, ImageFile, PngImagePlugin -from ._binary import i16le as i16 -from ._binary import i32le as i32 -from ._binary import o8 -from ._binary import o16le as o16 -from ._binary import o32le as o32 - -# -# -------------------------------------------------------------------- - -_MAGIC = b"\0\0\1\0" - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - fp.write(_MAGIC) # (2+2) - bmp = im.encoderinfo.get("bitmap_format") == "bmp" - sizes = im.encoderinfo.get( - "sizes", - [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)], - ) - frames = [] - provided_ims = [im] + im.encoderinfo.get("append_images", []) - width, height = im.size - for size in sorted(set(sizes)): - if size[0] > width or size[1] > height or size[0] > 256 or size[1] > 256: - continue - - for provided_im in provided_ims: - if provided_im.size != size: - continue - frames.append(provided_im) - if bmp: - bits = BmpImagePlugin.SAVE[provided_im.mode][1] - bits_used = [bits] - for other_im in provided_ims: - if other_im.size != size: - continue - bits = BmpImagePlugin.SAVE[other_im.mode][1] - if bits not in bits_used: - # Another image has been supplied for this size - # with a different bit depth - frames.append(other_im) - bits_used.append(bits) - break - else: - # TODO: invent a more convenient method for proportional scalings - frame = provided_im.copy() - frame.thumbnail(size, Image.Resampling.LANCZOS, reducing_gap=None) - frames.append(frame) - fp.write(o16(len(frames))) # idCount(2) - offset = fp.tell() + len(frames) * 16 - for frame in frames: - width, height = frame.size - # 0 means 256 - fp.write(o8(width if width < 256 else 0)) # bWidth(1) - fp.write(o8(height if height < 256 else 0)) # bHeight(1) - - bits, colors = BmpImagePlugin.SAVE[frame.mode][1:] if bmp else (32, 0) - fp.write(o8(colors)) # bColorCount(1) - fp.write(b"\0") # bReserved(1) - fp.write(b"\0\0") # wPlanes(2) - fp.write(o16(bits)) # wBitCount(2) - - image_io = BytesIO() - if bmp: - frame.save(image_io, "dib") - - if bits != 32: - and_mask = Image.new("1", size) - ImageFile._save( - and_mask, image_io, [("raw", (0, 0) + size, 0, ("1", 0, -1))] - ) - else: - frame.save(image_io, "png") - image_io.seek(0) - image_bytes = image_io.read() - if bmp: - image_bytes = image_bytes[:8] + o32(height * 2) + image_bytes[12:] - bytes_len = len(image_bytes) - fp.write(o32(bytes_len)) # dwBytesInRes(4) - fp.write(o32(offset)) # dwImageOffset(4) - current = fp.tell() - fp.seek(offset) - fp.write(image_bytes) - offset = offset + bytes_len - fp.seek(current) - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == _MAGIC - - -class IcoFile: - def __init__(self, buf): - """ - Parse image from file-like object containing ico file data - """ - - # check magic - s = buf.read(6) - if not _accept(s): - msg = "not an ICO file" - raise SyntaxError(msg) - - self.buf = buf - self.entry = [] - - # Number of items in file - self.nb_items = i16(s, 4) - - # Get headers for each item - for i in range(self.nb_items): - s = buf.read(16) - - icon_header = { - "width": s[0], - "height": s[1], - "nb_color": s[2], # No. of colors in image (0 if >=8bpp) - "reserved": s[3], - "planes": i16(s, 4), - "bpp": i16(s, 6), - "size": i32(s, 8), - "offset": i32(s, 12), - } - - # See Wikipedia - for j in ("width", "height"): - if not icon_header[j]: - icon_header[j] = 256 - - # See Wikipedia notes about color depth. 
- # We need this just to differ images with equal sizes - icon_header["color_depth"] = ( - icon_header["bpp"] - or ( - icon_header["nb_color"] != 0 - and ceil(log(icon_header["nb_color"], 2)) - ) - or 256 - ) - - icon_header["dim"] = (icon_header["width"], icon_header["height"]) - icon_header["square"] = icon_header["width"] * icon_header["height"] - - self.entry.append(icon_header) - - self.entry = sorted(self.entry, key=lambda x: x["color_depth"]) - # ICO images are usually squares - self.entry = sorted(self.entry, key=lambda x: x["square"], reverse=True) - - def sizes(self): - """ - Get a list of all available icon sizes and color depths. - """ - return {(h["width"], h["height"]) for h in self.entry} - - def getentryindex(self, size, bpp=False): - for i, h in enumerate(self.entry): - if size == h["dim"] and (bpp is False or bpp == h["color_depth"]): - return i - return 0 - - def getimage(self, size, bpp=False): - """ - Get an image from the icon - """ - return self.frame(self.getentryindex(size, bpp)) - - def frame(self, idx: int) -> Image.Image: - """ - Get an image from frame idx - """ - - header = self.entry[idx] - - self.buf.seek(header["offset"]) - data = self.buf.read(8) - self.buf.seek(header["offset"]) - - im: Image.Image - if data[:8] == PngImagePlugin._MAGIC: - # png frame - im = PngImagePlugin.PngImageFile(self.buf) - Image._decompression_bomb_check(im.size) - else: - # XOR + AND mask bmp frame - im = BmpImagePlugin.DibImageFile(self.buf) - Image._decompression_bomb_check(im.size) - - # change tile dimension to only encompass XOR image - im._size = (im.size[0], int(im.size[1] / 2)) - d, e, o, a = im.tile[0] - im.tile[0] = d, (0, 0) + im.size, o, a - - # figure out where AND mask image starts - bpp = header["bpp"] - if 32 == bpp: - # 32-bit color depth icon image allows semitransparent areas - # PIL's DIB format ignores transparency bits, recover them. - # The DIB is packed in BGRX byte order where X is the alpha - # channel. - - # Back up to start of bmp data - self.buf.seek(o) - # extract every 4th byte (eg. 3,7,11,15,...) - alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] - - # convert to an 8bpp grayscale image - mask = Image.frombuffer( - "L", # 8bpp - im.size, # (w, h) - alpha_bytes, # source chars - "raw", # raw decoder - ("L", 0, -1), # 8bpp inverted, unpadded, reversed - ) - else: - # get AND image from end of bitmap - w = im.size[0] - if (w % 32) > 0: - # bitmap row data is aligned to word boundaries - w += 32 - (im.size[0] % 32) - - # the total mask data is - # padded row size * height / bits per char - - total_bytes = int((w * im.size[1]) / 8) - and_mask_offset = header["offset"] + header["size"] - total_bytes - - self.buf.seek(and_mask_offset) - mask_data = self.buf.read(total_bytes) - - # convert raw data to image - mask = Image.frombuffer( - "1", # 1 bpp - im.size, # (w, h) - mask_data, # source chars - "raw", # raw decoder - ("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed - ) - - # now we have two images, im is XOR image and mask is AND image - - # apply mask image as alpha channel - im = im.convert("RGBA") - im.putalpha(mask) - - return im - - -## -# Image plugin for Windows Icon files. - - -class IcoImageFile(ImageFile.ImageFile): - """ - PIL read-only image support for Microsoft Windows .ico files. - - By default the largest resolution image in the file will be loaded. This - can be changed by altering the 'size' attribute before calling 'load'. 
- - The info dictionary has a key 'sizes' that is a list of the sizes available - in the icon file. - - Handles classic, XP and Vista icon formats. - - When saving, PNG compression is used. Support for this was only added in - Windows Vista. If you are unable to view the icon in Windows, convert the - image to "RGBA" mode before saving. - - This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis - . - https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki - """ - - format = "ICO" - format_description = "Windows Icon" - - def _open(self) -> None: - self.ico = IcoFile(self.fp) - self.info["sizes"] = self.ico.sizes() - self.size = self.ico.entry[0]["dim"] - self.load() - - @property - def size(self): - return self._size - - @size.setter - def size(self, value): - if value not in self.info["sizes"]: - msg = "This is not one of the allowed sizes of this image" - raise ValueError(msg) - self._size = value - - def load(self): - if self.im is not None and self.im.size == self.size: - # Already loaded - return Image.Image.load(self) - im = self.ico.getimage(self.size) - # if tile is PNG, it won't really be loaded yet - im.load() - self.im = im.im - self.pyaccess = None - self._mode = im.mode - if im.palette: - self.palette = im.palette - if im.size != self.size: - warnings.warn("Image was not the expected size") - - index = self.ico.getentryindex(self.size) - sizes = list(self.info["sizes"]) - sizes[index] = im.size - self.info["sizes"] = set(sizes) - - self.size = im.size - - def load_seek(self, pos: int) -> None: - # Flag the ImageFile.Parser so that it - # just does all the decode at the end. - pass - - -# -# -------------------------------------------------------------------- - - -Image.register_open(IcoImageFile.format, IcoImageFile, _accept) -Image.register_save(IcoImageFile.format, _save) -Image.register_extension(IcoImageFile.format, ".ico") - -Image.register_mime(IcoImageFile.format, "image/x-icon") diff --git a/venv/Lib/site-packages/PIL/ImImagePlugin.py b/venv/Lib/site-packages/PIL/ImImagePlugin.py deleted file mode 100644 index 2fb7ecd..0000000 --- a/venv/Lib/site-packages/PIL/ImImagePlugin.py +++ /dev/null @@ -1,374 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# IFUNC IM file handling for PIL -# -# history: -# 1995-09-01 fl Created. -# 1997-01-03 fl Save palette images -# 1997-01-08 fl Added sequence support -# 1997-01-23 fl Added P and RGB save support -# 1997-05-31 fl Read floating point images -# 1997-06-22 fl Save floating point images -# 1997-08-27 fl Read and save 1-bit images -# 1998-06-25 fl Added support for RGB+LUT images -# 1998-07-02 fl Added support for YCC images -# 1998-07-15 fl Renamed offset attribute to avoid name clash -# 1998-12-29 fl Added I;16 support -# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) -# 2003-09-26 fl Added LA/PA support -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1995-2001 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import os -import re -from typing import IO, Any - -from . 
import Image, ImageFile, ImagePalette - -# -------------------------------------------------------------------- -# Standard tags - -COMMENT = "Comment" -DATE = "Date" -EQUIPMENT = "Digitalization equipment" -FRAMES = "File size (no of images)" -LUT = "Lut" -NAME = "Name" -SCALE = "Scale (x,y)" -SIZE = "Image size (x*y)" -MODE = "Image type" - -TAGS = { - COMMENT: 0, - DATE: 0, - EQUIPMENT: 0, - FRAMES: 0, - LUT: 0, - NAME: 0, - SCALE: 0, - SIZE: 0, - MODE: 0, -} - -OPEN = { - # ifunc93/p3cfunc formats - "0 1 image": ("1", "1"), - "L 1 image": ("1", "1"), - "Greyscale image": ("L", "L"), - "Grayscale image": ("L", "L"), - "RGB image": ("RGB", "RGB;L"), - "RLB image": ("RGB", "RLB"), - "RYB image": ("RGB", "RLB"), - "B1 image": ("1", "1"), - "B2 image": ("P", "P;2"), - "B4 image": ("P", "P;4"), - "X 24 image": ("RGB", "RGB"), - "L 32 S image": ("I", "I;32"), - "L 32 F image": ("F", "F;32"), - # old p3cfunc formats - "RGB3 image": ("RGB", "RGB;T"), - "RYB3 image": ("RGB", "RYB;T"), - # extensions - "LA image": ("LA", "LA;L"), - "PA image": ("LA", "PA;L"), - "RGBA image": ("RGBA", "RGBA;L"), - "RGBX image": ("RGB", "RGBX;L"), - "CMYK image": ("CMYK", "CMYK;L"), - "YCC image": ("YCbCr", "YCbCr;L"), -} - -# ifunc95 extensions -for i in ["8", "8S", "16", "16S", "32", "32F"]: - OPEN[f"L {i} image"] = ("F", f"F;{i}") - OPEN[f"L*{i} image"] = ("F", f"F;{i}") -for i in ["16", "16L", "16B"]: - OPEN[f"L {i} image"] = (f"I;{i}", f"I;{i}") - OPEN[f"L*{i} image"] = (f"I;{i}", f"I;{i}") -for i in ["32S"]: - OPEN[f"L {i} image"] = ("I", f"I;{i}") - OPEN[f"L*{i} image"] = ("I", f"I;{i}") -for j in range(2, 33): - OPEN[f"L*{j} image"] = ("F", f"F;{j}") - - -# -------------------------------------------------------------------- -# Read IM directory - -split = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") - - -def number(s: Any) -> float: - try: - return int(s) - except ValueError: - return float(s) - - -## -# Image plugin for the IFUNC IM file format. - - -class ImImageFile(ImageFile.ImageFile): - format = "IM" - format_description = "IFUNC Image Memory" - _close_exclusive_fp_after_loading = False - - def _open(self) -> None: - # Quick rejection: if there's not an LF among the first - # 100 bytes, this is (probably) not a text header. - - if b"\n" not in self.fp.read(100): - msg = "not an IM file" - raise SyntaxError(msg) - self.fp.seek(0) - - n = 0 - - # Default values - self.info[MODE] = "L" - self.info[SIZE] = (512, 512) - self.info[FRAMES] = 1 - - self.rawmode = "L" - - while True: - s = self.fp.read(1) - - # Some versions of IFUNC uses \n\r instead of \r\n... - if s == b"\r": - continue - - if not s or s == b"\0" or s == b"\x1A": - break - - # FIXME: this may read whole file if not a text file - s = s + self.fp.readline() - - if len(s) > 100: - msg = "not an IM file" - raise SyntaxError(msg) - - if s[-2:] == b"\r\n": - s = s[:-2] - elif s[-1:] == b"\n": - s = s[:-1] - - try: - m = split.match(s) - except re.error as e: - msg = "not an IM file" - raise SyntaxError(msg) from e - - if m: - k, v = m.group(1, 2) - - # Don't know if this is the correct encoding, - # but a decent guess (I guess) - k = k.decode("latin-1", "replace") - v = v.decode("latin-1", "replace") - - # Convert value as appropriate - if k in [FRAMES, SCALE, SIZE]: - v = v.replace("*", ",") - v = tuple(map(number, v.split(","))) - if len(v) == 1: - v = v[0] - elif k == MODE and v in OPEN: - v, self.rawmode = OPEN[v] - - # Add to dictionary. Note that COMMENT tags are - # combined into a list of strings. 
- if k == COMMENT: - if k in self.info: - self.info[k].append(v) - else: - self.info[k] = [v] - else: - self.info[k] = v - - if k in TAGS: - n += 1 - - else: - msg = f"Syntax error in IM header: {s.decode('ascii', 'replace')}" - raise SyntaxError(msg) - - if not n: - msg = "Not an IM file" - raise SyntaxError(msg) - - # Basic attributes - self._size = self.info[SIZE] - self._mode = self.info[MODE] - - # Skip forward to start of image data - while s and s[:1] != b"\x1A": - s = self.fp.read(1) - if not s: - msg = "File truncated" - raise SyntaxError(msg) - - if LUT in self.info: - # convert lookup table to palette or lut attribute - palette = self.fp.read(768) - greyscale = 1 # greyscale palette - linear = 1 # linear greyscale palette - for i in range(256): - if palette[i] == palette[i + 256] == palette[i + 512]: - if palette[i] != i: - linear = 0 - else: - greyscale = 0 - if self.mode in ["L", "LA", "P", "PA"]: - if greyscale: - if not linear: - self.lut = list(palette[:256]) - else: - if self.mode in ["L", "P"]: - self._mode = self.rawmode = "P" - elif self.mode in ["LA", "PA"]: - self._mode = "PA" - self.rawmode = "PA;L" - self.palette = ImagePalette.raw("RGB;L", palette) - elif self.mode == "RGB": - if not greyscale or not linear: - self.lut = list(palette) - - self.frame = 0 - - self.__offset = offs = self.fp.tell() - - self._fp = self.fp # FIXME: hack - - if self.rawmode[:2] == "F;": - # ifunc95 formats - try: - # use bit decoder (if necessary) - bits = int(self.rawmode[2:]) - if bits not in [8, 16, 32]: - self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))] - return - except ValueError: - pass - - if self.rawmode in ["RGB;T", "RYB;T"]: - # Old LabEye/3PC files. Would be very surprised if anyone - # ever stumbled upon such a file ;-) - size = self.size[0] * self.size[1] - self.tile = [ - ("raw", (0, 0) + self.size, offs, ("G", 0, -1)), - ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)), - ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)), - ] - else: - # LabEye/IFUNC files - self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] - - @property - def n_frames(self) -> int: - return self.info[FRAMES] - - @property - def is_animated(self) -> bool: - return self.info[FRAMES] > 1 - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - - self.frame = frame - - if self.mode == "1": - bits = 1 - else: - bits = 8 * len(self.mode) - - size = ((self.size[0] * bits + 7) // 8) * self.size[1] - offs = self.__offset + frame * size - - self.fp = self._fp - - self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] - - def tell(self) -> int: - return self.frame - - -# -# -------------------------------------------------------------------- -# Save IM files - - -SAVE = { - # mode: (im type, raw mode) - "1": ("0 1", "1"), - "L": ("Greyscale", "L"), - "LA": ("LA", "LA;L"), - "P": ("Greyscale", "P"), - "PA": ("LA", "PA;L"), - "I": ("L 32S", "I;32S"), - "I;16": ("L 16", "I;16"), - "I;16L": ("L 16L", "I;16L"), - "I;16B": ("L 16B", "I;16B"), - "F": ("L 32F", "F;32F"), - "RGB": ("RGB", "RGB;L"), - "RGBA": ("RGBA", "RGBA;L"), - "RGBX": ("RGBX", "RGBX;L"), - "CMYK": ("CMYK", "CMYK;L"), - "YCbCr": ("YCC", "YCbCr;L"), -} - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - try: - image_type, rawmode = SAVE[im.mode] - except KeyError as e: - msg = f"Cannot save {im.mode} images as IM" - raise ValueError(msg) from e - - frames = im.encoderinfo.get("frames", 1) - - fp.write(f"Image type: {image_type} 
image\r\n".encode("ascii")) - if filename: - # Each line must be 100 characters or less, - # or: SyntaxError("not an IM file") - # 8 characters are used for "Name: " and "\r\n" - # Keep just the filename, ditch the potentially overlong path - if isinstance(filename, bytes): - filename = filename.decode("ascii") - name, ext = os.path.splitext(os.path.basename(filename)) - name = "".join([name[: 92 - len(ext)], ext]) - - fp.write(f"Name: {name}\r\n".encode("ascii")) - fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii")) - fp.write(f"File size (no of images): {frames}\r\n".encode("ascii")) - if im.mode in ["P", "PA"]: - fp.write(b"Lut: 1\r\n") - fp.write(b"\000" * (511 - fp.tell()) + b"\032") - if im.mode in ["P", "PA"]: - im_palette = im.im.getpalette("RGB", "RGB;L") - colors = len(im_palette) // 3 - palette = b"" - for i in range(3): - palette += im_palette[colors * i : colors * (i + 1)] - palette += b"\x00" * (256 - colors) - fp.write(palette) # 768 bytes - ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))]) - - -# -# -------------------------------------------------------------------- -# Registry - - -Image.register_open(ImImageFile.format, ImImageFile) -Image.register_save(ImImageFile.format, _save) - -Image.register_extension(ImImageFile.format, ".im") diff --git a/venv/Lib/site-packages/PIL/Image.py b/venv/Lib/site-packages/PIL/Image.py deleted file mode 100644 index d41c065..0000000 --- a/venv/Lib/site-packages/PIL/Image.py +++ /dev/null @@ -1,4147 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# the Image class wrapper -# -# partial release history: -# 1995-09-09 fl Created -# 1996-03-11 fl PIL release 0.0 (proof of concept) -# 1996-04-30 fl PIL release 0.1b1 -# 1999-07-28 fl PIL release 1.0 final -# 2000-06-07 fl PIL release 1.1 -# 2000-10-20 fl PIL release 1.1.1 -# 2001-05-07 fl PIL release 1.1.2 -# 2002-03-15 fl PIL release 1.1.3 -# 2003-05-10 fl PIL release 1.1.4 -# 2005-03-28 fl PIL release 1.1.5 -# 2006-12-02 fl PIL release 1.1.6 -# 2009-11-15 fl PIL release 1.1.7 -# -# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. -# Copyright (c) 1995-2009 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# - -from __future__ import annotations - -import abc -import atexit -import builtins -import io -import logging -import math -import os -import re -import struct -import sys -import tempfile -import warnings -from collections.abc import Callable, MutableMapping -from enum import IntEnum -from types import ModuleType -from typing import ( - IO, - TYPE_CHECKING, - Any, - Literal, - Protocol, - Sequence, - Tuple, - cast, -) - -# VERSION was removed in Pillow 6.0.0. -# PILLOW_VERSION was removed in Pillow 9.0.0. -# Use __version__ instead. -from . 
import ( - ExifTags, - ImageMode, - TiffTags, - UnidentifiedImageError, - __version__, - _plugins, -) -from ._binary import i32le, o32be, o32le -from ._deprecate import deprecate -from ._typing import StrOrBytesPath, TypeGuard -from ._util import DeferredError, is_path - -ElementTree: ModuleType | None -try: - from defusedxml import ElementTree -except ImportError: - ElementTree = None - -logger = logging.getLogger(__name__) - - -class DecompressionBombWarning(RuntimeWarning): - pass - - -class DecompressionBombError(Exception): - pass - - -WARN_POSSIBLE_FORMATS: bool = False - -# Limit to around a quarter gigabyte for a 24-bit (3 bpp) image -MAX_IMAGE_PIXELS: int | None = int(1024 * 1024 * 1024 // 4 // 3) - - -try: - # If the _imaging C module is not present, Pillow will not load. - # Note that other modules should not refer to _imaging directly; - # import Image and use the Image.core variable instead. - # Also note that Image.core is not a publicly documented interface, - # and should be considered private and subject to change. - from . import _imaging as core - - if __version__ != getattr(core, "PILLOW_VERSION", None): - msg = ( - "The _imaging extension was built for another version of Pillow or PIL:\n" - f"Core version: {getattr(core, 'PILLOW_VERSION', None)}\n" - f"Pillow version: {__version__}" - ) - raise ImportError(msg) - -except ImportError as v: - core = DeferredError.new(ImportError("The _imaging C module is not installed.")) - # Explanations for ways that we know we might have an import error - if str(v).startswith("Module use of python"): - # The _imaging C module is present, but not compiled for - # the right version (windows only). Print a warning, if - # possible. - warnings.warn( - "The _imaging extension was built for another version of Python.", - RuntimeWarning, - ) - elif str(v).startswith("The _imaging extension"): - warnings.warn(str(v), RuntimeWarning) - # Fail here anyway. Don't let people run with a mostly broken Pillow. - # see docs/porting.rst - raise - - -USE_CFFI_ACCESS = False -cffi: ModuleType | None -try: - import cffi -except ImportError: - cffi = None - - -def isImageType(t: Any) -> TypeGuard[Image]: - """ - Checks if an object is an image object. - - .. warning:: - - This function is for internal use only. 
- - :param t: object to check if it's an image - :returns: True if the object is an image - """ - return hasattr(t, "im") - - -# -# Constants - - -# transpose -class Transpose(IntEnum): - FLIP_LEFT_RIGHT = 0 - FLIP_TOP_BOTTOM = 1 - ROTATE_90 = 2 - ROTATE_180 = 3 - ROTATE_270 = 4 - TRANSPOSE = 5 - TRANSVERSE = 6 - - -# transforms (also defined in Imaging.h) -class Transform(IntEnum): - AFFINE = 0 - EXTENT = 1 - PERSPECTIVE = 2 - QUAD = 3 - MESH = 4 - - -# resampling filters (also defined in Imaging.h) -class Resampling(IntEnum): - NEAREST = 0 - BOX = 4 - BILINEAR = 2 - HAMMING = 5 - BICUBIC = 3 - LANCZOS = 1 - - -_filters_support = { - Resampling.BOX: 0.5, - Resampling.BILINEAR: 1.0, - Resampling.HAMMING: 1.0, - Resampling.BICUBIC: 2.0, - Resampling.LANCZOS: 3.0, -} - - -# dithers -class Dither(IntEnum): - NONE = 0 - ORDERED = 1 # Not yet implemented - RASTERIZE = 2 # Not yet implemented - FLOYDSTEINBERG = 3 # default - - -# palettes/quantizers -class Palette(IntEnum): - WEB = 0 - ADAPTIVE = 1 - - -class Quantize(IntEnum): - MEDIANCUT = 0 - MAXCOVERAGE = 1 - FASTOCTREE = 2 - LIBIMAGEQUANT = 3 - - -module = sys.modules[__name__] -for enum in (Transpose, Transform, Resampling, Dither, Palette, Quantize): - for item in enum: - setattr(module, item.name, item.value) - - -if hasattr(core, "DEFAULT_STRATEGY"): - DEFAULT_STRATEGY = core.DEFAULT_STRATEGY - FILTERED = core.FILTERED - HUFFMAN_ONLY = core.HUFFMAN_ONLY - RLE = core.RLE - FIXED = core.FIXED - - -# -------------------------------------------------------------------- -# Registries - -if TYPE_CHECKING: - from . import ImageFile, PyAccess -ID: list[str] = [] -OPEN: dict[ - str, - tuple[ - Callable[[IO[bytes], str | bytes], ImageFile.ImageFile], - Callable[[bytes], bool | str] | None, - ], -] = {} -MIME: dict[str, str] = {} -SAVE: dict[str, Callable[[Image, IO[bytes], str | bytes], None]] = {} -SAVE_ALL: dict[str, Callable[[Image, IO[bytes], str | bytes], None]] = {} -EXTENSION: dict[str, str] = {} -DECODERS: dict[str, type[ImageFile.PyDecoder]] = {} -ENCODERS: dict[str, type[ImageFile.PyEncoder]] = {} - -# -------------------------------------------------------------------- -# Modes - -_ENDIAN = "<" if sys.byteorder == "little" else ">" - - -def _conv_type_shape(im): - m = ImageMode.getmode(im.mode) - shape = (im.height, im.width) - extra = len(m.bands) - if extra != 1: - shape += (extra,) - return shape, m.typestr - - -MODES = [ - "1", - "CMYK", - "F", - "HSV", - "I", - "I;16", - "I;16B", - "I;16L", - "I;16N", - "L", - "LA", - "La", - "LAB", - "P", - "PA", - "RGB", - "RGBA", - "RGBa", - "RGBX", - "YCbCr", -] - -# raw modes that may be memory mapped. NOTE: if you change this, you -# may have to modify the stride calculation in map.c too! -_MAPMODES = ("L", "P", "RGBX", "RGBA", "CMYK", "I;16", "I;16L", "I;16B") - - -def getmodebase(mode: str) -> str: - """ - Gets the "base" mode for given mode. This function returns "L" for - images that contain grayscale data, and "RGB" for images that - contain color data. - - :param mode: Input mode. - :returns: "L" or "RGB". - :exception KeyError: If the input mode was not a standard mode. - """ - return ImageMode.getmode(mode).basemode - - -def getmodetype(mode: str) -> str: - """ - Gets the storage type mode. Given a mode, this function returns a - single-layer mode suitable for storing individual bands. - - :param mode: Input mode. - :returns: "L", "I", or "F". - :exception KeyError: If the input mode was not a standard mode. 
- """ - return ImageMode.getmode(mode).basetype - - -def getmodebandnames(mode: str) -> tuple[str, ...]: - """ - Gets a list of individual band names. Given a mode, this function returns - a tuple containing the names of individual bands (use - :py:method:`~PIL.Image.getmodetype` to get the mode used to store each - individual band. - - :param mode: Input mode. - :returns: A tuple containing band names. The length of the tuple - gives the number of bands in an image of the given mode. - :exception KeyError: If the input mode was not a standard mode. - """ - return ImageMode.getmode(mode).bands - - -def getmodebands(mode: str) -> int: - """ - Gets the number of individual bands for this mode. - - :param mode: Input mode. - :returns: The number of bands in this mode. - :exception KeyError: If the input mode was not a standard mode. - """ - return len(ImageMode.getmode(mode).bands) - - -# -------------------------------------------------------------------- -# Helpers - -_initialized = 0 - - -def preinit() -> None: - """ - Explicitly loads BMP, GIF, JPEG, PPM and PPM file format drivers. - - It is called when opening or saving images. - """ - - global _initialized - if _initialized >= 1: - return - - try: - from . import BmpImagePlugin - - assert BmpImagePlugin - except ImportError: - pass - try: - from . import GifImagePlugin - - assert GifImagePlugin - except ImportError: - pass - try: - from . import JpegImagePlugin - - assert JpegImagePlugin - except ImportError: - pass - try: - from . import PpmImagePlugin - - assert PpmImagePlugin - except ImportError: - pass - try: - from . import PngImagePlugin - - assert PngImagePlugin - except ImportError: - pass - - _initialized = 1 - - -def init() -> bool: - """ - Explicitly initializes the Python Imaging Library. This function - loads all available file format drivers. - - It is called when opening or saving images if :py:meth:`~preinit()` is - insufficient, and by :py:meth:`~PIL.features.pilinfo`. - """ - - global _initialized - if _initialized >= 2: - return False - - parent_name = __name__.rpartition(".")[0] - for plugin in _plugins: - try: - logger.debug("Importing %s", plugin) - __import__(f"{parent_name}.{plugin}", globals(), locals(), []) - except ImportError as e: - logger.debug("Image: failed to import %s: %s", plugin, e) - - if OPEN or SAVE: - _initialized = 2 - return True - return False - - -# -------------------------------------------------------------------- -# Codec factories (used by tobytes/frombytes and ImageFile.load) - - -def _getdecoder( - mode: str, decoder_name: str, args: Any, extra: tuple[Any, ...] = () -) -> core.ImagingDecoder | ImageFile.PyDecoder: - # tweak arguments - if args is None: - args = () - elif not isinstance(args, tuple): - args = (args,) - - try: - decoder = DECODERS[decoder_name] - except KeyError: - pass - else: - return decoder(mode, *args + extra) - - try: - # get decoder - decoder = getattr(core, f"{decoder_name}_decoder") - except AttributeError as e: - msg = f"decoder {decoder_name} not available" - raise OSError(msg) from e - return decoder(mode, *args + extra) - - -def _getencoder( - mode: str, encoder_name: str, args: Any, extra: tuple[Any, ...] 
= () -) -> core.ImagingEncoder | ImageFile.PyEncoder: - # tweak arguments - if args is None: - args = () - elif not isinstance(args, tuple): - args = (args,) - - try: - encoder = ENCODERS[encoder_name] - except KeyError: - pass - else: - return encoder(mode, *args + extra) - - try: - # get encoder - encoder = getattr(core, f"{encoder_name}_encoder") - except AttributeError as e: - msg = f"encoder {encoder_name} not available" - raise OSError(msg) from e - return encoder(mode, *args + extra) - - -# -------------------------------------------------------------------- -# Simple expression analyzer - - -class _E: - def __init__(self, scale, offset) -> None: - self.scale = scale - self.offset = offset - - def __neg__(self): - return _E(-self.scale, -self.offset) - - def __add__(self, other): - if isinstance(other, _E): - return _E(self.scale + other.scale, self.offset + other.offset) - return _E(self.scale, self.offset + other) - - __radd__ = __add__ - - def __sub__(self, other): - return self + -other - - def __rsub__(self, other): - return other + -self - - def __mul__(self, other): - if isinstance(other, _E): - return NotImplemented - return _E(self.scale * other, self.offset * other) - - __rmul__ = __mul__ - - def __truediv__(self, other): - if isinstance(other, _E): - return NotImplemented - return _E(self.scale / other, self.offset / other) - - -def _getscaleoffset(expr): - a = expr(_E(1, 0)) - return (a.scale, a.offset) if isinstance(a, _E) else (0, a) - - -# -------------------------------------------------------------------- -# Implementation wrapper - - -class SupportsGetData(Protocol): - def getdata( - self, - ) -> tuple[Transform, Sequence[int]]: ... - - -class Image: - """ - This class represents an image object. To create - :py:class:`~PIL.Image.Image` objects, use the appropriate factory - functions. There's hardly ever any reason to call the Image constructor - directly. - - * :py:func:`~PIL.Image.open` - * :py:func:`~PIL.Image.new` - * :py:func:`~PIL.Image.frombytes` - """ - - format: str | None = None - format_description: str | None = None - _close_exclusive_fp_after_loading = True - - def __init__(self): - # FIXME: take "new" parameters / other image? - # FIXME: turn mode and size into delegating properties? - self.im = None - self._mode = "" - self._size = (0, 0) - self.palette = None - self.info = {} - self.readonly = 0 - self.pyaccess = None - self._exif = None - - @property - def width(self) -> int: - return self.size[0] - - @property - def height(self) -> int: - return self.size[1] - - @property - def size(self) -> tuple[int, int]: - return self._size - - @property - def mode(self) -> str: - return self._mode - - def _new(self, im: core.ImagingCore) -> Image: - new = Image() - new.im = im - new._mode = im.mode - new._size = im.size - if im.mode in ("P", "PA"): - if self.palette: - new.palette = self.palette.copy() - else: - from . import ImagePalette - - new.palette = ImagePalette.ImagePalette() - new.info = self.info.copy() - return new - - # Context manager support - def __enter__(self): - return self - - def _close_fp(self): - if getattr(self, "_fp", False): - if self._fp != self.fp: - self._fp.close() - self._fp = DeferredError(ValueError("Operation on closed image")) - if self.fp: - self.fp.close() - - def __exit__(self, *args): - if hasattr(self, "fp"): - if getattr(self, "_exclusive_fp", False): - self._close_fp() - self.fp = None - - def close(self) -> None: - """ - Closes the file pointer, if possible. 
- - This operation will destroy the image core and release its memory. - The image data will be unusable afterward. - - This function is required to close images that have multiple frames or - have not had their file read and closed by the - :py:meth:`~PIL.Image.Image.load` method. See :ref:`file-handling` for - more information. - """ - if hasattr(self, "fp"): - try: - self._close_fp() - self.fp = None - except Exception as msg: - logger.debug("Error closing: %s", msg) - - if getattr(self, "map", None): - self.map = None - - # Instead of simply setting to None, we're setting up a - # deferred error that will better explain that the core image - # object is gone. - self.im = DeferredError(ValueError("Operation on closed image")) - - def _copy(self) -> None: - self.load() - self.im = self.im.copy() - self.pyaccess = None - self.readonly = 0 - - def _ensure_mutable(self) -> None: - if self.readonly: - self._copy() - else: - self.load() - - def _dump( - self, file: str | None = None, format: str | None = None, **options: Any - ) -> str: - suffix = "" - if format: - suffix = f".{format}" - - if not file: - f, filename = tempfile.mkstemp(suffix) - os.close(f) - else: - filename = file - if not filename.endswith(suffix): - filename = filename + suffix - - self.load() - - if not format or format == "PPM": - self.im.save_ppm(filename) - else: - self.save(filename, format, **options) - - return filename - - def __eq__(self, other: object) -> bool: - if self.__class__ is not other.__class__: - return False - assert isinstance(other, Image) - return ( - self.mode == other.mode - and self.size == other.size - and self.info == other.info - and self.getpalette() == other.getpalette() - and self.tobytes() == other.tobytes() - ) - - def __repr__(self) -> str: - return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( - self.__class__.__module__, - self.__class__.__name__, - self.mode, - self.size[0], - self.size[1], - id(self), - ) - - def _repr_pretty_(self, p, cycle) -> None: - """IPython plain text display support""" - - # Same as __repr__ but without unpredictable id(self), - # to keep Jupyter notebook `text/plain` output stable. - p.text( - "<%s.%s image mode=%s size=%dx%d>" - % ( - self.__class__.__module__, - self.__class__.__name__, - self.mode, - self.size[0], - self.size[1], - ) - ) - - def _repr_image(self, image_format: str, **kwargs: Any) -> bytes | None: - """Helper function for iPython display hook. - - :param image_format: Image format. - :returns: image as bytes, saved into the given format. - """ - b = io.BytesIO() - try: - self.save(b, image_format, **kwargs) - except Exception: - return None - return b.getvalue() - - def _repr_png_(self) -> bytes | None: - """iPython display hook support for PNG format. - - :returns: PNG version of the image as bytes - """ - return self._repr_image("PNG", compress_level=1) - - def _repr_jpeg_(self) -> bytes | None: - """iPython display hook support for JPEG format. 
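The IPython display hooks above boil down to saving the image into an in-memory buffer in a given format. A minimal sketch of that pattern, assuming only that Pillow is installed:

import io
from PIL import Image

im = Image.new("RGB", (32, 32), "red")
buf = io.BytesIO()
im.save(buf, "PNG", compress_level=1)   # a format is required for file objects
data = buf.getvalue()
print(len(data), data[:8])              # starts with the PNG signature \x89PNG\r\n\x1a\n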
- - :returns: JPEG version of the image as bytes - """ - return self._repr_image("JPEG") - - @property - def __array_interface__(self): - # numpy array interface support - new = {"version": 3} - try: - if self.mode == "1": - # Binary images need to be extended from bits to bytes - # See: https://github.com/python-pillow/Pillow/issues/350 - new["data"] = self.tobytes("raw", "L") - else: - new["data"] = self.tobytes() - except Exception as e: - if not isinstance(e, (MemoryError, RecursionError)): - try: - import numpy - from packaging.version import parse as parse_version - except ImportError: - pass - else: - if parse_version(numpy.__version__) < parse_version("1.23"): - warnings.warn(str(e)) - raise - new["shape"], new["typestr"] = _conv_type_shape(self) - return new - - def __getstate__(self): - im_data = self.tobytes() # load image first - return [self.info, self.mode, self.size, self.getpalette(), im_data] - - def __setstate__(self, state) -> None: - Image.__init__(self) - info, mode, size, palette, data = state - self.info = info - self._mode = mode - self._size = size - self.im = core.new(mode, size) - if mode in ("L", "LA", "P", "PA") and palette: - self.putpalette(palette) - self.frombytes(data) - - def tobytes(self, encoder_name: str = "raw", *args: Any) -> bytes: - """ - Return image as a bytes object. - - .. warning:: - - This method returns the raw image data from the internal - storage. For compressed image data (e.g. PNG, JPEG) use - :meth:`~.save`, with a BytesIO parameter for in-memory - data. - - :param encoder_name: What encoder to use. The default is to - use the standard "raw" encoder. - - A list of C encoders can be seen under - codecs section of the function array in - :file:`_imaging.c`. Python encoders are - registered within the relevant plugins. - :param args: Extra arguments to the encoder. - :returns: A :py:class:`bytes` object. - """ - - encoder_args: Any = args - if len(encoder_args) == 1 and isinstance(encoder_args[0], tuple): - # may pass tuple instead of argument list - encoder_args = encoder_args[0] - - if encoder_name == "raw" and encoder_args == (): - encoder_args = self.mode - - self.load() - - if self.width == 0 or self.height == 0: - return b"" - - # unpack data - e = _getencoder(self.mode, encoder_name, encoder_args) - e.setimage(self.im) - - bufsize = max(65536, self.size[0] * 4) # see RawEncode.c - - output = [] - while True: - bytes_consumed, errcode, data = e.encode(bufsize) - output.append(data) - if errcode: - break - if errcode < 0: - msg = f"encoder error {errcode} in tobytes" - raise RuntimeError(msg) - - return b"".join(output) - - def tobitmap(self, name: str = "image") -> bytes: - """ - Returns the image converted to an X11 bitmap. - - .. note:: This method only works for mode "1" images. - - :param name: The name prefix to use for the bitmap variables. - :returns: A string containing an X11 bitmap. - :raises ValueError: If the mode is not "1" - """ - - self.load() - if self.mode != "1": - msg = "not a bitmap" - raise ValueError(msg) - data = self.tobytes("xbm") - return b"".join( - [ - f"#define {name}_width {self.size[0]}\n".encode("ascii"), - f"#define {name}_height {self.size[1]}\n".encode("ascii"), - f"static char {name}_bits[] = {{\n".encode("ascii"), - data, - b"};", - ] - ) - - def frombytes( - self, data: bytes | bytearray, decoder_name: str = "raw", *args: Any - ) -> None: - """ - Loads this image with pixel data from a bytes object. 
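A small round trip through the raw codec path documented above (tobytes() followed by frombytes()); this is a usage sketch, assuming Pillow:

from PIL import Image

src = Image.new("RGB", (4, 2), (10, 20, 30))
raw = src.tobytes()                 # default "raw" encoder, mode as the raw mode
assert len(raw) == 4 * 2 * 3        # width * height * bands

dst = Image.new("RGB", src.size)
dst.frombytes(raw)                  # decode the same raw data back into an image
assert list(src.getdata()) == list(dst.getdata())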
- - This method is similar to the :py:func:`~PIL.Image.frombytes` function, - but loads data into this image instead of creating a new image object. - """ - - if self.width == 0 or self.height == 0: - return - - decoder_args: Any = args - if len(decoder_args) == 1 and isinstance(decoder_args[0], tuple): - # may pass tuple instead of argument list - decoder_args = decoder_args[0] - - # default format - if decoder_name == "raw" and decoder_args == (): - decoder_args = self.mode - - # unpack data - d = _getdecoder(self.mode, decoder_name, decoder_args) - d.setimage(self.im) - s = d.decode(data) - - if s[0] >= 0: - msg = "not enough image data" - raise ValueError(msg) - if s[1] != 0: - msg = "cannot decode image data" - raise ValueError(msg) - - def load(self) -> core.PixelAccess | PyAccess.PyAccess | None: - """ - Allocates storage for the image and loads the pixel data. In - normal cases, you don't need to call this method, since the - Image class automatically loads an opened image when it is - accessed for the first time. - - If the file associated with the image was opened by Pillow, then this - method will close it. The exception to this is if the image has - multiple frames, in which case the file will be left open for seek - operations. See :ref:`file-handling` for more information. - - :returns: An image access object. - :rtype: :py:class:`.PixelAccess` or :py:class:`.PyAccess` - """ - if self.im is not None and self.palette and self.palette.dirty: - # realize palette - mode, arr = self.palette.getdata() - self.im.putpalette(self.palette.mode, mode, arr) - self.palette.dirty = 0 - self.palette.rawmode = None - if "transparency" in self.info and mode in ("LA", "PA"): - if isinstance(self.info["transparency"], int): - self.im.putpalettealpha(self.info["transparency"], 0) - else: - self.im.putpalettealphas(self.info["transparency"]) - self.palette.mode = "RGBA" - else: - self.palette.palette = self.im.getpalette( - self.palette.mode, self.palette.mode - ) - - if self.im is not None: - if cffi and USE_CFFI_ACCESS: - if self.pyaccess: - return self.pyaccess - from . import PyAccess - - self.pyaccess = PyAccess.new(self, self.readonly) - if self.pyaccess: - return self.pyaccess - return self.im.pixel_access(self.readonly) - return None - - def verify(self) -> None: - """ - Verifies the contents of a file. For data read from a file, this - method attempts to determine if the file is broken, without - actually decoding the image data. If this method finds any - problems, it raises suitable exceptions. If you need to load - the image after using this method, you must reopen the image - file. - """ - pass - - def convert( - self, - mode: str | None = None, - matrix: tuple[float, ...] | None = None, - dither: Dither | None = None, - palette: Palette = Palette.WEB, - colors: int = 256, - ) -> Image: - """ - Returns a converted copy of this image. For the "P" mode, this - method translates pixels through the palette. If mode is - omitted, a mode is chosen so that all information in the image - and the palette can be represented without a palette. - - This supports all possible conversions between "L", "RGB" and "CMYK". The - ``matrix`` argument only supports "L" and "RGB". 
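Typical use of load() as described above is to obtain a pixel access object for direct reads and writes; a short sketch, assuming Pillow:

from PIL import Image

im = Image.new("L", (3, 3), 0)
px = im.load()                 # allocates storage and returns a pixel access object
px[1, 1] = 255                 # write a single pixel
print(im.getpixel((1, 1)))     # 255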
- - When translating a color image to grayscale (mode "L"), - the library uses the ITU-R 601-2 luma transform:: - - L = R * 299/1000 + G * 587/1000 + B * 114/1000 - - The default method of converting a grayscale ("L") or "RGB" - image into a bilevel (mode "1") image uses Floyd-Steinberg - dither to approximate the original image luminosity levels. If - dither is ``None``, all values larger than 127 are set to 255 (white), - all other values to 0 (black). To use other thresholds, use the - :py:meth:`~PIL.Image.Image.point` method. - - When converting from "RGBA" to "P" without a ``matrix`` argument, - this passes the operation to :py:meth:`~PIL.Image.Image.quantize`, - and ``dither`` and ``palette`` are ignored. - - When converting from "PA", if an "RGBA" palette is present, the alpha - channel from the image will be used instead of the values from the palette. - - :param mode: The requested mode. See: :ref:`concept-modes`. - :param matrix: An optional conversion matrix. If given, this - should be 4- or 12-tuple containing floating point values. - :param dither: Dithering method, used when converting from - mode "RGB" to "P" or from "RGB" or "L" to "1". - Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG` - (default). Note that this is not used when ``matrix`` is supplied. - :param palette: Palette to use when converting from mode "RGB" - to "P". Available palettes are :data:`Palette.WEB` or - :data:`Palette.ADAPTIVE`. - :param colors: Number of colors to use for the :data:`Palette.ADAPTIVE` - palette. Defaults to 256. - :rtype: :py:class:`~PIL.Image.Image` - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - if mode in ("BGR;15", "BGR;16", "BGR;24"): - deprecate(mode, 12) - - self.load() - - has_transparency = "transparency" in self.info - if not mode and self.mode == "P": - # determine default mode - if self.palette: - mode = self.palette.mode - else: - mode = "RGB" - if mode == "RGB" and has_transparency: - mode = "RGBA" - if not mode or (mode == self.mode and not matrix): - return self.copy() - - if matrix: - # matrix conversion - if mode not in ("L", "RGB"): - msg = "illegal conversion" - raise ValueError(msg) - im = self.im.convert_matrix(mode, matrix) - new_im = self._new(im) - if has_transparency and self.im.bands == 3: - transparency = new_im.info["transparency"] - - def convert_transparency( - m: tuple[float, ...], v: tuple[int, int, int] - ) -> int: - value = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5 - return max(0, min(255, int(value))) - - if mode == "L": - transparency = convert_transparency(matrix, transparency) - elif len(mode) == 3: - transparency = tuple( - convert_transparency(matrix[i * 4 : i * 4 + 4], transparency) - for i in range(0, len(transparency)) - ) - new_im.info["transparency"] = transparency - return new_im - - if mode == "P" and self.mode == "RGBA": - return self.quantize(colors) - - trns = None - delete_trns = False - # transparency handling - if has_transparency: - if (self.mode in ("1", "L", "I", "I;16") and mode in ("LA", "RGBA")) or ( - self.mode == "RGB" and mode in ("La", "LA", "RGBa", "RGBA") - ): - # Use transparent conversion to promote from transparent - # color to an alpha channel. - new_im = self._new( - self.im.convert_transparent(mode, self.info["transparency"]) - ) - del new_im.info["transparency"] - return new_im - elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"): - t = self.info["transparency"] - if isinstance(t, bytes): - # Dragons. 
This can't be represented by a single color - warnings.warn( - "Palette images with Transparency expressed in bytes should be " - "converted to RGBA images" - ) - delete_trns = True - else: - # get the new transparency color. - # use existing conversions - trns_im = new(self.mode, (1, 1)) - if self.mode == "P": - trns_im.putpalette(self.palette) - if isinstance(t, tuple): - err = "Couldn't allocate a palette color for transparency" - try: - t = trns_im.palette.getcolor(t, self) - except ValueError as e: - if str(e) == "cannot allocate more than 256 colors": - # If all 256 colors are in use, - # then there is no need for transparency - t = None - else: - raise ValueError(err) from e - if t is None: - trns = None - else: - trns_im.putpixel((0, 0), t) - - if mode in ("L", "RGB"): - trns_im = trns_im.convert(mode) - else: - # can't just retrieve the palette number, got to do it - # after quantization. - trns_im = trns_im.convert("RGB") - trns = trns_im.getpixel((0, 0)) - - elif self.mode == "P" and mode in ("LA", "PA", "RGBA"): - t = self.info["transparency"] - delete_trns = True - - if isinstance(t, bytes): - self.im.putpalettealphas(t) - elif isinstance(t, int): - self.im.putpalettealpha(t, 0) - else: - msg = "Transparency for P mode should be bytes or int" - raise ValueError(msg) - - if mode == "P" and palette == Palette.ADAPTIVE: - im = self.im.quantize(colors) - new_im = self._new(im) - from . import ImagePalette - - new_im.palette = ImagePalette.ImagePalette( - "RGB", new_im.im.getpalette("RGB") - ) - if delete_trns: - # This could possibly happen if we requantize to fewer colors. - # The transparency would be totally off in that case. - del new_im.info["transparency"] - if trns is not None: - try: - new_im.info["transparency"] = new_im.palette.getcolor( - cast(Tuple[int, ...], trns), # trns was converted to RGB - new_im, - ) - except Exception: - # if we can't make a transparent color, don't leave the old - # transparency hanging around to mess us up. - del new_im.info["transparency"] - warnings.warn("Couldn't allocate palette entry for transparency") - return new_im - - if "LAB" in (self.mode, mode): - other_mode = mode if self.mode == "LAB" else self.mode - if other_mode in ("RGB", "RGBA", "RGBX"): - from . import ImageCms - - srgb = ImageCms.createProfile("sRGB") - lab = ImageCms.createProfile("LAB") - profiles = [lab, srgb] if self.mode == "LAB" else [srgb, lab] - transform = ImageCms.buildTransform( - profiles[0], profiles[1], self.mode, mode - ) - return transform.apply(self) - - # colorspace conversion - if dither is None: - dither = Dither.FLOYDSTEINBERG - - try: - im = self.im.convert(mode, dither) - except ValueError: - try: - # normalize source image and try again - modebase = getmodebase(self.mode) - if modebase == self.mode: - raise - im = self.im.convert(modebase) - im = im.convert(mode, dither) - except KeyError as e: - msg = "illegal conversion" - raise ValueError(msg) from e - - new_im = self._new(im) - if mode == "P" and palette != Palette.ADAPTIVE: - from . import ImagePalette - - new_im.palette = ImagePalette.ImagePalette("RGB", im.getpalette("RGB")) - if delete_trns: - # crash fail if we leave a bytes transparency in an rgb/l mode. 
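The transparency handling above is what lets a "P" image with a "transparency" entry in info gain a real alpha channel on conversion. A self-contained sketch (the palette values and index chosen here are arbitrary):

from PIL import Image

im = Image.new("P", (2, 2), 0)                       # every pixel uses palette index 0
im.putpalette([0, 0, 0, 255, 0, 0] + [0] * 762)      # 256 RGB entries in total
im.info["transparency"] = 0                          # index 0 should be transparent

rgba = im.convert("RGBA")                            # transparency becomes alpha
print(rgba.getpixel((0, 0)))                         # (0, 0, 0, 0)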
- del new_im.info["transparency"] - if trns is not None: - if new_im.mode == "P" and new_im.palette: - try: - new_im.info["transparency"] = new_im.palette.getcolor(trns, new_im) - except ValueError as e: - del new_im.info["transparency"] - if str(e) != "cannot allocate more than 256 colors": - # If all 256 colors are in use, - # then there is no need for transparency - warnings.warn( - "Couldn't allocate palette entry for transparency" - ) - else: - new_im.info["transparency"] = trns - return new_im - - def quantize( - self, - colors: int = 256, - method: int | None = None, - kmeans: int = 0, - palette=None, - dither: Dither = Dither.FLOYDSTEINBERG, - ) -> Image: - """ - Convert the image to 'P' mode with the specified number - of colors. - - :param colors: The desired number of colors, <= 256 - :param method: :data:`Quantize.MEDIANCUT` (median cut), - :data:`Quantize.MAXCOVERAGE` (maximum coverage), - :data:`Quantize.FASTOCTREE` (fast octree), - :data:`Quantize.LIBIMAGEQUANT` (libimagequant; check support - using :py:func:`PIL.features.check_feature` with - ``feature="libimagequant"``). - - By default, :data:`Quantize.MEDIANCUT` will be used. - - The exception to this is RGBA images. :data:`Quantize.MEDIANCUT` - and :data:`Quantize.MAXCOVERAGE` do not support RGBA images, so - :data:`Quantize.FASTOCTREE` is used by default instead. - :param kmeans: Integer greater than or equal to zero. - :param palette: Quantize to the palette of given - :py:class:`PIL.Image.Image`. - :param dither: Dithering method, used when converting from - mode "RGB" to "P" or from "RGB" or "L" to "1". - Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG` - (default). - :returns: A new image - """ - - self.load() - - if method is None: - # defaults: - method = Quantize.MEDIANCUT - if self.mode == "RGBA": - method = Quantize.FASTOCTREE - - if self.mode == "RGBA" and method not in ( - Quantize.FASTOCTREE, - Quantize.LIBIMAGEQUANT, - ): - # Caller specified an invalid mode. - msg = ( - "Fast Octree (method == 2) and libimagequant (method == 3) " - "are the only valid methods for quantizing RGBA images" - ) - raise ValueError(msg) - - if palette: - # use palette from reference image - palette.load() - if palette.mode != "P": - msg = "bad mode for palette image" - raise ValueError(msg) - if self.mode not in {"RGB", "L"}: - msg = "only RGB or L mode images can be quantized to a palette" - raise ValueError(msg) - im = self.im.convert("P", dither, palette.im) - new_im = self._new(im) - new_im.palette = palette.palette.copy() - return new_im - - if kmeans < 0: - msg = "kmeans must not be negative" - raise ValueError(msg) - - im = self._new(self.im.quantize(colors, method, kmeans)) - - from . import ImagePalette - - mode = im.im.getpalettemode() - palette = im.im.getpalette(mode, mode)[: colors * len(mode)] - im.palette = ImagePalette.ImagePalette(mode, palette) - - return im - - def copy(self) -> Image: - """ - Copies this image. Use this method if you wish to paste things - into an image, but still retain the original. - - :rtype: :py:class:`~PIL.Image.Image` - :returns: An :py:class:`~PIL.Image.Image` object. - """ - self.load() - return self._new(self.im.copy()) - - __copy__ = copy - - def crop(self, box: tuple[float, float, float, float] | None = None) -> Image: - """ - Returns a rectangular region from this image. The box is a - 4-tuple defining the left, upper, right, and lower pixel - coordinate. See :ref:`coordinate-system`. - - Note: Prior to Pillow 3.4.0, this was a lazy operation. 
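quantize() as documented above reduces an RGB image to a palette image; this sketch uses the Quantize and Dither enums referenced in the docstring and assumes Pillow:

from PIL import Image

im = Image.new("RGB", (64, 64))
# Fill with a simple gradient so there is more than one colour to reduce.
im.putdata([(x * 4, y * 4, 128) for y in range(64) for x in range(64)])

pal = im.quantize(colors=16, method=Image.Quantize.MEDIANCUT,
                  dither=Image.Dither.NONE)
print(pal.mode, len(pal.getcolors()))    # 'P' and at most 16 colours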
- - :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. - :rtype: :py:class:`~PIL.Image.Image` - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - if box is None: - return self.copy() - - if box[2] < box[0]: - msg = "Coordinate 'right' is less than 'left'" - raise ValueError(msg) - elif box[3] < box[1]: - msg = "Coordinate 'lower' is less than 'upper'" - raise ValueError(msg) - - self.load() - return self._new(self._crop(self.im, box)) - - def _crop( - self, im: core.ImagingCore, box: tuple[float, float, float, float] - ) -> core.ImagingCore: - """ - Returns a rectangular region from the core image object im. - - This is equivalent to calling im.crop((x0, y0, x1, y1)), but - includes additional sanity checks. - - :param im: a core image object - :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. - :returns: A core image object. - """ - - x0, y0, x1, y1 = map(int, map(round, box)) - - absolute_values = (abs(x1 - x0), abs(y1 - y0)) - - _decompression_bomb_check(absolute_values) - - return im.crop((x0, y0, x1, y1)) - - def draft( - self, mode: str | None, size: tuple[int, int] | None - ) -> tuple[str, tuple[int, int, float, float]] | None: - """ - Configures the image file loader so it returns a version of the - image that as closely as possible matches the given mode and - size. For example, you can use this method to convert a color - JPEG to grayscale while loading it. - - If any changes are made, returns a tuple with the chosen ``mode`` and - ``box`` with coordinates of the original image within the altered one. - - Note that this method modifies the :py:class:`~PIL.Image.Image` object - in place. If the image has already been loaded, this method has no - effect. - - Note: This method is not implemented for most images. It is - currently implemented only for JPEG and MPO images. - - :param mode: The requested mode. - :param size: The requested size in pixels, as a 2-tuple: - (width, height). - """ - pass - - def _expand(self, xmargin: int, ymargin: int | None = None) -> Image: - if ymargin is None: - ymargin = xmargin - self.load() - return self._new(self.im.expand(xmargin, ymargin)) - - if TYPE_CHECKING: - from . import ImageFilter - - def filter(self, filter: ImageFilter.Filter | type[ImageFilter.Filter]) -> Image: - """ - Filters this image using the given filter. For a list of - available filters, see the :py:mod:`~PIL.ImageFilter` module. - - :param filter: Filter kernel. - :returns: An :py:class:`~PIL.Image.Image` object.""" - - from . import ImageFilter - - self.load() - - if callable(filter): - filter = filter() - if not hasattr(filter, "filter"): - msg = "filter argument should be ImageFilter.Filter instance or class" - raise TypeError(msg) - - multiband = isinstance(filter, ImageFilter.MultibandFilter) - if self.im.bands == 1 or multiband: - return self._new(filter.filter(self.im)) - - ims = [ - self._new(filter.filter(self.im.getband(c))) for c in range(self.im.bands) - ] - return merge(self.mode, ims) - - def getbands(self) -> tuple[str, ...]: - """ - Returns a tuple containing the name of each band in this image. - For example, ``getbands`` on an RGB image returns ("R", "G", "B"). - - :returns: A tuple containing band names. - :rtype: tuple - """ - return ImageMode.getmode(self.mode).bands - - def getbbox(self, *, alpha_only: bool = True) -> tuple[int, int, int, int] | None: - """ - Calculates the bounding box of the non-zero regions in the - image. - - :param alpha_only: Optional flag, defaulting to ``True``. 
- If ``True`` and the image has an alpha channel, trim transparent pixels. - Otherwise, trim pixels when all channels are zero. - Keyword-only argument. - :returns: The bounding box is returned as a 4-tuple defining the - left, upper, right, and lower pixel coordinate. See - :ref:`coordinate-system`. If the image is completely empty, this - method returns None. - - """ - - self.load() - return self.im.getbbox(alpha_only) - - def getcolors(self, maxcolors: int = 256): - """ - Returns a list of colors used in this image. - - The colors will be in the image's mode. For example, an RGB image will - return a tuple of (red, green, blue) color values, and a P image will - return the index of the color in the palette. - - :param maxcolors: Maximum number of colors. If this number is - exceeded, this method returns None. The default limit is - 256 colors. - :returns: An unsorted list of (count, pixel) values. - """ - - self.load() - if self.mode in ("1", "L", "P"): - h = self.im.histogram() - out = [(h[i], i) for i in range(256) if h[i]] - if len(out) > maxcolors: - return None - return out - return self.im.getcolors(maxcolors) - - def getdata(self, band: int | None = None): - """ - Returns the contents of this image as a sequence object - containing pixel values. The sequence object is flattened, so - that values for line one follow directly after the values of - line zero, and so on. - - Note that the sequence object returned by this method is an - internal PIL data type, which only supports certain sequence - operations. To convert it to an ordinary sequence (e.g. for - printing), use ``list(im.getdata())``. - - :param band: What band to return. The default is to return - all bands. To return a single band, pass in the index - value (e.g. 0 to get the "R" band from an "RGB" image). - :returns: A sequence-like object. - """ - - self.load() - if band is not None: - return self.im.getband(band) - return self.im # could be abused - - def getextrema(self) -> tuple[float, float] | tuple[tuple[int, int], ...]: - """ - Gets the minimum and maximum pixel values for each band in - the image. - - :returns: For a single-band image, a 2-tuple containing the - minimum and maximum pixel value. For a multi-band image, - a tuple containing one 2-tuple for each band. - """ - - self.load() - if self.im.bands > 1: - return tuple(self.im.getband(i).getextrema() for i in range(self.im.bands)) - return self.im.getextrema() - - def getxmp(self): - """ - Returns a dictionary containing the XMP tags. - Requires defusedxml to be installed. - - :returns: XMP tags in a dictionary. - """ - - def get_name(tag: str) -> str: - return re.sub("^{[^}]+}", "", tag) - - def get_value(element): - value = {get_name(k): v for k, v in element.attrib.items()} - children = list(element) - if children: - for child in children: - name = get_name(child.tag) - child_value = get_value(child) - if name in value: - if not isinstance(value[name], list): - value[name] = [value[name]] - value[name].append(child_value) - else: - value[name] = child_value - elif value: - if element.text: - value["text"] = element.text - else: - return element.text - return value - - if ElementTree is None: - warnings.warn("XMP data cannot be read without defusedxml dependency") - return {} - if "xmp" not in self.info: - return {} - root = ElementTree.fromstring(self.info["xmp"].rstrip(b"\x00")) - return {get_name(root.tag): get_value(root)} - - def getexif(self) -> Exif: - """ - Gets EXIF data from the image. - - :returns: an :py:class:`~PIL.Image.Exif` object. 
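Reading metadata through getexif() as described above; the orientation tag is the most common use. A sketch with a placeholder filename ("photo.jpg" does not refer to any real file), assuming a Pillow version that provides the ExifTags.Base enum:

from PIL import Image, ExifTags

with Image.open("photo.jpg") as im:          # placeholder path for this sketch
    exif = im.getexif()
    orientation = exif.get(ExifTags.Base.Orientation)
    print("EXIF orientation:", orientation)  # None if the tag is absent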
- """ - if self._exif is None: - self._exif = Exif() - elif self._exif._loaded: - return self._exif - self._exif._loaded = True - - exif_info = self.info.get("exif") - if exif_info is None: - if "Raw profile type exif" in self.info: - exif_info = bytes.fromhex( - "".join(self.info["Raw profile type exif"].split("\n")[3:]) - ) - elif hasattr(self, "tag_v2"): - self._exif.bigtiff = self.tag_v2._bigtiff - self._exif.endian = self.tag_v2._endian - self._exif.load_from_fp(self.fp, self.tag_v2._offset) - if exif_info is not None: - self._exif.load(exif_info) - - # XMP tags - if ExifTags.Base.Orientation not in self._exif: - xmp_tags = self.info.get("XML:com.adobe.xmp") - if xmp_tags: - match = re.search(r'tiff:Orientation(="|>)([0-9])', xmp_tags) - if match: - self._exif[ExifTags.Base.Orientation] = int(match[2]) - - return self._exif - - def _reload_exif(self) -> None: - if self._exif is None or not self._exif._loaded: - return - self._exif._loaded = False - self.getexif() - - def get_child_images(self) -> list[ImageFile.ImageFile]: - child_images = [] - exif = self.getexif() - ifds = [] - if ExifTags.Base.SubIFDs in exif: - subifd_offsets = exif[ExifTags.Base.SubIFDs] - if subifd_offsets: - if not isinstance(subifd_offsets, tuple): - subifd_offsets = (subifd_offsets,) - for subifd_offset in subifd_offsets: - ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset)) - ifd1 = exif.get_ifd(ExifTags.IFD.IFD1) - if ifd1 and ifd1.get(513): - ifds.append((ifd1, exif._info.next)) - - offset = None - for ifd, ifd_offset in ifds: - current_offset = self.fp.tell() - if offset is None: - offset = current_offset - - fp = self.fp - thumbnail_offset = ifd.get(513) - if thumbnail_offset is not None: - thumbnail_offset += getattr(self, "_exif_offset", 0) - self.fp.seek(thumbnail_offset) - data = self.fp.read(ifd.get(514)) - fp = io.BytesIO(data) - - with open(fp) as im: - from . import TiffImagePlugin - - if thumbnail_offset is None and isinstance( - im, TiffImagePlugin.TiffImageFile - ): - im._frame_pos = [ifd_offset] - im._seek(0) - im.load() - child_images.append(im) - - if offset is not None: - self.fp.seek(offset) - return child_images - - def getim(self): - """ - Returns a capsule that points to the internal image memory. - - :returns: A capsule object. - """ - - self.load() - return self.im.ptr - - def getpalette(self, rawmode: str | None = "RGB") -> list[int] | None: - """ - Returns the image palette as a list. - - :param rawmode: The mode in which to return the palette. ``None`` will - return the palette in its current mode. - - .. versionadded:: 9.1.0 - - :returns: A list of color values [r, g, b, ...], or None if the - image has no palette. - """ - - self.load() - try: - mode = self.im.getpalettemode() - except ValueError: - return None # no palette - if rawmode is None: - rawmode = mode - return list(self.im.getpalette(mode, rawmode)) - - @property - def has_transparency_data(self) -> bool: - """ - Determine if an image has transparency data, whether in the form of an - alpha channel, a palette with an alpha channel, or a "transparency" key - in the info dictionary. - - Note the image might still appear solid, if all of the values shown - within are opaque. - - :returns: A boolean. 
- """ - return ( - self.mode in ("LA", "La", "PA", "RGBA", "RGBa") - or (self.mode == "P" and self.palette.mode.endswith("A")) - or "transparency" in self.info - ) - - def apply_transparency(self) -> None: - """ - If a P mode image has a "transparency" key in the info dictionary, - remove the key and instead apply the transparency to the palette. - Otherwise, the image is unchanged. - """ - if self.mode != "P" or "transparency" not in self.info: - return - - from . import ImagePalette - - palette = self.getpalette("RGBA") - assert palette is not None - transparency = self.info["transparency"] - if isinstance(transparency, bytes): - for i, alpha in enumerate(transparency): - palette[i * 4 + 3] = alpha - else: - palette[transparency * 4 + 3] = 0 - self.palette = ImagePalette.ImagePalette("RGBA", bytes(palette)) - self.palette.dirty = 1 - - del self.info["transparency"] - - def getpixel( - self, xy: tuple[int, int] | list[int] - ) -> float | tuple[int, ...] | None: - """ - Returns the pixel value at a given position. - - :param xy: The coordinate, given as (x, y). See - :ref:`coordinate-system`. - :returns: The pixel value. If the image is a multi-layer image, - this method returns a tuple. - """ - - self.load() - if self.pyaccess: - return self.pyaccess.getpixel(xy) - return self.im.getpixel(tuple(xy)) - - def getprojection(self) -> tuple[list[int], list[int]]: - """ - Get projection to x and y axes - - :returns: Two sequences, indicating where there are non-zero - pixels along the X-axis and the Y-axis, respectively. - """ - - self.load() - x, y = self.im.getprojection() - return list(x), list(y) - - def histogram(self, mask: Image | None = None, extrema=None) -> list[int]: - """ - Returns a histogram for the image. The histogram is returned as a - list of pixel counts, one for each pixel value in the source - image. Counts are grouped into 256 bins for each band, even if - the image has more than 8 bits per band. If the image has more - than one band, the histograms for all bands are concatenated (for - example, the histogram for an "RGB" image contains 768 values). - - A bilevel image (mode "1") is treated as a grayscale ("L") image - by this method. - - If a mask is provided, the method returns a histogram for those - parts of the image where the mask image is non-zero. The mask - image must have the same size as the image, and be either a - bi-level image (mode "1") or a grayscale image ("L"). - - :param mask: An optional mask. - :param extrema: An optional tuple of manually-specified extrema. - :returns: A list containing pixel counts. - """ - self.load() - if mask: - mask.load() - return self.im.histogram((0, 0), mask.im) - if self.mode in ("I", "F"): - if extrema is None: - extrema = self.getextrema() - return self.im.histogram(extrema) - return self.im.histogram() - - def entropy(self, mask=None, extrema=None): - """ - Calculates and returns the entropy for the image. - - A bilevel image (mode "1") is treated as a grayscale ("L") - image by this method. - - If a mask is provided, the method employs the histogram for - those parts of the image where the mask image is non-zero. - The mask image must have the same size as the image, and be - either a bi-level image (mode "1") or a grayscale image ("L"). - - :param mask: An optional mask. - :param extrema: An optional tuple of manually-specified extrema. 
- :returns: A float value representing the image entropy - """ - self.load() - if mask: - mask.load() - return self.im.entropy((0, 0), mask.im) - if self.mode in ("I", "F"): - if extrema is None: - extrema = self.getextrema() - return self.im.entropy(extrema) - return self.im.entropy() - - def paste( - self, - im: Image | str | float | tuple[float, ...], - box: Image | tuple[int, int, int, int] | tuple[int, int] | None = None, - mask: Image | None = None, - ) -> None: - """ - Pastes another image into this image. The box argument is either - a 2-tuple giving the upper left corner, a 4-tuple defining the - left, upper, right, and lower pixel coordinate, or None (same as - (0, 0)). See :ref:`coordinate-system`. If a 4-tuple is given, the size - of the pasted image must match the size of the region. - - If the modes don't match, the pasted image is converted to the mode of - this image (see the :py:meth:`~PIL.Image.Image.convert` method for - details). - - Instead of an image, the source can be a integer or tuple - containing pixel values. The method then fills the region - with the given color. When creating RGB images, you can - also use color strings as supported by the ImageColor module. - - If a mask is given, this method updates only the regions - indicated by the mask. You can use either "1", "L", "LA", "RGBA" - or "RGBa" images (if present, the alpha band is used as mask). - Where the mask is 255, the given image is copied as is. Where - the mask is 0, the current value is preserved. Intermediate - values will mix the two images together, including their alpha - channels if they have them. - - See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to - combine images with respect to their alpha channels. - - :param im: Source image or pixel value (integer, float or tuple). - :param box: An optional 4-tuple giving the region to paste into. - If a 2-tuple is used instead, it's treated as the upper left - corner. If omitted or None, the source is pasted into the - upper left corner. - - If an image is given as the second argument and there is no - third, the box defaults to (0, 0), and the second argument - is interpreted as a mask image. - :param mask: An optional mask image. - """ - - if isImageType(box): - if mask is not None: - msg = "If using second argument as mask, third argument must be None" - raise ValueError(msg) - # abbreviated paste(im, mask) syntax - mask = box - box = None - assert not isinstance(box, Image) - - if box is None: - box = (0, 0) - - if len(box) == 2: - # upper left corner given; get size from image or mask - if isImageType(im): - size = im.size - elif isImageType(mask): - size = mask.size - else: - # FIXME: use self.size here? - msg = "cannot determine region size; use 4-item box" - raise ValueError(msg) - box += (box[0] + size[0], box[1] + size[1]) - - if isinstance(im, str): - from . import ImageColor - - im = ImageColor.getcolor(im, self.mode) - - elif isImageType(im): - im.load() - if self.mode != im.mode: - if self.mode != "RGB" or im.mode not in ("LA", "RGBA", "RGBa"): - # should use an adapter for this! - im = im.convert(self.mode) - im = im.im - - self._ensure_mutable() - - if mask: - mask.load() - self.im.paste(im, box, mask.im) - else: - self.im.paste(im, box) - - def alpha_composite( - self, im: Image, dest: Sequence[int] = (0, 0), source: Sequence[int] = (0, 0) - ) -> None: - """'In-place' analog of Image.alpha_composite. Composites an image - onto this image. 
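Pasting with a mask, per the paste() docstring above; passing an RGBA overlay as both source and mask is the usual idiom for blending it onto an opaque base:

from PIL import Image

base = Image.new("RGB", (100, 100), "white")
overlay = Image.new("RGBA", (40, 40), (255, 0, 0, 128))   # half-transparent red

base.paste(overlay, (30, 30), overlay)   # the overlay's alpha band is the mask
print(base.getpixel((50, 50)))           # roughly (255, 127, 127)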
- - :param im: image to composite over this one - :param dest: Optional 2 tuple (left, top) specifying the upper - left corner in this (destination) image. - :param source: Optional 2 (left, top) tuple for the upper left - corner in the overlay source image, or 4 tuple (left, top, right, - bottom) for the bounds of the source rectangle - - Performance Note: Not currently implemented in-place in the core layer. - """ - - if not isinstance(source, (list, tuple)): - msg = "Source must be a list or tuple" - raise ValueError(msg) - if not isinstance(dest, (list, tuple)): - msg = "Destination must be a list or tuple" - raise ValueError(msg) - - if len(source) == 4: - overlay_crop_box = tuple(source) - elif len(source) == 2: - overlay_crop_box = tuple(source) + im.size - else: - msg = "Source must be a sequence of length 2 or 4" - raise ValueError(msg) - - if not len(dest) == 2: - msg = "Destination must be a sequence of length 2" - raise ValueError(msg) - if min(source) < 0: - msg = "Source must be non-negative" - raise ValueError(msg) - - # over image, crop if it's not the whole image. - if overlay_crop_box == (0, 0) + im.size: - overlay = im - else: - overlay = im.crop(overlay_crop_box) - - # target for the paste - box = tuple(dest) + (dest[0] + overlay.width, dest[1] + overlay.height) - - # destination image. don't copy if we're using the whole image. - if box == (0, 0) + self.size: - background = self - else: - background = self.crop(box) - - result = alpha_composite(background, overlay) - self.paste(result, box) - - def point( - self, - lut: Sequence[float] | Callable[[int], float] | ImagePointHandler, - mode: str | None = None, - ) -> Image: - """ - Maps this image through a lookup table or function. - - :param lut: A lookup table, containing 256 (or 65536 if - self.mode=="I" and mode == "L") values per band in the - image. A function can be used instead, it should take a - single argument. The function is called once for each - possible pixel value, and the resulting table is applied to - all bands of the image. - - It may also be an :py:class:`~PIL.Image.ImagePointHandler` - object:: - - class Example(Image.ImagePointHandler): - def point(self, data): - # Return result - :param mode: Output mode (default is same as input). This can only be used if - the source image has mode "L" or "P", and the output has mode "1" or the - source image mode is "I" and the output mode is "L". - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - self.load() - - if isinstance(lut, ImagePointHandler): - return lut.point(self) - - if callable(lut): - # if it isn't a list, it should be a function - if self.mode in ("I", "I;16", "F"): - # check if the function can be used with point_transform - # UNDONE wiredfool -- I think this prevents us from ever doing - # a gamma function point transform on > 8bit images. - scale, offset = _getscaleoffset(lut) - return self._new(self.im.point_transform(scale, offset)) - # for other modes, convert the function to a table - flatLut = [lut(i) for i in range(256)] * self.im.bands - else: - flatLut = lut - - if self.mode == "F": - # FIXME: _imaging returns a confusing error message for this case - msg = "point operation not supported for this mode" - raise ValueError(msg) - - if mode != "F": - flatLut = [round(i) for i in flatLut] - return self._new(self.im.point(flatLut, mode)) - - def putalpha(self, alpha: Image | int) -> None: - """ - Adds or replaces the alpha layer in this image. If the image - does not have an alpha layer, it's converted to "LA" or "RGBA". 
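Full alpha compositing rather than masked pasting: the in-place method above builds on the module-level Image.alpha_composite(), shown here on same-size RGBA images and then on a small offset overlay:

from PIL import Image

bg = Image.new("RGBA", (64, 64), (0, 0, 255, 255))   # opaque blue
fg = Image.new("RGBA", (64, 64), (255, 0, 0, 128))   # half-transparent red

out = Image.alpha_composite(bg, fg)     # new image; both inputs must be RGBA
print(out.getpixel((0, 0)))

badge = Image.new("RGBA", (16, 16), (0, 255, 0, 200))
bg.alpha_composite(badge, dest=(8, 8))  # in-place variant, composited at an offset
print(bg.getpixel((8, 8)))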
- The new layer must be either "L" or "1". - - :param alpha: The new alpha layer. This can either be an "L" or "1" - image having the same size as this image, or an integer. - """ - - self._ensure_mutable() - - if self.mode not in ("LA", "PA", "RGBA"): - # attempt to promote self to a matching alpha mode - try: - mode = getmodebase(self.mode) + "A" - try: - self.im.setmode(mode) - except (AttributeError, ValueError) as e: - # do things the hard way - im = self.im.convert(mode) - if im.mode not in ("LA", "PA", "RGBA"): - msg = "alpha channel could not be added" - raise ValueError(msg) from e # sanity check - self.im = im - self.pyaccess = None - self._mode = self.im.mode - except KeyError as e: - msg = "illegal image mode" - raise ValueError(msg) from e - - if self.mode in ("LA", "PA"): - band = 1 - else: - band = 3 - - if isImageType(alpha): - # alpha layer - if alpha.mode not in ("1", "L"): - msg = "illegal image mode" - raise ValueError(msg) - alpha.load() - if alpha.mode == "1": - alpha = alpha.convert("L") - else: - # constant alpha - alpha = cast(int, alpha) # see python/typing#1013 - try: - self.im.fillband(band, alpha) - except (AttributeError, ValueError): - # do things the hard way - alpha = new("L", self.size, alpha) - else: - return - - self.im.putband(alpha.im, band) - - def putdata( - self, - data: Sequence[float] | Sequence[Sequence[int]], - scale: float = 1.0, - offset: float = 0.0, - ) -> None: - """ - Copies pixel data from a flattened sequence object into the image. The - values should start at the upper left corner (0, 0), continue to the - end of the line, followed directly by the first value of the second - line, and so on. Data will be read until either the image or the - sequence ends. The scale and offset values are used to adjust the - sequence values: **pixel = value*scale + offset**. - - :param data: A flattened sequence object. - :param scale: An optional scale value. The default is 1.0. - :param offset: An optional offset value. The default is 0.0. - """ - - self._ensure_mutable() - - self.im.putdata(data, scale, offset) - - def putpalette(self, data, rawmode="RGB") -> None: - """ - Attaches a palette to this image. The image must be a "P", "PA", "L" - or "LA" image. - - The palette sequence must contain at most 256 colors, made up of one - integer value for each channel in the raw mode. - For example, if the raw mode is "RGB", then it can contain at most 768 - values, made up of red, green and blue values for the corresponding pixel - index in the 256 colors. - If the raw mode is "RGBA", then it can contain at most 1024 values, - containing red, green, blue and alpha values. - - Alternatively, an 8-bit string may be used instead of an integer sequence. - - :param data: A palette sequence (either a list or a string). - :param rawmode: The raw mode of the palette. Either "RGB", "RGBA", or a mode - that can be transformed to "RGB" or "RGBA" (e.g. "R", "BGR;15", "RGBA;L"). - """ - from . import ImagePalette - - if self.mode not in ("L", "LA", "P", "PA"): - msg = "illegal image mode" - raise ValueError(msg) - if isinstance(data, ImagePalette.ImagePalette): - palette = ImagePalette.raw(data.rawmode, data.palette) - else: - if not isinstance(data, bytes): - data = bytes(data) - palette = ImagePalette.raw(rawmode, data) - self._mode = "PA" if "A" in self.mode else "P" - self.palette = palette - self.palette.mode = "RGBA" if "A" in rawmode else "RGB" - self.load() # install new palette - - def putpixel( - self, xy: tuple[int, int], value: float | tuple[int, ...] 
| list[int] - ) -> None: - """ - Modifies the pixel at the given position. The color is given as - a single numerical value for single-band images, and a tuple for - multi-band images. In addition to this, RGB and RGBA tuples are - accepted for P and PA images. - - Note that this method is relatively slow. For more extensive changes, - use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` - module instead. - - See: - - * :py:meth:`~PIL.Image.Image.paste` - * :py:meth:`~PIL.Image.Image.putdata` - * :py:mod:`~PIL.ImageDraw` - - :param xy: The pixel coordinate, given as (x, y). See - :ref:`coordinate-system`. - :param value: The pixel value. - """ - - if self.readonly: - self._copy() - self.load() - - if self.pyaccess: - return self.pyaccess.putpixel(xy, value) - - if ( - self.mode in ("P", "PA") - and isinstance(value, (list, tuple)) - and len(value) in [3, 4] - ): - # RGB or RGBA value for a P or PA image - if self.mode == "PA": - alpha = value[3] if len(value) == 4 else 255 - value = value[:3] - palette_index = self.palette.getcolor(value, self) - value = (palette_index, alpha) if self.mode == "PA" else palette_index - return self.im.putpixel(xy, value) - - def remap_palette(self, dest_map, source_palette=None): - """ - Rewrites the image to reorder the palette. - - :param dest_map: A list of indexes into the original palette. - e.g. ``[1,0]`` would swap a two item palette, and ``list(range(256))`` - is the identity transform. - :param source_palette: Bytes or None. - :returns: An :py:class:`~PIL.Image.Image` object. - - """ - from . import ImagePalette - - if self.mode not in ("L", "P"): - msg = "illegal image mode" - raise ValueError(msg) - - bands = 3 - palette_mode = "RGB" - if source_palette is None: - if self.mode == "P": - self.load() - palette_mode = self.im.getpalettemode() - if palette_mode == "RGBA": - bands = 4 - source_palette = self.im.getpalette(palette_mode, palette_mode) - else: # L-mode - source_palette = bytearray(i // 3 for i in range(768)) - - palette_bytes = b"" - new_positions = [0] * 256 - - # pick only the used colors from the palette - for i, oldPosition in enumerate(dest_map): - palette_bytes += source_palette[ - oldPosition * bands : oldPosition * bands + bands - ] - new_positions[oldPosition] = i - - # replace the palette color id of all pixel with the new id - - # Palette images are [0..255], mapped through a 1 or 3 - # byte/color map. We need to remap the whole image - # from palette 1 to palette 2. New_positions is - # an array of indexes into palette 1. Palette 2 is - # palette 1 with any holes removed. - - # We're going to leverage the convert mechanism to use the - # C code to remap the image from palette 1 to palette 2, - # by forcing the source image into 'L' mode and adding a - # mapping 'L' mode palette, then converting back to 'L' - # sans palette thus converting the image bytes, then - # assigning the optimized RGB palette. - - # perf reference, 9500x4000 gif, w/~135 colors - # 14 sec prepatch, 1 sec postpatch with optimization forced. - - mapping_palette = bytearray(new_positions) - - m_im = self.copy() - m_im._mode = "P" - - m_im.palette = ImagePalette.ImagePalette( - palette_mode, palette=mapping_palette * bands - ) - # possibly set palette dirty, then - # m_im.putpalette(mapping_palette, 'L') # converts to 'P' - # or just force it. 
- # UNDONE -- this is part of the general issue with palettes - m_im.im.putpalette(palette_mode, palette_mode + ";L", m_im.palette.tobytes()) - - m_im = m_im.convert("L") - - m_im.putpalette(palette_bytes, palette_mode) - m_im.palette = ImagePalette.ImagePalette(palette_mode, palette=palette_bytes) - - if "transparency" in self.info: - try: - m_im.info["transparency"] = dest_map.index(self.info["transparency"]) - except ValueError: - if "transparency" in m_im.info: - del m_im.info["transparency"] - - return m_im - - def _get_safe_box(self, size, resample, box): - """Expands the box so it includes adjacent pixels - that may be used by resampling with the given resampling filter. - """ - filter_support = _filters_support[resample] - 0.5 - scale_x = (box[2] - box[0]) / size[0] - scale_y = (box[3] - box[1]) / size[1] - support_x = filter_support * scale_x - support_y = filter_support * scale_y - - return ( - max(0, int(box[0] - support_x)), - max(0, int(box[1] - support_y)), - min(self.size[0], math.ceil(box[2] + support_x)), - min(self.size[1], math.ceil(box[3] + support_y)), - ) - - def resize( - self, - size: tuple[int, int], - resample: int | None = None, - box: tuple[float, float, float, float] | None = None, - reducing_gap: float | None = None, - ) -> Image: - """ - Returns a resized copy of this image. - - :param size: The requested size in pixels, as a 2-tuple: - (width, height). - :param resample: An optional resampling filter. This can be - one of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`, - :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`, - :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`. - If the image has mode "1" or "P", it is always set to - :py:data:`Resampling.NEAREST`. If the image mode specifies a number - of bits, such as "I;16", then the default filter is - :py:data:`Resampling.NEAREST`. Otherwise, the default filter is - :py:data:`Resampling.BICUBIC`. See: :ref:`concept-filters`. - :param box: An optional 4-tuple of floats providing - the source image region to be scaled. - The values must be within (0, 0, width, height) rectangle. - If omitted or None, the entire source is used. - :param reducing_gap: Apply optimization by resizing the image - in two steps. First, reducing the image by integer times - using :py:meth:`~PIL.Image.Image.reduce`. - Second, resizing using regular resampling. The last step - changes size no less than by ``reducing_gap`` times. - ``reducing_gap`` may be None (no first step is performed) - or should be greater than 1.0. The bigger ``reducing_gap``, - the closer the result to the fair resampling. - The smaller ``reducing_gap``, the faster resizing. - With ``reducing_gap`` greater or equal to 3.0, the result is - indistinguishable from fair resampling in most cases. - The default value is None (no optimization). - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - if resample is None: - type_special = ";" in self.mode - resample = Resampling.NEAREST if type_special else Resampling.BICUBIC - elif resample not in ( - Resampling.NEAREST, - Resampling.BILINEAR, - Resampling.BICUBIC, - Resampling.LANCZOS, - Resampling.BOX, - Resampling.HAMMING, - ): - msg = f"Unknown resampling filter ({resample})." 
- - filters = [ - f"{filter[1]} ({filter[0]})" - for filter in ( - (Resampling.NEAREST, "Image.Resampling.NEAREST"), - (Resampling.LANCZOS, "Image.Resampling.LANCZOS"), - (Resampling.BILINEAR, "Image.Resampling.BILINEAR"), - (Resampling.BICUBIC, "Image.Resampling.BICUBIC"), - (Resampling.BOX, "Image.Resampling.BOX"), - (Resampling.HAMMING, "Image.Resampling.HAMMING"), - ) - ] - msg += f" Use {', '.join(filters[:-1])} or {filters[-1]}" - raise ValueError(msg) - - if reducing_gap is not None and reducing_gap < 1.0: - msg = "reducing_gap must be 1.0 or greater" - raise ValueError(msg) - - self.load() - if box is None: - box = (0, 0) + self.size - - if self.size == size and box == (0, 0) + self.size: - return self.copy() - - if self.mode in ("1", "P"): - resample = Resampling.NEAREST - - if self.mode in ["LA", "RGBA"] and resample != Resampling.NEAREST: - im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) - im = im.resize(size, resample, box) - return im.convert(self.mode) - - self.load() - - if reducing_gap is not None and resample != Resampling.NEAREST: - factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1 - factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1 - if factor_x > 1 or factor_y > 1: - reduce_box = self._get_safe_box(size, resample, box) - factor = (factor_x, factor_y) - self = ( - self.reduce(factor, box=reduce_box) - if callable(self.reduce) - else Image.reduce(self, factor, box=reduce_box) - ) - box = ( - (box[0] - reduce_box[0]) / factor_x, - (box[1] - reduce_box[1]) / factor_y, - (box[2] - reduce_box[0]) / factor_x, - (box[3] - reduce_box[1]) / factor_y, - ) - - return self._new(self.im.resize(size, resample, box)) - - def reduce( - self, - factor: int | tuple[int, int], - box: tuple[int, int, int, int] | None = None, - ) -> Image: - """ - Returns a copy of the image reduced ``factor`` times. - If the size of the image is not dividable by ``factor``, - the resulting size will be rounded up. - - :param factor: A greater than 0 integer or tuple of two integers - for width and height separately. - :param box: An optional 4-tuple of ints providing - the source image region to be reduced. - The values must be within ``(0, 0, width, height)`` rectangle. - If omitted or ``None``, the entire source is used. - """ - if not isinstance(factor, (list, tuple)): - factor = (factor, factor) - - if box is None: - box = (0, 0) + self.size - - if factor == (1, 1) and box == (0, 0) + self.size: - return self.copy() - - if self.mode in ["LA", "RGBA"]: - im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) - im = im.reduce(factor, box) - return im.convert(self.mode) - - self.load() - - return self._new(self.im.reduce(factor, box)) - - def rotate( - self, - angle: float, - resample: Resampling = Resampling.NEAREST, - expand: int | bool = False, - center: tuple[float, float] | None = None, - translate: tuple[int, int] | None = None, - fillcolor: float | tuple[float, ...] | str | None = None, - ) -> Image: - """ - Returns a rotated copy of this image. This method returns a - copy of this image, rotated the given number of degrees counter - clockwise around its centre. - - :param angle: In degrees counter clockwise. - :param resample: An optional resampling filter. This can be - one of :py:data:`Resampling.NEAREST` (use nearest neighbour), - :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2 - environment), or :py:data:`Resampling.BICUBIC` (cubic spline - interpolation in a 4x4 environment). 
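Usage of resize() and reduce() as implemented above, including the box and reducing_gap options from the docstring; a sketch assuming Pillow's Resampling enum:

from PIL import Image

im = Image.new("RGB", (800, 600), "gray")

small = im.resize((200, 150), Image.Resampling.LANCZOS)

# Scale only the left half of the source, with the two-step reduce optimisation.
left = im.resize((100, 150), Image.Resampling.BICUBIC,
                 box=(0, 0, 400, 600), reducing_gap=3.0)

halved = im.reduce(2)                      # integer downscale by a factor of 2
print(small.size, left.size, halved.size)  # (200, 150) (100, 150) (400, 300)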
If omitted, or if the image has - mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`. - See :ref:`concept-filters`. - :param expand: Optional expansion flag. If true, expands the output - image to make it large enough to hold the entire rotated image. - If false or omitted, make the output image the same size as the - input image. Note that the expand flag assumes rotation around - the center and no translation. - :param center: Optional center of rotation (a 2-tuple). Origin is - the upper left corner. Default is the center of the image. - :param translate: An optional post-rotate translation (a 2-tuple). - :param fillcolor: An optional color for area outside the rotated image. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - angle = angle % 360.0 - - # Fast paths regardless of filter, as long as we're not - # translating or changing the center. - if not (center or translate): - if angle == 0: - return self.copy() - if angle == 180: - return self.transpose(Transpose.ROTATE_180) - if angle in (90, 270) and (expand or self.width == self.height): - return self.transpose( - Transpose.ROTATE_90 if angle == 90 else Transpose.ROTATE_270 - ) - - # Calculate the affine matrix. Note that this is the reverse - # transformation (from destination image to source) because we - # want to interpolate the (discrete) destination pixel from - # the local area around the (floating) source pixel. - - # The matrix we actually want (note that it operates from the right): - # (1, 0, tx) (1, 0, cx) ( cos a, sin a, 0) (1, 0, -cx) - # (0, 1, ty) * (0, 1, cy) * (-sin a, cos a, 0) * (0, 1, -cy) - # (0, 0, 1) (0, 0, 1) ( 0, 0, 1) (0, 0, 1) - - # The reverse matrix is thus: - # (1, 0, cx) ( cos -a, sin -a, 0) (1, 0, -cx) (1, 0, -tx) - # (0, 1, cy) * (-sin -a, cos -a, 0) * (0, 1, -cy) * (0, 1, -ty) - # (0, 0, 1) ( 0, 0, 1) (0, 0, 1) (0, 0, 1) - - # In any case, the final translation may be updated at the end to - # compensate for the expand flag. - - w, h = self.size - - if translate is None: - post_trans = (0, 0) - else: - post_trans = translate - if center is None: - center = (w / 2, h / 2) - - angle = -math.radians(angle) - matrix = [ - round(math.cos(angle), 15), - round(math.sin(angle), 15), - 0.0, - round(-math.sin(angle), 15), - round(math.cos(angle), 15), - 0.0, - ] - - def transform(x, y, matrix): - (a, b, c, d, e, f) = matrix - return a * x + b * y + c, d * x + e * y + f - - matrix[2], matrix[5] = transform( - -center[0] - post_trans[0], -center[1] - post_trans[1], matrix - ) - matrix[2] += center[0] - matrix[5] += center[1] - - if expand: - # calculate output size - xx = [] - yy = [] - for x, y in ((0, 0), (w, 0), (w, h), (0, h)): - x, y = transform(x, y, matrix) - xx.append(x) - yy.append(y) - nw = math.ceil(max(xx)) - math.floor(min(xx)) - nh = math.ceil(max(yy)) - math.floor(min(yy)) - - # We multiply a translation matrix from the right. Because of its - # special form, this is the same as taking the image of the - # translation vector as new translation vector. - matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix) - w, h = nw, nh - - return self.transform( - (w, h), Transform.AFFINE, matrix, resample, fillcolor=fillcolor - ) - - def save( - self, fp: StrOrBytesPath | IO[bytes], format: str | None = None, **params: Any - ) -> None: - """ - Saves this image under the given filename. If no format is - specified, the format to use is determined from the filename - extension, if possible. 
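Rotation as implemented above, with the expand flag and a fill colour for the uncovered corners; a sketch:

from PIL import Image

im = Image.new("RGB", (200, 100), "navy")

r90 = im.rotate(90, expand=True)                      # fast path via transpose
r30 = im.rotate(30, expand=True, fillcolor="white")   # corners filled with white
print(r90.size, r30.size)                             # (100, 200) and a larger box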
- - Keyword options can be used to provide additional instructions - to the writer. If a writer doesn't recognise an option, it is - silently ignored. The available options are described in the - :doc:`image format documentation - <../handbook/image-file-formats>` for each writer. - - You can use a file object instead of a filename. In this case, - you must always specify the format. The file object must - implement the ``seek``, ``tell``, and ``write`` - methods, and be opened in binary mode. - - :param fp: A filename (string), os.PathLike object or file object. - :param format: Optional format override. If omitted, the - format to use is determined from the filename extension. - If a file object was used instead of a filename, this - parameter should always be used. - :param params: Extra parameters to the image writer. - :returns: None - :exception ValueError: If the output format could not be determined - from the file name. Use the format option to solve this. - :exception OSError: If the file could not be written. The file - may have been created, and may contain partial data. - """ - - filename: str | bytes = "" - open_fp = False - if is_path(fp): - filename = os.path.realpath(os.fspath(fp)) - open_fp = True - elif fp == sys.stdout: - try: - fp = sys.stdout.buffer - except AttributeError: - pass - if not filename and hasattr(fp, "name") and is_path(fp.name): - # only set the name for metadata purposes - filename = os.path.realpath(os.fspath(fp.name)) - - # may mutate self! - self._ensure_mutable() - - save_all = params.pop("save_all", False) - self.encoderinfo = params - self.encoderconfig: tuple[Any, ...] = () - - preinit() - - filename_ext = os.path.splitext(filename)[1].lower() - ext = filename_ext.decode() if isinstance(filename_ext, bytes) else filename_ext - - if not format: - if ext not in EXTENSION: - init() - try: - format = EXTENSION[ext] - except KeyError as e: - msg = f"unknown file extension: {ext}" - raise ValueError(msg) from e - - if format.upper() not in SAVE: - init() - if save_all: - save_handler = SAVE_ALL[format.upper()] - else: - save_handler = SAVE[format.upper()] - - created = False - if open_fp: - created = not os.path.exists(filename) - if params.get("append", False): - # Open also for reading ("+"), because TIFF save_all - # writer needs to go back and edit the written data. - fp = builtins.open(filename, "r+b") - else: - fp = builtins.open(filename, "w+b") - else: - fp = cast(IO[bytes], fp) - - try: - save_handler(self, fp, filename) - except Exception: - if open_fp: - fp.close() - if created: - try: - os.remove(filename) - except PermissionError: - pass - raise - if open_fp: - fp.close() - - def seek(self, frame: int) -> None: - """ - Seeks to the given frame in this sequence file. If you seek - beyond the end of the sequence, the method raises an - ``EOFError`` exception. When a sequence file is opened, the - library automatically seeks to frame 0. - - See :py:meth:`~PIL.Image.Image.tell`. - - If defined, :attr:`~PIL.Image.Image.n_frames` refers to the - number of available frames. - - :param frame: Frame number, starting at 0. - :exception EOFError: If the call attempts to seek beyond the end - of the sequence. - """ - - # overridden by file handlers - if frame != 0: - msg = "no more images in file" - raise EOFError(msg) - - def show(self, title: str | None = None) -> None: - """ - Displays this image. This method is mainly intended for debugging purposes. - - This method calls :py:func:`PIL.ImageShow.show` internally. 
You can use - :py:func:`PIL.ImageShow.register` to override its default behaviour. - - The image is first saved to a temporary file. By default, it will be in - PNG format. - - On Unix, the image is then opened using the **xdg-open**, **display**, - **gm**, **eog** or **xv** utility, depending on which one can be found. - - On macOS, the image is opened with the native Preview application. - - On Windows, the image is opened with the standard PNG display utility. - - :param title: Optional title to use for the image window, where possible. - """ - - _show(self, title=title) - - def split(self) -> tuple[Image, ...]: - """ - Split this image into individual bands. This method returns a - tuple of individual image bands from an image. For example, - splitting an "RGB" image creates three new images each - containing a copy of one of the original bands (red, green, - blue). - - If you need only one band, :py:meth:`~PIL.Image.Image.getchannel` - method can be more convenient and faster. - - :returns: A tuple containing bands. - """ - - self.load() - if self.im.bands == 1: - return (self.copy(),) - return tuple(map(self._new, self.im.split())) - - def getchannel(self, channel: int | str) -> Image: - """ - Returns an image containing a single channel of the source image. - - :param channel: What channel to return. Could be index - (0 for "R" channel of "RGB") or channel name - ("A" for alpha channel of "RGBA"). - :returns: An image in "L" mode. - - .. versionadded:: 4.3.0 - """ - self.load() - - if isinstance(channel, str): - try: - channel = self.getbands().index(channel) - except ValueError as e: - msg = f'The image has no channel "{channel}"' - raise ValueError(msg) from e - - return self._new(self.im.getband(channel)) - - def tell(self) -> int: - """ - Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. - - If defined, :attr:`~PIL.Image.Image.n_frames` refers to the - number of available frames. - - :returns: Frame number, starting with 0. - """ - return 0 - - def thumbnail( - self, - size: tuple[float, float], - resample: Resampling = Resampling.BICUBIC, - reducing_gap: float | None = 2.0, - ) -> None: - """ - Make this image into a thumbnail. This method modifies the - image to contain a thumbnail version of itself, no larger than - the given size. This method calculates an appropriate thumbnail - size to preserve the aspect of the image, calls the - :py:meth:`~PIL.Image.Image.draft` method to configure the file reader - (where applicable), and finally resizes the image. - - Note that this function modifies the :py:class:`~PIL.Image.Image` - object in place. If you need to use the full resolution image as well, - apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original - image. - - :param size: The requested size in pixels, as a 2-tuple: - (width, height). - :param resample: Optional resampling filter. This can be one - of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`, - :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`, - :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`. - If omitted, it defaults to :py:data:`Resampling.BICUBIC`. - (was :py:data:`Resampling.NEAREST` prior to version 2.5.0). - See: :ref:`concept-filters`. - :param reducing_gap: Apply optimization by resizing the image - in two steps. First, reducing the image by integer times - using :py:meth:`~PIL.Image.Image.reduce` or - :py:meth:`~PIL.Image.Image.draft` for JPEG images. - Second, resizing using regular resampling. 
The last step - changes size no less than by ``reducing_gap`` times. - ``reducing_gap`` may be None (no first step is performed) - or should be greater than 1.0. The bigger ``reducing_gap``, - the closer the result to the fair resampling. - The smaller ``reducing_gap``, the faster resizing. - With ``reducing_gap`` greater or equal to 3.0, the result is - indistinguishable from fair resampling in most cases. - The default value is 2.0 (very close to fair resampling - while still being faster in many cases). - :returns: None - """ - - provided_size = tuple(map(math.floor, size)) - - def preserve_aspect_ratio() -> tuple[int, int] | None: - def round_aspect(number, key): - return max(min(math.floor(number), math.ceil(number), key=key), 1) - - x, y = provided_size - if x >= self.width and y >= self.height: - return None - - aspect = self.width / self.height - if x / y >= aspect: - x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y)) - else: - y = round_aspect( - x / aspect, key=lambda n: 0 if n == 0 else abs(aspect - x / n) - ) - return x, y - - box = None - final_size: tuple[int, int] - if reducing_gap is not None: - preserved_size = preserve_aspect_ratio() - if preserved_size is None: - return - final_size = preserved_size - - res = self.draft( - None, (int(size[0] * reducing_gap), int(size[1] * reducing_gap)) - ) - if res is not None: - box = res[1] - if box is None: - self.load() - - # load() may have changed the size of the image - preserved_size = preserve_aspect_ratio() - if preserved_size is None: - return - final_size = preserved_size - - if self.size != final_size: - im = self.resize(final_size, resample, box=box, reducing_gap=reducing_gap) - - self.im = im.im - self._size = final_size - self._mode = self.im.mode - - self.readonly = 0 - self.pyaccess = None - - # FIXME: the different transform methods need further explanation - # instead of bloating the method docs, add a separate chapter. - def transform( - self, - size: tuple[int, int], - method: Transform | ImageTransformHandler | SupportsGetData, - data: Sequence[Any] | None = None, - resample: int = Resampling.NEAREST, - fill: int = 1, - fillcolor: float | tuple[float, ...] | str | None = None, - ) -> Image: - """ - Transforms this image. This method creates a new image with the - given size, and the same mode as the original, and copies data - to the new image using the given transform. - - :param size: The output size in pixels, as a 2-tuple: - (width, height). - :param method: The transformation method. This is one of - :py:data:`Transform.EXTENT` (cut out a rectangular subregion), - :py:data:`Transform.AFFINE` (affine transform), - :py:data:`Transform.PERSPECTIVE` (perspective transform), - :py:data:`Transform.QUAD` (map a quadrilateral to a rectangle), or - :py:data:`Transform.MESH` (map a number of source quadrilaterals - in one operation). - - It may also be an :py:class:`~PIL.Image.ImageTransformHandler` - object:: - - class Example(Image.ImageTransformHandler): - def transform(self, size, data, resample, fill=1): - # Return result - - Implementations of :py:class:`~PIL.Image.ImageTransformHandler` - for some of the :py:class:`Transform` methods are provided - in :py:mod:`~PIL.ImageTransform`. - - It may also be an object with a ``method.getdata`` method - that returns a tuple supplying new ``method`` and ``data`` values:: - - class Example: - def getdata(self): - method = Image.Transform.EXTENT - data = (0, 0, 100, 100) - return method, data - :param data: Extra data to the transformation method. 
- :param resample: Optional resampling filter. It can be one of - :py:data:`Resampling.NEAREST` (use nearest neighbour), - :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2 - environment), or :py:data:`Resampling.BICUBIC` (cubic spline - interpolation in a 4x4 environment). If omitted, or if the image - has mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`. - See: :ref:`concept-filters`. - :param fill: If ``method`` is an - :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of - the arguments passed to it. Otherwise, it is unused. - :param fillcolor: Optional fill color for the area outside the - transform in the output image. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - if self.mode in ("LA", "RGBA") and resample != Resampling.NEAREST: - return ( - self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) - .transform(size, method, data, resample, fill, fillcolor) - .convert(self.mode) - ) - - if isinstance(method, ImageTransformHandler): - return method.transform(size, self, resample=resample, fill=fill) - - if hasattr(method, "getdata"): - # compatibility w. old-style transform objects - method, data = method.getdata() - - if data is None: - msg = "missing method data" - raise ValueError(msg) - - im = new(self.mode, size, fillcolor) - if self.mode == "P" and self.palette: - im.palette = self.palette.copy() - im.info = self.info.copy() - if method == Transform.MESH: - # list of quads - for box, quad in data: - im.__transformer( - box, self, Transform.QUAD, quad, resample, fillcolor is None - ) - else: - im.__transformer( - (0, 0) + size, self, method, data, resample, fillcolor is None - ) - - return im - - def __transformer( - self, box, image, method, data, resample=Resampling.NEAREST, fill=1 - ): - w = box[2] - box[0] - h = box[3] - box[1] - - if method == Transform.AFFINE: - data = data[:6] - - elif method == Transform.EXTENT: - # convert extent to an affine transform - x0, y0, x1, y1 = data - xs = (x1 - x0) / w - ys = (y1 - y0) / h - method = Transform.AFFINE - data = (xs, 0, x0, 0, ys, y0) - - elif method == Transform.PERSPECTIVE: - data = data[:8] - - elif method == Transform.QUAD: - # quadrilateral warp. data specifies the four corners - # given as NW, SW, SE, and NE. - nw = data[:2] - sw = data[2:4] - se = data[4:6] - ne = data[6:8] - x0, y0 = nw - As = 1.0 / w - At = 1.0 / h - data = ( - x0, - (ne[0] - x0) * As, - (sw[0] - x0) * At, - (se[0] - sw[0] - ne[0] + x0) * As * At, - y0, - (ne[1] - y0) * As, - (sw[1] - y0) * At, - (se[1] - sw[1] - ne[1] + y0) * As * At, - ) - - else: - msg = "unknown transformation method" - raise ValueError(msg) - - if resample not in ( - Resampling.NEAREST, - Resampling.BILINEAR, - Resampling.BICUBIC, - ): - if resample in (Resampling.BOX, Resampling.HAMMING, Resampling.LANCZOS): - msg = { - Resampling.BOX: "Image.Resampling.BOX", - Resampling.HAMMING: "Image.Resampling.HAMMING", - Resampling.LANCZOS: "Image.Resampling.LANCZOS", - }[resample] + f" ({resample}) cannot be used." - else: - msg = f"Unknown resampling filter ({resample})." 
- - filters = [ - f"{filter[1]} ({filter[0]})" - for filter in ( - (Resampling.NEAREST, "Image.Resampling.NEAREST"), - (Resampling.BILINEAR, "Image.Resampling.BILINEAR"), - (Resampling.BICUBIC, "Image.Resampling.BICUBIC"), - ) - ] - msg += f" Use {', '.join(filters[:-1])} or {filters[-1]}" - raise ValueError(msg) - - image.load() - - self.load() - - if image.mode in ("1", "P"): - resample = Resampling.NEAREST - - self.im.transform(box, image.im, method, data, resample, fill) - - def transpose(self, method: Transpose) -> Image: - """ - Transpose image (flip or rotate in 90 degree steps) - - :param method: One of :py:data:`Transpose.FLIP_LEFT_RIGHT`, - :py:data:`Transpose.FLIP_TOP_BOTTOM`, :py:data:`Transpose.ROTATE_90`, - :py:data:`Transpose.ROTATE_180`, :py:data:`Transpose.ROTATE_270`, - :py:data:`Transpose.TRANSPOSE` or :py:data:`Transpose.TRANSVERSE`. - :returns: Returns a flipped or rotated copy of this image. - """ - - self.load() - return self._new(self.im.transpose(method)) - - def effect_spread(self, distance: int) -> Image: - """ - Randomly spread pixels in an image. - - :param distance: Distance to spread pixels. - """ - self.load() - return self._new(self.im.effect_spread(distance)) - - def toqimage(self): - """Returns a QImage copy of this image""" - from . import ImageQt - - if not ImageQt.qt_is_installed: - msg = "Qt bindings are not installed" - raise ImportError(msg) - return ImageQt.toqimage(self) - - def toqpixmap(self): - """Returns a QPixmap copy of this image""" - from . import ImageQt - - if not ImageQt.qt_is_installed: - msg = "Qt bindings are not installed" - raise ImportError(msg) - return ImageQt.toqpixmap(self) - - -# -------------------------------------------------------------------- -# Abstract handlers. - - -class ImagePointHandler: - """ - Used as a mixin by point transforms - (for use with :py:meth:`~PIL.Image.Image.point`) - """ - - @abc.abstractmethod - def point(self, im: Image) -> Image: - pass - - -class ImageTransformHandler: - """ - Used as a mixin by geometry transforms - (for use with :py:meth:`~PIL.Image.Image.transform`) - """ - - @abc.abstractmethod - def transform( - self, - size: tuple[int, int], - image: Image, - **options: Any, - ) -> Image: - pass - - -# -------------------------------------------------------------------- -# Factories - -# -# Debugging - - -def _wedge() -> Image: - """Create grayscale wedge (for debugging only)""" - - return Image()._new(core.wedge("L")) - - -def _check_size(size: Any) -> None: - """ - Common check to enforce type and sanity check on size tuples - - :param size: Should be a 2 tuple of (width, height) - :returns: None, or raises a ValueError - """ - - if not isinstance(size, (list, tuple)): - msg = "Size must be a list or tuple" - raise ValueError(msg) - if len(size) != 2: - msg = "Size must be a sequence of length 2" - raise ValueError(msg) - if size[0] < 0 or size[1] < 0: - msg = "Width and height must be >= 0" - raise ValueError(msg) - - -def new( - mode: str, - size: tuple[int, int] | list[int], - color: float | tuple[float, ...] | str | None = 0, -) -> Image: - """ - Creates a new image with the given mode and size. - - :param mode: The mode to use for the new image. See: - :ref:`concept-modes`. - :param size: A 2-tuple, containing (width, height) in pixels. - :param color: What color to use for the image. Default is black. - If given, this should be a single integer or floating point value - for single-band modes, and a tuple for multi-band modes (one value - per band). 
When creating RGB or HSV images, you can also use color - strings as supported by the ImageColor module. If the color is - None, the image is not initialised. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - if mode in ("BGR;15", "BGR;16", "BGR;24"): - deprecate(mode, 12) - - _check_size(size) - - if color is None: - # don't initialize - return Image()._new(core.new(mode, size)) - - if isinstance(color, str): - # css3-style specifier - - from . import ImageColor - - color = ImageColor.getcolor(color, mode) - - im = Image() - if ( - mode == "P" - and isinstance(color, (list, tuple)) - and all(isinstance(i, int) for i in color) - ): - color_ints: tuple[int, ...] = cast(Tuple[int, ...], tuple(color)) - if len(color_ints) == 3 or len(color_ints) == 4: - # RGB or RGBA value for a P image - from . import ImagePalette - - im.palette = ImagePalette.ImagePalette() - color = im.palette.getcolor(color_ints) - return im._new(core.fill(mode, size, color)) - - -def frombytes( - mode: str, - size: tuple[int, int], - data: bytes | bytearray, - decoder_name: str = "raw", - *args: Any, -) -> Image: - """ - Creates a copy of an image memory from pixel data in a buffer. - - In its simplest form, this function takes three arguments - (mode, size, and unpacked pixel data). - - You can also use any pixel decoder supported by PIL. For more - information on available decoders, see the section - :ref:`Writing Your Own File Codec `. - - Note that this function decodes pixel data only, not entire images. - If you have an entire image in a string, wrap it in a - :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load - it. - - :param mode: The image mode. See: :ref:`concept-modes`. - :param size: The image size. - :param data: A byte buffer containing raw data for the given mode. - :param decoder_name: What decoder to use. - :param args: Additional parameters for the given decoder. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - _check_size(size) - - im = new(mode, size) - if im.width != 0 and im.height != 0: - decoder_args: Any = args - if len(decoder_args) == 1 and isinstance(decoder_args[0], tuple): - # may pass tuple instead of argument list - decoder_args = decoder_args[0] - - if decoder_name == "raw" and decoder_args == (): - decoder_args = mode - - im.frombytes(data, decoder_name, decoder_args) - return im - - -def frombuffer( - mode: str, size: tuple[int, int], data, decoder_name: str = "raw", *args: Any -) -> Image: - """ - Creates an image memory referencing pixel data in a byte buffer. - - This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data - in the byte buffer, where possible. This means that changes to the - original buffer object are reflected in this image). Not all modes can - share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". - - Note that this function decodes pixel data only, not entire images. - If you have an entire image file in a string, wrap it in a - :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load it. - - The default parameters used for the "raw" decoder differs from that used for - :py:func:`~PIL.Image.frombytes`. This is a bug, and will probably be fixed in a - future release. The current release issues a warning if you do this; to disable - the warning, you should provide the full set of parameters. See below for details. - - :param mode: The image mode. See: :ref:`concept-modes`. - :param size: The image size. 
- :param data: A bytes or other buffer object containing raw - data for the given mode. - :param decoder_name: What decoder to use. - :param args: Additional parameters for the given decoder. For the - default encoder ("raw"), it's recommended that you provide the - full set of parameters:: - - frombuffer(mode, size, data, "raw", mode, 0, 1) - - :returns: An :py:class:`~PIL.Image.Image` object. - - .. versionadded:: 1.1.4 - """ - - _check_size(size) - - # may pass tuple instead of argument list - if len(args) == 1 and isinstance(args[0], tuple): - args = args[0] - - if decoder_name == "raw": - if args == (): - args = mode, 0, 1 - if args[0] in _MAPMODES: - im = new(mode, (0, 0)) - im = im._new(core.map_buffer(data, size, decoder_name, 0, args)) - if mode == "P": - from . import ImagePalette - - im.palette = ImagePalette.ImagePalette("RGB", im.im.getpalette("RGB")) - im.readonly = 1 - return im - - return frombytes(mode, size, data, decoder_name, args) - - -class SupportsArrayInterface(Protocol): - """ - An object that has an ``__array_interface__`` dictionary. - """ - - @property - def __array_interface__(self) -> dict[str, Any]: - raise NotImplementedError() - - -def fromarray(obj: SupportsArrayInterface, mode: str | None = None) -> Image: - """ - Creates an image memory from an object exporting the array interface - (using the buffer protocol):: - - from PIL import Image - import numpy as np - a = np.zeros((5, 5)) - im = Image.fromarray(a) - - If ``obj`` is not contiguous, then the ``tobytes`` method is called - and :py:func:`~PIL.Image.frombuffer` is used. - - In the case of NumPy, be aware that Pillow modes do not always correspond - to NumPy dtypes. Pillow modes only offer 1-bit pixels, 8-bit pixels, - 32-bit signed integer pixels, and 32-bit floating point pixels. - - Pillow images can also be converted to arrays:: - - from PIL import Image - import numpy as np - im = Image.open("hopper.jpg") - a = np.asarray(im) - - When converting Pillow images to arrays however, only pixel values are - transferred. This means that P and PA mode images will lose their palette. - - :param obj: Object with array interface - :param mode: Optional mode to use when reading ``obj``. Will be determined from - type if ``None``. - - This will not be used to convert the data after reading, but will be used to - change how the data is read:: - - from PIL import Image - import numpy as np - a = np.full((1, 1), 300) - im = Image.fromarray(a, mode="L") - im.getpixel((0, 0)) # 44 - im = Image.fromarray(a, mode="RGB") - im.getpixel((0, 0)) # (44, 1, 0) - - See: :ref:`concept-modes` for general information about modes. - :returns: An image object. - - .. versionadded:: 1.1.6 - """ - arr = obj.__array_interface__ - shape = arr["shape"] - ndim = len(shape) - strides = arr.get("strides", None) - if mode is None: - try: - typekey = (1, 1) + shape[2:], arr["typestr"] - except KeyError as e: - msg = "Cannot handle this data type" - raise TypeError(msg) from e - try: - mode, rawmode = _fromarray_typemap[typekey] - except KeyError as e: - typekey_shape, typestr = typekey - msg = f"Cannot handle this data type: {typekey_shape}, {typestr}" - raise TypeError(msg) from e - else: - rawmode = mode - if mode in ["1", "L", "I", "P", "F"]: - ndmax = 2 - elif mode == "RGB": - ndmax = 3 - else: - ndmax = 4 - if ndim > ndmax: - msg = f"Too many dimensions: {ndim} > {ndmax}." 
- raise ValueError(msg) - - size = 1 if ndim == 1 else shape[1], shape[0] - if strides is not None: - if hasattr(obj, "tobytes"): - obj = obj.tobytes() - elif hasattr(obj, "tostring"): - obj = obj.tostring() - else: - msg = "'strides' requires either tobytes() or tostring()" - raise ValueError(msg) - - return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) - - -def fromqimage(im): - """Creates an image instance from a QImage image""" - from . import ImageQt - - if not ImageQt.qt_is_installed: - msg = "Qt bindings are not installed" - raise ImportError(msg) - return ImageQt.fromqimage(im) - - -def fromqpixmap(im): - """Creates an image instance from a QPixmap image""" - from . import ImageQt - - if not ImageQt.qt_is_installed: - msg = "Qt bindings are not installed" - raise ImportError(msg) - return ImageQt.fromqpixmap(im) - - -_fromarray_typemap = { - # (shape, typestr) => mode, rawmode - # first two members of shape are set to one - ((1, 1), "|b1"): ("1", "1;8"), - ((1, 1), "|u1"): ("L", "L"), - ((1, 1), "|i1"): ("I", "I;8"), - ((1, 1), "u2"): ("I", "I;16B"), - ((1, 1), "i2"): ("I", "I;16BS"), - ((1, 1), "u4"): ("I", "I;32B"), - ((1, 1), "i4"): ("I", "I;32BS"), - ((1, 1), "f4"): ("F", "F;32BF"), - ((1, 1), "f8"): ("F", "F;64BF"), - ((1, 1, 2), "|u1"): ("LA", "LA"), - ((1, 1, 3), "|u1"): ("RGB", "RGB"), - ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), - # shortcuts: - ((1, 1), f"{_ENDIAN}i4"): ("I", "I"), - ((1, 1), f"{_ENDIAN}f4"): ("F", "F"), -} - - -def _decompression_bomb_check(size: tuple[int, int]) -> None: - if MAX_IMAGE_PIXELS is None: - return - - pixels = max(1, size[0]) * max(1, size[1]) - - if pixels > 2 * MAX_IMAGE_PIXELS: - msg = ( - f"Image size ({pixels} pixels) exceeds limit of {2 * MAX_IMAGE_PIXELS} " - "pixels, could be decompression bomb DOS attack." - ) - raise DecompressionBombError(msg) - - if pixels > MAX_IMAGE_PIXELS: - warnings.warn( - f"Image size ({pixels} pixels) exceeds limit of {MAX_IMAGE_PIXELS} pixels, " - "could be decompression bomb DOS attack.", - DecompressionBombWarning, - ) - - -def open( - fp: StrOrBytesPath | IO[bytes], - mode: Literal["r"] = "r", - formats: list[str] | tuple[str, ...] | None = None, -) -> ImageFile.ImageFile: - """ - Opens and identifies the given image file. - - This is a lazy operation; this function identifies the file, but - the file remains open and the actual image data is not read from - the file until you try to process the data (or call the - :py:meth:`~PIL.Image.Image.load` method). See - :py:func:`~PIL.Image.new`. See :ref:`file-handling`. - - :param fp: A filename (string), os.PathLike object or a file object. - The file object must implement ``file.read``, - ``file.seek``, and ``file.tell`` methods, - and be opened in binary mode. The file object will also seek to zero - before reading. - :param mode: The mode. If given, this argument must be "r". - :param formats: A list or tuple of formats to attempt to load the file in. - This can be used to restrict the set of formats checked. - Pass ``None`` to try all supported formats. You can print the set of - available formats by running ``python3 -m PIL`` or using - the :py:func:`PIL.features.pilinfo` function. - :returns: An :py:class:`~PIL.Image.Image` object. - :exception FileNotFoundError: If the file cannot be found. - :exception PIL.UnidentifiedImageError: If the image cannot be opened and - identified. - :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO`` - instance is used for ``fp``. 
- :exception TypeError: If ``formats`` is not ``None``, a list or a tuple. - """ - - if mode != "r": - msg = f"bad mode {repr(mode)}" # type: ignore[unreachable] - raise ValueError(msg) - elif isinstance(fp, io.StringIO): - msg = ( # type: ignore[unreachable] - "StringIO cannot be used to open an image. " - "Binary data must be used instead." - ) - raise ValueError(msg) - - if formats is None: - formats = ID - elif not isinstance(formats, (list, tuple)): - msg = "formats must be a list or tuple" # type: ignore[unreachable] - raise TypeError(msg) - - exclusive_fp = False - filename: str | bytes = "" - if is_path(fp): - filename = os.path.realpath(os.fspath(fp)) - - if filename: - fp = builtins.open(filename, "rb") - exclusive_fp = True - else: - fp = cast(IO[bytes], fp) - - try: - fp.seek(0) - except (AttributeError, io.UnsupportedOperation): - fp = io.BytesIO(fp.read()) - exclusive_fp = True - - prefix = fp.read(16) - - preinit() - - warning_messages: list[str] = [] - - def _open_core( - fp: IO[bytes], - filename: str | bytes, - prefix: bytes, - formats: list[str] | tuple[str, ...], - ) -> ImageFile.ImageFile | None: - for i in formats: - i = i.upper() - if i not in OPEN: - init() - try: - factory, accept = OPEN[i] - result = not accept or accept(prefix) - if isinstance(result, str): - warning_messages.append(result) - elif result: - fp.seek(0) - im = factory(fp, filename) - _decompression_bomb_check(im.size) - return im - except (SyntaxError, IndexError, TypeError, struct.error) as e: - if WARN_POSSIBLE_FORMATS: - warning_messages.append(i + " opening failed. " + str(e)) - except BaseException: - if exclusive_fp: - fp.close() - raise - return None - - im = _open_core(fp, filename, prefix, formats) - - if im is None and formats is ID: - checked_formats = ID.copy() - if init(): - im = _open_core( - fp, - filename, - prefix, - tuple(format for format in formats if format not in checked_formats), - ) - - if im: - im._exclusive_fp = exclusive_fp - return im - - if exclusive_fp: - fp.close() - for message in warning_messages: - warnings.warn(message) - msg = "cannot identify image file %r" % (filename if filename else fp) - raise UnidentifiedImageError(msg) - - -# -# Image processing. - - -def alpha_composite(im1: Image, im2: Image) -> Image: - """ - Alpha composite im2 over im1. - - :param im1: The first image. Must have mode RGBA. - :param im2: The second image. Must have mode RGBA, and the same size as - the first image. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - im1.load() - im2.load() - return im1._new(core.alpha_composite(im1.im, im2.im)) - - -def blend(im1: Image, im2: Image, alpha: float) -> Image: - """ - Creates a new image by interpolating between two input images, using - a constant alpha:: - - out = image1 * (1.0 - alpha) + image2 * alpha - - :param im1: The first image. - :param im2: The second image. Must have the same mode and size as - the first image. - :param alpha: The interpolation alpha factor. If alpha is 0.0, a - copy of the first image is returned. If alpha is 1.0, a copy of - the second image is returned. There are no restrictions on the - alpha value. If necessary, the result is clipped to fit into - the allowed output range. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - im1.load() - im2.load() - return im1._new(core.blend(im1.im, im2.im, alpha)) - - -def composite(image1: Image, image2: Image, mask: Image) -> Image: - """ - Create composite image by blending images using a transparency mask. - - :param image1: The first image. 
- :param image2: The second image. Must have the same mode and - size as the first image. - :param mask: A mask image. This image can have mode - "1", "L", or "RGBA", and must have the same size as the - other two images. - """ - - image = image2.copy() - image.paste(image1, None, mask) - return image - - -def eval(image, *args): - """ - Applies the function (which should take one argument) to each pixel - in the given image. If the image has more than one band, the same - function is applied to each band. Note that the function is - evaluated once for each possible pixel value, so you cannot use - random components or other generators. - - :param image: The input image. - :param function: A function object, taking one integer argument. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - return image.point(args[0]) - - -def merge(mode: str, bands: Sequence[Image]) -> Image: - """ - Merge a set of single band images into a new multiband image. - - :param mode: The mode to use for the output image. See: - :ref:`concept-modes`. - :param bands: A sequence containing one single-band image for - each band in the output image. All bands must have the - same size. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - - if getmodebands(mode) != len(bands) or "*" in mode: - msg = "wrong number of bands" - raise ValueError(msg) - for band in bands[1:]: - if band.mode != getmodetype(mode): - msg = "mode mismatch" - raise ValueError(msg) - if band.size != bands[0].size: - msg = "size mismatch" - raise ValueError(msg) - for band in bands: - band.load() - return bands[0]._new(core.merge(mode, *[b.im for b in bands])) - - -# -------------------------------------------------------------------- -# Plugin registry - - -def register_open( - id: str, - factory: Callable[[IO[bytes], str | bytes], ImageFile.ImageFile], - accept: Callable[[bytes], bool | str] | None = None, -) -> None: - """ - Register an image file plugin. This function should not be used - in application code. - - :param id: An image format identifier. - :param factory: An image file factory method. - :param accept: An optional function that can be used to quickly - reject images having another format. - """ - id = id.upper() - if id not in ID: - ID.append(id) - OPEN[id] = factory, accept - - -def register_mime(id: str, mimetype: str) -> None: - """ - Registers an image MIME type by populating ``Image.MIME``. This function - should not be used in application code. - - ``Image.MIME`` provides a mapping from image format identifiers to mime - formats, but :py:meth:`~PIL.ImageFile.ImageFile.get_format_mimetype` can - provide a different result for specific images. - - :param id: An image format identifier. - :param mimetype: The image MIME type for this format. - """ - MIME[id.upper()] = mimetype - - -def register_save( - id: str, driver: Callable[[Image, IO[bytes], str | bytes], None] -) -> None: - """ - Registers an image save function. This function should not be - used in application code. - - :param id: An image format identifier. - :param driver: A function to save images in this format. - """ - SAVE[id.upper()] = driver - - -def register_save_all( - id: str, driver: Callable[[Image, IO[bytes], str | bytes], None] -) -> None: - """ - Registers an image function to save all the frames - of a multiframe format. This function should not be - used in application code. - - :param id: An image format identifier. - :param driver: A function to save images in this format. 
- """ - SAVE_ALL[id.upper()] = driver - - -def register_extension(id: str, extension: str) -> None: - """ - Registers an image extension. This function should not be - used in application code. - - :param id: An image format identifier. - :param extension: An extension used for this format. - """ - EXTENSION[extension.lower()] = id.upper() - - -def register_extensions(id: str, extensions: list[str]) -> None: - """ - Registers image extensions. This function should not be - used in application code. - - :param id: An image format identifier. - :param extensions: A list of extensions used for this format. - """ - for extension in extensions: - register_extension(id, extension) - - -def registered_extensions() -> dict[str, str]: - """ - Returns a dictionary containing all file extensions belonging - to registered plugins - """ - init() - return EXTENSION - - -def register_decoder(name: str, decoder: type[ImageFile.PyDecoder]) -> None: - """ - Registers an image decoder. This function should not be - used in application code. - - :param name: The name of the decoder - :param decoder: An ImageFile.PyDecoder object - - .. versionadded:: 4.1.0 - """ - DECODERS[name] = decoder - - -def register_encoder(name: str, encoder: type[ImageFile.PyEncoder]) -> None: - """ - Registers an image encoder. This function should not be - used in application code. - - :param name: The name of the encoder - :param encoder: An ImageFile.PyEncoder object - - .. versionadded:: 4.1.0 - """ - ENCODERS[name] = encoder - - -# -------------------------------------------------------------------- -# Simple display support. - - -def _show(image: Image, **options: Any) -> None: - from . import ImageShow - - ImageShow.show(image, **options) - - -# -------------------------------------------------------------------- -# Effects - - -def effect_mandelbrot( - size: tuple[int, int], extent: tuple[float, float, float, float], quality: int -) -> Image: - """ - Generate a Mandelbrot set covering the given extent. - - :param size: The requested size in pixels, as a 2-tuple: - (width, height). - :param extent: The extent to cover, as a 4-tuple: - (x0, y0, x1, y1). - :param quality: Quality. - """ - return Image()._new(core.effect_mandelbrot(size, extent, quality)) - - -def effect_noise(size: tuple[int, int], sigma: float) -> Image: - """ - Generate Gaussian noise centered around 128. - - :param size: The requested size in pixels, as a 2-tuple: - (width, height). - :param sigma: Standard deviation of noise. - """ - return Image()._new(core.effect_noise(size, sigma)) - - -def linear_gradient(mode: str) -> Image: - """ - Generate 256x256 linear gradient from black to white, top to bottom. - - :param mode: Input mode. - """ - return Image()._new(core.linear_gradient(mode)) - - -def radial_gradient(mode: str) -> Image: - """ - Generate 256x256 radial gradient from black to white, centre to edge. - - :param mode: Input mode. 
- """ - return Image()._new(core.radial_gradient(mode)) - - -# -------------------------------------------------------------------- -# Resources - - -def _apply_env_variables(env: dict[str, str] | None = None) -> None: - env_dict = env if env is not None else os.environ - - for var_name, setter in [ - ("PILLOW_ALIGNMENT", core.set_alignment), - ("PILLOW_BLOCK_SIZE", core.set_block_size), - ("PILLOW_BLOCKS_MAX", core.set_blocks_max), - ]: - if var_name not in env_dict: - continue - - var = env_dict[var_name].lower() - - units = 1 - for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]: - if var.endswith(postfix): - units = mul - var = var[: -len(postfix)] - - try: - var_int = int(var) * units - except ValueError: - warnings.warn(f"{var_name} is not int") - continue - - try: - setter(var_int) - except ValueError as e: - warnings.warn(f"{var_name}: {e}") - - -_apply_env_variables() -atexit.register(core.clear_cache) - - -if TYPE_CHECKING: - _ExifBase = MutableMapping[int, Any] -else: - _ExifBase = MutableMapping - - -class Exif(_ExifBase): - """ - This class provides read and write access to EXIF image data:: - - from PIL import Image - im = Image.open("exif.png") - exif = im.getexif() # Returns an instance of this class - - Information can be read and written, iterated over or deleted:: - - print(exif[274]) # 1 - exif[274] = 2 - for k, v in exif.items(): - print("Tag", k, "Value", v) # Tag 274 Value 2 - del exif[274] - - To access information beyond IFD0, :py:meth:`~PIL.Image.Exif.get_ifd` - returns a dictionary:: - - from PIL import ExifTags - im = Image.open("exif_gps.jpg") - exif = im.getexif() - gps_ifd = exif.get_ifd(ExifTags.IFD.GPSInfo) - print(gps_ifd) - - Other IFDs include ``ExifTags.IFD.Exif``, ``ExifTags.IFD.Makernote``, - ``ExifTags.IFD.Interop`` and ``ExifTags.IFD.IFD1``. - - :py:mod:`~PIL.ExifTags` also has enum classes to provide names for data:: - - print(exif[ExifTags.Base.Software]) # PIL - print(gps_ifd[ExifTags.GPS.GPSDateStamp]) # 1999:99:99 99:99:99 - """ - - endian = None - bigtiff = False - _loaded = False - - def __init__(self): - self._data = {} - self._hidden_data = {} - self._ifds = {} - self._info = None - self._loaded_exif = None - - def _fixup(self, value): - try: - if len(value) == 1 and isinstance(value, tuple): - return value[0] - except Exception: - pass - return value - - def _fixup_dict(self, src_dict): - # Helper function - # returns a dict with any single item tuples/lists as individual values - return {k: self._fixup(v) for k, v in src_dict.items()} - - def _get_ifd_dict(self, offset, group=None): - try: - # an offset pointer to the location of the nested embedded IFD. - # It should be a long, but may be corrupted. - self.fp.seek(offset) - except (KeyError, TypeError): - pass - else: - from . import TiffImagePlugin - - info = TiffImagePlugin.ImageFileDirectory_v2(self.head, group=group) - info.load(self.fp) - return self._fixup_dict(info) - - def _get_head(self): - version = b"\x2B" if self.bigtiff else b"\x2A" - if self.endian == "<": - head = b"II" + version + b"\x00" + o32le(8) - else: - head = b"MM\x00" + version + o32be(8) - if self.bigtiff: - head += o32le(8) if self.endian == "<" else o32be(8) - head += b"\x00\x00\x00\x00" - return head - - def load(self, data): - # Extract EXIF information. This is highly experimental, - # and is likely to be replaced with something better in a future - # version. - - # The EXIF record consists of a TIFF file embedded in a JPEG - # application marker (!). 
- if data == self._loaded_exif: - return - self._loaded_exif = data - self._data.clear() - self._hidden_data.clear() - self._ifds.clear() - if data and data.startswith(b"Exif\x00\x00"): - data = data[6:] - if not data: - self._info = None - return - - self.fp = io.BytesIO(data) - self.head = self.fp.read(8) - # process dictionary - from . import TiffImagePlugin - - self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head) - self.endian = self._info._endian - self.fp.seek(self._info.next) - self._info.load(self.fp) - - def load_from_fp(self, fp, offset=None): - self._loaded_exif = None - self._data.clear() - self._hidden_data.clear() - self._ifds.clear() - - # process dictionary - from . import TiffImagePlugin - - self.fp = fp - if offset is not None: - self.head = self._get_head() - else: - self.head = self.fp.read(8) - self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head) - if self.endian is None: - self.endian = self._info._endian - if offset is None: - offset = self._info.next - self.fp.tell() - self.fp.seek(offset) - self._info.load(self.fp) - - def _get_merged_dict(self): - merged_dict = dict(self) - - # get EXIF extension - if ExifTags.IFD.Exif in self: - ifd = self._get_ifd_dict(self[ExifTags.IFD.Exif], ExifTags.IFD.Exif) - if ifd: - merged_dict.update(ifd) - - # GPS - if ExifTags.IFD.GPSInfo in self: - merged_dict[ExifTags.IFD.GPSInfo] = self._get_ifd_dict( - self[ExifTags.IFD.GPSInfo], ExifTags.IFD.GPSInfo - ) - - return merged_dict - - def tobytes(self, offset: int = 8) -> bytes: - from . import TiffImagePlugin - - head = self._get_head() - ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head) - for tag, value in self.items(): - if tag in [ - ExifTags.IFD.Exif, - ExifTags.IFD.GPSInfo, - ] and not isinstance(value, dict): - value = self.get_ifd(tag) - if ( - tag == ExifTags.IFD.Exif - and ExifTags.IFD.Interop in value - and not isinstance(value[ExifTags.IFD.Interop], dict) - ): - value = value.copy() - value[ExifTags.IFD.Interop] = self.get_ifd(ExifTags.IFD.Interop) - ifd[tag] = value - return b"Exif\x00\x00" + head + ifd.tobytes(offset) - - def get_ifd(self, tag): - if tag not in self._ifds: - if tag == ExifTags.IFD.IFD1: - if self._info is not None and self._info.next != 0: - self._ifds[tag] = self._get_ifd_dict(self._info.next) - elif tag in [ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo]: - offset = self._hidden_data.get(tag, self.get(tag)) - if offset is not None: - self._ifds[tag] = self._get_ifd_dict(offset, tag) - elif tag in [ExifTags.IFD.Interop, ExifTags.IFD.Makernote]: - if ExifTags.IFD.Exif not in self._ifds: - self.get_ifd(ExifTags.IFD.Exif) - tag_data = self._ifds[ExifTags.IFD.Exif][tag] - if tag == ExifTags.IFD.Makernote: - from .TiffImagePlugin import ImageFileDirectory_v2 - - if tag_data[:8] == b"FUJIFILM": - ifd_offset = i32le(tag_data, 8) - ifd_data = tag_data[ifd_offset:] - - makernote = {} - for i in range(0, struct.unpack(" 4: - (offset,) = struct.unpack("H", tag_data[:2])[0]): - ifd_tag, typ, count, data = struct.unpack( - ">HHL4s", tag_data[i * 12 + 2 : (i + 1) * 12 + 2] - ) - if ifd_tag == 0x1101: - # CameraInfo - (offset,) = struct.unpack(">L", data) - self.fp.seek(offset) - - camerainfo = {"ModelID": self.fp.read(4)} - - self.fp.read(4) - # Seconds since 2000 - camerainfo["TimeStamp"] = i32le(self.fp.read(12)) - - self.fp.read(4) - camerainfo["InternalSerialNumber"] = self.fp.read(4) - - self.fp.read(12) - parallax = self.fp.read(4) - handler = ImageFileDirectory_v2._load_dispatch[ - TiffTags.FLOAT - ][1] - camerainfo["Parallax"] = handler( - 
ImageFileDirectory_v2(), parallax, False - ) - - self.fp.read(4) - camerainfo["Category"] = self.fp.read(2) - - makernote = {0x1101: dict(self._fixup_dict(camerainfo))} - self._ifds[tag] = makernote - else: - # Interop - self._ifds[tag] = self._get_ifd_dict(tag_data, tag) - ifd = self._ifds.get(tag, {}) - if tag == ExifTags.IFD.Exif and self._hidden_data: - ifd = { - k: v - for (k, v) in ifd.items() - if k not in (ExifTags.IFD.Interop, ExifTags.IFD.Makernote) - } - return ifd - - def hide_offsets(self) -> None: - for tag in (ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo): - if tag in self: - self._hidden_data[tag] = self[tag] - del self[tag] - - def __str__(self) -> str: - if self._info is not None: - # Load all keys into self._data - for tag in self._info: - self[tag] - - return str(self._data) - - def __len__(self) -> int: - keys = set(self._data) - if self._info is not None: - keys.update(self._info) - return len(keys) - - def __getitem__(self, tag): - if self._info is not None and tag not in self._data and tag in self._info: - self._data[tag] = self._fixup(self._info[tag]) - del self._info[tag] - return self._data[tag] - - def __contains__(self, tag) -> bool: - return tag in self._data or (self._info is not None and tag in self._info) - - def __setitem__(self, tag, value) -> None: - if self._info is not None and tag in self._info: - del self._info[tag] - self._data[tag] = value - - def __delitem__(self, tag: int) -> None: - if self._info is not None and tag in self._info: - del self._info[tag] - else: - del self._data[tag] - - def __iter__(self): - keys = set(self._data) - if self._info is not None: - keys.update(self._info) - return iter(keys) diff --git a/venv/Lib/site-packages/PIL/ImageChops.py b/venv/Lib/site-packages/PIL/ImageChops.py deleted file mode 100644 index 29a5c99..0000000 --- a/venv/Lib/site-packages/PIL/ImageChops.py +++ /dev/null @@ -1,311 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# standard channel operations -# -# History: -# 1996-03-24 fl Created -# 1996-08-13 fl Added logical operations (for "1" images) -# 2000-10-12 fl Added offset method (from Image.py) -# -# Copyright (c) 1997-2000 by Secret Labs AB -# Copyright (c) 1996-2000 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - -from __future__ import annotations - -from . import Image - - -def constant(image: Image.Image, value: int) -> Image.Image: - """Fill a channel with a given gray level. - - :rtype: :py:class:`~PIL.Image.Image` - """ - - return Image.new("L", image.size, value) - - -def duplicate(image: Image.Image) -> Image.Image: - """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. - - :rtype: :py:class:`~PIL.Image.Image` - """ - - return image.copy() - - -def invert(image: Image.Image) -> Image.Image: - """ - Invert an image (channel). :: - - out = MAX - image - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image.load() - return image._new(image.im.chop_invert()) - - -def lighter(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Compares the two images, pixel by pixel, and returns a new image containing - the lighter values. :: - - out = max(image1, image2) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_lighter(image2.im)) - - -def darker(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Compares the two images, pixel by pixel, and returns a new image containing - the darker values. 
:: - - out = min(image1, image2) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_darker(image2.im)) - - -def difference(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Returns the absolute value of the pixel-by-pixel difference between the two - images. :: - - out = abs(image1 - image2) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_difference(image2.im)) - - -def multiply(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Superimposes two images on top of each other. - - If you multiply an image with a solid black image, the result is black. If - you multiply with a solid white image, the image is unaffected. :: - - out = image1 * image2 / MAX - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_multiply(image2.im)) - - -def screen(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Superimposes two inverted images on top of each other. :: - - out = MAX - ((MAX - image1) * (MAX - image2) / MAX) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_screen(image2.im)) - - -def soft_light(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Superimposes two images on top of each other using the Soft Light algorithm - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_soft_light(image2.im)) - - -def hard_light(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Superimposes two images on top of each other using the Hard Light algorithm - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_hard_light(image2.im)) - - -def overlay(image1: Image.Image, image2: Image.Image) -> Image.Image: - """ - Superimposes two images on top of each other using the Overlay algorithm - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_overlay(image2.im)) - - -def add( - image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0 -) -> Image.Image: - """ - Adds two images, dividing the result by scale and adding the - offset. If omitted, scale defaults to 1.0, and offset to 0.0. :: - - out = ((image1 + image2) / scale + offset) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_add(image2.im, scale, offset)) - - -def subtract( - image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0 -) -> Image.Image: - """ - Subtracts two images, dividing the result by scale and adding the offset. - If omitted, scale defaults to 1.0, and offset to 0.0. :: - - out = ((image1 - image2) / scale + offset) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) - - -def add_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image: - """Add two images, without clipping the result. :: - - out = ((image1 + image2) % MAX) - - :rtype: :py:class:`~PIL.Image.Image` - """ - - image1.load() - image2.load() - return image1._new(image1.im.chop_add_modulo(image2.im)) - - -def subtract_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image: - """Subtract two images, without clipping the result. 
diff --git a/venv/Lib/site-packages/PIL/ImageCms.py b/venv/Lib/site-packages/PIL/ImageCms.py
deleted file mode 100644
index ec10230..0000000
--- a/venv/Lib/site-packages/PIL/ImageCms.py
+++ /dev/null
@@ -1,1127 +0,0 @@
diff --git a/venv/Lib/site-packages/PIL/ImageColor.py b/venv/Lib/site-packages/PIL/ImageColor.py
deleted file mode 100644
index 9a15a8e..0000000
--- a/venv/Lib/site-packages/PIL/ImageColor.py
+++ /dev/null
@@ -1,320 +0,0 @@
diff --git a/venv/Lib/site-packages/PIL/ImageDraw.py b/venv/Lib/site-packages/PIL/ImageDraw.py
deleted file mode 100644
index 244d3d5..0000000
--- a/venv/Lib/site-packages/PIL/ImageDraw.py
+++ /dev/null
@@ -1,1206 +0,0 @@
is unsupported for multiline text" - raise ValueError(msg) - - if anchor is None: - anchor = "la" - elif len(anchor) != 2: - msg = "anchor must be a 2 character string" - raise ValueError(msg) - elif anchor[1] in "tb": - msg = "anchor not supported for multiline text" - raise ValueError(msg) - - if font is None: - font = self._getfont(font_size) - - widths = [] - max_width: float = 0 - lines = self._multiline_split(text) - line_spacing = self._multiline_spacing(font, spacing, stroke_width) - for line in lines: - line_width = self.textlength( - line, font, direction=direction, features=features, language=language - ) - widths.append(line_width) - max_width = max(max_width, line_width) - - top = xy[1] - if anchor[1] == "m": - top -= (len(lines) - 1) * line_spacing / 2.0 - elif anchor[1] == "d": - top -= (len(lines) - 1) * line_spacing - - for idx, line in enumerate(lines): - left = xy[0] - width_difference = max_width - widths[idx] - - # first align left by anchor - if anchor[0] == "m": - left -= width_difference / 2.0 - elif anchor[0] == "r": - left -= width_difference - - # then align by align parameter - if align == "left": - pass - elif align == "center": - left += width_difference / 2.0 - elif align == "right": - left += width_difference - else: - msg = 'align must be "left", "center" or "right"' - raise ValueError(msg) - - self.text( - (left, top), - line, - fill, - font, - anchor, - direction=direction, - features=features, - language=language, - stroke_width=stroke_width, - stroke_fill=stroke_fill, - embedded_color=embedded_color, - ) - top += line_spacing - - def textlength( - self, - text: str, - font: ( - ImageFont.ImageFont - | ImageFont.FreeTypeFont - | ImageFont.TransposedFont - | None - ) = None, - direction=None, - features=None, - language=None, - embedded_color=False, - *, - font_size=None, - ) -> float: - """Get the length of a given string, in pixels with 1/64 precision.""" - if self._multiline_check(text): - msg = "can't measure length of multiline text" - raise ValueError(msg) - if embedded_color and self.mode not in ("RGB", "RGBA"): - msg = "Embedded color supported only in RGB and RGBA modes" - raise ValueError(msg) - - if font is None: - font = self._getfont(font_size) - mode = "RGBA" if embedded_color else self.fontmode - return font.getlength(text, mode, direction, features, language) - - def textbbox( - self, - xy, - text, - font=None, - anchor=None, - spacing=4, - align="left", - direction=None, - features=None, - language=None, - stroke_width=0, - embedded_color=False, - *, - font_size=None, - ) -> tuple[int, int, int, int]: - """Get the bounding box of a given string, in pixels.""" - if embedded_color and self.mode not in ("RGB", "RGBA"): - msg = "Embedded color supported only in RGB and RGBA modes" - raise ValueError(msg) - - if font is None: - font = self._getfont(font_size) - - if self._multiline_check(text): - return self.multiline_textbbox( - xy, - text, - font, - anchor, - spacing, - align, - direction, - features, - language, - stroke_width, - embedded_color, - ) - - mode = "RGBA" if embedded_color else self.fontmode - bbox = font.getbbox( - text, mode, direction, features, language, stroke_width, anchor - ) - return bbox[0] + xy[0], bbox[1] + xy[1], bbox[2] + xy[0], bbox[3] + xy[1] - - def multiline_textbbox( - self, - xy, - text, - font=None, - anchor=None, - spacing=4, - align="left", - direction=None, - features=None, - language=None, - stroke_width=0, - embedded_color=False, - *, - font_size=None, - ) -> tuple[int, int, int, int]: - if direction == 
"ttb": - msg = "ttb direction is unsupported for multiline text" - raise ValueError(msg) - - if anchor is None: - anchor = "la" - elif len(anchor) != 2: - msg = "anchor must be a 2 character string" - raise ValueError(msg) - elif anchor[1] in "tb": - msg = "anchor not supported for multiline text" - raise ValueError(msg) - - if font is None: - font = self._getfont(font_size) - - widths = [] - max_width: float = 0 - lines = self._multiline_split(text) - line_spacing = self._multiline_spacing(font, spacing, stroke_width) - for line in lines: - line_width = self.textlength( - line, - font, - direction=direction, - features=features, - language=language, - embedded_color=embedded_color, - ) - widths.append(line_width) - max_width = max(max_width, line_width) - - top = xy[1] - if anchor[1] == "m": - top -= (len(lines) - 1) * line_spacing / 2.0 - elif anchor[1] == "d": - top -= (len(lines) - 1) * line_spacing - - bbox: tuple[int, int, int, int] | None = None - - for idx, line in enumerate(lines): - left = xy[0] - width_difference = max_width - widths[idx] - - # first align left by anchor - if anchor[0] == "m": - left -= width_difference / 2.0 - elif anchor[0] == "r": - left -= width_difference - - # then align by align parameter - if align == "left": - pass - elif align == "center": - left += width_difference / 2.0 - elif align == "right": - left += width_difference - else: - msg = 'align must be "left", "center" or "right"' - raise ValueError(msg) - - bbox_line = self.textbbox( - (left, top), - line, - font, - anchor, - direction=direction, - features=features, - language=language, - stroke_width=stroke_width, - embedded_color=embedded_color, - ) - if bbox is None: - bbox = bbox_line - else: - bbox = ( - min(bbox[0], bbox_line[0]), - min(bbox[1], bbox_line[1]), - max(bbox[2], bbox_line[2]), - max(bbox[3], bbox_line[3]), - ) - - top += line_spacing - - if bbox is None: - return xy[0], xy[1], xy[0], xy[1] - return bbox - - -def Draw(im: Image.Image, mode: str | None = None) -> ImageDraw: - """ - A simple 2D drawing interface for PIL images. - - :param im: The image to draw in. - :param mode: Optional mode to use for color values. For RGB - images, this argument can be RGB or RGBA (to blend the - drawing into the image). For all other modes, this argument - must be the same as the image mode. If omitted, the mode - defaults to the mode of the image. - """ - try: - return getattr(im, "getdraw")(mode) - except AttributeError: - return ImageDraw(im, mode) - - -def getdraw( - im: Image.Image | None = None, hints: list[str] | None = None -) -> tuple[ImageDraw2.Draw | None, ModuleType]: - """ - :param im: The image to draw in. - :param hints: An optional list of hints. Deprecated. - :returns: A (drawing context, drawing resource factory) tuple. - """ - if hints is not None: - deprecate("'hints' parameter", 12) - from . import ImageDraw2 - - draw = ImageDraw2.Draw(im) if im is not None else None - return draw, ImageDraw2 - - -def floodfill( - image: Image.Image, - xy: tuple[int, int], - value: float | tuple[int, ...], - border: float | tuple[int, ...] | None = None, - thresh: float = 0, -) -> None: - """ - .. warning:: This method is experimental. - - Fills a bounded region with a given color. - - :param image: Target image. - :param xy: Seed position (a 2-item coordinate tuple). See - :ref:`coordinate-system`. - :param value: Fill color. - :param border: Optional border value. If given, the region consists of - pixels with a color different from the border color. 
If not given, - the region consists of pixels having the same color as the seed - pixel. - :param thresh: Optional threshold value which specifies a maximum - tolerable difference of a pixel value from the 'background' in - order for it to be replaced. Useful for filling regions of - non-homogeneous, but similar, colors. - """ - # based on an implementation by Eric S. Raymond - # amended by yo1995 @20180806 - pixel = image.load() - assert pixel is not None - x, y = xy - try: - background = pixel[x, y] - if _color_diff(value, background) <= thresh: - return # seed point already has fill color - pixel[x, y] = value - except (ValueError, IndexError): - return # seed point outside image - edge = {(x, y)} - # use a set to keep record of current and previous edge pixels - # to reduce memory consumption - full_edge = set() - while edge: - new_edge = set() - for x, y in edge: # 4 adjacent method - for s, t in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)): - # If already processed, or if a coordinate is negative, skip - if (s, t) in full_edge or s < 0 or t < 0: - continue - try: - p = pixel[s, t] - except (ValueError, IndexError): - pass - else: - full_edge.add((s, t)) - if border is None: - fill = _color_diff(p, background) <= thresh - else: - fill = p not in (value, border) - if fill: - pixel[s, t] = value - new_edge.add((s, t)) - full_edge = edge # discard pixels processed - edge = new_edge - - -def _compute_regular_polygon_vertices( - bounding_circle: Sequence[Sequence[float] | float], n_sides: int, rotation: float -) -> list[tuple[float, float]]: - """ - Generate a list of vertices for a 2D regular polygon. - - :param bounding_circle: The bounding circle is a sequence defined - by a point and radius. The polygon is inscribed in this circle. - (e.g. ``bounding_circle=(x, y, r)`` or ``((x, y), r)``) - :param n_sides: Number of sides - (e.g. ``n_sides=3`` for a triangle, ``6`` for a hexagon) - :param rotation: Apply an arbitrary rotation to the polygon - (e.g. ``rotation=90``, applies a 90 degree rotation) - :return: List of regular polygon vertices - (e.g. ``[(25, 50), (50, 50), (50, 25), (25, 25)]``) - - How are the vertices computed? - 1. Compute the following variables - - theta: Angle between the apothem & the nearest polygon vertex - - side_length: Length of each polygon edge - - centroid: Center of bounding circle (1st, 2nd elements of bounding_circle) - - polygon_radius: Polygon radius (last element of bounding_circle) - - angles: Location of each polygon vertex in polar grid - (e.g. A square with 0 degree rotation => [225.0, 315.0, 45.0, 135.0]) - - 2. For each angle in angles, get the polygon vertex at that angle - The vertex is computed using the equation below. - X= xcos(φ) + ysin(φ) - Y= −xsin(φ) + ycos(φ) - - Note: - φ = angle in degrees - x = 0 - y = polygon_radius - - The formula above assumes rotation around the origin. - In our case, we are rotating around the centroid. - To account for this, we use the formula below - X = xcos(φ) + ysin(φ) + centroid_x - Y = −xsin(φ) + ycos(φ) + centroid_y - """ - # 1. 
Error Handling - # 1.1 Check `n_sides` has an appropriate value - if not isinstance(n_sides, int): - msg = "n_sides should be an int" # type: ignore[unreachable] - raise TypeError(msg) - if n_sides < 3: - msg = "n_sides should be an int > 2" - raise ValueError(msg) - - # 1.2 Check `bounding_circle` has an appropriate value - if not isinstance(bounding_circle, (list, tuple)): - msg = "bounding_circle should be a sequence" - raise TypeError(msg) - - if len(bounding_circle) == 3: - if not all(isinstance(i, (int, float)) for i in bounding_circle): - msg = "bounding_circle should only contain numeric data" - raise ValueError(msg) - - *centroid, polygon_radius = cast(List[float], list(bounding_circle)) - elif len(bounding_circle) == 2 and isinstance(bounding_circle[0], (list, tuple)): - if not all( - isinstance(i, (int, float)) for i in bounding_circle[0] - ) or not isinstance(bounding_circle[1], (int, float)): - msg = "bounding_circle should only contain numeric data" - raise ValueError(msg) - - if len(bounding_circle[0]) != 2: - msg = "bounding_circle centre should contain 2D coordinates (e.g. (x, y))" - raise ValueError(msg) - - centroid = cast(List[float], list(bounding_circle[0])) - polygon_radius = cast(float, bounding_circle[1]) - else: - msg = ( - "bounding_circle should contain 2D coordinates " - "and a radius (e.g. (x, y, r) or ((x, y), r) )" - ) - raise ValueError(msg) - - if polygon_radius <= 0: - msg = "bounding_circle radius should be > 0" - raise ValueError(msg) - - # 1.3 Check `rotation` has an appropriate value - if not isinstance(rotation, (int, float)): - msg = "rotation should be an int or float" # type: ignore[unreachable] - raise ValueError(msg) - - # 2. Define Helper Functions - def _apply_rotation(point: list[float], degrees: float) -> tuple[float, float]: - return ( - round( - point[0] * math.cos(math.radians(360 - degrees)) - - point[1] * math.sin(math.radians(360 - degrees)) - + centroid[0], - 2, - ), - round( - point[1] * math.cos(math.radians(360 - degrees)) - + point[0] * math.sin(math.radians(360 - degrees)) - + centroid[1], - 2, - ), - ) - - def _compute_polygon_vertex(angle: float) -> tuple[float, float]: - start_point = [polygon_radius, 0] - return _apply_rotation(start_point, angle) - - def _get_angles(n_sides: int, rotation: float) -> list[float]: - angles = [] - degrees = 360 / n_sides - # Start with the bottom left polygon vertex - current_angle = (270 - 0.5 * degrees) + rotation - for _ in range(0, n_sides): - angles.append(current_angle) - current_angle += degrees - if current_angle > 360: - current_angle -= 360 - return angles - - # 3. Variable Declarations - angles = _get_angles(n_sides, rotation) - - # 4. Compute Vertices - return [_compute_polygon_vertex(angle) for angle in angles] - - -def _color_diff( - color1: float | tuple[int, ...], color2: float | tuple[int, ...] -) -> float: - """ - Uses 1-norm distance to calculate difference between two values. 
- """ - first = color1 if isinstance(color1, tuple) else (color1,) - second = color2 if isinstance(color2, tuple) else (color2,) - - return sum(abs(first[i] - second[i]) for i in range(0, len(second))) diff --git a/venv/Lib/site-packages/PIL/ImageDraw2.py b/venv/Lib/site-packages/PIL/ImageDraw2.py deleted file mode 100644 index e89a78b..0000000 --- a/venv/Lib/site-packages/PIL/ImageDraw2.py +++ /dev/null @@ -1,206 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# WCK-style drawing interface operations -# -# History: -# 2003-12-07 fl created -# 2005-05-15 fl updated; added to PIL as ImageDraw2 -# 2005-05-15 fl added text support -# 2005-05-20 fl added arc/chord/pieslice support -# -# Copyright (c) 2003-2005 by Secret Labs AB -# Copyright (c) 2003-2005 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - - -""" -(Experimental) WCK-style drawing interface operations - -.. seealso:: :py:mod:`PIL.ImageDraw` -""" -from __future__ import annotations - -from typing import BinaryIO - -from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath -from ._typing import StrOrBytesPath - - -class Pen: - """Stores an outline color and width.""" - - def __init__(self, color: str, width: int = 1, opacity: int = 255) -> None: - self.color = ImageColor.getrgb(color) - self.width = width - - -class Brush: - """Stores a fill color""" - - def __init__(self, color: str, opacity: int = 255) -> None: - self.color = ImageColor.getrgb(color) - - -class Font: - """Stores a TrueType font and color""" - - def __init__( - self, color: str, file: StrOrBytesPath | BinaryIO, size: float = 12 - ) -> None: - # FIXME: add support for bitmap fonts - self.color = ImageColor.getrgb(color) - self.font = ImageFont.truetype(file, size) - - -class Draw: - """ - (Experimental) WCK-style drawing interface - """ - - def __init__( - self, - image: Image.Image | str, - size: tuple[int, int] | list[int] | None = None, - color: float | tuple[float, ...] | str | None = None, - ) -> None: - if isinstance(image, str): - if size is None: - msg = "If image argument is mode string, size must be a list or tuple" - raise ValueError(msg) - image = Image.new(image, size, color) - self.draw = ImageDraw.Draw(image) - self.image = image - self.transform = None - - def flush(self) -> Image.Image: - return self.image - - def render(self, op, xy, pen, brush=None): - # handle color arguments - outline = fill = None - width = 1 - if isinstance(pen, Pen): - outline = pen.color - width = pen.width - elif isinstance(brush, Pen): - outline = brush.color - width = brush.width - if isinstance(brush, Brush): - fill = brush.color - elif isinstance(pen, Brush): - fill = pen.color - # handle transformation - if self.transform: - xy = ImagePath.Path(xy) - xy.transform(self.transform) - # render the item - if op == "line": - self.draw.line(xy, fill=outline, width=width) - else: - getattr(self.draw, op)(xy, fill=fill, outline=outline) - - def settransform(self, offset): - """Sets a transformation offset.""" - (xoffset, yoffset) = offset - self.transform = (1, 0, xoffset, 0, 1, yoffset) - - def arc(self, xy, start, end, *options): - """ - Draws an arc (a portion of a circle outline) between the start and end - angles, inside the given bounding box. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc` - """ - self.render("arc", xy, start, end, *options) - - def chord(self, xy, start, end, *options): - """ - Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points - with a straight line. - - .. 
seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord` - """ - self.render("chord", xy, start, end, *options) - - def ellipse(self, xy, *options): - """ - Draws an ellipse inside the given bounding box. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse` - """ - self.render("ellipse", xy, *options) - - def line(self, xy, *options): - """ - Draws a line between the coordinates in the ``xy`` list. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line` - """ - self.render("line", xy, *options) - - def pieslice(self, xy, start, end, *options): - """ - Same as arc, but also draws straight lines between the end points and the - center of the bounding box. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice` - """ - self.render("pieslice", xy, start, end, *options) - - def polygon(self, xy, *options): - """ - Draws a polygon. - - The polygon outline consists of straight lines between the given - coordinates, plus a straight line between the last and the first - coordinate. - - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon` - """ - self.render("polygon", xy, *options) - - def rectangle(self, xy, *options): - """ - Draws a rectangle. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle` - """ - self.render("rectangle", xy, *options) - - def text(self, xy, text, font): - """ - Draws the string at the given position. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text` - """ - if self.transform: - xy = ImagePath.Path(xy) - xy.transform(self.transform) - self.draw.text(xy, text, font=font.font, fill=font.color) - - def textbbox(self, xy, text, font): - """ - Returns bounding box (in pixels) of given text. - - :return: ``(left, top, right, bottom)`` bounding box - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textbbox` - """ - if self.transform: - xy = ImagePath.Path(xy) - xy.transform(self.transform) - return self.draw.textbbox(xy, text, font=font.font) - - def textlength(self, text, font): - """ - Returns length (in pixels) of given text. - This is the amount by which following text should be offset. - - .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textlength` - """ - return self.draw.textlength(text, font=font.font) diff --git a/venv/Lib/site-packages/PIL/ImageEnhance.py b/venv/Lib/site-packages/PIL/ImageEnhance.py deleted file mode 100644 index d7e99a9..0000000 --- a/venv/Lib/site-packages/PIL/ImageEnhance.py +++ /dev/null @@ -1,107 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# image enhancement classes -# -# For a background, see "Image Processing By Interpolation and -# Extrapolation", Paul Haeberli and Douglas Voorhies. Available -# at http://www.graficaobscura.com/interp/index.html -# -# History: -# 1996-03-23 fl Created -# 2009-06-16 fl Fixed mean calculation -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image, ImageFilter, ImageStat - - -class _Enhance: - image: Image.Image - degenerate: Image.Image - - def enhance(self, factor: float) -> Image.Image: - """ - Returns an enhanced image. - - :param factor: A floating point value controlling the enhancement. - Factor 1.0 always returns a copy of the original image, - lower factors mean less color (brightness, contrast, - etc), and higher values more. There are no restrictions - on this value. 
- :rtype: :py:class:`~PIL.Image.Image` - """ - return Image.blend(self.degenerate, self.image, factor) - - -class Color(_Enhance): - """Adjust image color balance. - - This class can be used to adjust the colour balance of an image, in - a manner similar to the controls on a colour TV set. An enhancement - factor of 0.0 gives a black and white image. A factor of 1.0 gives - the original image. - """ - - def __init__(self, image: Image.Image) -> None: - self.image = image - self.intermediate_mode = "L" - if "A" in image.getbands(): - self.intermediate_mode = "LA" - - self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) - - -class Contrast(_Enhance): - """Adjust image contrast. - - This class can be used to control the contrast of an image, similar - to the contrast control on a TV set. An enhancement factor of 0.0 - gives a solid gray image. A factor of 1.0 gives the original image. - """ - - def __init__(self, image: Image.Image) -> None: - self.image = image - mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) - self.degenerate = Image.new("L", image.size, mean).convert(image.mode) - - if "A" in image.getbands(): - self.degenerate.putalpha(image.getchannel("A")) - - -class Brightness(_Enhance): - """Adjust image brightness. - - This class can be used to control the brightness of an image. An - enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the - original image. - """ - - def __init__(self, image: Image.Image) -> None: - self.image = image - self.degenerate = Image.new(image.mode, image.size, 0) - - if "A" in image.getbands(): - self.degenerate.putalpha(image.getchannel("A")) - - -class Sharpness(_Enhance): - """Adjust image sharpness. - - This class can be used to adjust the sharpness of an image. An - enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the - original image, and a factor of 2.0 gives a sharpened image. - """ - - def __init__(self, image: Image.Image) -> None: - self.image = image - self.degenerate = image.filter(ImageFilter.SMOOTH) - - if "A" in image.getbands(): - self.degenerate.putalpha(image.getchannel("A")) diff --git a/venv/Lib/site-packages/PIL/ImageFile.py b/venv/Lib/site-packages/PIL/ImageFile.py deleted file mode 100644 index 69e7ee5..0000000 --- a/venv/Lib/site-packages/PIL/ImageFile.py +++ /dev/null @@ -1,810 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# base class for image file handlers -# -# history: -# 1995-09-09 fl Created -# 1996-03-11 fl Fixed load mechanism. -# 1996-04-15 fl Added pcx/xbm decoders. -# 1996-04-30 fl Added encoders. -# 1996-12-14 fl Added load helpers -# 1997-01-11 fl Use encode_to_file where possible -# 1997-08-27 fl Flush output in _save -# 1998-03-05 fl Use memory mapping for some modes -# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" -# 1999-05-31 fl Added image parser -# 2000-10-12 fl Set readonly flag on memory-mapped images -# 2002-03-20 fl Use better messages for common decoder errors -# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available -# 2003-10-30 fl Added StubImageFile class -# 2004-02-25 fl Made incremental parser more robust -# -# Copyright (c) 1997-2004 by Secret Labs AB -# Copyright (c) 1995-2004 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import abc -import io -import itertools -import struct -import sys -from typing import IO, Any, NamedTuple - -from . 
import Image -from ._deprecate import deprecate -from ._util import is_path - -MAXBLOCK = 65536 - -SAFEBLOCK = 1024 * 1024 - -LOAD_TRUNCATED_IMAGES = False -"""Whether or not to load truncated image files. User code may change this.""" - -ERRORS = { - -1: "image buffer overrun error", - -2: "decoding error", - -3: "unknown error", - -8: "bad configuration", - -9: "out of memory error", -} -""" -Dict of known error codes returned from :meth:`.PyDecoder.decode`, -:meth:`.PyEncoder.encode` :meth:`.PyEncoder.encode_to_pyfd` and -:meth:`.PyEncoder.encode_to_file`. -""" - - -# -# -------------------------------------------------------------------- -# Helpers - - -def _get_oserror(error: int, *, encoder: bool) -> OSError: - try: - msg = Image.core.getcodecstatus(error) - except AttributeError: - msg = ERRORS.get(error) - if not msg: - msg = f"{'encoder' if encoder else 'decoder'} error {error}" - msg += f" when {'writing' if encoder else 'reading'} image file" - return OSError(msg) - - -def raise_oserror(error: int) -> OSError: - deprecate( - "raise_oserror", - 12, - action="It is only useful for translating error codes returned by a codec's " - "decode() method, which ImageFile already does automatically.", - ) - raise _get_oserror(error, encoder=False) - - -def _tilesort(t): - # sort on offset - return t[2] - - -class _Tile(NamedTuple): - codec_name: str - extents: tuple[int, int, int, int] - offset: int - args: tuple[Any, ...] | str | None - - -# -# -------------------------------------------------------------------- -# ImageFile base class - - -class ImageFile(Image.Image): - """Base class for image file format handlers.""" - - def __init__(self, fp=None, filename=None): - super().__init__() - - self._min_frame = 0 - - self.custom_mimetype = None - - self.tile = None - """ A list of tile descriptors, or ``None`` """ - - self.readonly = 1 # until we know better - - self.decoderconfig = () - self.decodermaxblock = MAXBLOCK - - if is_path(fp): - # filename - self.fp = open(fp, "rb") - self.filename = fp - self._exclusive_fp = True - else: - # stream - self.fp = fp - self.filename = filename - # can be overridden - self._exclusive_fp = None - - try: - try: - self._open() - except ( - IndexError, # end of data - TypeError, # end of data (ord) - KeyError, # unsupported mode - EOFError, # got header but not the first frame - struct.error, - ) as v: - raise SyntaxError(v) from v - - if not self.mode or self.size[0] <= 0 or self.size[1] <= 0: - msg = "not identified by this driver" - raise SyntaxError(msg) - except BaseException: - # close the file only if we have opened it this constructor - if self._exclusive_fp: - self.fp.close() - raise - - def get_format_mimetype(self) -> str | None: - if self.custom_mimetype: - return self.custom_mimetype - if self.format is not None: - return Image.MIME.get(self.format.upper()) - return None - - def __setstate__(self, state): - self.tile = [] - super().__setstate__(state) - - def verify(self) -> None: - """Check file integrity""" - - # raise exception if something's wrong. must be called - # directly after open, and closes file when finished. - if self._exclusive_fp: - self.fp.close() - self.fp = None - - def load(self): - """Load image data based on tile list""" - - if self.tile is None: - msg = "cannot load this image" - raise OSError(msg) - - pixel = Image.Image.load(self) - if not self.tile: - return pixel - - self.map = None - use_mmap = self.filename and len(self.tile) == 1 - # As of pypy 2.1.0, memory mapping was failing here. 
- use_mmap = use_mmap and not hasattr(sys, "pypy_version_info") - - readonly = 0 - - # look for read/seek overrides - try: - read = self.load_read - # don't use mmap if there are custom read/seek functions - use_mmap = False - except AttributeError: - read = self.fp.read - - try: - seek = self.load_seek - use_mmap = False - except AttributeError: - seek = self.fp.seek - - if use_mmap: - # try memory mapping - decoder_name, extents, offset, args = self.tile[0] - if isinstance(args, str): - args = (args, 0, 1) - if ( - decoder_name == "raw" - and len(args) >= 3 - and args[0] == self.mode - and args[0] in Image._MAPMODES - ): - try: - # use mmap, if possible - import mmap - - with open(self.filename) as fp: - self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ) - if offset + self.size[1] * args[1] > self.map.size(): - msg = "buffer is not large enough" - raise OSError(msg) - self.im = Image.core.map_buffer( - self.map, self.size, decoder_name, offset, args - ) - readonly = 1 - # After trashing self.im, - # we might need to reload the palette data. - if self.palette: - self.palette.dirty = 1 - except (AttributeError, OSError, ImportError): - self.map = None - - self.load_prepare() - err_code = -3 # initialize to unknown error - if not self.map: - # sort tiles in file order - self.tile.sort(key=_tilesort) - - try: - # FIXME: This is a hack to handle TIFF's JpegTables tag. - prefix = self.tile_prefix - except AttributeError: - prefix = b"" - - # Remove consecutive duplicates that only differ by their offset - self.tile = [ - list(tiles)[-1] - for _, tiles in itertools.groupby( - self.tile, lambda tile: (tile[0], tile[1], tile[3]) - ) - ] - for decoder_name, extents, offset, args in self.tile: - seek(offset) - decoder = Image._getdecoder( - self.mode, decoder_name, args, self.decoderconfig - ) - try: - decoder.setimage(self.im, extents) - if decoder.pulls_fd: - decoder.setfd(self.fp) - err_code = decoder.decode(b"")[1] - else: - b = prefix - while True: - try: - s = read(self.decodermaxblock) - except (IndexError, struct.error) as e: - # truncated png/gif - if LOAD_TRUNCATED_IMAGES: - break - else: - msg = "image file is truncated" - raise OSError(msg) from e - - if not s: # truncated jpeg - if LOAD_TRUNCATED_IMAGES: - break - else: - msg = ( - "image file is truncated " - f"({len(b)} bytes not processed)" - ) - raise OSError(msg) - - b = b + s - n, err_code = decoder.decode(b) - if n < 0: - break - b = b[n:] - finally: - # Need to cleanup here to prevent leaks - decoder.cleanup() - - self.tile = [] - self.readonly = readonly - - self.load_end() - - if self._exclusive_fp and self._close_exclusive_fp_after_loading: - self.fp.close() - self.fp = None - - if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0: - # still raised if decoder fails to return anything - raise _get_oserror(err_code, encoder=False) - - return Image.Image.load(self) - - def load_prepare(self) -> None: - # create image memory if necessary - if not self.im or self.im.mode != self.mode or self.im.size != self.size: - self.im = Image.core.new(self.mode, self.size) - # create palette (optional) - if self.mode == "P": - Image.Image.load(self) - - def load_end(self) -> None: - # may be overridden - pass - - # may be defined for contained formats - # def load_seek(self, pos: int) -> None: - # pass - - # may be defined for blocked formats (e.g. 
PNG) - # def load_read(self, read_bytes: int) -> bytes: - # pass - - def _seek_check(self, frame): - if ( - frame < self._min_frame - # Only check upper limit on frames if additional seek operations - # are not required to do so - or ( - not (hasattr(self, "_n_frames") and self._n_frames is None) - and frame >= self.n_frames + self._min_frame - ) - ): - msg = "attempt to seek outside sequence" - raise EOFError(msg) - - return self.tell() != frame - - -class StubHandler: - def open(self, im: StubImageFile) -> None: - pass - - @abc.abstractmethod - def load(self, im: StubImageFile) -> Image.Image: - pass - - -class StubImageFile(ImageFile): - """ - Base class for stub image loaders. - - A stub loader is an image loader that can identify files of a - certain format, but relies on external code to load the file. - """ - - def _open(self) -> None: - msg = "StubImageFile subclass must implement _open" - raise NotImplementedError(msg) - - def load(self): - loader = self._load() - if loader is None: - msg = f"cannot find loader for this {self.format} file" - raise OSError(msg) - image = loader.load(self) - assert image is not None - # become the other object (!) - self.__class__ = image.__class__ - self.__dict__ = image.__dict__ - return image.load() - - def _load(self) -> StubHandler | None: - """(Hook) Find actual image loader.""" - msg = "StubImageFile subclass must implement _load" - raise NotImplementedError(msg) - - -class Parser: - """ - Incremental image parser. This class implements the standard - feed/close consumer interface. - """ - - incremental = None - image: Image.Image | None = None - data = None - decoder = None - offset = 0 - finished = 0 - - def reset(self) -> None: - """ - (Consumer) Reset the parser. Note that you can only call this - method immediately after you've created a parser; parser - instances cannot be reused. - """ - assert self.data is None, "cannot reuse parsers" - - def feed(self, data): - """ - (Consumer) Feed data to the parser. - - :param data: A string buffer. - :exception OSError: If the parser failed to parse the image file. - """ - # collect data - - if self.finished: - return - - if self.data is None: - self.data = data - else: - self.data = self.data + data - - # parse what we have - if self.decoder: - if self.offset > 0: - # skip header - skip = min(len(self.data), self.offset) - self.data = self.data[skip:] - self.offset = self.offset - skip - if self.offset > 0 or not self.data: - return - - n, e = self.decoder.decode(self.data) - - if n < 0: - # end of stream - self.data = None - self.finished = 1 - if e < 0: - # decoding error - self.image = None - raise _get_oserror(e, encoder=False) - else: - # end of image - return - self.data = self.data[n:] - - elif self.image: - # if we end up here with no decoder, this file cannot - # be incrementally parsed. 
wait until we've gotten all - # available data - pass - - else: - # attempt to open this file - try: - with io.BytesIO(self.data) as fp: - im = Image.open(fp) - except OSError: - pass # not enough data - else: - flag = hasattr(im, "load_seek") or hasattr(im, "load_read") - if flag or len(im.tile) != 1: - # custom load code, or multiple tiles - self.decode = None - else: - # initialize decoder - im.load_prepare() - d, e, o, a = im.tile[0] - im.tile = [] - self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig) - self.decoder.setimage(im.im, e) - - # calculate decoder offset - self.offset = o - if self.offset <= len(self.data): - self.data = self.data[self.offset :] - self.offset = 0 - - self.image = im - - def __enter__(self): - return self - - def __exit__(self, *args: object) -> None: - self.close() - - def close(self): - """ - (Consumer) Close the stream. - - :returns: An image object. - :exception OSError: If the parser failed to parse the image file either - because it cannot be identified or cannot be - decoded. - """ - # finish decoding - if self.decoder: - # get rid of what's left in the buffers - self.feed(b"") - self.data = self.decoder = None - if not self.finished: - msg = "image was incomplete" - raise OSError(msg) - if not self.image: - msg = "cannot parse this image" - raise OSError(msg) - if self.data: - # incremental parsing not possible; reopen the file - # not that we have all data - with io.BytesIO(self.data) as fp: - try: - self.image = Image.open(fp) - finally: - self.image.load() - return self.image - - -# -------------------------------------------------------------------- - - -def _save(im, fp, tile, bufsize=0) -> None: - """Helper to save image based on tile list - - :param im: Image object. - :param fp: File object. - :param tile: Tile list. - :param bufsize: Optional buffer size - """ - - im.load() - if not hasattr(im, "encoderconfig"): - im.encoderconfig = () - tile.sort(key=_tilesort) - # FIXME: make MAXBLOCK a configuration parameter - # It would be great if we could have the encoder specify what it needs - # But, it would need at least the image size in most cases. RawEncode is - # a tricky case. - bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c - try: - fh = fp.fileno() - fp.flush() - _encode_tile(im, fp, tile, bufsize, fh) - except (AttributeError, io.UnsupportedOperation) as exc: - _encode_tile(im, fp, tile, bufsize, None, exc) - if hasattr(fp, "flush"): - fp.flush() - - -def _encode_tile(im, fp, tile: list[_Tile], bufsize, fh, exc=None): - for encoder_name, extents, offset, args in tile: - if offset > 0: - fp.seek(offset) - encoder = Image._getencoder(im.mode, encoder_name, args, im.encoderconfig) - try: - encoder.setimage(im.im, extents) - if encoder.pushes_fd: - encoder.setfd(fp) - errcode = encoder.encode_to_pyfd()[1] - else: - if exc: - # compress to Python file-compatible object - while True: - errcode, data = encoder.encode(bufsize)[1:] - fp.write(data) - if errcode: - break - else: - # slight speedup: compress to real file object - errcode = encoder.encode_to_file(fh, bufsize) - if errcode < 0: - raise _get_oserror(errcode, encoder=True) from exc - finally: - encoder.cleanup() - - -def _safe_read(fp, size): - """ - Reads large blocks in a safe way. Unlike fp.read(n), this function - doesn't trust the user. If the requested size is larger than - SAFEBLOCK, the file is read block by block. - - :param fp: File handle. Must implement a read method. - :param size: Number of bytes to read. 
- :returns: A string containing size bytes of data. - - Raises an OSError if the file is truncated and the read cannot be completed - - """ - if size <= 0: - return b"" - if size <= SAFEBLOCK: - data = fp.read(size) - if len(data) < size: - msg = "Truncated File Read" - raise OSError(msg) - return data - data = [] - remaining_size = size - while remaining_size > 0: - block = fp.read(min(remaining_size, SAFEBLOCK)) - if not block: - break - data.append(block) - remaining_size -= len(block) - if sum(len(d) for d in data) < size: - msg = "Truncated File Read" - raise OSError(msg) - return b"".join(data) - - -class PyCodecState: - def __init__(self) -> None: - self.xsize = 0 - self.ysize = 0 - self.xoff = 0 - self.yoff = 0 - - def extents(self) -> tuple[int, int, int, int]: - return self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize - - -class PyCodec: - fd: IO[bytes] | None - - def __init__(self, mode, *args): - self.im = None - self.state = PyCodecState() - self.fd = None - self.mode = mode - self.init(args) - - def init(self, args): - """ - Override to perform codec specific initialization - - :param args: Array of args items from the tile entry - :returns: None - """ - self.args = args - - def cleanup(self) -> None: - """ - Override to perform codec specific cleanup - - :returns: None - """ - pass - - def setfd(self, fd): - """ - Called from ImageFile to set the Python file-like object - - :param fd: A Python file-like object - :returns: None - """ - self.fd = fd - - def setimage(self, im, extents: tuple[int, int, int, int] | None = None) -> None: - """ - Called from ImageFile to set the core output image for the codec - - :param im: A core image object - :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle - for this tile - :returns: None - """ - - # following c code - self.im = im - - if extents: - (x0, y0, x1, y1) = extents - else: - (x0, y0, x1, y1) = (0, 0, 0, 0) - - if x0 == 0 and x1 == 0: - self.state.xsize, self.state.ysize = self.im.size - else: - self.state.xoff = x0 - self.state.yoff = y0 - self.state.xsize = x1 - x0 - self.state.ysize = y1 - y0 - - if self.state.xsize <= 0 or self.state.ysize <= 0: - msg = "Size cannot be negative" - raise ValueError(msg) - - if ( - self.state.xsize + self.state.xoff > self.im.size[0] - or self.state.ysize + self.state.yoff > self.im.size[1] - ): - msg = "Tile cannot extend outside image" - raise ValueError(msg) - - -class PyDecoder(PyCodec): - """ - Python implementation of a format decoder. Override this class and - add the decoding logic in the :meth:`decode` method. - - See :ref:`Writing Your Own File Codec in Python` - """ - - _pulls_fd = False - - @property - def pulls_fd(self) -> bool: - return self._pulls_fd - - def decode(self, buffer: bytes) -> tuple[int, int]: - """ - Override to perform the decoding process. - - :param buffer: A bytes object with the data to be decoded. - :returns: A tuple of ``(bytes consumed, errcode)``. - If finished with decoding return -1 for the bytes consumed. - Err codes are from :data:`.ImageFile.ERRORS`. - """ - msg = "unavailable in base decoder" - raise NotImplementedError(msg) - - def set_as_raw(self, data: bytes, rawmode=None) -> None: - """ - Convenience method to set the internal image from a stream of raw data - - :param data: Bytes to be set - :param rawmode: The rawmode to be used for the decoder. 
- If not specified, it will default to the mode of the image - :returns: None - """ - - if not rawmode: - rawmode = self.mode - d = Image._getdecoder(self.mode, "raw", rawmode) - assert self.im is not None - d.setimage(self.im, self.state.extents()) - s = d.decode(data) - - if s[0] >= 0: - msg = "not enough image data" - raise ValueError(msg) - if s[1] != 0: - msg = "cannot decode image data" - raise ValueError(msg) - - -class PyEncoder(PyCodec): - """ - Python implementation of a format encoder. Override this class and - add the decoding logic in the :meth:`encode` method. - - See :ref:`Writing Your Own File Codec in Python` - """ - - _pushes_fd = False - - @property - def pushes_fd(self) -> bool: - return self._pushes_fd - - def encode(self, bufsize: int) -> tuple[int, int, bytes]: - """ - Override to perform the encoding process. - - :param bufsize: Buffer size. - :returns: A tuple of ``(bytes encoded, errcode, bytes)``. - If finished with encoding return 1 for the error code. - Err codes are from :data:`.ImageFile.ERRORS`. - """ - msg = "unavailable in base encoder" - raise NotImplementedError(msg) - - def encode_to_pyfd(self) -> tuple[int, int]: - """ - If ``pushes_fd`` is ``True``, then this method will be used, - and ``encode()`` will only be called once. - - :returns: A tuple of ``(bytes consumed, errcode)``. - Err codes are from :data:`.ImageFile.ERRORS`. - """ - if not self.pushes_fd: - return 0, -8 # bad configuration - bytes_consumed, errcode, data = self.encode(0) - if data: - assert self.fd is not None - self.fd.write(data) - return bytes_consumed, errcode - - def encode_to_file(self, fh, bufsize): - """ - :param fh: File handle. - :param bufsize: Buffer size. - - :returns: If finished successfully, return 0. - Otherwise, return an error code. Err codes are from - :data:`.ImageFile.ERRORS`. - """ - errcode = 0 - while errcode == 0: - status, errcode, buf = self.encode(bufsize) - if status > 0: - fh.write(buf[status:]) - return errcode diff --git a/venv/Lib/site-packages/PIL/ImageFilter.py b/venv/Lib/site-packages/PIL/ImageFilter.py deleted file mode 100644 index e18b4a4..0000000 --- a/venv/Lib/site-packages/PIL/ImageFilter.py +++ /dev/null @@ -1,604 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# standard filters -# -# History: -# 1995-11-27 fl Created -# 2002-06-08 fl Added rank and mode filters -# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1995-2002 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import abc -import functools -from types import ModuleType -from typing import TYPE_CHECKING, Any, Callable, Sequence, cast - -if TYPE_CHECKING: - from . import _imaging - from ._typing import NumpyArray - - -class Filter: - @abc.abstractmethod - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - pass - - -class MultibandFilter(Filter): - pass - - -class BuiltinFilter(MultibandFilter): - filterargs: tuple[Any, ...] - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - if image.mode == "P": - msg = "cannot filter palette images" - raise ValueError(msg) - return image.filter(*self.filterargs) - - -class Kernel(BuiltinFilter): - """ - Create a convolution kernel. This only supports 3x3 and 5x5 integer and floating - point kernels. - - Kernels can only be applied to "L" and "RGB" images. - - :param size: Kernel size, given as (width, height). 
This must be (3,3) or (5,5). - :param kernel: A sequence containing kernel weights. The kernel will be flipped - vertically before being applied to the image. - :param scale: Scale factor. If given, the result for each pixel is divided by this - value. The default is the sum of the kernel weights. - :param offset: Offset. If given, this value is added to the result, after it has - been divided by the scale factor. - """ - - name = "Kernel" - - def __init__( - self, - size: tuple[int, int], - kernel: Sequence[float], - scale: float | None = None, - offset: float = 0, - ) -> None: - if scale is None: - # default scale is sum of kernel - scale = functools.reduce(lambda a, b: a + b, kernel) - if size[0] * size[1] != len(kernel): - msg = "not enough coefficients in kernel" - raise ValueError(msg) - self.filterargs = size, scale, offset, kernel - - -class RankFilter(Filter): - """ - Create a rank filter. The rank filter sorts all pixels in - a window of the given size, and returns the ``rank``'th value. - - :param size: The kernel size, in pixels. - :param rank: What pixel value to pick. Use 0 for a min filter, - ``size * size / 2`` for a median filter, ``size * size - 1`` - for a max filter, etc. - """ - - name = "Rank" - - def __init__(self, size: int, rank: int) -> None: - self.size = size - self.rank = rank - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - if image.mode == "P": - msg = "cannot filter palette images" - raise ValueError(msg) - image = image.expand(self.size // 2, self.size // 2) - return image.rankfilter(self.size, self.rank) - - -class MedianFilter(RankFilter): - """ - Create a median filter. Picks the median pixel value in a window with the - given size. - - :param size: The kernel size, in pixels. - """ - - name = "Median" - - def __init__(self, size: int = 3) -> None: - self.size = size - self.rank = size * size // 2 - - -class MinFilter(RankFilter): - """ - Create a min filter. Picks the lowest pixel value in a window with the - given size. - - :param size: The kernel size, in pixels. - """ - - name = "Min" - - def __init__(self, size: int = 3) -> None: - self.size = size - self.rank = 0 - - -class MaxFilter(RankFilter): - """ - Create a max filter. Picks the largest pixel value in a window with the - given size. - - :param size: The kernel size, in pixels. - """ - - name = "Max" - - def __init__(self, size: int = 3) -> None: - self.size = size - self.rank = size * size - 1 - - -class ModeFilter(Filter): - """ - Create a mode filter. Picks the most frequent pixel value in a box with the - given size. Pixel values that occur only once or twice are ignored; if no - pixel value occurs more than twice, the original pixel value is preserved. - - :param size: The kernel size, in pixels. - """ - - name = "Mode" - - def __init__(self, size: int = 3) -> None: - self.size = size - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - return image.modefilter(self.size) - - -class GaussianBlur(MultibandFilter): - """Blurs the image with a sequence of extended box filters, which - approximates a Gaussian kernel. For details on accuracy see - - - :param radius: Standard deviation of the Gaussian kernel. Either a sequence of two - numbers for x and y, or a single number for both. 
- """ - - name = "GaussianBlur" - - def __init__(self, radius: float | Sequence[float] = 2) -> None: - self.radius = radius - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - xy = self.radius - if isinstance(xy, (int, float)): - xy = (xy, xy) - if xy == (0, 0): - return image.copy() - return image.gaussian_blur(xy) - - -class BoxBlur(MultibandFilter): - """Blurs the image by setting each pixel to the average value of the pixels - in a square box extending radius pixels in each direction. - Supports float radius of arbitrary size. Uses an optimized implementation - which runs in linear time relative to the size of the image - for any radius value. - - :param radius: Size of the box in a direction. Either a sequence of two numbers for - x and y, or a single number for both. - - Radius 0 does not blur, returns an identical image. - Radius 1 takes 1 pixel in each direction, i.e. 9 pixels in total. - """ - - name = "BoxBlur" - - def __init__(self, radius: float | Sequence[float]) -> None: - xy = radius if isinstance(radius, (tuple, list)) else (radius, radius) - if xy[0] < 0 or xy[1] < 0: - msg = "radius must be >= 0" - raise ValueError(msg) - self.radius = radius - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - xy = self.radius - if isinstance(xy, (int, float)): - xy = (xy, xy) - if xy == (0, 0): - return image.copy() - return image.box_blur(xy) - - -class UnsharpMask(MultibandFilter): - """Unsharp mask filter. - - See Wikipedia's entry on `digital unsharp masking`_ for an explanation of - the parameters. - - :param radius: Blur Radius - :param percent: Unsharp strength, in percent - :param threshold: Threshold controls the minimum brightness change that - will be sharpened - - .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking - - """ - - name = "UnsharpMask" - - def __init__( - self, radius: float = 2, percent: int = 150, threshold: int = 3 - ) -> None: - self.radius = radius - self.percent = percent - self.threshold = threshold - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - return image.unsharp_mask(self.radius, self.percent, self.threshold) - - -class BLUR(BuiltinFilter): - name = "Blur" - # fmt: off - filterargs = (5, 5), 16, 0, ( - 1, 1, 1, 1, 1, - 1, 0, 0, 0, 1, - 1, 0, 0, 0, 1, - 1, 0, 0, 0, 1, - 1, 1, 1, 1, 1, - ) - # fmt: on - - -class CONTOUR(BuiltinFilter): - name = "Contour" - # fmt: off - filterargs = (3, 3), 1, 255, ( - -1, -1, -1, - -1, 8, -1, - -1, -1, -1, - ) - # fmt: on - - -class DETAIL(BuiltinFilter): - name = "Detail" - # fmt: off - filterargs = (3, 3), 6, 0, ( - 0, -1, 0, - -1, 10, -1, - 0, -1, 0, - ) - # fmt: on - - -class EDGE_ENHANCE(BuiltinFilter): - name = "Edge-enhance" - # fmt: off - filterargs = (3, 3), 2, 0, ( - -1, -1, -1, - -1, 10, -1, - -1, -1, -1, - ) - # fmt: on - - -class EDGE_ENHANCE_MORE(BuiltinFilter): - name = "Edge-enhance More" - # fmt: off - filterargs = (3, 3), 1, 0, ( - -1, -1, -1, - -1, 9, -1, - -1, -1, -1, - ) - # fmt: on - - -class EMBOSS(BuiltinFilter): - name = "Emboss" - # fmt: off - filterargs = (3, 3), 1, 128, ( - -1, 0, 0, - 0, 1, 0, - 0, 0, 0, - ) - # fmt: on - - -class FIND_EDGES(BuiltinFilter): - name = "Find Edges" - # fmt: off - filterargs = (3, 3), 1, 0, ( - -1, -1, -1, - -1, 8, -1, - -1, -1, -1, - ) - # fmt: on - - -class SHARPEN(BuiltinFilter): - name = "Sharpen" - # fmt: off - filterargs = (3, 3), 16, 0, ( - -2, -2, -2, - -2, 32, -2, - -2, -2, -2, - ) - # fmt: on - - -class SMOOTH(BuiltinFilter): - 
name = "Smooth" - # fmt: off - filterargs = (3, 3), 13, 0, ( - 1, 1, 1, - 1, 5, 1, - 1, 1, 1, - ) - # fmt: on - - -class SMOOTH_MORE(BuiltinFilter): - name = "Smooth More" - # fmt: off - filterargs = (5, 5), 100, 0, ( - 1, 1, 1, 1, 1, - 1, 5, 5, 5, 1, - 1, 5, 44, 5, 1, - 1, 5, 5, 5, 1, - 1, 1, 1, 1, 1, - ) - # fmt: on - - -class Color3DLUT(MultibandFilter): - """Three-dimensional color lookup table. - - Transforms 3-channel pixels using the values of the channels as coordinates - in the 3D lookup table and interpolating the nearest elements. - - This method allows you to apply almost any color transformation - in constant time by using pre-calculated decimated tables. - - .. versionadded:: 5.2.0 - - :param size: Size of the table. One int or tuple of (int, int, int). - Minimal size in any dimension is 2, maximum is 65. - :param table: Flat lookup table. A list of ``channels * size**3`` - float elements or a list of ``size**3`` channels-sized - tuples with floats. Channels are changed first, - then first dimension, then second, then third. - Value 0.0 corresponds lowest value of output, 1.0 highest. - :param channels: Number of channels in the table. Could be 3 or 4. - Default is 3. - :param target_mode: A mode for the result image. Should have not less - than ``channels`` channels. Default is ``None``, - which means that mode wouldn't be changed. - """ - - name = "Color 3D LUT" - - def __init__( - self, - size: int | tuple[int, int, int], - table: Sequence[float] | Sequence[Sequence[int]] | NumpyArray, - channels: int = 3, - target_mode: str | None = None, - **kwargs: bool, - ) -> None: - if channels not in (3, 4): - msg = "Only 3 or 4 output channels are supported" - raise ValueError(msg) - self.size = size = self._check_size(size) - self.channels = channels - self.mode = target_mode - - # Hidden flag `_copy_table=False` could be used to avoid extra copying - # of the table if the table is specially made for the constructor. - copy_table = kwargs.get("_copy_table", True) - items = size[0] * size[1] * size[2] - wrong_size = False - - numpy: ModuleType | None = None - if hasattr(table, "shape"): - try: - import numpy - except ImportError: - pass - - if numpy and isinstance(table, numpy.ndarray): - numpy_table: NumpyArray = table - if copy_table: - numpy_table = numpy_table.copy() - - if numpy_table.shape in [ - (items * channels,), - (items, channels), - (size[2], size[1], size[0], channels), - ]: - table = numpy_table.reshape(items * channels) - else: - wrong_size = True - - else: - if copy_table: - table = list(table) - - # Convert to a flat list - if table and isinstance(table[0], (list, tuple)): - raw_table = cast(Sequence[Sequence[int]], table) - flat_table: list[int] = [] - for pixel in raw_table: - if len(pixel) != channels: - msg = ( - "The elements of the table should " - f"have a length of {channels}." - ) - raise ValueError(msg) - flat_table.extend(pixel) - table = flat_table - - if wrong_size or len(table) != items * channels: - msg = ( - "The table should have either channels * size**3 float items " - "or size**3 items of channels-sized tuples with floats. " - f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. " - f"Actual length: {len(table)}" - ) - raise ValueError(msg) - self.table = table - - @staticmethod - def _check_size(size: Any) -> tuple[int, int, int]: - try: - _, _, _ = size - except ValueError as e: - msg = "Size should be either an integer or a tuple of three integers." 
- raise ValueError(msg) from e - except TypeError: - size = (size, size, size) - size = tuple(int(x) for x in size) - for size_1d in size: - if not 2 <= size_1d <= 65: - msg = "Size should be in [2, 65] range." - raise ValueError(msg) - return size - - @classmethod - def generate( - cls, - size: int | tuple[int, int, int], - callback: Callable[[float, float, float], tuple[float, ...]], - channels: int = 3, - target_mode: str | None = None, - ) -> Color3DLUT: - """Generates new LUT using provided callback. - - :param size: Size of the table. Passed to the constructor. - :param callback: Function with three parameters which correspond - three color channels. Will be called ``size**3`` - times with values from 0.0 to 1.0 and should return - a tuple with ``channels`` elements. - :param channels: The number of channels which should return callback. - :param target_mode: Passed to the constructor of the resulting - lookup table. - """ - size_1d, size_2d, size_3d = cls._check_size(size) - if channels not in (3, 4): - msg = "Only 3 or 4 output channels are supported" - raise ValueError(msg) - - table: list[float] = [0] * (size_1d * size_2d * size_3d * channels) - idx_out = 0 - for b in range(size_3d): - for g in range(size_2d): - for r in range(size_1d): - table[idx_out : idx_out + channels] = callback( - r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1) - ) - idx_out += channels - - return cls( - (size_1d, size_2d, size_3d), - table, - channels=channels, - target_mode=target_mode, - _copy_table=False, - ) - - def transform( - self, - callback: Callable[..., tuple[float, ...]], - with_normals: bool = False, - channels: int | None = None, - target_mode: str | None = None, - ) -> Color3DLUT: - """Transforms the table values using provided callback and returns - a new LUT with altered values. - - :param callback: A function which takes old lookup table values - and returns a new set of values. The number - of arguments which function should take is - ``self.channels`` or ``3 + self.channels`` - if ``with_normals`` flag is set. - Should return a tuple of ``self.channels`` or - ``channels`` elements if it is set. - :param with_normals: If true, ``callback`` will be called with - coordinates in the color cube as the first - three arguments. Otherwise, ``callback`` - will be called only with actual color values. - :param channels: The number of channels in the resulting lookup table. - :param target_mode: Passed to the constructor of the resulting - lookup table. 
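# Illustrative sketch of the filter API documented above; "photo.jpg" is an
# assumed input file, everything else is the public Pillow interface
# (GaussianBlur, BoxBlur, UnsharpMask, the built-in kernels, Color3DLUT.generate).
from PIL import Image, ImageFilter

im = Image.open("photo.jpg").convert("RGB")
blurred = im.filter(ImageFilter.GaussianBlur(radius=3))
boxed = im.filter(ImageFilter.BoxBlur((2, 4)))         # separate x/y radii
sharp = im.filter(ImageFilter.UnsharpMask(radius=2, percent=150, threshold=3))
embossed = im.filter(ImageFilter.EMBOSS)               # built-in 3x3 kernel

# A 17x17x17 lookup table that swaps the red and blue channels; the callback
# receives r, g, b in [0.0, 1.0] and returns `channels` values, as documented.
swap_rb = ImageFilter.Color3DLUT.generate(17, lambda r, g, b: (b, g, r))
swapped = im.filter(swap_rb)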
- """ - if channels not in (None, 3, 4): - msg = "Only 3 or 4 output channels are supported" - raise ValueError(msg) - ch_in = self.channels - ch_out = channels or ch_in - size_1d, size_2d, size_3d = self.size - - table = [0] * (size_1d * size_2d * size_3d * ch_out) - idx_in = 0 - idx_out = 0 - for b in range(size_3d): - for g in range(size_2d): - for r in range(size_1d): - values = self.table[idx_in : idx_in + ch_in] - if with_normals: - values = callback( - r / (size_1d - 1), - g / (size_2d - 1), - b / (size_3d - 1), - *values, - ) - else: - values = callback(*values) - table[idx_out : idx_out + ch_out] = values - idx_in += ch_in - idx_out += ch_out - - return type(self)( - self.size, - table, - channels=ch_out, - target_mode=target_mode or self.mode, - _copy_table=False, - ) - - def __repr__(self) -> str: - r = [ - f"{self.__class__.__name__} from {self.table.__class__.__name__}", - "size={:d}x{:d}x{:d}".format(*self.size), - f"channels={self.channels:d}", - ] - if self.mode: - r.append(f"target_mode={self.mode}") - return "<{}>".format(" ".join(r)) - - def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: - from . import Image - - return image.color_lut_3d( - self.mode or image.mode, - Image.Resampling.BILINEAR, - self.channels, - self.size[0], - self.size[1], - self.size[2], - self.table, - ) diff --git a/venv/Lib/site-packages/PIL/ImageFont.py b/venv/Lib/site-packages/PIL/ImageFont.py deleted file mode 100644 index d260eef..0000000 --- a/venv/Lib/site-packages/PIL/ImageFont.py +++ /dev/null @@ -1,1290 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PIL raster font management -# -# History: -# 1996-08-07 fl created (experimental) -# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 -# 1999-02-06 fl rewrote most font management stuff in C -# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) -# 2001-02-17 fl added freetype support -# 2001-05-09 fl added TransposedFont wrapper class -# 2002-03-04 fl make sure we have a "L" or "1" font -# 2002-12-04 fl skip non-directory entries in the system path -# 2003-04-29 fl add embedded default font -# 2003-09-27 fl added support for truetype charmap encodings -# -# Todo: -# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) -# -# Copyright (c) 1997-2003 by Secret Labs AB -# Copyright (c) 1996-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - -from __future__ import annotations - -import base64 -import os -import sys -import warnings -from enum import IntEnum -from io import BytesIO -from types import ModuleType -from typing import IO, TYPE_CHECKING, Any, BinaryIO - -from . import Image -from ._typing import StrOrBytesPath -from ._util import DeferredError, is_path - -if TYPE_CHECKING: - from . import ImageFile - from ._imaging import ImagingFont - from ._imagingft import Font - - -class Layout(IntEnum): - BASIC = 0 - RAQM = 1 - - -MAX_STRING_LENGTH = 1_000_000 - - -core: ModuleType | DeferredError -try: - from . import _imagingft as core -except ImportError as ex: - core = DeferredError.new(ex) - - -def _string_length_check(text: str | bytes | bytearray) -> None: - if MAX_STRING_LENGTH is not None and len(text) > MAX_STRING_LENGTH: - msg = "too many characters in string" - raise ValueError(msg) - - -# FIXME: add support for pilfont2 format (see FontFile.py) - -# -------------------------------------------------------------------- -# Font metrics format: -# "PILfont" LF -# fontdescriptor LF -# (optional) key=value... 
LF -# "DATA" LF -# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) -# -# To place a character, cut out srcbox and paste at dstbox, -# relative to the character position. Then move the character -# position according to dx, dy. -# -------------------------------------------------------------------- - - -class ImageFont: - """PIL font wrapper""" - - font: ImagingFont - - def _load_pilfont(self, filename: str) -> None: - with open(filename, "rb") as fp: - image: ImageFile.ImageFile | None = None - for ext in (".png", ".gif", ".pbm"): - if image: - image.close() - try: - fullname = os.path.splitext(filename)[0] + ext - image = Image.open(fullname) - except Exception: - pass - else: - if image and image.mode in ("1", "L"): - break - else: - if image: - image.close() - msg = "cannot find glyph data file" - raise OSError(msg) - - self.file = fullname - - self._load_pilfont_data(fp, image) - image.close() - - def _load_pilfont_data(self, file: IO[bytes], image: Image.Image) -> None: - # read PILfont header - if file.readline() != b"PILfont\n": - msg = "Not a PILfont file" - raise SyntaxError(msg) - file.readline().split(b";") - self.info = [] # FIXME: should be a dictionary - while True: - s = file.readline() - if not s or s == b"DATA\n": - break - self.info.append(s) - - # read PILfont metrics - data = file.read(256 * 20) - - # check image - if image.mode not in ("1", "L"): - msg = "invalid font image mode" - raise TypeError(msg) - - image.load() - - self.font = Image.core.font(image.im, data) - - def getmask(self, text, mode="", *args, **kwargs): - """ - Create a bitmap for the text. - - If the font uses antialiasing, the bitmap should have mode ``L`` and use a - maximum value of 255. Otherwise, it should have mode ``1``. - - :param text: Text to render. - :param mode: Used by some graphics drivers to indicate what mode the - driver prefers; if empty, the renderer may return either - mode. Note that the mode is always a string, to simplify - C-level implementations. - - .. versionadded:: 1.1.5 - - :return: An internal PIL storage memory instance as defined by the - :py:mod:`PIL.Image.core` interface module. - """ - _string_length_check(text) - Image._decompression_bomb_check(self.font.getsize(text)) - return self.font.getmask(text, mode) - - def getbbox( - self, text: str | bytes | bytearray, *args: Any, **kwargs: Any - ) -> tuple[int, int, int, int]: - """ - Returns bounding box (in pixels) of given text. - - .. versionadded:: 9.2.0 - - :param text: Text to render. - - :return: ``(left, top, right, bottom)`` bounding box - """ - _string_length_check(text) - width, height = self.font.getsize(text) - return 0, 0, width, height - - def getlength( - self, text: str | bytes | bytearray, *args: Any, **kwargs: Any - ) -> int: - """ - Returns length (in pixels) of given text. - This is the amount by which following text should be offset. - - .. versionadded:: 9.2.0 - """ - _string_length_check(text) - width, height = self.font.getsize(text) - return width - - -## -# Wrapper for FreeType fonts. Application code should use the -# truetype factory function to create font objects. 
- - -class FreeTypeFont: - """FreeType font wrapper (requires _imagingft service)""" - - font: Font - font_bytes: bytes - - def __init__( - self, - font: StrOrBytesPath | BinaryIO | None = None, - size: float = 10, - index: int = 0, - encoding: str = "", - layout_engine: Layout | None = None, - ) -> None: - # FIXME: use service provider instead - - if isinstance(core, DeferredError): - raise core.ex - - if size <= 0: - msg = "font size must be greater than 0" - raise ValueError(msg) - - self.path = font - self.size = size - self.index = index - self.encoding = encoding - - if layout_engine not in (Layout.BASIC, Layout.RAQM): - layout_engine = Layout.BASIC - if core.HAVE_RAQM: - layout_engine = Layout.RAQM - elif layout_engine == Layout.RAQM and not core.HAVE_RAQM: - warnings.warn( - "Raqm layout was requested, but Raqm is not available. " - "Falling back to basic layout." - ) - layout_engine = Layout.BASIC - - self.layout_engine = layout_engine - - def load_from_bytes(f): - self.font_bytes = f.read() - self.font = core.getfont( - "", size, index, encoding, self.font_bytes, layout_engine - ) - - if is_path(font): - font = os.path.realpath(os.fspath(font)) - if sys.platform == "win32": - font_bytes_path = font if isinstance(font, bytes) else font.encode() - try: - font_bytes_path.decode("ascii") - except UnicodeDecodeError: - # FreeType cannot load fonts with non-ASCII characters on Windows - # So load it into memory first - with open(font, "rb") as f: - load_from_bytes(f) - return - self.font = core.getfont( - font, size, index, encoding, layout_engine=layout_engine - ) - else: - load_from_bytes(font) - - def __getstate__(self): - return [self.path, self.size, self.index, self.encoding, self.layout_engine] - - def __setstate__(self, state): - path, size, index, encoding, layout_engine = state - self.__init__(path, size, index, encoding, layout_engine) - - def getname(self) -> tuple[str | None, str | None]: - """ - :return: A tuple of the font family (e.g. Helvetica) and the font style - (e.g. Bold) - """ - return self.font.family, self.font.style - - def getmetrics(self) -> tuple[int, int]: - """ - :return: A tuple of the font ascent (the distance from the baseline to - the highest outline point) and descent (the distance from the - baseline to the lowest outline point, a negative value) - """ - return self.font.ascent, self.font.descent - - def getlength( - self, text: str | bytes, mode="", direction=None, features=None, language=None - ) -> float: - """ - Returns length (in pixels with 1/64 precision) of given text when rendered - in font with provided direction, features, and language. - - This is the amount by which following text should be offset. - Text bounding box may extend past the length in some fonts, - e.g. when using italics or accents. - - The result is returned as a float; it is a whole number if using basic layout. - - Note that the sum of two lengths may not equal the length of a concatenated - string due to kerning. If you need to adjust for kerning, include the following - character and subtract its length. 
- - For example, instead of :: - - hello = font.getlength("Hello") - world = font.getlength("World") - hello_world = hello + world # not adjusted for kerning - assert hello_world == font.getlength("HelloWorld") # may fail - - use :: - - hello = font.getlength("HelloW") - font.getlength("W") # adjusted for kerning - world = font.getlength("World") - hello_world = hello + world # adjusted for kerning - assert hello_world == font.getlength("HelloWorld") # True - - or disable kerning with (requires libraqm) :: - - hello = draw.textlength("Hello", font, features=["-kern"]) - world = draw.textlength("World", font, features=["-kern"]) - hello_world = hello + world # kerning is disabled, no need to adjust - assert hello_world == draw.textlength("HelloWorld", font, features=["-kern"]) - - .. versionadded:: 8.0.0 - - :param text: Text to measure. - :param mode: Used by some graphics drivers to indicate what mode the - driver prefers; if empty, the renderer may return either - mode. Note that the mode is always a string, to simplify - C-level implementations. - - :param direction: Direction of the text. It can be 'rtl' (right to - left), 'ltr' (left to right) or 'ttb' (top to bottom). - Requires libraqm. - - :param features: A list of OpenType font features to be used during text - layout. This is usually used to turn on optional - font features that are not enabled by default, - for example 'dlig' or 'ss01', but can be also - used to turn off default font features for - example '-liga' to disable ligatures or '-kern' - to disable kerning. To get all supported - features, see - https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist - Requires libraqm. - - :param language: Language of the text. Different languages may use - different glyph shapes or ligatures. This parameter tells - the font which language the text is in, and to apply the - correct substitutions as appropriate, if available. - It should be a `BCP 47 language code - `_ - Requires libraqm. - - :return: Either width for horizontal text, or height for vertical text. - """ - _string_length_check(text) - return self.font.getlength(text, mode, direction, features, language) / 64 - - def getbbox( - self, - text: str | bytes, - mode: str = "", - direction: str | None = None, - features: list[str] | None = None, - language: str | None = None, - stroke_width: float = 0, - anchor: str | None = None, - ) -> tuple[float, float, float, float]: - """ - Returns bounding box (in pixels) of given text relative to given anchor - when rendered in font with provided direction, features, and language. - - Use :py:meth:`getlength()` to get the offset of following text with - 1/64 pixel precision. The bounding box includes extra margins for - some fonts, e.g. italics or accents. - - .. versionadded:: 8.0.0 - - :param text: Text to render. - :param mode: Used by some graphics drivers to indicate what mode the - driver prefers; if empty, the renderer may return either - mode. Note that the mode is always a string, to simplify - C-level implementations. - - :param direction: Direction of the text. It can be 'rtl' (right to - left), 'ltr' (left to right) or 'ttb' (top to bottom). - Requires libraqm. - - :param features: A list of OpenType font features to be used during text - layout. This is usually used to turn on optional - font features that are not enabled by default, - for example 'dlig' or 'ss01', but can be also - used to turn off default font features for - example '-liga' to disable ligatures or '-kern' - to disable kerning. 
To get all supported - features, see - https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist - Requires libraqm. - - :param language: Language of the text. Different languages may use - different glyph shapes or ligatures. This parameter tells - the font which language the text is in, and to apply the - correct substitutions as appropriate, if available. - It should be a `BCP 47 language code - `_ - Requires libraqm. - - :param stroke_width: The width of the text stroke. - - :param anchor: The text anchor alignment. Determines the relative location of - the anchor to the text. The default alignment is top left, - specifically ``la`` for horizontal text and ``lt`` for - vertical text. See :ref:`text-anchors` for details. - - :return: ``(left, top, right, bottom)`` bounding box - """ - _string_length_check(text) - size, offset = self.font.getsize( - text, mode, direction, features, language, anchor - ) - left, top = offset[0] - stroke_width, offset[1] - stroke_width - width, height = size[0] + 2 * stroke_width, size[1] + 2 * stroke_width - return left, top, left + width, top + height - - def getmask( - self, - text, - mode="", - direction=None, - features=None, - language=None, - stroke_width=0, - anchor=None, - ink=0, - start=None, - ): - """ - Create a bitmap for the text. - - If the font uses antialiasing, the bitmap should have mode ``L`` and use a - maximum value of 255. If the font has embedded color data, the bitmap - should have mode ``RGBA``. Otherwise, it should have mode ``1``. - - :param text: Text to render. - :param mode: Used by some graphics drivers to indicate what mode the - driver prefers; if empty, the renderer may return either - mode. Note that the mode is always a string, to simplify - C-level implementations. - - .. versionadded:: 1.1.5 - - :param direction: Direction of the text. It can be 'rtl' (right to - left), 'ltr' (left to right) or 'ttb' (top to bottom). - Requires libraqm. - - .. versionadded:: 4.2.0 - - :param features: A list of OpenType font features to be used during text - layout. This is usually used to turn on optional - font features that are not enabled by default, - for example 'dlig' or 'ss01', but can be also - used to turn off default font features for - example '-liga' to disable ligatures or '-kern' - to disable kerning. To get all supported - features, see - https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist - Requires libraqm. - - .. versionadded:: 4.2.0 - - :param language: Language of the text. Different languages may use - different glyph shapes or ligatures. This parameter tells - the font which language the text is in, and to apply the - correct substitutions as appropriate, if available. - It should be a `BCP 47 language code - `_ - Requires libraqm. - - .. versionadded:: 6.0.0 - - :param stroke_width: The width of the text stroke. - - .. versionadded:: 6.2.0 - - :param anchor: The text anchor alignment. Determines the relative location of - the anchor to the text. The default alignment is top left, - specifically ``la`` for horizontal text and ``lt`` for - vertical text. See :ref:`text-anchors` for details. - - .. versionadded:: 8.0.0 - - :param ink: Foreground ink for rendering in RGBA mode. - - .. versionadded:: 8.0.0 - - :param start: Tuple of horizontal and vertical offset, as text may render - differently when starting at fractional coordinates. - - .. versionadded:: 9.4.0 - - :return: An internal PIL storage memory instance as defined by the - :py:mod:`PIL.Image.core` interface module. 
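# Illustrative sketch of the measuring methods documented above (getbbox,
# getlength); the font path is an assumption.
from PIL import ImageFont

font = ImageFont.truetype("DejaVuSans.ttf", 32)
left, top, right, bottom = font.getbbox("Hello", stroke_width=2)
width, height = right - left, bottom - top             # pixel extents incl. stroke
advance = font.getlength("Hello")                       # offset for following text, as a float
print(width, height, advance)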
- """ - return self.getmask2( - text, - mode, - direction=direction, - features=features, - language=language, - stroke_width=stroke_width, - anchor=anchor, - ink=ink, - start=start, - )[0] - - def getmask2( - self, - text: str | bytes, - mode="", - direction=None, - features=None, - language=None, - stroke_width=0, - anchor=None, - ink=0, - start=None, - *args, - **kwargs, - ): - """ - Create a bitmap for the text. - - If the font uses antialiasing, the bitmap should have mode ``L`` and use a - maximum value of 255. If the font has embedded color data, the bitmap - should have mode ``RGBA``. Otherwise, it should have mode ``1``. - - :param text: Text to render. - :param mode: Used by some graphics drivers to indicate what mode the - driver prefers; if empty, the renderer may return either - mode. Note that the mode is always a string, to simplify - C-level implementations. - - .. versionadded:: 1.1.5 - - :param direction: Direction of the text. It can be 'rtl' (right to - left), 'ltr' (left to right) or 'ttb' (top to bottom). - Requires libraqm. - - .. versionadded:: 4.2.0 - - :param features: A list of OpenType font features to be used during text - layout. This is usually used to turn on optional - font features that are not enabled by default, - for example 'dlig' or 'ss01', but can be also - used to turn off default font features for - example '-liga' to disable ligatures or '-kern' - to disable kerning. To get all supported - features, see - https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist - Requires libraqm. - - .. versionadded:: 4.2.0 - - :param language: Language of the text. Different languages may use - different glyph shapes or ligatures. This parameter tells - the font which language the text is in, and to apply the - correct substitutions as appropriate, if available. - It should be a `BCP 47 language code - `_ - Requires libraqm. - - .. versionadded:: 6.0.0 - - :param stroke_width: The width of the text stroke. - - .. versionadded:: 6.2.0 - - :param anchor: The text anchor alignment. Determines the relative location of - the anchor to the text. The default alignment is top left, - specifically ``la`` for horizontal text and ``lt`` for - vertical text. See :ref:`text-anchors` for details. - - .. versionadded:: 8.0.0 - - :param ink: Foreground ink for rendering in RGBA mode. - - .. versionadded:: 8.0.0 - - :param start: Tuple of horizontal and vertical offset, as text may render - differently when starting at fractional coordinates. - - .. versionadded:: 9.4.0 - - :return: A tuple of an internal PIL storage memory instance as defined by the - :py:mod:`PIL.Image.core` interface module, and the text offset, the - gap between the starting coordinate and the first marking - """ - _string_length_check(text) - if start is None: - start = (0, 0) - - def fill(width, height): - size = (width, height) - Image._decompression_bomb_check(size) - return Image.core.fill("RGBA" if mode == "RGBA" else "L", size) - - return self.font.render( - text, - fill, - mode, - direction, - features, - language, - stroke_width, - anchor, - ink, - start[0], - start[1], - ) - - def font_variant( - self, font=None, size=None, index=None, encoding=None, layout_engine=None - ): - """ - Create a copy of this FreeTypeFont object, - using any specified arguments to override the settings. - - Parameters are identical to the parameters used to initialize this - object. - - :return: A FreeTypeFont object. 
- """ - if font is None: - try: - font = BytesIO(self.font_bytes) - except AttributeError: - font = self.path - return FreeTypeFont( - font=font, - size=self.size if size is None else size, - index=self.index if index is None else index, - encoding=self.encoding if encoding is None else encoding, - layout_engine=layout_engine or self.layout_engine, - ) - - def get_variation_names(self) -> list[bytes]: - """ - :returns: A list of the named styles in a variation font. - :exception OSError: If the font is not a variation font. - """ - try: - names = self.font.getvarnames() - except AttributeError as e: - msg = "FreeType 2.9.1 or greater is required" - raise NotImplementedError(msg) from e - return [name.replace(b"\x00", b"") for name in names] - - def set_variation_by_name(self, name): - """ - :param name: The name of the style. - :exception OSError: If the font is not a variation font. - """ - names = self.get_variation_names() - if not isinstance(name, bytes): - name = name.encode() - index = names.index(name) + 1 - - if index == getattr(self, "_last_variation_index", None): - # When the same name is set twice in a row, - # there is an 'unknown freetype error' - # https://savannah.nongnu.org/bugs/?56186 - return - self._last_variation_index = index - - self.font.setvarname(index) - - def get_variation_axes(self): - """ - :returns: A list of the axes in a variation font. - :exception OSError: If the font is not a variation font. - """ - try: - axes = self.font.getvaraxes() - except AttributeError as e: - msg = "FreeType 2.9.1 or greater is required" - raise NotImplementedError(msg) from e - for axis in axes: - if axis["name"]: - axis["name"] = axis["name"].replace(b"\x00", b"") - return axes - - def set_variation_by_axes(self, axes: list[float]) -> None: - """ - :param axes: A list of values for each axis. - :exception OSError: If the font is not a variation font. - """ - try: - self.font.setvaraxes(axes) - except AttributeError as e: - msg = "FreeType 2.9.1 or greater is required" - raise NotImplementedError(msg) from e - - -class TransposedFont: - """Wrapper for writing rotated or mirrored text""" - - def __init__(self, font, orientation=None): - """ - Wrapper that creates a transposed font from any existing font - object. - - :param font: A font object. - :param orientation: An optional orientation. If given, this should - be one of Image.Transpose.FLIP_LEFT_RIGHT, Image.Transpose.FLIP_TOP_BOTTOM, - Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_180, or - Image.Transpose.ROTATE_270. 
- """ - self.font = font - self.orientation = orientation # any 'transpose' argument, or None - - def getmask(self, text, mode="", *args, **kwargs): - im = self.font.getmask(text, mode, *args, **kwargs) - if self.orientation is not None: - return im.transpose(self.orientation) - return im - - def getbbox(self, text, *args, **kwargs): - # TransposedFont doesn't support getmask2, move top-left point to (0, 0) - # this has no effect on ImageFont and simulates anchor="lt" for FreeTypeFont - left, top, right, bottom = self.font.getbbox(text, *args, **kwargs) - width = right - left - height = bottom - top - if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): - return 0, 0, height, width - return 0, 0, width, height - - def getlength(self, text: str | bytes, *args, **kwargs) -> float: - if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): - msg = "text length is undefined for text rotated by 90 or 270 degrees" - raise ValueError(msg) - return self.font.getlength(text, *args, **kwargs) - - -def load(filename: str) -> ImageFont: - """ - Load a font file. This function loads a font object from the given - bitmap font file, and returns the corresponding font object. - - :param filename: Name of font file. - :return: A font object. - :exception OSError: If the file could not be read. - """ - f = ImageFont() - f._load_pilfont(filename) - return f - - -def truetype( - font: StrOrBytesPath | BinaryIO | None = None, - size: float = 10, - index: int = 0, - encoding: str = "", - layout_engine: Layout | None = None, -) -> FreeTypeFont: - """ - Load a TrueType or OpenType font from a file or file-like object, - and create a font object. - This function loads a font object from the given file or file-like - object, and creates a font object for a font of the given size. - - Pillow uses FreeType to open font files. On Windows, be aware that FreeType - will keep the file open as long as the FreeTypeFont object exists. Windows - limits the number of files that can be open in C at once to 512, so if many - fonts are opened simultaneously and that limit is approached, an - ``OSError`` may be thrown, reporting that FreeType "cannot open resource". - A workaround would be to copy the file(s) into memory, and open that instead. - - This function requires the _imagingft service. - - :param font: A filename or file-like object containing a TrueType font. - If the file is not found in this filename, the loader may also - search in other directories, such as: - - * The :file:`fonts/` directory on Windows, - * :file:`/Library/Fonts/`, :file:`/System/Library/Fonts/` - and :file:`~/Library/Fonts/` on macOS. - * :file:`~/.local/share/fonts`, :file:`/usr/local/share/fonts`, - and :file:`/usr/share/fonts` on Linux; or those specified by - the ``XDG_DATA_HOME`` and ``XDG_DATA_DIRS`` environment variables - for user-installed and system-wide fonts, respectively. - - :param size: The requested size, in pixels. - :param index: Which font face to load (default is first available face). - :param encoding: Which font encoding to use (default is Unicode). Possible - encodings include (see the FreeType documentation for more - information): - - * "unic" (Unicode) - * "symb" (Microsoft Symbol) - * "ADOB" (Adobe Standard) - * "ADBE" (Adobe Expert) - * "ADBC" (Adobe Custom) - * "armn" (Apple Roman) - * "sjis" (Shift JIS) - * "gb " (PRC) - * "big5" - * "wans" (Extended Wansung) - * "joha" (Johab) - * "lat1" (Latin-1) - - This specifies the character set to use. 
It does not alter the - encoding of any text provided in subsequent operations. - :param layout_engine: Which layout engine to use, if available: - :attr:`.ImageFont.Layout.BASIC` or :attr:`.ImageFont.Layout.RAQM`. - If it is available, Raqm layout will be used by default. - Otherwise, basic layout will be used. - - Raqm layout is recommended for all non-English text. If Raqm layout - is not required, basic layout will have better performance. - - You can check support for Raqm layout using - :py:func:`PIL.features.check_feature` with ``feature="raqm"``. - - .. versionadded:: 4.2.0 - :return: A font object. - :exception OSError: If the file could not be read. - :exception ValueError: If the font size is not greater than zero. - """ - - def freetype(font: StrOrBytesPath | BinaryIO | None) -> FreeTypeFont: - return FreeTypeFont(font, size, index, encoding, layout_engine) - - try: - return freetype(font) - except OSError: - if not is_path(font): - raise - ttf_filename = os.path.basename(font) - - dirs = [] - if sys.platform == "win32": - # check the windows font repository - # NOTE: must use uppercase WINDIR, to work around bugs in - # 1.5.2's os.environ.get() - windir = os.environ.get("WINDIR") - if windir: - dirs.append(os.path.join(windir, "fonts")) - elif sys.platform in ("linux", "linux2"): - data_home = os.environ.get("XDG_DATA_HOME") - if not data_home: - # The freedesktop spec defines the following default directory for - # when XDG_DATA_HOME is unset or empty. This user-level directory - # takes precedence over system-level directories. - data_home = os.path.expanduser("~/.local/share") - xdg_dirs = [data_home] - - data_dirs = os.environ.get("XDG_DATA_DIRS") - if not data_dirs: - # Similarly, defaults are defined for the system-level directories - data_dirs = "/usr/local/share:/usr/share" - xdg_dirs += data_dirs.split(":") - - dirs += [os.path.join(xdg_dir, "fonts") for xdg_dir in xdg_dirs] - elif sys.platform == "darwin": - dirs += [ - "/Library/Fonts", - "/System/Library/Fonts", - os.path.expanduser("~/Library/Fonts"), - ] - - ext = os.path.splitext(ttf_filename)[1] - first_font_with_a_different_extension = None - for directory in dirs: - for walkroot, walkdir, walkfilenames in os.walk(directory): - for walkfilename in walkfilenames: - if ext and walkfilename == ttf_filename: - return freetype(os.path.join(walkroot, walkfilename)) - elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: - fontpath = os.path.join(walkroot, walkfilename) - if os.path.splitext(fontpath)[1] == ".ttf": - return freetype(fontpath) - if not ext and first_font_with_a_different_extension is None: - first_font_with_a_different_extension = fontpath - if first_font_with_a_different_extension: - return freetype(first_font_with_a_different_extension) - raise - - -def load_path(filename: str | bytes) -> ImageFont: - """ - Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a - bitmap font along the Python path. - - :param filename: Name of font file. - :return: A font object. - :exception OSError: If the file could not be read. 
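# Illustrative sketch of the lookup behaviour documented above: truetype()
# falls back to the platform font directories when the literal path is missing,
# and load_path() searches sys.path for bitmap fonts. Both file names are assumptions.
from PIL import ImageFont

sans = ImageFont.truetype("DejaVuSans.ttf", 18)    # resolved via system font dirs if needed

try:
    pil_font = ImageFont.load_path("courB08.pil")  # searched along sys.path
except OSError:
    pil_font = None                                # no matching bitmap font found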
- """ - if not isinstance(filename, str): - filename = filename.decode("utf-8") - for directory in sys.path: - try: - return load(os.path.join(directory, filename)) - except OSError: - pass - msg = "cannot find font file" - raise OSError(msg) - - -def load_default_imagefont() -> ImageFont: - f = ImageFont() - f._load_pilfont_data( - # courB08 - BytesIO( - base64.b64decode( - b""" -UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA -BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL -AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA -AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB -ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A -BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB -//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA -AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH -AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA -ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv -AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ -/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 -AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA -AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG -AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA -BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA -AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA -2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF -AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// -+gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA -////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA -BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv -AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA -AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA -AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA -BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// -//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA -AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF -AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB 
-mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn -AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA -AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 -AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA -Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB -//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA -AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ -AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC -DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ -AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ -+wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 -AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ -///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG -AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA -BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA -Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC -eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG -AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// -+gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA -////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA -BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT -AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A -AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA -Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA -Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// -//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA -AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ -AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA -LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 -AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA -AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 -AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA -AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG 
-AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA -EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK -AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA -pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG -AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// -+QAGAAIAzgAKANUAEw== -""" - ) - ), - Image.open( - BytesIO( - base64.b64decode( - b""" -iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u -Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 -M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g -LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F -IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA -Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 -NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx -in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 -SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY -AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt -y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG -ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY -lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H -/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 -AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 -c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ -/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw -pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv -oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR -evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA -AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// -Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR -w7IkEbzhVQAAAABJRU5ErkJggg== -""" - ) - ) - ), - ) - return f - - -def load_default(size: float | None = None) -> FreeTypeFont | ImageFont: - """If FreeType support is available, load a version of Aileron Regular, - https://dotcolon.net/font/aileron, with a more limited character set. - - Otherwise, load a "better than nothing" font. - - .. versionadded:: 1.1.4 - - :param size: The font size of Aileron Regular. - - .. versionadded:: 10.1.0 - - :return: A font object. 
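# Illustrative sketch of load_default() as documented above: the embedded
# Aileron Regular when FreeType support is present, otherwise the bitmap fallback.
from PIL import ImageFont

font = ImageFont.load_default(size=24)   # size applies to the scalable FreeType variant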
- """ - if isinstance(core, ModuleType) or size is not None: - return truetype( - BytesIO( - base64.b64decode( - b""" -AAEAAAAPAIAAAwBwRkZUTYwDlUAAADFoAAAAHEdERUYAqADnAAAo8AAAACRHUE9ThhmITwAAKfgAA -AduR1NVQnHxefoAACkUAAAA4k9TLzJovoHLAAABeAAAAGBjbWFw5lFQMQAAA6gAAAGqZ2FzcP//AA -MAACjoAAAACGdseWYmRXoPAAAGQAAAHfhoZWFkE18ayQAAAPwAAAA2aGhlYQboArEAAAE0AAAAJGh -tdHjjERZ8AAAB2AAAAdBsb2NhuOexrgAABVQAAADqbWF4cAC7AEYAAAFYAAAAIG5hbWUr+h5lAAAk -OAAAA6Jwb3N0D3oPTQAAJ9wAAAEKAAEAAAABGhxJDqIhXw889QALA+gAAAAA0Bqf2QAAAADhCh2h/ -2r/LgOxAyAAAAAIAAIAAAAAAAAAAQAAA8r/GgAAA7j/av9qA7EAAQAAAAAAAAAAAAAAAAAAAHQAAQ -AAAHQAQwAFAAAAAAACAAAAAQABAAAAQAAAAAAAAAADAfoBkAAFAAgCigJYAAAASwKKAlgAAAFeADI -BPgAAAAAFAAAAAAAAAAAAAAcAAAAAAAAAAAAAAABVS1dOAEAAIPsCAwL/GgDIA8oA5iAAAJMAAAAA -AhICsgAAACAAAwH0AAAAAAAAAU0AAADYAAAA8gA5AVMAVgJEAEYCRAA1AuQAKQKOAEAAsAArATsAZ -AE7AB4CMABVAkQAUADc/+EBEgAgANwAJQEv//sCRAApAkQAggJEADwCRAAtAkQAIQJEADkCRAArAk -QAMgJEACwCRAAxANwAJQDc/+ECRABnAkQAUAJEAEQB8wAjA1QANgJ/AB0CcwBkArsALwLFAGQCSwB -kAjcAZALGAC8C2gBkAQgAZAIgADcCYQBkAj8AZANiAGQCzgBkAuEALwJWAGQC3QAvAmsAZAJJADQC -ZAAiAqoAXgJuACADuAAaAnEAGQJFABMCTwAuATMAYgEv//sBJwAiAkQAUAH0ADIBLAApAhMAJAJjA -EoCEQAeAmcAHgIlAB4BIgAVAmcAHgJRAEoA7gA+AOn/8wIKAEoA9wBGA1cASgJRAEoCSgAeAmMASg -JnAB4BSgBKAcsAGAE5ABQCUABCAgIAAQMRAAEB4v/6AgEAAQHOABQBLwBAAPoAYAEvACECRABNA0Y -AJAItAHgBKgAcAkQAUAEsAHQAygAgAi0AOQD3ADYA9wAWAaEANgGhABYCbAAlAYMAeAGDADkA6/9q -AhsAFAIKABUB/QAVAAAAAwAAAAMAAAAcAAEAAAAAAKQAAwABAAAAHAAEAIgAAAAeABAAAwAOAH4Aq -QCrALEAtAC3ALsgGSAdICYgOiBEISL7Av//AAAAIACpAKsAsAC0ALcAuyAYIBwgJiA5IEQhIvsB// -//4/+5/7j/tP+y/7D/reBR4E/gR+A14CzfTwVxAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAEGAAABAAAAAAAAAAECAAAAAgAAAAAAAAAAAAAAAAAAAAEAAAMEBQYHCAkKCwwNDg8QERIT -FBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMT -U5PUFFSU1RVVldYWVpbXF1eX2BhAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGQAAA -AAAAAAYnFmAAAAAABlAAAAAAAAAAAAAAAAAAAAAAAAAAAAY2htAAAAAAAAAABrbGlqAAAAAHAAbm9 -ycwBnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmACYAJgAmAD4AUgCCAMoBCgFO -AVwBcgGIAaYBvAHKAdYB6AH2AgwCIAJKAogCpgLWAw4DIgNkA5wDugPUA+gD/AQQBEYEogS8BPoFJ -gVSBWoFgAWwBcoF1gX6BhQGJAZMBmgGiga0BuIHGgdUB2YHkAeiB8AH3AfyCAoIHAgqCDoITghcCG -oIogjSCPoJKglYCXwJwgnqCgIKKApACl4Klgq8CtwLDAs8C1YLjAuyC9oL7gwMDCYMSAxgDKAMrAz -qDQoNTA1mDYQNoA2uDcAN2g3oDfYODA4iDkoOXA5sDnoOnA7EDvwAAAAFAAAAAAH0ArwAAwAGAAkA -DAAPAAAxESERAxMhExcRASELARETAfT6qv6syKr+jgFUqsiqArz9RAGLAP/+1P8B/v3VAP8BLP4CA -P8AAgA5//IAuQKyAAMACwAANyMDMwIyFhQGIiY0oE4MZk84JCQ4JLQB/v3AJDgkJDgAAgBWAeUBPA -LfAAMABwAAEyMnMxcjJzOmRgpagkYKWgHl+vr6AAAAAAIARgAAAf4CsgAbAB8AAAEHMxUjByM3Iwc -jNyM1MzcjNTM3MwczNzMHMxUrAQczAZgdZXEvOi9bLzovWmYdZXEvOi9bLzovWp9bHlsBn4w429vb -2ziMONvb29s4jAAAAAMANf+mAg4DDAAfACYALAAAJRQGBxUjNS4BJzMeARcRLgE0Njc1MxUeARcjJ -icVHgEBFBYXNQ4BExU+ATU0Ag5xWDpgcgRcBz41Xl9oVTpVYwpcC1ttXP6cLTQuM5szOrVRZwlOTQ -ZqVzZECAEAGlukZAlOTQdrUG8O7iNlAQgxNhDlCDj+8/YGOjReAAAAAAUAKf/yArsCvAAHAAsAFQA -dACcAABIyFhQGIiY0EyMBMwQiBhUUFjI2NTQSMhYUBiImNDYiBhUUFjI2NTR5iFBQiFCVVwHAV/5c -OiMjOiPmiFBQiFCxOiMjOiMCvFaSVlaS/ZoCsjIzMC80NC8w/uNWklZWkhozMC80NC8wAAAAAgBA/ -/ICbgLAACIALgAAARUjEQYjIiY1NDY3LgE1NDYzMhcVJiMiBhUUFhcWOwE1MxUFFBYzMjc1IyIHDg -ECbmBcYYOOVkg7R4hsQjY4Q0RNRD4SLDxW/pJUXzksPCkUUk0BgUb+zBVUZ0BkDw5RO1huCkULQzp -COAMBcHDHRz0J/AIHRQAAAAEAKwHlAIUC3wADAAATIycze0YKWgHl+gAAAAABAGT/sAEXAwwACQAA -EzMGEBcjLgE0Nt06dXU6OUBAAwzG/jDGVePs4wAAAAEAHv+wANEDDAAJAAATMx4BFAYHIzYQHjo5Q -EA5OnUDDFXj7ONVxgHQAAAAAQBVAFIB2wHbAA4AAAE3FwcXBycHJzcnNxcnMwEtmxOfcTJjYzJxnx -ObCj4BKD07KYolmZkliik7PbMAAQBQAFUB9AIlAAsAAAEjFSM1IzUzNTMVMwH0tTq1tTq1AR/Kyjj 
-OzgAAAAAB/+H/iACMAGQABAAANwcjNzOMWlFOXVrS3AAAAQAgAP8A8gE3AAMAABMjNTPy0tIA/zgA -AQAl//IApQByAAcAADYyFhQGIiY0STgkJDgkciQ4JCQ4AAAAAf/7/+IBNALQAAMAABcjEzM5Pvs+H -gLuAAAAAAIAKf/yAhsCwAADAAcAABIgECA2IBAgKQHy/g5gATL+zgLA/TJEAkYAAAAAAQCCAAABlg -KyAAgAAAERIxEHNTc2MwGWVr6SIygCsv1OAldxW1sWAAEAPAAAAg4CwAAZAAA3IRUhNRM+ATU0JiM -iDwEjNz4BMzIWFRQGB7kBUv4x+kI2QTt+EAFWAQp8aGVtSl5GRjEA/0RVLzlLmAoKa3FsUkNxXQAA -AAEALf/yAhYCwAAqAAABHgEVFAYjIi8BMxceATMyNjU0KwE1MzI2NTQmIyIGDwEjNz4BMzIWFRQGA -YxBSZJo2RUBVgEHV0JBUaQREUBUQzc5TQcBVgEKfGhfcEMBbxJbQl1x0AoKRkZHPn9GSD80QUVCCg -pfbGBPOlgAAAACACEAAAIkArIACgAPAAAlIxUjNSE1ATMRMyMRBg8BAiRXVv6qAVZWV60dHLCurq4 -rAdn+QgFLMibzAAABADn/8gIZArIAHQAAATIWFRQGIyIvATMXFjMyNjU0JiMiByMTIRUhBzc2ATNv -d5Fl1RQBVgIad0VSTkVhL1IwAYj+vh8rMAHHgGdtgcUKCoFXTU5bYgGRRvAuHQAAAAACACv/8gITA -sAAFwAjAAABMhYVFAYjIhE0NjMyFh8BIycmIyIDNzYTMjY1NCYjIgYVFBYBLmp7imr0l3RZdAgBXA -IYZ5wKJzU6QVNJSz5SUAHSgWltiQFGxcNlVQoKdv7sPiz+ZF1LTmJbU0lhAAAAAQAyAAACGgKyAAY -AAAEVASMBITUCGv6oXAFL/oECsij9dgJsRgAAAAMALP/xAhgCwAAWACAALAAAAR4BFRQGIyImNTQ2 -Ny4BNTQ2MhYVFAYmIgYVFBYyNjU0AzI2NTQmIyIGFRQWAZQ5S5BmbIpPOjA7ecp5P2F8Q0J8RIVJS -0pLTEtOAW0TXTxpZ2ZqPF0SE1A3VWVlVTdQ/UU0N0RENzT9/ko+Ok1NOj1LAAIAMf/yAhkCwAAXAC -MAAAEyERQGIyImLwEzFxYzMhMHBiMiJjU0NhMyNjU0JiMiBhUUFgEl9Jd0WXQIAVwCGGecCic1SWp -7imo+UlBAQVNJAsD+usXDZVUKCnYBFD4sgWltif5kW1NJYV1LTmIAAAACACX/8gClAiAABwAPAAAS -MhYUBiImNBIyFhQGIiY0STgkJDgkJDgkJDgkAiAkOCQkOP52JDgkJDgAAAAC/+H/iAClAiAABwAMA -AASMhYUBiImNBMHIzczSTgkJDgkaFpSTl4CICQ4JCQ4/mba5gAAAQBnAB4B+AH0AAYAAAENARUlNS -UB+P6qAVb+bwGRAbCmpkbJRMkAAAIAUAC7AfQBuwADAAcAAAEhNSERITUhAfT+XAGk/lwBpAGDOP8 -AOAABAEQAHgHVAfQABgAAARUFNS0BNQHV/m8BVv6qAStEyUSmpkYAAAAAAgAj//IB1ALAABgAIAAA -ATIWFRQHDgEHIz4BNz4BNTQmIyIGByM+ARIyFhQGIiY0AQRibmktIAJWBSEqNig+NTlHBFoDezQ4J -CQ4JALAZ1BjaS03JS1DMD5LLDQ/SUVgcv2yJDgkJDgAAAAAAgA2/5gDFgKYADYAQgAAAQMGFRQzMj -Y1NCYjIg4CFRQWMzI2NxcGIyImNTQ+AjMyFhUUBiMiJwcGIyImNTQ2MzIfATcHNzYmIyIGFRQzMjY -Cej8EJjJJlnBAfGQ+oHtAhjUYg5OPx0h2k06Os3xRWQsVLjY5VHtdPBwJETcJDyUoOkZEJz8B0f74 -EQ8kZl6EkTFZjVOLlyknMVm1pmCiaTq4lX6CSCknTVRmmR8wPdYnQzxuSWVGAAIAHQAAAncCsgAHA -AoAACUjByMTMxMjATMDAcj+UVz4dO5d/sjPZPT0ArL9TgE6ATQAAAADAGQAAAJMArIAEAAbACcAAA -EeARUUBgcGKwERMzIXFhUUJRUzMjc2NTQnJiMTPgE1NCcmKwEVMzIBvkdHZkwiNt7LOSGq/oeFHBt -hahIlSTM+cB8Yj5UWAW8QT0VYYgwFArIEF5Fv1eMED2NfDAL93AU+N24PBP0AAAAAAQAv//ICjwLA -ABsAAAEyFh8BIycmIyIGFRQWMzI/ATMHDgEjIiY1NDYBdX+PCwFWAiKiaHx5ZaIiAlYBCpWBk6a0A -sCAagoKpqN/gaOmCgplhcicn8sAAAIAZAAAAp8CsgAMABkAAAEeARUUBgcGKwERMzITPgE1NCYnJi -sBETMyAY59lJp8IzXN0jUVWmdjWRs5d3I4Aq4QqJWUug8EArL9mQ+PeHGHDgX92gAAAAABAGQAAAI -vArIACwAAJRUhESEVIRUhFSEVAi/+NQHB/pUBTf6zRkYCskbwRvAAAAABAGQAAAIlArIACQAAExUh -FSERIxEhFboBQ/69VgHBAmzwRv7KArJGAAAAAAEAL//yAo8CwAAfAAABMxEjNQcGIyImNTQ2MzIWH -wEjJyYjIgYVFBYzMjY1IwGP90wfPnWTprSSf48LAVYCIqJofHllVG+hAU3+s3hARsicn8uAagoKpq -N/gaN1XAAAAAEAZAAAAowCsgALAAABESMRIREjETMRIRECjFb+hFZWAXwCsv1OAS7+0gKy/sQBPAA -AAAABAGQAAAC6ArIAAwAAMyMRM7pWVgKyAAABADf/8gHoArIAEwAAAREUBw4BIyImLwEzFxYzMjc2 -NREB6AIFcGpgbQIBVgIHfXQKAQKy/lYxIltob2EpKYyEFD0BpwAAAAABAGQAAAJ0ArIACwAACQEjA -wcVIxEzEQEzATsBJ3ntQlZWAVVlAWH+nwEnR+ACsv6RAW8AAQBkAAACLwKyAAUAACUVIREzEQIv/j -VWRkYCsv2UAAABAGQAAAMUArIAFAAAAREjETQ3BgcDIwMmJxYVESMRMxsBAxRWAiMxemx8NxsCVo7 -MywKy/U4BY7ZLco7+nAFmoFxLtP6dArL9lwJpAAAAAAEAZAAAAoACsgANAAAhIwEWFREjETMBJjUR -MwKAhP67A1aEAUUDVAJeeov+pwKy/aJ5jAFZAAAAAgAv//ICuwLAAAkAEwAAEiAWFRQGICY1NBIyN -jU0JiIGFRTbATSsrP7MrNrYenrYegLAxaKhxsahov47nIeIm5uIhwACAGQAAAJHArIADgAYAAABHg -EVFAYHBisBESMRMzITNjQnJisBETMyAZRUX2VOHzuAVtY7GlxcGDWIiDUCrgtnVlVpCgT+5gKy/rU -V1BUF/vgAAAACAC//zAK9AsAAEgAcAAAlFhcHJiMiBwYjIiY1NDYgFhUUJRQWMjY1NCYiBgI9PUMx 
-UDcfKh8omqysATSs/dR62Hp62HpICTg7NgkHxqGixcWitbWHnJyHiJubAAIAZAAAAlgCsgAXACMAA -CUWFyMmJyYnJisBESMRMzIXHgEVFAYHFiUzMjc+ATU0JyYrAQIqDCJfGQwNWhAhglbiOx9QXEY1Tv -6bhDATMj1lGSyMtYgtOXR0BwH+1wKyBApbU0BSESRAAgVAOGoQBAABADT/8gIoAsAAJQAAATIWFyM -uASMiBhUUFhceARUUBiMiJiczHgEzMjY1NCYnLgE1NDYBOmd2ClwGS0E6SUNRdW+HZnKKC1wPWkQ9 -Uk1cZGuEAsBwXUJHNjQ3OhIbZVZZbm5kREo+NT5DFRdYUFdrAAAAAAEAIgAAAmQCsgAHAAABIxEjE -SM1IQJk9lb2AkICbP2UAmxGAAEAXv/yAmQCsgAXAAABERQHDgEiJicmNREzERQXHgEyNjc2NRECZA -IIgfCBCAJWAgZYmlgGAgKy/k0qFFxzc1wUKgGz/lUrEkRQUEQSKwGrAAAAAAEAIAAAAnoCsgAGAAA -hIwMzGwEzAYJ07l3N1FwCsv2PAnEAAAEAGgAAA7ECsgAMAAABAyMLASMDMxsBMxsBA7HAcZyicrZi -kaB0nJkCsv1OAlP9rQKy/ZsCW/2kAmYAAAEAGQAAAm8CsgALAAAhCwEjEwMzGwEzAxMCCsrEY/bkY -re+Y/D6AST+3AFcAVb+5gEa/q3+oQAAAQATAAACUQKyAAgAAAERIxEDMxsBMwFdVvRjwLphARD+8A -EQAaL+sQFPAAABAC4AAAI5ArIACQAAJRUhNQEhNSEVAQI5/fUBof57Aen+YUZGQgIqRkX92QAAAAA -BAGL/sAEFAwwABwAAARUjETMVIxEBBWlpowMMOP0UOANcAAAB//v/4gE0AtAAAwAABSMDMwE0Pvs+ -HgLuAAAAAQAi/7AAxQMMAAcAABcjNTMRIzUzxaNpaaNQOALsOAABAFAA1wH0AmgABgAAJQsBIxMzE -wGwjY1GsESw1wFZ/qcBkf5vAAAAAQAy/6oBwv/iAAMAAAUhNSEBwv5wAZBWOAAAAAEAKQJEALYCsg -ADAAATIycztjhVUAJEbgAAAAACACT/8gHQAiAAHQAlAAAhJwcGIyImNTQ2OwE1NCcmIyIHIz4BMzI -XFh0BFBcnMjY9ASYVFAF6CR0wVUtgkJoiAgdgaQlaBm1Zrg4DCuQ9R+5MOSFQR1tbDiwUUXBUXowf -J8c9SjRORzYSgVwAAAAAAgBK//ICRQLfABEAHgAAATIWFRQGIyImLwEVIxEzETc2EzI2NTQmIyIGH -QEUFgFUcYCVbiNJEyNWVigySElcU01JXmECIJd4i5QTEDRJAt/+3jkq/hRuZV55ZWsdX14AAQAe// -IB9wIgABgAAAEyFhcjJiMiBhUUFjMyNjczDgEjIiY1NDYBF152DFocbEJXU0A1Rw1aE3pbaoKQAiB -oWH5qZm1tPDlaXYuLgZcAAAACAB7/8gIZAt8AEQAeAAABESM1BwYjIiY1NDYzMhYfAREDMjY9ATQm -IyIGFRQWAhlWKDJacYCVbiNJEyOnSV5hQUlcUwLf/SFVOSqXeIuUExA0ARb9VWVrHV9ebmVeeQACA -B7/8gH9AiAAFQAbAAABFAchHgEzMjY3Mw4BIyImNTQ2MzIWJyIGByEmAf0C/oAGUkA1SwlaD4FXbI -WObmt45UBVBwEqDQEYFhNjWD84W16Oh3+akU9aU60AAAEAFQAAARoC8gAWAAATBh0BMxUjESMRIzU -zNTQ3PgEzMhcVJqcDbW1WOTkDB0k8Hx5oAngVITRC/jQBzEIsJRs5PwVHEwAAAAIAHv8uAhkCIAAi -AC8AAAERFAcOASMiLwEzFx4BMzI2NzY9AQcGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZAQSEd -NwRAVcBBU5DTlUDASgyWnGAlW4jSRMjp0leYUFJXFMCEv5wSh1zeq8KCTI8VU0ZIQk5Kpd4i5QTED -RJ/iJlax1fXm5lXnkAAQBKAAACCgLkABcAAAEWFREjETQnLgEHDgEdASMRMxE3NjMyFgIIAlYCBDs -6RVRWViE5UVViAYUbQP7WASQxGzI7AQJyf+kC5P7TPSxUAAACAD4AAACsAsAABwALAAASMhYUBiIm -NBMjETNeLiAgLiBiVlYCwCAuICAu/WACEgAC//P/LgCnAsAABwAVAAASMhYUBiImNBcRFAcGIyInN -RY3NjURWS4gIC4gYgMLcRwNSgYCAsAgLiAgLo79wCUbZAJGBzMOHgJEAAAAAQBKAAACCALfAAsAAC -EnBxUjETMREzMHEwGTwTJWVvdu9/rgN6kC3/4oAQv6/ugAAQBG//wA3gLfAA8AABMRFBceATcVBiM -iJicmNRGcAQIcIxkkKi4CAQLf/bkhERoSBD4EJC8SNAJKAAAAAQBKAAADEAIgACQAAAEWFREjETQn -JiMiFREjETQnJiMiFREjETMVNzYzMhYXNzYzMhYDCwVWBAxedFYEDF50VlYiJko7ThAvJkpEVAGfI -jn+vAEcQyRZ1v76ARxDJFnW/voCEk08HzYtRB9HAAAAAAEASgAAAgoCIAAWAAABFhURIxE0JyYjIg -YdASMRMxU3NjMyFgIIAlYCCXBEVVZWITlRVWIBhRtA/tYBJDEbbHR/6QISWz0sVAAAAAACAB7/8gI -sAiAABwARAAASIBYUBiAmNBIyNjU0JiIGFRSlAQCHh/8Ah7ieWlqeWgIgn/Cfn/D+s3ZfYHV1YF8A -AgBK/zwCRQIgABEAHgAAATIWFRQGIyImLwERIxEzFTc2EzI2NTQmIyIGHQEUFgFUcYCVbiNJEyNWV -igySElcU01JXmECIJd4i5QTEDT+8wLWVTkq/hRuZV55ZWsdX14AAgAe/zwCGQIgABEAHgAAAREjEQ -cGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZVigyWnGAlW4jSRMjp0leYUFJXFMCEv0qARk5Kpd -4i5QTEDRJ/iJlax1fXm5lXnkAAQBKAAABPgIeAA0AAAEyFxUmBhURIxEzFTc2ARoWDkdXVlYwIwIe -B0EFVlf+0gISU0cYAAEAGP/yAa0CIAAjAAATMhYXIyYjIgYVFBYXHgEVFAYjIiYnMxYzMjY1NCYnL -gE1NDbkV2MJWhNdKy04PF1XbVhWbgxaE2ktOjlEUllkAiBaS2MrJCUoEBlPQkhOVFZoKCUmLhIWSE -BIUwAAAAEAFP/4ARQCiQAXAAATERQXHgE3FQYjIiYnJjURIzUzNTMVMxWxAQMmMx8qMjMEAUdHVmM -BzP7PGw4mFgY/BSwxDjQBNUJ7e0IAAAABAEL/8gICAhIAFwAAAREjNQcGIyImJyY1ETMRFBceATMy -Nj0BAgJWITlRT2EKBVYEBkA1RFECEv3uWj4qTToiOQE+/tIlJC43c4DpAAAAAAEAAQAAAfwCEgAGA 
-AABAyMDMxsBAfzJaclfop8CEv3uAhL+LQHTAAABAAEAAAMLAhIADAAAAQMjCwEjAzMbATMbAQMLqW -Z2dmapY3t0a3Z7AhL97gG+/kICEv5AAcD+QwG9AAAB//oAAAHWAhIACwAAARMjJwcjEwMzFzczARq -8ZIuKY763ZoWFYwEO/vLV1QEMAQbNzQAAAQAB/y4B+wISABEAAAEDDgEjIic1FjMyNj8BAzMbAQH7 -2iFZQB8NDRIpNhQH02GenQIS/cFVUAJGASozEwIt/i4B0gABABQAAAGxAg4ACQAAJRUhNQEhNSEVA -QGx/mMBNP7iAYL+zkREQgGIREX+ewAAAAABAED/sAEOAwwALAAAASMiBhUUFxYVFAYHHgEVFAcGFR -QWOwEVIyImNTQ3NjU0JzU2NTQnJjU0NjsBAQ4MKiMLDS4pKS4NCyMqDAtERAwLUlILDERECwLUGBk -WTlsgKzUFBTcrIFtOFhkYOC87GFVMIkUIOAhFIkxVGDsvAAAAAAEAYP84AJoDIAADAAAXIxEzmjo6 -yAPoAAEAIf+wAO8DDAAsAAATFQYVFBcWFRQGKwE1MzI2NTQnJjU0NjcuATU0NzY1NCYrATUzMhYVF -AcGFRTvUgsMREQLDCojCw0uKSkuDQsjKgwLREQMCwF6OAhFIkxVGDsvOBgZFk5bICs1BQU3KyBbTh -YZGDgvOxhVTCJFAAABAE0A3wH2AWQAEwAAATMUIyImJyYjIhUjNDMyFhcWMzIBvjhuGywtQR0xOG4 -bLC1BHTEBZIURGCNMhREYIwAAAwAk/94DIgLoAAcAEQApAAAAIBYQBiAmECQgBhUUFiA2NTQlMhYX -IyYjIgYUFjMyNjczDgEjIiY1NDYBAQFE3d3+vN0CB/7wubkBELn+xVBnD1wSWDo+QTcqOQZcEmZWX -HN2Aujg/rbg4AFKpr+Mjb6+jYxbWEldV5ZZNShLVn5na34AAgB4AFIB9AGeAAUACwAAAQcXIyc3Mw -cXIyc3AUqJiUmJifOJiUmJiQGepqampqampqYAAAIAHAHSAQ4CwAAHAA8AABIyFhQGIiY0NiIGFBY -yNjRgakREakSTNCEhNCECwEJqQkJqCiM4IyM4AAAAAAIAUAAAAfQCCwALAA8AAAEzFSMVIzUjNTM1 -MxMhNSEBP7W1OrW1OrX+XAGkAVs4tLQ4sP31OAAAAQB0AkQBAQKyAAMAABMjNzOsOD1QAkRuAAAAA -AEAIADsAKoBdgAHAAASMhYUBiImNEg6KCg6KAF2KDooKDoAAAIAOQBSAbUBngAFAAsAACUHIzcnMw -UHIzcnMwELiUmJiUkBM4lJiYlJ+KampqampqYAAAABADYB5QDhAt8ABAAAEzczByM2Xk1OXQHv8Po -AAQAWAeUAwQLfAAQAABMHIzczwV5NTl0C1fD6AAIANgHlAYsC3wAEAAkAABM3MwcjPwEzByM2Xk1O -XapeTU5dAe/w+grw+gAAAgAWAeUBawLfAAQACQAAEwcjNzMXByM3M8FeTU5dql5NTl0C1fD6CvD6A -AADACX/8gI1AHIABwAPABcAADYyFhQGIiY0NjIWFAYiJjQ2MhYUBiImNEk4JCQ4JOw4JCQ4JOw4JC -Q4JHIkOCQkOCQkOCQkOCQkOCQkOAAAAAEAeABSAUoBngAFAAABBxcjJzcBSomJSYmJAZ6mpqamAAA -AAAEAOQBSAQsBngAFAAAlByM3JzMBC4lJiYlJ+KampgAAAf9qAAABgQKyAAMAACsBATM/VwHAVwKy -AAAAAAIAFAHIAdwClAAHABQAABMVIxUjNSM1BRUjNwcjJxcjNTMXN9pKMkoByDICKzQqATJLKysCl -CmjoykBy46KiY3Lm5sAAQAVAAABvALyABgAAAERIxEjESMRIzUzNTQ3NjMyFxUmBgcGHQEBvFbCVj -k5AxHHHx5iVgcDAg798gHM/jQBzEIOJRuWBUcIJDAVIRYAAAABABX//AHkAvIAJQAAJR4BNxUGIyI -mJyY1ESYjIgcGHQEzFSMRIxEjNTM1NDc2MzIXERQBowIcIxkkKi4CAR4nXgwDbW1WLy8DEbNdOmYa -EQQ/BCQvEjQCFQZWFSEWQv40AcxCDiUblhP9uSEAAAAAAAAWAQ4AAQAAAAAAAAATACgAAQAAAAAAA -QAHAEwAAQAAAAAAAgAHAGQAAQAAAAAAAwAaAKIAAQAAAAAABAAHAM0AAQAAAAAABQA8AU8AAQAAAA -AABgAPAawAAQAAAAAACAALAdQAAQAAAAAACQALAfgAAQAAAAAACwAXAjQAAQAAAAAADAAXAnwAAwA -BBAkAAAAmAAAAAwABBAkAAQAOADwAAwABBAkAAgAOAFQAAwABBAkAAwA0AGwAAwABBAkABAAOAL0A -AwABBAkABQB4ANUAAwABBAkABgAeAYwAAwABBAkACAAWAbwAAwABBAkACQAWAeAAAwABBAkACwAuA -gQAAwABBAkADAAuAkwATgBvACAAUgBpAGcAaAB0AHMAIABSAGUAcwBlAHIAdgBlAGQALgAATm8gUm -lnaHRzIFJlc2VydmVkLgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAUgBlAGcAdQBsAGEAcgAAUmV -ndWxhcgAAMQAuADEAMAAyADsAVQBLAFcATgA7AEEAaQBsAGUAcgBvAG4ALQBSAGUAZwB1AGwAYQBy -AAAxLjEwMjtVS1dOO0FpbGVyb24tUmVndWxhcgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAVgBlA -HIAcwBpAG8AbgAgADEALgAxADAAMgA7AFAAUwAgADAAMAAxAC4AMQAwADIAOwBoAG8AdABjAG8Abg -B2ACAAMQAuADAALgA3ADAAOwBtAGEAawBlAG8AdABmAC4AbABpAGIAMgAuADUALgA1ADgAMwAyADk -AAFZlcnNpb24gMS4xMDI7UFMgMDAxLjEwMjtob3Rjb252IDEuMC43MDttYWtlb3RmLmxpYjIuNS41 -ODMyOQAAQQBpAGwAZQByAG8AbgAtAFIAZQBnAHUAbABhAHIAAEFpbGVyb24tUmVndWxhcgAAUwBvA -HIAYQAgAFMAYQBnAGEAbgBvAABTb3JhIFNhZ2FubwAAUwBvAHIAYQAgAFMAYQBnAGEAbgBvAABTb3 -JhIFNhZ2FubwAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBsAG8AbgAuAG4AZQB0AAB -odHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBs -AG8AbgAuAG4AZQB0AABodHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAAAACAAAAAAAA/4MAMgAAAAAAA -AAAAAAAAAAAAAAAAAAAAHQAAAABAAIAAwAEAAUABgAHAAgACQAKAAsADAANAA4ADwAQABEAEgATAB 
-QAFQAWABcAGAAZABoAGwAcAB0AHgAfACAAIQAiACMAJAAlACYAJwAoACkAKgArACwALQAuAC8AMAA -xADIAMwA0ADUANgA3ADgAOQA6ADsAPAA9AD4APwBAAEEAQgBDAEQARQBGAEcASABJAEoASwBMAE0A -TgBPAFAAUQBSAFMAVABVAFYAVwBYAFkAWgBbAFwAXQBeAF8AYABhAIsAqQCDAJMAjQDDAKoAtgC3A -LQAtQCrAL4AvwC8AIwAwADBAAAAAAAB//8AAgABAAAADAAAABwAAAACAAIAAwBxAAEAcgBzAAIABA -AAAAIAAAABAAAACgBMAGYAAkRGTFQADmxhdG4AGgAEAAAAAP//AAEAAAAWAANDQVQgAB5NT0wgABZ -ST00gABYAAP//AAEAAAAA//8AAgAAAAEAAmxpZ2EADmxvY2wAFAAAAAEAAQAAAAEAAAACAAYAEAAG -AAAAAgASADQABAAAAAEATAADAAAAAgAQABYAAQAcAAAAAQABAE8AAQABAGcAAQABAE8AAwAAAAIAE -AAWAAEAHAAAAAEAAQAvAAEAAQBnAAEAAQAvAAEAGgABAAgAAgAGAAwAcwACAE8AcgACAEwAAQABAE -kAAAABAAAACgBGAGAAAkRGTFQADmxhdG4AHAAEAAAAAP//AAIAAAABABYAA0NBVCAAFk1PTCAAFlJ -PTSAAFgAA//8AAgAAAAEAAmNwc3AADmtlcm4AFAAAAAEAAAAAAAEAAQACAAYADgABAAAAAQASAAIA -AAACAB4ANgABAAoABQAFAAoAAgABACQAPQAAAAEAEgAEAAAAAQAMAAEAOP/nAAEAAQAkAAIGigAEA -AAFJAXKABoAGQAA//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAD/sv+4/+z/7v/MAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAD/xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/9T/6AAAAAD/8QAA -ABD/vQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/7gAAAAAAAAAAAAAAAAAA//MAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABIAAAAAAAAAAP/5AAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/gAAD/4AAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//L/9AAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAA/+gAAAAAAAkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/zAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/mAAAAAAAAAAAAAAAAAAD -/4gAA//AAAAAA//YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAP/OAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/zv/qAAAAAP/0AAAACAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/ZAAD/egAA/1kAAAAA/5D/rgAAAAAAAAAAAA -AAAAAAAAAAAAAAAAD/9AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAD/8AAA/7b/8P+wAAD/8P/E/98AAAAA/8P/+P/0//oAAAAAAAAAAAAA//gA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/w//C/9MAAP/SAAD/9wAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/yAAA/+kAAAAA//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/9wAAAAD//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAP/2AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAP/cAAAAAAAAAAAAAAAA/7YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/6AAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAkAFAAEAAAAAQACwAAABcA -BgAAAAAAAAAIAA4AAAAAAAsAEgAAAAAAAAATABkAAwANAAAAAQAJAAAAAAAAAAAAAAAAAAAAGAAAA -AAABwAAAAAAAAAAAAAAFQAFAAAAAAAYABgAAAAUAAAACgAAAAwAAgAPABEAFgAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAAEAEQBdAAYAAAAAAAAAAAAAAAAAAAAAAAA -AAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAcAAAAAAAAABwAAAAAACAAAAAAAAAAAAAcAAAAHAAAAEwAJ -ABUADgAPAAAACwAQAAAAAAAAAAAAAAAAAAUAGAACAAIAAgAAAAIAGAAXAAAAGAAAABYAFgACABYAA -gAWAAAAEQADAAoAFAAMAA0ABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAAAAEgAGAAEAHgAkAC -YAJwApACoALQAuAC8AMgAzADcAOAA5ADoAPAA9AEUASABOAE8AUgBTAFUAVwBZAFoAWwBcAF0AcwA -AAAAAAQAAAADa3tfFAAAAANAan9kAAAAA4QodoQ== -""" - ) - ), - 10 if size is None else size, - layout_engine=Layout.BASIC, - ) - return load_default_imagefont() diff --git a/venv/Lib/site-packages/PIL/ImageGrab.py b/venv/Lib/site-packages/PIL/ImageGrab.py deleted file mode 100644 index e27ca7e..0000000 --- 
a/venv/Lib/site-packages/PIL/ImageGrab.py +++ /dev/null @@ -1,194 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# screen grabber -# -# History: -# 2001-04-26 fl created -# 2001-09-17 fl use builtin driver, if present -# 2002-11-19 fl added grabclipboard support -# -# Copyright (c) 2001-2002 by Secret Labs AB -# Copyright (c) 2001-2002 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -import os -import shutil -import subprocess -import sys -import tempfile - -from . import Image - - -def grab( - bbox: tuple[int, int, int, int] | None = None, - include_layered_windows: bool = False, - all_screens: bool = False, - xdisplay: str | None = None, -) -> Image.Image: - im: Image.Image - if xdisplay is None: - if sys.platform == "darwin": - fh, filepath = tempfile.mkstemp(".png") - os.close(fh) - args = ["screencapture"] - if bbox: - left, top, right, bottom = bbox - args += ["-R", f"{left},{top},{right-left},{bottom-top}"] - subprocess.call(args + ["-x", filepath]) - im = Image.open(filepath) - im.load() - os.unlink(filepath) - if bbox: - im_resized = im.resize((right - left, bottom - top)) - im.close() - return im_resized - return im - elif sys.platform == "win32": - offset, size, data = Image.core.grabscreen_win32( - include_layered_windows, all_screens - ) - im = Image.frombytes( - "RGB", - size, - data, - # RGB, 32-bit line padding, origin lower left corner - "raw", - "BGR", - (size[0] * 3 + 3) & -4, - -1, - ) - if bbox: - x0, y0 = offset - left, top, right, bottom = bbox - im = im.crop((left - x0, top - y0, right - x0, bottom - y0)) - return im - # Cast to Optional[str] needed for Windows and macOS. - display_name: str | None = xdisplay - try: - if not Image.core.HAVE_XCB: - msg = "Pillow was built without XCB support" - raise OSError(msg) - size, data = Image.core.grabscreen_x11(display_name) - except OSError: - if ( - display_name is None - and sys.platform not in ("darwin", "win32") - and shutil.which("gnome-screenshot") - ): - fh, filepath = tempfile.mkstemp(".png") - os.close(fh) - subprocess.call(["gnome-screenshot", "-f", filepath]) - im = Image.open(filepath) - im.load() - os.unlink(filepath) - if bbox: - im_cropped = im.crop(bbox) - im.close() - return im_cropped - return im - else: - raise - else: - im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1) - if bbox: - im = im.crop(bbox) - return im - - -def grabclipboard() -> Image.Image | list[str] | None: - if sys.platform == "darwin": - fh, filepath = tempfile.mkstemp(".png") - os.close(fh) - commands = [ - 'set theFile to (open for access POSIX file "' - + filepath - + '" with write permission)', - "try", - " write (the clipboard as «class PNGf») to theFile", - "end try", - "close access theFile", - ] - script = ["osascript"] - for command in commands: - script += ["-e", command] - subprocess.call(script) - - im = None - if os.stat(filepath).st_size != 0: - im = Image.open(filepath) - im.load() - os.unlink(filepath) - return im - elif sys.platform == "win32": - fmt, data = Image.core.grabclipboard_win32() - if fmt == "file": # CF_HDROP - import struct - - o = struct.unpack_from("I", data)[0] - if data[16] != 0: - files = data[o:].decode("utf-16le").split("\0") - else: - files = data[o:].decode("mbcs").split("\0") - return files[: files.index("")] - if isinstance(data, bytes): - data = io.BytesIO(data) - if fmt == "png": - from . 
import PngImagePlugin - - return PngImagePlugin.PngImageFile(data) - elif fmt == "DIB": - from . import BmpImagePlugin - - return BmpImagePlugin.DibImageFile(data) - return None - else: - if os.getenv("WAYLAND_DISPLAY"): - session_type = "wayland" - elif os.getenv("DISPLAY"): - session_type = "x11" - else: # Session type check failed - session_type = None - - if shutil.which("wl-paste") and session_type in ("wayland", None): - args = ["wl-paste", "-t", "image"] - elif shutil.which("xclip") and session_type in ("x11", None): - args = ["xclip", "-selection", "clipboard", "-t", "image/png", "-o"] - else: - msg = "wl-paste or xclip is required for ImageGrab.grabclipboard() on Linux" - raise NotImplementedError(msg) - - p = subprocess.run(args, capture_output=True) - if p.returncode != 0: - err = p.stderr - for silent_error in [ - # wl-paste, when the clipboard is empty - b"Nothing is copied", - # Ubuntu/Debian wl-paste, when the clipboard is empty - b"No selection", - # Ubuntu/Debian wl-paste, when an image isn't available - b"No suitable type of content copied", - # wl-paste or Ubuntu/Debian xclip, when an image isn't available - b" not available", - # xclip, when an image isn't available - b"cannot convert ", - # xclip, when the clipboard isn't initialized - b"xclip: Error: There is no owner for the ", - ]: - if silent_error in err: - return None - msg = f"{args[0]} error" - if err: - msg += f": {err.strip().decode()}" - raise ChildProcessError(msg) - - data = io.BytesIO(p.stdout) - im = Image.open(data) - im.load() - return im diff --git a/venv/Lib/site-packages/PIL/ImageMath.py b/venv/Lib/site-packages/PIL/ImageMath.py deleted file mode 100644 index 6664434..0000000 --- a/venv/Lib/site-packages/PIL/ImageMath.py +++ /dev/null @@ -1,357 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# a simple math add-on for the Python Imaging Library -# -# History: -# 1999-02-15 fl Original PIL Plus release -# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 -# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 -# -# Copyright (c) 1999-2005 by Secret Labs AB -# Copyright (c) 2005 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import builtins -from types import CodeType -from typing import Any, Callable - -from . import Image, _imagingmath -from ._deprecate import deprecate - - -class _Operand: - """Wraps an image operand, providing standard operators""" - - def __init__(self, im: Image.Image): - self.im = im - - def __fixup(self, im1: _Operand | float) -> Image.Image: - # convert image to suitable mode - if isinstance(im1, _Operand): - # argument was an image. 
- if im1.im.mode in ("1", "L"): - return im1.im.convert("I") - elif im1.im.mode in ("I", "F"): - return im1.im - else: - msg = f"unsupported mode: {im1.im.mode}" - raise ValueError(msg) - else: - # argument was a constant - if isinstance(im1, (int, float)) and self.im.mode in ("1", "L", "I"): - return Image.new("I", self.im.size, im1) - else: - return Image.new("F", self.im.size, im1) - - def apply( - self, - op: str, - im1: _Operand | float, - im2: _Operand | float | None = None, - mode: str | None = None, - ) -> _Operand: - im_1 = self.__fixup(im1) - if im2 is None: - # unary operation - out = Image.new(mode or im_1.mode, im_1.size, None) - im_1.load() - try: - op = getattr(_imagingmath, f"{op}_{im_1.mode}") - except AttributeError as e: - msg = f"bad operand type for '{op}'" - raise TypeError(msg) from e - _imagingmath.unop(op, out.im.id, im_1.im.id) - else: - # binary operation - im_2 = self.__fixup(im2) - if im_1.mode != im_2.mode: - # convert both arguments to floating point - if im_1.mode != "F": - im_1 = im_1.convert("F") - if im_2.mode != "F": - im_2 = im_2.convert("F") - if im_1.size != im_2.size: - # crop both arguments to a common size - size = ( - min(im_1.size[0], im_2.size[0]), - min(im_1.size[1], im_2.size[1]), - ) - if im_1.size != size: - im_1 = im_1.crop((0, 0) + size) - if im_2.size != size: - im_2 = im_2.crop((0, 0) + size) - out = Image.new(mode or im_1.mode, im_1.size, None) - im_1.load() - im_2.load() - try: - op = getattr(_imagingmath, f"{op}_{im_1.mode}") - except AttributeError as e: - msg = f"bad operand type for '{op}'" - raise TypeError(msg) from e - _imagingmath.binop(op, out.im.id, im_1.im.id, im_2.im.id) - return _Operand(out) - - # unary operators - def __bool__(self) -> bool: - # an image is "true" if it contains at least one non-zero pixel - return self.im.getbbox() is not None - - def __abs__(self) -> _Operand: - return self.apply("abs", self) - - def __pos__(self) -> _Operand: - return self - - def __neg__(self) -> _Operand: - return self.apply("neg", self) - - # binary operators - def __add__(self, other: _Operand | float) -> _Operand: - return self.apply("add", self, other) - - def __radd__(self, other: _Operand | float) -> _Operand: - return self.apply("add", other, self) - - def __sub__(self, other: _Operand | float) -> _Operand: - return self.apply("sub", self, other) - - def __rsub__(self, other: _Operand | float) -> _Operand: - return self.apply("sub", other, self) - - def __mul__(self, other: _Operand | float) -> _Operand: - return self.apply("mul", self, other) - - def __rmul__(self, other: _Operand | float) -> _Operand: - return self.apply("mul", other, self) - - def __truediv__(self, other: _Operand | float) -> _Operand: - return self.apply("div", self, other) - - def __rtruediv__(self, other: _Operand | float) -> _Operand: - return self.apply("div", other, self) - - def __mod__(self, other: _Operand | float) -> _Operand: - return self.apply("mod", self, other) - - def __rmod__(self, other: _Operand | float) -> _Operand: - return self.apply("mod", other, self) - - def __pow__(self, other: _Operand | float) -> _Operand: - return self.apply("pow", self, other) - - def __rpow__(self, other: _Operand | float) -> _Operand: - return self.apply("pow", other, self) - - # bitwise - def __invert__(self) -> _Operand: - return self.apply("invert", self) - - def __and__(self, other: _Operand | float) -> _Operand: - return self.apply("and", self, other) - - def __rand__(self, other: _Operand | float) -> _Operand: - return self.apply("and", other, self) - 
- def __or__(self, other: _Operand | float) -> _Operand: - return self.apply("or", self, other) - - def __ror__(self, other: _Operand | float) -> _Operand: - return self.apply("or", other, self) - - def __xor__(self, other: _Operand | float) -> _Operand: - return self.apply("xor", self, other) - - def __rxor__(self, other: _Operand | float) -> _Operand: - return self.apply("xor", other, self) - - def __lshift__(self, other: _Operand | float) -> _Operand: - return self.apply("lshift", self, other) - - def __rshift__(self, other: _Operand | float) -> _Operand: - return self.apply("rshift", self, other) - - # logical - def __eq__(self, other): - return self.apply("eq", self, other) - - def __ne__(self, other): - return self.apply("ne", self, other) - - def __lt__(self, other: _Operand | float) -> _Operand: - return self.apply("lt", self, other) - - def __le__(self, other: _Operand | float) -> _Operand: - return self.apply("le", self, other) - - def __gt__(self, other: _Operand | float) -> _Operand: - return self.apply("gt", self, other) - - def __ge__(self, other: _Operand | float) -> _Operand: - return self.apply("ge", self, other) - - -# conversions -def imagemath_int(self: _Operand) -> _Operand: - return _Operand(self.im.convert("I")) - - -def imagemath_float(self: _Operand) -> _Operand: - return _Operand(self.im.convert("F")) - - -# logical -def imagemath_equal(self: _Operand, other: _Operand | float | None) -> _Operand: - return self.apply("eq", self, other, mode="I") - - -def imagemath_notequal(self: _Operand, other: _Operand | float | None) -> _Operand: - return self.apply("ne", self, other, mode="I") - - -def imagemath_min(self: _Operand, other: _Operand | float | None) -> _Operand: - return self.apply("min", self, other) - - -def imagemath_max(self: _Operand, other: _Operand | float | None) -> _Operand: - return self.apply("max", self, other) - - -def imagemath_convert(self: _Operand, mode: str) -> _Operand: - return _Operand(self.im.convert(mode)) - - -ops = { - "int": imagemath_int, - "float": imagemath_float, - "equal": imagemath_equal, - "notequal": imagemath_notequal, - "min": imagemath_min, - "max": imagemath_max, - "convert": imagemath_convert, -} - - -def lambda_eval( - expression: Callable[[dict[str, Any]], Any], - options: dict[str, Any] = {}, - **kw: Any, -) -> Any: - """ - Returns the result of an image function. - - :py:mod:`~PIL.ImageMath` only supports single-layer images. To process multi-band - images, use the :py:meth:`~PIL.Image.Image.split` method or - :py:func:`~PIL.Image.merge` function. - - :param expression: A function that receives a dictionary. - :param options: Values to add to the function's dictionary. You - can either use a dictionary, or one or more keyword - arguments. - :return: The expression result. This is usually an image object, but can - also be an integer, a floating point value, or a pixel tuple, - depending on the expression. - """ - - args: dict[str, Any] = ops.copy() - args.update(options) - args.update(kw) - for k, v in args.items(): - if hasattr(v, "im"): - args[k] = _Operand(v) - - out = expression(args) - try: - return out.im - except AttributeError: - return out - - -def unsafe_eval( - expression: str, - options: dict[str, Any] = {}, - **kw: Any, -) -> Any: - """ - Evaluates an image expression. This uses Python's ``eval()`` function to process - the expression string, and carries the security risks of doing so. It is not - recommended to process expressions without considering this. 
- :py:meth:`~lambda_eval` is a more secure alternative. - - :py:mod:`~PIL.ImageMath` only supports single-layer images. To process multi-band - images, use the :py:meth:`~PIL.Image.Image.split` method or - :py:func:`~PIL.Image.merge` function. - - :param expression: A string containing a Python-style expression. - :param options: Values to add to the evaluation context. You - can either use a dictionary, or one or more keyword - arguments. - :return: The evaluated expression. This is usually an image object, but can - also be an integer, a floating point value, or a pixel tuple, - depending on the expression. - """ - - # build execution namespace - args: dict[str, Any] = ops.copy() - for k in list(options.keys()) + list(kw.keys()): - if "__" in k or hasattr(builtins, k): - msg = f"'{k}' not allowed" - raise ValueError(msg) - - args.update(options) - args.update(kw) - for k, v in args.items(): - if hasattr(v, "im"): - args[k] = _Operand(v) - - compiled_code = compile(expression, "", "eval") - - def scan(code: CodeType) -> None: - for const in code.co_consts: - if type(const) is type(compiled_code): - scan(const) - - for name in code.co_names: - if name not in args and name != "abs": - msg = f"'{name}' not allowed" - raise ValueError(msg) - - scan(compiled_code) - out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args) - try: - return out.im - except AttributeError: - return out - - -def eval( - expression: str, - _dict: dict[str, Any] = {}, - **kw: Any, -) -> Any: - """ - Evaluates an image expression. - - Deprecated. Use lambda_eval() or unsafe_eval() instead. - - :param expression: A string containing a Python-style expression. - :param _dict: Values to add to the evaluation context. You - can either use a dictionary, or one or more keyword - arguments. - :return: The evaluated expression. This is usually an image object, but can - also be an integer, a floating point value, or a pixel tuple, - depending on the expression. - - .. deprecated:: 10.3.0 - """ - - deprecate( - "ImageMath.eval", - 12, - "ImageMath.lambda_eval or ImageMath.unsafe_eval", - ) - return unsafe_eval(expression, _dict, **kw) diff --git a/venv/Lib/site-packages/PIL/ImageMode.py b/venv/Lib/site-packages/PIL/ImageMode.py deleted file mode 100644 index 92a08d2..0000000 --- a/venv/Lib/site-packages/PIL/ImageMode.py +++ /dev/null @@ -1,92 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# standard mode descriptors -# -# History: -# 2006-03-20 fl Added -# -# Copyright (c) 2006 by Secret Labs AB. -# Copyright (c) 2006 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import sys -from functools import lru_cache -from typing import NamedTuple - -from ._deprecate import deprecate - - -class ModeDescriptor(NamedTuple): - """Wrapper for mode strings.""" - - mode: str - bands: tuple[str, ...] 
- basemode: str - basetype: str - typestr: str - - def __str__(self) -> str: - return self.mode - - -@lru_cache -def getmode(mode: str) -> ModeDescriptor: - """Gets a mode descriptor for the given mode.""" - endian = "<" if sys.byteorder == "little" else ">" - - modes = { - # core modes - # Bits need to be extended to bytes - "1": ("L", "L", ("1",), "|b1"), - "L": ("L", "L", ("L",), "|u1"), - "I": ("L", "I", ("I",), f"{endian}i4"), - "F": ("L", "F", ("F",), f"{endian}f4"), - "P": ("P", "L", ("P",), "|u1"), - "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"), - "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"), - "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"), - "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"), - "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"), - # UNDONE - unsigned |u1i1i1 - "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"), - "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"), - # extra experimental modes - "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"), - "BGR;15": ("RGB", "L", ("B", "G", "R"), "|u1"), - "BGR;16": ("RGB", "L", ("B", "G", "R"), "|u1"), - "BGR;24": ("RGB", "L", ("B", "G", "R"), "|u1"), - "LA": ("L", "L", ("L", "A"), "|u1"), - "La": ("L", "L", ("L", "a"), "|u1"), - "PA": ("RGB", "L", ("P", "A"), "|u1"), - } - if mode in modes: - if mode in ("BGR;15", "BGR;16", "BGR;24"): - deprecate(mode, 12) - base_mode, base_type, bands, type_str = modes[mode] - return ModeDescriptor(mode, bands, base_mode, base_type, type_str) - - mapping_modes = { - # I;16 == I;16L, and I;32 == I;32L - "I;16": "u2", - "I;16BS": ">i2", - "I;16N": f"{endian}u2", - "I;16NS": f"{endian}i2", - "I;32": "u4", - "I;32L": "i4", - "I;32LS": " -from __future__ import annotations - -import re - -from . import Image, _imagingmorph - -LUT_SIZE = 1 << 9 - -# fmt: off -ROTATION_MATRIX = [ - 6, 3, 0, - 7, 4, 1, - 8, 5, 2, -] -MIRROR_MATRIX = [ - 2, 1, 0, - 5, 4, 3, - 8, 7, 6, -] -# fmt: on - - -class LutBuilder: - """A class for building a MorphLut from a descriptive language - - The input patterns is a list of a strings sequences like these:: - - 4:(... - .1. - 111)->1 - - (whitespaces including linebreaks are ignored). The option 4 - describes a series of symmetry operations (in this case a - 4-rotation), the pattern is described by: - - - . or X - Ignore - - 1 - Pixel is on - - 0 - Pixel is off - - The result of the operation is described after "->" string. - - The default is to return the current pixel value, which is - returned if no other match is found. - - Operations: - - - 4 - 4 way rotation - - N - Negate - - 1 - Dummy op for no other operation (an op must always be given) - - M - Mirroring - - Example:: - - lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) - lut = lb.build_lut() - - """ - - def __init__( - self, patterns: list[str] | None = None, op_name: str | None = None - ) -> None: - if patterns is not None: - self.patterns = patterns - else: - self.patterns = [] - self.lut: bytearray | None = None - if op_name is not None: - known_patterns = { - "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"], - "dilation4": ["4:(... .0. .1.)->1"], - "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"], - "erosion4": ["4:(... .1. .0.)->0"], - "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"], - "edge": [ - "1:(... ... ...)->0", - "4:(.0. .1. ...)->1", - "4:(01. .1. ...)->1", - ], - } - if op_name not in known_patterns: - msg = f"Unknown pattern {op_name}!" 
- raise Exception(msg) - - self.patterns = known_patterns[op_name] - - def add_patterns(self, patterns: list[str]) -> None: - self.patterns += patterns - - def build_default_lut(self) -> None: - symbols = [0, 1] - m = 1 << 4 # pos of current pixel - self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE)) - - def get_lut(self) -> bytearray | None: - return self.lut - - def _string_permute(self, pattern: str, permutation: list[int]) -> str: - """string_permute takes a pattern and a permutation and returns the - string permuted according to the permutation list. - """ - assert len(permutation) == 9 - return "".join(pattern[p] for p in permutation) - - def _pattern_permute( - self, basic_pattern: str, options: str, basic_result: int - ) -> list[tuple[str, int]]: - """pattern_permute takes a basic pattern and its result and clones - the pattern according to the modifications described in the $options - parameter. It returns a list of all cloned patterns.""" - patterns = [(basic_pattern, basic_result)] - - # rotations - if "4" in options: - res = patterns[-1][1] - for i in range(4): - patterns.append( - (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res) - ) - # mirror - if "M" in options: - n = len(patterns) - for pattern, res in patterns[:n]: - patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res)) - - # negate - if "N" in options: - n = len(patterns) - for pattern, res in patterns[:n]: - # Swap 0 and 1 - pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1") - res = 1 - int(res) - patterns.append((pattern, res)) - - return patterns - - def build_lut(self) -> bytearray: - """Compile all patterns into a morphology lut. - - TBD :Build based on (file) morphlut:modify_lut - """ - self.build_default_lut() - assert self.lut is not None - patterns = [] - - # Parse and create symmetries of the patterns strings - for p in self.patterns: - m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", "")) - if not m: - msg = 'Syntax error in pattern "' + p + '"' - raise Exception(msg) - options = m.group(1) - pattern = m.group(2) - result = int(m.group(3)) - - # Get rid of spaces - pattern = pattern.replace(" ", "").replace("\n", "") - - patterns += self._pattern_permute(pattern, options, result) - - # compile the patterns into regular expressions for speed - compiled_patterns = [] - for pattern in patterns: - p = pattern[0].replace(".", "X").replace("X", "[01]") - compiled_patterns.append((re.compile(p), pattern[1])) - - # Step through table and find patterns that match. - # Note that all the patterns are searched. 
The last one - # caught overrides - for i in range(LUT_SIZE): - # Build the bit pattern - bitpattern = bin(i)[2:] - bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1] - - for pattern, r in compiled_patterns: - if pattern.match(bitpattern): - self.lut[i] = [0, 1][r] - - return self.lut - - -class MorphOp: - """A class for binary morphological operators""" - - def __init__( - self, - lut: bytearray | None = None, - op_name: str | None = None, - patterns: list[str] | None = None, - ) -> None: - """Create a binary morphological operator""" - self.lut = lut - if op_name is not None: - self.lut = LutBuilder(op_name=op_name).build_lut() - elif patterns is not None: - self.lut = LutBuilder(patterns=patterns).build_lut() - - def apply(self, image: Image.Image) -> tuple[int, Image.Image]: - """Run a single morphological operation on an image - - Returns a tuple of the number of changed pixels and the - morphed image""" - if self.lut is None: - msg = "No operator loaded" - raise Exception(msg) - - if image.mode != "L": - msg = "Image mode must be L" - raise ValueError(msg) - outimage = Image.new(image.mode, image.size, None) - count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id) - return count, outimage - - def match(self, image: Image.Image) -> list[tuple[int, int]]: - """Get a list of coordinates matching the morphological operation on - an image. - - Returns a list of tuples of (x,y) coordinates - of all matching pixels. See :ref:`coordinate-system`.""" - if self.lut is None: - msg = "No operator loaded" - raise Exception(msg) - - if image.mode != "L": - msg = "Image mode must be L" - raise ValueError(msg) - return _imagingmorph.match(bytes(self.lut), image.im.id) - - def get_on_pixels(self, image: Image.Image) -> list[tuple[int, int]]: - """Get a list of all turned on pixels in a binary image - - Returns a list of tuples of (x,y) coordinates - of all matching pixels. See :ref:`coordinate-system`.""" - - if image.mode != "L": - msg = "Image mode must be L" - raise ValueError(msg) - return _imagingmorph.get_on_pixels(image.im.id) - - def load_lut(self, filename: str) -> None: - """Load an operator from an mrl file""" - with open(filename, "rb") as f: - self.lut = bytearray(f.read()) - - if len(self.lut) != LUT_SIZE: - self.lut = None - msg = "Wrong size operator file!" - raise Exception(msg) - - def save_lut(self, filename: str) -> None: - """Save an operator to an mrl file""" - if self.lut is None: - msg = "No operator loaded" - raise Exception(msg) - with open(filename, "wb") as f: - f.write(self.lut) - - def set_lut(self, lut: bytearray | None) -> None: - """Set the lut from an external source""" - self.lut = lut diff --git a/venv/Lib/site-packages/PIL/ImageOps.py b/venv/Lib/site-packages/PIL/ImageOps.py deleted file mode 100644 index a84c083..0000000 --- a/venv/Lib/site-packages/PIL/ImageOps.py +++ /dev/null @@ -1,728 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# standard image operations -# -# History: -# 2001-10-20 fl Created -# 2001-10-23 fl Added autocontrast operator -# 2001-12-18 fl Added Kevin's fit operator -# 2004-03-14 fl Fixed potential division by zero in equalize -# 2005-05-05 fl Fixed equalize for low number of values -# -# Copyright (c) 2001-2004 by Secret Labs AB -# Copyright (c) 2001-2004 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import functools -import operator -import re -from typing import Protocol, Sequence, cast - -from . 
import ExifTags, Image, ImagePalette - -# -# helpers - - -def _border(border: int | tuple[int, ...]) -> tuple[int, int, int, int]: - if isinstance(border, tuple): - if len(border) == 2: - left, top = right, bottom = border - elif len(border) == 4: - left, top, right, bottom = border - else: - left = top = right = bottom = border - return left, top, right, bottom - - -def _color(color: str | int | tuple[int, ...], mode: str) -> int | tuple[int, ...]: - if isinstance(color, str): - from . import ImageColor - - color = ImageColor.getcolor(color, mode) - return color - - -def _lut(image: Image.Image, lut: list[int]) -> Image.Image: - if image.mode == "P": - # FIXME: apply to lookup table, not image data - msg = "mode P support coming soon" - raise NotImplementedError(msg) - elif image.mode in ("L", "RGB"): - if image.mode == "RGB" and len(lut) == 256: - lut = lut + lut + lut - return image.point(lut) - else: - msg = f"not supported for mode {image.mode}" - raise OSError(msg) - - -# -# actions - - -def autocontrast( - image: Image.Image, - cutoff: float | tuple[float, float] = 0, - ignore: int | Sequence[int] | None = None, - mask: Image.Image | None = None, - preserve_tone: bool = False, -) -> Image.Image: - """ - Maximize (normalize) image contrast. This function calculates a - histogram of the input image (or mask region), removes ``cutoff`` percent of the - lightest and darkest pixels from the histogram, and remaps the image - so that the darkest pixel becomes black (0), and the lightest - becomes white (255). - - :param image: The image to process. - :param cutoff: The percent to cut off from the histogram on the low and - high ends. Either a tuple of (low, high), or a single - number for both. - :param ignore: The background pixel value (use None for no background). - :param mask: Histogram used in contrast operation is computed using pixels - within the mask. If no mask is given the entire image is used - for histogram computation. - :param preserve_tone: Preserve image tone in Photoshop-like style autocontrast. - - .. versionadded:: 8.2.0 - - :return: An image. 
- """ - if preserve_tone: - histogram = image.convert("L").histogram(mask) - else: - histogram = image.histogram(mask) - - lut = [] - for layer in range(0, len(histogram), 256): - h = histogram[layer : layer + 256] - if ignore is not None: - # get rid of outliers - if isinstance(ignore, int): - h[ignore] = 0 - else: - for ix in ignore: - h[ix] = 0 - if cutoff: - # cut off pixels from both ends of the histogram - if not isinstance(cutoff, tuple): - cutoff = (cutoff, cutoff) - # get number of pixels - n = 0 - for ix in range(256): - n = n + h[ix] - # remove cutoff% pixels from the low end - cut = int(n * cutoff[0] // 100) - for lo in range(256): - if cut > h[lo]: - cut = cut - h[lo] - h[lo] = 0 - else: - h[lo] -= cut - cut = 0 - if cut <= 0: - break - # remove cutoff% samples from the high end - cut = int(n * cutoff[1] // 100) - for hi in range(255, -1, -1): - if cut > h[hi]: - cut = cut - h[hi] - h[hi] = 0 - else: - h[hi] -= cut - cut = 0 - if cut <= 0: - break - # find lowest/highest samples after preprocessing - for lo in range(256): - if h[lo]: - break - for hi in range(255, -1, -1): - if h[hi]: - break - if hi <= lo: - # don't bother - lut.extend(list(range(256))) - else: - scale = 255.0 / (hi - lo) - offset = -lo * scale - for ix in range(256): - ix = int(ix * scale + offset) - if ix < 0: - ix = 0 - elif ix > 255: - ix = 255 - lut.append(ix) - return _lut(image, lut) - - -def colorize( - image: Image.Image, - black: str | tuple[int, ...], - white: str | tuple[int, ...], - mid: str | int | tuple[int, ...] | None = None, - blackpoint: int = 0, - whitepoint: int = 255, - midpoint: int = 127, -) -> Image.Image: - """ - Colorize grayscale image. - This function calculates a color wedge which maps all black pixels in - the source image to the first color and all white pixels to the - second color. If ``mid`` is specified, it uses three-color mapping. - The ``black`` and ``white`` arguments should be RGB tuples or color names; - optionally you can use three-color mapping by also specifying ``mid``. - Mapping positions for any of the colors can be specified - (e.g. ``blackpoint``), where these parameters are the integer - value corresponding to where the corresponding color should be mapped. - These parameters must have logical order, such that - ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified). - - :param image: The image to colorize. - :param black: The color to use for black input pixels. - :param white: The color to use for white input pixels. - :param mid: The color to use for midtone input pixels. - :param blackpoint: an int value [0, 255] for the black mapping. - :param whitepoint: an int value [0, 255] for the white mapping. - :param midpoint: an int value [0, 255] for the midtone mapping. - :return: An image. 
- """ - - # Initial asserts - assert image.mode == "L" - if mid is None: - assert 0 <= blackpoint <= whitepoint <= 255 - else: - assert 0 <= blackpoint <= midpoint <= whitepoint <= 255 - - # Define colors from arguments - rgb_black = cast(Sequence[int], _color(black, "RGB")) - rgb_white = cast(Sequence[int], _color(white, "RGB")) - rgb_mid = cast(Sequence[int], _color(mid, "RGB")) if mid is not None else None - - # Empty lists for the mapping - red = [] - green = [] - blue = [] - - # Create the low-end values - for i in range(0, blackpoint): - red.append(rgb_black[0]) - green.append(rgb_black[1]) - blue.append(rgb_black[2]) - - # Create the mapping (2-color) - if rgb_mid is None: - range_map = range(0, whitepoint - blackpoint) - - for i in range_map: - red.append( - rgb_black[0] + i * (rgb_white[0] - rgb_black[0]) // len(range_map) - ) - green.append( - rgb_black[1] + i * (rgb_white[1] - rgb_black[1]) // len(range_map) - ) - blue.append( - rgb_black[2] + i * (rgb_white[2] - rgb_black[2]) // len(range_map) - ) - - # Create the mapping (3-color) - else: - range_map1 = range(0, midpoint - blackpoint) - range_map2 = range(0, whitepoint - midpoint) - - for i in range_map1: - red.append( - rgb_black[0] + i * (rgb_mid[0] - rgb_black[0]) // len(range_map1) - ) - green.append( - rgb_black[1] + i * (rgb_mid[1] - rgb_black[1]) // len(range_map1) - ) - blue.append( - rgb_black[2] + i * (rgb_mid[2] - rgb_black[2]) // len(range_map1) - ) - for i in range_map2: - red.append(rgb_mid[0] + i * (rgb_white[0] - rgb_mid[0]) // len(range_map2)) - green.append( - rgb_mid[1] + i * (rgb_white[1] - rgb_mid[1]) // len(range_map2) - ) - blue.append(rgb_mid[2] + i * (rgb_white[2] - rgb_mid[2]) // len(range_map2)) - - # Create the high-end values - for i in range(0, 256 - whitepoint): - red.append(rgb_white[0]) - green.append(rgb_white[1]) - blue.append(rgb_white[2]) - - # Return converted image - image = image.convert("RGB") - return _lut(image, red + green + blue) - - -def contain( - image: Image.Image, size: tuple[int, int], method: int = Image.Resampling.BICUBIC -) -> Image.Image: - """ - Returns a resized version of the image, set to the maximum width and height - within the requested size, while maintaining the original aspect ratio. - - :param image: The image to resize. - :param size: The requested output size in pixels, given as a - (width, height) tuple. - :param method: Resampling method to use. Default is - :py:attr:`~PIL.Image.Resampling.BICUBIC`. - See :ref:`concept-filters`. - :return: An image. - """ - - im_ratio = image.width / image.height - dest_ratio = size[0] / size[1] - - if im_ratio != dest_ratio: - if im_ratio > dest_ratio: - new_height = round(image.height / image.width * size[0]) - if new_height != size[1]: - size = (size[0], new_height) - else: - new_width = round(image.width / image.height * size[1]) - if new_width != size[0]: - size = (new_width, size[1]) - return image.resize(size, resample=method) - - -def cover( - image: Image.Image, size: tuple[int, int], method: int = Image.Resampling.BICUBIC -) -> Image.Image: - """ - Returns a resized version of the image, so that the requested size is - covered, while maintaining the original aspect ratio. - - :param image: The image to resize. - :param size: The requested output size in pixels, given as a - (width, height) tuple. - :param method: Resampling method to use. Default is - :py:attr:`~PIL.Image.Resampling.BICUBIC`. - See :ref:`concept-filters`. - :return: An image. 
- """ - - im_ratio = image.width / image.height - dest_ratio = size[0] / size[1] - - if im_ratio != dest_ratio: - if im_ratio < dest_ratio: - new_height = round(image.height / image.width * size[0]) - if new_height != size[1]: - size = (size[0], new_height) - else: - new_width = round(image.width / image.height * size[1]) - if new_width != size[0]: - size = (new_width, size[1]) - return image.resize(size, resample=method) - - -def pad( - image: Image.Image, - size: tuple[int, int], - method: int = Image.Resampling.BICUBIC, - color: str | int | tuple[int, ...] | None = None, - centering: tuple[float, float] = (0.5, 0.5), -) -> Image.Image: - """ - Returns a resized and padded version of the image, expanded to fill the - requested aspect ratio and size. - - :param image: The image to resize and crop. - :param size: The requested output size in pixels, given as a - (width, height) tuple. - :param method: Resampling method to use. Default is - :py:attr:`~PIL.Image.Resampling.BICUBIC`. - See :ref:`concept-filters`. - :param color: The background color of the padded image. - :param centering: Control the position of the original image within the - padded version. - - (0.5, 0.5) will keep the image centered - (0, 0) will keep the image aligned to the top left - (1, 1) will keep the image aligned to the bottom - right - :return: An image. - """ - - resized = contain(image, size, method) - if resized.size == size: - out = resized - else: - out = Image.new(image.mode, size, color) - if resized.palette: - out.putpalette(resized.getpalette()) - if resized.width != size[0]: - x = round((size[0] - resized.width) * max(0, min(centering[0], 1))) - out.paste(resized, (x, 0)) - else: - y = round((size[1] - resized.height) * max(0, min(centering[1], 1))) - out.paste(resized, (0, y)) - return out - - -def crop(image: Image.Image, border: int = 0) -> Image.Image: - """ - Remove border from image. The same amount of pixels are removed - from all four sides. This function works on all image modes. - - .. seealso:: :py:meth:`~PIL.Image.Image.crop` - - :param image: The image to crop. - :param border: The number of pixels to remove. - :return: An image. - """ - left, top, right, bottom = _border(border) - return image.crop((left, top, image.size[0] - right, image.size[1] - bottom)) - - -def scale( - image: Image.Image, factor: float, resample: int = Image.Resampling.BICUBIC -) -> Image.Image: - """ - Returns a rescaled image by a specific factor given in parameter. - A factor greater than 1 expands the image, between 0 and 1 contracts the - image. - - :param image: The image to rescale. - :param factor: The expansion factor, as a float. - :param resample: Resampling method to use. Default is - :py:attr:`~PIL.Image.Resampling.BICUBIC`. - See :ref:`concept-filters`. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - if factor == 1: - return image.copy() - elif factor <= 0: - msg = "the factor must be greater than 0" - raise ValueError(msg) - else: - size = (round(factor * image.width), round(factor * image.height)) - return image.resize(size, resample) - - -class SupportsGetMesh(Protocol): - """ - An object that supports the ``getmesh`` method, taking an image as an - argument, and returning a list of tuples. Each tuple contains two tuples, - the source box as a tuple of 4 integers, and a tuple of 8 integers for the - final quadrilateral, in order of top left, bottom left, bottom right, top - right. 
- """ - - def getmesh( - self, image: Image.Image - ) -> list[ - tuple[tuple[int, int, int, int], tuple[int, int, int, int, int, int, int, int]] - ]: ... - - -def deform( - image: Image.Image, - deformer: SupportsGetMesh, - resample: int = Image.Resampling.BILINEAR, -) -> Image.Image: - """ - Deform the image. - - :param image: The image to deform. - :param deformer: A deformer object. Any object that implements a - ``getmesh`` method can be used. - :param resample: An optional resampling filter. Same values possible as - in the PIL.Image.transform function. - :return: An image. - """ - return image.transform( - image.size, Image.Transform.MESH, deformer.getmesh(image), resample - ) - - -def equalize(image: Image.Image, mask: Image.Image | None = None) -> Image.Image: - """ - Equalize the image histogram. This function applies a non-linear - mapping to the input image, in order to create a uniform - distribution of grayscale values in the output image. - - :param image: The image to equalize. - :param mask: An optional mask. If given, only the pixels selected by - the mask are included in the analysis. - :return: An image. - """ - if image.mode == "P": - image = image.convert("RGB") - h = image.histogram(mask) - lut = [] - for b in range(0, len(h), 256): - histo = [_f for _f in h[b : b + 256] if _f] - if len(histo) <= 1: - lut.extend(list(range(256))) - else: - step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 - if not step: - lut.extend(list(range(256))) - else: - n = step // 2 - for i in range(256): - lut.append(n // step) - n = n + h[i + b] - return _lut(image, lut) - - -def expand( - image: Image.Image, - border: int | tuple[int, ...] = 0, - fill: str | int | tuple[int, ...] = 0, -) -> Image.Image: - """ - Add border to the image - - :param image: The image to expand. - :param border: Border width, in pixels. - :param fill: Pixel fill value (a color value). Default is 0 (black). - :return: An image. - """ - left, top, right, bottom = _border(border) - width = left + image.size[0] + right - height = top + image.size[1] + bottom - color = _color(fill, image.mode) - if image.palette: - palette = ImagePalette.ImagePalette(palette=image.getpalette()) - if isinstance(color, tuple) and (len(color) == 3 or len(color) == 4): - color = palette.getcolor(color) - else: - palette = None - out = Image.new(image.mode, (width, height), color) - if palette: - out.putpalette(palette.palette) - out.paste(image, (left, top)) - return out - - -def fit( - image: Image.Image, - size: tuple[int, int], - method: int = Image.Resampling.BICUBIC, - bleed: float = 0.0, - centering: tuple[float, float] = (0.5, 0.5), -) -> Image.Image: - """ - Returns a resized and cropped version of the image, cropped to the - requested aspect ratio and size. - - This function was contributed by Kevin Cazabon. - - :param image: The image to resize and crop. - :param size: The requested output size in pixels, given as a - (width, height) tuple. - :param method: Resampling method to use. Default is - :py:attr:`~PIL.Image.Resampling.BICUBIC`. - See :ref:`concept-filters`. - :param bleed: Remove a border around the outside of the image from all - four edges. The value is a decimal percentage (use 0.01 for - one percent). The default value is 0 (no border). - Cannot be greater than or equal to 0.5. - :param centering: Control the cropping position. Use (0.5, 0.5) for - center cropping (e.g. if cropping the width, take 50% off - of the left side, and therefore 50% off the right side). 
- (0.0, 0.0) will crop from the top left corner (i.e. if - cropping the width, take all of the crop off of the right - side, and if cropping the height, take all of it off the - bottom). (1.0, 0.0) will crop from the bottom left - corner, etc. (i.e. if cropping the width, take all of the - crop off the left side, and if cropping the height take - none from the top, and therefore all off the bottom). - :return: An image. - """ - - # by Kevin Cazabon, Feb 17/2000 - # kevin@cazabon.com - # https://www.cazabon.com - - centering_x, centering_y = centering - - if not 0.0 <= centering_x <= 1.0: - centering_x = 0.5 - if not 0.0 <= centering_y <= 1.0: - centering_y = 0.5 - - if not 0.0 <= bleed < 0.5: - bleed = 0.0 - - # calculate the area to use for resizing and cropping, subtracting - # the 'bleed' around the edges - - # number of pixels to trim off on Top and Bottom, Left and Right - bleed_pixels = (bleed * image.size[0], bleed * image.size[1]) - - live_size = ( - image.size[0] - bleed_pixels[0] * 2, - image.size[1] - bleed_pixels[1] * 2, - ) - - # calculate the aspect ratio of the live_size - live_size_ratio = live_size[0] / live_size[1] - - # calculate the aspect ratio of the output image - output_ratio = size[0] / size[1] - - # figure out if the sides or top/bottom will be cropped off - if live_size_ratio == output_ratio: - # live_size is already the needed ratio - crop_width = live_size[0] - crop_height = live_size[1] - elif live_size_ratio >= output_ratio: - # live_size is wider than what's needed, crop the sides - crop_width = output_ratio * live_size[1] - crop_height = live_size[1] - else: - # live_size is taller than what's needed, crop the top and bottom - crop_width = live_size[0] - crop_height = live_size[0] / output_ratio - - # make the crop - crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering_x - crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering_y - - crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height) - - # resize the image and return it - return image.resize(size, method, box=crop) - - -def flip(image: Image.Image) -> Image.Image: - """ - Flip the image vertically (top to bottom). - - :param image: The image to flip. - :return: An image. - """ - return image.transpose(Image.Transpose.FLIP_TOP_BOTTOM) - - -def grayscale(image: Image.Image) -> Image.Image: - """ - Convert the image to grayscale. - - :param image: The image to convert. - :return: An image. - """ - return image.convert("L") - - -def invert(image: Image.Image) -> Image.Image: - """ - Invert (negate) the image. - - :param image: The image to invert. - :return: An image. - """ - lut = list(range(255, -1, -1)) - return image.point(lut) if image.mode == "1" else _lut(image, lut) - - -def mirror(image: Image.Image) -> Image.Image: - """ - Flip image horizontally (left to right). - - :param image: The image to mirror. - :return: An image. - """ - return image.transpose(Image.Transpose.FLIP_LEFT_RIGHT) - - -def posterize(image: Image.Image, bits: int) -> Image.Image: - """ - Reduce the number of bits for each color channel. - - :param image: The image to posterize. - :param bits: The number of bits to keep for each channel (1-8). - :return: An image. - """ - mask = ~(2 ** (8 - bits) - 1) - lut = [i & mask for i in range(256)] - return _lut(image, lut) - - -def solarize(image: Image.Image, threshold: int = 128) -> Image.Image: - """ - Invert all pixel values above a threshold. - - :param image: The image to solarize. 
- :param threshold: All pixels above this grayscale level are inverted. - :return: An image. - """ - lut = [] - for i in range(256): - if i < threshold: - lut.append(i) - else: - lut.append(255 - i) - return _lut(image, lut) - - -def exif_transpose(image: Image.Image, *, in_place: bool = False) -> Image.Image | None: - """ - If an image has an EXIF Orientation tag, other than 1, transpose the image - accordingly, and remove the orientation data. - - :param image: The image to transpose. - :param in_place: Boolean. Keyword-only argument. - If ``True``, the original image is modified in-place, and ``None`` is returned. - If ``False`` (default), a new :py:class:`~PIL.Image.Image` object is returned - with the transposition applied. If there is no transposition, a copy of the - image will be returned. - """ - image.load() - image_exif = image.getexif() - orientation = image_exif.get(ExifTags.Base.Orientation, 1) - method = { - 2: Image.Transpose.FLIP_LEFT_RIGHT, - 3: Image.Transpose.ROTATE_180, - 4: Image.Transpose.FLIP_TOP_BOTTOM, - 5: Image.Transpose.TRANSPOSE, - 6: Image.Transpose.ROTATE_270, - 7: Image.Transpose.TRANSVERSE, - 8: Image.Transpose.ROTATE_90, - }.get(orientation) - if method is not None: - transposed_image = image.transpose(method) - if in_place: - image.im = transposed_image.im - image.pyaccess = None - image._size = transposed_image._size - exif_image = image if in_place else transposed_image - - exif = exif_image.getexif() - if ExifTags.Base.Orientation in exif: - del exif[ExifTags.Base.Orientation] - if "exif" in exif_image.info: - exif_image.info["exif"] = exif.tobytes() - elif "Raw profile type exif" in exif_image.info: - exif_image.info["Raw profile type exif"] = exif.tobytes().hex() - for key in ("XML:com.adobe.xmp", "xmp"): - if key in exif_image.info: - for pattern in ( - r'tiff:Orientation="([0-9])"', - r"([0-9])", - ): - value = exif_image.info[key] - exif_image.info[key] = ( - re.sub(pattern, "", value) - if isinstance(value, str) - else re.sub(pattern.encode(), b"", value) - ) - if not in_place: - return transposed_image - elif not in_place: - return image.copy() - return None diff --git a/venv/Lib/site-packages/PIL/ImagePalette.py b/venv/Lib/site-packages/PIL/ImagePalette.py deleted file mode 100644 index ed38285..0000000 --- a/venv/Lib/site-packages/PIL/ImagePalette.py +++ /dev/null @@ -1,284 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# image palette object -# -# History: -# 1996-03-11 fl Rewritten. -# 1997-01-03 fl Up and running. -# 1997-08-23 fl Added load hack -# 2001-04-16 fl Fixed randint shadow bug in random() -# -# Copyright (c) 1997-2001 by Secret Labs AB -# Copyright (c) 1996-1997 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import array -from typing import IO, TYPE_CHECKING, Sequence - -from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile - -if TYPE_CHECKING: - from . import Image - - -class ImagePalette: - """ - Color palette for palette mapped images - - :param mode: The mode to use for the palette. See: - :ref:`concept-modes`. Defaults to "RGB" - :param palette: An optional palette. If given, it must be a bytearray, - an array or a list of ints between 0-255. The list must consist of - all channels for one color followed by the next color (e.g. RGBRGBRGB). - Defaults to an empty palette. 
- """ - - def __init__( - self, - mode: str = "RGB", - palette: Sequence[int] | bytes | bytearray | None = None, - ) -> None: - self.mode = mode - self.rawmode: str | None = None # if set, palette contains raw data - self.palette = palette or bytearray() - self.dirty: int | None = None - - @property - def palette(self) -> Sequence[int] | bytes | bytearray: - return self._palette - - @palette.setter - def palette(self, palette: Sequence[int] | bytes | bytearray) -> None: - self._colors: dict[tuple[int, ...], int] | None = None - self._palette = palette - - @property - def colors(self) -> dict[tuple[int, ...], int]: - if self._colors is None: - mode_len = len(self.mode) - self._colors = {} - for i in range(0, len(self.palette), mode_len): - color = tuple(self.palette[i : i + mode_len]) - if color in self._colors: - continue - self._colors[color] = i // mode_len - return self._colors - - @colors.setter - def colors(self, colors: dict[tuple[int, ...], int]) -> None: - self._colors = colors - - def copy(self) -> ImagePalette: - new = ImagePalette() - - new.mode = self.mode - new.rawmode = self.rawmode - if self.palette is not None: - new.palette = self.palette[:] - new.dirty = self.dirty - - return new - - def getdata(self) -> tuple[str, Sequence[int] | bytes | bytearray]: - """ - Get palette contents in format suitable for the low-level - ``im.putpalette`` primitive. - - .. warning:: This method is experimental. - """ - if self.rawmode: - return self.rawmode, self.palette - return self.mode, self.tobytes() - - def tobytes(self) -> bytes: - """Convert palette to bytes. - - .. warning:: This method is experimental. - """ - if self.rawmode: - msg = "palette contains raw palette data" - raise ValueError(msg) - if isinstance(self.palette, bytes): - return self.palette - arr = array.array("B", self.palette) - return arr.tobytes() - - # Declare tostring as an alias for tobytes - tostring = tobytes - - def _new_color_index( - self, image: Image.Image | None = None, e: Exception | None = None - ) -> int: - if not isinstance(self.palette, bytearray): - self._palette = bytearray(self.palette) - index = len(self.palette) // 3 - special_colors: tuple[int | tuple[int, ...] | None, ...] = () - if image: - special_colors = ( - image.info.get("background"), - image.info.get("transparency"), - ) - while index in special_colors: - index += 1 - if index >= 256: - if image: - # Search for an unused index - for i, count in reversed(list(enumerate(image.histogram()))): - if count == 0 and i not in special_colors: - index = i - break - if index >= 256: - msg = "cannot allocate more than 256 colors" - raise ValueError(msg) from e - return index - - def getcolor( - self, - color: tuple[int, ...], - image: Image.Image | None = None, - ) -> int: - """Given an rgb tuple, allocate palette entry. - - .. warning:: This method is experimental. 
- """ - if self.rawmode: - msg = "palette contains raw palette data" - raise ValueError(msg) - if isinstance(color, tuple): - if self.mode == "RGB": - if len(color) == 4: - if color[3] != 255: - msg = "cannot add non-opaque RGBA color to RGB palette" - raise ValueError(msg) - color = color[:3] - elif self.mode == "RGBA": - if len(color) == 3: - color += (255,) - try: - return self.colors[color] - except KeyError as e: - # allocate new color slot - index = self._new_color_index(image, e) - assert isinstance(self._palette, bytearray) - self.colors[color] = index - if index * 3 < len(self.palette): - self._palette = ( - self._palette[: index * 3] - + bytes(color) - + self._palette[index * 3 + 3 :] - ) - else: - self._palette += bytes(color) - self.dirty = 1 - return index - else: - msg = f"unknown color specifier: {repr(color)}" # type: ignore[unreachable] - raise ValueError(msg) - - def save(self, fp: str | IO[str]) -> None: - """Save palette to text file. - - .. warning:: This method is experimental. - """ - if self.rawmode: - msg = "palette contains raw palette data" - raise ValueError(msg) - if isinstance(fp, str): - fp = open(fp, "w") - fp.write("# Palette\n") - fp.write(f"# Mode: {self.mode}\n") - for i in range(256): - fp.write(f"{i}") - for j in range(i * len(self.mode), (i + 1) * len(self.mode)): - try: - fp.write(f" {self.palette[j]}") - except IndexError: - fp.write(" 0") - fp.write("\n") - fp.close() - - -# -------------------------------------------------------------------- -# Internal - - -def raw(rawmode, data: Sequence[int] | bytes | bytearray) -> ImagePalette: - palette = ImagePalette() - palette.rawmode = rawmode - palette.palette = data - palette.dirty = 1 - return palette - - -# -------------------------------------------------------------------- -# Factories - - -def make_linear_lut(black: int, white: float) -> list[int]: - if black == 0: - return [int(white * i // 255) for i in range(256)] - - msg = "unavailable when black is non-zero" - raise NotImplementedError(msg) # FIXME - - -def make_gamma_lut(exp: float) -> list[int]: - return [int(((i / 255.0) ** exp) * 255.0 + 0.5) for i in range(256)] - - -def negative(mode: str = "RGB") -> ImagePalette: - palette = list(range(256 * len(mode))) - palette.reverse() - return ImagePalette(mode, [i // len(mode) for i in palette]) - - -def random(mode: str = "RGB") -> ImagePalette: - from random import randint - - palette = [randint(0, 255) for _ in range(256 * len(mode))] - return ImagePalette(mode, palette) - - -def sepia(white: str = "#fff0c0") -> ImagePalette: - bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)] - return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)]) - - -def wedge(mode: str = "RGB") -> ImagePalette: - palette = list(range(256 * len(mode))) - return ImagePalette(mode, [i // len(mode) for i in palette]) - - -def load(filename: str) -> tuple[bytes, str]: - # FIXME: supports GIMP gradients only - - with open(filename, "rb") as fp: - paletteHandlers: list[ - type[ - GimpPaletteFile.GimpPaletteFile - | GimpGradientFile.GimpGradientFile - | PaletteFile.PaletteFile - ] - ] = [ - GimpPaletteFile.GimpPaletteFile, - GimpGradientFile.GimpGradientFile, - PaletteFile.PaletteFile, - ] - for paletteHandler in paletteHandlers: - try: - fp.seek(0) - lut = paletteHandler(fp).getpalette() - if lut: - break - except (SyntaxError, ValueError): - pass - else: - msg = "cannot load palette" - raise OSError(msg) - - return lut # data, rawmode diff --git a/venv/Lib/site-packages/PIL/ImagePath.py 
b/venv/Lib/site-packages/PIL/ImagePath.py deleted file mode 100644 index 77e8a60..0000000 --- a/venv/Lib/site-packages/PIL/ImagePath.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# path interface -# -# History: -# 1996-11-04 fl Created -# 2002-04-14 fl Added documentation stub class -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image - -Path = Image.core.path diff --git a/venv/Lib/site-packages/PIL/ImageQt.py b/venv/Lib/site-packages/PIL/ImageQt.py deleted file mode 100644 index 35a3776..0000000 --- a/venv/Lib/site-packages/PIL/ImageQt.py +++ /dev/null @@ -1,205 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# a simple Qt image interface. -# -# history: -# 2006-06-03 fl: created -# 2006-06-04 fl: inherit from QImage instead of wrapping it -# 2006-06-05 fl: removed toimage helper; move string support to ImageQt -# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) -# -# Copyright (c) 2006 by Secret Labs AB -# Copyright (c) 2006 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import sys -from io import BytesIO -from typing import Callable - -from . import Image -from ._util import is_path - -qt_version: str | None -qt_versions = [ - ["6", "PyQt6"], - ["side6", "PySide6"], -] - -# If a version has already been imported, attempt it first -qt_versions.sort(key=lambda version: version[1] in sys.modules, reverse=True) -for version, qt_module in qt_versions: - try: - QBuffer: type - QIODevice: type - QImage: type - QPixmap: type - qRgba: Callable[[int, int, int, int], int] - if qt_module == "PyQt6": - from PyQt6.QtCore import QBuffer, QIODevice - from PyQt6.QtGui import QImage, QPixmap, qRgba - elif qt_module == "PySide6": - from PySide6.QtCore import QBuffer, QIODevice - from PySide6.QtGui import QImage, QPixmap, qRgba - except (ImportError, RuntimeError): - continue - qt_is_installed = True - qt_version = version - break -else: - qt_is_installed = False - qt_version = None - - -def rgb(r, g, b, a=255): - """(Internal) Turns an RGB color into a Qt compatible color integer.""" - # use qRgb to pack the colors, and then turn the resulting long - # into a negative integer with the same bitpattern. 
- return qRgba(r, g, b, a) & 0xFFFFFFFF - - -def fromqimage(im): - """ - :param im: QImage or PIL ImageQt object - """ - buffer = QBuffer() - if qt_version == "6": - try: - qt_openmode = QIODevice.OpenModeFlag - except AttributeError: - qt_openmode = QIODevice.OpenMode - else: - qt_openmode = QIODevice - buffer.open(qt_openmode.ReadWrite) - # preserve alpha channel with png - # otherwise ppm is more friendly with Image.open - if im.hasAlphaChannel(): - im.save(buffer, "png") - else: - im.save(buffer, "ppm") - - b = BytesIO() - b.write(buffer.data()) - buffer.close() - b.seek(0) - - return Image.open(b) - - -def fromqpixmap(im): - return fromqimage(im) - - -def align8to32(bytes, width, mode): - """ - converts each scanline of data from 8 bit to 32 bit aligned - """ - - bits_per_pixel = {"1": 1, "L": 8, "P": 8, "I;16": 16}[mode] - - # calculate bytes per line and the extra padding if needed - bits_per_line = bits_per_pixel * width - full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) - bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) - - extra_padding = -bytes_per_line % 4 - - # already 32 bit aligned by luck - if not extra_padding: - return bytes - - new_data = [ - bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + b"\x00" * extra_padding - for i in range(len(bytes) // bytes_per_line) - ] - - return b"".join(new_data) - - -def _toqclass_helper(im): - data = None - colortable = None - exclusive_fp = False - - # handle filename, if given instead of image name - if hasattr(im, "toUtf8"): - # FIXME - is this really the best way to do this? - im = str(im.toUtf8(), "utf-8") - if is_path(im): - im = Image.open(im) - exclusive_fp = True - - qt_format = QImage.Format if qt_version == "6" else QImage - if im.mode == "1": - format = qt_format.Format_Mono - elif im.mode == "L": - format = qt_format.Format_Indexed8 - colortable = [rgb(i, i, i) for i in range(256)] - elif im.mode == "P": - format = qt_format.Format_Indexed8 - palette = im.getpalette() - colortable = [rgb(*palette[i : i + 3]) for i in range(0, len(palette), 3)] - elif im.mode == "RGB": - # Populate the 4th channel with 255 - im = im.convert("RGBA") - - data = im.tobytes("raw", "BGRA") - format = qt_format.Format_RGB32 - elif im.mode == "RGBA": - data = im.tobytes("raw", "BGRA") - format = qt_format.Format_ARGB32 - elif im.mode == "I;16": - im = im.point(lambda i: i * 256) - - format = qt_format.Format_Grayscale16 - else: - if exclusive_fp: - im.close() - msg = f"unsupported image mode {repr(im.mode)}" - raise ValueError(msg) - - size = im.size - __data = data or align8to32(im.tobytes(), size[0], im.mode) - if exclusive_fp: - im.close() - return {"data": __data, "size": size, "format": format, "colortable": colortable} - - -if qt_is_installed: - - class ImageQt(QImage): - def __init__(self, im): - """ - An PIL image wrapper for Qt. This is a subclass of PyQt's QImage - class. - - :param im: A PIL Image object, or a file name (given either as - Python string or a PyQt string object). - """ - im_data = _toqclass_helper(im) - # must keep a reference, or Qt will crash! - # All QImage constructors that take data operate on an existing - # buffer, so this buffer has to hang on for the life of the image. 
- # Fixes https://github.com/python-pillow/Pillow/issues/1370 - self.__data = im_data["data"] - super().__init__( - self.__data, - im_data["size"][0], - im_data["size"][1], - im_data["format"], - ) - if im_data["colortable"]: - self.setColorTable(im_data["colortable"]) - - -def toqimage(im) -> ImageQt: - return ImageQt(im) - - -def toqpixmap(im): - qimage = toqimage(im) - return QPixmap.fromImage(qimage) diff --git a/venv/Lib/site-packages/PIL/ImageSequence.py b/venv/Lib/site-packages/PIL/ImageSequence.py deleted file mode 100644 index 2c18502..0000000 --- a/venv/Lib/site-packages/PIL/ImageSequence.py +++ /dev/null @@ -1,86 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# sequence support classes -# -# history: -# 1997-02-20 fl Created -# -# Copyright (c) 1997 by Secret Labs AB. -# Copyright (c) 1997 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# - -## -from __future__ import annotations - -from typing import Callable - -from . import Image - - -class Iterator: - """ - This class implements an iterator object that can be used to loop - over an image sequence. - - You can use the ``[]`` operator to access elements by index. This operator - will raise an :py:exc:`IndexError` if you try to access a nonexistent - frame. - - :param im: An image object. - """ - - def __init__(self, im: Image.Image): - if not hasattr(im, "seek"): - msg = "im must have seek method" - raise AttributeError(msg) - self.im = im - self.position = getattr(self.im, "_min_frame", 0) - - def __getitem__(self, ix: int) -> Image.Image: - try: - self.im.seek(ix) - return self.im - except EOFError as e: - msg = "end of sequence" - raise IndexError(msg) from e - - def __iter__(self) -> Iterator: - return self - - def __next__(self) -> Image.Image: - try: - self.im.seek(self.position) - self.position += 1 - return self.im - except EOFError as e: - msg = "end of sequence" - raise StopIteration(msg) from e - - -def all_frames( - im: Image.Image | list[Image.Image], - func: Callable[[Image.Image], Image.Image] | None = None, -) -> list[Image.Image]: - """ - Applies a given function to all frames in an image or a list of images. - The frames are returned as a list of separate images. - - :param im: An image, or a list of images. - :param func: The function to apply to all of the image frames. - :returns: A list of images. - """ - if not isinstance(im, list): - im = [im] - - ims = [] - for imSequence in im: - current = imSequence.tell() - - ims += [im_frame.copy() for im_frame in Iterator(imSequence)] - - imSequence.seek(current) - return [func(im) for im in ims] if func else ims diff --git a/venv/Lib/site-packages/PIL/ImageShow.py b/venv/Lib/site-packages/PIL/ImageShow.py deleted file mode 100644 index 037d6f4..0000000 --- a/venv/Lib/site-packages/PIL/ImageShow.py +++ /dev/null @@ -1,363 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# im.show() drivers -# -# History: -# 2008-04-06 fl Created -# -# Copyright (c) Secret Labs AB 2008. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import abc -import os -import shutil -import subprocess -import sys -from shlex import quote -from typing import Any - -from . 
import Image - -_viewers = [] - - -def register(viewer, order: int = 1) -> None: - """ - The :py:func:`register` function is used to register additional viewers:: - - from PIL import ImageShow - ImageShow.register(MyViewer()) # MyViewer will be used as a last resort - ImageShow.register(MySecondViewer(), 0) # MySecondViewer will be prioritised - ImageShow.register(ImageShow.XVViewer(), 0) # XVViewer will be prioritised - - :param viewer: The viewer to be registered. - :param order: - Zero or a negative integer to prepend this viewer to the list, - a positive integer to append it. - """ - try: - if issubclass(viewer, Viewer): - viewer = viewer() - except TypeError: - pass # raised if viewer wasn't a class - if order > 0: - _viewers.append(viewer) - else: - _viewers.insert(0, viewer) - - -def show(image: Image.Image, title: str | None = None, **options: Any) -> bool: - r""" - Display a given image. - - :param image: An image object. - :param title: Optional title. Not all viewers can display the title. - :param \**options: Additional viewer options. - :returns: ``True`` if a suitable viewer was found, ``False`` otherwise. - """ - for viewer in _viewers: - if viewer.show(image, title=title, **options): - return True - return False - - -class Viewer: - """Base class for viewers.""" - - # main api - - def show(self, image: Image.Image, **options: Any) -> int: - """ - The main function for displaying an image. - Converts the given image to the target format and displays it. - """ - - if not ( - image.mode in ("1", "RGBA") - or (self.format == "PNG" and image.mode in ("I;16", "LA")) - ): - base = Image.getmodebase(image.mode) - if image.mode != base: - image = image.convert(base) - - return self.show_image(image, **options) - - # hook methods - - format: str | None = None - """The format to convert the image into.""" - options: dict[str, Any] = {} - """Additional options used to convert the image.""" - - def get_format(self, image: Image.Image) -> str | None: - """Return format name, or ``None`` to save as PGM/PPM.""" - return self.format - - def get_command(self, file: str, **options: Any) -> str: - """ - Returns the command used to display the file. - Not implemented in the base class. - """ - msg = "unavailable in base viewer" - raise NotImplementedError(msg) - - def save_image(self, image: Image.Image) -> str: - """Save to temporary file and return filename.""" - return image._dump(format=self.get_format(image), **self.options) - - def show_image(self, image: Image.Image, **options: Any) -> int: - """Display the given image.""" - return self.show_file(self.save_image(image), **options) - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. - """ - if not os.path.exists(path): - raise FileNotFoundError - os.system(self.get_command(path, **options)) # nosec - return 1 - - -# -------------------------------------------------------------------- - - -class WindowsViewer(Viewer): - """The default viewer on Windows is the default system application for PNG files.""" - - format = "PNG" - options = {"compress_level": 1, "save_all": True} - - def get_command(self, file: str, **options: Any) -> str: - return ( - f'start "Pillow" /WAIT "{file}" ' - "&& ping -n 4 127.0.0.1 >NUL " - f'&& del /f "{file}"' - ) - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. 
- """ - if not os.path.exists(path): - raise FileNotFoundError - subprocess.Popen( - self.get_command(path, **options), - shell=True, - creationflags=getattr(subprocess, "CREATE_NO_WINDOW"), - ) # nosec - return 1 - - -if sys.platform == "win32": - register(WindowsViewer) - - -class MacViewer(Viewer): - """The default viewer on macOS using ``Preview.app``.""" - - format = "PNG" - options = {"compress_level": 1, "save_all": True} - - def get_command(self, file: str, **options: Any) -> str: - # on darwin open returns immediately resulting in the temp - # file removal while app is opening - command = "open -a Preview.app" - command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&" - return command - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. - """ - if not os.path.exists(path): - raise FileNotFoundError - subprocess.call(["open", "-a", "Preview.app", path]) - executable = sys.executable or shutil.which("python3") - if executable: - subprocess.Popen( - [ - executable, - "-c", - "import os, sys, time; time.sleep(20); os.remove(sys.argv[1])", - path, - ] - ) - return 1 - - -if sys.platform == "darwin": - register(MacViewer) - - -class UnixViewer(Viewer): - format = "PNG" - options = {"compress_level": 1, "save_all": True} - - @abc.abstractmethod - def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: - pass - - def get_command(self, file: str, **options: Any) -> str: - command = self.get_command_ex(file, **options)[0] - return f"{command} {quote(file)}" - - -class XDGViewer(UnixViewer): - """ - The freedesktop.org ``xdg-open`` command. - """ - - def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: - command = executable = "xdg-open" - return command, executable - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. - """ - if not os.path.exists(path): - raise FileNotFoundError - subprocess.Popen(["xdg-open", path]) - return 1 - - -class DisplayViewer(UnixViewer): - """ - The ImageMagick ``display`` command. - This viewer supports the ``title`` parameter. - """ - - def get_command_ex( - self, file: str, title: str | None = None, **options: Any - ) -> tuple[str, str]: - command = executable = "display" - if title: - command += f" -title {quote(title)}" - return command, executable - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. - """ - if not os.path.exists(path): - raise FileNotFoundError - args = ["display"] - title = options.get("title") - if title: - args += ["-title", title] - args.append(path) - - subprocess.Popen(args) - return 1 - - -class GmDisplayViewer(UnixViewer): - """The GraphicsMagick ``gm display`` command.""" - - def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: - executable = "gm" - command = "gm display" - return command, executable - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. - """ - if not os.path.exists(path): - raise FileNotFoundError - subprocess.Popen(["gm", "display", path]) - return 1 - - -class EogViewer(UnixViewer): - """The GNOME Image Viewer ``eog`` command.""" - - def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: - executable = "eog" - command = "eog -n" - return command, executable - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. 
- """ - if not os.path.exists(path): - raise FileNotFoundError - subprocess.Popen(["eog", "-n", path]) - return 1 - - -class XVViewer(UnixViewer): - """ - The X Viewer ``xv`` command. - This viewer supports the ``title`` parameter. - """ - - def get_command_ex( - self, file: str, title: str | None = None, **options: Any - ) -> tuple[str, str]: - # note: xv is pretty outdated. most modern systems have - # imagemagick's display command instead. - command = executable = "xv" - if title: - command += f" -name {quote(title)}" - return command, executable - - def show_file(self, path: str, **options: Any) -> int: - """ - Display given file. - """ - if not os.path.exists(path): - raise FileNotFoundError - args = ["xv"] - title = options.get("title") - if title: - args += ["-name", title] - args.append(path) - - subprocess.Popen(args) - return 1 - - -if sys.platform not in ("win32", "darwin"): # unixoids - if shutil.which("xdg-open"): - register(XDGViewer) - if shutil.which("display"): - register(DisplayViewer) - if shutil.which("gm"): - register(GmDisplayViewer) - if shutil.which("eog"): - register(EogViewer) - if shutil.which("xv"): - register(XVViewer) - - -class IPythonViewer(Viewer): - """The viewer for IPython frontends.""" - - def show_image(self, image: Image.Image, **options: Any) -> int: - ipython_display(image) - return 1 - - -try: - from IPython.display import display as ipython_display -except ImportError: - pass -else: - register(IPythonViewer) - - -if __name__ == "__main__": - if len(sys.argv) < 2: - print("Syntax: python3 ImageShow.py imagefile [title]") - sys.exit() - - with Image.open(sys.argv[1]) as im: - print(show(im, *sys.argv[2:])) diff --git a/venv/Lib/site-packages/PIL/ImageStat.py b/venv/Lib/site-packages/PIL/ImageStat.py deleted file mode 100644 index 8bc5045..0000000 --- a/venv/Lib/site-packages/PIL/ImageStat.py +++ /dev/null @@ -1,160 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# global image statistics -# -# History: -# 1996-04-05 fl Created -# 1997-05-21 fl Added mask; added rms, var, stddev attributes -# 1997-08-05 fl Added median -# 1998-07-05 hk Fixed integer overflow error -# -# Notes: -# This class shows how to implement delayed evaluation of attributes. -# To get a certain value, simply access the corresponding attribute. -# The __getattr__ dispatcher takes care of the rest. -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996-97. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import math -from functools import cached_property - -from . import Image - - -class Stat: - def __init__( - self, image_or_list: Image.Image | list[int], mask: Image.Image | None = None - ) -> None: - """ - Calculate statistics for the given image. If a mask is included, - only the regions covered by that mask are included in the - statistics. You can also pass in a previously calculated histogram. - - :param image: A PIL image, or a precalculated histogram. - - .. note:: - - For a PIL image, calculations rely on the - :py:meth:`~PIL.Image.Image.histogram` method. The pixel counts are - grouped into 256 bins, even if the image has more than 8 bits per - channel. So ``I`` and ``F`` mode images have a maximum ``mean``, - ``median`` and ``rms`` of 255, and cannot have an ``extrema`` maximum - of more than 255. - - :param mask: An optional mask. 
- """ - if isinstance(image_or_list, Image.Image): - self.h = image_or_list.histogram(mask) - elif isinstance(image_or_list, list): - self.h = image_or_list - else: - msg = "first argument must be image or list" # type: ignore[unreachable] - raise TypeError(msg) - self.bands = list(range(len(self.h) // 256)) - - @cached_property - def extrema(self) -> list[tuple[int, int]]: - """ - Min/max values for each band in the image. - - .. note:: - This relies on the :py:meth:`~PIL.Image.Image.histogram` method, and - simply returns the low and high bins used. This is correct for - images with 8 bits per channel, but fails for other modes such as - ``I`` or ``F``. Instead, use :py:meth:`~PIL.Image.Image.getextrema` to - return per-band extrema for the image. This is more correct and - efficient because, for non-8-bit modes, the histogram method uses - :py:meth:`~PIL.Image.Image.getextrema` to determine the bins used. - """ - - def minmax(histogram: list[int]) -> tuple[int, int]: - res_min, res_max = 255, 0 - for i in range(256): - if histogram[i]: - res_min = i - break - for i in range(255, -1, -1): - if histogram[i]: - res_max = i - break - return res_min, res_max - - return [minmax(self.h[i:]) for i in range(0, len(self.h), 256)] - - @cached_property - def count(self) -> list[int]: - """Total number of pixels for each band in the image.""" - return [sum(self.h[i : i + 256]) for i in range(0, len(self.h), 256)] - - @cached_property - def sum(self) -> list[float]: - """Sum of all pixels for each band in the image.""" - - v = [] - for i in range(0, len(self.h), 256): - layer_sum = 0.0 - for j in range(256): - layer_sum += j * self.h[i + j] - v.append(layer_sum) - return v - - @cached_property - def sum2(self) -> list[float]: - """Squared sum of all pixels for each band in the image.""" - - v = [] - for i in range(0, len(self.h), 256): - sum2 = 0.0 - for j in range(256): - sum2 += (j**2) * float(self.h[i + j]) - v.append(sum2) - return v - - @cached_property - def mean(self) -> list[float]: - """Average (arithmetic mean) pixel level for each band in the image.""" - return [self.sum[i] / self.count[i] for i in self.bands] - - @cached_property - def median(self) -> list[int]: - """Median pixel level for each band in the image.""" - - v = [] - for i in self.bands: - s = 0 - half = self.count[i] // 2 - b = i * 256 - for j in range(256): - s = s + self.h[b + j] - if s > half: - break - v.append(j) - return v - - @cached_property - def rms(self) -> list[float]: - """RMS (root-mean-square) for each band in the image.""" - return [math.sqrt(self.sum2[i] / self.count[i]) for i in self.bands] - - @cached_property - def var(self) -> list[float]: - """Variance for each band in the image.""" - return [ - (self.sum2[i] - (self.sum[i] ** 2.0) / self.count[i]) / self.count[i] - for i in self.bands - ] - - @cached_property - def stddev(self) -> list[float]: - """Standard deviation for each band in the image.""" - return [math.sqrt(self.var[i]) for i in self.bands] - - -Global = Stat # compatibility diff --git a/venv/Lib/site-packages/PIL/ImageTk.py b/venv/Lib/site-packages/PIL/ImageTk.py deleted file mode 100644 index 90defdb..0000000 --- a/venv/Lib/site-packages/PIL/ImageTk.py +++ /dev/null @@ -1,284 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# a Tk display interface -# -# History: -# 96-04-08 fl Created -# 96-09-06 fl Added getimage method -# 96-11-01 fl Rewritten, removed image attribute and crop method -# 97-05-09 fl Use PyImagingPaste method instead of image type -# 97-05-12 fl Minor tweaks to match the IFUNC95 interface -# 97-05-17 fl Support the "pilbitmap" booster patch -# 97-06-05 fl Added file= and data= argument to image constructors -# 98-03-09 fl Added width and height methods to Image classes -# 98-07-02 fl Use default mode for "P" images without palette attribute -# 98-07-02 fl Explicitly destroy Tkinter image objects -# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) -# 99-07-26 fl Automatically hook into Tkinter (if possible) -# 99-08-15 fl Hook uses _imagingtk instead of _imaging -# -# Copyright (c) 1997-1999 by Secret Labs AB -# Copyright (c) 1996-1997 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import tkinter -from io import BytesIO - -from . import Image - -# -------------------------------------------------------------------- -# Check for Tkinter interface hooks - -_pilbitmap_ok = None - - -def _pilbitmap_check() -> int: - global _pilbitmap_ok - if _pilbitmap_ok is None: - try: - im = Image.new("1", (1, 1)) - tkinter.BitmapImage(data=f"PIL:{im.im.id}") - _pilbitmap_ok = 1 - except tkinter.TclError: - _pilbitmap_ok = 0 - return _pilbitmap_ok - - -def _get_image_from_kw(kw): - source = None - if "file" in kw: - source = kw.pop("file") - elif "data" in kw: - source = BytesIO(kw.pop("data")) - if source: - return Image.open(source) - - -def _pyimagingtkcall(command, photo, id): - tk = photo.tk - try: - tk.call(command, photo, id) - except tkinter.TclError: - # activate Tkinter hook - # may raise an error if it cannot attach to Tkinter - from . import _imagingtk - - _imagingtk.tkinit(tk.interpaddr()) - tk.call(command, photo, id) - - -# -------------------------------------------------------------------- -# PhotoImage - - -class PhotoImage: - """ - A Tkinter-compatible photo image. This can be used - everywhere Tkinter expects an image object. If the image is an RGBA - image, pixels having alpha 0 are treated as transparent. - - The constructor takes either a PIL image, or a mode and a size. - Alternatively, you can use the ``file`` or ``data`` options to initialize - the photo image object. - - :param image: Either a PIL image, or a mode string. If a mode string is - used, a size must also be given. - :param size: If the first argument is a mode string, this defines the size - of the image. - :keyword file: A filename to load the image from (using - ``Image.open(file)``). - :keyword data: An 8-bit string containing image data (as loaded from an - image file). 
- """ - - def __init__(self, image=None, size=None, **kw): - # Tk compatibility: file or data - if image is None: - image = _get_image_from_kw(kw) - - if hasattr(image, "mode") and hasattr(image, "size"): - # got an image instead of a mode - mode = image.mode - if mode == "P": - # palette mapped data - image.apply_transparency() - image.load() - try: - mode = image.palette.mode - except AttributeError: - mode = "RGB" # default - size = image.size - kw["width"], kw["height"] = size - else: - mode = image - image = None - - if mode not in ["1", "L", "RGB", "RGBA"]: - mode = Image.getmodebase(mode) - - self.__mode = mode - self.__size = size - self.__photo = tkinter.PhotoImage(**kw) - self.tk = self.__photo.tk - if image: - self.paste(image) - - def __del__(self) -> None: - name = self.__photo.name - self.__photo.name = None - try: - self.__photo.tk.call("image", "delete", name) - except Exception: - pass # ignore internal errors - - def __str__(self) -> str: - """ - Get the Tkinter photo image identifier. This method is automatically - called by Tkinter whenever a PhotoImage object is passed to a Tkinter - method. - - :return: A Tkinter photo image identifier (a string). - """ - return str(self.__photo) - - def width(self) -> int: - """ - Get the width of the image. - - :return: The width, in pixels. - """ - return self.__size[0] - - def height(self) -> int: - """ - Get the height of the image. - - :return: The height, in pixels. - """ - return self.__size[1] - - def paste(self, im: Image.Image) -> None: - """ - Paste a PIL image into the photo image. Note that this can - be very slow if the photo image is displayed. - - :param im: A PIL image. The size must match the target region. If the - mode does not match, the image is converted to the mode of - the bitmap image. - """ - # convert to blittable - im.load() - image = im.im - if image.isblock() and im.mode == self.__mode: - block = image - else: - block = image.new_block(self.__mode, im.size) - image.convert2(block, image) # convert directly between buffers - - _pyimagingtkcall("PyImagingPhoto", self.__photo, block.id) - - -# -------------------------------------------------------------------- -# BitmapImage - - -class BitmapImage: - """ - A Tkinter-compatible bitmap image. This can be used everywhere Tkinter - expects an image object. - - The given image must have mode "1". Pixels having value 0 are treated as - transparent. Options, if any, are passed on to Tkinter. The most commonly - used option is ``foreground``, which is used to specify the color for the - non-transparent parts. See the Tkinter documentation for information on - how to specify colours. - - :param image: A PIL image. - """ - - def __init__(self, image=None, **kw): - # Tk compatibility: file or data - if image is None: - image = _get_image_from_kw(kw) - - self.__mode = image.mode - self.__size = image.size - - if _pilbitmap_check(): - # fast way (requires the pilbitmap booster patch) - image.load() - kw["data"] = f"PIL:{image.im.id}" - self.__im = image # must keep a reference - else: - # slow but safe way - kw["data"] = image.tobitmap() - self.__photo = tkinter.BitmapImage(**kw) - - def __del__(self) -> None: - name = self.__photo.name - self.__photo.name = None - try: - self.__photo.tk.call("image", "delete", name) - except Exception: - pass # ignore internal errors - - def width(self) -> int: - """ - Get the width of the image. - - :return: The width, in pixels. - """ - return self.__size[0] - - def height(self) -> int: - """ - Get the height of the image. 
- - :return: The height, in pixels. - """ - return self.__size[1] - - def __str__(self) -> str: - """ - Get the Tkinter bitmap image identifier. This method is automatically - called by Tkinter whenever a BitmapImage object is passed to a Tkinter - method. - - :return: A Tkinter bitmap image identifier (a string). - """ - return str(self.__photo) - - -def getimage(photo: PhotoImage) -> Image.Image: - """Copies the contents of a PhotoImage to a PIL image memory.""" - im = Image.new("RGBA", (photo.width(), photo.height())) - block = im.im - - _pyimagingtkcall("PyImagingPhotoGet", photo, block.id) - - return im - - -def _show(image, title): - """Helper for the Image.show method.""" - - class UI(tkinter.Label): - def __init__(self, master, im): - if im.mode == "1": - self.image = BitmapImage(im, foreground="white", master=master) - else: - self.image = PhotoImage(im, master=master) - super().__init__(master, image=self.image, bg="black", bd=0) - - if not tkinter._default_root: - msg = "tkinter not initialized" - raise OSError(msg) - top = tkinter.Toplevel() - if title: - top.title(title) - UI(top, image).pack() diff --git a/venv/Lib/site-packages/PIL/ImageTransform.py b/venv/Lib/site-packages/PIL/ImageTransform.py deleted file mode 100644 index ffd7916..0000000 --- a/venv/Lib/site-packages/PIL/ImageTransform.py +++ /dev/null @@ -1,135 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# transform wrappers -# -# History: -# 2002-04-08 fl Created -# -# Copyright (c) 2002 by Secret Labs AB -# Copyright (c) 2002 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from typing import Any, Sequence - -from . import Image - - -class Transform(Image.ImageTransformHandler): - """Base class for other transforms defined in :py:mod:`~PIL.ImageTransform`.""" - - method: Image.Transform - - def __init__(self, data: Sequence[Any]) -> None: - self.data = data - - def getdata(self) -> tuple[Image.Transform, Sequence[int]]: - return self.method, self.data - - def transform( - self, - size: tuple[int, int], - image: Image.Image, - **options: Any, - ) -> Image.Image: - """Perform the transform. Called from :py:meth:`.Image.transform`.""" - # can be overridden - method, data = self.getdata() - return image.transform(size, method, data, **options) - - -class AffineTransform(Transform): - """ - Define an affine image transform. - - This function takes a 6-tuple (a, b, c, d, e, f) which contain the first - two rows from an affine transform matrix. For each pixel (x, y) in the - output image, the new value is taken from a position (a x + b y + c, - d x + e y + f) in the input image, rounded to nearest pixel. - - This function can be used to scale, translate, rotate, and shear the - original image. - - See :py:meth:`.Image.transform` - - :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows - from an affine transform matrix. - """ - - method = Image.Transform.AFFINE - - -class PerspectiveTransform(Transform): - """ - Define a perspective image transform. - - This function takes an 8-tuple (a, b, c, d, e, f, g, h). For each pixel - (x, y) in the output image, the new value is taken from a position - ((a x + b y + c) / (g x + h y + 1), (d x + e y + f) / (g x + h y + 1)) in - the input image, rounded to nearest pixel. - - This function can be used to scale, translate, rotate, and shear the - original image. - - See :py:meth:`.Image.transform` - - :param matrix: An 8-tuple (a, b, c, d, e, f, g, h). 
- """ - - method = Image.Transform.PERSPECTIVE - - -class ExtentTransform(Transform): - """ - Define a transform to extract a subregion from an image. - - Maps a rectangle (defined by two corners) from the image to a rectangle of - the given size. The resulting image will contain data sampled from between - the corners, such that (x0, y0) in the input image will end up at (0,0) in - the output image, and (x1, y1) at size. - - This method can be used to crop, stretch, shrink, or mirror an arbitrary - rectangle in the current image. It is slightly slower than crop, but about - as fast as a corresponding resize operation. - - See :py:meth:`.Image.transform` - - :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the - input image's coordinate system. See :ref:`coordinate-system`. - """ - - method = Image.Transform.EXTENT - - -class QuadTransform(Transform): - """ - Define a quad image transform. - - Maps a quadrilateral (a region defined by four corners) from the image to a - rectangle of the given size. - - See :py:meth:`.Image.transform` - - :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contain the - upper left, lower left, lower right, and upper right corner of the - source quadrilateral. - """ - - method = Image.Transform.QUAD - - -class MeshTransform(Transform): - """ - Define a mesh image transform. A mesh transform consists of one or more - individual quad transforms. - - See :py:meth:`.Image.transform` - - :param data: A list of (bbox, quad) tuples. - """ - - method = Image.Transform.MESH diff --git a/venv/Lib/site-packages/PIL/ImageWin.py b/venv/Lib/site-packages/PIL/ImageWin.py deleted file mode 100644 index 978c5a9..0000000 --- a/venv/Lib/site-packages/PIL/ImageWin.py +++ /dev/null @@ -1,238 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# a Windows DIB display interface -# -# History: -# 1996-05-20 fl Created -# 1996-09-20 fl Fixed subregion exposure -# 1997-09-21 fl Added draw primitive (for tzPrint) -# 2003-05-21 fl Added experimental Window/ImageWindow classes -# 2003-09-05 fl Added fromstring/tostring methods -# -# Copyright (c) Secret Labs AB 1997-2003. -# Copyright (c) Fredrik Lundh 1996-2003. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image - - -class HDC: - """ - Wraps an HDC integer. The resulting object can be passed to the - :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` - methods. - """ - - def __init__(self, dc: int) -> None: - self.dc = dc - - def __int__(self) -> int: - return self.dc - - -class HWND: - """ - Wraps an HWND integer. The resulting object can be passed to the - :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` - methods, instead of a DC. - """ - - def __init__(self, wnd: int) -> None: - self.wnd = wnd - - def __int__(self) -> int: - return self.wnd - - -class Dib: - """ - A Windows bitmap with the given mode and size. The mode can be one of "1", - "L", "P", or "RGB". - - If the display requires a palette, this constructor creates a suitable - palette and associates it with the image. For an "L" image, 128 graylevels - are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together - with 20 graylevels. - - To make sure that palettes work properly under Windows, you must call the - ``palette`` method upon certain events from Windows. - - :param image: Either a PIL image, or a mode string. If a mode string is - used, a size must also be given. 
The mode can be one of "1", - "L", "P", or "RGB". - :param size: If the first argument is a mode string, this - defines the size of the image. - """ - - def __init__( - self, image: Image.Image | str, size: tuple[int, int] | list[int] | None = None - ) -> None: - if isinstance(image, str): - mode = image - image = "" - else: - mode = image.mode - size = image.size - if mode not in ["1", "L", "P", "RGB"]: - mode = Image.getmodebase(mode) - self.image = Image.core.display(mode, size) - self.mode = mode - self.size = size - if image: - assert not isinstance(image, str) - self.paste(image) - - def expose(self, handle): - """ - Copy the bitmap contents to a device context. - - :param handle: Device context (HDC), cast to a Python integer, or an - HDC or HWND instance. In PythonWin, you can use - ``CDC.GetHandleAttrib()`` to get a suitable handle. - """ - if isinstance(handle, HWND): - dc = self.image.getdc(handle) - try: - result = self.image.expose(dc) - finally: - self.image.releasedc(handle, dc) - else: - result = self.image.expose(handle) - return result - - def draw(self, handle, dst, src=None): - """ - Same as expose, but allows you to specify where to draw the image, and - what part of it to draw. - - The destination and source areas are given as 4-tuple rectangles. If - the source is omitted, the entire image is copied. If the source and - the destination have different sizes, the image is resized as - necessary. - """ - if not src: - src = (0, 0) + self.size - if isinstance(handle, HWND): - dc = self.image.getdc(handle) - try: - result = self.image.draw(dc, dst, src) - finally: - self.image.releasedc(handle, dc) - else: - result = self.image.draw(handle, dst, src) - return result - - def query_palette(self, handle): - """ - Installs the palette associated with the image in the given device - context. - - This method should be called upon **QUERYNEWPALETTE** and - **PALETTECHANGED** events from Windows. If this method returns a - non-zero value, one or more display palette entries were changed, and - the image should be redrawn. - - :param handle: Device context (HDC), cast to a Python integer, or an - HDC or HWND instance. - :return: A true value if one or more entries were changed (this - indicates that the image should be redrawn). - """ - if isinstance(handle, HWND): - handle = self.image.getdc(handle) - try: - result = self.image.query_palette(handle) - finally: - self.image.releasedc(handle, handle) - else: - result = self.image.query_palette(handle) - return result - - def paste( - self, im: Image.Image, box: tuple[int, int, int, int] | None = None - ) -> None: - """ - Paste a PIL image into the bitmap image. - - :param im: A PIL image. The size must match the target region. - If the mode does not match, the image is converted to the - mode of the bitmap image. - :param box: A 4-tuple defining the left, upper, right, and - lower pixel coordinate. See :ref:`coordinate-system`. If - None is given instead of a tuple, all of the image is - assumed. - """ - im.load() - if self.mode != im.mode: - im = im.convert(self.mode) - if box: - self.image.paste(im.im, box) - else: - self.image.paste(im.im) - - def frombytes(self, buffer: bytes) -> None: - """ - Load display memory contents from byte data. - - :param buffer: A buffer containing display data (usually - data returned from :py:func:`~PIL.ImageWin.Dib.tobytes`) - """ - self.image.frombytes(buffer) - - def tobytes(self) -> bytes: - """ - Copy display memory contents to bytes object. - - :return: A bytes object containing display data. 
- """ - return self.image.tobytes() - - -class Window: - """Create a Window with the given title size.""" - - def __init__( - self, title: str = "PIL", width: int | None = None, height: int | None = None - ) -> None: - self.hwnd = Image.core.createwindow( - title, self.__dispatcher, width or 0, height or 0 - ) - - def __dispatcher(self, action, *args): - return getattr(self, f"ui_handle_{action}")(*args) - - def ui_handle_clear(self, dc, x0, y0, x1, y1): - pass - - def ui_handle_damage(self, x0, y0, x1, y1): - pass - - def ui_handle_destroy(self) -> None: - pass - - def ui_handle_repair(self, dc, x0, y0, x1, y1): - pass - - def ui_handle_resize(self, width, height): - pass - - def mainloop(self) -> None: - Image.core.eventloop() - - -class ImageWindow(Window): - """Create an image window which displays the given image.""" - - def __init__(self, image, title="PIL"): - if not isinstance(image, Dib): - image = Dib(image) - self.image = image - width, height = image.size - super().__init__(title, width=width, height=height) - - def ui_handle_repair(self, dc, x0, y0, x1, y1): - self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/venv/Lib/site-packages/PIL/ImtImagePlugin.py b/venv/Lib/site-packages/PIL/ImtImagePlugin.py deleted file mode 100644 index abb3fb7..0000000 --- a/venv/Lib/site-packages/PIL/ImtImagePlugin.py +++ /dev/null @@ -1,103 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# IM Tools support for PIL -# -# history: -# 1996-05-27 fl Created (read 8-bit images only) -# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) -# -# Copyright (c) Secret Labs AB 1997-2001. -# Copyright (c) Fredrik Lundh 1996-2001. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import re - -from . import Image, ImageFile - -# -# -------------------------------------------------------------------- - -field = re.compile(rb"([a-z]*) ([^ \r\n]*)") - - -## -# Image plugin for IM Tools images. - - -class ImtImageFile(ImageFile.ImageFile): - format = "IMT" - format_description = "IM Tools" - - def _open(self) -> None: - # Quick rejection: if there's not a LF among the first - # 100 bytes, this is (probably) not a text header. 
- - assert self.fp is not None - - buffer = self.fp.read(100) - if b"\n" not in buffer: - msg = "not an IM file" - raise SyntaxError(msg) - - xsize = ysize = 0 - - while True: - if buffer: - s = buffer[:1] - buffer = buffer[1:] - else: - s = self.fp.read(1) - if not s: - break - - if s == b"\x0C": - # image data begins - self.tile = [ - ( - "raw", - (0, 0) + self.size, - self.fp.tell() - len(buffer), - (self.mode, 0, 1), - ) - ] - - break - - else: - # read key/value pair - if b"\n" not in buffer: - buffer += self.fp.read(100) - lines = buffer.split(b"\n") - s += lines.pop(0) - buffer = b"\n".join(lines) - if len(s) == 1 or len(s) > 100: - break - if s[0] == ord(b"*"): - continue # comment - - m = field.match(s) - if not m: - break - k, v = m.group(1, 2) - if k == b"width": - xsize = int(v) - self._size = xsize, ysize - elif k == b"height": - ysize = int(v) - self._size = xsize, ysize - elif k == b"pixel" and v == b"n8": - self._mode = "L" - - -# -# -------------------------------------------------------------------- - -Image.register_open(ImtImageFile.format, ImtImageFile) - -# -# no extension registered (".im" is simply too common) diff --git a/venv/Lib/site-packages/PIL/IptcImagePlugin.py b/venv/Lib/site-packages/PIL/IptcImagePlugin.py deleted file mode 100644 index 73df83b..0000000 --- a/venv/Lib/site-packages/PIL/IptcImagePlugin.py +++ /dev/null @@ -1,235 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# IPTC/NAA file handling -# -# history: -# 1995-10-01 fl Created -# 1998-03-09 fl Cleaned up and added to PIL -# 2002-06-18 fl Added getiptcinfo helper -# -# Copyright (c) Secret Labs AB 1997-2002. -# Copyright (c) Fredrik Lundh 1995. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from io import BytesIO -from typing import Sequence - -from . import Image, ImageFile -from ._binary import i16be as i16 -from ._binary import i32be as i32 -from ._deprecate import deprecate - -COMPRESSION = {1: "raw", 5: "jpeg"} - - -def __getattr__(name: str) -> bytes: - if name == "PAD": - deprecate("IptcImagePlugin.PAD", 12) - return b"\0\0\0\0" - msg = f"module '{__name__}' has no attribute '{name}'" - raise AttributeError(msg) - - -# -# Helpers - - -def _i(c: bytes) -> int: - return i32((b"\0\0\0\0" + c)[-4:]) - - -def _i8(c: int | bytes) -> int: - return c if isinstance(c, int) else c[0] - - -def i(c: bytes) -> int: - """.. deprecated:: 10.2.0""" - deprecate("IptcImagePlugin.i", 12) - return _i(c) - - -def dump(c: Sequence[int | bytes]) -> None: - """.. deprecated:: 10.2.0""" - deprecate("IptcImagePlugin.dump", 12) - for i in c: - print(f"{_i8(i):02x}", end=" ") - print() - - -## -# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields -# from TIFF and JPEG files, use the getiptcinfo function. 
- - -class IptcImageFile(ImageFile.ImageFile): - format = "IPTC" - format_description = "IPTC/NAA" - - def getint(self, key: tuple[int, int]) -> int: - return _i(self.info[key]) - - def field(self) -> tuple[tuple[int, int] | None, int]: - # - # get a IPTC field header - s = self.fp.read(5) - if not s.strip(b"\x00"): - return None, 0 - - tag = s[1], s[2] - - # syntax - if s[0] != 0x1C or tag[0] not in [1, 2, 3, 4, 5, 6, 7, 8, 9, 240]: - msg = "invalid IPTC/NAA file" - raise SyntaxError(msg) - - # field size - size = s[3] - if size > 132: - msg = "illegal field length in IPTC/NAA file" - raise OSError(msg) - elif size == 128: - size = 0 - elif size > 128: - size = _i(self.fp.read(size - 128)) - else: - size = i16(s, 3) - - return tag, size - - def _open(self) -> None: - # load descriptive fields - while True: - offset = self.fp.tell() - tag, size = self.field() - if not tag or tag == (8, 10): - break - if size: - tagdata = self.fp.read(size) - else: - tagdata = None - if tag in self.info: - if isinstance(self.info[tag], list): - self.info[tag].append(tagdata) - else: - self.info[tag] = [self.info[tag], tagdata] - else: - self.info[tag] = tagdata - - # mode - layers = self.info[(3, 60)][0] - component = self.info[(3, 60)][1] - if (3, 65) in self.info: - id = self.info[(3, 65)][0] - 1 - else: - id = 0 - if layers == 1 and not component: - self._mode = "L" - elif layers == 3 and component: - self._mode = "RGB"[id] - elif layers == 4 and component: - self._mode = "CMYK"[id] - - # size - self._size = self.getint((3, 20)), self.getint((3, 30)) - - # compression - try: - compression = COMPRESSION[self.getint((3, 120))] - except KeyError as e: - msg = "Unknown IPTC image compression" - raise OSError(msg) from e - - # tile - if tag == (8, 10): - self.tile = [("iptc", (0, 0) + self.size, offset, compression)] - - def load(self): - if len(self.tile) != 1 or self.tile[0][0] != "iptc": - return ImageFile.ImageFile.load(self) - - offset, compression = self.tile[0][2:] - - self.fp.seek(offset) - - # Copy image data to temporary file - o = BytesIO() - if compression == "raw": - # To simplify access to the extracted file, - # prepend a PPM header - o.write(b"P5\n%d %d\n255\n" % self.size) - while True: - type, size = self.field() - if type != (8, 10): - break - while size > 0: - s = self.fp.read(min(size, 8192)) - if not s: - break - o.write(s) - size -= len(s) - - with Image.open(o) as _im: - _im.load() - self.im = _im.im - - -Image.register_open(IptcImageFile.format, IptcImageFile) - -Image.register_extension(IptcImageFile.format, ".iim") - - -def getiptcinfo(im): - """ - Get IPTC information from TIFF, JPEG, or IPTC file. - - :param im: An image containing IPTC data. - :returns: A dictionary containing IPTC information, or None if - no IPTC information block was found. - """ - from . import JpegImagePlugin, TiffImagePlugin - - data = None - - if isinstance(im, IptcImageFile): - # return info dictionary right away - return im.info - - elif isinstance(im, JpegImagePlugin.JpegImageFile): - # extract the IPTC/NAA resource - photoshop = im.info.get("photoshop") - if photoshop: - data = photoshop.get(0x0404) - - elif isinstance(im, TiffImagePlugin.TiffImageFile): - # get raw data from the IPTC/NAA tag (PhotoShop tags the data - # as 4-byte integers, so we cannot use the get method...) 
- try: - data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] - except (AttributeError, KeyError): - pass - - if data is None: - return None # no properties - - # create an IptcImagePlugin object without initializing it - class FakeImage: - pass - - im = FakeImage() - im.__class__ = IptcImageFile - - # parse the IPTC information chunk - im.info = {} - im.fp = BytesIO(data) - - try: - im._open() - except (IndexError, KeyError): - pass # expected failure - - return im.info diff --git a/venv/Lib/site-packages/PIL/Jpeg2KImagePlugin.py b/venv/Lib/site-packages/PIL/Jpeg2KImagePlugin.py deleted file mode 100644 index e50cd77..0000000 --- a/venv/Lib/site-packages/PIL/Jpeg2KImagePlugin.py +++ /dev/null @@ -1,408 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# JPEG2000 file handling -# -# History: -# 2014-03-12 ajh Created -# 2021-06-30 rogermb Extract dpi information from the 'resc' header box -# -# Copyright (c) 2014 Coriolis Systems Limited -# Copyright (c) 2014 Alastair Houghton -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -import os -import struct -from typing import IO, Tuple, cast - -from . import Image, ImageFile, ImagePalette, _binary - - -class BoxReader: - """ - A small helper class to read fields stored in JPEG2000 header boxes - and to easily step into and read sub-boxes. - """ - - def __init__(self, fp, length=-1): - self.fp = fp - self.has_length = length >= 0 - self.length = length - self.remaining_in_box = -1 - - def _can_read(self, num_bytes: int) -> bool: - if self.has_length and self.fp.tell() + num_bytes > self.length: - # Outside box: ensure we don't read past the known file length - return False - if self.remaining_in_box >= 0: - # Inside box contents: ensure read does not go past box boundaries - return num_bytes <= self.remaining_in_box - else: - return True # No length known, just read - - def _read_bytes(self, num_bytes: int) -> bytes: - if not self._can_read(num_bytes): - msg = "Not enough data in header" - raise SyntaxError(msg) - - data = self.fp.read(num_bytes) - if len(data) < num_bytes: - msg = f"Expected to read {num_bytes} bytes but only got {len(data)}." 
- raise OSError(msg) - - if self.remaining_in_box > 0: - self.remaining_in_box -= num_bytes - return data - - def read_fields(self, field_format: str) -> tuple[int | bytes, ...]: - size = struct.calcsize(field_format) - data = self._read_bytes(size) - return struct.unpack(field_format, data) - - def read_boxes(self) -> BoxReader: - size = self.remaining_in_box - data = self._read_bytes(size) - return BoxReader(io.BytesIO(data), size) - - def has_next_box(self) -> bool: - if self.has_length: - return self.fp.tell() + self.remaining_in_box < self.length - else: - return True - - def next_box_type(self) -> bytes: - # Skip the rest of the box if it has not been read - if self.remaining_in_box > 0: - self.fp.seek(self.remaining_in_box, os.SEEK_CUR) - self.remaining_in_box = -1 - - # Read the length and type of the next box - lbox, tbox = cast(Tuple[int, bytes], self.read_fields(">I4s")) - if lbox == 1: - lbox = cast(int, self.read_fields(">Q")[0]) - hlen = 16 - else: - hlen = 8 - - if lbox < hlen or not self._can_read(lbox - hlen): - msg = "Invalid header length" - raise SyntaxError(msg) - - self.remaining_in_box = lbox - hlen - return tbox - - -def _parse_codestream(fp) -> tuple[tuple[int, int], str]: - """Parse the JPEG 2000 codestream to extract the size and component - count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" - - hdr = fp.read(2) - lsiz = _binary.i16be(hdr) - siz = hdr + fp.read(lsiz - 2) - lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from( - ">HHIIIIIIIIH", siz - ) - - size = (xsiz - xosiz, ysiz - yosiz) - if csiz == 1: - ssiz = struct.unpack_from(">B", siz, 38) - if (ssiz[0] & 0x7F) + 1 > 8: - mode = "I;16" - else: - mode = "L" - elif csiz == 2: - mode = "LA" - elif csiz == 3: - mode = "RGB" - elif csiz == 4: - mode = "RGBA" - else: - msg = "unable to determine J2K image mode" - raise SyntaxError(msg) - - return size, mode - - -def _res_to_dpi(num: int, denom: int, exp: int) -> float | None: - """Convert JPEG2000's (numerator, denominator, exponent-base-10) resolution, - calculated as (num / denom) * 10^exp and stored in dots per meter, - to floating-point dots per inch.""" - if denom == 0: - return None - return (254 * num * (10**exp)) / (10000 * denom) - - -def _parse_jp2_header(fp): - """Parse the JP2 header box to extract size, component count, - color space information, and optionally DPI information, - returning a (size, mode, mimetype, dpi) tuple.""" - - # Find the JP2 header box - reader = BoxReader(fp) - header = None - mimetype = None - while reader.has_next_box(): - tbox = reader.next_box_type() - - if tbox == b"jp2h": - header = reader.read_boxes() - break - elif tbox == b"ftyp": - if reader.read_fields(">4s")[0] == b"jpx ": - mimetype = "image/jpx" - - size = None - mode = None - bpc = None - nc = None - dpi = None # 2-tuple of DPI info, or None - palette = None - - while header.has_next_box(): - tbox = header.next_box_type() - - if tbox == b"ihdr": - height, width, nc, bpc = header.read_fields(">IIHB") - size = (width, height) - if nc == 1 and (bpc & 0x7F) > 8: - mode = "I;16" - elif nc == 1: - mode = "L" - elif nc == 2: - mode = "LA" - elif nc == 3: - mode = "RGB" - elif nc == 4: - mode = "RGBA" - elif tbox == b"colr" and nc == 4: - meth, _, _, enumcs = header.read_fields(">BBBI") - if meth == 1 and enumcs == 12: - mode = "CMYK" - elif tbox == b"pclr" and mode in ("L", "LA"): - ne, npc = header.read_fields(">HB") - bitdepths = header.read_fields(">" + ("B" * npc)) - if max(bitdepths) <= 8: - palette = 
ImagePalette.ImagePalette() - for i in range(ne): - palette.getcolor(header.read_fields(">" + ("B" * npc))) - mode = "P" if mode == "L" else "PA" - elif tbox == b"res ": - res = header.read_boxes() - while res.has_next_box(): - tres = res.next_box_type() - if tres == b"resc": - vrcn, vrcd, hrcn, hrcd, vrce, hrce = res.read_fields(">HHHHBB") - hres = _res_to_dpi(hrcn, hrcd, hrce) - vres = _res_to_dpi(vrcn, vrcd, vrce) - if hres is not None and vres is not None: - dpi = (hres, vres) - break - - if size is None or mode is None: - msg = "Malformed JP2 header" - raise SyntaxError(msg) - - return size, mode, mimetype, dpi, palette - - -## -# Image plugin for JPEG2000 images. - - -class Jpeg2KImageFile(ImageFile.ImageFile): - format = "JPEG2000" - format_description = "JPEG 2000 (ISO 15444)" - - def _open(self) -> None: - sig = self.fp.read(4) - if sig == b"\xff\x4f\xff\x51": - self.codec = "j2k" - self._size, self._mode = _parse_codestream(self.fp) - else: - sig = sig + self.fp.read(8) - - if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a": - self.codec = "jp2" - header = _parse_jp2_header(self.fp) - self._size, self._mode, self.custom_mimetype, dpi, self.palette = header - if dpi is not None: - self.info["dpi"] = dpi - if self.fp.read(12).endswith(b"jp2c\xff\x4f\xff\x51"): - self._parse_comment() - else: - msg = "not a JPEG 2000 file" - raise SyntaxError(msg) - - self._reduce = 0 - self.layers = 0 - - fd = -1 - length = -1 - - try: - fd = self.fp.fileno() - length = os.fstat(fd).st_size - except Exception: - fd = -1 - try: - pos = self.fp.tell() - self.fp.seek(0, io.SEEK_END) - length = self.fp.tell() - self.fp.seek(pos) - except Exception: - length = -1 - - self.tile = [ - ( - "jpeg2k", - (0, 0) + self.size, - 0, - (self.codec, self._reduce, self.layers, fd, length), - ) - ] - - def _parse_comment(self) -> None: - hdr = self.fp.read(2) - length = _binary.i16be(hdr) - self.fp.seek(length - 2, os.SEEK_CUR) - - while True: - marker = self.fp.read(2) - if not marker: - break - typ = marker[1] - if typ in (0x90, 0xD9): - # Start of tile or end of codestream - break - hdr = self.fp.read(2) - length = _binary.i16be(hdr) - if typ == 0x64: - # Comment - self.info["comment"] = self.fp.read(length - 2)[2:] - break - else: - self.fp.seek(length - 2, os.SEEK_CUR) - - @property - def reduce(self): - # https://github.com/python-pillow/Pillow/issues/4343 found that the - # new Image 'reduce' method was shadowed by this plugin's 'reduce' - # property. 
This attempts to allow for both scenarios - return self._reduce or super().reduce - - @reduce.setter - def reduce(self, value): - self._reduce = value - - def load(self): - if self.tile and self._reduce: - power = 1 << self._reduce - adjust = power >> 1 - self._size = ( - int((self.size[0] + adjust) / power), - int((self.size[1] + adjust) / power), - ) - - # Update the reduce and layers settings - t = self.tile[0] - t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4]) - self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] - - return ImageFile.ImageFile.load(self) - - -def _accept(prefix: bytes) -> bool: - return ( - prefix[:4] == b"\xff\x4f\xff\x51" - or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" - ) - - -# ------------------------------------------------------------ -# Save support - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - # Get the keyword arguments - info = im.encoderinfo - - if isinstance(filename, str): - filename = filename.encode() - if filename.endswith(b".j2k") or info.get("no_jp2", False): - kind = "j2k" - else: - kind = "jp2" - - offset = info.get("offset", None) - tile_offset = info.get("tile_offset", None) - tile_size = info.get("tile_size", None) - quality_mode = info.get("quality_mode", "rates") - quality_layers = info.get("quality_layers", None) - if quality_layers is not None and not ( - isinstance(quality_layers, (list, tuple)) - and all( - isinstance(quality_layer, (int, float)) for quality_layer in quality_layers - ) - ): - msg = "quality_layers must be a sequence of numbers" - raise ValueError(msg) - - num_resolutions = info.get("num_resolutions", 0) - cblk_size = info.get("codeblock_size", None) - precinct_size = info.get("precinct_size", None) - irreversible = info.get("irreversible", False) - progression = info.get("progression", "LRCP") - cinema_mode = info.get("cinema_mode", "no") - mct = info.get("mct", 0) - signed = info.get("signed", False) - comment = info.get("comment") - if isinstance(comment, str): - comment = comment.encode() - plt = info.get("plt", False) - - fd = -1 - if hasattr(fp, "fileno"): - try: - fd = fp.fileno() - except Exception: - fd = -1 - - im.encoderconfig = ( - offset, - tile_offset, - tile_size, - quality_mode, - quality_layers, - num_resolutions, - cblk_size, - precinct_size, - irreversible, - progression, - cinema_mode, - mct, - signed, - fd, - comment, - plt, - ) - - ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)]) - - -# ------------------------------------------------------------ -# Registry stuff - - -Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) -Image.register_save(Jpeg2KImageFile.format, _save) - -Image.register_extensions( - Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"] -) - -Image.register_mime(Jpeg2KImageFile.format, "image/jp2") diff --git a/venv/Lib/site-packages/PIL/JpegImagePlugin.py b/venv/Lib/site-packages/PIL/JpegImagePlugin.py deleted file mode 100644 index b15bf06..0000000 --- a/venv/Lib/site-packages/PIL/JpegImagePlugin.py +++ /dev/null @@ -1,861 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# JPEG (JFIF) file handling -# -# See "Digital Compression and Coding of Continuous-Tone Still Images, -# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) -# -# History: -# 1995-09-09 fl Created -# 1995-09-13 fl Added full parser -# 1996-03-25 fl Added hack to use the IJG command line utilities -# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug -# 1996-05-28 fl Added draft support, JFIF version (0.1) -# 1996-12-30 fl Added encoder options, added progression property (0.2) -# 1997-08-27 fl Save mode 1 images as BW (0.3) -# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) -# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) -# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) -# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) -# 2003-04-25 fl Added experimental EXIF decoder (0.5) -# 2003-06-06 fl Added experimental EXIF GPSinfo decoder -# 2003-09-13 fl Extract COM markers -# 2009-09-06 fl Added icc_profile support (from Florian Hoech) -# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) -# 2009-03-08 fl Added subsampling support (from Justin Huff). -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1995-1996 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import array -import io -import math -import os -import struct -import subprocess -import sys -import tempfile -import warnings -from typing import IO, Any - -from . import Image, ImageFile -from ._binary import i16be as i16 -from ._binary import i32be as i32 -from ._binary import o8 -from ._binary import o16be as o16 -from .JpegPresets import presets - -# -# Parser - - -def Skip(self: JpegImageFile, marker: int) -> None: - n = i16(self.fp.read(2)) - 2 - ImageFile._safe_read(self.fp, n) - - -def APP(self, marker): - # - # Application marker. Store these in the APP dictionary. - # Also look for well-known application markers. - - n = i16(self.fp.read(2)) - 2 - s = ImageFile._safe_read(self.fp, n) - - app = "APP%d" % (marker & 15) - - self.app[app] = s # compatibility - self.applist.append((app, s)) - - if marker == 0xFFE0 and s[:4] == b"JFIF": - # extract JFIF information - self.info["jfif"] = version = i16(s, 5) # version - self.info["jfif_version"] = divmod(version, 256) - # extract JFIF properties - try: - jfif_unit = s[7] - jfif_density = i16(s, 8), i16(s, 10) - except Exception: - pass - else: - if jfif_unit == 1: - self.info["dpi"] = jfif_density - self.info["jfif_unit"] = jfif_unit - self.info["jfif_density"] = jfif_density - elif marker == 0xFFE1 and s[:6] == b"Exif\0\0": - # extract EXIF information - if "exif" in self.info: - self.info["exif"] += s[6:] - else: - self.info["exif"] = s - self._exif_offset = self.fp.tell() - n + 6 - elif marker == 0xFFE1 and s[:29] == b"http://ns.adobe.com/xap/1.0/\x00": - self.info["xmp"] = s.split(b"\x00", 1)[1] - elif marker == 0xFFE2 and s[:5] == b"FPXR\0": - # extract FlashPix information (incomplete) - self.info["flashpix"] = s # FIXME: value will change - elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": - # Since an ICC profile can be larger than the maximum size of - # a JPEG marker (64K), we need provisions to split it into - # multiple markers. 
The format defined by the ICC specifies - # one or more APP2 markers containing the following data: - # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) - # Marker sequence number 1, 2, etc (1 byte) - # Number of markers Total of APP2's used (1 byte) - # Profile data (remainder of APP2 data) - # Decoders should use the marker sequence numbers to - # reassemble the profile, rather than assuming that the APP2 - # markers appear in the correct sequence. - self.icclist.append(s) - elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00": - # parse the image resource block - offset = 14 - photoshop = self.info.setdefault("photoshop", {}) - while s[offset : offset + 4] == b"8BIM": - try: - offset += 4 - # resource code - code = i16(s, offset) - offset += 2 - # resource name (usually empty) - name_len = s[offset] - # name = s[offset+1:offset+1+name_len] - offset += 1 + name_len - offset += offset & 1 # align - # resource data block - size = i32(s, offset) - offset += 4 - data = s[offset : offset + size] - if code == 0x03ED: # ResolutionInfo - data = { - "XResolution": i32(data, 0) / 65536, - "DisplayedUnitsX": i16(data, 4), - "YResolution": i32(data, 8) / 65536, - "DisplayedUnitsY": i16(data, 12), - } - photoshop[code] = data - offset += size - offset += offset & 1 # align - except struct.error: - break # insufficient data - - elif marker == 0xFFEE and s[:5] == b"Adobe": - self.info["adobe"] = i16(s, 5) - # extract Adobe custom properties - try: - adobe_transform = s[11] - except IndexError: - pass - else: - self.info["adobe_transform"] = adobe_transform - elif marker == 0xFFE2 and s[:4] == b"MPF\0": - # extract MPO information - self.info["mp"] = s[4:] - # offset is current location minus buffer size - # plus constant header size - self.info["mpoffset"] = self.fp.tell() - n + 4 - - -def COM(self: JpegImageFile, marker: int) -> None: - # - # Comment marker. Store these in the APP dictionary. - n = i16(self.fp.read(2)) - 2 - s = ImageFile._safe_read(self.fp, n) - - self.info["comment"] = s - self.app["COM"] = s # compatibility - self.applist.append(("COM", s)) - - -def SOF(self: JpegImageFile, marker: int) -> None: - # - # Start of frame marker. Defines the size and mode of the - # image. JPEG is colour blind, so we use some simple - # heuristics to map the number of layers to an appropriate - # mode. Note that this could be made a bit brighter, by - # looking for JFIF and Adobe APP markers. 
- - n = i16(self.fp.read(2)) - 2 - s = ImageFile._safe_read(self.fp, n) - self._size = i16(s, 3), i16(s, 1) - - self.bits = s[0] - if self.bits != 8: - msg = f"cannot handle {self.bits}-bit layers" - raise SyntaxError(msg) - - self.layers = s[5] - if self.layers == 1: - self._mode = "L" - elif self.layers == 3: - self._mode = "RGB" - elif self.layers == 4: - self._mode = "CMYK" - else: - msg = f"cannot handle {self.layers}-layer images" - raise SyntaxError(msg) - - if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: - self.info["progressive"] = self.info["progression"] = 1 - - if self.icclist: - # fixup icc profile - self.icclist.sort() # sort by sequence number - if self.icclist[0][13] == len(self.icclist): - profile = [p[14:] for p in self.icclist] - icc_profile = b"".join(profile) - else: - icc_profile = None # wrong number of fragments - self.info["icc_profile"] = icc_profile - self.icclist = [] - - for i in range(6, len(s), 3): - t = s[i : i + 3] - # 4-tuples: id, vsamp, hsamp, qtable - self.layer.append((t[0], t[1] // 16, t[1] & 15, t[2])) - - -def DQT(self: JpegImageFile, marker: int) -> None: - # - # Define quantization table. Note that there might be more - # than one table in each marker. - - # FIXME: The quantization tables can be used to estimate the - # compression quality. - - n = i16(self.fp.read(2)) - 2 - s = ImageFile._safe_read(self.fp, n) - while len(s): - v = s[0] - precision = 1 if (v // 16 == 0) else 2 # in bytes - qt_length = 1 + precision * 64 - if len(s) < qt_length: - msg = "bad quantization table marker" - raise SyntaxError(msg) - data = array.array("B" if precision == 1 else "H", s[1:qt_length]) - if sys.byteorder == "little" and precision > 1: - data.byteswap() # the values are always big-endian - self.quantization[v & 15] = [data[i] for i in zigzag_index] - s = s[qt_length:] - - -# -# JPEG marker table - -MARKER = { - 0xFFC0: ("SOF0", "Baseline DCT", SOF), - 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), - 0xFFC2: ("SOF2", "Progressive DCT", SOF), - 0xFFC3: ("SOF3", "Spatial lossless", SOF), - 0xFFC4: ("DHT", "Define Huffman table", Skip), - 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), - 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), - 0xFFC7: ("SOF7", "Differential spatial", SOF), - 0xFFC8: ("JPG", "Extension", None), - 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), - 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), - 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), - 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), - 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), - 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), - 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), - 0xFFD0: ("RST0", "Restart 0", None), - 0xFFD1: ("RST1", "Restart 1", None), - 0xFFD2: ("RST2", "Restart 2", None), - 0xFFD3: ("RST3", "Restart 3", None), - 0xFFD4: ("RST4", "Restart 4", None), - 0xFFD5: ("RST5", "Restart 5", None), - 0xFFD6: ("RST6", "Restart 6", None), - 0xFFD7: ("RST7", "Restart 7", None), - 0xFFD8: ("SOI", "Start of image", None), - 0xFFD9: ("EOI", "End of image", None), - 0xFFDA: ("SOS", "Start of scan", Skip), - 0xFFDB: ("DQT", "Define quantization table", DQT), - 0xFFDC: ("DNL", "Define number of lines", Skip), - 0xFFDD: ("DRI", "Define restart interval", Skip), - 0xFFDE: ("DHP", "Define hierarchical progression", SOF), - 0xFFDF: ("EXP", "Expand reference component", Skip), - 0xFFE0: ("APP0", "Application segment 0", APP), - 0xFFE1: ("APP1", "Application segment 1", APP), - 0xFFE2: ("APP2", "Application 
segment 2", APP), - 0xFFE3: ("APP3", "Application segment 3", APP), - 0xFFE4: ("APP4", "Application segment 4", APP), - 0xFFE5: ("APP5", "Application segment 5", APP), - 0xFFE6: ("APP6", "Application segment 6", APP), - 0xFFE7: ("APP7", "Application segment 7", APP), - 0xFFE8: ("APP8", "Application segment 8", APP), - 0xFFE9: ("APP9", "Application segment 9", APP), - 0xFFEA: ("APP10", "Application segment 10", APP), - 0xFFEB: ("APP11", "Application segment 11", APP), - 0xFFEC: ("APP12", "Application segment 12", APP), - 0xFFED: ("APP13", "Application segment 13", APP), - 0xFFEE: ("APP14", "Application segment 14", APP), - 0xFFEF: ("APP15", "Application segment 15", APP), - 0xFFF0: ("JPG0", "Extension 0", None), - 0xFFF1: ("JPG1", "Extension 1", None), - 0xFFF2: ("JPG2", "Extension 2", None), - 0xFFF3: ("JPG3", "Extension 3", None), - 0xFFF4: ("JPG4", "Extension 4", None), - 0xFFF5: ("JPG5", "Extension 5", None), - 0xFFF6: ("JPG6", "Extension 6", None), - 0xFFF7: ("JPG7", "Extension 7", None), - 0xFFF8: ("JPG8", "Extension 8", None), - 0xFFF9: ("JPG9", "Extension 9", None), - 0xFFFA: ("JPG10", "Extension 10", None), - 0xFFFB: ("JPG11", "Extension 11", None), - 0xFFFC: ("JPG12", "Extension 12", None), - 0xFFFD: ("JPG13", "Extension 13", None), - 0xFFFE: ("COM", "Comment", COM), -} - - -def _accept(prefix: bytes) -> bool: - # Magic number was taken from https://en.wikipedia.org/wiki/JPEG - return prefix[:3] == b"\xFF\xD8\xFF" - - -## -# Image plugin for JPEG and JFIF images. - - -class JpegImageFile(ImageFile.ImageFile): - format = "JPEG" - format_description = "JPEG (ISO 10918)" - - def _open(self): - s = self.fp.read(3) - - if not _accept(s): - msg = "not a JPEG file" - raise SyntaxError(msg) - s = b"\xFF" - - # Create attributes - self.bits = self.layers = 0 - - # JPEG specifics (internal) - self.layer = [] - self.huffman_dc = {} - self.huffman_ac = {} - self.quantization = {} - self.app = {} # compatibility - self.applist = [] - self.icclist = [] - - while True: - i = s[0] - if i == 0xFF: - s = s + self.fp.read(1) - i = i16(s) - else: - # Skip non-0xFF junk - s = self.fp.read(1) - continue - - if i in MARKER: - name, description, handler = MARKER[i] - if handler is not None: - handler(self, i) - if i == 0xFFDA: # start of scan - rawmode = self.mode - if self.mode == "CMYK": - rawmode = "CMYK;I" # assume adobe conventions - self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))] - # self.__offset = self.fp.tell() - break - s = self.fp.read(1) - elif i in {0, 0xFFFF}: - # padded marker or junk; move on - s = b"\xff" - elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) - s = self.fp.read(1) - else: - msg = "no marker found" - raise SyntaxError(msg) - - self._read_dpi_from_exif() - - def load_read(self, read_bytes: int) -> bytes: - """ - internal: read more image data - For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker - so libjpeg can finish decoding - """ - s = self.fp.read(read_bytes) - - if not s and ImageFile.LOAD_TRUNCATED_IMAGES and not hasattr(self, "_ended"): - # Premature EOF. 
- # Pretend file is finished adding EOI marker - self._ended = True - return b"\xFF\xD9" - - return s - - def draft( - self, mode: str | None, size: tuple[int, int] | None - ) -> tuple[str, tuple[int, int, float, float]] | None: - if len(self.tile) != 1: - return None - - # Protect from second call - if self.decoderconfig: - return None - - d, e, o, a = self.tile[0] - scale = 1 - original_size = self.size - - if a[0] == "RGB" and mode in ["L", "YCbCr"]: - self._mode = mode - a = mode, "" - - if size: - scale = min(self.size[0] // size[0], self.size[1] // size[1]) - for s in [8, 4, 2, 1]: - if scale >= s: - break - e = ( - e[0], - e[1], - (e[2] - e[0] + s - 1) // s + e[0], - (e[3] - e[1] + s - 1) // s + e[1], - ) - self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s) - scale = s - - self.tile = [(d, e, o, a)] - self.decoderconfig = (scale, 0) - - box = (0, 0, original_size[0] / scale, original_size[1] / scale) - return self.mode, box - - def load_djpeg(self) -> None: - # ALTERNATIVE: handle JPEGs via the IJG command line utilities - - f, path = tempfile.mkstemp() - os.close(f) - if os.path.exists(self.filename): - subprocess.check_call(["djpeg", "-outfile", path, self.filename]) - else: - try: - os.unlink(path) - except OSError: - pass - - msg = "Invalid Filename" - raise ValueError(msg) - - try: - with Image.open(path) as _im: - _im.load() - self.im = _im.im - finally: - try: - os.unlink(path) - except OSError: - pass - - self._mode = self.im.mode - self._size = self.im.size - - self.tile = [] - - def _getexif(self) -> dict[str, Any] | None: - return _getexif(self) - - def _read_dpi_from_exif(self) -> None: - # If DPI isn't in JPEG header, fetch from EXIF - if "dpi" in self.info or "exif" not in self.info: - return - try: - exif = self.getexif() - resolution_unit = exif[0x0128] - x_resolution = exif[0x011A] - try: - dpi = float(x_resolution[0]) / x_resolution[1] - except TypeError: - dpi = x_resolution - if math.isnan(dpi): - msg = "DPI is not a number" - raise ValueError(msg) - if resolution_unit == 3: # cm - # 1 dpcm = 2.54 dpi - dpi *= 2.54 - self.info["dpi"] = dpi, dpi - except ( - struct.error, # truncated EXIF - KeyError, # dpi not included - SyntaxError, # invalid/unreadable EXIF - TypeError, # dpi is an invalid float - ValueError, # dpi is an invalid float - ZeroDivisionError, # invalid dpi rational value - ): - self.info["dpi"] = 72, 72 - - def _getmp(self): - return _getmp(self) - - -def _getexif(self) -> dict[str, Any] | None: - if "exif" not in self.info: - return None - return self.getexif()._get_merged_dict() - - -def _getmp(self): - # Extract MP information. This method was inspired by the "highly - # experimental" _getexif version that's been in use for years now, - # itself based on the ImageFileDirectory class in the TIFF plugin. - - # The MP record essentially consists of a TIFF file embedded in a JPEG - # application marker. - try: - data = self.info["mp"] - except KeyError: - return None - file_contents = io.BytesIO(data) - head = file_contents.read(8) - endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<" - # process dictionary - from . 
import TiffImagePlugin - - try: - info = TiffImagePlugin.ImageFileDirectory_v2(head) - file_contents.seek(info.next) - info.load(file_contents) - mp = dict(info) - except Exception as e: - msg = "malformed MP Index (unreadable directory)" - raise SyntaxError(msg) from e - # it's an error not to have a number of images - try: - quant = mp[0xB001] - except KeyError as e: - msg = "malformed MP Index (no number of images)" - raise SyntaxError(msg) from e - # get MP entries - mpentries = [] - try: - rawmpentries = mp[0xB002] - for entrynum in range(0, quant): - unpackedentry = struct.unpack_from( - f"{endianness}LLLHH", rawmpentries, entrynum * 16 - ) - labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2") - mpentry = dict(zip(labels, unpackedentry)) - mpentryattr = { - "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)), - "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)), - "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)), - "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27, - "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24, - "MPType": mpentry["Attribute"] & 0x00FFFFFF, - } - if mpentryattr["ImageDataFormat"] == 0: - mpentryattr["ImageDataFormat"] = "JPEG" - else: - msg = "unsupported picture format in MPO" - raise SyntaxError(msg) - mptypemap = { - 0x000000: "Undefined", - 0x010001: "Large Thumbnail (VGA Equivalent)", - 0x010002: "Large Thumbnail (Full HD Equivalent)", - 0x020001: "Multi-Frame Image (Panorama)", - 0x020002: "Multi-Frame Image: (Disparity)", - 0x020003: "Multi-Frame Image: (Multi-Angle)", - 0x030000: "Baseline MP Primary Image", - } - mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown") - mpentry["Attribute"] = mpentryattr - mpentries.append(mpentry) - mp[0xB002] = mpentries - except KeyError as e: - msg = "malformed MP Index (bad MP Entry)" - raise SyntaxError(msg) from e - # Next we should try and parse the individual image unique ID list; - # we don't because I've never seen this actually used in a real MPO - # file and so can't test it. - return mp - - -# -------------------------------------------------------------------- -# stuff to save JPEG files - -RAWMODE = { - "1": "L", - "L": "L", - "RGB": "RGB", - "RGBX": "RGB", - "CMYK": "CMYK;I", # assume adobe conventions - "YCbCr": "YCbCr", -} - -# fmt: off -zigzag_index = ( - 0, 1, 5, 6, 14, 15, 27, 28, - 2, 4, 7, 13, 16, 26, 29, 42, - 3, 8, 12, 17, 25, 30, 41, 43, - 9, 11, 18, 24, 31, 40, 44, 53, - 10, 19, 23, 32, 39, 45, 52, 54, - 20, 22, 33, 38, 46, 51, 55, 60, - 21, 34, 37, 47, 50, 56, 59, 61, - 35, 36, 48, 49, 57, 58, 62, 63, -) - -samplings = { - (1, 1, 1, 1, 1, 1): 0, - (2, 1, 1, 1, 1, 1): 1, - (2, 2, 1, 1, 1, 1): 2, -} -# fmt: on - - -def get_sampling(im): - # There's no subsampling when images have only 1 layer - # (grayscale images) or when they are CMYK (4 layers), - # so set subsampling to the default value. - # - # NOTE: currently Pillow can't encode JPEG to YCCK format. - # If YCCK support is added in the future, subsampling code will have - # to be updated (here and in JpegEncode.c) to deal with 4 layers. 
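get_sampling below reduces the (h, v) sampling factors of the first three components to the conventional codes 0, 1 and 2 for 4:4:4, 4:2:2 and 4:2:0, and _save further down accepts those same ratios as strings. A small round-trip sketch, with an illustrative file name, that saves with explicit subsampling and reads the code back from the re-opened file:

from PIL import Image, JpegImagePlugin

im = Image.new("RGB", (64, 64), "red")
im.save("sample.jpg", quality=85, subsampling="4:2:0")

with Image.open("sample.jpg") as reopened:
    print(JpegImagePlugin.get_sampling(reopened))   # 2, i.e. 4:2:0
    print(sorted(reopened.quantization))            # table slots parsed by the DQT handler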
- if not hasattr(im, "layers") or im.layers in (1, 4): - return -1 - sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] - return samplings.get(sampling, -1) - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.width == 0 or im.height == 0: - msg = "cannot write empty image as JPEG" - raise ValueError(msg) - - try: - rawmode = RAWMODE[im.mode] - except KeyError as e: - msg = f"cannot write mode {im.mode} as JPEG" - raise OSError(msg) from e - - info = im.encoderinfo - - dpi = [round(x) for x in info.get("dpi", (0, 0))] - - quality = info.get("quality", -1) - subsampling = info.get("subsampling", -1) - qtables = info.get("qtables") - - if quality == "keep": - quality = -1 - subsampling = "keep" - qtables = "keep" - elif quality in presets: - preset = presets[quality] - quality = -1 - subsampling = preset.get("subsampling", -1) - qtables = preset.get("quantization") - elif not isinstance(quality, int): - msg = "Invalid quality setting" - raise ValueError(msg) - else: - if subsampling in presets: - subsampling = presets[subsampling].get("subsampling", -1) - if isinstance(qtables, str) and qtables in presets: - qtables = presets[qtables].get("quantization") - - if subsampling == "4:4:4": - subsampling = 0 - elif subsampling == "4:2:2": - subsampling = 1 - elif subsampling == "4:2:0": - subsampling = 2 - elif subsampling == "4:1:1": - # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0. - # Set 4:2:0 if someone is still using that value. - subsampling = 2 - elif subsampling == "keep": - if im.format != "JPEG": - msg = "Cannot use 'keep' when original image is not a JPEG" - raise ValueError(msg) - subsampling = get_sampling(im) - - def validate_qtables(qtables): - if qtables is None: - return qtables - if isinstance(qtables, str): - try: - lines = [ - int(num) - for line in qtables.splitlines() - for num in line.split("#", 1)[0].split() - ] - except ValueError as e: - msg = "Invalid quantization table" - raise ValueError(msg) from e - else: - qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)] - if isinstance(qtables, (tuple, list, dict)): - if isinstance(qtables, dict): - qtables = [ - qtables[key] for key in range(len(qtables)) if key in qtables - ] - elif isinstance(qtables, tuple): - qtables = list(qtables) - if not (0 < len(qtables) < 5): - msg = "None or too many quantization tables" - raise ValueError(msg) - for idx, table in enumerate(qtables): - try: - if len(table) != 64: - msg = "Invalid quantization table" - raise TypeError(msg) - table = array.array("H", table) - except TypeError as e: - msg = "Invalid quantization table" - raise ValueError(msg) from e - else: - qtables[idx] = list(table) - return qtables - - if qtables == "keep": - if im.format != "JPEG": - msg = "Cannot use 'keep' when original image is not a JPEG" - raise ValueError(msg) - qtables = getattr(im, "quantization", None) - qtables = validate_qtables(qtables) - - extra = info.get("extra", b"") - - MAX_BYTES_IN_MARKER = 65533 - icc_profile = info.get("icc_profile") - if icc_profile: - ICC_OVERHEAD_LEN = 14 - MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN - markers = [] - while icc_profile: - markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) - icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] - i = 1 - for marker in markers: - size = o16(2 + ICC_OVERHEAD_LEN + len(marker)) - extra += ( - b"\xFF\xE2" - + size - + b"ICC_PROFILE\0" - + o8(i) - + o8(len(markers)) - + marker - ) - i += 1 - - comment = info.get("comment", 
im.info.get("comment")) - - # "progressive" is the official name, but older documentation - # says "progression" - # FIXME: issue a warning if the wrong form is used (post-1.1.7) - progressive = info.get("progressive", False) or info.get("progression", False) - - optimize = info.get("optimize", False) - - exif = info.get("exif", b"") - if isinstance(exif, Image.Exif): - exif = exif.tobytes() - if len(exif) > MAX_BYTES_IN_MARKER: - msg = "EXIF data is too long" - raise ValueError(msg) - - # get keyword arguments - im.encoderconfig = ( - quality, - progressive, - info.get("smooth", 0), - optimize, - info.get("keep_rgb", False), - info.get("streamtype", 0), - dpi[0], - dpi[1], - subsampling, - info.get("restart_marker_blocks", 0), - info.get("restart_marker_rows", 0), - qtables, - comment, - extra, - exif, - ) - - # if we optimize, libjpeg needs a buffer big enough to hold the whole image - # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is - # channels*size, this is a value that's been used in a django patch. - # https://github.com/matthewwithanm/django-imagekit/issues/50 - bufsize = 0 - if optimize or progressive: - # CMYK can be bigger - if im.mode == "CMYK": - bufsize = 4 * im.size[0] * im.size[1] - # keep sets quality to -1, but the actual value may be high. - elif quality >= 95 or quality == -1: - bufsize = 2 * im.size[0] * im.size[1] - else: - bufsize = im.size[0] * im.size[1] - if exif: - bufsize += len(exif) + 5 - if extra: - bufsize += len(extra) + 1 - else: - # The EXIF info needs to be written as one block, + APP1, + one spare byte. - # Ensure that our buffer is big enough. Same with the icc_profile block. - bufsize = max(bufsize, len(exif) + 5, len(extra) + 1) - - ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize) - - -def _save_cjpeg(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - # ALTERNATIVE: handle JPEGs via the IJG command line utilities. - tempfile = im._dump() - subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) - try: - os.unlink(tempfile) - except OSError: - pass - - -## -# Factory for making JPEG and MPO instances -def jpeg_factory(fp=None, filename=None): - im = JpegImageFile(fp, filename) - try: - mpheader = im._getmp() - if mpheader[45057] > 1: - for segment, content in im.applist: - if segment == "APP1" and b' hdrgm:Version="' in content: - # Ultra HDR images are not yet supported - return im - # It's actually an MPO - from .MpoImagePlugin import MpoImageFile - - # Don't reload everything, just convert it. - im = MpoImageFile.adopt(im, mpheader) - except (TypeError, IndexError): - # It is really a JPEG - pass - except SyntaxError: - warnings.warn( - "Image appears to be a malformed MPO file, it will be " - "interpreted as a base JPEG file" - ) - return im - - -# --------------------------------------------------------------------- -# Registry stuff - -Image.register_open(JpegImageFile.format, jpeg_factory, _accept) -Image.register_save(JpegImageFile.format, _save) - -Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"]) - -Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/venv/Lib/site-packages/PIL/JpegPresets.py b/venv/Lib/site-packages/PIL/JpegPresets.py deleted file mode 100644 index 3aefa07..0000000 --- a/venv/Lib/site-packages/PIL/JpegPresets.py +++ /dev/null @@ -1,242 +0,0 @@ -""" -JPEG quality settings equivalent to the Photoshop settings. -Can be used when saving JPEG files. 
- -The following presets are available by default: -``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``, -``low``, ``medium``, ``high``, ``maximum``. -More presets can be added to the :py:data:`presets` dict if needed. - -To apply the preset, specify:: - - quality="preset_name" - -To apply only the quantization table:: - - qtables="preset_name" - -To apply only the subsampling setting:: - - subsampling="preset_name" - -Example:: - - im.save("image_name.jpg", quality="web_high") - -Subsampling ------------ - -Subsampling is the practice of encoding images by implementing less resolution -for chroma information than for luma information. -(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling) - -Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and -4:2:0. - -You can get the subsampling of a JPEG with the -:func:`.JpegImagePlugin.get_sampling` function. - -In JPEG compressed data a JPEG marker is used instead of an EXIF tag. -(ref.: https://web.archive.org/web/20240227115053/https://exiv2.org/tags.html) - - -Quantization tables -------------------- - -They are values use by the DCT (Discrete cosine transform) to remove -*unnecessary* information from the image (the lossy part of the compression). -(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices, -https://en.wikipedia.org/wiki/JPEG#Quantization) - -You can get the quantization tables of a JPEG with:: - - im.quantization - -This will return a dict with a number of lists. You can pass this dict -directly as the qtables argument when saving a JPEG. - -The quantization table format in presets is a list with sublists. These formats -are interchangeable. - -Libjpeg ref.: -https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html - -""" - -from __future__ import annotations - -# fmt: off -presets = { - 'web_low': {'subsampling': 2, # "4:2:0" - 'quantization': [ - [20, 16, 25, 39, 50, 46, 62, 68, - 16, 18, 23, 38, 38, 53, 65, 68, - 25, 23, 31, 38, 53, 65, 68, 68, - 39, 38, 38, 53, 65, 68, 68, 68, - 50, 38, 53, 65, 68, 68, 68, 68, - 46, 53, 65, 68, 68, 68, 68, 68, - 62, 65, 68, 68, 68, 68, 68, 68, - 68, 68, 68, 68, 68, 68, 68, 68], - [21, 25, 32, 38, 54, 68, 68, 68, - 25, 28, 24, 38, 54, 68, 68, 68, - 32, 24, 32, 43, 66, 68, 68, 68, - 38, 38, 43, 53, 68, 68, 68, 68, - 54, 54, 66, 68, 68, 68, 68, 68, - 68, 68, 68, 68, 68, 68, 68, 68, - 68, 68, 68, 68, 68, 68, 68, 68, - 68, 68, 68, 68, 68, 68, 68, 68] - ]}, - 'web_medium': {'subsampling': 2, # "4:2:0" - 'quantization': [ - [16, 11, 11, 16, 23, 27, 31, 30, - 11, 12, 12, 15, 20, 23, 23, 30, - 11, 12, 13, 16, 23, 26, 35, 47, - 16, 15, 16, 23, 26, 37, 47, 64, - 23, 20, 23, 26, 39, 51, 64, 64, - 27, 23, 26, 37, 51, 64, 64, 64, - 31, 23, 35, 47, 64, 64, 64, 64, - 30, 30, 47, 64, 64, 64, 64, 64], - [17, 15, 17, 21, 20, 26, 38, 48, - 15, 19, 18, 17, 20, 26, 35, 43, - 17, 18, 20, 22, 26, 30, 46, 53, - 21, 17, 22, 28, 30, 39, 53, 64, - 20, 20, 26, 30, 39, 48, 64, 64, - 26, 26, 30, 39, 48, 63, 64, 64, - 38, 35, 46, 53, 64, 64, 64, 64, - 48, 43, 53, 64, 64, 64, 64, 64] - ]}, - 'web_high': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [6, 4, 4, 6, 9, 11, 12, 16, - 4, 5, 5, 6, 8, 10, 12, 12, - 4, 5, 5, 6, 10, 12, 14, 19, - 6, 6, 6, 11, 12, 15, 19, 28, - 9, 8, 10, 12, 16, 20, 27, 31, - 11, 10, 12, 15, 20, 27, 31, 31, - 12, 12, 14, 19, 27, 31, 31, 31, - 16, 12, 19, 28, 31, 31, 31, 31], - [7, 7, 13, 24, 26, 31, 31, 31, - 7, 12, 16, 21, 31, 31, 31, 31, - 13, 16, 17, 31, 31, 31, 31, 31, - 24, 21, 31, 
31, 31, 31, 31, 31, - 26, 31, 31, 31, 31, 31, 31, 31, - 31, 31, 31, 31, 31, 31, 31, 31, - 31, 31, 31, 31, 31, 31, 31, 31, - 31, 31, 31, 31, 31, 31, 31, 31] - ]}, - 'web_very_high': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [2, 2, 2, 2, 3, 4, 5, 6, - 2, 2, 2, 2, 3, 4, 5, 6, - 2, 2, 2, 2, 4, 5, 7, 9, - 2, 2, 2, 4, 5, 7, 9, 12, - 3, 3, 4, 5, 8, 10, 12, 12, - 4, 4, 5, 7, 10, 12, 12, 12, - 5, 5, 7, 9, 12, 12, 12, 12, - 6, 6, 9, 12, 12, 12, 12, 12], - [3, 3, 5, 9, 13, 15, 15, 15, - 3, 4, 6, 11, 14, 12, 12, 12, - 5, 6, 9, 14, 12, 12, 12, 12, - 9, 11, 14, 12, 12, 12, 12, 12, - 13, 14, 12, 12, 12, 12, 12, 12, - 15, 12, 12, 12, 12, 12, 12, 12, - 15, 12, 12, 12, 12, 12, 12, 12, - 15, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'web_maximum': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 2, - 1, 1, 1, 1, 1, 1, 2, 2, - 1, 1, 1, 1, 1, 2, 2, 3, - 1, 1, 1, 1, 2, 2, 3, 3, - 1, 1, 1, 2, 2, 3, 3, 3, - 1, 1, 2, 2, 3, 3, 3, 3], - [1, 1, 1, 2, 2, 3, 3, 3, - 1, 1, 1, 2, 3, 3, 3, 3, - 1, 1, 1, 3, 3, 3, 3, 3, - 2, 2, 3, 3, 3, 3, 3, 3, - 2, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3] - ]}, - 'low': {'subsampling': 2, # "4:2:0" - 'quantization': [ - [18, 14, 14, 21, 30, 35, 34, 17, - 14, 16, 16, 19, 26, 23, 12, 12, - 14, 16, 17, 21, 23, 12, 12, 12, - 21, 19, 21, 23, 12, 12, 12, 12, - 30, 26, 23, 12, 12, 12, 12, 12, - 35, 23, 12, 12, 12, 12, 12, 12, - 34, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12], - [20, 19, 22, 27, 20, 20, 17, 17, - 19, 25, 23, 14, 14, 12, 12, 12, - 22, 23, 14, 14, 12, 12, 12, 12, - 27, 14, 14, 12, 12, 12, 12, 12, - 20, 14, 12, 12, 12, 12, 12, 12, - 20, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'medium': {'subsampling': 2, # "4:2:0" - 'quantization': [ - [12, 8, 8, 12, 17, 21, 24, 17, - 8, 9, 9, 11, 15, 19, 12, 12, - 8, 9, 10, 12, 19, 12, 12, 12, - 12, 11, 12, 21, 12, 12, 12, 12, - 17, 15, 19, 12, 12, 12, 12, 12, - 21, 19, 12, 12, 12, 12, 12, 12, - 24, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12], - [13, 11, 13, 16, 20, 20, 17, 17, - 11, 14, 14, 14, 14, 12, 12, 12, - 13, 14, 14, 14, 12, 12, 12, 12, - 16, 14, 14, 12, 12, 12, 12, 12, - 20, 14, 12, 12, 12, 12, 12, 12, - 20, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'high': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [6, 4, 4, 6, 9, 11, 12, 16, - 4, 5, 5, 6, 8, 10, 12, 12, - 4, 5, 5, 6, 10, 12, 12, 12, - 6, 6, 6, 11, 12, 12, 12, 12, - 9, 8, 10, 12, 12, 12, 12, 12, - 11, 10, 12, 12, 12, 12, 12, 12, - 12, 12, 12, 12, 12, 12, 12, 12, - 16, 12, 12, 12, 12, 12, 12, 12], - [7, 7, 13, 24, 20, 20, 17, 17, - 7, 12, 16, 14, 14, 12, 12, 12, - 13, 16, 14, 14, 12, 12, 12, 12, - 24, 14, 14, 12, 12, 12, 12, 12, - 20, 14, 12, 12, 12, 12, 12, 12, - 20, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12, - 17, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'maximum': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [2, 2, 2, 2, 3, 4, 5, 6, - 2, 2, 2, 2, 3, 4, 5, 6, - 2, 2, 2, 2, 4, 5, 7, 9, - 2, 2, 2, 4, 5, 7, 9, 12, - 3, 3, 4, 5, 8, 10, 12, 12, - 4, 4, 5, 7, 10, 12, 12, 12, - 5, 5, 7, 9, 12, 12, 12, 12, - 6, 6, 9, 12, 12, 12, 12, 12], - [3, 3, 5, 9, 13, 15, 15, 15, - 3, 4, 6, 10, 14, 12, 12, 12, - 5, 6, 9, 14, 12, 12, 12, 12, - 9, 10, 14, 12, 12, 12, 12, 12, - 13, 14, 12, 12, 12, 12, 12, 12, - 15, 12, 12, 12, 12, 12, 12, 12, - 15, 12, 12, 12, 12, 12, 12, 12, - 15, 12, 12, 
12, 12, 12, 12, 12] - ]}, -} -# fmt: on diff --git a/venv/Lib/site-packages/PIL/McIdasImagePlugin.py b/venv/Lib/site-packages/PIL/McIdasImagePlugin.py deleted file mode 100644 index 2797223..0000000 --- a/venv/Lib/site-packages/PIL/McIdasImagePlugin.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# Basic McIdas support for PIL -# -# History: -# 1997-05-05 fl Created (8-bit images only) -# 2009-03-08 fl Added 16/32-bit support. -# -# Thanks to Richard Jones and Craig Swank for specs and samples. -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import struct - -from . import Image, ImageFile - - -def _accept(prefix: bytes) -> bool: - return prefix[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" - - -## -# Image plugin for McIdas area images. - - -class McIdasImageFile(ImageFile.ImageFile): - format = "MCIDAS" - format_description = "McIdas area file" - - def _open(self) -> None: - # parse area file directory - assert self.fp is not None - - s = self.fp.read(256) - if not _accept(s) or len(s) != 256: - msg = "not an McIdas area file" - raise SyntaxError(msg) - - self.area_descriptor_raw = s - self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) - - # get mode - if w[11] == 1: - mode = rawmode = "L" - elif w[11] == 2: - # FIXME: add memory map support - mode = "I" - rawmode = "I;16B" - elif w[11] == 4: - # FIXME: add memory map support - mode = "I" - rawmode = "I;32B" - else: - msg = "unsupported McIdas format" - raise SyntaxError(msg) - - self._mode = mode - self._size = w[10], w[9] - - offset = w[34] + w[15] - stride = w[15] + w[10] * w[11] * w[14] - - self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] - - -# -------------------------------------------------------------------- -# registry - -Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) - -# no default extension diff --git a/venv/Lib/site-packages/PIL/MicImagePlugin.py b/venv/Lib/site-packages/PIL/MicImagePlugin.py deleted file mode 100644 index 0723988..0000000 --- a/venv/Lib/site-packages/PIL/MicImagePlugin.py +++ /dev/null @@ -1,107 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# Microsoft Image Composer support for PIL -# -# Notes: -# uses TiffImagePlugin.py to read the actual image streams -# -# History: -# 97-01-20 fl Created -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import olefile - -from . import Image, TiffImagePlugin - -# -# -------------------------------------------------------------------- - - -def _accept(prefix: bytes) -> bool: - return prefix[:8] == olefile.MAGIC - - -## -# Image plugin for Microsoft's Image Composer file format. - - -class MicImageFile(TiffImagePlugin.TiffImageFile): - format = "MIC" - format_description = "Microsoft Image Composer" - _close_exclusive_fp_after_loading = False - - def _open(self) -> None: - # read the OLE directory and see if this is a likely - # to be a Microsoft Image Composer file - - try: - self.ole = olefile.OleFileIO(self.fp) - except OSError as e: - msg = "not an MIC file; invalid OLE file" - raise SyntaxError(msg) from e - - # find ACI subfiles with Image members (maybe not the - # best way to identify MIC files, but what the... 
;-) - - self.images = [ - path - for path in self.ole.listdir() - if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image" - ] - - # if we didn't find any images, this is probably not - # an MIC file. - if not self.images: - msg = "not an MIC file; no image entries" - raise SyntaxError(msg) - - self.frame = -1 - self._n_frames = len(self.images) - self.is_animated = self._n_frames > 1 - - self.__fp = self.fp - self.seek(0) - - def seek(self, frame): - if not self._seek_check(frame): - return - try: - filename = self.images[frame] - except IndexError as e: - msg = "no such frame" - raise EOFError(msg) from e - - self.fp = self.ole.openstream(filename) - - TiffImagePlugin.TiffImageFile._open(self) - - self.frame = frame - - def tell(self) -> int: - return self.frame - - def close(self) -> None: - self.__fp.close() - self.ole.close() - super().close() - - def __exit__(self, *args: object) -> None: - self.__fp.close() - self.ole.close() - super().__exit__() - - -# -# -------------------------------------------------------------------- - -Image.register_open(MicImageFile.format, MicImageFile, _accept) - -Image.register_extension(MicImageFile.format, ".mic") diff --git a/venv/Lib/site-packages/PIL/MpegImagePlugin.py b/venv/Lib/site-packages/PIL/MpegImagePlugin.py deleted file mode 100644 index ad4d3e9..0000000 --- a/venv/Lib/site-packages/PIL/MpegImagePlugin.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# MPEG file handling -# -# History: -# 95-09-09 fl Created -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1995. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image, ImageFile -from ._binary import i8 -from ._typing import SupportsRead - -# -# Bitstream parser - - -class BitStream: - def __init__(self, fp: SupportsRead[bytes]) -> None: - self.fp = fp - self.bits = 0 - self.bitbuffer = 0 - - def next(self) -> int: - return i8(self.fp.read(1)) - - def peek(self, bits: int) -> int: - while self.bits < bits: - c = self.next() - if c < 0: - self.bits = 0 - continue - self.bitbuffer = (self.bitbuffer << 8) + c - self.bits += 8 - return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 - - def skip(self, bits: int) -> None: - while self.bits < bits: - self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) - self.bits += 8 - self.bits = self.bits - bits - - def read(self, bits: int) -> int: - v = self.peek(bits) - self.bits = self.bits - bits - return v - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"\x00\x00\x01\xb3" - - -## -# Image plugin for MPEG streams. This plugin can identify a stream, -# but it cannot read it. 
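The BitStream helper above exists so _open below can read the sequence header field by field: a 32-bit start code (0x000001B3) followed by two 12-bit fields giving width and height. The same arithmetic on raw bytes, as a standalone sketch that does not rely on the plugin's internals (the sample bytes encode a 352x288 stream):

def mpeg_size(header: bytes) -> tuple[int, int]:
    # header: at least the first 7 bytes of an MPEG-1/2 video elementary stream
    if header[:4] != b"\x00\x00\x01\xb3":
        raise ValueError("not an MPEG sequence header")
    bits = int.from_bytes(header[4:7], "big")   # 24 bits: 12 for width, 12 for height
    return bits >> 12, bits & 0xFFF

print(mpeg_size(b"\x00\x00\x01\xb3\x16\x01\x20"))   # (352, 288)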
- - -class MpegImageFile(ImageFile.ImageFile): - format = "MPEG" - format_description = "MPEG" - - def _open(self) -> None: - assert self.fp is not None - - s = BitStream(self.fp) - if s.read(32) != 0x1B3: - msg = "not an MPEG file" - raise SyntaxError(msg) - - self._mode = "RGB" - self._size = s.read(12), s.read(12) - - -# -------------------------------------------------------------------- -# Registry stuff - -Image.register_open(MpegImageFile.format, MpegImageFile, _accept) - -Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"]) - -Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/venv/Lib/site-packages/PIL/MpoImagePlugin.py b/venv/Lib/site-packages/PIL/MpoImagePlugin.py deleted file mode 100644 index f215706..0000000 --- a/venv/Lib/site-packages/PIL/MpoImagePlugin.py +++ /dev/null @@ -1,180 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# MPO file handling -# -# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the -# Camera & Imaging Products Association) -# -# The multi-picture object combines multiple JPEG images (with a modified EXIF -# data format) into a single file. While it can theoretically be used much like -# a GIF animation, it is commonly used to represent 3D photographs and is (as -# of this writing) the most commonly used format by 3D cameras. -# -# History: -# 2014-03-13 Feneric Created -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import itertools -import os -import struct -from typing import IO - -from . import ( - Image, - ImageSequence, - JpegImagePlugin, - TiffImagePlugin, -) -from ._binary import o32le - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - JpegImagePlugin._save(im, fp, filename) - - -def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - append_images = im.encoderinfo.get("append_images", []) - if not append_images and not getattr(im, "is_animated", False): - _save(im, fp, filename) - return - - mpf_offset = 28 - offsets: list[int] = [] - for imSequence in itertools.chain([im], append_images): - for im_frame in ImageSequence.Iterator(imSequence): - if not offsets: - # APP2 marker - im_frame.encoderinfo["extra"] = ( - b"\xFF\xE2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82 - ) - exif = im_frame.encoderinfo.get("exif") - if isinstance(exif, Image.Exif): - exif = exif.tobytes() - im_frame.encoderinfo["exif"] = exif - if exif: - mpf_offset += 4 + len(exif) - - JpegImagePlugin._save(im_frame, fp, filename) - offsets.append(fp.tell()) - else: - im_frame.save(fp, "JPEG") - offsets.append(fp.tell() - offsets[-1]) - - ifd = TiffImagePlugin.ImageFileDirectory_v2() - ifd[0xB000] = b"0100" - ifd[0xB001] = len(offsets) - - mpentries = b"" - data_offset = 0 - for i, size in enumerate(offsets): - if i == 0: - mptype = 0x030000 # Baseline MP Primary Image - else: - mptype = 0x000000 # Undefined - mpentries += struct.pack(" None: - self.fp.seek(0) # prep the fp in order to pass the JPEG test - JpegImagePlugin.JpegImageFile._open(self) - self._after_jpeg_open() - - def _after_jpeg_open(self, mpheader=None): - self.mpinfo = mpheader if mpheader is not None else self._getmp() - self.n_frames = self.mpinfo[0xB001] - self.__mpoffsets = [ - mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002] - ] - self.__mpoffsets[0] = 0 - # Note that the following assertion will only be invalid if something - # gets broken within JpegImagePlugin. 
- assert self.n_frames == len(self.__mpoffsets) - del self.info["mpoffset"] # no longer needed - self.is_animated = self.n_frames > 1 - self._fp = self.fp # FIXME: hack - self._fp.seek(self.__mpoffsets[0]) # get ready to read first frame - self.__frame = 0 - self.offset = 0 - # for now we can only handle reading and individual frame extraction - self.readonly = 1 - - def load_seek(self, pos: int) -> None: - self._fp.seek(pos) - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - self.fp = self._fp - self.offset = self.__mpoffsets[frame] - - original_exif = self.info.get("exif") - if "exif" in self.info: - del self.info["exif"] - - self.fp.seek(self.offset + 2) # skip SOI marker - if not self.fp.read(2): - msg = "No data found for frame" - raise ValueError(msg) - self.fp.seek(self.offset) - JpegImagePlugin.JpegImageFile._open(self) - if self.info.get("exif") != original_exif: - self._reload_exif() - - self.tile = [("jpeg", (0, 0) + self.size, self.offset, self.tile[0][-1])] - self.__frame = frame - - def tell(self) -> int: - return self.__frame - - @staticmethod - def adopt(jpeg_instance, mpheader=None): - """ - Transform the instance of JpegImageFile into - an instance of MpoImageFile. - After the call, the JpegImageFile is extended - to be an MpoImageFile. - - This is essentially useful when opening a JPEG - file that reveals itself as an MPO, to avoid - double call to _open. - """ - jpeg_instance.__class__ = MpoImageFile - jpeg_instance._after_jpeg_open(mpheader) - return jpeg_instance - - -# --------------------------------------------------------------------- -# Registry stuff - -# Note that since MPO shares a factory with JPEG, we do not need to do a -# separate registration for it here. -# Image.register_open(MpoImageFile.format, -# JpegImagePlugin.jpeg_factory, _accept) -Image.register_save(MpoImageFile.format, _save) -Image.register_save_all(MpoImageFile.format, _save_all) - -Image.register_extension(MpoImageFile.format, ".mpo") - -Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/venv/Lib/site-packages/PIL/MspImagePlugin.py b/venv/Lib/site-packages/PIL/MspImagePlugin.py deleted file mode 100644 index 0a75c86..0000000 --- a/venv/Lib/site-packages/PIL/MspImagePlugin.py +++ /dev/null @@ -1,200 +0,0 @@ -# -# The Python Imaging Library. -# -# MSP file handling -# -# This is the format used by the Paint program in Windows 1 and 2. -# -# History: -# 95-09-05 fl Created -# 97-01-03 fl Read/write MSP images -# 17-02-21 es Fixed RLE interpretation -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1995-97. -# Copyright (c) Eric Soroos 2017. -# -# See the README file for information on usage and redistribution. -# -# More info on this format: https://archive.org/details/gg243631 -# Page 313: -# Figure 205. Windows Paint Version 1: "DanM" Format -# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03 -# -# See also: https://www.fileformat.info/format/mspaint/egff.htm -from __future__ import annotations - -import io -import struct -from typing import IO - -from . import Image, ImageFile -from ._binary import i16le as i16 -from ._binary import o16le as o16 - -# -# read MSP files - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] in [b"DanM", b"LinS"] - - -## -# Image plugin for Windows MSP images. This plugin supports both -# uncompressed (Windows 1.0). 
- - -class MspImageFile(ImageFile.ImageFile): - format = "MSP" - format_description = "Windows Paint" - - def _open(self) -> None: - # Header - assert self.fp is not None - - s = self.fp.read(32) - if not _accept(s): - msg = "not an MSP file" - raise SyntaxError(msg) - - # Header checksum - checksum = 0 - for i in range(0, 32, 2): - checksum = checksum ^ i16(s, i) - if checksum != 0: - msg = "bad MSP checksum" - raise SyntaxError(msg) - - self._mode = "1" - self._size = i16(s, 4), i16(s, 6) - - if s[:4] == b"DanM": - self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))] - else: - self.tile = [("MSP", (0, 0) + self.size, 32, None)] - - -class MspDecoder(ImageFile.PyDecoder): - # The algo for the MSP decoder is from - # https://www.fileformat.info/format/mspaint/egff.htm - # cc-by-attribution -- That page references is taken from the - # Encyclopedia of Graphics File Formats and is licensed by - # O'Reilly under the Creative Common/Attribution license - # - # For RLE encoded files, the 32byte header is followed by a scan - # line map, encoded as one 16bit word of encoded byte length per - # line. - # - # NOTE: the encoded length of the line can be 0. This was not - # handled in the previous version of this encoder, and there's no - # mention of how to handle it in the documentation. From the few - # examples I've seen, I've assumed that it is a fill of the - # background color, in this case, white. - # - # - # Pseudocode of the decoder: - # Read a BYTE value as the RunType - # If the RunType value is zero - # Read next byte as the RunCount - # Read the next byte as the RunValue - # Write the RunValue byte RunCount times - # If the RunType value is non-zero - # Use this value as the RunCount - # Read and write the next RunCount bytes literally - # - # e.g.: - # 0x00 03 ff 05 00 01 02 03 04 - # would yield the bytes: - # 0xff ff ff 00 01 02 03 04 - # - # which are then interpreted as a bit packed mode '1' image - - _pulls_fd = True - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - - img = io.BytesIO() - blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8)) - try: - self.fd.seek(32) - rowmap = struct.unpack_from( - f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2) - ) - except struct.error as e: - msg = "Truncated MSP file in row map" - raise OSError(msg) from e - - for x, rowlen in enumerate(rowmap): - try: - if rowlen == 0: - img.write(blank_line) - continue - row = self.fd.read(rowlen) - if len(row) != rowlen: - msg = f"Truncated MSP file, expected {rowlen} bytes on row {x}" - raise OSError(msg) - idx = 0 - while idx < rowlen: - runtype = row[idx] - idx += 1 - if runtype == 0: - (runcount, runval) = struct.unpack_from("Bc", row, idx) - img.write(runval * runcount) - idx += 2 - else: - runcount = runtype - img.write(row[idx : idx + runcount]) - idx += runcount - - except struct.error as e: - msg = f"Corrupted MSP file in row {x}" - raise OSError(msg) from e - - self.set_as_raw(img.getvalue(), ("1", 0, 1)) - - return -1, 0 - - -Image.register_decoder("MSP", MspDecoder) - - -# -# write MSP files (uncompressed only) - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode != "1": - msg = f"cannot write mode {im.mode} as MSP" - raise OSError(msg) - - # create MSP header - header = [0] * 16 - - header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 - header[2], header[3] = im.size - header[4], header[5] = 1, 1 - header[6], header[7] = 1, 1 - header[8], header[9] = im.size - - checksum = 0 - for h in 
header: - checksum = checksum ^ h - header[12] = checksum # FIXME: is this the right field? - - # header - for h in header: - fp.write(o16(h)) - - # image body - ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))]) - - -# -# registry - -Image.register_open(MspImageFile.format, MspImageFile, _accept) -Image.register_save(MspImageFile.format, _save) - -Image.register_extension(MspImageFile.format, ".msp") diff --git a/venv/Lib/site-packages/PIL/PSDraw.py b/venv/Lib/site-packages/PIL/PSDraw.py deleted file mode 100644 index 673eae1..0000000 --- a/venv/Lib/site-packages/PIL/PSDraw.py +++ /dev/null @@ -1,237 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# Simple PostScript graphics interface -# -# History: -# 1996-04-20 fl Created -# 1999-01-10 fl Added gsave/grestore to image method -# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) -# -# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved. -# Copyright (c) 1996 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import sys -from typing import TYPE_CHECKING - -from . import EpsImagePlugin - -## -# Simple PostScript graphics interface. - - -class PSDraw: - """ - Sets up printing to the given file. If ``fp`` is omitted, - ``sys.stdout.buffer`` or ``sys.stdout`` is assumed. - """ - - def __init__(self, fp=None): - if not fp: - try: - fp = sys.stdout.buffer - except AttributeError: - fp = sys.stdout - self.fp = fp - - def begin_document(self, id: str | None = None) -> None: - """Set up printing of a document. (Write PostScript DSC header.)""" - # FIXME: incomplete - self.fp.write( - b"%!PS-Adobe-3.0\n" - b"save\n" - b"/showpage { } def\n" - b"%%EndComments\n" - b"%%BeginDocument\n" - ) - # self.fp.write(ERROR_PS) # debugging! - self.fp.write(EDROFF_PS) - self.fp.write(VDI_PS) - self.fp.write(b"%%EndProlog\n") - self.isofont: dict[bytes, int] = {} - - def end_document(self) -> None: - """Ends printing. (Write PostScript DSC footer.)""" - self.fp.write(b"%%EndDocument\nrestore showpage\n%%End\n") - if hasattr(self.fp, "flush"): - self.fp.flush() - - def setfont(self, font: str, size: int) -> None: - """ - Selects which font to use. - - :param font: A PostScript font name - :param size: Size in points. - """ - font_bytes = bytes(font, "UTF-8") - if font_bytes not in self.isofont: - # reencode font - self.fp.write( - b"/PSDraw-%s ISOLatin1Encoding /%s E\n" % (font_bytes, font_bytes) - ) - self.isofont[font_bytes] = 1 - # rough - self.fp.write(b"/F0 %d /PSDraw-%s F\n" % (size, font_bytes)) - - def line(self, xy0: tuple[int, int], xy1: tuple[int, int]) -> None: - """ - Draws a line between the two points. Coordinates are given in - PostScript point coordinates (72 points per inch, (0, 0) is the lower - left corner of the page). - """ - self.fp.write(b"%d %d %d %d Vl\n" % (*xy0, *xy1)) - - def rectangle(self, box: tuple[int, int, int, int]) -> None: - """ - Draws a rectangle. - - :param box: A tuple of four integers, specifying left, bottom, width and - height. - """ - self.fp.write(b"%d %d M 0 %d %d Vr\n" % box) - - def text(self, xy: tuple[int, int], text: str) -> None: - """ - Draws text at the given position. You must use - :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. 
- """ - text_bytes = bytes(text, "UTF-8") - text_bytes = b"\\(".join(text_bytes.split(b"(")) - text_bytes = b"\\)".join(text_bytes.split(b")")) - self.fp.write(b"%d %d M (%s) S\n" % (xy + (text_bytes,))) - - if TYPE_CHECKING: - from . import Image - - def image( - self, box: tuple[int, int, int, int], im: Image.Image, dpi: int | None = None - ) -> None: - """Draw a PIL image, centered in the given box.""" - # default resolution depends on mode - if not dpi: - if im.mode == "1": - dpi = 200 # fax - else: - dpi = 100 # grayscale - # image size (on paper) - x = im.size[0] * 72 / dpi - y = im.size[1] * 72 / dpi - # max allowed size - xmax = float(box[2] - box[0]) - ymax = float(box[3] - box[1]) - if x > xmax: - y = y * xmax / x - x = xmax - if y > ymax: - x = x * ymax / y - y = ymax - dx = (xmax - x) / 2 + box[0] - dy = (ymax - y) / 2 + box[1] - self.fp.write(b"gsave\n%f %f translate\n" % (dx, dy)) - if (x, y) != im.size: - # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) - sx = x / im.size[0] - sy = y / im.size[1] - self.fp.write(b"%f %f scale\n" % (sx, sy)) - EpsImagePlugin._save(im, self.fp, "", 0) - self.fp.write(b"\ngrestore\n") - - -# -------------------------------------------------------------------- -# PostScript driver - -# -# EDROFF.PS -- PostScript driver for Edroff 2 -# -# History: -# 94-01-25 fl: created (edroff 2.04) -# -# Copyright (c) Fredrik Lundh 1994. -# - - -EDROFF_PS = b"""\ -/S { show } bind def -/P { moveto show } bind def -/M { moveto } bind def -/X { 0 rmoveto } bind def -/Y { 0 exch rmoveto } bind def -/E { findfont - dup maxlength dict begin - { - 1 index /FID ne { def } { pop pop } ifelse - } forall - /Encoding exch def - dup /FontName exch def - currentdict end definefont pop -} bind def -/F { findfont exch scalefont dup setfont - [ exch /setfont cvx ] cvx bind def -} bind def -""" - -# -# VDI.PS -- PostScript driver for VDI meta commands -# -# History: -# 94-01-25 fl: created (edroff 2.04) -# -# Copyright (c) Fredrik Lundh 1994. 
-# - -VDI_PS = b"""\ -/Vm { moveto } bind def -/Va { newpath arcn stroke } bind def -/Vl { moveto lineto stroke } bind def -/Vc { newpath 0 360 arc closepath } bind def -/Vr { exch dup 0 rlineto - exch dup 0 exch rlineto - exch neg 0 rlineto - 0 exch neg rlineto - setgray fill } bind def -/Tm matrix def -/Ve { Tm currentmatrix pop - translate scale newpath 0 0 .5 0 360 arc closepath - Tm setmatrix -} bind def -/Vf { currentgray exch setgray fill setgray } bind def -""" - -# -# ERROR.PS -- Error handler -# -# History: -# 89-11-21 fl: created (pslist 1.10) -# - -ERROR_PS = b"""\ -/landscape false def -/errorBUF 200 string def -/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def -errordict begin /handleerror { - initmatrix /Courier findfont 10 scalefont setfont - newpath 72 720 moveto $error begin /newerror false def - (PostScript Error) show errorNL errorNL - (Error: ) show - /errorname load errorBUF cvs show errorNL errorNL - (Command: ) show - /command load dup type /stringtype ne { errorBUF cvs } if show - errorNL errorNL - (VMstatus: ) show - vmstatus errorBUF cvs show ( bytes available, ) show - errorBUF cvs show ( bytes used at level ) show - errorBUF cvs show errorNL errorNL - (Operand stargck: ) show errorNL /ostargck load { - dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL - } forall errorNL - (Execution stargck: ) show errorNL /estargck load { - dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL - } forall - end showpage -} def end -""" diff --git a/venv/Lib/site-packages/PIL/PaletteFile.py b/venv/Lib/site-packages/PIL/PaletteFile.py deleted file mode 100644 index 81652e5..0000000 --- a/venv/Lib/site-packages/PIL/PaletteFile.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Python Imaging Library -# $Id$ -# -# stuff to read simple, teragon-style palette files -# -# History: -# 97-08-23 fl Created -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from typing import IO - -from ._binary import o8 - - -class PaletteFile: - """File handler for Teragon-style palette files.""" - - rawmode = "RGB" - - def __init__(self, fp: IO[bytes]) -> None: - palette = [o8(i) * 3 for i in range(256)] - - while True: - s = fp.readline() - - if not s: - break - if s[:1] == b"#": - continue - if len(s) > 100: - msg = "bad palette file" - raise SyntaxError(msg) - - v = [int(x) for x in s.split()] - try: - [i, r, g, b] = v - except ValueError: - [i, r] = v - g = b = r - - if 0 <= i <= 255: - palette[i] = o8(r) + o8(g) + o8(b) - - self.palette = b"".join(palette) - - def getpalette(self) -> tuple[bytes, str]: - return self.palette, self.rawmode diff --git a/venv/Lib/site-packages/PIL/PalmImagePlugin.py b/venv/Lib/site-packages/PIL/PalmImagePlugin.py deleted file mode 100644 index 1735070..0000000 --- a/venv/Lib/site-packages/PIL/PalmImagePlugin.py +++ /dev/null @@ -1,229 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# - -## -# Image plugin for Palm pixmap images (output only). -## -from __future__ import annotations - -from typing import IO - -from . 
import Image, ImageFile -from ._binary import o8 -from ._binary import o16be as o16b - -# fmt: off -_Palm8BitColormapValues = ( - (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), - (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), - (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), - (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), - (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), - (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), - (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), - (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), - (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), - (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), - (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), - (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), - (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), - (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), - (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), - (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), - (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), - (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), - (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), - (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), - (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), - (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), - (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), - (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), - (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), - (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), - (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), - (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), - (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), - (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), - (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), - (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), - (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), - (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), - (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), - (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), - (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), - (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), - (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), - (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), - (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), - (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), - (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), - (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), - (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), - (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), - (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), - (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), - (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), - (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), - (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), - (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), - (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), - (0, 153, 0), (0, 
102, 0), (0, 51, 0), (17, 17, 17), - (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), - (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), - (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), - (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), - (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), - (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), - (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), - (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), - (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), - (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) -# fmt: on - - -# so build a prototype image to be used for palette resampling -def build_prototype_image() -> Image.Image: - image = Image.new("L", (1, len(_Palm8BitColormapValues))) - image.putdata(list(range(len(_Palm8BitColormapValues)))) - palettedata: tuple[int, ...] = () - for colormapValue in _Palm8BitColormapValues: - palettedata += colormapValue - palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues)) - image.putpalette(palettedata) - return image - - -Palm8BitColormapImage = build_prototype_image() - -# OK, we now have in Palm8BitColormapImage, -# a "P"-mode image with the right palette -# -# -------------------------------------------------------------------- - -_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000} - -_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00} - - -# -# -------------------------------------------------------------------- - -## -# (Internal) Image save plugin for the Palm format. - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode == "P": - # we assume this is a color Palm image with the standard colormap, - # unless the "info" dict has a "custom-colormap" field - - rawmode = "P" - bpp = 8 - version = 1 - - elif im.mode == "L": - if im.encoderinfo.get("bpp") in (1, 2, 4): - # this is 8-bit grayscale, so we shift it to get the high-order bits, - # and invert it because - # Palm does grayscale from white (0) to black (1) - bpp = im.encoderinfo["bpp"] - maxval = (1 << bpp) - 1 - shift = 8 - bpp - im = im.point(lambda x: maxval - (x >> shift)) - elif im.info.get("bpp") in (1, 2, 4): - # here we assume that even though the inherent mode is 8-bit grayscale, - # only the lower bpp bits are significant. - # We invert them to match the Palm. 
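The first grayscale branch above shifts an 8-bit "L" image down to its top bpp bits and inverts the result, because Palm grayscale runs from white at 0 to black at maxval; bpp itself is read from encoderinfo, i.e. from the keyword passed to save(). The mapping it hands to point(), worked for bpp = 4 at the ends and middle of the 8-bit range:

bpp = 4
maxval = (1 << bpp) - 1      # 15
shift = 8 - bpp              # 4
for x in (0, 128, 255):
    print(x, "->", maxval - (x >> shift))
# 0 -> 15 (black), 128 -> 7, 255 -> 0 (white)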
- bpp = im.info["bpp"] - maxval = (1 << bpp) - 1 - im = im.point(lambda x: maxval - (x & maxval)) - else: - msg = f"cannot write mode {im.mode} as Palm" - raise OSError(msg) - - # we ignore the palette here - im._mode = "P" - rawmode = f"P;{bpp}" - version = 1 - - elif im.mode == "1": - # monochrome -- write it inverted, as is the Palm standard - rawmode = "1;I" - bpp = 1 - version = 0 - - else: - msg = f"cannot write mode {im.mode} as Palm" - raise OSError(msg) - - # - # make sure image data is available - im.load() - - # write header - - cols = im.size[0] - rows = im.size[1] - - rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2 - transparent_index = 0 - compression_type = _COMPRESSION_TYPES["none"] - - flags = 0 - if im.mode == "P" and "custom-colormap" in im.info: - flags = flags & _FLAGS["custom-colormap"] - colormapsize = 4 * 256 + 2 - colormapmode = im.palette.mode - colormap = im.getdata().getpalette() - else: - colormapsize = 0 - - if "offset" in im.info: - offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 - else: - offset = 0 - - fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) - fp.write(o8(bpp)) - fp.write(o8(version)) - fp.write(o16b(offset)) - fp.write(o8(transparent_index)) - fp.write(o8(compression_type)) - fp.write(o16b(0)) # reserved by Palm - - # now write colormap if necessary - - if colormapsize > 0: - fp.write(o16b(256)) - for i in range(256): - fp.write(o8(i)) - if colormapmode == "RGB": - fp.write( - o8(colormap[3 * i]) - + o8(colormap[3 * i + 1]) - + o8(colormap[3 * i + 2]) - ) - elif colormapmode == "RGBA": - fp.write( - o8(colormap[4 * i]) - + o8(colormap[4 * i + 1]) - + o8(colormap[4 * i + 2]) - ) - - # now convert data to raw form - ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))]) - - if hasattr(fp, "flush"): - fp.flush() - - -# -# -------------------------------------------------------------------- - -Image.register_save("Palm", _save) - -Image.register_extension("Palm", ".palm") - -Image.register_mime("Palm", "image/palm") diff --git a/venv/Lib/site-packages/PIL/PcdImagePlugin.py b/venv/Lib/site-packages/PIL/PcdImagePlugin.py deleted file mode 100644 index 1cd5c4a..0000000 --- a/venv/Lib/site-packages/PIL/PcdImagePlugin.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PCD file handling -# -# History: -# 96-05-10 fl Created -# 96-05-27 fl Added draft mode (128x192, 256x384) -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image, ImageFile - -## -# Image plugin for PhotoCD images. This plugin only reads the 768x512 -# image from the file; higher resolutions are encoded in a proprietary -# encoding. - - -class PcdImageFile(ImageFile.ImageFile): - format = "PCD" - format_description = "Kodak PhotoCD" - - def _open(self) -> None: - # rough - assert self.fp is not None - - self.fp.seek(2048) - s = self.fp.read(2048) - - if s[:4] != b"PCD_": - msg = "not a PCD file" - raise SyntaxError(msg) - - orientation = s[1538] & 3 - self.tile_post_rotate = None - if orientation == 1: - self.tile_post_rotate = 90 - elif orientation == 3: - self.tile_post_rotate = -90 - - self._mode = "RGB" - self._size = 768, 512 # FIXME: not correct for rotated images! 
- self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)] - - def load_end(self) -> None: - if self.tile_post_rotate: - # Handle rotated PCDs - assert self.im is not None - - self.im = self.im.rotate(self.tile_post_rotate) - self._size = self.im.size - - -# -# registry - -Image.register_open(PcdImageFile.format, PcdImageFile) - -Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/venv/Lib/site-packages/PIL/PcfFontFile.py b/venv/Lib/site-packages/PIL/PcfFontFile.py deleted file mode 100644 index 0d1968b..0000000 --- a/venv/Lib/site-packages/PIL/PcfFontFile.py +++ /dev/null @@ -1,254 +0,0 @@ -# -# THIS IS WORK IN PROGRESS -# -# The Python Imaging Library -# $Id$ -# -# portable compiled font file parser -# -# history: -# 1997-08-19 fl created -# 2003-09-13 fl fixed loading of unicode fonts -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1997-2003 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -from typing import BinaryIO, Callable - -from . import FontFile, Image -from ._binary import i8 -from ._binary import i16be as b16 -from ._binary import i16le as l16 -from ._binary import i32be as b32 -from ._binary import i32le as l32 - -# -------------------------------------------------------------------- -# declarations - -PCF_MAGIC = 0x70636601 # "\x01fcp" - -PCF_PROPERTIES = 1 << 0 -PCF_ACCELERATORS = 1 << 1 -PCF_METRICS = 1 << 2 -PCF_BITMAPS = 1 << 3 -PCF_INK_METRICS = 1 << 4 -PCF_BDF_ENCODINGS = 1 << 5 -PCF_SWIDTHS = 1 << 6 -PCF_GLYPH_NAMES = 1 << 7 -PCF_BDF_ACCELERATORS = 1 << 8 - -BYTES_PER_ROW: list[Callable[[int], int]] = [ - lambda bits: ((bits + 7) >> 3), - lambda bits: ((bits + 15) >> 3) & ~1, - lambda bits: ((bits + 31) >> 3) & ~3, - lambda bits: ((bits + 63) >> 3) & ~7, -] - - -def sz(s: bytes, o: int) -> bytes: - return s[o : s.index(b"\0", o)] - - -class PcfFontFile(FontFile.FontFile): - """Font file plugin for the X11 PCF format.""" - - name = "name" - - def __init__(self, fp: BinaryIO, charset_encoding: str = "iso8859-1"): - self.charset_encoding = charset_encoding - - magic = l32(fp.read(4)) - if magic != PCF_MAGIC: - msg = "not a PCF file" - raise SyntaxError(msg) - - super().__init__() - - count = l32(fp.read(4)) - self.toc = {} - for i in range(count): - type = l32(fp.read(4)) - self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) - - self.fp = fp - - self.info = self._load_properties() - - metrics = self._load_metrics() - bitmaps = self._load_bitmaps(metrics) - encoding = self._load_encoding() - - # - # create glyph structure - - for ch, ix in enumerate(encoding): - if ix is not None: - ( - xsize, - ysize, - left, - right, - width, - ascent, - descent, - attributes, - ) = metrics[ix] - self.glyph[ch] = ( - (width, 0), - (left, descent - ysize, xsize + left, descent), - (0, 0, xsize, ysize), - bitmaps[ix], - ) - - def _getformat( - self, tag: int - ) -> tuple[BinaryIO, int, Callable[[bytes], int], Callable[[bytes], int]]: - format, size, offset = self.toc[tag] - - fp = self.fp - fp.seek(offset) - - format = l32(fp.read(4)) - - if format & 4: - i16, i32 = b16, b32 - else: - i16, i32 = l16, l32 - - return fp, format, i16, i32 - - def _load_properties(self) -> dict[bytes, bytes | int]: - # - # font properties - - properties = {} - - fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) - - nprops = i32(fp.read(4)) - - # read property description - p = [(i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))) for _ in range(nprops)] - - if nprops 
& 3: - fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad - - data = fp.read(i32(fp.read(4))) - - for k, s, v in p: - property_value: bytes | int = sz(data, v) if s else v - properties[sz(data, k)] = property_value - - return properties - - def _load_metrics(self) -> list[tuple[int, int, int, int, int, int, int, int]]: - # - # font metrics - - metrics: list[tuple[int, int, int, int, int, int, int, int]] = [] - - fp, format, i16, i32 = self._getformat(PCF_METRICS) - - append = metrics.append - - if (format & 0xFF00) == 0x100: - # "compressed" metrics - for i in range(i16(fp.read(2))): - left = i8(fp.read(1)) - 128 - right = i8(fp.read(1)) - 128 - width = i8(fp.read(1)) - 128 - ascent = i8(fp.read(1)) - 128 - descent = i8(fp.read(1)) - 128 - xsize = right - left - ysize = ascent + descent - append((xsize, ysize, left, right, width, ascent, descent, 0)) - - else: - # "jumbo" metrics - for i in range(i32(fp.read(4))): - left = i16(fp.read(2)) - right = i16(fp.read(2)) - width = i16(fp.read(2)) - ascent = i16(fp.read(2)) - descent = i16(fp.read(2)) - attributes = i16(fp.read(2)) - xsize = right - left - ysize = ascent + descent - append((xsize, ysize, left, right, width, ascent, descent, attributes)) - - return metrics - - def _load_bitmaps( - self, metrics: list[tuple[int, int, int, int, int, int, int, int]] - ) -> list[Image.Image]: - # - # bitmap data - - fp, format, i16, i32 = self._getformat(PCF_BITMAPS) - - nbitmaps = i32(fp.read(4)) - - if nbitmaps != len(metrics): - msg = "Wrong number of bitmaps" - raise OSError(msg) - - offsets = [i32(fp.read(4)) for _ in range(nbitmaps)] - - bitmap_sizes = [i32(fp.read(4)) for _ in range(4)] - - # byteorder = format & 4 # non-zero => MSB - bitorder = format & 8 # non-zero => MSB - padindex = format & 3 - - bitmapsize = bitmap_sizes[padindex] - offsets.append(bitmapsize) - - data = fp.read(bitmapsize) - - pad = BYTES_PER_ROW[padindex] - mode = "1;R" - if bitorder: - mode = "1" - - bitmaps = [] - for i in range(nbitmaps): - xsize, ysize = metrics[i][:2] - b, e = offsets[i : i + 2] - bitmaps.append( - Image.frombytes("1", (xsize, ysize), data[b:e], "raw", mode, pad(xsize)) - ) - - return bitmaps - - def _load_encoding(self) -> list[int | None]: - fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) - - first_col, last_col = i16(fp.read(2)), i16(fp.read(2)) - first_row, last_row = i16(fp.read(2)), i16(fp.read(2)) - - i16(fp.read(2)) # default - - nencoding = (last_col - first_col + 1) * (last_row - first_row + 1) - - # map character code to bitmap index - encoding: list[int | None] = [None] * min(256, nencoding) - - encoding_offsets = [i16(fp.read(2)) for _ in range(nencoding)] - - for i in range(first_col, len(encoding)): - try: - encoding_offset = encoding_offsets[ - ord(bytearray([i]).decode(self.charset_encoding)) - ] - if encoding_offset != 0xFFFF: - encoding[i] = encoding_offset - except UnicodeDecodeError: - # character is not supported in selected encoding - pass - - return encoding diff --git a/venv/Lib/site-packages/PIL/PcxImagePlugin.py b/venv/Lib/site-packages/PIL/PcxImagePlugin.py deleted file mode 100644 index dd42003..0000000 --- a/venv/Lib/site-packages/PIL/PcxImagePlugin.py +++ /dev/null @@ -1,227 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PCX file handling -# -# This format was originally used by ZSoft's popular PaintBrush -# program for the IBM PC. It is also supported by many MS-DOS and -# Windows applications, including the Windows PaintBrush program in -# Windows 3. 
-# -# history: -# 1995-09-01 fl Created -# 1996-05-20 fl Fixed RGB support -# 1997-01-03 fl Fixed 2-bit and 4-bit support -# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) -# 1999-02-07 fl Added write support -# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust -# 2002-07-30 fl Seek from to current position, not beginning of file -# 2003-06-03 fl Extract DPI settings (info["dpi"]) -# -# Copyright (c) 1997-2003 by Secret Labs AB. -# Copyright (c) 1995-2003 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -import logging -from typing import IO - -from . import Image, ImageFile, ImagePalette -from ._binary import i16le as i16 -from ._binary import o8 -from ._binary import o16le as o16 - -logger = logging.getLogger(__name__) - - -def _accept(prefix: bytes) -> bool: - return prefix[0] == 10 and prefix[1] in [0, 2, 3, 5] - - -## -# Image plugin for Paintbrush images. - - -class PcxImageFile(ImageFile.ImageFile): - format = "PCX" - format_description = "Paintbrush" - - def _open(self) -> None: - # header - assert self.fp is not None - - s = self.fp.read(128) - if not _accept(s): - msg = "not a PCX file" - raise SyntaxError(msg) - - # image - bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1 - if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: - msg = "bad PCX image size" - raise SyntaxError(msg) - logger.debug("BBox: %s %s %s %s", *bbox) - - # format - version = s[1] - bits = s[3] - planes = s[65] - provided_stride = i16(s, 66) - logger.debug( - "PCX version %s, bits %s, planes %s, stride %s", - version, - bits, - planes, - provided_stride, - ) - - self.info["dpi"] = i16(s, 12), i16(s, 14) - - if bits == 1 and planes == 1: - mode = rawmode = "1" - - elif bits == 1 and planes in (2, 4): - mode = "P" - rawmode = "P;%dL" % planes - self.palette = ImagePalette.raw("RGB", s[16:64]) - - elif version == 5 and bits == 8 and planes == 1: - mode = rawmode = "L" - # FIXME: hey, this doesn't work with the incremental loader !!! - self.fp.seek(-769, io.SEEK_END) - s = self.fp.read(769) - if len(s) == 769 and s[0] == 12: - # check if the palette is linear grayscale - for i in range(256): - if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3: - mode = rawmode = "P" - break - if mode == "P": - self.palette = ImagePalette.raw("RGB", s[1:]) - self.fp.seek(128) - - elif version == 5 and bits == 8 and planes == 3: - mode = "RGB" - rawmode = "RGB;L" - - else: - msg = "unknown PCX mode" - raise OSError(msg) - - self._mode = mode - self._size = bbox[2] - bbox[0], bbox[3] - bbox[1] - - # Don't trust the passed in stride. - # Calculate the approximate position for ourselves. 
- # CVE-2020-35653 - stride = (self._size[0] * bits + 7) // 8 - - # While the specification states that this must be even, - # not all images follow this - if provided_stride != stride: - stride += stride % 2 - - bbox = (0, 0) + self.size - logger.debug("size: %sx%s", *self.size) - - self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] - - -# -------------------------------------------------------------------- -# save PCX files - - -SAVE = { - # mode: (version, bits, planes, raw mode) - "1": (2, 1, 1, "1"), - "L": (5, 8, 1, "L"), - "P": (5, 8, 1, "P"), - "RGB": (5, 8, 3, "RGB;L"), -} - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - try: - version, bits, planes, rawmode = SAVE[im.mode] - except KeyError as e: - msg = f"Cannot save {im.mode} images as PCX" - raise ValueError(msg) from e - - # bytes per plane - stride = (im.size[0] * bits + 7) // 8 - # stride should be even - stride += stride % 2 - # Stride needs to be kept in sync with the PcxEncode.c version. - # Ideally it should be passed in in the state, but the bytes value - # gets overwritten. - - logger.debug( - "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", - im.size[0], - bits, - stride, - ) - - # under windows, we could determine the current screen size with - # "Image.core.display_mode()[1]", but I think that's overkill... - - screen = im.size - - dpi = 100, 100 - - # PCX header - fp.write( - o8(10) - + o8(version) - + o8(1) - + o8(bits) - + o16(0) - + o16(0) - + o16(im.size[0] - 1) - + o16(im.size[1] - 1) - + o16(dpi[0]) - + o16(dpi[1]) - + b"\0" * 24 - + b"\xFF" * 24 - + b"\0" - + o8(planes) - + o16(stride) - + o16(1) - + o16(screen[0]) - + o16(screen[1]) - + b"\0" * 54 - ) - - assert fp.tell() == 128 - - ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))]) - - if im.mode == "P": - # colour palette - assert im.im is not None - - fp.write(o8(12)) - palette = im.im.getpalette("RGB", "RGB") - palette += b"\x00" * (768 - len(palette)) - fp.write(palette) # 768 bytes - elif im.mode == "L": - # grayscale palette - fp.write(o8(12)) - for i in range(256): - fp.write(o8(i) * 3) - - -# -------------------------------------------------------------------- -# registry - - -Image.register_open(PcxImageFile.format, PcxImageFile, _accept) -Image.register_save(PcxImageFile.format, _save) - -Image.register_extension(PcxImageFile.format, ".pcx") - -Image.register_mime(PcxImageFile.format, "image/x-pcx") diff --git a/venv/Lib/site-packages/PIL/PdfImagePlugin.py b/venv/Lib/site-packages/PIL/PdfImagePlugin.py deleted file mode 100644 index f0da1e0..0000000 --- a/venv/Lib/site-packages/PIL/PdfImagePlugin.py +++ /dev/null @@ -1,304 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PDF (Acrobat) file handling -# -# History: -# 1996-07-16 fl Created -# 1997-01-18 fl Fixed header -# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. -# 2004-02-24 fl Fixes for 1 and P images. -# -# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. -# Copyright (c) 1996-1997 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# - -## -# Image plugin for PDF images (output only). -## -from __future__ import annotations - -import io -import math -import os -import time -from typing import IO - -from . import Image, ImageFile, ImageSequence, PdfParser, __version__, features - -# -# -------------------------------------------------------------------- - -# object ids: -# 1. catalogue -# 2. pages -# 3. image -# 4. page -# 5. 
page contents - - -def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - _save(im, fp, filename, save_all=True) - - -## -# (Internal) Image save plugin for the PDF format. - - -def _write_image(im, filename, existing_pdf, image_refs): - # FIXME: Should replace ASCIIHexDecode with RunLengthDecode - # (packbits) or LZWDecode (tiff/lzw compression). Note that - # PDF 1.2 also supports Flatedecode (zip compression). - - params = None - decode = None - - # - # Get image characteristics - - width, height = im.size - - dict_obj = {"BitsPerComponent": 8} - if im.mode == "1": - if features.check("libtiff"): - filter = "CCITTFaxDecode" - dict_obj["BitsPerComponent"] = 1 - params = PdfParser.PdfArray( - [ - PdfParser.PdfDict( - { - "K": -1, - "BlackIs1": True, - "Columns": width, - "Rows": height, - } - ) - ] - ) - else: - filter = "DCTDecode" - dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray") - procset = "ImageB" # grayscale - elif im.mode == "L": - filter = "DCTDecode" - # params = f"<< /Predictor 15 /Columns {width-2} >>" - dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray") - procset = "ImageB" # grayscale - elif im.mode == "LA": - filter = "JPXDecode" - # params = f"<< /Predictor 15 /Columns {width-2} >>" - procset = "ImageB" # grayscale - dict_obj["SMaskInData"] = 1 - elif im.mode == "P": - filter = "ASCIIHexDecode" - palette = im.getpalette() - dict_obj["ColorSpace"] = [ - PdfParser.PdfName("Indexed"), - PdfParser.PdfName("DeviceRGB"), - len(palette) // 3 - 1, - PdfParser.PdfBinary(palette), - ] - procset = "ImageI" # indexed color - - if "transparency" in im.info: - smask = im.convert("LA").getchannel("A") - smask.encoderinfo = {} - - image_ref = _write_image(smask, filename, existing_pdf, image_refs)[0] - dict_obj["SMask"] = image_ref - elif im.mode == "RGB": - filter = "DCTDecode" - dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceRGB") - procset = "ImageC" # color images - elif im.mode == "RGBA": - filter = "JPXDecode" - procset = "ImageC" # color images - dict_obj["SMaskInData"] = 1 - elif im.mode == "CMYK": - filter = "DCTDecode" - dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceCMYK") - procset = "ImageC" # color images - decode = [1, 0, 1, 0, 1, 0, 1, 0] - else: - msg = f"cannot save mode {im.mode}" - raise ValueError(msg) - - # - # image - - op = io.BytesIO() - - if filter == "ASCIIHexDecode": - ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)]) - elif filter == "CCITTFaxDecode": - im.save( - op, - "TIFF", - compression="group4", - # use a single strip - strip_size=math.ceil(width / 8) * height, - ) - elif filter == "DCTDecode": - Image.SAVE["JPEG"](im, op, filename) - elif filter == "JPXDecode": - del dict_obj["BitsPerComponent"] - Image.SAVE["JPEG2000"](im, op, filename) - else: - msg = f"unsupported PDF filter ({filter})" - raise ValueError(msg) - - stream = op.getvalue() - if filter == "CCITTFaxDecode": - stream = stream[8:] - filter = PdfParser.PdfArray([PdfParser.PdfName(filter)]) - else: - filter = PdfParser.PdfName(filter) - - image_ref = image_refs.pop(0) - existing_pdf.write_obj( - image_ref, - stream=stream, - Type=PdfParser.PdfName("XObject"), - Subtype=PdfParser.PdfName("Image"), - Width=width, # * 72.0 / x_resolution, - Height=height, # * 72.0 / y_resolution, - Filter=filter, - Decode=decode, - DecodeParms=params, - **dict_obj, - ) - - return image_ref, procset - - -def _save(im, fp, filename, save_all=False): - is_appending = im.encoderinfo.get("append", False) - if is_appending: - existing_pdf = 
PdfParser.PdfParser(f=fp, filename=filename, mode="r+b") - else: - existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b") - - dpi = im.encoderinfo.get("dpi") - if dpi: - x_resolution = dpi[0] - y_resolution = dpi[1] - else: - x_resolution = y_resolution = im.encoderinfo.get("resolution", 72.0) - - info = { - "title": ( - None if is_appending else os.path.splitext(os.path.basename(filename))[0] - ), - "author": None, - "subject": None, - "keywords": None, - "creator": None, - "producer": None, - "creationDate": None if is_appending else time.gmtime(), - "modDate": None if is_appending else time.gmtime(), - } - for k, default in info.items(): - v = im.encoderinfo.get(k) if k in im.encoderinfo else default - if v: - existing_pdf.info[k[0].upper() + k[1:]] = v - - # - # make sure image data is available - im.load() - - existing_pdf.start_writing() - existing_pdf.write_header() - existing_pdf.write_comment(f"created by Pillow {__version__} PDF driver") - - # - # pages - ims = [im] - if save_all: - append_images = im.encoderinfo.get("append_images", []) - for append_im in append_images: - append_im.encoderinfo = im.encoderinfo.copy() - ims.append(append_im) - number_of_pages = 0 - image_refs = [] - page_refs = [] - contents_refs = [] - for im in ims: - im_number_of_pages = 1 - if save_all: - try: - im_number_of_pages = im.n_frames - except AttributeError: - # Image format does not have n_frames. - # It is a single frame image - pass - number_of_pages += im_number_of_pages - for i in range(im_number_of_pages): - image_refs.append(existing_pdf.next_object_id(0)) - if im.mode == "P" and "transparency" in im.info: - image_refs.append(existing_pdf.next_object_id(0)) - - page_refs.append(existing_pdf.next_object_id(0)) - contents_refs.append(existing_pdf.next_object_id(0)) - existing_pdf.pages.append(page_refs[-1]) - - # - # catalog and list of pages - existing_pdf.write_catalog() - - page_number = 0 - for im_sequence in ims: - im_pages = ImageSequence.Iterator(im_sequence) if save_all else [im_sequence] - for im in im_pages: - image_ref, procset = _write_image(im, filename, existing_pdf, image_refs) - - # - # page - - existing_pdf.write_page( - page_refs[page_number], - Resources=PdfParser.PdfDict( - ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)], - XObject=PdfParser.PdfDict(image=image_ref), - ), - MediaBox=[ - 0, - 0, - im.width * 72.0 / x_resolution, - im.height * 72.0 / y_resolution, - ], - Contents=contents_refs[page_number], - ) - - # - # page contents - - page_contents = b"q %f 0 0 %f 0 0 cm /image Do Q\n" % ( - im.width * 72.0 / x_resolution, - im.height * 72.0 / y_resolution, - ) - - existing_pdf.write_obj(contents_refs[page_number], stream=page_contents) - - page_number += 1 - - # - # trailer - existing_pdf.write_xref_and_trailer() - if hasattr(fp, "flush"): - fp.flush() - existing_pdf.close() - - -# -# -------------------------------------------------------------------- - - -Image.register_save("PDF", _save) -Image.register_save_all("PDF", _save_all) - -Image.register_extension("PDF", ".pdf") - -Image.register_mime("PDF", "application/pdf") diff --git a/venv/Lib/site-packages/PIL/PdfParser.py b/venv/Lib/site-packages/PIL/PdfParser.py deleted file mode 100644 index 9e22313..0000000 --- a/venv/Lib/site-packages/PIL/PdfParser.py +++ /dev/null @@ -1,1003 +0,0 @@ -from __future__ import annotations - -import calendar -import codecs -import collections -import mmap -import os -import re -import time -import zlib -from typing import TYPE_CHECKING, Any, List, 
NamedTuple, Union - - -# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set -# on page 656 -def encode_text(s: str) -> bytes: - return codecs.BOM_UTF16_BE + s.encode("utf_16_be") - - -PDFDocEncoding = { - 0x16: "\u0017", - 0x18: "\u02D8", - 0x19: "\u02C7", - 0x1A: "\u02C6", - 0x1B: "\u02D9", - 0x1C: "\u02DD", - 0x1D: "\u02DB", - 0x1E: "\u02DA", - 0x1F: "\u02DC", - 0x80: "\u2022", - 0x81: "\u2020", - 0x82: "\u2021", - 0x83: "\u2026", - 0x84: "\u2014", - 0x85: "\u2013", - 0x86: "\u0192", - 0x87: "\u2044", - 0x88: "\u2039", - 0x89: "\u203A", - 0x8A: "\u2212", - 0x8B: "\u2030", - 0x8C: "\u201E", - 0x8D: "\u201C", - 0x8E: "\u201D", - 0x8F: "\u2018", - 0x90: "\u2019", - 0x91: "\u201A", - 0x92: "\u2122", - 0x93: "\uFB01", - 0x94: "\uFB02", - 0x95: "\u0141", - 0x96: "\u0152", - 0x97: "\u0160", - 0x98: "\u0178", - 0x99: "\u017D", - 0x9A: "\u0131", - 0x9B: "\u0142", - 0x9C: "\u0153", - 0x9D: "\u0161", - 0x9E: "\u017E", - 0xA0: "\u20AC", -} - - -def decode_text(b): - if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: - return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be") - else: - return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) - - -class PdfFormatError(RuntimeError): - """An error that probably indicates a syntactic or semantic error in the - PDF file structure""" - - pass - - -def check_format_condition(condition: bool, error_message: str) -> None: - if not condition: - raise PdfFormatError(error_message) - - -class IndirectReferenceTuple(NamedTuple): - object_id: int - generation: int - - -class IndirectReference(IndirectReferenceTuple): - def __str__(self) -> str: - return f"{self.object_id} {self.generation} R" - - def __bytes__(self) -> bytes: - return self.__str__().encode("us-ascii") - - def __eq__(self, other: object) -> bool: - if self.__class__ is not other.__class__: - return False - assert isinstance(other, IndirectReference) - return other.object_id == self.object_id and other.generation == self.generation - - def __ne__(self, other): - return not (self == other) - - def __hash__(self) -> int: - return hash((self.object_id, self.generation)) - - -class IndirectObjectDef(IndirectReference): - def __str__(self) -> str: - return f"{self.object_id} {self.generation} obj" - - -class XrefTable: - def __init__(self): - self.existing_entries = {} # object ID => (offset, generation) - self.new_entries = {} # object ID => (offset, generation) - self.deleted_entries = {0: 65536} # object ID => generation - self.reading_finished = False - - def __setitem__(self, key, value): - if self.reading_finished: - self.new_entries[key] = value - else: - self.existing_entries[key] = value - if key in self.deleted_entries: - del self.deleted_entries[key] - - def __getitem__(self, key): - try: - return self.new_entries[key] - except KeyError: - return self.existing_entries[key] - - def __delitem__(self, key): - if key in self.new_entries: - generation = self.new_entries[key][1] + 1 - del self.new_entries[key] - self.deleted_entries[key] = generation - elif key in self.existing_entries: - generation = self.existing_entries[key][1] + 1 - self.deleted_entries[key] = generation - elif key in self.deleted_entries: - generation = self.deleted_entries[key] - else: - msg = f"object ID {key} cannot be deleted because it doesn't exist" - raise IndexError(msg) - - def __contains__(self, key): - return key in self.existing_entries or key in self.new_entries - - def __len__(self) -> int: - return len( - set(self.existing_entries.keys()) - | set(self.new_entries.keys()) - 
| set(self.deleted_entries.keys()) - ) - - def keys(self): - return ( - set(self.existing_entries.keys()) - set(self.deleted_entries.keys()) - ) | set(self.new_entries.keys()) - - def write(self, f): - keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys())) - deleted_keys = sorted(set(self.deleted_entries.keys())) - startxref = f.tell() - f.write(b"xref\n") - while keys: - # find a contiguous sequence of object IDs - prev = None - for index, key in enumerate(keys): - if prev is None or prev + 1 == key: - prev = key - else: - contiguous_keys = keys[:index] - keys = keys[index:] - break - else: - contiguous_keys = keys - keys = None - f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys))) - for object_id in contiguous_keys: - if object_id in self.new_entries: - f.write(b"%010d %05d n \n" % self.new_entries[object_id]) - else: - this_deleted_object_id = deleted_keys.pop(0) - check_format_condition( - object_id == this_deleted_object_id, - f"expected the next deleted object ID to be {object_id}, " - f"instead found {this_deleted_object_id}", - ) - try: - next_in_linked_list = deleted_keys[0] - except IndexError: - next_in_linked_list = 0 - f.write( - b"%010d %05d f \n" - % (next_in_linked_list, self.deleted_entries[object_id]) - ) - return startxref - - -class PdfName: - def __init__(self, name): - if isinstance(name, PdfName): - self.name = name.name - elif isinstance(name, bytes): - self.name = name - else: - self.name = name.encode("us-ascii") - - def name_as_str(self) -> str: - return self.name.decode("us-ascii") - - def __eq__(self, other): - return ( - isinstance(other, PdfName) and other.name == self.name - ) or other == self.name - - def __hash__(self) -> int: - return hash(self.name) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({repr(self.name)})" - - @classmethod - def from_pdf_stream(cls, data): - return cls(PdfParser.interpret_name(data)) - - allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"} - - def __bytes__(self) -> bytes: - result = bytearray(b"/") - for b in self.name: - if b in self.allowed_chars: - result.append(b) - else: - result.extend(b"#%02X" % b) - return bytes(result) - - -class PdfArray(List[Any]): - def __bytes__(self) -> bytes: - return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" - - -if TYPE_CHECKING: - _DictBase = collections.UserDict[Union[str, bytes], Any] -else: - _DictBase = collections.UserDict - - -class PdfDict(_DictBase): - def __setattr__(self, key, value): - if key == "data": - collections.UserDict.__setattr__(self, key, value) - else: - self[key.encode("us-ascii")] = value - - def __getattr__(self, key): - try: - value = self[key.encode("us-ascii")] - except KeyError as e: - raise AttributeError(key) from e - if isinstance(value, bytes): - value = decode_text(value) - if key.endswith("Date"): - if value.startswith("D:"): - value = value[2:] - - relationship = "Z" - if len(value) > 17: - relationship = value[14] - offset = int(value[15:17]) * 60 - if len(value) > 20: - offset += int(value[18:20]) - - format = "%Y%m%d%H%M%S"[: len(value) - 2] - value = time.strptime(value[: len(format) + 2], format) - if relationship in ["+", "-"]: - offset *= 60 - if relationship == "+": - offset *= -1 - value = time.gmtime(calendar.timegm(value) + offset) - return value - - def __bytes__(self) -> bytes: - out = bytearray(b"<<") - for key, value in self.items(): - if value is None: - continue - value = pdf_repr(value) - out.extend(b"\n") - out.extend(bytes(PdfName(key))) - out.extend(b" 
") - out.extend(value) - out.extend(b"\n>>") - return bytes(out) - - -class PdfBinary: - def __init__(self, data): - self.data = data - - def __bytes__(self) -> bytes: - return b"<%s>" % b"".join(b"%02X" % b for b in self.data) - - -class PdfStream: - def __init__(self, dictionary, buf): - self.dictionary = dictionary - self.buf = buf - - def decode(self): - try: - filter = self.dictionary.Filter - except AttributeError: - return self.buf - if filter == b"FlateDecode": - try: - expected_length = self.dictionary.DL - except AttributeError: - expected_length = self.dictionary.Length - return zlib.decompress(self.buf, bufsize=int(expected_length)) - else: - msg = f"stream filter {repr(self.dictionary.Filter)} unknown/unsupported" - raise NotImplementedError(msg) - - -def pdf_repr(x): - if x is True: - return b"true" - elif x is False: - return b"false" - elif x is None: - return b"null" - elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)): - return bytes(x) - elif isinstance(x, (int, float)): - return str(x).encode("us-ascii") - elif isinstance(x, time.struct_time): - return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")" - elif isinstance(x, dict): - return bytes(PdfDict(x)) - elif isinstance(x, list): - return bytes(PdfArray(x)) - elif isinstance(x, str): - return pdf_repr(encode_text(x)) - elif isinstance(x, bytes): - # XXX escape more chars? handle binary garbage - x = x.replace(b"\\", b"\\\\") - x = x.replace(b"(", b"\\(") - x = x.replace(b")", b"\\)") - return b"(" + x + b")" - else: - return bytes(x) - - -class PdfParser: - """Based on - https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf - Supports PDF up to 1.4 - """ - - def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"): - if buf and f: - msg = "specify buf or f or filename, but not both buf and f" - raise RuntimeError(msg) - self.filename = filename - self.buf = buf - self.f = f - self.start_offset = start_offset - self.should_close_buf = False - self.should_close_file = False - if filename is not None and f is None: - self.f = f = open(filename, mode) - self.should_close_file = True - if f is not None: - self.buf = buf = self.get_buf_from_file(f) - self.should_close_buf = True - if not filename and hasattr(f, "name"): - self.filename = f.name - self.cached_objects = {} - if buf: - self.read_pdf_info() - else: - self.file_size_total = self.file_size_this = 0 - self.root = PdfDict() - self.root_ref = None - self.info = PdfDict() - self.info_ref = None - self.page_tree_root = {} - self.pages = [] - self.orig_pages = [] - self.pages_ref = None - self.last_xref_section_offset = None - self.trailer_dict = {} - self.xref_table = XrefTable() - self.xref_table.reading_finished = True - if f: - self.seek_end() - - def __enter__(self) -> PdfParser: - return self - - def __exit__(self, *args: object) -> None: - self.close() - - def start_writing(self) -> None: - self.close_buf() - self.seek_end() - - def close_buf(self) -> None: - try: - self.buf.close() - except AttributeError: - pass - self.buf = None - - def close(self) -> None: - if self.should_close_buf: - self.close_buf() - if self.f is not None and self.should_close_file: - self.f.close() - self.f = None - - def seek_end(self) -> None: - self.f.seek(0, os.SEEK_END) - - def write_header(self) -> None: - self.f.write(b"%PDF-1.4\n") - - def write_comment(self, s): - self.f.write(f"% {s}\n".encode()) - - def write_catalog(self) -> IndirectReference: - self.del_root() - self.root_ref = 
self.next_object_id(self.f.tell()) - self.pages_ref = self.next_object_id(0) - self.rewrite_pages() - self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref) - self.write_obj( - self.pages_ref, - Type=PdfName(b"Pages"), - Count=len(self.pages), - Kids=self.pages, - ) - return self.root_ref - - def rewrite_pages(self) -> None: - pages_tree_nodes_to_delete = [] - for i, page_ref in enumerate(self.orig_pages): - page_info = self.cached_objects[page_ref] - del self.xref_table[page_ref.object_id] - pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")]) - if page_ref not in self.pages: - # the page has been deleted - continue - # make dict keys into strings for passing to write_page - stringified_page_info = {} - for key, value in page_info.items(): - # key should be a PdfName - stringified_page_info[key.name_as_str()] = value - stringified_page_info["Parent"] = self.pages_ref - new_page_ref = self.write_page(None, **stringified_page_info) - for j, cur_page_ref in enumerate(self.pages): - if cur_page_ref == page_ref: - # replace the page reference with the new one - self.pages[j] = new_page_ref - # delete redundant Pages tree nodes from xref table - for pages_tree_node_ref in pages_tree_nodes_to_delete: - while pages_tree_node_ref: - pages_tree_node = self.cached_objects[pages_tree_node_ref] - if pages_tree_node_ref.object_id in self.xref_table: - del self.xref_table[pages_tree_node_ref.object_id] - pages_tree_node_ref = pages_tree_node.get(b"Parent", None) - self.orig_pages = [] - - def write_xref_and_trailer(self, new_root_ref=None): - if new_root_ref: - self.del_root() - self.root_ref = new_root_ref - if self.info: - self.info_ref = self.write_obj(None, self.info) - start_xref = self.xref_table.write(self.f) - num_entries = len(self.xref_table) - trailer_dict = {b"Root": self.root_ref, b"Size": num_entries} - if self.last_xref_section_offset is not None: - trailer_dict[b"Prev"] = self.last_xref_section_offset - if self.info: - trailer_dict[b"Info"] = self.info_ref - self.last_xref_section_offset = start_xref - self.f.write( - b"trailer\n" - + bytes(PdfDict(trailer_dict)) - + b"\nstartxref\n%d\n%%%%EOF" % start_xref - ) - - def write_page(self, ref, *objs, **dict_obj): - if isinstance(ref, int): - ref = self.pages[ref] - if "Type" not in dict_obj: - dict_obj["Type"] = PdfName(b"Page") - if "Parent" not in dict_obj: - dict_obj["Parent"] = self.pages_ref - return self.write_obj(ref, *objs, **dict_obj) - - def write_obj(self, ref, *objs, **dict_obj): - f = self.f - if ref is None: - ref = self.next_object_id(f.tell()) - else: - self.xref_table[ref.object_id] = (f.tell(), ref.generation) - f.write(bytes(IndirectObjectDef(*ref))) - stream = dict_obj.pop("stream", None) - if stream is not None: - dict_obj["Length"] = len(stream) - if dict_obj: - f.write(pdf_repr(dict_obj)) - for obj in objs: - f.write(pdf_repr(obj)) - if stream is not None: - f.write(b"stream\n") - f.write(stream) - f.write(b"\nendstream\n") - f.write(b"endobj\n") - return ref - - def del_root(self) -> None: - if self.root_ref is None: - return - del self.xref_table[self.root_ref.object_id] - del self.xref_table[self.root[b"Pages"].object_id] - - @staticmethod - def get_buf_from_file(f): - if hasattr(f, "getbuffer"): - return f.getbuffer() - elif hasattr(f, "getvalue"): - return f.getvalue() - else: - try: - return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) - except ValueError: # cannot mmap an empty file - return b"" - - def read_pdf_info(self) -> None: - self.file_size_total = len(self.buf) - 
self.file_size_this = self.file_size_total - self.start_offset - self.read_trailer() - self.root_ref = self.trailer_dict[b"Root"] - self.info_ref = self.trailer_dict.get(b"Info", None) - self.root = PdfDict(self.read_indirect(self.root_ref)) - if self.info_ref is None: - self.info = PdfDict() - else: - self.info = PdfDict(self.read_indirect(self.info_ref)) - check_format_condition(b"Type" in self.root, "/Type missing in Root") - check_format_condition( - self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog" - ) - check_format_condition(b"Pages" in self.root, "/Pages missing in Root") - check_format_condition( - isinstance(self.root[b"Pages"], IndirectReference), - "/Pages in Root is not an indirect reference", - ) - self.pages_ref = self.root[b"Pages"] - self.page_tree_root = self.read_indirect(self.pages_ref) - self.pages = self.linearize_page_tree(self.page_tree_root) - # save the original list of page references - # in case the user modifies, adds or deletes some pages - # and we need to rewrite the pages and their list - self.orig_pages = self.pages[:] - - def next_object_id(self, offset=None): - try: - # TODO: support reuse of deleted objects - reference = IndirectReference(max(self.xref_table.keys()) + 1, 0) - except ValueError: - reference = IndirectReference(1, 0) - if offset is not None: - self.xref_table[reference.object_id] = (offset, 0) - return reference - - delimiter = rb"[][()<>{}/%]" - delimiter_or_ws = rb"[][()<>{}/%\000\011\012\014\015\040]" - whitespace = rb"[\000\011\012\014\015\040]" - whitespace_or_hex = rb"[\000\011\012\014\015\0400-9a-fA-F]" - whitespace_optional = whitespace + b"*" - whitespace_mandatory = whitespace + b"+" - # No "\012" aka "\n" or "\015" aka "\r": - whitespace_optional_no_nl = rb"[\000\011\014\040]*" - newline_only = rb"[\r\n]+" - newline = whitespace_optional_no_nl + newline_only + whitespace_optional_no_nl - re_trailer_end = re.compile( - whitespace_mandatory - + rb"trailer" - + whitespace_optional - + rb"<<(.*>>)" - + newline - + rb"startxref" - + newline - + rb"([0-9]+)" - + newline - + rb"%%EOF" - + whitespace_optional - + rb"$", - re.DOTALL, - ) - re_trailer_prev = re.compile( - whitespace_optional - + rb"trailer" - + whitespace_optional - + rb"<<(.*?>>)" - + newline - + rb"startxref" - + newline - + rb"([0-9]+)" - + newline - + rb"%%EOF" - + whitespace_optional, - re.DOTALL, - ) - - def read_trailer(self): - search_start_offset = len(self.buf) - 16384 - if search_start_offset < self.start_offset: - search_start_offset = self.start_offset - m = self.re_trailer_end.search(self.buf, search_start_offset) - check_format_condition(m, "trailer end not found") - # make sure we found the LAST trailer - last_match = m - while m: - last_match = m - m = self.re_trailer_end.search(self.buf, m.start() + 16) - if not m: - m = last_match - trailer_data = m.group(1) - self.last_xref_section_offset = int(m.group(2)) - self.trailer_dict = self.interpret_trailer(trailer_data) - self.xref_table = XrefTable() - self.read_xref_table(xref_section_offset=self.last_xref_section_offset) - if b"Prev" in self.trailer_dict: - self.read_prev_trailer(self.trailer_dict[b"Prev"]) - - def read_prev_trailer(self, xref_section_offset): - trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset) - m = self.re_trailer_prev.search( - self.buf[trailer_offset : trailer_offset + 16384] - ) - check_format_condition(m, "previous trailer not found") - trailer_data = m.group(1) - check_format_condition( - int(m.group(2)) == xref_section_offset, - "xref 
section offset in previous trailer doesn't match what was expected", - ) - trailer_dict = self.interpret_trailer(trailer_data) - if b"Prev" in trailer_dict: - self.read_prev_trailer(trailer_dict[b"Prev"]) - - re_whitespace_optional = re.compile(whitespace_optional) - re_name = re.compile( - whitespace_optional - + rb"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" - + delimiter_or_ws - + rb")" - ) - re_dict_start = re.compile(whitespace_optional + rb"<<") - re_dict_end = re.compile(whitespace_optional + rb">>" + whitespace_optional) - - @classmethod - def interpret_trailer(cls, trailer_data): - trailer = {} - offset = 0 - while True: - m = cls.re_name.match(trailer_data, offset) - if not m: - m = cls.re_dict_end.match(trailer_data, offset) - check_format_condition( - m and m.end() == len(trailer_data), - "name not found in trailer, remaining data: " - + repr(trailer_data[offset:]), - ) - break - key = cls.interpret_name(m.group(1)) - value, offset = cls.get_value(trailer_data, m.end()) - trailer[key] = value - check_format_condition( - b"Size" in trailer and isinstance(trailer[b"Size"], int), - "/Size not in trailer or not an integer", - ) - check_format_condition( - b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), - "/Root not in trailer or not an indirect reference", - ) - return trailer - - re_hashes_in_name = re.compile(rb"([^#]*)(#([0-9a-fA-F]{2}))?") - - @classmethod - def interpret_name(cls, raw, as_text=False): - name = b"" - for m in cls.re_hashes_in_name.finditer(raw): - if m.group(3): - name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii")) - else: - name += m.group(1) - if as_text: - return name.decode("utf-8") - else: - return bytes(name) - - re_null = re.compile(whitespace_optional + rb"null(?=" + delimiter_or_ws + rb")") - re_true = re.compile(whitespace_optional + rb"true(?=" + delimiter_or_ws + rb")") - re_false = re.compile(whitespace_optional + rb"false(?=" + delimiter_or_ws + rb")") - re_int = re.compile( - whitespace_optional + rb"([-+]?[0-9]+)(?=" + delimiter_or_ws + rb")" - ) - re_real = re.compile( - whitespace_optional - + rb"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" - + delimiter_or_ws - + rb")" - ) - re_array_start = re.compile(whitespace_optional + rb"\[") - re_array_end = re.compile(whitespace_optional + rb"]") - re_string_hex = re.compile( - whitespace_optional + rb"<(" + whitespace_or_hex + rb"*)>" - ) - re_string_lit = re.compile(whitespace_optional + rb"\(") - re_indirect_reference = re.compile( - whitespace_optional - + rb"([-+]?[0-9]+)" - + whitespace_mandatory - + rb"([-+]?[0-9]+)" - + whitespace_mandatory - + rb"R(?=" - + delimiter_or_ws - + rb")" - ) - re_indirect_def_start = re.compile( - whitespace_optional - + rb"([-+]?[0-9]+)" - + whitespace_mandatory - + rb"([-+]?[0-9]+)" - + whitespace_mandatory - + rb"obj(?=" - + delimiter_or_ws - + rb")" - ) - re_indirect_def_end = re.compile( - whitespace_optional + rb"endobj(?=" + delimiter_or_ws + rb")" - ) - re_comment = re.compile( - rb"(" + whitespace_optional + rb"%[^\r\n]*" + newline + rb")*" - ) - re_stream_start = re.compile(whitespace_optional + rb"stream\r?\n") - re_stream_end = re.compile( - whitespace_optional + rb"endstream(?=" + delimiter_or_ws + rb")" - ) - - @classmethod - def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1): - if max_nesting == 0: - return None, None - m = cls.re_comment.match(data, offset) - if m: - offset = m.end() - m = cls.re_indirect_def_start.match(data, offset) - if m: - check_format_condition( - int(m.group(1)) > 0, - 
"indirect object definition: object ID must be greater than 0", - ) - check_format_condition( - int(m.group(2)) >= 0, - "indirect object definition: generation must be non-negative", - ) - check_format_condition( - expect_indirect is None - or expect_indirect - == IndirectReference(int(m.group(1)), int(m.group(2))), - "indirect object definition different than expected", - ) - object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1) - if offset is None: - return object, None - m = cls.re_indirect_def_end.match(data, offset) - check_format_condition(m, "indirect object definition end not found") - return object, m.end() - check_format_condition( - not expect_indirect, "indirect object definition not found" - ) - m = cls.re_indirect_reference.match(data, offset) - if m: - check_format_condition( - int(m.group(1)) > 0, - "indirect object reference: object ID must be greater than 0", - ) - check_format_condition( - int(m.group(2)) >= 0, - "indirect object reference: generation must be non-negative", - ) - return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() - m = cls.re_dict_start.match(data, offset) - if m: - offset = m.end() - result = {} - m = cls.re_dict_end.match(data, offset) - while not m: - key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) - if offset is None: - return result, None - value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) - result[key] = value - if offset is None: - return result, None - m = cls.re_dict_end.match(data, offset) - offset = m.end() - m = cls.re_stream_start.match(data, offset) - if m: - try: - stream_len_str = result.get(b"Length") - stream_len = int(stream_len_str) - except (TypeError, ValueError) as e: - msg = f"bad or missing Length in stream dict ({stream_len_str})" - raise PdfFormatError(msg) from e - stream_data = data[m.end() : m.end() + stream_len] - m = cls.re_stream_end.match(data, m.end() + stream_len) - check_format_condition(m, "stream end not found") - offset = m.end() - result = PdfStream(PdfDict(result), stream_data) - else: - result = PdfDict(result) - return result, offset - m = cls.re_array_start.match(data, offset) - if m: - offset = m.end() - result = [] - m = cls.re_array_end.match(data, offset) - while not m: - value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) - result.append(value) - if offset is None: - return result, None - m = cls.re_array_end.match(data, offset) - return result, m.end() - m = cls.re_null.match(data, offset) - if m: - return None, m.end() - m = cls.re_true.match(data, offset) - if m: - return True, m.end() - m = cls.re_false.match(data, offset) - if m: - return False, m.end() - m = cls.re_name.match(data, offset) - if m: - return PdfName(cls.interpret_name(m.group(1))), m.end() - m = cls.re_int.match(data, offset) - if m: - return int(m.group(1)), m.end() - m = cls.re_real.match(data, offset) - if m: - # XXX Decimal instead of float??? 
- return float(m.group(1)), m.end() - m = cls.re_string_hex.match(data, offset) - if m: - # filter out whitespace - hex_string = bytearray( - b for b in m.group(1) if b in b"0123456789abcdefABCDEF" - ) - if len(hex_string) % 2 == 1: - # append a 0 if the length is not even - yes, at the end - hex_string.append(ord(b"0")) - return bytearray.fromhex(hex_string.decode("us-ascii")), m.end() - m = cls.re_string_lit.match(data, offset) - if m: - return cls.get_literal_string(data, m.end()) - # return None, offset # fallback (only for debugging) - msg = f"unrecognized object: {repr(data[offset : offset + 32])}" - raise PdfFormatError(msg) - - re_lit_str_token = re.compile( - rb"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))" - ) - escaped_chars = { - b"n": b"\n", - b"r": b"\r", - b"t": b"\t", - b"b": b"\b", - b"f": b"\f", - b"(": b"(", - b")": b")", - b"\\": b"\\", - ord(b"n"): b"\n", - ord(b"r"): b"\r", - ord(b"t"): b"\t", - ord(b"b"): b"\b", - ord(b"f"): b"\f", - ord(b"("): b"(", - ord(b")"): b")", - ord(b"\\"): b"\\", - } - - @classmethod - def get_literal_string(cls, data, offset): - nesting_depth = 0 - result = bytearray() - for m in cls.re_lit_str_token.finditer(data, offset): - result.extend(data[offset : m.start()]) - if m.group(1): - result.extend(cls.escaped_chars[m.group(1)[1]]) - elif m.group(2): - result.append(int(m.group(2)[1:], 8)) - elif m.group(3): - pass - elif m.group(5): - result.extend(b"\n") - elif m.group(6): - result.extend(b"(") - nesting_depth += 1 - elif m.group(7): - if nesting_depth == 0: - return bytes(result), m.end() - result.extend(b")") - nesting_depth -= 1 - offset = m.end() - msg = "unfinished literal string" - raise PdfFormatError(msg) - - re_xref_section_start = re.compile(whitespace_optional + rb"xref" + newline) - re_xref_subsection_start = re.compile( - whitespace_optional - + rb"([0-9]+)" - + whitespace_mandatory - + rb"([0-9]+)" - + whitespace_optional - + newline_only - ) - re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)") - - def read_xref_table(self, xref_section_offset): - subsection_found = False - m = self.re_xref_section_start.match( - self.buf, xref_section_offset + self.start_offset - ) - check_format_condition(m, "xref section start not found") - offset = m.end() - while True: - m = self.re_xref_subsection_start.match(self.buf, offset) - if not m: - check_format_condition( - subsection_found, "xref subsection start not found" - ) - break - subsection_found = True - offset = m.end() - first_object = int(m.group(1)) - num_objects = int(m.group(2)) - for i in range(first_object, first_object + num_objects): - m = self.re_xref_entry.match(self.buf, offset) - check_format_condition(m, "xref entry not found") - offset = m.end() - is_free = m.group(3) == b"f" - if not is_free: - generation = int(m.group(2)) - new_entry = (int(m.group(1)), generation) - if i not in self.xref_table: - self.xref_table[i] = new_entry - return offset - - def read_indirect(self, ref, max_nesting=-1): - offset, generation = self.xref_table[ref[0]] - check_format_condition( - generation == ref[1], - f"expected to find generation {ref[1]} for object ID {ref[0]} in xref " - f"table, instead found generation {generation} at offset {offset}", - ) - value = self.get_value( - self.buf, - offset + self.start_offset, - expect_indirect=IndirectReference(*ref), - max_nesting=max_nesting, - )[0] - self.cached_objects[ref] = value - return value - - def linearize_page_tree(self, node=None): - if node is None: - node = self.page_tree_root 
- check_format_condition( - node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages" - ) - pages = [] - for kid in node[b"Kids"]: - kid_object = self.read_indirect(kid) - if kid_object[b"Type"] == b"Page": - pages.append(kid) - else: - pages.extend(self.linearize_page_tree(node=kid_object)) - return pages diff --git a/venv/Lib/site-packages/PIL/PixarImagePlugin.py b/venv/Lib/site-packages/PIL/PixarImagePlugin.py deleted file mode 100644 index 887b656..0000000 --- a/venv/Lib/site-packages/PIL/PixarImagePlugin.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PIXAR raster support for PIL -# -# history: -# 97-01-29 fl Created -# -# notes: -# This is incomplete; it is based on a few samples created with -# Photoshop 2.5 and 3.0, and a summary description provided by -# Greg Coats . Hopefully, "L" and -# "RGBA" support will be added in future versions. -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image, ImageFile -from ._binary import i16le as i16 - -# -# helpers - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"\200\350\000\000" - - -## -# Image plugin for PIXAR raster images. - - -class PixarImageFile(ImageFile.ImageFile): - format = "PIXAR" - format_description = "PIXAR raster image" - - def _open(self) -> None: - # assuming a 4-byte magic label - assert self.fp is not None - - s = self.fp.read(4) - if not _accept(s): - msg = "not a PIXAR file" - raise SyntaxError(msg) - - # read rest of header - s = s + self.fp.read(508) - - self._size = i16(s, 418), i16(s, 416) - - # get channel/depth descriptions - mode = i16(s, 424), i16(s, 426) - - if mode == (14, 2): - self._mode = "RGB" - # FIXME: to be continued... - - # create tile descriptor (assuming "dumped") - self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))] - - -# -# -------------------------------------------------------------------- - -Image.register_open(PixarImageFile.format, PixarImageFile, _accept) - -Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/venv/Lib/site-packages/PIL/PngImagePlugin.py b/venv/Lib/site-packages/PIL/PngImagePlugin.py deleted file mode 100644 index d283492..0000000 --- a/venv/Lib/site-packages/PIL/PngImagePlugin.py +++ /dev/null @@ -1,1489 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PNG support code -# -# See "PNG (Portable Network Graphics) Specification, version 1.0; -# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
-# -# history: -# 1996-05-06 fl Created (couldn't resist it) -# 1996-12-14 fl Upgraded, added read and verify support (0.2) -# 1996-12-15 fl Separate PNG stream parser -# 1996-12-29 fl Added write support, added getchunks -# 1996-12-30 fl Eliminated circular references in decoder (0.3) -# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) -# 2001-02-08 fl Added transparency support (from Zircon) (0.5) -# 2001-04-16 fl Don't close data source in "open" method (0.6) -# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) -# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) -# 2004-09-20 fl Added PngInfo chunk container -# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) -# 2008-08-13 fl Added tRNS support for RGB images -# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) -# 2009-03-08 fl Added zTXT support (from Lowell Alleman) -# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) -# -# Copyright (c) 1997-2009 by Secret Labs AB -# Copyright (c) 1996 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import itertools -import logging -import re -import struct -import warnings -import zlib -from enum import IntEnum -from typing import IO, TYPE_CHECKING, Any, NoReturn - -from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence -from ._binary import i16be as i16 -from ._binary import i32be as i32 -from ._binary import o8 -from ._binary import o16be as o16 -from ._binary import o32be as o32 - -if TYPE_CHECKING: - from . import _imaging - -logger = logging.getLogger(__name__) - -is_cid = re.compile(rb"\w\w\w\w").match - - -_MAGIC = b"\211PNG\r\n\032\n" - - -_MODES = { - # supported bits/color combinations, and corresponding modes/rawmodes - # Grayscale - (1, 0): ("1", "1"), - (2, 0): ("L", "L;2"), - (4, 0): ("L", "L;4"), - (8, 0): ("L", "L"), - (16, 0): ("I;16", "I;16B"), - # Truecolour - (8, 2): ("RGB", "RGB"), - (16, 2): ("RGB", "RGB;16B"), - # Indexed-colour - (1, 3): ("P", "P;1"), - (2, 3): ("P", "P;2"), - (4, 3): ("P", "P;4"), - (8, 3): ("P", "P"), - # Grayscale with alpha - (8, 4): ("LA", "LA"), - (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available - # Truecolour with alpha - (8, 6): ("RGBA", "RGBA"), - (16, 6): ("RGBA", "RGBA;16B"), -} - - -_simple_palette = re.compile(b"^\xff*\x00\xff*$") - -MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK -""" -Maximum decompressed size for a iTXt or zTXt chunk. -Eliminates decompression bombs where compressed chunks can expand 1000x. -See :ref:`Text in PNG File Format`. -""" -MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK -""" -Set the maximum total text chunk size. -See :ref:`Text in PNG File Format`. -""" - - -# APNG frame disposal modes -class Disposal(IntEnum): - OP_NONE = 0 - """ - No disposal is done on this frame before rendering the next frame. - See :ref:`Saving APNG sequences`. - """ - OP_BACKGROUND = 1 - """ - This frame’s modified region is cleared to fully transparent black before rendering - the next frame. - See :ref:`Saving APNG sequences`. - """ - OP_PREVIOUS = 2 - """ - This frame’s modified region is reverted to the previous frame’s contents before - rendering the next frame. - See :ref:`Saving APNG sequences`. - """ - - -# APNG frame blend modes -class Blend(IntEnum): - OP_SOURCE = 0 - """ - All color components of this frame, including alpha, overwrite the previous output - image contents. - See :ref:`Saving APNG sequences`. 
- """ - OP_OVER = 1 - """ - This frame should be alpha composited with the previous output image contents. - See :ref:`Saving APNG sequences`. - """ - - -def _safe_zlib_decompress(s): - dobj = zlib.decompressobj() - plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) - if dobj.unconsumed_tail: - msg = "Decompressed Data Too Large" - raise ValueError(msg) - return plaintext - - -def _crc32(data, seed=0): - return zlib.crc32(data, seed) & 0xFFFFFFFF - - -# -------------------------------------------------------------------- -# Support classes. Suitable for PNG and related formats like MNG etc. - - -class ChunkStream: - def __init__(self, fp: IO[bytes]) -> None: - self.fp: IO[bytes] | None = fp - self.queue: list[tuple[bytes, int, int]] | None = [] - - def read(self) -> tuple[bytes, int, int]: - """Fetch a new chunk. Returns header information.""" - cid = None - - assert self.fp is not None - if self.queue: - cid, pos, length = self.queue.pop() - self.fp.seek(pos) - else: - s = self.fp.read(8) - cid = s[4:] - pos = self.fp.tell() - length = i32(s) - - if not is_cid(cid): - if not ImageFile.LOAD_TRUNCATED_IMAGES: - msg = f"broken PNG file (chunk {repr(cid)})" - raise SyntaxError(msg) - - return cid, pos, length - - def __enter__(self) -> ChunkStream: - return self - - def __exit__(self, *args: object) -> None: - self.close() - - def close(self) -> None: - self.queue = self.fp = None - - def push(self, cid: bytes, pos: int, length: int) -> None: - assert self.queue is not None - self.queue.append((cid, pos, length)) - - def call(self, cid, pos, length): - """Call the appropriate chunk handler""" - - logger.debug("STREAM %r %s %s", cid, pos, length) - return getattr(self, f"chunk_{cid.decode('ascii')}")(pos, length) - - def crc(self, cid: bytes, data: bytes) -> None: - """Read and verify checksum""" - - # Skip CRC checks for ancillary chunks if allowed to load truncated - # images - # 5th byte of first char is 1 [specs, section 5.4] - if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1): - self.crc_skip(cid, data) - return - - assert self.fp is not None - try: - crc1 = _crc32(data, _crc32(cid)) - crc2 = i32(self.fp.read(4)) - if crc1 != crc2: - msg = f"broken PNG file (bad header checksum in {repr(cid)})" - raise SyntaxError(msg) - except struct.error as e: - msg = f"broken PNG file (incomplete checksum in {repr(cid)})" - raise SyntaxError(msg) from e - - def crc_skip(self, cid: bytes, data: bytes) -> None: - """Read checksum""" - - assert self.fp is not None - self.fp.read(4) - - def verify(self, endchunk: bytes = b"IEND") -> list[bytes]: - # Simple approach; just calculate checksum for all remaining - # blocks. Must be called directly after open. 
- - cids = [] - - while True: - try: - cid, pos, length = self.read() - except struct.error as e: - msg = "truncated PNG file" - raise OSError(msg) from e - - if cid == endchunk: - break - self.crc(cid, ImageFile._safe_read(self.fp, length)) - cids.append(cid) - - return cids - - -class iTXt(str): - """ - Subclass of string to allow iTXt chunks to look like strings while - keeping their extra information - - """ - - lang: str | bytes | None - tkey: str | bytes | None - - @staticmethod - def __new__(cls, text, lang=None, tkey=None): - """ - :param cls: the class to use when creating the instance - :param text: value for this key - :param lang: language code - :param tkey: UTF-8 version of the key name - """ - - self = str.__new__(cls, text) - self.lang = lang - self.tkey = tkey - return self - - -class PngInfo: - """ - PNG chunk container (for use with save(pnginfo=)) - - """ - - def __init__(self) -> None: - self.chunks: list[tuple[bytes, bytes, bool]] = [] - - def add(self, cid: bytes, data: bytes, after_idat: bool = False) -> None: - """Appends an arbitrary chunk. Use with caution. - - :param cid: a byte string, 4 bytes long. - :param data: a byte string of the encoded data - :param after_idat: for use with private chunks. Whether the chunk - should be written after IDAT - - """ - - self.chunks.append((cid, data, after_idat)) - - def add_itxt( - self, - key: str | bytes, - value: str | bytes, - lang: str | bytes = "", - tkey: str | bytes = "", - zip: bool = False, - ) -> None: - """Appends an iTXt chunk. - - :param key: latin-1 encodable text key name - :param value: value for this key - :param lang: language code - :param tkey: UTF-8 version of the key name - :param zip: compression flag - - """ - - if not isinstance(key, bytes): - key = key.encode("latin-1", "strict") - if not isinstance(value, bytes): - value = value.encode("utf-8", "strict") - if not isinstance(lang, bytes): - lang = lang.encode("utf-8", "strict") - if not isinstance(tkey, bytes): - tkey = tkey.encode("utf-8", "strict") - - if zip: - self.add( - b"iTXt", - key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value), - ) - else: - self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value) - - def add_text( - self, key: str | bytes, value: str | bytes | iTXt, zip: bool = False - ) -> None: - """Appends a text chunk. 
- - :param key: latin-1 encodable text key name - :param value: value for this key, text or an - :py:class:`PIL.PngImagePlugin.iTXt` instance - :param zip: compression flag - - """ - if isinstance(value, iTXt): - return self.add_itxt( - key, - value, - value.lang if value.lang is not None else b"", - value.tkey if value.tkey is not None else b"", - zip=zip, - ) - - # The tEXt chunk stores latin-1 text - if not isinstance(value, bytes): - try: - value = value.encode("latin-1", "strict") - except UnicodeError: - return self.add_itxt(key, value, zip=zip) - - if not isinstance(key, bytes): - key = key.encode("latin-1", "strict") - - if zip: - self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) - else: - self.add(b"tEXt", key + b"\0" + value) - - -# -------------------------------------------------------------------- -# PNG image stream (IHDR/IEND) - - -class PngStream(ChunkStream): - def __init__(self, fp): - super().__init__(fp) - - # local copies of Image attributes - self.im_info = {} - self.im_text = {} - self.im_size = (0, 0) - self.im_mode = None - self.im_tile = None - self.im_palette = None - self.im_custom_mimetype = None - self.im_n_frames = None - self._seq_num = None - self.rewind_state = None - - self.text_memory = 0 - - def check_text_memory(self, chunklen: int) -> None: - self.text_memory += chunklen - if self.text_memory > MAX_TEXT_MEMORY: - msg = ( - "Too much memory used in text chunks: " - f"{self.text_memory}>MAX_TEXT_MEMORY" - ) - raise ValueError(msg) - - def save_rewind(self) -> None: - self.rewind_state = { - "info": self.im_info.copy(), - "tile": self.im_tile, - "seq_num": self._seq_num, - } - - def rewind(self) -> None: - self.im_info = self.rewind_state["info"].copy() - self.im_tile = self.rewind_state["tile"] - self._seq_num = self.rewind_state["seq_num"] - - def chunk_iCCP(self, pos: int, length: int) -> bytes: - # ICC profile - s = ImageFile._safe_read(self.fp, length) - # according to PNG spec, the iCCP chunk contains: - # Profile name 1-79 bytes (character string) - # Null separator 1 byte (null character) - # Compression method 1 byte (0) - # Compressed profile n bytes (zlib with deflate compression) - i = s.find(b"\0") - logger.debug("iCCP profile name %r", s[:i]) - comp_method = s[i + 1] - logger.debug("Compression method %s", comp_method) - if comp_method != 0: - msg = f"Unknown compression method {comp_method} in iCCP chunk" - raise SyntaxError(msg) - try: - icc_profile = _safe_zlib_decompress(s[i + 2 :]) - except ValueError: - if ImageFile.LOAD_TRUNCATED_IMAGES: - icc_profile = None - else: - raise - except zlib.error: - icc_profile = None # FIXME - self.im_info["icc_profile"] = icc_profile - return s - - def chunk_IHDR(self, pos: int, length: int) -> bytes: - # image header - s = ImageFile._safe_read(self.fp, length) - if length < 13: - if ImageFile.LOAD_TRUNCATED_IMAGES: - return s - msg = "Truncated IHDR chunk" - raise ValueError(msg) - self.im_size = i32(s, 0), i32(s, 4) - try: - self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])] - except Exception: - pass - if s[12]: - self.im_info["interlace"] = 1 - if s[11]: - msg = "unknown filter category" - raise SyntaxError(msg) - return s - - def chunk_IDAT(self, pos: int, length: int) -> NoReturn: - # image data - if "bbox" in self.im_info: - tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)] - else: - if self.im_n_frames is not None: - self.im_info["default_image"] = True - tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)] - self.im_tile = tile - self.im_idat = length - msg = 
"image data found" - raise EOFError(msg) - - def chunk_IEND(self, pos: int, length: int) -> NoReturn: - msg = "end of PNG image" - raise EOFError(msg) - - def chunk_PLTE(self, pos: int, length: int) -> bytes: - # palette - s = ImageFile._safe_read(self.fp, length) - if self.im_mode == "P": - self.im_palette = "RGB", s - return s - - def chunk_tRNS(self, pos: int, length: int) -> bytes: - # transparency - s = ImageFile._safe_read(self.fp, length) - if self.im_mode == "P": - if _simple_palette.match(s): - # tRNS contains only one full-transparent entry, - # other entries are full opaque - i = s.find(b"\0") - if i >= 0: - self.im_info["transparency"] = i - else: - # otherwise, we have a byte string with one alpha value - # for each palette entry - self.im_info["transparency"] = s - elif self.im_mode in ("1", "L", "I;16"): - self.im_info["transparency"] = i16(s) - elif self.im_mode == "RGB": - self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4) - return s - - def chunk_gAMA(self, pos: int, length: int) -> bytes: - # gamma setting - s = ImageFile._safe_read(self.fp, length) - self.im_info["gamma"] = i32(s) / 100000.0 - return s - - def chunk_cHRM(self, pos: int, length: int) -> bytes: - # chromaticity, 8 unsigned ints, actual value is scaled by 100,000 - # WP x,y, Red x,y, Green x,y Blue x,y - - s = ImageFile._safe_read(self.fp, length) - raw_vals = struct.unpack(">%dI" % (len(s) // 4), s) - self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals) - return s - - def chunk_sRGB(self, pos: int, length: int) -> bytes: - # srgb rendering intent, 1 byte - # 0 perceptual - # 1 relative colorimetric - # 2 saturation - # 3 absolute colorimetric - - s = ImageFile._safe_read(self.fp, length) - if length < 1: - if ImageFile.LOAD_TRUNCATED_IMAGES: - return s - msg = "Truncated sRGB chunk" - raise ValueError(msg) - self.im_info["srgb"] = s[0] - return s - - def chunk_pHYs(self, pos: int, length: int) -> bytes: - # pixels per unit - s = ImageFile._safe_read(self.fp, length) - if length < 9: - if ImageFile.LOAD_TRUNCATED_IMAGES: - return s - msg = "Truncated pHYs chunk" - raise ValueError(msg) - px, py = i32(s, 0), i32(s, 4) - unit = s[8] - if unit == 1: # meter - dpi = px * 0.0254, py * 0.0254 - self.im_info["dpi"] = dpi - elif unit == 0: - self.im_info["aspect"] = px, py - return s - - def chunk_tEXt(self, pos: int, length: int) -> bytes: - # text - s = ImageFile._safe_read(self.fp, length) - try: - k, v = s.split(b"\0", 1) - except ValueError: - # fallback for broken tEXt tags - k = s - v = b"" - if k: - k = k.decode("latin-1", "strict") - v_str = v.decode("latin-1", "replace") - - self.im_info[k] = v if k == "exif" else v_str - self.im_text[k] = v_str - self.check_text_memory(len(v_str)) - - return s - - def chunk_zTXt(self, pos: int, length: int) -> bytes: - # compressed text - s = ImageFile._safe_read(self.fp, length) - try: - k, v = s.split(b"\0", 1) - except ValueError: - k = s - v = b"" - if v: - comp_method = v[0] - else: - comp_method = 0 - if comp_method != 0: - msg = f"Unknown compression method {comp_method} in zTXt chunk" - raise SyntaxError(msg) - try: - v = _safe_zlib_decompress(v[1:]) - except ValueError: - if ImageFile.LOAD_TRUNCATED_IMAGES: - v = b"" - else: - raise - except zlib.error: - v = b"" - - if k: - k = k.decode("latin-1", "strict") - v = v.decode("latin-1", "replace") - - self.im_info[k] = self.im_text[k] = v - self.check_text_memory(len(v)) - - return s - - def chunk_iTXt(self, pos: int, length: int) -> bytes: - # international text - r = s = 
ImageFile._safe_read(self.fp, length) - try: - k, r = r.split(b"\0", 1) - except ValueError: - return s - if len(r) < 2: - return s - cf, cm, r = r[0], r[1], r[2:] - try: - lang, tk, v = r.split(b"\0", 2) - except ValueError: - return s - if cf != 0: - if cm == 0: - try: - v = _safe_zlib_decompress(v) - except ValueError: - if ImageFile.LOAD_TRUNCATED_IMAGES: - return s - else: - raise - except zlib.error: - return s - else: - return s - if k == b"XML:com.adobe.xmp": - self.im_info["xmp"] = v - try: - k = k.decode("latin-1", "strict") - lang = lang.decode("utf-8", "strict") - tk = tk.decode("utf-8", "strict") - v = v.decode("utf-8", "strict") - except UnicodeError: - return s - - self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) - self.check_text_memory(len(v)) - - return s - - def chunk_eXIf(self, pos: int, length: int) -> bytes: - s = ImageFile._safe_read(self.fp, length) - self.im_info["exif"] = b"Exif\x00\x00" + s - return s - - # APNG chunks - def chunk_acTL(self, pos: int, length: int) -> bytes: - s = ImageFile._safe_read(self.fp, length) - if length < 8: - if ImageFile.LOAD_TRUNCATED_IMAGES: - return s - msg = "APNG contains truncated acTL chunk" - raise ValueError(msg) - if self.im_n_frames is not None: - self.im_n_frames = None - warnings.warn("Invalid APNG, will use default PNG image if possible") - return s - n_frames = i32(s) - if n_frames == 0 or n_frames > 0x80000000: - warnings.warn("Invalid APNG, will use default PNG image if possible") - return s - self.im_n_frames = n_frames - self.im_info["loop"] = i32(s, 4) - self.im_custom_mimetype = "image/apng" - return s - - def chunk_fcTL(self, pos: int, length: int) -> bytes: - s = ImageFile._safe_read(self.fp, length) - if length < 26: - if ImageFile.LOAD_TRUNCATED_IMAGES: - return s - msg = "APNG contains truncated fcTL chunk" - raise ValueError(msg) - seq = i32(s) - if (self._seq_num is None and seq != 0) or ( - self._seq_num is not None and self._seq_num != seq - 1 - ): - msg = "APNG contains frame sequence errors" - raise SyntaxError(msg) - self._seq_num = seq - width, height = i32(s, 4), i32(s, 8) - px, py = i32(s, 12), i32(s, 16) - im_w, im_h = self.im_size - if px + width > im_w or py + height > im_h: - msg = "APNG contains invalid frames" - raise SyntaxError(msg) - self.im_info["bbox"] = (px, py, px + width, py + height) - delay_num, delay_den = i16(s, 20), i16(s, 22) - if delay_den == 0: - delay_den = 100 - self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000 - self.im_info["disposal"] = s[24] - self.im_info["blend"] = s[25] - return s - - def chunk_fdAT(self, pos: int, length: int) -> bytes: - if length < 4: - if ImageFile.LOAD_TRUNCATED_IMAGES: - s = ImageFile._safe_read(self.fp, length) - return s - msg = "APNG contains truncated fDAT chunk" - raise ValueError(msg) - s = ImageFile._safe_read(self.fp, 4) - seq = i32(s) - if self._seq_num != seq - 1: - msg = "APNG contains frame sequence errors" - raise SyntaxError(msg) - self._seq_num = seq - return self.chunk_IDAT(pos + 4, length - 4) - - -# -------------------------------------------------------------------- -# PNG reader - - -def _accept(prefix: bytes) -> bool: - return prefix[:8] == _MAGIC - - -## -# Image plugin for PNG images. 
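Before the PngImageFile reader below: its open path amounts to checking the 8-byte signature that _accept() compares against and then walking chunks, dispatching each to a chunk_* handler on the PngStream. A throwaway sketch of just the signature-and-IHDR step (illustrative, not Pillow API) shows where the width/height that chunk_IHDR records come from.

import struct

PNG_MAGIC = b"\x89PNG\r\n\x1a\n"  # same bytes as _MAGIC above, written in hex notation

def sniff_png_size(path):
    """Return (width, height) from the IHDR chunk of a PNG file (illustrative sketch)."""
    with open(path, "rb") as fp:
        if fp.read(8) != PNG_MAGIC:
            raise SyntaxError("not a PNG file")
        length, cid = struct.unpack(">I4s", fp.read(8))
        if cid != b"IHDR":
            raise SyntaxError("IHDR must be the first chunk")
        ihdr = fp.read(length)
        return struct.unpack(">II", ihdr[:8])  # width, height as big-endian uint32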
- - -class PngImageFile(ImageFile.ImageFile): - format = "PNG" - format_description = "Portable network graphics" - - def _open(self) -> None: - if not _accept(self.fp.read(8)): - msg = "not a PNG file" - raise SyntaxError(msg) - self._fp = self.fp - self.__frame = 0 - - # - # Parse headers up to the first IDAT or fDAT chunk - - self.private_chunks: list[tuple[bytes, bytes] | tuple[bytes, bytes, bool]] = [] - self.png: PngStream | None = PngStream(self.fp) - - while True: - # - # get next chunk - - cid, pos, length = self.png.read() - - try: - s = self.png.call(cid, pos, length) - except EOFError: - break - except AttributeError: - logger.debug("%r %s %s (unknown)", cid, pos, length) - s = ImageFile._safe_read(self.fp, length) - if cid[1:2].islower(): - self.private_chunks.append((cid, s)) - - self.png.crc(cid, s) - - # - # Copy relevant attributes from the PngStream. An alternative - # would be to let the PngStream class modify these attributes - # directly, but that introduces circular references which are - # difficult to break if things go wrong in the decoder... - # (believe me, I've tried ;-) - - self._mode = self.png.im_mode - self._size = self.png.im_size - self.info = self.png.im_info - self._text = None - self.tile = self.png.im_tile - self.custom_mimetype = self.png.im_custom_mimetype - self.n_frames = self.png.im_n_frames or 1 - self.default_image = self.info.get("default_image", False) - - if self.png.im_palette: - rawmode, data = self.png.im_palette - self.palette = ImagePalette.raw(rawmode, data) - - if cid == b"fdAT": - self.__prepare_idat = length - 4 - else: - self.__prepare_idat = length # used by load_prepare() - - if self.png.im_n_frames is not None: - self._close_exclusive_fp_after_loading = False - self.png.save_rewind() - self.__rewind_idat = self.__prepare_idat - self.__rewind = self._fp.tell() - if self.default_image: - # IDAT chunk contains default image and not first animation frame - self.n_frames += 1 - self._seek(0) - self.is_animated = self.n_frames > 1 - - @property - def text(self): - # experimental - if self._text is None: - # iTxt, tEXt and zTXt chunks may appear at the end of the file - # So load the file to ensure that they are read - if self.is_animated: - frame = self.__frame - # for APNG, seek to the final frame before loading - self.seek(self.n_frames - 1) - self.load() - if self.is_animated: - self.seek(frame) - return self._text - - def verify(self) -> None: - """Verify PNG file""" - - if self.fp is None: - msg = "verify must be called directly after open" - raise RuntimeError(msg) - - # back up to beginning of IDAT block - self.fp.seek(self.tile[0][2] - 8) - - assert self.png is not None - self.png.verify() - self.png.close() - - if self._exclusive_fp: - self.fp.close() - self.fp = None - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - if frame < self.__frame: - self._seek(0, True) - - last_frame = self.__frame - for f in range(self.__frame + 1, frame + 1): - try: - self._seek(f) - except EOFError as e: - self.seek(last_frame) - msg = "no more images in APNG file" - raise EOFError(msg) from e - - def _seek(self, frame: int, rewind: bool = False) -> None: - assert self.png is not None - - self.dispose: _imaging.ImagingCore | None - if frame == 0: - if rewind: - self._fp.seek(self.__rewind) - self.png.rewind() - self.__prepare_idat = self.__rewind_idat - self.im = None - if self.pyaccess: - self.pyaccess = None - self.info = self.png.im_info - self.tile = self.png.im_tile - self.fp = self._fp - self._prev_im = 
None - self.dispose = None - self.default_image = self.info.get("default_image", False) - self.dispose_op = self.info.get("disposal") - self.blend_op = self.info.get("blend") - self.dispose_extent = self.info.get("bbox") - self.__frame = 0 - else: - if frame != self.__frame + 1: - msg = f"cannot seek to frame {frame}" - raise ValueError(msg) - - # ensure previous frame was loaded - self.load() - - if self.dispose: - self.im.paste(self.dispose, self.dispose_extent) - self._prev_im = self.im.copy() - - self.fp = self._fp - - # advance to the next frame - if self.__prepare_idat: - ImageFile._safe_read(self.fp, self.__prepare_idat) - self.__prepare_idat = 0 - frame_start = False - while True: - self.fp.read(4) # CRC - - try: - cid, pos, length = self.png.read() - except (struct.error, SyntaxError): - break - - if cid == b"IEND": - msg = "No more images in APNG file" - raise EOFError(msg) - if cid == b"fcTL": - if frame_start: - # there must be at least one fdAT chunk between fcTL chunks - msg = "APNG missing frame data" - raise SyntaxError(msg) - frame_start = True - - try: - self.png.call(cid, pos, length) - except UnicodeDecodeError: - break - except EOFError: - if cid == b"fdAT": - length -= 4 - if frame_start: - self.__prepare_idat = length - break - ImageFile._safe_read(self.fp, length) - except AttributeError: - logger.debug("%r %s %s (unknown)", cid, pos, length) - ImageFile._safe_read(self.fp, length) - - self.__frame = frame - self.tile = self.png.im_tile - self.dispose_op = self.info.get("disposal") - self.blend_op = self.info.get("blend") - self.dispose_extent = self.info.get("bbox") - - if not self.tile: - msg = "image not found in APNG frame" - raise EOFError(msg) - - # setup frame disposal (actual disposal done when needed in the next _seek()) - if self._prev_im is None and self.dispose_op == Disposal.OP_PREVIOUS: - self.dispose_op = Disposal.OP_BACKGROUND - - self.dispose = None - if self.dispose_op == Disposal.OP_PREVIOUS: - if self._prev_im: - self.dispose = self._prev_im.copy() - self.dispose = self._crop(self.dispose, self.dispose_extent) - elif self.dispose_op == Disposal.OP_BACKGROUND: - self.dispose = Image.core.fill(self.mode, self.size) - self.dispose = self._crop(self.dispose, self.dispose_extent) - - def tell(self) -> int: - return self.__frame - - def load_prepare(self) -> None: - """internal: prepare to read PNG file""" - - if self.info.get("interlace"): - self.decoderconfig = self.decoderconfig + (1,) - - self.__idat = self.__prepare_idat # used by load_read() - ImageFile.ImageFile.load_prepare(self) - - def load_read(self, read_bytes: int) -> bytes: - """internal: read more image data""" - - assert self.png is not None - while self.__idat == 0: - # end of chunk, skip forward to next one - - self.fp.read(4) # CRC - - cid, pos, length = self.png.read() - - if cid not in [b"IDAT", b"DDAT", b"fdAT"]: - self.png.push(cid, pos, length) - return b"" - - if cid == b"fdAT": - try: - self.png.call(cid, pos, length) - except EOFError: - pass - self.__idat = length - 4 # sequence_num has already been read - else: - self.__idat = length # empty chunks are allowed - - # read more data from this chunk - if read_bytes <= 0: - read_bytes = self.__idat - else: - read_bytes = min(read_bytes, self.__idat) - - self.__idat = self.__idat - read_bytes - - return self.fp.read(read_bytes) - - def load_end(self) -> None: - """internal: finished reading image data""" - assert self.png is not None - if self.__idat != 0: - self.fp.read(self.__idat) - while True: - self.fp.read(4) # CRC - - 
try: - cid, pos, length = self.png.read() - except (struct.error, SyntaxError): - break - - if cid == b"IEND": - break - elif cid == b"fcTL" and self.is_animated: - # start of the next frame, stop reading - self.__prepare_idat = 0 - self.png.push(cid, pos, length) - break - - try: - self.png.call(cid, pos, length) - except UnicodeDecodeError: - break - except EOFError: - if cid == b"fdAT": - length -= 4 - try: - ImageFile._safe_read(self.fp, length) - except OSError as e: - if ImageFile.LOAD_TRUNCATED_IMAGES: - break - else: - raise e - except AttributeError: - logger.debug("%r %s %s (unknown)", cid, pos, length) - s = ImageFile._safe_read(self.fp, length) - if cid[1:2].islower(): - self.private_chunks.append((cid, s, True)) - self._text = self.png.im_text - if not self.is_animated: - self.png.close() - self.png = None - else: - if self._prev_im and self.blend_op == Blend.OP_OVER: - updated = self._crop(self.im, self.dispose_extent) - if self.im.mode == "RGB" and "transparency" in self.info: - mask = updated.convert_transparent( - "RGBA", self.info["transparency"] - ) - else: - mask = updated.convert("RGBA") - self._prev_im.paste(updated, self.dispose_extent, mask) - self.im = self._prev_im - if self.pyaccess: - self.pyaccess = None - - def _getexif(self) -> dict[str, Any] | None: - if "exif" not in self.info: - self.load() - if "exif" not in self.info and "Raw profile type exif" not in self.info: - return None - return self.getexif()._get_merged_dict() - - def getexif(self) -> Image.Exif: - if "exif" not in self.info: - self.load() - - return super().getexif() - - -# -------------------------------------------------------------------- -# PNG writer - -_OUTMODES = { - # supported PIL modes, and corresponding rawmode, bit depth and color type - "1": ("1", b"\x01", b"\x00"), - "L;1": ("L;1", b"\x01", b"\x00"), - "L;2": ("L;2", b"\x02", b"\x00"), - "L;4": ("L;4", b"\x04", b"\x00"), - "L": ("L", b"\x08", b"\x00"), - "LA": ("LA", b"\x08", b"\x04"), - "I": ("I;16B", b"\x10", b"\x00"), - "I;16": ("I;16B", b"\x10", b"\x00"), - "I;16B": ("I;16B", b"\x10", b"\x00"), - "P;1": ("P;1", b"\x01", b"\x03"), - "P;2": ("P;2", b"\x02", b"\x03"), - "P;4": ("P;4", b"\x04", b"\x03"), - "P": ("P", b"\x08", b"\x03"), - "RGB": ("RGB", b"\x08", b"\x02"), - "RGBA": ("RGBA", b"\x08", b"\x06"), -} - - -def putchunk(fp, cid, *data): - """Write a PNG chunk (including CRC field)""" - - data = b"".join(data) - - fp.write(o32(len(data)) + cid) - fp.write(data) - crc = _crc32(data, _crc32(cid)) - fp.write(o32(crc)) - - -class _idat: - # wrap output from the encoder in IDAT chunks - - def __init__(self, fp, chunk): - self.fp = fp - self.chunk = chunk - - def write(self, data: bytes) -> None: - self.chunk(self.fp, b"IDAT", data) - - -class _fdat: - # wrap encoder output in fdAT chunks - - def __init__(self, fp, chunk, seq_num): - self.fp = fp - self.chunk = chunk - self.seq_num = seq_num - - def write(self, data: bytes) -> None: - self.chunk(self.fp, b"fdAT", o32(self.seq_num), data) - self.seq_num += 1 - - -def _write_multiple_frames(im, fp, chunk, mode, rawmode, default_image, append_images): - duration = im.encoderinfo.get("duration") - loop = im.encoderinfo.get("loop", im.info.get("loop", 0)) - disposal = im.encoderinfo.get("disposal", im.info.get("disposal", Disposal.OP_NONE)) - blend = im.encoderinfo.get("blend", im.info.get("blend", Blend.OP_SOURCE)) - - if default_image: - chain = itertools.chain(append_images) - else: - chain = itertools.chain([im], append_images) - - im_frames = [] - frame_count = 0 - for im_seq in 
chain: - for im_frame in ImageSequence.Iterator(im_seq): - if im_frame.mode == mode: - im_frame = im_frame.copy() - else: - im_frame = im_frame.convert(mode) - encoderinfo = im.encoderinfo.copy() - if isinstance(duration, (list, tuple)): - encoderinfo["duration"] = duration[frame_count] - elif duration is None and "duration" in im_frame.info: - encoderinfo["duration"] = im_frame.info["duration"] - if isinstance(disposal, (list, tuple)): - encoderinfo["disposal"] = disposal[frame_count] - if isinstance(blend, (list, tuple)): - encoderinfo["blend"] = blend[frame_count] - frame_count += 1 - - if im_frames: - previous = im_frames[-1] - prev_disposal = previous["encoderinfo"].get("disposal") - prev_blend = previous["encoderinfo"].get("blend") - if prev_disposal == Disposal.OP_PREVIOUS and len(im_frames) < 2: - prev_disposal = Disposal.OP_BACKGROUND - - if prev_disposal == Disposal.OP_BACKGROUND: - base_im = previous["im"].copy() - dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0)) - bbox = previous["bbox"] - if bbox: - dispose = dispose.crop(bbox) - else: - bbox = (0, 0) + im.size - base_im.paste(dispose, bbox) - elif prev_disposal == Disposal.OP_PREVIOUS: - base_im = im_frames[-2]["im"] - else: - base_im = previous["im"] - delta = ImageChops.subtract_modulo( - im_frame.convert("RGBA"), base_im.convert("RGBA") - ) - bbox = delta.getbbox(alpha_only=False) - if ( - not bbox - and prev_disposal == encoderinfo.get("disposal") - and prev_blend == encoderinfo.get("blend") - and "duration" in encoderinfo - ): - previous["encoderinfo"]["duration"] += encoderinfo["duration"] - continue - else: - bbox = None - im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) - - if len(im_frames) == 1 and not default_image: - return im_frames[0]["im"] - - # animation control - chunk( - fp, - b"acTL", - o32(len(im_frames)), # 0: num_frames - o32(loop), # 4: num_plays - ) - - # default image IDAT (if it exists) - if default_image: - if im.mode != mode: - im = im.convert(mode) - ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) - - seq_num = 0 - for frame, frame_data in enumerate(im_frames): - im_frame = frame_data["im"] - if not frame_data["bbox"]: - bbox = (0, 0) + im_frame.size - else: - bbox = frame_data["bbox"] - im_frame = im_frame.crop(bbox) - size = im_frame.size - encoderinfo = frame_data["encoderinfo"] - frame_duration = int(round(encoderinfo.get("duration", 0))) - frame_disposal = encoderinfo.get("disposal", disposal) - frame_blend = encoderinfo.get("blend", blend) - # frame control - chunk( - fp, - b"fcTL", - o32(seq_num), # sequence_number - o32(size[0]), # width - o32(size[1]), # height - o32(bbox[0]), # x_offset - o32(bbox[1]), # y_offset - o16(frame_duration), # delay_numerator - o16(1000), # delay_denominator - o8(frame_disposal), # dispose_op - o8(frame_blend), # blend_op - ) - seq_num += 1 - # frame data - if frame == 0 and not default_image: - # first frame must be in IDAT chunks for backwards compatibility - ImageFile._save( - im_frame, - _idat(fp, chunk), - [("zip", (0, 0) + im_frame.size, 0, rawmode)], - ) - else: - fdat_chunks = _fdat(fp, chunk, seq_num) - ImageFile._save( - im_frame, - fdat_chunks, - [("zip", (0, 0) + im_frame.size, 0, rawmode)], - ) - seq_num = fdat_chunks.seq_num - - -def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - _save(im, fp, filename, save_all=True) - - -def _save(im, fp, filename, chunk=putchunk, save_all=False): - # save an image to disk (called by the save method) - - if save_all: 
- default_image = im.encoderinfo.get( - "default_image", im.info.get("default_image") - ) - modes = set() - sizes = set() - append_images = im.encoderinfo.get("append_images", []) - for im_seq in itertools.chain([im], append_images): - for im_frame in ImageSequence.Iterator(im_seq): - modes.add(im_frame.mode) - sizes.add(im_frame.size) - for mode in ("RGBA", "RGB", "P"): - if mode in modes: - break - else: - mode = modes.pop() - size = tuple(max(frame_size[i] for frame_size in sizes) for i in range(2)) - else: - size = im.size - mode = im.mode - - outmode = mode - if mode == "P": - # - # attempt to minimize storage requirements for palette images - if "bits" in im.encoderinfo: - # number of bits specified by user - colors = min(1 << im.encoderinfo["bits"], 256) - else: - # check palette contents - if im.palette: - colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 1) - else: - colors = 256 - - if colors <= 16: - if colors <= 2: - bits = 1 - elif colors <= 4: - bits = 2 - else: - bits = 4 - outmode += f";{bits}" - - # encoder options - im.encoderconfig = ( - im.encoderinfo.get("optimize", False), - im.encoderinfo.get("compress_level", -1), - im.encoderinfo.get("compress_type", -1), - im.encoderinfo.get("dictionary", b""), - ) - - # get the corresponding PNG mode - try: - rawmode, bit_depth, color_type = _OUTMODES[outmode] - except KeyError as e: - msg = f"cannot write mode {mode} as PNG" - raise OSError(msg) from e - - # - # write minimal PNG file - - fp.write(_MAGIC) - - chunk( - fp, - b"IHDR", - o32(size[0]), # 0: size - o32(size[1]), - bit_depth, - color_type, - b"\0", # 10: compression - b"\0", # 11: filter category - b"\0", # 12: interlace flag - ) - - chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"] - - icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) - if icc: - # ICC profile - # according to PNG spec, the iCCP chunk contains: - # Profile name 1-79 bytes (character string) - # Null separator 1 byte (null character) - # Compression method 1 byte (0) - # Compressed profile n bytes (zlib with deflate compression) - name = b"ICC Profile" - data = name + b"\0\0" + zlib.compress(icc) - chunk(fp, b"iCCP", data) - - # You must either have sRGB or iCCP. - # Disallow sRGB chunks when an iCCP-chunk has been emitted. 
- chunks.remove(b"sRGB") - - info = im.encoderinfo.get("pnginfo") - if info: - chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"] - for info_chunk in info.chunks: - cid, data = info_chunk[:2] - if cid in chunks: - chunks.remove(cid) - chunk(fp, cid, data) - elif cid in chunks_multiple_allowed: - chunk(fp, cid, data) - elif cid[1:2].islower(): - # Private chunk - after_idat = len(info_chunk) == 3 and info_chunk[2] - if not after_idat: - chunk(fp, cid, data) - - if im.mode == "P": - palette_byte_number = colors * 3 - palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] - while len(palette_bytes) < palette_byte_number: - palette_bytes += b"\0" - chunk(fp, b"PLTE", palette_bytes) - - transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None)) - - if transparency or transparency == 0: - if im.mode == "P": - # limit to actual palette size - alpha_bytes = colors - if isinstance(transparency, bytes): - chunk(fp, b"tRNS", transparency[:alpha_bytes]) - else: - transparency = max(0, min(255, transparency)) - alpha = b"\xFF" * transparency + b"\0" - chunk(fp, b"tRNS", alpha[:alpha_bytes]) - elif im.mode in ("1", "L", "I", "I;16"): - transparency = max(0, min(65535, transparency)) - chunk(fp, b"tRNS", o16(transparency)) - elif im.mode == "RGB": - red, green, blue = transparency - chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) - else: - if "transparency" in im.encoderinfo: - # don't bother with transparency if it's an RGBA - # and it's in the info dict. It's probably just stale. - msg = "cannot use transparency for this mode" - raise OSError(msg) - else: - if im.mode == "P" and im.im.getpalettemode() == "RGBA": - alpha = im.im.getpalette("RGBA", "A") - alpha_bytes = colors - chunk(fp, b"tRNS", alpha[:alpha_bytes]) - - dpi = im.encoderinfo.get("dpi") - if dpi: - chunk( - fp, - b"pHYs", - o32(int(dpi[0] / 0.0254 + 0.5)), - o32(int(dpi[1] / 0.0254 + 0.5)), - b"\x01", - ) - - if info: - chunks = [b"bKGD", b"hIST"] - for info_chunk in info.chunks: - cid, data = info_chunk[:2] - if cid in chunks: - chunks.remove(cid) - chunk(fp, cid, data) - - exif = im.encoderinfo.get("exif") - if exif: - if isinstance(exif, Image.Exif): - exif = exif.tobytes(8) - if exif.startswith(b"Exif\x00\x00"): - exif = exif[6:] - chunk(fp, b"eXIf", exif) - - if save_all: - im = _write_multiple_frames( - im, fp, chunk, mode, rawmode, default_image, append_images - ) - if im: - ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) - - if info: - for info_chunk in info.chunks: - cid, data = info_chunk[:2] - if cid[1:2].islower(): - # Private chunk - after_idat = len(info_chunk) == 3 and info_chunk[2] - if after_idat: - chunk(fp, cid, data) - - chunk(fp, b"IEND", b"") - - if hasattr(fp, "flush"): - fp.flush() - - -# -------------------------------------------------------------------- -# PNG chunk converter - - -def getchunks(im, **params): - """Return a list of PNG chunks representing this image.""" - - class collector: - data = [] - - def write(self, data: bytes) -> None: - pass - - def append(self, chunk: bytes) -> None: - self.data.append(chunk) - - def append(fp, cid, *data): - data = b"".join(data) - crc = o32(_crc32(data, _crc32(cid))) - fp.append((cid, data, crc)) - - fp = collector() - - try: - im.encoderinfo = params - _save(im, fp, None, append) - finally: - del im.encoderinfo - - return fp.data - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(PngImageFile.format, PngImageFile, _accept) 
-Image.register_save(PngImageFile.format, _save) -Image.register_save_all(PngImageFile.format, _save_all) - -Image.register_extensions(PngImageFile.format, [".png", ".apng"]) - -Image.register_mime(PngImageFile.format, "image/png") diff --git a/venv/Lib/site-packages/PIL/PpmImagePlugin.py b/venv/Lib/site-packages/PIL/PpmImagePlugin.py deleted file mode 100644 index 16c9ccb..0000000 --- a/venv/Lib/site-packages/PIL/PpmImagePlugin.py +++ /dev/null @@ -1,371 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# PPM support for PIL -# -# History: -# 96-03-24 fl Created -# 98-03-06 fl Write RGBA images (as RGB, that is) -# -# Copyright (c) Secret Labs AB 1997-98. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import math -from typing import IO - -from . import Image, ImageFile -from ._binary import i16be as i16 -from ._binary import o8 -from ._binary import o32le as o32 - -# -# -------------------------------------------------------------------- - -b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d" - -MODES = { - # standard - b"P1": "1", - b"P2": "L", - b"P3": "RGB", - b"P4": "1", - b"P5": "L", - b"P6": "RGB", - # extensions - b"P0CMYK": "CMYK", - b"Pf": "F", - # PIL extensions (for test purposes only) - b"PyP": "P", - b"PyRGBA": "RGBA", - b"PyCMYK": "CMYK", -} - - -def _accept(prefix: bytes) -> bool: - return prefix[0:1] == b"P" and prefix[1] in b"0123456fy" - - -## -# Image plugin for PBM, PGM, and PPM images. - - -class PpmImageFile(ImageFile.ImageFile): - format = "PPM" - format_description = "Pbmplus image" - - def _read_magic(self) -> bytes: - assert self.fp is not None - - magic = b"" - # read until whitespace or longest available magic number - for _ in range(6): - c = self.fp.read(1) - if not c or c in b_whitespace: - break - magic += c - return magic - - def _read_token(self) -> bytes: - assert self.fp is not None - - token = b"" - while len(token) <= 10: # read until next whitespace or limit of 10 characters - c = self.fp.read(1) - if not c: - break - elif c in b_whitespace: # token ended - if not token: - # skip whitespace at start - continue - break - elif c == b"#": - # ignores rest of the line; stops at CR, LF or EOF - while self.fp.read(1) not in b"\r\n": - pass - continue - token += c - if not token: - # Token was not even 1 byte - msg = "Reached EOF while reading header" - raise ValueError(msg) - elif len(token) > 10: - msg = f"Token too long in file header: {token.decode()}" - raise ValueError(msg) - return token - - def _open(self) -> None: - assert self.fp is not None - - magic_number = self._read_magic() - try: - mode = MODES[magic_number] - except KeyError: - msg = "not a PPM file" - raise SyntaxError(msg) - self._mode = mode - - if magic_number in (b"P1", b"P4"): - self.custom_mimetype = "image/x-portable-bitmap" - elif magic_number in (b"P2", b"P5"): - self.custom_mimetype = "image/x-portable-graymap" - elif magic_number in (b"P3", b"P6"): - self.custom_mimetype = "image/x-portable-pixmap" - - self._size = int(self._read_token()), int(self._read_token()) - - decoder_name = "raw" - if magic_number in (b"P1", b"P2", b"P3"): - decoder_name = "ppm_plain" - - args: str | tuple[str | int, ...] 
- if mode == "1": - args = "1;I" - elif mode == "F": - scale = float(self._read_token()) - if scale == 0.0 or not math.isfinite(scale): - msg = "scale must be finite and non-zero" - raise ValueError(msg) - self.info["scale"] = abs(scale) - - rawmode = "F;32F" if scale < 0 else "F;32BF" - args = (rawmode, 0, -1) - else: - maxval = int(self._read_token()) - if not 0 < maxval < 65536: - msg = "maxval must be greater than 0 and less than 65536" - raise ValueError(msg) - if maxval > 255 and mode == "L": - self._mode = "I" - - rawmode = mode - if decoder_name != "ppm_plain": - # If maxval matches a bit depth, use the raw decoder directly - if maxval == 65535 and mode == "L": - rawmode = "I;16B" - elif maxval != 255: - decoder_name = "ppm" - - args = rawmode if decoder_name == "raw" else (rawmode, maxval) - self.tile = [(decoder_name, (0, 0) + self.size, self.fp.tell(), args)] - - -# -# -------------------------------------------------------------------- - - -class PpmPlainDecoder(ImageFile.PyDecoder): - _pulls_fd = True - _comment_spans: bool - - def _read_block(self) -> bytes: - assert self.fd is not None - - return self.fd.read(ImageFile.SAFEBLOCK) - - def _find_comment_end(self, block: bytes, start: int = 0) -> int: - a = block.find(b"\n", start) - b = block.find(b"\r", start) - return min(a, b) if a * b > 0 else max(a, b) # lowest nonnegative index (or -1) - - def _ignore_comments(self, block: bytes) -> bytes: - if self._comment_spans: - # Finish current comment - while block: - comment_end = self._find_comment_end(block) - if comment_end != -1: - # Comment ends in this block - # Delete tail of comment - block = block[comment_end + 1 :] - break - else: - # Comment spans whole block - # So read the next block, looking for the end - block = self._read_block() - - # Search for any further comments - self._comment_spans = False - while True: - comment_start = block.find(b"#") - if comment_start == -1: - # No comment found - break - comment_end = self._find_comment_end(block, comment_start) - if comment_end != -1: - # Comment ends in this block - # Delete comment - block = block[:comment_start] + block[comment_end + 1 :] - else: - # Comment continues to next block(s) - block = block[:comment_start] - self._comment_spans = True - break - return block - - def _decode_bitonal(self) -> bytearray: - """ - This is a separate method because in the plain PBM format, all data tokens are - exactly one byte, so the inter-token whitespace is optional. 
- """ - data = bytearray() - total_bytes = self.state.xsize * self.state.ysize - - while len(data) != total_bytes: - block = self._read_block() # read next block - if not block: - # eof - break - - block = self._ignore_comments(block) - - tokens = b"".join(block.split()) - for token in tokens: - if token not in (48, 49): - msg = b"Invalid token for this mode: %s" % bytes([token]) - raise ValueError(msg) - data = (data + tokens)[:total_bytes] - invert = bytes.maketrans(b"01", b"\xFF\x00") - return data.translate(invert) - - def _decode_blocks(self, maxval: int) -> bytearray: - data = bytearray() - max_len = 10 - out_byte_count = 4 if self.mode == "I" else 1 - out_max = 65535 if self.mode == "I" else 255 - bands = Image.getmodebands(self.mode) - total_bytes = self.state.xsize * self.state.ysize * bands * out_byte_count - - half_token = b"" - while len(data) != total_bytes: - block = self._read_block() # read next block - if not block: - if half_token: - block = bytearray(b" ") # flush half_token - else: - # eof - break - - block = self._ignore_comments(block) - - if half_token: - block = half_token + block # stitch half_token to new block - half_token = b"" - - tokens = block.split() - - if block and not block[-1:].isspace(): # block might split token - half_token = tokens.pop() # save half token for later - if len(half_token) > max_len: # prevent buildup of half_token - msg = ( - b"Token too long found in data: %s" % half_token[: max_len + 1] - ) - raise ValueError(msg) - - for token in tokens: - if len(token) > max_len: - msg = b"Token too long found in data: %s" % token[: max_len + 1] - raise ValueError(msg) - value = int(token) - if value < 0: - msg_str = f"Channel value is negative: {value}" - raise ValueError(msg_str) - if value > maxval: - msg_str = f"Channel value too large for this mode: {value}" - raise ValueError(msg_str) - value = round(value / maxval * out_max) - data += o32(value) if self.mode == "I" else o8(value) - if len(data) == total_bytes: # finished! 
- break - return data - - def decode(self, buffer: bytes) -> tuple[int, int]: - self._comment_spans = False - if self.mode == "1": - data = self._decode_bitonal() - rawmode = "1;8" - else: - maxval = self.args[-1] - data = self._decode_blocks(maxval) - rawmode = "I;32" if self.mode == "I" else self.mode - self.set_as_raw(bytes(data), rawmode) - return -1, 0 - - -class PpmDecoder(ImageFile.PyDecoder): - _pulls_fd = True - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - - data = bytearray() - maxval = self.args[-1] - in_byte_count = 1 if maxval < 256 else 2 - out_byte_count = 4 if self.mode == "I" else 1 - out_max = 65535 if self.mode == "I" else 255 - bands = Image.getmodebands(self.mode) - dest_length = self.state.xsize * self.state.ysize * bands * out_byte_count - while len(data) < dest_length: - pixels = self.fd.read(in_byte_count * bands) - if len(pixels) < in_byte_count * bands: - # eof - break - for b in range(bands): - value = ( - pixels[b] if in_byte_count == 1 else i16(pixels, b * in_byte_count) - ) - value = min(out_max, round(value / maxval * out_max)) - data += o32(value) if self.mode == "I" else o8(value) - rawmode = "I;32" if self.mode == "I" else self.mode - self.set_as_raw(bytes(data), rawmode) - return -1, 0 - - -# -# -------------------------------------------------------------------- - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode == "1": - rawmode, head = "1;I", b"P4" - elif im.mode == "L": - rawmode, head = "L", b"P5" - elif im.mode == "I": - rawmode, head = "I;16B", b"P5" - elif im.mode in ("RGB", "RGBA"): - rawmode, head = "RGB", b"P6" - elif im.mode == "F": - rawmode, head = "F;32F", b"Pf" - else: - msg = f"cannot write mode {im.mode} as PPM" - raise OSError(msg) - fp.write(head + b"\n%d %d\n" % im.size) - if head == b"P6": - fp.write(b"255\n") - elif head == b"P5": - if rawmode == "L": - fp.write(b"255\n") - else: - fp.write(b"65535\n") - elif head == b"Pf": - fp.write(b"-1.0\n") - row_order = -1 if im.mode == "F" else 1 - ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, row_order))]) - - -# -# -------------------------------------------------------------------- - - -Image.register_open(PpmImageFile.format, PpmImageFile, _accept) -Image.register_save(PpmImageFile.format, _save) - -Image.register_decoder("ppm", PpmDecoder) -Image.register_decoder("ppm_plain", PpmPlainDecoder) - -Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm", ".pfm"]) - -Image.register_mime(PpmImageFile.format, "image/x-portable-anymap") diff --git a/venv/Lib/site-packages/PIL/PsdImagePlugin.py b/venv/Lib/site-packages/PIL/PsdImagePlugin.py deleted file mode 100644 index edf698b..0000000 --- a/venv/Lib/site-packages/PIL/PsdImagePlugin.py +++ /dev/null @@ -1,326 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# Adobe PSD 2.5/3.0 file handling -# -# History: -# 1995-09-01 fl Created -# 1997-01-03 fl Read most PSD images -# 1997-01-18 fl Fixed P and CMYK support -# 2001-10-21 fl Added seek/tell support (for layers) -# -# Copyright (c) 1997-2001 by Secret Labs AB. -# Copyright (c) 1995-2001 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -from functools import cached_property - -from . 
import Image, ImageFile, ImagePalette -from ._binary import i8 -from ._binary import i16be as i16 -from ._binary import i32be as i32 -from ._binary import si16be as si16 -from ._binary import si32be as si32 - -MODES = { - # (photoshop mode, bits) -> (pil mode, required channels) - (0, 1): ("1", 1), - (0, 8): ("L", 1), - (1, 8): ("L", 1), - (2, 8): ("P", 1), - (3, 8): ("RGB", 3), - (4, 8): ("CMYK", 4), - (7, 8): ("L", 1), # FIXME: multilayer - (8, 8): ("L", 1), # duotone - (9, 8): ("LAB", 3), -} - - -# --------------------------------------------------------------------. -# read PSD images - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"8BPS" - - -## -# Image plugin for Photoshop images. - - -class PsdImageFile(ImageFile.ImageFile): - format = "PSD" - format_description = "Adobe Photoshop" - _close_exclusive_fp_after_loading = False - - def _open(self) -> None: - read = self.fp.read - - # - # header - - s = read(26) - if not _accept(s) or i16(s, 4) != 1: - msg = "not a PSD file" - raise SyntaxError(msg) - - psd_bits = i16(s, 22) - psd_channels = i16(s, 12) - psd_mode = i16(s, 24) - - mode, channels = MODES[(psd_mode, psd_bits)] - - if channels > psd_channels: - msg = "not enough channels" - raise OSError(msg) - if mode == "RGB" and psd_channels == 4: - mode = "RGBA" - channels = 4 - - self._mode = mode - self._size = i32(s, 18), i32(s, 14) - - # - # color mode data - - size = i32(read(4)) - if size: - data = read(size) - if mode == "P" and size == 768: - self.palette = ImagePalette.raw("RGB;L", data) - - # - # image resources - - self.resources = [] - - size = i32(read(4)) - if size: - # load resources - end = self.fp.tell() + size - while self.fp.tell() < end: - read(4) # signature - id = i16(read(2)) - name = read(i8(read(1))) - if not (len(name) & 1): - read(1) # padding - data = read(i32(read(4))) - if len(data) & 1: - read(1) # padding - self.resources.append((id, name, data)) - if id == 1039: # ICC profile - self.info["icc_profile"] = data - - # - # layer and mask information - - self._layers_position = None - - size = i32(read(4)) - if size: - end = self.fp.tell() + size - size = i32(read(4)) - if size: - self._layers_position = self.fp.tell() - self._layers_size = size - self.fp.seek(end) - self._n_frames: int | None = None - - # - # image descriptor - - self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) - - # keep the file open - self._fp = self.fp - self.frame = 1 - self._min_frame = 1 - - @cached_property - def layers(self): - layers = [] - if self._layers_position is not None: - self._fp.seek(self._layers_position) - _layer_data = io.BytesIO(ImageFile._safe_read(self._fp, self._layers_size)) - layers = _layerinfo(_layer_data, self._layers_size) - self._n_frames = len(layers) - return layers - - @property - def n_frames(self) -> int: - if self._n_frames is None: - self._n_frames = len(self.layers) - return self._n_frames - - @property - def is_animated(self) -> bool: - return len(self.layers) > 1 - - def seek(self, layer: int) -> None: - if not self._seek_check(layer): - return - - # seek to given layer (1..max) - try: - _, mode, _, tile = self.layers[layer - 1] - self._mode = mode - self.tile = tile - self.frame = layer - self.fp = self._fp - except IndexError as e: - msg = "no such layer" - raise EOFError(msg) from e - - def tell(self) -> int: - # return layer number (0=image, 1..max=layers) - return self.frame - - -def _layerinfo(fp, ct_bytes): - # read layerinfo block - layers = [] - - def read(size): - return ImageFile._safe_read(fp, size) - 
- ct = si16(read(2)) - - # sanity check - if ct_bytes < (abs(ct) * 20): - msg = "Layer block too short for number of layers requested" - raise SyntaxError(msg) - - for _ in range(abs(ct)): - # bounding box - y0 = si32(read(4)) - x0 = si32(read(4)) - y1 = si32(read(4)) - x1 = si32(read(4)) - - # image info - mode = [] - ct_types = i16(read(2)) - if ct_types > 4: - fp.seek(ct_types * 6 + 12, io.SEEK_CUR) - size = i32(read(4)) - fp.seek(size, io.SEEK_CUR) - continue - - for _ in range(ct_types): - type = i16(read(2)) - - if type == 65535: - m = "A" - else: - m = "RGBA"[type] - - mode.append(m) - read(4) # size - - # figure out the image mode - mode.sort() - if mode == ["R"]: - mode = "L" - elif mode == ["B", "G", "R"]: - mode = "RGB" - elif mode == ["A", "B", "G", "R"]: - mode = "RGBA" - else: - mode = None # unknown - - # skip over blend flags and extra information - read(12) # filler - name = "" - size = i32(read(4)) # length of the extra data field - if size: - data_end = fp.tell() + size - - length = i32(read(4)) - if length: - fp.seek(length - 16, io.SEEK_CUR) - - length = i32(read(4)) - if length: - fp.seek(length, io.SEEK_CUR) - - length = i8(read(1)) - if length: - # Don't know the proper encoding, - # Latin-1 should be a good guess - name = read(length).decode("latin-1", "replace") - - fp.seek(data_end) - layers.append((name, mode, (x0, y0, x1, y1))) - - # get tiles - for i, (name, mode, bbox) in enumerate(layers): - tile = [] - for m in mode: - t = _maketile(fp, m, bbox, 1) - if t: - tile.extend(t) - layers[i] = name, mode, bbox, tile - - return layers - - -def _maketile(file, mode, bbox, channels): - tile = None - read = file.read - - compression = i16(read(2)) - - xsize = bbox[2] - bbox[0] - ysize = bbox[3] - bbox[1] - - offset = file.tell() - - if compression == 0: - # - # raw compression - tile = [] - for channel in range(channels): - layer = mode[channel] - if mode == "CMYK": - layer += ";I" - tile.append(("raw", bbox, offset, layer)) - offset = offset + xsize * ysize - - elif compression == 1: - # - # packbits compression - i = 0 - tile = [] - bytecount = read(channels * ysize * 2) - offset = file.tell() - for channel in range(channels): - layer = mode[channel] - if mode == "CMYK": - layer += ";I" - tile.append(("packbits", bbox, offset, layer)) - for y in range(ysize): - offset = offset + i16(bytecount, i) - i += 2 - - file.seek(offset) - - if offset & 1: - read(1) # padding - - return tile - - -# -------------------------------------------------------------------- -# registry - - -Image.register_open(PsdImageFile.format, PsdImageFile, _accept) - -Image.register_extension(PsdImageFile.format, ".psd") - -Image.register_mime(PsdImageFile.format, "image/vnd.adobe.photoshop") diff --git a/venv/Lib/site-packages/PIL/PyAccess.py b/venv/Lib/site-packages/PIL/PyAccess.py deleted file mode 100644 index 3be1cca..0000000 --- a/venv/Lib/site-packages/PIL/PyAccess.py +++ /dev/null @@ -1,381 +0,0 @@ -# -# The Python Imaging Library -# Pillow fork -# -# Python implementation of the PixelAccess Object -# -# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. -# Copyright (c) 1995-2009 by Fredrik Lundh. -# Copyright (c) 2013 Eric Soroos -# -# See the README file for information on usage and redistribution -# - -# Notes: -# -# * Implements the pixel access object following Access.c -# * Taking only the tuple form, which is used from python. -# * Fill.c uses the integer form, but it's still going to use the old -# Access.c implementation. 
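The PyAccess module that follows maps each image mode to a small cffi accessor class: it cdef()s a pixel struct, casts the image's row pointers with ffi.cast, and then reads or writes pixels[y][x] fields directly. A self-contained sketch of that cffi idiom on a plain byte buffer (assumes the cffi package is installed; not Pillow API):

from cffi import FFI

ffi = FFI()
ffi.cdef("struct Pixel_RGBA { unsigned char r, g, b, a; };")

width, height = 4, 2
raw = ffi.new("unsigned char[]", width * height * 4)  # zero-filled RGBA buffer
row0 = ffi.cast("struct Pixel_RGBA *", raw)           # view the first row as structs

row0[3].r, row0[3].a = 255, 128                       # write pixel x=3, y=0
print(row0[3].r, row0[3].g, row0[3].b, row0[3].a)     # 255 0 0 128
# PyAccess itself casts to "struct Pixel_RGBA **" (one pointer per row)
# and indexes self.pixels[y][x] the same way.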
-# -from __future__ import annotations - -import logging -import sys -from typing import TYPE_CHECKING - -from ._deprecate import deprecate - -FFI: type -try: - from cffi import FFI - - defs = """ - struct Pixel_RGBA { - unsigned char r,g,b,a; - }; - struct Pixel_I16 { - unsigned char l,r; - }; - """ - ffi = FFI() - ffi.cdef(defs) -except ImportError as ex: - # Allow error import for doc purposes, but error out when accessing - # anything in core. - from ._util import DeferredError - - FFI = ffi = DeferredError.new(ex) - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from . import Image - - -class PyAccess: - def __init__(self, img: Image.Image, readonly: bool = False) -> None: - deprecate("PyAccess", 11) - vals = dict(img.im.unsafe_ptrs) - self.readonly = readonly - self.image8 = ffi.cast("unsigned char **", vals["image8"]) - self.image32 = ffi.cast("int **", vals["image32"]) - self.image = ffi.cast("unsigned char **", vals["image"]) - self.xsize, self.ysize = img.im.size - self._img = img - - # Keep pointer to im object to prevent dereferencing. - self._im = img.im - if self._im.mode in ("P", "PA"): - self._palette = img.palette - - # Debugging is polluting test traces, only useful here - # when hacking on PyAccess - # logger.debug("%s", vals) - self._post_init() - - def _post_init(self) -> None: - pass - - def __setitem__( - self, - xy: tuple[int, int] | list[int], - color: float | tuple[int, ...] | list[int], - ) -> None: - """ - Modifies the pixel at x,y. The color is given as a single - numerical value for single band images, and a tuple for - multi-band images. In addition to this, RGB and RGBA tuples - are accepted for P and PA images. - - :param xy: The pixel coordinate, given as (x, y). See - :ref:`coordinate-system`. - :param color: The pixel value. - """ - if self.readonly: - msg = "Attempt to putpixel a read only image" - raise ValueError(msg) - (x, y) = xy - if x < 0: - x = self.xsize + x - if y < 0: - y = self.ysize + y - (x, y) = self.check_xy((x, y)) - - if ( - self._im.mode in ("P", "PA") - and isinstance(color, (list, tuple)) - and len(color) in [3, 4] - ): - # RGB or RGBA value for a P or PA image - if self._im.mode == "PA": - alpha = color[3] if len(color) == 4 else 255 - color = color[:3] - palette_index = self._palette.getcolor(color, self._img) - color = (palette_index, alpha) if self._im.mode == "PA" else palette_index - - return self.set_pixel(x, y, color) - - def __getitem__(self, xy: tuple[int, int] | list[int]) -> float | tuple[int, ...]: - """ - Returns the pixel at x,y. The pixel is returned as a single - value for single band images or a tuple for multiple band - images - - :param xy: The pixel coordinate, given as (x, y). See - :ref:`coordinate-system`. - :returns: a pixel value for single band images, a tuple of - pixel values for multiband images. - """ - (x, y) = xy - if x < 0: - x = self.xsize + x - if y < 0: - y = self.ysize + y - (x, y) = self.check_xy((x, y)) - return self.get_pixel(x, y) - - putpixel = __setitem__ - getpixel = __getitem__ - - def check_xy(self, xy: tuple[int, int]) -> tuple[int, int]: - (x, y) = xy - if not (0 <= x < self.xsize and 0 <= y < self.ysize): - msg = "pixel location out of range" - raise ValueError(msg) - return xy - - def get_pixel(self, x: int, y: int) -> float | tuple[int, ...]: - raise NotImplementedError() - - def set_pixel( - self, x: int, y: int, color: float | tuple[int, ...] 
| list[int] - ) -> None: - raise NotImplementedError() - - -class _PyAccess32_2(PyAccess): - """PA, LA, stored in first and last bytes of a 32 bit word""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) - - def get_pixel(self, x: int, y: int) -> tuple[int, int]: - pixel = self.pixels[y][x] - return pixel.r, pixel.a - - def set_pixel(self, x, y, color): - pixel = self.pixels[y][x] - # tuple - pixel.r = min(color[0], 255) - pixel.a = min(color[1], 255) - - -class _PyAccess32_3(PyAccess): - """RGB and friends, stored in the first three bytes of a 32 bit word""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) - - def get_pixel(self, x: int, y: int) -> tuple[int, int, int]: - pixel = self.pixels[y][x] - return pixel.r, pixel.g, pixel.b - - def set_pixel(self, x, y, color): - pixel = self.pixels[y][x] - # tuple - pixel.r = min(color[0], 255) - pixel.g = min(color[1], 255) - pixel.b = min(color[2], 255) - pixel.a = 255 - - -class _PyAccess32_4(PyAccess): - """RGBA etc, all 4 bytes of a 32 bit word""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) - - def get_pixel(self, x: int, y: int) -> tuple[int, int, int, int]: - pixel = self.pixels[y][x] - return pixel.r, pixel.g, pixel.b, pixel.a - - def set_pixel(self, x, y, color): - pixel = self.pixels[y][x] - # tuple - pixel.r = min(color[0], 255) - pixel.g = min(color[1], 255) - pixel.b = min(color[2], 255) - pixel.a = min(color[3], 255) - - -class _PyAccess8(PyAccess): - """1, L, P, 8 bit images stored as uint8""" - - def _post_init(self, *args, **kwargs): - self.pixels = self.image8 - - def get_pixel(self, x: int, y: int) -> int: - return self.pixels[y][x] - - def set_pixel(self, x, y, color): - try: - # integer - self.pixels[y][x] = min(color, 255) - except TypeError: - # tuple - self.pixels[y][x] = min(color[0], 255) - - -class _PyAccessI16_N(PyAccess): - """I;16 access, native bitendian without conversion""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("unsigned short **", self.image) - - def get_pixel(self, x: int, y: int) -> int: - return self.pixels[y][x] - - def set_pixel(self, x, y, color): - try: - # integer - self.pixels[y][x] = min(color, 65535) - except TypeError: - # tuple - self.pixels[y][x] = min(color[0], 65535) - - -class _PyAccessI16_L(PyAccess): - """I;16L access, with conversion""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("struct Pixel_I16 **", self.image) - - def get_pixel(self, x: int, y: int) -> int: - pixel = self.pixels[y][x] - return pixel.l + pixel.r * 256 - - def set_pixel(self, x, y, color): - pixel = self.pixels[y][x] - try: - color = min(color, 65535) - except TypeError: - color = min(color[0], 65535) - - pixel.l = color & 0xFF - pixel.r = color >> 8 - - -class _PyAccessI16_B(PyAccess): - """I;16B access, with conversion""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("struct Pixel_I16 **", self.image) - - def get_pixel(self, x: int, y: int) -> int: - pixel = self.pixels[y][x] - return pixel.l * 256 + pixel.r - - def set_pixel(self, x, y, color): - pixel = self.pixels[y][x] - try: - color = min(color, 65535) - except Exception: - color = min(color[0], 65535) - - pixel.l = color >> 8 - pixel.r = color & 0xFF - - -class _PyAccessI32_N(PyAccess): - """Signed Int32 access, native endian""" - - def _post_init(self, *args, **kwargs): - self.pixels = self.image32 - - def 
get_pixel(self, x: int, y: int) -> int: - return self.pixels[y][x] - - def set_pixel(self, x, y, color): - self.pixels[y][x] = color - - -class _PyAccessI32_Swap(PyAccess): - """I;32L/B access, with byteswapping conversion""" - - def _post_init(self, *args, **kwargs): - self.pixels = self.image32 - - def reverse(self, i): - orig = ffi.new("int *", i) - chars = ffi.cast("unsigned char *", orig) - chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0] - return ffi.cast("int *", chars)[0] - - def get_pixel(self, x: int, y: int) -> int: - return self.reverse(self.pixels[y][x]) - - def set_pixel(self, x, y, color): - self.pixels[y][x] = self.reverse(color) - - -class _PyAccessF(PyAccess): - """32 bit float access""" - - def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast("float **", self.image32) - - def get_pixel(self, x: int, y: int) -> float: - return self.pixels[y][x] - - def set_pixel(self, x, y, color): - try: - # not a tuple - self.pixels[y][x] = color - except TypeError: - # tuple - self.pixels[y][x] = color[0] - - -mode_map = { - "1": _PyAccess8, - "L": _PyAccess8, - "P": _PyAccess8, - "I;16N": _PyAccessI16_N, - "LA": _PyAccess32_2, - "La": _PyAccess32_2, - "PA": _PyAccess32_2, - "RGB": _PyAccess32_3, - "LAB": _PyAccess32_3, - "HSV": _PyAccess32_3, - "YCbCr": _PyAccess32_3, - "RGBA": _PyAccess32_4, - "RGBa": _PyAccess32_4, - "RGBX": _PyAccess32_4, - "CMYK": _PyAccess32_4, - "F": _PyAccessF, - "I": _PyAccessI32_N, -} - -if sys.byteorder == "little": - mode_map["I;16"] = _PyAccessI16_N - mode_map["I;16L"] = _PyAccessI16_N - mode_map["I;16B"] = _PyAccessI16_B - - mode_map["I;32L"] = _PyAccessI32_N - mode_map["I;32B"] = _PyAccessI32_Swap -else: - mode_map["I;16"] = _PyAccessI16_L - mode_map["I;16L"] = _PyAccessI16_L - mode_map["I;16B"] = _PyAccessI16_N - - mode_map["I;32L"] = _PyAccessI32_Swap - mode_map["I;32B"] = _PyAccessI32_N - - -def new(img: Image.Image, readonly: bool = False) -> PyAccess | None: - access_type = mode_map.get(img.mode, None) - if not access_type: - logger.debug("PyAccess Not Implemented: %s", img.mode) - return None - return access_type(img, readonly) diff --git a/venv/Lib/site-packages/PIL/QoiImagePlugin.py b/venv/Lib/site-packages/PIL/QoiImagePlugin.py deleted file mode 100644 index 202ef52..0000000 --- a/venv/Lib/site-packages/PIL/QoiImagePlugin.py +++ /dev/null @@ -1,115 +0,0 @@ -# -# The Python Imaging Library. -# -# QOI support for PIL -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import os - -from . 
import Image, ImageFile -from ._binary import i32be as i32 - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] == b"qoif" - - -class QoiImageFile(ImageFile.ImageFile): - format = "QOI" - format_description = "Quite OK Image" - - def _open(self) -> None: - if not _accept(self.fp.read(4)): - msg = "not a QOI file" - raise SyntaxError(msg) - - self._size = tuple(i32(self.fp.read(4)) for i in range(2)) - - channels = self.fp.read(1)[0] - self._mode = "RGB" if channels == 3 else "RGBA" - - self.fp.seek(1, os.SEEK_CUR) # colorspace - self.tile = [("qoi", (0, 0) + self._size, self.fp.tell(), None)] - - -class QoiDecoder(ImageFile.PyDecoder): - _pulls_fd = True - _previous_pixel: bytes | bytearray | None = None - _previously_seen_pixels: dict[int, bytes | bytearray] = {} - - def _add_to_previous_pixels(self, value: bytes | bytearray) -> None: - self._previous_pixel = value - - r, g, b, a = value - hash_value = (r * 3 + g * 5 + b * 7 + a * 11) % 64 - self._previously_seen_pixels[hash_value] = value - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - - self._previously_seen_pixels = {} - self._add_to_previous_pixels(bytearray((0, 0, 0, 255))) - - data = bytearray() - bands = Image.getmodebands(self.mode) - dest_length = self.state.xsize * self.state.ysize * bands - while len(data) < dest_length: - byte = self.fd.read(1)[0] - value: bytes | bytearray - if byte == 0b11111110 and self._previous_pixel: # QOI_OP_RGB - value = bytearray(self.fd.read(3)) + self._previous_pixel[3:] - elif byte == 0b11111111: # QOI_OP_RGBA - value = self.fd.read(4) - else: - op = byte >> 6 - if op == 0: # QOI_OP_INDEX - op_index = byte & 0b00111111 - value = self._previously_seen_pixels.get( - op_index, bytearray((0, 0, 0, 0)) - ) - elif op == 1 and self._previous_pixel: # QOI_OP_DIFF - value = bytearray( - ( - (self._previous_pixel[0] + ((byte & 0b00110000) >> 4) - 2) - % 256, - (self._previous_pixel[1] + ((byte & 0b00001100) >> 2) - 2) - % 256, - (self._previous_pixel[2] + (byte & 0b00000011) - 2) % 256, - self._previous_pixel[3], - ) - ) - elif op == 2 and self._previous_pixel: # QOI_OP_LUMA - second_byte = self.fd.read(1)[0] - diff_green = (byte & 0b00111111) - 32 - diff_red = ((second_byte & 0b11110000) >> 4) - 8 - diff_blue = (second_byte & 0b00001111) - 8 - - value = bytearray( - tuple( - (self._previous_pixel[i] + diff_green + diff) % 256 - for i, diff in enumerate((diff_red, 0, diff_blue)) - ) - ) - value += self._previous_pixel[3:] - elif op == 3 and self._previous_pixel: # QOI_OP_RUN - run_length = (byte & 0b00111111) + 1 - value = self._previous_pixel - if bands == 3: - value = value[:3] - data += value * run_length - continue - self._add_to_previous_pixels(value) - - if bands == 3: - value = value[:3] - data += value - self.set_as_raw(data) - return -1, 0 - - -Image.register_open(QoiImageFile.format, QoiImageFile, _accept) -Image.register_decoder("qoi", QoiDecoder) -Image.register_extension(QoiImageFile.format, ".qoi") diff --git a/venv/Lib/site-packages/PIL/SgiImagePlugin.py b/venv/Lib/site-packages/PIL/SgiImagePlugin.py deleted file mode 100644 index 50d9791..0000000 --- a/venv/Lib/site-packages/PIL/SgiImagePlugin.py +++ /dev/null @@ -1,238 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# SGI image file handling -# -# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. 
-# -# -# -# History: -# 2017-22-07 mb Add RLE decompression -# 2016-16-10 mb Add save method without compression -# 1995-09-10 fl Created -# -# Copyright (c) 2016 by Mickael Bonfill. -# Copyright (c) 2008 by Karsten Hiddemann. -# Copyright (c) 1997 by Secret Labs AB. -# Copyright (c) 1995 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import os -import struct -from typing import IO - -from . import Image, ImageFile -from ._binary import i16be as i16 -from ._binary import o8 - - -def _accept(prefix: bytes) -> bool: - return len(prefix) >= 2 and i16(prefix) == 474 - - -MODES = { - (1, 1, 1): "L", - (1, 2, 1): "L", - (2, 1, 1): "L;16B", - (2, 2, 1): "L;16B", - (1, 3, 3): "RGB", - (2, 3, 3): "RGB;16B", - (1, 3, 4): "RGBA", - (2, 3, 4): "RGBA;16B", -} - - -## -# Image plugin for SGI images. -class SgiImageFile(ImageFile.ImageFile): - format = "SGI" - format_description = "SGI Image File Format" - - def _open(self) -> None: - # HEAD - assert self.fp is not None - - headlen = 512 - s = self.fp.read(headlen) - - if not _accept(s): - msg = "Not an SGI image file" - raise ValueError(msg) - - # compression : verbatim or RLE - compression = s[2] - - # bpc : 1 or 2 bytes (8bits or 16bits) - bpc = s[3] - - # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize) - dimension = i16(s, 4) - - # xsize : width - xsize = i16(s, 6) - - # ysize : height - ysize = i16(s, 8) - - # zsize : channels count - zsize = i16(s, 10) - - # layout - layout = bpc, dimension, zsize - - # determine mode from bits/zsize - rawmode = "" - try: - rawmode = MODES[layout] - except KeyError: - pass - - if rawmode == "": - msg = "Unsupported SGI image mode" - raise ValueError(msg) - - self._size = xsize, ysize - self._mode = rawmode.split(";")[0] - if self.mode == "RGB": - self.custom_mimetype = "image/rgb" - - # orientation -1 : scanlines begins at the bottom-left corner - orientation = -1 - - # decoder info - if compression == 0: - pagesize = xsize * ysize * bpc - if bpc == 2: - self.tile = [ - ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation)) - ] - else: - self.tile = [] - offset = headlen - for layer in self.mode: - self.tile.append( - ("raw", (0, 0) + self.size, offset, (layer, 0, orientation)) - ) - offset += pagesize - elif compression == 1: - self.tile = [ - ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc)) - ] - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode not in {"RGB", "RGBA", "L"}: - msg = "Unsupported SGI image mode" - raise ValueError(msg) - - # Get the keyword arguments - info = im.encoderinfo - - # Byte-per-pixel precision, 1 = 8bits per pixel - bpc = info.get("bpc", 1) - - if bpc not in (1, 2): - msg = "Unsupported number of bytes per pixel" - raise ValueError(msg) - - # Flip the image, since the origin of SGI file is the bottom-left corner - orientation = -1 - # Define the file as SGI File Format - magic_number = 474 - # Run-Length Encoding Compression - Unsupported at this time - rle = 0 - - # Number of dimensions (x,y,z) - dim = 3 - # X Dimension = width / Y Dimension = height - x, y = im.size - if im.mode == "L" and y == 1: - dim = 1 - elif im.mode == "L": - dim = 2 - # Z Dimension: Number of channels - z = len(im.mode) - - if dim in {1, 2}: - z = 1 - - # assert we've got the right number of bands. 
- if len(im.getbands()) != z: - msg = f"incorrect number of bands in SGI write: {z} vs {len(im.getbands())}" - raise ValueError(msg) - - # Minimum Byte value - pinmin = 0 - # Maximum Byte value (255 = 8bits per pixel) - pinmax = 255 - # Image name (79 characters max, truncated below in write) - img_name = os.path.splitext(os.path.basename(filename))[0] - if isinstance(img_name, str): - img_name = img_name.encode("ascii", "ignore") - # Standard representation of pixel in the file - colormap = 0 - fp.write(struct.pack(">h", magic_number)) - fp.write(o8(rle)) - fp.write(o8(bpc)) - fp.write(struct.pack(">H", dim)) - fp.write(struct.pack(">H", x)) - fp.write(struct.pack(">H", y)) - fp.write(struct.pack(">H", z)) - fp.write(struct.pack(">l", pinmin)) - fp.write(struct.pack(">l", pinmax)) - fp.write(struct.pack("4s", b"")) # dummy - fp.write(struct.pack("79s", img_name)) # truncates to 79 chars - fp.write(struct.pack("s", b"")) # force null byte after img_name - fp.write(struct.pack(">l", colormap)) - fp.write(struct.pack("404s", b"")) # dummy - - rawmode = "L" - if bpc == 2: - rawmode = "L;16B" - - for channel in im.split(): - fp.write(channel.tobytes("raw", rawmode, 0, orientation)) - - if hasattr(fp, "flush"): - fp.flush() - - -class SGI16Decoder(ImageFile.PyDecoder): - _pulls_fd = True - - def decode(self, buffer: bytes) -> tuple[int, int]: - assert self.fd is not None - assert self.im is not None - - rawmode, stride, orientation = self.args - pagesize = self.state.xsize * self.state.ysize - zsize = len(self.mode) - self.fd.seek(512) - - for band in range(zsize): - channel = Image.new("L", (self.state.xsize, self.state.ysize)) - channel.frombytes( - self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation - ) - self.im.putband(channel.im, band) - - return -1, 0 - - -# -# registry - - -Image.register_decoder("SGI16", SGI16Decoder) -Image.register_open(SgiImageFile.format, SgiImageFile, _accept) -Image.register_save(SgiImageFile.format, _save) -Image.register_mime(SgiImageFile.format, "image/sgi") - -Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"]) - -# End of file diff --git a/venv/Lib/site-packages/PIL/SpiderImagePlugin.py b/venv/Lib/site-packages/PIL/SpiderImagePlugin.py deleted file mode 100644 index f5a09c3..0000000 --- a/venv/Lib/site-packages/PIL/SpiderImagePlugin.py +++ /dev/null @@ -1,325 +0,0 @@ -# -# The Python Imaging Library. -# -# SPIDER image file handling -# -# History: -# 2004-08-02 Created BB -# 2006-03-02 added save method -# 2006-03-13 added support for stack images -# -# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. -# Copyright (c) 2004 by William Baxter. -# Copyright (c) 2004 by Secret Labs AB. -# Copyright (c) 2004 by Fredrik Lundh. -# - -## -# Image plugin for the Spider image format. This format is used -# by the SPIDER software, in processing image data from electron -# microscopy and tomography. -## - -# -# SpiderImagePlugin.py -# -# The Spider image format is used by SPIDER software, in processing -# image data from electron microscopy and tomography. -# -# Spider home page: -# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html -# -# Details about the Spider image format: -# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html -# -from __future__ import annotations - -import os -import struct -import sys -from typing import IO, TYPE_CHECKING, Any, Tuple, cast - -from . 
import Image, ImageFile - - -def isInt(f: Any) -> int: - try: - i = int(f) - if f - i == 0: - return 1 - else: - return 0 - except (ValueError, OverflowError): - return 0 - - -iforms = [1, 3, -11, -12, -21, -22] - - -# There is no magic number to identify Spider files, so just check a -# series of header locations to see if they have reasonable values. -# Returns no. of bytes in the header, if it is a valid Spider header, -# otherwise returns 0 - - -def isSpiderHeader(t: tuple[float, ...]) -> int: - h = (99,) + t # add 1 value so can use spider header index start=1 - # header values 1,2,5,12,13,22,23 should be integers - for i in [1, 2, 5, 12, 13, 22, 23]: - if not isInt(h[i]): - return 0 - # check iform - iform = int(h[5]) - if iform not in iforms: - return 0 - # check other header values - labrec = int(h[13]) # no. records in file header - labbyt = int(h[22]) # total no. of bytes in header - lenbyt = int(h[23]) # record length in bytes - if labbyt != (labrec * lenbyt): - return 0 - # looks like a valid header - return labbyt - - -def isSpiderImage(filename: str) -> int: - with open(filename, "rb") as fp: - f = fp.read(92) # read 23 * 4 bytes - t = struct.unpack(">23f", f) # try big-endian first - hdrlen = isSpiderHeader(t) - if hdrlen == 0: - t = struct.unpack("<23f", f) # little-endian - hdrlen = isSpiderHeader(t) - return hdrlen - - -class SpiderImageFile(ImageFile.ImageFile): - format = "SPIDER" - format_description = "Spider 2D image" - _close_exclusive_fp_after_loading = False - - def _open(self) -> None: - # check header - n = 27 * 4 # read 27 float values - f = self.fp.read(n) - - try: - self.bigendian = 1 - t = struct.unpack(">27f", f) # try big-endian first - hdrlen = isSpiderHeader(t) - if hdrlen == 0: - self.bigendian = 0 - t = struct.unpack("<27f", f) # little-endian - hdrlen = isSpiderHeader(t) - if hdrlen == 0: - msg = "not a valid Spider file" - raise SyntaxError(msg) - except struct.error as e: - msg = "not a valid Spider file" - raise SyntaxError(msg) from e - - h = (99,) + t # add 1 value : spider header index starts at 1 - iform = int(h[5]) - if iform != 1: - msg = "not a Spider 2D image" - raise SyntaxError(msg) - - self._size = int(h[12]), int(h[2]) # size in pixels (width, height) - self.istack = int(h[24]) - self.imgnumber = int(h[27]) - - if self.istack == 0 and self.imgnumber == 0: - # stk=0, img=0: a regular 2D image - offset = hdrlen - self._nimages = 1 - elif self.istack > 0 and self.imgnumber == 0: - # stk>0, img=0: Opening the stack for the first time - self.imgbytes = int(h[12]) * int(h[2]) * 4 - self.hdrlen = hdrlen - self._nimages = int(h[26]) - # Point to the first image in the stack - offset = hdrlen * 2 - self.imgnumber = 1 - elif self.istack == 0 and self.imgnumber > 0: - # stk=0, img>0: an image within the stack - offset = hdrlen + self.stkoffset - self.istack = 2 # So Image knows it's still a stack - else: - msg = "inconsistent stack header values" - raise SyntaxError(msg) - - if self.bigendian: - self.rawmode = "F;32BF" - else: - self.rawmode = "F;32F" - self._mode = "F" - - self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))] - self._fp = self.fp # FIXME: hack - - @property - def n_frames(self) -> int: - return self._nimages - - @property - def is_animated(self) -> bool: - return self._nimages > 1 - - # 1st image index is zero (although SPIDER imgnumber starts at 1) - def tell(self) -> int: - if self.imgnumber < 1: - return 0 - else: - return self.imgnumber - 1 - - def seek(self, frame: int) -> None: - if self.istack == 0: - msg 
= "attempt to seek in a non-stack file" - raise EOFError(msg) - if not self._seek_check(frame): - return - self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) - self.fp = self._fp - self.fp.seek(self.stkoffset) - self._open() - - # returns a byte image after rescaling to 0..255 - def convert2byte(self, depth: int = 255) -> Image.Image: - extrema = self.getextrema() - assert isinstance(extrema[0], float) - minimum, maximum = cast(Tuple[float, float], extrema) - m: float = 1 - if maximum != minimum: - m = depth / (maximum - minimum) - b = -m * minimum - return self.point(lambda i: i * m + b).convert("L") - - if TYPE_CHECKING: - from . import ImageTk - - # returns a ImageTk.PhotoImage object, after rescaling to 0..255 - def tkPhotoImage(self) -> ImageTk.PhotoImage: - from . import ImageTk - - return ImageTk.PhotoImage(self.convert2byte(), palette=256) - - -# -------------------------------------------------------------------- -# Image series - - -# given a list of filenames, return a list of images -def loadImageSeries(filelist: list[str] | None = None) -> list[SpiderImageFile] | None: - """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage""" - if filelist is None or len(filelist) < 1: - return None - - imglist = [] - for img in filelist: - if not os.path.exists(img): - print(f"unable to find {img}") - continue - try: - with Image.open(img) as im: - im = im.convert2byte() - except Exception: - if not isSpiderImage(img): - print(f"{img} is not a Spider image file") - continue - im.info["filename"] = img - imglist.append(im) - return imglist - - -# -------------------------------------------------------------------- -# For saving images in Spider format - - -def makeSpiderHeader(im: Image.Image) -> list[bytes]: - nsam, nrow = im.size - lenbyt = nsam * 4 # There are labrec records in the header - labrec = int(1024 / lenbyt) - if 1024 % lenbyt != 0: - labrec += 1 - labbyt = labrec * lenbyt - nvalues = int(labbyt / 4) - if nvalues < 23: - return [] - - hdr = [0.0] * nvalues - - # NB these are Fortran indices - hdr[1] = 1.0 # nslice (=1 for an image) - hdr[2] = float(nrow) # number of rows per slice - hdr[3] = float(nrow) # number of records in the image - hdr[5] = 1.0 # iform for 2D image - hdr[12] = float(nsam) # number of pixels per line - hdr[13] = float(labrec) # number of records in file header - hdr[22] = float(labbyt) # total number of bytes in header - hdr[23] = float(lenbyt) # record length in bytes - - # adjust for Fortran indexing - hdr = hdr[1:] - hdr.append(0.0) - # pack binary data into a string - return [struct.pack("f", v) for v in hdr] - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode[0] != "F": - im = im.convert("F") - - hdr = makeSpiderHeader(im) - if len(hdr) < 256: - msg = "Error creating Spider header" - raise OSError(msg) - - # write the SPIDER header - fp.writelines(hdr) - - rawmode = "F;32NF" # 32-bit native floating point - ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) - - -def _save_spider(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - # get the filename extension and register it with Image - filename_ext = os.path.splitext(filename)[1] - ext = filename_ext.decode() if isinstance(filename_ext, bytes) else filename_ext - Image.register_extension(SpiderImageFile.format, ext) - _save(im, fp, filename) - - -# -------------------------------------------------------------------- - - -Image.register_open(SpiderImageFile.format, SpiderImageFile) 
-Image.register_save(SpiderImageFile.format, _save_spider) - -if __name__ == "__main__": - if len(sys.argv) < 2: - print("Syntax: python3 SpiderImagePlugin.py [infile] [outfile]") - sys.exit() - - filename = sys.argv[1] - if not isSpiderImage(filename): - print("input image must be in Spider format") - sys.exit() - - with Image.open(filename) as im: - print(f"image: {im}") - print(f"format: {im.format}") - print(f"size: {im.size}") - print(f"mode: {im.mode}") - print("max, min: ", end=" ") - print(im.getextrema()) - - if len(sys.argv) > 2: - outfile = sys.argv[2] - - # perform some image operation - im = im.transpose(Image.Transpose.FLIP_LEFT_RIGHT) - print( - f"saving a flipped version of {os.path.basename(filename)} " - f"as {outfile} " - ) - im.save(outfile, SpiderImageFile.format) diff --git a/venv/Lib/site-packages/PIL/SunImagePlugin.py b/venv/Lib/site-packages/PIL/SunImagePlugin.py deleted file mode 100644 index 4e09847..0000000 --- a/venv/Lib/site-packages/PIL/SunImagePlugin.py +++ /dev/null @@ -1,141 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# Sun image file handling -# -# History: -# 1995-09-10 fl Created -# 1996-05-28 fl Fixed 32-bit alignment -# 1998-12-29 fl Import ImagePalette module -# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) -# -# Copyright (c) 1997-2001 by Secret Labs AB -# Copyright (c) 1995-1996 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -from . import Image, ImageFile, ImagePalette -from ._binary import i32be as i32 - - -def _accept(prefix: bytes) -> bool: - return len(prefix) >= 4 and i32(prefix) == 0x59A66A95 - - -## -# Image plugin for Sun raster files. - - -class SunImageFile(ImageFile.ImageFile): - format = "SUN" - format_description = "Sun Raster File" - - def _open(self) -> None: - # The Sun Raster file header is 32 bytes in length - # and has the following format: - - # typedef struct _SunRaster - # { - # DWORD MagicNumber; /* Magic (identification) number */ - # DWORD Width; /* Width of image in pixels */ - # DWORD Height; /* Height of image in pixels */ - # DWORD Depth; /* Number of bits per pixel */ - # DWORD Length; /* Size of image data in bytes */ - # DWORD Type; /* Type of raster file */ - # DWORD ColorMapType; /* Type of color map */ - # DWORD ColorMapLength; /* Size of the color map in bytes */ - # } SUNRASTER; - - assert self.fp is not None - - # HEAD - s = self.fp.read(32) - if not _accept(s): - msg = "not an SUN raster file" - raise SyntaxError(msg) - - offset = 32 - - self._size = i32(s, 4), i32(s, 8) - - depth = i32(s, 12) - # data_length = i32(s, 16) # unreliable, ignore. 
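# A minimal illustrative sketch (not taken from the deleted module): the 32-byte
# header laid out in the SUNRASTER struct comment above is eight big-endian
# 32-bit words, so the same fields can be unpacked in one struct call. The
# header bytes below are synthetic, built only to show the field order.
import struct

header = struct.pack(">8I", 0x59A66A95, 640, 480, 8, 640 * 480, 1, 0, 0)
(magic, width, height, depth,
 length, ras_type, cmap_type, cmap_length) = struct.unpack(">8I", header)
assert magic == 0x59A66A95            # the same magic number _accept() checks for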
- file_type = i32(s, 20) - palette_type = i32(s, 24) # 0: None, 1: RGB, 2: Raw/arbitrary - palette_length = i32(s, 28) - - if depth == 1: - self._mode, rawmode = "1", "1;I" - elif depth == 4: - self._mode, rawmode = "L", "L;4" - elif depth == 8: - self._mode = rawmode = "L" - elif depth == 24: - if file_type == 3: - self._mode, rawmode = "RGB", "RGB" - else: - self._mode, rawmode = "RGB", "BGR" - elif depth == 32: - if file_type == 3: - self._mode, rawmode = "RGB", "RGBX" - else: - self._mode, rawmode = "RGB", "BGRX" - else: - msg = "Unsupported Mode/Bit Depth" - raise SyntaxError(msg) - - if palette_length: - if palette_length > 1024: - msg = "Unsupported Color Palette Length" - raise SyntaxError(msg) - - if palette_type != 1: - msg = "Unsupported Palette Type" - raise SyntaxError(msg) - - offset = offset + palette_length - self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length)) - if self.mode == "L": - self._mode = "P" - rawmode = rawmode.replace("L", "P") - - # 16 bit boundaries on stride - stride = ((self.size[0] * depth + 15) // 16) * 2 - - # file type: Type is the version (or flavor) of the bitmap - # file. The following values are typically found in the Type - # field: - # 0000h Old - # 0001h Standard - # 0002h Byte-encoded - # 0003h RGB format - # 0004h TIFF format - # 0005h IFF format - # FFFFh Experimental - - # Old and standard are the same, except for the length tag. - # byte-encoded is run-length-encoded - # RGB looks similar to standard, but RGB byte order - # TIFF and IFF mean that they were converted from T/IFF - # Experimental means that it's something else. - # (https://www.fileformat.info/format/sunraster/egff.htm) - - if file_type in (0, 1, 3, 4, 5): - self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))] - elif file_type == 2: - self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)] - else: - msg = "Unsupported Sun Raster file type" - raise SyntaxError(msg) - - -# -# registry - - -Image.register_open(SunImageFile.format, SunImageFile, _accept) - -Image.register_extension(SunImageFile.format, ".ras") diff --git a/venv/Lib/site-packages/PIL/TarIO.py b/venv/Lib/site-packages/PIL/TarIO.py deleted file mode 100644 index cba26d4..0000000 --- a/venv/Lib/site-packages/PIL/TarIO.py +++ /dev/null @@ -1,67 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# read files from within a tar file -# -# History: -# 95-06-18 fl Created -# 96-05-28 fl Open files in binary mode -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1995-96. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io - -from . import ContainerIO - - -class TarIO(ContainerIO.ContainerIO[bytes]): - """A file object that provides read access to a given member of a TAR file.""" - - def __init__(self, tarfile: str, file: str) -> None: - """ - Create file object. - - :param tarfile: Name of TAR file. - :param file: Name of member file. 
- """ - self.fh = open(tarfile, "rb") - - while True: - s = self.fh.read(512) - if len(s) != 512: - msg = "unexpected end of tar file" - raise OSError(msg) - - name = s[:100].decode("utf-8") - i = name.find("\0") - if i == 0: - msg = "cannot find subfile" - raise OSError(msg) - if i > 0: - name = name[:i] - - size = int(s[124:135], 8) - - if file == name: - break - - self.fh.seek((size + 511) & (~511), io.SEEK_CUR) - - # Open region - super().__init__(self.fh, self.fh.tell(), size) - - # Context manager support - def __enter__(self) -> TarIO: - return self - - def __exit__(self, *args: object) -> None: - self.close() - - def close(self) -> None: - self.fh.close() diff --git a/venv/Lib/site-packages/PIL/TgaImagePlugin.py b/venv/Lib/site-packages/PIL/TgaImagePlugin.py deleted file mode 100644 index 39104ae..0000000 --- a/venv/Lib/site-packages/PIL/TgaImagePlugin.py +++ /dev/null @@ -1,262 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# TGA file handling -# -# History: -# 95-09-01 fl created (reads 24-bit files only) -# 97-01-04 fl support more TGA versions, including compressed images -# 98-07-04 fl fixed orientation and alpha layer bugs -# 98-09-11 fl fixed orientation for runlength decoder -# -# Copyright (c) Secret Labs AB 1997-98. -# Copyright (c) Fredrik Lundh 1995-97. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import warnings -from typing import IO - -from . import Image, ImageFile, ImagePalette -from ._binary import i16le as i16 -from ._binary import o8 -from ._binary import o16le as o16 - -# -# -------------------------------------------------------------------- -# Read RGA file - - -MODES = { - # map imagetype/depth to rawmode - (1, 8): "P", - (3, 1): "1", - (3, 8): "L", - (3, 16): "LA", - (2, 16): "BGRA;15Z", - (2, 24): "BGR", - (2, 32): "BGRA", -} - - -## -# Image plugin for Targa files. - - -class TgaImageFile(ImageFile.ImageFile): - format = "TGA" - format_description = "Targa" - - def _open(self) -> None: - # process header - assert self.fp is not None - - s = self.fp.read(18) - - id_len = s[0] - - colormaptype = s[1] - imagetype = s[2] - - depth = s[16] - - flags = s[17] - - self._size = i16(s, 12), i16(s, 14) - - # validate header fields - if ( - colormaptype not in (0, 1) - or self.size[0] <= 0 - or self.size[1] <= 0 - or depth not in (1, 8, 16, 24, 32) - ): - msg = "not a TGA file" - raise SyntaxError(msg) - - # image mode - if imagetype in (3, 11): - self._mode = "L" - if depth == 1: - self._mode = "1" # ??? 
- elif depth == 16: - self._mode = "LA" - elif imagetype in (1, 9): - self._mode = "P" if colormaptype else "L" - elif imagetype in (2, 10): - self._mode = "RGB" if depth == 24 else "RGBA" - else: - msg = "unknown TGA mode" - raise SyntaxError(msg) - - # orientation - orientation = flags & 0x30 - self._flip_horizontally = orientation in [0x10, 0x30] - if orientation in [0x20, 0x30]: - orientation = 1 - elif orientation in [0, 0x10]: - orientation = -1 - else: - msg = "unknown TGA orientation" - raise SyntaxError(msg) - - self.info["orientation"] = orientation - - if imagetype & 8: - self.info["compression"] = "tga_rle" - - if id_len: - self.info["id_section"] = self.fp.read(id_len) - - if colormaptype: - # read palette - start, size, mapdepth = i16(s, 3), i16(s, 5), s[7] - if mapdepth == 16: - self.palette = ImagePalette.raw( - "BGRA;15Z", bytes(2 * start) + self.fp.read(2 * size) - ) - self.palette.mode = "RGBA" - elif mapdepth == 24: - self.palette = ImagePalette.raw( - "BGR", bytes(3 * start) + self.fp.read(3 * size) - ) - elif mapdepth == 32: - self.palette = ImagePalette.raw( - "BGRA", bytes(4 * start) + self.fp.read(4 * size) - ) - else: - msg = "unknown TGA map depth" - raise SyntaxError(msg) - - # setup tile descriptor - try: - rawmode = MODES[(imagetype & 7, depth)] - if imagetype & 8: - # compressed - self.tile = [ - ( - "tga_rle", - (0, 0) + self.size, - self.fp.tell(), - (rawmode, orientation, depth), - ) - ] - else: - self.tile = [ - ( - "raw", - (0, 0) + self.size, - self.fp.tell(), - (rawmode, 0, orientation), - ) - ] - except KeyError: - pass # cannot decode - - def load_end(self) -> None: - if self._flip_horizontally: - assert self.im is not None - self.im = self.im.transpose(Image.Transpose.FLIP_LEFT_RIGHT) - - -# -# -------------------------------------------------------------------- -# Write TGA file - - -SAVE = { - "1": ("1", 1, 0, 3), - "L": ("L", 8, 0, 3), - "LA": ("LA", 16, 0, 3), - "P": ("P", 8, 1, 1), - "RGB": ("BGR", 24, 0, 2), - "RGBA": ("BGRA", 32, 0, 2), -} - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - try: - rawmode, bits, colormaptype, imagetype = SAVE[im.mode] - except KeyError as e: - msg = f"cannot write mode {im.mode} as TGA" - raise OSError(msg) from e - - if "rle" in im.encoderinfo: - rle = im.encoderinfo["rle"] - else: - compression = im.encoderinfo.get("compression", im.info.get("compression")) - rle = compression == "tga_rle" - if rle: - imagetype += 8 - - id_section = im.encoderinfo.get("id_section", im.info.get("id_section", "")) - id_len = len(id_section) - if id_len > 255: - id_len = 255 - id_section = id_section[:255] - warnings.warn("id_section has been trimmed to 255 characters") - - if colormaptype: - assert im.im is not None - palette = im.im.getpalette("RGB", "BGR") - colormaplength, colormapentry = len(palette) // 3, 24 - else: - colormaplength, colormapentry = 0, 0 - - if im.mode in ("LA", "RGBA"): - flags = 8 - else: - flags = 0 - - orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1)) - if orientation > 0: - flags = flags | 0x20 - - fp.write( - o8(id_len) - + o8(colormaptype) - + o8(imagetype) - + o16(0) # colormapfirst - + o16(colormaplength) - + o8(colormapentry) - + o16(0) - + o16(0) - + o16(im.size[0]) - + o16(im.size[1]) - + o8(bits) - + o8(flags) - ) - - if id_section: - fp.write(id_section) - - if colormaptype: - fp.write(palette) - - if rle: - ImageFile._save( - im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))] - ) - else: - ImageFile._save( - im, fp, 
[("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))] - ) - - # write targa version 2 footer - fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000") - - -# -# -------------------------------------------------------------------- -# Registry - - -Image.register_open(TgaImageFile.format, TgaImageFile) -Image.register_save(TgaImageFile.format, _save) - -Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"]) - -Image.register_mime(TgaImageFile.format, "image/x-tga") diff --git a/venv/Lib/site-packages/PIL/TiffImagePlugin.py b/venv/Lib/site-packages/PIL/TiffImagePlugin.py deleted file mode 100644 index ac5b63c..0000000 --- a/venv/Lib/site-packages/PIL/TiffImagePlugin.py +++ /dev/null @@ -1,2200 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# TIFF file handling -# -# TIFF is a flexible, if somewhat aged, image file format originally -# defined by Aldus. Although TIFF supports a wide variety of pixel -# layouts and compression methods, the name doesn't really stand for -# "thousands of incompatible file formats," it just feels that way. -# -# To read TIFF data from a stream, the stream must be seekable. For -# progressive decoding, make sure to use TIFF files where the tag -# directory is placed first in the file. -# -# History: -# 1995-09-01 fl Created -# 1996-05-04 fl Handle JPEGTABLES tag -# 1996-05-18 fl Fixed COLORMAP support -# 1997-01-05 fl Fixed PREDICTOR support -# 1997-08-27 fl Added support for rational tags (from Perry Stoll) -# 1998-01-10 fl Fixed seek/tell (from Jan Blom) -# 1998-07-15 fl Use private names for internal variables -# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) -# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) -# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) -# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) -# 2001-12-18 fl Added workaround for broken Matrox library -# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) -# 2003-05-19 fl Check FILLORDER tag -# 2003-09-26 fl Added RGBa support -# 2004-02-24 fl Added DPI support; fixed rational write support -# 2005-02-07 fl Added workaround for broken Corel Draw 10 files -# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) -# -# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. -# Copyright (c) 1995-1997 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import io -import itertools -import logging -import math -import os -import struct -import warnings -from collections.abc import MutableMapping -from fractions import Fraction -from numbers import Number, Rational -from typing import IO, TYPE_CHECKING, Any, Callable, NoReturn - -from . import ExifTags, Image, ImageFile, ImageOps, ImagePalette, TiffTags -from ._binary import i16be as i16 -from ._binary import i32be as i32 -from ._binary import o8 -from ._deprecate import deprecate -from .TiffTags import TYPES - -logger = logging.getLogger(__name__) - -# Set these to true to force use of libtiff for reading or writing. 
-READ_LIBTIFF = False -WRITE_LIBTIFF = False -IFD_LEGACY_API = True -STRIP_SIZE = 65536 - -II = b"II" # little-endian (Intel style) -MM = b"MM" # big-endian (Motorola style) - -# -# -------------------------------------------------------------------- -# Read TIFF files - -# a few tag names, just to make the code below a bit more readable -OSUBFILETYPE = 255 -IMAGEWIDTH = 256 -IMAGELENGTH = 257 -BITSPERSAMPLE = 258 -COMPRESSION = 259 -PHOTOMETRIC_INTERPRETATION = 262 -FILLORDER = 266 -IMAGEDESCRIPTION = 270 -STRIPOFFSETS = 273 -SAMPLESPERPIXEL = 277 -ROWSPERSTRIP = 278 -STRIPBYTECOUNTS = 279 -X_RESOLUTION = 282 -Y_RESOLUTION = 283 -PLANAR_CONFIGURATION = 284 -RESOLUTION_UNIT = 296 -TRANSFERFUNCTION = 301 -SOFTWARE = 305 -DATE_TIME = 306 -ARTIST = 315 -PREDICTOR = 317 -COLORMAP = 320 -TILEWIDTH = 322 -TILELENGTH = 323 -TILEOFFSETS = 324 -TILEBYTECOUNTS = 325 -SUBIFD = 330 -EXTRASAMPLES = 338 -SAMPLEFORMAT = 339 -JPEGTABLES = 347 -YCBCRSUBSAMPLING = 530 -REFERENCEBLACKWHITE = 532 -COPYRIGHT = 33432 -IPTC_NAA_CHUNK = 33723 # newsphoto properties -PHOTOSHOP_CHUNK = 34377 # photoshop properties -ICCPROFILE = 34675 -EXIFIFD = 34665 -XMP = 700 -JPEGQUALITY = 65537 # pseudo-tag by libtiff - -# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java -IMAGEJ_META_DATA_BYTE_COUNTS = 50838 -IMAGEJ_META_DATA = 50839 - -COMPRESSION_INFO = { - # Compression => pil compression name - 1: "raw", - 2: "tiff_ccitt", - 3: "group3", - 4: "group4", - 5: "tiff_lzw", - 6: "tiff_jpeg", # obsolete - 7: "jpeg", - 8: "tiff_adobe_deflate", - 32771: "tiff_raw_16", # 16-bit padding - 32773: "packbits", - 32809: "tiff_thunderscan", - 32946: "tiff_deflate", - 34676: "tiff_sgilog", - 34677: "tiff_sgilog24", - 34925: "lzma", - 50000: "zstd", - 50001: "webp", -} - -COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()} - -OPEN_INFO = { - # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, - # ExtraSamples) => mode, rawmode - (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), - (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), - (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), - (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), - (II, 1, (1,), 1, (1,), ()): ("1", "1"), - (MM, 1, (1,), 1, (1,), ()): ("1", "1"), - (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), - (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), - (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), - (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), - (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), - (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), - (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), - (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), - (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), - (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), - (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), - (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), - (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), - (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), - (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), - (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), - (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), - (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), - (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), - (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), - (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), - (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), - (II, 1, (1,), 1, (8,), ()): ("L", "L"), - (MM, 1, (1,), 1, (8,), ()): ("L", "L"), - (II, 1, (2,), 1, (8,), ()): ("L", "L"), - (MM, 1, (2,), 1, (8,), ()): ("L", "L"), - (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), - (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), - (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), - (II, 0, (1,), 1, 
(16,), ()): ("I;16", "I;16"), - (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), - (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), - (II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"), - (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"), - (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"), - (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), - (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), - (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), - (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), - (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"), - (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), - (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), - (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), - (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), - (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), - (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), - (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), - (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), - (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples - (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples - (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"), - (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"), - (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"), - (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"), - (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"), - (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"), - (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), - (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), - (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), - (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), - (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), - (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), - (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), - (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), - (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), - (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), - (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), - (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), - (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 - (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 - (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"), - (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"), - (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"), - (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"), - (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16L"), - (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16B"), - (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"), - (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"), - (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"), - (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"), - (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), - (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), - (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), - (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), - (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), - (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), - (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), - (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), - (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), - (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), - (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), - (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), - (II, 3, 
(1,), 1, (8,), ()): ("P", "P"), - (MM, 3, (1,), 1, (8,), ()): ("P", "P"), - (II, 3, (1,), 1, (8, 8), (0,)): ("P", "PX"), - (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), - (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), - (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), - (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), - (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), - (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), - (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), - (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), - (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), - (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), - (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"), - (II, 6, (1,), 1, (8,), ()): ("L", "L"), - (MM, 6, (1,), 1, (8,), ()): ("L", "L"), - # JPEG compressed images handled by LibTiff and auto-converted to RGBX - # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel - (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), - (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), - (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), - (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), -} - -MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO) - -PREFIXES = [ - b"MM\x00\x2A", # Valid TIFF header with big-endian byte order - b"II\x2A\x00", # Valid TIFF header with little-endian byte order - b"MM\x2A\x00", # Invalid TIFF header, assume big-endian - b"II\x00\x2A", # Invalid TIFF header, assume little-endian - b"MM\x00\x2B", # BigTIFF with big-endian byte order - b"II\x2B\x00", # BigTIFF with little-endian byte order -] - -if not getattr(Image.core, "libtiff_support_custom_tags", True): - deprecate("Support for LibTIFF earlier than version 4", 12) - - -def _accept(prefix: bytes) -> bool: - return prefix[:4] in PREFIXES - - -def _limit_rational(val, max_val): - inv = abs(val) > 1 - n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) - return n_d[::-1] if inv else n_d - - -def _limit_signed_rational(val, max_val, min_val): - frac = Fraction(val) - n_d = frac.numerator, frac.denominator - - if min(n_d) < min_val: - n_d = _limit_rational(val, abs(min_val)) - - if max(n_d) > max_val: - val = Fraction(*n_d) - n_d = _limit_rational(val, max_val) - - return n_d - - -## -# Wrapper for TIFF IFDs. - -_load_dispatch = {} -_write_dispatch = {} - - -def _delegate(op): - def delegate(self, *args): - return getattr(self._val, op)(*args) - - return delegate - - -class IFDRational(Rational): - """Implements a rational class where 0/0 is a legal value to match - the in the wild use of exif rationals. - - e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used - """ - - """ If the denominator is 0, store this as a float('nan'), otherwise store - as a fractions.Fraction(). 
Delegate as appropriate - - """ - - __slots__ = ("_numerator", "_denominator", "_val") - - def __init__(self, value, denominator=1): - """ - :param value: either an integer numerator, a - float/rational/other number, or an IFDRational - :param denominator: Optional integer denominator - """ - if isinstance(value, IFDRational): - self._numerator = value.numerator - self._denominator = value.denominator - self._val = value._val - return - - if isinstance(value, Fraction): - self._numerator = value.numerator - self._denominator = value.denominator - else: - self._numerator = value - self._denominator = denominator - - if denominator == 0: - self._val = float("nan") - elif denominator == 1: - self._val = Fraction(value) - else: - self._val = Fraction(value, denominator) - - @property - def numerator(self): - return self._numerator - - @property - def denominator(self): - return self._denominator - - def limit_rational(self, max_denominator): - """ - - :param max_denominator: Integer, the maximum denominator value - :returns: Tuple of (numerator, denominator) - """ - - if self.denominator == 0: - return self.numerator, self.denominator - - f = self._val.limit_denominator(max_denominator) - return f.numerator, f.denominator - - def __repr__(self) -> str: - return str(float(self._val)) - - def __hash__(self) -> int: - return self._val.__hash__() - - def __eq__(self, other: object) -> bool: - val = self._val - if isinstance(other, IFDRational): - other = other._val - if isinstance(other, float): - val = float(val) - return val == other - - def __getstate__(self): - return [self._val, self._numerator, self._denominator] - - def __setstate__(self, state): - IFDRational.__init__(self, 0) - _val, _numerator, _denominator = state - self._val = _val - self._numerator = _numerator - self._denominator = _denominator - - """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul', - 'truediv', 'rtruediv', 'floordiv', 'rfloordiv', - 'mod','rmod', 'pow','rpow', 'pos', 'neg', - 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool', - 'ceil', 'floor', 'round'] - print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a)) - """ - - __add__ = _delegate("__add__") - __radd__ = _delegate("__radd__") - __sub__ = _delegate("__sub__") - __rsub__ = _delegate("__rsub__") - __mul__ = _delegate("__mul__") - __rmul__ = _delegate("__rmul__") - __truediv__ = _delegate("__truediv__") - __rtruediv__ = _delegate("__rtruediv__") - __floordiv__ = _delegate("__floordiv__") - __rfloordiv__ = _delegate("__rfloordiv__") - __mod__ = _delegate("__mod__") - __rmod__ = _delegate("__rmod__") - __pow__ = _delegate("__pow__") - __rpow__ = _delegate("__rpow__") - __pos__ = _delegate("__pos__") - __neg__ = _delegate("__neg__") - __abs__ = _delegate("__abs__") - __trunc__ = _delegate("__trunc__") - __lt__ = _delegate("__lt__") - __gt__ = _delegate("__gt__") - __le__ = _delegate("__le__") - __ge__ = _delegate("__ge__") - __bool__ = _delegate("__bool__") - __ceil__ = _delegate("__ceil__") - __floor__ = _delegate("__floor__") - __round__ = _delegate("__round__") - # Python >= 3.11 - if hasattr(Fraction, "__int__"): - __int__ = _delegate("__int__") - - -def _register_loader(idx, size): - def decorator(func): - from .TiffTags import TYPES - - if func.__name__.startswith("load_"): - TYPES[idx] = func.__name__[5:].replace("_", " ") - _load_dispatch[idx] = size, func # noqa: F821 - return func - - return decorator - - -def _register_writer(idx): - def decorator(func): - _write_dispatch[idx] = func # noqa: F821 - return func - - return decorator - - -def 
_register_basic(idx_fmt_name): - from .TiffTags import TYPES - - idx, fmt, name = idx_fmt_name - TYPES[idx] = name - size = struct.calcsize(f"={fmt}") - _load_dispatch[idx] = ( # noqa: F821 - size, - lambda self, data, legacy_api=True: ( - self._unpack(f"{len(data) // size}{fmt}", data) - ), - ) - _write_dispatch[idx] = lambda self, *values: ( # noqa: F821 - b"".join(self._pack(fmt, value) for value in values) - ) - - -if TYPE_CHECKING: - _IFDv2Base = MutableMapping[int, Any] -else: - _IFDv2Base = MutableMapping - - -class ImageFileDirectory_v2(_IFDv2Base): - """This class represents a TIFF tag directory. To speed things up, we - don't decode tags unless they're asked for. - - Exposes a dictionary interface of the tags in the directory:: - - ifd = ImageFileDirectory_v2() - ifd[key] = 'Some Data' - ifd.tagtype[key] = TiffTags.ASCII - print(ifd[key]) - 'Some Data' - - Individual values are returned as the strings or numbers, sequences are - returned as tuples of the values. - - The tiff metadata type of each item is stored in a dictionary of - tag types in - :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types - are read from a tiff file, guessed from the type added, or added - manually. - - Data Structures: - - * ``self.tagtype = {}`` - - * Key: numerical TIFF tag number - * Value: integer corresponding to the data type from - :py:data:`.TiffTags.TYPES` - - .. versionadded:: 3.0.0 - - 'Internal' data structures: - - * ``self._tags_v2 = {}`` - - * Key: numerical TIFF tag number - * Value: decoded data, as tuple for multiple values - - * ``self._tagdata = {}`` - - * Key: numerical TIFF tag number - * Value: undecoded byte string from file - - * ``self._tags_v1 = {}`` - - * Key: numerical TIFF tag number - * Value: decoded data in the v1 format - - Tags will be found in the private attributes ``self._tagdata``, and in - ``self._tags_v2`` once decoded. - - ``self.legacy_api`` is a value for internal use, and shouldn't be changed - from outside code. In cooperation with - :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api`` - is true, then decoded tags will be populated into both ``_tags_v1`` and - ``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF - save routine. Tags should be read from ``_tags_v1`` if - ``legacy_api == true``. - - """ - - _load_dispatch: dict[int, Callable[[ImageFileDirectory_v2, bytes, bool], Any]] = {} - _write_dispatch: dict[int, Callable[..., Any]] = {} - - def __init__( - self, - ifh: bytes = b"II\052\0\0\0\0\0", - prefix: bytes | None = None, - group: int | None = None, - ) -> None: - """Initialize an ImageFileDirectory. - - To construct an ImageFileDirectory from a real file, pass the 8-byte - magic header to the constructor. To only set the endianness, pass it - as the 'prefix' keyword argument. - - :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets - endianness. - :param prefix: Override the endianness of the file. 
- """ - if not _accept(ifh): - msg = f"not a TIFF file (header {repr(ifh)} not valid)" - raise SyntaxError(msg) - self._prefix = prefix if prefix is not None else ifh[:2] - if self._prefix == MM: - self._endian = ">" - elif self._prefix == II: - self._endian = "<" - else: - msg = "not a TIFF IFD" - raise SyntaxError(msg) - self._bigtiff = ifh[2] == 43 - self.group = group - self.tagtype: dict[int, int] = {} - """ Dictionary of tag types """ - self.reset() - (self.next,) = ( - self._unpack("Q", ifh[8:]) if self._bigtiff else self._unpack("L", ifh[4:]) - ) - self._legacy_api = False - - prefix = property(lambda self: self._prefix) - offset = property(lambda self: self._offset) - - @property - def legacy_api(self) -> bool: - return self._legacy_api - - @legacy_api.setter - def legacy_api(self, value: bool) -> NoReturn: - msg = "Not allowing setting of legacy api" - raise Exception(msg) - - def reset(self) -> None: - self._tags_v1: dict[int, Any] = {} # will remain empty if legacy_api is false - self._tags_v2: dict[int, Any] = {} # main tag storage - self._tagdata: dict[int, bytes] = {} - self.tagtype = {} # added 2008-06-05 by Florian Hoech - self._next = None - self._offset = None - - def __str__(self) -> str: - return str(dict(self)) - - def named(self): - """ - :returns: dict of name|key: value - - Returns the complete tag dictionary, with named tags where possible. - """ - return { - TiffTags.lookup(code, self.group).name: value - for code, value in self.items() - } - - def __len__(self) -> int: - return len(set(self._tagdata) | set(self._tags_v2)) - - def __getitem__(self, tag): - if tag not in self._tags_v2: # unpack on the fly - data = self._tagdata[tag] - typ = self.tagtype[tag] - size, handler = self._load_dispatch[typ] - self[tag] = handler(self, data, self.legacy_api) # check type - val = self._tags_v2[tag] - if self.legacy_api and not isinstance(val, (tuple, bytes)): - val = (val,) - return val - - def __contains__(self, tag): - return tag in self._tags_v2 or tag in self._tagdata - - def __setitem__(self, tag, value): - self._setitem(tag, value, self.legacy_api) - - def _setitem(self, tag, value, legacy_api): - basetypes = (Number, bytes, str) - - info = TiffTags.lookup(tag, self.group) - values = [value] if isinstance(value, basetypes) else value - - if tag not in self.tagtype: - if info.type: - self.tagtype[tag] = info.type - else: - self.tagtype[tag] = TiffTags.UNDEFINED - if all(isinstance(v, IFDRational) for v in values): - self.tagtype[tag] = ( - TiffTags.RATIONAL - if all(v >= 0 for v in values) - else TiffTags.SIGNED_RATIONAL - ) - elif all(isinstance(v, int) for v in values): - if all(0 <= v < 2**16 for v in values): - self.tagtype[tag] = TiffTags.SHORT - elif all(-(2**15) < v < 2**15 for v in values): - self.tagtype[tag] = TiffTags.SIGNED_SHORT - else: - self.tagtype[tag] = ( - TiffTags.LONG - if all(v >= 0 for v in values) - else TiffTags.SIGNED_LONG - ) - elif all(isinstance(v, float) for v in values): - self.tagtype[tag] = TiffTags.DOUBLE - elif all(isinstance(v, str) for v in values): - self.tagtype[tag] = TiffTags.ASCII - elif all(isinstance(v, bytes) for v in values): - self.tagtype[tag] = TiffTags.BYTE - - if self.tagtype[tag] == TiffTags.UNDEFINED: - values = [ - v.encode("ascii", "replace") if isinstance(v, str) else v - for v in values - ] - elif self.tagtype[tag] == TiffTags.RATIONAL: - values = [float(v) if isinstance(v, int) else v for v in values] - - is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict) - if not is_ifd: - values = 
tuple(info.cvt_enum(value) for value in values) - - dest = self._tags_v1 if legacy_api else self._tags_v2 - - # Three branches: - # Spec'd length == 1, Actual length 1, store as element - # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed. - # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple. - # Don't mess with the legacy api, since it's frozen. - if not is_ifd and ( - (info.length == 1) - or self.tagtype[tag] == TiffTags.BYTE - or (info.length is None and len(values) == 1 and not legacy_api) - ): - # Don't mess with the legacy api, since it's frozen. - if legacy_api and self.tagtype[tag] in [ - TiffTags.RATIONAL, - TiffTags.SIGNED_RATIONAL, - ]: # rationals - values = (values,) - try: - (dest[tag],) = values - except ValueError: - # We've got a builtin tag with 1 expected entry - warnings.warn( - f"Metadata Warning, tag {tag} had too many entries: " - f"{len(values)}, expected 1" - ) - dest[tag] = values[0] - - else: - # Spec'd length > 1 or undefined - # Unspec'd, and length > 1 - dest[tag] = values - - def __delitem__(self, tag: int) -> None: - self._tags_v2.pop(tag, None) - self._tags_v1.pop(tag, None) - self._tagdata.pop(tag, None) - - def __iter__(self): - return iter(set(self._tagdata) | set(self._tags_v2)) - - def _unpack(self, fmt, data): - return struct.unpack(self._endian + fmt, data) - - def _pack(self, fmt, *values): - return struct.pack(self._endian + fmt, *values) - - list( - map( - _register_basic, - [ - (TiffTags.SHORT, "H", "short"), - (TiffTags.LONG, "L", "long"), - (TiffTags.SIGNED_BYTE, "b", "signed byte"), - (TiffTags.SIGNED_SHORT, "h", "signed short"), - (TiffTags.SIGNED_LONG, "l", "signed long"), - (TiffTags.FLOAT, "f", "float"), - (TiffTags.DOUBLE, "d", "double"), - (TiffTags.IFD, "L", "long"), - (TiffTags.LONG8, "Q", "long8"), - ], - ) - ) - - @_register_loader(1, 1) # Basic type, except for the legacy API. - def load_byte(self, data, legacy_api=True): - return data - - @_register_writer(1) # Basic type, except for the legacy API. 
- def write_byte(self, data): - if isinstance(data, IFDRational): - data = int(data) - if isinstance(data, int): - data = bytes((data,)) - return data - - @_register_loader(2, 1) - def load_string(self, data, legacy_api=True): - if data.endswith(b"\0"): - data = data[:-1] - return data.decode("latin-1", "replace") - - @_register_writer(2) - def write_string(self, value): - # remerge of https://github.com/python-pillow/Pillow/pull/1416 - if isinstance(value, int): - value = str(value) - if not isinstance(value, bytes): - value = value.encode("ascii", "replace") - return value + b"\0" - - @_register_loader(5, 8) - def load_rational(self, data, legacy_api=True): - vals = self._unpack(f"{len(data) // 4}L", data) - - def combine(a, b): - return (a, b) if legacy_api else IFDRational(a, b) - - return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) - - @_register_writer(5) - def write_rational(self, *values): - return b"".join( - self._pack("2L", *_limit_rational(frac, 2**32 - 1)) for frac in values - ) - - @_register_loader(7, 1) - def load_undefined(self, data, legacy_api=True): - return data - - @_register_writer(7) - def write_undefined(self, value): - if isinstance(value, IFDRational): - value = int(value) - if isinstance(value, int): - value = str(value).encode("ascii", "replace") - return value - - @_register_loader(10, 8) - def load_signed_rational(self, data, legacy_api=True): - vals = self._unpack(f"{len(data) // 4}l", data) - - def combine(a, b): - return (a, b) if legacy_api else IFDRational(a, b) - - return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) - - @_register_writer(10) - def write_signed_rational(self, *values): - return b"".join( - self._pack("2l", *_limit_signed_rational(frac, 2**31 - 1, -(2**31))) - for frac in values - ) - - def _ensure_read(self, fp, size): - ret = fp.read(size) - if len(ret) != size: - msg = ( - "Corrupt EXIF data. " - f"Expecting to read {size} bytes but only got {len(ret)}. " - ) - raise OSError(msg) - return ret - - def load(self, fp): - self.reset() - self._offset = fp.tell() - - try: - tag_count = ( - self._unpack("Q", self._ensure_read(fp, 8)) - if self._bigtiff - else self._unpack("H", self._ensure_read(fp, 2)) - )[0] - for i in range(tag_count): - tag, typ, count, data = ( - self._unpack("HHQ8s", self._ensure_read(fp, 20)) - if self._bigtiff - else self._unpack("HHL4s", self._ensure_read(fp, 12)) - ) - - tagname = TiffTags.lookup(tag, self.group).name - typname = TYPES.get(typ, "unknown") - msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})" - - try: - unit_size, handler = self._load_dispatch[typ] - except KeyError: - logger.debug("%s - unsupported type %s", msg, typ) - continue # ignore unsupported type - size = count * unit_size - if size > (8 if self._bigtiff else 4): - here = fp.tell() - (offset,) = self._unpack("Q" if self._bigtiff else "L", data) - msg += f" Tag Location: {here} - Data Location: {offset}" - fp.seek(offset) - data = ImageFile._safe_read(fp, size) - fp.seek(here) - else: - data = data[:size] - - if len(data) != size: - warnings.warn( - "Possibly corrupt EXIF data. " - f"Expecting to read {size} bytes but only got {len(data)}." 
- f" Skipping tag {tag}" - ) - logger.debug(msg) - continue - - if not data: - logger.debug(msg) - continue - - self._tagdata[tag] = data - self.tagtype[tag] = typ - - msg += " - value: " + ( - "" % size if size > 32 else repr(data) - ) - logger.debug(msg) - - (self.next,) = ( - self._unpack("Q", self._ensure_read(fp, 8)) - if self._bigtiff - else self._unpack("L", self._ensure_read(fp, 4)) - ) - except OSError as msg: - warnings.warn(str(msg)) - return - - def tobytes(self, offset=0): - # FIXME What about tagdata? - result = self._pack("H", len(self._tags_v2)) - - entries = [] - offset = offset + len(result) + len(self._tags_v2) * 12 + 4 - stripoffsets = None - - # pass 1: convert tags to binary format - # always write tags in ascending order - for tag, value in sorted(self._tags_v2.items()): - if tag == STRIPOFFSETS: - stripoffsets = len(entries) - typ = self.tagtype.get(tag) - logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value)) - is_ifd = typ == TiffTags.LONG and isinstance(value, dict) - if is_ifd: - if self._endian == "<": - ifh = b"II\x2A\x00\x08\x00\x00\x00" - else: - ifh = b"MM\x00\x2A\x00\x00\x00\x08" - ifd = ImageFileDirectory_v2(ifh, group=tag) - values = self._tags_v2[tag] - for ifd_tag, ifd_value in values.items(): - ifd[ifd_tag] = ifd_value - data = ifd.tobytes(offset) - else: - values = value if isinstance(value, tuple) else (value,) - data = self._write_dispatch[typ](self, *values) - - tagname = TiffTags.lookup(tag, self.group).name - typname = "ifd" if is_ifd else TYPES.get(typ, "unknown") - msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})" - msg += " - value: " + ( - "" % len(data) if len(data) >= 16 else str(values) - ) - logger.debug(msg) - - # count is sum of lengths for string and arbitrary data - if is_ifd: - count = 1 - elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]: - count = len(data) - else: - count = len(values) - # figure out if data fits into the entry - if len(data) <= 4: - entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) - else: - entries.append((tag, typ, count, self._pack("L", offset), data)) - offset += (len(data) + 1) // 2 * 2 # pad to word - - # update strip offset data to point beyond auxiliary data - if stripoffsets is not None: - tag, typ, count, value, data = entries[stripoffsets] - if data: - msg = "multistrip support not yet implemented" - raise NotImplementedError(msg) - value = self._pack("L", self._unpack("L", value)[0] + offset) - entries[stripoffsets] = tag, typ, count, value, data - - # pass 2: write entries to file - for tag, typ, count, value, data in entries: - logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data)) - result += self._pack("HHL4s", tag, typ, count, value) - - # -- overwrite here for multi-page -- - result += b"\0\0\0\0" # end of entries - - # pass 3: write auxiliary data to file - for tag, typ, count, value, data in entries: - result += data - if len(data) & 1: - result += b"\0" - - return result - - def save(self, fp): - if fp.tell() == 0: # skip TIFF header on subsequent pages - # tiff header -- PIL always starts the first IFD at offset 8 - fp.write(self._prefix + self._pack("HL", 42, 8)) - - offset = fp.tell() - result = self.tobytes(offset) - fp.write(result) - return offset + len(result) - - -ImageFileDirectory_v2._load_dispatch = _load_dispatch -ImageFileDirectory_v2._write_dispatch = _write_dispatch -for idx, name in TYPES.items(): - name = name.replace(" ", "_") - setattr(ImageFileDirectory_v2, f"load_{name}", _load_dispatch[idx][1]) - 
setattr(ImageFileDirectory_v2, f"write_{name}", _write_dispatch[idx]) -del _load_dispatch, _write_dispatch, idx, name - - -# Legacy ImageFileDirectory support. -class ImageFileDirectory_v1(ImageFileDirectory_v2): - """This class represents the **legacy** interface to a TIFF tag directory. - - Exposes a dictionary interface of the tags in the directory:: - - ifd = ImageFileDirectory_v1() - ifd[key] = 'Some Data' - ifd.tagtype[key] = TiffTags.ASCII - print(ifd[key]) - ('Some Data',) - - Also contains a dictionary of tag types as read from the tiff image file, - :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. - - Values are returned as a tuple. - - .. deprecated:: 3.0.0 - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._legacy_api = True - - tags = property(lambda self: self._tags_v1) - tagdata = property(lambda self: self._tagdata) - - # defined in ImageFileDirectory_v2 - tagtype: dict[int, int] - """Dictionary of tag types""" - - @classmethod - def from_v2(cls, original): - """Returns an - :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` - instance with the same data as is contained in the original - :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` - instance. - - :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` - - """ - - ifd = cls(prefix=original.prefix) - ifd._tagdata = original._tagdata - ifd.tagtype = original.tagtype - ifd.next = original.next # an indicator for multipage tiffs - return ifd - - def to_v2(self) -> ImageFileDirectory_v2: - """Returns an - :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` - instance with the same data as is contained in the original - :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` - instance. - - :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` - - """ - - ifd = ImageFileDirectory_v2(prefix=self.prefix) - ifd._tagdata = dict(self._tagdata) - ifd.tagtype = dict(self.tagtype) - ifd._tags_v2 = dict(self._tags_v2) - return ifd - - def __contains__(self, tag): - return tag in self._tags_v1 or tag in self._tagdata - - def __len__(self) -> int: - return len(set(self._tagdata) | set(self._tags_v1)) - - def __iter__(self): - return iter(set(self._tagdata) | set(self._tags_v1)) - - def __setitem__(self, tag, value): - for legacy_api in (False, True): - self._setitem(tag, value, legacy_api) - - def __getitem__(self, tag): - if tag not in self._tags_v1: # unpack on the fly - data = self._tagdata[tag] - typ = self.tagtype[tag] - size, handler = self._load_dispatch[typ] - for legacy in (False, True): - self._setitem(tag, handler(self, data, legacy), legacy) - val = self._tags_v1[tag] - if not isinstance(val, (tuple, bytes)): - val = (val,) - return val - - -# undone -- switch this pointer when IFD_LEGACY_API == False -ImageFileDirectory = ImageFileDirectory_v1 - - -## -# Image plugin for TIFF files. 
- - -class TiffImageFile(ImageFile.ImageFile): - format = "TIFF" - format_description = "Adobe TIFF" - _close_exclusive_fp_after_loading = False - - def __init__(self, fp=None, filename=None): - self.tag_v2 = None - """ Image file directory (tag dictionary) """ - - self.tag = None - """ Legacy tag entries """ - - super().__init__(fp, filename) - - def _open(self) -> None: - """Open the first image in a TIFF file""" - - # Header - ifh = self.fp.read(8) - if ifh[2] == 43: - ifh += self.fp.read(8) - - self.tag_v2 = ImageFileDirectory_v2(ifh) - - # legacy IFD entries will be filled in later - self.ifd = None - - # setup frame pointers - self.__first = self.__next = self.tag_v2.next - self.__frame = -1 - self._fp = self.fp - self._frame_pos: list[int] = [] - self._n_frames: int | None = None - - logger.debug("*** TiffImageFile._open ***") - logger.debug("- __first: %s", self.__first) - logger.debug("- ifh: %s", repr(ifh)) # Use repr to avoid str(bytes) - - # and load the first frame - self._seek(0) - - @property - def n_frames(self): - if self._n_frames is None: - current = self.tell() - self._seek(len(self._frame_pos)) - while self._n_frames is None: - self._seek(self.tell() + 1) - self.seek(current) - return self._n_frames - - def seek(self, frame: int) -> None: - """Select a given frame as current image""" - if not self._seek_check(frame): - return - self._seek(frame) - # Create a new core image object on second and - # subsequent frames in the image. Image may be - # different size/mode. - Image._decompression_bomb_check(self.size) - self.im = Image.core.new(self.mode, self.size) - - def _seek(self, frame: int) -> None: - self.fp = self._fp - - # reset buffered io handle in case fp - # was passed to libtiff, invalidating the buffer - self.fp.tell() - - while len(self._frame_pos) <= frame: - if not self.__next: - msg = "no more images in TIFF file" - raise EOFError(msg) - logger.debug( - "Seeking to frame %s, on frame %s, __next %s, location: %s", - frame, - self.__frame, - self.__next, - self.fp.tell(), - ) - if self.__next >= 2**63: - msg = "Unable to seek to frame" - raise ValueError(msg) - self.fp.seek(self.__next) - self._frame_pos.append(self.__next) - logger.debug("Loading tags, location: %s", self.fp.tell()) - self.tag_v2.load(self.fp) - if self.tag_v2.next in self._frame_pos: - # This IFD has already been processed - # Declare this to be the end of the image - self.__next = 0 - else: - self.__next = self.tag_v2.next - if self.__next == 0: - self._n_frames = frame + 1 - if len(self._frame_pos) == 1: - self.is_animated = self.__next != 0 - self.__frame += 1 - self.fp.seek(self._frame_pos[frame]) - self.tag_v2.load(self.fp) - if XMP in self.tag_v2: - self.info["xmp"] = self.tag_v2[XMP] - elif "xmp" in self.info: - del self.info["xmp"] - self._reload_exif() - # fill the legacy tag/ifd entries - self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) - self.__frame = frame - self._setup() - - def tell(self) -> int: - """Return the current frame number""" - return self.__frame - - def get_photoshop_blocks(self): - """ - Returns a dictionary of Photoshop "Image Resource Blocks". - The keys are the image resource ID. For more information, see - https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727 - - :returns: Photoshop "Image Resource Blocks" in a dictionary. 
- """ - blocks = {} - val = self.tag_v2.get(ExifTags.Base.ImageResources) - if val: - while val[:4] == b"8BIM": - id = i16(val[4:6]) - n = math.ceil((val[6] + 1) / 2) * 2 - size = i32(val[6 + n : 10 + n]) - data = val[10 + n : 10 + n + size] - blocks[id] = {"data": data} - - val = val[math.ceil((10 + n + size) / 2) * 2 :] - return blocks - - def load(self): - if self.tile and self.use_load_libtiff: - return self._load_libtiff() - return super().load() - - def load_end(self) -> None: - # allow closing if we're on the first frame, there's no next - # This is the ImageFile.load path only, libtiff specific below. - if not self.is_animated: - self._close_exclusive_fp_after_loading = True - - # reset buffered io handle in case fp - # was passed to libtiff, invalidating the buffer - self.fp.tell() - - # load IFD data from fp before it is closed - exif = self.getexif() - for key in TiffTags.TAGS_V2_GROUPS: - if key not in exif: - continue - exif.get_ifd(key) - - ImageOps.exif_transpose(self, in_place=True) - if ExifTags.Base.Orientation in self.tag_v2: - del self.tag_v2[ExifTags.Base.Orientation] - - def _load_libtiff(self): - """Overload method triggered when we detect a compressed tiff - Calls out to libtiff""" - - Image.Image.load(self) - - self.load_prepare() - - if not len(self.tile) == 1: - msg = "Not exactly one tile" - raise OSError(msg) - - # (self._compression, (extents tuple), - # 0, (rawmode, self._compression, fp)) - extents = self.tile[0][1] - args = list(self.tile[0][3]) - - # To be nice on memory footprint, if there's a - # file descriptor, use that instead of reading - # into a string in python. - try: - fp = hasattr(self.fp, "fileno") and self.fp.fileno() - # flush the file descriptor, prevents error on pypy 2.4+ - # should also eliminate the need for fp.tell - # in _seek - if hasattr(self.fp, "flush"): - self.fp.flush() - except OSError: - # io.BytesIO have a fileno, but returns an OSError if - # it doesn't use a file descriptor. - fp = False - - if fp: - args[2] = fp - - decoder = Image._getdecoder( - self.mode, "libtiff", tuple(args), self.decoderconfig - ) - try: - decoder.setimage(self.im, extents) - except ValueError as e: - msg = "Couldn't set the image" - raise OSError(msg) from e - - close_self_fp = self._exclusive_fp and not self.is_animated - if hasattr(self.fp, "getvalue"): - # We've got a stringio like thing passed in. Yay for all in memory. - # The decoder needs the entire file in one shot, so there's not - # a lot we can do here other than give it the entire file. - # unless we could do something like get the address of the - # underlying string for stringio. - # - # Rearranging for supporting byteio items, since they have a fileno - # that returns an OSError if there's no underlying fp. Easier to - # deal with here by reordering. - logger.debug("have getvalue. just sending in a string from getvalue") - n, err = decoder.decode(self.fp.getvalue()) - elif fp: - # we've got a actual file on disk, pass in the fp. - logger.debug("have fileno, calling fileno version of the decoder.") - if not close_self_fp: - self.fp.seek(0) - # 4 bytes, otherwise the trace might error out - n, err = decoder.decode(b"fpfp") - else: - # we have something else. - logger.debug("don't have fileno or getvalue. just reading") - self.fp.seek(0) - # UNDONE -- so much for that buffer size thing. 
- n, err = decoder.decode(self.fp.read()) - - self.tile = [] - self.readonly = 0 - - self.load_end() - - if close_self_fp: - self.fp.close() - self.fp = None # might be shared - - if err < 0: - raise OSError(err) - - return Image.Image.load(self) - - def _setup(self): - """Setup this image object based on current tags""" - - if 0xBC01 in self.tag_v2: - msg = "Windows Media Photo files not yet supported" - raise OSError(msg) - - # extract relevant tags - self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] - self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) - - # photometric is a required tag, but not everyone is reading - # the specification - photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) - - # old style jpeg compression images most certainly are YCbCr - if self._compression == "tiff_jpeg": - photo = 6 - - fillorder = self.tag_v2.get(FILLORDER, 1) - - logger.debug("*** Summary ***") - logger.debug("- compression: %s", self._compression) - logger.debug("- photometric_interpretation: %s", photo) - logger.debug("- planar_configuration: %s", self._planar_configuration) - logger.debug("- fill_order: %s", fillorder) - logger.debug("- YCbCr subsampling: %s", self.tag.get(YCBCRSUBSAMPLING)) - - # size - xsize = int(self.tag_v2.get(IMAGEWIDTH)) - ysize = int(self.tag_v2.get(IMAGELENGTH)) - self._size = xsize, ysize - - logger.debug("- size: %s", self.size) - - sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,)) - if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1: - # SAMPLEFORMAT is properly per band, so an RGB image will - # be (1,1,1). But, we don't support per band pixel types, - # and anything more than one band is a uint8. So, just - # take the first element. Revisit this if adding support - # for more exotic images. - sample_format = (1,) - - bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,)) - extra_tuple = self.tag_v2.get(EXTRASAMPLES, ()) - if photo in (2, 6, 8): # RGB, YCbCr, LAB - bps_count = 3 - elif photo == 5: # CMYK - bps_count = 4 - else: - bps_count = 1 - bps_count += len(extra_tuple) - bps_actual_count = len(bps_tuple) - samples_per_pixel = self.tag_v2.get( - SAMPLESPERPIXEL, - 3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1, - ) - - if samples_per_pixel > MAX_SAMPLESPERPIXEL: - # DOS check, samples_per_pixel can be a Long, and we extend the tuple below - logger.error( - "More samples per pixel than can be decoded: %s", samples_per_pixel - ) - msg = "Invalid value for samples per pixel" - raise SyntaxError(msg) - - if samples_per_pixel < bps_actual_count: - # If a file has more values in bps_tuple than expected, - # remove the excess. - bps_tuple = bps_tuple[:samples_per_pixel] - elif samples_per_pixel > bps_actual_count and bps_actual_count == 1: - # If a file has only one value in bps_tuple, when it should have more, - # presume it is the same number of bits for all of the samples. 
- bps_tuple = bps_tuple * samples_per_pixel - - if len(bps_tuple) != samples_per_pixel: - msg = "unknown data organization" - raise SyntaxError(msg) - - # mode: check photometric interpretation and bits per pixel - key = ( - self.tag_v2.prefix, - photo, - sample_format, - fillorder, - bps_tuple, - extra_tuple, - ) - logger.debug("format key: %s", key) - try: - self._mode, rawmode = OPEN_INFO[key] - except KeyError as e: - logger.debug("- unsupported format") - msg = "unknown pixel mode" - raise SyntaxError(msg) from e - - logger.debug("- raw mode: %s", rawmode) - logger.debug("- pil mode: %s", self.mode) - - self.info["compression"] = self._compression - - xres = self.tag_v2.get(X_RESOLUTION, 1) - yres = self.tag_v2.get(Y_RESOLUTION, 1) - - if xres and yres: - resunit = self.tag_v2.get(RESOLUTION_UNIT) - if resunit == 2: # dots per inch - self.info["dpi"] = (xres, yres) - elif resunit == 3: # dots per centimeter. convert to dpi - self.info["dpi"] = (xres * 2.54, yres * 2.54) - elif resunit is None: # used to default to 1, but now 2) - self.info["dpi"] = (xres, yres) - # For backward compatibility, - # we also preserve the old behavior - self.info["resolution"] = xres, yres - else: # No absolute unit of measurement - self.info["resolution"] = xres, yres - - # build tile descriptors - x = y = layer = 0 - self.tile = [] - self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw" - if self.use_load_libtiff: - # Decoder expects entire file as one tile. - # There's a buffer size limit in load (64k) - # so large g4 images will fail if we use that - # function. - # - # Setup the one tile for the whole image, then - # use the _load_libtiff function. - - # libtiff handles the fillmode for us, so 1;IR should - # actually be 1;I. Including the R double reverses the - # bits, so stripes of the image are reversed. See - # https://github.com/python-pillow/Pillow/issues/279 - if fillorder == 2: - # Replace fillorder with fillorder=1 - key = key[:3] + (1,) + key[4:] - logger.debug("format key: %s", key) - # this should always work, since all the - # fillorder==2 modes have a corresponding - # fillorder=1 mode - self._mode, rawmode = OPEN_INFO[key] - # libtiff always returns the bytes in native order. - # we're expecting image byte order. So, if the rawmode - # contains I;16, we need to convert from native to image - # byte order. 
- if rawmode == "I;16": - rawmode = "I;16N" - if ";16B" in rawmode: - rawmode = rawmode.replace(";16B", ";16N") - if ";16L" in rawmode: - rawmode = rawmode.replace(";16L", ";16N") - - # YCbCr images with new jpeg compression with pixels in one plane - # unpacked straight into RGB values - if ( - photo == 6 - and self._compression == "jpeg" - and self._planar_configuration == 1 - ): - rawmode = "RGB" - - # Offset in the tile tuple is 0, we go from 0,0 to - # w,h, and we only do this once -- eds - a = (rawmode, self._compression, False, self.tag_v2.offset) - self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a)) - - elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2: - # striped image - if STRIPOFFSETS in self.tag_v2: - offsets = self.tag_v2[STRIPOFFSETS] - h = self.tag_v2.get(ROWSPERSTRIP, ysize) - w = self.size[0] - else: - # tiled image - offsets = self.tag_v2[TILEOFFSETS] - w = self.tag_v2.get(TILEWIDTH) - h = self.tag_v2.get(TILELENGTH) - - for offset in offsets: - if x + w > xsize: - stride = w * sum(bps_tuple) / 8 # bytes per line - else: - stride = 0 - - tile_rawmode = rawmode - if self._planar_configuration == 2: - # each band on it's own layer - tile_rawmode = rawmode[layer] - # adjust stride width accordingly - stride /= bps_count - - a = (tile_rawmode, int(stride), 1) - self.tile.append( - ( - self._compression, - (x, y, min(x + w, xsize), min(y + h, ysize)), - offset, - a, - ) - ) - x = x + w - if x >= self.size[0]: - x, y = 0, y + h - if y >= self.size[1]: - x = y = 0 - layer += 1 - else: - logger.debug("- unsupported data organization") - msg = "unknown data organization" - raise SyntaxError(msg) - - # Fix up info. - if ICCPROFILE in self.tag_v2: - self.info["icc_profile"] = self.tag_v2[ICCPROFILE] - - # fixup palette descriptor - - if self.mode in ["P", "PA"]: - palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] - self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) - - -# -# -------------------------------------------------------------------- -# Write TIFF files - -# little endian is default except for image modes with -# explicit big endian byte-order - -SAVE_INFO = { - # mode => rawmode, byteorder, photometrics, - # sampleformat, bitspersample, extra - "1": ("1", II, 1, 1, (1,), None), - "L": ("L", II, 1, 1, (8,), None), - "LA": ("LA", II, 1, 1, (8, 8), 2), - "P": ("P", II, 3, 1, (8,), None), - "PA": ("PA", II, 3, 1, (8, 8), 2), - "I": ("I;32S", II, 1, 2, (32,), None), - "I;16": ("I;16", II, 1, 1, (16,), None), - "I;16S": ("I;16S", II, 1, 2, (16,), None), - "F": ("F;32F", II, 1, 3, (32,), None), - "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), - "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), - "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), - "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), - "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), - "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), - "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), - "I;16B": ("I;16B", MM, 1, 1, (16,), None), - "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), - "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), -} - - -def _save(im, fp, filename): - try: - rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] - except KeyError as e: - msg = f"cannot write mode {im.mode} as TIFF" - raise OSError(msg) from e - - ifd = ImageFileDirectory_v2(prefix=prefix) - - encoderinfo = im.encoderinfo - encoderconfig = im.encoderconfig - try: - compression = encoderinfo["compression"] - except KeyError: - compression = im.info.get("compression") - if isinstance(compression, int): - # compression value may be 
from BMP. Ignore it - compression = None - if compression is None: - compression = "raw" - elif compression == "tiff_jpeg": - # OJPEG is obsolete, so use new-style JPEG compression instead - compression = "jpeg" - elif compression == "tiff_deflate": - compression = "tiff_adobe_deflate" - - libtiff = WRITE_LIBTIFF or compression != "raw" - - # required for color libtiff images - ifd[PLANAR_CONFIGURATION] = 1 - - ifd[IMAGEWIDTH] = im.size[0] - ifd[IMAGELENGTH] = im.size[1] - - # write any arbitrary tags passed in as an ImageFileDirectory - if "tiffinfo" in encoderinfo: - info = encoderinfo["tiffinfo"] - elif "exif" in encoderinfo: - info = encoderinfo["exif"] - if isinstance(info, bytes): - exif = Image.Exif() - exif.load(info) - info = exif - else: - info = {} - logger.debug("Tiffinfo Keys: %s", list(info)) - if isinstance(info, ImageFileDirectory_v1): - info = info.to_v2() - for key in info: - if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS: - ifd[key] = info.get_ifd(key) - else: - ifd[key] = info.get(key) - try: - ifd.tagtype[key] = info.tagtype[key] - except Exception: - pass # might not be an IFD. Might not have populated type - - legacy_ifd = {} - if hasattr(im, "tag"): - legacy_ifd = im.tag.to_v2() - - supplied_tags = {**legacy_ifd, **getattr(im, "tag_v2", {})} - for tag in ( - # IFD offset that may not be correct in the saved image - EXIFIFD, - # Determined by the image format and should not be copied from legacy_ifd. - SAMPLEFORMAT, - ): - if tag in supplied_tags: - del supplied_tags[tag] - - # additions written by Greg Couch, gregc@cgl.ucsf.edu - # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com - if hasattr(im, "tag_v2"): - # preserve tags from original TIFF image file - for key in ( - RESOLUTION_UNIT, - X_RESOLUTION, - Y_RESOLUTION, - IPTC_NAA_CHUNK, - PHOTOSHOP_CHUNK, - XMP, - ): - if key in im.tag_v2: - if key == IPTC_NAA_CHUNK and im.tag_v2.tagtype[key] not in ( - TiffTags.BYTE, - TiffTags.UNDEFINED, - ): - del supplied_tags[key] - else: - ifd[key] = im.tag_v2[key] - ifd.tagtype[key] = im.tag_v2.tagtype[key] - - # preserve ICC profile (should also work when saving other formats - # which support profiles as TIFF) -- 2008-06-06 Florian Hoech - icc = encoderinfo.get("icc_profile", im.info.get("icc_profile")) - if icc: - ifd[ICCPROFILE] = icc - - for key, name in [ - (IMAGEDESCRIPTION, "description"), - (X_RESOLUTION, "resolution"), - (Y_RESOLUTION, "resolution"), - (X_RESOLUTION, "x_resolution"), - (Y_RESOLUTION, "y_resolution"), - (RESOLUTION_UNIT, "resolution_unit"), - (SOFTWARE, "software"), - (DATE_TIME, "date_time"), - (ARTIST, "artist"), - (COPYRIGHT, "copyright"), - ]: - if name in encoderinfo: - ifd[key] = encoderinfo[name] - - dpi = encoderinfo.get("dpi") - if dpi: - ifd[RESOLUTION_UNIT] = 2 - ifd[X_RESOLUTION] = dpi[0] - ifd[Y_RESOLUTION] = dpi[1] - - if bits != (1,): - ifd[BITSPERSAMPLE] = bits - if len(bits) != 1: - ifd[SAMPLESPERPIXEL] = len(bits) - if extra is not None: - ifd[EXTRASAMPLES] = extra - if format != 1: - ifd[SAMPLEFORMAT] = format - - if PHOTOMETRIC_INTERPRETATION not in ifd: - ifd[PHOTOMETRIC_INTERPRETATION] = photo - elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0: - if im.mode == "1": - inverted_im = im.copy() - px = inverted_im.load() - for y in range(inverted_im.height): - for x in range(inverted_im.width): - px[x, y] = 0 if px[x, y] == 255 else 255 - im = inverted_im - else: - im = ImageOps.invert(im) - - if im.mode in ["P", "PA"]: - lut = im.im.getpalette("RGB", "RGB;L") - colormap = [] 
- colors = len(lut) // 3 - for i in range(3): - colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]] - colormap += [0] * (256 - colors) - ifd[COLORMAP] = colormap - # data orientation - w, h = ifd[IMAGEWIDTH], ifd[IMAGELENGTH] - stride = len(bits) * ((w * bits[0] + 7) // 8) - if ROWSPERSTRIP not in ifd: - # aim for given strip size (64 KB by default) when using libtiff writer - if libtiff: - im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE) - rows_per_strip = 1 if stride == 0 else min(im_strip_size // stride, h) - # JPEG encoder expects multiple of 8 rows - if compression == "jpeg": - rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, h) - else: - rows_per_strip = h - if rows_per_strip == 0: - rows_per_strip = 1 - ifd[ROWSPERSTRIP] = rows_per_strip - strip_byte_counts = 1 if stride == 0 else stride * ifd[ROWSPERSTRIP] - strips_per_image = (h + ifd[ROWSPERSTRIP] - 1) // ifd[ROWSPERSTRIP] - if strip_byte_counts >= 2**16: - ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG - ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + ( - stride * h - strip_byte_counts * (strips_per_image - 1), - ) - ifd[STRIPOFFSETS] = tuple( - range(0, strip_byte_counts * strips_per_image, strip_byte_counts) - ) # this is adjusted by IFD writer - # no compression by default: - ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) - - if im.mode == "YCbCr": - for tag, value in { - YCBCRSUBSAMPLING: (1, 1), - REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255), - }.items(): - ifd.setdefault(tag, value) - - blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS] - if libtiff: - if "quality" in encoderinfo: - quality = encoderinfo["quality"] - if not isinstance(quality, int) or quality < 0 or quality > 100: - msg = "Invalid quality setting" - raise ValueError(msg) - if compression != "jpeg": - msg = "quality setting only supported for 'jpeg' compression" - raise ValueError(msg) - ifd[JPEGQUALITY] = quality - - logger.debug("Saving using libtiff encoder") - logger.debug("Items: %s", sorted(ifd.items())) - _fp = 0 - if hasattr(fp, "fileno"): - try: - fp.seek(0) - _fp = os.dup(fp.fileno()) - except io.UnsupportedOperation: - pass - - # optional types for non core tags - types = {} - # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library - # based on the data in the strip. - # OSUBFILETYPE is deprecated. - # The other tags expect arrays with a certain length (fixed or depending on - # BITSPERSAMPLE, etc), passing arrays with a different length will result in - # segfaults. Block these tags until we add extra validation. - # SUBIFD may also cause a segfault. - blocklist += [ - OSUBFILETYPE, - REFERENCEBLACKWHITE, - STRIPBYTECOUNTS, - STRIPOFFSETS, - TRANSFERFUNCTION, - SUBIFD, - ] - - # bits per sample is a single short in the tiff directory, not a list. - atts = {BITSPERSAMPLE: bits[0]} - # Merge the ones that we have with (optional) more bits from - # the original file, e.g x,y resolution so that we can - # save(load('')) == original file. - for tag, value in itertools.chain(ifd.items(), supplied_tags.items()): - # Libtiff can only process certain core items without adding - # them to the custom dictionary. - # Custom items are supported for int, float, unicode, string and byte - # values. Other types and tuples require a tagtype. 
- if tag not in TiffTags.LIBTIFF_CORE: - if not getattr(Image.core, "libtiff_support_custom_tags", False): - continue - - if tag in ifd.tagtype: - types[tag] = ifd.tagtype[tag] - elif not (isinstance(value, (int, float, str, bytes))): - continue - else: - type = TiffTags.lookup(tag).type - if type: - types[tag] = type - if tag not in atts and tag not in blocklist: - if isinstance(value, str): - atts[tag] = value.encode("ascii", "replace") + b"\0" - elif isinstance(value, IFDRational): - atts[tag] = float(value) - else: - atts[tag] = value - - if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1: - atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0] - - logger.debug("Converted items: %s", sorted(atts.items())) - - # libtiff always expects the bytes in native order. - # we're storing image byte order. So, if the rawmode - # contains I;16, we need to convert from native to image - # byte order. - if im.mode in ("I;16B", "I;16"): - rawmode = "I;16N" - - # Pass tags as sorted list so that the tags are set in a fixed order. - # This is required by libtiff for some tags. For example, the JPEGQUALITY - # pseudo tag requires that the COMPRESS tag was already set. - tags = list(atts.items()) - tags.sort() - a = (rawmode, compression, _fp, filename, tags, types) - encoder = Image._getencoder(im.mode, "libtiff", a, encoderconfig) - encoder.setimage(im.im, (0, 0) + im.size) - while True: - # undone, change to self.decodermaxblock: - errcode, data = encoder.encode(16 * 1024)[1:] - if not _fp: - fp.write(data) - if errcode: - break - if _fp: - try: - os.close(_fp) - except OSError: - pass - if errcode < 0: - msg = f"encoder error {errcode} when writing image file" - raise OSError(msg) - - else: - for tag in blocklist: - del ifd[tag] - offset = ifd.save(fp) - - ImageFile._save( - im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))] - ) - - # -- helper for multi-page save -- - if "_debug_multipage" in encoderinfo: - # just to access o32 and o16 (using correct byte order) - im._debug_multipage = ifd - - -class AppendingTiffWriter: - fieldSizes = [ - 0, # None - 1, # byte - 1, # ascii - 2, # short - 4, # long - 8, # rational - 1, # sbyte - 1, # undefined - 2, # sshort - 4, # slong - 8, # srational - 4, # float - 8, # double - 4, # ifd - 2, # unicode - 4, # complex - 8, # long8 - ] - - Tags = { - 273, # StripOffsets - 288, # FreeOffsets - 324, # TileOffsets - 519, # JPEGQTables - 520, # JPEGDCTables - 521, # JPEGACTables - } - - def __init__(self, fn, new=False): - if hasattr(fn, "read"): - self.f = fn - self.close_fp = False - else: - self.name = fn - self.close_fp = True - try: - self.f = open(fn, "w+b" if new else "r+b") - except OSError: - self.f = open(fn, "w+b") - self.beginning = self.f.tell() - self.setup() - - def setup(self) -> None: - # Reset everything. - self.f.seek(self.beginning, os.SEEK_SET) - - self.whereToWriteNewIFDOffset = None - self.offsetOfNewPage = 0 - - self.IIMM = iimm = self.f.read(4) - if not iimm: - # empty file - first page - self.isFirst = True - return - - self.isFirst = False - if iimm == b"II\x2a\x00": - self.setEndian("<") - elif iimm == b"MM\x00\x2a": - self.setEndian(">") - else: - msg = "Invalid TIFF file header" - raise RuntimeError(msg) - - self.skipIFDs() - self.goToEnd() - - def finalize(self) -> None: - if self.isFirst: - return - - # fix offsets - self.f.seek(self.offsetOfNewPage) - - iimm = self.f.read(4) - if not iimm: - # Make it easy to finish a frame without committing to a new one. 
- return - - if iimm != self.IIMM: - msg = "IIMM of new page doesn't match IIMM of first page" - raise RuntimeError(msg) - - ifd_offset = self.readLong() - ifd_offset += self.offsetOfNewPage - self.f.seek(self.whereToWriteNewIFDOffset) - self.writeLong(ifd_offset) - self.f.seek(ifd_offset) - self.fixIFD() - - def newFrame(self) -> None: - # Call this to finish a frame. - self.finalize() - self.setup() - - def __enter__(self) -> AppendingTiffWriter: - return self - - def __exit__(self, *args: object) -> None: - if self.close_fp: - self.close() - - def tell(self) -> int: - return self.f.tell() - self.offsetOfNewPage - - def seek(self, offset, whence=io.SEEK_SET): - if whence == os.SEEK_SET: - offset += self.offsetOfNewPage - - self.f.seek(offset, whence) - return self.tell() - - def goToEnd(self) -> None: - self.f.seek(0, os.SEEK_END) - pos = self.f.tell() - - # pad to 16 byte boundary - pad_bytes = 16 - pos % 16 - if 0 < pad_bytes < 16: - self.f.write(bytes(pad_bytes)) - self.offsetOfNewPage = self.f.tell() - - def setEndian(self, endian: str) -> None: - self.endian = endian - self.longFmt = f"{self.endian}L" - self.shortFmt = f"{self.endian}H" - self.tagFormat = f"{self.endian}HHL" - - def skipIFDs(self) -> None: - while True: - ifd_offset = self.readLong() - if ifd_offset == 0: - self.whereToWriteNewIFDOffset = self.f.tell() - 4 - break - - self.f.seek(ifd_offset) - num_tags = self.readShort() - self.f.seek(num_tags * 12, os.SEEK_CUR) - - def write(self, data: bytes) -> int | None: - return self.f.write(data) - - def readShort(self) -> int: - (value,) = struct.unpack(self.shortFmt, self.f.read(2)) - return value - - def readLong(self) -> int: - (value,) = struct.unpack(self.longFmt, self.f.read(4)) - return value - - def rewriteLastShortToLong(self, value: int) -> None: - self.f.seek(-2, os.SEEK_CUR) - bytes_written = self.f.write(struct.pack(self.longFmt, value)) - if bytes_written is not None and bytes_written != 4: - msg = f"wrote only {bytes_written} bytes but wanted 4" - raise RuntimeError(msg) - - def rewriteLastShort(self, value: int) -> None: - self.f.seek(-2, os.SEEK_CUR) - bytes_written = self.f.write(struct.pack(self.shortFmt, value)) - if bytes_written is not None and bytes_written != 2: - msg = f"wrote only {bytes_written} bytes but wanted 2" - raise RuntimeError(msg) - - def rewriteLastLong(self, value: int) -> None: - self.f.seek(-4, os.SEEK_CUR) - bytes_written = self.f.write(struct.pack(self.longFmt, value)) - if bytes_written is not None and bytes_written != 4: - msg = f"wrote only {bytes_written} bytes but wanted 4" - raise RuntimeError(msg) - - def writeShort(self, value: int) -> None: - bytes_written = self.f.write(struct.pack(self.shortFmt, value)) - if bytes_written is not None and bytes_written != 2: - msg = f"wrote only {bytes_written} bytes but wanted 2" - raise RuntimeError(msg) - - def writeLong(self, value: int) -> None: - bytes_written = self.f.write(struct.pack(self.longFmt, value)) - if bytes_written is not None and bytes_written != 4: - msg = f"wrote only {bytes_written} bytes but wanted 4" - raise RuntimeError(msg) - - def close(self) -> None: - self.finalize() - self.f.close() - - def fixIFD(self) -> None: - num_tags = self.readShort() - - for i in range(num_tags): - tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8)) - - field_size = self.fieldSizes[field_type] - total_size = field_size * count - is_local = total_size <= 4 - offset: int | None - if not is_local: - offset = self.readLong() + self.offsetOfNewPage - 
self.rewriteLastLong(offset) - - if tag in self.Tags: - cur_pos = self.f.tell() - - if is_local: - self.fixOffsets( - count, isShort=(field_size == 2), isLong=(field_size == 4) - ) - self.f.seek(cur_pos + 4) - else: - self.f.seek(offset) - self.fixOffsets( - count, isShort=(field_size == 2), isLong=(field_size == 4) - ) - self.f.seek(cur_pos) - - offset = cur_pos = None - - elif is_local: - # skip the locally stored value that is not an offset - self.f.seek(4, os.SEEK_CUR) - - def fixOffsets( - self, count: int, isShort: bool = False, isLong: bool = False - ) -> None: - if not isShort and not isLong: - msg = "offset is neither short nor long" - raise RuntimeError(msg) - - for i in range(count): - offset = self.readShort() if isShort else self.readLong() - offset += self.offsetOfNewPage - if isShort and offset >= 65536: - # offset is now too large - we must convert shorts to longs - if count != 1: - msg = "not implemented" - raise RuntimeError(msg) # XXX TODO - - # simple case - the offset is just one and therefore it is - # local (not referenced with another offset) - self.rewriteLastShortToLong(offset) - self.f.seek(-10, os.SEEK_CUR) - self.writeShort(TiffTags.LONG) # rewrite the type to LONG - self.f.seek(8, os.SEEK_CUR) - elif isShort: - self.rewriteLastShort(offset) - else: - self.rewriteLastLong(offset) - - -def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - encoderinfo = im.encoderinfo.copy() - encoderconfig = im.encoderconfig - append_images = list(encoderinfo.get("append_images", [])) - if not hasattr(im, "n_frames") and not append_images: - return _save(im, fp, filename) - - cur_idx = im.tell() - try: - with AppendingTiffWriter(fp) as tf: - for ims in [im] + append_images: - ims.encoderinfo = encoderinfo - ims.encoderconfig = encoderconfig - if not hasattr(ims, "n_frames"): - nfr = 1 - else: - nfr = ims.n_frames - - for idx in range(nfr): - ims.seek(idx) - ims.load() - _save(ims, tf, filename) - tf.newFrame() - finally: - im.seek(cur_idx) - - -# -# -------------------------------------------------------------------- -# Register - -Image.register_open(TiffImageFile.format, TiffImageFile, _accept) -Image.register_save(TiffImageFile.format, _save) -Image.register_save_all(TiffImageFile.format, _save_all) - -Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"]) - -Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/venv/Lib/site-packages/PIL/TiffTags.py b/venv/Lib/site-packages/PIL/TiffTags.py deleted file mode 100644 index e318c87..0000000 --- a/venv/Lib/site-packages/PIL/TiffTags.py +++ /dev/null @@ -1,555 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# TIFF tags -# -# This module provides clear-text names for various well-known -# TIFF tags. the TIFF codec works just fine without it. -# -# Copyright (c) Secret Labs AB 1999. -# -# See the README file for information on usage and redistribution. -# - -## -# This module provides constants and clear-text names for various -# well-known TIFF tags. -## -from __future__ import annotations - -from typing import NamedTuple - - -class _TagInfo(NamedTuple): - value: int | None - name: str - type: int | None - length: int | None - enum: dict[str, int] - - -class TagInfo(_TagInfo): - __slots__: list[str] = [] - - def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None): - return super().__new__(cls, value, name, type, length, enum or {}) - - def cvt_enum(self, value): - # Using get will call hash(value), which can be expensive - # for some types (e.g. 
Fraction). Since self.enum is rarely - # used, it's usually better to test it first. - return self.enum.get(value, value) if self.enum else value - - -def lookup(tag, group=None): - """ - :param tag: Integer tag number - :param group: Which :py:data:`~PIL.TiffTags.TAGS_V2_GROUPS` to look in - - .. versionadded:: 8.3.0 - - :returns: Taginfo namedtuple, From the ``TAGS_V2`` info if possible, - otherwise just populating the value and name from ``TAGS``. - If the tag is not recognized, "unknown" is returned for the name - - """ - - if group is not None: - info = TAGS_V2_GROUPS[group].get(tag) if group in TAGS_V2_GROUPS else None - else: - info = TAGS_V2.get(tag) - return info or TagInfo(tag, TAGS.get(tag, "unknown")) - - -## -# Map tag numbers to tag info. -# -# id: (Name, Type, Length[, enum_values]) -# -# The length here differs from the length in the tiff spec. For -# numbers, the tiff spec is for the number of fields returned. We -# agree here. For string-like types, the tiff spec uses the length of -# field in bytes. In Pillow, we are using the number of expected -# fields, in general 1 for string-like types. - - -BYTE = 1 -ASCII = 2 -SHORT = 3 -LONG = 4 -RATIONAL = 5 -SIGNED_BYTE = 6 -UNDEFINED = 7 -SIGNED_SHORT = 8 -SIGNED_LONG = 9 -SIGNED_RATIONAL = 10 -FLOAT = 11 -DOUBLE = 12 -IFD = 13 -LONG8 = 16 - -_tags_v2 = { - 254: ("NewSubfileType", LONG, 1), - 255: ("SubfileType", SHORT, 1), - 256: ("ImageWidth", LONG, 1), - 257: ("ImageLength", LONG, 1), - 258: ("BitsPerSample", SHORT, 0), - 259: ( - "Compression", - SHORT, - 1, - { - "Uncompressed": 1, - "CCITT 1d": 2, - "Group 3 Fax": 3, - "Group 4 Fax": 4, - "LZW": 5, - "JPEG": 6, - "PackBits": 32773, - }, - ), - 262: ( - "PhotometricInterpretation", - SHORT, - 1, - { - "WhiteIsZero": 0, - "BlackIsZero": 1, - "RGB": 2, - "RGB Palette": 3, - "Transparency Mask": 4, - "CMYK": 5, - "YCbCr": 6, - "CieLAB": 8, - "CFA": 32803, # TIFF/EP, Adobe DNG - "LinearRaw": 32892, # Adobe DNG - }, - ), - 263: ("Threshholding", SHORT, 1), - 264: ("CellWidth", SHORT, 1), - 265: ("CellLength", SHORT, 1), - 266: ("FillOrder", SHORT, 1), - 269: ("DocumentName", ASCII, 1), - 270: ("ImageDescription", ASCII, 1), - 271: ("Make", ASCII, 1), - 272: ("Model", ASCII, 1), - 273: ("StripOffsets", LONG, 0), - 274: ("Orientation", SHORT, 1), - 277: ("SamplesPerPixel", SHORT, 1), - 278: ("RowsPerStrip", LONG, 1), - 279: ("StripByteCounts", LONG, 0), - 280: ("MinSampleValue", SHORT, 0), - 281: ("MaxSampleValue", SHORT, 0), - 282: ("XResolution", RATIONAL, 1), - 283: ("YResolution", RATIONAL, 1), - 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), - 285: ("PageName", ASCII, 1), - 286: ("XPosition", RATIONAL, 1), - 287: ("YPosition", RATIONAL, 1), - 288: ("FreeOffsets", LONG, 1), - 289: ("FreeByteCounts", LONG, 1), - 290: ("GrayResponseUnit", SHORT, 1), - 291: ("GrayResponseCurve", SHORT, 0), - 292: ("T4Options", LONG, 1), - 293: ("T6Options", LONG, 1), - 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), - 297: ("PageNumber", SHORT, 2), - 301: ("TransferFunction", SHORT, 0), - 305: ("Software", ASCII, 1), - 306: ("DateTime", ASCII, 1), - 315: ("Artist", ASCII, 1), - 316: ("HostComputer", ASCII, 1), - 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), - 318: ("WhitePoint", RATIONAL, 2), - 319: ("PrimaryChromaticities", RATIONAL, 6), - 320: ("ColorMap", SHORT, 0), - 321: ("HalftoneHints", SHORT, 2), - 322: ("TileWidth", LONG, 1), - 323: ("TileLength", LONG, 1), - 324: ("TileOffsets", LONG, 0), - 325: 
("TileByteCounts", LONG, 0), - 330: ("SubIFDs", LONG, 0), - 332: ("InkSet", SHORT, 1), - 333: ("InkNames", ASCII, 1), - 334: ("NumberOfInks", SHORT, 1), - 336: ("DotRange", SHORT, 0), - 337: ("TargetPrinter", ASCII, 1), - 338: ("ExtraSamples", SHORT, 0), - 339: ("SampleFormat", SHORT, 0), - 340: ("SMinSampleValue", DOUBLE, 0), - 341: ("SMaxSampleValue", DOUBLE, 0), - 342: ("TransferRange", SHORT, 6), - 347: ("JPEGTables", UNDEFINED, 1), - # obsolete JPEG tags - 512: ("JPEGProc", SHORT, 1), - 513: ("JPEGInterchangeFormat", LONG, 1), - 514: ("JPEGInterchangeFormatLength", LONG, 1), - 515: ("JPEGRestartInterval", SHORT, 1), - 517: ("JPEGLosslessPredictors", SHORT, 0), - 518: ("JPEGPointTransforms", SHORT, 0), - 519: ("JPEGQTables", LONG, 0), - 520: ("JPEGDCTables", LONG, 0), - 521: ("JPEGACTables", LONG, 0), - 529: ("YCbCrCoefficients", RATIONAL, 3), - 530: ("YCbCrSubSampling", SHORT, 2), - 531: ("YCbCrPositioning", SHORT, 1), - 532: ("ReferenceBlackWhite", RATIONAL, 6), - 700: ("XMP", BYTE, 0), - 33432: ("Copyright", ASCII, 1), - 33723: ("IptcNaaInfo", UNDEFINED, 1), - 34377: ("PhotoshopInfo", BYTE, 0), - # FIXME add more tags here - 34665: ("ExifIFD", LONG, 1), - 34675: ("ICCProfile", UNDEFINED, 1), - 34853: ("GPSInfoIFD", LONG, 1), - 36864: ("ExifVersion", UNDEFINED, 1), - 37724: ("ImageSourceData", UNDEFINED, 1), - 40965: ("InteroperabilityIFD", LONG, 1), - 41730: ("CFAPattern", UNDEFINED, 1), - # MPInfo - 45056: ("MPFVersion", UNDEFINED, 1), - 45057: ("NumberOfImages", LONG, 1), - 45058: ("MPEntry", UNDEFINED, 1), - 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check - 45060: ("TotalFrames", LONG, 1), - 45313: ("MPIndividualNum", LONG, 1), - 45569: ("PanOrientation", LONG, 1), - 45570: ("PanOverlap_H", RATIONAL, 1), - 45571: ("PanOverlap_V", RATIONAL, 1), - 45572: ("BaseViewpointNum", LONG, 1), - 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), - 45574: ("BaselineLength", RATIONAL, 1), - 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), - 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), - 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), - 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), - 45579: ("YawAngle", SIGNED_RATIONAL, 1), - 45580: ("PitchAngle", SIGNED_RATIONAL, 1), - 45581: ("RollAngle", SIGNED_RATIONAL, 1), - 40960: ("FlashPixVersion", UNDEFINED, 1), - 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), - 50780: ("BestQualityScale", RATIONAL, 1), - 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one - 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006 -} -TAGS_V2_GROUPS = { - # ExifIFD - 34665: { - 36864: ("ExifVersion", UNDEFINED, 1), - 40960: ("FlashPixVersion", UNDEFINED, 1), - 40965: ("InteroperabilityIFD", LONG, 1), - 41730: ("CFAPattern", UNDEFINED, 1), - }, - # GPSInfoIFD - 34853: { - 0: ("GPSVersionID", BYTE, 4), - 1: ("GPSLatitudeRef", ASCII, 2), - 2: ("GPSLatitude", RATIONAL, 3), - 3: ("GPSLongitudeRef", ASCII, 2), - 4: ("GPSLongitude", RATIONAL, 3), - 5: ("GPSAltitudeRef", BYTE, 1), - 6: ("GPSAltitude", RATIONAL, 1), - 7: ("GPSTimeStamp", RATIONAL, 3), - 8: ("GPSSatellites", ASCII, 0), - 9: ("GPSStatus", ASCII, 2), - 10: ("GPSMeasureMode", ASCII, 2), - 11: ("GPSDOP", RATIONAL, 1), - 12: ("GPSSpeedRef", ASCII, 2), - 13: ("GPSSpeed", RATIONAL, 1), - 14: ("GPSTrackRef", ASCII, 2), - 15: ("GPSTrack", RATIONAL, 1), - 16: ("GPSImgDirectionRef", ASCII, 2), - 17: ("GPSImgDirection", RATIONAL, 1), - 18: ("GPSMapDatum", ASCII, 0), - 19: ("GPSDestLatitudeRef", ASCII, 2), - 20: ("GPSDestLatitude", RATIONAL, 3), - 21: ("GPSDestLongitudeRef", 
ASCII, 2), - 22: ("GPSDestLongitude", RATIONAL, 3), - 23: ("GPSDestBearingRef", ASCII, 2), - 24: ("GPSDestBearing", RATIONAL, 1), - 25: ("GPSDestDistanceRef", ASCII, 2), - 26: ("GPSDestDistance", RATIONAL, 1), - 27: ("GPSProcessingMethod", UNDEFINED, 0), - 28: ("GPSAreaInformation", UNDEFINED, 0), - 29: ("GPSDateStamp", ASCII, 11), - 30: ("GPSDifferential", SHORT, 1), - }, - # InteroperabilityIFD - 40965: {1: ("InteropIndex", ASCII, 1), 2: ("InteropVersion", UNDEFINED, 1)}, -} - -# Legacy Tags structure -# these tags aren't included above, but were in the previous versions -TAGS = { - 347: "JPEGTables", - 700: "XMP", - # Additional Exif Info - 32932: "Wang Annotation", - 33434: "ExposureTime", - 33437: "FNumber", - 33445: "MD FileTag", - 33446: "MD ScalePixel", - 33447: "MD ColorTable", - 33448: "MD LabName", - 33449: "MD SampleInfo", - 33450: "MD PrepDate", - 33451: "MD PrepTime", - 33452: "MD FileUnits", - 33550: "ModelPixelScaleTag", - 33723: "IptcNaaInfo", - 33918: "INGR Packet Data Tag", - 33919: "INGR Flag Registers", - 33920: "IrasB Transformation Matrix", - 33922: "ModelTiepointTag", - 34264: "ModelTransformationTag", - 34377: "PhotoshopInfo", - 34735: "GeoKeyDirectoryTag", - 34736: "GeoDoubleParamsTag", - 34737: "GeoAsciiParamsTag", - 34850: "ExposureProgram", - 34852: "SpectralSensitivity", - 34855: "ISOSpeedRatings", - 34856: "OECF", - 34864: "SensitivityType", - 34865: "StandardOutputSensitivity", - 34866: "RecommendedExposureIndex", - 34867: "ISOSpeed", - 34868: "ISOSpeedLatitudeyyy", - 34869: "ISOSpeedLatitudezzz", - 34908: "HylaFAX FaxRecvParams", - 34909: "HylaFAX FaxSubAddress", - 34910: "HylaFAX FaxRecvTime", - 36864: "ExifVersion", - 36867: "DateTimeOriginal", - 36868: "DateTimeDigitized", - 37121: "ComponentsConfiguration", - 37122: "CompressedBitsPerPixel", - 37724: "ImageSourceData", - 37377: "ShutterSpeedValue", - 37378: "ApertureValue", - 37379: "BrightnessValue", - 37380: "ExposureBiasValue", - 37381: "MaxApertureValue", - 37382: "SubjectDistance", - 37383: "MeteringMode", - 37384: "LightSource", - 37385: "Flash", - 37386: "FocalLength", - 37396: "SubjectArea", - 37500: "MakerNote", - 37510: "UserComment", - 37520: "SubSec", - 37521: "SubSecTimeOriginal", - 37522: "SubsecTimeDigitized", - 40960: "FlashPixVersion", - 40961: "ColorSpace", - 40962: "PixelXDimension", - 40963: "PixelYDimension", - 40964: "RelatedSoundFile", - 40965: "InteroperabilityIFD", - 41483: "FlashEnergy", - 41484: "SpatialFrequencyResponse", - 41486: "FocalPlaneXResolution", - 41487: "FocalPlaneYResolution", - 41488: "FocalPlaneResolutionUnit", - 41492: "SubjectLocation", - 41493: "ExposureIndex", - 41495: "SensingMethod", - 41728: "FileSource", - 41729: "SceneType", - 41730: "CFAPattern", - 41985: "CustomRendered", - 41986: "ExposureMode", - 41987: "WhiteBalance", - 41988: "DigitalZoomRatio", - 41989: "FocalLengthIn35mmFilm", - 41990: "SceneCaptureType", - 41991: "GainControl", - 41992: "Contrast", - 41993: "Saturation", - 41994: "Sharpness", - 41995: "DeviceSettingDescription", - 41996: "SubjectDistanceRange", - 42016: "ImageUniqueID", - 42032: "CameraOwnerName", - 42033: "BodySerialNumber", - 42034: "LensSpecification", - 42035: "LensMake", - 42036: "LensModel", - 42037: "LensSerialNumber", - 42112: "GDAL_METADATA", - 42113: "GDAL_NODATA", - 42240: "Gamma", - 50215: "Oce Scanjob Description", - 50216: "Oce Application Selector", - 50217: "Oce Identification Number", - 50218: "Oce ImageLogic Characteristics", - # Adobe DNG - 50706: "DNGVersion", - 50707: "DNGBackwardVersion", - 50708: 
"UniqueCameraModel", - 50709: "LocalizedCameraModel", - 50710: "CFAPlaneColor", - 50711: "CFALayout", - 50712: "LinearizationTable", - 50713: "BlackLevelRepeatDim", - 50714: "BlackLevel", - 50715: "BlackLevelDeltaH", - 50716: "BlackLevelDeltaV", - 50717: "WhiteLevel", - 50718: "DefaultScale", - 50719: "DefaultCropOrigin", - 50720: "DefaultCropSize", - 50721: "ColorMatrix1", - 50722: "ColorMatrix2", - 50723: "CameraCalibration1", - 50724: "CameraCalibration2", - 50725: "ReductionMatrix1", - 50726: "ReductionMatrix2", - 50727: "AnalogBalance", - 50728: "AsShotNeutral", - 50729: "AsShotWhiteXY", - 50730: "BaselineExposure", - 50731: "BaselineNoise", - 50732: "BaselineSharpness", - 50733: "BayerGreenSplit", - 50734: "LinearResponseLimit", - 50735: "CameraSerialNumber", - 50736: "LensInfo", - 50737: "ChromaBlurRadius", - 50738: "AntiAliasStrength", - 50740: "DNGPrivateData", - 50778: "CalibrationIlluminant1", - 50779: "CalibrationIlluminant2", - 50784: "Alias Layer Metadata", -} - -TAGS_V2: dict[int, TagInfo] = {} - - -def _populate(): - for k, v in _tags_v2.items(): - # Populate legacy structure. - TAGS[k] = v[0] - if len(v) == 4: - for sk, sv in v[3].items(): - TAGS[(k, sv)] = sk - - TAGS_V2[k] = TagInfo(k, *v) - - for tags in TAGS_V2_GROUPS.values(): - for k, v in tags.items(): - tags[k] = TagInfo(k, *v) - - -_populate() -## -# Map type numbers to type names -- defined in ImageFileDirectory. - -TYPES: dict[int, str] = {} - -# -# These tags are handled by default in libtiff, without -# adding to the custom dictionary. From tif_dir.c, searching for -# case TIFFTAG in the _TIFFVSetField function: -# Line: item. -# 148: case TIFFTAG_SUBFILETYPE: -# 151: case TIFFTAG_IMAGEWIDTH: -# 154: case TIFFTAG_IMAGELENGTH: -# 157: case TIFFTAG_BITSPERSAMPLE: -# 181: case TIFFTAG_COMPRESSION: -# 202: case TIFFTAG_PHOTOMETRIC: -# 205: case TIFFTAG_THRESHHOLDING: -# 208: case TIFFTAG_FILLORDER: -# 214: case TIFFTAG_ORIENTATION: -# 221: case TIFFTAG_SAMPLESPERPIXEL: -# 228: case TIFFTAG_ROWSPERSTRIP: -# 238: case TIFFTAG_MINSAMPLEVALUE: -# 241: case TIFFTAG_MAXSAMPLEVALUE: -# 244: case TIFFTAG_SMINSAMPLEVALUE: -# 247: case TIFFTAG_SMAXSAMPLEVALUE: -# 250: case TIFFTAG_XRESOLUTION: -# 256: case TIFFTAG_YRESOLUTION: -# 262: case TIFFTAG_PLANARCONFIG: -# 268: case TIFFTAG_XPOSITION: -# 271: case TIFFTAG_YPOSITION: -# 274: case TIFFTAG_RESOLUTIONUNIT: -# 280: case TIFFTAG_PAGENUMBER: -# 284: case TIFFTAG_HALFTONEHINTS: -# 288: case TIFFTAG_COLORMAP: -# 294: case TIFFTAG_EXTRASAMPLES: -# 298: case TIFFTAG_MATTEING: -# 305: case TIFFTAG_TILEWIDTH: -# 316: case TIFFTAG_TILELENGTH: -# 327: case TIFFTAG_TILEDEPTH: -# 333: case TIFFTAG_DATATYPE: -# 344: case TIFFTAG_SAMPLEFORMAT: -# 361: case TIFFTAG_IMAGEDEPTH: -# 364: case TIFFTAG_SUBIFD: -# 376: case TIFFTAG_YCBCRPOSITIONING: -# 379: case TIFFTAG_YCBCRSUBSAMPLING: -# 383: case TIFFTAG_TRANSFERFUNCTION: -# 389: case TIFFTAG_REFERENCEBLACKWHITE: -# 393: case TIFFTAG_INKNAMES: - -# Following pseudo-tags are also handled by default in libtiff: -# TIFFTAG_JPEGQUALITY 65537 - -# some of these are not in our TAGS_V2 dict and were included from tiff.h - -# This list also exists in encode.c -LIBTIFF_CORE = { - 255, - 256, - 257, - 258, - 259, - 262, - 263, - 266, - 274, - 277, - 278, - 280, - 281, - 340, - 341, - 282, - 283, - 284, - 286, - 287, - 296, - 297, - 321, - 320, - 338, - 32995, - 322, - 323, - 32998, - 32996, - 339, - 32997, - 330, - 531, - 530, - 301, - 532, - 333, - # as above - 269, # this has been in our tests forever, and works - 65537, -} - 
-LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes -LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff -LIBTIFF_CORE.remove(323) # Tiled images -LIBTIFF_CORE.remove(333) # Ink Names either - -# Note to advanced users: There may be combinations of these -# parameters and values that when added properly, will work and -# produce valid tiff images that may work in your application. -# It is safe to add and remove tags from this set from Pillow's point -# of view so long as you test against libtiff. diff --git a/venv/Lib/site-packages/PIL/WalImageFile.py b/venv/Lib/site-packages/PIL/WalImageFile.py deleted file mode 100644 index fbd7be6..0000000 --- a/venv/Lib/site-packages/PIL/WalImageFile.py +++ /dev/null @@ -1,124 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# WAL file handling -# -# History: -# 2003-04-23 fl created -# -# Copyright (c) 2003 by Fredrik Lundh. -# -# See the README file for information on usage and redistribution. -# - -""" -This reader is based on the specification available from: -https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml -and has been tested with a few sample files found using google. - -.. note:: - This format cannot be automatically recognized, so the reader - is not registered for use with :py:func:`PIL.Image.open()`. - To open a WAL file, use the :py:func:`PIL.WalImageFile.open()` function instead. -""" -from __future__ import annotations - -from . import Image, ImageFile -from ._binary import i32le as i32 - - -class WalImageFile(ImageFile.ImageFile): - format = "WAL" - format_description = "Quake2 Texture" - - def _open(self) -> None: - self._mode = "P" - - # read header fields - header = self.fp.read(32 + 24 + 32 + 12) - self._size = i32(header, 32), i32(header, 36) - Image._decompression_bomb_check(self.size) - - # load pixel data - offset = i32(header, 40) - self.fp.seek(offset) - - # strings are null-terminated - self.info["name"] = header[:32].split(b"\0", 1)[0] - next_name = header[56 : 56 + 32].split(b"\0", 1)[0] - if next_name: - self.info["next_name"] = next_name - - def load(self): - if not self.im: - self.im = Image.core.new(self.mode, self.size) - self.frombytes(self.fp.read(self.size[0] * self.size[1])) - self.putpalette(quake2palette) - return Image.Image.load(self) - - -def open(filename): - """ - Load texture from a Quake2 WAL texture file. - - By default, a Quake2 standard palette is attached to the texture. - To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method. - - :param filename: WAL file name, or an opened file handle. - :returns: An image instance. 
- """ - return WalImageFile(filename) - - -quake2palette = ( - # default palette taken from piffo 0.93 by Hans Häggström - b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" - b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" - b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" - b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" - b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" - b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" - b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" - b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" - b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" - b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" - b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" - b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" - b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" - b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" - b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" - b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" - b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" - b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" - b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" - b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" - b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" - b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" - b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" - b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" - b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" - b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" - b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" - b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" - b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" - b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" - b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" - b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" - b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" - b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" - b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" - b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" - b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" - b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" - b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" - b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" - b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" - b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" - b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" - b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" - b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" - b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" - b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" - b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" -) diff --git a/venv/Lib/site-packages/PIL/WebPImagePlugin.py 
b/venv/Lib/site-packages/PIL/WebPImagePlugin.py deleted file mode 100644 index 59be5bf..0000000 --- a/venv/Lib/site-packages/PIL/WebPImagePlugin.py +++ /dev/null @@ -1,363 +0,0 @@ -from __future__ import annotations - -from io import BytesIO -from typing import IO, Any - -from . import Image, ImageFile - -try: - from . import _webp - - SUPPORTED = True -except ImportError: - SUPPORTED = False - - -_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True} - -_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True} - -_VP8_MODES_BY_IDENTIFIER = { - b"VP8 ": "RGB", - b"VP8X": "RGBA", - b"VP8L": "RGBA", # lossless -} - - -def _accept(prefix: bytes) -> bool | str: - is_riff_file_format = prefix[:4] == b"RIFF" - is_webp_file = prefix[8:12] == b"WEBP" - is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER - - if is_riff_file_format and is_webp_file and is_valid_vp8_mode: - if not SUPPORTED: - return ( - "image file could not be identified because WEBP support not installed" - ) - return True - return False - - -class WebPImageFile(ImageFile.ImageFile): - format = "WEBP" - format_description = "WebP image" - __loaded = 0 - __logical_frame = 0 - - def _open(self) -> None: - if not _webp.HAVE_WEBPANIM: - # Legacy mode - data, width, height, self._mode, icc_profile, exif = _webp.WebPDecode( - self.fp.read() - ) - if icc_profile: - self.info["icc_profile"] = icc_profile - if exif: - self.info["exif"] = exif - self._size = width, height - self.fp = BytesIO(data) - self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] - self.n_frames = 1 - self.is_animated = False - return - - # Use the newer AnimDecoder API to parse the (possibly) animated file, - # and access muxed chunks like ICC/EXIF/XMP. - self._decoder = _webp.WebPAnimDecoder(self.fp.read()) - - # Get info from decoder - width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info() - self._size = width, height - self.info["loop"] = loop_count - bg_a, bg_r, bg_g, bg_b = ( - (bgcolor >> 24) & 0xFF, - (bgcolor >> 16) & 0xFF, - (bgcolor >> 8) & 0xFF, - bgcolor & 0xFF, - ) - self.info["background"] = (bg_r, bg_g, bg_b, bg_a) - self.n_frames = frame_count - self.is_animated = self.n_frames > 1 - self._mode = "RGB" if mode == "RGBX" else mode - self.rawmode = mode - self.tile = [] - - # Attempt to read ICC / EXIF / XMP chunks from file - icc_profile = self._decoder.get_chunk("ICCP") - exif = self._decoder.get_chunk("EXIF") - xmp = self._decoder.get_chunk("XMP ") - if icc_profile: - self.info["icc_profile"] = icc_profile - if exif: - self.info["exif"] = exif - if xmp: - self.info["xmp"] = xmp - - # Initialize seek state - self._reset(reset=False) - - def _getexif(self) -> dict[str, Any] | None: - if "exif" not in self.info: - return None - return self.getexif()._get_merged_dict() - - def seek(self, frame: int) -> None: - if not self._seek_check(frame): - return - - # Set logical frame to requested position - self.__logical_frame = frame - - def _reset(self, reset: bool = True) -> None: - if reset: - self._decoder.reset() - self.__physical_frame = 0 - self.__loaded = -1 - self.__timestamp = 0 - - def _get_next(self): - # Get next frame - ret = self._decoder.get_next() - self.__physical_frame += 1 - - # Check if an error occurred - if ret is None: - self._reset() # Reset just to be safe - self.seek(0) - msg = "failed to decode next frame in WebP file" - raise EOFError(msg) - - # Compute duration - data, timestamp = ret - duration = timestamp - self.__timestamp - self.__timestamp = timestamp - - # libwebp gives frame end, 
adjust to start of frame - timestamp -= duration - return data, timestamp, duration - - def _seek(self, frame: int) -> None: - if self.__physical_frame == frame: - return # Nothing to do - if frame < self.__physical_frame: - self._reset() # Rewind to beginning - while self.__physical_frame < frame: - self._get_next() # Advance to the requested frame - - def load(self): - if _webp.HAVE_WEBPANIM: - if self.__loaded != self.__logical_frame: - self._seek(self.__logical_frame) - - # We need to load the image data for this frame - data, timestamp, duration = self._get_next() - self.info["timestamp"] = timestamp - self.info["duration"] = duration - self.__loaded = self.__logical_frame - - # Set tile - if self.fp and self._exclusive_fp: - self.fp.close() - self.fp = BytesIO(data) - self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)] - - return super().load() - - def load_seek(self, pos: int) -> None: - pass - - def tell(self) -> int: - if not _webp.HAVE_WEBPANIM: - return super().tell() - - return self.__logical_frame - - -def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - encoderinfo = im.encoderinfo.copy() - append_images = list(encoderinfo.get("append_images", [])) - - # If total frame count is 1, then save using the legacy API, which - # will preserve non-alpha modes - total = 0 - for ims in [im] + append_images: - total += getattr(ims, "n_frames", 1) - if total == 1: - _save(im, fp, filename) - return - - background: int | tuple[int, ...] = (0, 0, 0, 0) - if "background" in encoderinfo: - background = encoderinfo["background"] - elif "background" in im.info: - background = im.info["background"] - if isinstance(background, int): - # GifImagePlugin stores a global color table index in - # info["background"]. So it must be converted to an RGBA value - palette = im.getpalette() - if palette: - r, g, b = palette[background * 3 : (background + 1) * 3] - background = (r, g, b, 255) - else: - background = (background, background, background, 255) - - duration = im.encoderinfo.get("duration", im.info.get("duration", 0)) - loop = im.encoderinfo.get("loop", 0) - minimize_size = im.encoderinfo.get("minimize_size", False) - kmin = im.encoderinfo.get("kmin", None) - kmax = im.encoderinfo.get("kmax", None) - allow_mixed = im.encoderinfo.get("allow_mixed", False) - verbose = False - lossless = im.encoderinfo.get("lossless", False) - quality = im.encoderinfo.get("quality", 80) - alpha_quality = im.encoderinfo.get("alpha_quality", 100) - method = im.encoderinfo.get("method", 0) - icc_profile = im.encoderinfo.get("icc_profile") or "" - exif = im.encoderinfo.get("exif", "") - if isinstance(exif, Image.Exif): - exif = exif.tobytes() - xmp = im.encoderinfo.get("xmp", "") - if allow_mixed: - lossless = False - - # Sensible keyframe defaults are from gif2webp.c script - if kmin is None: - kmin = 9 if lossless else 3 - if kmax is None: - kmax = 17 if lossless else 5 - - # Validate background color - if ( - not isinstance(background, (list, tuple)) - or len(background) != 4 - or not all(0 <= v < 256 for v in background) - ): - msg = f"Background color is not an RGBA tuple clamped to (0-255): {background}" - raise OSError(msg) - - # Convert to packed uint - bg_r, bg_g, bg_b, bg_a = background - background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0) - - # Setup the WebP animation encoder - enc = _webp.WebPAnimEncoder( - im.size[0], - im.size[1], - background, - loop, - minimize_size, - kmin, - kmax, - allow_mixed, - verbose, - ) - - # Add each frame - frame_idx = 0 - 
timestamp = 0 - cur_idx = im.tell() - try: - for ims in [im] + append_images: - # Get # of frames in this image - nfr = getattr(ims, "n_frames", 1) - - for idx in range(nfr): - ims.seek(idx) - ims.load() - - # Make sure image mode is supported - frame = ims - rawmode = ims.mode - if ims.mode not in _VALID_WEBP_MODES: - alpha = ( - "A" in ims.mode - or "a" in ims.mode - or (ims.mode == "P" and "A" in ims.im.getpalettemode()) - ) - rawmode = "RGBA" if alpha else "RGB" - frame = ims.convert(rawmode) - - if rawmode == "RGB": - # For faster conversion, use RGBX - rawmode = "RGBX" - - # Append the frame to the animation encoder - enc.add( - frame.tobytes("raw", rawmode), - round(timestamp), - frame.size[0], - frame.size[1], - rawmode, - lossless, - quality, - alpha_quality, - method, - ) - - # Update timestamp and frame index - if isinstance(duration, (list, tuple)): - timestamp += duration[frame_idx] - else: - timestamp += duration - frame_idx += 1 - - finally: - im.seek(cur_idx) - - # Force encoder to flush frames - enc.add(None, round(timestamp), 0, 0, "", lossless, quality, alpha_quality, 0) - - # Get the final output from the encoder - data = enc.assemble(icc_profile, exif, xmp) - if data is None: - msg = "cannot write file as WebP (encoder returned None)" - raise OSError(msg) - - fp.write(data) - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - lossless = im.encoderinfo.get("lossless", False) - quality = im.encoderinfo.get("quality", 80) - alpha_quality = im.encoderinfo.get("alpha_quality", 100) - icc_profile = im.encoderinfo.get("icc_profile") or "" - exif = im.encoderinfo.get("exif", b"") - if isinstance(exif, Image.Exif): - exif = exif.tobytes() - if exif.startswith(b"Exif\x00\x00"): - exif = exif[6:] - xmp = im.encoderinfo.get("xmp", "") - method = im.encoderinfo.get("method", 4) - exact = 1 if im.encoderinfo.get("exact") else 0 - - if im.mode not in _VALID_WEBP_LEGACY_MODES: - im = im.convert("RGBA" if im.has_transparency_data else "RGB") - - data = _webp.WebPEncode( - im.tobytes(), - im.size[0], - im.size[1], - lossless, - float(quality), - float(alpha_quality), - im.mode, - icc_profile, - method, - exact, - exif, - xmp, - ) - if data is None: - msg = "cannot write file as WebP (encoder returned None)" - raise OSError(msg) - - fp.write(data) - - -Image.register_open(WebPImageFile.format, WebPImageFile, _accept) -if SUPPORTED: - Image.register_save(WebPImageFile.format, _save) - if _webp.HAVE_WEBPANIM: - Image.register_save_all(WebPImageFile.format, _save_all) - Image.register_extension(WebPImageFile.format, ".webp") - Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/venv/Lib/site-packages/PIL/WmfImagePlugin.py b/venv/Lib/site-packages/PIL/WmfImagePlugin.py deleted file mode 100644 index 3d5cddc..0000000 --- a/venv/Lib/site-packages/PIL/WmfImagePlugin.py +++ /dev/null @@ -1,181 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# WMF stub codec -# -# history: -# 1996-12-14 fl Created -# 2004-02-22 fl Turned into a stub driver -# 2004-02-23 fl Added EMF support -# -# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. -# Copyright (c) Fredrik Lundh 1996. -# -# See the README file for information on usage and redistribution. -# -# WMF/EMF reference documentation: -# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf -# http://wvware.sourceforge.net/caolan/index.html -# http://wvware.sourceforge.net/caolan/ora-wmf.html -from __future__ import annotations - -from typing import IO - -from . 
import Image, ImageFile -from ._binary import i16le as word -from ._binary import si16le as short -from ._binary import si32le as _long - -_handler = None - - -def register_handler(handler: ImageFile.StubHandler | None) -> None: - """ - Install application-specific WMF image handler. - - :param handler: Handler object. - """ - global _handler - _handler = handler - - -if hasattr(Image.core, "drawwmf"): - # install default handler (windows only) - - class WmfHandler(ImageFile.StubHandler): - def open(self, im: ImageFile.StubImageFile) -> None: - im._mode = "RGB" - self.bbox = im.info["wmf_bbox"] - - def load(self, im: ImageFile.StubImageFile) -> Image.Image: - im.fp.seek(0) # rewind - return Image.frombytes( - "RGB", - im.size, - Image.core.drawwmf(im.fp.read(), im.size, self.bbox), - "raw", - "BGR", - (im.size[0] * 3 + 3) & -4, - -1, - ) - - register_handler(WmfHandler()) - -# -# -------------------------------------------------------------------- -# Read WMF file - - -def _accept(prefix: bytes) -> bool: - return ( - prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00" - ) - - -## -# Image plugin for Windows metafiles. - - -class WmfStubImageFile(ImageFile.StubImageFile): - format = "WMF" - format_description = "Windows Metafile" - - def _open(self) -> None: - self._inch = None - - # check placable header - s = self.fp.read(80) - - if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": - # placeable windows metafile - - # get units per inch - self._inch = word(s, 14) - - # get bounding box - x0 = short(s, 6) - y0 = short(s, 8) - x1 = short(s, 10) - y1 = short(s, 12) - - # normalize size to 72 dots per inch - self.info["dpi"] = 72 - size = ( - (x1 - x0) * self.info["dpi"] // self._inch, - (y1 - y0) * self.info["dpi"] // self._inch, - ) - - self.info["wmf_bbox"] = x0, y0, x1, y1 - - # sanity check (standard metafile header) - if s[22:26] != b"\x01\x00\t\x00": - msg = "Unsupported WMF file format" - raise SyntaxError(msg) - - elif s[:4] == b"\x01\x00\x00\x00" and s[40:44] == b" EMF": - # enhanced metafile - - # get bounding box - x0 = _long(s, 8) - y0 = _long(s, 12) - x1 = _long(s, 16) - y1 = _long(s, 20) - - # get frame (in 0.01 millimeter units) - frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36) - - size = x1 - x0, y1 - y0 - - # calculate dots per inch from bbox and frame - xdpi = 2540.0 * (x1 - y0) / (frame[2] - frame[0]) - ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1]) - - self.info["wmf_bbox"] = x0, y0, x1, y1 - - if xdpi == ydpi: - self.info["dpi"] = xdpi - else: - self.info["dpi"] = xdpi, ydpi - - else: - msg = "Unsupported file format" - raise SyntaxError(msg) - - self._mode = "RGB" - self._size = size - - loader = self._load() - if loader: - loader.open(self) - - def _load(self) -> ImageFile.StubHandler | None: - return _handler - - def load(self, dpi=None): - if dpi is not None and self._inch is not None: - self.info["dpi"] = dpi - x0, y0, x1, y1 = self.info["wmf_bbox"] - self._size = ( - (x1 - x0) * self.info["dpi"] // self._inch, - (y1 - y0) * self.info["dpi"] // self._inch, - ) - return super().load() - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if _handler is None or not hasattr(_handler, "save"): - msg = "WMF save handler not installed" - raise OSError(msg) - _handler.save(im, fp, filename) - - -# -# -------------------------------------------------------------------- -# Registry stuff - - -Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) -Image.register_save(WmfStubImageFile.format, _save) 
- -Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"]) diff --git a/venv/Lib/site-packages/PIL/XVThumbImagePlugin.py b/venv/Lib/site-packages/PIL/XVThumbImagePlugin.py deleted file mode 100644 index c84adac..0000000 --- a/venv/Lib/site-packages/PIL/XVThumbImagePlugin.py +++ /dev/null @@ -1,81 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# XV Thumbnail file handler by Charles E. "Gene" Cash -# (gcash@magicnet.net) -# -# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, -# available from ftp://ftp.cis.upenn.edu/pub/xv/ -# -# history: -# 98-08-15 cec created (b/w only) -# 98-12-09 cec added color palette -# 98-12-28 fl added to PIL (with only a few very minor modifications) -# -# To do: -# FIXME: make save work (this requires quantization support) -# -from __future__ import annotations - -from . import Image, ImageFile, ImagePalette -from ._binary import o8 - -_MAGIC = b"P7 332" - -# standard color palette for thumbnails (RGB332) -PALETTE = b"" -for r in range(8): - for g in range(8): - for b in range(4): - PALETTE = PALETTE + ( - o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3) - ) - - -def _accept(prefix: bytes) -> bool: - return prefix[:6] == _MAGIC - - -## -# Image plugin for XV thumbnail images. - - -class XVThumbImageFile(ImageFile.ImageFile): - format = "XVThumb" - format_description = "XV thumbnail image" - - def _open(self) -> None: - # check magic - assert self.fp is not None - - if not _accept(self.fp.read(6)): - msg = "not an XV thumbnail file" - raise SyntaxError(msg) - - # Skip to beginning of next line - self.fp.readline() - - # skip info comments - while True: - s = self.fp.readline() - if not s: - msg = "Unexpected EOF reading XV thumbnail file" - raise SyntaxError(msg) - if s[0] != 35: # ie. when not a comment: '#' - break - - # parse header line (already read) - s = s.strip().split() - - self._mode = "P" - self._size = int(s[0]), int(s[1]) - - self.palette = ImagePalette.raw("RGB", PALETTE) - - self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))] - - -# -------------------------------------------------------------------- - -Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) diff --git a/venv/Lib/site-packages/PIL/XbmImagePlugin.py b/venv/Lib/site-packages/PIL/XbmImagePlugin.py deleted file mode 100644 index 6d11bbf..0000000 --- a/venv/Lib/site-packages/PIL/XbmImagePlugin.py +++ /dev/null @@ -1,98 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# XBM File handling -# -# History: -# 1995-09-08 fl Created -# 1996-11-01 fl Added save support -# 1997-07-07 fl Made header parser more tolerant -# 1997-07-22 fl Fixed yet another parser bug -# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) -# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) -# 2004-02-24 fl Allow some whitespace before first #define -# -# Copyright (c) 1997-2004 by Secret Labs AB -# Copyright (c) 1996-1997 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import re -from typing import IO - -from . import Image, ImageFile - -# XBM header -xbm_head = re.compile( - rb"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" - b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" - b"(?P" - b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" - b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" - b")?" 
- rb"[\000-\377]*_bits\[]" -) - - -def _accept(prefix: bytes) -> bool: - return prefix.lstrip()[:7] == b"#define" - - -## -# Image plugin for X11 bitmaps. - - -class XbmImageFile(ImageFile.ImageFile): - format = "XBM" - format_description = "X11 Bitmap" - - def _open(self) -> None: - assert self.fp is not None - - m = xbm_head.match(self.fp.read(512)) - - if not m: - msg = "not a XBM file" - raise SyntaxError(msg) - - xsize = int(m.group("width")) - ysize = int(m.group("height")) - - if m.group("hotspot"): - self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot"))) - - self._mode = "1" - self._size = xsize, ysize - - self.tile = [("xbm", (0, 0) + self.size, m.end(), None)] - - -def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: - if im.mode != "1": - msg = f"cannot write mode {im.mode} as XBM" - raise OSError(msg) - - fp.write(f"#define im_width {im.size[0]}\n".encode("ascii")) - fp.write(f"#define im_height {im.size[1]}\n".encode("ascii")) - - hotspot = im.encoderinfo.get("hotspot") - if hotspot: - fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii")) - fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii")) - - fp.write(b"static char im_bits[] = {\n") - - ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)]) - - fp.write(b"};\n") - - -Image.register_open(XbmImageFile.format, XbmImageFile, _accept) -Image.register_save(XbmImageFile.format, _save) - -Image.register_extension(XbmImageFile.format, ".xbm") - -Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/venv/Lib/site-packages/PIL/XpmImagePlugin.py b/venv/Lib/site-packages/PIL/XpmImagePlugin.py deleted file mode 100644 index 8d56331..0000000 --- a/venv/Lib/site-packages/PIL/XpmImagePlugin.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# XPM File handling -# -# History: -# 1996-12-29 fl Created -# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) -# -# Copyright (c) Secret Labs AB 1997-2001. -# Copyright (c) Fredrik Lundh 1996-2001. -# -# See the README file for information on usage and redistribution. -# -from __future__ import annotations - -import re - -from . import Image, ImageFile, ImagePalette -from ._binary import o8 - -# XPM header -xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)') - - -def _accept(prefix: bytes) -> bool: - return prefix[:9] == b"/* XPM */" - - -## -# Image plugin for X11 pixel maps. 
- - -class XpmImageFile(ImageFile.ImageFile): - format = "XPM" - format_description = "X11 Pixel Map" - - def _open(self) -> None: - if not _accept(self.fp.read(9)): - msg = "not an XPM file" - raise SyntaxError(msg) - - # skip forward to next string - while True: - s = self.fp.readline() - if not s: - msg = "broken XPM file" - raise SyntaxError(msg) - m = xpm_head.match(s) - if m: - break - - self._size = int(m.group(1)), int(m.group(2)) - - pal = int(m.group(3)) - bpp = int(m.group(4)) - - if pal > 256 or bpp != 1: - msg = "cannot read this XPM file" - raise ValueError(msg) - - # - # load palette description - - palette = [b"\0\0\0"] * 256 - - for _ in range(pal): - s = self.fp.readline() - if s[-2:] == b"\r\n": - s = s[:-2] - elif s[-1:] in b"\r\n": - s = s[:-1] - - c = s[1] - s = s[2:-2].split() - - for i in range(0, len(s), 2): - if s[i] == b"c": - # process colour key - rgb = s[i + 1] - if rgb == b"None": - self.info["transparency"] = c - elif rgb[:1] == b"#": - # FIXME: handle colour names (see ImagePalette.py) - rgb = int(rgb[1:], 16) - palette[c] = ( - o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255) - ) - else: - # unknown colour - msg = "cannot read this XPM file" - raise ValueError(msg) - break - - else: - # missing colour key - msg = "cannot read this XPM file" - raise ValueError(msg) - - self._mode = "P" - self.palette = ImagePalette.raw("RGB", b"".join(palette)) - - self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))] - - def load_read(self, read_bytes: int) -> bytes: - # - # load all image data in one chunk - - xsize, ysize = self.size - - s = [self.fp.readline()[1 : xsize + 1].ljust(xsize) for i in range(ysize)] - - return b"".join(s) - - -# -# Registry - - -Image.register_open(XpmImageFile.format, XpmImageFile, _accept) - -Image.register_extension(XpmImageFile.format, ".xpm") - -Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/venv/Lib/site-packages/PIL/__init__.py b/venv/Lib/site-packages/PIL/__init__.py deleted file mode 100644 index 09546fe..0000000 --- a/venv/Lib/site-packages/PIL/__init__.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Pillow (Fork of the Python Imaging Library) - -Pillow is the friendly PIL fork by Jeffrey A. Clark and contributors. - https://github.com/python-pillow/Pillow/ - -Pillow is forked from PIL 1.1.7. - -PIL is the Python Imaging Library by Fredrik Lundh and contributors. -Copyright (c) 1999 by Secret Labs AB. - -Use PIL.__version__ for this Pillow version. - -;-) -""" - -from __future__ import annotations - -from . import _version - -# VERSION was removed in Pillow 6.0.0. -# PILLOW_VERSION was removed in Pillow 9.0.0. -# Use __version__ instead. 
-__version__ = _version.__version__ -del _version - - -_plugins = [ - "BlpImagePlugin", - "BmpImagePlugin", - "BufrStubImagePlugin", - "CurImagePlugin", - "DcxImagePlugin", - "DdsImagePlugin", - "EpsImagePlugin", - "FitsImagePlugin", - "FliImagePlugin", - "FpxImagePlugin", - "FtexImagePlugin", - "GbrImagePlugin", - "GifImagePlugin", - "GribStubImagePlugin", - "Hdf5StubImagePlugin", - "IcnsImagePlugin", - "IcoImagePlugin", - "ImImagePlugin", - "ImtImagePlugin", - "IptcImagePlugin", - "JpegImagePlugin", - "Jpeg2KImagePlugin", - "McIdasImagePlugin", - "MicImagePlugin", - "MpegImagePlugin", - "MpoImagePlugin", - "MspImagePlugin", - "PalmImagePlugin", - "PcdImagePlugin", - "PcxImagePlugin", - "PdfImagePlugin", - "PixarImagePlugin", - "PngImagePlugin", - "PpmImagePlugin", - "PsdImagePlugin", - "QoiImagePlugin", - "SgiImagePlugin", - "SpiderImagePlugin", - "SunImagePlugin", - "TgaImagePlugin", - "TiffImagePlugin", - "WebPImagePlugin", - "WmfImagePlugin", - "XbmImagePlugin", - "XpmImagePlugin", - "XVThumbImagePlugin", -] - - -class UnidentifiedImageError(OSError): - """ - Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified. - - If a PNG image raises this error, setting :data:`.ImageFile.LOAD_TRUNCATED_IMAGES` - to true may allow the image to be opened after all. The setting will ignore missing - data and checksum failures. - """ - - pass diff --git a/venv/Lib/site-packages/PIL/__main__.py b/venv/Lib/site-packages/PIL/__main__.py deleted file mode 100644 index 043156e..0000000 --- a/venv/Lib/site-packages/PIL/__main__.py +++ /dev/null @@ -1,7 +0,0 @@ -from __future__ import annotations - -import sys - -from .features import pilinfo - -pilinfo(supported_formats="--report" not in sys.argv)
diff --git a/venv/Lib/site-packages/PIL/_binary.py b/venv/Lib/site-packages/PIL/_binary.py deleted file mode 100644 index 4594ccc..0000000 --- a/venv/Lib/site-packages/PIL/_binary.py +++ /dev/null @@ -1,112 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# Binary input/output support routines. -# -# Copyright (c) 1997-2003 by Secret Labs AB -# Copyright (c) 1995-2003 by Fredrik Lundh -# Copyright (c) 2012 by Brian Crowell -# -# See the README file for information on usage and redistribution. -# - - -"""Binary input/output support routines.""" -from __future__ import annotations - -from struct import pack, unpack_from - - -def i8(c: bytes) -> int: - return c[0] - - -def o8(i: int) -> bytes: - return bytes((i & 255,)) - - -# Input, le = little endian, be = big endian -def i16le(c: bytes, o: int = 0) -> int: - """ - Converts a 2-bytes (16 bits) string to an unsigned integer.
- - :param c: string containing bytes to convert - :param o: offset of bytes to convert in string - """ - return unpack_from("<H", c, o)[0] - - -def si16le(c: bytes, o: int = 0) -> int: - """ - Converts a 2-bytes (16 bits) string to a signed integer. - - :param c: string containing bytes to convert - :param o: offset of bytes to convert in string - """ - return unpack_from("<h", c, o)[0] - - -def si16be(c: bytes, o: int = 0) -> int: - """ - Converts a 2-bytes (16 bits) string to a signed integer, big endian. - - :param c: string containing bytes to convert - :param o: offset of bytes to convert in string - """ - return unpack_from(">h", c, o)[0] - - -def i32le(c: bytes, o: int = 0) -> int: - """ - Converts a 4-bytes (32 bits) string to an unsigned integer. - - :param c: string containing bytes to convert - :param o: offset of bytes to convert in string - """ - return unpack_from("<I", c, o)[0] - - -def si32le(c: bytes, o: int = 0) -> int: - """ - Converts a 4-bytes (32 bits) string to a signed integer. - - :param c: string containing bytes to convert - :param o: offset of bytes to convert in string - """ - return unpack_from("<i", c, o)[0] - - -def si32be(c: bytes, o: int = 0) -> int: - """ - Converts a 4-bytes (32 bits) string to a signed integer, big endian. - - :param c: string containing bytes to convert - :param o: offset of bytes to convert in string - """ - return unpack_from(">i", c, o)[0] - - -def i16be(c: bytes, o: int = 0) -> int: - return unpack_from(">H", c, o)[0] - - -def i32be(c: bytes, o: int = 0) -> int: - return unpack_from(">I", c, o)[0] - - -# Output, le = little endian, be = big endian -def o16le(i: int) -> bytes: - return pack("<H", i) - - -def o32le(i: int) -> bytes: - return pack("<I", i) - - -def o16be(i: int) -> bytes: - return pack(">H", i) - - -def o32be(i: int) -> bytes: - return pack(">I", i) diff --git a/venv/Lib/site-packages/PIL/_deprecate.py b/venv/Lib/site-packages/PIL/_deprecate.py deleted file mode 100644 index 33a0e07..0000000 --- a/venv/Lib/site-packages/PIL/_deprecate.py +++ /dev/null @@ -1,71 +0,0 @@ -from __future__ import annotations - -import warnings - -from . import __version__ - - -def deprecate( - deprecated: str, - when: int | None, - replacement: str | None = None, - *, - action: str | None = None, - plural: bool = False, -) -> None: - """ - Deprecations helper. - - :param deprecated: Name of thing to be deprecated. - :param when: Pillow major version to be removed in. - :param replacement: Name of replacement. - :param action: Instead of "replacement", give a custom call to action - e.g. "Upgrade to new thing". - :param plural: if the deprecated thing is plural, needing "are" instead of "is". - - Usually of the form: - - "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). - Use [replacement] instead." - - You can leave out the replacement sentence: - - "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd)" - - Or with another call to action: - - "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). - [action]." - """ - - is_ = "are" if plural else "is" - - if when is None: - removed = "a future version" - elif when <= int(__version__.split(".")[0]): - msg = f"{deprecated} {is_} deprecated and should be removed." - raise RuntimeError(msg) - elif when == 11: - removed = "Pillow 11 (2024-10-15)" - elif when == 12: - removed = "Pillow 12 (2025-10-15)" - else: - msg = f"Unknown removal version: {when}. Update {__name__}?" - raise ValueError(msg) - - if replacement and action: - msg = "Use only one of 'replacement' and 'action'" - raise ValueError(msg) - - if replacement: - action = f". Use {replacement} instead." - elif action: - action = f". {action.rstrip('.')}."
- else: - action = "" - - warnings.warn( - f"{deprecated} {is_} deprecated and will be removed in {removed}{action}", - DeprecationWarning, - stacklevel=3, - ) diff --git a/venv/Lib/site-packages/PIL/_imaging.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_imaging.cp312-win_amd64.pyd deleted file mode 100644 index c380321..0000000 Binary files a/venv/Lib/site-packages/PIL/_imaging.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_imaging.pyi b/venv/Lib/site-packages/PIL/_imaging.pyi deleted file mode 100644 index 8cccd3a..0000000 --- a/venv/Lib/site-packages/PIL/_imaging.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Any - -class ImagingCore: - def __getattr__(self, name: str) -> Any: ... - -class ImagingFont: - def __getattr__(self, name: str) -> Any: ... - -class ImagingDraw: - def __getattr__(self, name: str) -> Any: ... - -class PixelAccess: - def __getitem__(self, xy: tuple[int, int]) -> float | tuple[int, ...]: ... - def __setitem__( - self, xy: tuple[int, int], color: float | tuple[int, ...] - ) -> None: ... - -class ImagingDecoder: - def __getattr__(self, name: str) -> Any: ... - -class ImagingEncoder: - def __getattr__(self, name: str) -> Any: ... - -class _Outline: - def close(self) -> None: ... - def __getattr__(self, name: str) -> Any: ... - -def font(image: ImagingCore, glyphdata: bytes) -> ImagingFont: ... -def outline() -> _Outline: ... -def __getattr__(name: str) -> Any: ... diff --git a/venv/Lib/site-packages/PIL/_imagingcms.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_imagingcms.cp312-win_amd64.pyd deleted file mode 100644 index 91c1b05..0000000 Binary files a/venv/Lib/site-packages/PIL/_imagingcms.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_imagingcms.pyi b/venv/Lib/site-packages/PIL/_imagingcms.pyi deleted file mode 100644 index 2abd6d0..0000000 --- a/venv/Lib/site-packages/PIL/_imagingcms.pyi +++ /dev/null @@ -1,141 +0,0 @@ -import datetime -import sys -from typing import Literal, SupportsFloat, TypedDict - -littlecms_version: str | None - -_Tuple3f = tuple[float, float, float] -_Tuple2x3f = tuple[_Tuple3f, _Tuple3f] -_Tuple3x3f = tuple[_Tuple3f, _Tuple3f, _Tuple3f] - -class _IccMeasurementCondition(TypedDict): - observer: int - backing: _Tuple3f - geo: str - flare: float - illuminant_type: str - -class _IccViewingCondition(TypedDict): - illuminant: _Tuple3f - surround: _Tuple3f - illuminant_type: str - -class CmsProfile: - @property - def rendering_intent(self) -> int: ... - @property - def creation_date(self) -> datetime.datetime | None: ... - @property - def copyright(self) -> str | None: ... - @property - def target(self) -> str | None: ... - @property - def manufacturer(self) -> str | None: ... - @property - def model(self) -> str | None: ... - @property - def profile_description(self) -> str | None: ... - @property - def screening_description(self) -> str | None: ... - @property - def viewing_condition(self) -> str | None: ... - @property - def version(self) -> float: ... - @property - def icc_version(self) -> int: ... - @property - def attributes(self) -> int: ... - @property - def header_flags(self) -> int: ... - @property - def header_manufacturer(self) -> str: ... - @property - def header_model(self) -> str: ... - @property - def device_class(self) -> str: ... - @property - def connection_space(self) -> str: ... - @property - def xcolor_space(self) -> str: ... - @property - def profile_id(self) -> bytes: ... - @property - def is_matrix_shaper(self) -> bool: ... 
- @property - def technology(self) -> str | None: ... - @property - def colorimetric_intent(self) -> str | None: ... - @property - def perceptual_rendering_intent_gamut(self) -> str | None: ... - @property - def saturation_rendering_intent_gamut(self) -> str | None: ... - @property - def red_colorant(self) -> _Tuple2x3f | None: ... - @property - def green_colorant(self) -> _Tuple2x3f | None: ... - @property - def blue_colorant(self) -> _Tuple2x3f | None: ... - @property - def red_primary(self) -> _Tuple2x3f | None: ... - @property - def green_primary(self) -> _Tuple2x3f | None: ... - @property - def blue_primary(self) -> _Tuple2x3f | None: ... - @property - def media_white_point_temperature(self) -> float | None: ... - @property - def media_white_point(self) -> _Tuple2x3f | None: ... - @property - def media_black_point(self) -> _Tuple2x3f | None: ... - @property - def luminance(self) -> _Tuple2x3f | None: ... - @property - def chromatic_adaptation(self) -> tuple[_Tuple3x3f, _Tuple3x3f] | None: ... - @property - def chromaticity(self) -> _Tuple3x3f | None: ... - @property - def colorant_table(self) -> list[str] | None: ... - @property - def colorant_table_out(self) -> list[str] | None: ... - @property - def intent_supported(self) -> dict[int, tuple[bool, bool, bool]] | None: ... - @property - def clut(self) -> dict[int, tuple[bool, bool, bool]] | None: ... - @property - def icc_measurement_condition(self) -> _IccMeasurementCondition | None: ... - @property - def icc_viewing_condition(self) -> _IccViewingCondition | None: ... - def is_intent_supported(self, intent: int, direction: int, /) -> int: ... - -class CmsTransform: - def apply(self, id_in: int, id_out: int) -> int: ... - -def profile_open(profile: str, /) -> CmsProfile: ... -def profile_frombytes(profile: bytes, /) -> CmsProfile: ... -def profile_tobytes(profile: CmsProfile, /) -> bytes: ... -def buildTransform( - input_profile: CmsProfile, - output_profile: CmsProfile, - in_mode: str, - out_mode: str, - rendering_intent: int = 0, - cms_flags: int = 0, - /, -) -> CmsTransform: ... -def buildProofTransform( - input_profile: CmsProfile, - output_profile: CmsProfile, - proof_profile: CmsProfile, - in_mode: str, - out_mode: str, - rendering_intent: int = 0, - proof_intent: int = 0, - cms_flags: int = 0, - /, -) -> CmsTransform: ... -def createProfile( - color_space: Literal["LAB", "XYZ", "sRGB"], color_temp: SupportsFloat = 0.0, / -) -> CmsProfile: ... - -if sys.platform == "win32": - def get_display_profile_win32(handle: int = 0, is_dc: int = 0, /) -> str | None: ... diff --git a/venv/Lib/site-packages/PIL/_imagingft.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_imagingft.cp312-win_amd64.pyd deleted file mode 100644 index f6db636..0000000 Binary files a/venv/Lib/site-packages/PIL/_imagingft.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_imagingft.pyi b/venv/Lib/site-packages/PIL/_imagingft.pyi deleted file mode 100644 index 5e97b40..0000000 --- a/venv/Lib/site-packages/PIL/_imagingft.pyi +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Any, TypedDict - -from . import _imaging - -class _Axis(TypedDict): - minimum: int | None - default: int | None - maximum: int | None - name: bytes | None - -class Font: - @property - def family(self) -> str | None: ... - @property - def style(self) -> str | None: ... - @property - def ascent(self) -> int: ... - @property - def descent(self) -> int: ... - @property - def height(self) -> int: ... - @property - def x_ppem(self) -> int: ... 
- @property - def y_ppem(self) -> int: ... - @property - def glyphs(self) -> int: ... - def render( - self, - string: str | bytes, - fill, - mode=..., - dir=..., - features=..., - lang=..., - stroke_width=..., - anchor=..., - foreground_ink_long=..., - x_start=..., - y_start=..., - /, - ) -> tuple[_imaging.ImagingCore, tuple[int, int]]: ... - def getsize( - self, - string: str | bytes | bytearray, - mode=..., - dir=..., - features=..., - lang=..., - anchor=..., - /, - ) -> tuple[tuple[int, int], tuple[int, int]]: ... - def getlength( - self, string: str | bytes, mode=..., dir=..., features=..., lang=..., / - ) -> float: ... - def getvarnames(self) -> list[bytes]: ... - def getvaraxes(self) -> list[_Axis] | None: ... - def setvarname(self, instance_index: int, /) -> None: ... - def setvaraxes(self, axes: list[float], /) -> None: ... - -def getfont( - filename: str | bytes, - size: float, - index=..., - encoding=..., - font_bytes=..., - layout_engine=..., -) -> Font: ... -def __getattr__(name: str) -> Any: ... diff --git a/venv/Lib/site-packages/PIL/_imagingmath.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_imagingmath.cp312-win_amd64.pyd deleted file mode 100644 index 29316fa..0000000 Binary files a/venv/Lib/site-packages/PIL/_imagingmath.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_imagingmath.pyi b/venv/Lib/site-packages/PIL/_imagingmath.pyi deleted file mode 100644 index e27843e..0000000 --- a/venv/Lib/site-packages/PIL/_imagingmath.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from typing import Any - -def __getattr__(name: str) -> Any: ... diff --git a/venv/Lib/site-packages/PIL/_imagingmorph.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_imagingmorph.cp312-win_amd64.pyd deleted file mode 100644 index ee4d30e..0000000 Binary files a/venv/Lib/site-packages/PIL/_imagingmorph.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_imagingmorph.pyi b/venv/Lib/site-packages/PIL/_imagingmorph.pyi deleted file mode 100644 index e27843e..0000000 --- a/venv/Lib/site-packages/PIL/_imagingmorph.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from typing import Any - -def __getattr__(name: str) -> Any: ... diff --git a/venv/Lib/site-packages/PIL/_imagingtk.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_imagingtk.cp312-win_amd64.pyd deleted file mode 100644 index 8da18a8..0000000 Binary files a/venv/Lib/site-packages/PIL/_imagingtk.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_tkinter_finder.py b/venv/Lib/site-packages/PIL/_tkinter_finder.py deleted file mode 100644 index beddfb0..0000000 --- a/venv/Lib/site-packages/PIL/_tkinter_finder.py +++ /dev/null @@ -1,21 +0,0 @@ -""" Find compiled module linking to Tcl / Tk libraries -""" - -from __future__ import annotations - -import sys -import tkinter - -tk = getattr(tkinter, "_tkinter") - -try: - if hasattr(sys, "pypy_find_executable"): - TKINTER_LIB = tk.tklib_cffi.__file__ - else: - TKINTER_LIB = tk.__file__ -except AttributeError: - # _tkinter may be compiled directly into Python, in which case __file__ is - # not available. load_tkinter_funcs will check the binary first in any case. 
- TKINTER_LIB = None - -tk_version = str(tkinter.TkVersion) diff --git a/venv/Lib/site-packages/PIL/_typing.py b/venv/Lib/site-packages/PIL/_typing.py deleted file mode 100644 index 09ece18..0000000 --- a/venv/Lib/site-packages/PIL/_typing.py +++ /dev/null @@ -1,39 +0,0 @@ -from __future__ import annotations - -import os -import sys -from typing import Any, Protocol, Sequence, TypeVar, Union - -try: - import numpy.typing as npt - - NumpyArray = npt.NDArray[Any] -except ImportError: - pass - -if sys.version_info >= (3, 10): - from typing import TypeGuard -else: - try: - from typing_extensions import TypeGuard - except ImportError: - - class TypeGuard: # type: ignore[no-redef] - def __class_getitem__(cls, item: Any) -> type[bool]: - return bool - - -Coords = Union[Sequence[float], Sequence[Sequence[float]]] - - -_T_co = TypeVar("_T_co", covariant=True) - - -class SupportsRead(Protocol[_T_co]): - def read(self, __length: int = ...) -> _T_co: ... - - -StrOrBytesPath = Union[str, bytes, "os.PathLike[str]", "os.PathLike[bytes]"] - - -__all__ = ["TypeGuard", "StrOrBytesPath", "SupportsRead"] diff --git a/venv/Lib/site-packages/PIL/_util.py b/venv/Lib/site-packages/PIL/_util.py deleted file mode 100644 index 6bc7628..0000000 --- a/venv/Lib/site-packages/PIL/_util.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import annotations - -import os -from typing import Any, NoReturn - -from ._typing import StrOrBytesPath, TypeGuard - - -def is_path(f: Any) -> TypeGuard[StrOrBytesPath]: - return isinstance(f, (bytes, str, os.PathLike)) - - -def is_directory(f: Any) -> TypeGuard[StrOrBytesPath]: - """Checks if an object is a string, and that it points to a directory.""" - return is_path(f) and os.path.isdir(f) - - -class DeferredError: - def __init__(self, ex: BaseException): - self.ex = ex - - def __getattr__(self, elt: str) -> NoReturn: - raise self.ex - - @staticmethod - def new(ex: BaseException) -> Any: - """ - Creates an object that raises the wrapped exception ``ex`` when used, - and casts it to :py:obj:`~typing.Any` type. - """ - return DeferredError(ex) diff --git a/venv/Lib/site-packages/PIL/_version.py b/venv/Lib/site-packages/PIL/_version.py deleted file mode 100644 index cebfd86..0000000 --- a/venv/Lib/site-packages/PIL/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# Master version for Pillow -from __future__ import annotations - -__version__ = "10.4.0" diff --git a/venv/Lib/site-packages/PIL/_webp.cp312-win_amd64.pyd b/venv/Lib/site-packages/PIL/_webp.cp312-win_amd64.pyd deleted file mode 100644 index 5421a7e..0000000 Binary files a/venv/Lib/site-packages/PIL/_webp.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/PIL/_webp.pyi b/venv/Lib/site-packages/PIL/_webp.pyi deleted file mode 100644 index e27843e..0000000 --- a/venv/Lib/site-packages/PIL/_webp.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from typing import Any - -def __getattr__(name: str) -> Any: ... diff --git a/venv/Lib/site-packages/PIL/features.py b/venv/Lib/site-packages/PIL/features.py deleted file mode 100644 index 13908c4..0000000 --- a/venv/Lib/site-packages/PIL/features.py +++ /dev/null @@ -1,340 +0,0 @@ -from __future__ import annotations - -import collections -import os -import sys -import warnings -from typing import IO - -import PIL - -from . 
import Image - -modules = { - "pil": ("PIL._imaging", "PILLOW_VERSION"), - "tkinter": ("PIL._tkinter_finder", "tk_version"), - "freetype2": ("PIL._imagingft", "freetype2_version"), - "littlecms2": ("PIL._imagingcms", "littlecms_version"), - "webp": ("PIL._webp", "webpdecoder_version"), -} - - -def check_module(feature: str) -> bool: - """ - Checks if a module is available. - - :param feature: The module to check for. - :returns: ``True`` if available, ``False`` otherwise. - :raises ValueError: If the module is not defined in this version of Pillow. - """ - if feature not in modules: - msg = f"Unknown module {feature}" - raise ValueError(msg) - - module, ver = modules[feature] - - try: - __import__(module) - return True - except ModuleNotFoundError: - return False - except ImportError as ex: - warnings.warn(str(ex)) - return False - - -def version_module(feature: str) -> str | None: - """ - :param feature: The module to check for. - :returns: - The loaded version number as a string, or ``None`` if unknown or not available. - :raises ValueError: If the module is not defined in this version of Pillow. - """ - if not check_module(feature): - return None - - module, ver = modules[feature] - - return getattr(__import__(module, fromlist=[ver]), ver) - - -def get_supported_modules() -> list[str]: - """ - :returns: A list of all supported modules. - """ - return [f for f in modules if check_module(f)] - - -codecs = { - "jpg": ("jpeg", "jpeglib"), - "jpg_2000": ("jpeg2k", "jp2klib"), - "zlib": ("zip", "zlib"), - "libtiff": ("libtiff", "libtiff"), -} - - -def check_codec(feature: str) -> bool: - """ - Checks if a codec is available. - - :param feature: The codec to check for. - :returns: ``True`` if available, ``False`` otherwise. - :raises ValueError: If the codec is not defined in this version of Pillow. - """ - if feature not in codecs: - msg = f"Unknown codec {feature}" - raise ValueError(msg) - - codec, lib = codecs[feature] - - return f"{codec}_encoder" in dir(Image.core) - - -def version_codec(feature: str) -> str | None: - """ - :param feature: The codec to check for. - :returns: - The version number as a string, or ``None`` if not available. - Checked at compile time for ``jpg``, run-time otherwise. - :raises ValueError: If the codec is not defined in this version of Pillow. - """ - if not check_codec(feature): - return None - - codec, lib = codecs[feature] - - version = getattr(Image.core, f"{lib}_version") - - if feature == "libtiff": - return version.split("\n")[0].split("Version ")[1] - - return version - - -def get_supported_codecs() -> list[str]: - """ - :returns: A list of all supported codecs. - """ - return [f for f in codecs if check_codec(f)] - - -features = { - "webp_anim": ("PIL._webp", "HAVE_WEBPANIM", None), - "webp_mux": ("PIL._webp", "HAVE_WEBPMUX", None), - "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY", None), - "raqm": ("PIL._imagingft", "HAVE_RAQM", "raqm_version"), - "fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"), - "harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"), - "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"), - "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"), - "xcb": ("PIL._imaging", "HAVE_XCB", None), -} - - -def check_feature(feature: str) -> bool | None: - """ - Checks if a feature is available. - - :param feature: The feature to check for. - :returns: ``True`` if available, ``False`` if unavailable, ``None`` if unknown. 
- :raises ValueError: If the feature is not defined in this version of Pillow. - """ - if feature not in features: - msg = f"Unknown feature {feature}" - raise ValueError(msg) - - module, flag, ver = features[feature] - - try: - imported_module = __import__(module, fromlist=["PIL"]) - return getattr(imported_module, flag) - except ModuleNotFoundError: - return None - except ImportError as ex: - warnings.warn(str(ex)) - return None - - -def version_feature(feature: str) -> str | None: - """ - :param feature: The feature to check for. - :returns: The version number as a string, or ``None`` if not available. - :raises ValueError: If the feature is not defined in this version of Pillow. - """ - if not check_feature(feature): - return None - - module, flag, ver = features[feature] - - if ver is None: - return None - - return getattr(__import__(module, fromlist=[ver]), ver) - - -def get_supported_features() -> list[str]: - """ - :returns: A list of all supported features. - """ - return [f for f in features if check_feature(f)] - - -def check(feature: str) -> bool | None: - """ - :param feature: A module, codec, or feature name. - :returns: - ``True`` if the module, codec, or feature is available, - ``False`` or ``None`` otherwise. - """ - - if feature in modules: - return check_module(feature) - if feature in codecs: - return check_codec(feature) - if feature in features: - return check_feature(feature) - warnings.warn(f"Unknown feature '{feature}'.", stacklevel=2) - return False - - -def version(feature: str) -> str | None: - """ - :param feature: - The module, codec, or feature to check for. - :returns: - The version number as a string, or ``None`` if unknown or not available. - """ - if feature in modules: - return version_module(feature) - if feature in codecs: - return version_codec(feature) - if feature in features: - return version_feature(feature) - return None - - -def get_supported() -> list[str]: - """ - :returns: A list of all supported modules, features, and codecs. - """ - - ret = get_supported_modules() - ret.extend(get_supported_features()) - ret.extend(get_supported_codecs()) - return ret - - -def pilinfo(out: IO[str] | None = None, supported_formats: bool = True) -> None: - """ - Prints information about this installation of Pillow. - This function can be called with ``python3 -m PIL``. - It can also be called with ``python3 -m PIL.report`` or ``python3 -m PIL --report`` - to have "supported_formats" set to ``False``, omitting the list of all supported - image file formats. - - :param out: - The output stream to print to. Defaults to ``sys.stdout`` if ``None``. - :param supported_formats: - If ``True``, a list of all supported image file formats will be printed. 
- """ - - if out is None: - out = sys.stdout - - Image.init() - - print("-" * 68, file=out) - print(f"Pillow {PIL.__version__}", file=out) - py_version_lines = sys.version.splitlines() - print(f"Python {py_version_lines[0].strip()}", file=out) - for py_version in py_version_lines[1:]: - print(f" {py_version.strip()}", file=out) - print("-" * 68, file=out) - print(f"Python executable is {sys.executable or 'unknown'}", file=out) - if sys.prefix != sys.base_prefix: - print(f"Environment Python files loaded from {sys.prefix}", file=out) - print(f"System Python files loaded from {sys.base_prefix}", file=out) - print("-" * 68, file=out) - print( - f"Python Pillow modules loaded from {os.path.dirname(Image.__file__)}", - file=out, - ) - print( - f"Binary Pillow modules loaded from {os.path.dirname(Image.core.__file__)}", - file=out, - ) - print("-" * 68, file=out) - - for name, feature in [ - ("pil", "PIL CORE"), - ("tkinter", "TKINTER"), - ("freetype2", "FREETYPE2"), - ("littlecms2", "LITTLECMS2"), - ("webp", "WEBP"), - ("transp_webp", "WEBP Transparency"), - ("webp_mux", "WEBPMUX"), - ("webp_anim", "WEBP Animation"), - ("jpg", "JPEG"), - ("jpg_2000", "OPENJPEG (JPEG2000)"), - ("zlib", "ZLIB (PNG/ZIP)"), - ("libtiff", "LIBTIFF"), - ("raqm", "RAQM (Bidirectional Text)"), - ("libimagequant", "LIBIMAGEQUANT (Quantization method)"), - ("xcb", "XCB (X protocol)"), - ]: - if check(name): - v: str | None = None - if name == "jpg": - libjpeg_turbo_version = version_feature("libjpeg_turbo") - if libjpeg_turbo_version is not None: - v = "libjpeg-turbo " + libjpeg_turbo_version - if v is None: - v = version(name) - if v is not None: - version_static = name in ("pil", "jpg") - if name == "littlecms2": - # this check is also in src/_imagingcms.c:setup_module() - version_static = tuple(int(x) for x in v.split(".")) < (2, 7) - t = "compiled for" if version_static else "loaded" - if name == "raqm": - for f in ("fribidi", "harfbuzz"): - v2 = version_feature(f) - if v2 is not None: - v += f", {f} {v2}" - print("---", feature, "support ok,", t, v, file=out) - else: - print("---", feature, "support ok", file=out) - else: - print("***", feature, "support not installed", file=out) - print("-" * 68, file=out) - - if supported_formats: - extensions = collections.defaultdict(list) - for ext, i in Image.EXTENSION.items(): - extensions[i].append(ext) - - for i in sorted(Image.ID): - line = f"{i}" - if i in Image.MIME: - line = f"{line} {Image.MIME[i]}" - print(line, file=out) - - if i in extensions: - print( - "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out - ) - - features = [] - if i in Image.OPEN: - features.append("open") - if i in Image.SAVE: - features.append("save") - if i in Image.SAVE_ALL: - features.append("save_all") - if i in Image.DECODERS: - features.append("decode") - if i in Image.ENCODERS: - features.append("encode") - - print("Features: {}".format(", ".join(features)), file=out) - print("-" * 68, file=out) diff --git a/venv/Lib/site-packages/PIL/py.typed b/venv/Lib/site-packages/PIL/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/PIL/report.py b/venv/Lib/site-packages/PIL/report.py deleted file mode 100644 index d2815e8..0000000 --- a/venv/Lib/site-packages/PIL/report.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import annotations - -from .features import pilinfo - -pilinfo(supported_formats=False) diff --git a/venv/Lib/site-packages/Tea/__init__.py b/venv/Lib/site-packages/Tea/__init__.py deleted file mode 100644 index 
f6b7e26..0000000 --- a/venv/Lib/site-packages/Tea/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.4.3" diff --git a/venv/Lib/site-packages/Tea/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 652054a..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/core.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/core.cpython-312.pyc deleted file mode 100644 index a711795..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/decorators.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/decorators.cpython-312.pyc deleted file mode 100644 index e0b2a57..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/decorators.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 23bb3bb..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/model.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/model.cpython-312.pyc deleted file mode 100644 index c471b68..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/model.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/request.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/request.cpython-312.pyc deleted file mode 100644 index 21c00ac..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/request.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/response.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/response.cpython-312.pyc deleted file mode 100644 index 4f5e9a5..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/response.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/__pycache__/stream.cpython-312.pyc b/venv/Lib/site-packages/Tea/__pycache__/stream.cpython-312.pyc deleted file mode 100644 index 24274b9..0000000 Binary files a/venv/Lib/site-packages/Tea/__pycache__/stream.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/Tea/core.py b/venv/Lib/site-packages/Tea/core.py deleted file mode 100644 index bb3542b..0000000 --- a/venv/Lib/site-packages/Tea/core.py +++ /dev/null @@ -1,381 +0,0 @@ -import asyncio -import logging -import os -import ssl -import time -from enum import Enum -from typing import Any, Dict, Optional -from urllib.parse import urlencode, urlparse - -import aiohttp -import certifi -from requests import Session, PreparedRequest, adapters, status_codes - -from Tea.exceptions import RequiredArgumentException, RetryError -from Tea.model import TeaModel -from Tea.request import TeaRequest -from Tea.response import TeaResponse -from Tea.stream import BaseStream - -DEFAULT_CONNECT_TIMEOUT = 5000 -DEFAULT_READ_TIMEOUT = 10000 -DEFAULT_POOL_SIZE = 10 - -logger = logging.getLogger('alibabacloud-tea') -logger.setLevel(logging.DEBUG) -ch = logging.StreamHandler() -logger.addHandler(ch) - - -class TLSVersion(Enum): - TLSv1 = 'TLSv1' - TLSv1_1 = 'TLSv1.1' - TLSv1_2 = 'TLSv1.2' - TLSv1_3 = 'TLSv1.3' - - -class _TLSAdapter(adapters.HTTPAdapter): - """A HTTPAdapter that uses an arbitrary TLS 
version.""" - - def __init__(self, ssl_context=None, **kwargs): - self.ssl_context = ssl_context - super().__init__(**kwargs) - - def init_poolmanager(self, *args, **kwargs): - """Override the init_poolmanager method to set the SSL.""" - kwargs['ssl_context'] = self.ssl_context - super().init_poolmanager(*args, **kwargs) - - -class TeaCore: - _sessions = {} - http_adapter = adapters.HTTPAdapter(pool_connections=DEFAULT_POOL_SIZE, pool_maxsize=DEFAULT_POOL_SIZE * 4) - https_adapter = adapters.HTTPAdapter(pool_connections=DEFAULT_POOL_SIZE, pool_maxsize=DEFAULT_POOL_SIZE * 4) - - @staticmethod - def _set_tls_minimum_version(sls_context, tls_min_version): - context = sls_context - if tls_min_version is not None: - if tls_min_version == 'TLSv1': - context.minimum_version = ssl.TLSVersion.TLSv1 - elif tls_min_version == 'TLSv1.1': - context.minimum_version = ssl.TLSVersion.TLSv1_1 - elif tls_min_version == 'TLSv1.2': - context.minimum_version = ssl.TLSVersion.TLSv1_2 - elif tls_min_version == 'TLSv1.3': - context.minimum_version = ssl.TLSVersion.TLSv1_3 - return context - - @staticmethod - def get_adapter(prefix, tls_min_version: str = None): - ca_cert = certifi.where() - context = ssl.create_default_context() - if ca_cert and prefix.upper() == 'HTTPS': - context = TeaCore._set_tls_minimum_version(context, tls_min_version) - context.load_verify_locations(ca_cert) - adapter = _TLSAdapter(ssl_context=context, pool_connections=DEFAULT_POOL_SIZE, - pool_maxsize=DEFAULT_POOL_SIZE * 4) - return adapter - - @staticmethod - def _get_session(session_key: str, protocol: str, tls_min_version: str = None, verify: bool = True): - if session_key not in TeaCore._sessions: - session = Session() - adapter = TeaCore.get_adapter(protocol, tls_min_version) - if protocol.upper() == 'HTTPS': - if verify: - session.mount('https://', adapter) - else: - session.mount('https://', TeaCore.https_adapter) - else: - session.mount('http://', adapter) - TeaCore._sessions[session_key] = session - return TeaCore._sessions[session_key] - - @staticmethod - def _prepare_http_debug(request, symbol): - base = '' - for key, value in request.headers.items(): - base += f'\n{symbol} {key} : {value}' - return base - - @staticmethod - def _do_http_debug(request, response): - # logger the request - url = urlparse(request.url) - request_base = f'\n> {request.method.upper()} {url.path + url.query} HTTP/1.1' - logger.debug(request_base + TeaCore._prepare_http_debug(request, '>')) - - # logger the response - response_base = f'\n< HTTP/1.1 {response.status_code}' \ - f' {status_codes._codes.get(response.status_code)[0].upper()}' - logger.debug(response_base + TeaCore._prepare_http_debug(response, '<')) - - @staticmethod - def compose_url(request): - host = request.headers.get('host') - if not host: - raise RequiredArgumentException('endpoint') - else: - host = host.rstrip('/') - protocol = f'{request.protocol.lower()}://' - pathname = request.pathname - - if host.startswith(('http://', 'https://')): - protocol = '' - - if request.port == 80: - port = '' - else: - port = f':{request.port}' - - url = protocol + host + port + pathname - - if request.query: - if "?" in url: - if not url.endswith("&"): - url += "&" - else: - url += "?" 
- - encode_query = {} - for key in request.query: - value = request.query[key] - if value is not None: - encode_query[key] = str(value) - url += urlencode(encode_query) - return url.rstrip("?&") - - @staticmethod - async def async_do_action( - request: TeaRequest, - runtime_option=None - ) -> TeaResponse: - runtime_option = runtime_option or {} - - url = TeaCore.compose_url(request) - verify = not runtime_option.get('ignoreSSL', False) - tls_min_version = runtime_option.get('tlsMinVersion') - if isinstance(tls_min_version, Enum): - tls_min_version = tls_min_version.value - - timeout = runtime_option.get('timeout') - connect_timeout = runtime_option.get('connectTimeout') or timeout or DEFAULT_CONNECT_TIMEOUT - read_timeout = runtime_option.get('readTimeout') or timeout or DEFAULT_READ_TIMEOUT - - connect_timeout, read_timeout = (int(connect_timeout) / 1000, int(read_timeout) / 1000) - - proxy = None - if request.protocol.upper() == 'HTTP': - proxy = runtime_option.get('httpProxy') - if not proxy: - proxy = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy') - elif request.protocol.upper() == 'HTTPS': - proxy = runtime_option.get('httpsProxy') - if not proxy: - proxy = os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy') - - connector = None - ca_cert = certifi.where() - if ca_cert and request.protocol.upper() == 'HTTPS': - ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) - ssl_context = TeaCore._set_tls_minimum_version(ssl_context, tls_min_version) - ssl_context.load_verify_locations(ca_cert) - connector = aiohttp.TCPConnector( - ssl=ssl_context, - ) - else: - verify = False - - timeout = aiohttp.ClientTimeout( - sock_read=read_timeout, - sock_connect=connect_timeout - ) - async with aiohttp.ClientSession( - connector=connector - ) as s: - body = b'' - if isinstance(request.body, BaseStream): - for content in request.body: - body += content - elif isinstance(request.body, str): - body = request.body.encode('utf-8') - else: - body = request.body - try: - async with s.request(request.method, url, - data=body, - headers=request.headers, - ssl=verify, - proxy=proxy, - timeout=timeout) as response: - tea_resp = TeaResponse() - tea_resp.body = await response.read() - tea_resp.headers = {k.lower(): v for k, v in response.headers.items()} - tea_resp.status_code = response.status - tea_resp.status_message = response.reason - tea_resp.response = response - except IOError as e: - raise RetryError(str(e)) - return tea_resp - - @staticmethod - def do_action( - request: TeaRequest, - runtime_option=None - ) -> TeaResponse: - url = TeaCore.compose_url(request) - - runtime_option = runtime_option or {} - - verify = not runtime_option.get('ignoreSSL', False) - tls_min_version = runtime_option.get('tlsMinVersion') - if isinstance(tls_min_version, Enum): - tls_min_version = tls_min_version.value - - if verify: - verify = runtime_option.get('ca', True) if runtime_option.get('ca', True) is not None else True - cert = runtime_option.get('cert', None) - - timeout = runtime_option.get('timeout') - connect_timeout = runtime_option.get('connectTimeout') or timeout or DEFAULT_CONNECT_TIMEOUT - read_timeout = runtime_option.get('readTimeout') or timeout or DEFAULT_READ_TIMEOUT - - timeout = (int(connect_timeout) / 1000, int(read_timeout) / 1000) - - if isinstance(request.body, str): - request.body = request.body.encode('utf-8') - - p = PreparedRequest() - p.prepare( - method=request.method.upper(), - url=url, - data=request.body, - headers=request.headers, - ) - - proxies = {} - 
http_proxy = runtime_option.get('httpProxy') - https_proxy = runtime_option.get('httpsProxy') - no_proxy = runtime_option.get('noProxy') - - if not http_proxy: - http_proxy = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy') - if not https_proxy: - https_proxy = os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy') - - if http_proxy: - proxies['http'] = http_proxy - if https_proxy: - proxies['https'] = https_proxy - if no_proxy: - proxies['no_proxy'] = no_proxy - - host = request.headers.get('host') - host = host.rstrip('/') - - session_key = f'{request.protocol.lower()}://{host}:{request.port}' - session = TeaCore._get_session(session_key=session_key, protocol=request.protocol, - tls_min_version=tls_min_version, verify=verify) - try: - resp = session.send( - p, - proxies=proxies, - timeout=timeout, - verify=verify, - cert=cert, - ) - except IOError as e: - raise RetryError(str(e)) - - debug = runtime_option.get('debug') or os.getenv('DEBUG') - if debug and debug.lower() == 'sdk': - TeaCore._do_http_debug(p, resp) - - response = TeaResponse() - response.status_message = resp.reason - response.status_code = resp.status_code - response.headers = {k.lower(): v for k, v in resp.headers.items()} - response.body = resp.content - response.response = resp - return response - - @staticmethod - def get_response_body(resp) -> str: - return resp.content.decode("utf-8") - - @staticmethod - def allow_retry(dic, retry_times, now=None) -> bool: - if retry_times == 0: - return True - if dic is None or not dic.__contains__("maxAttempts") or \ - dic.get('retryable') is not True and retry_times >= 1: - return False - else: - retry = 0 if dic.get("maxAttempts") is None else int( - dic.get("maxAttempts")) - return retry >= retry_times - - @staticmethod - def get_backoff_time(dic, retry_times) -> int: - default_back_off_time = 0 - if dic is None or not dic.get("policy") or dic.get("policy") == "no": - return default_back_off_time - - back_off_time = dic.get('period', default_back_off_time) - if not isinstance(back_off_time, int) and \ - not (isinstance(back_off_time, str) and back_off_time.isdigit()): - return default_back_off_time - - back_off_time = int(back_off_time) - if back_off_time < 0: - return retry_times - - return back_off_time - - @staticmethod - async def sleep_async(t): - await asyncio.sleep(t) - - @staticmethod - def sleep(t): - time.sleep(t) - - @staticmethod - def is_retryable(ex) -> bool: - return isinstance(ex, RetryError) - - @staticmethod - def bytes_readable(body): - return body - - @staticmethod - def merge(*dic_list) -> dict: - dic_result = {} - for item in dic_list: - if isinstance(item, dict): - dic_result.update(item) - elif isinstance(item, TeaModel): - dic_result.update(item.to_map()) - return dic_result - - @staticmethod - def to_map(model: Optional[TeaModel]) -> Dict[str, Any]: - if isinstance(model, TeaModel): - return model.to_map() - else: - return dict() - - @staticmethod - def from_map( - model: TeaModel, - dic: Dict[str, Any] - ) -> TeaModel: - if isinstance(model, TeaModel): - try: - return model.from_map(dic) - except Exception: - model._map = dic - return model - else: - return model diff --git a/venv/Lib/site-packages/Tea/decorators.py b/venv/Lib/site-packages/Tea/decorators.py deleted file mode 100644 index 92cdab2..0000000 --- a/venv/Lib/site-packages/Tea/decorators.py +++ /dev/null @@ -1,30 +0,0 @@ -import warnings -import functools - - -def deprecated(reason): - """This is a decorator which can be used to mark functions as deprecated. 
- It will result in a warning being emitted when the function is used. - - Args: - reason (str): Explanation of why the function is deprecated. - """ - - def decorator(func): - original_func = func.__func__ if isinstance(func, staticmethod) or isinstance(func, classmethod) else func - - @functools.wraps(original_func) - def decorated_function(*args, **kwargs): - warnings.warn(f"Call to deprecated function {original_func.__name__}. {reason}", - category=DeprecationWarning, - stacklevel=2) - return original_func(*args, **kwargs) - - if isinstance(func, staticmethod): - return staticmethod(decorated_function) - elif isinstance(func, classmethod): - return classmethod(decorated_function) - else: - return decorated_function - - return decorator diff --git a/venv/Lib/site-packages/Tea/exceptions.py b/venv/Lib/site-packages/Tea/exceptions.py deleted file mode 100644 index e9bdf25..0000000 --- a/venv/Lib/site-packages/Tea/exceptions.py +++ /dev/null @@ -1,56 +0,0 @@ -from .request import TeaRequest - - -class TeaException(Exception): - def __init__(self, dic): - self.code = dic.get("code") - self.message = dic.get("message") - self.data = dic.get("data") - self.description = dic.get("description") - self.accessDeniedDetail = dic.get("accessDeniedDetail") - if isinstance(dic.get("data"), dict) and dic.get("data").get("statusCode") is not None: - self.statusCode = dic.get("data").get("statusCode") - - def __str__(self): - return f'Error: {self.code} {self.message} Response: {self.data}' - - -class ValidateException(Exception): - pass - - -class RequiredArgumentException(ValidateException): - def __init__(self, arg): - self.arg = arg - - def __str__(self): - return f'"{self.arg}" is required.' - - -class RetryError(Exception): - def __init__(self, message): - self.message = message - self.data = None - - -class UnretryableException(TeaException): - def __init__( - self, - request: TeaRequest, - ex: Exception - ): - self.last_request = request - self.inner_exception = ex - if isinstance(ex, TeaException): - super().__init__({ - 'code': ex.code, - 'message': ex.message, - 'data': ex.data - }) - else: - super().__init__({ - 'message': repr(ex), - }) - - def __str__(self): - return str(self.inner_exception) diff --git a/venv/Lib/site-packages/Tea/model.py b/venv/Lib/site-packages/Tea/model.py deleted file mode 100644 index dc058d8..0000000 --- a/venv/Lib/site-packages/Tea/model.py +++ /dev/null @@ -1,53 +0,0 @@ -import re -from .exceptions import RequiredArgumentException, ValidateException - - -class TeaModel: - _map = None - - def validate(self): - pass - - def to_map(self): - return self._map - - def from_map(self, map=None): - pass - - @staticmethod - def validate_required(prop, prop_name): - if prop is None: - raise RequiredArgumentException(prop_name) - - @staticmethod - def validate_max_length(prop, prop_name, max_length): - if len(prop) > max_length: - raise ValidateException(f'{prop_name} is exceed max-length: {max_length}') - - @staticmethod - def validate_min_length(prop, prop_name, min_length): - if len(prop) < min_length: - raise ValidateException(f'{prop_name} is less than min-length: {min_length}') - - @staticmethod - def validate_pattern(prop, prop_name, pattern): - match_obj = re.search(pattern, str(prop), re.M | re.I) - if not match_obj: - raise ValidateException(f'{prop_name} is not match: {pattern}') - - @staticmethod - def validate_maximum(num, prop_name, maximum): - if num > maximum: - raise ValidateException(f'{prop_name} is greater than the maximum: {maximum}') - - @staticmethod - 
def validate_minimum(num, prop_name, minimum): - if num < minimum: - raise ValidateException(f'{prop_name} is less than the minimum: {minimum}') - - def __str__(self): - s = self.to_map() - if s: - return str(s) - else: - return object.__str__(self) diff --git a/venv/Lib/site-packages/Tea/request.py b/venv/Lib/site-packages/Tea/request.py deleted file mode 100644 index b57e11d..0000000 --- a/venv/Lib/site-packages/Tea/request.py +++ /dev/null @@ -1,29 +0,0 @@ -class TeaRequest: - _PROPERTY_DEFAULT_MAP = { - 'query': {}, - 'protocol': 'http', - 'port': 80, - 'method': 'GET', - 'headers': {}, - 'pathname': "", - 'body': None, - } - - def __init__(self): - self.query = {} - self.protocol = "http" - self.port = 80 - self.method = "GET" - self.headers = {} - self.pathname = "" - self.body = None - - def __setattr__(self, key, value): - if key in self._PROPERTY_DEFAULT_MAP: - if not value: - if isinstance(self._PROPERTY_DEFAULT_MAP[key], (list, dict)): - self.__dict__[key] = self._PROPERTY_DEFAULT_MAP[key].copy() - else: - self.__dict__[key] = self._PROPERTY_DEFAULT_MAP[key] - return - self.__dict__[key] = value diff --git a/venv/Lib/site-packages/Tea/response.py b/venv/Lib/site-packages/Tea/response.py deleted file mode 100644 index 5345a46..0000000 --- a/venv/Lib/site-packages/Tea/response.py +++ /dev/null @@ -1,8 +0,0 @@ -class TeaResponse: - # status - status_code = None - # reason - status_message = None - headers = None - response = None - body = None diff --git a/venv/Lib/site-packages/Tea/stream.py b/venv/Lib/site-packages/Tea/stream.py deleted file mode 100644 index c0101b7..0000000 --- a/venv/Lib/site-packages/Tea/stream.py +++ /dev/null @@ -1,38 +0,0 @@ -class BaseStream: - def __init__(self, size=1024): - self.size = size - - def read(self, size=1024): - raise NotImplementedError('read method must be overridden') - - def __len__(self): - raise NotImplementedError('__len__ method must be overridden') - - def __next__(self): - raise NotImplementedError('__next__ method must be overridden') - - def __iter__(self): - return self - - -class _ReadableMc(type): - def __instancecheck__(self, instance): - if hasattr(instance, 'read') and hasattr(instance, '__iter__'): - return True - - -class READABLE(metaclass=_ReadableMc): - pass - - -class _WriteableMc(type): - def __instancecheck__(self, instance): - if hasattr(instance, 'write'): - return True - - -class WRITABLE(metaclass=_WriteableMc): - pass - - -STREAM_CLASS = (READABLE, WRITABLE) diff --git a/venv/Lib/site-packages/__pycache__/six.cpython-312.pyc b/venv/Lib/site-packages/__pycache__/six.cpython-312.pyc deleted file mode 100644 index 3670be1..0000000 Binary files a/venv/Lib/site-packages/__pycache__/six.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/venv/Lib/site-packages/__pycache__/typing_extensions.cpython-312.pyc deleted file mode 100644 index f08a2ce..0000000 Binary files a/venv/Lib/site-packages/__pycache__/typing_extensions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/_cffi_backend.cp312-win_amd64.pyd b/venv/Lib/site-packages/_cffi_backend.cp312-win_amd64.pyd deleted file mode 100644 index c83b3bd..0000000 Binary files a/venv/Lib/site-packages/_cffi_backend.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/INSTALLER b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- 
a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/METADATA b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/METADATA deleted file mode 100644 index 942d74c..0000000 --- a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/METADATA +++ /dev/null @@ -1,318 +0,0 @@ -Metadata-Version: 2.3 -Name: aiofiles -Version: 24.1.0 -Summary: File support for asyncio. -Project-URL: Changelog, https://github.com/Tinche/aiofiles#history -Project-URL: Bug Tracker, https://github.com/Tinche/aiofiles/issues -Project-URL: repository, https://github.com/Tinche/aiofiles -Author-email: Tin Tvrtkovic -License: Apache-2.0 -License-File: LICENSE -License-File: NOTICE -Classifier: Development Status :: 5 - Production/Stable -Classifier: Framework :: AsyncIO -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=3.8 -Description-Content-Type: text/markdown - -# aiofiles: file support for asyncio - -[![PyPI](https://img.shields.io/pypi/v/aiofiles.svg)](https://pypi.python.org/pypi/aiofiles) -[![Build](https://github.com/Tinche/aiofiles/workflows/CI/badge.svg)](https://github.com/Tinche/aiofiles/actions) -[![Coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/Tinche/882f02e3df32136c847ba90d2688f06e/raw/covbadge.json)](https://github.com/Tinche/aiofiles/actions/workflows/main.yml) -[![Supported Python versions](https://img.shields.io/pypi/pyversions/aiofiles.svg)](https://github.com/Tinche/aiofiles) -[![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) - -**aiofiles** is an Apache2 licensed library, written in Python, for handling local -disk files in asyncio applications. - -Ordinary local file IO is blocking, and cannot easily and portably be made -asynchronous. This means doing file IO may interfere with asyncio applications, -which shouldn't block the executing thread. aiofiles helps with this by -introducing asynchronous versions of files that support delegating operations to -a separate thread pool. - -```python -async with aiofiles.open('filename', mode='r') as f: - contents = await f.read() -print(contents) -'My file contents' -``` - -Asynchronous iteration is also supported. - -```python -async with aiofiles.open('filename') as f: - async for line in f: - ... -``` - -Asynchronous interface to tempfile module. 
- -```python -async with aiofiles.tempfile.TemporaryFile('wb') as f: - await f.write(b'Hello, World!') -``` - -## Features - -- a file API very similar to Python's standard, blocking API -- support for buffered and unbuffered binary files, and buffered text files -- support for `async`/`await` ([PEP 492](https://peps.python.org/pep-0492/)) constructs -- async interface to tempfile module - -## Installation - -To install aiofiles, simply: - -```bash -$ pip install aiofiles -``` - -## Usage - -Files are opened using the `aiofiles.open()` coroutine, which in addition to -mirroring the builtin `open` accepts optional `loop` and `executor` -arguments. If `loop` is absent, the default loop will be used, as per the -set asyncio policy. If `executor` is not specified, the default event loop -executor will be used. - -In case of success, an asynchronous file object is returned with an -API identical to an ordinary file, except the following methods are coroutines -and delegate to an executor: - -- `close` -- `flush` -- `isatty` -- `read` -- `readall` -- `read1` -- `readinto` -- `readline` -- `readlines` -- `seek` -- `seekable` -- `tell` -- `truncate` -- `writable` -- `write` -- `writelines` - -In case of failure, one of the usual exceptions will be raised. - -`aiofiles.stdin`, `aiofiles.stdout`, `aiofiles.stderr`, -`aiofiles.stdin_bytes`, `aiofiles.stdout_bytes`, and -`aiofiles.stderr_bytes` provide async access to `sys.stdin`, -`sys.stdout`, `sys.stderr`, and their corresponding `.buffer` properties. - -The `aiofiles.os` module contains executor-enabled coroutine versions of -several useful `os` functions that deal with files: - -- `stat` -- `statvfs` -- `sendfile` -- `rename` -- `renames` -- `replace` -- `remove` -- `unlink` -- `mkdir` -- `makedirs` -- `rmdir` -- `removedirs` -- `link` -- `symlink` -- `readlink` -- `listdir` -- `scandir` -- `access` -- `getcwd` -- `path.abspath` -- `path.exists` -- `path.isfile` -- `path.isdir` -- `path.islink` -- `path.ismount` -- `path.getsize` -- `path.getatime` -- `path.getctime` -- `path.samefile` -- `path.sameopenfile` - -### Tempfile - -**aiofiles.tempfile** implements the following interfaces: - -- TemporaryFile -- NamedTemporaryFile -- SpooledTemporaryFile -- TemporaryDirectory - -Results return wrapped with a context manager allowing use with async with and async for. - -```python -async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f: - await f.write(b'Line1\n Line2') - await f.seek(0) - async for line in f: - print(line) - -async with aiofiles.tempfile.TemporaryDirectory() as d: - filename = os.path.join(d, "file.ext") -``` - -### Writing tests for aiofiles - -Real file IO can be mocked by patching `aiofiles.threadpool.sync_open` -as desired. 
The return type also needs to be registered with the -`aiofiles.threadpool.wrap` dispatcher: - -```python -aiofiles.threadpool.wrap.register(mock.MagicMock)( - lambda *args, **kwargs: aiofiles.threadpool.AsyncBufferedIOBase(*args, **kwargs) -) - -async def test_stuff(): - write_data = 'data' - read_file_chunks = [ - b'file chunks 1', - b'file chunks 2', - b'file chunks 3', - b'', - ] - file_chunks_iter = iter(read_file_chunks) - - mock_file_stream = mock.MagicMock( - read=lambda *args, **kwargs: next(file_chunks_iter) - ) - - with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file_stream) as mock_open: - async with aiofiles.open('filename', 'w') as f: - await f.write(write_data) - assert f.read() == b'file chunks 1' - - mock_file_stream.write.assert_called_once_with(write_data) -``` - -### History - -#### 24.1.0 (2024-06-24) - -- Import `os.link` conditionally to fix importing on android. - [#175](https://github.com/Tinche/aiofiles/issues/175) -- Remove spurious items from `aiofiles.os.__all__` when running on Windows. -- Switch to more modern async idioms: Remove types.coroutine and make AiofilesContextManager an awaitable instead a coroutine. -- Add `aiofiles.os.path.abspath` and `aiofiles.os.getcwd`. - [#174](https://github.com/Tinche/aiofiles/issues/181) -- _aiofiles_ is now tested on Python 3.13 too. - [#184](https://github.com/Tinche/aiofiles/pull/184) -- Dropped Python 3.7 support. If you require it, use version 23.2.1. - -#### 23.2.1 (2023-08-09) - -- Import `os.statvfs` conditionally to fix importing on non-UNIX systems. - [#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172) -- aiofiles is now also tested on Windows. - -#### 23.2.0 (2023-08-09) - -- aiofiles is now tested on Python 3.12 too. - [#166](https://github.com/Tinche/aiofiles/issues/166) [#168](https://github.com/Tinche/aiofiles/pull/168) -- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` now accepts a `delete_on_close` argument, just like the stdlib version. -- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` no longer exposes a `delete` attribute, just like the stdlib version. -- Added `aiofiles.os.statvfs` and `aiofiles.os.path.ismount`. - [#162](https://github.com/Tinche/aiofiles/pull/162) -- Use [PDM](https://pdm.fming.dev/latest/) instead of Poetry. - [#169](https://github.com/Tinche/aiofiles/pull/169) - -#### 23.1.0 (2023-02-09) - -- Added `aiofiles.os.access`. - [#146](https://github.com/Tinche/aiofiles/pull/146) -- Removed `aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.softspace`. - [#151](https://github.com/Tinche/aiofiles/pull/151) -- Added `aiofiles.stdin`, `aiofiles.stdin_bytes`, and other stdio streams. - [#154](https://github.com/Tinche/aiofiles/pull/154) -- Transition to `asyncio.get_running_loop` (vs `asyncio.get_event_loop`) internally. - -#### 22.1.0 (2022-09-04) - -- Added `aiofiles.os.path.islink`. - [#126](https://github.com/Tinche/aiofiles/pull/126) -- Added `aiofiles.os.readlink`. - [#125](https://github.com/Tinche/aiofiles/pull/125) -- Added `aiofiles.os.symlink`. - [#124](https://github.com/Tinche/aiofiles/pull/124) -- Added `aiofiles.os.unlink`. - [#123](https://github.com/Tinche/aiofiles/pull/123) -- Added `aiofiles.os.link`. - [#121](https://github.com/Tinche/aiofiles/pull/121) -- Added `aiofiles.os.renames`. - [#120](https://github.com/Tinche/aiofiles/pull/120) -- Added `aiofiles.os.{listdir, scandir}`. - [#143](https://github.com/Tinche/aiofiles/pull/143) -- Switched to CalVer. 
-- Dropped Python 3.6 support. If you require it, use version 0.8.0. -- aiofiles is now tested on Python 3.11. - -#### 0.8.0 (2021-11-27) - -- aiofiles is now tested on Python 3.10. -- Added `aiofiles.os.replace`. - [#107](https://github.com/Tinche/aiofiles/pull/107) -- Added `aiofiles.os.{makedirs, removedirs}`. -- Added `aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}`. - [#63](https://github.com/Tinche/aiofiles/pull/63) -- Added `suffix`, `prefix`, `dir` args to `aiofiles.tempfile.TemporaryDirectory`. - [#116](https://github.com/Tinche/aiofiles/pull/116) - -#### 0.7.0 (2021-05-17) - -- Added the `aiofiles.tempfile` module for async temporary files. - [#56](https://github.com/Tinche/aiofiles/pull/56) -- Switched to Poetry and GitHub actions. -- Dropped 3.5 support. - -#### 0.6.0 (2020-10-27) - -- `aiofiles` is now tested on ppc64le. -- Added `name` and `mode` properties to async file objects. - [#82](https://github.com/Tinche/aiofiles/pull/82) -- Fixed a DeprecationWarning internally. - [#75](https://github.com/Tinche/aiofiles/pull/75) -- Python 3.9 support and tests. - -#### 0.5.0 (2020-04-12) - -- Python 3.8 support. Code base modernization (using `async/await` instead of `asyncio.coroutine`/`yield from`). -- Added `aiofiles.os.remove`, `aiofiles.os.rename`, `aiofiles.os.mkdir`, `aiofiles.os.rmdir`. - [#62](https://github.com/Tinche/aiofiles/pull/62) - -#### 0.4.0 (2018-08-11) - -- Python 3.7 support. -- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x. - -#### 0.3.2 (2017-09-23) - -- The LICENSE is now included in the sdist. - [#31](https://github.com/Tinche/aiofiles/pull/31) - -#### 0.3.1 (2017-03-10) - -- Introduced a changelog. -- `aiofiles.os.sendfile` will now work if the standard `os` module contains a `sendfile` function. - -### Contributing - -Contributions are very welcome. Tests can be run with `tox`, please ensure -the coverage at least stays the same before you submit a pull request. 
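The Usage section of the removed README above lists the coroutine file methods and the executor-backed `aiofiles.os` helpers but no combined example. The sketch below ties them together; it is illustrative only (the `notes.txt` filename is made up) and uses only calls named in that README.

```python
import asyncio

import aiofiles
import aiofiles.os


async def main():
    # write/read/seek are coroutines that delegate to the loop's executor
    async with aiofiles.open("notes.txt", "w") as f:
        await f.write("first line\nsecond line\n")

    # async iteration drives readline() under the hood
    async with aiofiles.open("notes.txt") as f:
        async for line in f:
            print(line.rstrip())

    # executor-backed os / os.path helpers from aiofiles.os
    if await aiofiles.os.path.exists("notes.txt"):
        print(await aiofiles.os.path.getsize("notes.txt"), "bytes")
        await aiofiles.os.remove("notes.txt")


asyncio.run(main())
```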
diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/RECORD b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/RECORD deleted file mode 100644 index 7a9df6a..0000000 --- a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/RECORD +++ /dev/null @@ -1,27 +0,0 @@ -aiofiles-24.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aiofiles-24.1.0.dist-info/METADATA,sha256=CvUJx21XclgI1Lp5Bt_4AyJesRYg0xCSx4exJZVmaSA,10708 -aiofiles-24.1.0.dist-info/RECORD,, -aiofiles-24.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -aiofiles-24.1.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 -aiofiles-24.1.0.dist-info/licenses/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 -aiofiles-24.1.0.dist-info/licenses/NOTICE,sha256=EExY0dRQvWR0wJ2LZLwBgnM6YKw9jCU-M0zegpRSD_E,55 -aiofiles/__init__.py,sha256=1iAMJQyJtX3LGIS0AoFTJeO1aJ_RK2jpBSBhg0VoIrE,344 -aiofiles/__pycache__/__init__.cpython-312.pyc,, -aiofiles/__pycache__/base.cpython-312.pyc,, -aiofiles/__pycache__/os.cpython-312.pyc,, -aiofiles/__pycache__/ospath.cpython-312.pyc,, -aiofiles/base.py,sha256=zo0FgkCqZ5aosjvxqIvDf2t-RFg1Lc6X8P6rZ56p6fQ,1784 -aiofiles/os.py,sha256=0DrsG-eH4h7xRzglv9pIWsQuzqe7ZhVYw5FQS18fIys,1153 -aiofiles/ospath.py,sha256=WaYelz_k6ykAFRLStr4bqYIfCVQ-5GGzIqIizykbY2Q,794 -aiofiles/tempfile/__init__.py,sha256=hFSNTOjOUv371Ozdfy6FIxeln46Nm3xOVh4ZR3Q94V0,10244 -aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc,, -aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc,, -aiofiles/tempfile/temptypes.py,sha256=ddEvNjMLVlr7WUILCe6ypTqw77yREeIonTk16Uw_NVs,2093 -aiofiles/threadpool/__init__.py,sha256=kt0hwwx3bLiYtnA1SORhW8mJ6z4W9Xr7MbY80UIJJrI,3133 -aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc,, -aiofiles/threadpool/__pycache__/binary.cpython-312.pyc,, -aiofiles/threadpool/__pycache__/text.cpython-312.pyc,, -aiofiles/threadpool/__pycache__/utils.cpython-312.pyc,, -aiofiles/threadpool/binary.py,sha256=hp-km9VCRu0MLz_wAEUfbCz7OL7xtn9iGAawabpnp5U,2315 -aiofiles/threadpool/text.py,sha256=fNmpw2PEkj0BZSldipJXAgZqVGLxALcfOMiuDQ54Eas,1223 -aiofiles/threadpool/utils.py,sha256=B59dSZwO_WZs2dFFycKeA91iD2Xq2nNw1EFF8YMBI5k,1868 diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/REQUESTED b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/WHEEL b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/WHEEL deleted file mode 100644 index cdd68a4..0000000 --- a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.25.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/licenses/LICENSE b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/licenses/LICENSE deleted file mode 100644 index e06d208..0000000 --- a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/licenses/NOTICE b/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/licenses/NOTICE deleted file mode 100644 index d134f28..0000000 --- a/venv/Lib/site-packages/aiofiles-24.1.0.dist-info/licenses/NOTICE +++ /dev/null @@ -1,2 +0,0 @@ -Asyncio support for files -Copyright 2016 Tin Tvrtkovic diff --git a/venv/Lib/site-packages/aiofiles/__init__.py b/venv/Lib/site-packages/aiofiles/__init__.py deleted file mode 100644 index 9e75111..0000000 --- a/venv/Lib/site-packages/aiofiles/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Utilities for asyncio-friendly file handling.""" -from .threadpool import ( - open, - stdin, - stdout, - stderr, - stdin_bytes, - stdout_bytes, - stderr_bytes, -) -from . import tempfile - -__all__ = [ - "open", - "tempfile", - "stdin", - "stdout", - "stderr", - "stdin_bytes", - "stdout_bytes", - "stderr_bytes", -] diff --git a/venv/Lib/site-packages/aiofiles/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 163e8b4..0000000 Binary files a/venv/Lib/site-packages/aiofiles/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 372eab0..0000000 Binary files a/venv/Lib/site-packages/aiofiles/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/__pycache__/os.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/__pycache__/os.cpython-312.pyc deleted file mode 100644 index f0a8d6e..0000000 Binary files a/venv/Lib/site-packages/aiofiles/__pycache__/os.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/__pycache__/ospath.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/__pycache__/ospath.cpython-312.pyc deleted file mode 100644 index a51afdd..0000000 Binary files a/venv/Lib/site-packages/aiofiles/__pycache__/ospath.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/base.py b/venv/Lib/site-packages/aiofiles/base.py deleted file mode 100644 index 64f7d6b..0000000 --- a/venv/Lib/site-packages/aiofiles/base.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Various base classes.""" -from collections.abc import Awaitable -from contextlib import AbstractAsyncContextManager -from asyncio import get_running_loop - - -class AsyncBase: - def __init__(self, file, loop, executor): - self._file = file - self._executor = executor - self._ref_loop = loop - - @property - def _loop(self): - return self._ref_loop or get_running_loop() - - def __aiter__(self): - """We are our own iterator.""" - return self - - def __repr__(self): - return super().__repr__() + " wrapping " + repr(self._file) - - async def __anext__(self): - """Simulate normal file iteration.""" - line = await self.readline() - if line: - return line - else: - raise StopAsyncIteration - - -class AsyncIndirectBase(AsyncBase): - def __init__(self, name, loop, executor, indirect): - self._indirect = indirect - self._name = name - super().__init__(None, loop, executor) - - @property - def _file(self): - return self._indirect() - - @_file.setter - def _file(self, v): - pass # discard writes - - -class AiofilesContextManager(Awaitable, AbstractAsyncContextManager): - """An adjusted async context manager for aiofiles.""" - - __slots__ = ("_coro", "_obj") - - def __init__(self, coro): - self._coro = coro - self._obj = None - - 
def __await__(self): - if self._obj is None: - self._obj = yield from self._coro.__await__() - return self._obj - - async def __aenter__(self): - return await self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await get_running_loop().run_in_executor( - None, self._obj._file.__exit__, exc_type, exc_val, exc_tb - ) - self._obj = None diff --git a/venv/Lib/site-packages/aiofiles/os.py b/venv/Lib/site-packages/aiofiles/os.py deleted file mode 100644 index 92243fa..0000000 --- a/venv/Lib/site-packages/aiofiles/os.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Async executor versions of file functions from the os module.""" - -import os - -from . import ospath as path -from .ospath import wrap - -__all__ = [ - "path", - "stat", - "rename", - "renames", - "replace", - "remove", - "unlink", - "mkdir", - "makedirs", - "rmdir", - "removedirs", - "symlink", - "readlink", - "listdir", - "scandir", - "access", - "wrap", - "getcwd", -] -if hasattr(os, "link"): - __all__ += ["link"] -if hasattr(os, "sendfile"): - __all__ += ["sendfile"] -if hasattr(os, "statvfs"): - __all__ += ["statvfs"] - - -stat = wrap(os.stat) -rename = wrap(os.rename) -renames = wrap(os.renames) -replace = wrap(os.replace) -remove = wrap(os.remove) -unlink = wrap(os.unlink) -mkdir = wrap(os.mkdir) -makedirs = wrap(os.makedirs) -rmdir = wrap(os.rmdir) -removedirs = wrap(os.removedirs) -symlink = wrap(os.symlink) -readlink = wrap(os.readlink) -listdir = wrap(os.listdir) -scandir = wrap(os.scandir) -access = wrap(os.access) -getcwd = wrap(os.getcwd) - -if hasattr(os, "link"): - link = wrap(os.link) -if hasattr(os, "sendfile"): - sendfile = wrap(os.sendfile) -if hasattr(os, "statvfs"): - statvfs = wrap(os.statvfs) diff --git a/venv/Lib/site-packages/aiofiles/ospath.py b/venv/Lib/site-packages/aiofiles/ospath.py deleted file mode 100644 index 387d68d..0000000 --- a/venv/Lib/site-packages/aiofiles/ospath.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Async executor versions of file functions from the os.path module.""" - -import asyncio -from functools import partial, wraps -from os import path - - -def wrap(func): - @wraps(func) - async def run(*args, loop=None, executor=None, **kwargs): - if loop is None: - loop = asyncio.get_running_loop() - pfunc = partial(func, *args, **kwargs) - return await loop.run_in_executor(executor, pfunc) - - return run - - -exists = wrap(path.exists) -isfile = wrap(path.isfile) -isdir = wrap(path.isdir) -islink = wrap(path.islink) -ismount = wrap(path.ismount) -getsize = wrap(path.getsize) -getmtime = wrap(path.getmtime) -getatime = wrap(path.getatime) -getctime = wrap(path.getctime) -samefile = wrap(path.samefile) -sameopenfile = wrap(path.sameopenfile) -abspath = wrap(path.abspath) diff --git a/venv/Lib/site-packages/aiofiles/tempfile/__init__.py b/venv/Lib/site-packages/aiofiles/tempfile/__init__.py deleted file mode 100644 index 75d10b6..0000000 --- a/venv/Lib/site-packages/aiofiles/tempfile/__init__.py +++ /dev/null @@ -1,357 +0,0 @@ -import asyncio -from functools import partial, singledispatch -from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOBase -from tempfile import NamedTemporaryFile as syncNamedTemporaryFile -from tempfile import SpooledTemporaryFile as syncSpooledTemporaryFile -from tempfile import TemporaryDirectory as syncTemporaryDirectory -from tempfile import TemporaryFile as syncTemporaryFile -from tempfile import _TemporaryFileWrapper as syncTemporaryFileWrapper - -from ..base import AiofilesContextManager -from ..threadpool.binary import AsyncBufferedIOBase, 
AsyncBufferedReader, AsyncFileIO -from ..threadpool.text import AsyncTextIOWrapper -from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory -import sys - -__all__ = [ - "NamedTemporaryFile", - "TemporaryFile", - "SpooledTemporaryFile", - "TemporaryDirectory", -] - - -# ================================================================ -# Public methods for async open and return of temp file/directory -# objects with async interface -# ================================================================ -if sys.version_info >= (3, 12): - - def NamedTemporaryFile( - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - delete=True, - delete_on_close=True, - loop=None, - executor=None, - ): - """Async open a named temporary file""" - return AiofilesContextManager( - _temporary_file( - named=True, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - delete=delete, - delete_on_close=delete_on_close, - loop=loop, - executor=executor, - ) - ) - -else: - - def NamedTemporaryFile( - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - delete=True, - loop=None, - executor=None, - ): - """Async open a named temporary file""" - return AiofilesContextManager( - _temporary_file( - named=True, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - delete=delete, - loop=loop, - executor=executor, - ) - ) - - -def TemporaryFile( - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - loop=None, - executor=None, -): - """Async open an unnamed temporary file""" - return AiofilesContextManager( - _temporary_file( - named=False, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - loop=loop, - executor=executor, - ) - ) - - -def SpooledTemporaryFile( - max_size=0, - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - loop=None, - executor=None, -): - """Async open a spooled temporary file""" - return AiofilesContextManager( - _spooled_temporary_file( - max_size=max_size, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - loop=loop, - executor=executor, - ) - ) - - -def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None): - """Async open a temporary directory""" - return AiofilesContextManagerTempDir( - _temporary_directory( - suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor - ) - ) - - -# ========================================================= -# Internal coroutines to open new temp files/directories -# ========================================================= -if sys.version_info >= (3, 12): - - async def _temporary_file( - named=True, - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - delete=True, - delete_on_close=True, - loop=None, - executor=None, - max_size=0, - ): - """Async method to open a temporary file with async interface""" - if loop is None: - loop = asyncio.get_running_loop() - - if named: - cb = partial( - syncNamedTemporaryFile, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - delete=delete, - delete_on_close=delete_on_close, - 
) - else: - cb = partial( - syncTemporaryFile, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - ) - - f = await loop.run_in_executor(executor, cb) - - # Wrap based on type of underlying IO object - if type(f) is syncTemporaryFileWrapper: - # _TemporaryFileWrapper was used (named files) - result = wrap(f.file, f, loop=loop, executor=executor) - result._closer = f._closer - return result - else: - # IO object was returned directly without wrapper - return wrap(f, f, loop=loop, executor=executor) - -else: - - async def _temporary_file( - named=True, - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - delete=True, - loop=None, - executor=None, - max_size=0, - ): - """Async method to open a temporary file with async interface""" - if loop is None: - loop = asyncio.get_running_loop() - - if named: - cb = partial( - syncNamedTemporaryFile, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - delete=delete, - ) - else: - cb = partial( - syncTemporaryFile, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - ) - - f = await loop.run_in_executor(executor, cb) - - # Wrap based on type of underlying IO object - if type(f) is syncTemporaryFileWrapper: - # _TemporaryFileWrapper was used (named files) - result = wrap(f.file, f, loop=loop, executor=executor) - # add delete property - result.delete = f.delete - return result - else: - # IO object was returned directly without wrapper - return wrap(f, f, loop=loop, executor=executor) - - -async def _spooled_temporary_file( - max_size=0, - mode="w+b", - buffering=-1, - encoding=None, - newline=None, - suffix=None, - prefix=None, - dir=None, - loop=None, - executor=None, -): - """Open a spooled temporary file with async interface""" - if loop is None: - loop = asyncio.get_running_loop() - - cb = partial( - syncSpooledTemporaryFile, - max_size=max_size, - mode=mode, - buffering=buffering, - encoding=encoding, - newline=newline, - suffix=suffix, - prefix=prefix, - dir=dir, - ) - - f = await loop.run_in_executor(executor, cb) - - # Single interface provided by SpooledTemporaryFile for all modes - return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor) - - -async def _temporary_directory( - suffix=None, prefix=None, dir=None, loop=None, executor=None -): - """Async method to open a temporary directory with async interface""" - if loop is None: - loop = asyncio.get_running_loop() - - cb = partial(syncTemporaryDirectory, suffix, prefix, dir) - f = await loop.run_in_executor(executor, cb) - - return AsyncTemporaryDirectory(f, loop=loop, executor=executor) - - -class AiofilesContextManagerTempDir(AiofilesContextManager): - """With returns the directory location, not the object (matching sync lib)""" - - async def __aenter__(self): - self._obj = await self._coro - return self._obj.name - - -@singledispatch -def wrap(base_io_obj, file, *, loop=None, executor=None): - """Wrap the object with interface based on type of underlying IO""" - raise TypeError("Unsupported IO type: {}".format(base_io_obj)) - - -@wrap.register(TextIOBase) -def _(base_io_obj, file, *, loop=None, executor=None): - return AsyncTextIOWrapper(file, loop=loop, executor=executor) - - -@wrap.register(BufferedWriter) -def _(base_io_obj, file, *, loop=None, executor=None): - return AsyncBufferedIOBase(file, loop=loop, 
executor=executor) - - -@wrap.register(BufferedReader) -@wrap.register(BufferedRandom) -def _(base_io_obj, file, *, loop=None, executor=None): - return AsyncBufferedReader(file, loop=loop, executor=executor) - - -@wrap.register(FileIO) -def _(base_io_obj, file, *, loop=None, executor=None): - return AsyncFileIO(file, loop=loop, executor=executor) diff --git a/venv/Lib/site-packages/aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 57c165a..0000000 Binary files a/venv/Lib/site-packages/aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc deleted file mode 100644 index 4dfe80b..0000000 Binary files a/venv/Lib/site-packages/aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/tempfile/temptypes.py b/venv/Lib/site-packages/aiofiles/tempfile/temptypes.py deleted file mode 100644 index dccee6c..0000000 --- a/venv/Lib/site-packages/aiofiles/tempfile/temptypes.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Async wrappers for spooled temp files and temp directory objects""" -from functools import partial - -from ..base import AsyncBase -from ..threadpool.utils import ( - cond_delegate_to_executor, - delegate_to_executor, - proxy_property_directly, -) - - -@delegate_to_executor("fileno", "rollover") -@cond_delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "readline", - "readlines", - "seek", - "tell", - "truncate", -) -@proxy_property_directly("closed", "encoding", "mode", "name", "newlines") -class AsyncSpooledTemporaryFile(AsyncBase): - """Async wrapper for SpooledTemporaryFile class""" - - async def _check(self): - if self._file._rolled: - return - max_size = self._file._max_size - if max_size and self._file.tell() > max_size: - await self.rollover() - - async def write(self, s): - """Implementation to anticipate rollover""" - if self._file._rolled: - cb = partial(self._file.write, s) - return await self._loop.run_in_executor(self._executor, cb) - else: - file = self._file._file # reference underlying base IO object - rv = file.write(s) - await self._check() - return rv - - async def writelines(self, iterable): - """Implementation to anticipate rollover""" - if self._file._rolled: - cb = partial(self._file.writelines, iterable) - return await self._loop.run_in_executor(self._executor, cb) - else: - file = self._file._file # reference underlying base IO object - rv = file.writelines(iterable) - await self._check() - return rv - - -@delegate_to_executor("cleanup") -@proxy_property_directly("name") -class AsyncTemporaryDirectory: - """Async wrapper for TemporaryDirectory class""" - - def __init__(self, file, loop, executor): - self._file = file - self._loop = loop - self._executor = executor - - async def close(self): - await self.cleanup() diff --git a/venv/Lib/site-packages/aiofiles/threadpool/__init__.py b/venv/Lib/site-packages/aiofiles/threadpool/__init__.py deleted file mode 100644 index e543283..0000000 --- a/venv/Lib/site-packages/aiofiles/threadpool/__init__.py +++ /dev/null @@ -1,139 +0,0 @@ -"""Handle files using a thread pool executor.""" -import asyncio -import sys -from functools import partial, singledispatch -from io import ( - BufferedIOBase, - BufferedRandom, - BufferedReader, - BufferedWriter, - 
FileIO, - TextIOBase, -) - -from ..base import AiofilesContextManager -from .binary import ( - AsyncBufferedIOBase, - AsyncBufferedReader, - AsyncFileIO, - AsyncIndirectBufferedIOBase, -) -from .text import AsyncTextIndirectIOWrapper, AsyncTextIOWrapper - -sync_open = open - -__all__ = ( - "open", - "stdin", - "stdout", - "stderr", - "stdin_bytes", - "stdout_bytes", - "stderr_bytes", -) - - -def open( - file, - mode="r", - buffering=-1, - encoding=None, - errors=None, - newline=None, - closefd=True, - opener=None, - *, - loop=None, - executor=None, -): - return AiofilesContextManager( - _open( - file, - mode=mode, - buffering=buffering, - encoding=encoding, - errors=errors, - newline=newline, - closefd=closefd, - opener=opener, - loop=loop, - executor=executor, - ) - ) - - -async def _open( - file, - mode="r", - buffering=-1, - encoding=None, - errors=None, - newline=None, - closefd=True, - opener=None, - *, - loop=None, - executor=None, -): - """Open an asyncio file.""" - if loop is None: - loop = asyncio.get_running_loop() - cb = partial( - sync_open, - file, - mode=mode, - buffering=buffering, - encoding=encoding, - errors=errors, - newline=newline, - closefd=closefd, - opener=opener, - ) - f = await loop.run_in_executor(executor, cb) - - return wrap(f, loop=loop, executor=executor) - - -@singledispatch -def wrap(file, *, loop=None, executor=None): - raise TypeError("Unsupported io type: {}.".format(file)) - - -@wrap.register(TextIOBase) -def _(file, *, loop=None, executor=None): - return AsyncTextIOWrapper(file, loop=loop, executor=executor) - - -@wrap.register(BufferedWriter) -@wrap.register(BufferedIOBase) -def _(file, *, loop=None, executor=None): - return AsyncBufferedIOBase(file, loop=loop, executor=executor) - - -@wrap.register(BufferedReader) -@wrap.register(BufferedRandom) -def _(file, *, loop=None, executor=None): - return AsyncBufferedReader(file, loop=loop, executor=executor) - - -@wrap.register(FileIO) -def _(file, *, loop=None, executor=None): - return AsyncFileIO(file, loop=loop, executor=executor) - - -stdin = AsyncTextIndirectIOWrapper("sys.stdin", None, None, indirect=lambda: sys.stdin) -stdout = AsyncTextIndirectIOWrapper( - "sys.stdout", None, None, indirect=lambda: sys.stdout -) -stderr = AsyncTextIndirectIOWrapper( - "sys.stderr", None, None, indirect=lambda: sys.stderr -) -stdin_bytes = AsyncIndirectBufferedIOBase( - "sys.stdin.buffer", None, None, indirect=lambda: sys.stdin.buffer -) -stdout_bytes = AsyncIndirectBufferedIOBase( - "sys.stdout.buffer", None, None, indirect=lambda: sys.stdout.buffer -) -stderr_bytes = AsyncIndirectBufferedIOBase( - "sys.stderr.buffer", None, None, indirect=lambda: sys.stderr.buffer -) diff --git a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 6ea661d..0000000 Binary files a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/binary.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/binary.cpython-312.pyc deleted file mode 100644 index 19d524d..0000000 Binary files a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/binary.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/text.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/text.cpython-312.pyc deleted file mode 100644 
index 4d5e471..0000000 Binary files a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/text.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 38e5d0b..0000000 Binary files a/venv/Lib/site-packages/aiofiles/threadpool/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiofiles/threadpool/binary.py b/venv/Lib/site-packages/aiofiles/threadpool/binary.py deleted file mode 100644 index 63fcaff..0000000 --- a/venv/Lib/site-packages/aiofiles/threadpool/binary.py +++ /dev/null @@ -1,104 +0,0 @@ -from ..base import AsyncBase, AsyncIndirectBase -from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly - - -@delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "read1", - "readinto", - "readline", - "readlines", - "seek", - "seekable", - "tell", - "truncate", - "writable", - "write", - "writelines", -) -@proxy_method_directly("detach", "fileno", "readable") -@proxy_property_directly("closed", "raw", "name", "mode") -class AsyncBufferedIOBase(AsyncBase): - """The asyncio executor version of io.BufferedWriter and BufferedIOBase.""" - - -@delegate_to_executor("peek") -class AsyncBufferedReader(AsyncBufferedIOBase): - """The asyncio executor version of io.BufferedReader and Random.""" - - -@delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "readall", - "readinto", - "readline", - "readlines", - "seek", - "seekable", - "tell", - "truncate", - "writable", - "write", - "writelines", -) -@proxy_method_directly("fileno", "readable") -@proxy_property_directly("closed", "name", "mode") -class AsyncFileIO(AsyncBase): - """The asyncio executor version of io.FileIO.""" - - -@delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "read1", - "readinto", - "readline", - "readlines", - "seek", - "seekable", - "tell", - "truncate", - "writable", - "write", - "writelines", -) -@proxy_method_directly("detach", "fileno", "readable") -@proxy_property_directly("closed", "raw", "name", "mode") -class AsyncIndirectBufferedIOBase(AsyncIndirectBase): - """The indirect asyncio executor version of io.BufferedWriter and BufferedIOBase.""" - - -@delegate_to_executor("peek") -class AsyncIndirectBufferedReader(AsyncIndirectBufferedIOBase): - """The indirect asyncio executor version of io.BufferedReader and Random.""" - - -@delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "readall", - "readinto", - "readline", - "readlines", - "seek", - "seekable", - "tell", - "truncate", - "writable", - "write", - "writelines", -) -@proxy_method_directly("fileno", "readable") -@proxy_property_directly("closed", "name", "mode") -class AsyncIndirectFileIO(AsyncIndirectBase): - """The indirect asyncio executor version of io.FileIO.""" diff --git a/venv/Lib/site-packages/aiofiles/threadpool/text.py b/venv/Lib/site-packages/aiofiles/threadpool/text.py deleted file mode 100644 index 0e62590..0000000 --- a/venv/Lib/site-packages/aiofiles/threadpool/text.py +++ /dev/null @@ -1,64 +0,0 @@ -from ..base import AsyncBase, AsyncIndirectBase -from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly - - -@delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "readable", - "readline", - "readlines", - "seek", - "seekable", - "tell", - "truncate", - "write", - "writable", - "writelines", -) 
-@proxy_method_directly("detach", "fileno", "readable") -@proxy_property_directly( - "buffer", - "closed", - "encoding", - "errors", - "line_buffering", - "newlines", - "name", - "mode", -) -class AsyncTextIOWrapper(AsyncBase): - """The asyncio executor version of io.TextIOWrapper.""" - - -@delegate_to_executor( - "close", - "flush", - "isatty", - "read", - "readable", - "readline", - "readlines", - "seek", - "seekable", - "tell", - "truncate", - "write", - "writable", - "writelines", -) -@proxy_method_directly("detach", "fileno", "readable") -@proxy_property_directly( - "buffer", - "closed", - "encoding", - "errors", - "line_buffering", - "newlines", - "name", - "mode", -) -class AsyncTextIndirectIOWrapper(AsyncIndirectBase): - """The indirect asyncio executor version of io.TextIOWrapper.""" diff --git a/venv/Lib/site-packages/aiofiles/threadpool/utils.py b/venv/Lib/site-packages/aiofiles/threadpool/utils.py deleted file mode 100644 index 5fd3bb9..0000000 --- a/venv/Lib/site-packages/aiofiles/threadpool/utils.py +++ /dev/null @@ -1,72 +0,0 @@ -import functools - - -def delegate_to_executor(*attrs): - def cls_builder(cls): - for attr_name in attrs: - setattr(cls, attr_name, _make_delegate_method(attr_name)) - return cls - - return cls_builder - - -def proxy_method_directly(*attrs): - def cls_builder(cls): - for attr_name in attrs: - setattr(cls, attr_name, _make_proxy_method(attr_name)) - return cls - - return cls_builder - - -def proxy_property_directly(*attrs): - def cls_builder(cls): - for attr_name in attrs: - setattr(cls, attr_name, _make_proxy_property(attr_name)) - return cls - - return cls_builder - - -def cond_delegate_to_executor(*attrs): - def cls_builder(cls): - for attr_name in attrs: - setattr(cls, attr_name, _make_cond_delegate_method(attr_name)) - return cls - - return cls_builder - - -def _make_delegate_method(attr_name): - async def method(self, *args, **kwargs): - cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) - return await self._loop.run_in_executor(self._executor, cb) - - return method - - -def _make_proxy_method(attr_name): - def method(self, *args, **kwargs): - return getattr(self._file, attr_name)(*args, **kwargs) - - return method - - -def _make_proxy_property(attr_name): - def proxy_property(self): - return getattr(self._file, attr_name) - - return property(proxy_property) - - -def _make_cond_delegate_method(attr_name): - """For spooled temp files, delegate only if rolled to file object""" - - async def method(self, *args, **kwargs): - if self._file._rolled: - cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) - return await self._loop.run_in_executor(self._executor, cb) - else: - return getattr(self._file, attr_name)(*args, **kwargs) - - return method diff --git a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER b/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE b/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE deleted file mode 100644 index f26bcf4..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE +++ /dev/null @@ -1,279 +0,0 @@ -A. 
HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations, which became -Zope Corporation. In 2001, the Python Software Foundation (PSF, see -https://www.python.org/psf/) was formed, a non-profit organization -created specifically to own Python-related Intellectual Property. -Zope Corporation was a sponsoring member of the PSF. - -All Python releases are Open Source (see https://opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -Python software and documentation are licensed under the -Python Software Foundation License Version 2. - -Starting with Python 3.8.6, examples, recipes, and other code in -the documentation are dual licensed under the PSF License Version 2 -and the Zero-Clause BSD license. - -Some software incorporated into Python is under different licenses. -The licenses are listed with code falling under that license. - - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. 
Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. 
BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION ----------------------------------------------------------------------- - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA b/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA deleted file mode 100644 index c632040..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA +++ /dev/null @@ -1,123 +0,0 @@ -Metadata-Version: 2.3 -Name: aiohappyeyeballs -Version: 2.6.1 -Summary: Happy Eyeballs for asyncio -License: PSF-2.0 -Author: J. Nick Koston -Author-email: nick@koston.org -Requires-Python: >=3.9 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Topic :: Software Development :: Libraries -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: License :: OSI Approved :: Python Software Foundation License -Project-URL: Bug Tracker, https://github.com/aio-libs/aiohappyeyeballs/issues -Project-URL: Changelog, https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md -Project-URL: Documentation, https://aiohappyeyeballs.readthedocs.io -Project-URL: Repository, https://github.com/aio-libs/aiohappyeyeballs -Description-Content-Type: text/markdown - -# aiohappyeyeballs - -

- [badges: CI Status, Documentation Status, Test coverage percentage, Poetry, Ruff, pre-commit, PyPI Version, Supported Python versions, License]

- ---- - -**Documentation**: https://aiohappyeyeballs.readthedocs.io - -**Source Code**: https://github.com/aio-libs/aiohappyeyeballs - ---- - -[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs) -([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html)) - -## Use case - -This library exists to allow connecting with -[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs) -([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html)) -when you -already have a list of addrinfo and not a DNS name. - -The stdlib version of `loop.create_connection()` -will only work when you pass in an unresolved name which -is not a good fit when using DNS caching or resolving -names via another method such as `zeroconf`. - -## Installation - -Install this via pip (or your favourite package manager): - -`pip install aiohappyeyeballs` - -## License - -[aiohappyeyeballs is licensed under the same terms as cpython itself.](https://github.com/python/cpython/blob/main/LICENSE) - -## Example usage - -```python - -addr_infos = await loop.getaddrinfo("example.org", 80) - -socket = await start_connection(addr_infos) -socket = await start_connection(addr_infos, local_addr_infos=local_addr_infos, happy_eyeballs_delay=0.2) - -transport, protocol = await loop.create_connection( - MyProtocol, sock=socket, ...) - -# Remove the first address for each family from addr_info -pop_addr_infos_interleave(addr_info, 1) - -# Remove all matching address from addr_info -remove_addr_infos(addr_info, "dead::beef::") - -# Convert a local_addr to local_addr_infos -local_addr_infos = addr_to_addr_infos(("127.0.0.1",0)) -``` - -## Credits - -This package contains code from cpython and is licensed under the same terms as cpython itself. - -This package was created with -[Copier](https://copier.readthedocs.io/) and the -[browniebroke/pypackage-template](https://github.com/browniebroke/pypackage-template) -project template. 
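The example-usage snippet in the metadata above is not self-contained: it references an existing `loop` and already-resolved `addr_infos`. A minimal runnable sketch of the same flow, assuming only the documented `aiohappyeyeballs.start_connection()` API of the package being removed here, could look like this:

```python
# Sketch of the Happy Eyeballs flow described above: resolve a name once,
# race the candidate addresses per RFC 8305, then reuse the winning socket.
# Assumes aiohappyeyeballs 2.x with the documented start_connection() API.
import asyncio
import socket

from aiohappyeyeballs import start_connection


async def main() -> None:
    loop = asyncio.get_running_loop()

    # Resolve the name ourselves (or reuse a cached / zeroconf result)
    # instead of letting loop.create_connection() resolve it again.
    addr_infos = await loop.getaddrinfo(
        "example.org", 80, type=socket.SOCK_STREAM
    )

    # Try addresses with a 250 ms stagger between attempts.
    sock = await start_connection(addr_infos, happy_eyeballs_delay=0.25)

    # Hand the already-connected socket to the event loop.
    transport, _protocol = await loop.create_connection(
        asyncio.Protocol, sock=sock
    )
    transport.close()


asyncio.run(main())
```

Passing `sock=` to `loop.create_connection()` is what lets the pre-resolved, Happy-Eyeballs-raced socket stand in for the stdlib's own name resolution.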
- diff --git a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD b/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD deleted file mode 100644 index 615f098..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD +++ /dev/null @@ -1,17 +0,0 @@ -aiohappyeyeballs-2.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aiohappyeyeballs-2.6.1.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 -aiohappyeyeballs-2.6.1.dist-info/METADATA,sha256=NSXlhJwAfi380eEjAo7BQ4P_TVal9xi0qkyZWibMsVM,5915 -aiohappyeyeballs-2.6.1.dist-info/RECORD,, -aiohappyeyeballs-2.6.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -aiohappyeyeballs-2.6.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88 -aiohappyeyeballs/__init__.py,sha256=x7kktHEtaD9quBcWDJPuLeKyjuVAI-Jj14S9B_5hcTs,361 -aiohappyeyeballs/__pycache__/__init__.cpython-312.pyc,, -aiohappyeyeballs/__pycache__/_staggered.cpython-312.pyc,, -aiohappyeyeballs/__pycache__/impl.cpython-312.pyc,, -aiohappyeyeballs/__pycache__/types.cpython-312.pyc,, -aiohappyeyeballs/__pycache__/utils.cpython-312.pyc,, -aiohappyeyeballs/_staggered.py,sha256=edfVowFx-P-ywJjIEF3MdPtEMVODujV6CeMYr65otac,6900 -aiohappyeyeballs/impl.py,sha256=Dlcm2mTJ28ucrGnxkb_fo9CZzLAkOOBizOt7dreBbXE,9681 -aiohappyeyeballs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -aiohappyeyeballs/types.py,sha256=YZJIAnyoV4Dz0WFtlaf_OyE4EW7Xus1z7aIfNI6tDDQ,425 -aiohappyeyeballs/utils.py,sha256=on9GxIR0LhEfZu8P6Twi9hepX9zDanuZM20MWsb3xlQ,3028 diff --git a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/REQUESTED b/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL b/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL deleted file mode 100644 index 0582547..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: poetry-core 2.1.1 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/aiohappyeyeballs/__init__.py b/venv/Lib/site-packages/aiohappyeyeballs/__init__.py deleted file mode 100644 index 71c689c..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -__version__ = "2.6.1" - -from .impl import start_connection -from .types import AddrInfoType, SocketFactoryType -from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos - -__all__ = ( - "AddrInfoType", - "SocketFactoryType", - "addr_to_addr_infos", - "pop_addr_infos_interleave", - "remove_addr_infos", - "start_connection", -) diff --git a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d6e88ff..0000000 Binary files a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/_staggered.cpython-312.pyc b/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/_staggered.cpython-312.pyc deleted file mode 100644 index 609c2fb..0000000 Binary files a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/_staggered.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-312.pyc 
b/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-312.pyc deleted file mode 100644 index 3e4adc0..0000000 Binary files a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/types.cpython-312.pyc b/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/types.cpython-312.pyc deleted file mode 100644 index bcb017e..0000000 Binary files a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/types.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 160d500..0000000 Binary files a/venv/Lib/site-packages/aiohappyeyeballs/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohappyeyeballs/_staggered.py b/venv/Lib/site-packages/aiohappyeyeballs/_staggered.py deleted file mode 100644 index 9a4ba72..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs/_staggered.py +++ /dev/null @@ -1,207 +0,0 @@ -import asyncio -import contextlib - -# PY3.9: Import Callable from typing until we drop Python 3.9 support -# https://github.com/python/cpython/issues/87131 -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable, - List, - Optional, - Set, - Tuple, - TypeVar, - Union, -) - -_T = TypeVar("_T") - -RE_RAISE_EXCEPTIONS = (SystemExit, KeyboardInterrupt) - - -def _set_result(wait_next: "asyncio.Future[None]") -> None: - """Set the result of a future if it is not already done.""" - if not wait_next.done(): - wait_next.set_result(None) - - -async def _wait_one( - futures: "Iterable[asyncio.Future[Any]]", - loop: asyncio.AbstractEventLoop, -) -> _T: - """Wait for the first future to complete.""" - wait_next = loop.create_future() - - def _on_completion(fut: "asyncio.Future[Any]") -> None: - if not wait_next.done(): - wait_next.set_result(fut) - - for f in futures: - f.add_done_callback(_on_completion) - - try: - return await wait_next - finally: - for f in futures: - f.remove_done_callback(_on_completion) - - -async def staggered_race( - coro_fns: Iterable[Callable[[], Awaitable[_T]]], - delay: Optional[float], - *, - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> Tuple[Optional[_T], Optional[int], List[Optional[BaseException]]]: - """ - Run coroutines with staggered start times and take the first to finish. - - This method takes an iterable of coroutine functions. The first one is - started immediately. From then on, whenever the immediately preceding one - fails (raises an exception), or when *delay* seconds has passed, the next - coroutine is started. This continues until one of the coroutines complete - successfully, in which case all others are cancelled, or until all - coroutines fail. - - The coroutines provided should be well-behaved in the following way: - - * They should only ``return`` if completed successfully. - - * They should always raise an exception if they did not complete - successfully. In particular, if they handle cancellation, they should - probably reraise, like this:: - - try: - # do work - except asyncio.CancelledError: - # undo partially completed work - raise - - Args: - ---- - coro_fns: an iterable of coroutine functions, i.e. callables that - return a coroutine object when called. Use ``functools.partial`` or - lambdas to pass arguments. - - delay: amount of time, in seconds, between starting coroutines. 
If - ``None``, the coroutines will run sequentially. - - loop: the event loop to use. If ``None``, the running loop is used. - - Returns: - ------- - tuple *(winner_result, winner_index, exceptions)* where - - - *winner_result*: the result of the winning coroutine, or ``None`` - if no coroutines won. - - - *winner_index*: the index of the winning coroutine in - ``coro_fns``, or ``None`` if no coroutines won. If the winning - coroutine may return None on success, *winner_index* can be used - to definitively determine whether any coroutine won. - - - *exceptions*: list of exceptions returned by the coroutines. - ``len(exceptions)`` is equal to the number of coroutines actually - started, and the order is the same as in ``coro_fns``. The winning - coroutine's entry is ``None``. - - """ - loop = loop or asyncio.get_running_loop() - exceptions: List[Optional[BaseException]] = [] - tasks: Set[asyncio.Task[Optional[Tuple[_T, int]]]] = set() - - async def run_one_coro( - coro_fn: Callable[[], Awaitable[_T]], - this_index: int, - start_next: "asyncio.Future[None]", - ) -> Optional[Tuple[_T, int]]: - """ - Run a single coroutine. - - If the coroutine fails, set the exception in the exceptions list and - start the next coroutine by setting the result of the start_next. - - If the coroutine succeeds, return the result and the index of the - coroutine in the coro_fns list. - - If SystemExit or KeyboardInterrupt is raised, re-raise it. - """ - try: - result = await coro_fn() - except RE_RAISE_EXCEPTIONS: - raise - except BaseException as e: - exceptions[this_index] = e - _set_result(start_next) # Kickstart the next coroutine - return None - - return result, this_index - - start_next_timer: Optional[asyncio.TimerHandle] = None - start_next: Optional[asyncio.Future[None]] - task: asyncio.Task[Optional[Tuple[_T, int]]] - done: Union[asyncio.Future[None], asyncio.Task[Optional[Tuple[_T, int]]]] - coro_iter = iter(coro_fns) - this_index = -1 - try: - while True: - if coro_fn := next(coro_iter, None): - this_index += 1 - exceptions.append(None) - start_next = loop.create_future() - task = loop.create_task(run_one_coro(coro_fn, this_index, start_next)) - tasks.add(task) - start_next_timer = ( - loop.call_later(delay, _set_result, start_next) if delay else None - ) - elif not tasks: - # We exhausted the coro_fns list and no tasks are running - # so we have no winner and all coroutines failed. - break - - while tasks or start_next: - done = await _wait_one( - (*tasks, start_next) if start_next else tasks, loop - ) - if done is start_next: - # The current task has failed or the timer has expired - # so we need to start the next task. - start_next = None - if start_next_timer: - start_next_timer.cancel() - start_next_timer = None - - # Break out of the task waiting loop to start the next - # task. - break - - if TYPE_CHECKING: - assert isinstance(done, asyncio.Task) - - tasks.remove(done) - if winner := done.result(): - return *winner, exceptions - finally: - # We either have: - # - a winner - # - all tasks failed - # - a KeyboardInterrupt or SystemExit. - - # - # If the timer is still running, cancel it. - # - if start_next_timer: - start_next_timer.cancel() - - # - # If there are any tasks left, cancel them and than - # wait them so they fill the exceptions list. 
- # - for task in tasks: - task.cancel() - with contextlib.suppress(asyncio.CancelledError): - await task - - return None, None, exceptions diff --git a/venv/Lib/site-packages/aiohappyeyeballs/impl.py b/venv/Lib/site-packages/aiohappyeyeballs/impl.py deleted file mode 100644 index 8f3919a..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs/impl.py +++ /dev/null @@ -1,259 +0,0 @@ -"""Base implementation.""" - -import asyncio -import collections -import contextlib -import functools -import itertools -import socket -from typing import List, Optional, Sequence, Set, Union - -from . import _staggered -from .types import AddrInfoType, SocketFactoryType - - -async def start_connection( - addr_infos: Sequence[AddrInfoType], - *, - local_addr_infos: Optional[Sequence[AddrInfoType]] = None, - happy_eyeballs_delay: Optional[float] = None, - interleave: Optional[int] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - socket_factory: Optional[SocketFactoryType] = None, -) -> socket.socket: - """ - Connect to a TCP server. - - Create a socket connection to a specified destination. The - destination is specified as a list of AddrInfoType tuples as - returned from getaddrinfo(). - - The arguments are, in order: - - * ``family``: the address family, e.g. ``socket.AF_INET`` or - ``socket.AF_INET6``. - * ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or - ``socket.SOCK_DGRAM``. - * ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or - ``socket.IPPROTO_UDP``. - * ``canonname``: the canonical name of the address, e.g. - ``"www.python.org"``. - * ``sockaddr``: the socket address - - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - socket. - - The expected use case is to use this method in conjunction with - loop.create_connection() to establish a connection to a server:: - - socket = await start_connection(addr_infos) - transport, protocol = await loop.create_connection( - MyProtocol, sock=socket, ...) - """ - if not (current_loop := loop): - current_loop = asyncio.get_running_loop() - - single_addr_info = len(addr_infos) == 1 - - if happy_eyeballs_delay is not None and interleave is None: - # If using happy eyeballs, default to interleave addresses by family - interleave = 1 - - if interleave and not single_addr_info: - addr_infos = _interleave_addrinfos(addr_infos, interleave) - - sock: Optional[socket.socket] = None - # uvloop can raise RuntimeError instead of OSError - exceptions: List[List[Union[OSError, RuntimeError]]] = [] - if happy_eyeballs_delay is None or single_addr_info: - # not using happy eyeballs - for addrinfo in addr_infos: - try: - sock = await _connect_sock( - current_loop, - exceptions, - addrinfo, - local_addr_infos, - None, - socket_factory, - ) - break - except (RuntimeError, OSError): - continue - else: # using happy eyeballs - open_sockets: Set[socket.socket] = set() - try: - sock, _, _ = await _staggered.staggered_race( - ( - functools.partial( - _connect_sock, - current_loop, - exceptions, - addrinfo, - local_addr_infos, - open_sockets, - socket_factory, - ) - for addrinfo in addr_infos - ), - happy_eyeballs_delay, - ) - finally: - # If we have a winner, staggered_race will - # cancel the other tasks, however there is a - # small race window where any of the other tasks - # can be done before they are cancelled which - # will leave the socket open. 
To avoid this problem - # we pass a set to _connect_sock to keep track of - # the open sockets and close them here if there - # are any "runner up" sockets. - for s in open_sockets: - if s is not sock: - with contextlib.suppress(OSError): - s.close() - open_sockets = None # type: ignore[assignment] - - if sock is None: - all_exceptions = [exc for sub in exceptions for exc in sub] - try: - first_exception = all_exceptions[0] - if len(all_exceptions) == 1: - raise first_exception - else: - # If they all have the same str(), raise one. - model = str(first_exception) - if all(str(exc) == model for exc in all_exceptions): - raise first_exception - # Raise a combined exception so the user can see all - # the various error messages. - msg = "Multiple exceptions: {}".format( - ", ".join(str(exc) for exc in all_exceptions) - ) - # If the errno is the same for all exceptions, raise - # an OSError with that errno. - if isinstance(first_exception, OSError): - first_errno = first_exception.errno - if all( - isinstance(exc, OSError) and exc.errno == first_errno - for exc in all_exceptions - ): - raise OSError(first_errno, msg) - elif isinstance(first_exception, RuntimeError) and all( - isinstance(exc, RuntimeError) for exc in all_exceptions - ): - raise RuntimeError(msg) - # We have a mix of OSError and RuntimeError - # so we have to pick which one to raise. - # and we raise OSError for compatibility - raise OSError(msg) - finally: - all_exceptions = None # type: ignore[assignment] - exceptions = None # type: ignore[assignment] - - return sock - - -async def _connect_sock( - loop: asyncio.AbstractEventLoop, - exceptions: List[List[Union[OSError, RuntimeError]]], - addr_info: AddrInfoType, - local_addr_infos: Optional[Sequence[AddrInfoType]] = None, - open_sockets: Optional[Set[socket.socket]] = None, - socket_factory: Optional[SocketFactoryType] = None, -) -> socket.socket: - """ - Create, bind and connect one socket. - - If open_sockets is passed, add the socket to the set of open sockets. - Any failure caught here will remove the socket from the set and close it. - - Callers can use this set to close any sockets that are not the winner - of all staggered tasks in the result there are runner up sockets aka - multiple winners. 
- """ - my_exceptions: List[Union[OSError, RuntimeError]] = [] - exceptions.append(my_exceptions) - family, type_, proto, _, address = addr_info - sock = None - try: - if socket_factory is not None: - sock = socket_factory(addr_info) - else: - sock = socket.socket(family=family, type=type_, proto=proto) - if open_sockets is not None: - open_sockets.add(sock) - sock.setblocking(False) - if local_addr_infos is not None: - for lfamily, _, _, _, laddr in local_addr_infos: - # skip local addresses of different family - if lfamily != family: - continue - try: - sock.bind(laddr) - break - except OSError as exc: - msg = ( - f"error while attempting to bind on " - f"address {laddr!r}: " - f"{(exc.strerror or '').lower()}" - ) - exc = OSError(exc.errno, msg) - my_exceptions.append(exc) - else: # all bind attempts failed - if my_exceptions: - raise my_exceptions.pop() - else: - raise OSError(f"no matching local address with {family=} found") - await loop.sock_connect(sock, address) - return sock - except (RuntimeError, OSError) as exc: - my_exceptions.append(exc) - if sock is not None: - if open_sockets is not None: - open_sockets.remove(sock) - try: - sock.close() - except OSError as e: - my_exceptions.append(e) - raise - raise - except: - if sock is not None: - if open_sockets is not None: - open_sockets.remove(sock) - try: - sock.close() - except OSError as e: - my_exceptions.append(e) - raise - raise - finally: - exceptions = my_exceptions = None # type: ignore[assignment] - - -def _interleave_addrinfos( - addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1 -) -> List[AddrInfoType]: - """Interleave list of addrinfo tuples by family.""" - # Group addresses by family - addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = ( - collections.OrderedDict() - ) - for addr in addrinfos: - family = addr[0] - if family not in addrinfos_by_family: - addrinfos_by_family[family] = [] - addrinfos_by_family[family].append(addr) - addrinfos_lists = list(addrinfos_by_family.values()) - - reordered: List[AddrInfoType] = [] - if first_address_family_count > 1: - reordered.extend(addrinfos_lists[0][: first_address_family_count - 1]) - del addrinfos_lists[0][: first_address_family_count - 1] - reordered.extend( - a - for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists)) - if a is not None - ) - return reordered diff --git a/venv/Lib/site-packages/aiohappyeyeballs/py.typed b/venv/Lib/site-packages/aiohappyeyeballs/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/aiohappyeyeballs/types.py b/venv/Lib/site-packages/aiohappyeyeballs/types.py deleted file mode 100644 index e8c7507..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs/types.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Types for aiohappyeyeballs.""" - -import socket - -# PY3.9: Import Callable from typing until we drop Python 3.9 support -# https://github.com/python/cpython/issues/87131 -from typing import Callable, Tuple, Union - -AddrInfoType = Tuple[ - Union[int, socket.AddressFamily], - Union[int, socket.SocketKind], - int, - str, - Tuple, # type: ignore[type-arg] -] - -SocketFactoryType = Callable[[AddrInfoType], socket.socket] diff --git a/venv/Lib/site-packages/aiohappyeyeballs/utils.py b/venv/Lib/site-packages/aiohappyeyeballs/utils.py deleted file mode 100644 index ea29adb..0000000 --- a/venv/Lib/site-packages/aiohappyeyeballs/utils.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Utility functions for aiohappyeyeballs.""" - -import ipaddress -import socket 
-from typing import Dict, List, Optional, Tuple, Union - -from .types import AddrInfoType - - -def addr_to_addr_infos( - addr: Optional[ - Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]] - ], -) -> Optional[List[AddrInfoType]]: - """Convert an address tuple to a list of addr_info tuples.""" - if addr is None: - return None - host = addr[0] - port = addr[1] - is_ipv6 = ":" in host - if is_ipv6: - flowinfo = 0 - scopeid = 0 - addr_len = len(addr) - if addr_len >= 4: - scopeid = addr[3] # type: ignore[misc] - if addr_len >= 3: - flowinfo = addr[2] # type: ignore[misc] - addr = (host, port, flowinfo, scopeid) - family = socket.AF_INET6 - else: - addr = (host, port) - family = socket.AF_INET - return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)] - - -def pop_addr_infos_interleave( - addr_infos: List[AddrInfoType], interleave: Optional[int] = None -) -> None: - """ - Pop addr_info from the list of addr_infos by family up to interleave times. - - The interleave parameter is used to know how many addr_infos for - each family should be popped of the top of the list. - """ - seen: Dict[int, int] = {} - if interleave is None: - interleave = 1 - to_remove: List[AddrInfoType] = [] - for addr_info in addr_infos: - family = addr_info[0] - if family not in seen: - seen[family] = 0 - if seen[family] < interleave: - to_remove.append(addr_info) - seen[family] += 1 - for addr_info in to_remove: - addr_infos.remove(addr_info) - - -def _addr_tuple_to_ip_address( - addr: Union[Tuple[str, int], Tuple[str, int, int, int]], -) -> Union[ - Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int] -]: - """Convert an address tuple to an IPv4Address.""" - return (ipaddress.ip_address(addr[0]), *addr[1:]) - - -def remove_addr_infos( - addr_infos: List[AddrInfoType], - addr: Union[Tuple[str, int], Tuple[str, int, int, int]], -) -> None: - """ - Remove an address from the list of addr_infos. - - The addr value is typically the return value of - sock.getpeername(). 
- """ - bad_addrs_infos: List[AddrInfoType] = [] - for addr_info in addr_infos: - if addr_info[-1] == addr: - bad_addrs_infos.append(addr_info) - if bad_addrs_infos: - for bad_addr_info in bad_addrs_infos: - addr_infos.remove(bad_addr_info) - return - # Slow path in case addr is formatted differently - match_addr = _addr_tuple_to_ip_address(addr) - for addr_info in addr_infos: - if match_addr == _addr_tuple_to_ip_address(addr_info[-1]): - bad_addrs_infos.append(addr_info) - if bad_addrs_infos: - for bad_addr_info in bad_addrs_infos: - addr_infos.remove(bad_addr_info) - return - raise ValueError(f"Address {addr} not found in addr_infos") diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA deleted file mode 100644 index 078765d..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA +++ /dev/null @@ -1,262 +0,0 @@ -Metadata-Version: 2.4 -Name: aiohttp -Version: 3.13.3 -Summary: Async http client/server framework (asyncio) -Maintainer-email: aiohttp team -License: Apache-2.0 AND MIT -Project-URL: Homepage, https://github.com/aio-libs/aiohttp -Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org -Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org -Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp -Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html -Project-URL: Docs: RTD, https://docs.aiohttp.org -Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp -Classifier: Development Status :: 5 - Production/Stable -Classifier: Framework :: AsyncIO -Classifier: Intended Audience :: Developers -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.9 -Description-Content-Type: text/x-rst -License-File: LICENSE.txt -License-File: vendor/llhttp/LICENSE -Requires-Dist: aiohappyeyeballs>=2.5.0 -Requires-Dist: aiosignal>=1.4.0 -Requires-Dist: async-timeout<6.0,>=4.0; python_version < "3.11" -Requires-Dist: attrs>=17.3.0 -Requires-Dist: frozenlist>=1.1.1 -Requires-Dist: multidict<7.0,>=4.5 -Requires-Dist: propcache>=0.2.0 -Requires-Dist: yarl<2.0,>=1.17.0 -Provides-Extra: speedups -Requires-Dist: aiodns>=3.3.0; extra == "speedups" -Requires-Dist: Brotli>=1.2; platform_python_implementation == "CPython" and extra == "speedups" -Requires-Dist: brotlicffi>=1.2; platform_python_implementation != "CPython" and extra == "speedups" -Requires-Dist: backports.zstd; (platform_python_implementation == "CPython" and python_version < "3.14") 
and extra == "speedups" -Dynamic: license-file - -================================== -Async http client/server framework -================================== - -.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg - :height: 64px - :width: 64px - :alt: aiohttp logo - -| - -.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg - :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI - :alt: GitHub Actions status for master branch - -.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/aiohttp - :alt: codecov.io status for master branch - -.. image:: https://badge.fury.io/py/aiohttp.svg - :target: https://pypi.org/project/aiohttp - :alt: Latest PyPI package version - -.. image:: https://img.shields.io/pypi/dm/aiohttp - :target: https://pypistats.org/packages/aiohttp - :alt: Downloads count - -.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest - :target: https://docs.aiohttp.org/ - :alt: Latest Read The Docs - -.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json - :target: https://codspeed.io/aio-libs/aiohttp - :alt: Codspeed.io status for aiohttp - - -Key Features -============ - -- Supports both client and server side of HTTP protocol. -- Supports both client and server Web-Sockets out-of-the-box and avoids - Callback Hell. -- Provides Web-server with middleware and pluggable routing. - - -Getting started -=============== - -Client ------- - -To get something from the web: - -.. code-block:: python - - import aiohttp - import asyncio - - async def main(): - - async with aiohttp.ClientSession() as session: - async with session.get('http://python.org') as response: - - print("Status:", response.status) - print("Content-type:", response.headers['content-type']) - - html = await response.text() - print("Body:", html[:15], "...") - - asyncio.run(main()) - -This prints: - -.. code-block:: - - Status: 200 - Content-type: text/html; charset=utf-8 - Body: ... - -Coming from `requests `_ ? Read `why we need so many lines `_. - -Server ------- - -An example using a simple server: - -.. code-block:: python - - # examples/server_simple.py - from aiohttp import web - - async def handle(request): - name = request.match_info.get('name', "Anonymous") - text = "Hello, " + name - return web.Response(text=text) - - async def wshandle(request): - ws = web.WebSocketResponse() - await ws.prepare(request) - - async for msg in ws: - if msg.type == web.WSMsgType.text: - await ws.send_str("Hello, {}".format(msg.data)) - elif msg.type == web.WSMsgType.binary: - await ws.send_bytes(msg.data) - elif msg.type == web.WSMsgType.close: - break - - return ws - - - app = web.Application() - app.add_routes([web.get('/', handle), - web.get('/echo', wshandle), - web.get('/{name}', handle)]) - - if __name__ == '__main__': - web.run_app(app) - - -Documentation -============= - -https://aiohttp.readthedocs.io/ - - -Demos -===== - -https://github.com/aio-libs/aiohttp-demos - - -External links -============== - -* `Third party libraries - `_ -* `Built with aiohttp - `_ -* `Powered by aiohttp - `_ - -Feel free to make a Pull Request for adding your link to these pages! - - -Communication channels -====================== - -*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions - -*Matrix*: `#aio-libs:matrix.org `_ - -We support `Stack Overflow -`_. -Please add *aiohttp* tag to your question there. 
- -Requirements -============ - -- attrs_ -- multidict_ -- yarl_ -- frozenlist_ - -Optionally you may install the aiodns_ library (highly recommended for sake of speed). - -.. _aiodns: https://pypi.python.org/pypi/aiodns -.. _attrs: https://github.com/python-attrs/attrs -.. _multidict: https://pypi.python.org/pypi/multidict -.. _frozenlist: https://pypi.org/project/frozenlist/ -.. _yarl: https://pypi.python.org/pypi/yarl -.. _async-timeout: https://pypi.python.org/pypi/async_timeout - -License -======= - -``aiohttp`` is offered under the Apache 2 license. - - -Keepsafe -======== - -The aiohttp community would like to thank Keepsafe -(https://www.getkeepsafe.com) for its support in the early days of -the project. - - -Source code -=========== - -The latest developer version is available in a GitHub repository: -https://github.com/aio-libs/aiohttp - -Benchmarks -========== - -If you are interested in efficiency, the AsyncIO community maintains a -list of benchmarks on the official wiki: -https://github.com/python/asyncio/wiki/Benchmarks - --------- - -.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - :target: https://matrix.to/#/%23aio-libs:matrix.org - :alt: Matrix Room — #aio-libs:matrix.org - -.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - :target: https://matrix.to/#/%23aio-libs-space:matrix.org - :alt: Matrix Space — #aio-libs-space:matrix.org - -.. image:: https://insights.linuxfoundation.org/api/badge/health-score?project=aiohttp - :target: https://insights.linuxfoundation.org/project/aiohttp - :alt: LFX Health Score diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD deleted file mode 100644 index 7fbdeaa..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD +++ /dev/null @@ -1,139 +0,0 @@ -aiohttp-3.13.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aiohttp-3.13.3.dist-info/METADATA,sha256=jkzui8KtHZ32gb8TfFZwIW4-zZ6Sr1eh1R6wYZW79Sg,8407 -aiohttp-3.13.3.dist-info/RECORD,, -aiohttp-3.13.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -aiohttp-3.13.3.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101 -aiohttp-3.13.3.dist-info/licenses/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601 -aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE,sha256=bd-mKNt20th7iWi6-61g9RxOyIEA3Xu5b5chbYivCAg,1127 -aiohttp-3.13.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 -aiohttp/.hash/_cparser.pxd.hash,sha256=eJQ2z7M7WoAng7D5ukCXzE3Yx22bLgv1PyOe0YbbQTM,108 -aiohttp/.hash/_find_header.pxd.hash,sha256=TxG5w4etbVd6sfm5JWbdf5PW6LnuXRQnlMoFBVGKN2E,112 -aiohttp/.hash/_http_parser.pyx.hash,sha256=NYbk_8ETW0vAtpTcxRVuWVmKJr9CUh2fR8I9emVQck4,112 -aiohttp/.hash/_http_writer.pyx.hash,sha256=J4W44iDZQwIyZ0rGO5v-_sKIfPtAwqn99EwgaevQmo8,112 -aiohttp/.hash/hdrs.py.hash,sha256=c2N-IMHz4dvAGL36CUyEw15noHE2AkJTeSBy3IxcCec,103 -aiohttp/__init__.py,sha256=wTWxnyVGn59VuoFuK1m2_jJ-Cw5Be9ktp7h5Hfvyaas,8580 -aiohttp/__pycache__/__init__.cpython-312.pyc,, -aiohttp/__pycache__/_cookie_helpers.cpython-312.pyc,, -aiohttp/__pycache__/abc.cpython-312.pyc,, -aiohttp/__pycache__/base_protocol.cpython-312.pyc,, -aiohttp/__pycache__/client.cpython-312.pyc,, 
-aiohttp/__pycache__/client_exceptions.cpython-312.pyc,, -aiohttp/__pycache__/client_middleware_digest_auth.cpython-312.pyc,, -aiohttp/__pycache__/client_middlewares.cpython-312.pyc,, -aiohttp/__pycache__/client_proto.cpython-312.pyc,, -aiohttp/__pycache__/client_reqrep.cpython-312.pyc,, -aiohttp/__pycache__/client_ws.cpython-312.pyc,, -aiohttp/__pycache__/compression_utils.cpython-312.pyc,, -aiohttp/__pycache__/connector.cpython-312.pyc,, -aiohttp/__pycache__/cookiejar.cpython-312.pyc,, -aiohttp/__pycache__/formdata.cpython-312.pyc,, -aiohttp/__pycache__/hdrs.cpython-312.pyc,, -aiohttp/__pycache__/helpers.cpython-312.pyc,, -aiohttp/__pycache__/http.cpython-312.pyc,, -aiohttp/__pycache__/http_exceptions.cpython-312.pyc,, -aiohttp/__pycache__/http_parser.cpython-312.pyc,, -aiohttp/__pycache__/http_websocket.cpython-312.pyc,, -aiohttp/__pycache__/http_writer.cpython-312.pyc,, -aiohttp/__pycache__/log.cpython-312.pyc,, -aiohttp/__pycache__/multipart.cpython-312.pyc,, -aiohttp/__pycache__/payload.cpython-312.pyc,, -aiohttp/__pycache__/payload_streamer.cpython-312.pyc,, -aiohttp/__pycache__/pytest_plugin.cpython-312.pyc,, -aiohttp/__pycache__/resolver.cpython-312.pyc,, -aiohttp/__pycache__/streams.cpython-312.pyc,, -aiohttp/__pycache__/tcp_helpers.cpython-312.pyc,, -aiohttp/__pycache__/test_utils.cpython-312.pyc,, -aiohttp/__pycache__/tracing.cpython-312.pyc,, -aiohttp/__pycache__/typedefs.cpython-312.pyc,, -aiohttp/__pycache__/web.cpython-312.pyc,, -aiohttp/__pycache__/web_app.cpython-312.pyc,, -aiohttp/__pycache__/web_exceptions.cpython-312.pyc,, -aiohttp/__pycache__/web_fileresponse.cpython-312.pyc,, -aiohttp/__pycache__/web_log.cpython-312.pyc,, -aiohttp/__pycache__/web_middlewares.cpython-312.pyc,, -aiohttp/__pycache__/web_protocol.cpython-312.pyc,, -aiohttp/__pycache__/web_request.cpython-312.pyc,, -aiohttp/__pycache__/web_response.cpython-312.pyc,, -aiohttp/__pycache__/web_routedef.cpython-312.pyc,, -aiohttp/__pycache__/web_runner.cpython-312.pyc,, -aiohttp/__pycache__/web_server.cpython-312.pyc,, -aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc,, -aiohttp/__pycache__/web_ws.cpython-312.pyc,, -aiohttp/__pycache__/worker.cpython-312.pyc,, -aiohttp/_cookie_helpers.py,sha256=x6tVKd6fgqjIFQzQ_z-t_CRl-Pnar7qJh8HUwroSKIA,13997 -aiohttp/_cparser.pxd,sha256=GP0Y9NqZYQGkJtS81XDzU70e7rRMb34TR7yGMmx5_zs,4453 -aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70 -aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090 -aiohttp/_http_parser.cp312-win_amd64.pyd,sha256=kVErC3Q1vBoeaoCynkMwWayfaXk4Ju-VaWbOVdGcwB8,248832 -aiohttp/_http_parser.pyx,sha256=9-jyYF9-4i7ToMV0mvVgQ_rqNa8KGJfhQVY0GGrZuGg,29096 -aiohttp/_http_writer.cp312-win_amd64.pyd,sha256=e2t5uBtwmasH8kAxdg6QOvalydEl5-m3n46J4WSffiI,47104 -aiohttp/_http_writer.pyx,sha256=WWdOf19QPqScBkifDhJynqPPOAmwB9sKJAO0Kkor4tE,4826 -aiohttp/_websocket/.hash/mask.pxd.hash,sha256=TL0gGYyJWxqG8dWwa08B74WGg6-0M6_Breqrff-AiZg,115 -aiohttp/_websocket/.hash/mask.pyx.hash,sha256=7xo6f01JaOQmaUNij3dQlOgxkEC1edkAIhwpeOvimLI,115 -aiohttp/_websocket/.hash/reader_c.pxd.hash,sha256=RzhqjHN1HadWDeMHVQvaf-XLlGxF6nm5u-HJHGsx2aE,119 -aiohttp/_websocket/__init__.py,sha256=R51KWH5kkdtDLb7T-ilztksbfweKCy3t22SgxGtiY-4,45 -aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc,, -aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc,, -aiohttp/_websocket/__pycache__/models.cpython-312.pyc,, -aiohttp/_websocket/__pycache__/reader.cpython-312.pyc,, -aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc,, 
-aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc,, -aiohttp/_websocket/__pycache__/writer.cpython-312.pyc,, -aiohttp/_websocket/helpers.py,sha256=amqvDhoAKAi8ptB4qUNuQhkaOn-4JxSh_VLAqytmEfw,5185 -aiohttp/_websocket/mask.cp312-win_amd64.pyd,sha256=Q7mH9VajqPagYj6NGCurPmwJWcMZU07zN4FEkfUAP_c,36864 -aiohttp/_websocket/mask.pxd,sha256=41TdSZvhcbYSW_Vrw7bF4r_yoor2njtdaZ3bmvK6-jw,115 -aiohttp/_websocket/mask.pyx,sha256=Ro7dOOv43HAAqNMz3xyCA11ppcn-vARIvjycStTEYww,1445 -aiohttp/_websocket/models.py,sha256=Pz8qvnU43VUCNZcY4g03VwTsHOsb_jSN8iG69xMAc_A,2205 -aiohttp/_websocket/reader.py,sha256=1r0cJ-jdFgbSrC6-jI0zjEA1CppzoUn8u_wiebrVVO0,1061 -aiohttp/_websocket/reader_c.cp312-win_amd64.pyd,sha256=2gSIJBH5w8xkfbErzqeI_MTILdr4gR4Pc4ytNj_jaD0,147968 -aiohttp/_websocket/reader_c.pxd,sha256=HNOl4gRWtNBNEYNbK9PGOfFEQwUqJGexBbDKB_20sl0,2735 -aiohttp/_websocket/reader_c.py,sha256=UKfslJuANla_CQMe7yIJzE8vp7bpzz9TLr-lH87XW6U,19346 -aiohttp/_websocket/reader_py.py,sha256=UKfslJuANla_CQMe7yIJzE8vp7bpzz9TLr-lH87XW6U,19346 -aiohttp/_websocket/writer.py,sha256=MpuNvG_t34CaDTAzW5FZJaRME8sL19rZotxSbXz2aas,11523 -aiohttp/abc.py,sha256=01N6Y63o2bBC8Vi0ZjO6Jw0V9kXZfy3egwzKFW-tv9c,7417 -aiohttp/base_protocol.py,sha256=8vNIv6QV_SDCW-8tfhlyxSwiBD7dAiMTqJI1GI8RG5s,3125 -aiohttp/client.py,sha256=KlWhIZt935YpOZcXOOZl3eIRkuO-l0z2BH7arfhGg-A,59992 -aiohttp/client_exceptions.py,sha256=sJcuvYKaB2nwuSdP7k18y3wc74aU0xAzdJikzzesrPE,11788 -aiohttp/client_middleware_digest_auth.py,sha256=K4TPt4-rPQ0jjSHx3UFguMN7n31LpCC_o6JA-Hrg_Pc,18107 -aiohttp/client_middlewares.py,sha256=FEVIXFkQ58n5bhK4BGEqqDCWnDh-GNJmWq20I5Yt6SU,1973 -aiohttp/client_proto.py,sha256=rfbg8nUsfpCMM_zGpQygiFn8nzSdBI-731rmXVGHwLc,12469 -aiohttp/client_reqrep.py,sha256=BUrqo2BJbrNazrIJr-ZgMLRTvE2fSON3zPQSq1dfgfU,54927 -aiohttp/client_ws.py,sha256=9DraHuupuJcT7NOgyeGml8SBr7V5D5ID5-piY1fQMdA,15537 -aiohttp/compression_utils.py,sha256=w0ECGGLVjtCXdYg-U_9DBn-DASzDPaWEVRx1HlwWslk,12086 -aiohttp/connector.py,sha256=X2sRe6EAeWiaP6eaK9hWvLtSbdiJfNhK3bWl7XbR_V4,70846 -aiohttp/cookiejar.py,sha256=C2fVzQGFieFP9mFDTOvfEc6fb5kPS2ijL2tFKAUW7Sw,19444 -aiohttp/formdata.py,sha256=sz3VaTHVk11z_5G1LaDhUwrONJ8zRAGlZGg3hcCApzA,6563 -aiohttp/hdrs.py,sha256=7htmhgZyE9HqWbPpxHU0r7kAIdT2kpOXQa1AadDh2W8,5232 -aiohttp/helpers.py,sha256=1tXIvGSRWJD9wsS7GUVHLfJEsDM_XigurpgjxajkH0g,31615 -aiohttp/http.py,sha256=DGKcwDbgIMpasv7s2jeKCRuixyj7W-RIrihRFjj0xcY,1914 -aiohttp/http_exceptions.py,sha256=J3v-1S9S22GfAEtx0pEqp6d4G1Lqi2-gOrdLtuGlEhY,3185 -aiohttp/http_parser.py,sha256=O5ud4wO80WLFe9kpXU0xGhjczUfrb7BAr0XAP7rBn7E,39263 -aiohttp/http_websocket.py,sha256=b9kBmxPLPFQP_nu_sMhIMIeqDOm0ug8G4prbrhEMHZ0,878 -aiohttp/http_writer.py,sha256=jA_aJW7JdH1mihrIYdJcLOHVKQ4Agg3g993v50eITBs,12824 -aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333 -aiohttp/multipart.py,sha256=UvcLOX3lO3ad3nfODsdlyvYWMAZHdUZ-wlZ5w1TbD2E,41634 -aiohttp/payload.py,sha256=Xbs_2l0wDaThFG-ehNlvzQUkHuBPpc5FxpJnJa3ZPcs,41994 -aiohttp/payload_streamer.py,sha256=K0iV85iW0vEG3rDkcopruidspynzQvrwW8mJvgPHisg,2289 -aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8 -aiohttp/pytest_plugin.py,sha256=ymhjbYHz2Kf0ZU_4Ly0hAp73dhsgrQIzJDo4Aot3_TI,13345 -aiohttp/resolver.py,sha256=ePJgZAN5EQY4YuFiuZmVZM6p3UuzJ4qMWM1fu8DJ2Fc,10305 -aiohttp/streams.py,sha256=J0G4ZJPdRScOPtnaB1ixhQYjLunLk8z70mfN9bc5K_o,24424 -aiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998 -aiohttp/test_utils.py,sha256=zFWAb-rPz1fWRUHnrjnfUH7ORlfIgZ2UZbEGe4YTa9I,23790 
-aiohttp/tracing.py,sha256=Kb-N32aMmYqC2Yc82NV6l0mIcavSQst1BHSFj94Apl0,15013 -aiohttp/typedefs.py,sha256=Sx5v2yUyLu8nbabqtJRWj1M1_uW0IZACu78uYD7LBy0,1726 -aiohttp/web.py,sha256=BQ96NEuTWikKGN5NnnTHjFLt07GUMWvvn42iFuIS3Mg,18444 -aiohttp/web_app.py,sha256=WwEEzUg34j81kK2dPFnhlqx_z6nGjnHZDweZJF65pKc,20072 -aiohttp/web_exceptions.py,sha256=itNRhCMDJFhnMWftr5SyTsoqh-i0n9rzTj0sjcAEUjo,10812 -aiohttp/web_fileresponse.py,sha256=QIIbcIruCgfYrc8ZDvOgNlZzLbAagwXA9FrNI7NKNPY,16780 -aiohttp/web_log.py,sha256=G5ugloW9noUxPft0SmVWOXw30MviL6rqZc3XrKN_T1U,8081 -aiohttp/web_middlewares.py,sha256=mM2-R8eaV2r6Mi9Zc2bDG8QnhE9h0IzPvtDX_fkKR5s,4286 -aiohttp/web_protocol.py,sha256=gJaDFtYPA-1gz35fwchjLhxrkmXXMOzFMCDHLQ1FHiI,27802 -aiohttp/web_request.py,sha256=9zqyP32ScMUylQ_ta4tBHpWmoprhSB4jTgj2ixmGK74,30763 -aiohttp/web_response.py,sha256=WJVumt-P0uMaFSbef_owvOXpq90E4VMl3RvSOWh0nJE,30197 -aiohttp/web_routedef.py,sha256=XC10f57Q36JmYaaQqrecsyfIxHMepCKaKkBEB7hLzJI,6324 -aiohttp/web_runner.py,sha256=zyVYVzCgnopiGwnIhKlNZHtLV_IYQ9aC-Vm43j_HRoA,12185 -aiohttp/web_server.py,sha256=RZSWt_Mj-Lu89bFYsr_T3rjxW2VNN7PHNJ2mvv2qELs,2972 -aiohttp/web_urldispatcher.py,sha256=4FiNFUWU_jITYl_DnObptuF5c0ShXAEiWyLVmE-GtN0,45595 -aiohttp/web_ws.py,sha256=VXHGDtfy_jrBByLvuhnL-A_PmpcoT_ZLyYdj_EcL3Hw,23370 -aiohttp/worker.py,sha256=N_9iyS_tR9U0pf3BRaIH2nzA1pjN1Xfi2gGmRrMhnho,8407 diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/REQUESTED b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL deleted file mode 100644 index 10ac2c2..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt deleted file mode 100644 index e497a32..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,13 +0,0 @@ - Copyright aio-libs contributors. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE deleted file mode 100644 index 6c1512d..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -This software is licensed under the MIT License. - -Copyright Fedor Indutny, 2018. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt b/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt deleted file mode 100644 index ee4ba4f..0000000 --- a/venv/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -aiohttp diff --git a/venv/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash b/venv/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash deleted file mode 100644 index 5322009..0000000 --- a/venv/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -18fd18f4da996101a426d4bcd570f353bd1eeeb44c6f7e1347bc86326c79ff3b *D:/a/aiohttp/aiohttp/aiohttp/_cparser.pxd diff --git a/venv/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash b/venv/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash deleted file mode 100644 index 8af9f81..0000000 --- a/venv/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 *D:/a/aiohttp/aiohttp/aiohttp/_find_header.pxd diff --git a/venv/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash b/venv/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash deleted file mode 100644 index 690de3e..0000000 --- a/venv/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -f7e8f2605f7ee22ed3a0c5749af56043faea35af0a1897e1415634186ad9b868 *D:/a/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/venv/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash b/venv/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash deleted file mode 100644 index 5e3dbb4..0000000 --- a/venv/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -59674e7f5f503ea49c06489f0e12729ea3cf3809b007db0a2403b42a4a2be2d1 *D:/a/aiohttp/aiohttp/aiohttp/_http_writer.pyx diff --git a/venv/Lib/site-packages/aiohttp/.hash/hdrs.py.hash b/venv/Lib/site-packages/aiohttp/.hash/hdrs.py.hash deleted file mode 100644 index e4f3c29..0000000 --- a/venv/Lib/site-packages/aiohttp/.hash/hdrs.py.hash +++ /dev/null @@ -1 +0,0 @@ -ee1b6686067213d1ea59b3e9c47534afb90021d4f692939741ad4069d0e1d96f *D:/a/aiohttp/aiohttp/aiohttp/hdrs.py diff --git a/venv/Lib/site-packages/aiohttp/__init__.py b/venv/Lib/site-packages/aiohttp/__init__.py deleted file mode 100644 index 357baf0..0000000 --- a/venv/Lib/site-packages/aiohttp/__init__.py +++ /dev/null @@ -1,278 +0,0 @@ -__version__ = "3.13.3" - -from typing import TYPE_CHECKING, Tuple - -from . 
import hdrs as hdrs -from .client import ( - BaseConnector, - ClientConnectionError, - ClientConnectionResetError, - ClientConnectorCertificateError, - ClientConnectorDNSError, - ClientConnectorError, - ClientConnectorSSLError, - ClientError, - ClientHttpProxyError, - ClientOSError, - ClientPayloadError, - ClientProxyConnectionError, - ClientRequest, - ClientResponse, - ClientResponseError, - ClientSession, - ClientSSLError, - ClientTimeout, - ClientWebSocketResponse, - ClientWSTimeout, - ConnectionTimeoutError, - ContentTypeError, - Fingerprint, - InvalidURL, - InvalidUrlClientError, - InvalidUrlRedirectClientError, - NamedPipeConnector, - NonHttpUrlClientError, - NonHttpUrlRedirectClientError, - RedirectClientError, - RequestInfo, - ServerConnectionError, - ServerDisconnectedError, - ServerFingerprintMismatch, - ServerTimeoutError, - SocketTimeoutError, - TCPConnector, - TooManyRedirects, - UnixConnector, - WSMessageTypeError, - WSServerHandshakeError, - request, -) -from .client_middleware_digest_auth import DigestAuthMiddleware -from .client_middlewares import ClientHandlerType, ClientMiddlewareType -from .compression_utils import set_zlib_backend -from .connector import ( - AddrInfoType as AddrInfoType, - SocketFactoryType as SocketFactoryType, -) -from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar -from .formdata import FormData as FormData -from .helpers import BasicAuth, ChainMapProxy, ETag -from .http import ( - HttpVersion as HttpVersion, - HttpVersion10 as HttpVersion10, - HttpVersion11 as HttpVersion11, - WebSocketError as WebSocketError, - WSCloseCode as WSCloseCode, - WSMessage as WSMessage, - WSMsgType as WSMsgType, -) -from .multipart import ( - BadContentDispositionHeader as BadContentDispositionHeader, - BadContentDispositionParam as BadContentDispositionParam, - BodyPartReader as BodyPartReader, - MultipartReader as MultipartReader, - MultipartWriter as MultipartWriter, - content_disposition_filename as content_disposition_filename, - parse_content_disposition as parse_content_disposition, -) -from .payload import ( - PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, - AsyncIterablePayload as AsyncIterablePayload, - BufferedReaderPayload as BufferedReaderPayload, - BytesIOPayload as BytesIOPayload, - BytesPayload as BytesPayload, - IOBasePayload as IOBasePayload, - JsonPayload as JsonPayload, - Payload as Payload, - StringIOPayload as StringIOPayload, - StringPayload as StringPayload, - TextIOPayload as TextIOPayload, - get_payload as get_payload, - payload_type as payload_type, -) -from .payload_streamer import streamer as streamer -from .resolver import ( - AsyncResolver as AsyncResolver, - DefaultResolver as DefaultResolver, - ThreadedResolver as ThreadedResolver, -) -from .streams import ( - EMPTY_PAYLOAD as EMPTY_PAYLOAD, - DataQueue as DataQueue, - EofStream as EofStream, - FlowControlDataQueue as FlowControlDataQueue, - StreamReader as StreamReader, -) -from .tracing import ( - TraceConfig as TraceConfig, - TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, - TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, - TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, - TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, - TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, - TraceDnsCacheHitParams as TraceDnsCacheHitParams, - TraceDnsCacheMissParams as TraceDnsCacheMissParams, - TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, - TraceDnsResolveHostStartParams 
as TraceDnsResolveHostStartParams, - TraceRequestChunkSentParams as TraceRequestChunkSentParams, - TraceRequestEndParams as TraceRequestEndParams, - TraceRequestExceptionParams as TraceRequestExceptionParams, - TraceRequestHeadersSentParams as TraceRequestHeadersSentParams, - TraceRequestRedirectParams as TraceRequestRedirectParams, - TraceRequestStartParams as TraceRequestStartParams, - TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, -) - -if TYPE_CHECKING: - # At runtime these are lazy-loaded at the bottom of the file. - from .worker import ( - GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, - GunicornWebWorker as GunicornWebWorker, - ) - -__all__: Tuple[str, ...] = ( - "hdrs", - # client - "AddrInfoType", - "BaseConnector", - "ClientConnectionError", - "ClientConnectionResetError", - "ClientConnectorCertificateError", - "ClientConnectorDNSError", - "ClientConnectorError", - "ClientConnectorSSLError", - "ClientError", - "ClientHttpProxyError", - "ClientOSError", - "ClientPayloadError", - "ClientProxyConnectionError", - "ClientResponse", - "ClientRequest", - "ClientResponseError", - "ClientSSLError", - "ClientSession", - "ClientTimeout", - "ClientWebSocketResponse", - "ClientWSTimeout", - "ConnectionTimeoutError", - "ContentTypeError", - "Fingerprint", - "FlowControlDataQueue", - "InvalidURL", - "InvalidUrlClientError", - "InvalidUrlRedirectClientError", - "NonHttpUrlClientError", - "NonHttpUrlRedirectClientError", - "RedirectClientError", - "RequestInfo", - "ServerConnectionError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ServerTimeoutError", - "SocketFactoryType", - "SocketTimeoutError", - "TCPConnector", - "TooManyRedirects", - "UnixConnector", - "NamedPipeConnector", - "WSServerHandshakeError", - "request", - # client_middleware - "ClientMiddlewareType", - "ClientHandlerType", - # cookiejar - "CookieJar", - "DummyCookieJar", - # formdata - "FormData", - # helpers - "BasicAuth", - "ChainMapProxy", - "DigestAuthMiddleware", - "ETag", - "set_zlib_backend", - # http - "HttpVersion", - "HttpVersion10", - "HttpVersion11", - "WSMsgType", - "WSCloseCode", - "WSMessage", - "WebSocketError", - # multipart - "BadContentDispositionHeader", - "BadContentDispositionParam", - "BodyPartReader", - "MultipartReader", - "MultipartWriter", - "content_disposition_filename", - "parse_content_disposition", - # payload - "AsyncIterablePayload", - "BufferedReaderPayload", - "BytesIOPayload", - "BytesPayload", - "IOBasePayload", - "JsonPayload", - "PAYLOAD_REGISTRY", - "Payload", - "StringIOPayload", - "StringPayload", - "TextIOPayload", - "get_payload", - "payload_type", - # payload_streamer - "streamer", - # resolver - "AsyncResolver", - "DefaultResolver", - "ThreadedResolver", - # streams - "DataQueue", - "EMPTY_PAYLOAD", - "EofStream", - "StreamReader", - # tracing - "TraceConfig", - "TraceConnectionCreateEndParams", - "TraceConnectionCreateStartParams", - "TraceConnectionQueuedEndParams", - "TraceConnectionQueuedStartParams", - "TraceConnectionReuseconnParams", - "TraceDnsCacheHitParams", - "TraceDnsCacheMissParams", - "TraceDnsResolveHostEndParams", - "TraceDnsResolveHostStartParams", - "TraceRequestChunkSentParams", - "TraceRequestEndParams", - "TraceRequestExceptionParams", - "TraceRequestHeadersSentParams", - "TraceRequestRedirectParams", - "TraceRequestStartParams", - "TraceResponseChunkReceivedParams", - # workers (imported lazily with __getattr__) - "GunicornUVLoopWebWorker", - "GunicornWebWorker", - "WSMessageTypeError", -) - - -def __dir__() -> 
Tuple[str, ...]: - return __all__ + ("__doc__",) - - -def __getattr__(name: str) -> object: - global GunicornUVLoopWebWorker, GunicornWebWorker - - # Importing gunicorn takes a long time (>100ms), so only import if actually needed. - if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"): - try: - from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw - except ImportError: - return None - - GunicornUVLoopWebWorker = guv # type: ignore[misc] - GunicornWebWorker = gw # type: ignore[misc] - return guv if name == "GunicornUVLoopWebWorker" else gw - - raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 0752493..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/_cookie_helpers.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/_cookie_helpers.cpython-312.pyc deleted file mode 100644 index 2aab11b..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/_cookie_helpers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-312.pyc deleted file mode 100644 index d249507..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-312.pyc deleted file mode 100644 index 04914f9..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-312.pyc deleted file mode 100644 index 83447a0..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-312.pyc deleted file mode 100644 index d69527b..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/client_middleware_digest_auth.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client_middleware_digest_auth.cpython-312.pyc deleted file mode 100644 index 65e8758..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client_middleware_digest_auth.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/client_middlewares.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client_middlewares.cpython-312.pyc deleted file mode 100644 index 6c04017..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client_middlewares.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-312.pyc deleted file mode 100644 index b4d3405..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-312.pyc deleted file mode 100644 index 13ecfc5..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-312.pyc deleted file mode 100644 index b56864b..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-312.pyc deleted file mode 100644 index 195d08c..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-312.pyc deleted file mode 100644 index 1d35f4d..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-312.pyc deleted file mode 100644 index c5ae1ec..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-312.pyc deleted file mode 100644 index 0ba37d1..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-312.pyc deleted file mode 100644 index 5ad0bfc..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-312.pyc deleted file mode 100644 index 0cb9234..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-312.pyc deleted file mode 100644 index 23aee28..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-312.pyc deleted file mode 100644 index 9d8a3ad..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-312.pyc deleted file mode 100644 index 27a89c8..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-312.pyc deleted file 
mode 100644 index e106648..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-312.pyc deleted file mode 100644 index 8b95a0d..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-312.pyc deleted file mode 100644 index 2506bf7..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-312.pyc deleted file mode 100644 index d0289d1..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-312.pyc deleted file mode 100644 index 31d8e78..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-312.pyc deleted file mode 100644 index a2f04c9..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-312.pyc deleted file mode 100644 index c4272da..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-312.pyc deleted file mode 100644 index c671888..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-312.pyc deleted file mode 100644 index f472f69..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-312.pyc deleted file mode 100644 index 83b09d8..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/test_utils.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/test_utils.cpython-312.pyc deleted file mode 100644 index 9b3b521..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/test_utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-312.pyc deleted file mode 100644 index 19c35cb..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-312.pyc deleted file mode 100644 index 351a19a..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web.cpython-312.pyc deleted file mode 100644 index b02bc2d..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_app.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_app.cpython-312.pyc deleted file mode 100644 index 4d6c2cb..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_app.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_exceptions.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_exceptions.cpython-312.pyc deleted file mode 100644 index 9730898..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-312.pyc deleted file mode 100644 index 1cd57ca..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_log.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_log.cpython-312.pyc deleted file mode 100644 index 2fc91ca..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_log.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_middlewares.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_middlewares.cpython-312.pyc deleted file mode 100644 index 0f17f7e..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_middlewares.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_protocol.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_protocol.cpython-312.pyc deleted file mode 100644 index 0e9dd6f..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_protocol.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_request.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_request.cpython-312.pyc deleted file mode 100644 index 7da5ad9..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_request.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_response.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_response.cpython-312.pyc deleted file mode 100644 index 6476e95..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_response.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_routedef.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_routedef.cpython-312.pyc deleted file mode 100644 index ee2425a..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_routedef.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_runner.cpython-312.pyc 
b/venv/Lib/site-packages/aiohttp/__pycache__/web_runner.cpython-312.pyc deleted file mode 100644 index 725579d..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_runner.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_server.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_server.cpython-312.pyc deleted file mode 100644 index d2a1d79..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_server.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc deleted file mode 100644 index 219630e..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/web_ws.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/web_ws.cpython-312.pyc deleted file mode 100644 index 661e03f..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/web_ws.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/__pycache__/worker.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/__pycache__/worker.cpython-312.pyc deleted file mode 100644 index 646c07f..0000000 Binary files a/venv/Lib/site-packages/aiohttp/__pycache__/worker.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_cookie_helpers.py b/venv/Lib/site-packages/aiohttp/_cookie_helpers.py deleted file mode 100644 index 10e2e0e..0000000 --- a/venv/Lib/site-packages/aiohttp/_cookie_helpers.py +++ /dev/null @@ -1,338 +0,0 @@ -""" -Internal cookie handling helpers. - -This module contains internal utilities for cookie parsing and manipulation. -These are not part of the public API and may change without notice. -""" - -import re -from http.cookies import Morsel -from typing import List, Optional, Sequence, Tuple, cast - -from .log import internal_logger - -__all__ = ( - "parse_set_cookie_headers", - "parse_cookie_header", - "preserve_morsel_with_coded_value", -) - -# Cookie parsing constants -# Allow more characters in cookie names to handle real-world cookies -# that don't strictly follow RFC standards (fixes #2683) -# RFC 6265 defines cookie-name token as per RFC 2616 Section 2.2, -# but many servers send cookies with characters like {} [] () etc. -# This makes the cookie parser more tolerant of real-world cookies -# while still providing some validation to catch obviously malformed names. -_COOKIE_NAME_RE = re.compile(r"^[!#$%&\'()*+\-./0-9:<=>?@A-Z\[\]^_`a-z{|}~]+$") -_COOKIE_KNOWN_ATTRS = frozenset( # AKA Morsel._reserved - ( - "path", - "domain", - "max-age", - "expires", - "secure", - "httponly", - "samesite", - "partitioned", - "version", - "comment", - ) -) -_COOKIE_BOOL_ATTRS = frozenset( # AKA Morsel._flags - ("secure", "httponly", "partitioned") -) - -# SimpleCookie's pattern for parsing cookies with relaxed validation -# Based on http.cookies pattern but extended to allow more characters in cookie names -# to handle real-world cookies (fixes #2683) -_COOKIE_PATTERN = re.compile( - r""" - \s* # Optional whitespace at start of cookie - (?P # Start of group 'key' - # aiohttp has extended to include [] for compatibility with real-world cookies - [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\[\]]+ # Any word of at least one letter - ) # End of group 'key' - ( # Optional group: there may not be a value. 
- \s*=\s* # Equal Sign - (?P # Start of group 'val' - "(?:[^\\"]|\\.)*" # Any double-quoted string (properly closed) - | # or - "[^";]* # Unmatched opening quote (differs from SimpleCookie - issue #7993) - | # or - # Special case for "expires" attr - RFC 822, RFC 850, RFC 1036, RFC 1123 - (\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day (with comma) - [\w\d\s-]{9,11}\s[\d:]{8}\s # Date and time in specific format - (GMT|[+-]\d{4}) # Timezone: GMT or RFC 2822 offset like -0000, +0100 - # NOTE: RFC 2822 timezone support is an aiohttp extension - # for issue #4493 - SimpleCookie does NOT support this - | # or - # ANSI C asctime() format: "Wed Jun 9 10:18:14 2021" - # NOTE: This is an aiohttp extension for issue #4327 - SimpleCookie does NOT support this format - \w{3}\s+\w{3}\s+[\s\d]\d\s+\d{2}:\d{2}:\d{2}\s+\d{4} - | # or - [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]* # Any word or empty string - ) # End of group 'val' - )? # End of optional value group - \s* # Any number of spaces. - (\s+|;|$) # Ending either at space, semicolon, or EOS. - """, - re.VERBOSE | re.ASCII, -) - - -def preserve_morsel_with_coded_value(cookie: Morsel[str]) -> Morsel[str]: - """ - Preserve a Morsel's coded_value exactly as received from the server. - - This function ensures that cookie encoding is preserved exactly as sent by - the server, which is critical for compatibility with old servers that have - strict requirements about cookie formats. - - This addresses the issue described in https://github.com/aio-libs/aiohttp/pull/1453 - where Python's SimpleCookie would re-encode cookies, breaking authentication - with certain servers. - - Args: - cookie: A Morsel object from SimpleCookie - - Returns: - A Morsel object with preserved coded_value - - """ - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - # We use __setstate__ instead of the public set() API because it allows us to - # bypass validation and set already validated state. This is more stable than - # setting protected attributes directly and unlikely to change since it would - # break pickling. - mrsl_val.__setstate__( # type: ignore[attr-defined] - {"key": cookie.key, "value": cookie.value, "coded_value": cookie.coded_value} - ) - return mrsl_val - - -_unquote_sub = re.compile(r"\\(?:([0-3][0-7][0-7])|(.))").sub - - -def _unquote_replace(m: re.Match[str]) -> str: - """ - Replace function for _unquote_sub regex substitution. - - Handles escaped characters in cookie values: - - Octal sequences are converted to their character representation - - Other escaped characters are unescaped by removing the backslash - """ - if m[1]: - return chr(int(m[1], 8)) - return m[2] - - -def _unquote(value: str) -> str: - """ - Unquote a cookie value. - - Vendored from http.cookies._unquote to ensure compatibility. - - Note: The original implementation checked for None, but we've removed - that check since all callers already ensure the value is not None. - """ - # If there aren't any doublequotes, - # then there can't be any special characters. See RFC 2109. - if len(value) < 2: - return value - if value[0] != '"' or value[-1] != '"': - return value - - # We have to assume that we must decode this string. - # Down to work. - - # Remove the "s - value = value[1:-1] - - # Check for special sequences. Examples: - # \012 --> \n - # \" --> " - # - return _unquote_sub(_unquote_replace, value) - - -def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: - """ - Parse a Cookie header according to RFC 6265 Section 5.4. 
- - Cookie headers contain only name-value pairs separated by semicolons. - There are no attributes in Cookie headers - even names that match - attribute names (like 'path' or 'secure') should be treated as cookies. - - This parser uses the same regex-based approach as parse_set_cookie_headers - to properly handle quoted values that may contain semicolons. When the - regex fails to match a malformed cookie, it falls back to simple parsing - to ensure subsequent cookies are not lost - https://github.com/aio-libs/aiohttp/issues/11632 - - Args: - header: The Cookie header value to parse - - Returns: - List of (name, Morsel) tuples for compatibility with SimpleCookie.update() - """ - if not header: - return [] - - cookies: List[Tuple[str, Morsel[str]]] = [] - morsel: Morsel[str] - i = 0 - n = len(header) - - invalid_names = [] - while i < n: - # Use the same pattern as parse_set_cookie_headers to find cookies - match = _COOKIE_PATTERN.match(header, i) - if not match: - # Fallback for malformed cookies https://github.com/aio-libs/aiohttp/issues/11632 - # Find next semicolon to skip or attempt simple key=value parsing - next_semi = header.find(";", i) - eq_pos = header.find("=", i) - - # Try to extract key=value if '=' comes before ';' - if eq_pos != -1 and (next_semi == -1 or eq_pos < next_semi): - end_pos = next_semi if next_semi != -1 else n - key = header[i:eq_pos].strip() - value = header[eq_pos + 1 : end_pos].strip() - - # Validate the name (same as regex path) - if not _COOKIE_NAME_RE.match(key): - invalid_names.append(key) - else: - morsel = Morsel() - morsel.__setstate__( # type: ignore[attr-defined] - {"key": key, "value": _unquote(value), "coded_value": value} - ) - cookies.append((key, morsel)) - - # Move to next cookie or end - i = next_semi + 1 if next_semi != -1 else n - continue - - key = match.group("key") - value = match.group("val") or "" - i = match.end(0) - - # Validate the name - if not key or not _COOKIE_NAME_RE.match(key): - invalid_names.append(key) - continue - - # Create new morsel - morsel = Morsel() - # Preserve the original value as coded_value (with quotes if present) - # We use __setstate__ instead of the public set() API because it allows us to - # bypass validation and set already validated state. This is more stable than - # setting protected attributes directly and unlikely to change since it would - # break pickling. - morsel.__setstate__( # type: ignore[attr-defined] - {"key": key, "value": _unquote(value), "coded_value": value} - ) - - cookies.append((key, morsel)) - - if invalid_names: - internal_logger.debug( - "Cannot load cookie. Illegal cookie names: %r", invalid_names - ) - - return cookies - - -def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]: - """ - Parse cookie headers using a vendored version of SimpleCookie parsing. - - This implementation is based on SimpleCookie.__parse_string to ensure - compatibility with how SimpleCookie parses cookies, including handling - of malformed cookies with missing semicolons. - - This function is used for both Cookie and Set-Cookie headers in order to be - forgiving. Ideally we would have followed RFC 6265 Section 5.2 (for Cookie - headers) and RFC 6265 Section 4.2.1 (for Set-Cookie headers), but the - real world data makes it impossible since we need to be a bit more forgiving. - - NOTE: This implementation differs from SimpleCookie in handling unmatched quotes. 
- SimpleCookie will stop parsing when it encounters a cookie value with an unmatched - quote (e.g., 'cookie="value'), causing subsequent cookies to be silently dropped. - This implementation handles unmatched quotes more gracefully to prevent cookie loss. - See https://github.com/aio-libs/aiohttp/issues/7993 - """ - parsed_cookies: List[Tuple[str, Morsel[str]]] = [] - - for header in headers: - if not header: - continue - - # Parse cookie string using SimpleCookie's algorithm - i = 0 - n = len(header) - current_morsel: Optional[Morsel[str]] = None - morsel_seen = False - - while 0 <= i < n: - # Start looking for a cookie - match = _COOKIE_PATTERN.match(header, i) - if not match: - # No more cookies - break - - key, value = match.group("key"), match.group("val") - i = match.end(0) - lower_key = key.lower() - - if key[0] == "$": - if not morsel_seen: - # We ignore attributes which pertain to the cookie - # mechanism as a whole, such as "$Version". - continue - # Process as attribute - if current_morsel is not None: - attr_lower_key = lower_key[1:] - if attr_lower_key in _COOKIE_KNOWN_ATTRS: - current_morsel[attr_lower_key] = value or "" - elif lower_key in _COOKIE_KNOWN_ATTRS: - if not morsel_seen: - # Invalid cookie string - attribute before cookie - break - if lower_key in _COOKIE_BOOL_ATTRS: - # Boolean attribute with any value should be True - if current_morsel is not None and current_morsel.isReservedKey(key): - current_morsel[lower_key] = True - elif value is None: - # Invalid cookie string - non-boolean attribute without value - break - elif current_morsel is not None: - # Regular attribute with value - current_morsel[lower_key] = _unquote(value) - elif value is not None: - # This is a cookie name=value pair - # Validate the name - if key in _COOKIE_KNOWN_ATTRS or not _COOKIE_NAME_RE.match(key): - internal_logger.warning( - "Can not load cookies: Illegal cookie name %r", key - ) - current_morsel = None - else: - # Create new morsel - current_morsel = Morsel() - # Preserve the original value as coded_value (with quotes if present) - # We use __setstate__ instead of the public set() API because it allows us to - # bypass validation and set already validated state. This is more stable than - # setting protected attributes directly and unlikely to change since it would - # break pickling. 
- current_morsel.__setstate__( # type: ignore[attr-defined] - {"key": key, "value": _unquote(value), "coded_value": value} - ) - parsed_cookies.append((key, current_morsel)) - morsel_seen = True - else: - # Invalid cookie string - no value for non-attribute - break - - return parsed_cookies diff --git a/venv/Lib/site-packages/aiohttp/_cparser.pxd b/venv/Lib/site-packages/aiohttp/_cparser.pxd deleted file mode 100644 index 1b3be6d..0000000 --- a/venv/Lib/site-packages/aiohttp/_cparser.pxd +++ /dev/null @@ -1,158 +0,0 @@ -from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t - - -cdef extern from "llhttp.h": - - struct llhttp__internal_s: - int32_t _index - void* _span_pos0 - void* _span_cb0 - int32_t error - const char* reason - const char* error_pos - void* data - void* _current - uint64_t content_length - uint8_t type - uint8_t method - uint8_t http_major - uint8_t http_minor - uint8_t header_state - uint8_t lenient_flags - uint8_t upgrade - uint8_t finish - uint16_t flags - uint16_t status_code - void* settings - - ctypedef llhttp__internal_s llhttp__internal_t - ctypedef llhttp__internal_t llhttp_t - - ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1 - ctypedef int (*llhttp_cb)(llhttp_t*) except -1 - - struct llhttp_settings_s: - llhttp_cb on_message_begin - llhttp_data_cb on_url - llhttp_data_cb on_status - llhttp_data_cb on_header_field - llhttp_data_cb on_header_value - llhttp_cb on_headers_complete - llhttp_data_cb on_body - llhttp_cb on_message_complete - llhttp_cb on_chunk_header - llhttp_cb on_chunk_complete - - llhttp_cb on_url_complete - llhttp_cb on_status_complete - llhttp_cb on_header_field_complete - llhttp_cb on_header_value_complete - - ctypedef llhttp_settings_s llhttp_settings_t - - enum llhttp_errno: - HPE_OK, - HPE_INTERNAL, - HPE_STRICT, - HPE_LF_EXPECTED, - HPE_UNEXPECTED_CONTENT_LENGTH, - HPE_CLOSED_CONNECTION, - HPE_INVALID_METHOD, - HPE_INVALID_URL, - HPE_INVALID_CONSTANT, - HPE_INVALID_VERSION, - HPE_INVALID_HEADER_TOKEN, - HPE_INVALID_CONTENT_LENGTH, - HPE_INVALID_CHUNK_SIZE, - HPE_INVALID_STATUS, - HPE_INVALID_EOF_STATE, - HPE_INVALID_TRANSFER_ENCODING, - HPE_CB_MESSAGE_BEGIN, - HPE_CB_HEADERS_COMPLETE, - HPE_CB_MESSAGE_COMPLETE, - HPE_CB_CHUNK_HEADER, - HPE_CB_CHUNK_COMPLETE, - HPE_PAUSED, - HPE_PAUSED_UPGRADE, - HPE_USER - - ctypedef llhttp_errno llhttp_errno_t - - enum llhttp_flags: - F_CHUNKED, - F_CONTENT_LENGTH - - enum llhttp_type: - HTTP_REQUEST, - HTTP_RESPONSE, - HTTP_BOTH - - enum llhttp_method: - HTTP_DELETE, - HTTP_GET, - HTTP_HEAD, - HTTP_POST, - HTTP_PUT, - HTTP_CONNECT, - HTTP_OPTIONS, - HTTP_TRACE, - HTTP_COPY, - HTTP_LOCK, - HTTP_MKCOL, - HTTP_MOVE, - HTTP_PROPFIND, - HTTP_PROPPATCH, - HTTP_SEARCH, - HTTP_UNLOCK, - HTTP_BIND, - HTTP_REBIND, - HTTP_UNBIND, - HTTP_ACL, - HTTP_REPORT, - HTTP_MKACTIVITY, - HTTP_CHECKOUT, - HTTP_MERGE, - HTTP_MSEARCH, - HTTP_NOTIFY, - HTTP_SUBSCRIBE, - HTTP_UNSUBSCRIBE, - HTTP_PATCH, - HTTP_PURGE, - HTTP_MKCALENDAR, - HTTP_LINK, - HTTP_UNLINK, - HTTP_SOURCE, - HTTP_PRI, - HTTP_DESCRIBE, - HTTP_ANNOUNCE, - HTTP_SETUP, - HTTP_PLAY, - HTTP_PAUSE, - HTTP_TEARDOWN, - HTTP_GET_PARAMETER, - HTTP_SET_PARAMETER, - HTTP_REDIRECT, - HTTP_RECORD, - HTTP_FLUSH - - ctypedef llhttp_method llhttp_method_t; - - void llhttp_settings_init(llhttp_settings_t* settings) - void llhttp_init(llhttp_t* parser, llhttp_type type, - const llhttp_settings_t* settings) - - llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) - - int llhttp_should_keep_alive(const llhttp_t* 
parser) - - void llhttp_resume_after_upgrade(llhttp_t* parser) - - llhttp_errno_t llhttp_get_errno(const llhttp_t* parser) - const char* llhttp_get_error_reason(const llhttp_t* parser) - const char* llhttp_get_error_pos(const llhttp_t* parser) - - const char* llhttp_method_name(llhttp_method_t method) - - void llhttp_set_lenient_headers(llhttp_t* parser, int enabled) - void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled) - void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled) diff --git a/venv/Lib/site-packages/aiohttp/_find_header.pxd b/venv/Lib/site-packages/aiohttp/_find_header.pxd deleted file mode 100644 index 37a6c37..0000000 --- a/venv/Lib/site-packages/aiohttp/_find_header.pxd +++ /dev/null @@ -1,2 +0,0 @@ -cdef extern from "_find_header.h": - int find_header(char *, int) diff --git a/venv/Lib/site-packages/aiohttp/_headers.pxi b/venv/Lib/site-packages/aiohttp/_headers.pxi deleted file mode 100644 index 3744721..0000000 --- a/venv/Lib/site-packages/aiohttp/_headers.pxi +++ /dev/null @@ -1,83 +0,0 @@ -# The file is autogenerated from aiohttp/hdrs.py -# Run ./tools/gen.py to update it after the origin changing. - -from . import hdrs -cdef tuple headers = ( - hdrs.ACCEPT, - hdrs.ACCEPT_CHARSET, - hdrs.ACCEPT_ENCODING, - hdrs.ACCEPT_LANGUAGE, - hdrs.ACCEPT_RANGES, - hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, - hdrs.ACCESS_CONTROL_ALLOW_HEADERS, - hdrs.ACCESS_CONTROL_ALLOW_METHODS, - hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, - hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, - hdrs.ACCESS_CONTROL_MAX_AGE, - hdrs.ACCESS_CONTROL_REQUEST_HEADERS, - hdrs.ACCESS_CONTROL_REQUEST_METHOD, - hdrs.AGE, - hdrs.ALLOW, - hdrs.AUTHORIZATION, - hdrs.CACHE_CONTROL, - hdrs.CONNECTION, - hdrs.CONTENT_DISPOSITION, - hdrs.CONTENT_ENCODING, - hdrs.CONTENT_LANGUAGE, - hdrs.CONTENT_LENGTH, - hdrs.CONTENT_LOCATION, - hdrs.CONTENT_MD5, - hdrs.CONTENT_RANGE, - hdrs.CONTENT_TRANSFER_ENCODING, - hdrs.CONTENT_TYPE, - hdrs.COOKIE, - hdrs.DATE, - hdrs.DESTINATION, - hdrs.DIGEST, - hdrs.ETAG, - hdrs.EXPECT, - hdrs.EXPIRES, - hdrs.FORWARDED, - hdrs.FROM, - hdrs.HOST, - hdrs.IF_MATCH, - hdrs.IF_MODIFIED_SINCE, - hdrs.IF_NONE_MATCH, - hdrs.IF_RANGE, - hdrs.IF_UNMODIFIED_SINCE, - hdrs.KEEP_ALIVE, - hdrs.LAST_EVENT_ID, - hdrs.LAST_MODIFIED, - hdrs.LINK, - hdrs.LOCATION, - hdrs.MAX_FORWARDS, - hdrs.ORIGIN, - hdrs.PRAGMA, - hdrs.PROXY_AUTHENTICATE, - hdrs.PROXY_AUTHORIZATION, - hdrs.RANGE, - hdrs.REFERER, - hdrs.RETRY_AFTER, - hdrs.SEC_WEBSOCKET_ACCEPT, - hdrs.SEC_WEBSOCKET_EXTENSIONS, - hdrs.SEC_WEBSOCKET_KEY, - hdrs.SEC_WEBSOCKET_KEY1, - hdrs.SEC_WEBSOCKET_PROTOCOL, - hdrs.SEC_WEBSOCKET_VERSION, - hdrs.SERVER, - hdrs.SET_COOKIE, - hdrs.TE, - hdrs.TRAILER, - hdrs.TRANSFER_ENCODING, - hdrs.URI, - hdrs.UPGRADE, - hdrs.USER_AGENT, - hdrs.VARY, - hdrs.VIA, - hdrs.WWW_AUTHENTICATE, - hdrs.WANT_DIGEST, - hdrs.WARNING, - hdrs.X_FORWARDED_FOR, - hdrs.X_FORWARDED_HOST, - hdrs.X_FORWARDED_PROTO, -) diff --git a/venv/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd b/venv/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd deleted file mode 100644 index c151985..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_http_parser.pyx b/venv/Lib/site-packages/aiohttp/_http_parser.pyx deleted file mode 100644 index 4a7101e..0000000 --- a/venv/Lib/site-packages/aiohttp/_http_parser.pyx +++ /dev/null @@ -1,835 +0,0 @@ -# Based on https://github.com/MagicStack/httptools -# - -from cpython cimport ( - 
Py_buffer, - PyBUF_SIMPLE, - PyBuffer_Release, - PyBytes_AsString, - PyBytes_AsStringAndSize, - PyObject_GetBuffer, -) -from cpython.mem cimport PyMem_Free, PyMem_Malloc -from libc.limits cimport ULLONG_MAX -from libc.string cimport memcpy - -from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy -from yarl import URL as _URL - -from aiohttp import hdrs -from aiohttp.helpers import DEBUG, set_exception - -from .http_exceptions import ( - BadHttpMessage, - BadHttpMethod, - BadStatusLine, - ContentLengthError, - InvalidHeader, - InvalidURLError, - LineTooLong, - PayloadEncodingError, - TransferEncodingError, -) -from .http_parser import DeflateBuffer as _DeflateBuffer -from .http_writer import ( - HttpVersion as _HttpVersion, - HttpVersion10 as _HttpVersion10, - HttpVersion11 as _HttpVersion11, -) -from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader - -cimport cython - -from aiohttp cimport _cparser as cparser - -include "_headers.pxi" - -from aiohttp cimport _find_header - -ALLOWED_UPGRADES = frozenset({"websocket"}) -DEF DEFAULT_FREELIST_SIZE = 250 - -cdef extern from "Python.h": - int PyByteArray_Resize(object, Py_ssize_t) except -1 - Py_ssize_t PyByteArray_Size(object) except -1 - char* PyByteArray_AsString(object) - -__all__ = ('HttpRequestParser', 'HttpResponseParser', - 'RawRequestMessage', 'RawResponseMessage') - -cdef object URL = _URL -cdef object URL_build = URL.build -cdef object CIMultiDict = _CIMultiDict -cdef object CIMultiDictProxy = _CIMultiDictProxy -cdef object HttpVersion = _HttpVersion -cdef object HttpVersion10 = _HttpVersion10 -cdef object HttpVersion11 = _HttpVersion11 -cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 -cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING -cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD -cdef object StreamReader = _StreamReader -cdef object DeflateBuffer = _DeflateBuffer -cdef bytes EMPTY_BYTES = b"" - -cdef inline object extend(object buf, const char* at, size_t length): - cdef Py_ssize_t s - cdef char* ptr - s = PyByteArray_Size(buf) - PyByteArray_Resize(buf, s + length) - ptr = PyByteArray_AsString(buf) - memcpy(ptr + s, at, length) - - -DEF METHODS_COUNT = 46; - -cdef list _http_method = [] - -for i in range(METHODS_COUNT): - _http_method.append( - cparser.llhttp_method_name( i).decode('ascii')) - - -cdef inline str http_method_str(int i): - if i < METHODS_COUNT: - return _http_method[i] - else: - return "" - -cdef inline object find_header(bytes raw_header): - cdef Py_ssize_t size - cdef char *buf - cdef int idx - PyBytes_AsStringAndSize(raw_header, &buf, &size) - idx = _find_header.find_header(buf, size) - if idx == -1: - return raw_header.decode('utf-8', 'surrogateescape') - return headers[idx] - - -@cython.freelist(DEFAULT_FREELIST_SIZE) -cdef class RawRequestMessage: - cdef readonly str method - cdef readonly str path - cdef readonly object version # HttpVersion - cdef readonly object headers # CIMultiDict - cdef readonly object raw_headers # tuple - cdef readonly object should_close - cdef readonly object compression - cdef readonly object upgrade - cdef readonly object chunked - cdef readonly object url # yarl.URL - - def __init__(self, method, path, version, headers, raw_headers, - should_close, compression, upgrade, chunked, url): - self.method = method - self.path = path - self.version = version - self.headers = headers - self.raw_headers = raw_headers - self.should_close = should_close - self.compression = compression - self.upgrade = upgrade - 
self.chunked = chunked - self.url = url - - def __repr__(self): - info = [] - info.append(("method", self.method)) - info.append(("path", self.path)) - info.append(("version", self.version)) - info.append(("headers", self.headers)) - info.append(("raw_headers", self.raw_headers)) - info.append(("should_close", self.should_close)) - info.append(("compression", self.compression)) - info.append(("upgrade", self.upgrade)) - info.append(("chunked", self.chunked)) - info.append(("url", self.url)) - sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - return '' - - def _replace(self, **dct): - cdef RawRequestMessage ret - ret = _new_request_message(self.method, - self.path, - self.version, - self.headers, - self.raw_headers, - self.should_close, - self.compression, - self.upgrade, - self.chunked, - self.url) - if "method" in dct: - ret.method = dct["method"] - if "path" in dct: - ret.path = dct["path"] - if "version" in dct: - ret.version = dct["version"] - if "headers" in dct: - ret.headers = dct["headers"] - if "raw_headers" in dct: - ret.raw_headers = dct["raw_headers"] - if "should_close" in dct: - ret.should_close = dct["should_close"] - if "compression" in dct: - ret.compression = dct["compression"] - if "upgrade" in dct: - ret.upgrade = dct["upgrade"] - if "chunked" in dct: - ret.chunked = dct["chunked"] - if "url" in dct: - ret.url = dct["url"] - return ret - -cdef _new_request_message(str method, - str path, - object version, - object headers, - object raw_headers, - bint should_close, - object compression, - bint upgrade, - bint chunked, - object url): - cdef RawRequestMessage ret - ret = RawRequestMessage.__new__(RawRequestMessage) - ret.method = method - ret.path = path - ret.version = version - ret.headers = headers - ret.raw_headers = raw_headers - ret.should_close = should_close - ret.compression = compression - ret.upgrade = upgrade - ret.chunked = chunked - ret.url = url - return ret - - -@cython.freelist(DEFAULT_FREELIST_SIZE) -cdef class RawResponseMessage: - cdef readonly object version # HttpVersion - cdef readonly int code - cdef readonly str reason - cdef readonly object headers # CIMultiDict - cdef readonly object raw_headers # tuple - cdef readonly object should_close - cdef readonly object compression - cdef readonly object upgrade - cdef readonly object chunked - - def __init__(self, version, code, reason, headers, raw_headers, - should_close, compression, upgrade, chunked): - self.version = version - self.code = code - self.reason = reason - self.headers = headers - self.raw_headers = raw_headers - self.should_close = should_close - self.compression = compression - self.upgrade = upgrade - self.chunked = chunked - - def __repr__(self): - info = [] - info.append(("version", self.version)) - info.append(("code", self.code)) - info.append(("reason", self.reason)) - info.append(("headers", self.headers)) - info.append(("raw_headers", self.raw_headers)) - info.append(("should_close", self.should_close)) - info.append(("compression", self.compression)) - info.append(("upgrade", self.upgrade)) - info.append(("chunked", self.chunked)) - sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - return '' - - -cdef _new_response_message(object version, - int code, - str reason, - object headers, - object raw_headers, - bint should_close, - object compression, - bint upgrade, - bint chunked): - cdef RawResponseMessage ret - ret = RawResponseMessage.__new__(RawResponseMessage) - ret.version = version - ret.code = code - ret.reason = reason - ret.headers = 
headers - ret.raw_headers = raw_headers - ret.should_close = should_close - ret.compression = compression - ret.upgrade = upgrade - ret.chunked = chunked - return ret - - -@cython.internal -cdef class HttpParser: - - cdef: - cparser.llhttp_t* _cparser - cparser.llhttp_settings_t* _csettings - - bytes _raw_name - object _name - bytes _raw_value - bint _has_value - - object _protocol - object _loop - object _timer - - size_t _max_line_size - size_t _max_field_size - size_t _max_headers - bint _response_with_body - bint _read_until_eof - - bint _started - object _url - bytearray _buf - str _path - str _reason - list _headers - list _raw_headers - bint _upgraded - list _messages - object _payload - bint _payload_error - object _payload_exception - object _last_error - bint _auto_decompress - int _limit - - str _content_encoding - - Py_buffer py_buf - - def __cinit__(self): - self._cparser = \ - PyMem_Malloc(sizeof(cparser.llhttp_t)) - if self._cparser is NULL: - raise MemoryError() - - self._csettings = \ - PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) - if self._csettings is NULL: - raise MemoryError() - - def __dealloc__(self): - PyMem_Free(self._cparser) - PyMem_Free(self._csettings) - - cdef _init( - self, cparser.llhttp_type mode, - object protocol, object loop, int limit, - object timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - bint auto_decompress=True, - ): - cparser.llhttp_settings_init(self._csettings) - cparser.llhttp_init(self._cparser, mode, self._csettings) - self._cparser.data = self - self._cparser.content_length = 0 - - self._protocol = protocol - self._loop = loop - self._timer = timer - - self._buf = bytearray() - self._payload = None - self._payload_error = 0 - self._payload_exception = payload_exception - self._messages = [] - - self._raw_name = EMPTY_BYTES - self._raw_value = EMPTY_BYTES - self._has_value = False - - self._max_line_size = max_line_size - self._max_headers = max_headers - self._max_field_size = max_field_size - self._response_with_body = response_with_body - self._read_until_eof = read_until_eof - self._upgraded = False - self._auto_decompress = auto_decompress - self._content_encoding = None - - self._csettings.on_url = cb_on_url - self._csettings.on_status = cb_on_status - self._csettings.on_header_field = cb_on_header_field - self._csettings.on_header_value = cb_on_header_value - self._csettings.on_headers_complete = cb_on_headers_complete - self._csettings.on_body = cb_on_body - self._csettings.on_message_begin = cb_on_message_begin - self._csettings.on_message_complete = cb_on_message_complete - self._csettings.on_chunk_header = cb_on_chunk_header - self._csettings.on_chunk_complete = cb_on_chunk_complete - - self._last_error = None - self._limit = limit - - cdef _process_header(self): - cdef str value - if self._raw_name is not EMPTY_BYTES: - name = find_header(self._raw_name) - value = self._raw_value.decode('utf-8', 'surrogateescape') - - self._headers.append((name, value)) - - if name is CONTENT_ENCODING: - self._content_encoding = value - - self._has_value = False - self._raw_headers.append((self._raw_name, self._raw_value)) - self._raw_name = EMPTY_BYTES - self._raw_value = EMPTY_BYTES - - cdef _on_header_field(self, char* at, size_t length): - if self._has_value: - self._process_header() - - if self._raw_name is EMPTY_BYTES: - self._raw_name = at[:length] - else: - self._raw_name += at[:length] - - cdef 
_on_header_value(self, char* at, size_t length): - if self._raw_value is EMPTY_BYTES: - self._raw_value = at[:length] - else: - self._raw_value += at[:length] - self._has_value = True - - cdef _on_headers_complete(self): - self._process_header() - - should_close = not cparser.llhttp_should_keep_alive(self._cparser) - upgrade = self._cparser.upgrade - chunked = self._cparser.flags & cparser.F_CHUNKED - - raw_headers = tuple(self._raw_headers) - headers = CIMultiDictProxy(CIMultiDict(self._headers)) - - if self._cparser.type == cparser.HTTP_REQUEST: - h_upg = headers.get("upgrade", "") - allowed = upgrade and h_upg.isascii() and h_upg.lower() in ALLOWED_UPGRADES - if allowed or self._cparser.method == cparser.HTTP_CONNECT: - self._upgraded = True - else: - if upgrade and self._cparser.status_code == 101: - self._upgraded = True - - # do not support old websocket spec - if SEC_WEBSOCKET_KEY1 in headers: - raise InvalidHeader(SEC_WEBSOCKET_KEY1) - - encoding = None - enc = self._content_encoding - if enc is not None: - self._content_encoding = None - if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: - encoding = enc - - if self._cparser.type == cparser.HTTP_REQUEST: - method = http_method_str(self._cparser.method) - msg = _new_request_message( - method, self._path, - self.http_version(), headers, raw_headers, - should_close, encoding, upgrade, chunked, self._url) - else: - msg = _new_response_message( - self.http_version(), self._cparser.status_code, self._reason, - headers, raw_headers, should_close, encoding, - upgrade, chunked) - - if ( - ULLONG_MAX > self._cparser.content_length > 0 or chunked or - self._cparser.method == cparser.HTTP_CONNECT or - (self._cparser.status_code >= 199 and - self._cparser.content_length == 0 and - self._read_until_eof) - ): - payload = StreamReader( - self._protocol, timer=self._timer, loop=self._loop, - limit=self._limit) - else: - payload = EMPTY_PAYLOAD - - self._payload = payload - if encoding is not None and self._auto_decompress: - self._payload = DeflateBuffer(payload, encoding) - - if not self._response_with_body: - payload = EMPTY_PAYLOAD - - self._messages.append((msg, payload)) - - cdef _on_message_complete(self): - self._payload.feed_eof() - self._payload = None - - cdef _on_chunk_header(self): - self._payload.begin_http_chunk_receiving() - - cdef _on_chunk_complete(self): - self._payload.end_http_chunk_receiving() - - cdef object _on_status_complete(self): - pass - - cdef inline http_version(self): - cdef cparser.llhttp_t* parser = self._cparser - - if parser.http_major == 1: - if parser.http_minor == 0: - return HttpVersion10 - elif parser.http_minor == 1: - return HttpVersion11 - - return HttpVersion(parser.http_major, parser.http_minor) - - ### Public API ### - - def feed_eof(self): - cdef bytes desc - - if self._payload is not None: - if self._cparser.flags & cparser.F_CHUNKED: - raise TransferEncodingError( - "Not enough data to satisfy transfer length header.") - elif self._cparser.flags & cparser.F_CONTENT_LENGTH: - raise ContentLengthError( - "Not enough data to satisfy content length header.") - elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: - desc = cparser.llhttp_get_error_reason(self._cparser) - raise PayloadEncodingError(desc.decode('latin-1')) - else: - self._payload.feed_eof() - elif self._started: - self._on_headers_complete() - if self._messages: - return self._messages[-1][0] - - def feed_data(self, data): - cdef: - size_t data_len - size_t nb - cdef cparser.llhttp_errno_t errno - - 
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) - data_len = self.py_buf.len - - errno = cparser.llhttp_execute( - self._cparser, - self.py_buf.buf, - data_len) - - if errno is cparser.HPE_PAUSED_UPGRADE: - cparser.llhttp_resume_after_upgrade(self._cparser) - - nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf - - PyBuffer_Release(&self.py_buf) - - if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): - if self._payload_error == 0: - if self._last_error is not None: - ex = self._last_error - self._last_error = None - else: - after = cparser.llhttp_get_error_pos(self._cparser) - before = data[:after - self.py_buf.buf] - after_b = after.split(b"\r\n", 1)[0] - before = before.rsplit(b"\r\n", 1)[-1] - data = before + after_b - pointer = " " * (len(repr(before))-1) + "^" - ex = parser_error_from_errno(self._cparser, data, pointer) - self._payload = None - raise ex - - if self._messages: - messages = self._messages - self._messages = [] - else: - messages = () - - if self._upgraded: - return messages, True, data[nb:] - else: - return messages, False, b"" - - def set_upgraded(self, val): - self._upgraded = val - - -cdef class HttpRequestParser(HttpParser): - - def __init__( - self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - bint auto_decompress=True, - ): - self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, - max_line_size, max_headers, max_field_size, - payload_exception, response_with_body, read_until_eof, - auto_decompress) - - cdef object _on_status_complete(self): - cdef int idx1, idx2 - if not self._buf: - return - self._path = self._buf.decode('utf-8', 'surrogateescape') - try: - idx3 = len(self._path) - if self._cparser.method == cparser.HTTP_CONNECT: - # authority-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 - self._url = URL.build(authority=self._path, encoded=True) - elif idx3 > 1 and self._path[0] == '/': - # origin-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 - idx1 = self._path.find("?") - if idx1 == -1: - query = "" - idx2 = self._path.find("#") - if idx2 == -1: - path = self._path - fragment = "" - else: - path = self._path[0: idx2] - fragment = self._path[idx2+1:] - - else: - path = self._path[0:idx1] - idx1 += 1 - idx2 = self._path.find("#", idx1+1) - if idx2 == -1: - query = self._path[idx1:] - fragment = "" - else: - query = self._path[idx1: idx2] - fragment = self._path[idx2+1:] - - self._url = URL.build( - path=path, - query_string=query, - fragment=fragment, - encoded=True, - ) - else: - # absolute-form for proxy maybe, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 - self._url = URL(self._path, encoded=True) - finally: - PyByteArray_Resize(self._buf, 0) - - -cdef class HttpResponseParser(HttpParser): - - def __init__( - self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - bint auto_decompress=True - ): - self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, - max_line_size, max_headers, max_field_size, - payload_exception, response_with_body, read_until_eof, - auto_decompress) - # Use strict parsing on dev mode, so users are warned about broken servers. 
- if not DEBUG: - cparser.llhttp_set_lenient_headers(self._cparser, 1) - cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1) - cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1) - - cdef object _on_status_complete(self): - if self._buf: - self._reason = self._buf.decode('utf-8', 'surrogateescape') - PyByteArray_Resize(self._buf, 0) - else: - self._reason = self._reason or '' - -cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - - pyparser._started = True - pyparser._headers = [] - pyparser._raw_headers = [] - PyByteArray_Resize(pyparser._buf, 0) - pyparser._path = None - pyparser._reason = None - return 0 - - -cdef int cb_on_url(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - try: - if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) - extend(pyparser._buf, at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_status(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef str reason - try: - if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) - extend(pyparser._buf, at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_header_field(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef Py_ssize_t size - try: - pyparser._on_status_complete() - size = len(pyparser._raw_name) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header name is too long', pyparser._max_field_size, size) - pyparser._on_header_field(at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_header_value(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef Py_ssize_t size - try: - size = len(pyparser._raw_value) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header value is too long', pyparser._max_field_size, size) - pyparser._on_header_value(at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_status_complete() - pyparser._on_headers_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT: - return 2 - else: - return 0 - - -cdef int cb_on_body(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef bytes body = at[:length] - try: - pyparser._payload.feed_data(body, length) - except BaseException as underlying_exc: - reraised_exc = underlying_exc - if pyparser._payload_exception is not None: - reraised_exc = pyparser._payload_exception(str(underlying_exc)) - - set_exception(pyparser._payload, reraised_exc, underlying_exc) - - pyparser._payload_error = 1 - return -1 - else: - return 0 - - -cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._started = False - 
pyparser._on_message_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_chunk_header() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_chunk_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): - cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) - cdef bytes desc = cparser.llhttp_get_error_reason(parser) - - err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) - - if errno in {cparser.HPE_CB_MESSAGE_BEGIN, - cparser.HPE_CB_HEADERS_COMPLETE, - cparser.HPE_CB_MESSAGE_COMPLETE, - cparser.HPE_CB_CHUNK_HEADER, - cparser.HPE_CB_CHUNK_COMPLETE, - cparser.HPE_INVALID_CONSTANT, - cparser.HPE_INVALID_HEADER_TOKEN, - cparser.HPE_INVALID_CONTENT_LENGTH, - cparser.HPE_INVALID_CHUNK_SIZE, - cparser.HPE_INVALID_EOF_STATE, - cparser.HPE_INVALID_TRANSFER_ENCODING}: - return BadHttpMessage(err_msg) - elif errno == cparser.HPE_INVALID_METHOD: - return BadHttpMethod(error=err_msg) - elif errno in {cparser.HPE_INVALID_STATUS, - cparser.HPE_INVALID_VERSION}: - return BadStatusLine(error=err_msg) - elif errno == cparser.HPE_INVALID_URL: - return InvalidURLError(err_msg) - - return BadHttpMessage(err_msg) diff --git a/venv/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd b/venv/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd deleted file mode 100644 index 2ec7690..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_http_writer.pyx b/venv/Lib/site-packages/aiohttp/_http_writer.pyx deleted file mode 100644 index 7989c18..0000000 --- a/venv/Lib/site-packages/aiohttp/_http_writer.pyx +++ /dev/null @@ -1,162 +0,0 @@ -from cpython.bytes cimport PyBytes_FromStringAndSize -from cpython.exc cimport PyErr_NoMemory -from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc -from cpython.object cimport PyObject_Str -from libc.stdint cimport uint8_t, uint64_t -from libc.string cimport memcpy - -from multidict import istr - -DEF BUF_SIZE = 16 * 1024 # 16KiB - -cdef object _istr = istr - - -# ----------------- writer --------------------------- - -cdef struct Writer: - char *buf - Py_ssize_t size - Py_ssize_t pos - bint heap_allocated - -cdef inline void _init_writer(Writer* writer, char *buf): - writer.buf = buf - writer.size = BUF_SIZE - writer.pos = 0 - writer.heap_allocated = 0 - - -cdef inline void _release_writer(Writer* writer): - if writer.heap_allocated: - PyMem_Free(writer.buf) - - -cdef inline int _write_byte(Writer* writer, uint8_t ch): - cdef char * buf - cdef Py_ssize_t size - - if writer.pos == writer.size: - # reallocate - size = writer.size + BUF_SIZE - if not writer.heap_allocated: - buf = PyMem_Malloc(size) - if buf == NULL: - PyErr_NoMemory() - return -1 - memcpy(buf, writer.buf, writer.size) - else: - buf = PyMem_Realloc(writer.buf, size) - if buf == NULL: - PyErr_NoMemory() - return -1 - writer.buf = buf - writer.size = size - writer.heap_allocated = 1 - writer.buf[writer.pos] = ch - writer.pos += 1 - return 0 - - -cdef inline int _write_utf8(Writer* writer, 
Py_UCS4 symbol): - cdef uint64_t utf = symbol - - if utf < 0x80: - return _write_byte(writer, utf) - elif utf < 0x800: - if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: - return -1 - return _write_byte(writer, (0x80 | (utf & 0x3f))) - elif 0xD800 <= utf <= 0xDFFF: - # surogate pair, ignored - return 0 - elif utf < 0x10000: - if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - return -1 - if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - return -1 - return _write_byte(writer, (0x80 | (utf & 0x3f))) - elif utf > 0x10FFFF: - # symbol is too large - return 0 - else: - if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: - return -1 - if _write_byte(writer, - (0x80 | ((utf >> 12) & 0x3f))) < 0: - return -1 - if _write_byte(writer, - (0x80 | ((utf >> 6) & 0x3f))) < 0: - return -1 - return _write_byte(writer, (0x80 | (utf & 0x3f))) - - -cdef inline int _write_str(Writer* writer, str s): - cdef Py_UCS4 ch - for ch in s: - if _write_utf8(writer, ch) < 0: - return -1 - - -cdef inline int _write_str_raise_on_nlcr(Writer* writer, object s): - cdef Py_UCS4 ch - cdef str out_str - if type(s) is str: - out_str = s - elif type(s) is _istr: - out_str = PyObject_Str(s) - elif not isinstance(s, str): - raise TypeError("Cannot serialize non-str key {!r}".format(s)) - else: - out_str = str(s) - - for ch in out_str: - if ch == 0x0D or ch == 0x0A: - raise ValueError( - "Newline or carriage return detected in headers. " - "Potential header injection attack." - ) - if _write_utf8(writer, ch) < 0: - return -1 - - -# --------------- _serialize_headers ---------------------- - -def _serialize_headers(str status_line, headers): - cdef Writer writer - cdef object key - cdef object val - cdef char buf[BUF_SIZE] - - _init_writer(&writer, buf) - - try: - if _write_str(&writer, status_line) < 0: - raise - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - for key, val in headers.items(): - if _write_str_raise_on_nlcr(&writer, key) < 0: - raise - if _write_byte(&writer, b':') < 0: - raise - if _write_byte(&writer, b' ') < 0: - raise - if _write_str_raise_on_nlcr(&writer, val) < 0: - raise - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - return PyBytes_FromStringAndSize(writer.buf, writer.pos) - finally: - _release_writer(&writer) diff --git a/venv/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash b/venv/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash deleted file mode 100644 index 59ec123..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -e354dd499be171b6125bf56bc3b6c5e2bff2a28af69e3b5d699ddb9af2bafa3c *D:/a/aiohttp/aiohttp/aiohttp/_websocket/mask.pxd diff --git a/venv/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash b/venv/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash deleted file mode 100644 index 025189e..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -468edd38ebf8dc7000a8d333df1c82035d69a5c9febc0448be3c9c4ad4c4630c *D:/a/aiohttp/aiohttp/aiohttp/_websocket/mask.pyx diff --git a/venv/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash b/venv/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash deleted file mode 100644 index 0f260dc..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash +++ /dev/null @@ -1 +0,0 @@ 
-1cd3a5e20456b4d04d11835b2bd3c639f14443052a2467b105b0ca07fdb4b25d *D:/a/aiohttp/aiohttp/aiohttp/_websocket/reader_c.pxd diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__init__.py b/venv/Lib/site-packages/aiohttp/_websocket/__init__.py deleted file mode 100644 index 836257c..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""WebSocket protocol versions 13 and 8.""" diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index fdf5548..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc deleted file mode 100644 index 9e47f18..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/models.cpython-312.pyc deleted file mode 100644 index c152be1..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader.cpython-312.pyc deleted file mode 100644 index 69ca22c..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc deleted file mode 100644 index 6d8f678..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc deleted file mode 100644 index 602a52f..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/writer.cpython-312.pyc b/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/writer.cpython-312.pyc deleted file mode 100644 index 06c576c..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/__pycache__/writer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/helpers.py b/venv/Lib/site-packages/aiohttp/_websocket/helpers.py deleted file mode 100644 index 0bb58df..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/helpers.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Helpers for WebSocket protocol versions 13 and 8.""" - -import functools -import re -from struct import Struct -from typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple - -from ..helpers import NO_EXTENSIONS -from .models import WSHandshakeError - -UNPACK_LEN3 = Struct("!Q").unpack_from -UNPACK_CLOSE_CODE = Struct("!H").unpack -PACK_LEN1 = Struct("!BB").pack -PACK_LEN2 = Struct("!BBH").pack -PACK_LEN3 = Struct("!BBQ").pack -PACK_CLOSE_CODE = Struct("!H").pack -PACK_RANDBITS = Struct("!L").pack -MSG_SIZE: 
Final[int] = 2**14 -MASK_LEN: Final[int] = 4 - -WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - -# Used by _websocket_mask_python -@functools.lru_cache -def _xor_table() -> List[bytes]: - return [bytes(a ^ b for a in range(256)) for b in range(256)] - - -def _websocket_mask_python(mask: bytes, data: bytearray) -> None: - """Websocket masking function. - - `mask` is a `bytes` object of length 4; `data` is a `bytearray` - object of any length. The contents of `data` are masked with `mask`, - as specified in section 5.3 of RFC 6455. - - Note that this function mutates the `data` argument. - - This pure-python implementation may be replaced by an optimized - version when available. - - """ - assert isinstance(data, bytearray), data - assert len(mask) == 4, mask - - if data: - _XOR_TABLE = _xor_table() - a, b, c, d = (_XOR_TABLE[n] for n in mask) - data[::4] = data[::4].translate(a) - data[1::4] = data[1::4].translate(b) - data[2::4] = data[2::4].translate(c) - data[3::4] = data[3::4].translate(d) - - -if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover - websocket_mask = _websocket_mask_python -else: - try: - from .mask import _websocket_mask_cython # type: ignore[import-not-found] - - websocket_mask = _websocket_mask_cython - except ImportError: # pragma: no cover - websocket_mask = _websocket_mask_python - - -_WS_EXT_RE: Final[Pattern[str]] = re.compile( - r"^(?:;\s*(?:" - r"(server_no_context_takeover)|" - r"(client_no_context_takeover)|" - r"(server_max_window_bits(?:=(\d+))?)|" - r"(client_max_window_bits(?:=(\d+))?)))*$" -) - -_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") - - -def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: - if not extstr: - return 0, False - - compress = 0 - notakeover = False - for ext in _WS_EXT_RE_SPLIT.finditer(extstr): - defext = ext.group(1) - # Return compress = 15 when get `permessage-deflate` - if not defext: - compress = 15 - break - match = _WS_EXT_RE.match(defext) - if match: - compress = 15 - if isserver: - # Server never fail to detect compress handshake. 
- # Server does not need to send max wbit to client - if match.group(4): - compress = int(match.group(4)) - # Group3 must match if group4 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # CONTINUE to next extension - if compress > 15 or compress < 9: - compress = 0 - continue - if match.group(1): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - else: - if match.group(6): - compress = int(match.group(6)) - # Group5 must match if group6 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # FAIL the parse progress - if compress > 15 or compress < 9: - raise WSHandshakeError("Invalid window size") - if match.group(2): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - # Return Fail if client side and not match - elif not isserver: - raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) - - return compress, notakeover - - -def ws_ext_gen( - compress: int = 15, isserver: bool = False, server_notakeover: bool = False -) -> str: - # client_notakeover=False not used for server - # compress wbit 8 does not support in zlib - if compress < 9 or compress > 15: - raise ValueError( - "Compress wbits must between 9 and 15, zlib does not support wbits=8" - ) - enabledext = ["permessage-deflate"] - if not isserver: - enabledext.append("client_max_window_bits") - - if compress < 15: - enabledext.append("server_max_window_bits=" + str(compress)) - if server_notakeover: - enabledext.append("server_no_context_takeover") - # if client_notakeover: - # enabledext.append('client_no_context_takeover') - return "; ".join(enabledext) diff --git a/venv/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd b/venv/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd deleted file mode 100644 index 060d974..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/mask.pxd b/venv/Lib/site-packages/aiohttp/_websocket/mask.pxd deleted file mode 100644 index 90983de..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/mask.pxd +++ /dev/null @@ -1,3 +0,0 @@ -"""Cython declarations for websocket masking.""" - -cpdef void _websocket_mask_cython(bytes mask, bytearray data) diff --git a/venv/Lib/site-packages/aiohttp/_websocket/mask.pyx b/venv/Lib/site-packages/aiohttp/_websocket/mask.pyx deleted file mode 100644 index 2d956c8..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/mask.pyx +++ /dev/null @@ -1,48 +0,0 @@ -from cpython cimport PyBytes_AsString - - -#from cpython cimport PyByteArray_AsString # cython still not exports that -cdef extern from "Python.h": - char* PyByteArray_AsString(bytearray ba) except NULL - -from libc.stdint cimport uint32_t, uint64_t, uintmax_t - - -cpdef void _websocket_mask_cython(bytes mask, bytearray data): - """Note, this function mutates its `data` argument - """ - cdef: - Py_ssize_t data_len, i - # bit operations on signed integers are implementation-specific - unsigned char * in_buf - const unsigned char * mask_buf - uint32_t uint32_msk - uint64_t uint64_msk - - assert len(mask) == 4 - - data_len = len(data) - in_buf = PyByteArray_AsString(data) - mask_buf = PyBytes_AsString(mask) - uint32_msk = (mask_buf)[0] - - # TODO: align in_data ptr to achieve even faster speeds - # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes - - if sizeof(size_t) >= 8: - uint64_msk = uint32_msk - uint64_msk = (uint64_msk << 32) | uint32_msk - - while data_len >= 8: - (in_buf)[0] ^= uint64_msk - in_buf += 8 - data_len -= 8 - - - while data_len >= 4: - (in_buf)[0] ^= uint32_msk - in_buf += 4 - data_len -= 4 - - for i in range(0, data_len): - in_buf[i] ^= mask_buf[i] diff --git a/venv/Lib/site-packages/aiohttp/_websocket/models.py b/venv/Lib/site-packages/aiohttp/_websocket/models.py deleted file mode 100644 index 7e89b96..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/models.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Models for WebSocket protocol versions 13 and 8.""" - -import json -from enum import IntEnum -from typing import Any, Callable, Final, NamedTuple, Optional, cast - -WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) - - -class WSCloseCode(IntEnum): - OK = 1000 - GOING_AWAY = 1001 - PROTOCOL_ERROR = 1002 - UNSUPPORTED_DATA = 1003 - ABNORMAL_CLOSURE = 1006 - INVALID_TEXT = 1007 - POLICY_VIOLATION = 1008 - MESSAGE_TOO_BIG = 1009 - MANDATORY_EXTENSION = 1010 - INTERNAL_ERROR = 1011 - SERVICE_RESTART = 1012 - TRY_AGAIN_LATER = 1013 - BAD_GATEWAY = 1014 - - -class WSMsgType(IntEnum): - # websocket spec types - CONTINUATION = 0x0 - TEXT = 0x1 - BINARY = 0x2 - PING = 0x9 - PONG = 0xA - CLOSE = 0x8 - - # aiohttp specific types - CLOSING = 0x100 - CLOSED = 0x101 - ERROR = 0x102 - - text = TEXT - binary = BINARY - ping = PING - pong = PONG - close = CLOSE - closing = CLOSING - closed = CLOSED - error = ERROR - - -class WSMessage(NamedTuple): - type: WSMsgType - # To type correctly, this would need some kind of tagged union for each type. - data: Any - extra: Optional[str] - - def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: - """Return parsed JSON data. - - .. 
versionadded:: 0.22 - """ - return loads(self.data) - - -# Constructing the tuple directly to avoid the overhead of -# the lambda and arg processing since NamedTuples are constructed -# with a run time built lambda -# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441 -WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None)) -WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None)) - - -class WebSocketError(Exception): - """WebSocket protocol parser error.""" - - def __init__(self, code: int, message: str) -> None: - self.code = code - super().__init__(code, message) - - def __str__(self) -> str: - return cast(str, self.args[1]) - - -class WSHandshakeError(Exception): - """WebSocket protocol handshake error.""" diff --git a/venv/Lib/site-packages/aiohttp/_websocket/reader.py b/venv/Lib/site-packages/aiohttp/_websocket/reader.py deleted file mode 100644 index 23f3226..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/reader.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Reader for WebSocket protocol versions 13 and 8.""" - -from typing import TYPE_CHECKING - -from ..helpers import NO_EXTENSIONS - -if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover - from .reader_py import ( - WebSocketDataQueue as WebSocketDataQueuePython, - WebSocketReader as WebSocketReaderPython, - ) - - WebSocketReader = WebSocketReaderPython - WebSocketDataQueue = WebSocketDataQueuePython -else: - try: - from .reader_c import ( # type: ignore[import-not-found] - WebSocketDataQueue as WebSocketDataQueueCython, - WebSocketReader as WebSocketReaderCython, - ) - - WebSocketReader = WebSocketReaderCython - WebSocketDataQueue = WebSocketDataQueueCython - except ImportError: # pragma: no cover - from .reader_py import ( - WebSocketDataQueue as WebSocketDataQueuePython, - WebSocketReader as WebSocketReaderPython, - ) - - WebSocketReader = WebSocketReaderPython - WebSocketDataQueue = WebSocketDataQueuePython diff --git a/venv/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd b/venv/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd deleted file mode 100644 index 85fa3fa..0000000 Binary files a/venv/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/aiohttp/_websocket/reader_c.pxd b/venv/Lib/site-packages/aiohttp/_websocket/reader_c.pxd deleted file mode 100644 index a7620d8..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/reader_c.pxd +++ /dev/null @@ -1,110 +0,0 @@ -import cython - -from .mask cimport _websocket_mask_cython as websocket_mask - - -cdef unsigned int READ_HEADER -cdef unsigned int READ_PAYLOAD_LENGTH -cdef unsigned int READ_PAYLOAD_MASK -cdef unsigned int READ_PAYLOAD - -cdef int OP_CODE_NOT_SET -cdef int OP_CODE_CONTINUATION -cdef int OP_CODE_TEXT -cdef int OP_CODE_BINARY -cdef int OP_CODE_CLOSE -cdef int OP_CODE_PING -cdef int OP_CODE_PONG - -cdef int COMPRESSED_NOT_SET -cdef int COMPRESSED_FALSE -cdef int COMPRESSED_TRUE - -cdef object UNPACK_LEN3 -cdef object UNPACK_CLOSE_CODE -cdef object TUPLE_NEW - -cdef object WSMsgType -cdef object WSMessage - -cdef object WS_MSG_TYPE_TEXT -cdef object WS_MSG_TYPE_BINARY - -cdef set ALLOWED_CLOSE_CODES -cdef set MESSAGE_TYPES_WITH_CONTENT - -cdef tuple EMPTY_FRAME -cdef tuple EMPTY_FRAME_ERROR - -cdef class WebSocketDataQueue: - - cdef unsigned int _size - cdef public object _protocol - cdef unsigned int _limit - cdef object _loop - cdef bint _eof - cdef 
object _waiter - cdef object _exception - cdef public object _buffer - cdef object _get_buffer - cdef object _put_buffer - - cdef void _release_waiter(self) - - cpdef void feed_data(self, object data, unsigned int size) - - @cython.locals(size="unsigned int") - cdef _read_from_buffer(self) - -cdef class WebSocketReader: - - cdef WebSocketDataQueue queue - cdef unsigned int _max_msg_size - - cdef Exception _exc - cdef bytearray _partial - cdef unsigned int _state - - cdef int _opcode - cdef bint _frame_fin - cdef int _frame_opcode - cdef list _payload_fragments - cdef Py_ssize_t _frame_payload_len - - cdef bytes _tail - cdef bint _has_mask - cdef bytes _frame_mask - cdef Py_ssize_t _payload_bytes_to_read - cdef unsigned int _payload_len_flag - cdef int _compressed - cdef object _decompressobj - cdef bint _compress - - cpdef tuple feed_data(self, object data) - - @cython.locals( - is_continuation=bint, - fin=bint, - has_partial=bint, - payload_merged=bytes, - ) - cpdef void _handle_frame(self, bint fin, int opcode, object payload, int compressed) except * - - @cython.locals( - start_pos=Py_ssize_t, - data_len=Py_ssize_t, - length=Py_ssize_t, - chunk_size=Py_ssize_t, - chunk_len=Py_ssize_t, - data_len=Py_ssize_t, - data_cstr="const unsigned char *", - first_byte="unsigned char", - second_byte="unsigned char", - f_start_pos=Py_ssize_t, - f_end_pos=Py_ssize_t, - has_mask=bint, - fin=bint, - had_fragments=Py_ssize_t, - payload_bytearray=bytearray, - ) - cpdef void _feed_data(self, bytes data) except * diff --git a/venv/Lib/site-packages/aiohttp/_websocket/reader_c.py b/venv/Lib/site-packages/aiohttp/_websocket/reader_c.py deleted file mode 100644 index 5166d7e..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/reader_c.py +++ /dev/null @@ -1,478 +0,0 @@ -"""Reader for WebSocket protocol versions 13 and 8.""" - -import asyncio -import builtins -from collections import deque -from typing import Deque, Final, Optional, Set, Tuple, Union - -from ..base_protocol import BaseProtocol -from ..compression_utils import ZLibDecompressor -from ..helpers import _EXC_SENTINEL, set_exception -from ..streams import EofStream -from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask -from .models import ( - WS_DEFLATE_TRAILING, - WebSocketError, - WSCloseCode, - WSMessage, - WSMsgType, -) - -ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} - -# States for the reader, used to parse the WebSocket frame -# integer values are used so they can be cythonized -READ_HEADER = 1 -READ_PAYLOAD_LENGTH = 2 -READ_PAYLOAD_MASK = 3 -READ_PAYLOAD = 4 - -WS_MSG_TYPE_BINARY = WSMsgType.BINARY -WS_MSG_TYPE_TEXT = WSMsgType.TEXT - -# WSMsgType values unpacked so they can by cythonized to ints -OP_CODE_NOT_SET = -1 -OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value -OP_CODE_TEXT = WSMsgType.TEXT.value -OP_CODE_BINARY = WSMsgType.BINARY.value -OP_CODE_CLOSE = WSMsgType.CLOSE.value -OP_CODE_PING = WSMsgType.PING.value -OP_CODE_PONG = WSMsgType.PONG.value - -EMPTY_FRAME_ERROR = (True, b"") -EMPTY_FRAME = (False, b"") - -COMPRESSED_NOT_SET = -1 -COMPRESSED_FALSE = 0 -COMPRESSED_TRUE = 1 - -TUPLE_NEW = tuple.__new__ - -cython_int = int # Typed to int in Python, but cython with use a signed int in the pxd - - -class WebSocketDataQueue: - """WebSocketDataQueue resumes and pauses an underlying stream. - - It is a destination for WebSocket data. 
- """ - - def __init__( - self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop - ) -> None: - self._size = 0 - self._protocol = protocol - self._limit = limit * 2 - self._loop = loop - self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._exception: Union[BaseException, None] = None - self._buffer: Deque[Tuple[WSMessage, int]] = deque() - self._get_buffer = self._buffer.popleft - self._put_buffer = self._buffer.append - - def is_eof(self) -> bool: - return self._eof - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception( - self, - exc: BaseException, - exc_cause: builtins.BaseException = _EXC_SENTINEL, - ) -> None: - self._eof = True - self._exception = exc - if (waiter := self._waiter) is not None: - self._waiter = None - set_exception(waiter, exc, exc_cause) - - def _release_waiter(self) -> None: - if (waiter := self._waiter) is None: - return - self._waiter = None - if not waiter.done(): - waiter.set_result(None) - - def feed_eof(self) -> None: - self._eof = True - self._release_waiter() - self._exception = None # Break cyclic references - - def feed_data(self, data: "WSMessage", size: "cython_int") -> None: - self._size += size - self._put_buffer((data, size)) - self._release_waiter() - if self._size > self._limit and not self._protocol._reading_paused: - self._protocol.pause_reading() - - async def read(self) -> WSMessage: - if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - return self._read_from_buffer() - - def _read_from_buffer(self) -> WSMessage: - if self._buffer: - data, size = self._get_buffer() - self._size -= size - if self._size < self._limit and self._protocol._reading_paused: - self._protocol.resume_reading() - return data - if self._exception is not None: - raise self._exception - raise EofStream - - -class WebSocketReader: - def __init__( - self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True - ) -> None: - self.queue = queue - self._max_msg_size = max_msg_size - - self._exc: Optional[Exception] = None - self._partial = bytearray() - self._state = READ_HEADER - - self._opcode: int = OP_CODE_NOT_SET - self._frame_fin = False - self._frame_opcode: int = OP_CODE_NOT_SET - self._payload_fragments: list[bytes] = [] - self._frame_payload_len = 0 - - self._tail: bytes = b"" - self._has_mask = False - self._frame_mask: Optional[bytes] = None - self._payload_bytes_to_read = 0 - self._payload_len_flag = 0 - self._compressed: int = COMPRESSED_NOT_SET - self._decompressobj: Optional[ZLibDecompressor] = None - self._compress = compress - - def feed_eof(self) -> None: - self.queue.feed_eof() - - # data can be bytearray on Windows because proactor event loop uses bytearray - # and asyncio types this to Union[bytes, bytearray, memoryview] so we need - # coerce data to bytes if it is not - def feed_data( - self, data: Union[bytes, bytearray, memoryview] - ) -> Tuple[bool, bytes]: - if type(data) is not bytes: - data = bytes(data) - - if self._exc is not None: - return True, data - - try: - self._feed_data(data) - except Exception as exc: - self._exc = exc - set_exception(self.queue, exc) - return EMPTY_FRAME_ERROR - - return EMPTY_FRAME - - def _handle_frame( - self, - fin: bool, - opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int - payload: Union[bytes, bytearray], - compressed: 
Union[int, cython_int], # Union intended: Cython pxd uses C int - ) -> None: - msg: WSMessage - if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: - # Validate continuation frames before processing - if opcode == OP_CODE_CONTINUATION and self._opcode == OP_CODE_NOT_SET: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) - - # load text/binary - if not fin: - # got partial frame payload - if opcode != OP_CODE_CONTINUATION: - self._opcode = opcode - self._partial += payload - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(self._partial)} " - f"exceeds limit {self._max_msg_size}", - ) - return - - has_partial = bool(self._partial) - if opcode == OP_CODE_CONTINUATION: - opcode = self._opcode - self._opcode = OP_CODE_NOT_SET - # previous frame was non finished - # we should get continuation opcode - elif has_partial: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - f"to be zero, got {opcode!r}", - ) - - assembled_payload: Union[bytes, bytearray] - if has_partial: - assembled_payload = self._partial + payload - self._partial.clear() - else: - assembled_payload = payload - - if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(assembled_payload)} " - f"exceeds limit {self._max_msg_size}", - ) - - # Decompress process must to be done after all packets - # received. - if compressed: - if not self._decompressobj: - self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) - # XXX: It's possible that the zlib backend (isal is known to - # do this, maybe others too?) will return max_length bytes, - # but internally buffer more data such that the payload is - # >max_length, so we return one extra byte and if we're able - # to do that, then the message is too big. - payload_merged = self._decompressobj.decompress_sync( - assembled_payload + WS_DEFLATE_TRAILING, - ( - self._max_msg_size + 1 - if self._max_msg_size - else self._max_msg_size - ), - ) - if self._max_msg_size and len(payload_merged) > self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Decompressed message exceeds size limit {self._max_msg_size}", - ) - elif type(assembled_payload) is bytes: - payload_merged = assembled_payload - else: - payload_merged = bytes(assembled_payload) - - if opcode == OP_CODE_TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # XXX: The Text and Binary messages here can be a performance - # bottleneck, so we use tuple.__new__ to improve performance. - # This is not type safe, but many tests should fail in - # test_client_ws_functional.py if this is wrong. 
- self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), - len(payload_merged), - ) - else: - self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), - len(payload_merged), - ) - elif opcode == OP_CODE_CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message)) - elif payload: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", - ) - else: - msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) - - self.queue.feed_data(msg, 0) - elif opcode == OP_CODE_PING: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) - self.queue.feed_data(msg, len(payload)) - elif opcode == OP_CODE_PONG: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) - self.queue.feed_data(msg, len(payload)) - else: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" - ) - - def _feed_data(self, data: bytes) -> None: - """Return the next frame from the socket.""" - if self._tail: - data, self._tail = self._tail + data, b"" - - start_pos: int = 0 - data_len = len(data) - data_cstr = data - - while True: - # read header - if self._state == READ_HEADER: - if data_len - start_pos < 2: - break - first_byte = data_cstr[start_pos] - second_byte = data_cstr[start_pos + 1] - start_pos += 2 - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) - - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F - - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be larger than 125 bytes", - ) - - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed == COMPRESSED_NOT_SET: - self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_len_flag = length - self._state = READ_PAYLOAD_LENGTH - - # read payload length - if self._state == READ_PAYLOAD_LENGTH: - len_flag = self._payload_len_flag - if 
len_flag == 126: - if data_len - start_pos < 2: - break - first_byte = data_cstr[start_pos] - second_byte = data_cstr[start_pos + 1] - start_pos += 2 - self._payload_bytes_to_read = first_byte << 8 | second_byte - elif len_flag > 126: - if data_len - start_pos < 8: - break - self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0] - start_pos += 8 - else: - self._payload_bytes_to_read = len_flag - - self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD - - # read payload mask - if self._state == READ_PAYLOAD_MASK: - if data_len - start_pos < 4: - break - self._frame_mask = data_cstr[start_pos : start_pos + 4] - start_pos += 4 - self._state = READ_PAYLOAD - - if self._state == READ_PAYLOAD: - chunk_len = data_len - start_pos - if self._payload_bytes_to_read >= chunk_len: - f_end_pos = data_len - self._payload_bytes_to_read -= chunk_len - else: - f_end_pos = start_pos + self._payload_bytes_to_read - self._payload_bytes_to_read = 0 - - had_fragments = self._frame_payload_len - self._frame_payload_len += f_end_pos - start_pos - f_start_pos = start_pos - start_pos = f_end_pos - - if self._payload_bytes_to_read != 0: - # If we don't have a complete frame, we need to save the - # data for the next call to feed_data. - self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) - break - - payload: Union[bytes, bytearray] - if had_fragments: - # We have to join the payload fragments get the payload - self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) - if self._has_mask: - assert self._frame_mask is not None - payload_bytearray = bytearray(b"".join(self._payload_fragments)) - websocket_mask(self._frame_mask, payload_bytearray) - payload = payload_bytearray - else: - payload = b"".join(self._payload_fragments) - self._payload_fragments.clear() - elif self._has_mask: - assert self._frame_mask is not None - payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment] - if type(payload_bytearray) is not bytearray: # pragma: no branch - # Cython will do the conversion for us - # but we need to do it for Python and we - # will always get here in Python - payload_bytearray = bytearray(payload_bytearray) - websocket_mask(self._frame_mask, payload_bytearray) - payload = payload_bytearray - else: - payload = data_cstr[f_start_pos:f_end_pos] - - self._handle_frame( - self._frame_fin, self._frame_opcode, payload, self._compressed - ) - self._frame_payload_len = 0 - self._state = READ_HEADER - - # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. 
- self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b"" diff --git a/venv/Lib/site-packages/aiohttp/_websocket/reader_py.py b/venv/Lib/site-packages/aiohttp/_websocket/reader_py.py deleted file mode 100644 index 5166d7e..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/reader_py.py +++ /dev/null @@ -1,478 +0,0 @@ -"""Reader for WebSocket protocol versions 13 and 8.""" - -import asyncio -import builtins -from collections import deque -from typing import Deque, Final, Optional, Set, Tuple, Union - -from ..base_protocol import BaseProtocol -from ..compression_utils import ZLibDecompressor -from ..helpers import _EXC_SENTINEL, set_exception -from ..streams import EofStream -from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask -from .models import ( - WS_DEFLATE_TRAILING, - WebSocketError, - WSCloseCode, - WSMessage, - WSMsgType, -) - -ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} - -# States for the reader, used to parse the WebSocket frame -# integer values are used so they can be cythonized -READ_HEADER = 1 -READ_PAYLOAD_LENGTH = 2 -READ_PAYLOAD_MASK = 3 -READ_PAYLOAD = 4 - -WS_MSG_TYPE_BINARY = WSMsgType.BINARY -WS_MSG_TYPE_TEXT = WSMsgType.TEXT - -# WSMsgType values unpacked so they can by cythonized to ints -OP_CODE_NOT_SET = -1 -OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value -OP_CODE_TEXT = WSMsgType.TEXT.value -OP_CODE_BINARY = WSMsgType.BINARY.value -OP_CODE_CLOSE = WSMsgType.CLOSE.value -OP_CODE_PING = WSMsgType.PING.value -OP_CODE_PONG = WSMsgType.PONG.value - -EMPTY_FRAME_ERROR = (True, b"") -EMPTY_FRAME = (False, b"") - -COMPRESSED_NOT_SET = -1 -COMPRESSED_FALSE = 0 -COMPRESSED_TRUE = 1 - -TUPLE_NEW = tuple.__new__ - -cython_int = int # Typed to int in Python, but cython with use a signed int in the pxd - - -class WebSocketDataQueue: - """WebSocketDataQueue resumes and pauses an underlying stream. - - It is a destination for WebSocket data. 
- """ - - def __init__( - self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop - ) -> None: - self._size = 0 - self._protocol = protocol - self._limit = limit * 2 - self._loop = loop - self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._exception: Union[BaseException, None] = None - self._buffer: Deque[Tuple[WSMessage, int]] = deque() - self._get_buffer = self._buffer.popleft - self._put_buffer = self._buffer.append - - def is_eof(self) -> bool: - return self._eof - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception( - self, - exc: BaseException, - exc_cause: builtins.BaseException = _EXC_SENTINEL, - ) -> None: - self._eof = True - self._exception = exc - if (waiter := self._waiter) is not None: - self._waiter = None - set_exception(waiter, exc, exc_cause) - - def _release_waiter(self) -> None: - if (waiter := self._waiter) is None: - return - self._waiter = None - if not waiter.done(): - waiter.set_result(None) - - def feed_eof(self) -> None: - self._eof = True - self._release_waiter() - self._exception = None # Break cyclic references - - def feed_data(self, data: "WSMessage", size: "cython_int") -> None: - self._size += size - self._put_buffer((data, size)) - self._release_waiter() - if self._size > self._limit and not self._protocol._reading_paused: - self._protocol.pause_reading() - - async def read(self) -> WSMessage: - if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - return self._read_from_buffer() - - def _read_from_buffer(self) -> WSMessage: - if self._buffer: - data, size = self._get_buffer() - self._size -= size - if self._size < self._limit and self._protocol._reading_paused: - self._protocol.resume_reading() - return data - if self._exception is not None: - raise self._exception - raise EofStream - - -class WebSocketReader: - def __init__( - self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True - ) -> None: - self.queue = queue - self._max_msg_size = max_msg_size - - self._exc: Optional[Exception] = None - self._partial = bytearray() - self._state = READ_HEADER - - self._opcode: int = OP_CODE_NOT_SET - self._frame_fin = False - self._frame_opcode: int = OP_CODE_NOT_SET - self._payload_fragments: list[bytes] = [] - self._frame_payload_len = 0 - - self._tail: bytes = b"" - self._has_mask = False - self._frame_mask: Optional[bytes] = None - self._payload_bytes_to_read = 0 - self._payload_len_flag = 0 - self._compressed: int = COMPRESSED_NOT_SET - self._decompressobj: Optional[ZLibDecompressor] = None - self._compress = compress - - def feed_eof(self) -> None: - self.queue.feed_eof() - - # data can be bytearray on Windows because proactor event loop uses bytearray - # and asyncio types this to Union[bytes, bytearray, memoryview] so we need - # coerce data to bytes if it is not - def feed_data( - self, data: Union[bytes, bytearray, memoryview] - ) -> Tuple[bool, bytes]: - if type(data) is not bytes: - data = bytes(data) - - if self._exc is not None: - return True, data - - try: - self._feed_data(data) - except Exception as exc: - self._exc = exc - set_exception(self.queue, exc) - return EMPTY_FRAME_ERROR - - return EMPTY_FRAME - - def _handle_frame( - self, - fin: bool, - opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int - payload: Union[bytes, bytearray], - compressed: 
Union[int, cython_int], # Union intended: Cython pxd uses C int - ) -> None: - msg: WSMessage - if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: - # Validate continuation frames before processing - if opcode == OP_CODE_CONTINUATION and self._opcode == OP_CODE_NOT_SET: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) - - # load text/binary - if not fin: - # got partial frame payload - if opcode != OP_CODE_CONTINUATION: - self._opcode = opcode - self._partial += payload - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(self._partial)} " - f"exceeds limit {self._max_msg_size}", - ) - return - - has_partial = bool(self._partial) - if opcode == OP_CODE_CONTINUATION: - opcode = self._opcode - self._opcode = OP_CODE_NOT_SET - # previous frame was non finished - # we should get continuation opcode - elif has_partial: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - f"to be zero, got {opcode!r}", - ) - - assembled_payload: Union[bytes, bytearray] - if has_partial: - assembled_payload = self._partial + payload - self._partial.clear() - else: - assembled_payload = payload - - if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(assembled_payload)} " - f"exceeds limit {self._max_msg_size}", - ) - - # Decompress process must to be done after all packets - # received. - if compressed: - if not self._decompressobj: - self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) - # XXX: It's possible that the zlib backend (isal is known to - # do this, maybe others too?) will return max_length bytes, - # but internally buffer more data such that the payload is - # >max_length, so we return one extra byte and if we're able - # to do that, then the message is too big. - payload_merged = self._decompressobj.decompress_sync( - assembled_payload + WS_DEFLATE_TRAILING, - ( - self._max_msg_size + 1 - if self._max_msg_size - else self._max_msg_size - ), - ) - if self._max_msg_size and len(payload_merged) > self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Decompressed message exceeds size limit {self._max_msg_size}", - ) - elif type(assembled_payload) is bytes: - payload_merged = assembled_payload - else: - payload_merged = bytes(assembled_payload) - - if opcode == OP_CODE_TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # XXX: The Text and Binary messages here can be a performance - # bottleneck, so we use tuple.__new__ to improve performance. - # This is not type safe, but many tests should fail in - # test_client_ws_functional.py if this is wrong. 
- self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), - len(payload_merged), - ) - else: - self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), - len(payload_merged), - ) - elif opcode == OP_CODE_CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message)) - elif payload: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", - ) - else: - msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) - - self.queue.feed_data(msg, 0) - elif opcode == OP_CODE_PING: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) - self.queue.feed_data(msg, len(payload)) - elif opcode == OP_CODE_PONG: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) - self.queue.feed_data(msg, len(payload)) - else: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" - ) - - def _feed_data(self, data: bytes) -> None: - """Return the next frame from the socket.""" - if self._tail: - data, self._tail = self._tail + data, b"" - - start_pos: int = 0 - data_len = len(data) - data_cstr = data - - while True: - # read header - if self._state == READ_HEADER: - if data_len - start_pos < 2: - break - first_byte = data_cstr[start_pos] - second_byte = data_cstr[start_pos + 1] - start_pos += 2 - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) - - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F - - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be larger than 125 bytes", - ) - - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed == COMPRESSED_NOT_SET: - self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_len_flag = length - self._state = READ_PAYLOAD_LENGTH - - # read payload length - if self._state == READ_PAYLOAD_LENGTH: - len_flag = self._payload_len_flag - if 
len_flag == 126: - if data_len - start_pos < 2: - break - first_byte = data_cstr[start_pos] - second_byte = data_cstr[start_pos + 1] - start_pos += 2 - self._payload_bytes_to_read = first_byte << 8 | second_byte - elif len_flag > 126: - if data_len - start_pos < 8: - break - self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0] - start_pos += 8 - else: - self._payload_bytes_to_read = len_flag - - self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD - - # read payload mask - if self._state == READ_PAYLOAD_MASK: - if data_len - start_pos < 4: - break - self._frame_mask = data_cstr[start_pos : start_pos + 4] - start_pos += 4 - self._state = READ_PAYLOAD - - if self._state == READ_PAYLOAD: - chunk_len = data_len - start_pos - if self._payload_bytes_to_read >= chunk_len: - f_end_pos = data_len - self._payload_bytes_to_read -= chunk_len - else: - f_end_pos = start_pos + self._payload_bytes_to_read - self._payload_bytes_to_read = 0 - - had_fragments = self._frame_payload_len - self._frame_payload_len += f_end_pos - start_pos - f_start_pos = start_pos - start_pos = f_end_pos - - if self._payload_bytes_to_read != 0: - # If we don't have a complete frame, we need to save the - # data for the next call to feed_data. - self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) - break - - payload: Union[bytes, bytearray] - if had_fragments: - # We have to join the payload fragments get the payload - self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) - if self._has_mask: - assert self._frame_mask is not None - payload_bytearray = bytearray(b"".join(self._payload_fragments)) - websocket_mask(self._frame_mask, payload_bytearray) - payload = payload_bytearray - else: - payload = b"".join(self._payload_fragments) - self._payload_fragments.clear() - elif self._has_mask: - assert self._frame_mask is not None - payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment] - if type(payload_bytearray) is not bytearray: # pragma: no branch - # Cython will do the conversion for us - # but we need to do it for Python and we - # will always get here in Python - payload_bytearray = bytearray(payload_bytearray) - websocket_mask(self._frame_mask, payload_bytearray) - payload = payload_bytearray - else: - payload = data_cstr[f_start_pos:f_end_pos] - - self._handle_frame( - self._frame_fin, self._frame_opcode, payload, self._compressed - ) - self._frame_payload_len = 0 - self._state = READ_HEADER - - # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. 
- self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b"" diff --git a/venv/Lib/site-packages/aiohttp/_websocket/writer.py b/venv/Lib/site-packages/aiohttp/_websocket/writer.py deleted file mode 100644 index 9604202..0000000 --- a/venv/Lib/site-packages/aiohttp/_websocket/writer.py +++ /dev/null @@ -1,262 +0,0 @@ -"""WebSocket protocol versions 13 and 8.""" - -import asyncio -import random -import sys -from functools import partial -from typing import Final, Optional, Set, Union - -from ..base_protocol import BaseProtocol -from ..client_exceptions import ClientConnectionResetError -from ..compression_utils import ZLibBackend, ZLibCompressor -from .helpers import ( - MASK_LEN, - MSG_SIZE, - PACK_CLOSE_CODE, - PACK_LEN1, - PACK_LEN2, - PACK_LEN3, - PACK_RANDBITS, - websocket_mask, -) -from .models import WS_DEFLATE_TRAILING, WSMsgType - -DEFAULT_LIMIT: Final[int] = 2**16 - -# WebSocket opcode boundary: opcodes 0-7 are data frames, 8-15 are control frames -# Control frames (ping, pong, close) are never compressed -WS_CONTROL_FRAME_OPCODE: Final[int] = 8 - -# For websockets, keeping latency low is extremely important as implementations -# generally expect to be able to send and receive messages quickly. We use a -# larger chunk size to reduce the number of executor calls and avoid task -# creation overhead, since both are significant sources of latency when chunks -# are small. A size of 16KiB was chosen as a balance between avoiding task -# overhead and not blocking the event loop too long with synchronous compression. - -WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 16 * 1024 - - -class WebSocketWriter: - """WebSocket writer. - - The writer is responsible for sending messages to the client. It is - created by the protocol when a connection is established. The writer - should avoid implementing any application logic and should only be - concerned with the low-level details of the WebSocket protocol. - """ - - def __init__( - self, - protocol: BaseProtocol, - transport: asyncio.Transport, - *, - use_mask: bool = False, - limit: int = DEFAULT_LIMIT, - random: random.Random = random.Random(), - compress: int = 0, - notakeover: bool = False, - ) -> None: - """Initialize a WebSocket writer.""" - self.protocol = protocol - self.transport = transport - self.use_mask = use_mask - self.get_random_bits = partial(random.getrandbits, 32) - self.compress = compress - self.notakeover = notakeover - self._closing = False - self._limit = limit - self._output_size = 0 - self._compressobj: Optional[ZLibCompressor] = None - self._send_lock = asyncio.Lock() - self._background_tasks: Set[asyncio.Task[None]] = set() - - async def send_frame( - self, message: bytes, opcode: int, compress: Optional[int] = None - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if self._closing and not (opcode & WSMsgType.CLOSE): - raise ClientConnectionResetError("Cannot write to closing transport") - - if not (compress or self.compress) or opcode >= WS_CONTROL_FRAME_OPCODE: - # Non-compressed frames don't need lock or shield - self._write_websocket_frame(message, opcode, 0) - elif len(message) <= WEBSOCKET_MAX_SYNC_CHUNK_SIZE: - # Small compressed payloads - compress synchronously in event loop - # We need the lock even though sync compression has no await points. - # This prevents small frames from interleaving with large frames that - # compress in the executor, avoiding compressor state corruption. 
- async with self._send_lock: - self._send_compressed_frame_sync(message, opcode, compress) - else: - # Large compressed frames need shield to prevent corruption - # For large compressed frames, the entire compress+send - # operation must be atomic. If cancelled after compression but - # before send, the compressor state would be advanced but data - # not sent, corrupting subsequent frames. - # Create a task to shield from cancellation - # The lock is acquired inside the shielded task so the entire - # operation (lock + compress + send) completes atomically. - # Use eager_start on Python 3.12+ to avoid scheduling overhead - loop = asyncio.get_running_loop() - coro = self._send_compressed_frame_async_locked(message, opcode, compress) - if sys.version_info >= (3, 12): - send_task = asyncio.Task(coro, loop=loop, eager_start=True) - else: - send_task = loop.create_task(coro) - # Keep a strong reference to prevent garbage collection - self._background_tasks.add(send_task) - send_task.add_done_callback(self._background_tasks.discard) - await asyncio.shield(send_task) - - # It is safe to return control to the event loop when using compression - # after this point as we have already sent or buffered all the data. - # Once we have written output_size up to the limit, we call the - # drain helper which waits for the transport to be ready to accept - # more data. This is a flow control mechanism to prevent the buffer - # from growing too large. The drain helper will return right away - # if the writer is not paused. - if self._output_size > self._limit: - self._output_size = 0 - if self.protocol._paused: - await self.protocol._drain_helper() - - def _write_websocket_frame(self, message: bytes, opcode: int, rsv: int) -> None: - """ - Write a websocket frame to the transport. - - This method handles frame header construction, masking, and writing to transport. - It does not handle compression or flow control - those are the responsibility - of the caller. - """ - msg_length = len(message) - - use_mask = self.use_mask - mask_bit = 0x80 if use_mask else 0 - - # Depending on the message length, the header is assembled differently. - # The first byte is reserved for the opcode and the RSV bits. - first_byte = 0x80 | rsv | opcode - if msg_length < 126: - header = PACK_LEN1(first_byte, msg_length | mask_bit) - header_len = 2 - elif msg_length < 65536: - header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) - header_len = 4 - else: - header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) - header_len = 10 - - if self.transport.is_closing(): - raise ClientConnectionResetError("Cannot write to closing transport") - - # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 - # If we are using a mask, we need to generate it randomly - # and apply it to the message before sending it. A mask is - # a 32-bit value that is applied to the message using a - # bitwise XOR operation. It is used to prevent certain types - # of attacks on the websocket protocol. The mask is only used - # when aiohttp is acting as a client. Servers do not use a mask. 
- if use_mask: - mask = PACK_RANDBITS(self.get_random_bits()) - message = bytearray(message) - websocket_mask(mask, message) - self.transport.write(header + mask + message) - self._output_size += MASK_LEN - elif msg_length > MSG_SIZE: - self.transport.write(header) - self.transport.write(message) - else: - self.transport.write(header + message) - - self._output_size += header_len + msg_length - - def _get_compressor(self, compress: Optional[int]) -> ZLibCompressor: - """Get or create a compressor object for the given compression level.""" - if compress: - # Do not set self._compress if compressing is for this frame - return ZLibCompressor( - level=ZLibBackend.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) - if not self._compressobj: - self._compressobj = ZLibCompressor( - level=ZLibBackend.Z_BEST_SPEED, - wbits=-self.compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) - return self._compressobj - - def _send_compressed_frame_sync( - self, message: bytes, opcode: int, compress: Optional[int] - ) -> None: - """ - Synchronous send for small compressed frames. - - This is used for small compressed payloads that compress synchronously in the event loop. - Since there are no await points, this is inherently cancellation-safe. - """ - # RSV are the reserved bits in the frame header. They are used to - # indicate that the frame is using an extension. - # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 - compressobj = self._get_compressor(compress) - # (0x40) RSV1 is set for compressed frames - # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 - self._write_websocket_frame( - ( - compressobj.compress_sync(message) - + compressobj.flush( - ZLibBackend.Z_FULL_FLUSH - if self.notakeover - else ZLibBackend.Z_SYNC_FLUSH - ) - ).removesuffix(WS_DEFLATE_TRAILING), - opcode, - 0x40, - ) - - async def _send_compressed_frame_async_locked( - self, message: bytes, opcode: int, compress: Optional[int] - ) -> None: - """ - Async send for large compressed frames with lock. - - Acquires the lock and compresses large payloads asynchronously in - the executor. The lock is held for the entire operation to ensure - the compressor state is not corrupted by concurrent sends. - - MUST be run shielded from cancellation. If cancelled after - compression but before sending, the compressor state would be - advanced but data not sent, corrupting subsequent frames. - """ - async with self._send_lock: - # RSV are the reserved bits in the frame header. They are used to - # indicate that the frame is using an extension. 
- # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 - compressobj = self._get_compressor(compress) - # (0x40) RSV1 is set for compressed frames - # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 - self._write_websocket_frame( - ( - await compressobj.compress(message) - + compressobj.flush( - ZLibBackend.Z_FULL_FLUSH - if self.notakeover - else ZLibBackend.Z_SYNC_FLUSH - ) - ).removesuffix(WS_DEFLATE_TRAILING), - opcode, - 0x40, - ) - - async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: - """Close the websocket, sending the specified code and message.""" - if isinstance(message, str): - message = message.encode("utf-8") - try: - await self.send_frame( - PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE - ) - finally: - self._closing = True diff --git a/venv/Lib/site-packages/aiohttp/abc.py b/venv/Lib/site-packages/aiohttp/abc.py deleted file mode 100644 index faf0957..0000000 --- a/venv/Lib/site-packages/aiohttp/abc.py +++ /dev/null @@ -1,268 +0,0 @@ -import asyncio -import logging -import socket -from abc import ABC, abstractmethod -from collections.abc import Sized -from http.cookies import BaseCookie, Morsel -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Dict, - Generator, - Iterable, - List, - Optional, - Sequence, - Tuple, - TypedDict, - Union, -) - -from multidict import CIMultiDict -from yarl import URL - -from ._cookie_helpers import parse_set_cookie_headers -from .typedefs import LooseCookies - -if TYPE_CHECKING: - from .web_app import Application - from .web_exceptions import HTTPException - from .web_request import BaseRequest, Request - from .web_response import StreamResponse -else: - BaseRequest = Request = Application = StreamResponse = None - HTTPException = None - - -class AbstractRouter(ABC): - def __init__(self) -> None: - self._frozen = False - - def post_init(self, app: Application) -> None: - """Post init stage. - - Not an abstract method for sake of backward compatibility, - but if the router wants to be aware of the application - it can override this. - """ - - @property - def frozen(self) -> bool: - return self._frozen - - def freeze(self) -> None: - """Freeze router.""" - self._frozen = True - - @abstractmethod - async def resolve(self, request: Request) -> "AbstractMatchInfo": - """Return MATCH_INFO for given request""" - - -class AbstractMatchInfo(ABC): - - __slots__ = () - - @property # pragma: no branch - @abstractmethod - def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: - """Execute matched request handler""" - - @property - @abstractmethod - def expect_handler( - self, - ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]: - """Expect handler for 100-continue processing""" - - @property # pragma: no branch - @abstractmethod - def http_exception(self) -> Optional[HTTPException]: - """HTTPException instance raised on router's resolving, or None""" - - @abstractmethod # pragma: no branch - def get_info(self) -> Dict[str, Any]: - """Return a dict with additional info useful for introspection""" - - @property # pragma: no branch - @abstractmethod - def apps(self) -> Tuple[Application, ...]: - """Stack of nested applications. - - Top level application is left-most element. - - """ - - @abstractmethod - def add_app(self, app: Application) -> None: - """Add application to the nested apps stack.""" - - @abstractmethod - def freeze(self) -> None: - """Freeze the match info. - - The method is called after route resolution. 
- - After the call .add_app() is forbidden. - - """ - - -class AbstractView(ABC): - """Abstract class based view.""" - - def __init__(self, request: Request) -> None: - self._request = request - - @property - def request(self) -> Request: - """Request instance.""" - return self._request - - @abstractmethod - def __await__(self) -> Generator[None, None, StreamResponse]: - """Execute the view handler.""" - - -class ResolveResult(TypedDict): - """Resolve result. - - This is the result returned from an AbstractResolver's - resolve method. - - :param hostname: The hostname that was provided. - :param host: The IP address that was resolved. - :param port: The port that was resolved. - :param family: The address family that was resolved. - :param proto: The protocol that was resolved. - :param flags: The flags that were resolved. - """ - - hostname: str - host: str - port: int - family: int - proto: int - flags: int - - -class AbstractResolver(ABC): - """Abstract DNS resolver.""" - - @abstractmethod - async def resolve( - self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET - ) -> List[ResolveResult]: - """Return IP address for given hostname""" - - @abstractmethod - async def close(self) -> None: - """Release resolver""" - - -if TYPE_CHECKING: - IterableBase = Iterable[Morsel[str]] -else: - IterableBase = Iterable - - -ClearCookiePredicate = Callable[["Morsel[str]"], bool] - - -class AbstractCookieJar(Sized, IterableBase): - """Abstract Cookie Jar.""" - - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = loop or asyncio.get_running_loop() - - @property - @abstractmethod - def quote_cookie(self) -> bool: - """Return True if cookies should be quoted.""" - - @abstractmethod - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: - """Clear all cookies if no predicate is passed.""" - - @abstractmethod - def clear_domain(self, domain: str) -> None: - """Clear all cookies for domain and all subdomains.""" - - @abstractmethod - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - """Update cookies.""" - - def update_cookies_from_headers( - self, headers: Sequence[str], response_url: URL - ) -> None: - """Update cookies from raw Set-Cookie headers.""" - if headers and (cookies_to_update := parse_set_cookie_headers(headers)): - self.update_cookies(cookies_to_update, response_url) - - @abstractmethod - def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": - """Return the jar's cookies filtered by their attributes.""" - - -class AbstractStreamWriter(ABC): - """Abstract stream writer.""" - - buffer_size: int = 0 - output_size: int = 0 - length: Optional[int] = 0 - - @abstractmethod - async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: - """Write chunk into stream.""" - - @abstractmethod - async def write_eof(self, chunk: bytes = b"") -> None: - """Write last chunk.""" - - @abstractmethod - async def drain(self) -> None: - """Flush the write buffer.""" - - @abstractmethod - def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None - ) -> None: - """Enable HTTP body compression""" - - @abstractmethod - def enable_chunking(self) -> None: - """Enable HTTP chunked mode""" - - @abstractmethod - async def write_headers( - self, status_line: str, headers: "CIMultiDict[str]" - ) -> None: - """Write HTTP headers""" - - def send_headers(self) -> None: - """Force sending buffered headers if not already sent. 
- - Required only if write_headers() buffers headers instead of sending immediately. - For backwards compatibility, this method does nothing by default. - """ - - -class AbstractAccessLogger(ABC): - """Abstract writer to access log.""" - - __slots__ = ("logger", "log_format") - - def __init__(self, logger: logging.Logger, log_format: str) -> None: - self.logger = logger - self.log_format = log_format - - @abstractmethod - def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - """Emit log to logger.""" - - @property - def enabled(self) -> bool: - """Check if logger is enabled.""" - return True diff --git a/venv/Lib/site-packages/aiohttp/base_protocol.py b/venv/Lib/site-packages/aiohttp/base_protocol.py deleted file mode 100644 index b0a67ed..0000000 --- a/venv/Lib/site-packages/aiohttp/base_protocol.py +++ /dev/null @@ -1,100 +0,0 @@ -import asyncio -from typing import Optional, cast - -from .client_exceptions import ClientConnectionResetError -from .helpers import set_exception -from .tcp_helpers import tcp_nodelay - - -class BaseProtocol(asyncio.Protocol): - __slots__ = ( - "_loop", - "_paused", - "_drain_waiter", - "_connection_lost", - "_reading_paused", - "transport", - ) - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop: asyncio.AbstractEventLoop = loop - self._paused = False - self._drain_waiter: Optional[asyncio.Future[None]] = None - self._reading_paused = False - - self.transport: Optional[asyncio.Transport] = None - - @property - def connected(self) -> bool: - """Return True if the connection is open.""" - return self.transport is not None - - @property - def writing_paused(self) -> bool: - return self._paused - - def pause_writing(self) -> None: - assert not self._paused - self._paused = True - - def resume_writing(self) -> None: - assert self._paused - self._paused = False - - waiter = self._drain_waiter - if waiter is not None: - self._drain_waiter = None - if not waiter.done(): - waiter.set_result(None) - - def pause_reading(self) -> None: - if not self._reading_paused and self.transport is not None: - try: - self.transport.pause_reading() - except (AttributeError, NotImplementedError, RuntimeError): - pass - self._reading_paused = True - - def resume_reading(self) -> None: - if self._reading_paused and self.transport is not None: - try: - self.transport.resume_reading() - except (AttributeError, NotImplementedError, RuntimeError): - pass - self._reading_paused = False - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - tr = cast(asyncio.Transport, transport) - tcp_nodelay(tr, True) - self.transport = tr - - def connection_lost(self, exc: Optional[BaseException]) -> None: - # Wake up the writer if currently paused. 
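pause_writing and resume_writing above are the asyncio write flow-control hooks: the transport pauses the protocol when its send buffer fills up, and resume_writing wakes whichever coroutine is parked on the drain waiter (the _drain_helper that follows). A standalone sketch of the same pattern, with names chosen here for illustration rather than taken from aiohttp:

import asyncio
from typing import Optional

class FlowControl:
    """Minimal pause/resume + drain-waiter pattern."""

    def __init__(self) -> None:
        self._paused = False
        self._drain_waiter: Optional["asyncio.Future[None]"] = None

    def pause_writing(self) -> None:
        # Transport buffer crossed the high-water mark: stop producing.
        self._paused = True

    def resume_writing(self) -> None:
        # Buffer drained below the low-water mark: release anyone in drain().
        self._paused = False
        waiter, self._drain_waiter = self._drain_waiter, None
        if waiter is not None and not waiter.done():
            waiter.set_result(None)

    async def drain(self) -> None:
        # Writers await this between chunks; returns at once unless paused.
        if not self._paused:
            return
        if self._drain_waiter is None:
            self._drain_waiter = asyncio.get_running_loop().create_future()
        await asyncio.shield(self._drain_waiter)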
- self.transport = None - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - set_exception( - waiter, - ConnectionError("Connection lost"), - exc, - ) - - async def _drain_helper(self) -> None: - if self.transport is None: - raise ClientConnectionResetError("Connection lost") - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - waiter = self._loop.create_future() - self._drain_waiter = waiter - await asyncio.shield(waiter) diff --git a/venv/Lib/site-packages/aiohttp/client.py b/venv/Lib/site-packages/aiohttp/client.py deleted file mode 100644 index bc4ee17..0000000 --- a/venv/Lib/site-packages/aiohttp/client.py +++ /dev/null @@ -1,1635 +0,0 @@ -"""HTTP Client for asyncio.""" - -import asyncio -import base64 -import hashlib -import json -import os -import sys -import traceback -import warnings -from contextlib import suppress -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Coroutine, - Final, - FrozenSet, - Generator, - Generic, - Iterable, - List, - Mapping, - Optional, - Sequence, - Set, - Tuple, - Type, - TypedDict, - TypeVar, - Union, -) - -import attr -from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr -from yarl import URL - -from . import hdrs, http, payload -from ._websocket.reader import WebSocketDataQueue -from .abc import AbstractCookieJar -from .client_exceptions import ( - ClientConnectionError, - ClientConnectionResetError, - ClientConnectorCertificateError, - ClientConnectorDNSError, - ClientConnectorError, - ClientConnectorSSLError, - ClientError, - ClientHttpProxyError, - ClientOSError, - ClientPayloadError, - ClientProxyConnectionError, - ClientResponseError, - ClientSSLError, - ConnectionTimeoutError, - ContentTypeError, - InvalidURL, - InvalidUrlClientError, - InvalidUrlRedirectClientError, - NonHttpUrlClientError, - NonHttpUrlRedirectClientError, - RedirectClientError, - ServerConnectionError, - ServerDisconnectedError, - ServerFingerprintMismatch, - ServerTimeoutError, - SocketTimeoutError, - TooManyRedirects, - WSMessageTypeError, - WSServerHandshakeError, -) -from .client_middlewares import ClientMiddlewareType, build_client_middlewares -from .client_reqrep import ( - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - Fingerprint as Fingerprint, - RequestInfo as RequestInfo, - _merge_ssl_params, -) -from .client_ws import ( - DEFAULT_WS_CLIENT_TIMEOUT, - ClientWebSocketResponse as ClientWebSocketResponse, - ClientWSTimeout as ClientWSTimeout, -) -from .connector import ( - HTTP_AND_EMPTY_SCHEMA_SET, - BaseConnector as BaseConnector, - NamedPipeConnector as NamedPipeConnector, - TCPConnector as TCPConnector, - UnixConnector as UnixConnector, -) -from .cookiejar import CookieJar -from .helpers import ( - _SENTINEL, - DEBUG, - EMPTY_BODY_METHODS, - BasicAuth, - TimeoutHandle, - basicauth_from_netrc, - get_env_proxy_for_url, - netrc_from_env, - sentinel, - strip_auth_from_url, -) -from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter -from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse -from .tracing import Trace, TraceConfig -from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL - -__all__ = ( - # client_exceptions - "ClientConnectionError", - "ClientConnectionResetError", - "ClientConnectorCertificateError", - "ClientConnectorDNSError", - 
"ClientConnectorError", - "ClientConnectorSSLError", - "ClientError", - "ClientHttpProxyError", - "ClientOSError", - "ClientPayloadError", - "ClientProxyConnectionError", - "ClientResponseError", - "ClientSSLError", - "ConnectionTimeoutError", - "ContentTypeError", - "InvalidURL", - "InvalidUrlClientError", - "RedirectClientError", - "NonHttpUrlClientError", - "InvalidUrlRedirectClientError", - "NonHttpUrlRedirectClientError", - "ServerConnectionError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ServerTimeoutError", - "SocketTimeoutError", - "TooManyRedirects", - "WSServerHandshakeError", - # client_reqrep - "ClientRequest", - "ClientResponse", - "Fingerprint", - "RequestInfo", - # connector - "BaseConnector", - "TCPConnector", - "UnixConnector", - "NamedPipeConnector", - # client_ws - "ClientWebSocketResponse", - # client - "ClientSession", - "ClientTimeout", - "ClientWSTimeout", - "request", - "WSMessageTypeError", -) - - -if TYPE_CHECKING: - from ssl import SSLContext -else: - SSLContext = None - -if sys.version_info >= (3, 11) and TYPE_CHECKING: - from typing import Unpack - - -class _RequestOptions(TypedDict, total=False): - params: Query - data: Any - json: Any - cookies: Union[LooseCookies, None] - headers: Union[LooseHeaders, None] - skip_auto_headers: Union[Iterable[str], None] - auth: Union[BasicAuth, None] - allow_redirects: bool - max_redirects: int - compress: Union[str, bool, None] - chunked: Union[bool, None] - expect100: bool - raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] - read_until_eof: bool - proxy: Union[StrOrURL, None] - proxy_auth: Union[BasicAuth, None] - timeout: "Union[ClientTimeout, _SENTINEL, None]" - ssl: Union[SSLContext, bool, Fingerprint] - server_hostname: Union[str, None] - proxy_headers: Union[LooseHeaders, None] - trace_request_ctx: Union[Mapping[str, Any], None] - read_bufsize: Union[int, None] - auto_decompress: Union[bool, None] - max_line_size: Union[int, None] - max_field_size: Union[int, None] - middlewares: Optional[Sequence[ClientMiddlewareType]] - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ClientTimeout: - total: Optional[float] = None - connect: Optional[float] = None - sock_read: Optional[float] = None - sock_connect: Optional[float] = None - ceil_threshold: float = 5 - - # pool_queue_timeout: Optional[float] = None - # dns_resolution_timeout: Optional[float] = None - # socket_connect_timeout: Optional[float] = None - # connection_acquiring_timeout: Optional[float] = None - # new_connection_timeout: Optional[float] = None - # http_header_timeout: Optional[float] = None - # response_body_timeout: Optional[float] = None - - # to create a timeout specific for a single request, either - # - create a completely new one to overwrite the default - # - or use http://www.attrs.org/en/stable/api.html#attr.evolve - # to overwrite the defaults - - -# 5 Minute default read timeout -DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30) - -# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 -IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) - -_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse) -_CharsetResolver = Callable[[ClientResponse, bytes], str] - - -class ClientSession: - """First-class interface for making HTTP requests.""" - - ATTRS = frozenset( - [ - "_base_url", - "_base_url_origin", - "_source_traceback", - "_connector", - "_loop", - "_cookie_jar", - "_connector_owner", - 
"_default_auth", - "_version", - "_json_serialize", - "_requote_redirect_url", - "_timeout", - "_raise_for_status", - "_auto_decompress", - "_trust_env", - "_default_headers", - "_skip_auto_headers", - "_request_class", - "_response_class", - "_ws_response_class", - "_trace_configs", - "_read_bufsize", - "_max_line_size", - "_max_field_size", - "_resolve_charset", - "_default_proxy", - "_default_proxy_auth", - "_retry_connection", - "_middlewares", - "requote_redirect_url", - ] - ) - - _source_traceback: Optional[traceback.StackSummary] = None - _connector: Optional[BaseConnector] = None - - def __init__( - self, - base_url: Optional[StrOrURL] = None, - *, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - json_serialize: JSONEncoder = json.dumps, - request_class: Type[ClientRequest] = ClientRequest, - response_class: Type[ClientResponse] = ClientResponse, - ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, - version: HttpVersion = http.HttpVersion11, - cookie_jar: Optional[AbstractCookieJar] = None, - connector_owner: bool = True, - raise_for_status: Union[ - bool, Callable[[ClientResponse], Awaitable[None]] - ] = False, - read_timeout: Union[float, _SENTINEL] = sentinel, - conn_timeout: Optional[float] = None, - timeout: Union[object, ClientTimeout] = sentinel, - auto_decompress: bool = True, - trust_env: bool = False, - requote_redirect_url: bool = True, - trace_configs: Optional[List[TraceConfig]] = None, - read_bufsize: int = 2**16, - max_line_size: int = 8190, - max_field_size: int = 8190, - fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", - middlewares: Sequence[ClientMiddlewareType] = (), - ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel, - ) -> None: - # We initialise _connector to None immediately, as it's referenced in __del__() - # and could cause issues if an exception occurs during initialisation. 
- self._connector: Optional[BaseConnector] = None - - if loop is None: - if connector is not None: - loop = connector._loop - - loop = loop or asyncio.get_running_loop() - - if base_url is None or isinstance(base_url, URL): - self._base_url: Optional[URL] = base_url - self._base_url_origin = None if base_url is None else base_url.origin() - else: - self._base_url = URL(base_url) - self._base_url_origin = self._base_url.origin() - assert self._base_url.absolute, "Only absolute URLs are supported" - if self._base_url is not None and not self._base_url.path.endswith("/"): - raise ValueError("base_url must have a trailing '/'") - - if timeout is sentinel or timeout is None: - self._timeout = DEFAULT_TIMEOUT - if read_timeout is not sentinel: - warnings.warn( - "read_timeout is deprecated, use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - self._timeout = attr.evolve(self._timeout, total=read_timeout) - if conn_timeout is not None: - self._timeout = attr.evolve(self._timeout, connect=conn_timeout) - warnings.warn( - "conn_timeout is deprecated, use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - else: - if not isinstance(timeout, ClientTimeout): - raise ValueError( - f"timeout parameter cannot be of {type(timeout)} type, " - "please use 'timeout=ClientTimeout(...)'", - ) - self._timeout = timeout - if read_timeout is not sentinel: - raise ValueError( - "read_timeout and timeout parameters " - "conflict, please setup " - "timeout.read" - ) - if conn_timeout is not None: - raise ValueError( - "conn_timeout and timeout parameters " - "conflict, please setup " - "timeout.connect" - ) - - if ssl_shutdown_timeout is not sentinel: - warnings.warn( - "The ssl_shutdown_timeout parameter is deprecated and will be removed in aiohttp 4.0", - DeprecationWarning, - stacklevel=2, - ) - - if connector is None: - connector = TCPConnector( - loop=loop, ssl_shutdown_timeout=ssl_shutdown_timeout - ) - - if connector._loop is not loop: - raise RuntimeError("Session and connector has to use same event loop") - - self._loop = loop - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - if cookie_jar is None: - cookie_jar = CookieJar(loop=loop) - self._cookie_jar = cookie_jar - - if cookies: - self._cookie_jar.update_cookies(cookies) - - self._connector = connector - self._connector_owner = connector_owner - self._default_auth = auth - self._version = version - self._json_serialize = json_serialize - self._raise_for_status = raise_for_status - self._auto_decompress = auto_decompress - self._trust_env = trust_env - self._requote_redirect_url = requote_redirect_url - self._read_bufsize = read_bufsize - self._max_line_size = max_line_size - self._max_field_size = max_field_size - - # Convert to list of tuples - if headers: - real_headers: CIMultiDict[str] = CIMultiDict(headers) - else: - real_headers = CIMultiDict() - self._default_headers: CIMultiDict[str] = real_headers - if skip_auto_headers is not None: - self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) - else: - self._skip_auto_headers = frozenset() - - self._request_class = request_class - self._response_class = response_class - self._ws_response_class = ws_response_class - - self._trace_configs = trace_configs or [] - for trace_config in self._trace_configs: - trace_config.freeze() - - self._resolve_charset = fallback_charset_resolver - - self._default_proxy = proxy - self._default_proxy_auth = proxy_auth - self._retry_connection: bool = True - 
self._middlewares = middlewares - - def __init_subclass__(cls: Type["ClientSession"]) -> None: - warnings.warn( - "Inheritance class {} from ClientSession " - "is discouraged".format(cls.__name__), - DeprecationWarning, - stacklevel=2, - ) - - if DEBUG: - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom ClientSession.{} attribute " - "is discouraged".format(name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - def __del__(self, _warnings: Any = warnings) -> None: - if not self.closed: - kwargs = {"source": self} - _warnings.warn( - f"Unclosed client session {self!r}", ResourceWarning, **kwargs - ) - context = {"client_session": self, "message": "Unclosed client session"} - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - if sys.version_info >= (3, 11) and TYPE_CHECKING: - - def request( - self, - method: str, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - else: - - def request( - self, method: str, url: StrOrURL, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP request.""" - return _RequestContextManager(self._request(method, url, **kwargs)) - - def _build_url(self, str_or_url: StrOrURL) -> URL: - url = URL(str_or_url) - if self._base_url and not url.absolute: - return self._base_url.join(url) - return url - - async def _request( - self, - method: str, - str_or_url: StrOrURL, - *, - params: Query = None, - data: Any = None, - json: Any = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Union[str, bool, None] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Union[ - None, bool, Callable[[ClientResponse], Awaitable[None]] - ] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, _SENTINEL] = sentinel, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[Mapping[str, Any]] = None, - read_bufsize: Optional[int] = None, - auto_decompress: Optional[bool] = None, - max_line_size: Optional[int] = None, - max_field_size: Optional[int] = None, - middlewares: Optional[Sequence[ClientMiddlewareType]] = None, - ) -> ClientResponse: - - # NOTE: timeout clamps existing connect and read timeouts. We cannot - # set the default to None because we need to detect if the user wants - # to use the existing timeouts by setting timeout to None. 
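That NOTE is the usual reason for a module-level sentinel default: with timeout=sentinel the method can distinguish "argument omitted, fall back to the session timeout" from an explicit timeout=None. A generic sketch of the pattern, with names invented for illustration:

_MISSING = object()  # unique marker meaning "caller did not pass this argument"

def resolve_timeout(timeout=_MISSING, *, session_default=300):
    if timeout is _MISSING:
        return session_default  # omitted: inherit the session-wide setting
    if timeout is None:
        return None             # explicit None: caller really wants no timeout
    return timeout              # anything else is used as given

assert resolve_timeout() == 300
assert resolve_timeout(None) is None
assert resolve_timeout(15) == 15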
- - if self.closed: - raise RuntimeError("Session is closed") - - ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - - if data is not None and json is not None: - raise ValueError( - "data and json parameters can not be used at the same time" - ) - elif json is not None: - data = payload.JsonPayload(json, dumps=self._json_serialize) - - if not isinstance(chunked, bool) and chunked is not None: - warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) - - redirects = 0 - history: List[ClientResponse] = [] - version = self._version - params = params or {} - - # Merge with default headers and transform to CIMultiDict - headers = self._prepare_headers(headers) - - try: - url = self._build_url(str_or_url) - except ValueError as e: - raise InvalidUrlClientError(str_or_url) from e - - assert self._connector is not None - if url.scheme not in self._connector.allowed_protocol_schema_set: - raise NonHttpUrlClientError(url) - - skip_headers: Optional[Iterable[istr]] - if skip_auto_headers is not None: - skip_headers = { - istr(i) for i in skip_auto_headers - } | self._skip_auto_headers - elif self._skip_auto_headers: - skip_headers = self._skip_auto_headers - else: - skip_headers = None - - if proxy is None: - proxy = self._default_proxy - if proxy_auth is None: - proxy_auth = self._default_proxy_auth - - if proxy is None: - proxy_headers = None - else: - proxy_headers = self._prepare_headers(proxy_headers) - try: - proxy = URL(proxy) - except ValueError as e: - raise InvalidURL(proxy) from e - - if timeout is sentinel: - real_timeout: ClientTimeout = self._timeout - else: - if not isinstance(timeout, ClientTimeout): - real_timeout = ClientTimeout(total=timeout) - else: - real_timeout = timeout - # timeout is cumulative for all request operations - # (request, redirects, responses, data consuming) - tm = TimeoutHandle( - self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold - ) - handle = tm.start() - - if read_bufsize is None: - read_bufsize = self._read_bufsize - - if auto_decompress is None: - auto_decompress = self._auto_decompress - - if max_line_size is None: - max_line_size = self._max_line_size - - if max_field_size is None: - max_field_size = self._max_field_size - - traces = [ - Trace( - self, - trace_config, - trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), - ) - for trace_config in self._trace_configs - ] - - for trace in traces: - await trace.send_request_start(method, url.update_query(params), headers) - - timer = tm.timer() - try: - with timer: - # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests - retry_persistent_connection = ( - self._retry_connection and method in IDEMPOTENT_METHODS - ) - while True: - url, auth_from_url = strip_auth_from_url(url) - if not url.raw_host: - # NOTE: Bail early, otherwise, causes `InvalidURL` through - # NOTE: `self._request_class()` below. 
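Because the TimeoutHandle started above is cumulative over the whole exchange (connecting, following redirects, consuming the body), callers generally tune the individual ClientTimeout fields rather than rely on one large total. A typical configuration using the public aiohttp API (the values and URL are only examples):

import aiohttp

timeout = aiohttp.ClientTimeout(total=60, connect=10, sock_connect=10, sock_read=30)

async def fetch(url: str = "https://example.com/api") -> str:
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            return await resp.text()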
- err_exc_cls = ( - InvalidUrlRedirectClientError - if redirects - else InvalidUrlClientError - ) - raise err_exc_cls(url) - # If `auth` was passed for an already authenticated URL, - # disallow only if this is the initial URL; this is to avoid issues - # with sketchy redirects that are not the caller's responsibility - if not history and (auth and auth_from_url): - raise ValueError( - "Cannot combine AUTH argument with " - "credentials encoded in URL" - ) - - # Override the auth with the one from the URL only if we - # have no auth, or if we got an auth from a redirect URL - if auth is None or (history and auth_from_url is not None): - auth = auth_from_url - - if ( - auth is None - and self._default_auth - and ( - not self._base_url or self._base_url_origin == url.origin() - ) - ): - auth = self._default_auth - - # Try netrc if auth is still None and trust_env is enabled. - if auth is None and self._trust_env and url.host is not None: - auth = await self._loop.run_in_executor( - None, self._get_netrc_auth, url.host - ) - - # It would be confusing if we support explicit - # Authorization header with auth argument - if ( - headers is not None - and auth is not None - and hdrs.AUTHORIZATION in headers - ): - raise ValueError( - "Cannot combine AUTHORIZATION header " - "with AUTH argument or credentials " - "encoded in URL" - ) - - all_cookies = self._cookie_jar.filter_cookies(url) - - if cookies is not None: - tmp_cookie_jar = CookieJar( - quote_cookie=self._cookie_jar.quote_cookie - ) - tmp_cookie_jar.update_cookies(cookies) - req_cookies = tmp_cookie_jar.filter_cookies(url) - if req_cookies: - all_cookies.load(req_cookies) - - proxy_: Optional[URL] = None - if proxy is not None: - proxy_ = URL(proxy) - elif self._trust_env: - with suppress(LookupError): - proxy_, proxy_auth = await asyncio.to_thread( - get_env_proxy_for_url, url - ) - - req = self._request_class( - method, - url, - params=params, - headers=headers, - skip_auto_headers=skip_headers, - data=data, - cookies=all_cookies, - auth=auth, - version=version, - compress=compress, - chunked=chunked, - expect100=expect100, - loop=self._loop, - response_class=self._response_class, - proxy=proxy_, - proxy_auth=proxy_auth, - timer=timer, - session=self, - ssl=ssl if ssl is not None else True, - server_hostname=server_hostname, - proxy_headers=proxy_headers, - traces=traces, - trust_env=self.trust_env, - ) - - async def _connect_and_send_request( - req: ClientRequest, - ) -> ClientResponse: - # connection timeout - assert self._connector is not None - try: - conn = await self._connector.connect( - req, traces=traces, timeout=real_timeout - ) - except asyncio.TimeoutError as exc: - raise ConnectionTimeoutError( - f"Connection timeout to host {req.url}" - ) from exc - - assert conn.protocol is not None - conn.protocol.set_response_params( - timer=timer, - skip_payload=req.method in EMPTY_BODY_METHODS, - read_until_eof=read_until_eof, - auto_decompress=auto_decompress, - read_timeout=real_timeout.sock_read, - read_bufsize=read_bufsize, - timeout_ceil_threshold=self._connector._timeout_ceil_threshold, - max_line_size=max_line_size, - max_field_size=max_field_size, - ) - try: - resp = await req.send(conn) - try: - await resp.start(conn) - except BaseException: - resp.close() - raise - except BaseException: - conn.close() - raise - return resp - - # Apply middleware (if any) - per-request middleware overrides session middleware - effective_middlewares = ( - self._middlewares if middlewares is None else middlewares - ) - - if 
effective_middlewares: - handler = build_client_middlewares( - _connect_and_send_request, effective_middlewares - ) - else: - handler = _connect_and_send_request - - try: - resp = await handler(req) - # Client connector errors should not be retried - except ( - ConnectionTimeoutError, - ClientConnectorError, - ClientConnectorCertificateError, - ClientConnectorSSLError, - ): - raise - except (ClientOSError, ServerDisconnectedError): - if retry_persistent_connection: - retry_persistent_connection = False - continue - raise - except ClientError: - raise - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - raise ClientOSError(*exc.args) from exc - - # Update cookies from raw headers to preserve duplicates - if resp._raw_cookie_headers: - self._cookie_jar.update_cookies_from_headers( - resp._raw_cookie_headers, resp.url - ) - - # redirects - if resp.status in (301, 302, 303, 307, 308) and allow_redirects: - - for trace in traces: - await trace.send_request_redirect( - method, url.update_query(params), headers, resp - ) - - redirects += 1 - history.append(resp) - if max_redirects and redirects >= max_redirects: - if req._body is not None: - await req._body.close() - resp.close() - raise TooManyRedirects( - history[0].request_info, tuple(history) - ) - - # For 301 and 302, mimic IE, now changed in RFC - # https://github.com/kennethreitz/requests/pull/269 - if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( - resp.status in (301, 302) and resp.method == hdrs.METH_POST - ): - method = hdrs.METH_GET - data = None - if headers.get(hdrs.CONTENT_LENGTH): - headers.pop(hdrs.CONTENT_LENGTH) - else: - # For 307/308, always preserve the request body - # For 301/302 with non-POST methods, preserve the request body - # https://www.rfc-editor.org/rfc/rfc9110#section-15.4.3-3.1 - # Use the existing payload to avoid recreating it from a potentially consumed file - data = req._body - - r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( - hdrs.URI - ) - if r_url is None: - # see github.com/aio-libs/aiohttp/issues/2022 - break - else: - # reading from correct redirection - # response is forbidden - resp.release() - - try: - parsed_redirect_url = URL( - r_url, encoded=not self._requote_redirect_url - ) - except ValueError as e: - if req._body is not None: - await req._body.close() - resp.close() - raise InvalidUrlRedirectClientError( - r_url, - "Server attempted redirecting to a location that does not look like a URL", - ) from e - - scheme = parsed_redirect_url.scheme - if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: - if req._body is not None: - await req._body.close() - resp.close() - raise NonHttpUrlRedirectClientError(r_url) - elif not scheme: - parsed_redirect_url = url.join(parsed_redirect_url) - - try: - redirect_origin = parsed_redirect_url.origin() - except ValueError as origin_val_err: - if req._body is not None: - await req._body.close() - resp.close() - raise InvalidUrlRedirectClientError( - parsed_redirect_url, - "Invalid redirect URL origin", - ) from origin_val_err - - if url.origin() != redirect_origin: - auth = None - headers.pop(hdrs.AUTHORIZATION, None) - - url = parsed_redirect_url - params = {} - resp.release() - continue - - break - - if req._body is not None: - await req._body.close() - # check response status - if raise_for_status is None: - raise_for_status = self._raise_for_status - - if raise_for_status is None: - pass - elif callable(raise_for_status): - await raise_for_status(resp) - elif raise_for_status: - 
resp.raise_for_status() - - # register connection - if handle is not None: - if resp.connection is not None: - resp.connection.add_callback(handle.cancel) - else: - handle.cancel() - - resp._history = tuple(history) - - for trace in traces: - await trace.send_request_end( - method, url.update_query(params), headers, resp - ) - return resp - - except BaseException as e: - # cleanup timer - tm.close() - if handle: - handle.cancel() - handle = None - - for trace in traces: - await trace.send_request_exception( - method, url.update_query(params), headers, e - ) - raise - - def ws_connect( - self, - url: StrOrURL, - *, - method: str = hdrs.METH_GET, - protocols: Iterable[str] = (), - timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, - params: Query = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, - compress: int = 0, - max_msg_size: int = 4 * 1024 * 1024, - ) -> "_WSRequestContextManager": - """Initiate websocket connection.""" - return _WSRequestContextManager( - self._ws_connect( - url, - method=method, - protocols=protocols, - timeout=timeout, - receive_timeout=receive_timeout, - autoclose=autoclose, - autoping=autoping, - heartbeat=heartbeat, - auth=auth, - origin=origin, - params=params, - headers=headers, - proxy=proxy, - proxy_auth=proxy_auth, - ssl=ssl, - verify_ssl=verify_ssl, - fingerprint=fingerprint, - ssl_context=ssl_context, - server_hostname=server_hostname, - proxy_headers=proxy_headers, - compress=compress, - max_msg_size=max_msg_size, - ) - ) - - async def _ws_connect( - self, - url: StrOrURL, - *, - method: str = hdrs.METH_GET, - protocols: Iterable[str] = (), - timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, - params: Query = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, - compress: int = 0, - max_msg_size: int = 4 * 1024 * 1024, - ) -> ClientWebSocketResponse: - if timeout is not sentinel: - if isinstance(timeout, ClientWSTimeout): - ws_timeout = timeout - else: - warnings.warn( - "parameter 'timeout' of type 'float' " - "is deprecated, please use " - "'timeout=ClientWSTimeout(ws_close=...)'", - DeprecationWarning, - stacklevel=2, - ) - ws_timeout = ClientWSTimeout(ws_close=timeout) - else: - ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT - if receive_timeout is not None: - warnings.warn( - "float parameter 'receive_timeout' " - "is deprecated, please use parameter " - "'timeout=ClientWSTimeout(ws_receive=...)'", - DeprecationWarning, - stacklevel=2, - ) - ws_timeout = attr.evolve(ws_timeout, 
ws_receive=receive_timeout) - - if headers is None: - real_headers: CIMultiDict[str] = CIMultiDict() - else: - real_headers = CIMultiDict(headers) - - default_headers = { - hdrs.UPGRADE: "websocket", - hdrs.CONNECTION: "Upgrade", - hdrs.SEC_WEBSOCKET_VERSION: "13", - } - - for key, value in default_headers.items(): - real_headers.setdefault(key, value) - - sec_key = base64.b64encode(os.urandom(16)) - real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() - - if protocols: - real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) - if origin is not None: - real_headers[hdrs.ORIGIN] = origin - if compress: - extstr = ws_ext_gen(compress=compress) - real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr - - # For the sake of backward compatibility, if user passes in None, convert it to True - if ssl is None: - warnings.warn( - "ssl=None is deprecated, please use ssl=True", - DeprecationWarning, - stacklevel=2, - ) - ssl = True - ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - - # send request - resp = await self.request( - method, - url, - params=params, - headers=real_headers, - read_until_eof=False, - auth=auth, - proxy=proxy, - proxy_auth=proxy_auth, - ssl=ssl, - server_hostname=server_hostname, - proxy_headers=proxy_headers, - ) - - try: - # check handshake - if resp.status != 101: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid response status", - status=resp.status, - headers=resp.headers, - ) - - if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid upgrade header", - status=resp.status, - headers=resp.headers, - ) - - if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid connection header", - status=resp.status, - headers=resp.headers, - ) - - # key calculation - r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") - match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() - if r_key != match: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid challenge response", - status=resp.status, - headers=resp.headers, - ) - - # websocket protocol - protocol = None - if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: - resp_protocols = [ - proto.strip() - for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") - ] - - for proto in resp_protocols: - if proto in protocols: - protocol = proto - break - - # websocket compress - notakeover = False - if compress: - compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) - if compress_hdrs: - try: - compress, notakeover = ws_ext_parse(compress_hdrs) - except WSHandshakeError as exc: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message=exc.args[0], - status=resp.status, - headers=resp.headers, - ) from exc - else: - compress = 0 - notakeover = False - - conn = resp.connection - assert conn is not None - conn_proto = conn.protocol - assert conn_proto is not None - - # For WS connection the read_timeout must be either receive_timeout or greater - # None == no timeout, i.e. 
infinite timeout, so None is the max timeout possible - if ws_timeout.ws_receive is None: - # Reset regardless - conn_proto.read_timeout = None - elif conn_proto.read_timeout is not None: - conn_proto.read_timeout = max( - ws_timeout.ws_receive, conn_proto.read_timeout - ) - - transport = conn.transport - assert transport is not None - reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop) - conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) - writer = WebSocketWriter( - conn_proto, - transport, - use_mask=True, - compress=compress, - notakeover=notakeover, - ) - except BaseException: - resp.close() - raise - else: - return self._ws_response_class( - reader, - writer, - protocol, - resp, - ws_timeout, - autoclose, - autoping, - self._loop, - heartbeat=heartbeat, - compress=compress, - client_notakeover=notakeover, - ) - - def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": - """Add default headers and transform it to CIMultiDict""" - # Convert headers to MultiDict - result = CIMultiDict(self._default_headers) - if headers: - if not isinstance(headers, (MultiDictProxy, MultiDict)): - headers = CIMultiDict(headers) - added_names: Set[str] = set() - for key, value in headers.items(): - if key in added_names: - result.add(key, value) - else: - result[key] = value - added_names.add(key) - return result - - def _get_netrc_auth(self, host: str) -> Optional[BasicAuth]: - """ - Get auth from netrc for the given host. - - This method is designed to be called in an executor to avoid - blocking I/O in the event loop. - """ - netrc_obj = netrc_from_env() - try: - return basicauth_from_netrc(netrc_obj, host) - except LookupError: - return None - - if sys.version_info >= (3, 11) and TYPE_CHECKING: - - def get( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - def options( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - def head( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - def post( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - def put( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - def patch( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... - - def delete( - self, - url: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> "_RequestContextManager": ... 
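The overloads just above exist only for type checkers: on Python 3.11+ they advertise the _RequestOptions keys via Unpack, while the runtime definitions in the else branch that follows keep a plain **kwargs. A condensed sketch of that dual-definition pattern (the function and TypedDict here are invented for illustration):

import sys
from typing import TYPE_CHECKING, Any, TypedDict

class _Options(TypedDict, total=False):
    allow_redirects: bool
    timeout: float

if sys.version_info >= (3, 11) and TYPE_CHECKING:
    from typing import Unpack

    # Seen only by type checkers: callers get completion/validation of keys.
    def get(url: str, **kwargs: Unpack[_Options]) -> str: ...

else:

    # The single runtime implementation; the stub above never executes.
    def get(url: str, **kwargs: Any) -> str:
        return f"GET {url} {sorted(kwargs)}"

print(get("https://example.com", allow_redirects=False))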
- - else: - - def get( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP GET request.""" - return _RequestContextManager( - self._request( - hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - def options( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP OPTIONS request.""" - return _RequestContextManager( - self._request( - hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - def head( - self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP HEAD request.""" - return _RequestContextManager( - self._request( - hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - def post( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP POST request.""" - return _RequestContextManager( - self._request(hdrs.METH_POST, url, data=data, **kwargs) - ) - - def put( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PUT request.""" - return _RequestContextManager( - self._request(hdrs.METH_PUT, url, data=data, **kwargs) - ) - - def patch( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_PATCH, url, data=data, **kwargs) - ) - - def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": - """Perform HTTP DELETE request.""" - return _RequestContextManager( - self._request(hdrs.METH_DELETE, url, **kwargs) - ) - - async def close(self) -> None: - """Close underlying connector. - - Release all acquired resources. - """ - if not self.closed: - if self._connector is not None and self._connector_owner: - await self._connector.close() - self._connector = None - - @property - def closed(self) -> bool: - """Is client session closed. - - A readonly property. 
- """ - return self._connector is None or self._connector.closed - - @property - def connector(self) -> Optional[BaseConnector]: - """Connector instance used for the session.""" - return self._connector - - @property - def cookie_jar(self) -> AbstractCookieJar: - """The session cookies.""" - return self._cookie_jar - - @property - def version(self) -> Tuple[int, int]: - """The session HTTP protocol version.""" - return self._version - - @property - def requote_redirect_url(self) -> bool: - """Do URL requoting on redirection handling.""" - return self._requote_redirect_url - - @requote_redirect_url.setter - def requote_redirect_url(self, val: bool) -> None: - """Do URL requoting on redirection handling.""" - warnings.warn( - "session.requote_redirect_url modification is deprecated #2778", - DeprecationWarning, - stacklevel=2, - ) - self._requote_redirect_url = val - - @property - def loop(self) -> asyncio.AbstractEventLoop: - """Session's loop.""" - warnings.warn( - "client.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - @property - def timeout(self) -> ClientTimeout: - """Timeout for the session.""" - return self._timeout - - @property - def headers(self) -> "CIMultiDict[str]": - """The default headers of the client session.""" - return self._default_headers - - @property - def skip_auto_headers(self) -> FrozenSet[istr]: - """Headers for which autogeneration should be skipped""" - return self._skip_auto_headers - - @property - def auth(self) -> Optional[BasicAuth]: - """An object that represents HTTP Basic Authorization""" - return self._default_auth - - @property - def json_serialize(self) -> JSONEncoder: - """Json serializer callable""" - return self._json_serialize - - @property - def connector_owner(self) -> bool: - """Should connector be closed on session closing""" - return self._connector_owner - - @property - def raise_for_status( - self, - ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: - """Should `ClientResponse.raise_for_status()` be called for each response.""" - return self._raise_for_status - - @property - def auto_decompress(self) -> bool: - """Should the body response be automatically decompressed.""" - return self._auto_decompress - - @property - def trust_env(self) -> bool: - """ - Should proxies information from environment or netrc be trusted. - - Information is from HTTP_PROXY / HTTPS_PROXY environment variables - or ~/.netrc file if present. - """ - return self._trust_env - - @property - def trace_configs(self) -> List[TraceConfig]: - """A list of TraceConfig instances used for client tracing""" - return self._trace_configs - - def detach(self) -> None: - """Detach connector from session without closing the former. - - Session is switched to closed state anyway. 
- """ - self._connector = None - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "ClientSession": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - await self.close() - - -class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): - - __slots__ = ("_coro", "_resp") - - def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro - - def send(self, arg: None) -> "asyncio.Future[Any]": - return self._coro.send(arg) - - def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]": - return self._coro.throw(*args, **kwargs) - - def close(self) -> None: - return self._coro.close() - - def __await__(self) -> Generator[Any, None, _RetType]: - ret = self._coro.__await__() - return ret - - def __iter__(self) -> Generator[Any, None, _RetType]: - return self.__await__() - - async def __aenter__(self) -> _RetType: - self._resp: _RetType = await self._coro - return await self._resp.__aenter__() - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self._resp.__aexit__(exc_type, exc, tb) - - -_RequestContextManager = _BaseRequestContextManager[ClientResponse] -_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse] - - -class _SessionRequestContextManager: - - __slots__ = ("_coro", "_resp", "_session") - - def __init__( - self, - coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], - session: ClientSession, - ) -> None: - self._coro = coro - self._resp: Optional[ClientResponse] = None - self._session = session - - async def __aenter__(self) -> ClientResponse: - try: - self._resp = await self._coro - except BaseException: - await self._session.close() - raise - else: - return self._resp - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - assert self._resp is not None - self._resp.close() - await self._session.close() - - -if sys.version_info >= (3, 11) and TYPE_CHECKING: - - def request( - method: str, - url: StrOrURL, - *, - version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Unpack[_RequestOptions], - ) -> _SessionRequestContextManager: ... - -else: - - def request( - method: str, - url: StrOrURL, - *, - version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any, - ) -> _SessionRequestContextManager: - """Constructs and sends a request. - - Returns response object. 
- method - HTTP method - url - request url - params - (optional) Dictionary or bytes to be sent in the query - string of the new request - data - (optional) Dictionary, bytes, or file-like object to - send in the body of the request - json - (optional) Any json compatible python object - headers - (optional) Dictionary of HTTP Headers to send with - the request - cookies - (optional) Dict object to send with the request - auth - (optional) BasicAuth named tuple represent HTTP Basic Auth - auth - aiohttp.helpers.BasicAuth - allow_redirects - (optional) If set to False, do not follow - redirects - version - Request HTTP version. - compress - Set to True if request has to be compressed - with deflate encoding. - chunked - Set to chunk size for chunked transfer encoding. - expect100 - Expect 100-continue response from server. - connector - BaseConnector sub-class instance to support - connection pooling. - read_until_eof - Read response until eof if response - does not have Content-Length header. - loop - Optional event loop. - timeout - Optional ClientTimeout settings structure, 5min - total timeout by default. - Usage:: - >>> import aiohttp - >>> async with aiohttp.request('GET', 'http://python.org/') as resp: - ... print(resp) - ... data = await resp.read() - - """ - connector_owner = False - if connector is None: - connector_owner = True - connector = TCPConnector(loop=loop, force_close=True) - - session = ClientSession( - loop=loop, - cookies=kwargs.pop("cookies", None), - version=version, - timeout=kwargs.pop("timeout", sentinel), - connector=connector, - connector_owner=connector_owner, - ) - - return _SessionRequestContextManager( - session._request(method, url, **kwargs), - session, - ) diff --git a/venv/Lib/site-packages/aiohttp/client_exceptions.py b/venv/Lib/site-packages/aiohttp/client_exceptions.py deleted file mode 100644 index 1d298e9..0000000 --- a/venv/Lib/site-packages/aiohttp/client_exceptions.py +++ /dev/null @@ -1,421 +0,0 @@ -"""HTTP related errors.""" - -import asyncio -import warnings -from typing import TYPE_CHECKING, Optional, Tuple, Union - -from multidict import MultiMapping - -from .typedefs import StrOrURL - -if TYPE_CHECKING: - import ssl - - SSLContext = ssl.SSLContext -else: - try: - import ssl - - SSLContext = ssl.SSLContext - except ImportError: # pragma: no cover - ssl = SSLContext = None # type: ignore[assignment] - -if TYPE_CHECKING: - from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo - from .http_parser import RawResponseMessage -else: - RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None - -__all__ = ( - "ClientError", - "ClientConnectionError", - "ClientConnectionResetError", - "ClientOSError", - "ClientConnectorError", - "ClientProxyConnectionError", - "ClientSSLError", - "ClientConnectorDNSError", - "ClientConnectorSSLError", - "ClientConnectorCertificateError", - "ConnectionTimeoutError", - "SocketTimeoutError", - "ServerConnectionError", - "ServerTimeoutError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ClientResponseError", - "ClientHttpProxyError", - "WSServerHandshakeError", - "ContentTypeError", - "ClientPayloadError", - "InvalidURL", - "InvalidUrlClientError", - "RedirectClientError", - "NonHttpUrlClientError", - "InvalidUrlRedirectClientError", - "NonHttpUrlRedirectClientError", - "WSMessageTypeError", -) - - -class ClientError(Exception): - """Base class for client connection errors.""" - - -class ClientResponseError(ClientError): - """Base class for exceptions that 
occur after getting a response. - - request_info: An instance of RequestInfo. - history: A sequence of responses, if redirects occurred. - status: HTTP status code. - message: Error message. - headers: Response headers. - """ - - def __init__( - self, - request_info: RequestInfo, - history: Tuple[ClientResponse, ...], - *, - code: Optional[int] = None, - status: Optional[int] = None, - message: str = "", - headers: Optional[MultiMapping[str]] = None, - ) -> None: - self.request_info = request_info - if code is not None: - if status is not None: - raise ValueError( - "Both code and status arguments are provided; " - "code is deprecated, use status instead" - ) - warnings.warn( - "code argument is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - if status is not None: - self.status = status - elif code is not None: - self.status = code - else: - self.status = 0 - self.message = message - self.headers = headers - self.history = history - self.args = (request_info, history) - - def __str__(self) -> str: - return "{}, message={!r}, url={!r}".format( - self.status, - self.message, - str(self.request_info.real_url), - ) - - def __repr__(self) -> str: - args = f"{self.request_info!r}, {self.history!r}" - if self.status != 0: - args += f", status={self.status!r}" - if self.message != "": - args += f", message={self.message!r}" - if self.headers is not None: - args += f", headers={self.headers!r}" - return f"{type(self).__name__}({args})" - - @property - def code(self) -> int: - warnings.warn( - "code property is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - return self.status - - @code.setter - def code(self, value: int) -> None: - warnings.warn( - "code property is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - self.status = value - - -class ContentTypeError(ClientResponseError): - """ContentType found is not valid.""" - - -class WSServerHandshakeError(ClientResponseError): - """websocket server handshake error.""" - - -class ClientHttpProxyError(ClientResponseError): - """HTTP proxy error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - proxy responds with status other than ``200 OK`` - on ``CONNECT`` request. - """ - - -class TooManyRedirects(ClientResponseError): - """Client was redirected too many times.""" - - -class ClientConnectionError(ClientError): - """Base class for client socket errors.""" - - -class ClientConnectionResetError(ClientConnectionError, ConnectionResetError): - """ConnectionResetError""" - - -class ClientOSError(ClientConnectionError, OSError): - """OSError error.""" - - -class ClientConnectorError(ClientOSError): - """Client connector error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - a connection can not be established. 
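This hierarchy is what lets callers pick their granularity: ClientConnectorError covers the DNS, proxy and SSL connection failures defined just below, ClientResponseError covers bad HTTP statuses, and ClientError is the umbrella for everything. Standard aiohttp usage of those classes (the URL is illustrative):

import aiohttp

async def safe_get(url: str) -> "str | None":
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                resp.raise_for_status()
                return await resp.text()
    except aiohttp.ClientConnectorError as exc:
        print("could not connect:", exc)        # DNS / TCP / TLS level failure
    except aiohttp.ClientResponseError as exc:
        print("HTTP error:", exc.status)        # raised by raise_for_status()
    except aiohttp.ClientError as exc:
        print("other client error:", exc)       # umbrella base class
    return None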
- """ - - def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: - self._conn_key = connection_key - self._os_error = os_error - super().__init__(os_error.errno, os_error.strerror) - self.args = (connection_key, os_error) - - @property - def os_error(self) -> OSError: - return self._os_error - - @property - def host(self) -> str: - return self._conn_key.host - - @property - def port(self) -> Optional[int]: - return self._conn_key.port - - @property - def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]: - return self._conn_key.ssl - - def __str__(self) -> str: - return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( - self, "default" if self.ssl is True else self.ssl, self.strerror - ) - - # OSError.__reduce__ does too much black magick - __reduce__ = BaseException.__reduce__ - - -class ClientConnectorDNSError(ClientConnectorError): - """DNS resolution failed during client connection. - - Raised in :class:`aiohttp.connector.TCPConnector` if - DNS resolution fails. - """ - - -class ClientProxyConnectionError(ClientConnectorError): - """Proxy connection error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - connection to proxy can not be established. - """ - - -class UnixClientConnectorError(ClientConnectorError): - """Unix connector error. - - Raised in :py:class:`aiohttp.connector.UnixConnector` - if connection to unix socket can not be established. - """ - - def __init__( - self, path: str, connection_key: ConnectionKey, os_error: OSError - ) -> None: - self._path = path - super().__init__(connection_key, os_error) - - @property - def path(self) -> str: - return self._path - - def __str__(self) -> str: - return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( - self, "default" if self.ssl is True else self.ssl, self.strerror - ) - - -class ServerConnectionError(ClientConnectionError): - """Server connection errors.""" - - -class ServerDisconnectedError(ServerConnectionError): - """Server disconnected.""" - - def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: - if message is None: - message = "Server disconnected" - - self.args = (message,) - self.message = message - - -class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): - """Server timeout error.""" - - -class ConnectionTimeoutError(ServerTimeoutError): - """Connection timeout error.""" - - -class SocketTimeoutError(ServerTimeoutError): - """Socket timeout error.""" - - -class ServerFingerprintMismatch(ServerConnectionError): - """SSL certificate does not match expected fingerprint.""" - - def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: - self.expected = expected - self.got = got - self.host = host - self.port = port - self.args = (expected, got, host, port) - - def __repr__(self) -> str: - return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( - self.__class__.__name__, self.expected, self.got, self.host, self.port - ) - - -class ClientPayloadError(ClientError): - """Response payload error.""" - - -class InvalidURL(ClientError, ValueError): - """Invalid URL. - - URL used for fetching is malformed, e.g. it doesn't contains host - part. 
- """ - - # Derive from ValueError for backward compatibility - - def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: - # The type of url is not yarl.URL because the exception can be raised - # on URL(url) call - self._url = url - self._description = description - - if description: - super().__init__(url, description) - else: - super().__init__(url) - - @property - def url(self) -> StrOrURL: - return self._url - - @property - def description(self) -> "str | None": - return self._description - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self}>" - - def __str__(self) -> str: - if self._description: - return f"{self._url} - {self._description}" - return str(self._url) - - -class InvalidUrlClientError(InvalidURL): - """Invalid URL client error.""" - - -class RedirectClientError(ClientError): - """Client redirect error.""" - - -class NonHttpUrlClientError(ClientError): - """Non http URL client error.""" - - -class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError): - """Invalid URL redirect client error.""" - - -class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError): - """Non http URL redirect client error.""" - - -class ClientSSLError(ClientConnectorError): - """Base error for ssl.*Errors.""" - - -if ssl is not None: - cert_errors = (ssl.CertificateError,) - cert_errors_bases = ( - ClientSSLError, - ssl.CertificateError, - ) - - ssl_errors = (ssl.SSLError,) - ssl_error_bases = (ClientSSLError, ssl.SSLError) -else: # pragma: no cover - cert_errors = tuple() - cert_errors_bases = ( - ClientSSLError, - ValueError, - ) - - ssl_errors = tuple() - ssl_error_bases = (ClientSSLError,) - - -class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] - """Response ssl error.""" - - -class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] - """Response certificate error.""" - - def __init__( - self, connection_key: ConnectionKey, certificate_error: Exception - ) -> None: - self._conn_key = connection_key - self._certificate_error = certificate_error - self.args = (connection_key, certificate_error) - - @property - def certificate_error(self) -> Exception: - return self._certificate_error - - @property - def host(self) -> str: - return self._conn_key.host - - @property - def port(self) -> Optional[int]: - return self._conn_key.port - - @property - def ssl(self) -> bool: - return self._conn_key.is_ssl - - def __str__(self) -> str: - return ( - "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " - "[{0.certificate_error.__class__.__name__}: " - "{0.certificate_error.args}]".format(self) - ) - - -class WSMessageTypeError(TypeError): - """WebSocket message type is not valid.""" diff --git a/venv/Lib/site-packages/aiohttp/client_middleware_digest_auth.py b/venv/Lib/site-packages/aiohttp/client_middleware_digest_auth.py deleted file mode 100644 index 5aab5ac..0000000 --- a/venv/Lib/site-packages/aiohttp/client_middleware_digest_auth.py +++ /dev/null @@ -1,480 +0,0 @@ -""" -Digest authentication middleware for aiohttp client. - -This middleware implements HTTP Digest Authentication according to RFC 7616, -providing a more secure alternative to Basic Authentication. It supports all -standard hash algorithms including MD5, SHA, SHA-256, SHA-512 and their session -variants, as well as both 'auth' and 'auth-int' quality of protection (qop) options. 
-""" - -import hashlib -import os -import re -import sys -import time -from typing import ( - Callable, - Dict, - Final, - FrozenSet, - List, - Literal, - Tuple, - TypedDict, - Union, -) - -from yarl import URL - -from . import hdrs -from .client_exceptions import ClientError -from .client_middlewares import ClientHandlerType -from .client_reqrep import ClientRequest, ClientResponse -from .payload import Payload - - -class DigestAuthChallenge(TypedDict, total=False): - realm: str - nonce: str - qop: str - algorithm: str - opaque: str - domain: str - stale: str - - -DigestFunctions: Dict[str, Callable[[bytes], "hashlib._Hash"]] = { - "MD5": hashlib.md5, - "MD5-SESS": hashlib.md5, - "SHA": hashlib.sha1, - "SHA-SESS": hashlib.sha1, - "SHA256": hashlib.sha256, - "SHA256-SESS": hashlib.sha256, - "SHA-256": hashlib.sha256, - "SHA-256-SESS": hashlib.sha256, - "SHA512": hashlib.sha512, - "SHA512-SESS": hashlib.sha512, - "SHA-512": hashlib.sha512, - "SHA-512-SESS": hashlib.sha512, -} - - -# Compile the regex pattern once at module level for performance -_HEADER_PAIRS_PATTERN = re.compile( - r'(?:^|\s|,\s*)(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' - if sys.version_info < (3, 11) - else r'(?:^|\s|,\s*)((?>\w+))\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' - # +------------|--------|--|-|-|--|----|------|----|--||-----|-> Match valid start/sep - # +--------|--|-|-|--|----|------|----|--||-----|-> alphanumeric key (atomic - # | | | | | | | | || | group reduces backtracking) - # +--|-|-|--|----|------|----|--||-----|-> maybe whitespace - # | | | | | | | || | - # +-|-|--|----|------|----|--||-----|-> = (delimiter) - # +-|--|----|------|----|--||-----|-> maybe whitespace - # | | | | | || | - # +--|----|------|----|--||-----|-> group quoted or unquoted - # | | | | || | - # +----|------|----|--||-----|-> if quoted... - # +------|----|--||-----|-> anything but " or \ - # +----|--||-----|-> escaped characters allowed - # +--||-----|-> or can be empty string - # || | - # +|-----|-> if unquoted... - # +-----|-> anything but , or - # +-> at least one char req'd -) - - -# RFC 7616: Challenge parameters to extract -CHALLENGE_FIELDS: Final[ - Tuple[ - Literal["realm", "nonce", "qop", "algorithm", "opaque", "domain", "stale"], ... - ] -] = ( - "realm", - "nonce", - "qop", - "algorithm", - "opaque", - "domain", - "stale", -) - -# Supported digest authentication algorithms -# Use a tuple of sorted keys for predictable documentation and error messages -SUPPORTED_ALGORITHMS: Final[Tuple[str, ...]] = tuple(sorted(DigestFunctions.keys())) - -# RFC 7616: Fields that require quoting in the Digest auth header -# These fields must be enclosed in double quotes in the Authorization header. -# Algorithm, qop, and nc are never quoted per RFC specifications. -# This frozen set is used by the template-based header construction to -# automatically determine which fields need quotes. -QUOTED_AUTH_FIELDS: Final[FrozenSet[str]] = frozenset( - {"username", "realm", "nonce", "uri", "response", "opaque", "cnonce"} -) - - -def escape_quotes(value: str) -> str: - """Escape double quotes for HTTP header values.""" - return value.replace('"', '\\"') - - -def unescape_quotes(value: str) -> str: - """Unescape double quotes in HTTP header values.""" - return value.replace('\\"', '"') - - -def parse_header_pairs(header: str) -> Dict[str, str]: - """ - Parse key-value pairs from WWW-Authenticate or similar HTTP headers. 
- - This function handles the complex format of WWW-Authenticate header values, - supporting both quoted and unquoted values, proper handling of commas in - quoted values, and whitespace variations per RFC 7616. - - Examples of supported formats: - - key1="value1", key2=value2 - - key1 = "value1" , key2="value, with, commas" - - key1=value1,key2="value2" - - realm="example.com", nonce="12345", qop="auth" - - Args: - header: The header value string to parse - - Returns: - Dictionary mapping parameter names to their values - """ - return { - stripped_key: unescape_quotes(quoted_val) if quoted_val else unquoted_val - for key, quoted_val, unquoted_val in _HEADER_PAIRS_PATTERN.findall(header) - if (stripped_key := key.strip()) - } - - -class DigestAuthMiddleware: - """ - HTTP digest authentication middleware for aiohttp client. - - This middleware intercepts 401 Unauthorized responses containing a Digest - authentication challenge, calculates the appropriate digest credentials, - and automatically retries the request with the proper Authorization header. - - Features: - - Handles all aspects of Digest authentication handshake automatically - - Supports all standard hash algorithms: - - MD5, MD5-SESS - - SHA, SHA-SESS - - SHA256, SHA256-SESS, SHA-256, SHA-256-SESS - - SHA512, SHA512-SESS, SHA-512, SHA-512-SESS - - Supports 'auth' and 'auth-int' quality of protection modes - - Properly handles quoted strings and parameter parsing - - Includes replay attack protection with client nonce count tracking - - Supports preemptive authentication per RFC 7616 Section 3.6 - - Standards compliance: - - RFC 7616: HTTP Digest Access Authentication (primary reference) - - RFC 2617: HTTP Authentication (deprecated by RFC 7616) - - RFC 1945: Section 11.1 (username restrictions) - - Implementation notes: - The core digest calculation is inspired by the implementation in - https://github.com/requests/requests/blob/v2.18.4/requests/auth.py - with added support for modern digest auth features and error handling. - """ - - def __init__( - self, - login: str, - password: str, - preemptive: bool = True, - ) -> None: - if login is None: - raise ValueError("None is not allowed as login value") - - if password is None: - raise ValueError("None is not allowed as password value") - - if ":" in login: - raise ValueError('A ":" is not allowed in username (RFC 1945#section-11.1)') - - self._login_str: Final[str] = login - self._login_bytes: Final[bytes] = login.encode("utf-8") - self._password_bytes: Final[bytes] = password.encode("utf-8") - - self._last_nonce_bytes = b"" - self._nonce_count = 0 - self._challenge: DigestAuthChallenge = {} - self._preemptive: bool = preemptive - # Set of URLs defining the protection space - self._protection_space: List[str] = [] - - async def _encode( - self, method: str, url: URL, body: Union[Payload, Literal[b""]] - ) -> str: - """ - Build digest authorization header for the current challenge. - - Args: - method: The HTTP method (GET, POST, etc.) 
- url: The request URL - body: The request body (used for qop=auth-int) - - Returns: - A fully formatted Digest authorization header string - - Raises: - ClientError: If the challenge is missing required parameters or - contains unsupported values - - """ - challenge = self._challenge - if "realm" not in challenge: - raise ClientError( - "Malformed Digest auth challenge: Missing 'realm' parameter" - ) - - if "nonce" not in challenge: - raise ClientError( - "Malformed Digest auth challenge: Missing 'nonce' parameter" - ) - - # Empty realm values are allowed per RFC 7616 (SHOULD, not MUST, contain host name) - realm = challenge["realm"] - nonce = challenge["nonce"] - - # Empty nonce values are not allowed as they are security-critical for replay protection - if not nonce: - raise ClientError( - "Security issue: Digest auth challenge contains empty 'nonce' value" - ) - - qop_raw = challenge.get("qop", "") - # Preserve original algorithm case for response while using uppercase for processing - algorithm_original = challenge.get("algorithm", "MD5") - algorithm = algorithm_original.upper() - opaque = challenge.get("opaque", "") - - # Convert string values to bytes once - nonce_bytes = nonce.encode("utf-8") - realm_bytes = realm.encode("utf-8") - path = URL(url).path_qs - - # Process QoP - qop = "" - qop_bytes = b"" - if qop_raw: - valid_qops = {"auth", "auth-int"}.intersection( - {q.strip() for q in qop_raw.split(",") if q.strip()} - ) - if not valid_qops: - raise ClientError( - f"Digest auth error: Unsupported Quality of Protection (qop) value(s): {qop_raw}" - ) - - qop = "auth-int" if "auth-int" in valid_qops else "auth" - qop_bytes = qop.encode("utf-8") - - if algorithm not in DigestFunctions: - raise ClientError( - f"Digest auth error: Unsupported hash algorithm: {algorithm}. 
" - f"Supported algorithms: {', '.join(SUPPORTED_ALGORITHMS)}" - ) - hash_fn: Final = DigestFunctions[algorithm] - - def H(x: bytes) -> bytes: - """RFC 7616 Section 3: Hash function H(data) = hex(hash(data)).""" - return hash_fn(x).hexdigest().encode() - - def KD(s: bytes, d: bytes) -> bytes: - """RFC 7616 Section 3: KD(secret, data) = H(concat(secret, ":", data)).""" - return H(b":".join((s, d))) - - # Calculate A1 and A2 - A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes)) - A2 = f"{method.upper()}:{path}".encode() - if qop == "auth-int": - if isinstance(body, Payload): # will always be empty bytes unless Payload - entity_bytes = await body.as_bytes() # Get bytes from Payload - else: - entity_bytes = body - entity_hash = H(entity_bytes) - A2 = b":".join((A2, entity_hash)) - - HA1 = H(A1) - HA2 = H(A2) - - # Nonce count handling - if nonce_bytes == self._last_nonce_bytes: - self._nonce_count += 1 - else: - self._nonce_count = 1 - - self._last_nonce_bytes = nonce_bytes - ncvalue = f"{self._nonce_count:08x}" - ncvalue_bytes = ncvalue.encode("utf-8") - - # Generate client nonce - cnonce = hashlib.sha1( - b"".join( - [ - str(self._nonce_count).encode("utf-8"), - nonce_bytes, - time.ctime().encode("utf-8"), - os.urandom(8), - ] - ) - ).hexdigest()[:16] - cnonce_bytes = cnonce.encode("utf-8") - - # Special handling for session-based algorithms - if algorithm.upper().endswith("-SESS"): - HA1 = H(b":".join((HA1, nonce_bytes, cnonce_bytes))) - - # Calculate the response digest - if qop: - noncebit = b":".join( - (nonce_bytes, ncvalue_bytes, cnonce_bytes, qop_bytes, HA2) - ) - response_digest = KD(HA1, noncebit) - else: - response_digest = KD(HA1, b":".join((nonce_bytes, HA2))) - - # Define a dict mapping of header fields to their values - # Group fields into always-present, optional, and qop-dependent - header_fields = { - # Always present fields - "username": escape_quotes(self._login_str), - "realm": escape_quotes(realm), - "nonce": escape_quotes(nonce), - "uri": path, - "response": response_digest.decode(), - "algorithm": algorithm_original, - } - - # Optional fields - if opaque: - header_fields["opaque"] = escape_quotes(opaque) - - # QoP-dependent fields - if qop: - header_fields["qop"] = qop - header_fields["nc"] = ncvalue - header_fields["cnonce"] = cnonce - - # Build header using templates for each field type - pairs: List[str] = [] - for field, value in header_fields.items(): - if field in QUOTED_AUTH_FIELDS: - pairs.append(f'{field}="{value}"') - else: - pairs.append(f"{field}={value}") - - return f"Digest {', '.join(pairs)}" - - def _in_protection_space(self, url: URL) -> bool: - """ - Check if the given URL is within the current protection space. - - According to RFC 7616, a URI is in the protection space if any URI - in the protection space is a prefix of it (after both have been made absolute). - """ - request_str = str(url) - for space_str in self._protection_space: - # Check if request starts with space URL - if not request_str.startswith(space_str): - continue - # Exact match or space ends with / (proper directory prefix) - if len(request_str) == len(space_str) or space_str[-1] == "/": - return True - # Check next char is / to ensure proper path boundary - if request_str[len(space_str)] == "/": - return True - return False - - def _authenticate(self, response: ClientResponse) -> bool: - """ - Takes the given response and tries digest-auth, if needed. - - Returns true if the original request must be resent. 
- """ - if response.status != 401: - return False - - auth_header = response.headers.get("www-authenticate", "") - if not auth_header: - return False # No authentication header present - - method, sep, headers = auth_header.partition(" ") - if not sep: - # No space found in www-authenticate header - return False # Malformed auth header, missing scheme separator - - if method.lower() != "digest": - # Not a digest auth challenge (could be Basic, Bearer, etc.) - return False - - if not headers: - # We have a digest scheme but no parameters - return False # Malformed digest header, missing parameters - - # We have a digest auth header with content - if not (header_pairs := parse_header_pairs(headers)): - # Failed to parse any key-value pairs - return False # Malformed digest header, no valid parameters - - # Extract challenge parameters - self._challenge = {} - for field in CHALLENGE_FIELDS: - if value := header_pairs.get(field): - self._challenge[field] = value - - # Update protection space based on domain parameter or default to origin - origin = response.url.origin() - - if domain := self._challenge.get("domain"): - # Parse space-separated list of URIs - self._protection_space = [] - for uri in domain.split(): - # Remove quotes if present - uri = uri.strip('"') - if uri.startswith("/"): - # Path-absolute, relative to origin - self._protection_space.append(str(origin.join(URL(uri)))) - else: - # Absolute URI - self._protection_space.append(str(URL(uri))) - else: - # No domain specified, protection space is entire origin - self._protection_space = [str(origin)] - - # Return True only if we found at least one challenge parameter - return bool(self._challenge) - - async def __call__( - self, request: ClientRequest, handler: ClientHandlerType - ) -> ClientResponse: - """Run the digest auth middleware.""" - response = None - for retry_count in range(2): - # Apply authorization header if: - # 1. This is a retry after 401 (retry_count > 0), OR - # 2. 
Preemptive auth is enabled AND we have a challenge AND the URL is in protection space - if retry_count > 0 or ( - self._preemptive - and self._challenge - and self._in_protection_space(request.url) - ): - request.headers[hdrs.AUTHORIZATION] = await self._encode( - request.method, request.url, request.body - ) - - # Send the request - response = await handler(request) - - # Check if we need to authenticate - if not self._authenticate(response): - break - - # At this point, response is guaranteed to be defined - assert response is not None - return response diff --git a/venv/Lib/site-packages/aiohttp/client_middlewares.py b/venv/Lib/site-packages/aiohttp/client_middlewares.py deleted file mode 100644 index 3ca2cb2..0000000 --- a/venv/Lib/site-packages/aiohttp/client_middlewares.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Client middleware support.""" - -from collections.abc import Awaitable, Callable, Sequence - -from .client_reqrep import ClientRequest, ClientResponse - -__all__ = ("ClientMiddlewareType", "ClientHandlerType", "build_client_middlewares") - -# Type alias for client request handlers - functions that process requests and return responses -ClientHandlerType = Callable[[ClientRequest], Awaitable[ClientResponse]] - -# Type for client middleware - similar to server but uses ClientRequest/ClientResponse -ClientMiddlewareType = Callable[ - [ClientRequest, ClientHandlerType], Awaitable[ClientResponse] -] - - -def build_client_middlewares( - handler: ClientHandlerType, - middlewares: Sequence[ClientMiddlewareType], -) -> ClientHandlerType: - """ - Apply middlewares to request handler. - - The middlewares are applied in reverse order, so the first middleware - in the list wraps all subsequent middlewares and the handler. - - This implementation avoids using partial/update_wrapper to minimize overhead - and doesn't cache to avoid holding references to stateful middleware. 
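[Editor's note, not part of the patch] The two modules removed here, client_middleware_digest_auth.py and client_middlewares.py, implement aiohttp's client-side middleware chain and its bundled digest-auth middleware. A hedged usage sketch, assuming aiohttp 3.12+ where ClientSession accepts a middlewares= sequence and exports DigestAuthMiddleware at package level; the URL and credentials are placeholders:

    # Illustrative only: assumes aiohttp 3.12+ client middleware support.
    import asyncio
    import aiohttp

    async def main() -> None:
        digest = aiohttp.DigestAuthMiddleware(login="user", password="secret")
        async with aiohttp.ClientSession(middlewares=(digest,)) as session:
            # On a 401 carrying a Digest challenge, the middleware computes the
            # Authorization header and retries the request once, as described above.
            async with session.get("https://example.org/protected") as resp:
                print(resp.status)

    if __name__ == "__main__":
        asyncio.run(main())
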
- """ - # Optimize for single middleware case - if len(middlewares) == 1: - middleware = middlewares[0] - - async def single_middleware_handler(req: ClientRequest) -> ClientResponse: - return await middleware(req, handler) - - return single_middleware_handler - - # Build the chain for multiple middlewares - current_handler = handler - - for middleware in reversed(middlewares): - # Create a new closure that captures the current state - def make_wrapper( - mw: ClientMiddlewareType, next_h: ClientHandlerType - ) -> ClientHandlerType: - async def wrapped(req: ClientRequest) -> ClientResponse: - return await mw(req, next_h) - - return wrapped - - current_handler = make_wrapper(middleware, current_handler) - - return current_handler diff --git a/venv/Lib/site-packages/aiohttp/client_proto.py b/venv/Lib/site-packages/aiohttp/client_proto.py deleted file mode 100644 index e2fb1ce..0000000 --- a/venv/Lib/site-packages/aiohttp/client_proto.py +++ /dev/null @@ -1,359 +0,0 @@ -import asyncio -from contextlib import suppress -from typing import Any, Optional, Tuple, Union - -from .base_protocol import BaseProtocol -from .client_exceptions import ( - ClientConnectionError, - ClientOSError, - ClientPayloadError, - ServerDisconnectedError, - SocketTimeoutError, -) -from .helpers import ( - _EXC_SENTINEL, - EMPTY_BODY_STATUS_CODES, - BaseTimerContext, - set_exception, - set_result, -) -from .http import HttpResponseParser, RawResponseMessage -from .http_exceptions import HttpProcessingError -from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader - - -class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): - """Helper class to adapt between Protocol and StreamReader.""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - BaseProtocol.__init__(self, loop=loop) - DataQueue.__init__(self, loop) - - self._should_close = False - - self._payload: Optional[StreamReader] = None - self._skip_payload = False - self._payload_parser = None - - self._timer = None - - self._tail = b"" - self._upgraded = False - self._parser: Optional[HttpResponseParser] = None - - self._read_timeout: Optional[float] = None - self._read_timeout_handle: Optional[asyncio.TimerHandle] = None - - self._timeout_ceil_threshold: Optional[float] = 5 - - self._closed: Union[None, asyncio.Future[None]] = None - self._connection_lost_called = False - - @property - def closed(self) -> Union[None, asyncio.Future[None]]: - """Future that is set when the connection is closed. - - This property returns a Future that will be completed when the connection - is closed. The Future is created lazily on first access to avoid creating - futures that will never be awaited. 
- - Returns: - - A Future[None] if the connection is still open or was closed after - this property was accessed - - None if connection_lost() was already called before this property - was ever accessed (indicating no one is waiting for the closure) - """ - if self._closed is None and not self._connection_lost_called: - self._closed = self._loop.create_future() - return self._closed - - @property - def upgraded(self) -> bool: - return self._upgraded - - @property - def should_close(self) -> bool: - return bool( - self._should_close - or (self._payload is not None and not self._payload.is_eof()) - or self._upgraded - or self._exception is not None - or self._payload_parser is not None - or self._buffer - or self._tail - ) - - def force_close(self) -> None: - self._should_close = True - - def close(self) -> None: - self._exception = None # Break cyclic references - transport = self.transport - if transport is not None: - transport.close() - self.transport = None - self._payload = None - self._drop_timeout() - - def abort(self) -> None: - self._exception = None # Break cyclic references - transport = self.transport - if transport is not None: - transport.abort() - self.transport = None - self._payload = None - self._drop_timeout() - - def is_connected(self) -> bool: - return self.transport is not None and not self.transport.is_closing() - - def connection_lost(self, exc: Optional[BaseException]) -> None: - self._connection_lost_called = True - self._drop_timeout() - - original_connection_error = exc - reraised_exc = original_connection_error - - connection_closed_cleanly = original_connection_error is None - - if self._closed is not None: - # If someone is waiting for the closed future, - # we should set it to None or an exception. If - # self._closed is None, it means that - # connection_lost() was called already - # or nobody is waiting for it. - if connection_closed_cleanly: - set_result(self._closed, None) - else: - assert original_connection_error is not None - set_exception( - self._closed, - ClientConnectionError( - f"Connection lost: {original_connection_error !s}", - ), - original_connection_error, - ) - - if self._payload_parser is not None: - with suppress(Exception): # FIXME: log this somehow? - self._payload_parser.feed_eof() - - uncompleted = None - if self._parser is not None: - try: - uncompleted = self._parser.feed_eof() - except Exception as underlying_exc: - if self._payload is not None: - client_payload_exc_msg = ( - f"Response payload is not completed: {underlying_exc !r}" - ) - if not connection_closed_cleanly: - client_payload_exc_msg = ( - f"{client_payload_exc_msg !s}. 
" - f"{original_connection_error !r}" - ) - set_exception( - self._payload, - ClientPayloadError(client_payload_exc_msg), - underlying_exc, - ) - - if not self.is_eof(): - if isinstance(original_connection_error, OSError): - reraised_exc = ClientOSError(*original_connection_error.args) - if connection_closed_cleanly: - reraised_exc = ServerDisconnectedError(uncompleted) - # assigns self._should_close to True as side effect, - # we do it anyway below - underlying_non_eof_exc = ( - _EXC_SENTINEL - if connection_closed_cleanly - else original_connection_error - ) - assert underlying_non_eof_exc is not None - assert reraised_exc is not None - self.set_exception(reraised_exc, underlying_non_eof_exc) - - self._should_close = True - self._parser = None - self._payload = None - self._payload_parser = None - self._reading_paused = False - - super().connection_lost(reraised_exc) - - def eof_received(self) -> None: - # should call parser.feed_eof() most likely - self._drop_timeout() - - def pause_reading(self) -> None: - super().pause_reading() - self._drop_timeout() - - def resume_reading(self) -> None: - super().resume_reading() - self._reschedule_timeout() - - def set_exception( - self, - exc: BaseException, - exc_cause: BaseException = _EXC_SENTINEL, - ) -> None: - self._should_close = True - self._drop_timeout() - super().set_exception(exc, exc_cause) - - def set_parser(self, parser: Any, payload: Any) -> None: - # TODO: actual types are: - # parser: WebSocketReader - # payload: WebSocketDataQueue - # but they are not generi enough - # Need an ABC for both types - self._payload = payload - self._payload_parser = parser - - self._drop_timeout() - - if self._tail: - data, self._tail = self._tail, b"" - self.data_received(data) - - def set_response_params( - self, - *, - timer: Optional[BaseTimerContext] = None, - skip_payload: bool = False, - read_until_eof: bool = False, - auto_decompress: bool = True, - read_timeout: Optional[float] = None, - read_bufsize: int = 2**16, - timeout_ceil_threshold: float = 5, - max_line_size: int = 8190, - max_field_size: int = 8190, - ) -> None: - self._skip_payload = skip_payload - - self._read_timeout = read_timeout - - self._timeout_ceil_threshold = timeout_ceil_threshold - - self._parser = HttpResponseParser( - self, - self._loop, - read_bufsize, - timer=timer, - payload_exception=ClientPayloadError, - response_with_body=not skip_payload, - read_until_eof=read_until_eof, - auto_decompress=auto_decompress, - max_line_size=max_line_size, - max_field_size=max_field_size, - ) - - if self._tail: - data, self._tail = self._tail, b"" - self.data_received(data) - - def _drop_timeout(self) -> None: - if self._read_timeout_handle is not None: - self._read_timeout_handle.cancel() - self._read_timeout_handle = None - - def _reschedule_timeout(self) -> None: - timeout = self._read_timeout - if self._read_timeout_handle is not None: - self._read_timeout_handle.cancel() - - if timeout: - self._read_timeout_handle = self._loop.call_later( - timeout, self._on_read_timeout - ) - else: - self._read_timeout_handle = None - - def start_timeout(self) -> None: - self._reschedule_timeout() - - @property - def read_timeout(self) -> Optional[float]: - return self._read_timeout - - @read_timeout.setter - def read_timeout(self, read_timeout: Optional[float]) -> None: - self._read_timeout = read_timeout - - def _on_read_timeout(self) -> None: - exc = SocketTimeoutError("Timeout on reading data from socket") - self.set_exception(exc) - if self._payload is not None: - 
set_exception(self._payload, exc) - - def data_received(self, data: bytes) -> None: - self._reschedule_timeout() - - if not data: - return - - # custom payload parser - currently always WebSocketReader - if self._payload_parser is not None: - eof, tail = self._payload_parser.feed_data(data) - if eof: - self._payload = None - self._payload_parser = None - - if tail: - self.data_received(tail) - return - - if self._upgraded or self._parser is None: - # i.e. websocket connection, websocket parser is not set yet - self._tail += data - return - - # parse http messages - try: - messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as underlying_exc: - if self.transport is not None: - # connection.release() could be called BEFORE - # data_received(), the transport is already - # closed in this case - self.transport.close() - # should_close is True after the call - if isinstance(underlying_exc, HttpProcessingError): - exc = HttpProcessingError( - code=underlying_exc.code, - message=underlying_exc.message, - headers=underlying_exc.headers, - ) - else: - exc = HttpProcessingError() - self.set_exception(exc, underlying_exc) - return - - self._upgraded = upgraded - - payload: Optional[StreamReader] = None - for message, payload in messages: - if message.should_close: - self._should_close = True - - self._payload = payload - - if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: - self.feed_data((message, EMPTY_PAYLOAD), 0) - else: - self.feed_data((message, payload), 0) - - if payload is not None: - # new message(s) was processed - # register timeout handler unsubscribing - # either on end-of-stream or immediately for - # EMPTY_PAYLOAD - if payload is not EMPTY_PAYLOAD: - payload.on_eof(self._drop_timeout) - else: - self._drop_timeout() - - if upgraded and tail: - self.data_received(tail) diff --git a/venv/Lib/site-packages/aiohttp/client_reqrep.py b/venv/Lib/site-packages/aiohttp/client_reqrep.py deleted file mode 100644 index a9e0795..0000000 --- a/venv/Lib/site-packages/aiohttp/client_reqrep.py +++ /dev/null @@ -1,1536 +0,0 @@ -import asyncio -import codecs -import contextlib -import functools -import io -import re -import sys -import traceback -import warnings -from collections.abc import Mapping -from hashlib import md5, sha1, sha256 -from http.cookies import Morsel, SimpleCookie -from types import MappingProxyType, TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - Literal, - NamedTuple, - Optional, - Tuple, - Type, - Union, -) - -import attr -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL - -from . 
import hdrs, helpers, http, multipart, payload -from ._cookie_helpers import ( - parse_cookie_header, - parse_set_cookie_headers, - preserve_morsel_with_coded_value, -) -from .abc import AbstractStreamWriter -from .client_exceptions import ( - ClientConnectionError, - ClientOSError, - ClientResponseError, - ContentTypeError, - InvalidURL, - ServerFingerprintMismatch, -) -from .compression_utils import HAS_BROTLI, HAS_ZSTD -from .formdata import FormData -from .helpers import ( - _SENTINEL, - BaseTimerContext, - BasicAuth, - HeadersMixin, - TimerNoop, - noop, - reify, - sentinel, - set_exception, - set_result, -) -from .http import ( - SERVER_SOFTWARE, - HttpVersion, - HttpVersion10, - HttpVersion11, - StreamWriter, -) -from .streams import StreamReader -from .typedefs import ( - DEFAULT_JSON_DECODER, - JSONDecoder, - LooseCookies, - LooseHeaders, - Query, - RawHeaders, -) - -if TYPE_CHECKING: - import ssl - from ssl import SSLContext -else: - try: - import ssl - from ssl import SSLContext - except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - - -__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") - - -if TYPE_CHECKING: - from .client import ClientSession - from .connector import Connection - from .tracing import Trace - - -_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") -_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") - - -def _gen_default_accept_encoding() -> str: - encodings = [ - "gzip", - "deflate", - ] - if HAS_BROTLI: - encodings.append("br") - if HAS_ZSTD: - encodings.append("zstd") - return ", ".join(encodings) - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ContentDisposition: - type: Optional[str] - parameters: "MappingProxyType[str, str]" - filename: Optional[str] - - -class _RequestInfo(NamedTuple): - url: URL - method: str - headers: "CIMultiDictProxy[str]" - real_url: URL - - -class RequestInfo(_RequestInfo): - - def __new__( - cls, - url: URL, - method: str, - headers: "CIMultiDictProxy[str]", - real_url: Union[URL, _SENTINEL] = sentinel, - ) -> "RequestInfo": - """Create a new RequestInfo instance. - - For backwards compatibility, the real_url parameter is optional. - """ - return tuple.__new__( - cls, (url, method, headers, url if real_url is sentinel else real_url) - ) - - -class Fingerprint: - HASHFUNC_BY_DIGESTLEN = { - 16: md5, - 20: sha1, - 32: sha256, - } - - def __init__(self, fingerprint: bytes) -> None: - digestlen = len(fingerprint) - hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) - if not hashfunc: - raise ValueError("fingerprint has invalid length") - elif hashfunc is md5 or hashfunc is sha1: - raise ValueError("md5 and sha1 are insecure and not supported. 
Use sha256.") - self._hashfunc = hashfunc - self._fingerprint = fingerprint - - @property - def fingerprint(self) -> bytes: - return self._fingerprint - - def check(self, transport: asyncio.Transport) -> None: - if not transport.get_extra_info("sslcontext"): - return - sslobj = transport.get_extra_info("ssl_object") - cert = sslobj.getpeercert(binary_form=True) - got = self._hashfunc(cert).digest() - if got != self._fingerprint: - host, port, *_ = transport.get_extra_info("peername") - raise ServerFingerprintMismatch(self._fingerprint, got, host, port) - - -if ssl is not None: - SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) -else: # pragma: no cover - SSL_ALLOWED_TYPES = (bool, type(None)) - - -def _merge_ssl_params( - ssl: Union["SSLContext", bool, Fingerprint], - verify_ssl: Optional[bool], - ssl_context: Optional["SSLContext"], - fingerprint: Optional[bytes], -) -> Union["SSLContext", bool, Fingerprint]: - if ssl is None: - ssl = True # Double check for backwards compatibility - if verify_ssl is not None and not verify_ssl: - warnings.warn( - "verify_ssl is deprecated, use ssl=False instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not True: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = False - if ssl_context is not None: - warnings.warn( - "ssl_context is deprecated, use ssl=context instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not True: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = ssl_context - if fingerprint is not None: - warnings.warn( - "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not True: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = Fingerprint(fingerprint) - if not isinstance(ssl, SSL_ALLOWED_TYPES): - raise TypeError( - "ssl should be SSLContext, bool, Fingerprint or None, " - "got {!r} instead.".format(ssl) - ) - return ssl - - -_SSL_SCHEMES = frozenset(("https", "wss")) - - -# ConnectionKey is a NamedTuple because it is used as a key in a dict -# and a set in the connector. Since a NamedTuple is a tuple it uses -# the fast native tuple __hash__ and __eq__ implementation in CPython. -class ConnectionKey(NamedTuple): - # the key should contain an information about used proxy / TLS - # to prevent reusing wrong connections from a pool - host: str - port: Optional[int] - is_ssl: bool - ssl: Union[SSLContext, bool, Fingerprint] - proxy: Optional[URL] - proxy_auth: Optional[BasicAuth] - proxy_headers_hash: Optional[int] # hash(CIMultiDict) - - -def _is_expected_content_type( - response_content_type: str, expected_content_type: str -) -> bool: - if expected_content_type == "application/json": - return json_re.match(response_content_type) is not None - return expected_content_type in response_content_type - - -def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) -> None: - """Warn if the payload is not closed. - - Callers must check that the body is a Payload before calling this method. - - Args: - payload: The payload to check - stacklevel: Stack level for the warning (default 2 for direct callers) - """ - if not payload.autoclose and not payload.consumed: - warnings.warn( - "The previous request body contains unclosed resources. 
" - "Use await request.update_body() instead of setting request.body " - "directly to properly close resources and avoid leaks.", - ResourceWarning, - stacklevel=stacklevel, - ) - - -class ClientResponse(HeadersMixin): - - # Some of these attributes are None when created, - # but will be set by the start() method. - # As the end user will likely never see the None values, we cheat the types below. - # from the Status-Line of the response - version: Optional[HttpVersion] = None # HTTP-Version - status: int = None # type: ignore[assignment] # Status-Code - reason: Optional[str] = None # Reason-Phrase - - content: StreamReader = None # type: ignore[assignment] # Payload stream - _body: Optional[bytes] = None - _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] - _history: Tuple["ClientResponse", ...] = () - _raw_headers: RawHeaders = None # type: ignore[assignment] - - _connection: Optional["Connection"] = None # current connection - _cookies: Optional[SimpleCookie] = None - _raw_cookie_headers: Optional[Tuple[str, ...]] = None - _continue: Optional["asyncio.Future[bool]"] = None - _source_traceback: Optional[traceback.StackSummary] = None - _session: Optional["ClientSession"] = None - # set up by ClientRequest after ClientResponse object creation - # post-init stage allows to not change ctor signature - _closed = True # to allow __del__ for non-initialized properly response - _released = False - _in_context = False - - _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" - - __writer: Optional["asyncio.Task[None]"] = None - - def __init__( - self, - method: str, - url: URL, - *, - writer: "Optional[asyncio.Task[None]]", - continue100: Optional["asyncio.Future[bool]"], - timer: BaseTimerContext, - request_info: RequestInfo, - traces: List["Trace"], - loop: asyncio.AbstractEventLoop, - session: "ClientSession", - ) -> None: - # URL forbids subclasses, so a simple type check is enough. - assert type(url) is URL - - self.method = method - - self._real_url = url - self._url = url.with_fragment(None) if url.raw_fragment else url - if writer is not None: - self._writer = writer - if continue100 is not None: - self._continue = continue100 - self._request_info = request_info - self._timer = timer if timer is not None else TimerNoop() - self._cache: Dict[str, Any] = {} - self._traces = traces - self._loop = loop - # Save reference to _resolve_charset, so that get_encoding() will still - # work after the response has finished reading the body. - # TODO: Fix session=None in tests (see ClientRequest.__init__). - if session is not None: - # store a reference to session #1985 - self._session = session - self._resolve_charset = session._resolve_charset - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - def __reset_writer(self, _: object = None) -> None: - self.__writer = None - - @property - def _writer(self) -> Optional["asyncio.Task[None]"]: - """The writer task for streaming data. - - _writer is only provided for backwards compatibility - for subclasses that may need to access it. - """ - return self.__writer - - @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: - """Set the writer task for streaming data.""" - if self.__writer is not None: - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = writer - if writer is None: - return - if writer.done(): - # The writer is already done, so we can clear it immediately. 
- self.__writer = None - else: - writer.add_done_callback(self.__reset_writer) - - @property - def cookies(self) -> SimpleCookie: - if self._cookies is None: - if self._raw_cookie_headers is not None: - # Parse cookies for response.cookies (SimpleCookie for backward compatibility) - cookies = SimpleCookie() - # Use parse_set_cookie_headers for more lenient parsing that handles - # malformed cookies better than SimpleCookie.load - cookies.update(parse_set_cookie_headers(self._raw_cookie_headers)) - self._cookies = cookies - else: - self._cookies = SimpleCookie() - return self._cookies - - @cookies.setter - def cookies(self, cookies: SimpleCookie) -> None: - self._cookies = cookies - # Generate raw cookie headers from the SimpleCookie - if cookies: - self._raw_cookie_headers = tuple( - morsel.OutputString() for morsel in cookies.values() - ) - else: - self._raw_cookie_headers = None - - @reify - def url(self) -> URL: - return self._url - - @reify - def url_obj(self) -> URL: - warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) - return self._url - - @reify - def real_url(self) -> URL: - return self._real_url - - @reify - def host(self) -> str: - assert self._url.host is not None - return self._url.host - - @reify - def headers(self) -> "CIMultiDictProxy[str]": - return self._headers - - @reify - def raw_headers(self) -> RawHeaders: - return self._raw_headers - - @reify - def request_info(self) -> RequestInfo: - return self._request_info - - @reify - def content_disposition(self) -> Optional[ContentDisposition]: - raw = self._headers.get(hdrs.CONTENT_DISPOSITION) - if raw is None: - return None - disposition_type, params_dct = multipart.parse_content_disposition(raw) - params = MappingProxyType(params_dct) - filename = multipart.content_disposition_filename(params) - return ContentDisposition(disposition_type, params, filename) - - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: - return - - if self._connection is not None: - self._connection.release() - self._cleanup_writer() - - if self._loop.get_debug(): - kwargs = {"source": self} - _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) - context = {"client_response": self, "message": "Unclosed response"} - if self._source_traceback: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __repr__(self) -> str: - out = io.StringIO() - ascii_encodable_url = str(self.url) - if self.reason: - ascii_encodable_reason = self.reason.encode( - "ascii", "backslashreplace" - ).decode("ascii") - else: - ascii_encodable_reason = "None" - print( - "".format( - ascii_encodable_url, self.status, ascii_encodable_reason - ), - file=out, - ) - print(self.headers, file=out) - return out.getvalue() - - @property - def connection(self) -> Optional["Connection"]: - return self._connection - - @reify - def history(self) -> Tuple["ClientResponse", ...]: - """A sequence of of responses, if redirects occurred.""" - return self._history - - @reify - def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": - links_str = ", ".join(self.headers.getall("link", [])) - - if not links_str: - return MultiDictProxy(MultiDict()) - - links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() - - for val in re.split(r",(?=\s*<)", links_str): - match = re.match(r"\s*<(.*)>(.*)", val) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - url, params_str = match.groups() - params = 
params_str.split(";")[1:] - - link: MultiDict[Union[str, URL]] = MultiDict() - - for param in params: - match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - key, _, value, _ = match.groups() - - link.add(key, value) - - key = link.get("rel", url) - - link.add("url", self.url.join(URL(url))) - - links.add(str(key), MultiDictProxy(link)) - - return MultiDictProxy(links) - - async def start(self, connection: "Connection") -> "ClientResponse": - """Start response processing.""" - self._closed = False - self._protocol = connection.protocol - self._connection = connection - - with self._timer: - while True: - # read response - try: - protocol = self._protocol - message, payload = await protocol.read() # type: ignore[union-attr] - except http.HttpProcessingError as exc: - raise ClientResponseError( - self.request_info, - self.history, - status=exc.code, - message=exc.message, - headers=exc.headers, - ) from exc - - if message.code < 100 or message.code > 199 or message.code == 101: - break - - if self._continue is not None: - set_result(self._continue, True) - self._continue = None - - # payload eof handler - payload.on_eof(self._response_eof) - - # response status - self.version = message.version - self.status = message.code - self.reason = message.reason - - # headers - self._headers = message.headers # type is CIMultiDictProxy - self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] - - # payload - self.content = payload - - # cookies - if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): - # Store raw cookie headers for CookieJar - self._raw_cookie_headers = tuple(cookie_hdrs) - return self - - def _response_eof(self) -> None: - if self._closed: - return - - # protocol could be None because connection could be detached - protocol = self._connection and self._connection.protocol - if protocol is not None and protocol.upgraded: - return - - self._closed = True - self._cleanup_writer() - self._release_connection() - - @property - def closed(self) -> bool: - return self._closed - - def close(self) -> None: - if not self._released: - self._notify_content() - - self._closed = True - if self._loop is None or self._loop.is_closed(): - return - - self._cleanup_writer() - if self._connection is not None: - self._connection.close() - self._connection = None - - def release(self) -> Any: - if not self._released: - self._notify_content() - - self._closed = True - - self._cleanup_writer() - self._release_connection() - return noop() - - @property - def ok(self) -> bool: - """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. - - This is **not** a check for ``200 OK`` but a check that the response - status is under 400. - """ - return 400 > self.status - - def raise_for_status(self) -> None: - if not self.ok: - # reason should always be not None for a started response - assert self.reason is not None - - # If we're in a context we can rely on __aexit__() to release as the - # exception propagates. 
- if not self._in_context: - self.release() - - raise ClientResponseError( - self.request_info, - self.history, - status=self.status, - message=self.reason, - headers=self.headers, - ) - - def _release_connection(self) -> None: - if self._connection is not None: - if self.__writer is None: - self._connection.release() - self._connection = None - else: - self.__writer.add_done_callback(lambda f: self._release_connection()) - - async def _wait_released(self) -> None: - if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - self._release_connection() - - def _cleanup_writer(self) -> None: - if self.__writer is not None: - self.__writer.cancel() - self._session = None - - def _notify_content(self) -> None: - content = self.content - if content and content.exception() is None: - set_exception(content, _CONNECTION_CLOSED_EXCEPTION) - self._released = True - - async def wait_for_close(self) -> None: - if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - self.release() - - async def read(self) -> bytes: - """Read response payload.""" - if self._body is None: - try: - self._body = await self.content.read() - for trace in self._traces: - await trace.send_response_chunk_received( - self.method, self.url, self._body - ) - except BaseException: - self.close() - raise - elif self._released: # Response explicitly released - raise ClientConnectionError("Connection closed") - - protocol = self._connection and self._connection.protocol - if protocol is None or not protocol.upgraded: - await self._wait_released() # Underlying connection released - return self._body - - def get_encoding(self) -> str: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - mimetype = helpers.parse_mimetype(ctype) - - encoding = mimetype.parameters.get("charset") - if encoding: - with contextlib.suppress(LookupError, ValueError): - return codecs.lookup(encoding).name - - if mimetype.type == "application" and ( - mimetype.subtype == "json" or mimetype.subtype == "rdap" - ): - # RFC 7159 states that the default encoding is UTF-8. 
- # RFC 7483 defines application/rdap+json - return "utf-8" - - if self._body is None: - raise RuntimeError( - "Cannot compute fallback encoding of a not yet read body" - ) - - return self._resolve_charset(self, self._body) - - async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: - """Read response payload and decode.""" - if self._body is None: - await self.read() - - if encoding is None: - encoding = self.get_encoding() - - return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] - - async def json( - self, - *, - encoding: Optional[str] = None, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - content_type: Optional[str] = "application/json", - ) -> Any: - """Read and decodes JSON response.""" - if self._body is None: - await self.read() - - if content_type: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - if not _is_expected_content_type(ctype, content_type): - raise ContentTypeError( - self.request_info, - self.history, - status=self.status, - message=( - "Attempt to decode JSON with unexpected mimetype: %s" % ctype - ), - headers=self.headers, - ) - - stripped = self._body.strip() # type: ignore[union-attr] - if not stripped: - return None - - if encoding is None: - encoding = self.get_encoding() - - return loads(stripped.decode(encoding)) - - async def __aenter__(self) -> "ClientResponse": - self._in_context = True - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - self._in_context = False - # similar to _RequestContextManager, we do not need to check - # for exceptions, response object can close connection - # if state is broken - self.release() - await self.wait_for_close() - - -class ClientRequest: - GET_METHODS = { - hdrs.METH_GET, - hdrs.METH_HEAD, - hdrs.METH_OPTIONS, - hdrs.METH_TRACE, - } - POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} - ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - - DEFAULT_HEADERS = { - hdrs.ACCEPT: "*/*", - hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), - } - - # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. - _body: Union[None, payload.Payload] = None - auth = None - response = None - - __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data - - # These class defaults help create_autospec() work correctly. - # If autospec is improved in future, maybe these can be removed. - url = URL() - method = "GET" - - _continue = None # waiter future for '100 Continue' response - - _skip_auto_headers: Optional["CIMultiDict[None]"] = None - - # N.B. - # Adding __del__ method with self._writer closing doesn't make sense - # because _writer is instance method, thus it keeps a reference to self. - # Until writer has finished finalizer will not be called. 
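[Editor's note, not part of the patch] The ClientResponse methods removed just above (read(), text(), json() and the async context-manager protocol) are typically consumed as below; a small sketch with a placeholder URL, using content_type=None to bypass the ContentTypeError check shown in json():

    # Illustrative only: standard consumption of the public ClientResponse API.
    import asyncio
    import aiohttp

    async def get_json(url: str) -> dict:
        async with aiohttp.ClientSession() as session:
            # Using the response as an async context manager lets __aexit__()
            # release the underlying connection, as implemented above.
            async with session.get(url) as resp:
                resp.raise_for_status()
                # content_type=None skips the application/json check that would
                # otherwise raise ContentTypeError for mislabelled payloads.
                return await resp.json(content_type=None)

    if __name__ == "__main__":
        print(asyncio.run(get_json("https://example.org/data.json")))
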
- - def __init__( - self, - method: str, - url: URL, - *, - params: Query = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - data: Any = None, - cookies: Optional[LooseCookies] = None, - auth: Optional[BasicAuth] = None, - version: http.HttpVersion = http.HttpVersion11, - compress: Union[str, bool, None] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - response_class: Optional[Type["ClientResponse"]] = None, - proxy: Optional[URL] = None, - proxy_auth: Optional[BasicAuth] = None, - timer: Optional[BaseTimerContext] = None, - session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - proxy_headers: Optional[LooseHeaders] = None, - traces: Optional[List["Trace"]] = None, - trust_env: bool = False, - server_hostname: Optional[str] = None, - ): - if loop is None: - loop = asyncio.get_event_loop() - if match := _CONTAINS_CONTROL_CHAR_RE.search(method): - raise ValueError( - f"Method cannot contain non-token characters {method!r} " - f"(found at least {match.group()!r})" - ) - # URL forbids subclasses, so a simple type check is enough. - assert type(url) is URL, url - if proxy is not None: - assert type(proxy) is URL, proxy - # FIXME: session is None in tests only, need to fix tests - # assert session is not None - if TYPE_CHECKING: - assert session is not None - self._session = session - if params: - url = url.extend_query(params) - self.original_url = url - self.url = url.with_fragment(None) if url.raw_fragment else url - self.method = method.upper() - self.chunked = chunked - self.compress = compress - self.loop = loop - self.length = None - if response_class is None: - real_response_class = ClientResponse - else: - real_response_class = response_class - self.response_class: Type[ClientResponse] = real_response_class - self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl if ssl is not None else True - self.server_hostname = server_hostname - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - self.update_version(version) - self.update_host(url) - self.update_headers(headers) - self.update_auto_headers(skip_auto_headers) - self.update_cookies(cookies) - self.update_content_encoding(data) - self.update_auth(auth, trust_env) - self.update_proxy(proxy, proxy_auth, proxy_headers) - - self.update_body_from_data(data) - if data is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() - self.update_expect_continue(expect100) - self._traces = [] if traces is None else traces - - def __reset_writer(self, _: object = None) -> None: - self.__writer = None - - def _get_content_length(self) -> Optional[int]: - """Extract and validate Content-Length header value. - - Returns parsed Content-Length value or None if not set. - Raises ValueError if header exists but cannot be parsed as an integer. 
- """ - if hdrs.CONTENT_LENGTH not in self.headers: - return None - - content_length_hdr = self.headers[hdrs.CONTENT_LENGTH] - try: - return int(content_length_hdr) - except ValueError: - raise ValueError( - f"Invalid Content-Length header: {content_length_hdr}" - ) from None - - @property - def skip_auto_headers(self) -> CIMultiDict[None]: - return self._skip_auto_headers or CIMultiDict() - - @property - def _writer(self) -> Optional["asyncio.Task[None]"]: - return self.__writer - - @_writer.setter - def _writer(self, writer: "asyncio.Task[None]") -> None: - if self.__writer is not None: - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = writer - writer.add_done_callback(self.__reset_writer) - - def is_ssl(self) -> bool: - return self.url.scheme in _SSL_SCHEMES - - @property - def ssl(self) -> Union["SSLContext", bool, Fingerprint]: - return self._ssl - - @property - def connection_key(self) -> ConnectionKey: - if proxy_headers := self.proxy_headers: - h: Optional[int] = hash(tuple(proxy_headers.items())) - else: - h = None - url = self.url - return tuple.__new__( - ConnectionKey, - ( - url.raw_host or "", - url.port, - url.scheme in _SSL_SCHEMES, - self._ssl, - self.proxy, - self.proxy_auth, - h, - ), - ) - - @property - def host(self) -> str: - ret = self.url.raw_host - assert ret is not None - return ret - - @property - def port(self) -> Optional[int]: - return self.url.port - - @property - def body(self) -> Union[payload.Payload, Literal[b""]]: - """Request body.""" - # empty body is represented as bytes for backwards compatibility - return self._body or b"" - - @body.setter - def body(self, value: Any) -> None: - """Set request body with warning for non-autoclose payloads. - - WARNING: This setter must be called from within an event loop and is not - thread-safe. Setting body outside of an event loop may raise RuntimeError - when closing file-based payloads. - - DEPRECATED: Direct assignment to body is deprecated and will be removed - in a future version. Use await update_body() instead for proper resource - management. - """ - # Close existing payload if present - if self._body is not None: - # Warn if the payload needs manual closing - # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload - _warn_if_unclosed_payload(self._body, stacklevel=3) - # NOTE: In the future, when we remove sync close support, - # this setter will need to be removed and only the async - # update_body() method will be available. For now, we call - # _close() for backwards compatibility. - self._body._close() - self._update_body(value) - - @property - def request_info(self) -> RequestInfo: - headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) - # These are created on every request, so we use a NamedTuple - # for performance reasons. We don't use the RequestInfo.__new__ - # method because it has a different signature which is provided - # for backwards compatibility only. - return tuple.__new__( - RequestInfo, (self.url, self.method, headers, self.original_url) - ) - - @property - def session(self) -> "ClientSession": - """Return the ClientSession instance. - - This property provides access to the ClientSession that initiated - this request, allowing middleware to make additional requests - using the same session. 
- """ - return self._session - - def update_host(self, url: URL) -> None: - """Update destination host, port and connection type (ssl).""" - # get host/port - if not url.raw_host: - raise InvalidURL(url) - - # basic auth info - if url.raw_user or url.raw_password: - self.auth = helpers.BasicAuth(url.user or "", url.password or "") - - def update_version(self, version: Union[http.HttpVersion, str]) -> None: - """Convert request version to two elements tuple. - - parser HTTP version '1.1' => (1, 1) - """ - if isinstance(version, str): - v = [part.strip() for part in version.split(".", 1)] - try: - version = http.HttpVersion(int(v[0]), int(v[1])) - except ValueError: - raise ValueError( - f"Can not parse http version number: {version}" - ) from None - self.version = version - - def update_headers(self, headers: Optional[LooseHeaders]) -> None: - """Update request headers.""" - self.headers: CIMultiDict[str] = CIMultiDict() - - # Build the host header - host = self.url.host_port_subcomponent - - # host_port_subcomponent is None when the URL is a relative URL. - # but we know we do not have a relative URL here. - assert host is not None - self.headers[hdrs.HOST] = host - - if not headers: - return - - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() - - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key in hdrs.HOST_ALL: - self.headers[key] = value - else: - self.headers.add(key, value) - - def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: - if skip_auto_headers is not None: - self._skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] - else: - # Fast path when there are no headers to skip - # which is the most common case. - used_headers = self.headers - - for hdr, val in self.DEFAULT_HEADERS.items(): - if hdr not in used_headers: - self.headers[hdr] = val - - if hdrs.USER_AGENT not in used_headers: - self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE - - def update_cookies(self, cookies: Optional[LooseCookies]) -> None: - """Update request cookies header.""" - if not cookies: - return - - c = SimpleCookie() - if hdrs.COOKIE in self.headers: - # parse_cookie_header for RFC 6265 compliant Cookie header parsing - c.update(parse_cookie_header(self.headers.get(hdrs.COOKIE, ""))) - del self.headers[hdrs.COOKIE] - - if isinstance(cookies, Mapping): - iter_cookies = cookies.items() - else: - iter_cookies = cookies # type: ignore[assignment] - for name, value in iter_cookies: - if isinstance(value, Morsel): - # Use helper to preserve coded_value exactly as sent by server - c[name] = preserve_morsel_with_coded_value(value) - else: - c[name] = value # type: ignore[assignment] - - self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() - - def update_content_encoding(self, data: Any) -> None: - """Set request content encoding.""" - if not data: - # Don't compress an empty body. 
- self.compress = None - return - - if self.headers.get(hdrs.CONTENT_ENCODING): - if self.compress: - raise ValueError( - "compress can not be set if Content-Encoding header is set" - ) - elif self.compress: - if not isinstance(self.compress, str): - self.compress = "deflate" - self.headers[hdrs.CONTENT_ENCODING] = self.compress - self.chunked = True # enable chunked, no need to deal with length - - def update_transfer_encoding(self) -> None: - """Analyze transfer-encoding header.""" - te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() - - if "chunked" in te: - if self.chunked: - raise ValueError( - "chunked can not be set " - 'if "Transfer-Encoding: chunked" header is set' - ) - - elif self.chunked: - if hdrs.CONTENT_LENGTH in self.headers: - raise ValueError( - "chunked can not be set if Content-Length header is set" - ) - - self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - - def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: - """Set basic auth.""" - if auth is None: - auth = self.auth - if auth is None: - return - - if not isinstance(auth, helpers.BasicAuth): - raise TypeError("BasicAuth() tuple is required instead") - - self.headers[hdrs.AUTHORIZATION] = auth.encode() - - def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: - """Update request body from data.""" - if self._body is not None: - _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel) - - if body is None: - self._body = None - # Set Content-Length to 0 when body is None for methods that expect a body - if ( - self.method not in self.GET_METHODS - and not self.chunked - and hdrs.CONTENT_LENGTH not in self.headers - ): - self.headers[hdrs.CONTENT_LENGTH] = "0" - return - - # FormData - maybe_payload = body() if isinstance(body, FormData) else body - - try: - body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None) - except payload.LookupError: - body_payload = FormData(maybe_payload)() # type: ignore[arg-type] - - self._body = body_payload - # enable chunked encoding if needed - if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: - if (size := body_payload.size) is not None: - self.headers[hdrs.CONTENT_LENGTH] = str(size) - else: - self.chunked = True - - # copy payload headers - assert body_payload.headers - headers = self.headers - skip_headers = self._skip_auto_headers - for key, value in body_payload.headers.items(): - if key in headers or (skip_headers is not None and key in skip_headers): - continue - headers[key] = value - - def _update_body(self, body: Any) -> None: - """Update request body after its already been set.""" - # Remove existing Content-Length header since body is changing - if hdrs.CONTENT_LENGTH in self.headers: - del self.headers[hdrs.CONTENT_LENGTH] - - # Remove existing Transfer-Encoding header to avoid conflicts - if self.chunked and hdrs.TRANSFER_ENCODING in self.headers: - del self.headers[hdrs.TRANSFER_ENCODING] - - # Now update the body using the existing method - # Called from _update_body, add 1 to stacklevel from caller - self.update_body_from_data(body, _stacklevel=4) - - # Update transfer encoding headers if needed (same logic as __init__) - if body is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() - - async def update_body(self, body: Any) -> None: - """ - Update request body and close previous payload if needed. - - This method safely updates the request body by first closing any existing - payload to prevent resource leaks, then setting the new body. 
- - IMPORTANT: Always use this method instead of setting request.body directly. - Direct assignment to request.body will leak resources if the previous body - contains file handles, streams, or other resources that need cleanup. - - Args: - body: The new body content. Can be: - - bytes/bytearray: Raw binary data - - str: Text data (will be encoded using charset from Content-Type) - - FormData: Form data that will be encoded as multipart/form-data - - Payload: A pre-configured payload object - - AsyncIterable: An async iterable of bytes chunks - - File-like object: Will be read and sent as binary data - - None: Clears the body - - Usage: - # CORRECT: Use update_body - await request.update_body(b"new request data") - - # WRONG: Don't set body directly - # request.body = b"new request data" # This will leak resources! - - # Update with form data - form_data = FormData() - form_data.add_field('field', 'value') - await request.update_body(form_data) - - # Clear body - await request.update_body(None) - - Note: - This method is async because it may need to close file handles or - other resources associated with the previous payload. Always await - this method to ensure proper cleanup. - - Warning: - Setting request.body directly is highly discouraged and can lead to: - - Resource leaks (unclosed file handles, streams) - - Memory leaks (unreleased buffers) - - Unexpected behavior with streaming payloads - - It is not recommended to change the payload type in middleware. If the - body was already set (e.g., as bytes), it's best to keep the same type - rather than converting it (e.g., to str) as this may result in unexpected - behavior. - - See Also: - - update_body_from_data: Synchronous body update without cleanup - - body property: Direct body access (STRONGLY DISCOURAGED) - - """ - # Close existing payload if it exists and needs closing - if self._body is not None: - await self._body.close() - self._update_body(body) - - def update_expect_continue(self, expect: bool = False) -> None: - if expect: - self.headers[hdrs.EXPECT] = "100-continue" - elif ( - hdrs.EXPECT in self.headers - and self.headers[hdrs.EXPECT].lower() == "100-continue" - ): - expect = True - - if expect: - self._continue = self.loop.create_future() - - def update_proxy( - self, - proxy: Optional[URL], - proxy_auth: Optional[BasicAuth], - proxy_headers: Optional[LooseHeaders], - ) -> None: - self.proxy = proxy - if proxy is None: - self.proxy_auth = None - self.proxy_headers = None - return - - if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): - raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy_auth = proxy_auth - - if proxy_headers is not None and not isinstance( - proxy_headers, (MultiDict, MultiDictProxy) - ): - proxy_headers = CIMultiDict(proxy_headers) - self.proxy_headers = proxy_headers - - async def write_bytes( - self, - writer: AbstractStreamWriter, - conn: "Connection", - content_length: Optional[int] = None, - ) -> None: - """ - Write the request body to the connection stream. - - This method handles writing different types of request bodies: - 1. Payload objects (using their specialized write_with_length method) - 2. Bytes/bytearray objects - 3. 
Iterable body content - - Args: - writer: The stream writer to write the body to - conn: The connection being used for this request - content_length: Optional maximum number of bytes to write from the body - (None means write the entire body) - - The method properly handles: - - Waiting for 100-Continue responses if required - - Content length constraints for chunked encoding - - Error handling for network issues, cancellation, and other exceptions - - Signaling EOF and timeout management - - Raises: - ClientOSError: When there's an OS-level error writing the body - ClientConnectionError: When there's a general connection error - asyncio.CancelledError: When the operation is cancelled - - """ - # 100 response - if self._continue is not None: - # Force headers to be sent before waiting for 100-continue - writer.send_headers() - await writer.drain() - await self._continue - - protocol = conn.protocol - assert protocol is not None - try: - # This should be a rare case but the - # self._body can be set to None while - # the task is being started or we wait above - # for the 100-continue response. - # The more likely case is we have an empty - # payload, but 100-continue is still expected. - if self._body is not None: - await self._body.write_with_length(writer, content_length) - except OSError as underlying_exc: - reraised_exc = underlying_exc - - # Distinguish between timeout and other OS errors for better error reporting - exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( - underlying_exc, asyncio.TimeoutError - ) - if exc_is_not_timeout: - reraised_exc = ClientOSError( - underlying_exc.errno, - f"Can not write request body for {self.url !s}", - ) - - set_exception(protocol, reraised_exc, underlying_exc) - except asyncio.CancelledError: - # Body hasn't been fully sent, so connection can't be reused - conn.close() - raise - except Exception as underlying_exc: - set_exception( - protocol, - ClientConnectionError( - "Failed to send bytes into the underlying connection " - f"{conn !s}: {underlying_exc!r}", - ), - underlying_exc, - ) - else: - # Successfully wrote the body, signal EOF and start response timeout - await writer.write_eof() - protocol.start_timeout() - - async def send(self, conn: "Connection") -> "ClientResponse": - # Specify request target: - # - CONNECT request must send authority form URI - # - not CONNECT proxy must send absolute form URI - # - most common is origin form URI - if self.method == hdrs.METH_CONNECT: - connect_host = self.url.host_subcomponent - assert connect_host is not None - path = f"{connect_host}:{self.url.port}" - elif self.proxy and not self.is_ssl(): - path = str(self.url) - else: - path = self.url.raw_path_qs - - protocol = conn.protocol - assert protocol is not None - writer = StreamWriter( - protocol, - self.loop, - on_chunk_sent=( - functools.partial(self._on_chunk_request_sent, self.method, self.url) - if self._traces - else None - ), - on_headers_sent=( - functools.partial(self._on_headers_request_sent, self.method, self.url) - if self._traces - else None - ), - ) - - if self.compress: - writer.enable_compression(self.compress) # type: ignore[arg-type] - - if self.chunked is not None: - writer.enable_chunking() - - # set default content-type - if ( - self.method in self.POST_METHODS - and ( - self._skip_auto_headers is None - or hdrs.CONTENT_TYPE not in self._skip_auto_headers - ) - and hdrs.CONTENT_TYPE not in self.headers - ): - self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - - v = self.version - if 
hdrs.CONNECTION not in self.headers: - if conn._connector.force_close: - if v == HttpVersion11: - self.headers[hdrs.CONNECTION] = "close" - elif v == HttpVersion10: - self.headers[hdrs.CONNECTION] = "keep-alive" - - # status + headers - status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" - - # Buffer headers for potential coalescing with body - await writer.write_headers(status_line, self.headers) - - task: Optional["asyncio.Task[None]"] - if self._body or self._continue is not None or protocol.writing_paused: - coro = self.write_bytes(writer, conn, self._get_content_length()) - if sys.version_info >= (3, 12): - # Optimization for Python 3.12, try to write - # bytes immediately to avoid having to schedule - # the task on the event loop. - task = asyncio.Task(coro, loop=self.loop, eager_start=True) - else: - task = self.loop.create_task(coro) - if task.done(): - task = None - else: - self._writer = task - else: - # We have nothing to write because - # - there is no body - # - the protocol does not have writing paused - # - we are not waiting for a 100-continue response - protocol.start_timeout() - writer.set_eof() - task = None - response_class = self.response_class - assert response_class is not None - self.response = response_class( - self.method, - self.original_url, - writer=task, - continue100=self._continue, - timer=self._timer, - request_info=self.request_info, - traces=self._traces, - loop=self.loop, - session=self._session, - ) - return self.response - - async def close(self) -> None: - if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - - def terminate(self) -> None: - if self.__writer is not None: - if not self.loop.is_closed(): - self.__writer.cancel() - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = None - - async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: - for trace in self._traces: - await trace.send_request_chunk_sent(method, url, chunk) - - async def _on_headers_request_sent( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - for trace in self._traces: - await trace.send_request_headers(method, url, headers) diff --git a/venv/Lib/site-packages/aiohttp/client_ws.py b/venv/Lib/site-packages/aiohttp/client_ws.py deleted file mode 100644 index daa57d1..0000000 --- a/venv/Lib/site-packages/aiohttp/client_ws.py +++ /dev/null @@ -1,428 +0,0 @@ -"""WebSocket client for asyncio.""" - -import asyncio -import sys -from types import TracebackType -from typing import Any, Optional, Type, cast - -import attr - -from ._websocket.reader import WebSocketDataQueue -from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError -from .client_reqrep import ClientResponse -from .helpers import calculate_timeout_when, set_result -from .http import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WebSocketError, - WSCloseCode, - WSMessage, - WSMsgType, -) -from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter -from .streams import EofStream -from .typedefs import ( - DEFAULT_JSON_DECODER, - DEFAULT_JSON_ENCODER, - JSONDecoder, - JSONEncoder, -) - -if sys.version_info >= (3, 11): - import asyncio as async_timeout -else: - import async_timeout - - -@attr.s(frozen=True, slots=True) -class ClientWSTimeout: - ws_receive = attr.ib(type=Optional[float], default=None) - ws_close = attr.ib(type=Optional[float], default=None) 
- - -DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0) - - -class ClientWebSocketResponse: - def __init__( - self, - reader: WebSocketDataQueue, - writer: WebSocketWriter, - protocol: Optional[str], - response: ClientResponse, - timeout: ClientWSTimeout, - autoclose: bool, - autoping: bool, - loop: asyncio.AbstractEventLoop, - *, - heartbeat: Optional[float] = None, - compress: int = 0, - client_notakeover: bool = False, - ) -> None: - self._response = response - self._conn = response.connection - - self._writer = writer - self._reader = reader - self._protocol = protocol - self._closed = False - self._closing = False - self._close_code: Optional[int] = None - self._timeout = timeout - self._autoclose = autoclose - self._autoping = autoping - self._heartbeat = heartbeat - self._heartbeat_cb: Optional[asyncio.TimerHandle] = None - self._heartbeat_when: float = 0.0 - if heartbeat is not None: - self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb: Optional[asyncio.TimerHandle] = None - self._loop = loop - self._waiting: bool = False - self._close_wait: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None - self._compress = compress - self._client_notakeover = client_notakeover - self._ping_task: Optional[asyncio.Task[None]] = None - - self._reset_heartbeat() - - def _cancel_heartbeat(self) -> None: - self._cancel_pong_response_cb() - if self._heartbeat_cb is not None: - self._heartbeat_cb.cancel() - self._heartbeat_cb = None - if self._ping_task is not None: - self._ping_task.cancel() - self._ping_task = None - - def _cancel_pong_response_cb(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - - def _reset_heartbeat(self) -> None: - if self._heartbeat is None: - return - self._cancel_pong_response_cb() - loop = self._loop - assert loop is not None - conn = self._conn - timeout_ceil_threshold = ( - conn._connector._timeout_ceil_threshold if conn is not None else 5 - ) - now = loop.time() - when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) - self._heartbeat_when = when - if self._heartbeat_cb is None: - # We do not cancel the previous heartbeat_cb here because - # it generates a significant amount of TimerHandle churn - # which causes asyncio to rebuild the heap frequently. - # Instead _send_heartbeat() will reschedule the next - # heartbeat if it fires too early. - self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) - - def _send_heartbeat(self) -> None: - self._heartbeat_cb = None - loop = self._loop - now = loop.time() - if now < self._heartbeat_when: - # Heartbeat fired too early, reschedule - self._heartbeat_cb = loop.call_at( - self._heartbeat_when, self._send_heartbeat - ) - return - - conn = self._conn - timeout_ceil_threshold = ( - conn._connector._timeout_ceil_threshold if conn is not None else 5 - ) - when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) - self._cancel_pong_response_cb() - self._pong_response_cb = loop.call_at(when, self._pong_not_received) - - coro = self._writer.send_frame(b"", WSMsgType.PING) - if sys.version_info >= (3, 12): - # Optimization for Python 3.12, try to send the ping - # immediately to avoid having to schedule - # the task on the event loop. 
- ping_task = asyncio.Task(coro, loop=loop, eager_start=True) - else: - ping_task = loop.create_task(coro) - - if not ping_task.done(): - self._ping_task = ping_task - ping_task.add_done_callback(self._ping_task_done) - else: - self._ping_task_done(ping_task) - - def _ping_task_done(self, task: "asyncio.Task[None]") -> None: - """Callback for when the ping task completes.""" - if not task.cancelled() and (exc := task.exception()): - self._handle_ping_pong_exception(exc) - self._ping_task = None - - def _pong_not_received(self) -> None: - self._handle_ping_pong_exception( - ServerTimeoutError(f"No PONG received after {self._pong_heartbeat} seconds") - ) - - def _handle_ping_pong_exception(self, exc: BaseException) -> None: - """Handle exceptions raised during ping/pong processing.""" - if self._closed: - return - self._set_closed() - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - if self._waiting and not self._closing: - self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0) - - def _set_closed(self) -> None: - """Set the connection to closed. - - Cancel any heartbeat timers and set the closed flag. - """ - self._closed = True - self._cancel_heartbeat() - - def _set_closing(self) -> None: - """Set the connection to closing. - - Cancel any heartbeat timers and set the closing flag. - """ - self._closing = True - self._cancel_heartbeat() - - @property - def closed(self) -> bool: - return self._closed - - @property - def close_code(self) -> Optional[int]: - return self._close_code - - @property - def protocol(self) -> Optional[str]: - return self._protocol - - @property - def compress(self) -> int: - return self._compress - - @property - def client_notakeover(self) -> bool: - return self._client_notakeover - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """extra info from connection transport""" - conn = self._response.connection - if conn is None: - return default - transport = conn.transport - if transport is None: - return default - return transport.get_extra_info(name, default) - - def exception(self) -> Optional[BaseException]: - return self._exception - - async def ping(self, message: bytes = b"") -> None: - await self._writer.send_frame(message, WSMsgType.PING) - - async def pong(self, message: bytes = b"") -> None: - await self._writer.send_frame(message, WSMsgType.PONG) - - async def send_frame( - self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None - ) -> None: - """Send a frame over the websocket.""" - await self._writer.send_frame(message, opcode, compress) - - async def send_str(self, data: str, compress: Optional[int] = None) -> None: - if not isinstance(data, str): - raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send_frame( - data.encode("utf-8"), WSMsgType.TEXT, compress=compress - ) - - async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: - if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress) - - async def send_json( - self, - data: Any, - compress: Optional[int] = None, - *, - dumps: JSONEncoder = DEFAULT_JSON_ENCODER, - ) -> None: - await self.send_str(dumps(data), compress=compress) - - async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: - # we need to break `receive()` cycle first, - # `close()` may be called from different task 
- if self._waiting and not self._closing: - assert self._loop is not None - self._close_wait = self._loop.create_future() - self._set_closing() - self._reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._close_wait - - if self._closed: - return False - - self._set_closed() - try: - await self._writer.close(code, message) - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._response.close() - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - return True - - if self._close_code: - self._response.close() - return True - - while True: - try: - async with async_timeout.timeout(self._timeout.ws_close): - msg = await self._reader.read() - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._response.close() - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - return True - - if msg.type is WSMsgType.CLOSE: - self._close_code = msg.data - self._response.close() - return True - - async def receive(self, timeout: Optional[float] = None) -> WSMessage: - receive_timeout = timeout or self._timeout.ws_receive - - while True: - if self._waiting: - raise RuntimeError("Concurrent call to receive() is not allowed") - - if self._closed: - return WS_CLOSED_MESSAGE - elif self._closing: - await self.close() - return WS_CLOSED_MESSAGE - - try: - self._waiting = True - try: - if receive_timeout: - # Entering the context manager and creating - # Timeout() object can take almost 50% of the - # run time in this loop so we avoid it if - # there is no read timeout. - async with async_timeout.timeout(receive_timeout): - msg = await self._reader.read() - else: - msg = await self._reader.read() - self._reset_heartbeat() - finally: - self._waiting = False - if self._close_wait: - set_result(self._close_wait, None) - except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - raise - except EofStream: - self._close_code = WSCloseCode.OK - await self.close() - return WSMessage(WSMsgType.CLOSED, None, None) - except ClientError: - # Likely ServerDisconnectedError when connection is lost - self._set_closed() - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - return WS_CLOSED_MESSAGE - except WebSocketError as exc: - self._close_code = exc.code - await self.close(code=exc.code) - return WSMessage(WSMsgType.ERROR, exc, None) - except Exception as exc: - self._exception = exc - self._set_closing() - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - await self.close() - return WSMessage(WSMsgType.ERROR, exc, None) - - if msg.type not in _INTERNAL_RECEIVE_TYPES: - # If its not a close/closing/ping/pong message - # we can return it immediately - return msg - - if msg.type is WSMsgType.CLOSE: - self._set_closing() - self._close_code = msg.data - if not self._closed and self._autoclose: - await self.close() - elif msg.type is WSMsgType.CLOSING: - self._set_closing() - elif msg.type is WSMsgType.PING and self._autoping: - await self.pong(msg.data) - continue - elif msg.type is WSMsgType.PONG and self._autoping: - continue - - return msg - - async def receive_str(self, *, timeout: Optional[float] = None) -> str: - msg = await self.receive(timeout) - if msg.type is not WSMsgType.TEXT: - raise WSMessageTypeError( - f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT" - ) - return cast(str, msg.data) - - async def receive_bytes(self, *, 
timeout: Optional[float] = None) -> bytes: - msg = await self.receive(timeout) - if msg.type is not WSMsgType.BINARY: - raise WSMessageTypeError( - f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY" - ) - return cast(bytes, msg.data) - - async def receive_json( - self, - *, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - timeout: Optional[float] = None, - ) -> Any: - data = await self.receive_str(timeout=timeout) - return loads(data) - - def __aiter__(self) -> "ClientWebSocketResponse": - return self - - async def __anext__(self) -> WSMessage: - msg = await self.receive() - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): - raise StopAsyncIteration - return msg - - async def __aenter__(self) -> "ClientWebSocketResponse": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - await self.close() diff --git a/venv/Lib/site-packages/aiohttp/compression_utils.py b/venv/Lib/site-packages/aiohttp/compression_utils.py deleted file mode 100644 index e478d24..0000000 --- a/venv/Lib/site-packages/aiohttp/compression_utils.py +++ /dev/null @@ -1,348 +0,0 @@ -import asyncio -import sys -import zlib -from abc import ABC, abstractmethod -from concurrent.futures import Executor -from typing import Any, Final, Optional, Protocol, TypedDict, cast - -if sys.version_info >= (3, 12): - from collections.abc import Buffer -else: - from typing import Union - - Buffer = Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] - -try: - try: - import brotlicffi as brotli - except ImportError: - import brotli - - HAS_BROTLI = True -except ImportError: # pragma: no cover - HAS_BROTLI = False - -try: - if sys.version_info >= (3, 14): - from compression.zstd import ZstdDecompressor # noqa: I900 - else: # TODO(PY314): Remove mentions of backports.zstd across codebase - from backports.zstd import ZstdDecompressor - - HAS_ZSTD = True -except ImportError: - HAS_ZSTD = False - - -MAX_SYNC_CHUNK_SIZE = 4096 -DEFAULT_MAX_DECOMPRESS_SIZE = 2**25 # 32MiB - -# Unlimited decompression constants - different libraries use different conventions -ZLIB_MAX_LENGTH_UNLIMITED = 0 # zlib uses 0 to mean unlimited -ZSTD_MAX_LENGTH_UNLIMITED = -1 # zstd uses -1 to mean unlimited - - -class ZLibCompressObjProtocol(Protocol): - def compress(self, data: Buffer) -> bytes: ... - def flush(self, mode: int = ..., /) -> bytes: ... - - -class ZLibDecompressObjProtocol(Protocol): - def decompress(self, data: Buffer, max_length: int = ...) -> bytes: ... - def flush(self, length: int = ..., /) -> bytes: ... - - @property - def eof(self) -> bool: ... - - -class ZLibBackendProtocol(Protocol): - MAX_WBITS: int - Z_FULL_FLUSH: int - Z_SYNC_FLUSH: int - Z_BEST_SPEED: int - Z_FINISH: int - - def compressobj( - self, - level: int = ..., - method: int = ..., - wbits: int = ..., - memLevel: int = ..., - strategy: int = ..., - zdict: Optional[Buffer] = ..., - ) -> ZLibCompressObjProtocol: ... - def decompressobj( - self, wbits: int = ..., zdict: Buffer = ... - ) -> ZLibDecompressObjProtocol: ... - - def compress( - self, data: Buffer, /, level: int = ..., wbits: int = ... - ) -> bytes: ... - def decompress( - self, data: Buffer, /, wbits: int = ..., bufsize: int = ... - ) -> bytes: ... 
- - -class CompressObjArgs(TypedDict, total=False): - wbits: int - strategy: int - level: int - - -class ZLibBackendWrapper: - def __init__(self, _zlib_backend: ZLibBackendProtocol): - self._zlib_backend: ZLibBackendProtocol = _zlib_backend - - @property - def name(self) -> str: - return getattr(self._zlib_backend, "__name__", "undefined") - - @property - def MAX_WBITS(self) -> int: - return self._zlib_backend.MAX_WBITS - - @property - def Z_FULL_FLUSH(self) -> int: - return self._zlib_backend.Z_FULL_FLUSH - - @property - def Z_SYNC_FLUSH(self) -> int: - return self._zlib_backend.Z_SYNC_FLUSH - - @property - def Z_BEST_SPEED(self) -> int: - return self._zlib_backend.Z_BEST_SPEED - - @property - def Z_FINISH(self) -> int: - return self._zlib_backend.Z_FINISH - - def compressobj(self, *args: Any, **kwargs: Any) -> ZLibCompressObjProtocol: - return self._zlib_backend.compressobj(*args, **kwargs) - - def decompressobj(self, *args: Any, **kwargs: Any) -> ZLibDecompressObjProtocol: - return self._zlib_backend.decompressobj(*args, **kwargs) - - def compress(self, data: Buffer, *args: Any, **kwargs: Any) -> bytes: - return self._zlib_backend.compress(data, *args, **kwargs) - - def decompress(self, data: Buffer, *args: Any, **kwargs: Any) -> bytes: - return self._zlib_backend.decompress(data, *args, **kwargs) - - # Everything not explicitly listed in the Protocol we just pass through - def __getattr__(self, attrname: str) -> Any: - return getattr(self._zlib_backend, attrname) - - -ZLibBackend: ZLibBackendWrapper = ZLibBackendWrapper(zlib) - - -def set_zlib_backend(new_zlib_backend: ZLibBackendProtocol) -> None: - ZLibBackend._zlib_backend = new_zlib_backend - - -def encoding_to_mode( - encoding: Optional[str] = None, - suppress_deflate_header: bool = False, -) -> int: - if encoding == "gzip": - return 16 + ZLibBackend.MAX_WBITS - - return -ZLibBackend.MAX_WBITS if suppress_deflate_header else ZLibBackend.MAX_WBITS - - -class DecompressionBaseHandler(ABC): - def __init__( - self, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ): - """Base class for decompression handlers.""" - self._executor = executor - self._max_sync_chunk_size = max_sync_chunk_size - - @abstractmethod - def decompress_sync( - self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED - ) -> bytes: - """Decompress the given data.""" - - async def decompress( - self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED - ) -> bytes: - """Decompress the given data.""" - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.decompress_sync, data, max_length - ) - return self.decompress_sync(data, max_length) - - -class ZLibCompressor: - def __init__( - self, - encoding: Optional[str] = None, - suppress_deflate_header: bool = False, - level: Optional[int] = None, - wbits: Optional[int] = None, - strategy: Optional[int] = None, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ): - self._executor = executor - self._max_sync_chunk_size = max_sync_chunk_size - self._mode = ( - encoding_to_mode(encoding, suppress_deflate_header) - if wbits is None - else wbits - ) - self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) - - kwargs: CompressObjArgs = {} - kwargs["wbits"] = self._mode - if strategy is not None: - kwargs["strategy"] = strategy - if level is not None: - kwargs["level"] = level - 
self._compressor = self._zlib_backend.compressobj(**kwargs) - - def compress_sync(self, data: bytes) -> bytes: - return self._compressor.compress(data) - - async def compress(self, data: bytes) -> bytes: - """Compress the data and returned the compressed bytes. - - Note that flush() must be called after the last call to compress() - - If the data size is large than the max_sync_chunk_size, the compression - will be done in the executor. Otherwise, the compression will be done - in the event loop. - - **WARNING: This method is NOT cancellation-safe when used with flush().** - If this operation is cancelled, the compressor state may be corrupted. - The connection MUST be closed after cancellation to avoid data corruption - in subsequent compress operations. - - For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap - compress() + flush() + send operations in a shield and lock to ensure atomicity. - """ - # For large payloads, offload compression to executor to avoid blocking event loop - should_use_executor = ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ) - if should_use_executor: - return await asyncio.get_running_loop().run_in_executor( - self._executor, self._compressor.compress, data - ) - return self.compress_sync(data) - - def flush(self, mode: Optional[int] = None) -> bytes: - """Flush the compressor synchronously. - - **WARNING: This method is NOT cancellation-safe when called after compress().** - The flush() operation accesses shared compressor state. If compress() was - cancelled, calling flush() may result in corrupted data. The connection MUST - be closed after compress() cancellation. - - For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap - compress() + flush() + send operations in a shield and lock to ensure atomicity. - """ - return self._compressor.flush( - mode if mode is not None else self._zlib_backend.Z_FINISH - ) - - -class ZLibDecompressor(DecompressionBaseHandler): - def __init__( - self, - encoding: Optional[str] = None, - suppress_deflate_header: bool = False, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ): - super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) - self._mode = encoding_to_mode(encoding, suppress_deflate_header) - self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) - self._decompressor = self._zlib_backend.decompressobj(wbits=self._mode) - - def decompress_sync( - self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED - ) -> bytes: - return self._decompressor.decompress(data, max_length) - - def flush(self, length: int = 0) -> bytes: - return ( - self._decompressor.flush(length) - if length > 0 - else self._decompressor.flush() - ) - - @property - def eof(self) -> bool: - return self._decompressor.eof - - -class BrotliDecompressor(DecompressionBaseHandler): - # Supports both 'brotlipy' and 'Brotli' packages - # since they share an import name. The top branches - # are for 'brotlipy' and bottom branches for 'Brotli' - def __init__( - self, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ) -> None: - """Decompress data using the Brotli library.""" - if not HAS_BROTLI: - raise RuntimeError( - "The brotli decompression is not available. 
" - "Please install `Brotli` module" - ) - self._obj = brotli.Decompressor() - super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) - - def decompress_sync( - self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED - ) -> bytes: - """Decompress the given data.""" - if hasattr(self._obj, "decompress"): - return cast(bytes, self._obj.decompress(data, max_length)) - return cast(bytes, self._obj.process(data, max_length)) - - def flush(self) -> bytes: - """Flush the decompressor.""" - if hasattr(self._obj, "flush"): - return cast(bytes, self._obj.flush()) - return b"" - - -class ZSTDDecompressor(DecompressionBaseHandler): - def __init__( - self, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ) -> None: - if not HAS_ZSTD: - raise RuntimeError( - "The zstd decompression is not available. " - "Please install `backports.zstd` module" - ) - self._obj = ZstdDecompressor() - super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) - - def decompress_sync( - self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED - ) -> bytes: - # zstd uses -1 for unlimited, while zlib uses 0 for unlimited - # Convert the zlib convention (0=unlimited) to zstd convention (-1=unlimited) - zstd_max_length = ( - ZSTD_MAX_LENGTH_UNLIMITED - if max_length == ZLIB_MAX_LENGTH_UNLIMITED - else max_length - ) - return self._obj.decompress(data, zstd_max_length) - - def flush(self) -> bytes: - return b"" diff --git a/venv/Lib/site-packages/aiohttp/connector.py b/venv/Lib/site-packages/aiohttp/connector.py deleted file mode 100644 index 290a424..0000000 --- a/venv/Lib/site-packages/aiohttp/connector.py +++ /dev/null @@ -1,1842 +0,0 @@ -import asyncio -import functools -import random -import socket -import sys -import traceback -import warnings -from collections import OrderedDict, defaultdict, deque -from contextlib import suppress -from http import HTTPStatus -from itertools import chain, cycle, islice -from time import monotonic -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - DefaultDict, - Deque, - Dict, - Iterator, - List, - Literal, - Optional, - Sequence, - Set, - Tuple, - Type, - Union, - cast, -) - -import aiohappyeyeballs -from aiohappyeyeballs import AddrInfoType, SocketFactoryType - -from . 
import hdrs, helpers -from .abc import AbstractResolver, ResolveResult -from .client_exceptions import ( - ClientConnectionError, - ClientConnectorCertificateError, - ClientConnectorDNSError, - ClientConnectorError, - ClientConnectorSSLError, - ClientHttpProxyError, - ClientProxyConnectionError, - ServerFingerprintMismatch, - UnixClientConnectorError, - cert_errors, - ssl_errors, -) -from .client_proto import ResponseHandler -from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ( - _SENTINEL, - ceil_timeout, - is_ip_address, - noop, - sentinel, - set_exception, - set_result, -) -from .log import client_logger -from .resolver import DefaultResolver - -if sys.version_info >= (3, 12): - from collections.abc import Buffer -else: - Buffer = Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] - -if TYPE_CHECKING: - import ssl - - SSLContext = ssl.SSLContext -else: - try: - import ssl - - SSLContext = ssl.SSLContext - except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - -EMPTY_SCHEMA_SET = frozenset({""}) -HTTP_SCHEMA_SET = frozenset({"http", "https"}) -WS_SCHEMA_SET = frozenset({"ws", "wss"}) - -HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET -HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET - -NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( - 3, - 13, - 1, -) or sys.version_info < (3, 12, 7) -# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 -# which first appeared in Python 3.12.7 and 3.13.1 - - -__all__ = ( - "BaseConnector", - "TCPConnector", - "UnixConnector", - "NamedPipeConnector", - "AddrInfoType", - "SocketFactoryType", -) - - -if TYPE_CHECKING: - from .client import ClientTimeout - from .client_reqrep import ConnectionKey - from .tracing import Trace - - -class _DeprecationWaiter: - __slots__ = ("_awaitable", "_awaited") - - def __init__(self, awaitable: Awaitable[Any]) -> None: - self._awaitable = awaitable - self._awaited = False - - def __await__(self) -> Any: - self._awaited = True - return self._awaitable.__await__() - - def __del__(self) -> None: - if not self._awaited: - warnings.warn( - "Connector.close() is a coroutine, " - "please use await connector.close()", - DeprecationWarning, - ) - - -async def _wait_for_close(waiters: List[Awaitable[object]]) -> None: - """Wait for all waiters to finish closing.""" - results = await asyncio.gather(*waiters, return_exceptions=True) - for res in results: - if isinstance(res, Exception): - client_logger.debug("Error while closing connector: %r", res) - - -class Connection: - - _source_traceback = None - - def __init__( - self, - connector: "BaseConnector", - key: "ConnectionKey", - protocol: ResponseHandler, - loop: asyncio.AbstractEventLoop, - ) -> None: - self._key = key - self._connector = connector - self._loop = loop - self._protocol: Optional[ResponseHandler] = protocol - self._callbacks: List[Callable[[], None]] = [] - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - def __repr__(self) -> str: - return f"Connection<{self._key}>" - - def __del__(self, _warnings: Any = warnings) -> None: - if self._protocol is not None: - kwargs = {"source": self} - _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) - if self._loop.is_closed(): - return - - self._connector._release(self._key, self._protocol, should_close=True) - - context = {"client_connection": self, 
"message": "Unclosed connection"} - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __bool__(self) -> Literal[True]: - """Force subclasses to not be falsy, to make checks simpler.""" - return True - - @property - def loop(self) -> asyncio.AbstractEventLoop: - warnings.warn( - "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - @property - def transport(self) -> Optional[asyncio.Transport]: - if self._protocol is None: - return None - return self._protocol.transport - - @property - def protocol(self) -> Optional[ResponseHandler]: - return self._protocol - - def add_callback(self, callback: Callable[[], None]) -> None: - if callback is not None: - self._callbacks.append(callback) - - def _notify_release(self) -> None: - callbacks, self._callbacks = self._callbacks[:], [] - - for cb in callbacks: - with suppress(Exception): - cb() - - def close(self) -> None: - self._notify_release() - - if self._protocol is not None: - self._connector._release(self._key, self._protocol, should_close=True) - self._protocol = None - - def release(self) -> None: - self._notify_release() - - if self._protocol is not None: - self._connector._release(self._key, self._protocol) - self._protocol = None - - @property - def closed(self) -> bool: - return self._protocol is None or not self._protocol.is_connected() - - -class _ConnectTunnelConnection(Connection): - """Special connection wrapper for CONNECT tunnels that must never be pooled. - - This connection wraps the proxy connection that will be upgraded with TLS. - It must never be released to the pool because: - 1. Its 'closed' future will never complete, causing session.close() to hang - 2. It represents an intermediate state, not a reusable connection - 3. The real connection (with TLS) will be created separately - """ - - def release(self) -> None: - """Do nothing - don't pool or close the connection. - - These connections are an intermediate state during the CONNECT tunnel - setup and will be cleaned up naturally after the TLS upgrade. If they - were to be pooled, they would never be properly closed, causing - session.close() to wait forever for their 'closed' future. - """ - - -class _TransportPlaceholder: - """placeholder for BaseConnector.connect function""" - - __slots__ = ("closed", "transport") - - def __init__(self, closed_future: asyncio.Future[Optional[Exception]]) -> None: - """Initialize a placeholder for a transport.""" - self.closed = closed_future - self.transport = None - - def close(self) -> None: - """Close the placeholder.""" - - def abort(self) -> None: - """Abort the placeholder (does nothing).""" - - -class BaseConnector: - """Base connector class. - - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - enable_cleanup_closed - Enables clean-up closed ssl transports. - Disabled by default. - timeout_ceil_threshold - Trigger ceiling of timeout values when - it's above timeout_ceil_threshold. - loop - Optional event loop. 
 [... remaining deleted content of venv/Lib/site-packages/aiohttp/connector.py (BaseConnector, _DNSCacheTable, TCPConnector, UnixConnector, NamedPipeConnector) not reproduced here: unmodified upstream aiohttp source removed together with the rest of the vendored virtual environment ...]
diff --git a/venv/Lib/site-packages/aiohttp/cookiejar.py b/venv/Lib/site-packages/aiohttp/cookiejar.py
deleted file mode 100644
index 193648d..0000000
--- a/venv/Lib/site-packages/aiohttp/cookiejar.py
+++ /dev/null
 [... 522 deleted lines: unmodified upstream aiohttp cookiejar.py (CookieJar, DummyCookieJar) removed with the vendored virtual environment ...]
diff --git a/venv/Lib/site-packages/aiohttp/formdata.py b/venv/Lib/site-packages/aiohttp/formdata.py
deleted file mode 100644
index a5a4f60..0000000
--- a/venv/Lib/site-packages/aiohttp/formdata.py
+++ /dev/null
 [... 179 deleted lines: unmodified upstream aiohttp formdata.py (FormData helper) removed with the vendored virtual environment ...]
- ) - warnings.warn(msg, DeprecationWarning) - self._is_multipart = True - - self._fields.append((type_options, headers, value)) - - def add_fields(self, *fields: Any) -> None: - to_add = list(fields) - - while to_add: - rec = to_add.pop(0) - - if isinstance(rec, io.IOBase): - k = guess_filename(rec, "unknown") - self.add_field(k, rec) # type: ignore[arg-type] - - elif isinstance(rec, (MultiDictProxy, MultiDict)): - to_add.extend(rec.items()) - - elif isinstance(rec, (list, tuple)) and len(rec) == 2: - k, fp = rec - self.add_field(k, fp) - - else: - raise TypeError( - "Only io.IOBase, multidict and (name, file) " - "pairs allowed, use .add_field() for passing " - "more complex parameters, got {!r}".format(rec) - ) - - def _gen_form_urlencoded(self) -> payload.BytesPayload: - # form data (x-www-form-urlencoded) - data = [] - for type_options, _, value in self._fields: - data.append((type_options["name"], value)) - - charset = self._charset if self._charset is not None else "utf-8" - - if charset == "utf-8": - content_type = "application/x-www-form-urlencoded" - else: - content_type = "application/x-www-form-urlencoded; charset=%s" % charset - - return payload.BytesPayload( - urlencode(data, doseq=True, encoding=charset).encode(), - content_type=content_type, - ) - - def _gen_form_data(self) -> multipart.MultipartWriter: - """Encode a list of fields using the multipart/form-data MIME format""" - for dispparams, headers, value in self._fields: - try: - if hdrs.CONTENT_TYPE in headers: - part = payload.get_payload( - value, - content_type=headers[hdrs.CONTENT_TYPE], - headers=headers, - encoding=self._charset, - ) - else: - part = payload.get_payload( - value, headers=headers, encoding=self._charset - ) - except Exception as exc: - raise TypeError( - "Can not serialize value type: %r\n " - "headers: %r\n value: %r" % (type(value), headers, value) - ) from exc - - if dispparams: - part.set_content_disposition( - "form-data", quote_fields=self._quote_fields, **dispparams - ) - # FIXME cgi.FieldStorage doesn't likes body parts with - # Content-Length which were sent via chunked transfer encoding - assert part.headers is not None - part.headers.popall(hdrs.CONTENT_LENGTH, None) - - self._writer.append_payload(part) - - self._fields.clear() - return self._writer - - def __call__(self) -> Payload: - if self._is_multipart: - return self._gen_form_data() - else: - return self._gen_form_urlencoded() diff --git a/venv/Lib/site-packages/aiohttp/hdrs.py b/venv/Lib/site-packages/aiohttp/hdrs.py deleted file mode 100644 index c8d6b35..0000000 --- a/venv/Lib/site-packages/aiohttp/hdrs.py +++ /dev/null @@ -1,121 +0,0 @@ -"""HTTP Headers constants.""" - -# After changing the file content call ./tools/gen.py -# to regenerate the headers parser -import itertools -from typing import Final, Set - -from multidict import istr - -METH_ANY: Final[str] = "*" -METH_CONNECT: Final[str] = "CONNECT" -METH_HEAD: Final[str] = "HEAD" -METH_GET: Final[str] = "GET" -METH_DELETE: Final[str] = "DELETE" -METH_OPTIONS: Final[str] = "OPTIONS" -METH_PATCH: Final[str] = "PATCH" -METH_POST: Final[str] = "POST" -METH_PUT: Final[str] = "PUT" -METH_TRACE: Final[str] = "TRACE" - -METH_ALL: Final[Set[str]] = { - METH_CONNECT, - METH_HEAD, - METH_GET, - METH_DELETE, - METH_OPTIONS, - METH_PATCH, - METH_POST, - METH_PUT, - METH_TRACE, -} - -ACCEPT: Final[istr] = istr("Accept") -ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset") -ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding") -ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language") 
-ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges") -ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age") -ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials") -ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers") -ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods") -ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin") -ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers") -ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers") -ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method") -AGE: Final[istr] = istr("Age") -ALLOW: Final[istr] = istr("Allow") -AUTHORIZATION: Final[istr] = istr("Authorization") -CACHE_CONTROL: Final[istr] = istr("Cache-Control") -CONNECTION: Final[istr] = istr("Connection") -CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition") -CONTENT_ENCODING: Final[istr] = istr("Content-Encoding") -CONTENT_LANGUAGE: Final[istr] = istr("Content-Language") -CONTENT_LENGTH: Final[istr] = istr("Content-Length") -CONTENT_LOCATION: Final[istr] = istr("Content-Location") -CONTENT_MD5: Final[istr] = istr("Content-MD5") -CONTENT_RANGE: Final[istr] = istr("Content-Range") -CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding") -CONTENT_TYPE: Final[istr] = istr("Content-Type") -COOKIE: Final[istr] = istr("Cookie") -DATE: Final[istr] = istr("Date") -DESTINATION: Final[istr] = istr("Destination") -DIGEST: Final[istr] = istr("Digest") -ETAG: Final[istr] = istr("Etag") -EXPECT: Final[istr] = istr("Expect") -EXPIRES: Final[istr] = istr("Expires") -FORWARDED: Final[istr] = istr("Forwarded") -FROM: Final[istr] = istr("From") -HOST: Final[istr] = istr("Host") -IF_MATCH: Final[istr] = istr("If-Match") -IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since") -IF_NONE_MATCH: Final[istr] = istr("If-None-Match") -IF_RANGE: Final[istr] = istr("If-Range") -IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since") -KEEP_ALIVE: Final[istr] = istr("Keep-Alive") -LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID") -LAST_MODIFIED: Final[istr] = istr("Last-Modified") -LINK: Final[istr] = istr("Link") -LOCATION: Final[istr] = istr("Location") -MAX_FORWARDS: Final[istr] = istr("Max-Forwards") -ORIGIN: Final[istr] = istr("Origin") -PRAGMA: Final[istr] = istr("Pragma") -PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate") -PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization") -RANGE: Final[istr] = istr("Range") -REFERER: Final[istr] = istr("Referer") -RETRY_AFTER: Final[istr] = istr("Retry-After") -SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept") -SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version") -SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol") -SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions") -SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key") -SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1") -SERVER: Final[istr] = istr("Server") -SET_COOKIE: Final[istr] = istr("Set-Cookie") -TE: Final[istr] = istr("TE") -TRAILER: Final[istr] = istr("Trailer") -TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding") -UPGRADE: Final[istr] = istr("Upgrade") -URI: Final[istr] = istr("URI") -USER_AGENT: Final[istr] = istr("User-Agent") -VARY: Final[istr] = istr("Vary") -VIA: Final[istr] = istr("Via") -WANT_DIGEST: Final[istr] = istr("Want-Digest") -WARNING: 
Final[istr] = istr("Warning") -WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate") -X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") -X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") -X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") - -# These are the upper/lower case variants of the headers/methods -# Example: {'hOst', 'host', 'HoST', 'HOSt', 'hOsT', 'HosT', 'hoSt', ...} -METH_HEAD_ALL: Final = frozenset( - map("".join, itertools.product(*zip(METH_HEAD.upper(), METH_HEAD.lower()))) -) -METH_CONNECT_ALL: Final = frozenset( - map("".join, itertools.product(*zip(METH_CONNECT.upper(), METH_CONNECT.lower()))) -) -HOST_ALL: Final = frozenset( - map("".join, itertools.product(*zip(HOST.upper(), HOST.lower()))) -) diff --git a/venv/Lib/site-packages/aiohttp/helpers.py b/venv/Lib/site-packages/aiohttp/helpers.py deleted file mode 100644 index dfab987..0000000 --- a/venv/Lib/site-packages/aiohttp/helpers.py +++ /dev/null @@ -1,986 +0,0 @@ -"""Various helper functions""" - -import asyncio -import base64 -import binascii -import contextlib -import datetime -import enum -import functools -import inspect -import netrc -import os -import platform -import re -import sys -import time -import weakref -from collections import namedtuple -from contextlib import suppress -from email.message import EmailMessage -from email.parser import HeaderParser -from email.policy import HTTP -from email.utils import parsedate -from math import ceil -from pathlib import Path -from types import MappingProxyType, TracebackType -from typing import ( - Any, - Callable, - ContextManager, - Dict, - Generator, - Generic, - Iterable, - Iterator, - List, - Mapping, - Optional, - Protocol, - Tuple, - Type, - TypeVar, - Union, - get_args, - overload, -) -from urllib.parse import quote -from urllib.request import getproxies, proxy_bypass - -import attr -from multidict import MultiDict, MultiDictProxy, MultiMapping -from propcache.api import under_cached_property as reify -from yarl import URL - -from . 
import hdrs -from .log import client_logger - -if sys.version_info >= (3, 11): - import asyncio as async_timeout -else: - import async_timeout - -__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify") - -IS_MACOS = platform.system() == "Darwin" -IS_WINDOWS = platform.system() == "Windows" - -PY_310 = sys.version_info >= (3, 10) -PY_311 = sys.version_info >= (3, 11) - - -_T = TypeVar("_T") -_S = TypeVar("_S") - -_SENTINEL = enum.Enum("_SENTINEL", "sentinel") -sentinel = _SENTINEL.sentinel - -NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) - -# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 -EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200))) -# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 -# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 -EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL - -DEBUG = sys.flags.dev_mode or ( - not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) -) - - -CHAR = {chr(i) for i in range(0, 128)} -CTL = {chr(i) for i in range(0, 32)} | { - chr(127), -} -SEPARATORS = { - "(", - ")", - "<", - ">", - "@", - ",", - ";", - ":", - "\\", - '"', - "/", - "[", - "]", - "?", - "=", - "{", - "}", - " ", - chr(9), -} -TOKEN = CHAR ^ CTL ^ SEPARATORS - - -class noop: - def __await__(self) -> Generator[None, None, None]: - yield - - -class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): - """Http basic authentication helper.""" - - def __new__( - cls, login: str, password: str = "", encoding: str = "latin1" - ) -> "BasicAuth": - if login is None: - raise ValueError("None is not allowed as login value") - - if password is None: - raise ValueError("None is not allowed as password value") - - if ":" in login: - raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') - - return super().__new__(cls, login, password, encoding) - - @classmethod - def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": - """Create a BasicAuth object from an Authorization HTTP header.""" - try: - auth_type, encoded_credentials = auth_header.split(" ", 1) - except ValueError: - raise ValueError("Could not parse authorization header.") - - if auth_type.lower() != "basic": - raise ValueError("Unknown authorization method %s" % auth_type) - - try: - decoded = base64.b64decode( - encoded_credentials.encode("ascii"), validate=True - ).decode(encoding) - except binascii.Error: - raise ValueError("Invalid base64 encoding.") - - try: - # RFC 2617 HTTP Authentication - # https://www.ietf.org/rfc/rfc2617.txt - # the colon must be present, but the username and password may be - # otherwise blank. - username, password = decoded.split(":", 1) - except ValueError: - raise ValueError("Invalid credentials.") - - return cls(username, password, encoding=encoding) - - @classmethod - def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: - """Create BasicAuth from url.""" - if not isinstance(url, URL): - raise TypeError("url should be yarl.URL instance") - # Check raw_user and raw_password first as yarl is likely - # to already have these values parsed from the netloc in the cache. 
- if url.raw_user is None and url.raw_password is None: - return None - return cls(url.user or "", url.password or "", encoding=encoding) - - def encode(self) -> str: - """Encode credentials.""" - creds = (f"{self.login}:{self.password}").encode(self.encoding) - return "Basic %s" % base64.b64encode(creds).decode(self.encoding) - - -def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - """Remove user and password from URL if present and return BasicAuth object.""" - # Check raw_user and raw_password first as yarl is likely - # to already have these values parsed from the netloc in the cache. - if url.raw_user is None and url.raw_password is None: - return url, None - return url.with_user(None), BasicAuth(url.user or "", url.password or "") - - -def netrc_from_env() -> Optional[netrc.netrc]: - """Load netrc from file. - - Attempt to load it from the path specified by the env-var - NETRC or in the default location in the user's home directory. - - Returns None if it couldn't be found or fails to parse. - """ - netrc_env = os.environ.get("NETRC") - - if netrc_env is not None: - netrc_path = Path(netrc_env) - else: - try: - home_dir = Path.home() - except RuntimeError as e: # pragma: no cover - # if pathlib can't resolve home, it may raise a RuntimeError - client_logger.debug( - "Could not resolve home directory when " - "trying to look for .netrc file: %s", - e, - ) - return None - - netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") - - try: - return netrc.netrc(str(netrc_path)) - except netrc.NetrcParseError as e: - client_logger.warning("Could not parse .netrc file: %s", e) - except OSError as e: - netrc_exists = False - with contextlib.suppress(OSError): - netrc_exists = netrc_path.is_file() - # we couldn't read the file (doesn't exist, permissions, etc.) - if netrc_env or netrc_exists: - # only warn if the environment wanted us to load it, - # or it appears like the default file does actually exist - client_logger.warning("Could not read .netrc file: %s", e) - - return None - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ProxyInfo: - proxy: URL - proxy_auth: Optional[BasicAuth] - - -def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth: - """ - Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``. - - :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no - entry is found for the ``host``. - """ - if netrc_obj is None: - raise LookupError("No .netrc file found") - auth_from_netrc = netrc_obj.authenticators(host) - - if auth_from_netrc is None: - raise LookupError(f"No entry for {host!s} found in the `.netrc` file.") - login, account, password = auth_from_netrc - - # TODO(PY311): username = login or account - # Up to python 3.10, account could be None if not specified, - # and login will be empty string if not specified. From 3.11, - # login and account will be empty string if not specified. 
- username = login if (login or account is None) else account - - # TODO(PY311): Remove this, as password will be empty string - # if not specified - if password is None: - password = "" - - return BasicAuth(username, password) - - -def proxies_from_env() -> Dict[str, ProxyInfo]: - proxy_urls = { - k: URL(v) - for k, v in getproxies().items() - if k in ("http", "https", "ws", "wss") - } - netrc_obj = netrc_from_env() - stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} - ret = {} - for proto, val in stripped.items(): - proxy, auth = val - if proxy.scheme in ("https", "wss"): - client_logger.warning( - "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy - ) - continue - if netrc_obj and auth is None: - if proxy.host is not None: - try: - auth = basicauth_from_netrc(netrc_obj, proxy.host) - except LookupError: - auth = None - ret[proto] = ProxyInfo(proxy, auth) - return ret - - -def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - """Get a permitted proxy for the given URL from the env.""" - if url.host is not None and proxy_bypass(url.host): - raise LookupError(f"Proxying is disallowed for `{url.host!r}`") - - proxies_in_env = proxies_from_env() - try: - proxy_info = proxies_in_env[url.scheme] - except KeyError: - raise LookupError(f"No proxies found for `{url!s}` in the env") - else: - return proxy_info.proxy, proxy_info.proxy_auth - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class MimeType: - type: str - subtype: str - suffix: str - parameters: "MultiDictProxy[str]" - - -@functools.lru_cache(maxsize=56) -def parse_mimetype(mimetype: str) -> MimeType: - """Parses a MIME type into its components. - - mimetype is a MIME type string. - - Returns a MimeType object. - - Example: - - >>> parse_mimetype('text/html; charset=utf-8') - MimeType(type='text', subtype='html', suffix='', - parameters={'charset': 'utf-8'}) - - """ - if not mimetype: - return MimeType( - type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) - ) - - parts = mimetype.split(";") - params: MultiDict[str] = MultiDict() - for item in parts[1:]: - if not item: - continue - key, _, value = item.partition("=") - params.add(key.lower().strip(), value.strip(' "')) - - fulltype = parts[0].strip().lower() - if fulltype == "*": - fulltype = "*/*" - - mtype, _, stype = fulltype.partition("/") - stype, _, suffix = stype.partition("+") - - return MimeType( - type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) - ) - - -class EnsureOctetStream(EmailMessage): - def __init__(self) -> None: - super().__init__() - # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 - self.set_default_type("application/octet-stream") - - def get_content_type(self) -> str: - """Re-implementation from Message - - Returns application/octet-stream in place of plain/text when - value is wrong. - - The way this class is used guarantees that content-type will - be present so simplify the checks wrt to the base implementation. - """ - value = self.get("content-type", "").lower() - - # Based on the implementation of _splitparam in the standard library - ctype, _, _ = value.partition(";") - ctype = ctype.strip() - if ctype.count("/") != 1: - return self.get_default_type() - return ctype - - -@functools.lru_cache(maxsize=56) -def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]: - """Parse Content-Type header. - - Returns a tuple of the parsed content type and a - MappingProxyType of parameters. 
The default returned value - is `application/octet-stream` - """ - msg = HeaderParser(EnsureOctetStream, policy=HTTP).parsestr(f"Content-Type: {raw}") - content_type = msg.get_content_type() - params = msg.get_params(()) - content_dict = dict(params[1:]) # First element is content type again - return content_type, MappingProxyType(content_dict) - - -def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: - name = getattr(obj, "name", None) - if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": - return Path(name).name - return default - - -not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") -QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} - - -def quoted_string(content: str) -> str: - """Return 7-bit content as quoted-string. - - Format content into a quoted-string as defined in RFC5322 for - Internet Message Format. Notice that this is not the 8-bit HTTP - format, but the 7-bit email format. Content must be in usascii or - a ValueError is raised. - """ - if not (QCONTENT > set(content)): - raise ValueError(f"bad content for quoted-string {content!r}") - return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) - - -def content_disposition_header( - disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str -) -> str: - """Sets ``Content-Disposition`` header for MIME. - - This is the MIME payload Content-Disposition header from RFC 2183 - and RFC 7579 section 4.2, not the HTTP Content-Disposition from - RFC 6266. - - disptype is a disposition type: inline, attachment, form-data. - Should be valid extension token (see RFC 2183) - - quote_fields performs value quoting to 7-bit MIME headers - according to RFC 7578. Set to quote_fields to False if recipient - can take 8-bit file names and field values. - - _charset specifies the charset to use when quote_fields is True. - - params is a dict with disposition params. - """ - if not disptype or not (TOKEN > set(disptype)): - raise ValueError(f"bad content disposition type {disptype!r}") - - value = disptype - if params: - lparams = [] - for key, val in params.items(): - if not key or not (TOKEN > set(key)): - raise ValueError(f"bad content disposition parameter {key!r}={val!r}") - if quote_fields: - if key.lower() == "filename": - qval = quote(val, "", encoding=_charset) - lparams.append((key, '"%s"' % qval)) - else: - try: - qval = quoted_string(val) - except ValueError: - qval = "".join( - (_charset, "''", quote(val, "", encoding=_charset)) - ) - lparams.append((key + "*", qval)) - else: - lparams.append((key, '"%s"' % qval)) - else: - qval = val.replace("\\", "\\\\").replace('"', '\\"') - lparams.append((key, '"%s"' % qval)) - sparams = "; ".join("=".join(pair) for pair in lparams) - value = "; ".join((value, sparams)) - return value - - -def is_ip_address(host: Optional[str]) -> bool: - """Check if host looks like an IP Address. - - This check is only meant as a heuristic to ensure that - a host is not a domain name. - """ - if not host: - return False - # For a host to be an ipv4 address, it must be all numeric. - # The host must contain a colon to be an IPv6 address. - return ":" in host or host.replace(".", "").isdigit() - - -_cached_current_datetime: Optional[int] = None -_cached_formatted_datetime = "" - - -def rfc822_formatted_time() -> str: - global _cached_current_datetime - global _cached_formatted_datetime - - now = int(time.time()) - if now != _cached_current_datetime: - # Weekday and month names for HTTP date/time formatting; - # always English! 
- # Tuples are constants stored in codeobject! - _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") - _monthname = ( - "", # Dummy so we can use 1-based month numbers - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ) - - year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) - _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( - _weekdayname[wd], - day, - _monthname[month], - year, - hh, - mm, - ss, - ) - _cached_current_datetime = now - return _cached_formatted_datetime - - -def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: - ref, name = info - ob = ref() - if ob is not None: - with suppress(Exception): - getattr(ob, name)() - - -def weakref_handle( - ob: object, - name: str, - timeout: float, - loop: asyncio.AbstractEventLoop, - timeout_ceil_threshold: float = 5, -) -> Optional[asyncio.TimerHandle]: - if timeout is not None and timeout > 0: - when = loop.time() + timeout - if timeout >= timeout_ceil_threshold: - when = ceil(when) - - return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) - return None - - -def call_later( - cb: Callable[[], Any], - timeout: float, - loop: asyncio.AbstractEventLoop, - timeout_ceil_threshold: float = 5, -) -> Optional[asyncio.TimerHandle]: - if timeout is None or timeout <= 0: - return None - now = loop.time() - when = calculate_timeout_when(now, timeout, timeout_ceil_threshold) - return loop.call_at(when, cb) - - -def calculate_timeout_when( - loop_time: float, - timeout: float, - timeout_ceiling_threshold: float, -) -> float: - """Calculate when to execute a timeout.""" - when = loop_time + timeout - if timeout > timeout_ceiling_threshold: - return ceil(when) - return when - - -class TimeoutHandle: - """Timeout handle""" - - __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks") - - def __init__( - self, - loop: asyncio.AbstractEventLoop, - timeout: Optional[float], - ceil_threshold: float = 5, - ) -> None: - self._timeout = timeout - self._loop = loop - self._ceil_threshold = ceil_threshold - self._callbacks: List[ - Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] - ] = [] - - def register( - self, callback: Callable[..., None], *args: Any, **kwargs: Any - ) -> None: - self._callbacks.append((callback, args, kwargs)) - - def close(self) -> None: - self._callbacks.clear() - - def start(self) -> Optional[asyncio.TimerHandle]: - timeout = self._timeout - if timeout is not None and timeout > 0: - when = self._loop.time() + timeout - if timeout >= self._ceil_threshold: - when = ceil(when) - return self._loop.call_at(when, self.__call__) - else: - return None - - def timer(self) -> "BaseTimerContext": - if self._timeout is not None and self._timeout > 0: - timer = TimerContext(self._loop) - self.register(timer.timeout) - return timer - else: - return TimerNoop() - - def __call__(self) -> None: - for cb, args, kwargs in self._callbacks: - with suppress(Exception): - cb(*args, **kwargs) - - self._callbacks.clear() - - -class BaseTimerContext(ContextManager["BaseTimerContext"]): - - __slots__ = () - - def assert_timeout(self) -> None: - """Raise TimeoutError if timeout has been exceeded.""" - - -class TimerNoop(BaseTimerContext): - - __slots__ = () - - def __enter__(self) -> BaseTimerContext: - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - return - - -class TimerContext(BaseTimerContext): 
- """Low resolution timeout context manager""" - - __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling") - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._tasks: List[asyncio.Task[Any]] = [] - self._cancelled = False - self._cancelling = 0 - - def assert_timeout(self) -> None: - """Raise TimeoutError if timer has already been cancelled.""" - if self._cancelled: - raise asyncio.TimeoutError from None - - def __enter__(self) -> BaseTimerContext: - task = asyncio.current_task(loop=self._loop) - if task is None: - raise RuntimeError("Timeout context manager should be used inside a task") - - if sys.version_info >= (3, 11): - # Remember if the task was already cancelling - # so when we __exit__ we can decide if we should - # raise asyncio.TimeoutError or let the cancellation propagate - self._cancelling = task.cancelling() - - if self._cancelled: - raise asyncio.TimeoutError from None - - self._tasks.append(task) - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: - enter_task: Optional[asyncio.Task[Any]] = None - if self._tasks: - enter_task = self._tasks.pop() - - if exc_type is asyncio.CancelledError and self._cancelled: - assert enter_task is not None - # The timeout was hit, and the task was cancelled - # so we need to uncancel the last task that entered the context manager - # since the cancellation should not leak out of the context manager - if sys.version_info >= (3, 11): - # If the task was already cancelling don't raise - # asyncio.TimeoutError and instead return None - # to allow the cancellation to propagate - if enter_task.uncancel() > self._cancelling: - return None - raise asyncio.TimeoutError from exc_val - return None - - def timeout(self) -> None: - if not self._cancelled: - for task in set(self._tasks): - task.cancel() - - self._cancelled = True - - -def ceil_timeout( - delay: Optional[float], ceil_threshold: float = 5 -) -> async_timeout.Timeout: - if delay is None or delay <= 0: - return async_timeout.timeout(None) - - loop = asyncio.get_running_loop() - now = loop.time() - when = now + delay - if delay > ceil_threshold: - when = ceil(when) - return async_timeout.timeout_at(when) - - -class HeadersMixin: - """Mixin for handling headers.""" - - ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) - - _headers: MultiMapping[str] - _content_type: Optional[str] = None - _content_dict: Optional[Dict[str, str]] = None - _stored_content_type: Union[str, None, _SENTINEL] = sentinel - - def _parse_content_type(self, raw: Optional[str]) -> None: - self._stored_content_type = raw - if raw is None: - # default value according to RFC 2616 - self._content_type = "application/octet-stream" - self._content_dict = {} - else: - content_type, content_mapping_proxy = parse_content_type(raw) - self._content_type = content_type - # _content_dict needs to be mutable so we can update it - self._content_dict = content_mapping_proxy.copy() - - @property - def content_type(self) -> str: - """The value of content part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) - if self._stored_content_type != raw: - self._parse_content_type(raw) - assert self._content_type is not None - return self._content_type - - @property - def charset(self) -> Optional[str]: - """The value of charset part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) - if 
self._stored_content_type != raw: - self._parse_content_type(raw) - assert self._content_dict is not None - return self._content_dict.get("charset") - - @property - def content_length(self) -> Optional[int]: - """The value of Content-Length HTTP header.""" - content_length = self._headers.get(hdrs.CONTENT_LENGTH) - return None if content_length is None else int(content_length) - - -def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: - if not fut.done(): - fut.set_result(result) - - -_EXC_SENTINEL = BaseException() - - -class ErrorableProtocol(Protocol): - def set_exception( - self, - exc: BaseException, - exc_cause: BaseException = ..., - ) -> None: ... # pragma: no cover - - -def set_exception( - fut: "asyncio.Future[_T] | ErrorableProtocol", - exc: BaseException, - exc_cause: BaseException = _EXC_SENTINEL, -) -> None: - """Set future exception. - - If the future is marked as complete, this function is a no-op. - - :param exc_cause: An exception that is a direct cause of ``exc``. - Only set if provided. - """ - if asyncio.isfuture(fut) and fut.done(): - return - - exc_is_sentinel = exc_cause is _EXC_SENTINEL - exc_causes_itself = exc is exc_cause - if not exc_is_sentinel and not exc_causes_itself: - exc.__cause__ = exc_cause - - fut.set_exception(exc) - - -@functools.total_ordering -class AppKey(Generic[_T]): - """Keys for static typing support in Application.""" - - __slots__ = ("_name", "_t", "__orig_class__") - - # This may be set by Python when instantiating with a generic type. We need to - # support this, in order to support types that are not concrete classes, - # like Iterable, which can't be passed as the second parameter to __init__. - __orig_class__: Type[object] - - def __init__(self, name: str, t: Optional[Type[_T]] = None): - # Prefix with module name to help deduplicate key names. - frame = inspect.currentframe() - while frame: - if frame.f_code.co_name == "": - module: str = frame.f_globals["__name__"] - break - frame = frame.f_back - - self._name = module + "." + name - self._t = t - - def __lt__(self, other: object) -> bool: - if isinstance(other, AppKey): - return self._name < other._name - return True # Order AppKey above other types. - - def __repr__(self) -> str: - t = self._t - if t is None: - with suppress(AttributeError): - # Set to type arg. - t = get_args(self.__orig_class__)[0] - - if t is None: - t_repr = "<>" - elif isinstance(t, type): - if t.__module__ == "builtins": - t_repr = t.__qualname__ - else: - t_repr = f"{t.__module__}.{t.__qualname__}" - else: - t_repr = repr(t) - return f"" - - -class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]): - __slots__ = ("_maps",) - - def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None: - self._maps = tuple(maps) - - def __init_subclass__(cls) -> None: - raise TypeError( - "Inheritance class {} from ChainMapProxy " - "is forbidden".format(cls.__name__) - ) - - @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: ... - - @overload - def __getitem__(self, key: str) -> Any: ... - - def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: - for mapping in self._maps: - try: - return mapping[key] - except KeyError: - pass - raise KeyError(key) - - @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ... - - @overload - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... - - @overload - def get(self, key: str, default: Any = ...) -> Any: ... 
- - def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: - try: - return self[key] - except KeyError: - return default - - def __len__(self) -> int: - # reuses stored hash values if possible - return len(set().union(*self._maps)) - - def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: - d: Dict[Union[str, AppKey[Any]], Any] = {} - for mapping in reversed(self._maps): - # reuses stored hash values if possible - d.update(mapping) - return iter(d) - - def __contains__(self, key: object) -> bool: - return any(key in m for m in self._maps) - - def __bool__(self) -> bool: - return any(self._maps) - - def __repr__(self) -> str: - content = ", ".join(map(repr, self._maps)) - return f"ChainMapProxy({content})" - - -# https://tools.ietf.org/html/rfc7232#section-2.3 -_ETAGC = r"[!\x23-\x7E\x80-\xff]+" -_ETAGC_RE = re.compile(_ETAGC) -_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' -QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) -LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") - -ETAG_ANY = "*" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ETag: - value: str - is_weak: bool = False - - -def validate_etag_value(value: str) -> None: - if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): - raise ValueError( - f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" - ) - - -def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: - """Process a date string, return a datetime object""" - if date_str is not None: - timetuple = parsedate(date_str) - if timetuple is not None: - with suppress(ValueError): - return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) - return None - - -@functools.lru_cache -def must_be_empty_body(method: str, code: int) -> bool: - """Check if a request must return an empty body.""" - return ( - code in EMPTY_BODY_STATUS_CODES - or method in EMPTY_BODY_METHODS - or (200 <= code < 300 and method in hdrs.METH_CONNECT_ALL) - ) - - -def should_remove_content_length(method: str, code: int) -> bool: - """Check if a Content-Length header should be removed. - - This should always be a subset of must_be_empty_body - """ - # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8 - # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4 - return code in EMPTY_BODY_STATUS_CODES or ( - 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL - ) diff --git a/venv/Lib/site-packages/aiohttp/http.py b/venv/Lib/site-packages/aiohttp/http.py deleted file mode 100644 index a1feae2..0000000 --- a/venv/Lib/site-packages/aiohttp/http.py +++ /dev/null @@ -1,72 +0,0 @@ -import sys -from http import HTTPStatus -from typing import Mapping, Tuple - -from . 
import __version__ -from .http_exceptions import HttpProcessingError as HttpProcessingError -from .http_parser import ( - HeadersParser as HeadersParser, - HttpParser as HttpParser, - HttpRequestParser as HttpRequestParser, - HttpResponseParser as HttpResponseParser, - RawRequestMessage as RawRequestMessage, - RawResponseMessage as RawResponseMessage, -) -from .http_websocket import ( - WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, - WS_KEY as WS_KEY, - WebSocketError as WebSocketError, - WebSocketReader as WebSocketReader, - WebSocketWriter as WebSocketWriter, - WSCloseCode as WSCloseCode, - WSMessage as WSMessage, - WSMsgType as WSMsgType, - ws_ext_gen as ws_ext_gen, - ws_ext_parse as ws_ext_parse, -) -from .http_writer import ( - HttpVersion as HttpVersion, - HttpVersion10 as HttpVersion10, - HttpVersion11 as HttpVersion11, - StreamWriter as StreamWriter, -) - -__all__ = ( - "HttpProcessingError", - "RESPONSES", - "SERVER_SOFTWARE", - # .http_writer - "StreamWriter", - "HttpVersion", - "HttpVersion10", - "HttpVersion11", - # .http_parser - "HeadersParser", - "HttpParser", - "HttpRequestParser", - "HttpResponseParser", - "RawRequestMessage", - "RawResponseMessage", - # .http_websocket - "WS_CLOSED_MESSAGE", - "WS_CLOSING_MESSAGE", - "WS_KEY", - "WebSocketReader", - "WebSocketWriter", - "ws_ext_gen", - "ws_ext_parse", - "WSMessage", - "WebSocketError", - "WSMsgType", - "WSCloseCode", -) - - -SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( - sys.version_info, __version__ -) - -RESPONSES: Mapping[int, Tuple[str, str]] = { - v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() -} diff --git a/venv/Lib/site-packages/aiohttp/http_exceptions.py b/venv/Lib/site-packages/aiohttp/http_exceptions.py deleted file mode 100644 index 0b5867c..0000000 --- a/venv/Lib/site-packages/aiohttp/http_exceptions.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Low-level http related exceptions.""" - -from textwrap import indent -from typing import Optional, Union - -from .typedefs import _CIMultiDict - -__all__ = ("HttpProcessingError",) - - -class HttpProcessingError(Exception): - """HTTP error. - - Shortcut for raising HTTP errors with custom code, message and headers. - - code: HTTP Error code. - message: (optional) Error message. 
- headers: (optional) Headers to be sent in response, a list of pairs - """ - - code = 0 - message = "" - headers = None - - def __init__( - self, - *, - code: Optional[int] = None, - message: str = "", - headers: Optional[_CIMultiDict] = None, - ) -> None: - if code is not None: - self.code = code - self.headers = headers - self.message = message - - def __str__(self) -> str: - msg = indent(self.message, " ") - return f"{self.code}, message:\n{msg}" - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>" - - -class BadHttpMessage(HttpProcessingError): - - code = 400 - message = "Bad Request" - - def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: - super().__init__(message=message, headers=headers) - self.args = (message,) - - -class HttpBadRequest(BadHttpMessage): - - code = 400 - message = "Bad Request" - - -class PayloadEncodingError(BadHttpMessage): - """Base class for payload errors""" - - -class ContentEncodingError(PayloadEncodingError): - """Content encoding error.""" - - -class TransferEncodingError(PayloadEncodingError): - """transfer encoding error.""" - - -class ContentLengthError(PayloadEncodingError): - """Not enough data to satisfy content length header.""" - - -class DecompressSizeError(PayloadEncodingError): - """Decompressed size exceeds the configured limit.""" - - -class LineTooLong(BadHttpMessage): - def __init__( - self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" - ) -> None: - super().__init__( - f"Got more than {limit} bytes ({actual_size}) when reading {line}." - ) - self.args = (line, limit, actual_size) - - -class InvalidHeader(BadHttpMessage): - def __init__(self, hdr: Union[bytes, str]) -> None: - hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr - super().__init__(f"Invalid HTTP header: {hdr!r}") - self.hdr = hdr_s - self.args = (hdr,) - - -class BadStatusLine(BadHttpMessage): - def __init__(self, line: str = "", error: Optional[str] = None) -> None: - if not isinstance(line, str): - line = repr(line) - super().__init__(error or f"Bad status line {line!r}") - self.args = (line,) - self.line = line - - -class BadHttpMethod(BadStatusLine): - """Invalid HTTP method in status line.""" - - def __init__(self, line: str = "", error: Optional[str] = None) -> None: - super().__init__(line, error or f"Bad HTTP method in status line {line!r}") - - -class InvalidURLError(BadHttpMessage): - pass diff --git a/venv/Lib/site-packages/aiohttp/http_parser.py b/venv/Lib/site-packages/aiohttp/http_parser.py deleted file mode 100644 index 393e76a..0000000 --- a/venv/Lib/site-packages/aiohttp/http_parser.py +++ /dev/null @@ -1,1086 +0,0 @@ -import abc -import asyncio -import re -import string -from contextlib import suppress -from enum import IntEnum -from typing import ( - Any, - ClassVar, - Final, - Generic, - List, - Literal, - NamedTuple, - Optional, - Pattern, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -from multidict import CIMultiDict, CIMultiDictProxy, istr -from yarl import URL - -from . 
import hdrs -from .base_protocol import BaseProtocol -from .compression_utils import ( - DEFAULT_MAX_DECOMPRESS_SIZE, - HAS_BROTLI, - HAS_ZSTD, - BrotliDecompressor, - ZLibDecompressor, - ZSTDDecompressor, -) -from .helpers import ( - _EXC_SENTINEL, - DEBUG, - EMPTY_BODY_METHODS, - EMPTY_BODY_STATUS_CODES, - NO_EXTENSIONS, - BaseTimerContext, - set_exception, -) -from .http_exceptions import ( - BadHttpMessage, - BadHttpMethod, - BadStatusLine, - ContentEncodingError, - ContentLengthError, - DecompressSizeError, - InvalidHeader, - InvalidURLError, - LineTooLong, - TransferEncodingError, -) -from .http_writer import HttpVersion, HttpVersion10 -from .streams import EMPTY_PAYLOAD, StreamReader -from .typedefs import RawHeaders - -__all__ = ( - "HeadersParser", - "HttpParser", - "HttpRequestParser", - "HttpResponseParser", - "RawRequestMessage", - "RawResponseMessage", -) - -_SEP = Literal[b"\r\n", b"\n"] - -ASCIISET: Final[Set[str]] = set(string.printable) - -# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview -# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens -# -# method = token -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / -# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA -# token = 1*tchar -_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~") -TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+") -VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII) -DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII) -HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+") - - -class RawRequestMessage(NamedTuple): - method: str - path: str - version: HttpVersion - headers: "CIMultiDictProxy[str]" - raw_headers: RawHeaders - should_close: bool - compression: Optional[str] - upgrade: bool - chunked: bool - url: URL - - -class RawResponseMessage(NamedTuple): - version: HttpVersion - code: int - reason: str - headers: CIMultiDictProxy[str] - raw_headers: RawHeaders - should_close: bool - compression: Optional[str] - upgrade: bool - chunked: bool - - -_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) - - -class ParseState(IntEnum): - - PARSE_NONE = 0 - PARSE_LENGTH = 1 - PARSE_CHUNKED = 2 - PARSE_UNTIL_EOF = 3 - - -class ChunkState(IntEnum): - PARSE_CHUNKED_SIZE = 0 - PARSE_CHUNKED_CHUNK = 1 - PARSE_CHUNKED_CHUNK_EOF = 2 - PARSE_MAYBE_TRAILERS = 3 - PARSE_TRAILERS = 4 - - -class HeadersParser: - def __init__( - self, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - lax: bool = False, - ) -> None: - self.max_line_size = max_line_size - self.max_headers = max_headers - self.max_field_size = max_field_size - self._lax = lax - - def parse_headers( - self, lines: List[bytes] - ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: - headers: CIMultiDict[str] = CIMultiDict() - # note: "raw" does not mean inclusion of OWS before/after the field value - raw_headers = [] - - lines_idx = 0 - line = lines[lines_idx] - line_count = len(lines) - - while line: - # Parse initial header name : value pair. 
- try: - bname, bvalue = line.split(b":", 1) - except ValueError: - raise InvalidHeader(line) from None - - if len(bname) == 0: - raise InvalidHeader(bname) - - # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 - if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} - raise InvalidHeader(line) - - bvalue = bvalue.lstrip(b" \t") - if len(bname) > self.max_field_size: - raise LineTooLong( - "request header name {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(len(bname)), - ) - name = bname.decode("utf-8", "surrogateescape") - if not TOKENRE.fullmatch(name): - raise InvalidHeader(bname) - - header_length = len(bvalue) - - # next line - lines_idx += 1 - line = lines[lines_idx] - - # consume continuation lines - continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') - - # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding - if continuation: - bvalue_lst = [bvalue] - while continuation: - header_length += len(line) - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(header_length), - ) - bvalue_lst.append(line) - - # next line - lines_idx += 1 - if lines_idx < line_count: - line = lines[lines_idx] - if line: - continuation = line[0] in (32, 9) # (' ', '\t') - else: - line = b"" - break - bvalue = b"".join(bvalue_lst) - else: - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(header_length), - ) - - bvalue = bvalue.strip(b" \t") - value = bvalue.decode("utf-8", "surrogateescape") - - # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 - if "\n" in value or "\r" in value or "\x00" in value: - raise InvalidHeader(bvalue) - - headers.add(name, value) - raw_headers.append((bname, bvalue)) - - return (CIMultiDictProxy(headers), tuple(raw_headers)) - - -def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: - """Check if the upgrade header is supported.""" - u = headers.get(hdrs.UPGRADE, "") - # .lower() can transform non-ascii characters. 
- return u.isascii() and u.lower() in {"tcp", "websocket"} - - -class HttpParser(abc.ABC, Generic[_MsgT]): - lax: ClassVar[bool] = False - - def __init__( - self, - protocol: Optional[BaseProtocol] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - limit: int = 2**16, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - timer: Optional[BaseTimerContext] = None, - code: Optional[int] = None, - method: Optional[str] = None, - payload_exception: Optional[Type[BaseException]] = None, - response_with_body: bool = True, - read_until_eof: bool = False, - auto_decompress: bool = True, - ) -> None: - self.protocol = protocol - self.loop = loop - self.max_line_size = max_line_size - self.max_headers = max_headers - self.max_field_size = max_field_size - self.timer = timer - self.code = code - self.method = method - self.payload_exception = payload_exception - self.response_with_body = response_with_body - self.read_until_eof = read_until_eof - - self._lines: List[bytes] = [] - self._tail = b"" - self._upgraded = False - self._payload = None - self._payload_parser: Optional[HttpPayloadParser] = None - self._auto_decompress = auto_decompress - self._limit = limit - self._headers_parser = HeadersParser( - max_line_size, max_headers, max_field_size, self.lax - ) - - @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: ... - - @abc.abstractmethod - def _is_chunked_te(self, te: str) -> bool: ... - - def feed_eof(self) -> Optional[_MsgT]: - if self._payload_parser is not None: - self._payload_parser.feed_eof() - self._payload_parser = None - else: - # try to extract partial message - if self._tail: - self._lines.append(self._tail) - - if self._lines: - if self._lines[-1] != "\r\n": - self._lines.append(b"") - with suppress(Exception): - return self.parse_message(self._lines) - return None - - def feed_data( - self, - data: bytes, - SEP: _SEP = b"\r\n", - EMPTY: bytes = b"", - CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, - METH_CONNECT: str = hdrs.METH_CONNECT, - SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, - ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: - - messages = [] - - if self._tail: - data, self._tail = self._tail + data, b"" - - data_len = len(data) - start_pos = 0 - loop = self.loop - - should_close = False - while start_pos < data_len: - - # read HTTP message (request/response line + headers), \r\n\r\n - # and split by lines - if self._payload_parser is None and not self._upgraded: - pos = data.find(SEP, start_pos) - # consume \r\n - if pos == start_pos and not self._lines: - start_pos = pos + len(SEP) - continue - - if pos >= start_pos: - if should_close: - raise BadHttpMessage("Data after `Connection: close`") - - # line found - line = data[start_pos:pos] - if SEP == b"\n": # For lax response parsing - line = line.rstrip(b"\r") - self._lines.append(line) - start_pos = pos + len(SEP) - - # \r\n\r\n found - if self._lines[-1] == EMPTY: - try: - msg: _MsgT = self.parse_message(self._lines) - finally: - self._lines.clear() - - def get_content_length() -> Optional[int]: - # payload length - length_hdr = msg.headers.get(CONTENT_LENGTH) - if length_hdr is None: - return None - - # Shouldn't allow +/- or other number formats. 
- # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 - # msg.headers is already stripped of leading/trailing wsp - if not DIGITS.fullmatch(length_hdr): - raise InvalidHeader(CONTENT_LENGTH) - - return int(length_hdr) - - length = get_content_length() - # do not support old websocket spec - if SEC_WEBSOCKET_KEY1 in msg.headers: - raise InvalidHeader(SEC_WEBSOCKET_KEY1) - - self._upgraded = msg.upgrade and _is_supported_upgrade( - msg.headers - ) - - method = getattr(msg, "method", self.method) - # code is only present on responses - code = getattr(msg, "code", 0) - - assert self.protocol is not None - # calculate payload - empty_body = code in EMPTY_BODY_STATUS_CODES or bool( - method and method in EMPTY_BODY_METHODS - ) - if not empty_body and ( - ((length is not None and length > 0) or msg.chunked) - and not self._upgraded - ): - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - payload_parser = HttpPayloadParser( - payload, - length=length, - chunked=msg.chunked, - method=method, - compression=msg.compression, - code=self.code, - response_with_body=self.response_with_body, - auto_decompress=self._auto_decompress, - lax=self.lax, - headers_parser=self._headers_parser, - ) - if not payload_parser.done: - self._payload_parser = payload_parser - elif method == METH_CONNECT: - assert isinstance(msg, RawRequestMessage) - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - self._upgraded = True - self._payload_parser = HttpPayloadParser( - payload, - method=msg.method, - compression=msg.compression, - auto_decompress=self._auto_decompress, - lax=self.lax, - headers_parser=self._headers_parser, - ) - elif not empty_body and length is None and self.read_until_eof: - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - payload_parser = HttpPayloadParser( - payload, - length=length, - chunked=msg.chunked, - method=method, - compression=msg.compression, - code=self.code, - response_with_body=self.response_with_body, - auto_decompress=self._auto_decompress, - lax=self.lax, - headers_parser=self._headers_parser, - ) - if not payload_parser.done: - self._payload_parser = payload_parser - else: - payload = EMPTY_PAYLOAD - - messages.append((msg, payload)) - should_close = msg.should_close - else: - self._tail = data[start_pos:] - data = EMPTY - break - - # no parser, just store - elif self._payload_parser is None and self._upgraded: - assert not self._lines - break - - # feed payload - elif data and start_pos < data_len: - assert not self._lines - assert self._payload_parser is not None - try: - eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) - except BaseException as underlying_exc: - reraised_exc = underlying_exc - if self.payload_exception is not None: - reraised_exc = self.payload_exception(str(underlying_exc)) - - set_exception( - self._payload_parser.payload, - reraised_exc, - underlying_exc, - ) - - eof = True - data = b"" - if isinstance( - underlying_exc, (InvalidHeader, TransferEncodingError) - ): - raise - - if eof: - start_pos = 0 - data_len = len(data) - self._payload_parser = None - continue - else: - break - - if data and start_pos < data_len: - data = data[start_pos:] - else: - data = EMPTY - - return messages, self._upgraded, data - - def parse_headers( - self, lines: List[bytes] - ) -> Tuple[ - "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool - ]: - """Parses RFC 5322 headers from a 
stream. - - Line continuations are supported. Returns list of header name - and value pairs. Header name is in upper case. - """ - headers, raw_headers = self._headers_parser.parse_headers(lines) - close_conn = None - encoding = None - upgrade = False - chunked = False - - # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6 - # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf - singletons = ( - hdrs.CONTENT_LENGTH, - hdrs.CONTENT_LOCATION, - hdrs.CONTENT_RANGE, - hdrs.CONTENT_TYPE, - hdrs.ETAG, - hdrs.HOST, - hdrs.MAX_FORWARDS, - hdrs.SERVER, - hdrs.TRANSFER_ENCODING, - hdrs.USER_AGENT, - ) - bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None) - if bad_hdr is not None: - raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.") - - # keep-alive - conn = headers.get(hdrs.CONNECTION) - if conn: - v = conn.lower() - if v == "close": - close_conn = True - elif v == "keep-alive": - close_conn = False - # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols - elif v == "upgrade" and headers.get(hdrs.UPGRADE): - upgrade = True - - # encoding - enc = headers.get(hdrs.CONTENT_ENCODING, "") - if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: - encoding = enc - - # chunking - te = headers.get(hdrs.TRANSFER_ENCODING) - if te is not None: - if self._is_chunked_te(te): - chunked = True - - if hdrs.CONTENT_LENGTH in headers: - raise BadHttpMessage( - "Transfer-Encoding can't be present with Content-Length", - ) - - return (headers, raw_headers, close_conn, encoding, upgrade, chunked) - - def set_upgraded(self, val: bool) -> None: - """Set connection upgraded (to websocket) mode. - - :param bool val: new state. - """ - self._upgraded = val - - -class HttpRequestParser(HttpParser[RawRequestMessage]): - """Read request status line. - - Exception .http_exceptions.BadStatusLine - could be raised in case of any errors in status line. - Returns RawRequestMessage. 
- """ - - def parse_message(self, lines: List[bytes]) -> RawRequestMessage: - # request line - line = lines[0].decode("utf-8", "surrogateescape") - try: - method, path, version = line.split(" ", maxsplit=2) - except ValueError: - raise BadHttpMethod(line) from None - - if len(path) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(path)) - ) - - # method - if not TOKENRE.fullmatch(method): - raise BadHttpMethod(method) - - # version - match = VERSRE.fullmatch(version) - if match is None: - raise BadStatusLine(line) - version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - - if method == "CONNECT": - # authority-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 - url = URL.build(authority=path, encoded=True) - elif path.startswith("/"): - # origin-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 - path_part, _hash_separator, url_fragment = path.partition("#") - path_part, _question_mark_separator, qs_part = path_part.partition("?") - - # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based - # NOTE: parser does, otherwise it results into the same - # NOTE: HTTP Request-Line input producing different - # NOTE: `yarl.URL()` objects - url = URL.build( - path=path_part, - query_string=qs_part, - fragment=url_fragment, - encoded=True, - ) - elif path == "*" and method == "OPTIONS": - # asterisk-form, - url = URL(path, encoded=True) - else: - # absolute-form for proxy maybe, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 - url = URL(path, encoded=True) - if url.scheme == "": - # not absolute-form - raise InvalidURLError( - path.encode(errors="surrogateescape").decode("latin1") - ) - - # read headers - ( - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) = self.parse_headers(lines[1:]) - - if close is None: # then the headers weren't set in the request - if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close - close = True - else: # HTTP 1.1 must ask to close. - close = False - - return RawRequestMessage( - method, - path, - version_o, - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - url, - ) - - def _is_chunked_te(self, te: str) -> bool: - te = te.rsplit(",", maxsplit=1)[-1].strip(" \t") - # .lower() transforms some non-ascii chars, so must check first. - if te.isascii() and te.lower() == "chunked": - return True - # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 - raise BadHttpMessage("Request has invalid `Transfer-Encoding`") - - -class HttpResponseParser(HttpParser[RawResponseMessage]): - """Read response status line and headers. - - BadStatusLine could be raised in case of any errors in status line. - Returns RawResponseMessage. - """ - - # Lax mode should only be enabled on response parser. 
- lax = not DEBUG - - def feed_data( - self, - data: bytes, - SEP: Optional[_SEP] = None, - *args: Any, - **kwargs: Any, - ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]: - if SEP is None: - SEP = b"\r\n" if DEBUG else b"\n" - return super().feed_data(data, SEP, *args, **kwargs) - - def parse_message(self, lines: List[bytes]) -> RawResponseMessage: - line = lines[0].decode("utf-8", "surrogateescape") - try: - version, status = line.split(maxsplit=1) - except ValueError: - raise BadStatusLine(line) from None - - try: - status, reason = status.split(maxsplit=1) - except ValueError: - status = status.strip() - reason = "" - - if len(reason) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(reason)) - ) - - # version - match = VERSRE.fullmatch(version) - if match is None: - raise BadStatusLine(line) - version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - - # The status code is a three-digit ASCII number, no padding - if len(status) != 3 or not DIGITS.fullmatch(status): - raise BadStatusLine(line) - status_i = int(status) - - # read headers - ( - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) = self.parse_headers(lines[1:]) - - if close is None: - if version_o <= HttpVersion10: - close = True - # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length - elif 100 <= status_i < 200 or status_i in {204, 304}: - close = False - elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: - close = False - else: - # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 - close = True - - return RawResponseMessage( - version_o, - status_i, - reason.strip(), - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) - - def _is_chunked_te(self, te: str) -> bool: - # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 - return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked" - - -class HttpPayloadParser: - def __init__( - self, - payload: StreamReader, - length: Optional[int] = None, - chunked: bool = False, - compression: Optional[str] = None, - code: Optional[int] = None, - method: Optional[str] = None, - response_with_body: bool = True, - auto_decompress: bool = True, - lax: bool = False, - *, - headers_parser: HeadersParser, - ) -> None: - self._length = 0 - self._type = ParseState.PARSE_UNTIL_EOF - self._chunk = ChunkState.PARSE_CHUNKED_SIZE - self._chunk_size = 0 - self._chunk_tail = b"" - self._auto_decompress = auto_decompress - self._lax = lax - self._headers_parser = headers_parser - self._trailer_lines: list[bytes] = [] - self.done = False - - # payload decompression wrapper - if response_with_body and compression and self._auto_decompress: - real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( - payload, compression - ) - else: - real_payload = payload - - # payload parser - if not response_with_body: - # don't parse payload if it's not expected to be received - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True - elif chunked: - self._type = ParseState.PARSE_CHUNKED - elif length is not None: - self._type = ParseState.PARSE_LENGTH - self._length = length - if self._length == 0: - real_payload.feed_eof() - self.done = True - - self.payload = real_payload - - def feed_eof(self) -> None: - if self._type == ParseState.PARSE_UNTIL_EOF: - self.payload.feed_eof() - elif self._type == ParseState.PARSE_LENGTH: - raise ContentLengthError( - "Not enough data to satisfy 
content length header." - ) - elif self._type == ParseState.PARSE_CHUNKED: - raise TransferEncodingError( - "Not enough data to satisfy transfer length header." - ) - - def feed_data( - self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";" - ) -> Tuple[bool, bytes]: - # Read specified amount of bytes - if self._type == ParseState.PARSE_LENGTH: - required = self._length - chunk_len = len(chunk) - - if required >= chunk_len: - self._length = required - chunk_len - self.payload.feed_data(chunk, chunk_len) - if self._length == 0: - self.payload.feed_eof() - return True, b"" - else: - self._length = 0 - self.payload.feed_data(chunk[:required], required) - self.payload.feed_eof() - return True, chunk[required:] - - # Chunked transfer encoding parser - elif self._type == ParseState.PARSE_CHUNKED: - if self._chunk_tail: - chunk = self._chunk_tail + chunk - self._chunk_tail = b"" - - while chunk: - - # read next chunk size - if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: - pos = chunk.find(SEP) - if pos >= 0: - i = chunk.find(CHUNK_EXT, 0, pos) - if i >= 0: - size_b = chunk[:i] # strip chunk-extensions - # Verify no LF in the chunk-extension - if b"\n" in (ext := chunk[i:pos]): - exc = TransferEncodingError( - f"Unexpected LF in chunk-extension: {ext!r}" - ) - set_exception(self.payload, exc) - raise exc - else: - size_b = chunk[:pos] - - if self._lax: # Allow whitespace in lax mode. - size_b = size_b.strip() - - if not re.fullmatch(HEXDIGITS, size_b): - exc = TransferEncodingError( - chunk[:pos].decode("ascii", "surrogateescape") - ) - set_exception(self.payload, exc) - raise exc - size = int(bytes(size_b), 16) - - chunk = chunk[pos + len(SEP) :] - if size == 0: # eof marker - self._chunk = ChunkState.PARSE_TRAILERS - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] - else: - self._chunk = ChunkState.PARSE_CHUNKED_CHUNK - self._chunk_size = size - self.payload.begin_http_chunk_receiving() - else: - self._chunk_tail = chunk - return False, b"" - - # read chunk and feed buffer - if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: - required = self._chunk_size - chunk_len = len(chunk) - - if required > chunk_len: - self._chunk_size = required - chunk_len - self.payload.feed_data(chunk, chunk_len) - return False, b"" - else: - self._chunk_size = 0 - self.payload.feed_data(chunk[:required], required) - chunk = chunk[required:] - self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF - self.payload.end_http_chunk_receiving() - - # toss the CRLF at the end of the chunk - if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] - if chunk[: len(SEP)] == SEP: - chunk = chunk[len(SEP) :] - self._chunk = ChunkState.PARSE_CHUNKED_SIZE - else: - self._chunk_tail = chunk - return False, b"" - - if self._chunk == ChunkState.PARSE_TRAILERS: - pos = chunk.find(SEP) - if pos < 0: # No line found - self._chunk_tail = chunk - return False, b"" - - line = chunk[:pos] - chunk = chunk[pos + len(SEP) :] - if SEP == b"\n": # For lax response parsing - line = line.rstrip(b"\r") - self._trailer_lines.append(line) - - # \r\n\r\n found, end of stream - if self._trailer_lines[-1] == b"": - # Headers and trailers are defined the same way, - # so we reuse the HeadersParser here. 
- try: - trailers, raw_trailers = self._headers_parser.parse_headers( - self._trailer_lines - ) - finally: - self._trailer_lines.clear() - self.payload.feed_eof() - return True, chunk - - # Read all bytes until eof - elif self._type == ParseState.PARSE_UNTIL_EOF: - self.payload.feed_data(chunk, len(chunk)) - - return False, b"" - - -class DeflateBuffer: - """DeflateStream decompress stream and feed data into specified stream.""" - - decompressor: Any - - def __init__( - self, - out: StreamReader, - encoding: Optional[str], - max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, - ) -> None: - self.out = out - self.size = 0 - out.total_compressed_bytes = self.size - self.encoding = encoding - self._started_decoding = False - - self.decompressor: Union[BrotliDecompressor, ZLibDecompressor, ZSTDDecompressor] - if encoding == "br": - if not HAS_BROTLI: # pragma: no cover - raise ContentEncodingError( - "Can not decode content-encoding: brotli (br). " - "Please install `Brotli`" - ) - self.decompressor = BrotliDecompressor() - elif encoding == "zstd": - if not HAS_ZSTD: - raise ContentEncodingError( - "Can not decode content-encoding: zstandard (zstd). " - "Please install `backports.zstd`" - ) - self.decompressor = ZSTDDecompressor() - else: - self.decompressor = ZLibDecompressor(encoding=encoding) - - self._max_decompress_size = max_decompress_size - - def set_exception( - self, - exc: BaseException, - exc_cause: BaseException = _EXC_SENTINEL, - ) -> None: - set_exception(self.out, exc, exc_cause) - - def feed_data(self, chunk: bytes, size: int) -> None: - if not size: - return - - self.size += size - self.out.total_compressed_bytes = self.size - - # RFC1950 - # bits 0..3 = CM = 0b1000 = 8 = "deflate" - # bits 4..7 = CINFO = 1..7 = windows size. - if ( - not self._started_decoding - and self.encoding == "deflate" - and chunk[0] & 0xF != 8 - ): - # Change the decoder to decompress incorrectly compressed data - # Actually we should issue a warning about non-RFC-compliant data. 
- self.decompressor = ZLibDecompressor( - encoding=self.encoding, suppress_deflate_header=True - ) - - try: - # Decompress with limit + 1 so we can detect if output exceeds limit - chunk = self.decompressor.decompress_sync( - chunk, max_length=self._max_decompress_size + 1 - ) - except Exception: - raise ContentEncodingError( - "Can not decode content-encoding: %s" % self.encoding - ) - - self._started_decoding = True - - # Check if decompression limit was exceeded - if len(chunk) > self._max_decompress_size: - raise DecompressSizeError( - "Decompressed data exceeds the configured limit of %d bytes" - % self._max_decompress_size - ) - - if chunk: - self.out.feed_data(chunk, len(chunk)) - - def feed_eof(self) -> None: - chunk = self.decompressor.flush() - - if chunk or self.size > 0: - self.out.feed_data(chunk, len(chunk)) - if self.encoding == "deflate" and not self.decompressor.eof: - raise ContentEncodingError("deflate") - - self.out.feed_eof() - - def begin_http_chunk_receiving(self) -> None: - self.out.begin_http_chunk_receiving() - - def end_http_chunk_receiving(self) -> None: - self.out.end_http_chunk_receiving() - - -HttpRequestParserPy = HttpRequestParser -HttpResponseParserPy = HttpResponseParser -RawRequestMessagePy = RawRequestMessage -RawResponseMessagePy = RawResponseMessage - -try: - if not NO_EXTENSIONS: - from ._http_parser import ( # type: ignore[import-not-found,no-redef] - HttpRequestParser, - HttpResponseParser, - RawRequestMessage, - RawResponseMessage, - ) - - HttpRequestParserC = HttpRequestParser - HttpResponseParserC = HttpResponseParser - RawRequestMessageC = RawRequestMessage - RawResponseMessageC = RawResponseMessage -except ImportError: # pragma: no cover - pass diff --git a/venv/Lib/site-packages/aiohttp/http_websocket.py b/venv/Lib/site-packages/aiohttp/http_websocket.py deleted file mode 100644 index 6b4b30e..0000000 --- a/venv/Lib/site-packages/aiohttp/http_websocket.py +++ /dev/null @@ -1,36 +0,0 @@ -"""WebSocket protocol versions 13 and 8.""" - -from ._websocket.helpers import WS_KEY, ws_ext_gen, ws_ext_parse -from ._websocket.models import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WebSocketError, - WSCloseCode, - WSHandshakeError, - WSMessage, - WSMsgType, -) -from ._websocket.reader import WebSocketReader -from ._websocket.writer import WebSocketWriter - -# Messages that the WebSocketResponse.receive needs to handle internally -_INTERNAL_RECEIVE_TYPES = frozenset( - (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG) -) - - -__all__ = ( - "WS_CLOSED_MESSAGE", - "WS_CLOSING_MESSAGE", - "WS_KEY", - "WebSocketReader", - "WebSocketWriter", - "WSMessage", - "WebSocketError", - "WSMsgType", - "WSCloseCode", - "ws_ext_gen", - "ws_ext_parse", - "WSHandshakeError", - "WSMessage", -) diff --git a/venv/Lib/site-packages/aiohttp/http_writer.py b/venv/Lib/site-packages/aiohttp/http_writer.py deleted file mode 100644 index a140b21..0000000 --- a/venv/Lib/site-packages/aiohttp/http_writer.py +++ /dev/null @@ -1,378 +0,0 @@ -"""Http related parsers and protocol.""" - -import asyncio -import sys -from typing import ( # noqa - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable, - List, - NamedTuple, - Optional, - Union, -) - -from multidict import CIMultiDict - -from .abc import AbstractStreamWriter -from .base_protocol import BaseProtocol -from .client_exceptions import ClientConnectionResetError -from .compression_utils import ZLibCompressor -from .helpers import NO_EXTENSIONS - -__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", 
"HttpVersion11") - - -MIN_PAYLOAD_FOR_WRITELINES = 2048 -IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2) -IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9) -SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9 -# writelines is not safe for use -# on Python 3.12+ until 3.12.9 -# on Python 3.13+ until 3.13.2 -# and on older versions it not any faster than write -# CVE-2024-12254: https://github.com/python/cpython/pull/127656 - - -class HttpVersion(NamedTuple): - major: int - minor: int - - -HttpVersion10 = HttpVersion(1, 0) -HttpVersion11 = HttpVersion(1, 1) - - -_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] -_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]] - - -class StreamWriter(AbstractStreamWriter): - - length: Optional[int] = None - chunked: bool = False - _eof: bool = False - _compress: Optional[ZLibCompressor] = None - - def __init__( - self, - protocol: BaseProtocol, - loop: asyncio.AbstractEventLoop, - on_chunk_sent: _T_OnChunkSent = None, - on_headers_sent: _T_OnHeadersSent = None, - ) -> None: - self._protocol = protocol - self.loop = loop - self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent - self._on_headers_sent: _T_OnHeadersSent = on_headers_sent - self._headers_buf: Optional[bytes] = None - self._headers_written: bool = False - - @property - def transport(self) -> Optional[asyncio.Transport]: - return self._protocol.transport - - @property - def protocol(self) -> BaseProtocol: - return self._protocol - - def enable_chunking(self) -> None: - self.chunked = True - - def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None - ) -> None: - self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) - - def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: - size = len(chunk) - self.buffer_size += size - self.output_size += size - transport = self._protocol.transport - if transport is None or transport.is_closing(): - raise ClientConnectionResetError("Cannot write to closing transport") - transport.write(chunk) - - def _writelines(self, chunks: Iterable[bytes]) -> None: - size = 0 - for chunk in chunks: - size += len(chunk) - self.buffer_size += size - self.output_size += size - transport = self._protocol.transport - if transport is None or transport.is_closing(): - raise ClientConnectionResetError("Cannot write to closing transport") - if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES: - transport.write(b"".join(chunks)) - else: - transport.writelines(chunks) - - def _write_chunked_payload( - self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] - ) -> None: - """Write a chunk with proper chunked encoding.""" - chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii") - self._writelines((chunk_len_pre, chunk, b"\r\n")) - - def _send_headers_with_payload( - self, - chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"], - is_eof: bool, - ) -> None: - """Send buffered headers with payload, coalescing into single write.""" - # Mark headers as written - self._headers_written = True - headers_buf = self._headers_buf - self._headers_buf = None - - if TYPE_CHECKING: - # Safe because callers (write() and write_eof()) only invoke this method - # after checking that self._headers_buf is truthy - assert headers_buf is not None - - if not self.chunked: - # Non-chunked: coalesce headers with body - if chunk: - self._writelines((headers_buf, chunk)) - else: - self._write(headers_buf) - return - - # Coalesce headers with 
chunked data - if chunk: - chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii") - if is_eof: - self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n0\r\n\r\n")) - else: - self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n")) - elif is_eof: - self._writelines((headers_buf, b"0\r\n\r\n")) - else: - self._write(headers_buf) - - async def write( - self, - chunk: Union[bytes, bytearray, memoryview], - *, - drain: bool = True, - LIMIT: int = 0x10000, - ) -> None: - """ - Writes chunk of data to a stream. - - write_eof() indicates end of stream. - writer can't be used after write_eof() method being called. - write() return drain future. - """ - if self._on_chunk_sent is not None: - await self._on_chunk_sent(chunk) - - if isinstance(chunk, memoryview): - if chunk.nbytes != len(chunk): - # just reshape it - chunk = chunk.cast("c") - - if self._compress is not None: - chunk = await self._compress.compress(chunk) - if not chunk: - return - - if self.length is not None: - chunk_len = len(chunk) - if self.length >= chunk_len: - self.length = self.length - chunk_len - else: - chunk = chunk[: self.length] - self.length = 0 - if not chunk: - return - - # Handle buffered headers for small payload optimization - if self._headers_buf and not self._headers_written: - self._send_headers_with_payload(chunk, False) - if drain and self.buffer_size > LIMIT: - self.buffer_size = 0 - await self.drain() - return - - if chunk: - if self.chunked: - self._write_chunked_payload(chunk) - else: - self._write(chunk) - - if drain and self.buffer_size > LIMIT: - self.buffer_size = 0 - await self.drain() - - async def write_headers( - self, status_line: str, headers: "CIMultiDict[str]" - ) -> None: - """Write headers to the stream.""" - if self._on_headers_sent is not None: - await self._on_headers_sent(headers) - # status + headers - buf = _serialize_headers(status_line, headers) - self._headers_written = False - self._headers_buf = buf - - def send_headers(self) -> None: - """Force sending buffered headers if not already sent.""" - if not self._headers_buf or self._headers_written: - return - - self._headers_written = True - headers_buf = self._headers_buf - self._headers_buf = None - - if TYPE_CHECKING: - # Safe because we only enter this block when self._headers_buf is truthy - assert headers_buf is not None - - self._write(headers_buf) - - def set_eof(self) -> None: - """Indicate that the message is complete.""" - if self._eof: - return - - # If headers haven't been sent yet, send them now - # This handles the case where there's no body at all - if self._headers_buf and not self._headers_written: - self._headers_written = True - headers_buf = self._headers_buf - self._headers_buf = None - - if TYPE_CHECKING: - # Safe because we only enter this block when self._headers_buf is truthy - assert headers_buf is not None - - # Combine headers and chunked EOF marker in a single write - if self.chunked: - self._writelines((headers_buf, b"0\r\n\r\n")) - else: - self._write(headers_buf) - elif self.chunked and self._headers_written: - # Headers already sent, just send the final chunk marker - self._write(b"0\r\n\r\n") - - self._eof = True - - async def write_eof(self, chunk: bytes = b"") -> None: - if self._eof: - return - - if chunk and self._on_chunk_sent is not None: - await self._on_chunk_sent(chunk) - - # Handle body/compression - if self._compress: - chunks: List[bytes] = [] - chunks_len = 0 - if chunk and (compressed_chunk := await self._compress.compress(chunk)): - chunks_len = len(compressed_chunk) - 
chunks.append(compressed_chunk) - - flush_chunk = self._compress.flush() - chunks_len += len(flush_chunk) - chunks.append(flush_chunk) - assert chunks_len - - # Send buffered headers with compressed data if not yet sent - if self._headers_buf and not self._headers_written: - self._headers_written = True - headers_buf = self._headers_buf - self._headers_buf = None - - if self.chunked: - # Coalesce headers with compressed chunked data - chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii") - self._writelines( - (headers_buf, chunk_len_pre, *chunks, b"\r\n0\r\n\r\n") - ) - else: - # Coalesce headers with compressed data - self._writelines((headers_buf, *chunks)) - await self.drain() - self._eof = True - return - - # Headers already sent, just write compressed data - if self.chunked: - chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii") - self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n")) - elif len(chunks) > 1: - self._writelines(chunks) - else: - self._write(chunks[0]) - await self.drain() - self._eof = True - return - - # No compression - send buffered headers if not yet sent - if self._headers_buf and not self._headers_written: - # Use helper to send headers with payload - self._send_headers_with_payload(chunk, True) - await self.drain() - self._eof = True - return - - # Handle remaining body - if self.chunked: - if chunk: - # Write final chunk with EOF marker - self._writelines( - (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n0\r\n\r\n") - ) - else: - self._write(b"0\r\n\r\n") - await self.drain() - self._eof = True - return - - if chunk: - self._write(chunk) - await self.drain() - - self._eof = True - - async def drain(self) -> None: - """Flush the write buffer. - - The intended use is to write - - await w.write(data) - await w.drain() - """ - protocol = self._protocol - if protocol.transport is not None and protocol._paused: - await protocol._drain_helper() - - -def _safe_header(string: str) -> str: - if "\r" in string or "\n" in string: - raise ValueError( - "Newline or carriage return detected in headers. " - "Potential header injection attack." 
- ) - return string - - -def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: - headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) - line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" - return line.encode("utf-8") - - -_serialize_headers = _py_serialize_headers - -try: - import aiohttp._http_writer as _http_writer # type: ignore[import-not-found] - - _c_serialize_headers = _http_writer._serialize_headers - if not NO_EXTENSIONS: - _serialize_headers = _c_serialize_headers -except ImportError: - pass diff --git a/venv/Lib/site-packages/aiohttp/log.py b/venv/Lib/site-packages/aiohttp/log.py deleted file mode 100644 index 3cecea2..0000000 --- a/venv/Lib/site-packages/aiohttp/log.py +++ /dev/null @@ -1,8 +0,0 @@ -import logging - -access_logger = logging.getLogger("aiohttp.access") -client_logger = logging.getLogger("aiohttp.client") -internal_logger = logging.getLogger("aiohttp.internal") -server_logger = logging.getLogger("aiohttp.server") -web_logger = logging.getLogger("aiohttp.web") -ws_logger = logging.getLogger("aiohttp.websocket") diff --git a/venv/Lib/site-packages/aiohttp/multipart.py b/venv/Lib/site-packages/aiohttp/multipart.py deleted file mode 100644 index 9c37f0b..0000000 --- a/venv/Lib/site-packages/aiohttp/multipart.py +++ /dev/null @@ -1,1152 +0,0 @@ -import base64 -import binascii -import json -import re -import sys -import uuid -import warnings -from collections import deque -from collections.abc import Mapping, Sequence -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Deque, - Dict, - Iterator, - List, - Optional, - Tuple, - Type, - Union, - cast, -) -from urllib.parse import parse_qsl, unquote, urlencode - -from multidict import CIMultiDict, CIMultiDictProxy - -from .abc import AbstractStreamWriter -from .compression_utils import ( - DEFAULT_MAX_DECOMPRESS_SIZE, - ZLibCompressor, - ZLibDecompressor, -) -from .hdrs import ( - CONTENT_DISPOSITION, - CONTENT_ENCODING, - CONTENT_LENGTH, - CONTENT_TRANSFER_ENCODING, - CONTENT_TYPE, -) -from .helpers import CHAR, TOKEN, parse_mimetype, reify -from .http import HeadersParser -from .log import internal_logger -from .payload import ( - JsonPayload, - LookupError, - Order, - Payload, - StringPayload, - get_payload, - payload_type, -) -from .streams import StreamReader - -if sys.version_info >= (3, 11): - from typing import Self -else: - from typing import TypeVar - - Self = TypeVar("Self", bound="BodyPartReader") - -__all__ = ( - "MultipartReader", - "MultipartWriter", - "BodyPartReader", - "BadContentDispositionHeader", - "BadContentDispositionParam", - "parse_content_disposition", - "content_disposition_filename", -) - - -if TYPE_CHECKING: - from .client_reqrep import ClientResponse - - -class BadContentDispositionHeader(RuntimeWarning): - pass - - -class BadContentDispositionParam(RuntimeWarning): - pass - - -def parse_content_disposition( - header: Optional[str], -) -> Tuple[Optional[str], Dict[str, str]]: - def is_token(string: str) -> bool: - return bool(string) and TOKEN >= set(string) - - def is_quoted(string: str) -> bool: - return string[0] == string[-1] == '"' - - def is_rfc5987(string: str) -> bool: - return is_token(string) and string.count("'") == 2 - - def is_extended_param(string: str) -> bool: - return string.endswith("*") - - def is_continuous_param(string: str) -> bool: - pos = string.find("*") + 1 - if not pos: - return False - substring = string[pos:-1] if string.endswith("*") else string[pos:] - 
return substring.isdigit() - - def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: - return re.sub(f"\\\\([{chars}])", "\\1", text) - - if not header: - return None, {} - - disptype, *parts = header.split(";") - if not is_token(disptype): - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - params: Dict[str, str] = {} - while parts: - item = parts.pop(0) - - if not item: # To handle trailing semicolons - warnings.warn(BadContentDispositionHeader(header)) - continue - - if "=" not in item: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - key, value = item.split("=", 1) - key = key.lower().strip() - value = value.lstrip() - - if key in params: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - if not is_token(key): - warnings.warn(BadContentDispositionParam(item)) - continue - - elif is_continuous_param(key): - if is_quoted(value): - value = unescape(value[1:-1]) - elif not is_token(value): - warnings.warn(BadContentDispositionParam(item)) - continue - - elif is_extended_param(key): - if is_rfc5987(value): - encoding, _, value = value.split("'", 2) - encoding = encoding or "utf-8" - else: - warnings.warn(BadContentDispositionParam(item)) - continue - - try: - value = unquote(value, encoding, "strict") - except UnicodeDecodeError: # pragma: nocover - warnings.warn(BadContentDispositionParam(item)) - continue - - else: - failed = True - if is_quoted(value): - failed = False - value = unescape(value[1:-1].lstrip("\\/")) - elif is_token(value): - failed = False - elif parts: - # maybe just ; in filename, in any case this is just - # one case fix, for proper fix we need to redesign parser - _value = f"{value};{parts[0]}" - if is_quoted(_value): - parts.pop(0) - value = unescape(_value[1:-1].lstrip("\\/")) - failed = False - - if failed: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - params[key] = value - - return disptype.lower(), params - - -def content_disposition_filename( - params: Mapping[str, str], name: str = "filename" -) -> Optional[str]: - name_suf = "%s*" % name - if not params: - return None - elif name_suf in params: - return params[name_suf] - elif name in params: - return params[name] - else: - parts = [] - fnparams = sorted( - (key, value) for key, value in params.items() if key.startswith(name_suf) - ) - for num, (key, value) in enumerate(fnparams): - _, tail = key.split("*", 1) - if tail.endswith("*"): - tail = tail[:-1] - if tail == str(num): - parts.append(value) - else: - break - if not parts: - return None - value = "".join(parts) - if "'" in value: - encoding, _, value = value.split("'", 2) - encoding = encoding or "utf-8" - return unquote(value, encoding, "strict") - return value - - -class MultipartResponseWrapper: - """Wrapper around the MultipartReader. - - It takes care about - underlying connection and close it when it needs in. 
- """ - - def __init__( - self, - resp: "ClientResponse", - stream: "MultipartReader", - ) -> None: - self.resp = resp - self.stream = stream - - def __aiter__(self) -> "MultipartResponseWrapper": - return self - - async def __anext__( - self, - ) -> Union["MultipartReader", "BodyPartReader"]: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - def at_eof(self) -> bool: - """Returns True when all response data had been read.""" - return self.resp.content.at_eof() - - async def next( - self, - ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: - """Emits next multipart reader object.""" - item = await self.stream.next() - if self.stream.at_eof(): - await self.release() - return item - - async def release(self) -> None: - """Release the connection gracefully. - - All remaining content is read to the void. - """ - await self.resp.release() - - -class BodyPartReader: - """Multipart reader for single body part.""" - - chunk_size = 8192 - - def __init__( - self, - boundary: bytes, - headers: "CIMultiDictProxy[str]", - content: StreamReader, - *, - subtype: str = "mixed", - default_charset: Optional[str] = None, - max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, - ) -> None: - self.headers = headers - self._boundary = boundary - self._boundary_len = len(boundary) + 2 # Boundary + \r\n - self._content = content - self._default_charset = default_charset - self._at_eof = False - self._is_form_data = subtype == "form-data" - # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 - length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) - self._length = int(length) if length is not None else None - self._read_bytes = 0 - self._unread: Deque[bytes] = deque() - self._prev_chunk: Optional[bytes] = None - self._content_eof = 0 - self._cache: Dict[str, Any] = {} - self._max_decompress_size = max_decompress_size - - def __aiter__(self: Self) -> Self: - return self - - async def __anext__(self) -> bytes: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - async def next(self) -> Optional[bytes]: - item = await self.read() - if not item: - return None - return item - - async def read(self, *, decode: bool = False) -> bytes: - """Reads body part data. - - decode: Decodes data following by encoding - method from Content-Encoding header. If it missed - data remains untouched - """ - if self._at_eof: - return b"" - data = bytearray() - while not self._at_eof: - data.extend(await self.read_chunk(self.chunk_size)) - if decode: - return await self.decode(data) - return data - - async def read_chunk(self, size: int = chunk_size) -> bytes: - """Reads body part content chunk of the specified size. 
- - size: chunk size - """ - if self._at_eof: - return b"" - if self._length: - chunk = await self._read_chunk_from_length(size) - else: - chunk = await self._read_chunk_from_stream(size) - - # For the case of base64 data, we must read a fragment of size with a - # remainder of 0 by dividing by 4 for string without symbols \n or \r - encoding = self.headers.get(CONTENT_TRANSFER_ENCODING) - if encoding and encoding.lower() == "base64": - stripped_chunk = b"".join(chunk.split()) - remainder = len(stripped_chunk) % 4 - - while remainder != 0 and not self.at_eof(): - over_chunk_size = 4 - remainder - over_chunk = b"" - - if self._prev_chunk: - over_chunk = self._prev_chunk[:over_chunk_size] - self._prev_chunk = self._prev_chunk[len(over_chunk) :] - - if len(over_chunk) != over_chunk_size: - over_chunk += await self._content.read(4 - len(over_chunk)) - - if not over_chunk: - self._at_eof = True - - stripped_chunk += b"".join(over_chunk.split()) - chunk += over_chunk - remainder = len(stripped_chunk) % 4 - - self._read_bytes += len(chunk) - if self._read_bytes == self._length: - self._at_eof = True - if self._at_eof and await self._content.readline() != b"\r\n": - raise ValueError("Reader did not read all the data or it is malformed") - return chunk - - async def _read_chunk_from_length(self, size: int) -> bytes: - # Reads body part content chunk of the specified size. - # The body part must has Content-Length header with proper value. - assert self._length is not None, "Content-Length required for chunked read" - chunk_size = min(size, self._length - self._read_bytes) - chunk = await self._content.read(chunk_size) - if self._content.at_eof(): - self._at_eof = True - return chunk - - async def _read_chunk_from_stream(self, size: int) -> bytes: - # Reads content chunk of body part with unknown length. - # The Content-Length header for body part is not necessary. - assert ( - size >= self._boundary_len - ), "Chunk size must be greater or equal than boundary length + 2" - first_chunk = self._prev_chunk is None - if first_chunk: - # We need to re-add the CRLF that got removed from headers parsing. - self._prev_chunk = b"\r\n" + await self._content.read(size) - - chunk = b"" - # content.read() may return less than size, so we need to loop to ensure - # we have enough data to detect the boundary. 
- while len(chunk) < self._boundary_len: - chunk += await self._content.read(size) - self._content_eof += int(self._content.at_eof()) - if self._content_eof > 2: - raise ValueError("Reading after EOF") - if self._content_eof: - break - if len(chunk) > size: - self._content.unread_data(chunk[size:]) - chunk = chunk[:size] - - assert self._prev_chunk is not None - window = self._prev_chunk + chunk - sub = b"\r\n" + self._boundary - if first_chunk: - idx = window.find(sub) - else: - idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) - if idx >= 0: - # pushing boundary back to content - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - self._content.unread_data(window[idx:]) - self._prev_chunk = self._prev_chunk[:idx] - chunk = window[len(self._prev_chunk) : idx] - if not chunk: - self._at_eof = True - result = self._prev_chunk[2 if first_chunk else 0 :] # Strip initial CRLF - self._prev_chunk = chunk - return result - - async def readline(self) -> bytes: - """Reads body part by line by line.""" - if self._at_eof: - return b"" - - if self._unread: - line = self._unread.popleft() - else: - line = await self._content.readline() - - if line.startswith(self._boundary): - # the very last boundary may not come with \r\n, - # so set single rules for everyone - sline = line.rstrip(b"\r\n") - boundary = self._boundary - last_boundary = self._boundary + b"--" - # ensure that we read exactly the boundary, not something alike - if sline == boundary or sline == last_boundary: - self._at_eof = True - self._unread.append(line) - return b"" - else: - next_line = await self._content.readline() - if next_line.startswith(self._boundary): - line = line[:-2] # strip CRLF but only once - self._unread.append(next_line) - - return line - - async def release(self) -> None: - """Like read(), but reads all the data to the void.""" - if self._at_eof: - return - while not self._at_eof: - await self.read_chunk(self.chunk_size) - - async def text(self, *, encoding: Optional[str] = None) -> str: - """Like read(), but assumes that body part contains text data.""" - data = await self.read(decode=True) - # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm - # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send - encoding = encoding or self.get_charset(default="utf-8") - return data.decode(encoding) - - async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: - """Like read(), but assumes that body parts contains JSON data.""" - data = await self.read(decode=True) - if not data: - return None - encoding = encoding or self.get_charset(default="utf-8") - return cast(Dict[str, Any], json.loads(data.decode(encoding))) - - async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: - """Like read(), but assumes that body parts contain form urlencoded data.""" - data = await self.read(decode=True) - if not data: - return [] - if encoding is not None: - real_encoding = encoding - else: - real_encoding = self.get_charset(default="utf-8") - try: - decoded_data = data.rstrip().decode(real_encoding) - except UnicodeDecodeError: - raise ValueError("data cannot be decoded with %s encoding" % real_encoding) - - return parse_qsl( - decoded_data, - keep_blank_values=True, - encoding=real_encoding, - ) - - def at_eof(self) -> bool: - """Returns True if the boundary was reached or False otherwise.""" - return self._at_eof - - async def decode(self, data: bytes) -> bytes: - 
"""Decodes data. - - Decoding is done according the specified Content-Encoding - or Content-Transfer-Encoding headers value. - """ - if CONTENT_TRANSFER_ENCODING in self.headers: - data = self._decode_content_transfer(data) - # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 - if not self._is_form_data and CONTENT_ENCODING in self.headers: - return await self._decode_content(data) - return data - - async def _decode_content(self, data: bytes) -> bytes: - encoding = self.headers.get(CONTENT_ENCODING, "").lower() - if encoding == "identity": - return data - if encoding in {"deflate", "gzip"}: - return await ZLibDecompressor( - encoding=encoding, - suppress_deflate_header=True, - ).decompress(data, max_length=self._max_decompress_size) - - raise RuntimeError(f"unknown content encoding: {encoding}") - - def _decode_content_transfer(self, data: bytes) -> bytes: - encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() - - if encoding == "base64": - return base64.b64decode(data) - elif encoding == "quoted-printable": - return binascii.a2b_qp(data) - elif encoding in ("binary", "8bit", "7bit"): - return data - else: - raise RuntimeError(f"unknown content transfer encoding: {encoding}") - - def get_charset(self, default: str) -> str: - """Returns charset parameter from Content-Type header or default.""" - ctype = self.headers.get(CONTENT_TYPE, "") - mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", self._default_charset or default) - - @reify - def name(self) -> Optional[str]: - """Returns name specified in Content-Disposition header. - - If the header is missing or malformed, returns None. - """ - _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) - return content_disposition_filename(params, "name") - - @reify - def filename(self) -> Optional[str]: - """Returns filename specified in Content-Disposition header. - - Returns None if the header is missing or malformed. - """ - _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) - return content_disposition_filename(params, "filename") - - -@payload_type(BodyPartReader, order=Order.try_first) -class BodyPartReaderPayload(Payload): - _value: BodyPartReader - # _autoclose = False (inherited) - Streaming reader that may have resources - - def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: - super().__init__(value, *args, **kwargs) - - params: Dict[str, str] = {} - if value.name is not None: - params["name"] = value.name - if value.filename is not None: - params["filename"] = value.filename - - if params: - self.set_content_disposition("attachment", True, **params) - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - raise TypeError("Unable to decode.") - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """Raises TypeError as body parts should be consumed via write(). - - This is intentional: BodyPartReader payloads are designed for streaming - large data (potentially gigabytes) and must be consumed only once via - the write() method to avoid memory exhaustion. They cannot be buffered - in memory for reuse. - """ - raise TypeError("Unable to read body part as bytes. 
Use write() to consume.") - - async def write(self, writer: AbstractStreamWriter) -> None: - field = self._value - chunk = await field.read_chunk(size=2**16) - while chunk: - await writer.write(await field.decode(chunk)) - chunk = await field.read_chunk(size=2**16) - - -class MultipartReader: - """Multipart body reader.""" - - #: Response wrapper, used when multipart readers constructs from response. - response_wrapper_cls = MultipartResponseWrapper - #: Multipart reader class, used to handle multipart/* body parts. - #: None points to type(self) - multipart_reader_cls: Optional[Type["MultipartReader"]] = None - #: Body part reader class for non multipart/* content types. - part_reader_cls = BodyPartReader - - def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: - self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) - assert self._mimetype.type == "multipart", "multipart/* content type expected" - if "boundary" not in self._mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] - ) - - self.headers = headers - self._boundary = ("--" + self._get_boundary()).encode() - self._content = content - self._default_charset: Optional[str] = None - self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None - self._at_eof = False - self._at_bof = True - self._unread: List[bytes] = [] - - def __aiter__(self: Self) -> Self: - return self - - async def __anext__( - self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - @classmethod - def from_response( - cls, - response: "ClientResponse", - ) -> MultipartResponseWrapper: - """Constructs reader instance from HTTP response. - - :param response: :class:`~aiohttp.client.ClientResponse` instance - """ - obj = cls.response_wrapper_cls( - response, cls(response.headers, response.content) - ) - return obj - - def at_eof(self) -> bool: - """Returns True if the final boundary was reached, false otherwise.""" - return self._at_eof - - async def next( - self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: - """Emits the next multipart body part.""" - # So, if we're at BOF, we need to skip till the boundary. - if self._at_eof: - return None - await self._maybe_release_last_part() - if self._at_bof: - await self._read_until_first_boundary() - self._at_bof = False - else: - await self._read_boundary() - if self._at_eof: # we just read the last boundary, nothing to do there - return None - - part = await self.fetch_next_part() - # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 - if ( - self._last_part is None - and self._mimetype.subtype == "form-data" - and isinstance(part, BodyPartReader) - ): - _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) - if params.get("name") == "_charset_": - # Longest encoding in https://encoding.spec.whatwg.org/encodings.json - # is 19 characters, so 32 should be more than enough for any valid encoding. 
- charset = await part.read_chunk(32) - if len(charset) > 31: - raise RuntimeError("Invalid default charset") - self._default_charset = charset.strip().decode() - part = await self.fetch_next_part() - self._last_part = part - return self._last_part - - async def release(self) -> None: - """Reads all the body parts to the void till the final boundary.""" - while not self._at_eof: - item = await self.next() - if item is None: - break - await item.release() - - async def fetch_next_part( - self, - ) -> Union["MultipartReader", BodyPartReader]: - """Returns the next body part reader.""" - headers = await self._read_headers() - return self._get_part_reader(headers) - - def _get_part_reader( - self, - headers: "CIMultiDictProxy[str]", - ) -> Union["MultipartReader", BodyPartReader]: - """Dispatches the response by the `Content-Type` header. - - Returns a suitable reader instance. - - :param dict headers: Response headers - """ - ctype = headers.get(CONTENT_TYPE, "") - mimetype = parse_mimetype(ctype) - - if mimetype.type == "multipart": - if self.multipart_reader_cls is None: - return type(self)(headers, self._content) - return self.multipart_reader_cls(headers, self._content) - else: - return self.part_reader_cls( - self._boundary, - headers, - self._content, - subtype=self._mimetype.subtype, - default_charset=self._default_charset, - ) - - def _get_boundary(self) -> str: - boundary = self._mimetype.parameters["boundary"] - if len(boundary) > 70: - raise ValueError("boundary %r is too long (70 chars max)" % boundary) - - return boundary - - async def _readline(self) -> bytes: - if self._unread: - return self._unread.pop() - return await self._content.readline() - - async def _read_until_first_boundary(self) -> None: - while True: - chunk = await self._readline() - if chunk == b"": - raise ValueError( - "Could not find starting boundary %r" % (self._boundary) - ) - chunk = chunk.rstrip() - if chunk == self._boundary: - return - elif chunk == self._boundary + b"--": - self._at_eof = True - return - - async def _read_boundary(self) -> None: - chunk = (await self._readline()).rstrip() - if chunk == self._boundary: - pass - elif chunk == self._boundary + b"--": - self._at_eof = True - epilogue = await self._readline() - next_line = await self._readline() - - # the epilogue is expected and then either the end of input or the - # parent multipart boundary, if the parent boundary is found then - # it should be marked as unread and handed to the parent for - # processing - if next_line[:2] == b"--": - self._unread.append(next_line) - # otherwise the request is likely missing an epilogue and both - # lines should be passed to the parent for processing - # (this handles the old behavior gracefully) - else: - self._unread.extend([next_line, epilogue]) - else: - raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") - - async def _read_headers(self) -> "CIMultiDictProxy[str]": - lines = [] - while True: - chunk = await self._content.readline() - chunk = chunk.rstrip(b"\r\n") - lines.append(chunk) - if not chunk: - break - parser = HeadersParser() - headers, raw_headers = parser.parse_headers(lines) - return headers - - async def _maybe_release_last_part(self) -> None: - """Ensures that the last read body part is read completely.""" - if self._last_part is not None: - if not self._last_part.at_eof(): - await self._last_part.release() - self._unread.extend(self._last_part._unread) - self._last_part = None - - -_Part = Tuple[Payload, str, str] - - -class MultipartWriter(Payload): - 
"""Multipart body writer.""" - - _value: None - # _consumed = False (inherited) - Can be encoded multiple times - _autoclose = True # No file handles, just collects parts in memory - - def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: - boundary = boundary if boundary is not None else uuid.uuid4().hex - # The underlying Payload API demands a str (utf-8), not bytes, - # so we need to ensure we don't lose anything during conversion. - # As a result, require the boundary to be ASCII only. - # In both situations. - - try: - self._boundary = boundary.encode("ascii") - except UnicodeEncodeError: - raise ValueError("boundary should contain ASCII only chars") from None - ctype = f"multipart/{subtype}; boundary={self._boundary_value}" - - super().__init__(None, content_type=ctype) - - self._parts: List[_Part] = [] - self._is_form_data = subtype == "form-data" - - def __enter__(self) -> "MultipartWriter": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - pass - - def __iter__(self) -> Iterator[_Part]: - return iter(self._parts) - - def __len__(self) -> int: - return len(self._parts) - - def __bool__(self) -> bool: - return True - - _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") - _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") - - @property - def _boundary_value(self) -> str: - """Wrap boundary parameter value in quotes, if necessary. - - Reads self.boundary and returns a unicode string. - """ - # Refer to RFCs 7231, 7230, 5234. - # - # parameter = token "=" ( token / quoted-string ) - # token = 1*tchar - # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE - # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text - # obs-text = %x80-FF - # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) - # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" - # / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" - # / DIGIT / ALPHA - # ; any VCHAR, except delimiters - # VCHAR = %x21-7E - value = self._boundary - if re.match(self._valid_tchar_regex, value): - return value.decode("ascii") # cannot fail - - if re.search(self._invalid_qdtext_char_regex, value): - raise ValueError("boundary value contains invalid characters") - - # escape %x5C and %x22 - quoted_value_content = value.replace(b"\\", b"\\\\") - quoted_value_content = quoted_value_content.replace(b'"', b'\\"') - - return '"' + quoted_value_content.decode("ascii") + '"' - - @property - def boundary(self) -> str: - return self._boundary.decode("ascii") - - def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: - if headers is None: - headers = CIMultiDict() - - if isinstance(obj, Payload): - obj.headers.update(headers) - return self.append_payload(obj) - else: - try: - payload = get_payload(obj, headers=headers) - except LookupError: - raise TypeError("Cannot create payload from %r" % obj) - else: - return self.append_payload(payload) - - def append_payload(self, payload: Payload) -> Payload: - """Adds a new body part to multipart writer.""" - encoding: Optional[str] = None - te_encoding: Optional[str] = None - if self._is_form_data: - # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 - # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 - assert ( - not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} - & payload.headers.keys() - ) - # Set default Content-Disposition in case user doesn't create one - if CONTENT_DISPOSITION not in payload.headers: - name = f"section-{len(self._parts)}" - payload.set_content_disposition("form-data", name=name) - else: - # compression - encoding = payload.headers.get(CONTENT_ENCODING, "").lower() - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) - - self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] - return payload - - def append_json( - self, obj: Any, headers: Optional[Mapping[str, str]] = None - ) -> Payload: - """Helper to append JSON part.""" - if headers is None: - headers = CIMultiDict() - - return self.append_payload(JsonPayload(obj, headers=headers)) - - def append_form( - self, - obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[Mapping[str, str]] = None, - ) -> Payload: - """Helper to append form urlencoded part.""" - assert isinstance(obj, (Sequence, Mapping)) - - if headers is None: - headers = CIMultiDict() - - if isinstance(obj, Mapping): - obj = list(obj.items()) - data = urlencode(obj, doseq=True) - - return self.append_payload( - StringPayload( - data, headers=headers, content_type="application/x-www-form-urlencoded" - ) - ) - - @property - def size(self) -> Optional[int]: - """Size of the payload.""" - total = 0 - for part, encoding, te_encoding in self._parts: - part_size = part.size - if encoding or te_encoding or part_size is None: - return None - - total += int( - 2 - + len(self._boundary) - + 2 - + 
part_size # b'--'+self._boundary+b'\r\n' - + len(part._binary_headers) - + 2 # b'\r\n' - ) - - total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' - return total - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """Return string representation of the multipart data. - - WARNING: This method may do blocking I/O if parts contain file payloads. - It should not be called in the event loop. Use as_bytes().decode() instead. - """ - return "".join( - "--" - + self.boundary - + "\r\n" - + part._binary_headers.decode(encoding, errors) - + part.decode() - for part, _e, _te in self._parts - ) - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """Return bytes representation of the multipart data. - - This method is async-safe and calls as_bytes on underlying payloads. - """ - parts: List[bytes] = [] - - # Process each part - for part, _e, _te in self._parts: - # Add boundary - parts.append(b"--" + self._boundary + b"\r\n") - - # Add headers - parts.append(part._binary_headers) - - # Add payload content using as_bytes for async safety - part_bytes = await part.as_bytes(encoding, errors) - parts.append(part_bytes) - - # Add trailing CRLF - parts.append(b"\r\n") - - # Add closing boundary - parts.append(b"--" + self._boundary + b"--\r\n") - - return b"".join(parts) - - async def write( - self, writer: AbstractStreamWriter, close_boundary: bool = True - ) -> None: - """Write body.""" - for part, encoding, te_encoding in self._parts: - if self._is_form_data: - # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2 - assert CONTENT_DISPOSITION in part.headers - assert "name=" in part.headers[CONTENT_DISPOSITION] - - await writer.write(b"--" + self._boundary + b"\r\n") - await writer.write(part._binary_headers) - - if encoding or te_encoding: - w = MultipartPayloadWriter(writer) - if encoding: - w.enable_compression(encoding) - if te_encoding: - w.enable_encoding(te_encoding) - await part.write(w) # type: ignore[arg-type] - await w.write_eof() - else: - await part.write(writer) - - await writer.write(b"\r\n") - - if close_boundary: - await writer.write(b"--" + self._boundary + b"--\r\n") - - async def close(self) -> None: - """ - Close all part payloads that need explicit closing. - - IMPORTANT: This method must not await anything that might not finish - immediately, as it may be called during cleanup/cancellation. Schedule - any long-running operations without awaiting them. 
- """ - if self._consumed: - return - self._consumed = True - - # Close all parts that need explicit closing - # We catch and log exceptions to ensure all parts get a chance to close - # we do not use asyncio.gather() here because we are not allowed - # to suspend given we may be called during cleanup - for idx, (part, _, _) in enumerate(self._parts): - if not part.autoclose and not part.consumed: - try: - await part.close() - except Exception as exc: - internal_logger.error( - "Failed to close multipart part %d: %s", idx, exc, exc_info=True - ) - - -class MultipartPayloadWriter: - def __init__(self, writer: AbstractStreamWriter) -> None: - self._writer = writer - self._encoding: Optional[str] = None - self._compress: Optional[ZLibCompressor] = None - self._encoding_buffer: Optional[bytearray] = None - - def enable_encoding(self, encoding: str) -> None: - if encoding == "base64": - self._encoding = encoding - self._encoding_buffer = bytearray() - elif encoding == "quoted-printable": - self._encoding = "quoted-printable" - - def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None - ) -> None: - self._compress = ZLibCompressor( - encoding=encoding, - suppress_deflate_header=True, - strategy=strategy, - ) - - async def write_eof(self) -> None: - if self._compress is not None: - chunk = self._compress.flush() - if chunk: - self._compress = None - await self.write(chunk) - - if self._encoding == "base64": - if self._encoding_buffer: - await self._writer.write(base64.b64encode(self._encoding_buffer)) - - async def write(self, chunk: bytes) -> None: - if self._compress is not None: - if chunk: - chunk = await self._compress.compress(chunk) - if not chunk: - return - - if self._encoding == "base64": - buf = self._encoding_buffer - assert buf is not None - buf.extend(chunk) - - if buf: - div, mod = divmod(len(buf), 3) - enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) - if enc_chunk: - b64chunk = base64.b64encode(enc_chunk) - await self._writer.write(b64chunk) - elif self._encoding == "quoted-printable": - await self._writer.write(binascii.b2a_qp(chunk)) - else: - await self._writer.write(chunk) diff --git a/venv/Lib/site-packages/aiohttp/payload.py b/venv/Lib/site-packages/aiohttp/payload.py deleted file mode 100644 index 5b88fa0..0000000 --- a/venv/Lib/site-packages/aiohttp/payload.py +++ /dev/null @@ -1,1120 +0,0 @@ -import asyncio -import enum -import io -import json -import mimetypes -import os -import sys -import warnings -from abc import ABC, abstractmethod -from collections.abc import Iterable -from itertools import chain -from typing import ( - IO, - TYPE_CHECKING, - Any, - Dict, - Final, - List, - Optional, - Set, - TextIO, - Tuple, - Type, - Union, -) - -from multidict import CIMultiDict - -from . 
import hdrs -from .abc import AbstractStreamWriter -from .helpers import ( - _SENTINEL, - content_disposition_header, - guess_filename, - parse_mimetype, - sentinel, -) -from .streams import StreamReader -from .typedefs import JSONEncoder, _CIMultiDict - -__all__ = ( - "PAYLOAD_REGISTRY", - "get_payload", - "payload_type", - "Payload", - "BytesPayload", - "StringPayload", - "IOBasePayload", - "BytesIOPayload", - "BufferedReaderPayload", - "TextIOPayload", - "StringIOPayload", - "JsonPayload", - "AsyncIterablePayload", -) - -TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB -READ_SIZE: Final[int] = 2**16 # 64 KB -_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() - - -class LookupError(Exception): - """Raised when no payload factory is found for the given data type.""" - - -class Order(str, enum.Enum): - normal = "normal" - try_first = "try_first" - try_last = "try_last" - - -def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": - return PAYLOAD_REGISTRY.get(data, *args, **kwargs) - - -def register_payload( - factory: Type["Payload"], type: Any, *, order: Order = Order.normal -) -> None: - PAYLOAD_REGISTRY.register(factory, type, order=order) - - -class payload_type: - def __init__(self, type: Any, *, order: Order = Order.normal) -> None: - self.type = type - self.order = order - - def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: - register_payload(factory, self.type, order=self.order) - return factory - - -PayloadType = Type["Payload"] -_PayloadRegistryItem = Tuple[PayloadType, Any] - - -class PayloadRegistry: - """Payload registry. - - note: we need zope.interface for more efficient adapter search - """ - - __slots__ = ("_first", "_normal", "_last", "_normal_lookup") - - def __init__(self) -> None: - self._first: List[_PayloadRegistryItem] = [] - self._normal: List[_PayloadRegistryItem] = [] - self._last: List[_PayloadRegistryItem] = [] - self._normal_lookup: Dict[Any, PayloadType] = {} - - def get( - self, - data: Any, - *args: Any, - _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, - **kwargs: Any, - ) -> "Payload": - if self._first: - for factory, type_ in self._first: - if isinstance(data, type_): - return factory(data, *args, **kwargs) - # Try the fast lookup first - if lookup_factory := self._normal_lookup.get(type(data)): - return lookup_factory(data, *args, **kwargs) - # Bail early if its already a Payload - if isinstance(data, Payload): - return data - # Fallback to the slower linear search - for factory, type_ in _CHAIN(self._normal, self._last): - if isinstance(data, type_): - return factory(data, *args, **kwargs) - raise LookupError() - - def register( - self, factory: PayloadType, type: Any, *, order: Order = Order.normal - ) -> None: - if order is Order.try_first: - self._first.append((factory, type)) - elif order is Order.normal: - self._normal.append((factory, type)) - if isinstance(type, Iterable): - for t in type: - self._normal_lookup[t] = factory - else: - self._normal_lookup[type] = factory - elif order is Order.try_last: - self._last.append((factory, type)) - else: - raise ValueError(f"Unsupported order {order!r}") - - -class Payload(ABC): - - _default_content_type: str = "application/octet-stream" - _size: Optional[int] = None - _consumed: bool = False # Default: payload has not been consumed yet - _autoclose: bool = False # Default: assume resource needs explicit closing - - def __init__( - self, - value: Any, - headers: Optional[ - Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] - ] = None, - content_type: Union[str, 
None, _SENTINEL] = sentinel, - filename: Optional[str] = None, - encoding: Optional[str] = None, - **kwargs: Any, - ) -> None: - self._encoding = encoding - self._filename = filename - self._headers: _CIMultiDict = CIMultiDict() - self._value = value - if content_type is not sentinel and content_type is not None: - self._headers[hdrs.CONTENT_TYPE] = content_type - elif self._filename is not None: - if sys.version_info >= (3, 13): - guesser = mimetypes.guess_file_type - else: - guesser = mimetypes.guess_type - content_type = guesser(self._filename)[0] - if content_type is None: - content_type = self._default_content_type - self._headers[hdrs.CONTENT_TYPE] = content_type - else: - self._headers[hdrs.CONTENT_TYPE] = self._default_content_type - if headers: - self._headers.update(headers) - - @property - def size(self) -> Optional[int]: - """Size of the payload in bytes. - - Returns the number of bytes that will be transmitted when the payload - is written. For string payloads, this is the size after encoding to bytes, - not the length of the string. - """ - return self._size - - @property - def filename(self) -> Optional[str]: - """Filename of the payload.""" - return self._filename - - @property - def headers(self) -> _CIMultiDict: - """Custom item headers""" - return self._headers - - @property - def _binary_headers(self) -> bytes: - return ( - "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( - "utf-8" - ) - + b"\r\n" - ) - - @property - def encoding(self) -> Optional[str]: - """Payload encoding""" - return self._encoding - - @property - def content_type(self) -> str: - """Content type""" - return self._headers[hdrs.CONTENT_TYPE] - - @property - def consumed(self) -> bool: - """Whether the payload has been consumed and cannot be reused.""" - return self._consumed - - @property - def autoclose(self) -> bool: - """ - Whether the payload can close itself automatically. - - Returns True if the payload has no file handles or resources that need - explicit closing. If False, callers must await close() to release resources. - """ - return self._autoclose - - def set_content_disposition( - self, - disptype: str, - quote_fields: bool = True, - _charset: str = "utf-8", - **params: Any, - ) -> None: - """Sets ``Content-Disposition`` header.""" - self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( - disptype, quote_fields=quote_fields, _charset=_charset, **params - ) - - @abstractmethod - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """ - Return string representation of the value. - - This is named decode() to allow compatibility with bytes objects. - """ - - @abstractmethod - async def write(self, writer: AbstractStreamWriter) -> None: - """ - Write payload to the writer stream. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - - This is a legacy method that writes the entire payload without length constraints. - - Important: - For new implementations, use write_with_length() instead of this method. - This method is maintained for backwards compatibility and will eventually - delegate to write_with_length(writer, None) in all implementations. - - All payload subclasses must override this method for backwards compatibility, - but new code should use write_with_length for more flexibility and control. 
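# A sketch of how the Payload ABC and registry above fit together: a custom
# payload class implements decode() and write() and is registered with the
# payload_type decorator so get_payload() can dispatch on the wrapped value's
# type. The Point class and its wire format are assumptions for illustration.
from aiohttp.abc import AbstractStreamWriter
from aiohttp.payload import Payload, get_payload, payload_type


class Point:
    def __init__(self, x: int, y: int) -> None:
        self.x, self.y = x, y


@payload_type(Point)
class PointPayload(Payload):
    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # Text form of the wrapped value (assumed "x,y" format)
        return f"{self._value.x},{self._value.y}"

    async def write(self, writer: AbstractStreamWriter) -> None:
        # Stream the encoded text form to the writer
        await writer.write(self.decode().encode("utf-8"))


payload = get_payload(Point(2, 3))
assert isinstance(payload, PointPayload)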
- - """ - - # write_with_length is new in aiohttp 3.12 - # it should be overridden by subclasses - async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] - ) -> None: - """ - Write payload with a specific content length constraint. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - content_length: Maximum number of bytes to write (None for unlimited) - - This method allows writing payload content with a specific length constraint, - which is particularly useful for HTTP responses with Content-Length header. - - Note: - This is the base implementation that provides backwards compatibility - for subclasses that don't override this method. Specific payload types - should override this method to implement proper length-constrained writing. - - """ - # Backwards compatibility for subclasses that don't override this method - # and for the default implementation - await self.write(writer) - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """ - Return bytes representation of the value. - - This is a convenience method that calls decode() and encodes the result - to bytes using the specified encoding. - """ - # Use instance encoding if available, otherwise use parameter - actual_encoding = self._encoding or encoding - return self.decode(actual_encoding, errors).encode(actual_encoding) - - def _close(self) -> None: - """ - Async safe synchronous close operations for backwards compatibility. - - This method exists only for backwards compatibility with code that - needs to clean up payloads synchronously. In the future, we will - drop this method and only support the async close() method. - - WARNING: This method must be safe to call from within the event loop - without blocking. Subclasses should not perform any blocking I/O here. - - WARNING: This method must be called from within an event loop for - certain payload types (e.g., IOBasePayload). Calling it outside an - event loop may raise RuntimeError. - """ - # This is a no-op by default, but subclasses can override it - # for non-blocking cleanup operations. - - async def close(self) -> None: - """ - Close the payload if it holds any resources. - - IMPORTANT: This method must not await anything that might not finish - immediately, as it may be called during cleanup/cancellation. Schedule - any long-running operations without awaiting them. - - In the future, this will be the only close method supported. - """ - self._close() - - -class BytesPayload(Payload): - _value: bytes - # _consumed = False (inherited) - Bytes are immutable and can be reused - _autoclose = True # No file handle, just bytes in memory - - def __init__( - self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any - ) -> None: - if "content_type" not in kwargs: - kwargs["content_type"] = "application/octet-stream" - - super().__init__(value, *args, **kwargs) - - if isinstance(value, memoryview): - self._size = value.nbytes - elif isinstance(value, (bytes, bytearray)): - self._size = len(value) - else: - raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") - - if self._size > TOO_LARGE_BYTES_BODY: - kwargs = {"source": self} - warnings.warn( - "Sending a large body directly with raw bytes might" - " lock the event loop. 
You should probably pass an " - "io.BytesIO object instead", - ResourceWarning, - **kwargs, - ) - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - return self._value.decode(encoding, errors) - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """ - Return bytes representation of the value. - - This method returns the raw bytes content of the payload. - It is equivalent to accessing the _value attribute directly. - """ - return self._value - - async def write(self, writer: AbstractStreamWriter) -> None: - """ - Write the entire bytes payload to the writer stream. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - - This method writes the entire bytes content without any length constraint. - - Note: - For new implementations that need length control, use write_with_length(). - This method is maintained for backwards compatibility and is equivalent - to write_with_length(writer, None). - - """ - await writer.write(self._value) - - async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] - ) -> None: - """ - Write bytes payload with a specific content length constraint. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - content_length: Maximum number of bytes to write (None for unlimited) - - This method writes either the entire byte sequence or a slice of it - up to the specified content_length. For BytesPayload, this operation - is performed efficiently using array slicing. - - """ - if content_length is not None: - await writer.write(self._value[:content_length]) - else: - await writer.write(self._value) - - -class StringPayload(BytesPayload): - def __init__( - self, - value: str, - *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any, - ) -> None: - - if encoding is None: - if content_type is None: - real_encoding = "utf-8" - content_type = "text/plain; charset=utf-8" - else: - mimetype = parse_mimetype(content_type) - real_encoding = mimetype.parameters.get("charset", "utf-8") - else: - if content_type is None: - content_type = "text/plain; charset=%s" % encoding - real_encoding = encoding - - super().__init__( - value.encode(real_encoding), - encoding=real_encoding, - content_type=content_type, - *args, - **kwargs, - ) - - -class StringIOPayload(StringPayload): - def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: - super().__init__(value.read(), *args, **kwargs) - - -class IOBasePayload(Payload): - _value: io.IOBase - # _consumed = False (inherited) - File can be re-read from the same position - _start_position: Optional[int] = None - # _autoclose = False (inherited) - Has file handle that needs explicit closing - - def __init__( - self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any - ) -> None: - if "filename" not in kwargs: - kwargs["filename"] = guess_filename(value) - - super().__init__(value, *args, **kwargs) - - if self._filename is not None and disposition is not None: - if hdrs.CONTENT_DISPOSITION not in self.headers: - self.set_content_disposition(disposition, filename=self._filename) - - def _set_or_restore_start_position(self) -> None: - """Set or restore the start position of the file-like object.""" - if self._start_position is None: - try: - self._start_position = self._value.tell() - except (OSError, AttributeError): - self._consumed = True # Cannot seek, mark as consumed - return - try: - 
self._value.seek(self._start_position) - except (OSError, AttributeError): - # Failed to seek back - mark as consumed since we've already read - self._consumed = True - - def _read_and_available_len( - self, remaining_content_len: Optional[int] - ) -> Tuple[Optional[int], bytes]: - """ - Read the file-like object and return both its total size and the first chunk. - - Args: - remaining_content_len: Optional limit on how many bytes to read in this operation. - If None, READ_SIZE will be used as the default chunk size. - - Returns: - A tuple containing: - - The total size of the remaining unread content (None if size cannot be determined) - - The first chunk of bytes read from the file object - - This method is optimized to perform both size calculation and initial read - in a single operation, which is executed in a single executor job to minimize - context switches and file operations when streaming content. - - """ - self._set_or_restore_start_position() - size = self.size # Call size only once since it does I/O - return size, self._value.read( - min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE) - ) - - def _read(self, remaining_content_len: Optional[int]) -> bytes: - """ - Read a chunk of data from the file-like object. - - Args: - remaining_content_len: Optional maximum number of bytes to read. - If None, READ_SIZE will be used as the default chunk size. - - Returns: - A chunk of bytes read from the file object, respecting the - remaining_content_len limit if specified. - - This method is used for subsequent reads during streaming after - the initial _read_and_available_len call has been made. - - """ - return self._value.read(remaining_content_len or READ_SIZE) # type: ignore[no-any-return] - - @property - def size(self) -> Optional[int]: - """ - Size of the payload in bytes. - - Returns the total size of the payload content from the initial position. - This ensures consistent Content-Length for requests, including 307/308 redirects - where the same payload instance is reused. - - Returns None if the size cannot be determined (e.g., for unseekable streams). - """ - try: - # Store the start position on first access. - # This is critical when the same payload instance is reused (e.g., 307/308 - # redirects). Without storing the initial position, after the payload is - # read once, the file position would be at EOF, which would cause the - # size calculation to return 0 (file_size - EOF position). - # By storing the start position, we ensure the size calculation always - # returns the correct total size for any subsequent use. - if self._start_position is None: - self._start_position = self._value.tell() - - # Return the total size from the start position - # This ensures Content-Length is correct even after reading - return os.fstat(self._value.fileno()).st_size - self._start_position - except (AttributeError, OSError): - return None - - async def write(self, writer: AbstractStreamWriter) -> None: - """ - Write the entire file-like payload to the writer stream. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - - This method writes the entire file content without any length constraint. - It delegates to write_with_length() with no length limit for implementation - consistency. - - Note: - For new implementations that need length control, use write_with_length() directly. - This method is maintained for backwards compatibility with existing code. 
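# A small sketch of how the IOBasePayload machinery above is reached in
# practice: passing an open binary file as the request body lets the payload
# registry wrap it (BufferedReaderPayload/IOBasePayload), stream it in chunks
# via the executor, and derive Content-Length from the file's start position.
# The URL and filename are illustrative assumptions.
import asyncio
import aiohttp


async def upload_file() -> None:
    async with aiohttp.ClientSession() as session:
        with open("report.pdf", "rb") as f:
            async with session.post("https://example.com/upload", data=f) as resp:
                print(resp.status)


asyncio.run(upload_file())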
- - """ - await self.write_with_length(writer, None) - - async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] - ) -> None: - """ - Write file-like payload with a specific content length constraint. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - content_length: Maximum number of bytes to write (None for unlimited) - - This method implements optimized streaming of file content with length constraints: - - 1. File reading is performed in a thread pool to avoid blocking the event loop - 2. Content is read and written in chunks to maintain memory efficiency - 3. Writing stops when either: - - All available file content has been written (when size is known) - - The specified content_length has been reached - 4. File resources are properly closed even if the operation is cancelled - - The implementation carefully handles both known-size and unknown-size payloads, - as well as constrained and unconstrained content lengths. - - """ - loop = asyncio.get_running_loop() - total_written_len = 0 - remaining_content_len = content_length - - # Get initial data and available length - available_len, chunk = await loop.run_in_executor( - None, self._read_and_available_len, remaining_content_len - ) - # Process data chunks until done - while chunk: - chunk_len = len(chunk) - - # Write data with or without length constraint - if remaining_content_len is None: - await writer.write(chunk) - else: - await writer.write(chunk[:remaining_content_len]) - remaining_content_len -= chunk_len - - total_written_len += chunk_len - - # Check if we're done writing - if self._should_stop_writing( - available_len, total_written_len, remaining_content_len - ): - return - - # Read next chunk - chunk = await loop.run_in_executor( - None, - self._read, - ( - min(READ_SIZE, remaining_content_len) - if remaining_content_len is not None - else READ_SIZE - ), - ) - - def _should_stop_writing( - self, - available_len: Optional[int], - total_written_len: int, - remaining_content_len: Optional[int], - ) -> bool: - """ - Determine if we should stop writing data. - - Args: - available_len: Known size of the payload if available (None if unknown) - total_written_len: Number of bytes already written - remaining_content_len: Remaining bytes to be written for content-length limited responses - - Returns: - True if we should stop writing data, based on either: - - Having written all available data (when size is known) - - Having written all requested content (when content-length is specified) - - """ - return (available_len is not None and total_written_len >= available_len) or ( - remaining_content_len is not None and remaining_content_len <= 0 - ) - - def _close(self) -> None: - """ - Async safe synchronous close operations for backwards compatibility. - - This method exists only for backwards - compatibility. Use the async close() method instead. - - WARNING: This method MUST be called from within an event loop. - Calling it outside an event loop will raise RuntimeError. - """ - # Skip if already consumed - if self._consumed: - return - self._consumed = True # Mark as consumed to prevent further writes - # Schedule file closing without awaiting to prevent cancellation issues - loop = asyncio.get_running_loop() - close_future = loop.run_in_executor(None, self._value.close) - # Hold a strong reference to the future to prevent it from being - # garbage collected before it completes. 
- _CLOSE_FUTURES.add(close_future) - close_future.add_done_callback(_CLOSE_FUTURES.remove) - - async def close(self) -> None: - """ - Close the payload if it holds any resources. - - IMPORTANT: This method must not await anything that might not finish - immediately, as it may be called during cleanup/cancellation. Schedule - any long-running operations without awaiting them. - """ - self._close() - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """ - Return string representation of the value. - - WARNING: This method does blocking I/O and should not be called in the event loop. - """ - return self._read_all().decode(encoding, errors) - - def _read_all(self) -> bytes: - """Read the entire file-like object and return its content as bytes.""" - self._set_or_restore_start_position() - # Use readlines() to ensure we get all content - return b"".join(self._value.readlines()) - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """ - Return bytes representation of the value. - - This method reads the entire file content and returns it as bytes. - It is equivalent to reading the file-like object directly. - The file reading is performed in an executor to avoid blocking the event loop. - """ - loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, self._read_all) - - -class TextIOPayload(IOBasePayload): - _value: io.TextIOBase - # _autoclose = False (inherited) - Has text file handle that needs explicit closing - - def __init__( - self, - value: TextIO, - *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any, - ) -> None: - - if encoding is None: - if content_type is None: - encoding = "utf-8" - content_type = "text/plain; charset=utf-8" - else: - mimetype = parse_mimetype(content_type) - encoding = mimetype.parameters.get("charset", "utf-8") - else: - if content_type is None: - content_type = "text/plain; charset=%s" % encoding - - super().__init__( - value, - content_type=content_type, - encoding=encoding, - *args, - **kwargs, - ) - - def _read_and_available_len( - self, remaining_content_len: Optional[int] - ) -> Tuple[Optional[int], bytes]: - """ - Read the text file-like object and return both its total size and the first chunk. - - Args: - remaining_content_len: Optional limit on how many bytes to read in this operation. - If None, READ_SIZE will be used as the default chunk size. - - Returns: - A tuple containing: - - The total size of the remaining unread content (None if size cannot be determined) - - The first chunk of bytes read from the file object, encoded using the payload's encoding - - This method is optimized to perform both size calculation and initial read - in a single operation, which is executed in a single executor job to minimize - context switches and file operations when streaming content. - - Note: - TextIOPayload handles encoding of the text content before writing it - to the stream. If no encoding is specified, UTF-8 is used as the default. - - """ - self._set_or_restore_start_position() - size = self.size - chunk = self._value.read( - min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE) - ) - return size, chunk.encode(self._encoding) if self._encoding else chunk.encode() - - def _read(self, remaining_content_len: Optional[int]) -> bytes: - """ - Read a chunk of data from the text file-like object. - - Args: - remaining_content_len: Optional maximum number of bytes to read. 
- If None, READ_SIZE will be used as the default chunk size. - - Returns: - A chunk of bytes read from the file object and encoded using the payload's - encoding. The data is automatically converted from text to bytes. - - This method is used for subsequent reads during streaming after - the initial _read_and_available_len call has been made. It properly - handles text encoding, converting the text content to bytes using - the specified encoding (or UTF-8 if none was provided). - - """ - chunk = self._value.read(remaining_content_len or READ_SIZE) - return chunk.encode(self._encoding) if self._encoding else chunk.encode() - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """ - Return string representation of the value. - - WARNING: This method does blocking I/O and should not be called in the event loop. - """ - self._set_or_restore_start_position() - return self._value.read() - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """ - Return bytes representation of the value. - - This method reads the entire text file content and returns it as bytes. - It encodes the text content using the specified encoding. - The file reading is performed in an executor to avoid blocking the event loop. - """ - loop = asyncio.get_running_loop() - - # Use instance encoding if available, otherwise use parameter - actual_encoding = self._encoding or encoding - - def _read_and_encode() -> bytes: - self._set_or_restore_start_position() - # TextIO read() always returns the full content - return self._value.read().encode(actual_encoding, errors) - - return await loop.run_in_executor(None, _read_and_encode) - - -class BytesIOPayload(IOBasePayload): - _value: io.BytesIO - _size: int # Always initialized in __init__ - _autoclose = True # BytesIO is in-memory, safe to auto-close - - def __init__(self, value: io.BytesIO, *args: Any, **kwargs: Any) -> None: - super().__init__(value, *args, **kwargs) - # Calculate size once during initialization - self._size = len(self._value.getbuffer()) - self._value.tell() - - @property - def size(self) -> int: - """Size of the payload in bytes. - - Returns the number of bytes in the BytesIO buffer that will be transmitted. - This is calculated once during initialization for efficiency. - """ - return self._size - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - self._set_or_restore_start_position() - return self._value.read().decode(encoding, errors) - - async def write(self, writer: AbstractStreamWriter) -> None: - return await self.write_with_length(writer, None) - - async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] - ) -> None: - """ - Write BytesIO payload with a specific content length constraint. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - content_length: Maximum number of bytes to write (None for unlimited) - - This implementation is specifically optimized for BytesIO objects: - - 1. Reads content in chunks to maintain memory efficiency - 2. Yields control back to the event loop periodically to prevent blocking - when dealing with large BytesIO objects - 3. Respects content_length constraints when specified - 4. Properly cleans up by closing the BytesIO object when done or on error - - The periodic yielding to the event loop is important for maintaining - responsiveness when processing large in-memory buffers. 
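# Sketch of the workaround suggested by the ResourceWarning in BytesPayload
# above: wrapping a large in-memory body in io.BytesIO selects BytesIOPayload,
# whose chunked write path periodically yields to the event loop instead of
# writing one huge bytes object. Size and URL are illustrative assumptions.
import asyncio
import io
import aiohttp


async def send_big_body() -> None:
    big = b"x" * (8 * 1024 * 1024)  # 8 MiB, well above the 1 MiB warning threshold
    async with aiohttp.ClientSession() as session:
        async with session.post("https://example.com/ingest", data=io.BytesIO(big)) as resp:
            print(resp.status)


asyncio.run(send_big_body())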
- - """ - self._set_or_restore_start_position() - loop_count = 0 - remaining_bytes = content_length - while chunk := self._value.read(READ_SIZE): - if loop_count > 0: - # Avoid blocking the event loop - # if they pass a large BytesIO object - # and we are not in the first iteration - # of the loop - await asyncio.sleep(0) - if remaining_bytes is None: - await writer.write(chunk) - else: - await writer.write(chunk[:remaining_bytes]) - remaining_bytes -= len(chunk) - if remaining_bytes <= 0: - return - loop_count += 1 - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """ - Return bytes representation of the value. - - This method reads the entire BytesIO content and returns it as bytes. - It is equivalent to accessing the _value attribute directly. - """ - self._set_or_restore_start_position() - return self._value.read() - - async def close(self) -> None: - """ - Close the BytesIO payload. - - This does nothing since BytesIO is in-memory and does not require explicit closing. - """ - - -class BufferedReaderPayload(IOBasePayload): - _value: io.BufferedIOBase - # _autoclose = False (inherited) - Has buffered file handle that needs explicit closing - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - self._set_or_restore_start_position() - return self._value.read().decode(encoding, errors) - - -class JsonPayload(BytesPayload): - def __init__( - self, - value: Any, - encoding: str = "utf-8", - content_type: str = "application/json", - dumps: JSONEncoder = json.dumps, - *args: Any, - **kwargs: Any, - ) -> None: - - super().__init__( - dumps(value).encode(encoding), - content_type=content_type, - encoding=encoding, - *args, - **kwargs, - ) - - -if TYPE_CHECKING: - from typing import AsyncIterable, AsyncIterator - - _AsyncIterator = AsyncIterator[bytes] - _AsyncIterable = AsyncIterable[bytes] -else: - from collections.abc import AsyncIterable, AsyncIterator - - _AsyncIterator = AsyncIterator - _AsyncIterable = AsyncIterable - - -class AsyncIterablePayload(Payload): - - _iter: Optional[_AsyncIterator] = None - _value: _AsyncIterable - _cached_chunks: Optional[List[bytes]] = None - # _consumed stays False to allow reuse with cached content - _autoclose = True # Iterator doesn't need explicit closing - - def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: - if not isinstance(value, AsyncIterable): - raise TypeError( - "value argument must support " - "collections.abc.AsyncIterable interface, " - "got {!r}".format(type(value)) - ) - - if "content_type" not in kwargs: - kwargs["content_type"] = "application/octet-stream" - - super().__init__(value, *args, **kwargs) - - self._iter = value.__aiter__() - - async def write(self, writer: AbstractStreamWriter) -> None: - """ - Write the entire async iterable payload to the writer stream. - - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - - This method iterates through the async iterable and writes each chunk - to the writer without any length constraint. - - Note: - For new implementations that need length control, use write_with_length() directly. - This method is maintained for backwards compatibility with existing code. - - """ - await self.write_with_length(writer, None) - - async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] - ) -> None: - """ - Write async iterable payload with a specific content length constraint. 
- - Args: - writer: An AbstractStreamWriter instance that handles the actual writing - content_length: Maximum number of bytes to write (None for unlimited) - - This implementation handles streaming of async iterable content with length constraints: - - 1. If cached chunks are available, writes from them - 2. Otherwise iterates through the async iterable one chunk at a time - 3. Respects content_length constraints when specified - 4. Does NOT generate cache - that's done by as_bytes() - - """ - # If we have cached chunks, use them - if self._cached_chunks is not None: - remaining_bytes = content_length - for chunk in self._cached_chunks: - if remaining_bytes is None: - await writer.write(chunk) - elif remaining_bytes > 0: - await writer.write(chunk[:remaining_bytes]) - remaining_bytes -= len(chunk) - else: - break - return - - # If iterator is exhausted and we don't have cached chunks, nothing to write - if self._iter is None: - return - - # Stream from the iterator - remaining_bytes = content_length - - try: - while True: - if sys.version_info >= (3, 10): - chunk = await anext(self._iter) - else: - chunk = await self._iter.__anext__() - if remaining_bytes is None: - await writer.write(chunk) - # If we have a content length limit - elif remaining_bytes > 0: - await writer.write(chunk[:remaining_bytes]) - remaining_bytes -= len(chunk) - # We still want to exhaust the iterator even - # if we have reached the content length limit - # since the file handle may not get closed by - # the iterator if we don't do this - except StopAsyncIteration: - # Iterator is exhausted - self._iter = None - self._consumed = True # Mark as consumed when streamed without caching - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """Decode the payload content as a string if cached chunks are available.""" - if self._cached_chunks is not None: - return b"".join(self._cached_chunks).decode(encoding, errors) - raise TypeError("Unable to decode - content not cached. Call as_bytes() first.") - - async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """ - Return bytes representation of the value. - - This method reads the entire async iterable content and returns it as bytes. - It generates and caches the chunks for future reuse. 
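# Sketch of feeding an async iterable as a request body, which is handled by
# the AsyncIterablePayload registered above; this is also the async-generator
# style that the deprecated @streamer helper points users toward. The file
# name, chunk size and URL are illustrative assumptions.
import asyncio
from typing import AsyncIterator
import aiohttp


async def file_chunks(path: str) -> AsyncIterator[bytes]:
    # Read the file in 64 KiB chunks (blocking reads kept small for the sketch)
    with open(path, "rb") as f:
        while chunk := f.read(2**16):
            yield chunk


async def upload_streaming() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "https://example.com/upload", data=file_chunks("huge_file.bin")
        ) as resp:
            print(resp.status)


asyncio.run(upload_streaming())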
- """ - # If we have cached chunks, return them joined - if self._cached_chunks is not None: - return b"".join(self._cached_chunks) - - # If iterator is exhausted and no cache, return empty - if self._iter is None: - return b"" - - # Read all chunks and cache them - chunks: List[bytes] = [] - async for chunk in self._iter: - chunks.append(chunk) - - # Iterator is exhausted, cache the chunks - self._iter = None - self._cached_chunks = chunks - # Keep _consumed as False to allow reuse with cached chunks - - return b"".join(chunks) - - -class StreamReaderPayload(AsyncIterablePayload): - def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: - super().__init__(value.iter_any(), *args, **kwargs) - - -PAYLOAD_REGISTRY = PayloadRegistry() -PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) -PAYLOAD_REGISTRY.register(StringPayload, str) -PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) -PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) -PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) -PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) -PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) -PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) -# try_last for giving a chance to more specialized async interables like -# multipart.BodyPartReaderPayload override the default -PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/venv/Lib/site-packages/aiohttp/payload_streamer.py b/venv/Lib/site-packages/aiohttp/payload_streamer.py deleted file mode 100644 index 831fdc0..0000000 --- a/venv/Lib/site-packages/aiohttp/payload_streamer.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Payload implementation for coroutines as data provider. 
- -As a simple case, you can upload data from file:: - - @aiohttp.streamer - async def file_sender(writer, file_name=None): - with open(file_name, 'rb') as f: - chunk = f.read(2**16) - while chunk: - await writer.write(chunk) - - chunk = f.read(2**16) - -Then you can use `file_sender` like this: - - async with session.post('http://httpbin.org/post', - data=file_sender(file_name='huge_file')) as resp: - print(await resp.text()) - -..note:: Coroutine must accept `writer` as first argument - -""" - -import types -import warnings -from typing import Any, Awaitable, Callable, Dict, Tuple - -from .abc import AbstractStreamWriter -from .payload import Payload, payload_type - -__all__ = ("streamer",) - - -class _stream_wrapper: - def __init__( - self, - coro: Callable[..., Awaitable[None]], - args: Tuple[Any, ...], - kwargs: Dict[str, Any], - ) -> None: - self.coro = types.coroutine(coro) - self.args = args - self.kwargs = kwargs - - async def __call__(self, writer: AbstractStreamWriter) -> None: - await self.coro(writer, *self.args, **self.kwargs) - - -class streamer: - def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: - warnings.warn( - "@streamer is deprecated, use async generators instead", - DeprecationWarning, - stacklevel=2, - ) - self.coro = coro - - def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: - return _stream_wrapper(self.coro, args, kwargs) - - -@payload_type(_stream_wrapper) -class StreamWrapperPayload(Payload): - async def write(self, writer: AbstractStreamWriter) -> None: - await self._value(writer) - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - raise TypeError("Unable to decode.") - - -@payload_type(streamer) -class StreamPayload(StreamWrapperPayload): - def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: - super().__init__(value(), *args, **kwargs) - - async def write(self, writer: AbstractStreamWriter) -> None: - await self._value(writer) diff --git a/venv/Lib/site-packages/aiohttp/py.typed b/venv/Lib/site-packages/aiohttp/py.typed deleted file mode 100644 index f5642f7..0000000 --- a/venv/Lib/site-packages/aiohttp/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/venv/Lib/site-packages/aiohttp/pytest_plugin.py b/venv/Lib/site-packages/aiohttp/pytest_plugin.py deleted file mode 100644 index 7d59fe8..0000000 --- a/venv/Lib/site-packages/aiohttp/pytest_plugin.py +++ /dev/null @@ -1,444 +0,0 @@ -import asyncio -import contextlib -import inspect -import warnings -from typing import ( - Any, - Awaitable, - Callable, - Dict, - Iterator, - Optional, - Protocol, - Union, - overload, -) - -import pytest - -from .test_utils import ( - BaseTestServer, - RawTestServer, - TestClient, - TestServer, - loop_context, - setup_test_loop, - teardown_test_loop, - unused_port as _unused_port, -) -from .web import Application, BaseRequest, Request -from .web_protocol import _RequestHandler - -try: - import uvloop -except ImportError: # pragma: no cover - uvloop = None # type: ignore[assignment] - - -class AiohttpClient(Protocol): - @overload - async def __call__( - self, - __param: Application, - *, - server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> TestClient[Request, Application]: ... - @overload - async def __call__( - self, - __param: BaseTestServer, - *, - server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> TestClient[BaseRequest, None]: ... 
- - -class AiohttpServer(Protocol): - def __call__( - self, app: Application, *, port: Optional[int] = None, **kwargs: Any - ) -> Awaitable[TestServer]: ... - - -class AiohttpRawServer(Protocol): - def __call__( - self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any - ) -> Awaitable[RawTestServer]: ... - - -def pytest_addoption(parser): # type: ignore[no-untyped-def] - parser.addoption( - "--aiohttp-fast", - action="store_true", - default=False, - help="run tests faster by disabling extra checks", - ) - parser.addoption( - "--aiohttp-loop", - action="store", - default="pyloop", - help="run tests with specific loop: pyloop, uvloop or all", - ) - parser.addoption( - "--aiohttp-enable-loop-debug", - action="store_true", - default=False, - help="enable event loop debug mode", - ) - - -def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] - """Set up pytest fixture. - - Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. - """ - func = fixturedef.func - - if inspect.isasyncgenfunction(func): - # async generator fixture - is_async_gen = True - elif inspect.iscoroutinefunction(func): - # regular async fixture - is_async_gen = False - else: - # not an async fixture, nothing to do - return - - strip_request = False - if "request" not in fixturedef.argnames: - fixturedef.argnames += ("request",) - strip_request = True - - def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] - request = kwargs["request"] - if strip_request: - del kwargs["request"] - - # if neither the fixture nor the test use the 'loop' fixture, - # 'getfixturevalue' will fail because the test is not parameterized - # (this can be removed someday if 'loop' is no longer parameterized) - if "loop" not in request.fixturenames: - raise Exception( - "Asynchronous fixtures must depend on the 'loop' fixture or " - "be used in tests depending from it." - ) - - _loop = request.getfixturevalue("loop") - - if is_async_gen: - # for async generators, we need to advance the generator once, - # then advance it again in a finalizer - gen = func(*args, **kwargs) - - def finalizer(): # type: ignore[no-untyped-def] - try: - return _loop.run_until_complete(gen.__anext__()) - except StopAsyncIteration: - pass - - request.addfinalizer(finalizer) - return _loop.run_until_complete(gen.__anext__()) - else: - return _loop.run_until_complete(func(*args, **kwargs)) - - fixturedef.func = wrapper - - -@pytest.fixture -def fast(request): # type: ignore[no-untyped-def] - """--fast config option""" - return request.config.getoption("--aiohttp-fast") - - -@pytest.fixture -def loop_debug(request): # type: ignore[no-untyped-def] - """--enable-loop-debug config option""" - return request.config.getoption("--aiohttp-enable-loop-debug") - - -@contextlib.contextmanager -def _runtime_warning_context(): # type: ignore[no-untyped-def] - """Context manager which checks for RuntimeWarnings. - - This exists specifically to - avoid "coroutine 'X' was never awaited" warnings being missed. - - If RuntimeWarnings occur in the context a RuntimeError is raised. 
- """ - with warnings.catch_warnings(record=True) as _warnings: - yield - rw = [ - "{w.filename}:{w.lineno}:{w.message}".format(w=w) - for w in _warnings - if w.category == RuntimeWarning - ] - if rw: - raise RuntimeError( - "{} Runtime Warning{},\n{}".format( - len(rw), "" if len(rw) == 1 else "s", "\n".join(rw) - ) - ) - - -@contextlib.contextmanager -def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] - """Passthrough loop context. - - Sets up and tears down a loop unless one is passed in via the loop - argument when it's passed straight through. - """ - if loop: - # loop already exists, pass it straight through - yield loop - else: - # this shadows loop_context's standard behavior - loop = setup_test_loop() - yield loop - teardown_test_loop(loop, fast=fast) - - -def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] - """Fix pytest collecting for coroutines.""" - if collector.funcnamefilter(name) and inspect.iscoroutinefunction(obj): - return list(collector._genfunctions(name, obj)) - - -def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] - """Run coroutines in an event loop instead of a normal function call.""" - fast = pyfuncitem.config.getoption("--aiohttp-fast") - if inspect.iscoroutinefunction(pyfuncitem.function): - existing_loop = ( - pyfuncitem.funcargs.get("proactor_loop") - or pyfuncitem.funcargs.get("selector_loop") - or pyfuncitem.funcargs.get("uvloop_loop") - or pyfuncitem.funcargs.get("loop", None) - ) - - with _runtime_warning_context(): - with _passthrough_loop_context(existing_loop, fast=fast) as _loop: - testargs = { - arg: pyfuncitem.funcargs[arg] - for arg in pyfuncitem._fixtureinfo.argnames - } - _loop.run_until_complete(pyfuncitem.obj(**testargs)) - - return True - - -def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] - if "loop_factory" not in metafunc.fixturenames: - return - - loops = metafunc.config.option.aiohttp_loop - avail_factories: dict[str, Callable[[], asyncio.AbstractEventLoop]] - avail_factories = {"pyloop": asyncio.new_event_loop} - - if uvloop is not None: # pragma: no cover - avail_factories["uvloop"] = uvloop.new_event_loop - - if loops == "all": - loops = "pyloop,uvloop?" 
- - factories = {} # type: ignore[var-annotated] - for name in loops.split(","): - required = not name.endswith("?") - name = name.strip(" ?") - if name not in avail_factories: # pragma: no cover - if required: - raise ValueError( - "Unknown loop '%s', available loops: %s" - % (name, list(factories.keys())) - ) - else: - continue - factories[name] = avail_factories[name] - metafunc.parametrize( - "loop_factory", list(factories.values()), ids=list(factories.keys()) - ) - - -@pytest.fixture -def loop( - loop_factory: Callable[[], asyncio.AbstractEventLoop], - fast: bool, - loop_debug: bool, -) -> Iterator[asyncio.AbstractEventLoop]: - """Return an instance of the event loop.""" - with loop_context(loop_factory, fast=fast) as _loop: - if loop_debug: - _loop.set_debug(True) # pragma: no cover - asyncio.set_event_loop(_loop) - yield _loop - - -@pytest.fixture -def proactor_loop() -> Iterator[asyncio.AbstractEventLoop]: - factory = asyncio.ProactorEventLoop # type: ignore[attr-defined] - - with loop_context(factory) as _loop: - asyncio.set_event_loop(_loop) - yield _loop - - -@pytest.fixture -def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]: - warnings.warn( - "Deprecated, use aiohttp_unused_port fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_unused_port - - -@pytest.fixture -def aiohttp_unused_port() -> Callable[[], int]: - """Return a port that is unused on the current host.""" - return _unused_port - - -@pytest.fixture -def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: - """Factory to create a TestServer instance, given an app. - - aiohttp_server(app, **kwargs) - """ - servers = [] - - async def go( - app: Application, - *, - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ) -> TestServer: - server = TestServer(app, host=host, port=port) - await server.start_server(loop=loop, **kwargs) - servers.append(server) - return server - - yield go - - async def finalize() -> None: - while servers: - await servers.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_server fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_server - - -@pytest.fixture -def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]: - """Factory to create a RawTestServer instance, given a web handler. - - aiohttp_raw_server(handler, **kwargs) - """ - servers = [] - - async def go( - handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any - ) -> RawTestServer: - server = RawTestServer(handler, port=port) - await server.start_server(loop=loop, **kwargs) - servers.append(server) - return server - - yield go - - async def finalize() -> None: - while servers: - await servers.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover - aiohttp_raw_server, -): - warnings.warn( - "Deprecated, use aiohttp_raw_server fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_raw_server - - -@pytest.fixture -def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]: - """Factory to create a TestClient instance. 
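# Sketch of the test-side usage the aiohttp_client fixture enables: a coroutine
# test collected by the plugin hooks above builds an Application, gets a
# TestClient bound to it, and exercises a route. Handler and route are
# illustrative assumptions.
from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="ok")


async def test_hello(aiohttp_client) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "ok"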
- - aiohttp_client(app, **kwargs) - aiohttp_client(server, **kwargs) - aiohttp_client(raw_server, **kwargs) - """ - clients = [] - - @overload - async def go( - __param: Application, - *, - server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> TestClient[Request, Application]: ... - - @overload - async def go( - __param: BaseTestServer, - *, - server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> TestClient[BaseRequest, None]: ... - - async def go( - __param: Union[Application, BaseTestServer], - *args: Any, - server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> TestClient[Any, Any]: - if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] - __param, (Application, BaseTestServer) - ): - __param = __param(loop, *args, **kwargs) - kwargs = {} - else: - assert not args, "args should be empty" - - if isinstance(__param, Application): - server_kwargs = server_kwargs or {} - server = TestServer(__param, loop=loop, **server_kwargs) - client = TestClient(server, loop=loop, **kwargs) - elif isinstance(__param, BaseTestServer): - client = TestClient(__param, loop=loop, **kwargs) - else: - raise ValueError("Unknown argument type: %r" % type(__param)) - - await client.start_server() - clients.append(client) - return client - - yield go - - async def finalize() -> None: - while clients: - await clients.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_client fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_client diff --git a/venv/Lib/site-packages/aiohttp/resolver.py b/venv/Lib/site-packages/aiohttp/resolver.py deleted file mode 100644 index b20e567..0000000 --- a/venv/Lib/site-packages/aiohttp/resolver.py +++ /dev/null @@ -1,274 +0,0 @@ -import asyncio -import socket -import weakref -from typing import Any, Dict, Final, List, Optional, Tuple, Type, Union - -from .abc import AbstractResolver, ResolveResult - -__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") - - -try: - import aiodns - - aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo") -except ImportError: # pragma: no cover - aiodns = None # type: ignore[assignment] - aiodns_default = False - - -_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV -_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV -_AI_ADDRCONFIG = socket.AI_ADDRCONFIG -if hasattr(socket, "AI_MASK"): - _AI_ADDRCONFIG &= socket.AI_MASK - - -class ThreadedResolver(AbstractResolver): - """Threaded resolver. - - Uses an Executor for synchronous getaddrinfo() calls. - concurrent.futures.ThreadPoolExecutor is used by default. - """ - - def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = loop or asyncio.get_running_loop() - - async def resolve( - self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET - ) -> List[ResolveResult]: - infos = await self._loop.getaddrinfo( - host, - port, - type=socket.SOCK_STREAM, - family=family, - flags=_AI_ADDRCONFIG, - ) - - hosts: List[ResolveResult] = [] - for family, _, proto, _, address in infos: - if family == socket.AF_INET6: - if len(address) < 3: - # IPv6 is not supported by Python build, - # or IPv6 is not enabled in the host - continue - if address[3]: - # This is essential for link-local IPv6 addresses. - # LL IPv6 is a VERY rare case. 
Strictly speaking, we should use - # getnameinfo() unconditionally, but performance makes sense. - resolved_host, _port = await self._loop.getnameinfo( - address, _NAME_SOCKET_FLAGS - ) - port = int(_port) - else: - resolved_host, port = address[:2] - else: # IPv4 - assert family == socket.AF_INET - resolved_host, port = address # type: ignore[misc] - hosts.append( - ResolveResult( - hostname=host, - host=resolved_host, - port=port, - family=family, - proto=proto, - flags=_NUMERIC_SOCKET_FLAGS, - ) - ) - - return hosts - - async def close(self) -> None: - pass - - -class AsyncResolver(AbstractResolver): - """Use the `aiodns` package to make asynchronous DNS lookups""" - - def __init__( - self, - loop: Optional[asyncio.AbstractEventLoop] = None, - *args: Any, - **kwargs: Any, - ) -> None: - if aiodns is None: - raise RuntimeError("Resolver requires aiodns library") - - self._loop = loop or asyncio.get_running_loop() - self._manager: Optional[_DNSResolverManager] = None - # If custom args are provided, create a dedicated resolver instance - # This means each AsyncResolver with custom args gets its own - # aiodns.DNSResolver instance - if args or kwargs: - self._resolver = aiodns.DNSResolver(*args, **kwargs) - return - # Use the shared resolver from the manager for default arguments - self._manager = _DNSResolverManager() - self._resolver = self._manager.get_resolver(self, self._loop) - - if not hasattr(self._resolver, "gethostbyname"): - # aiodns 1.1 is not available, fallback to DNSResolver.query - self.resolve = self._resolve_with_query # type: ignore - - async def resolve( - self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET - ) -> List[ResolveResult]: - try: - resp = await self._resolver.getaddrinfo( - host, - port=port, - type=socket.SOCK_STREAM, - family=family, - flags=_AI_ADDRCONFIG, - ) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(None, msg) from exc - hosts: List[ResolveResult] = [] - for node in resp.nodes: - address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr - family = node.family - if family == socket.AF_INET6: - if len(address) > 3 and address[3]: - # This is essential for link-local IPv6 addresses. - # LL IPv6 is a VERY rare case. Strictly speaking, we should use - # getnameinfo() unconditionally, but performance makes sense. 
- result = await self._resolver.getnameinfo( - (address[0].decode("ascii"), *address[1:]), - _NAME_SOCKET_FLAGS, - ) - resolved_host = result.node - else: - resolved_host = address[0].decode("ascii") - port = address[1] - else: # IPv4 - assert family == socket.AF_INET - resolved_host = address[0].decode("ascii") - port = address[1] - hosts.append( - ResolveResult( - hostname=host, - host=resolved_host, - port=port, - family=family, - proto=0, - flags=_NUMERIC_SOCKET_FLAGS, - ) - ) - - if not hosts: - raise OSError(None, "DNS lookup failed") - - return hosts - - async def _resolve_with_query( - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - qtype: Final = "AAAA" if family == socket.AF_INET6 else "A" - - try: - resp = await self._resolver.query(host, qtype) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(None, msg) from exc - - hosts = [] - for rr in resp: - hosts.append( - { - "hostname": host, - "host": rr.host, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST, - } - ) - - if not hosts: - raise OSError(None, "DNS lookup failed") - - return hosts - - async def close(self) -> None: - if self._manager: - # Release the resolver from the manager if using the shared resolver - self._manager.release_resolver(self, self._loop) - self._manager = None # Clear reference to manager - self._resolver = None # type: ignore[assignment] # Clear reference to resolver - return - # Otherwise cancel our dedicated resolver - if self._resolver is not None: - self._resolver.cancel() - self._resolver = None # type: ignore[assignment] # Clear reference - - -class _DNSResolverManager: - """Manager for aiodns.DNSResolver objects. - - This class manages shared aiodns.DNSResolver instances - with no custom arguments across different event loops. - """ - - _instance: Optional["_DNSResolverManager"] = None - - def __new__(cls) -> "_DNSResolverManager": - if cls._instance is None: - cls._instance = super().__new__(cls) - cls._instance._init() - return cls._instance - - def _init(self) -> None: - # Use WeakKeyDictionary to allow event loops to be garbage collected - self._loop_data: weakref.WeakKeyDictionary[ - asyncio.AbstractEventLoop, - tuple["aiodns.DNSResolver", weakref.WeakSet["AsyncResolver"]], - ] = weakref.WeakKeyDictionary() - - def get_resolver( - self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop - ) -> "aiodns.DNSResolver": - """Get or create the shared aiodns.DNSResolver instance for a specific event loop. - - Args: - client: The AsyncResolver instance requesting the resolver. - This is required to track resolver usage. - loop: The event loop to use for the resolver. - """ - # Create a new resolver and client set for this loop if it doesn't exist - if loop not in self._loop_data: - resolver = aiodns.DNSResolver(loop=loop) - client_set: weakref.WeakSet["AsyncResolver"] = weakref.WeakSet() - self._loop_data[loop] = (resolver, client_set) - else: - # Get the existing resolver and client set - resolver, client_set = self._loop_data[loop] - - # Register this client with the loop - client_set.add(client) - return resolver - - def release_resolver( - self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop - ) -> None: - """Release the resolver for an AsyncResolver client when it's closed. - - Args: - client: The AsyncResolver instance to release. - loop: The event loop the resolver was using. 
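# Sketch of choosing a resolver explicitly instead of relying on
# DefaultResolver: an AsyncResolver is handed to the TCP connector. Note that
# passing custom nameservers gives it a dedicated aiodns.DNSResolver rather
# than the shared per-loop instance managed above. Nameserver addresses and the
# URL are illustrative assumptions.
import asyncio
import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch() -> None:
    resolver = AsyncResolver(nameservers=["8.8.8.8", "8.8.4.4"])
    connector = aiohttp.TCPConnector(resolver=resolver)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get("https://example.com/") as resp:
            print(resp.status)


asyncio.run(fetch())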
- """ - # Remove client from its loop's tracking - current_loop_data = self._loop_data.get(loop) - if current_loop_data is None: - return - resolver, client_set = current_loop_data - client_set.discard(client) - # If no more clients for this loop, cancel and remove its resolver - if not client_set: - if resolver is not None: - resolver.cancel() - del self._loop_data[loop] - - -_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] -DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/venv/Lib/site-packages/aiohttp/streams.py b/venv/Lib/site-packages/aiohttp/streams.py deleted file mode 100644 index 6cc74fc..0000000 --- a/venv/Lib/site-packages/aiohttp/streams.py +++ /dev/null @@ -1,758 +0,0 @@ -import asyncio -import collections -import warnings -from typing import ( - Awaitable, - Callable, - Deque, - Final, - Generic, - List, - Optional, - Tuple, - TypeVar, -) - -from .base_protocol import BaseProtocol -from .helpers import ( - _EXC_SENTINEL, - BaseTimerContext, - TimerNoop, - set_exception, - set_result, -) -from .log import internal_logger - -__all__ = ( - "EMPTY_PAYLOAD", - "EofStream", - "StreamReader", - "DataQueue", -) - -_T = TypeVar("_T") - - -class EofStream(Exception): - """eof stream indication.""" - - -class AsyncStreamIterator(Generic[_T]): - - __slots__ = ("read_func",) - - def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: - self.read_func = read_func - - def __aiter__(self) -> "AsyncStreamIterator[_T]": - return self - - async def __anext__(self) -> _T: - try: - rv = await self.read_func() - except EofStream: - raise StopAsyncIteration - if rv == b"": - raise StopAsyncIteration - return rv - - -class ChunkTupleAsyncStreamIterator: - - __slots__ = ("_stream",) - - def __init__(self, stream: "StreamReader") -> None: - self._stream = stream - - def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": - return self - - async def __anext__(self) -> Tuple[bytes, bool]: - rv = await self._stream.readchunk() - if rv == (b"", False): - raise StopAsyncIteration - return rv - - -class AsyncStreamReaderMixin: - - __slots__ = () - - def __aiter__(self) -> AsyncStreamIterator[bytes]: - return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] - - def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: - """Returns an asynchronous iterator that yields chunks of size n.""" - return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined] - - def iter_any(self) -> AsyncStreamIterator[bytes]: - """Yield all available data as soon as it is received.""" - return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] - - def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: - """Yield chunks of data as they are received by the server. - - The yielded objects are tuples - of (bytes, bool) as returned by the StreamReader.readchunk method. - """ - return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] - - -class StreamReader(AsyncStreamReaderMixin): - """An enhancement of asyncio.StreamReader. - - Supports asynchronous iteration by line, chunk or as available:: - - async for line in reader: - ... - async for chunk in reader.iter_chunked(1024): - ... - async for slice in reader.iter_any(): - ... 
- - """ - - __slots__ = ( - "_protocol", - "_low_water", - "_high_water", - "_low_water_chunks", - "_high_water_chunks", - "_loop", - "_size", - "_cursor", - "_http_chunk_splits", - "_buffer", - "_buffer_offset", - "_eof", - "_waiter", - "_eof_waiter", - "_exception", - "_timer", - "_eof_callbacks", - "_eof_counter", - "total_bytes", - "total_compressed_bytes", - ) - - def __init__( - self, - protocol: BaseProtocol, - limit: int, - *, - timer: Optional[BaseTimerContext] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - self._protocol = protocol - self._low_water = limit - self._high_water = limit * 2 - if loop is None: - loop = asyncio.get_event_loop() - # Ensure high_water_chunks >= 3 so it's always > low_water_chunks. - self._high_water_chunks = max(3, limit // 4) - # Use max(2, ...) because there's always at least 1 chunk split remaining - # (the current position), so we need low_water >= 2 to allow resume. - self._low_water_chunks = max(2, self._high_water_chunks // 2) - self._loop = loop - self._size = 0 - self._cursor = 0 - self._http_chunk_splits: Optional[Deque[int]] = None - self._buffer: Deque[bytes] = collections.deque() - self._buffer_offset = 0 - self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._eof_waiter: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None - self._timer = TimerNoop() if timer is None else timer - self._eof_callbacks: List[Callable[[], None]] = [] - self._eof_counter = 0 - self.total_bytes = 0 - self.total_compressed_bytes: Optional[int] = None - - def __repr__(self) -> str: - info = [self.__class__.__name__] - if self._size: - info.append("%d bytes" % self._size) - if self._eof: - info.append("eof") - if self._low_water != 2**16: # default limit - info.append("low=%d high=%d" % (self._low_water, self._high_water)) - if self._waiter: - info.append("w=%r" % self._waiter) - if self._exception: - info.append("e=%r" % self._exception) - return "<%s>" % " ".join(info) - - def get_read_buffer_limits(self) -> Tuple[int, int]: - return (self._low_water, self._high_water) - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception( - self, - exc: BaseException, - exc_cause: BaseException = _EXC_SENTINEL, - ) -> None: - self._exception = exc - self._eof_callbacks.clear() - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_exception(waiter, exc, exc_cause) - - waiter = self._eof_waiter - if waiter is not None: - self._eof_waiter = None - set_exception(waiter, exc, exc_cause) - - def on_eof(self, callback: Callable[[], None]) -> None: - if self._eof: - try: - callback() - except Exception: - internal_logger.exception("Exception in eof callback") - else: - self._eof_callbacks.append(callback) - - def feed_eof(self) -> None: - self._eof = True - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - waiter = self._eof_waiter - if waiter is not None: - self._eof_waiter = None - set_result(waiter, None) - - if self._protocol._reading_paused: - self._protocol.resume_reading() - - for cb in self._eof_callbacks: - try: - cb() - except Exception: - internal_logger.exception("Exception in eof callback") - - self._eof_callbacks.clear() - - def is_eof(self) -> bool: - """Return True if 'feed_eof' was called.""" - return self._eof - - def at_eof(self) -> bool: - """Return True if the buffer is empty and 'feed_eof' was called.""" - return self._eof and not self._buffer - - async def 
wait_eof(self) -> None: - if self._eof: - return - - assert self._eof_waiter is None - self._eof_waiter = self._loop.create_future() - try: - await self._eof_waiter - finally: - self._eof_waiter = None - - @property - def total_raw_bytes(self) -> int: - if self.total_compressed_bytes is None: - return self.total_bytes - return self.total_compressed_bytes - - def unread_data(self, data: bytes) -> None: - """rollback reading some data from stream, inserting it to buffer head.""" - warnings.warn( - "unread_data() is deprecated " - "and will be removed in future releases (#3260)", - DeprecationWarning, - stacklevel=2, - ) - if not data: - return - - if self._buffer_offset: - self._buffer[0] = self._buffer[0][self._buffer_offset :] - self._buffer_offset = 0 - self._size += len(data) - self._cursor -= len(data) - self._buffer.appendleft(data) - self._eof_counter = 0 - - # TODO: size is ignored, remove the param later - def feed_data(self, data: bytes, size: int = 0) -> None: - assert not self._eof, "feed_data after feed_eof" - - if not data: - return - - data_len = len(data) - self._size += data_len - self._buffer.append(data) - self.total_bytes += data_len - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - if self._size > self._high_water and not self._protocol._reading_paused: - self._protocol.pause_reading() - - def begin_http_chunk_receiving(self) -> None: - if self._http_chunk_splits is None: - if self.total_bytes: - raise RuntimeError( - "Called begin_http_chunk_receiving when some data was already fed" - ) - self._http_chunk_splits = collections.deque() - - def end_http_chunk_receiving(self) -> None: - if self._http_chunk_splits is None: - raise RuntimeError( - "Called end_chunk_receiving without calling " - "begin_chunk_receiving first" - ) - - # self._http_chunk_splits contains logical byte offsets from start of - # the body transfer. Each offset is the offset of the end of a chunk. - # "Logical" means bytes, accessible for a user. - # If no chunks containing logical data were received, current position - # is difinitely zero. - pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0 - - if self.total_bytes == pos: - # We should not add empty chunks here. So we check for that. - # Note, when chunked + gzip is used, we can receive a chunk - # of compressed data, but that data may not be enough for gzip FSM - # to yield any uncompressed data. That's why current position may - # not change after receiving a chunk. - return - - self._http_chunk_splits.append(self.total_bytes) - - # If we get too many small chunks before self._high_water is reached, then any - # .read() call becomes computationally expensive, and could block the event loop - # for too long, hence an additional self._high_water_chunks here. - if ( - len(self._http_chunk_splits) > self._high_water_chunks - and not self._protocol._reading_paused - ): - self._protocol.pause_reading() - - # wake up readchunk when end of http chunk received - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - async def _wait(self, func_name: str) -> None: - if not self._protocol.connected: - raise RuntimeError("Connection closed.") - - # StreamReader uses a future to link the protocol feed_data() method - # to a read coroutine. Running two read coroutines at the same time - # would have an unexpected behaviour. It would not possible to know - # which coroutine would get the next data. 
- if self._waiter is not None: - raise RuntimeError( - "%s() called while another coroutine is " - "already waiting for incoming data" % func_name - ) - - waiter = self._waiter = self._loop.create_future() - try: - with self._timer: - await waiter - finally: - self._waiter = None - - async def readline(self) -> bytes: - return await self.readuntil() - - async def readuntil(self, separator: bytes = b"\n") -> bytes: - seplen = len(separator) - if seplen == 0: - raise ValueError("Separator should be at least one-byte string") - - if self._exception is not None: - raise self._exception - - chunk = b"" - chunk_size = 0 - not_enough = True - - while not_enough: - while self._buffer and not_enough: - offset = self._buffer_offset - ichar = self._buffer[0].find(separator, offset) + 1 - # Read from current offset to found separator or to the end. - data = self._read_nowait_chunk( - ichar - offset + seplen - 1 if ichar else -1 - ) - chunk += data - chunk_size += len(data) - if ichar: - not_enough = False - - if chunk_size > self._high_water: - raise ValueError("Chunk too big") - - if self._eof: - break - - if not_enough: - await self._wait("readuntil") - - return chunk - - async def read(self, n: int = -1) -> bytes: - if self._exception is not None: - raise self._exception - - # migration problem; with DataQueue you have to catch - # EofStream exception, so common way is to run payload.read() inside - # infinite loop. what can cause real infinite loop with StreamReader - # lets keep this code one major release. - if __debug__: - if self._eof and not self._buffer: - self._eof_counter = getattr(self, "_eof_counter", 0) + 1 - if self._eof_counter > 5: - internal_logger.warning( - "Multiple access to StreamReader in eof state, " - "might be infinite loop.", - stack_info=True, - ) - - if not n: - return b"" - - if n < 0: - # This used to just loop creating a new waiter hoping to - # collect everything in self._buffer, but that would - # deadlock if the subprocess sends more than self.limit - # bytes. So just call self.readany() until EOF. - blocks = [] - while True: - block = await self.readany() - if not block: - break - blocks.append(block) - return b"".join(blocks) - - # TODO: should be `if` instead of `while` - # because waiter maybe triggered on chunk end, - # without feeding any data - while not self._buffer and not self._eof: - await self._wait("read") - - return self._read_nowait(n) - - async def readany(self) -> bytes: - if self._exception is not None: - raise self._exception - - # TODO: should be `if` instead of `while` - # because waiter maybe triggered on chunk end, - # without feeding any data - while not self._buffer and not self._eof: - await self._wait("readany") - - return self._read_nowait(-1) - - async def readchunk(self) -> Tuple[bytes, bool]: - """Returns a tuple of (data, end_of_http_chunk). - - When chunked transfer - encoding is used, end_of_http_chunk is a boolean indicating if the end - of the data corresponds to the end of a HTTP chunk , otherwise it is - always False. 
- """ - while True: - if self._exception is not None: - raise self._exception - - while self._http_chunk_splits: - pos = self._http_chunk_splits.popleft() - if pos == self._cursor: - return (b"", True) - if pos > self._cursor: - return (self._read_nowait(pos - self._cursor), True) - internal_logger.warning( - "Skipping HTTP chunk end due to data " - "consumption beyond chunk boundary" - ) - - if self._buffer: - return (self._read_nowait_chunk(-1), False) - # return (self._read_nowait(-1), False) - - if self._eof: - # Special case for signifying EOF. - # (b'', True) is not a final return value actually. - return (b"", False) - - await self._wait("readchunk") - - async def readexactly(self, n: int) -> bytes: - if self._exception is not None: - raise self._exception - - blocks: List[bytes] = [] - while n > 0: - block = await self.read(n) - if not block: - partial = b"".join(blocks) - raise asyncio.IncompleteReadError(partial, len(partial) + n) - blocks.append(block) - n -= len(block) - - return b"".join(blocks) - - def read_nowait(self, n: int = -1) -> bytes: - # default was changed to be consistent with .read(-1) - # - # I believe the most users don't know about the method and - # they are not affected. - if self._exception is not None: - raise self._exception - - if self._waiter and not self._waiter.done(): - raise RuntimeError( - "Called while some coroutine is waiting for incoming data." - ) - - return self._read_nowait(n) - - def _read_nowait_chunk(self, n: int) -> bytes: - first_buffer = self._buffer[0] - offset = self._buffer_offset - if n != -1 and len(first_buffer) - offset > n: - data = first_buffer[offset : offset + n] - self._buffer_offset += n - - elif offset: - self._buffer.popleft() - data = first_buffer[offset:] - self._buffer_offset = 0 - - else: - data = self._buffer.popleft() - - data_len = len(data) - self._size -= data_len - self._cursor += data_len - - chunk_splits = self._http_chunk_splits - # Prevent memory leak: drop useless chunk splits - while chunk_splits and chunk_splits[0] < self._cursor: - chunk_splits.popleft() - - if ( - self._protocol._reading_paused - and self._size < self._low_water - and ( - self._http_chunk_splits is None - or len(self._http_chunk_splits) < self._low_water_chunks - ) - ): - self._protocol.resume_reading() - return data - - def _read_nowait(self, n: int) -> bytes: - """Read not more than n bytes, or whole buffer if n == -1""" - self._timer.assert_timeout() - - chunks = [] - while self._buffer: - chunk = self._read_nowait_chunk(n) - chunks.append(chunk) - if n != -1: - n -= len(chunk) - if n == 0: - break - - return b"".join(chunks) if chunks else b"" - - -class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] - - __slots__ = ("_read_eof_chunk",) - - def __init__(self) -> None: - self._read_eof_chunk = False - self.total_bytes = 0 - - def __repr__(self) -> str: - return "<%s>" % self.__class__.__name__ - - def exception(self) -> Optional[BaseException]: - return None - - def set_exception( - self, - exc: BaseException, - exc_cause: BaseException = _EXC_SENTINEL, - ) -> None: - pass - - def on_eof(self, callback: Callable[[], None]) -> None: - try: - callback() - except Exception: - internal_logger.exception("Exception in eof callback") - - def feed_eof(self) -> None: - pass - - def is_eof(self) -> bool: - return True - - def at_eof(self) -> bool: - return True - - async def wait_eof(self) -> None: - return - - def feed_data(self, data: bytes, n: int = 0) -> None: - pass - - async def readline(self) -> bytes: - return b"" - 
- async def read(self, n: int = -1) -> bytes: - return b"" - - # TODO add async def readuntil - - async def readany(self) -> bytes: - return b"" - - async def readchunk(self) -> Tuple[bytes, bool]: - if not self._read_eof_chunk: - self._read_eof_chunk = True - return (b"", False) - - return (b"", True) - - async def readexactly(self, n: int) -> bytes: - raise asyncio.IncompleteReadError(b"", n) - - def read_nowait(self, n: int = -1) -> bytes: - return b"" - - -EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() - - -class DataQueue(Generic[_T]): - """DataQueue is a general-purpose blocking queue with one reader.""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None - self._buffer: Deque[Tuple[_T, int]] = collections.deque() - - def __len__(self) -> int: - return len(self._buffer) - - def is_eof(self) -> bool: - return self._eof - - def at_eof(self) -> bool: - return self._eof and not self._buffer - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception( - self, - exc: BaseException, - exc_cause: BaseException = _EXC_SENTINEL, - ) -> None: - self._eof = True - self._exception = exc - if (waiter := self._waiter) is not None: - self._waiter = None - set_exception(waiter, exc, exc_cause) - - def feed_data(self, data: _T, size: int = 0) -> None: - self._buffer.append((data, size)) - if (waiter := self._waiter) is not None: - self._waiter = None - set_result(waiter, None) - - def feed_eof(self) -> None: - self._eof = True - if (waiter := self._waiter) is not None: - self._waiter = None - set_result(waiter, None) - - async def read(self) -> _T: - if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - if self._buffer: - data, _ = self._buffer.popleft() - return data - if self._exception is not None: - raise self._exception - raise EofStream - - def __aiter__(self) -> AsyncStreamIterator[_T]: - return AsyncStreamIterator(self.read) - - -class FlowControlDataQueue(DataQueue[_T]): - """FlowControlDataQueue resumes and pauses an underlying stream. - - It is a destination for parsed data. - - This class is deprecated and will be removed in version 4.0. 
- """ - - def __init__( - self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop - ) -> None: - super().__init__(loop=loop) - self._size = 0 - self._protocol = protocol - self._limit = limit * 2 - - def feed_data(self, data: _T, size: int = 0) -> None: - super().feed_data(data, size) - self._size += size - - if self._size > self._limit and not self._protocol._reading_paused: - self._protocol.pause_reading() - - async def read(self) -> _T: - if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - if self._buffer: - data, size = self._buffer.popleft() - self._size -= size - if self._size < self._limit and self._protocol._reading_paused: - self._protocol.resume_reading() - return data - if self._exception is not None: - raise self._exception - raise EofStream diff --git a/venv/Lib/site-packages/aiohttp/tcp_helpers.py b/venv/Lib/site-packages/aiohttp/tcp_helpers.py deleted file mode 100644 index 88b2442..0000000 --- a/venv/Lib/site-packages/aiohttp/tcp_helpers.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Helper methods to tune a TCP connection""" - -import asyncio -import socket -from contextlib import suppress -from typing import Optional # noqa - -__all__ = ("tcp_keepalive", "tcp_nodelay") - - -if hasattr(socket, "SO_KEEPALIVE"): - - def tcp_keepalive(transport: asyncio.Transport) -> None: - sock = transport.get_extra_info("socket") - if sock is not None: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - -else: - - def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover - pass - - -def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: - sock = transport.get_extra_info("socket") - - if sock is None: - return - - if sock.family not in (socket.AF_INET, socket.AF_INET6): - return - - value = bool(value) - - # socket may be closed already, on windows OSError get raised - with suppress(OSError): - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/venv/Lib/site-packages/aiohttp/test_utils.py b/venv/Lib/site-packages/aiohttp/test_utils.py deleted file mode 100644 index 87c3142..0000000 --- a/venv/Lib/site-packages/aiohttp/test_utils.py +++ /dev/null @@ -1,774 +0,0 @@ -"""Utilities shared by tests.""" - -import asyncio -import contextlib -import gc -import inspect -import ipaddress -import os -import socket -import sys -import warnings -from abc import ABC, abstractmethod -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Iterator, - List, - Optional, - Type, - TypeVar, - cast, - overload, -) -from unittest import IsolatedAsyncioTestCase, mock - -from aiosignal import Signal -from multidict import CIMultiDict, CIMultiDictProxy -from yarl import URL - -import aiohttp -from aiohttp.client import ( - _RequestContextManager, - _RequestOptions, - _WSRequestContextManager, -) - -from . 
import ClientSession, hdrs -from .abc import AbstractCookieJar -from .client_reqrep import ClientResponse -from .client_ws import ClientWebSocketResponse -from .helpers import sentinel -from .http import HttpVersion, RawRequestMessage -from .streams import EMPTY_PAYLOAD, StreamReader -from .typedefs import StrOrURL -from .web import ( - Application, - AppRunner, - BaseRequest, - BaseRunner, - Request, - Server, - ServerRunner, - SockSite, - UrlMappingMatchInfo, -) -from .web_protocol import _RequestHandler - -if TYPE_CHECKING: - from ssl import SSLContext -else: - SSLContext = None - -if sys.version_info >= (3, 11) and TYPE_CHECKING: - from typing import Unpack - -if sys.version_info >= (3, 11): - from typing import Self -else: - Self = Any - -_ApplicationNone = TypeVar("_ApplicationNone", Application, None) -_Request = TypeVar("_Request", bound=BaseRequest) - -REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" - - -def get_unused_port_socket( - host: str, family: socket.AddressFamily = socket.AF_INET -) -> socket.socket: - return get_port_socket(host, 0, family) - - -def get_port_socket( - host: str, port: int, family: socket.AddressFamily -) -> socket.socket: - s = socket.socket(family, socket.SOCK_STREAM) - if REUSE_ADDRESS: - # Windows has different semantics for SO_REUSEADDR, - # so don't set it. Ref: - # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind((host, port)) - return s - - -def unused_port() -> int: - """Return a port that is unused on the current host.""" - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("127.0.0.1", 0)) - return cast(int, s.getsockname()[1]) - - -class BaseTestServer(ABC): - __test__ = False - - def __init__( - self, - *, - scheme: str = "", - loop: Optional[asyncio.AbstractEventLoop] = None, - host: str = "127.0.0.1", - port: Optional[int] = None, - skip_url_asserts: bool = False, - socket_factory: Callable[ - [str, int, socket.AddressFamily], socket.socket - ] = get_port_socket, - **kwargs: Any, - ) -> None: - self._loop = loop - self.runner: Optional[BaseRunner] = None - self._root: Optional[URL] = None - self.host = host - self.port = port - self._closed = False - self.scheme = scheme - self.skip_url_asserts = skip_url_asserts - self.socket_factory = socket_factory - - async def start_server( - self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any - ) -> None: - if self.runner: - return - self._loop = loop - self._ssl = kwargs.pop("ssl", None) - self.runner = await self._make_runner(handler_cancellation=True, **kwargs) - await self.runner.setup() - if not self.port: - self.port = 0 - absolute_host = self.host - try: - version = ipaddress.ip_address(self.host).version - except ValueError: - version = 4 - if version == 6: - absolute_host = f"[{self.host}]" - family = socket.AF_INET6 if version == 6 else socket.AF_INET - _sock = self.socket_factory(self.host, self.port, family) - self.host, self.port = _sock.getsockname()[:2] - site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) - await site.start() - server = site._server - assert server is not None - sockets = server.sockets # type: ignore[attr-defined] - assert sockets is not None - self.port = sockets[0].getsockname()[1] - if not self.scheme: - self.scheme = "https" if self._ssl else "http" - self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}") - - @abstractmethod # pragma: no cover - async def _make_runner(self, 
**kwargs: Any) -> BaseRunner: - pass - - def make_url(self, path: StrOrURL) -> URL: - assert self._root is not None - url = URL(path) - if not self.skip_url_asserts: - assert not url.absolute - return self._root.join(url) - else: - return URL(str(self._root) + str(path)) - - @property - def started(self) -> bool: - return self.runner is not None - - @property - def closed(self) -> bool: - return self._closed - - @property - def handler(self) -> Server: - # for backward compatibility - # web.Server instance - runner = self.runner - assert runner is not None - assert runner.server is not None - return runner.server - - async def close(self) -> None: - """Close all fixtures created by the test client. - - After that point, the TestClient is no longer usable. - - This is an idempotent function: running close multiple times - will not have any additional effects. - - close is also run when the object is garbage collected, and on - exit when used as a context manager. - - """ - if self.started and not self.closed: - assert self.runner is not None - await self.runner.cleanup() - self._root = None - self.port = None - self._closed = True - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "BaseTestServer": - await self.start_server(loop=self._loop) - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - await self.close() - - -class TestServer(BaseTestServer): - def __init__( - self, - app: Application, - *, - scheme: str = "", - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ): - self.app = app - super().__init__(scheme=scheme, host=host, port=port, **kwargs) - - async def _make_runner(self, **kwargs: Any) -> BaseRunner: - return AppRunner(self.app, **kwargs) - - -class RawTestServer(BaseTestServer): - def __init__( - self, - handler: _RequestHandler, - *, - scheme: str = "", - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ) -> None: - self._handler = handler - super().__init__(scheme=scheme, host=host, port=port, **kwargs) - - async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: - srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) - return ServerRunner(srv, debug=debug, **kwargs) - - -class TestClient(Generic[_Request, _ApplicationNone]): - """ - A test client implementation. - - To write functional tests for aiohttp based servers. - - """ - - __test__ = False - - @overload - def __init__( - self: "TestClient[Request, Application]", - server: TestServer, - *, - cookie_jar: Optional[AbstractCookieJar] = None, - **kwargs: Any, - ) -> None: ... - @overload - def __init__( - self: "TestClient[_Request, None]", - server: BaseTestServer, - *, - cookie_jar: Optional[AbstractCookieJar] = None, - **kwargs: Any, - ) -> None: ... 
- def __init__( - self, - server: BaseTestServer, - *, - cookie_jar: Optional[AbstractCookieJar] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any, - ) -> None: - if not isinstance(server, BaseTestServer): - raise TypeError( - "server must be TestServer instance, found type: %r" % type(server) - ) - self._server = server - self._loop = loop - if cookie_jar is None: - cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) - self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) - self._session._retry_connection = False - self._closed = False - self._responses: List[ClientResponse] = [] - self._websockets: List[ClientWebSocketResponse] = [] - - async def start_server(self) -> None: - await self._server.start_server(loop=self._loop) - - @property - def host(self) -> str: - return self._server.host - - @property - def port(self) -> Optional[int]: - return self._server.port - - @property - def server(self) -> BaseTestServer: - return self._server - - @property - def app(self) -> _ApplicationNone: - return getattr(self._server, "app", None) # type: ignore[return-value] - - @property - def session(self) -> ClientSession: - """An internal aiohttp.ClientSession. - - Unlike the methods on the TestClient, client session requests - do not automatically include the host in the url queried, and - will require an absolute path to the resource. - - """ - return self._session - - def make_url(self, path: StrOrURL) -> URL: - return self._server.make_url(path) - - async def _request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> ClientResponse: - resp = await self._session.request(method, self.make_url(path), **kwargs) - # save it to close later - self._responses.append(resp) - return resp - - if sys.version_info >= (3, 11) and TYPE_CHECKING: - - def request( - self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions] - ) -> _RequestContextManager: ... - - def get( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - def options( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - def head( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - def post( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - def put( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - def patch( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - def delete( - self, - path: StrOrURL, - **kwargs: Unpack[_RequestOptions], - ) -> _RequestContextManager: ... - - else: - - def request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> _RequestContextManager: - """Routes a request to tested http server. - - The interface is identical to aiohttp.ClientSession.request, - except the loop kwarg is overridden by the instance used by the - test server. 
- - """ - return _RequestContextManager(self._request(method, path, **kwargs)) - - def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP GET request.""" - return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) - - def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP POST request.""" - return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) - - def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP OPTIONS request.""" - return _RequestContextManager( - self._request(hdrs.METH_OPTIONS, path, **kwargs) - ) - - def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP HEAD request.""" - return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) - - def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PUT request.""" - return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) - - def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_PATCH, path, **kwargs) - ) - - def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_DELETE, path, **kwargs) - ) - - def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: - """Initiate websocket connection. - - The api corresponds to aiohttp.ClientSession.ws_connect. - - """ - return _WSRequestContextManager(self._ws_connect(path, **kwargs)) - - async def _ws_connect( - self, path: StrOrURL, **kwargs: Any - ) -> ClientWebSocketResponse: - ws = await self._session.ws_connect(self.make_url(path), **kwargs) - self._websockets.append(ws) - return ws - - async def close(self) -> None: - """Close all fixtures created by the test client. - - After that point, the TestClient is no longer usable. - - This is an idempotent function: running close multiple times - will not have any additional effects. - - close is also run on exit when used as a(n) (asynchronous) - context manager. - - """ - if not self._closed: - for resp in self._responses: - resp.close() - for ws in self._websockets: - await ws.close() - await self._session.close() - await self._server.close() - self._closed = True - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> Self: - await self.start_server() - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self.close() - - -class AioHTTPTestCase(IsolatedAsyncioTestCase): - """A base class to allow for unittest web applications using aiohttp. - - Provides the following: - - * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. - * self.loop (asyncio.BaseEventLoop): the event loop in which the - application and server are running. 
- * self.app (aiohttp.web.Application): the application returned by - self.get_application() - - Note that the TestClient's methods are asynchronous: you have to - execute function on the test client using asynchronous methods. - """ - - async def get_application(self) -> Application: - """Get application. - - This method should be overridden - to return the aiohttp.web.Application - object to test. - """ - return self.get_app() - - def get_app(self) -> Application: - """Obsolete method used to constructing web application. - - Use .get_application() coroutine instead. - """ - raise RuntimeError("Did you forget to define get_application()?") - - async def asyncSetUp(self) -> None: - self.loop = asyncio.get_running_loop() - return await self.setUpAsync() - - async def setUpAsync(self) -> None: - self.app = await self.get_application() - self.server = await self.get_server(self.app) - self.client = await self.get_client(self.server) - - await self.client.start_server() - - async def asyncTearDown(self) -> None: - return await self.tearDownAsync() - - async def tearDownAsync(self) -> None: - await self.client.close() - - async def get_server(self, app: Application) -> TestServer: - """Return a TestServer instance.""" - return TestServer(app, loop=self.loop) - - async def get_client(self, server: TestServer) -> TestClient[Request, Application]: - """Return a TestClient instance.""" - return TestClient(server, loop=self.loop) - - -def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: - """ - A decorator dedicated to use with asynchronous AioHTTPTestCase test methods. - - In 3.8+, this does nothing. - """ - warnings.warn( - "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+", - DeprecationWarning, - stacklevel=2, - ) - return func - - -_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] - - -@contextlib.contextmanager -def loop_context( - loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False -) -> Iterator[asyncio.AbstractEventLoop]: - """A contextmanager that creates an event_loop, for test purposes. - - Handles the creation and cleanup of a test loop. - """ - loop = setup_test_loop(loop_factory) - yield loop - teardown_test_loop(loop, fast=fast) - - -def setup_test_loop( - loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, -) -> asyncio.AbstractEventLoop: - """Create and return an asyncio.BaseEventLoop instance. - - The caller should also call teardown_test_loop, - once they are done with the loop. 
- """ - loop = loop_factory() - asyncio.set_event_loop(loop) - return loop - - -def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: - """Teardown and cleanup an event_loop created by setup_test_loop.""" - closed = loop.is_closed() - if not closed: - loop.call_soon(loop.stop) - loop.run_forever() - loop.close() - - if not fast: - gc.collect() - - asyncio.set_event_loop(None) - - -def _create_app_mock() -> mock.MagicMock: - def get_dict(app: Any, key: str) -> Any: - return app.__app_dict[key] - - def set_dict(app: Any, key: str, value: Any) -> None: - app.__app_dict[key] = value - - app = mock.MagicMock(spec=Application) - app.__app_dict = {} - app.__getitem__ = get_dict - app.__setitem__ = set_dict - - app._debug = False - app.on_response_prepare = Signal(app) - app.on_response_prepare.freeze() - return app - - -def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: - transport = mock.Mock() - - def get_extra_info(key: str) -> Optional[SSLContext]: - if key == "sslcontext": - return sslcontext - else: - return None - - transport.get_extra_info.side_effect = get_extra_info - return transport - - -def make_mocked_request( - method: str, - path: str, - headers: Any = None, - *, - match_info: Any = sentinel, - version: HttpVersion = HttpVersion(1, 1), - closing: bool = False, - app: Any = None, - writer: Any = sentinel, - protocol: Any = sentinel, - transport: Any = sentinel, - payload: StreamReader = EMPTY_PAYLOAD, - sslcontext: Optional[SSLContext] = None, - client_max_size: int = 1024**2, - loop: Any = ..., -) -> Request: - """Creates mocked web.Request testing purposes. - - Useful in unit tests, when spinning full web server is overkill or - specific conditions and errors are hard to trigger. - """ - task = mock.Mock() - if loop is ...: - # no loop passed, try to get the current one if - # its is running as we need a real loop to create - # executor jobs to be able to do testing - # with a real executor - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = mock.Mock() - loop.create_future.return_value = () - - if version < HttpVersion(1, 1): - closing = True - - if headers: - headers = CIMultiDictProxy(CIMultiDict(headers)) - raw_hdrs = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() - ) - else: - headers = CIMultiDictProxy(CIMultiDict()) - raw_hdrs = () - - chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() - - message = RawRequestMessage( - method, - path, - version, - headers, - raw_hdrs, - closing, - None, - False, - chunked, - URL(path), - ) - if app is None: - app = _create_app_mock() - - if transport is sentinel: - transport = _create_transport(sslcontext) - - if protocol is sentinel: - protocol = mock.Mock() - protocol.transport = transport - type(protocol).peername = mock.PropertyMock( - return_value=transport.get_extra_info("peername") - ) - type(protocol).ssl_context = mock.PropertyMock(return_value=sslcontext) - - if writer is sentinel: - writer = mock.Mock() - writer.write_headers = make_mocked_coro(None) - writer.write = make_mocked_coro(None) - writer.write_eof = make_mocked_coro(None) - writer.drain = make_mocked_coro(None) - writer.transport = transport - - protocol.transport = transport - protocol.writer = writer - - req = Request( - message, payload, protocol, writer, task, loop, client_max_size=client_max_size - ) - - match_info = UrlMappingMatchInfo( - {} if match_info is sentinel else match_info, mock.Mock() - ) - match_info.add_app(app) - 
req._match_info = match_info - - return req - - -def make_mocked_coro( - return_value: Any = sentinel, raise_exception: Any = sentinel -) -> Any: - """Creates a coroutine mock.""" - - async def mock_coro(*args: Any, **kwargs: Any) -> Any: - if raise_exception is not sentinel: - raise raise_exception - if not inspect.isawaitable(return_value): - return return_value - await return_value - - return mock.Mock(wraps=mock_coro) diff --git a/venv/Lib/site-packages/aiohttp/tracing.py b/venv/Lib/site-packages/aiohttp/tracing.py deleted file mode 100644 index 568fa7f..0000000 --- a/venv/Lib/site-packages/aiohttp/tracing.py +++ /dev/null @@ -1,455 +0,0 @@ -from types import SimpleNamespace -from typing import TYPE_CHECKING, Mapping, Optional, Type, TypeVar - -import attr -from aiosignal import Signal -from multidict import CIMultiDict -from yarl import URL - -from .client_reqrep import ClientResponse - -if TYPE_CHECKING: - from .client import ClientSession - - _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) - _TracingSignal = Signal[ClientSession, SimpleNamespace, _ParamT_contra] - - -__all__ = ( - "TraceConfig", - "TraceRequestStartParams", - "TraceRequestEndParams", - "TraceRequestExceptionParams", - "TraceConnectionQueuedStartParams", - "TraceConnectionQueuedEndParams", - "TraceConnectionCreateStartParams", - "TraceConnectionCreateEndParams", - "TraceConnectionReuseconnParams", - "TraceDnsResolveHostStartParams", - "TraceDnsResolveHostEndParams", - "TraceDnsCacheHitParams", - "TraceDnsCacheMissParams", - "TraceRequestRedirectParams", - "TraceRequestChunkSentParams", - "TraceResponseChunkReceivedParams", - "TraceRequestHeadersSentParams", -) - - -class TraceConfig: - """First-class used to trace requests launched via ClientSession objects.""" - - def __init__( - self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace - ) -> None: - self._on_request_start: _TracingSignal[TraceRequestStartParams] = Signal(self) - self._on_request_chunk_sent: _TracingSignal[TraceRequestChunkSentParams] = ( - Signal(self) - ) - self._on_response_chunk_received: _TracingSignal[ - TraceResponseChunkReceivedParams - ] = Signal(self) - self._on_request_end: _TracingSignal[TraceRequestEndParams] = Signal(self) - self._on_request_exception: _TracingSignal[TraceRequestExceptionParams] = ( - Signal(self) - ) - self._on_request_redirect: _TracingSignal[TraceRequestRedirectParams] = Signal( - self - ) - self._on_connection_queued_start: _TracingSignal[ - TraceConnectionQueuedStartParams - ] = Signal(self) - self._on_connection_queued_end: _TracingSignal[ - TraceConnectionQueuedEndParams - ] = Signal(self) - self._on_connection_create_start: _TracingSignal[ - TraceConnectionCreateStartParams - ] = Signal(self) - self._on_connection_create_end: _TracingSignal[ - TraceConnectionCreateEndParams - ] = Signal(self) - self._on_connection_reuseconn: _TracingSignal[ - TraceConnectionReuseconnParams - ] = Signal(self) - self._on_dns_resolvehost_start: _TracingSignal[ - TraceDnsResolveHostStartParams - ] = Signal(self) - self._on_dns_resolvehost_end: _TracingSignal[TraceDnsResolveHostEndParams] = ( - Signal(self) - ) - self._on_dns_cache_hit: _TracingSignal[TraceDnsCacheHitParams] = Signal(self) - self._on_dns_cache_miss: _TracingSignal[TraceDnsCacheMissParams] = Signal(self) - self._on_request_headers_sent: _TracingSignal[TraceRequestHeadersSentParams] = ( - Signal(self) - ) - - self._trace_config_ctx_factory = trace_config_ctx_factory - - def trace_config_ctx( - self, trace_request_ctx: 
Optional[Mapping[str, str]] = None - ) -> SimpleNamespace: - """Return a new trace_config_ctx instance""" - return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) - - def freeze(self) -> None: - self._on_request_start.freeze() - self._on_request_chunk_sent.freeze() - self._on_response_chunk_received.freeze() - self._on_request_end.freeze() - self._on_request_exception.freeze() - self._on_request_redirect.freeze() - self._on_connection_queued_start.freeze() - self._on_connection_queued_end.freeze() - self._on_connection_create_start.freeze() - self._on_connection_create_end.freeze() - self._on_connection_reuseconn.freeze() - self._on_dns_resolvehost_start.freeze() - self._on_dns_resolvehost_end.freeze() - self._on_dns_cache_hit.freeze() - self._on_dns_cache_miss.freeze() - self._on_request_headers_sent.freeze() - - @property - def on_request_start(self) -> "_TracingSignal[TraceRequestStartParams]": - return self._on_request_start - - @property - def on_request_chunk_sent( - self, - ) -> "_TracingSignal[TraceRequestChunkSentParams]": - return self._on_request_chunk_sent - - @property - def on_response_chunk_received( - self, - ) -> "_TracingSignal[TraceResponseChunkReceivedParams]": - return self._on_response_chunk_received - - @property - def on_request_end(self) -> "_TracingSignal[TraceRequestEndParams]": - return self._on_request_end - - @property - def on_request_exception( - self, - ) -> "_TracingSignal[TraceRequestExceptionParams]": - return self._on_request_exception - - @property - def on_request_redirect( - self, - ) -> "_TracingSignal[TraceRequestRedirectParams]": - return self._on_request_redirect - - @property - def on_connection_queued_start( - self, - ) -> "_TracingSignal[TraceConnectionQueuedStartParams]": - return self._on_connection_queued_start - - @property - def on_connection_queued_end( - self, - ) -> "_TracingSignal[TraceConnectionQueuedEndParams]": - return self._on_connection_queued_end - - @property - def on_connection_create_start( - self, - ) -> "_TracingSignal[TraceConnectionCreateStartParams]": - return self._on_connection_create_start - - @property - def on_connection_create_end( - self, - ) -> "_TracingSignal[TraceConnectionCreateEndParams]": - return self._on_connection_create_end - - @property - def on_connection_reuseconn( - self, - ) -> "_TracingSignal[TraceConnectionReuseconnParams]": - return self._on_connection_reuseconn - - @property - def on_dns_resolvehost_start( - self, - ) -> "_TracingSignal[TraceDnsResolveHostStartParams]": - return self._on_dns_resolvehost_start - - @property - def on_dns_resolvehost_end( - self, - ) -> "_TracingSignal[TraceDnsResolveHostEndParams]": - return self._on_dns_resolvehost_end - - @property - def on_dns_cache_hit(self) -> "_TracingSignal[TraceDnsCacheHitParams]": - return self._on_dns_cache_hit - - @property - def on_dns_cache_miss(self) -> "_TracingSignal[TraceDnsCacheMissParams]": - return self._on_dns_cache_miss - - @property - def on_request_headers_sent( - self, - ) -> "_TracingSignal[TraceRequestHeadersSentParams]": - return self._on_request_headers_sent - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestStartParams: - """Parameters sent by the `on_request_start` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestChunkSentParams: - """Parameters sent by the `on_request_chunk_sent` signal""" - - method: str - url: URL - chunk: bytes - - -@attr.s(auto_attribs=True, frozen=True, 
slots=True) -class TraceResponseChunkReceivedParams: - """Parameters sent by the `on_response_chunk_received` signal""" - - method: str - url: URL - chunk: bytes - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestEndParams: - """Parameters sent by the `on_request_end` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - response: ClientResponse - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestExceptionParams: - """Parameters sent by the `on_request_exception` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - exception: BaseException - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestRedirectParams: - """Parameters sent by the `on_request_redirect` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - response: ClientResponse - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionQueuedStartParams: - """Parameters sent by the `on_connection_queued_start` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionQueuedEndParams: - """Parameters sent by the `on_connection_queued_end` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionCreateStartParams: - """Parameters sent by the `on_connection_create_start` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionCreateEndParams: - """Parameters sent by the `on_connection_create_end` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionReuseconnParams: - """Parameters sent by the `on_connection_reuseconn` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsResolveHostStartParams: - """Parameters sent by the `on_dns_resolvehost_start` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsResolveHostEndParams: - """Parameters sent by the `on_dns_resolvehost_end` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsCacheHitParams: - """Parameters sent by the `on_dns_cache_hit` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsCacheMissParams: - """Parameters sent by the `on_dns_cache_miss` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestHeadersSentParams: - """Parameters sent by the `on_request_headers_sent` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - - -class Trace: - """Internal dependency holder class. - - Used to keep together the main dependencies used - at the moment of send a signal. 
- """ - - def __init__( - self, - session: "ClientSession", - trace_config: TraceConfig, - trace_config_ctx: SimpleNamespace, - ) -> None: - self._trace_config = trace_config - self._trace_config_ctx = trace_config_ctx - self._session = session - - async def send_request_start( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - return await self._trace_config.on_request_start.send( - self._session, - self._trace_config_ctx, - TraceRequestStartParams(method, url, headers), - ) - - async def send_request_chunk_sent( - self, method: str, url: URL, chunk: bytes - ) -> None: - return await self._trace_config.on_request_chunk_sent.send( - self._session, - self._trace_config_ctx, - TraceRequestChunkSentParams(method, url, chunk), - ) - - async def send_response_chunk_received( - self, method: str, url: URL, chunk: bytes - ) -> None: - return await self._trace_config.on_response_chunk_received.send( - self._session, - self._trace_config_ctx, - TraceResponseChunkReceivedParams(method, url, chunk), - ) - - async def send_request_end( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - response: ClientResponse, - ) -> None: - return await self._trace_config.on_request_end.send( - self._session, - self._trace_config_ctx, - TraceRequestEndParams(method, url, headers, response), - ) - - async def send_request_exception( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - exception: BaseException, - ) -> None: - return await self._trace_config.on_request_exception.send( - self._session, - self._trace_config_ctx, - TraceRequestExceptionParams(method, url, headers, exception), - ) - - async def send_request_redirect( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - response: ClientResponse, - ) -> None: - return await self._trace_config._on_request_redirect.send( - self._session, - self._trace_config_ctx, - TraceRequestRedirectParams(method, url, headers, response), - ) - - async def send_connection_queued_start(self) -> None: - return await self._trace_config.on_connection_queued_start.send( - self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() - ) - - async def send_connection_queued_end(self) -> None: - return await self._trace_config.on_connection_queued_end.send( - self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() - ) - - async def send_connection_create_start(self) -> None: - return await self._trace_config.on_connection_create_start.send( - self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() - ) - - async def send_connection_create_end(self) -> None: - return await self._trace_config.on_connection_create_end.send( - self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() - ) - - async def send_connection_reuseconn(self) -> None: - return await self._trace_config.on_connection_reuseconn.send( - self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() - ) - - async def send_dns_resolvehost_start(self, host: str) -> None: - return await self._trace_config.on_dns_resolvehost_start.send( - self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) - ) - - async def send_dns_resolvehost_end(self, host: str) -> None: - return await self._trace_config.on_dns_resolvehost_end.send( - self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) - ) - - async def send_dns_cache_hit(self, host: str) -> None: - return await self._trace_config.on_dns_cache_hit.send( - self._session, self._trace_config_ctx, 
TraceDnsCacheHitParams(host) - ) - - async def send_dns_cache_miss(self, host: str) -> None: - return await self._trace_config.on_dns_cache_miss.send( - self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) - ) - - async def send_request_headers( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - return await self._trace_config._on_request_headers_sent.send( - self._session, - self._trace_config_ctx, - TraceRequestHeadersSentParams(method, url, headers), - ) diff --git a/venv/Lib/site-packages/aiohttp/typedefs.py b/venv/Lib/site-packages/aiohttp/typedefs.py deleted file mode 100644 index cc8c082..0000000 --- a/venv/Lib/site-packages/aiohttp/typedefs.py +++ /dev/null @@ -1,69 +0,0 @@ -import json -import os -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable, - Mapping, - Protocol, - Tuple, - Union, -) - -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr -from yarl import URL, Query as _Query - -Query = _Query - -DEFAULT_JSON_ENCODER = json.dumps -DEFAULT_JSON_DECODER = json.loads - -if TYPE_CHECKING: - _CIMultiDict = CIMultiDict[str] - _CIMultiDictProxy = CIMultiDictProxy[str] - _MultiDict = MultiDict[str] - _MultiDictProxy = MultiDictProxy[str] - from http.cookies import BaseCookie, Morsel - - from .web import Request, StreamResponse -else: - _CIMultiDict = CIMultiDict - _CIMultiDictProxy = CIMultiDictProxy - _MultiDict = MultiDict - _MultiDictProxy = MultiDictProxy - -Byteish = Union[bytes, bytearray, memoryview] -JSONEncoder = Callable[[Any], str] -JSONDecoder = Callable[[str], Any] -LooseHeaders = Union[ - Mapping[str, str], - Mapping[istr, str], - _CIMultiDict, - _CIMultiDictProxy, - Iterable[Tuple[Union[str, istr], str]], -] -RawHeaders = Tuple[Tuple[bytes, bytes], ...] -StrOrURL = Union[str, URL] - -LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] -LooseCookiesIterables = Iterable[ - Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] -] -LooseCookies = Union[ - LooseCookiesMappings, - LooseCookiesIterables, - "BaseCookie[str]", -] - -Handler = Callable[["Request"], Awaitable["StreamResponse"]] - - -class Middleware(Protocol): - def __call__( - self, request: "Request", handler: Handler - ) -> Awaitable["StreamResponse"]: ... 
- - -PathLike = Union[str, "os.PathLike[str]"] diff --git a/venv/Lib/site-packages/aiohttp/web.py b/venv/Lib/site-packages/aiohttp/web.py deleted file mode 100644 index 5a1fc96..0000000 --- a/venv/Lib/site-packages/aiohttp/web.py +++ /dev/null @@ -1,592 +0,0 @@ -import asyncio -import logging -import os -import socket -import sys -import warnings -from argparse import ArgumentParser -from collections.abc import Iterable -from contextlib import suppress -from importlib import import_module -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable as TypingIterable, - List, - Optional, - Set, - Type, - Union, - cast, -) - -from .abc import AbstractAccessLogger -from .helpers import AppKey as AppKey -from .log import access_logger -from .typedefs import PathLike -from .web_app import Application as Application, CleanupError as CleanupError -from .web_exceptions import ( - HTTPAccepted as HTTPAccepted, - HTTPBadGateway as HTTPBadGateway, - HTTPBadRequest as HTTPBadRequest, - HTTPClientError as HTTPClientError, - HTTPConflict as HTTPConflict, - HTTPCreated as HTTPCreated, - HTTPError as HTTPError, - HTTPException as HTTPException, - HTTPExpectationFailed as HTTPExpectationFailed, - HTTPFailedDependency as HTTPFailedDependency, - HTTPForbidden as HTTPForbidden, - HTTPFound as HTTPFound, - HTTPGatewayTimeout as HTTPGatewayTimeout, - HTTPGone as HTTPGone, - HTTPInsufficientStorage as HTTPInsufficientStorage, - HTTPInternalServerError as HTTPInternalServerError, - HTTPLengthRequired as HTTPLengthRequired, - HTTPMethodNotAllowed as HTTPMethodNotAllowed, - HTTPMisdirectedRequest as HTTPMisdirectedRequest, - HTTPMove as HTTPMove, - HTTPMovedPermanently as HTTPMovedPermanently, - HTTPMultipleChoices as HTTPMultipleChoices, - HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, - HTTPNoContent as HTTPNoContent, - HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, - HTTPNotAcceptable as HTTPNotAcceptable, - HTTPNotExtended as HTTPNotExtended, - HTTPNotFound as HTTPNotFound, - HTTPNotImplemented as HTTPNotImplemented, - HTTPNotModified as HTTPNotModified, - HTTPOk as HTTPOk, - HTTPPartialContent as HTTPPartialContent, - HTTPPaymentRequired as HTTPPaymentRequired, - HTTPPermanentRedirect as HTTPPermanentRedirect, - HTTPPreconditionFailed as HTTPPreconditionFailed, - HTTPPreconditionRequired as HTTPPreconditionRequired, - HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, - HTTPRedirection as HTTPRedirection, - HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, - HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, - HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, - HTTPRequestTimeout as HTTPRequestTimeout, - HTTPRequestURITooLong as HTTPRequestURITooLong, - HTTPResetContent as HTTPResetContent, - HTTPSeeOther as HTTPSeeOther, - HTTPServerError as HTTPServerError, - HTTPServiceUnavailable as HTTPServiceUnavailable, - HTTPSuccessful as HTTPSuccessful, - HTTPTemporaryRedirect as HTTPTemporaryRedirect, - HTTPTooManyRequests as HTTPTooManyRequests, - HTTPUnauthorized as HTTPUnauthorized, - HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, - HTTPUnprocessableEntity as HTTPUnprocessableEntity, - HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, - HTTPUpgradeRequired as HTTPUpgradeRequired, - HTTPUseProxy as HTTPUseProxy, - HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, - HTTPVersionNotSupported as HTTPVersionNotSupported, - NotAppKeyWarning as NotAppKeyWarning, -) -from 
.web_fileresponse import FileResponse as FileResponse -from .web_log import AccessLogger -from .web_middlewares import ( - middleware as middleware, - normalize_path_middleware as normalize_path_middleware, -) -from .web_protocol import ( - PayloadAccessError as PayloadAccessError, - RequestHandler as RequestHandler, - RequestPayloadError as RequestPayloadError, -) -from .web_request import ( - BaseRequest as BaseRequest, - FileField as FileField, - Request as Request, -) -from .web_response import ( - ContentCoding as ContentCoding, - Response as Response, - StreamResponse as StreamResponse, - json_response as json_response, -) -from .web_routedef import ( - AbstractRouteDef as AbstractRouteDef, - RouteDef as RouteDef, - RouteTableDef as RouteTableDef, - StaticDef as StaticDef, - delete as delete, - get as get, - head as head, - options as options, - patch as patch, - post as post, - put as put, - route as route, - static as static, - view as view, -) -from .web_runner import ( - AppRunner as AppRunner, - BaseRunner as BaseRunner, - BaseSite as BaseSite, - GracefulExit as GracefulExit, - NamedPipeSite as NamedPipeSite, - ServerRunner as ServerRunner, - SockSite as SockSite, - TCPSite as TCPSite, - UnixSite as UnixSite, -) -from .web_server import Server as Server -from .web_urldispatcher import ( - AbstractResource as AbstractResource, - AbstractRoute as AbstractRoute, - DynamicResource as DynamicResource, - PlainResource as PlainResource, - PrefixedSubAppResource as PrefixedSubAppResource, - Resource as Resource, - ResourceRoute as ResourceRoute, - StaticResource as StaticResource, - UrlDispatcher as UrlDispatcher, - UrlMappingMatchInfo as UrlMappingMatchInfo, - View as View, -) -from .web_ws import ( - WebSocketReady as WebSocketReady, - WebSocketResponse as WebSocketResponse, - WSMsgType as WSMsgType, -) - -__all__ = ( - # web_app - "AppKey", - "Application", - "CleanupError", - # web_exceptions - "NotAppKeyWarning", - "HTTPAccepted", - "HTTPBadGateway", - "HTTPBadRequest", - "HTTPClientError", - "HTTPConflict", - "HTTPCreated", - "HTTPError", - "HTTPException", - "HTTPExpectationFailed", - "HTTPFailedDependency", - "HTTPForbidden", - "HTTPFound", - "HTTPGatewayTimeout", - "HTTPGone", - "HTTPInsufficientStorage", - "HTTPInternalServerError", - "HTTPLengthRequired", - "HTTPMethodNotAllowed", - "HTTPMisdirectedRequest", - "HTTPMove", - "HTTPMovedPermanently", - "HTTPMultipleChoices", - "HTTPNetworkAuthenticationRequired", - "HTTPNoContent", - "HTTPNonAuthoritativeInformation", - "HTTPNotAcceptable", - "HTTPNotExtended", - "HTTPNotFound", - "HTTPNotImplemented", - "HTTPNotModified", - "HTTPOk", - "HTTPPartialContent", - "HTTPPaymentRequired", - "HTTPPermanentRedirect", - "HTTPPreconditionFailed", - "HTTPPreconditionRequired", - "HTTPProxyAuthenticationRequired", - "HTTPRedirection", - "HTTPRequestEntityTooLarge", - "HTTPRequestHeaderFieldsTooLarge", - "HTTPRequestRangeNotSatisfiable", - "HTTPRequestTimeout", - "HTTPRequestURITooLong", - "HTTPResetContent", - "HTTPSeeOther", - "HTTPServerError", - "HTTPServiceUnavailable", - "HTTPSuccessful", - "HTTPTemporaryRedirect", - "HTTPTooManyRequests", - "HTTPUnauthorized", - "HTTPUnavailableForLegalReasons", - "HTTPUnprocessableEntity", - "HTTPUnsupportedMediaType", - "HTTPUpgradeRequired", - "HTTPUseProxy", - "HTTPVariantAlsoNegotiates", - "HTTPVersionNotSupported", - # web_fileresponse - "FileResponse", - # web_middlewares - "middleware", - "normalize_path_middleware", - # web_protocol - "PayloadAccessError", - "RequestHandler", - 
"RequestPayloadError", - # web_request - "BaseRequest", - "FileField", - "Request", - # web_response - "ContentCoding", - "Response", - "StreamResponse", - "json_response", - # web_routedef - "AbstractRouteDef", - "RouteDef", - "RouteTableDef", - "StaticDef", - "delete", - "get", - "head", - "options", - "patch", - "post", - "put", - "route", - "static", - "view", - # web_runner - "AppRunner", - "BaseRunner", - "BaseSite", - "GracefulExit", - "ServerRunner", - "SockSite", - "TCPSite", - "UnixSite", - "NamedPipeSite", - # web_server - "Server", - # web_urldispatcher - "AbstractResource", - "AbstractRoute", - "DynamicResource", - "PlainResource", - "PrefixedSubAppResource", - "Resource", - "ResourceRoute", - "StaticResource", - "UrlDispatcher", - "UrlMappingMatchInfo", - "View", - # web_ws - "WebSocketReady", - "WebSocketResponse", - "WSMsgType", - # web - "run_app", -) - - -if TYPE_CHECKING: - from ssl import SSLContext -else: - try: - from ssl import SSLContext - except ImportError: # pragma: no cover - SSLContext = object # type: ignore[misc,assignment] - -# Only display warning when using -Wdefault, -We, -X dev or similar. -warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) - -HostSequence = TypingIterable[str] - - -async def _run_app( - app: Union[Application, Awaitable[Application]], - *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Union[PathLike, TypingIterable[PathLike], None] = None, - sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, - ssl_context: Optional[SSLContext] = None, - print: Optional[Callable[..., None]] = print, - backlog: int = 128, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - **kwargs: Any, # TODO(PY311): Use Unpack -) -> None: - # An internal function to actually do all dirty job for application running - if asyncio.iscoroutine(app): - app = await app - - app = cast(Application, app) - - runner = AppRunner(app, **kwargs) - - await runner.setup() - - sites: List[BaseSite] = [] - - try: - if host is not None: - if isinstance(host, str): - sites.append( - TCPSite( - runner, - host, - port, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - else: - for h in host: - sites.append( - TCPSite( - runner, - h, - port, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - elif path is None and sock is None or port is not None: - sites.append( - TCPSite( - runner, - port=port, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - - if path is not None: - if isinstance(path, (str, os.PathLike)): - sites.append( - UnixSite( - runner, - path, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - else: - for p in path: - sites.append( - UnixSite( - runner, - p, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - - if sock is not None: - if not isinstance(sock, Iterable): - sites.append( - SockSite( - runner, - sock, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - else: - for s in sock: - sites.append( - SockSite( - runner, - s, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - for site in sites: - await site.start() - - if print: # pragma: no branch - names = sorted(str(s.name) for s in runner.sites) - print( - "======== Running on {} ========\n" - "(Press CTRL+C to quit)".format(", ".join(names)) - ) - - # sleep forever by 1 hour intervals, - while True: - await 
asyncio.sleep(3600) - finally: - await runner.cleanup() - - -def _cancel_tasks( - to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop -) -> None: - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - loop.call_exception_handler( - { - "message": "unhandled exception during asyncio.run() shutdown", - "exception": task.exception(), - "task": task, - } - ) - - -def run_app( - app: Union[Application, Awaitable[Application]], - *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Union[PathLike, TypingIterable[PathLike], None] = None, - sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, - shutdown_timeout: float = 60.0, - keepalive_timeout: float = 75.0, - ssl_context: Optional[SSLContext] = None, - print: Optional[Callable[..., None]] = print, - backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, - handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - handler_cancellation: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any, -) -> None: - """Run an app locally""" - if loop is None: - loop = asyncio.new_event_loop() - - # Configure if and only if in debugging mode and using the default logger - if loop.get_debug() and access_log and access_log.name == "aiohttp.access": - if access_log.level == logging.NOTSET: - access_log.setLevel(logging.DEBUG) - if not access_log.hasHandlers(): - access_log.addHandler(logging.StreamHandler()) - - main_task = loop.create_task( - _run_app( - app, - host=host, - port=port, - path=path, - sock=sock, - shutdown_timeout=shutdown_timeout, - keepalive_timeout=keepalive_timeout, - ssl_context=ssl_context, - print=print, - backlog=backlog, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - handle_signals=handle_signals, - reuse_address=reuse_address, - reuse_port=reuse_port, - handler_cancellation=handler_cancellation, - **kwargs, - ) - ) - - try: - asyncio.set_event_loop(loop) - loop.run_until_complete(main_task) - except (GracefulExit, KeyboardInterrupt): # pragma: no cover - pass - finally: - try: - main_task.cancel() - with suppress(asyncio.CancelledError): - loop.run_until_complete(main_task) - finally: - _cancel_tasks(asyncio.all_tasks(loop), loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() - - -def main(argv: List[str]) -> None: - arg_parser = ArgumentParser( - description="aiohttp.web Application server", prog="aiohttp.web" - ) - arg_parser.add_argument( - "entry_func", - help=( - "Callable returning the `aiohttp.web.Application` instance to " - "run. Should be specified in the 'module:function' syntax." - ), - metavar="entry-func", - ) - arg_parser.add_argument( - "-H", - "--hostname", - help="TCP/IP hostname to serve on (default: localhost)", - default=None, - ) - arg_parser.add_argument( - "-P", - "--port", - help="TCP/IP port to serve on (default: %(default)r)", - type=int, - default=8080, - ) - arg_parser.add_argument( - "-U", - "--path", - help="Unix file system path to serve on. 
Can be combined with hostname " - "to serve on both Unix and TCP.", - ) - args, extra_argv = arg_parser.parse_known_args(argv) - - # Import logic - mod_str, _, func_str = args.entry_func.partition(":") - if not func_str or not mod_str: - arg_parser.error("'entry-func' not in 'module:function' syntax") - if mod_str.startswith("."): - arg_parser.error("relative module names not supported") - try: - module = import_module(mod_str) - except ImportError as ex: - arg_parser.error(f"unable to import {mod_str}: {ex}") - try: - func = getattr(module, func_str) - except AttributeError: - arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") - - # Compatibility logic - if args.path is not None and not hasattr(socket, "AF_UNIX"): - arg_parser.error( - "file system paths not supported by your operating environment" - ) - - logging.basicConfig(level=logging.DEBUG) - - if args.path and args.hostname is None: - host = port = None - else: - host = args.hostname or "localhost" - port = args.port - - app = func(extra_argv) - run_app(app, host=host, port=port, path=args.path) - arg_parser.exit(message="Stopped\n") - - -if __name__ == "__main__": # pragma: no branch - main(sys.argv[1:]) # pragma: no cover diff --git a/venv/Lib/site-packages/aiohttp/web_app.py b/venv/Lib/site-packages/aiohttp/web_app.py deleted file mode 100644 index 619c008..0000000 --- a/venv/Lib/site-packages/aiohttp/web_app.py +++ /dev/null @@ -1,620 +0,0 @@ -import asyncio -import logging -import warnings -from functools import lru_cache, partial, update_wrapper -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Awaitable, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, -) - -from aiosignal import Signal -from frozenlist import FrozenList - -from . import hdrs -from .abc import ( - AbstractAccessLogger, - AbstractMatchInfo, - AbstractRouter, - AbstractStreamWriter, -) -from .helpers import DEBUG, AppKey -from .http_parser import RawRequestMessage -from .log import web_logger -from .streams import StreamReader -from .typedefs import Handler, Middleware -from .web_exceptions import NotAppKeyWarning -from .web_log import AccessLogger -from .web_middlewares import _fix_request_current_app -from .web_protocol import RequestHandler -from .web_request import Request -from .web_response import StreamResponse -from .web_routedef import AbstractRouteDef -from .web_server import Server -from .web_urldispatcher import ( - AbstractResource, - AbstractRoute, - Domain, - MaskDomain, - MatchedSubAppResource, - PrefixedSubAppResource, - SystemRoute, - UrlDispatcher, -) - -__all__ = ("Application", "CleanupError") - - -if TYPE_CHECKING: - _AppSignal = Signal["Application"] - _RespPrepareSignal = Signal[Request, StreamResponse] - _Middlewares = FrozenList[Middleware] - _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]] - _Subapps = List["Application"] -else: - # No type checker mode, skip types - _AppSignal = Signal - _RespPrepareSignal = Signal - _Middlewares = FrozenList - _MiddlewaresHandlers = Optional[Sequence] - _Subapps = List - -_T = TypeVar("_T") -_U = TypeVar("_U") -_Resource = TypeVar("_Resource", bound=AbstractResource) - - -def _build_middlewares( - handler: Handler, apps: Tuple["Application", ...] 
-) -> Callable[[Request], Awaitable[StreamResponse]]: - """Apply middlewares to handler.""" - for app in apps[::-1]: - for m, _ in app._middlewares_handlers: # type: ignore[union-attr] - handler = update_wrapper(partial(m, handler=handler), handler) - return handler - - -_cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares) - - -class Application(MutableMapping[Union[str, AppKey[Any]], Any]): - ATTRS = frozenset( - [ - "logger", - "_debug", - "_router", - "_loop", - "_handler_args", - "_middlewares", - "_middlewares_handlers", - "_has_legacy_middlewares", - "_run_middlewares", - "_state", - "_frozen", - "_pre_frozen", - "_subapps", - "_on_response_prepare", - "_on_startup", - "_on_shutdown", - "_on_cleanup", - "_client_max_size", - "_cleanup_ctx", - ] - ) - - def __init__( - self, - *, - logger: logging.Logger = web_logger, - router: Optional[UrlDispatcher] = None, - middlewares: Iterable[Middleware] = (), - handler_args: Optional[Mapping[str, Any]] = None, - client_max_size: int = 1024**2, - loop: Optional[asyncio.AbstractEventLoop] = None, - debug: Any = ..., # mypy doesn't support ellipsis - ) -> None: - if router is None: - router = UrlDispatcher() - else: - warnings.warn( - "router argument is deprecated", DeprecationWarning, stacklevel=2 - ) - assert isinstance(router, AbstractRouter), router - - if loop is not None: - warnings.warn( - "loop argument is deprecated", DeprecationWarning, stacklevel=2 - ) - - if debug is not ...: - warnings.warn( - "debug argument is deprecated", DeprecationWarning, stacklevel=2 - ) - self._debug = debug - self._router: UrlDispatcher = router - self._loop = loop - self._handler_args = handler_args - self.logger = logger - - self._middlewares: _Middlewares = FrozenList(middlewares) - - # initialized on freezing - self._middlewares_handlers: _MiddlewaresHandlers = None - # initialized on freezing - self._run_middlewares: Optional[bool] = None - self._has_legacy_middlewares: bool = True - - self._state: Dict[Union[AppKey[Any], str], object] = {} - self._frozen = False - self._pre_frozen = False - self._subapps: _Subapps = [] - - self._on_response_prepare: _RespPrepareSignal = Signal(self) - self._on_startup: _AppSignal = Signal(self) - self._on_shutdown: _AppSignal = Signal(self) - self._on_cleanup: _AppSignal = Signal(self) - self._cleanup_ctx = CleanupContext() - self._on_startup.append(self._cleanup_ctx._on_startup) - self._on_cleanup.append(self._cleanup_ctx._on_cleanup) - self._client_max_size = client_max_size - - def __init_subclass__(cls: Type["Application"]) -> None: - warnings.warn( - "Inheritance class {} from web.Application " - "is discouraged".format(cls.__name__), - DeprecationWarning, - stacklevel=3, - ) - - if DEBUG: # pragma: no cover - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom web.Application.{} attribute " - "is discouraged".format(name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - # MutableMapping API - - def __eq__(self, other: object) -> bool: - return self is other - - @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: ... - - @overload - def __getitem__(self, key: str) -> Any: ... 
- - def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: - return self._state[key] - - def _check_frozen(self) -> None: - if self._frozen: - warnings.warn( - "Changing state of started or joined application is deprecated", - DeprecationWarning, - stacklevel=3, - ) - - @overload # type: ignore[override] - def __setitem__(self, key: AppKey[_T], value: _T) -> None: ... - - @overload - def __setitem__(self, key: str, value: Any) -> None: ... - - def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: - self._check_frozen() - if not isinstance(key, AppKey): - warnings.warn( - "It is recommended to use web.AppKey instances for keys.\n" - + "https://docs.aiohttp.org/en/stable/web_advanced.html" - + "#application-s-config", - category=NotAppKeyWarning, - stacklevel=2, - ) - self._state[key] = value - - def __delitem__(self, key: Union[str, AppKey[_T]]) -> None: - self._check_frozen() - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: - return iter(self._state) - - def __hash__(self) -> int: - return id(self) - - @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... - - @overload - def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ... - - @overload - def get(self, key: str, default: Any = ...) -> Any: ... - - def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: - return self._state.get(key, default) - - ######## - @property - def loop(self) -> asyncio.AbstractEventLoop: - # Technically the loop can be None - # but we mask it by explicit type cast - # to provide more convenient type annotation - warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) - return cast(asyncio.AbstractEventLoop, self._loop) - - def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: - if loop is None: - loop = asyncio.get_event_loop() - if self._loop is not None and self._loop is not loop: - raise RuntimeError( - "web.Application instance initialized with different loop" - ) - - self._loop = loop - - # set loop debug - if self._debug is ...: - self._debug = loop.get_debug() - - # set loop to sub applications - for subapp in self._subapps: - subapp._set_loop(loop) - - @property - def pre_frozen(self) -> bool: - return self._pre_frozen - - def pre_freeze(self) -> None: - if self._pre_frozen: - return - - self._pre_frozen = True - self._middlewares.freeze() - self._router.freeze() - self._on_response_prepare.freeze() - self._cleanup_ctx.freeze() - self._on_startup.freeze() - self._on_shutdown.freeze() - self._on_cleanup.freeze() - self._middlewares_handlers = tuple(self._prepare_middleware()) - self._has_legacy_middlewares = any( - not new_style for _, new_style in self._middlewares_handlers - ) - - # If current app and any subapp do not have middlewares avoid run all - # of the code footprint that it implies, which have a middleware - # hardcoded per app that sets up the current_app attribute. If no - # middlewares are configured the handler will receive the proper - # current_app without needing all of this code. 
- self._run_middlewares = True if self.middlewares else False - - for subapp in self._subapps: - subapp.pre_freeze() - self._run_middlewares = self._run_middlewares or subapp._run_middlewares - - @property - def frozen(self) -> bool: - return self._frozen - - def freeze(self) -> None: - if self._frozen: - return - - self.pre_freeze() - self._frozen = True - for subapp in self._subapps: - subapp.freeze() - - @property - def debug(self) -> bool: - warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) - return self._debug # type: ignore[no-any-return] - - def _reg_subapp_signals(self, subapp: "Application") -> None: - def reg_handler(signame: str) -> None: - subsig = getattr(subapp, signame) - - async def handler(app: "Application") -> None: - await subsig.send(subapp) - - appsig = getattr(self, signame) - appsig.append(handler) - - reg_handler("on_startup") - reg_handler("on_shutdown") - reg_handler("on_cleanup") - - def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource: - if not isinstance(prefix, str): - raise TypeError("Prefix must be str") - prefix = prefix.rstrip("/") - if not prefix: - raise ValueError("Prefix cannot be empty") - factory = partial(PrefixedSubAppResource, prefix, subapp) - return self._add_subapp(factory, subapp) - - def _add_subapp( - self, resource_factory: Callable[[], _Resource], subapp: "Application" - ) -> _Resource: - if self.frozen: - raise RuntimeError("Cannot add sub application to frozen application") - if subapp.frozen: - raise RuntimeError("Cannot add frozen application") - resource = resource_factory() - self.router.register_resource(resource) - self._reg_subapp_signals(subapp) - self._subapps.append(subapp) - subapp.pre_freeze() - if self._loop is not None: - subapp._set_loop(self._loop) - return resource - - def add_domain(self, domain: str, subapp: "Application") -> MatchedSubAppResource: - if not isinstance(domain, str): - raise TypeError("Domain must be str") - elif "*" in domain: - rule: Domain = MaskDomain(domain) - else: - rule = Domain(domain) - factory = partial(MatchedSubAppResource, rule, subapp) - return self._add_subapp(factory, subapp) - - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: - return self.router.add_routes(routes) - - @property - def on_response_prepare(self) -> _RespPrepareSignal: - return self._on_response_prepare - - @property - def on_startup(self) -> _AppSignal: - return self._on_startup - - @property - def on_shutdown(self) -> _AppSignal: - return self._on_shutdown - - @property - def on_cleanup(self) -> _AppSignal: - return self._on_cleanup - - @property - def cleanup_ctx(self) -> "CleanupContext": - return self._cleanup_ctx - - @property - def router(self) -> UrlDispatcher: - return self._router - - @property - def middlewares(self) -> _Middlewares: - return self._middlewares - - def _make_handler( - self, - *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - **kwargs: Any, - ) -> Server: - - if not issubclass(access_log_class, AbstractAccessLogger): - raise TypeError( - "access_log_class must be subclass of " - "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) - ) - - self._set_loop(loop) - self.freeze() - - kwargs["debug"] = self._debug - kwargs["access_log_class"] = access_log_class - if self._handler_args: - for k, v in self._handler_args.items(): - kwargs[k] = v - - return Server( - self._handle, # type: ignore[arg-type] - 
request_factory=self._make_request, - loop=self._loop, - **kwargs, - ) - - def make_handler( - self, - *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - **kwargs: Any, - ) -> Server: - - warnings.warn( - "Application.make_handler(...) is deprecated, use AppRunner API instead", - DeprecationWarning, - stacklevel=2, - ) - - return self._make_handler( - loop=loop, access_log_class=access_log_class, **kwargs - ) - - async def startup(self) -> None: - """Causes on_startup signal - - Should be called in the event loop along with the request handler. - """ - await self.on_startup.send(self) - - async def shutdown(self) -> None: - """Causes on_shutdown signal - - Should be called before cleanup() - """ - await self.on_shutdown.send(self) - - async def cleanup(self) -> None: - """Causes on_cleanup signal - - Should be called after shutdown() - """ - if self.on_cleanup.frozen: - await self.on_cleanup.send(self) - else: - # If an exception occurs in startup, ensure cleanup contexts are completed. - await self._cleanup_ctx._on_cleanup(self) - - def _make_request( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: RequestHandler, - writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - _cls: Type[Request] = Request, - ) -> Request: - if TYPE_CHECKING: - assert self._loop is not None - return _cls( - message, - payload, - protocol, - writer, - task, - self._loop, - client_max_size=self._client_max_size, - ) - - def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: - for m in reversed(self._middlewares): - if getattr(m, "__middleware_version__", None) == 1: - yield m, True - else: - warnings.warn( - f'old-style middleware "{m!r}" deprecated, see #2252', - DeprecationWarning, - stacklevel=2, - ) - yield m, False - - yield _fix_request_current_app(self), True - - async def _handle(self, request: Request) -> StreamResponse: - loop = asyncio.get_event_loop() - debug = loop.get_debug() - match_info = await self._router.resolve(request) - if debug: # pragma: no cover - if not isinstance(match_info, AbstractMatchInfo): - raise TypeError( - "match_info should be AbstractMatchInfo " - "instance, not {!r}".format(match_info) - ) - match_info.add_app(self) - - match_info.freeze() - - request._match_info = match_info - - if request.headers.get(hdrs.EXPECT): - resp = await match_info.expect_handler(request) - await request.writer.drain() - if resp is not None: - return resp - - handler = match_info.handler - - if self._run_middlewares: - # If its a SystemRoute, don't cache building the middlewares since - # they are constructed for every MatchInfoError as a new handler - # is made each time. 
- if not self._has_legacy_middlewares and not isinstance( - match_info.route, SystemRoute - ): - handler = _cached_build_middleware(handler, match_info.apps) - else: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler - ) - else: - handler = await m(app, handler) # type: ignore[arg-type,assignment] - - return await handler(request) - - def __call__(self) -> "Application": - """gunicorn compatibility""" - return self - - def __repr__(self) -> str: - return f"" - - def __bool__(self) -> bool: - return True - - -class CleanupError(RuntimeError): - @property - def exceptions(self) -> List[BaseException]: - return cast(List[BaseException], self.args[1]) - - -if TYPE_CHECKING: - _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] -else: - _CleanupContextBase = FrozenList - - -class CleanupContext(_CleanupContextBase): - def __init__(self) -> None: - super().__init__() - self._exits: List[AsyncIterator[None]] = [] - - async def _on_startup(self, app: Application) -> None: - for cb in self: - it = cb(app).__aiter__() - await it.__anext__() - self._exits.append(it) - - async def _on_cleanup(self, app: Application) -> None: - errors = [] - for it in reversed(self._exits): - try: - await it.__anext__() - except StopAsyncIteration: - pass - except (Exception, asyncio.CancelledError) as exc: - errors.append(exc) - else: - errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) - if errors: - if len(errors) == 1: - raise errors[0] - else: - raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/venv/Lib/site-packages/aiohttp/web_exceptions.py b/venv/Lib/site-packages/aiohttp/web_exceptions.py deleted file mode 100644 index ee2c1e7..0000000 --- a/venv/Lib/site-packages/aiohttp/web_exceptions.py +++ /dev/null @@ -1,452 +0,0 @@ -import warnings -from typing import Any, Dict, Iterable, List, Optional, Set # noqa - -from yarl import URL - -from .typedefs import LooseHeaders, StrOrURL -from .web_response import Response - -__all__ = ( - "HTTPException", - "HTTPError", - "HTTPRedirection", - "HTTPSuccessful", - "HTTPOk", - "HTTPCreated", - "HTTPAccepted", - "HTTPNonAuthoritativeInformation", - "HTTPNoContent", - "HTTPResetContent", - "HTTPPartialContent", - "HTTPMove", - "HTTPMultipleChoices", - "HTTPMovedPermanently", - "HTTPFound", - "HTTPSeeOther", - "HTTPNotModified", - "HTTPUseProxy", - "HTTPTemporaryRedirect", - "HTTPPermanentRedirect", - "HTTPClientError", - "HTTPBadRequest", - "HTTPUnauthorized", - "HTTPPaymentRequired", - "HTTPForbidden", - "HTTPNotFound", - "HTTPMethodNotAllowed", - "HTTPNotAcceptable", - "HTTPProxyAuthenticationRequired", - "HTTPRequestTimeout", - "HTTPConflict", - "HTTPGone", - "HTTPLengthRequired", - "HTTPPreconditionFailed", - "HTTPRequestEntityTooLarge", - "HTTPRequestURITooLong", - "HTTPUnsupportedMediaType", - "HTTPRequestRangeNotSatisfiable", - "HTTPExpectationFailed", - "HTTPMisdirectedRequest", - "HTTPUnprocessableEntity", - "HTTPFailedDependency", - "HTTPUpgradeRequired", - "HTTPPreconditionRequired", - "HTTPTooManyRequests", - "HTTPRequestHeaderFieldsTooLarge", - "HTTPUnavailableForLegalReasons", - "HTTPServerError", - "HTTPInternalServerError", - "HTTPNotImplemented", - "HTTPBadGateway", - "HTTPServiceUnavailable", - "HTTPGatewayTimeout", - "HTTPVersionNotSupported", - "HTTPVariantAlsoNegotiates", - "HTTPInsufficientStorage", - "HTTPNotExtended", - 
"HTTPNetworkAuthenticationRequired", -) - - -class NotAppKeyWarning(UserWarning): - """Warning when not using AppKey in Application.""" - - -############################################################ -# HTTP Exceptions -############################################################ - - -class HTTPException(Response, Exception): - - # You should set in subclasses: - # status = 200 - - status_code = -1 - empty_body = False - - __http_exception__ = True - - def __init__( - self, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - if body is not None: - warnings.warn( - "body argument is deprecated for http web exceptions", - DeprecationWarning, - ) - Response.__init__( - self, - status=self.status_code, - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - Exception.__init__(self, self.reason) - if self.body is None and not self.empty_body: - self.text = f"{self.status}: {self.reason}" - - def __bool__(self) -> bool: - return True - - -class HTTPError(HTTPException): - """Base class for exceptions with status codes in the 400s and 500s.""" - - -class HTTPRedirection(HTTPException): - """Base class for exceptions with status codes in the 300s.""" - - -class HTTPSuccessful(HTTPException): - """Base class for exceptions with status codes in the 200s.""" - - -class HTTPOk(HTTPSuccessful): - status_code = 200 - - -class HTTPCreated(HTTPSuccessful): - status_code = 201 - - -class HTTPAccepted(HTTPSuccessful): - status_code = 202 - - -class HTTPNonAuthoritativeInformation(HTTPSuccessful): - status_code = 203 - - -class HTTPNoContent(HTTPSuccessful): - status_code = 204 - empty_body = True - - -class HTTPResetContent(HTTPSuccessful): - status_code = 205 - empty_body = True - - -class HTTPPartialContent(HTTPSuccessful): - status_code = 206 - - -############################################################ -# 3xx redirection -############################################################ - - -class HTTPMove(HTTPRedirection): - def __init__( - self, - location: StrOrURL, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - if not location: - raise ValueError("HTTP redirects need a location to redirect to.") - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Location"] = str(URL(location)) - self.location = location - - -class HTTPMultipleChoices(HTTPMove): - status_code = 300 - - -class HTTPMovedPermanently(HTTPMove): - status_code = 301 - - -class HTTPFound(HTTPMove): - status_code = 302 - - -# This one is safe after a POST (the redirected location will be -# retrieved with GET): -class HTTPSeeOther(HTTPMove): - status_code = 303 - - -class HTTPNotModified(HTTPRedirection): - # FIXME: this should include a date or etag header - status_code = 304 - empty_body = True - - -class HTTPUseProxy(HTTPMove): - # Not a move, but looks a little like one - status_code = 305 - - -class HTTPTemporaryRedirect(HTTPMove): - status_code = 307 - - -class HTTPPermanentRedirect(HTTPMove): - status_code = 308 - - -############################################################ -# 4xx client error -############################################################ - - -class HTTPClientError(HTTPError): - pass - - -class HTTPBadRequest(HTTPClientError): - 
status_code = 400 - - -class HTTPUnauthorized(HTTPClientError): - status_code = 401 - - -class HTTPPaymentRequired(HTTPClientError): - status_code = 402 - - -class HTTPForbidden(HTTPClientError): - status_code = 403 - - -class HTTPNotFound(HTTPClientError): - status_code = 404 - - -class HTTPMethodNotAllowed(HTTPClientError): - status_code = 405 - - def __init__( - self, - method: str, - allowed_methods: Iterable[str], - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - allow = ",".join(sorted(allowed_methods)) - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Allow"] = allow - self.allowed_methods: Set[str] = set(allowed_methods) - self.method = method.upper() - - -class HTTPNotAcceptable(HTTPClientError): - status_code = 406 - - -class HTTPProxyAuthenticationRequired(HTTPClientError): - status_code = 407 - - -class HTTPRequestTimeout(HTTPClientError): - status_code = 408 - - -class HTTPConflict(HTTPClientError): - status_code = 409 - - -class HTTPGone(HTTPClientError): - status_code = 410 - - -class HTTPLengthRequired(HTTPClientError): - status_code = 411 - - -class HTTPPreconditionFailed(HTTPClientError): - status_code = 412 - - -class HTTPRequestEntityTooLarge(HTTPClientError): - status_code = 413 - - def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: - kwargs.setdefault( - "text", - "Maximum request body size {} exceeded, " - "actual body size {}".format(max_size, actual_size), - ) - super().__init__(**kwargs) - - -class HTTPRequestURITooLong(HTTPClientError): - status_code = 414 - - -class HTTPUnsupportedMediaType(HTTPClientError): - status_code = 415 - - -class HTTPRequestRangeNotSatisfiable(HTTPClientError): - status_code = 416 - - -class HTTPExpectationFailed(HTTPClientError): - status_code = 417 - - -class HTTPMisdirectedRequest(HTTPClientError): - status_code = 421 - - -class HTTPUnprocessableEntity(HTTPClientError): - status_code = 422 - - -class HTTPFailedDependency(HTTPClientError): - status_code = 424 - - -class HTTPUpgradeRequired(HTTPClientError): - status_code = 426 - - -class HTTPPreconditionRequired(HTTPClientError): - status_code = 428 - - -class HTTPTooManyRequests(HTTPClientError): - status_code = 429 - - -class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): - status_code = 431 - - -class HTTPUnavailableForLegalReasons(HTTPClientError): - status_code = 451 - - def __init__( - self, - link: Optional[StrOrURL], - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self._link = None - if link: - self._link = URL(link) - self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"' - - @property - def link(self) -> Optional[URL]: - return self._link - - -############################################################ -# 5xx Server Error -############################################################ -# Response status codes beginning with the digit "5" indicate cases in -# which the server is aware that it has erred or is incapable of -# performing the request. 
Except when responding to a HEAD request, the -# server SHOULD include an entity containing an explanation of the error -# situation, and whether it is a temporary or permanent condition. User -# agents SHOULD display any included entity to the user. These response -# codes are applicable to any request method. - - -class HTTPServerError(HTTPError): - pass - - -class HTTPInternalServerError(HTTPServerError): - status_code = 500 - - -class HTTPNotImplemented(HTTPServerError): - status_code = 501 - - -class HTTPBadGateway(HTTPServerError): - status_code = 502 - - -class HTTPServiceUnavailable(HTTPServerError): - status_code = 503 - - -class HTTPGatewayTimeout(HTTPServerError): - status_code = 504 - - -class HTTPVersionNotSupported(HTTPServerError): - status_code = 505 - - -class HTTPVariantAlsoNegotiates(HTTPServerError): - status_code = 506 - - -class HTTPInsufficientStorage(HTTPServerError): - status_code = 507 - - -class HTTPNotExtended(HTTPServerError): - status_code = 510 - - -class HTTPNetworkAuthenticationRequired(HTTPServerError): - status_code = 511 diff --git a/venv/Lib/site-packages/aiohttp/web_fileresponse.py b/venv/Lib/site-packages/aiohttp/web_fileresponse.py deleted file mode 100644 index 26484b9..0000000 --- a/venv/Lib/site-packages/aiohttp/web_fileresponse.py +++ /dev/null @@ -1,418 +0,0 @@ -import asyncio -import io -import os -import pathlib -import sys -from contextlib import suppress -from enum import Enum, auto -from mimetypes import MimeTypes -from stat import S_ISREG -from types import MappingProxyType -from typing import ( # noqa - IO, - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Final, - Iterator, - List, - Optional, - Set, - Tuple, - Union, - cast, -) - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import ETAG_ANY, ETag, must_be_empty_body -from .typedefs import LooseHeaders, PathLike -from .web_exceptions import ( - HTTPForbidden, - HTTPNotFound, - HTTPNotModified, - HTTPPartialContent, - HTTPPreconditionFailed, - HTTPRequestRangeNotSatisfiable, -) -from .web_response import StreamResponse - -__all__ = ("FileResponse",) - -if TYPE_CHECKING: - from .web_request import BaseRequest - - -_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] - - -NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) - -CONTENT_TYPES: Final[MimeTypes] = MimeTypes() - -# File extension to IANA encodings map that will be checked in the order defined. -ENCODING_EXTENSIONS = MappingProxyType( - {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} -) - -FALLBACK_CONTENT_TYPE = "application/octet-stream" - -# Provide additional MIME type/extension pairs to be recognized. -# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only -ADDITIONAL_CONTENT_TYPES = MappingProxyType( - { - "application/gzip": ".gz", - "application/x-brotli": ".br", - "application/x-bzip2": ".bz2", - "application/x-compress": ".Z", - "application/x-xz": ".xz", - } -) - - -class _FileResponseResult(Enum): - """The result of the file response.""" - - SEND_FILE = auto() # Ie a regular file to send - NOT_ACCEPTABLE = auto() # Ie a socket, or non-regular file - PRE_CONDITION_FAILED = auto() # Ie If-Match or If-None-Match failed - NOT_MODIFIED = auto() # 304 Not Modified - - -# Add custom pairs and clear the encodings map so guess_type ignores them. 
-CONTENT_TYPES.encodings_map.clear() -for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): - CONTENT_TYPES.add_type(content_type, extension) - - -_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() - - -class FileResponse(StreamResponse): - """A response object can be used to send files.""" - - def __init__( - self, - path: PathLike, - chunk_size: int = 256 * 1024, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - ) -> None: - super().__init__(status=status, reason=reason, headers=headers) - - self._path = pathlib.Path(path) - self._chunk_size = chunk_size - - def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes: - fobj.seek(offset) - return fobj.read(chunk_size) # type: ignore[no-any-return] - - async def _sendfile_fallback( - self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int - ) -> AbstractStreamWriter: - # To keep memory usage low,fobj is transferred in chunks - # controlled by the constructor's chunk_size argument. - - chunk_size = self._chunk_size - loop = asyncio.get_event_loop() - chunk = await loop.run_in_executor( - None, self._seek_and_read, fobj, offset, chunk_size - ) - while chunk: - await writer.write(chunk) - count = count - chunk_size - if count <= 0: - break - chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) - - await writer.drain() - return writer - - async def _sendfile( - self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int - ) -> AbstractStreamWriter: - writer = await super().prepare(request) - assert writer is not None - - if NOSENDFILE or self.compression: - return await self._sendfile_fallback(writer, fobj, offset, count) - - loop = request._loop - transport = request.transport - assert transport is not None - - try: - await loop.sendfile(transport, fobj, offset, count) - except NotImplementedError: - return await self._sendfile_fallback(writer, fobj, offset, count) - - await super().write_eof() - return writer - - @staticmethod - def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool: - if len(etags) == 1 and etags[0].value == ETAG_ANY: - return True - return any( - etag.value == etag_value for etag in etags if weak or not etag.is_weak - ) - - async def _not_modified( - self, request: "BaseRequest", etag_value: str, last_modified: float - ) -> Optional[AbstractStreamWriter]: - self.set_status(HTTPNotModified.status_code) - self._length_check = False - self.etag = etag_value - self.last_modified = last_modified - # Delete any Content-Length headers provided by user. HTTP 304 - # should always have empty response body - return await super().prepare(request) - - async def _precondition_failed( - self, request: "BaseRequest" - ) -> Optional[AbstractStreamWriter]: - self.set_status(HTTPPreconditionFailed.status_code) - self.content_length = 0 - return await super().prepare(request) - - def _make_response( - self, request: "BaseRequest", accept_encoding: str - ) -> Tuple[ - _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str] - ]: - """Return the response result, io object, stat result, and encoding. - - If an uncompressed file is returned, the encoding is set to - :py:data:`None`. - - This method should be called from a thread executor - since it calls os.stat which may block. 
- """ - file_path, st, file_encoding = self._get_file_path_stat_encoding( - accept_encoding - ) - if not file_path: - return _FileResponseResult.NOT_ACCEPTABLE, None, st, None - - etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" - - # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 - if (ifmatch := request.if_match) is not None and not self._etag_match( - etag_value, ifmatch, weak=False - ): - return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding - - if ( - (unmodsince := request.if_unmodified_since) is not None - and ifmatch is None - and st.st_mtime > unmodsince.timestamp() - ): - return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding - - # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 - if (ifnonematch := request.if_none_match) is not None and self._etag_match( - etag_value, ifnonematch, weak=True - ): - return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding - - if ( - (modsince := request.if_modified_since) is not None - and ifnonematch is None - and st.st_mtime <= modsince.timestamp() - ): - return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding - - fobj = file_path.open("rb") - with suppress(OSError): - # fstat() may not be available on all platforms - # Once we open the file, we want the fstat() to ensure - # the file has not changed between the first stat() - # and the open(). - st = os.stat(fobj.fileno()) - return _FileResponseResult.SEND_FILE, fobj, st, file_encoding - - def _get_file_path_stat_encoding( - self, accept_encoding: str - ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]: - file_path = self._path - for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): - if file_encoding not in accept_encoding: - continue - - compressed_path = file_path.with_suffix(file_path.suffix + file_extension) - with suppress(OSError): - # Do not follow symlinks and ignore any non-regular files. - st = compressed_path.lstat() - if S_ISREG(st.st_mode): - return compressed_path, st, file_encoding - - # Fallback to the uncompressed file - st = file_path.stat() - return file_path if S_ISREG(st.st_mode) else None, st, None - - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - loop = asyncio.get_running_loop() - # Encoding comparisons should be case-insensitive - # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - try: - response_result, fobj, st, file_encoding = await loop.run_in_executor( - None, self._make_response, request, accept_encoding - ) - except PermissionError: - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) - except OSError: - # Most likely to be FileNotFoundError or OSError for circular - # symlinks in python >= 3.13, so respond with 404. - self.set_status(HTTPNotFound.status_code) - return await super().prepare(request) - - # Forbid special files like sockets, pipes, devices, etc. 
- if response_result is _FileResponseResult.NOT_ACCEPTABLE: - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) - - if response_result is _FileResponseResult.PRE_CONDITION_FAILED: - return await self._precondition_failed(request) - - if response_result is _FileResponseResult.NOT_MODIFIED: - etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" - last_modified = st.st_mtime - return await self._not_modified(request, etag_value, last_modified) - - assert fobj is not None - try: - return await self._prepare_open_file(request, fobj, st, file_encoding) - finally: - # We do not await here because we do not want to wait - # for the executor to finish before returning the response - # so the connection can begin servicing another request - # as soon as possible. - close_future = loop.run_in_executor(None, fobj.close) - # Hold a strong reference to the future to prevent it from being - # garbage collected before it completes. - _CLOSE_FUTURES.add(close_future) - close_future.add_done_callback(_CLOSE_FUTURES.remove) - - async def _prepare_open_file( - self, - request: "BaseRequest", - fobj: io.BufferedReader, - st: os.stat_result, - file_encoding: Optional[str], - ) -> Optional[AbstractStreamWriter]: - status = self._status - file_size: int = st.st_size - file_mtime: float = st.st_mtime - count: int = file_size - start: Optional[int] = None - - if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp(): - # If-Range header check: - # condition = cached date >= last modification date - # return 206 if True else 200. - # if False: - # Range header would not be processed, return 200 - # if True but Range header missing - # return 200 - try: - rng = request.http_range - start = rng.start - end: Optional[int] = rng.stop - except ValueError: - # https://tools.ietf.org/html/rfc7233: - # A server generating a 416 (Range Not Satisfiable) response to - # a byte-range request SHOULD send a Content-Range header field - # with an unsatisfied-range value. - # The complete-length in a 416 response indicates the current - # length of the selected representation. - # - # Will do the same below. Many servers ignore this and do not - # send a Content-Range header with HTTP 416 - self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" - self.set_status(HTTPRequestRangeNotSatisfiable.status_code) - return await super().prepare(request) - - # If a range request has been made, convert start, end slice - # notation into file pointer offset and count - if start is not None: - if start < 0 and end is None: # return tail of file - start += file_size - if start < 0: - # if Range:bytes=-1000 in request header but file size - # is only 200, there would be trouble without this - start = 0 - count = file_size - start - else: - # rfc7233:If the last-byte-pos value is - # absent, or if the value is greater than or equal to - # the current length of the representation data, - # the byte range is interpreted as the remainder - # of the representation (i.e., the server replaces the - # value of last-byte-pos with a value that is one less than - # the current length of the selected representation). - count = ( - min(end if end is not None else file_size, file_size) - start - ) - - if start >= file_size: - # HTTP 416 should be returned in this case. 
- # - # According to https://tools.ietf.org/html/rfc7233: - # If a valid byte-range-set includes at least one - # byte-range-spec with a first-byte-pos that is less than - # the current length of the representation, or at least one - # suffix-byte-range-spec with a non-zero suffix-length, - # then the byte-range-set is satisfiable. Otherwise, the - # byte-range-set is unsatisfiable. - self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" - self.set_status(HTTPRequestRangeNotSatisfiable.status_code) - return await super().prepare(request) - - status = HTTPPartialContent.status_code - # Even though you are sending the whole file, you should still - # return a HTTP 206 for a Range request. - self.set_status(status) - - # If the Content-Type header is not already set, guess it based on the - # extension of the request path. The encoding returned by guess_type - # can be ignored since the map was cleared above. - if hdrs.CONTENT_TYPE not in self._headers: - if sys.version_info >= (3, 13): - guesser = CONTENT_TYPES.guess_file_type - else: - guesser = CONTENT_TYPES.guess_type - self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE - - if file_encoding: - self._headers[hdrs.CONTENT_ENCODING] = file_encoding - self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING - # Disable compression if we are already sending - # a compressed file since we don't want to double - # compress. - self._compression = False - - self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" - self.last_modified = file_mtime - self.content_length = count - - self._headers[hdrs.ACCEPT_RANGES] = "bytes" - - if status == HTTPPartialContent.status_code: - real_start = start - assert real_start is not None - self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( - real_start, real_start + count - 1, file_size - ) - - # If we are sending 0 bytes calling sendfile() will throw a ValueError - if count == 0 or must_be_empty_body(request.method, status): - return await super().prepare(request) - - # be aware that start could be None or int=0 here. - offset = start or 0 - - return await self._sendfile(request, fobj, offset, count) diff --git a/venv/Lib/site-packages/aiohttp/web_log.py b/venv/Lib/site-packages/aiohttp/web_log.py deleted file mode 100644 index d5ea2be..0000000 --- a/venv/Lib/site-packages/aiohttp/web_log.py +++ /dev/null @@ -1,216 +0,0 @@ -import datetime -import functools -import logging -import os -import re -import time as time_mod -from collections import namedtuple -from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa - -from .abc import AbstractAccessLogger -from .web_request import BaseRequest -from .web_response import StreamResponse - -KeyMethod = namedtuple("KeyMethod", "key method") - - -class AccessLogger(AbstractAccessLogger): - """Helper object to log access. 
- - Usage: - log = logging.getLogger("spam") - log_format = "%a %{User-Agent}i" - access_logger = AccessLogger(log, log_format) - access_logger.log(request, response, time) - - Format: - %% The percent sign - %a Remote IP-address (IP-address of proxy if using reverse proxy) - %t Time when the request was started to process - %P The process ID of the child that serviced the request - %r First line of request - %s Response status code - %b Size of response in bytes, including HTTP headers - %T Time taken to serve the request, in seconds - %Tf Time taken to serve the request, in seconds with floating fraction - in .06f format - %D Time taken to serve the request, in microseconds - %{FOO}i request.headers['FOO'] - %{FOO}o response.headers['FOO'] - %{FOO}e os.environ['FOO'] - - """ - - LOG_FORMAT_MAP = { - "a": "remote_address", - "t": "request_start_time", - "P": "process_id", - "r": "first_request_line", - "s": "response_status", - "b": "response_size", - "T": "request_time", - "Tf": "request_time_frac", - "D": "request_time_micro", - "i": "request_header", - "o": "response_header", - } - - LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' - FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") - CLEANUP_RE = re.compile(r"(%[^s])") - _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} - - def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: - """Initialise the logger. - - logger is a logger object to be used for logging. - log_format is a string with apache compatible log format description. - - """ - super().__init__(logger, log_format=log_format) - - _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) - if not _compiled_format: - _compiled_format = self.compile_format(log_format) - AccessLogger._FORMAT_CACHE[log_format] = _compiled_format - - self._log_format, self._methods = _compiled_format - - def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: - """Translate log_format into form usable by modulo formatting - - All known atoms will be replaced with %s - Also methods for formatting of those atoms will be added to - _methods in appropriate order - - For example we have log_format = "%a %t" - This format will be translated to "%s %s" - Also contents of _methods will be - [self._format_a, self._format_t] - These method will be called and results will be passed - to translated string format. 
- - Each _format_* method receive 'args' which is list of arguments - given to self.log - - Exceptions are _format_e, _format_i and _format_o methods which - also receive key name (by functools.partial) - - """ - # list of (key, method) tuples, we don't use an OrderedDict as users - # can repeat the same key more than once - methods = list() - - for atom in self.FORMAT_RE.findall(log_format): - if atom[1] == "": - format_key1 = self.LOG_FORMAT_MAP[atom[0]] - m = getattr(AccessLogger, "_format_%s" % atom[0]) - key_method = KeyMethod(format_key1, m) - else: - format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) - m = getattr(AccessLogger, "_format_%s" % atom[2]) - key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) - - methods.append(key_method) - - log_format = self.FORMAT_RE.sub(r"%s", log_format) - log_format = self.CLEANUP_RE.sub(r"%\1", log_format) - return log_format, methods - - @staticmethod - def _format_i( - key: str, request: BaseRequest, response: StreamResponse, time: float - ) -> str: - if request is None: - return "(no headers)" - - # suboptimal, make istr(key) once - return request.headers.get(key, "-") - - @staticmethod - def _format_o( - key: str, request: BaseRequest, response: StreamResponse, time: float - ) -> str: - # suboptimal, make istr(key) once - return response.headers.get(key, "-") - - @staticmethod - def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: - if request is None: - return "-" - ip = request.remote - return ip if ip is not None else "-" - - @staticmethod - def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: - tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone)) - now = datetime.datetime.now(tz) - start_time = now - datetime.timedelta(seconds=time) - return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]") - - @staticmethod - def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: - return "<%s>" % os.getpid() - - @staticmethod - def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: - if request is None: - return "-" - return "{} {} HTTP/{}.{}".format( - request.method, - request.path_qs, - request.version.major, - request.version.minor, - ) - - @staticmethod - def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: - return response.status - - @staticmethod - def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: - return response.body_length - - @staticmethod - def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: - return str(round(time)) - - @staticmethod - def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: - return "%06f" % time - - @staticmethod - def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: - return str(round(time * 1000000)) - - def _format_line( - self, request: BaseRequest, response: StreamResponse, time: float - ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: - return [(key, method(request, response, time)) for key, method in self._methods] - - @property - def enabled(self) -> bool: - """Check if logger is enabled.""" - # Avoid formatting the log line if it will not be emitted. 
- return self.logger.isEnabledFor(logging.INFO) - - def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - try: - fmt_info = self._format_line(request, response, time) - - values = list() - extra = dict() - for key, value in fmt_info: - values.append(value) - - if key.__class__ is str: - extra[key] = value - else: - k1, k2 = key # type: ignore[misc] - dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] - dct[k2] = value # type: ignore[index,has-type] - extra[k1] = dct # type: ignore[has-type,assignment] - - self.logger.info(self._log_format % tuple(values), extra=extra) - except Exception: - self.logger.exception("Error in logging") diff --git a/venv/Lib/site-packages/aiohttp/web_middlewares.py b/venv/Lib/site-packages/aiohttp/web_middlewares.py deleted file mode 100644 index 2f1f5f5..0000000 --- a/venv/Lib/site-packages/aiohttp/web_middlewares.py +++ /dev/null @@ -1,121 +0,0 @@ -import re -from typing import TYPE_CHECKING, Tuple, Type, TypeVar - -from .typedefs import Handler, Middleware -from .web_exceptions import HTTPMove, HTTPPermanentRedirect -from .web_request import Request -from .web_response import StreamResponse -from .web_urldispatcher import SystemRoute - -__all__ = ( - "middleware", - "normalize_path_middleware", -) - -if TYPE_CHECKING: - from .web_app import Application - -_Func = TypeVar("_Func") - - -async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: - alt_request = request.clone(rel_url=path) - - match_info = await request.app.router.resolve(alt_request) - alt_request._match_info = match_info - - if match_info.http_exception is None: - return True, alt_request - - return False, request - - -def middleware(f: _Func) -> _Func: - f.__middleware_version__ = 1 # type: ignore[attr-defined] - return f - - -def normalize_path_middleware( - *, - append_slash: bool = True, - remove_slash: bool = False, - merge_slashes: bool = True, - redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, -) -> Middleware: - """Factory for producing a middleware that normalizes the path of a request. - - Normalizing means: - - Add or remove a trailing slash to the path. - - Double slashes are replaced by one. - - The middleware returns as soon as it finds a path that resolves - correctly. The order if both merge and append/remove are enabled is - 1) merge slashes - 2) append/remove slash - 3) both merge slashes and append/remove slash. - If the path resolves with at least one of those conditions, it will - redirect to the new path. - - Only one of `append_slash` and `remove_slash` can be enabled. If both - are `True` the factory will raise an assertion error - - If `append_slash` is `True` the middleware will append a slash when - needed. If a resource is defined with trailing slash and the request - comes without it, it will append it automatically. - - If `remove_slash` is `True`, `append_slash` must be `False`. When enabled - the middleware will remove trailing slashes and redirect if the resource - is defined - - If merge_slashes is True, merge multiple consecutive slashes in the - path into one. - """ - correct_configuration = not (append_slash and remove_slash) - assert correct_configuration, "Cannot both remove and append slash" - - @middleware - async def impl(request: Request, handler: Handler) -> StreamResponse: - if isinstance(request.match_info.route, SystemRoute): - paths_to_check = [] - if "?" in request.raw_path: - path, query = request.raw_path.split("?", 1) - query = "?" 
+ query - else: - query = "" - path = request.raw_path - - if merge_slashes: - paths_to_check.append(re.sub("//+", "/", path)) - if append_slash and not request.path.endswith("/"): - paths_to_check.append(path + "/") - if remove_slash and request.path.endswith("/"): - paths_to_check.append(path[:-1]) - if merge_slashes and append_slash: - paths_to_check.append(re.sub("//+", "/", path + "/")) - if merge_slashes and remove_slash: - merged_slashes = re.sub("//+", "/", path) - paths_to_check.append(merged_slashes[:-1]) - - for path in paths_to_check: - path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg - resolves, request = await _check_request_resolves(request, path) - if resolves: - raise redirect_class(request.raw_path + query) - - return await handler(request) - - return impl - - -def _fix_request_current_app(app: "Application") -> Middleware: - @middleware - async def impl(request: Request, handler: Handler) -> StreamResponse: - match_info = request.match_info - prev = match_info.current_app - match_info.current_app = app - try: - return await handler(request) - finally: - match_info.current_app = prev - - return impl diff --git a/venv/Lib/site-packages/aiohttp/web_protocol.py b/venv/Lib/site-packages/aiohttp/web_protocol.py deleted file mode 100644 index 1bd344a..0000000 --- a/venv/Lib/site-packages/aiohttp/web_protocol.py +++ /dev/null @@ -1,792 +0,0 @@ -import asyncio -import asyncio.streams -import sys -import traceback -import warnings -from collections import deque -from contextlib import suppress -from html import escape as html_escape -from http import HTTPStatus -from logging import Logger -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Deque, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -import attr -import yarl -from propcache import under_cached_property - -from .abc import AbstractAccessLogger, AbstractStreamWriter -from .base_protocol import BaseProtocol -from .helpers import ceil_timeout -from .http import ( - HttpProcessingError, - HttpRequestParser, - HttpVersion10, - RawRequestMessage, - StreamWriter, -) -from .http_exceptions import BadHttpMethod -from .log import access_logger, server_logger -from .streams import EMPTY_PAYLOAD, StreamReader -from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException, HTTPInternalServerError -from .web_log import AccessLogger -from .web_request import BaseRequest -from .web_response import Response, StreamResponse - -__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") - -if TYPE_CHECKING: - import ssl - - from .web_server import Server - - -_RequestFactory = Callable[ - [ - RawRequestMessage, - StreamReader, - "RequestHandler", - AbstractStreamWriter, - "asyncio.Task[None]", - ], - BaseRequest, -] - -_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] - -ERROR = RawRequestMessage( - "UNKNOWN", - "/", - HttpVersion10, - {}, # type: ignore[arg-type] - {}, # type: ignore[arg-type] - True, - None, - False, - False, - yarl.URL("/"), -) - - -class RequestPayloadError(Exception): - """Payload parsing error.""" - - -class PayloadAccessError(Exception): - """Payload was accessed after response was sent.""" - - -_PAYLOAD_ACCESS_ERROR = PayloadAccessError() - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class _ErrInfo: - status: int - exc: BaseException - message: str - - -_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] - - -class RequestHandler(BaseProtocol): - """HTTP protocol implementation. 
- - RequestHandler handles incoming HTTP request. It reads request line, - request headers and request payload and calls handle_request() method. - By default it always returns with 404 response. - - RequestHandler handles errors in incoming request, like bad - status line, bad headers or incomplete payload. If any error occurs, - connection gets closed. - - keepalive_timeout -- number of seconds before closing - keep-alive connection - - tcp_keepalive -- TCP keep-alive is on, default is on - - debug -- enable debug mode - - logger -- custom logger object - - access_log_class -- custom class for access_logger - - access_log -- custom logging object - - access_log_format -- access log format string - - loop -- Optional event loop - - max_line_size -- Optional maximum header line size - - max_field_size -- Optional maximum header field size - - max_headers -- Optional maximum header size - - timeout_ceil_threshold -- Optional value to specify - threshold to ceil() timeout - values - - """ - - __slots__ = ( - "_request_count", - "_keepalive", - "_manager", - "_request_handler", - "_request_factory", - "_tcp_keepalive", - "_next_keepalive_close_time", - "_keepalive_handle", - "_keepalive_timeout", - "_lingering_time", - "_messages", - "_message_tail", - "_handler_waiter", - "_waiter", - "_task_handler", - "_upgrade", - "_payload_parser", - "_request_parser", - "_reading_paused", - "logger", - "debug", - "access_log", - "access_logger", - "_close", - "_force_close", - "_current_request", - "_timeout_ceil_threshold", - "_request_in_progress", - "_logging_enabled", - "_cache", - ) - - def __init__( - self, - manager: "Server", - *, - loop: asyncio.AbstractEventLoop, - # Default should be high enough that it's likely longer than a reverse proxy. - keepalive_timeout: float = 3630, - tcp_keepalive: bool = True, - logger: Logger = server_logger, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log: Logger = access_logger, - access_log_format: str = AccessLogger.LOG_FORMAT, - debug: bool = False, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - lingering_time: float = 10.0, - read_bufsize: int = 2**16, - auto_decompress: bool = True, - timeout_ceil_threshold: float = 5, - ): - super().__init__(loop) - - # _request_count is the number of requests processed with the same connection. 
- self._request_count = 0 - self._keepalive = False - self._current_request: Optional[BaseRequest] = None - self._manager: Optional[Server] = manager - self._request_handler: Optional[_RequestHandler] = manager.request_handler - self._request_factory: Optional[_RequestFactory] = manager.request_factory - - self._tcp_keepalive = tcp_keepalive - # placeholder to be replaced on keepalive timeout setup - self._next_keepalive_close_time = 0.0 - self._keepalive_handle: Optional[asyncio.Handle] = None - self._keepalive_timeout = keepalive_timeout - self._lingering_time = float(lingering_time) - - self._messages: Deque[_MsgType] = deque() - self._message_tail = b"" - - self._waiter: Optional[asyncio.Future[None]] = None - self._handler_waiter: Optional[asyncio.Future[None]] = None - self._task_handler: Optional[asyncio.Task[None]] = None - - self._upgrade = False - self._payload_parser: Any = None - self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( - self, - loop, - read_bufsize, - max_line_size=max_line_size, - max_field_size=max_field_size, - max_headers=max_headers, - payload_exception=RequestPayloadError, - auto_decompress=auto_decompress, - ) - - self._timeout_ceil_threshold: float = 5 - try: - self._timeout_ceil_threshold = float(timeout_ceil_threshold) - except (TypeError, ValueError): - pass - - self.logger = logger - self.debug = debug - self.access_log = access_log - if access_log: - self.access_logger: Optional[AbstractAccessLogger] = access_log_class( - access_log, access_log_format - ) - self._logging_enabled = self.access_logger.enabled - else: - self.access_logger = None - self._logging_enabled = False - - self._close = False - self._force_close = False - self._request_in_progress = False - self._cache: dict[str, Any] = {} - - def __repr__(self) -> str: - return "<{} {}>".format( - self.__class__.__name__, - "connected" if self.transport is not None else "disconnected", - ) - - @under_cached_property - def ssl_context(self) -> Optional["ssl.SSLContext"]: - """Return SSLContext if available.""" - return ( - None - if self.transport is None - else self.transport.get_extra_info("sslcontext") - ) - - @under_cached_property - def peername( - self, - ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]: - """Return peername if available.""" - return ( - None - if self.transport is None - else self.transport.get_extra_info("peername") - ) - - @property - def keepalive_timeout(self) -> float: - return self._keepalive_timeout - - async def shutdown(self, timeout: Optional[float] = 15.0) -> None: - """Do worker process exit preparations. - - We need to clean up everything and stop accepting requests. - It is especially important for keep-alive connections. - """ - self._force_close = True - - if self._keepalive_handle is not None: - self._keepalive_handle.cancel() - - # Wait for graceful handler completion - if self._request_in_progress: - # The future is only created when we are shutting - # down while the handler is still processing a request - # to avoid creating a future for every request. 
- self._handler_waiter = self._loop.create_future() - try: - async with ceil_timeout(timeout): - await self._handler_waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._handler_waiter = None - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - # Then cancel handler and wait - try: - async with ceil_timeout(timeout): - if self._current_request is not None: - self._current_request._cancel(asyncio.CancelledError()) - - if self._task_handler is not None and not self._task_handler.done(): - await asyncio.shield(self._task_handler) - except (asyncio.CancelledError, asyncio.TimeoutError): - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - - # force-close non-idle handler - if self._task_handler is not None: - self._task_handler.cancel() - - self.force_close() - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - super().connection_made(transport) - - real_transport = cast(asyncio.Transport, transport) - if self._tcp_keepalive: - tcp_keepalive(real_transport) - - assert self._manager is not None - self._manager.connection_made(self, real_transport) - - loop = self._loop - if sys.version_info >= (3, 12): - task = asyncio.Task(self.start(), loop=loop, eager_start=True) - else: - task = loop.create_task(self.start()) - self._task_handler = task - - def connection_lost(self, exc: Optional[BaseException]) -> None: - if self._manager is None: - return - self._manager.connection_lost(self, exc) - - # Grab value before setting _manager to None. - handler_cancellation = self._manager.handler_cancellation - - self.force_close() - super().connection_lost(exc) - self._manager = None - self._request_factory = None - self._request_handler = None - self._request_parser = None - - if self._keepalive_handle is not None: - self._keepalive_handle.cancel() - - if self._current_request is not None: - if exc is None: - exc = ConnectionResetError("Connection lost") - self._current_request._cancel(exc) - - if handler_cancellation and self._task_handler is not None: - self._task_handler.cancel() - - self._task_handler = None - - if self._payload_parser is not None: - self._payload_parser.feed_eof() - self._payload_parser = None - - def set_parser(self, parser: Any) -> None: - # Actual type is WebReader - assert self._payload_parser is None - - self._payload_parser = parser - - if self._message_tail: - self._payload_parser.feed_data(self._message_tail) - self._message_tail = b"" - - def eof_received(self) -> None: - pass - - def data_received(self, data: bytes) -> None: - if self._force_close or self._close: - return - # parse http messages - messages: Sequence[_MsgType] - if self._payload_parser is None and not self._upgrade: - assert self._request_parser is not None - try: - messages, upgraded, tail = self._request_parser.feed_data(data) - except HttpProcessingError as exc: - messages = [ - (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) - ] - upgraded = False - tail = b"" - - for msg, payload in messages or (): - self._request_count += 1 - self._messages.append((msg, payload)) - - waiter = self._waiter - if messages and waiter is not None and not waiter.done(): - # don't set result twice - waiter.set_result(None) - - self._upgrade = upgraded - if upgraded and tail: - self._message_tail = tail - - # no parser, just store - elif self._payload_parser is None and self._upgrade and data: - self._message_tail += data - - # feed payload - elif 
data: - eof, tail = self._payload_parser.feed_data(data) - if eof: - self.close() - - def keep_alive(self, val: bool) -> None: - """Set keep-alive connection mode. - - :param bool val: new state. - """ - self._keepalive = val - if self._keepalive_handle: - self._keepalive_handle.cancel() - self._keepalive_handle = None - - def close(self) -> None: - """Close connection. - - Stop accepting new pipelining messages and close - connection when handlers done processing messages. - """ - self._close = True - if self._waiter: - self._waiter.cancel() - - def force_close(self) -> None: - """Forcefully close connection.""" - self._force_close = True - if self._waiter: - self._waiter.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None - - def log_access( - self, request: BaseRequest, response: StreamResponse, time: Optional[float] - ) -> None: - if self._logging_enabled and self.access_logger is not None: - if TYPE_CHECKING: - assert time is not None - self.access_logger.log(request, response, self._loop.time() - time) - - def log_debug(self, *args: Any, **kw: Any) -> None: - if self.debug: - self.logger.debug(*args, **kw) - - def log_exception(self, *args: Any, **kw: Any) -> None: - self.logger.exception(*args, **kw) - - def _process_keepalive(self) -> None: - self._keepalive_handle = None - if self._force_close or not self._keepalive: - return - - loop = self._loop - now = loop.time() - close_time = self._next_keepalive_close_time - if now < close_time: - # Keep alive close check fired too early, reschedule - self._keepalive_handle = loop.call_at(close_time, self._process_keepalive) - return - - # handler in idle state - if self._waiter and not self._waiter.done(): - self.force_close() - - async def _handle_request( - self, - request: BaseRequest, - start_time: Optional[float], - request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], - ) -> Tuple[StreamResponse, bool]: - self._request_in_progress = True - try: - try: - self._current_request = request - resp = await request_handler(request) - finally: - self._current_request = None - except HTTPException as exc: - resp = exc - resp, reset = await self.finish_response(request, resp, start_time) - except asyncio.CancelledError: - raise - except asyncio.TimeoutError as exc: - self.log_debug("Request handler timed out.", exc_info=exc) - resp = self.handle_error(request, 504) - resp, reset = await self.finish_response(request, resp, start_time) - except Exception as exc: - resp = self.handle_error(request, 500, exc) - resp, reset = await self.finish_response(request, resp, start_time) - else: - # Deprecation warning (See #2415) - if getattr(resp, "__http_exception__", False): - warnings.warn( - "returning HTTPException object is deprecated " - "(#2415) and will be removed, " - "please raise the exception instead", - DeprecationWarning, - ) - - resp, reset = await self.finish_response(request, resp, start_time) - finally: - self._request_in_progress = False - if self._handler_waiter is not None: - self._handler_waiter.set_result(None) - - return resp, reset - - async def start(self) -> None: - """Process incoming request. - - It reads request line, request headers and request payload, then - calls handle_request() method. Subclass has to override - handle_request(). start() handles various exceptions in request - or response handling. Connection is being closed always unless - keep_alive(True) specified. 
- """ - loop = self._loop - manager = self._manager - assert manager is not None - keepalive_timeout = self._keepalive_timeout - resp = None - assert self._request_factory is not None - assert self._request_handler is not None - - while not self._force_close: - if not self._messages: - try: - # wait for next request - self._waiter = loop.create_future() - await self._waiter - finally: - self._waiter = None - - message, payload = self._messages.popleft() - - # time is only fetched if logging is enabled as otherwise - # its thrown away and never used. - start = loop.time() if self._logging_enabled else None - - manager.requests_count += 1 - writer = StreamWriter(self, loop) - if isinstance(message, _ErrInfo): - # make request_factory work - request_handler = self._make_error_handler(message) - message = ERROR - else: - request_handler = self._request_handler - - # Important don't hold a reference to the current task - # as on traceback it will prevent the task from being - # collected and will cause a memory leak. - request = self._request_factory( - message, - payload, - self, - writer, - self._task_handler or asyncio.current_task(loop), # type: ignore[arg-type] - ) - try: - # a new task is used for copy context vars (#3406) - coro = self._handle_request(request, start, request_handler) - if sys.version_info >= (3, 12): - task = asyncio.Task(coro, loop=loop, eager_start=True) - else: - task = loop.create_task(coro) - try: - resp, reset = await task - except ConnectionError: - self.log_debug("Ignored premature client disconnection") - break - - # Drop the processed task from asyncio.Task.all_tasks() early - del task - if reset: - self.log_debug("Ignored premature client disconnection 2") - break - - # notify server about keep-alive - self._keepalive = bool(resp.keep_alive) - - # check payload - if not payload.is_eof(): - lingering_time = self._lingering_time - if not self._force_close and lingering_time: - self.log_debug( - "Start lingering close timer for %s sec.", lingering_time - ) - - now = loop.time() - end_t = now + lingering_time - - try: - while not payload.is_eof() and now < end_t: - async with ceil_timeout(end_t - now): - # read and ignore - await payload.readany() - now = loop.time() - except (asyncio.CancelledError, asyncio.TimeoutError): - if ( - sys.version_info >= (3, 11) - and (t := asyncio.current_task()) - and t.cancelling() - ): - raise - - # if payload still uncompleted - if not payload.is_eof() and not self._force_close: - self.log_debug("Uncompleted request.") - self.close() - - payload.set_exception(_PAYLOAD_ACCESS_ERROR) - - except asyncio.CancelledError: - self.log_debug("Ignored premature client disconnection") - self.force_close() - raise - except Exception as exc: - self.log_exception("Unhandled exception", exc_info=exc) - self.force_close() - except BaseException: - self.force_close() - raise - finally: - request._task = None # type: ignore[assignment] # Break reference cycle in case of exception - if self.transport is None and resp is not None: - self.log_debug("Ignored premature client disconnection.") - - if self._keepalive and not self._close and not self._force_close: - # start keep-alive timer - close_time = loop.time() + keepalive_timeout - self._next_keepalive_close_time = close_time - if self._keepalive_handle is None: - self._keepalive_handle = loop.call_at( - close_time, self._process_keepalive - ) - else: - break - - # remove handler, close transport if no handlers left - if not self._force_close: - self._task_handler = None - if self.transport is not 
None: - self.transport.close() - - async def finish_response( - self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float] - ) -> Tuple[StreamResponse, bool]: - """Prepare the response and write_eof, then log access. - - This has to - be called within the context of any exception so the access logger - can get exception information. Returns True if the client disconnects - prematurely. - """ - request._finish() - if self._request_parser is not None: - self._request_parser.set_upgraded(False) - self._upgrade = False - if self._message_tail: - self._request_parser.feed_data(self._message_tail) - self._message_tail = b"" - try: - prepare_meth = resp.prepare - except AttributeError: - if resp is None: - self.log_exception("Missing return statement on request handler") - else: - self.log_exception( - "Web-handler should return a response instance, " - "got {!r}".format(resp) - ) - exc = HTTPInternalServerError() - resp = Response( - status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers - ) - prepare_meth = resp.prepare - try: - await prepare_meth(request) - await resp.write_eof() - except ConnectionError: - self.log_access(request, resp, start_time) - return resp, True - - self.log_access(request, resp, start_time) - return resp, False - - def handle_error( - self, - request: BaseRequest, - status: int = 500, - exc: Optional[BaseException] = None, - message: Optional[str] = None, - ) -> StreamResponse: - """Handle errors. - - Returns HTTP response with specific status code. Logs additional - information. It always closes current connection. - """ - if self._request_count == 1 and isinstance(exc, BadHttpMethod): - # BadHttpMethod is common when a client sends non-HTTP - # or encrypted traffic to an HTTP port. This is expected - # to happen when connected to the public internet so we log - # it at the debug level as to not fill logs with noise. - self.logger.debug( - "Error handling request from %s", request.remote, exc_info=exc - ) - else: - self.log_exception( - "Error handling request from %s", request.remote, exc_info=exc - ) - - # some data already got sent, connection is broken - if request.writer.output_size > 0: - raise ConnectionError( - "Response is sent already, cannot send another response " - "with the error message" - ) - - ct = "text/plain" - if status == HTTPStatus.INTERNAL_SERVER_ERROR: - title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) - msg = HTTPStatus.INTERNAL_SERVER_ERROR.description - tb = None - if self.debug: - with suppress(Exception): - tb = traceback.format_exc() - - if "text/html" in request.headers.get("Accept", ""): - if tb: - tb = html_escape(tb) - msg = f"

<h2>Traceback:</h2>\n<pre>{tb}</pre>" - message = ( - "<html><head>" - "<title>{title}</title>" - "</head><body>\n<h1>{title}</h1>
" - "\n{msg}\n\n" - ).format(title=title, msg=msg) - ct = "text/html" - else: - if tb: - msg = tb - message = title + "\n\n" + msg - - resp = Response(status=status, text=message, content_type=ct) - resp.force_close() - - return resp - - def _make_error_handler( - self, err_info: _ErrInfo - ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: - async def handler(request: BaseRequest) -> StreamResponse: - return self.handle_error( - request, err_info.status, err_info.exc, err_info.message - ) - - return handler diff --git a/venv/Lib/site-packages/aiohttp/web_request.py b/venv/Lib/site-packages/aiohttp/web_request.py deleted file mode 100644 index 0eafcd6..0000000 --- a/venv/Lib/site-packages/aiohttp/web_request.py +++ /dev/null @@ -1,914 +0,0 @@ -import asyncio -import datetime -import io -import re -import socket -import string -import tempfile -import types -import warnings -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Final, - Iterator, - Mapping, - MutableMapping, - Optional, - Pattern, - Tuple, - Union, - cast, -) -from urllib.parse import parse_qsl - -import attr -from multidict import ( - CIMultiDict, - CIMultiDictProxy, - MultiDict, - MultiDictProxy, - MultiMapping, -) -from yarl import URL - -from . import hdrs -from ._cookie_helpers import parse_cookie_header -from .abc import AbstractStreamWriter -from .helpers import ( - _SENTINEL, - DEBUG, - ETAG_ANY, - LIST_QUOTED_ETAG_RE, - ChainMapProxy, - ETag, - HeadersMixin, - parse_http_date, - reify, - sentinel, - set_exception, -) -from .http_parser import RawRequestMessage -from .http_writer import HttpVersion -from .multipart import BodyPartReader, MultipartReader -from .streams import EmptyStreamReader, StreamReader -from .typedefs import ( - DEFAULT_JSON_DECODER, - JSONDecoder, - LooseHeaders, - RawHeaders, - StrOrURL, -) -from .web_exceptions import HTTPRequestEntityTooLarge -from .web_response import StreamResponse - -__all__ = ("BaseRequest", "FileField", "Request") - - -if TYPE_CHECKING: - from .web_app import Application - from .web_protocol import RequestHandler - from .web_urldispatcher import UrlMappingMatchInfo - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class FileField: - name: str - filename: str - file: io.BufferedReader - content_type: str - headers: CIMultiDictProxy[str] - - -_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" -# '-' at the end to prevent interpretation as range in a char class - -_TOKEN: Final[str] = rf"[{_TCHAR}]+" - -_QDTEXT: Final[str] = r"[{}]".format( - r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) -) -# qdtext includes 0x5C to escape 0x5D ('\]') -# qdtext excludes obs-text (because obsoleted, and encoding not specified) - -_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" - -_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( - qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR -) - -_FORWARDED_PAIR: Final[str] = ( - r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( - token=_TOKEN, quoted_string=_QUOTED_STRING - ) -) - -_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") -# same pattern as _QUOTED_PAIR but contains a capture group - -_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) - -############################################################ -# HTTP Request -############################################################ - - -class BaseRequest(MutableMapping[str, Any], HeadersMixin): - - POST_METHODS = { - hdrs.METH_PATCH, - 
hdrs.METH_POST, - hdrs.METH_PUT, - hdrs.METH_TRACE, - hdrs.METH_DELETE, - } - - ATTRS = HeadersMixin.ATTRS | frozenset( - [ - "_message", - "_protocol", - "_payload_writer", - "_payload", - "_headers", - "_method", - "_version", - "_rel_url", - "_post", - "_read_bytes", - "_state", - "_cache", - "_task", - "_client_max_size", - "_loop", - "_transport_sslcontext", - "_transport_peername", - ] - ) - _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None - _read_bytes: Optional[bytes] = None - - def __init__( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: "RequestHandler", - payload_writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - loop: asyncio.AbstractEventLoop, - *, - client_max_size: int = 1024**2, - state: Optional[Dict[str, Any]] = None, - scheme: Optional[str] = None, - host: Optional[str] = None, - remote: Optional[str] = None, - ) -> None: - self._message = message - self._protocol = protocol - self._payload_writer = payload_writer - - self._payload = payload - self._headers: CIMultiDictProxy[str] = message.headers - self._method = message.method - self._version = message.version - self._cache: Dict[str, Any] = {} - url = message.url - if url.absolute: - if scheme is not None: - url = url.with_scheme(scheme) - if host is not None: - url = url.with_host(host) - # absolute URL is given, - # override auto-calculating url, host, and scheme - # all other properties should be good - self._cache["url"] = url - self._cache["host"] = url.host - self._cache["scheme"] = url.scheme - self._rel_url = url.relative() - else: - self._rel_url = url - if scheme is not None: - self._cache["scheme"] = scheme - if host is not None: - self._cache["host"] = host - - self._state = {} if state is None else state - self._task = task - self._client_max_size = client_max_size - self._loop = loop - - self._transport_sslcontext = protocol.ssl_context - self._transport_peername = protocol.peername - - if remote is not None: - self._cache["remote"] = remote - - def clone( - self, - *, - method: Union[str, _SENTINEL] = sentinel, - rel_url: Union[StrOrURL, _SENTINEL] = sentinel, - headers: Union[LooseHeaders, _SENTINEL] = sentinel, - scheme: Union[str, _SENTINEL] = sentinel, - host: Union[str, _SENTINEL] = sentinel, - remote: Union[str, _SENTINEL] = sentinel, - client_max_size: Union[int, _SENTINEL] = sentinel, - ) -> "BaseRequest": - """Clone itself with replacement some attributes. - - Creates and returns a new instance of Request object. If no parameters - are given, an exact copy is returned. If a parameter is not passed, it - will reuse the one from the current request object. 
- """ - if self._read_bytes: - raise RuntimeError("Cannot clone request after reading its content") - - dct: Dict[str, Any] = {} - if method is not sentinel: - dct["method"] = method - if rel_url is not sentinel: - new_url: URL = URL(rel_url) - dct["url"] = new_url - dct["path"] = str(new_url) - if headers is not sentinel: - # a copy semantic - dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) - dct["raw_headers"] = tuple( - (k.encode("utf-8"), v.encode("utf-8")) - for k, v in dct["headers"].items() - ) - - message = self._message._replace(**dct) - - kwargs = {} - if scheme is not sentinel: - kwargs["scheme"] = scheme - if host is not sentinel: - kwargs["host"] = host - if remote is not sentinel: - kwargs["remote"] = remote - if client_max_size is sentinel: - client_max_size = self._client_max_size - - return self.__class__( - message, - self._payload, - self._protocol, - self._payload_writer, - self._task, - self._loop, - client_max_size=client_max_size, - state=self._state.copy(), - **kwargs, - ) - - @property - def task(self) -> "asyncio.Task[None]": - return self._task - - @property - def protocol(self) -> "RequestHandler": - return self._protocol - - @property - def transport(self) -> Optional[asyncio.Transport]: - if self._protocol is None: - return None - return self._protocol.transport - - @property - def writer(self) -> AbstractStreamWriter: - return self._payload_writer - - @property - def client_max_size(self) -> int: - return self._client_max_size - - @reify - def message(self) -> RawRequestMessage: - warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) - return self._message - - @reify - def rel_url(self) -> URL: - return self._rel_url - - @reify - def loop(self) -> asyncio.AbstractEventLoop: - warnings.warn( - "request.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - # MutableMapping API - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def __setitem__(self, key: str, value: Any) -> None: - self._state[key] = value - - def __delitem__(self, key: str) -> None: - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - ######## - - @reify - def secure(self) -> bool: - """A bool indicating if the request is handled with SSL.""" - return self.scheme == "https" - - @reify - def forwarded(self) -> Tuple[Mapping[str, str], ...]: - """A tuple containing all parsed Forwarded header(s). - - Makes an effort to parse Forwarded headers as specified by RFC 7239: - - - It adds one (immutable) dictionary per Forwarded 'field-value', ie - per proxy. The element corresponds to the data in the Forwarded - field-value added by the first proxy encountered by the client. Each - subsequent item corresponds to those added by later proxies. - - It checks that every value has valid syntax in general as specified - in section 4: either a 'token' or a 'quoted-string'. - - It un-escapes found escape sequences. - - It does NOT validate 'by' and 'for' contents as specified in section - 6. - - It does NOT validate 'host' contents (Host ABNF). - - It does NOT validate 'proto' contents for valid URI scheme names. 
- - Returns a tuple containing one or more immutable dicts - """ - elems = [] - for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): - length = len(field_value) - pos = 0 - need_separator = False - elem: Dict[str, str] = {} - elems.append(types.MappingProxyType(elem)) - while 0 <= pos < length: - match = _FORWARDED_PAIR_RE.match(field_value, pos) - if match is not None: # got a valid forwarded-pair - if need_separator: - # bad syntax here, skip to next comma - pos = field_value.find(",", pos) - else: - name, value, port = match.groups() - if value[0] == '"': - # quoted string: remove quotes and unescape - value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) - if port: - value += port - elem[name.lower()] = value - pos += len(match.group(0)) - need_separator = True - elif field_value[pos] == ",": # next forwarded-element - need_separator = False - elem = {} - elems.append(types.MappingProxyType(elem)) - pos += 1 - elif field_value[pos] == ";": # next forwarded-pair - need_separator = False - pos += 1 - elif field_value[pos] in " \t": - # Allow whitespace even between forwarded-pairs, though - # RFC 7239 doesn't. This simplifies code and is in line - # with Postel's law. - pos += 1 - else: - # bad syntax here, skip to next comma - pos = field_value.find(",", pos) - return tuple(elems) - - @reify - def scheme(self) -> str: - """A string representing the scheme of the request. - - Hostname is resolved in this order: - - - overridden value by .clone(scheme=new_scheme) call. - - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. - - 'http' or 'https'. - """ - if self._transport_sslcontext: - return "https" - else: - return "http" - - @reify - def method(self) -> str: - """Read only property for getting HTTP method. - - The value is upper-cased str like 'GET', 'POST', 'PUT' etc. - """ - return self._method - - @reify - def version(self) -> HttpVersion: - """Read only property for getting HTTP version of request. - - Returns aiohttp.protocol.HttpVersion instance. - """ - return self._version - - @reify - def host(self) -> str: - """Hostname of the request. - - Hostname is resolved in this order: - - - overridden value by .clone(host=new_host) call. - - HOST HTTP header - - socket.getfqdn() value - - For example, 'example.com' or 'localhost:8080'. - - For historical reasons, the port number may be included. - """ - host = self._message.headers.get(hdrs.HOST) - if host is not None: - return host - return socket.getfqdn() - - @reify - def remote(self) -> Optional[str]: - """Remote IP of client initiated HTTP request. - - The IP is resolved in this order: - - - overridden value by .clone(remote=new_remote) call. - - peername of opened socket - """ - if self._transport_peername is None: - return None - if isinstance(self._transport_peername, (list, tuple)): - return str(self._transport_peername[0]) - return str(self._transport_peername) - - @reify - def url(self) -> URL: - """The full URL of the request.""" - # authority is used here because it may include the port number - # and we want yarl to parse it correctly - return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url) - - @reify - def path(self) -> str: - """The URL including *PATH INFO* without the host or scheme. - - E.g., ``/app/blog`` - """ - return self._rel_url.path - - @reify - def path_qs(self) -> str: - """The URL including PATH_INFO and the query string. 
- - E.g, /app/blog?id=10 - """ - return str(self._rel_url) - - @reify - def raw_path(self) -> str: - """The URL including raw *PATH INFO* without the host or scheme. - - Warning, the path is unquoted and may contains non valid URL characters - - E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` - """ - return self._message.path - - @reify - def query(self) -> "MultiMapping[str]": - """A multidict with all the variables in the query string.""" - return self._rel_url.query - - @reify - def query_string(self) -> str: - """The query string in the URL. - - E.g., id=10 - """ - return self._rel_url.query_string - - @reify - def headers(self) -> CIMultiDictProxy[str]: - """A case-insensitive multidict proxy with all headers.""" - return self._headers - - @reify - def raw_headers(self) -> RawHeaders: - """A sequence of pairs for all headers.""" - return self._message.raw_headers - - @reify - def if_modified_since(self) -> Optional[datetime.datetime]: - """The value of If-Modified-Since HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) - - @reify - def if_unmodified_since(self) -> Optional[datetime.datetime]: - """The value of If-Unmodified-Since HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) - - @staticmethod - def _etag_values(etag_header: str) -> Iterator[ETag]: - """Extract `ETag` objects from raw header.""" - if etag_header == ETAG_ANY: - yield ETag( - is_weak=False, - value=ETAG_ANY, - ) - else: - for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): - is_weak, value, garbage = match.group(2, 3, 4) - # Any symbol captured by 4th group means - # that the following sequence is invalid. - if garbage: - break - - yield ETag( - is_weak=bool(is_weak), - value=value, - ) - - @classmethod - def _if_match_or_none_impl( - cls, header_value: Optional[str] - ) -> Optional[Tuple[ETag, ...]]: - if not header_value: - return None - - return tuple(cls._etag_values(header_value)) - - @reify - def if_match(self) -> Optional[Tuple[ETag, ...]]: - """The value of If-Match HTTP header, or None. - - This header is represented as a `tuple` of `ETag` objects. - """ - return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) - - @reify - def if_none_match(self) -> Optional[Tuple[ETag, ...]]: - """The value of If-None-Match HTTP header, or None. - - This header is represented as a `tuple` of `ETag` objects. - """ - return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) - - @reify - def if_range(self) -> Optional[datetime.datetime]: - """The value of If-Range HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self.headers.get(hdrs.IF_RANGE)) - - @reify - def keep_alive(self) -> bool: - """Is keepalive enabled by client?""" - return not self._message.should_close - - @reify - def cookies(self) -> Mapping[str, str]: - """Return request cookies. - - A read-only dictionary-like object. - """ - # Use parse_cookie_header for RFC 6265 compliant Cookie header parsing - # that accepts special characters in cookie names (fixes #2683) - parsed = parse_cookie_header(self.headers.get(hdrs.COOKIE, "")) - # Extract values from Morsel objects - return MappingProxyType({name: morsel.value for name, morsel in parsed}) - - @reify - def http_range(self) -> slice: - """The content of Range HTTP header. - - Return a slice instance. 
- - """ - rng = self._headers.get(hdrs.RANGE) - start, end = None, None - if rng is not None: - try: - pattern = r"^bytes=(\d*)-(\d*)$" - start, end = re.findall(pattern, rng, re.ASCII)[0] - except IndexError: # pattern was not found in header - raise ValueError("range not in acceptable format") - - end = int(end) if end else None - start = int(start) if start else None - - if start is None and end is not None: - # end with no start is to return tail of content - start = -end - end = None - - if start is not None and end is not None: - # end is inclusive in range header, exclusive for slice - end += 1 - - if start >= end: - raise ValueError("start cannot be after end") - - if start is end is None: # No valid range supplied - raise ValueError("No start or end of range specified") - - return slice(start, end, 1) - - @reify - def content(self) -> StreamReader: - """Return raw payload stream.""" - return self._payload - - @property - def has_body(self) -> bool: - """Return True if request's HTTP BODY can be read, False otherwise.""" - warnings.warn( - "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 - ) - return not self._payload.at_eof() - - @property - def can_read_body(self) -> bool: - """Return True if request's HTTP BODY can be read, False otherwise.""" - return not self._payload.at_eof() - - @reify - def body_exists(self) -> bool: - """Return True if request has HTTP BODY, False otherwise.""" - return type(self._payload) is not EmptyStreamReader - - async def release(self) -> None: - """Release request. - - Eat unread part of HTTP BODY if present. - """ - while not self._payload.at_eof(): - await self._payload.readany() - - async def read(self) -> bytes: - """Read request body if present. - - Returns bytes object with full request content. 
- """ - if self._read_bytes is None: - body = bytearray() - while True: - chunk = await self._payload.readany() - body.extend(chunk) - if self._client_max_size: - body_size = len(body) - if body_size >= self._client_max_size: - raise HTTPRequestEntityTooLarge( - max_size=self._client_max_size, actual_size=body_size - ) - if not chunk: - break - self._read_bytes = bytes(body) - return self._read_bytes - - async def text(self) -> str: - """Return BODY as text using encoding from .charset.""" - bytes_body = await self.read() - encoding = self.charset or "utf-8" - return bytes_body.decode(encoding) - - async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: - """Return BODY as JSON.""" - body = await self.text() - return loads(body) - - async def multipart(self) -> MultipartReader: - """Return async iterator to process BODY as multipart.""" - return MultipartReader(self._headers, self._payload) - - async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": - """Return POST parameters.""" - if self._post is not None: - return self._post - if self._method not in self.POST_METHODS: - self._post = MultiDictProxy(MultiDict()) - return self._post - - content_type = self.content_type - if content_type not in ( - "", - "application/x-www-form-urlencoded", - "multipart/form-data", - ): - self._post = MultiDictProxy(MultiDict()) - return self._post - - out: MultiDict[Union[str, bytes, FileField]] = MultiDict() - - if content_type == "multipart/form-data": - multipart = await self.multipart() - max_size = self._client_max_size - - size = 0 - while (field := await multipart.next()) is not None: - field_ct = field.headers.get(hdrs.CONTENT_TYPE) - - if isinstance(field, BodyPartReader): - if field.name is None: - raise ValueError("Multipart field missing name.") - - # Note that according to RFC 7578, the Content-Type header - # is optional, even for files, so we can't assume it's - # present. 
- # https://tools.ietf.org/html/rfc7578#section-4.4 - if field.filename: - # store file in temp file - tmp = await self._loop.run_in_executor( - None, tempfile.TemporaryFile - ) - chunk = await field.read_chunk(size=2**16) - while chunk: - chunk = await field.decode(chunk) - await self._loop.run_in_executor(None, tmp.write, chunk) - size += len(chunk) - if 0 < max_size < size: - await self._loop.run_in_executor(None, tmp.close) - raise HTTPRequestEntityTooLarge( - max_size=max_size, actual_size=size - ) - chunk = await field.read_chunk(size=2**16) - await self._loop.run_in_executor(None, tmp.seek, 0) - - if field_ct is None: - field_ct = "application/octet-stream" - - ff = FileField( - field.name, - field.filename, - cast(io.BufferedReader, tmp), - field_ct, - field.headers, - ) - out.add(field.name, ff) - else: - # deal with ordinary data - value = await field.read(decode=True) - if field_ct is None or field_ct.startswith("text/"): - charset = field.get_charset(default="utf-8") - out.add(field.name, value.decode(charset)) - else: - out.add(field.name, value) - size += len(value) - if 0 < max_size < size: - raise HTTPRequestEntityTooLarge( - max_size=max_size, actual_size=size - ) - else: - raise ValueError( - "To decode nested multipart you need to use custom reader", - ) - else: - data = await self.read() - if data: - charset = self.charset or "utf-8" - out.extend( - parse_qsl( - data.rstrip().decode(charset), - keep_blank_values=True, - encoding=charset, - ) - ) - - self._post = MultiDictProxy(out) - return self._post - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """Extra info from protocol transport""" - protocol = self._protocol - if protocol is None: - return default - - transport = protocol.transport - if transport is None: - return default - - return transport.get_extra_info(name, default) - - def __repr__(self) -> str: - ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( - "ascii" - ) - return "<{} {} {} >".format( - self.__class__.__name__, self._method, ascii_encodable_path - ) - - def __eq__(self, other: object) -> bool: - return id(self) == id(other) - - def __bool__(self) -> bool: - return True - - async def _prepare_hook(self, response: StreamResponse) -> None: - return - - def _cancel(self, exc: BaseException) -> None: - set_exception(self._payload, exc) - - def _finish(self) -> None: - if self._post is None or self.content_type != "multipart/form-data": - return - - # NOTE: Release file descriptors for the - # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` - # NOTE: instances of files sent within multipart request body - # NOTE: via HTTP POST request. 
- for file_name, file_field_object in self._post.items(): - if isinstance(file_field_object, FileField): - file_field_object.file.close() - - -class Request(BaseRequest): - - ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) - - _match_info: Optional["UrlMappingMatchInfo"] = None - - if DEBUG: - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom {}.{} attribute " - "is discouraged".format(self.__class__.__name__, name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - def clone( - self, - *, - method: Union[str, _SENTINEL] = sentinel, - rel_url: Union[StrOrURL, _SENTINEL] = sentinel, - headers: Union[LooseHeaders, _SENTINEL] = sentinel, - scheme: Union[str, _SENTINEL] = sentinel, - host: Union[str, _SENTINEL] = sentinel, - remote: Union[str, _SENTINEL] = sentinel, - client_max_size: Union[int, _SENTINEL] = sentinel, - ) -> "Request": - ret = super().clone( - method=method, - rel_url=rel_url, - headers=headers, - scheme=scheme, - host=host, - remote=remote, - client_max_size=client_max_size, - ) - new_ret = cast(Request, ret) - new_ret._match_info = self._match_info - return new_ret - - @reify - def match_info(self) -> "UrlMappingMatchInfo": - """Result of route resolving.""" - match_info = self._match_info - assert match_info is not None - return match_info - - @property - def app(self) -> "Application": - """Application instance.""" - match_info = self._match_info - assert match_info is not None - return match_info.current_app - - @property - def config_dict(self) -> ChainMapProxy: - match_info = self._match_info - assert match_info is not None - lst = match_info.apps - app = self.app - idx = lst.index(app) - sublist = list(reversed(lst[: idx + 1])) - return ChainMapProxy(sublist) - - async def _prepare_hook(self, response: StreamResponse) -> None: - match_info = self._match_info - if match_info is None: - return - for app in match_info._apps: - if on_response_prepare := app.on_response_prepare: - await on_response_prepare.send(self, response) diff --git a/venv/Lib/site-packages/aiohttp/web_response.py b/venv/Lib/site-packages/aiohttp/web_response.py deleted file mode 100644 index e5f8b6c..0000000 --- a/venv/Lib/site-packages/aiohttp/web_response.py +++ /dev/null @@ -1,856 +0,0 @@ -import asyncio -import collections.abc -import datetime -import enum -import json -import math -import time -import warnings -from concurrent.futures import Executor -from http import HTTPStatus -from http.cookies import SimpleCookie -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - MutableMapping, - Optional, - Union, - cast, -) - -from multidict import CIMultiDict, istr - -from . 
import hdrs, payload -from .abc import AbstractStreamWriter -from .compression_utils import ZLibCompressor -from .helpers import ( - ETAG_ANY, - QUOTED_ETAG_RE, - ETag, - HeadersMixin, - must_be_empty_body, - parse_http_date, - rfc822_formatted_time, - sentinel, - should_remove_content_length, - validate_etag_value, -) -from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11 -from .payload import Payload -from .typedefs import JSONEncoder, LooseHeaders - -REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} -LARGE_BODY_SIZE = 1024**2 - -__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") - - -if TYPE_CHECKING: - from .web_request import BaseRequest - - BaseClass = MutableMapping[str, Any] -else: - BaseClass = collections.abc.MutableMapping - - -# TODO(py311): Convert to StrEnum for wider use -class ContentCoding(enum.Enum): - # The content codings that we have support for. - # - # Additional registered codings are listed at: - # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding - deflate = "deflate" - gzip = "gzip" - identity = "identity" - - -CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding} - -############################################################ -# HTTP Response classes -############################################################ - - -class StreamResponse(BaseClass, HeadersMixin): - - _body: Union[None, bytes, bytearray, Payload] - _length_check = True - _body = None - _keep_alive: Optional[bool] = None - _chunked: bool = False - _compression: bool = False - _compression_strategy: Optional[int] = None - _compression_force: Optional[ContentCoding] = None - _req: Optional["BaseRequest"] = None - _payload_writer: Optional[AbstractStreamWriter] = None - _eof_sent: bool = False - _must_be_empty_body: Optional[bool] = None - _body_length = 0 - _cookies: Optional[SimpleCookie] = None - _send_headers_immediately = True - - def __init__( - self, - *, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - _real_headers: Optional[CIMultiDict[str]] = None, - ) -> None: - """Initialize a new stream response object. - - _real_headers is an internal parameter used to pass a pre-populated - headers object. It is used by the `Response` class to avoid copying - the headers when creating a new response object. It is not intended - to be used by external code. 
- """ - self._state: Dict[str, Any] = {} - - if _real_headers is not None: - self._headers = _real_headers - elif headers is not None: - self._headers: CIMultiDict[str] = CIMultiDict(headers) - else: - self._headers = CIMultiDict() - - self._set_status(status, reason) - - @property - def prepared(self) -> bool: - return self._eof_sent or self._payload_writer is not None - - @property - def task(self) -> "Optional[asyncio.Task[None]]": - if self._req: - return self._req.task - else: - return None - - @property - def status(self) -> int: - return self._status - - @property - def chunked(self) -> bool: - return self._chunked - - @property - def compression(self) -> bool: - return self._compression - - @property - def reason(self) -> str: - return self._reason - - def set_status( - self, - status: int, - reason: Optional[str] = None, - ) -> None: - assert ( - not self.prepared - ), "Cannot change the response status code after the headers have been sent" - self._set_status(status, reason) - - def _set_status(self, status: int, reason: Optional[str]) -> None: - self._status = int(status) - if reason is None: - reason = REASON_PHRASES.get(self._status, "") - elif "\n" in reason: - raise ValueError("Reason cannot contain \\n") - self._reason = reason - - @property - def keep_alive(self) -> Optional[bool]: - return self._keep_alive - - def force_close(self) -> None: - self._keep_alive = False - - @property - def body_length(self) -> int: - return self._body_length - - @property - def output_length(self) -> int: - warnings.warn("output_length is deprecated", DeprecationWarning) - assert self._payload_writer - return self._payload_writer.buffer_size - - def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: - """Enables automatic chunked transfer encoding.""" - if hdrs.CONTENT_LENGTH in self._headers: - raise RuntimeError( - "You can't enable chunked encoding when a content length is set" - ) - if chunk_size is not None: - warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) - self._chunked = True - - def enable_compression( - self, - force: Optional[Union[bool, ContentCoding]] = None, - strategy: Optional[int] = None, - ) -> None: - """Enables response compression encoding.""" - # Backwards compatibility for when force was a bool <0.17. - if isinstance(force, bool): - force = ContentCoding.deflate if force else ContentCoding.identity - warnings.warn( - "Using boolean for force is deprecated #3318", DeprecationWarning - ) - elif force is not None: - assert isinstance( - force, ContentCoding - ), "force should one of None, bool or ContentEncoding" - - self._compression = True - self._compression_force = force - self._compression_strategy = strategy - - @property - def headers(self) -> "CIMultiDict[str]": - return self._headers - - @property - def cookies(self) -> SimpleCookie: - if self._cookies is None: - self._cookies = SimpleCookie() - return self._cookies - - def set_cookie( - self, - name: str, - value: str, - *, - expires: Optional[str] = None, - domain: Optional[str] = None, - max_age: Optional[Union[int, str]] = None, - path: str = "/", - secure: Optional[bool] = None, - httponly: Optional[bool] = None, - version: Optional[str] = None, - samesite: Optional[str] = None, - partitioned: Optional[bool] = None, - ) -> None: - """Set or update response cookie. - - Sets new cookie or updates existent with new value. - Also updates only those params which are not None. 
- """ - if self._cookies is None: - self._cookies = SimpleCookie() - - self._cookies[name] = value - c = self._cookies[name] - - if expires is not None: - c["expires"] = expires - elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": - del c["expires"] - - if domain is not None: - c["domain"] = domain - - if max_age is not None: - c["max-age"] = str(max_age) - elif "max-age" in c: - del c["max-age"] - - c["path"] = path - - if secure is not None: - c["secure"] = secure - if httponly is not None: - c["httponly"] = httponly - if version is not None: - c["version"] = version - if samesite is not None: - c["samesite"] = samesite - - if partitioned is not None: - c["partitioned"] = partitioned - - def del_cookie( - self, - name: str, - *, - domain: Optional[str] = None, - path: str = "/", - secure: Optional[bool] = None, - httponly: Optional[bool] = None, - samesite: Optional[str] = None, - ) -> None: - """Delete cookie. - - Creates new empty expired cookie. - """ - # TODO: do we need domain/path here? - if self._cookies is not None: - self._cookies.pop(name, None) - self.set_cookie( - name, - "", - max_age=0, - expires="Thu, 01 Jan 1970 00:00:00 GMT", - domain=domain, - path=path, - secure=secure, - httponly=httponly, - samesite=samesite, - ) - - @property - def content_length(self) -> Optional[int]: - # Just a placeholder for adding setter - return super().content_length - - @content_length.setter - def content_length(self, value: Optional[int]) -> None: - if value is not None: - value = int(value) - if self._chunked: - raise RuntimeError( - "You can't set content length when chunked encoding is enable" - ) - self._headers[hdrs.CONTENT_LENGTH] = str(value) - else: - self._headers.pop(hdrs.CONTENT_LENGTH, None) - - @property - def content_type(self) -> str: - # Just a placeholder for adding setter - return super().content_type - - @content_type.setter - def content_type(self, value: str) -> None: - self.content_type # read header values if needed - self._content_type = str(value) - self._generate_content_type_header() - - @property - def charset(self) -> Optional[str]: - # Just a placeholder for adding setter - return super().charset - - @charset.setter - def charset(self, value: Optional[str]) -> None: - ctype = self.content_type # read header values if needed - if ctype == "application/octet-stream": - raise RuntimeError( - "Setting charset for application/octet-stream " - "doesn't make sense, setup content_type first" - ) - assert self._content_dict is not None - if value is None: - self._content_dict.pop("charset", None) - else: - self._content_dict["charset"] = str(value).lower() - self._generate_content_type_header() - - @property - def last_modified(self) -> Optional[datetime.datetime]: - """The value of Last-Modified HTTP header, or None. - - This header is represented as a `datetime` object. 
- """ - return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED)) - - @last_modified.setter - def last_modified( - self, value: Optional[Union[int, float, datetime.datetime, str]] - ) -> None: - if value is None: - self._headers.pop(hdrs.LAST_MODIFIED, None) - elif isinstance(value, (int, float)): - self._headers[hdrs.LAST_MODIFIED] = time.strftime( - "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) - ) - elif isinstance(value, datetime.datetime): - self._headers[hdrs.LAST_MODIFIED] = time.strftime( - "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() - ) - elif isinstance(value, str): - self._headers[hdrs.LAST_MODIFIED] = value - else: - msg = f"Unsupported type for last_modified: {type(value).__name__}" - raise TypeError(msg) - - @property - def etag(self) -> Optional[ETag]: - quoted_value = self._headers.get(hdrs.ETAG) - if not quoted_value: - return None - elif quoted_value == ETAG_ANY: - return ETag(value=ETAG_ANY) - match = QUOTED_ETAG_RE.fullmatch(quoted_value) - if not match: - return None - is_weak, value = match.group(1, 2) - return ETag( - is_weak=bool(is_weak), - value=value, - ) - - @etag.setter - def etag(self, value: Optional[Union[ETag, str]]) -> None: - if value is None: - self._headers.pop(hdrs.ETAG, None) - elif (isinstance(value, str) and value == ETAG_ANY) or ( - isinstance(value, ETag) and value.value == ETAG_ANY - ): - self._headers[hdrs.ETAG] = ETAG_ANY - elif isinstance(value, str): - validate_etag_value(value) - self._headers[hdrs.ETAG] = f'"{value}"' - elif isinstance(value, ETag) and isinstance(value.value, str): - validate_etag_value(value.value) - hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"' - self._headers[hdrs.ETAG] = hdr_value - else: - raise ValueError( - f"Unsupported etag type: {type(value)}. 
" - f"etag must be str, ETag or None" - ) - - def _generate_content_type_header( - self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE - ) -> None: - assert self._content_dict is not None - assert self._content_type is not None - params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) - if params: - ctype = self._content_type + "; " + params - else: - ctype = self._content_type - self._headers[CONTENT_TYPE] = ctype - - async def _do_start_compression(self, coding: ContentCoding) -> None: - if coding is ContentCoding.identity: - return - assert self._payload_writer is not None - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._payload_writer.enable_compression( - coding.value, self._compression_strategy - ) - # Compressed payload may have different content length, - # remove the header - self._headers.popall(hdrs.CONTENT_LENGTH, None) - - async def _start_compression(self, request: "BaseRequest") -> None: - if self._compression_force: - await self._do_start_compression(self._compression_force) - return - # Encoding comparisons should be case-insensitive - # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for value, coding in CONTENT_CODINGS.items(): - if value in accept_encoding: - await self._do_start_compression(coding) - return - - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - if self._eof_sent: - return None - if self._payload_writer is not None: - return self._payload_writer - self._must_be_empty_body = must_be_empty_body(request.method, self.status) - return await self._start(request) - - async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - self._req = request - writer = self._payload_writer = request._payload_writer - - await self._prepare_headers() - await request._prepare_hook(self) - await self._write_headers() - - return writer - - async def _prepare_headers(self) -> None: - request = self._req - assert request is not None - writer = self._payload_writer - assert writer is not None - keep_alive = self._keep_alive - if keep_alive is None: - keep_alive = request.keep_alive - self._keep_alive = keep_alive - - version = request.version - - headers = self._headers - if self._cookies: - for cookie in self._cookies.values(): - value = cookie.output(header="")[1:] - headers.add(hdrs.SET_COOKIE, value) - - if self._compression: - await self._start_compression(request) - - if self._chunked: - if version != HttpVersion11: - raise RuntimeError( - "Using chunked encoding is forbidden " - "for HTTP/{0.major}.{0.minor}".format(request.version) - ) - if not self._must_be_empty_body: - writer.enable_chunking() - headers[hdrs.TRANSFER_ENCODING] = "chunked" - elif self._length_check: # Disabled for WebSockets - writer.length = self.content_length - if writer.length is None: - if version >= HttpVersion11: - if not self._must_be_empty_body: - writer.enable_chunking() - headers[hdrs.TRANSFER_ENCODING] = "chunked" - elif not self._must_be_empty_body: - keep_alive = False - - # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 - # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 - if self._must_be_empty_body: - if hdrs.CONTENT_LENGTH in headers and should_remove_content_length( - request.method, self.status - ): - del headers[hdrs.CONTENT_LENGTH] - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10 - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 - if hdrs.TRANSFER_ENCODING in headers: - del 
headers[hdrs.TRANSFER_ENCODING] - elif (writer.length if self._length_check else self.content_length) != 0: - # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 - headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") - headers.setdefault(hdrs.DATE, rfc822_formatted_time()) - headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) - - # connection header - if hdrs.CONNECTION not in headers: - if keep_alive: - if version == HttpVersion10: - headers[hdrs.CONNECTION] = "keep-alive" - elif version == HttpVersion11: - headers[hdrs.CONNECTION] = "close" - - async def _write_headers(self) -> None: - request = self._req - assert request is not None - writer = self._payload_writer - assert writer is not None - # status line - version = request.version - status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" - await writer.write_headers(status_line, self._headers) - # Send headers immediately if not opted into buffering - if self._send_headers_immediately: - writer.send_headers() - - async def write(self, data: Union[bytes, bytearray, memoryview]) -> None: - assert isinstance( - data, (bytes, bytearray, memoryview) - ), "data argument must be byte-ish (%r)" % type(data) - - if self._eof_sent: - raise RuntimeError("Cannot call write() after write_eof()") - if self._payload_writer is None: - raise RuntimeError("Cannot call write() before prepare()") - - await self._payload_writer.write(data) - - async def drain(self) -> None: - assert not self._eof_sent, "EOF has already been sent" - assert self._payload_writer is not None, "Response has not been started" - warnings.warn( - "drain method is deprecated, use await resp.write()", - DeprecationWarning, - stacklevel=2, - ) - await self._payload_writer.drain() - - async def write_eof(self, data: bytes = b"") -> None: - assert isinstance( - data, (bytes, bytearray, memoryview) - ), "data argument must be byte-ish (%r)" % type(data) - - if self._eof_sent: - return - - assert self._payload_writer is not None, "Response has not been started" - - await self._payload_writer.write_eof(data) - self._eof_sent = True - self._req = None - self._body_length = self._payload_writer.output_size - self._payload_writer = None - - def __repr__(self) -> str: - if self._eof_sent: - info = "eof" - elif self.prepared: - assert self._req is not None - info = f"{self._req.method} {self._req.path} " - else: - info = "not prepared" - return f"<{self.__class__.__name__} {self.reason} {info}>" - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def __setitem__(self, key: str, value: Any) -> None: - self._state[key] = value - - def __delitem__(self, key: str) -> None: - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - def __hash__(self) -> int: - return hash(id(self)) - - def __eq__(self, other: object) -> bool: - return self is other - - def __bool__(self) -> bool: - return True - - -class Response(StreamResponse): - - _compressed_body: Optional[bytes] = None - _send_headers_immediately = False - - def __init__( - self, - *, - body: Any = None, - status: int = 200, - reason: Optional[str] = None, - text: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: Optional[str] = None, - charset: Optional[str] = None, - zlib_executor_size: Optional[int] = None, - zlib_executor: Optional[Executor] = None, - ) -> None: - if body is not None and text is not None: - raise ValueError("body and text are not allowed 
together") - - if headers is None: - real_headers: CIMultiDict[str] = CIMultiDict() - else: - real_headers = CIMultiDict(headers) - - if content_type is not None and "charset" in content_type: - raise ValueError("charset must not be in content_type argument") - - if text is not None: - if hdrs.CONTENT_TYPE in real_headers: - if content_type or charset: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - else: - # fast path for filling headers - if not isinstance(text, str): - raise TypeError("text argument must be str (%r)" % type(text)) - if content_type is None: - content_type = "text/plain" - if charset is None: - charset = "utf-8" - real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset - body = text.encode(charset) - text = None - elif hdrs.CONTENT_TYPE in real_headers: - if content_type is not None or charset is not None: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - elif content_type is not None: - if charset is not None: - content_type += "; charset=" + charset - real_headers[hdrs.CONTENT_TYPE] = content_type - - super().__init__(status=status, reason=reason, _real_headers=real_headers) - - if text is not None: - self.text = text - else: - self.body = body - - self._zlib_executor_size = zlib_executor_size - self._zlib_executor = zlib_executor - - @property - def body(self) -> Optional[Union[bytes, Payload]]: - return self._body - - @body.setter - def body(self, body: Any) -> None: - if body is None: - self._body = None - elif isinstance(body, (bytes, bytearray)): - self._body = body - else: - try: - self._body = body = payload.PAYLOAD_REGISTRY.get(body) - except payload.LookupError: - raise ValueError("Unsupported body type %r" % type(body)) - - headers = self._headers - - # set content-type - if hdrs.CONTENT_TYPE not in headers: - headers[hdrs.CONTENT_TYPE] = body.content_type - - # copy payload headers - if body.headers: - for key, value in body.headers.items(): - if key not in headers: - headers[key] = value - - self._compressed_body = None - - @property - def text(self) -> Optional[str]: - if self._body is None: - return None - # Note: When _body is a Payload (e.g. 
FilePayload), this may do blocking I/O - # This is generally safe as most common payloads (BytesPayload, StringPayload) - # don't do blocking I/O, but be careful with file-based payloads - return self._body.decode(self.charset or "utf-8") - - @text.setter - def text(self, text: str) -> None: - assert text is None or isinstance( - text, str - ), "text argument must be str (%r)" % type(text) - - if self.content_type == "application/octet-stream": - self.content_type = "text/plain" - if self.charset is None: - self.charset = "utf-8" - - self._body = text.encode(self.charset) - self._compressed_body = None - - @property - def content_length(self) -> Optional[int]: - if self._chunked: - return None - - if hdrs.CONTENT_LENGTH in self._headers: - return int(self._headers[hdrs.CONTENT_LENGTH]) - - if self._compressed_body is not None: - # Return length of the compressed body - return len(self._compressed_body) - elif isinstance(self._body, Payload): - # A payload without content length, or a compressed payload - return None - elif self._body is not None: - return len(self._body) - else: - return 0 - - @content_length.setter - def content_length(self, value: Optional[int]) -> None: - raise RuntimeError("Content length is set automatically") - - async def write_eof(self, data: bytes = b"") -> None: - if self._eof_sent: - return - if self._compressed_body is None: - body: Optional[Union[bytes, Payload]] = self._body - else: - body = self._compressed_body - assert not data, f"data arg is not supported, got {data!r}" - assert self._req is not None - assert self._payload_writer is not None - if body is None or self._must_be_empty_body: - await super().write_eof() - elif isinstance(self._body, Payload): - await self._body.write(self._payload_writer) - await self._body.close() - await super().write_eof() - else: - await super().write_eof(cast(bytes, body)) - - async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - if hdrs.CONTENT_LENGTH in self._headers: - if should_remove_content_length(request.method, self.status): - del self._headers[hdrs.CONTENT_LENGTH] - elif not self._chunked: - if isinstance(self._body, Payload): - if (size := self._body.size) is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(size) - else: - body_len = len(self._body) if self._body else "0" - # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 - if body_len != "0" or ( - self.status != 304 and request.method not in hdrs.METH_HEAD_ALL - ): - self._headers[hdrs.CONTENT_LENGTH] = str(body_len) - - return await super()._start(request) - - async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._chunked or isinstance(self._body, Payload): - return await super()._do_start_compression(coding) - if coding is ContentCoding.identity: - return - # Instead of using _payload_writer.enable_compression, - # compress the whole body - compressor = ZLibCompressor( - encoding=coding.value, - max_sync_chunk_size=self._zlib_executor_size, - executor=self._zlib_executor, - ) - assert self._body is not None - if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE: - warnings.warn( - "Synchronous compression of large response bodies " - f"({len(self._body)} bytes) might block the async event loop. " - "Consider providing a custom value to zlib_executor_size/" - "zlib_executor response properties or disabling compression on it." 
- ) - self._compressed_body = ( - await compressor.compress(self._body) + compressor.flush() - ) - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) - - -def json_response( - data: Any = sentinel, - *, - text: Optional[str] = None, - body: Optional[bytes] = None, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: str = "application/json", - dumps: JSONEncoder = json.dumps, -) -> Response: - if data is not sentinel: - if text or body: - raise ValueError("only one of data, text, or body should be specified") - else: - text = dumps(data) - return Response( - text=text, - body=body, - status=status, - reason=reason, - headers=headers, - content_type=content_type, - ) diff --git a/venv/Lib/site-packages/aiohttp/web_routedef.py b/venv/Lib/site-packages/aiohttp/web_routedef.py deleted file mode 100644 index f51b6cd..0000000 --- a/venv/Lib/site-packages/aiohttp/web_routedef.py +++ /dev/null @@ -1,214 +0,0 @@ -import abc -import os # noqa -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterator, - List, - Optional, - Sequence, - Type, - Union, - overload, -) - -import attr - -from . import hdrs -from .abc import AbstractView -from .typedefs import Handler, PathLike - -if TYPE_CHECKING: - from .web_request import Request - from .web_response import StreamResponse - from .web_urldispatcher import AbstractRoute, UrlDispatcher -else: - Request = StreamResponse = UrlDispatcher = AbstractRoute = None - - -__all__ = ( - "AbstractRouteDef", - "RouteDef", - "StaticDef", - "RouteTableDef", - "head", - "options", - "get", - "post", - "patch", - "put", - "delete", - "route", - "view", - "static", -) - - -class AbstractRouteDef(abc.ABC): - @abc.abstractmethod - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - pass # pragma: no cover - - -_HandlerType = Union[Type[AbstractView], Handler] - - -@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) -class RouteDef(AbstractRouteDef): - method: str - path: str - handler: _HandlerType - kwargs: Dict[str, Any] - - def __repr__(self) -> str: - info = [] - for name, value in sorted(self.kwargs.items()): - info.append(f", {name}={value!r}") - return " {handler.__name__!r}{info}>".format( - method=self.method, path=self.path, handler=self.handler, info="".join(info) - ) - - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - if self.method in hdrs.METH_ALL: - reg = getattr(router, "add_" + self.method.lower()) - return [reg(self.path, self.handler, **self.kwargs)] - else: - return [ - router.add_route(self.method, self.path, self.handler, **self.kwargs) - ] - - -@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) -class StaticDef(AbstractRouteDef): - prefix: str - path: PathLike - kwargs: Dict[str, Any] - - def __repr__(self) -> str: - info = [] - for name, value in sorted(self.kwargs.items()): - info.append(f", {name}={value!r}") - return " {path}{info}>".format( - prefix=self.prefix, path=self.path, info="".join(info) - ) - - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - resource = router.add_static(self.prefix, self.path, **self.kwargs) - routes = resource.get_info().get("routes", {}) - return list(routes.values()) - - -def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return RouteDef(method, path, handler, kwargs) - - -def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return 
route(hdrs.METH_HEAD, path, handler, **kwargs) - - -def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_OPTIONS, path, handler, **kwargs) - - -def get( - path: str, - handler: _HandlerType, - *, - name: Optional[str] = None, - allow_head: bool = True, - **kwargs: Any, -) -> RouteDef: - return route( - hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs - ) - - -def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_POST, path, handler, **kwargs) - - -def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_PUT, path, handler, **kwargs) - - -def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_PATCH, path, handler, **kwargs) - - -def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_DELETE, path, handler, **kwargs) - - -def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef: - return route(hdrs.METH_ANY, path, handler, **kwargs) - - -def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef: - return StaticDef(prefix, path, kwargs) - - -_Deco = Callable[[_HandlerType], _HandlerType] - - -class RouteTableDef(Sequence[AbstractRouteDef]): - """Route definition table""" - - def __init__(self) -> None: - self._items: List[AbstractRouteDef] = [] - - def __repr__(self) -> str: - return f"" - - @overload - def __getitem__(self, index: int) -> AbstractRouteDef: ... - - @overload - def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... - - def __getitem__(self, index): # type: ignore[no-untyped-def] - return self._items[index] - - def __iter__(self) -> Iterator[AbstractRouteDef]: - return iter(self._items) - - def __len__(self) -> int: - return len(self._items) - - def __contains__(self, item: object) -> bool: - return item in self._items - - def route(self, method: str, path: str, **kwargs: Any) -> _Deco: - def inner(handler: _HandlerType) -> _HandlerType: - self._items.append(RouteDef(method, path, handler, kwargs)) - return handler - - return inner - - def head(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_HEAD, path, **kwargs) - - def get(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_GET, path, **kwargs) - - def post(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_POST, path, **kwargs) - - def put(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_PUT, path, **kwargs) - - def patch(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_PATCH, path, **kwargs) - - def delete(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_DELETE, path, **kwargs) - - def options(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_OPTIONS, path, **kwargs) - - def view(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_ANY, path, **kwargs) - - def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None: - self._items.append(StaticDef(prefix, path, kwargs)) diff --git a/venv/Lib/site-packages/aiohttp/web_runner.py b/venv/Lib/site-packages/aiohttp/web_runner.py deleted file mode 100644 index bcfec72..0000000 --- a/venv/Lib/site-packages/aiohttp/web_runner.py +++ /dev/null @@ -1,399 +0,0 @@ -import asyncio -import signal -import socket -import warnings -from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, List, Optional, Set - -from yarl 
import URL - -from .typedefs import PathLike -from .web_app import Application -from .web_server import Server - -if TYPE_CHECKING: - from ssl import SSLContext -else: - try: - from ssl import SSLContext - except ImportError: # pragma: no cover - SSLContext = object # type: ignore[misc,assignment] - -__all__ = ( - "BaseSite", - "TCPSite", - "UnixSite", - "NamedPipeSite", - "SockSite", - "BaseRunner", - "AppRunner", - "ServerRunner", - "GracefulExit", -) - - -class GracefulExit(SystemExit): - code = 1 - - -def _raise_graceful_exit() -> None: - raise GracefulExit() - - -class BaseSite(ABC): - __slots__ = ("_runner", "_ssl_context", "_backlog", "_server") - - def __init__( - self, - runner: "BaseRunner", - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - if runner.server is None: - raise RuntimeError("Call runner.setup() before making a site") - if shutdown_timeout != 60.0: - msg = "shutdown_timeout should be set on BaseRunner" - warnings.warn(msg, DeprecationWarning, stacklevel=2) - runner._shutdown_timeout = shutdown_timeout - self._runner = runner - self._ssl_context = ssl_context - self._backlog = backlog - self._server: Optional[asyncio.AbstractServer] = None - - @property - @abstractmethod - def name(self) -> str: - pass # pragma: no cover - - @abstractmethod - async def start(self) -> None: - self._runner._reg_site(self) - - async def stop(self) -> None: - self._runner._check_site(self) - if self._server is not None: # Maybe not started yet - self._server.close() - - self._runner._unreg_site(self) - - -class TCPSite(BaseSite): - __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port") - - def __init__( - self, - runner: "BaseRunner", - host: Optional[str] = None, - port: Optional[int] = None, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._host = host - if port is None: - port = 8443 if self._ssl_context else 8080 - self._port = port - self._reuse_address = reuse_address - self._reuse_port = reuse_port - - @property - def name(self) -> str: - scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if not self._host else self._host - return str(URL.build(scheme=scheme, host=host, port=self._port)) - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_server( - server, - self._host, - self._port, - ssl=self._ssl_context, - backlog=self._backlog, - reuse_address=self._reuse_address, - reuse_port=self._reuse_port, - ) - - -class UnixSite(BaseSite): - __slots__ = ("_path",) - - def __init__( - self, - runner: "BaseRunner", - path: PathLike, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._path = path - - @property - def name(self) -> str: - scheme = "https" if self._ssl_context else "http" - return f"{scheme}://unix:{self._path}:" - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await 
loop.create_unix_server( - server, - self._path, - ssl=self._ssl_context, - backlog=self._backlog, - ) - - -class NamedPipeSite(BaseSite): - __slots__ = ("_path",) - - def __init__( - self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 - ) -> None: - loop = asyncio.get_event_loop() - if not isinstance( - loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] - ): - raise RuntimeError( - "Named Pipes only available in proactor loop under windows" - ) - super().__init__(runner, shutdown_timeout=shutdown_timeout) - self._path = path - - @property - def name(self) -> str: - return self._path - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - _server = await loop.start_serving_pipe( # type: ignore[attr-defined] - server, self._path - ) - self._server = _server[0] - - -class SockSite(BaseSite): - __slots__ = ("_sock", "_name") - - def __init__( - self, - runner: "BaseRunner", - sock: socket.socket, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._sock = sock - scheme = "https" if self._ssl_context else "http" - if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: - name = f"{scheme}://unix:{sock.getsockname()}:" - else: - host, port = sock.getsockname()[:2] - name = str(URL.build(scheme=scheme, host=host, port=port)) - self._name = name - - @property - def name(self) -> str: - return self._name - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_server( - server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog - ) - - -class BaseRunner(ABC): - __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout") - - def __init__( - self, - *, - handle_signals: bool = False, - shutdown_timeout: float = 60.0, - **kwargs: Any, - ) -> None: - self._handle_signals = handle_signals - self._kwargs = kwargs - self._server: Optional[Server] = None - self._sites: List[BaseSite] = [] - self._shutdown_timeout = shutdown_timeout - - @property - def server(self) -> Optional[Server]: - return self._server - - @property - def addresses(self) -> List[Any]: - ret: List[Any] = [] - for site in self._sites: - server = site._server - if server is not None: - sockets = server.sockets # type: ignore[attr-defined] - if sockets is not None: - for sock in sockets: - ret.append(sock.getsockname()) - return ret - - @property - def sites(self) -> Set[BaseSite]: - return set(self._sites) - - async def setup(self) -> None: - loop = asyncio.get_event_loop() - - if self._handle_signals: - try: - loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) - loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) - except NotImplementedError: # pragma: no cover - # add_signal_handler is not implemented on Windows - pass - - self._server = await self._make_server() - - @abstractmethod - async def shutdown(self) -> None: - """Call any shutdown hooks to help server close gracefully.""" - - async def cleanup(self) -> None: - # The loop over sites is intentional, an exception on gather() - # leaves self._sites in unpredictable state. 
- # The loop guaranties that a site is either deleted on success or - # still present on failure - for site in list(self._sites): - await site.stop() - - if self._server: # If setup succeeded - # Yield to event loop to ensure incoming requests prior to stopping the sites - # have all started to be handled before we proceed to close idle connections. - await asyncio.sleep(0) - self._server.pre_shutdown() - await self.shutdown() - await self._server.shutdown(self._shutdown_timeout) - await self._cleanup_server() - - self._server = None - if self._handle_signals: - loop = asyncio.get_running_loop() - try: - loop.remove_signal_handler(signal.SIGINT) - loop.remove_signal_handler(signal.SIGTERM) - except NotImplementedError: # pragma: no cover - # remove_signal_handler is not implemented on Windows - pass - - @abstractmethod - async def _make_server(self) -> Server: - pass # pragma: no cover - - @abstractmethod - async def _cleanup_server(self) -> None: - pass # pragma: no cover - - def _reg_site(self, site: BaseSite) -> None: - if site in self._sites: - raise RuntimeError(f"Site {site} is already registered in runner {self}") - self._sites.append(site) - - def _check_site(self, site: BaseSite) -> None: - if site not in self._sites: - raise RuntimeError(f"Site {site} is not registered in runner {self}") - - def _unreg_site(self, site: BaseSite) -> None: - if site not in self._sites: - raise RuntimeError(f"Site {site} is not registered in runner {self}") - self._sites.remove(site) - - -class ServerRunner(BaseRunner): - """Low-level web server runner""" - - __slots__ = ("_web_server",) - - def __init__( - self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any - ) -> None: - super().__init__(handle_signals=handle_signals, **kwargs) - self._web_server = web_server - - async def shutdown(self) -> None: - pass - - async def _make_server(self) -> Server: - return self._web_server - - async def _cleanup_server(self) -> None: - pass - - -class AppRunner(BaseRunner): - """Web Application runner""" - - __slots__ = ("_app",) - - def __init__( - self, app: Application, *, handle_signals: bool = False, **kwargs: Any - ) -> None: - super().__init__(handle_signals=handle_signals, **kwargs) - if not isinstance(app, Application): - raise TypeError( - "The first argument should be web.Application " - "instance, got {!r}".format(app) - ) - self._app = app - - @property - def app(self) -> Application: - return self._app - - async def shutdown(self) -> None: - await self._app.shutdown() - - async def _make_server(self) -> Server: - loop = asyncio.get_event_loop() - self._app._set_loop(loop) - self._app.on_startup.freeze() - await self._app.startup() - self._app.freeze() - - return self._app._make_handler(loop=loop, **self._kwargs) - - async def _cleanup_server(self) -> None: - await self._app.cleanup() diff --git a/venv/Lib/site-packages/aiohttp/web_server.py b/venv/Lib/site-packages/aiohttp/web_server.py deleted file mode 100644 index 328aca1..0000000 --- a/venv/Lib/site-packages/aiohttp/web_server.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Low level HTTP server.""" - -import asyncio -from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa - -from .abc import AbstractStreamWriter -from .http_parser import RawRequestMessage -from .streams import StreamReader -from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler -from .web_request import BaseRequest - -__all__ = ("Server",) - - -class Server: - def __init__( - self, - handler: _RequestHandler, - *, - request_factory: 
Optional[_RequestFactory] = None, - handler_cancellation: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any, - ) -> None: - self._loop = loop or asyncio.get_running_loop() - self._connections: Dict[RequestHandler, asyncio.Transport] = {} - self._kwargs = kwargs - # requests_count is the number of requests being processed by the server - # for the lifetime of the server. - self.requests_count = 0 - self.request_handler = handler - self.request_factory = request_factory or self._make_request - self.handler_cancellation = handler_cancellation - - @property - def connections(self) -> List[RequestHandler]: - return list(self._connections.keys()) - - def connection_made( - self, handler: RequestHandler, transport: asyncio.Transport - ) -> None: - self._connections[handler] = transport - - def connection_lost( - self, handler: RequestHandler, exc: Optional[BaseException] = None - ) -> None: - if handler in self._connections: - if handler._task_handler: - handler._task_handler.add_done_callback( - lambda f: self._connections.pop(handler, None) - ) - else: - del self._connections[handler] - - def _make_request( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: RequestHandler, - writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - ) -> BaseRequest: - return BaseRequest(message, payload, protocol, writer, task, self._loop) - - def pre_shutdown(self) -> None: - for conn in self._connections: - conn.close() - - async def shutdown(self, timeout: Optional[float] = None) -> None: - coros = (conn.shutdown(timeout) for conn in self._connections) - await asyncio.gather(*coros) - self._connections.clear() - - def __call__(self) -> RequestHandler: - try: - return RequestHandler(self, loop=self._loop, **self._kwargs) - except TypeError: - # Failsafe creation: remove all custom handler_args - kwargs = { - k: v - for k, v in self._kwargs.items() - if k in ["debug", "access_log_class"] - } - return RequestHandler(self, loop=self._loop, **kwargs) diff --git a/venv/Lib/site-packages/aiohttp/web_urldispatcher.py b/venv/Lib/site-packages/aiohttp/web_urldispatcher.py deleted file mode 100644 index cfa57a3..0000000 --- a/venv/Lib/site-packages/aiohttp/web_urldispatcher.py +++ /dev/null @@ -1,1305 +0,0 @@ -import abc -import asyncio -import base64 -import functools -import hashlib -import html -import inspect -import keyword -import os -import platform -import re -import sys -import warnings -from functools import wraps -from pathlib import Path -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Container, - Dict, - Final, - Generator, - Iterable, - Iterator, - List, - Mapping, - NoReturn, - Optional, - Pattern, - Set, - Sized, - Tuple, - Type, - TypedDict, - Union, - cast, -) - -from yarl import URL, __version__ as yarl_version - -from . 
import hdrs -from .abc import AbstractMatchInfo, AbstractRouter, AbstractView -from .helpers import DEBUG -from .http import HttpVersion11 -from .typedefs import Handler, PathLike -from .web_exceptions import ( - HTTPException, - HTTPExpectationFailed, - HTTPForbidden, - HTTPMethodNotAllowed, - HTTPNotFound, -) -from .web_fileresponse import FileResponse -from .web_request import Request -from .web_response import Response, StreamResponse -from .web_routedef import AbstractRouteDef - -__all__ = ( - "UrlDispatcher", - "UrlMappingMatchInfo", - "AbstractResource", - "Resource", - "PlainResource", - "DynamicResource", - "AbstractRoute", - "ResourceRoute", - "StaticResource", - "View", -) - - -if TYPE_CHECKING: - from .web_app import Application - - BaseDict = Dict[str, str] -else: - BaseDict = dict - -CIRCULAR_SYMLINK_ERROR = ( - (OSError,) - if sys.version_info < (3, 10) and sys.platform.startswith("win32") - else (RuntimeError,) if sys.version_info < (3, 13) else () -) - -YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) - -HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( - r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" -) -ROUTE_RE: Final[Pattern[str]] = re.compile( - r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" -) -PATH_SEP: Final[str] = re.escape("/") - -IS_WINDOWS: Final[bool] = platform.system() == "Windows" - -_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] -_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] - -html_escape = functools.partial(html.escape, quote=True) - - -class _InfoDict(TypedDict, total=False): - path: str - - formatter: str - pattern: Pattern[str] - - directory: Path - prefix: str - routes: Mapping[str, "AbstractRoute"] - - app: "Application" - - domain: str - - rule: "AbstractRuleMatching" - - http_exception: HTTPException - - -class AbstractResource(Sized, Iterable["AbstractRoute"]): - def __init__(self, *, name: Optional[str] = None) -> None: - self._name = name - - @property - def name(self) -> Optional[str]: - return self._name - - @property - @abc.abstractmethod - def canonical(self) -> str: - """Exposes the resource's canonical path. - - For example '/foo/bar/{name}' - - """ - - @abc.abstractmethod # pragma: no branch - def url_for(self, **kwargs: str) -> URL: - """Construct url for resource with additional params.""" - - @abc.abstractmethod # pragma: no branch - async def resolve(self, request: Request) -> _Resolve: - """Resolve resource. - - Return (UrlMappingMatchInfo, allowed_methods) pair. - """ - - @abc.abstractmethod - def add_prefix(self, prefix: str) -> None: - """Add a prefix to processed URLs. - - Required for subapplications support. 
- """ - - @abc.abstractmethod - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - def freeze(self) -> None: - pass - - @abc.abstractmethod - def raw_match(self, path: str) -> bool: - """Perform a raw match against path""" - - -class AbstractRoute(abc.ABC): - def __init__( - self, - method: str, - handler: Union[Handler, Type[AbstractView]], - *, - expect_handler: Optional[_ExpectHandler] = None, - resource: Optional[AbstractResource] = None, - ) -> None: - - if expect_handler is None: - expect_handler = _default_expect_handler - - assert inspect.iscoroutinefunction(expect_handler) or ( - sys.version_info < (3, 14) and asyncio.iscoroutinefunction(expect_handler) - ), f"Coroutine is expected, got {expect_handler!r}" - - method = method.upper() - if not HTTP_METHOD_RE.match(method): - raise ValueError(f"{method} is not allowed HTTP method") - - assert callable(handler), handler - if inspect.iscoroutinefunction(handler) or ( - sys.version_info < (3, 14) and asyncio.iscoroutinefunction(handler) - ): - pass - elif inspect.isgeneratorfunction(handler): - if TYPE_CHECKING: - assert False - warnings.warn( - "Bare generators are deprecated, use @coroutine wrapper", - DeprecationWarning, - ) - elif isinstance(handler, type) and issubclass(handler, AbstractView): - pass - else: - warnings.warn( - "Bare functions are deprecated, use async ones", DeprecationWarning - ) - - @wraps(handler) - async def handler_wrapper(request: Request) -> StreamResponse: - result = old_handler(request) # type: ignore[call-arg] - if asyncio.iscoroutine(result): - result = await result - assert isinstance(result, StreamResponse) - return result - - old_handler = handler - handler = handler_wrapper - - self._method = method - self._handler = handler - self._expect_handler = expect_handler - self._resource = resource - - @property - def method(self) -> str: - return self._method - - @property - def handler(self) -> Handler: - return self._handler - - @property - @abc.abstractmethod - def name(self) -> Optional[str]: - """Optional route's name, always equals to resource's name.""" - - @property - def resource(self) -> Optional[AbstractResource]: - return self._resource - - @abc.abstractmethod - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - @abc.abstractmethod # pragma: no branch - def url_for(self, *args: str, **kwargs: str) -> URL: - """Construct url for route with additional params.""" - - async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]: - return await self._expect_handler(request) - - -class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): - - __slots__ = ("_route", "_apps", "_current_app", "_frozen") - - def __init__(self, match_dict: Dict[str, str], route: AbstractRoute) -> None: - super().__init__(match_dict) - self._route = route - self._apps: List[Application] = [] - self._current_app: Optional[Application] = None - self._frozen = False - - @property - def handler(self) -> Handler: - return self._route.handler - - @property - def route(self) -> AbstractRoute: - return self._route - - @property - def expect_handler(self) -> _ExpectHandler: - return self._route.handle_expect_header - - @property - def http_exception(self) -> Optional[HTTPException]: - return None - - def get_info(self) -> _InfoDict: # type: ignore[override] - return self._route.get_info() - - @property - def apps(self) -> Tuple["Application", ...]: - return tuple(self._apps) - - def add_app(self, app: 
"Application") -> None: - if self._frozen: - raise RuntimeError("Cannot change apps stack after .freeze() call") - if self._current_app is None: - self._current_app = app - self._apps.insert(0, app) - - @property - def current_app(self) -> "Application": - app = self._current_app - assert app is not None - return app - - @current_app.setter - def current_app(self, app: "Application") -> None: - if DEBUG: # pragma: no cover - if app not in self._apps: - raise RuntimeError( - "Expected one of the following apps {!r}, got {!r}".format( - self._apps, app - ) - ) - self._current_app = app - - def freeze(self) -> None: - self._frozen = True - - def __repr__(self) -> str: - return f"" - - -class MatchInfoError(UrlMappingMatchInfo): - - __slots__ = ("_exception",) - - def __init__(self, http_exception: HTTPException) -> None: - self._exception = http_exception - super().__init__({}, SystemRoute(self._exception)) - - @property - def http_exception(self) -> HTTPException: - return self._exception - - def __repr__(self) -> str: - return "".format( - self._exception.status, self._exception.reason - ) - - -async def _default_expect_handler(request: Request) -> None: - """Default handler for Expect header. - - Just send "100 Continue" to client. - raise HTTPExpectationFailed if value of header is not "100-continue" - """ - expect = request.headers.get(hdrs.EXPECT, "") - if request.version == HttpVersion11: - if expect.lower() == "100-continue": - await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") - # Reset output_size as we haven't started the main body yet. - request.writer.output_size = 0 - else: - raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) - - -class Resource(AbstractResource): - def __init__(self, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - self._routes: Dict[str, ResourceRoute] = {} - self._any_route: Optional[ResourceRoute] = None - self._allowed_methods: Set[str] = set() - - def add_route( - self, - method: str, - handler: Union[Type[AbstractView], Handler], - *, - expect_handler: Optional[_ExpectHandler] = None, - ) -> "ResourceRoute": - if route := self._routes.get(method, self._any_route): - raise RuntimeError( - "Added route will never be executed, " - f"method {route.method} is already " - "registered" - ) - - route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) - self.register_route(route_obj) - return route_obj - - def register_route(self, route: "ResourceRoute") -> None: - assert isinstance( - route, ResourceRoute - ), f"Instance of Route class is required, got {route!r}" - if route.method == hdrs.METH_ANY: - self._any_route = route - self._allowed_methods.add(route.method) - self._routes[route.method] = route - - async def resolve(self, request: Request) -> _Resolve: - if (match_dict := self._match(request.rel_url.path_safe)) is None: - return None, set() - if route := self._routes.get(request.method, self._any_route): - return UrlMappingMatchInfo(match_dict, route), self._allowed_methods - return None, self._allowed_methods - - @abc.abstractmethod - def _match(self, path: str) -> Optional[Dict[str, str]]: - pass # pragma: no cover - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator["ResourceRoute"]: - return iter(self._routes.values()) - - # TODO: implement all abstract methods - - -class PlainResource(Resource): - def __init__(self, path: str, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - assert not path or path.startswith("/") - 
self._path = path - - @property - def canonical(self) -> str: - return self._path - - def freeze(self) -> None: - if not self._path: - self._path = "/" - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._path = prefix + self._path - - def _match(self, path: str) -> Optional[Dict[str, str]]: - # string comparison is about 10 times faster than regexp matching - if self._path == path: - return {} - return None - - def raw_match(self, path: str) -> bool: - return self._path == path - - def get_info(self) -> _InfoDict: - return {"path": self._path} - - def url_for(self) -> URL: # type: ignore[override] - return URL.build(path=self._path, encoded=True) - - def __repr__(self) -> str: - name = "'" + self.name + "' " if self.name is not None else "" - return f"" - - -class DynamicResource(Resource): - - DYN = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*)\}") - DYN_WITH_RE = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}") - GOOD = r"[^{}/]+" - - def __init__(self, path: str, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - self._orig_path = path - pattern = "" - formatter = "" - for part in ROUTE_RE.split(path): - match = self.DYN.fullmatch(part) - if match: - pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD) - formatter += "{" + match.group("var") + "}" - continue - - match = self.DYN_WITH_RE.fullmatch(part) - if match: - pattern += "(?P<{var}>{re})".format(**match.groupdict()) - formatter += "{" + match.group("var") + "}" - continue - - if "{" in part or "}" in part: - raise ValueError(f"Invalid path '{path}'['{part}']") - - part = _requote_path(part) - formatter += part - pattern += re.escape(part) - - try: - compiled = re.compile(pattern) - except re.error as exc: - raise ValueError(f"Bad pattern '{pattern}': {exc}") from None - assert compiled.pattern.startswith(PATH_SEP) - assert formatter.startswith("/") - self._pattern = compiled - self._formatter = formatter - - @property - def canonical(self) -> str: - return self._formatter - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern) - self._formatter = prefix + self._formatter - - def _match(self, path: str) -> Optional[Dict[str, str]]: - match = self._pattern.fullmatch(path) - if match is None: - return None - return { - key: _unquote_path_safe(value) for key, value in match.groupdict().items() - } - - def raw_match(self, path: str) -> bool: - return self._orig_path == path - - def get_info(self) -> _InfoDict: - return {"formatter": self._formatter, "pattern": self._pattern} - - def url_for(self, **parts: str) -> URL: - url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()}) - return URL.build(path=url, encoded=True) - - def __repr__(self) -> str: - name = "'" + self.name + "' " if self.name is not None else "" - return "".format( - name=name, formatter=self._formatter - ) - - -class PrefixResource(AbstractResource): - def __init__(self, prefix: str, *, name: Optional[str] = None) -> None: - assert not prefix or prefix.startswith("/"), prefix - assert prefix in ("", "/") or not prefix.endswith("/"), prefix - super().__init__(name=name) - self._prefix = _requote_path(prefix) - self._prefix2 = self._prefix + "/" - - @property - def canonical(self) -> str: - return self._prefix - - def add_prefix(self, prefix: str) -> None: - assert 
prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._prefix = prefix + self._prefix - self._prefix2 = self._prefix + "/" - - def raw_match(self, prefix: str) -> bool: - return False - - # TODO: impl missing abstract methods - - -class StaticResource(PrefixResource): - VERSION_KEY = "v" - - def __init__( - self, - prefix: str, - directory: PathLike, - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - chunk_size: int = 256 * 1024, - show_index: bool = False, - follow_symlinks: bool = False, - append_version: bool = False, - ) -> None: - super().__init__(prefix, name=name) - try: - directory = Path(directory).expanduser().resolve(strict=True) - except FileNotFoundError as error: - raise ValueError(f"'{directory}' does not exist") from error - if not directory.is_dir(): - raise ValueError(f"'{directory}' is not a directory") - self._directory = directory - self._show_index = show_index - self._chunk_size = chunk_size - self._follow_symlinks = follow_symlinks - self._expect_handler = expect_handler - self._append_version = append_version - - self._routes = { - "GET": ResourceRoute( - "GET", self._handle, self, expect_handler=expect_handler - ), - "HEAD": ResourceRoute( - "HEAD", self._handle, self, expect_handler=expect_handler - ), - } - self._allowed_methods = set(self._routes) - - def url_for( # type: ignore[override] - self, - *, - filename: PathLike, - append_version: Optional[bool] = None, - ) -> URL: - if append_version is None: - append_version = self._append_version - filename = str(filename).lstrip("/") - - url = URL.build(path=self._prefix, encoded=True) - # filename is not encoded - if YARL_VERSION < (1, 6): - url = url / filename.replace("%", "%25") - else: - url = url / filename - - if append_version: - unresolved_path = self._directory.joinpath(filename) - try: - if self._follow_symlinks: - normalized_path = Path(os.path.normpath(unresolved_path)) - normalized_path.relative_to(self._directory) - filepath = normalized_path.resolve() - else: - filepath = unresolved_path.resolve() - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError): - # ValueError for case when path point to symlink - # with follow_symlinks is False - return url # relatively safe - if filepath.is_file(): - # TODO cache file content - # with file watcher for cache invalidation - with filepath.open("rb") as f: - file_bytes = f.read() - h = self._get_file_hash(file_bytes) - url = url.with_query({self.VERSION_KEY: h}) - return url - return url - - @staticmethod - def _get_file_hash(byte_array: bytes) -> str: - m = hashlib.sha256() # todo sha256 can be configurable param - m.update(byte_array) - b64 = base64.urlsafe_b64encode(m.digest()) - return b64.decode("ascii") - - def get_info(self) -> _InfoDict: - return { - "directory": self._directory, - "prefix": self._prefix, - "routes": self._routes, - } - - def set_options_route(self, handler: Handler) -> None: - if "OPTIONS" in self._routes: - raise RuntimeError("OPTIONS route was set already") - self._routes["OPTIONS"] = ResourceRoute( - "OPTIONS", handler, self, expect_handler=self._expect_handler - ) - self._allowed_methods.add("OPTIONS") - - async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.path_safe - method = request.method - # We normalise here to avoid matches that traverse below the static root. - # e.g. 
/static/../../../../home/user/webapp/static/ - norm_path = os.path.normpath(path) - if IS_WINDOWS: - norm_path = norm_path.replace("\\", "/") - if not norm_path.startswith(self._prefix2) and norm_path != self._prefix: - return None, set() - - allowed_methods = self._allowed_methods - if method not in allowed_methods: - return None, allowed_methods - - match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])} - return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._routes.values()) - - async def _handle(self, request: Request) -> StreamResponse: - filename = request.match_info["filename"] - unresolved_path = self._directory.joinpath(filename) - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, self._resolve_path_to_response, unresolved_path - ) - - def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: - """Take the unresolved path and query the file system to form a response.""" - # Check for access outside the root directory. For follow symlinks, URI - # cannot traverse out, but symlinks can. Otherwise, no access outside - # root is permitted. - try: - if self._follow_symlinks: - normalized_path = Path(os.path.normpath(unresolved_path)) - normalized_path.relative_to(self._directory) - file_path = normalized_path.resolve() - else: - file_path = unresolved_path.resolve() - file_path.relative_to(self._directory) - except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error: - # ValueError is raised for the relative check. Circular symlinks - # raise here on resolving for python < 3.13. - raise HTTPNotFound() from error - - # if path is a directory, return the contents if permitted. Note the - # directory check will raise if a segment is not readable. - try: - if file_path.is_dir(): - if self._show_index: - return Response( - text=self._directory_as_html(file_path), - content_type="text/html", - ) - else: - raise HTTPForbidden() - except PermissionError as error: - raise HTTPForbidden() from error - - # Return the file response, which handles all other checks. - return FileResponse(file_path, chunk_size=self._chunk_size) - - def _directory_as_html(self, dir_path: Path) -> str: - """returns directory's index as html.""" - assert dir_path.is_dir() - - relative_path_to_dir = dir_path.relative_to(self._directory).as_posix() - index_of = f"Index of /{html_escape(relative_path_to_dir)}" - h1 = f"
<h1>{index_of}</h1>
" - - index_list = [] - dir_index = dir_path.iterdir() - for _file in sorted(dir_index): - # show file url as relative to static path - rel_path = _file.relative_to(self._directory).as_posix() - quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}") - - # if file is a directory, add '/' to the end of the name - if _file.is_dir(): - file_name = f"{_file.name}/" - else: - file_name = _file.name - - index_list.append( - f'
<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>' - ) - ul = "<ul>\n{}\n</ul>
    ".format("\n".join(index_list)) - body = f"\n{h1}\n{ul}\n" - - head_str = f"\n{index_of}\n" - html = f"\n{head_str}\n{body}\n" - - return html - - def __repr__(self) -> str: - name = "'" + self.name + "'" if self.name is not None else "" - return " {directory!r}>".format( - name=name, path=self._prefix, directory=self._directory - ) - - -class PrefixedSubAppResource(PrefixResource): - def __init__(self, prefix: str, app: "Application") -> None: - super().__init__(prefix) - self._app = app - self._add_prefix_to_resources(prefix) - - def add_prefix(self, prefix: str) -> None: - super().add_prefix(prefix) - self._add_prefix_to_resources(prefix) - - def _add_prefix_to_resources(self, prefix: str) -> None: - router = self._app.router - for resource in router.resources(): - # Since the canonical path of a resource is about - # to change, we need to unindex it and then reindex - router.unindex_resource(resource) - resource.add_prefix(prefix) - router.index_resource(resource) - - def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not supported by sub-application root") - - def get_info(self) -> _InfoDict: - return {"app": self._app, "prefix": self._prefix} - - async def resolve(self, request: Request) -> _Resolve: - match_info = await self._app.router.resolve(request) - match_info.add_app(self._app) - if isinstance(match_info.http_exception, HTTPMethodNotAllowed): - methods = match_info.http_exception.allowed_methods - else: - methods = set() - return match_info, methods - - def __len__(self) -> int: - return len(self._app.router.routes()) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._app.router.routes()) - - def __repr__(self) -> str: - return " {app!r}>".format( - prefix=self._prefix, app=self._app - ) - - -class AbstractRuleMatching(abc.ABC): - @abc.abstractmethod # pragma: no branch - async def match(self, request: Request) -> bool: - """Return bool if the request satisfies the criteria""" - - @abc.abstractmethod # pragma: no branch - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - @property - @abc.abstractmethod # pragma: no branch - def canonical(self) -> str: - """Return a str""" - - -class Domain(AbstractRuleMatching): - re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(? None: - super().__init__() - self._domain = self.validation(domain) - - @property - def canonical(self) -> str: - return self._domain - - def validation(self, domain: str) -> str: - if not isinstance(domain, str): - raise TypeError("Domain must be str") - domain = domain.rstrip(".").lower() - if not domain: - raise ValueError("Domain cannot be empty") - elif "://" in domain: - raise ValueError("Scheme not supported") - url = URL("http://" + domain) - assert url.raw_host is not None - if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")): - raise ValueError("Domain not valid") - if url.port == 80: - return url.raw_host - return f"{url.raw_host}:{url.port}" - - async def match(self, request: Request) -> bool: - host = request.headers.get(hdrs.HOST) - if not host: - return False - return self.match_domain(host) - - def match_domain(self, host: str) -> bool: - return host.lower() == self._domain - - def get_info(self) -> _InfoDict: - return {"domain": self._domain} - - -class MaskDomain(Domain): - re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(? 
None: - super().__init__(domain) - mask = self._domain.replace(".", r"\.").replace("*", ".*") - self._mask = re.compile(mask) - - @property - def canonical(self) -> str: - return self._mask.pattern - - def match_domain(self, host: str) -> bool: - return self._mask.fullmatch(host) is not None - - -class MatchedSubAppResource(PrefixedSubAppResource): - def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None: - AbstractResource.__init__(self) - self._prefix = "" - self._app = app - self._rule = rule - - @property - def canonical(self) -> str: - return self._rule.canonical - - def get_info(self) -> _InfoDict: - return {"app": self._app, "rule": self._rule} - - async def resolve(self, request: Request) -> _Resolve: - if not await self._rule.match(request): - return None, set() - match_info = await self._app.router.resolve(request) - match_info.add_app(self._app) - if isinstance(match_info.http_exception, HTTPMethodNotAllowed): - methods = match_info.http_exception.allowed_methods - else: - methods = set() - return match_info, methods - - def __repr__(self) -> str: - return f" {self._app!r}>" - - -class ResourceRoute(AbstractRoute): - """A route with resource""" - - def __init__( - self, - method: str, - handler: Union[Handler, Type[AbstractView]], - resource: AbstractResource, - *, - expect_handler: Optional[_ExpectHandler] = None, - ) -> None: - super().__init__( - method, handler, expect_handler=expect_handler, resource=resource - ) - - def __repr__(self) -> str: - return " {handler!r}".format( - method=self.method, resource=self._resource, handler=self.handler - ) - - @property - def name(self) -> Optional[str]: - if self._resource is None: - return None - return self._resource.name - - def url_for(self, *args: str, **kwargs: str) -> URL: - """Construct url for route with additional params.""" - assert self._resource is not None - return self._resource.url_for(*args, **kwargs) - - def get_info(self) -> _InfoDict: - assert self._resource is not None - return self._resource.get_info() - - -class SystemRoute(AbstractRoute): - def __init__(self, http_exception: HTTPException) -> None: - super().__init__(hdrs.METH_ANY, self._handle) - self._http_exception = http_exception - - def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not allowed for SystemRoute") - - @property - def name(self) -> Optional[str]: - return None - - def get_info(self) -> _InfoDict: - return {"http_exception": self._http_exception} - - async def _handle(self, request: Request) -> StreamResponse: - raise self._http_exception - - @property - def status(self) -> int: - return self._http_exception.status - - @property - def reason(self) -> str: - return self._http_exception.reason - - def __repr__(self) -> str: - return "".format(self=self) - - -class View(AbstractView): - async def _iter(self) -> StreamResponse: - if self.request.method not in hdrs.METH_ALL: - self._raise_allowed_methods() - method: Optional[Callable[[], Awaitable[StreamResponse]]] - method = getattr(self, self.request.method.lower(), None) - if method is None: - self._raise_allowed_methods() - ret = await method() - assert isinstance(ret, StreamResponse) - return ret - - def __await__(self) -> Generator[None, None, StreamResponse]: - return self._iter().__await__() - - def _raise_allowed_methods(self) -> NoReturn: - allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())} - raise HTTPMethodNotAllowed(self.request.method, allowed_methods) - - -class ResourcesView(Sized, 
Iterable[AbstractResource], Container[AbstractResource]): - def __init__(self, resources: List[AbstractResource]) -> None: - self._resources = resources - - def __len__(self) -> int: - return len(self._resources) - - def __iter__(self) -> Iterator[AbstractResource]: - yield from self._resources - - def __contains__(self, resource: object) -> bool: - return resource in self._resources - - -class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): - def __init__(self, resources: List[AbstractResource]): - self._routes: List[AbstractRoute] = [] - for resource in resources: - for route in resource: - self._routes.append(route) - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - yield from self._routes - - def __contains__(self, route: object) -> bool: - return route in self._routes - - -class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): - - NAME_SPLIT_RE = re.compile(r"[.:-]") - - def __init__(self) -> None: - super().__init__() - self._resources: List[AbstractResource] = [] - self._named_resources: Dict[str, AbstractResource] = {} - self._resource_index: dict[str, list[AbstractResource]] = {} - self._matched_sub_app_resources: List[MatchedSubAppResource] = [] - - async def resolve(self, request: Request) -> UrlMappingMatchInfo: - resource_index = self._resource_index - allowed_methods: Set[str] = set() - - # MatchedSubAppResource is primarily used to match on domain names - # (though custom rules could match on other things). This means that - # the traversal algorithm below can't be applied, and that we likely - # need to check these first so a sub app that defines the same path - # as a parent app will get priority if there's a domain match. - # - # For most cases we do not expect there to be many of these since - # currently they are only added by `.add_domain()`. - for resource in self._matched_sub_app_resources: - match_dict, allowed = await resource.resolve(request) - if match_dict is not None: - return match_dict - else: - allowed_methods |= allowed - - # Walk the url parts looking for candidates. We walk the url backwards - # to ensure the most explicit match is found first. If there are multiple - # candidates for a given url part because there are multiple resources - # registered for the same canonical path, we resolve them in a linear - # fashion to ensure registration order is respected. 
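# Illustrative sketch (hypothetical request path, not taken from this project):
# for a path such as "/static/css/site.css" the backwards walk below would try
# the index keys "/static/css/site.css", "/static/css", "/static" and finally
# "/", stopping at the first key whose registered resources produce a match:
#
#     url_part = "/static/css/site.css"
#     url_part = url_part.rpartition("/")[0] or "/"  # -> "/static/css"
#     url_part = url_part.rpartition("/")[0] or "/"  # -> "/static"
#     url_part = url_part.rpartition("/")[0] or "/"  # -> "/"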
- url_part = request.rel_url.path_safe - while url_part: - for candidate in resource_index.get(url_part, ()): - match_dict, allowed = await candidate.resolve(request) - if match_dict is not None: - return match_dict - else: - allowed_methods |= allowed - if url_part == "/": - break - url_part = url_part.rpartition("/")[0] or "/" - - if allowed_methods: - return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) - - return MatchInfoError(HTTPNotFound()) - - def __iter__(self) -> Iterator[str]: - return iter(self._named_resources) - - def __len__(self) -> int: - return len(self._named_resources) - - def __contains__(self, resource: object) -> bool: - return resource in self._named_resources - - def __getitem__(self, name: str) -> AbstractResource: - return self._named_resources[name] - - def resources(self) -> ResourcesView: - return ResourcesView(self._resources) - - def routes(self) -> RoutesView: - return RoutesView(self._resources) - - def named_resources(self) -> Mapping[str, AbstractResource]: - return MappingProxyType(self._named_resources) - - def register_resource(self, resource: AbstractResource) -> None: - assert isinstance( - resource, AbstractResource - ), f"Instance of AbstractResource class is required, got {resource!r}" - if self.frozen: - raise RuntimeError("Cannot register a resource into frozen router.") - - name = resource.name - - if name is not None: - parts = self.NAME_SPLIT_RE.split(name) - for part in parts: - if keyword.iskeyword(part): - raise ValueError( - f"Incorrect route name {name!r}, " - "python keywords cannot be used " - "for route name" - ) - if not part.isidentifier(): - raise ValueError( - "Incorrect route name {!r}, " - "the name should be a sequence of " - "python identifiers separated " - "by dash, dot or column".format(name) - ) - if name in self._named_resources: - raise ValueError( - "Duplicate {!r}, " - "already handled by {!r}".format(name, self._named_resources[name]) - ) - self._named_resources[name] = resource - self._resources.append(resource) - - if isinstance(resource, MatchedSubAppResource): - # We cannot index match sub-app resources because they have match rules - self._matched_sub_app_resources.append(resource) - else: - self.index_resource(resource) - - def _get_resource_index_key(self, resource: AbstractResource) -> str: - """Return a key to index the resource in the resource index.""" - if "{" in (index_key := resource.canonical): - # strip at the first { to allow for variables, and than - # rpartition at / to allow for variable parts in the path - # For example if the canonical path is `/core/locations{tail:.*}` - # the index key will be `/core` since index is based on the - # url parts split by `/` - index_key = index_key.partition("{")[0].rpartition("/")[0] - return index_key.rstrip("/") or "/" - - def index_resource(self, resource: AbstractResource) -> None: - """Add a resource to the resource index.""" - resource_key = self._get_resource_index_key(resource) - # There may be multiple resources for a canonical path - # so we keep them in a list to ensure that registration - # order is respected. 
- self._resource_index.setdefault(resource_key, []).append(resource) - - def unindex_resource(self, resource: AbstractResource) -> None: - """Remove a resource from the resource index.""" - resource_key = self._get_resource_index_key(resource) - self._resource_index[resource_key].remove(resource) - - def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: - if path and not path.startswith("/"): - raise ValueError("path should be started with / or be empty") - # Reuse last added resource if path and name are the same - if self._resources: - resource = self._resources[-1] - if resource.name == name and resource.raw_match(path): - return cast(Resource, resource) - if not ("{" in path or "}" in path or ROUTE_RE.search(path)): - resource = PlainResource(path, name=name) - self.register_resource(resource) - return resource - resource = DynamicResource(path, name=name) - self.register_resource(resource) - return resource - - def add_route( - self, - method: str, - path: str, - handler: Union[Handler, Type[AbstractView]], - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - ) -> AbstractRoute: - resource = self.add_resource(path, name=name) - return resource.add_route(method, handler, expect_handler=expect_handler) - - def add_static( - self, - prefix: str, - path: PathLike, - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - chunk_size: int = 256 * 1024, - show_index: bool = False, - follow_symlinks: bool = False, - append_version: bool = False, - ) -> AbstractResource: - """Add static files view. - - prefix - url prefix - path - folder with files - - """ - assert prefix.startswith("/") - if prefix.endswith("/"): - prefix = prefix[:-1] - resource = StaticResource( - prefix, - path, - name=name, - expect_handler=expect_handler, - chunk_size=chunk_size, - show_index=show_index, - follow_symlinks=follow_symlinks, - append_version=append_version, - ) - self.register_resource(resource) - return resource - - def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method HEAD.""" - return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) - - def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method OPTIONS.""" - return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) - - def add_get( - self, - path: str, - handler: Handler, - *, - name: Optional[str] = None, - allow_head: bool = True, - **kwargs: Any, - ) -> AbstractRoute: - """Shortcut for add_route with method GET. - - If allow_head is true, another - route is added allowing head requests to the same endpoint. 
- """ - resource = self.add_resource(path, name=name) - if allow_head: - resource.add_route(hdrs.METH_HEAD, handler, **kwargs) - return resource.add_route(hdrs.METH_GET, handler, **kwargs) - - def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method POST.""" - return self.add_route(hdrs.METH_POST, path, handler, **kwargs) - - def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method PUT.""" - return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) - - def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method PATCH.""" - return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) - - def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method DELETE.""" - return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) - - def add_view( - self, path: str, handler: Type[AbstractView], **kwargs: Any - ) -> AbstractRoute: - """Shortcut for add_route with ANY methods for a class-based view.""" - return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) - - def freeze(self) -> None: - super().freeze() - for resource in self._resources: - resource.freeze() - - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: - """Append routes to route table. - - Parameter should be a sequence of RouteDef objects. - - Returns a list of registered AbstractRoute instances. - """ - registered_routes = [] - for route_def in routes: - registered_routes.extend(route_def.register(self)) - return registered_routes - - -def _quote_path(value: str) -> str: - if YARL_VERSION < (1, 6): - value = value.replace("%", "%25") - return URL.build(path=value, encoded=False).raw_path - - -def _unquote_path_safe(value: str) -> str: - if "%" not in value: - return value - return value.replace("%2F", "/").replace("%25", "%") - - -def _requote_path(value: str) -> str: - # Quote non-ascii characters and other characters which must be quoted, - # but preserve existing %-sequences. - result = _quote_path(value) - if "%" in value: - result = result.replace("%25", "%") - return result diff --git a/venv/Lib/site-packages/aiohttp/web_ws.py b/venv/Lib/site-packages/aiohttp/web_ws.py deleted file mode 100644 index 575f9a3..0000000 --- a/venv/Lib/site-packages/aiohttp/web_ws.py +++ /dev/null @@ -1,631 +0,0 @@ -import asyncio -import base64 -import binascii -import hashlib -import json -import sys -from typing import Any, Final, Iterable, Optional, Tuple, Union, cast - -import attr -from multidict import CIMultiDict - -from . 
import hdrs -from ._websocket.reader import WebSocketDataQueue -from ._websocket.writer import DEFAULT_LIMIT -from .abc import AbstractStreamWriter -from .client_exceptions import WSMessageTypeError -from .helpers import calculate_timeout_when, set_exception, set_result -from .http import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WS_KEY, - WebSocketError, - WebSocketReader, - WebSocketWriter, - WSCloseCode, - WSMessage, - WSMsgType as WSMsgType, - ws_ext_gen, - ws_ext_parse, -) -from .http_websocket import _INTERNAL_RECEIVE_TYPES -from .log import ws_logger -from .streams import EofStream -from .typedefs import JSONDecoder, JSONEncoder -from .web_exceptions import HTTPBadRequest, HTTPException -from .web_request import BaseRequest -from .web_response import StreamResponse - -if sys.version_info >= (3, 11): - import asyncio as async_timeout -else: - import async_timeout - -__all__ = ( - "WebSocketResponse", - "WebSocketReady", - "WSMsgType", -) - -THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class WebSocketReady: - ok: bool - protocol: Optional[str] - - def __bool__(self) -> bool: - return self.ok - - -class WebSocketResponse(StreamResponse): - - _length_check: bool = False - _ws_protocol: Optional[str] = None - _writer: Optional[WebSocketWriter] = None - _reader: Optional[WebSocketDataQueue] = None - _closed: bool = False - _closing: bool = False - _conn_lost: int = 0 - _close_code: Optional[int] = None - _loop: Optional[asyncio.AbstractEventLoop] = None - _waiting: bool = False - _close_wait: Optional[asyncio.Future[None]] = None - _exception: Optional[BaseException] = None - _heartbeat_when: float = 0.0 - _heartbeat_cb: Optional[asyncio.TimerHandle] = None - _pong_response_cb: Optional[asyncio.TimerHandle] = None - _ping_task: Optional[asyncio.Task[None]] = None - - def __init__( - self, - *, - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - protocols: Iterable[str] = (), - compress: bool = True, - max_msg_size: int = 4 * 1024 * 1024, - writer_limit: int = DEFAULT_LIMIT, - ) -> None: - super().__init__(status=101) - self._protocols = protocols - self._timeout = timeout - self._receive_timeout = receive_timeout - self._autoclose = autoclose - self._autoping = autoping - self._heartbeat = heartbeat - if heartbeat is not None: - self._pong_heartbeat = heartbeat / 2.0 - self._compress: Union[bool, int] = compress - self._max_msg_size = max_msg_size - self._writer_limit = writer_limit - - def _cancel_heartbeat(self) -> None: - self._cancel_pong_response_cb() - if self._heartbeat_cb is not None: - self._heartbeat_cb.cancel() - self._heartbeat_cb = None - if self._ping_task is not None: - self._ping_task.cancel() - self._ping_task = None - - def _cancel_pong_response_cb(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - - def _reset_heartbeat(self) -> None: - if self._heartbeat is None: - return - self._cancel_pong_response_cb() - req = self._req - timeout_ceil_threshold = ( - req._protocol._timeout_ceil_threshold if req is not None else 5 - ) - loop = self._loop - assert loop is not None - now = loop.time() - when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) - self._heartbeat_when = when - if self._heartbeat_cb is None: - # We do not cancel the previous heartbeat_cb here because - # it generates a significant amount of TimerHandle 
churn - # which causes asyncio to rebuild the heap frequently. - # Instead _send_heartbeat() will reschedule the next - # heartbeat if it fires too early. - self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) - - def _send_heartbeat(self) -> None: - self._heartbeat_cb = None - loop = self._loop - assert loop is not None and self._writer is not None - now = loop.time() - if now < self._heartbeat_when: - # Heartbeat fired too early, reschedule - self._heartbeat_cb = loop.call_at( - self._heartbeat_when, self._send_heartbeat - ) - return - - req = self._req - timeout_ceil_threshold = ( - req._protocol._timeout_ceil_threshold if req is not None else 5 - ) - when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) - self._cancel_pong_response_cb() - self._pong_response_cb = loop.call_at(when, self._pong_not_received) - - coro = self._writer.send_frame(b"", WSMsgType.PING) - if sys.version_info >= (3, 12): - # Optimization for Python 3.12, try to send the ping - # immediately to avoid having to schedule - # the task on the event loop. - ping_task = asyncio.Task(coro, loop=loop, eager_start=True) - else: - ping_task = loop.create_task(coro) - - if not ping_task.done(): - self._ping_task = ping_task - ping_task.add_done_callback(self._ping_task_done) - else: - self._ping_task_done(ping_task) - - def _ping_task_done(self, task: "asyncio.Task[None]") -> None: - """Callback for when the ping task completes.""" - if not task.cancelled() and (exc := task.exception()): - self._handle_ping_pong_exception(exc) - self._ping_task = None - - def _pong_not_received(self) -> None: - if self._req is not None and self._req.transport is not None: - self._handle_ping_pong_exception( - asyncio.TimeoutError( - f"No PONG received after {self._pong_heartbeat} seconds" - ) - ) - - def _handle_ping_pong_exception(self, exc: BaseException) -> None: - """Handle exceptions raised during ping/pong processing.""" - if self._closed: - return - self._set_closed() - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = exc - if self._waiting and not self._closing and self._reader is not None: - self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0) - - def _set_closed(self) -> None: - """Set the connection to closed. - - Cancel any heartbeat timers and set the closed flag. - """ - self._closed = True - self._cancel_heartbeat() - - async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: - # make pre-check to don't hide it by do_handshake() exceptions - if self._payload_writer is not None: - return self._payload_writer - - protocol, writer = self._pre_start(request) - payload_writer = await super().prepare(request) - assert payload_writer is not None - self._post_start(request, protocol, writer) - await payload_writer.drain() - return payload_writer - - def _handshake( - self, request: BaseRequest - ) -> Tuple["CIMultiDict[str]", Optional[str], int, bool]: - headers = request.headers - if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): - raise HTTPBadRequest( - text=( - "No WebSocket UPGRADE hdr: {}\n Can " - '"Upgrade" only to "WebSocket".' 
- ).format(headers.get(hdrs.UPGRADE)) - ) - - if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): - raise HTTPBadRequest( - text="No CONNECTION upgrade hdr: {}".format( - headers.get(hdrs.CONNECTION) - ) - ) - - # find common sub-protocol between client and server - protocol: Optional[str] = None - if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: - req_protocols = [ - str(proto.strip()) - for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") - ] - - for proto in req_protocols: - if proto in self._protocols: - protocol = proto - break - else: - # No overlap found: Return no protocol as per spec - ws_logger.warning( - "%s: Client protocols %r don’t overlap server-known ones %r", - request.remote, - req_protocols, - self._protocols, - ) - - # check supported version - version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") - if version not in ("13", "8", "7"): - raise HTTPBadRequest(text=f"Unsupported version: {version}") - - # check client handshake for validity - key = headers.get(hdrs.SEC_WEBSOCKET_KEY) - try: - if not key or len(base64.b64decode(key)) != 16: - raise HTTPBadRequest(text=f"Handshake error: {key!r}") - except binascii.Error: - raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None - - accept_val = base64.b64encode( - hashlib.sha1(key.encode() + WS_KEY).digest() - ).decode() - response_headers = CIMultiDict( - { - hdrs.UPGRADE: "websocket", - hdrs.CONNECTION: "upgrade", - hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, - } - ) - - notakeover = False - compress = 0 - if self._compress: - extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) - # Server side always get return with no exception. - # If something happened, just drop compress extension - compress, notakeover = ws_ext_parse(extensions, isserver=True) - if compress: - enabledext = ws_ext_gen( - compress=compress, isserver=True, server_notakeover=notakeover - ) - response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext - - if protocol: - response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol - return ( - response_headers, - protocol, - compress, - notakeover, - ) - - def _pre_start(self, request: BaseRequest) -> Tuple[Optional[str], WebSocketWriter]: - self._loop = request._loop - - headers, protocol, compress, notakeover = self._handshake(request) - - self.set_status(101) - self.headers.update(headers) - self.force_close() - self._compress = compress - transport = request._protocol.transport - assert transport is not None - writer = WebSocketWriter( - request._protocol, - transport, - compress=compress, - notakeover=notakeover, - limit=self._writer_limit, - ) - - return protocol, writer - - def _post_start( - self, request: BaseRequest, protocol: Optional[str], writer: WebSocketWriter - ) -> None: - self._ws_protocol = protocol - self._writer = writer - - self._reset_heartbeat() - - loop = self._loop - assert loop is not None - self._reader = WebSocketDataQueue(request._protocol, 2**16, loop=loop) - request.protocol.set_parser( - WebSocketReader( - self._reader, self._max_msg_size, compress=bool(self._compress) - ) - ) - # disable HTTP keepalive for WebSocket - request.protocol.keep_alive(False) - - def can_prepare(self, request: BaseRequest) -> WebSocketReady: - if self._writer is not None: - raise RuntimeError("Already started") - try: - _, protocol, _, _ = self._handshake(request) - except HTTPException: - return WebSocketReady(False, None) - else: - return WebSocketReady(True, protocol) - - @property - def prepared(self) -> bool: - return self._writer is not None - - @property - def closed(self) 
-> bool: - return self._closed - - @property - def close_code(self) -> Optional[int]: - return self._close_code - - @property - def ws_protocol(self) -> Optional[str]: - return self._ws_protocol - - @property - def compress(self) -> Union[int, bool]: - return self._compress - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """Get optional transport information. - - If no value associated with ``name`` is found, ``default`` is returned. - """ - writer = self._writer - if writer is None: - return default - transport = writer.transport - if transport is None: - return default - return transport.get_extra_info(name, default) - - def exception(self) -> Optional[BaseException]: - return self._exception - - async def ping(self, message: bytes = b"") -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.send_frame(message, WSMsgType.PING) - - async def pong(self, message: bytes = b"") -> None: - # unsolicited pong - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.send_frame(message, WSMsgType.PONG) - - async def send_frame( - self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None - ) -> None: - """Send a frame over the websocket.""" - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.send_frame(message, opcode, compress) - - async def send_str(self, data: str, compress: Optional[int] = None) -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - if not isinstance(data, str): - raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send_frame( - data.encode("utf-8"), WSMsgType.TEXT, compress=compress - ) - - async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress) - - async def send_json( - self, - data: Any, - compress: Optional[int] = None, - *, - dumps: JSONEncoder = json.dumps, - ) -> None: - await self.send_str(dumps(data), compress=compress) - - async def write_eof(self) -> None: # type: ignore[override] - if self._eof_sent: - return - if self._payload_writer is None: - raise RuntimeError("Response has not been started") - - await self.close() - self._eof_sent = True - - async def close( - self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True - ) -> bool: - """Close websocket connection.""" - if self._writer is None: - raise RuntimeError("Call .prepare() first") - - if self._closed: - return False - self._set_closed() - - try: - await self._writer.close(code, message) - writer = self._payload_writer - assert writer is not None - if drain: - await writer.drain() - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - raise - except Exception as exc: - self._exception = exc - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - return True - - reader = self._reader - assert reader is not None - # we need to break `receive()` cycle before we can call - # `reader.read()` as `close()` may be called from different task - if self._waiting: - assert self._loop is not None - assert self._close_wait is None - self._close_wait = self._loop.create_future() - 
reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._close_wait - - if self._closing: - self._close_transport() - return True - - try: - async with async_timeout.timeout(self._timeout): - while True: - msg = await reader.read() - if msg.type is WSMsgType.CLOSE: - self._set_code_close_transport(msg.data) - return True - except asyncio.CancelledError: - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - raise - except Exception as exc: - self._exception = exc - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - return True - - def _set_closing(self, code: WSCloseCode) -> None: - """Set the close code and mark the connection as closing.""" - self._closing = True - self._close_code = code - self._cancel_heartbeat() - - def _set_code_close_transport(self, code: WSCloseCode) -> None: - """Set the close code and close the transport.""" - self._close_code = code - self._close_transport() - - def _close_transport(self) -> None: - """Close the transport.""" - if self._req is not None and self._req.transport is not None: - self._req.transport.close() - - async def receive(self, timeout: Optional[float] = None) -> WSMessage: - if self._reader is None: - raise RuntimeError("Call .prepare() first") - - receive_timeout = timeout or self._receive_timeout - while True: - if self._waiting: - raise RuntimeError("Concurrent call to receive() is not allowed") - - if self._closed: - self._conn_lost += 1 - if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: - raise RuntimeError("WebSocket connection is closed.") - return WS_CLOSED_MESSAGE - elif self._closing: - return WS_CLOSING_MESSAGE - - try: - self._waiting = True - try: - if receive_timeout: - # Entering the context manager and creating - # Timeout() object can take almost 50% of the - # run time in this loop so we avoid it if - # there is no read timeout. - async with async_timeout.timeout(receive_timeout): - msg = await self._reader.read() - else: - msg = await self._reader.read() - self._reset_heartbeat() - finally: - self._waiting = False - if self._close_wait: - set_result(self._close_wait, None) - except asyncio.TimeoutError: - raise - except EofStream: - self._close_code = WSCloseCode.OK - await self.close() - return WSMessage(WSMsgType.CLOSED, None, None) - except WebSocketError as exc: - self._close_code = exc.code - await self.close(code=exc.code) - return WSMessage(WSMsgType.ERROR, exc, None) - except Exception as exc: - self._exception = exc - self._set_closing(WSCloseCode.ABNORMAL_CLOSURE) - await self.close() - return WSMessage(WSMsgType.ERROR, exc, None) - - if msg.type not in _INTERNAL_RECEIVE_TYPES: - # If its not a close/closing/ping/pong message - # we can return it immediately - return msg - - if msg.type is WSMsgType.CLOSE: - self._set_closing(msg.data) - # Could be closed while awaiting reader. - if not self._closed and self._autoclose: - # The client is likely going to close the - # connection out from under us so we do not - # want to drain any pending writes as it will - # likely result writing to a broken pipe. 
- await self.close(drain=False) - elif msg.type is WSMsgType.CLOSING: - self._set_closing(WSCloseCode.OK) - elif msg.type is WSMsgType.PING and self._autoping: - await self.pong(msg.data) - continue - elif msg.type is WSMsgType.PONG and self._autoping: - continue - - return msg - - async def receive_str(self, *, timeout: Optional[float] = None) -> str: - msg = await self.receive(timeout) - if msg.type is not WSMsgType.TEXT: - raise WSMessageTypeError( - f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT" - ) - return cast(str, msg.data) - - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: - msg = await self.receive(timeout) - if msg.type is not WSMsgType.BINARY: - raise WSMessageTypeError( - f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY" - ) - return cast(bytes, msg.data) - - async def receive_json( - self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None - ) -> Any: - data = await self.receive_str(timeout=timeout) - return loads(data) - - async def write(self, data: bytes) -> None: - raise RuntimeError("Cannot call .write() for websocket") - - def __aiter__(self) -> "WebSocketResponse": - return self - - async def __anext__(self) -> WSMessage: - msg = await self.receive() - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): - raise StopAsyncIteration - return msg - - def _cancel(self, exc: BaseException) -> None: - # web_protocol calls this from connection_lost - # or when the server is shutting down. - self._closing = True - self._cancel_heartbeat() - if self._reader is not None: - set_exception(self._reader, exc) diff --git a/venv/Lib/site-packages/aiohttp/worker.py b/venv/Lib/site-packages/aiohttp/worker.py deleted file mode 100644 index f7281bf..0000000 --- a/venv/Lib/site-packages/aiohttp/worker.py +++ /dev/null @@ -1,255 +0,0 @@ -"""Async gunicorn worker for aiohttp.web""" - -import asyncio -import inspect -import os -import re -import signal -import sys -from types import FrameType -from typing import TYPE_CHECKING, Any, Optional - -from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat -from gunicorn.workers import base - -from aiohttp import web - -from .helpers import set_result -from .web_app import Application -from .web_log import AccessLogger - -if TYPE_CHECKING: - import ssl - - SSLContext = ssl.SSLContext -else: - try: - import ssl - - SSLContext = ssl.SSLContext - except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - - -__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker") - - -class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] - - DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT - DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default - - def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover - super().__init__(*args, **kw) - - self._task: Optional[asyncio.Task[None]] = None - self.exit_code = 0 - self._notify_waiter: Optional[asyncio.Future[bool]] = None - - def init_process(self) -> None: - # create new event_loop after fork - asyncio.get_event_loop().close() - - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - super().init_process() - - def run(self) -> None: - self._task = self.loop.create_task(self._run()) - - try: # ignore all finalization problems - self.loop.run_until_complete(self._task) - except Exception: - self.log.exception("Exception in gunicorn worker") - 
self.loop.run_until_complete(self.loop.shutdown_asyncgens()) - self.loop.close() - - sys.exit(self.exit_code) - - async def _run(self) -> None: - runner = None - if isinstance(self.wsgi, Application): - app = self.wsgi - elif inspect.iscoroutinefunction(self.wsgi) or ( - sys.version_info < (3, 14) and asyncio.iscoroutinefunction(self.wsgi) - ): - wsgi = await self.wsgi() - if isinstance(wsgi, web.AppRunner): - runner = wsgi - app = runner.app - else: - app = wsgi - else: - raise RuntimeError( - "wsgi app should be either Application or " - "async function returning Application, got {}".format(self.wsgi) - ) - - if runner is None: - access_log = self.log.access_log if self.cfg.accesslog else None - runner = web.AppRunner( - app, - logger=self.log, - keepalive_timeout=self.cfg.keepalive, - access_log=access_log, - access_log_format=self._get_valid_log_format( - self.cfg.access_log_format - ), - shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, - ) - await runner.setup() - - ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None - - runner = runner - assert runner is not None - server = runner.server - assert server is not None - for sock in self.sockets: - site = web.SockSite( - runner, - sock, - ssl_context=ctx, - ) - await site.start() - - # If our parent changed then we shut down. - pid = os.getpid() - try: - while self.alive: # type: ignore[has-type] - self.notify() - - cnt = server.requests_count - if self.max_requests and cnt > self.max_requests: - self.alive = False - self.log.info("Max requests, shutting down: %s", self) - - elif pid == os.getpid() and self.ppid != os.getppid(): - self.alive = False - self.log.info("Parent changed, shutting down: %s", self) - else: - await self._wait_next_notify() - except BaseException: - pass - - await runner.cleanup() - - def _wait_next_notify(self) -> "asyncio.Future[bool]": - self._notify_waiter_done() - - loop = self.loop - assert loop is not None - self._notify_waiter = waiter = loop.create_future() - self.loop.call_later(1.0, self._notify_waiter_done, waiter) - - return waiter - - def _notify_waiter_done( - self, waiter: Optional["asyncio.Future[bool]"] = None - ) -> None: - if waiter is None: - waiter = self._notify_waiter - if waiter is not None: - set_result(waiter, True) - - if waiter is self._notify_waiter: - self._notify_waiter = None - - def init_signals(self) -> None: - # Set up signals through the event loop API. 
- - self.loop.add_signal_handler( - signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None - ) - - self.loop.add_signal_handler( - signal.SIGTERM, self.handle_exit, signal.SIGTERM, None - ) - - self.loop.add_signal_handler( - signal.SIGINT, self.handle_quit, signal.SIGINT, None - ) - - self.loop.add_signal_handler( - signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None - ) - - self.loop.add_signal_handler( - signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None - ) - - self.loop.add_signal_handler( - signal.SIGABRT, self.handle_abort, signal.SIGABRT, None - ) - - # Don't let SIGTERM and SIGUSR1 disturb active requests - # by interrupting system calls - signal.siginterrupt(signal.SIGTERM, False) - signal.siginterrupt(signal.SIGUSR1, False) - # Reset signals so Gunicorn doesn't swallow subprocess return codes - # See: https://github.com/aio-libs/aiohttp/issues/6130 - - def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None: - self.alive = False - - # worker_int callback - self.cfg.worker_int(self) - - # wakeup closing process - self._notify_waiter_done() - - def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None: - self.alive = False - self.exit_code = 1 - self.cfg.worker_abort(self) - sys.exit(1) - - @staticmethod - def _create_ssl_context(cfg: Any) -> "SSLContext": - """Creates SSLContext instance for usage in asyncio.create_server. - - See ssl.SSLSocket.__init__ for more details. - """ - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - - ctx = ssl.SSLContext(cfg.ssl_version) - ctx.load_cert_chain(cfg.certfile, cfg.keyfile) - ctx.verify_mode = cfg.cert_reqs - if cfg.ca_certs: - ctx.load_verify_locations(cfg.ca_certs) - if cfg.ciphers: - ctx.set_ciphers(cfg.ciphers) - return ctx - - def _get_valid_log_format(self, source_format: str) -> str: - if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: - return self.DEFAULT_AIOHTTP_LOG_FORMAT - elif re.search(r"%\([^\)]+\)", source_format): - raise ValueError( - "Gunicorn's style options in form of `%(name)s` are not " - "supported for the log formatting. Please use aiohttp's " - "format specification to configure access log formatting: " - "http://docs.aiohttp.org/en/stable/logging.html" - "#format-specification" - ) - else: - return source_format - - -class GunicornUVLoopWebWorker(GunicornWebWorker): - def init_process(self) -> None: - import uvloop - - # Close any existing event loop before setting a - # new policy. - asyncio.get_event_loop().close() - - # Setup uvloop policy, so that every - # asyncio.get_event_loop() will create an instance - # of uvloop event loop. 
- asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) - - super().init_process() diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA deleted file mode 100644 index 03a6f0f..0000000 --- a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA +++ /dev/null @@ -1,112 +0,0 @@ -Metadata-Version: 2.4 -Name: aiosignal -Version: 1.4.0 -Summary: aiosignal: a list of registered asynchronous callbacks -Home-page: https://github.com/aio-libs/aiosignal -Maintainer: aiohttp team -Maintainer-email: team@aiohttp.org -License: Apache 2.0 -Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby -Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal -Project-URL: Docs: RTD, https://docs.aiosignal.org -Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Development Status :: 5 - Production/Stable -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Framework :: AsyncIO -Requires-Python: >=3.9 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: frozenlist>=1.1.0 -Requires-Dist: typing-extensions>=4.2; python_version < "3.13" -Dynamic: license-file - -========= -aiosignal -========= - -.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg - :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI - :alt: GitHub status for master branch - -.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg?flag=pytest - :target: https://codecov.io/gh/aio-libs/aiosignal?flags[0]=pytest - :alt: codecov.io status for master branch - -.. image:: https://badge.fury.io/py/aiosignal.svg - :target: https://pypi.org/project/aiosignal - :alt: Latest PyPI package version - -.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest - :target: https://aiosignal.readthedocs.io/ - :alt: Latest Read The Docs - -.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F - :target: https://aio-libs.discourse.group/ - :alt: Discourse group for io-libs - -.. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - -Introduction -============ - -A project to manage callbacks in `asyncio` projects. - -``Signal`` is a list of registered asynchronous callbacks. - -The signal's life-cycle has two stages: after creation its content -could be filled by using standard list operations: ``sig.append()`` -etc. - -After you call ``sig.freeze()`` the signal is *frozen*: adding, removing -and dropping callbacks is forbidden. 
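A minimal life-cycle sketch (the owner string and callback name are hypothetical,
chosen only for illustration; it relies solely on the public ``Signal`` API shown
in this package)::

    import asyncio

    from aiosignal import Signal

    async def on_startup(app_name):
        print(f"starting {app_name}")

    async def main():
        sig = Signal(owner="demo-app")  # owner is only used in repr()
        sig.append(on_startup)          # fill the signal with list operations
        sig.freeze()                    # frozen: no more adding or removing
        await sig.send("demo-app")      # awaits every registered callback in order

    asyncio.run(main())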
- -The only available operation is calling the previously registered -callbacks by using ``await sig.send(data)``. - -For concrete usage examples see the `Signals - -section of the `Web Server Advanced -` chapter of the `aiohttp -documentation`_. - - -Installation ------------- - -:: - - $ pip install aiosignal - - -Documentation -============= - -https://aiosignal.readthedocs.io/ - -License -======= - -``aiosignal`` is offered under the Apache 2 license. - -Source code -=========== - -The project is hosted on GitHub_ - -Please file an issue in the `bug tracker -`_ if you have found a bug -or have some suggestions to improve the library. - -.. _GitHub: https://github.com/aio-libs/aiosignal -.. _aiohttp documentation: https://docs.aiohttp.org/ diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD deleted file mode 100644 index 30b91c3..0000000 --- a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -aiosignal-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aiosignal-1.4.0.dist-info/METADATA,sha256=CSR-8dqLxpZyjUcTDnAuQwf299EB1sSFv_nzpxznAI0,3662 -aiosignal-1.4.0.dist-info/RECORD,, -aiosignal-1.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -aiosignal-1.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -aiosignal-1.4.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 -aiosignal-1.4.0.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10 -aiosignal/__init__.py,sha256=TIkmUG9HTBt4dfq2nISYBiZiRB2xwvFtEZydLP0HPL4,1537 -aiosignal/__pycache__/__init__.cpython-312.pyc,, -aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/REQUESTED b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE deleted file mode 100644 index 7082a2d..0000000 --- a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2013-2019 Nikolay Kim and Andrew Svetlov - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt b/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt deleted file mode 100644 index ac6df3a..0000000 --- a/venv/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -aiosignal diff --git a/venv/Lib/site-packages/aiosignal/__init__.py b/venv/Lib/site-packages/aiosignal/__init__.py deleted file mode 100644 index 5ede009..0000000 --- a/venv/Lib/site-packages/aiosignal/__init__.py +++ /dev/null @@ -1,59 +0,0 @@ -import sys -from typing import Any, Awaitable, Callable, TypeVar - -from frozenlist import FrozenList - -if sys.version_info >= (3, 11): - from typing import Unpack -else: - from typing_extensions import Unpack - -if sys.version_info >= (3, 13): - from typing import TypeVarTuple -else: - from typing_extensions import TypeVarTuple - -_T = TypeVar("_T") -_Ts = TypeVarTuple("_Ts", default=Unpack[tuple[()]]) - -__version__ = "1.4.0" - -__all__ = ("Signal",) - - -class Signal(FrozenList[Callable[[Unpack[_Ts]], Awaitable[object]]]): - """Coroutine-based signal implementation. - - To connect a callback to a signal, use any list method. - - Signals are fired using the send() coroutine, which takes named - arguments. - """ - - __slots__ = ("_owner",) - - def __init__(self, owner: object): - super().__init__() - self._owner = owner - - def __repr__(self) -> str: - return "".format( - self._owner, self.frozen, list(self) - ) - - async def send(self, *args: Unpack[_Ts], **kwargs: Any) -> None: - """ - Sends data to all registered receivers. - """ - if not self.frozen: - raise RuntimeError("Cannot send non-frozen signal.") - - for receiver in self: - await receiver(*args, **kwargs) - - def __call__( - self, func: Callable[[Unpack[_Ts]], Awaitable[_T]] - ) -> Callable[[Unpack[_Ts]], Awaitable[_T]]: - """Decorator to add a function to this Signal.""" - self.append(func) - return func diff --git a/venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index bbaf146..0000000 Binary files a/venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/aiosignal/py.typed b/venv/Lib/site-packages/aiosignal/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/METADATA deleted file mode 100644 index aa0cb18..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/METADATA +++ /dev/null @@ -1,477 +0,0 @@ -Metadata-Version: 2.1 -Name: alibabacloud-credentials -Version: 1.0.4 -Summary: The alibabacloud credentials module of alibabaCloud Python SDK. 
-Home-page: https://github.com/aliyun/credentials-python -Author: Alibaba Cloud -Author-email: alibaba-cloud-sdk-dev-team@list.alibaba-inc.com -License: Apache License 2.0 -Keywords: alibabacloud,sdk,tea -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -Requires-Dist: APScheduler (<4.0.0,>=3.10.0) -Requires-Dist: aiofiles (<25.0.0,>=22.1.0) -Requires-Dist: alibabacloud-tea (>=0.4.0) -Requires-Dist: alibabacloud-credentials-api (<2.0.0,>=1.0.0) - -English | [简体中文](README-CN.md) - -![Alibaba Cloud Logo](https://aliyunsdk-pages.alicdn.com/icons/AlibabaCloud.svg) - -# Alibaba Cloud Credentials for Python - -[![PyPI version](https://badge.fury.io/py/alibabacloud_credentials.svg)](https://badge.fury.io/py/alibabacloud_credentials) -[![Python Test](https://github.com/aliyun/credentials-python/actions/workflows/testPython.yml/badge.svg)](https://github.com/aliyun/credentials-python/actions/workflows/testPython.yml) -[![codecov](https://codecov.io/gh/aliyun/credentials-python/graph/badge.svg?token=Y0J1E7T35I)](https://codecov.io/gh/aliyun/credentials-python) - -## Important Updates - -- Starting from version 1.0rc1, the package `alibabacloud-credentials` only supports Python 3.7 and above. - -## Installation - -- **Install with pip** - -Python SDK uses a common package management tool named `pip`. If pip is not installed, see the [pip user guide](https://pip.pypa.io/en/stable/installing/ "pip User Guide") to install pip. - -```bash -# Install the alibabacloud-credentials -pip install alibabacloud-credentials -``` - -## Usage - -Before you begin, you need to sign up for an Alibaba Cloud account and retrieve your [Credentials](https://usercenter.console.aliyun.com/#/manage/ak). - -### **Parameters** of the **Credentials tool** - ------------------------------------------------------------ - -The parameters of the Credentials tool are defined in the `Config` class of the `alibabacloud_credentials.models` module. The credential type is determined by the value of `type`, which is a required parameter in the configurations. After you determine a credential type, configure parameters based on the credential type. The following table describes the valid values of `type` and the parameters supported by each credential type. In the table, a check mark (`✓`) indicates that the parameter is required, a hyphen (`-`) indicates that the parameter is optional, and an X mark (`×`) indicates that the parameter is not supported. - -**Note** - -We recommend that you do not use parameters that are not listed in the following table. - -| **type** | **access_key** | **sts** | **ram_role_arn** | **ecs_ram_role** | **oidc_role_arn** | **credentials_uri** | **bearer** | -| --- | --- | ---- | --- | --- | --- | --- | --- | -| access_key_id: the AccessKey ID. | ✓ | ✓ | ✓ | × | × | × | × | -| access_key_secret: the AccessKey secret. | ✓ | ✓ | ✓ | × | × | × | × | -| security_token: Security Token Service (STS) token. 
| × | ✓ | - | × | × | × | × | -| role_arn: the Alibaba Cloud Resource Name (ARN) of the Resource Access Management (RAM) role. | × | × | ✓ | × | ✓ | × | × | -| role_session_name: the name of the custom session. The default format is `credentials-java-The current timestamp`. | × | × | - | × | - | × | × | -| role_name: specifies the name of the RAM role. | × | × | × | - | × | × | × | -| disable_imds_v1: specifies whether to forcibly use the security hardening mode (IMDSv2). If you set this parameter to true, the security hardening mode (IMDSv2) is used. Default value: `false`. | × | × | × | - | × | × | × | -| bearer_token: a bearer token. | × | × | × | × | × | × | ✓ | -| policy: a custom policy. | × | × | - | × | - | × | × | -| role_session_expiration: the session timeout period. Default value: 3600. Unit: seconds. | × | × | - | × | - | × | × | -| oidc_provider_arn: the ARN of the OpenID Connect (OIDC) identity provider (IdP). | × | × | × | × | ✓ | × | × | -| oidc_token_file_path: the absolute path to the OIDC token. | × | × | × | × | ✓ | × | × | -| external_id: the external ID of the role, which is used to prevent the confused deputy issue. | × | × | - | × | × | × | × | -| credentials_uri: the URI of the credential. | × | × | × | × | × | ✓ | × | -| sts_endpoint: the endpoint of STS. VPC endpoints and Internet endpoints are supported. Default value: `sts.aliyuncs.com`. | × | × | - | × | - | × | × | -| timeout: the timeout period of HTTP read requests. Default value: 5000. Unit: milliseconds. | × | × | - | - | - | - | × | -| connect_timeout: the timeout period of HTTP connection requests. Default value: 10000. Unit: milliseconds. | × | × | - | - | - | - | × | - - -Initialize a Credentials client ------------------------------------------------- - -You can use one of the following methods to initialize a Credentials client as needed: - -**Important** - -* If you use a plaintext AccessKey pair in a project, the AccessKey pair may be leaked due to improper permission management on the code repository. This may threaten the security of all resources within the account to which the AccessKey pair belongs. We recommend that you store the AccessKey pair in environment variables or configuration files. - -* We recommend that you initialize the Credentials client in single-instance mode. This mode not only enables the credential caching feature of the SDK, but also effectively prevents traffic control issues and waste of performance resources caused by multiple API calls. - -### Credential Type - -#### Use the default credential provider chain - -If you do not specify a method to initialize a Credentials client, the default credential provider chain is used. For more information, see [Default credential provider chain](#default-credential-provider-chain). - -```python -from alibabacloud_credentials.client import Client as CredClient - -# Do not specify a method to initialize a Credentials client. -credentialsClient = CredClient() - -credential = credentialsClient.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -#### Access Key - -Setup access_key credential through [User Information Management][ak], it have full authority over the account, please keep it safe. For security reasons, you cannot hand over a primary account AccessKey with full access to the developer of a project. 
You may create a sub-account [RAM Sub-account][ram] , grant its [authorization][permissions],and use the AccessKey of RAM Sub-account. - -```python -import os -from alibabacloud_credentials.client import Client -from alibabacloud_credentials.models import Config - -config = Config( - type='access_key', - access_key_id=os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_ID'), - access_key_secret=os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_SECRET'), -) -cred = Client(config) - -credential = cred.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -#### STS - -Create a temporary security credential by applying Temporary Security Credentials (TSC) through the Security Token Service (STS). - -```python -import os - -from alibabacloud_credentials.client import Client -from alibabacloud_credentials.models import Config - -config = Config( - type='sts', - # Obtain the AccessKey ID from the environment variable. - access_key_id=os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_ID'), - # Obtain the AccessKey secret from the environment variable. - access_key_secret=os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_SECRET'), - # Obtain the temporary STS token from the environment variable. - security_token=os.environ.get('ALIBABA_CLOUD_SECURITY_TOKEN') -) -credClient = Client(config) - -credential = credClient.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -#### RAM Role ARN - -The underlying logic of this method is to use an STS token to initialize a Credentials client. After you specify the Alibaba Cloud Resource Name (ARN) of a RAM role, the Credentials tool obtains the security token from STS. You can also use the `policy` parameter to limit the permissions of the RAM role. - -```python -import os - -from alibabacloud_credentials.client import Client as CredClient -from alibabacloud_credentials.models import Config as CredConfig - -credentialsConfig = CredConfig( - access_key_id=os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_ID'), - access_key_secret=os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_SECRET'), - type='ram_role_arn', - # Specify the ARN of the RAM role that you want your application to assume by specifying the ALIBABA_CLOUD_ROLE_ARN environment variable. Example: acs:ram::123456789012****:role/adminrole. - role_arn='', - # Specify the role session name by specifying the ALIBABA_CLOUD_ROLE_SESSION_NAME environment variable. - role_session_name='', - # Optional. Specify the minimum permissions for the RAM role. Example: {"Statement": [{"Action": ["*"],"Effect": "Allow","Resource": ["*"]}],"Version":"1"} - policy='', - role_session_expiration=3600 -) -credentialsClient = CredClient(credentialsConfig) - -credential = credentialsClient.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -#### ECS RAM Role - -ECS instances and elastic container instances can be assigned RAM roles. Programs that run on the instances can use the Credentials tool to automatically obtain an STS token for the RAM role. The STS token can be used to initialize the Credentials client. 
- -By default, the Credentials tool accesses the metadata server of ECS in security hardening mode (IMDSv2). If an exception is thrown, the Credentials tool switches to the normal mode (IMDSv1). You can also configure the `disable_imds_v1` parameter or the *ALIBABA_CLOUD_IMDSV1_DISABLE* environment variable to specify the exception handling logic. Valid values: - -* false (default): The Credentials tool continues to obtain the access credential in normal mode (IMDSv1). - -* true: The exception is thrown and the Credentials tool continues to obtain the access credential in security hardening mode. - -The configurations for the metadata server determine whether the server supports the security hardening mode (IMDSv2). - -In addition, you can specify ALIBABA_CLOUD_ECS_METADATA_DISABLED=true to disable access from the Credentials tool to the metadata server of ECS. - -```python -from alibabacloud_credentials.client import Client as CredClient -from alibabacloud_credentials.models import Config as CredConfig - -credentialsConfig = CredConfig( - type='ecs_ram_role', - # Optional. Specify the name of the RAM role of the ECS instance by specifying the ALIBABA_CLOUD_ECS_METADATA environment variable. If you do not specify this parameter, the value is automatically obtained. We recommend that you specify this parameter to reduce the number of requests. - role_name='', - # Default value: False. This parameter is optional. True: The security hardening mode (IMDSv2) is forcibly used. False: The system preferentially attempts to obtain the access credential in security hardening mode (IMDSv2). If the attempt fails, the system switches to the normal mode (IMDSv1) to obtain access credentials. - # disable_imds_v1=True, -) -credentialsClient = CredClient(credentialsConfig) - -credential = credentialsClient.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -#### OIDC Role ARN - -To ensure the security of cloud resources and enable untrusted applications to securely obtain required STS tokens, you can use the [RAM Roles for Service Accounts (RRSA)] feature to grant minimum necessary permissions to an application. ACK creates and mounts corresponding OpenID Connect (OIDC) token files for different application pods, and passes relevant configuration information to environment variables. The Credentials tool obtains the configuration information from the environment variables and calls the [AssumeRoleWithOIDC] operation of STS to obtain the STS token for attached roles. - -The following environment variables are injected into the pod: - -***ALIBABA_CLOUD_ROLE_ARN*** : the ARN of the RAM role. - -***ALIBABA_CLOUD_OIDC_PROVIDER_ARN*** : the ARN of the OIDC identity provider (IdP). - -***ALIBABA_CLOUD_OIDC_TOKEN_FILE*** : the path of the OIDC token file. - -```python -from alibabacloud_credentials.client import Client -from alibabacloud_credentials.models import Config - -config = Config( - type='oidc_role_arn', - # Specify the ARN of the RAM role by specifying the ALIBABA_CLOUD_ROLE_ARN environment variable. - role_arn='', - # Specify the ARN of the OIDC IdP by specifying the ALIBABA_CLOUD_OIDC_PROVIDER_ARN environment variable. - oidc_provider_arn='', - # Specify the path of the OIDC token file by specifying the ALIBABA_CLOUD_OIDC_TOKEN_FILE environment variable. 
- oidc_token_file_path='', - # Specify the role session name by specifying the ALIBABA_CLOUD_ROLE_SESSION_NAME environment variable. - role_session_name='', - # Optional. Specify the minimum permissions for the RAM role. Example: {"Statement": [{"Action": ["*"],"Effect": "Allow","Resource": ["*"]}],"Version":"1"} - policy='', - # Specify the validity period of the session. - role_session_expiration=3600 -) -cred = Client(config) - -credential = cred.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -#### Credentials URI - -This method lets you encapsulate an STS token in your application and provide a custom URI to external resources. Other services can obtain the STS token only through the URI. This minimizes the risk of AccessKey exposure. The Credentials tool lets you obtain the STS token by calling the service URI to initialize the Credentials client. - -```python -from alibabacloud_credentials.client import Client -from alibabacloud_credentials.models import Config - -config = Config( - type='credentials_uri', - # Specify the URI of the credential in the http://local_or_remote_uri/ format by specifying the ALIBABA_CLOUD_CREDENTIALS_URI environment variable. - credentials_uri='', -) -cred = Client(config) - -credential = cred.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -The URI must meet the following requirements: - -* GET requests are supported. - -* The HTTP 200 status code can be returned. - -* The following response body structure is used: - - ```json - { - "Code": "Success", - "AccessKeySecret": "AccessKeySecret", - "AccessKeyId": "AccessKeyId", - "Expiration": "2021-09-26T03:46:38Z", - "SecurityToken": "SecurityToken" - } - ``` - -#### Bearer - -Only [Cloud Call Center](https://api.aliyun.com/api/CCC/2020-07-01/ListPrivilegesOfUser) lets you use a bearer token to initialize an SDK client. - -```python -from alibabacloud_credentials.client import Client -from alibabacloud_credentials.models import Config - -config = Config( - type='bearer', - # Enter the bearer token. - bearer_token='', -) -cred = Client(config) - -credential = cred.get_credential() -access_key_id = credential.get_access_key_id() -access_key_secret = credential.get_access_key_secret() -security_token = credential.get_security_token() -cred_type = credential.get_type() -``` - -### Default credential provider chain - -If you want to use different types of credentials in the development and production environments of your application, you generally need to obtain the environment information from the code and write code branches to obtain different credentials for the development and production environments. The default credential provider chain of Alibaba Cloud Credentials for Python allows you to use the same code to obtain credentials for different environments based on configurations independent of the application. If you use `cred = CredClient()` to initialize a Credentials client without specifying an initialization method, the Credentials tool obtains the credential information in the following order: - -1. Obtain the credential information from environment variables - - Look for environment credentials in environment variables.
If the `ALIBABA_CLOUD_ACCESS_KEY_ID` and `ALIBABA_CLOUD_ACCESS_KEY_SECRET` environment variables are defined and are not empty, the program will use them to create default credentials. If the `ALIBABA_CLOUD_ACCESS_KEY_ID`, `ALIBABA_CLOUD_ACCESS_KEY_SECRET` and `ALIBABA_CLOUD_SECURITY_TOKEN` environment variables are defined and are not empty, the program will use them to create temporary security credentials (STS). Note: this token has an expiration time, so it is recommended for use in temporary environments. - -2. Obtain the credential information by using the RAM role of an OIDC IdP - - If no credentials with a higher priority are found, the Credentials tool checks the following environment variables that are related to the RAM role of the OIDC IdP: - - * ***ALIBABA_CLOUD_ROLE_ARN*** : the ARN of the RAM role. - - * ***ALIBABA_CLOUD_OIDC_PROVIDER_ARN*** : the ARN of the OIDC IdP. - - * ***ALIBABA_CLOUD_OIDC_TOKEN_FILE*** : the file path of the OIDC token. - - If the preceding three environment variables are specified and valid, the Credentials tool uses the environment variables to call the [AssumeRoleWithOIDC] operation of STS to obtain an STS token as the default credential. - -3. Obtain the credential information from a configuration file - - **Note** - Make sure that the version of alibabacloud_credentials is **1.0rc3** or later. - - If no credentials with a higher priority are found, the Credentials tool attempts to load the `config.json` file. Default file path: - - * Linux/macOS: `~/.aliyun/config.json` - - * Windows: `C:\Users\USER_NAME\.aliyun\config.json` - - Do not change the preceding default paths. If you want to use this method to configure an access credential, manually create a config.json file in the corresponding path. Example: - - ```json - { - "current": "", - "profiles": [ - { - "name": "", - "mode": "AK", - "access_key_id": "", - "access_key_secret": "" - }, - { - "name": "", - "mode": "StsToken", - "access_key_id": "", - "access_key_secret": "", - "sts_token": "" - }, - { - "name": "", - "mode": "RamRoleArn", - "access_key_id": "", - "access_key_secret": "", - "ram_role_arn": "", - "ram_session_name": "", - "expired_seconds": 3600 - }, - { - "name": "", - "mode": "EcsRamRole", - "ram_role_name": "" - }, - { - "name": "", - "mode": "OIDC", - "oidc_provider_arn": "", - "oidc_token_file": "", - "ram_role_arn": "", - "ram_session_name": "", - "expired_seconds": 3600 - }, - { - "name": "", - "mode": "ChainableRamRoleArn", - "source_profile": "", - "ram_role_arn": "", - "ram_session_name": "", - "expired_seconds": 3600 - } - ] - } - ``` - In the config.json file, you can use mode to specify a type of credential: - - * AK: uses the AccessKey pair of a RAM user to obtain the credential information. - - * StsToken: uses the STS token as the credential information. - - * RamRoleArn: uses the ARN of a RAM role to obtain the credential information. - - * EcsRamRole: uses the RAM role attached to an ECS instance to obtain the credential information. - - * OIDC: uses the ARN of an OIDC IdP and the OIDC token file to obtain the credential information. - - * ChainableRamRoleArn: utilizes a role chaining mechanism. It allows you to assume a new RAM role and acquire a new, temporary credential by referencing another credential profile, which is specified by the `source_profile` parameter. - - After you complete the configurations, the Credentials tool selects the credential specified by the **current** parameter in the configuration file and initializes the client.
You can also specify the ***ALIBABA_CLOUD_PROFILE*** environment variable to specify the credential information. For example, you can set the ***ALIBABA_CLOUD_PROFILE*** environment variable to **client1**. - - -4. Obtain the credential information by using the RAM role of an ECS instance - - By default, if no credential that has a higher priority exists, the Credentials tool accesses the metadata server of ECS in security hardening mode (IMDSv2) to obtain the STS token of the RAM role used by the ECS instance and uses the STS token as the default credential. The program automatically accesses the metadata server of ECS to obtain the name of the RAM role (RoleName) and then obtains the credential. Two requests are sent in this process. If you want to send only one request, add the ***ALIBABA_CLOUD_ECS_METADATA*** environment variable to specify the name of the RAM role. If an exception occurs in the security hardening mode (IMDSv2), the Credentials tool obtains the access credential in normal mode. You can also configure the ***ALIBABA_CLOUD_IMDSV1_DISABLED*** environment variable to specify the exception handling logic. Valid values of the environment variable: - - 1. false: The Credentials tool continues to obtain the access credential in normal mode. - - 2. true: The exception is thrown and the Credentials tool continues to obtain the access credential in security hardening mode. - - The configurations for the metadata server determine whether the server supports the security hardening mode (IMDSv2). - - In addition, you can specify ALIBABA_CLOUD_ECS_METADATA_DISABLED=true to disable access from the Credentials tool to the metadata server of ECS. - -5. Obtain the credential information based on a URI - - If no valid credential is obtained using the preceding methods, the Credentials tool checks the ***ALIBABA_CLOUD_CREDENTIALS_URI*** environment variable. If this environment variable exists and specifies a valid URI, the Credentials tool initiates an HTTP request to obtain an STS token as the default credential. - -## Issues - -[Opening an Issue](https://github.com/aliyun/credentials-python/issues/new). Issues not conforming to the guidelines may be closed immediately. - -## Changelog - -Detailed changes for each release are documented in the [release notes](./ChangeLog.md). - -## References - -- [Latest Release](https://github.com/aliyun/credentials-python) - -## License - -[Apache-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Copyright (c) 2009-present, Alibaba Cloud All rights reserved.
- -[ak]: https://usercenter.console.aliyun.com/#/manage/ak -[ram]: https://ram.console.aliyun.com/users -[permissions]: https://ram.console.aliyun.com/permissions -[RAM Role]: https://ram.console.aliyun.com/#/role/list -[OIDC Role]: https://help.aliyun.com/zh/ram/user-guide/role-based-sso-by-using-oidc -[policy]: https://help.aliyun.com/zh/ram/user-guide/policy-management/ diff --git a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/RECORD deleted file mode 100644 index d3ddeb2..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/RECORD +++ /dev/null @@ -1,60 +0,0 @@ -alibabacloud_credentials-1.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_credentials-1.0.4.dist-info/METADATA,sha256=z9Cv9ARvWZ7frwNNoTdHs1VIQ-QEk5zSGgAAbcAX5Lg,29119 -alibabacloud_credentials-1.0.4.dist-info/RECORD,, -alibabacloud_credentials-1.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_credentials-1.0.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 -alibabacloud_credentials-1.0.4.dist-info/top_level.txt,sha256=qFzgPK-RjwJQIKjXEWcs5M8gtikqSgnJJB8uhoRMTfE,25 -alibabacloud_credentials/__init__.py,sha256=kwEyWwKLIIqYSnkf5eXgPtRgw9Bz51riSupwtvDpJMA,23 -alibabacloud_credentials/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_credentials/__pycache__/client.cpython-312.pyc,, -alibabacloud_credentials/__pycache__/credentials.cpython-312.pyc,, -alibabacloud_credentials/__pycache__/exceptions.cpython-312.pyc,, -alibabacloud_credentials/__pycache__/models.cpython-312.pyc,, -alibabacloud_credentials/__pycache__/providers.cpython-312.pyc,, -alibabacloud_credentials/client.py,sha256=WRNu8Vcvq1FRTSHGgN-xOdQGcCpmsS5XHNcsKQcNCRc,10432 -alibabacloud_credentials/credentials.py,sha256=0YHFnoMAZiocGudhb7hcwYaLbveSMbynR1g41Mygym0,19501 -alibabacloud_credentials/exceptions.py,sha256=zC-nFPC1p8dJ6S_aL2myTxqrPi7AmgT-CVEEyxwGBZU,193 -alibabacloud_credentials/http/__init__.py,sha256=sOd9LH9kKm9rW-yR9EQXeAiq_sRTEwxfk3BqNTivo0Y,67 -alibabacloud_credentials/http/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_credentials/http/__pycache__/_options.cpython-312.pyc,, -alibabacloud_credentials/http/_options.py,sha256=O54AbxrS4lzNlhaMzQGWil5jzXVFKcAmYy68BhRM2Y8,303 -alibabacloud_credentials/models.py,sha256=062x3B-N3cCWUxMbJtpRC86xeq9Jd4ShuLGVrpR-flI,12735 -alibabacloud_credentials/provider/__init__.py,sha256=n0ItOq67m6fAai3gSVjL_u8srjYCS98ooHp_qnBsbwI,1145 -alibabacloud_credentials/provider/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/cli_profile.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/cloud_sso.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/default.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/ecs_ram_role.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/env.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/oauth.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/oidc.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/profile.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/ram_role_arn.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/refreshable.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/rsa_key_pair.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/static_ak.cpython-312.pyc,, 
-alibabacloud_credentials/provider/__pycache__/static_sts.cpython-312.pyc,, -alibabacloud_credentials/provider/__pycache__/uri.cpython-312.pyc,, -alibabacloud_credentials/provider/cli_profile.py,sha256=R6JytZLjKTtkoza04xao9pB6WjAlm9UA9uK50PmIBUk,27398 -alibabacloud_credentials/provider/cloud_sso.py,sha256=EmAePEhde1_Lsx0K1bWFOkfBpSrlYctckCpTwXNlgz8,7015 -alibabacloud_credentials/provider/default.py,sha256=S058kift2q39m31YJ7KO4g1lqmHepeyT2wTEgGBb1nA,4353 -alibabacloud_credentials/provider/ecs_ram_role.py,sha256=hFp-Ny56T_trN0_wSPJPtjN2hGC2lZsig0xKMHFJEH4,11790 -alibabacloud_credentials/provider/env.py,sha256=VHQF7GidQ65YUm7HV_-1n5O910S3S4YoEMD1LIWYTCs,1260 -alibabacloud_credentials/provider/oauth.py,sha256=FMnIm4J31w6Qcs-KwRlmrDwm3bd6W3IsOA7woj97kmQ,12127 -alibabacloud_credentials/provider/oidc.py,sha256=Q_BLrQ7Nq1-AXY14EMuk8C7FaJacvcHas6MJmnAXaZQ,9332 -alibabacloud_credentials/provider/profile.py,sha256=XrYPB_bd_WKkiVdtbWN-95dVSeFRy9g0cnmxVP6Fpgo,5895 -alibabacloud_credentials/provider/ram_role_arn.py,sha256=xSKaucxnZnaeJrjDXJP6aFRIjXiDGvxKrpm0OiNezyc,11025 -alibabacloud_credentials/provider/refreshable.py,sha256=TDZjhsnNANnCK5KKXmuT5afpN8wPkFu4MoVIbawI4gA,11648 -alibabacloud_credentials/provider/rsa_key_pair.py,sha256=yhoY1SbSfBE8L97f0pT2294yf3KNVe0sE0MbctDEoCc,8411 -alibabacloud_credentials/provider/static_ak.py,sha256=_l6v_EbwhG9qepf6Hjmpvypn-3I-LbfC2L4EA7aQhAo,1208 -alibabacloud_credentials/provider/static_sts.py,sha256=B3JaScWEYAncG72nboIkwmz7jwVVj1AAyE-bMTq4E54,1517 -alibabacloud_credentials/provider/uri.py,sha256=z_JEioYLV0NIu_Y-WehGXsmw7S2Ro3wOQ2Xvk16XKts,5875 -alibabacloud_credentials/providers.py,sha256=CvhK7uSs0HkyQuhoo2px4H4TvQZIWq5oWBHFwKiZQ28,31943 -alibabacloud_credentials/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_credentials/utils/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_credentials/utils/__pycache__/auth_constant.cpython-312.pyc,, -alibabacloud_credentials/utils/__pycache__/auth_util.cpython-312.pyc,, -alibabacloud_credentials/utils/__pycache__/parameter_helper.cpython-312.pyc,, -alibabacloud_credentials/utils/auth_constant.py,sha256=85keKa6x31tPvTxfl8MYqUdCiDRpgWq7k9SqDZUzx80,929 -alibabacloud_credentials/utils/auth_util.py,sha256=T5Yg98uwdQ6S3GqHeHZT1PJkn98Retc3MviR1Zt-_mo,2479 -alibabacloud_credentials/utils/parameter_helper.py,sha256=QqbXj-AoACy7_7rbAQvqPCTFeCyWshEM6knpNDS37DM,1800 diff --git a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/WHEEL b/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/WHEEL deleted file mode 100644 index 57e3d84..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/top_level.txt deleted file mode 100644 index ee4249f..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials-1.0.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_credentials diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__init__.py b/venv/Lib/site-packages/alibabacloud_credentials/__init__.py deleted file mode 100644 index 
92192ee..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "1.0.4" diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index fc9bc24..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/client.cpython-312.pyc deleted file mode 100644 index b3d5d69..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/credentials.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/credentials.cpython-312.pyc deleted file mode 100644 index d2f776e..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/credentials.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 9b29912..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/models.cpython-312.pyc deleted file mode 100644 index b10a8cb..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/providers.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/providers.cpython-312.pyc deleted file mode 100644 index 851ca4b..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/__pycache__/providers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/client.py b/venv/Lib/site-packages/alibabacloud_credentials/client.py deleted file mode 100644 index 1fc228c..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/client.py +++ /dev/null @@ -1,245 +0,0 @@ -from functools import wraps - -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials import credentials -from alibabacloud_credentials.exceptions import CredentialException -from alibabacloud_credentials.models import Config, CredentialModel -from alibabacloud_credentials.http import HttpOptions -from alibabacloud_credentials.provider import (StaticAKCredentialsProvider, - StaticSTSCredentialsProvider, - RamRoleArnCredentialsProvider, - OIDCRoleArnCredentialsProvider, - RsaKeyPairCredentialsProvider, - EcsRamRoleCredentialsProvider, - URLCredentialsProvider, - DefaultCredentialsProvider) -from alibabacloud_credentials.utils import auth_constant as ac -from Tea.decorators import deprecated - - -def attribute_error_return_none(f): - @wraps(f) - def i(*args, **kwargs): - try: - return f(*args, **kwargs) - except AttributeError: - return - - return i - - -class _CredentialsProviderWrap: - - def __init__(self, - *, - type_name: str = None, - provider: 
ICredentialsProvider = None): - self.type_name = type_name - self.provider = provider - - def get_access_key_id(self) -> str: - credential = self.provider.get_credentials() - return credential.get_access_key_id() - - async def get_access_key_id_async(self) -> str: - credential = await self.provider.get_credentials_async() - return credential.get_access_key_id() - - def get_access_key_secret(self) -> str: - credential = self.provider.get_credentials() - return credential.get_access_key_secret() - - async def get_access_key_secret_async(self) -> str: - credential = await self.provider.get_credentials_async() - return credential.get_access_key_secret() - - def get_security_token(self): - credential = self.provider.get_credentials() - return credential.get_security_token() - - async def get_security_token_async(self): - credential = await self.provider.get_credentials_async() - return credential.get_security_token() - - def get_credential(self) -> CredentialModel: - credential = self.provider.get_credentials() - return CredentialModel( - access_key_id=credential.get_access_key_id(), - access_key_secret=credential.get_access_key_secret(), - security_token=credential.get_security_token(), - type=self.type_name, - provider_name=credential.get_provider_name(), - ) - - async def get_credential_async(self) -> CredentialModel: - credential = await self.provider.get_credentials_async() - return CredentialModel( - access_key_id=credential.get_access_key_id(), - access_key_secret=credential.get_access_key_secret(), - security_token=credential.get_security_token(), - type=self.type_name, - provider_name=credential.get_provider_name(), - ) - - def get_type(self) -> str: - return self.type_name - - -class Client: - cloud_credential = None - - def __init__(self, - config: Config = None, - provider: ICredentialsProvider = None): - if provider is not None: - self.cloud_credential = _CredentialsProviderWrap(type_name=provider.get_provider_name(), provider=provider) - elif config is None: - provider = DefaultCredentialsProvider() - self.cloud_credential = _CredentialsProviderWrap(type_name='default', provider=provider) - else: - self.cloud_credential = Client.get_credentials(config) - - def get_credential(self) -> CredentialModel: - """ - Get credential - @return: the whole credential - """ - return self.cloud_credential.get_credential() - - async def get_credential_async(self) -> CredentialModel: - """ - Get credential - @return: the whole credential - """ - return await self.cloud_credential.get_credential_async() - - @staticmethod - def get_credentials(config): - if config.type == ac.ACCESS_KEY: - provider = StaticAKCredentialsProvider( - access_key_id=config.access_key_id, - access_key_secret=config.access_key_secret, - ) - return _CredentialsProviderWrap(type_name='access_key', provider=provider) - elif config.type == ac.STS: - provider = StaticSTSCredentialsProvider( - access_key_id=config.access_key_id, - access_key_secret=config.access_key_secret, - security_token=config.security_token, - ) - return _CredentialsProviderWrap(type_name='sts', provider=provider) - elif config.type == ac.BEARER: - return credentials.BearerTokenCredential(config.bearer_token) - elif config.type == ac.ECS_RAM_ROLE: - provider = EcsRamRoleCredentialsProvider( - role_name=config.role_name, - disable_imds_v1=config.disable_imds_v1, - http_options=HttpOptions( - read_timeout=config.timeout, - connect_timeout=config.connect_timeout, - proxy=config.proxy, - ), - ) - return _CredentialsProviderWrap(type_name='ecs_ram_role', 
provider=provider) - elif config.type == ac.CREDENTIALS_URI: - provider = URLCredentialsProvider( - uri=config.credentials_uri, - http_options=HttpOptions( - read_timeout=config.timeout, - connect_timeout=config.connect_timeout, - proxy=config.proxy, - ), - ) - return _CredentialsProviderWrap(type_name='credentials_uri', provider=provider) - elif config.type == ac.RAM_ROLE_ARN: - if config.security_token is not None and config.security_token != '': - previous_provider = StaticSTSCredentialsProvider( - access_key_id=config.access_key_id, - access_key_secret=config.access_key_secret, - security_token=config.security_token, - ) - else: - previous_provider = StaticAKCredentialsProvider( - access_key_id=config.access_key_id, - access_key_secret=config.access_key_secret, - ) - provider = RamRoleArnCredentialsProvider( - credentials_provider=previous_provider, - role_arn=config.role_arn, - role_session_name=config.role_session_name, - duration_seconds=config.role_session_expiration, - policy=config.policy, - external_id=config.external_id, - sts_endpoint=config.sts_endpoint, - http_options=HttpOptions( - read_timeout=config.timeout, - connect_timeout=config.connect_timeout, - proxy=config.proxy, - ), - ) - return _CredentialsProviderWrap(type_name='ram_role_arn', provider=provider) - elif config.type == ac.RSA_KEY_PAIR: - provider = RsaKeyPairCredentialsProvider( - public_key_id=config.public_key_id, - private_key_file=config.private_key_file, - duration_seconds=config.role_session_expiration, - sts_endpoint=config.sts_endpoint, - http_options=HttpOptions( - read_timeout=config.timeout, - connect_timeout=config.connect_timeout, - proxy=config.proxy, - ), - ) - return _CredentialsProviderWrap(type_name='rsa_key_pair', provider=provider) - elif config.type == ac.OIDC_ROLE_ARN: - provider = OIDCRoleArnCredentialsProvider( - role_arn=config.role_arn, - oidc_provider_arn=config.oidc_provider_arn, - oidc_token_file_path=config.oidc_token_file_path, - role_session_name=config.role_session_name, - duration_seconds=config.role_session_expiration, - policy=config.policy, - sts_endpoint=config.sts_endpoint, - http_options=HttpOptions( - read_timeout=config.timeout, - connect_timeout=config.connect_timeout, - proxy=config.proxy, - ), - ) - return _CredentialsProviderWrap(type_name='oidc_role_arn', provider=provider) - raise CredentialException( - 'invalid type option, support: access_key, sts, bearer, ecs_ram_role, ram_role_arn, rsa_key_pair, oidc_role_arn, credentials_uri') - - @deprecated("Use 'get_credential().access_key_id' instead") - def get_access_key_id(self): - return self.cloud_credential.get_access_key_id() - - @deprecated("Use 'get_credential().access_key_secret' instead") - def get_access_key_secret(self): - return self.cloud_credential.get_access_key_secret() - - @deprecated("Use 'get_credential().security_token' instead") - def get_security_token(self): - return self.cloud_credential.get_security_token() - - @deprecated("Use 'get_credential_async().access_key_id' instead") - async def get_access_key_id_async(self): - return await self.cloud_credential.get_access_key_id_async() - - @deprecated("Use 'get_credential_async().access_key_secret' instead") - async def get_access_key_secret_async(self): - return await self.cloud_credential.get_access_key_secret_async() - - @deprecated("Use 'get_credential_async().security_token' instead") - async def get_security_token_async(self): - return await self.cloud_credential.get_security_token_async() - - @deprecated("Use 'get_credential().type' instead") - 
@attribute_error_return_none - def get_type(self): - return self.cloud_credential.get_type() - - @deprecated("Use 'get_credential().bearer_token' instead") - @attribute_error_return_none - def get_bearer_token(self): - return self.cloud_credential.bearer_token diff --git a/venv/Lib/site-packages/alibabacloud_credentials/credentials.py b/venv/Lib/site-packages/alibabacloud_credentials/credentials.py deleted file mode 100644 index b625da3..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/credentials.py +++ /dev/null @@ -1,561 +0,0 @@ -import calendar -import json -import time -from urllib.parse import urlparse, parse_qs - -from Tea.core import TeaCore - -from alibabacloud_credentials.utils import auth_constant as ac -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException -from alibabacloud_credentials.models import CredentialModel - - -class Credential: - def get_access_key_id(self): - return - - def get_access_key_secret(self): - return - - def get_security_token(self): - return - - async def get_access_key_id_async(self): - return - - async def get_access_key_secret_async(self): - return - - async def get_security_token_async(self): - return - - def get_credential(self): - return - - async def get_credential_async(self): - return - - -class _AutomaticallyRefreshCredentials: - def __init__(self, expiration, provider): - self.expiration = expiration - self.provider = provider - - def _with_should_refresh(self): - if self.expiration is None: - return True - return int(time.mktime(time.localtime())) >= (self.expiration - 180) - - def _get_new_credential(self): - return self.provider.get_credentials() - - def _refresh_credential(self): - if self._with_should_refresh(): - return self._get_new_credential() - - async def _get_new_credential_async(self): - return await self.provider.get_credentials_async() - - async def _refresh_credential_async(self): - if self._with_should_refresh(): - return await self._get_new_credential_async() - - -class AccessKeyCredential(Credential): - """AccessKeyCredential""" - - def __init__(self, access_key_id, access_key_secret): - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.credential_type = ac.ACCESS_KEY - - def get_access_key_id(self): - return self.access_key_id - - def get_access_key_secret(self): - return self.access_key_secret - - async def get_access_key_id_async(self): - return self.access_key_id - - async def get_access_key_secret_async(self): - return self.access_key_secret - - def get_credential(self): - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - type=ac.ACCESS_KEY - ) - - async def get_credential_async(self): - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - type=ac.ACCESS_KEY - ) - - -class BearerTokenCredential(Credential): - """BearerTokenCredential""" - - def __init__(self, bearer_token): - self.bearer_token = bearer_token - self.credential_type = ac.BEARER - - def get_credential(self): - return CredentialModel( - bearer_token=self.bearer_token, - type=ac.BEARER - ) - - async def get_credential_async(self): - return CredentialModel( - bearer_token=self.bearer_token, - type=ac.BEARER - ) - - def get_type(self) -> str: - return self.credential_type - - -class EcsRamRoleCredential(Credential, _AutomaticallyRefreshCredentials): - """EcsRamRoleCredential""" - - def __init__(self, access_key_id, 
access_key_secret, security_token, expiration, provider): - super().__init__(expiration, provider) - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.security_token = security_token - self.credential_type = ac.ECS_RAM_ROLE - - def _refresh_credential(self): - credential = super()._refresh_credential() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - async def _refresh_credential_async(self): - credential = await super()._refresh_credential_async() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - def get_access_key_id(self): - self._refresh_credential() - return self.access_key_id - - def get_access_key_secret(self): - self._refresh_credential() - return self.access_key_secret - - def get_security_token(self): - self._refresh_credential() - return self.security_token - - async def get_access_key_id_async(self): - await self._refresh_credential_async() - return self.access_key_id - - async def get_access_key_secret_async(self): - await self._refresh_credential_async() - return self.access_key_secret - - async def get_security_token_async(self): - await self._refresh_credential_async() - return self.security_token - - def get_credential(self): - self._refresh_credential() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.ECS_RAM_ROLE - ) - - async def get_credential_async(self): - await self._refresh_credential_async() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.ECS_RAM_ROLE - ) - - -class RamRoleArnCredential(Credential, _AutomaticallyRefreshCredentials): - """RamRoleArnCredential""" - - def __init__(self, access_key_id, access_key_secret, security_token, expiration, provider): - super().__init__(expiration, provider) - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.security_token = security_token - self.credential_type = ac.RAM_ROLE_ARN - - def _refresh_credential(self): - credential = super()._refresh_credential() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - async def _refresh_credential_async(self): - credential = await super()._refresh_credential_async() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - def get_access_key_id(self): - self._refresh_credential() - return self.access_key_id - - def get_access_key_secret(self): - self._refresh_credential() - return self.access_key_secret - - def get_security_token(self): - self._refresh_credential() - return self.security_token - - async def get_access_key_id_async(self): - await self._refresh_credential_async() - return self.access_key_id - - async def get_access_key_secret_async(self): - await self._refresh_credential_async() - return self.access_key_secret - - async def 
get_security_token_async(self): - await self._refresh_credential_async() - return self.security_token - - def get_credential(self): - self._refresh_credential() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.RAM_ROLE_ARN - ) - - async def get_credential_async(self): - await self._refresh_credential_async() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.RAM_ROLE_ARN - ) - - -class OIDCRoleArnCredential(Credential, _AutomaticallyRefreshCredentials): - """OIDCRoleArnCredential""" - - def __init__(self, access_key_id, access_key_secret, security_token, expiration, provider): - super().__init__(expiration, provider) - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.security_token = security_token - self.credential_type = ac.OIDC_ROLE_ARN - - def _refresh_credential(self): - credential = super()._refresh_credential() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - async def _refresh_credential_async(self): - credential = await super()._refresh_credential_async() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - def get_access_key_id(self): - self._refresh_credential() - return self.access_key_id - - def get_access_key_secret(self): - self._refresh_credential() - return self.access_key_secret - - def get_security_token(self): - self._refresh_credential() - return self.security_token - - async def get_access_key_id_async(self): - await self._refresh_credential_async() - return self.access_key_id - - async def get_access_key_secret_async(self): - await self._refresh_credential_async() - return self.access_key_secret - - async def get_security_token_async(self): - await self._refresh_credential_async() - return self.security_token - - def get_credential(self): - self._refresh_credential() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.OIDC_ROLE_ARN - ) - - async def get_credential_async(self): - await self._refresh_credential_async() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.OIDC_ROLE_ARN - ) - - -class CredentialsURICredential(Credential): - """CredentialsURICredential""" - - def __init__(self, credentials_uri): - self.access_key_id = None - self.access_key_secret = None - self.security_token = None - self.expiration = None - self.credentials_uri = credentials_uri - self.credential_type = ac.CREDENTIALS_URI - - def _need_refresh(self): - if self.expiration is None: - return True - - return int(time.mktime(time.localtime())) >= (self.expiration - 180) - - def _ensure_credential(self): - if self._need_refresh(): - self._get_new_credential() - - async def _ensure_credential_async(self): - if self._need_refresh(): - await self._get_new_credential_async() - - def _get_new_credential(self): - r = urlparse(self.credentials_uri) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - 
tea_request.port = r.port - tea_request.method = 'GET' - tea_request.pathname = r.path - for key, values in parse_qs(r.query).items(): - for value in values: - tea_request.query[key] = value - response = TeaCore.do_action(tea_request) - if response.status_code != 200: - raise CredentialException( - "Get credentials from " + self.credentials_uri + " failed, HttpCode=" + str(response.status_code)) - body = response.body.decode('utf-8') - - dic = json.loads(body) - content_code = dic.get('Code') - content_access_key_id = dic.get('AccessKeyId') - content_access_key_secret = dic.get('AccessKeySecret') - content_security_token = dic.get('SecurityToken') - content_expiration = dic.get('Expiration') - - if content_code != "Success": - raise CredentialException( - "Get credentials from " + self.credentials_uri + " failed, Code is " + content_code) - - # 先转换为时间数组 - time_array = time.strptime(content_expiration, "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - time_stamp = calendar.timegm(time_array) - self.access_key_id = content_access_key_id - self.access_key_secret = content_access_key_secret - self.security_token = content_security_token - self.expiration = time_stamp - - async def _get_new_credential_async(self): - r = urlparse(self.credentials_uri) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.netloc - tea_request.method = 'GET' - tea_request.pathname = r.path - tea_request.query = parse_qs(r.query) - response = await TeaCore.async_do_action(tea_request) - if response.status_code != 200: - raise CredentialException( - "Get credentials from " + self.credentials_uri + " failed, HttpCode=" + str(response.status_code)) - body = response.body.decode('utf-8') - - dic = json.loads(body) - content_code = dic.get('Code') - content_access_key_id = dic.get('AccessKeyId') - content_access_key_secret = dic.get('AccessKeySecret') - content_security_token = dic.get('SecurityToken') - content_expiration = dic.get('Expiration') - - if content_code != "Success": - raise CredentialException( - "Get credentials from " + self.credentials_uri + " failed, Code is " + content_code) - - # 先转换为时间数组 - time_array = time.strptime(content_expiration, "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - time_stamp = calendar.timegm(time_array) - self.access_key_id = content_access_key_id - self.access_key_secret = content_access_key_secret - self.security_token = content_security_token - self.expiration = time_stamp - - def get_access_key_id(self): - self._ensure_credential() - return self.access_key_id - - def get_access_key_secret(self): - self._ensure_credential() - return self.access_key_secret - - def get_security_token(self): - self._ensure_credential() - return self.security_token - - async def get_access_key_id_async(self): - await self._ensure_credential_async() - return self.access_key_id - - async def get_access_key_secret_async(self): - await self._ensure_credential_async() - return self.access_key_secret - - async def get_security_token_async(self): - await self._ensure_credential_async() - return self.security_token - - def get_credential(self): - self._ensure_credential() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.CREDENTIALS_URI - ) - - async def get_credential_async(self): - await self._ensure_credential_async() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.CREDENTIALS_URI - ) - - -class 
RsaKeyPairCredential(Credential, _AutomaticallyRefreshCredentials): - def __init__(self, access_key_id, access_key_secret, expiration, provider): - super().__init__(expiration, provider) - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.credential_type = ac.RSA_KEY_PAIR - - def _refresh_credential(self): - credential = super()._refresh_credential() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - - async def _refresh_credential_async(self): - credential = await super()._refresh_credential_async() - if credential: - self.access_key_id = credential.access_key_id - self.access_key_secret = credential.access_key_secret - self.expiration = credential.expiration - self.security_token = credential.security_token - - def get_access_key_id(self): - self._refresh_credential() - return self.access_key_id - - def get_access_key_secret(self): - self._refresh_credential() - return self.access_key_secret - - async def get_access_key_id_async(self): - await self._refresh_credential_async() - return self.access_key_id - - async def get_access_key_secret_async(self): - await self._refresh_credential_async() - return self.access_key_secret - - def get_credential(self): - self._refresh_credential() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - type=ac.RSA_KEY_PAIR - ) - - async def get_credential_async(self): - await self._refresh_credential_async() - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - type=ac.RSA_KEY_PAIR - ) - - -class StsCredential(Credential): - def __init__(self, access_key_id, access_key_secret, security_token): - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.security_token = security_token - self.credential_type = ac.STS - - def get_access_key_id(self): - return self.access_key_id - - def get_access_key_secret(self): - return self.access_key_secret - - def get_security_token(self): - return self.security_token - - async def get_access_key_id_async(self): - return self.access_key_id - - async def get_access_key_secret_async(self): - return self.access_key_secret - - async def get_security_token_async(self): - return self.security_token - - def get_credential(self): - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.STS - ) - - async def get_credential_async(self): - return CredentialModel( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - type=ac.STS - ) diff --git a/venv/Lib/site-packages/alibabacloud_credentials/exceptions.py b/venv/Lib/site-packages/alibabacloud_credentials/exceptions.py deleted file mode 100644 index 27795b6..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -class CredentialException(Exception): - - def __init__(self, message, code=None, request_id=None): - self.code = code - self.message = message - self.request_id = request_id diff --git a/venv/Lib/site-packages/alibabacloud_credentials/http/__init__.py b/venv/Lib/site-packages/alibabacloud_credentials/http/__init__.py deleted file mode 100644 index 7a37211..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/http/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from ._options 
import HttpOptions - -__all__ = [ - 'HttpOptions' -] diff --git a/venv/Lib/site-packages/alibabacloud_credentials/http/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/http/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index fcf9156..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/http/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/http/__pycache__/_options.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/http/__pycache__/_options.cpython-312.pyc deleted file mode 100644 index d0d9c26..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/http/__pycache__/_options.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/http/_options.py b/venv/Lib/site-packages/alibabacloud_credentials/http/_options.py deleted file mode 100644 index d3b38f7..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/http/_options.py +++ /dev/null @@ -1,9 +0,0 @@ -class HttpOptions: - def __init__(self, - *, - proxy: str = None, - connect_timeout: int = None, - read_timeout: int = None): - self.proxy = proxy - self.connect_timeout = connect_timeout - self.read_timeout = read_timeout diff --git a/venv/Lib/site-packages/alibabacloud_credentials/models.py b/venv/Lib/site-packages/alibabacloud_credentials/models.py deleted file mode 100644 index bd83d68..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/models.py +++ /dev/null @@ -1,311 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from Tea.model import TeaModel - - -class Config(TeaModel): - """ - Model for initializing credential - """ - - def __init__( - self, - *, - type: str = None, - access_key_id: str = None, - access_key_secret: str = None, - security_token: str = None, - bearer_token: str = None, - duration_seconds: int = None, - role_arn: str = None, - oidc_provider_arn: str = None, - oidc_token_file_path: str = None, - role_session_name: str = None, - role_session_expiration: int = None, - policy: str = None, - external_id: str = None, - sts_endpoint: str = None, - public_key_id: str = None, - private_key_file: str = None, - role_name: str = None, - enable_imds_v2: bool = None, - disable_imds_v1: bool = None, - metadata_token_duration: int = None, - credentials_uri: str = None, - host: str = None, - timeout: int = None, - connect_timeout: int = None, - proxy: str = None, - ): - """ - Initialize the credential object. - - ### Parameters - - #### General Parameters - - `type` (str): Credential type, including `access_key`, `sts`, `bearer`, `ecs_ram_role`, `ram_role_arn`, `rsa_key_pair`, `oidc_role_arn`, `credentials_uri`. - - #### Access Key Type - - `access_key_id` (str): Access Key ID. - - `access_key_secret` (str): Access Key Secret. - - `security_token` (str, optional): Security token. - - #### Bearer Token Type - - `bearer_token` (str): Bearer token. - - #### RAM Role ARN and OIDC Role ARN Types - - `role_arn` (str): Role ARN. - - `oidc_provider_arn` (str, for `oidc_role_arn` only): OIDC provider ARN. - - `oidc_token_file_path` (str, for `oidc_role_arn` only): Path to the OIDC token file. - - `role_session_name` (str): Role session name. - - `role_session_expiration` (int, optional): Role session expiration time in seconds. - - `policy` (str, optional): Policy. - - `external_id` (str, optional): External ID. - - `sts_endpoint` (str, optional): STS endpoint. 
- - `duration_seconds`: deprecated - - #### RSA Key Pair Type - - `public_key_id` (str): Public key ID. - - `private_key_file` (str): Path to the private key file. - - #### ECS RAM Role Type - - `role_name` (str): Role name. - - `disable_imds_v1` (bool, optional): Whether to disable IMDS v1. Default is `False`. - - #### Credentials URI Type - - `credentials_uri` (str): Credentials URI. - - #### HTTP Options - - `host` (str, optional): Host address. - - `timeout` (int, optional): Read timeout in milliseconds. Default values: - - `ecs_ram_role`: 1000ms - - `ram_role_arn`: 5000ms - - `oidc_role_arn`: 5000ms - - `connect_timeout` (int, optional): Connection timeout in milliseconds. Default values: - - `ecs_ram_role`: 1000ms - - `ram_role_arn`: 10000ms - - `oidc_role_arn`: 10000ms - - `proxy` (str, optional): HTTP or HTTPS proxy. - - #### Other Parameters - - `duration_seconds` (int, optional): Duration in seconds, mainly used for `sts` type credentials. - - Note: Some parameters are only valid for specific credential types. Please use them according to your actual needs. - """ - self.type = type - self.access_key_id = access_key_id - self.access_key_secret = access_key_secret - self.security_token = security_token - self.bearer_token = bearer_token - self.duration_seconds = duration_seconds - self.role_arn = role_arn - self.oidc_provider_arn = oidc_provider_arn - self.oidc_token_file_path = oidc_token_file_path - self.role_session_name = role_session_name - self.role_session_expiration = role_session_expiration - self.policy = policy - self.external_id = external_id - self.sts_endpoint = sts_endpoint - self.public_key_id = public_key_id - self.private_key_file = private_key_file - self.role_name = role_name - self.disable_imds_v1 = disable_imds_v1 - self.enable_imds_v2 = enable_imds_v2 - self.metadata_token_duration = metadata_token_duration - self.credentials_uri = credentials_uri - self.host = host - self.timeout = timeout - self.connect_timeout = connect_timeout - self.proxy = proxy - - def validate(self): - pass - - def to_map(self): - result = dict() - if self.type is not None: - result['type'] = self.type - if self.access_key_id is not None: - result['accessKeyId'] = self.access_key_id - if self.access_key_secret is not None: - result['accessKeySecret'] = self.access_key_secret - if self.security_token is not None: - result['securityToken'] = self.security_token - if self.bearer_token is not None: - result['bearerToken'] = self.bearer_token - if self.duration_seconds is not None: - result['durationSeconds'] = self.duration_seconds - if self.role_arn is not None: - result['roleArn'] = self.role_arn - if self.oidc_provider_arn is not None: - result['oidcProviderArn'] = self.oidc_provider_arn - if self.oidc_token_file_path is not None: - result['oidcTokenFilePath'] = self.oidc_token_file_path - if self.role_session_name is not None: - result['roleSessionName'] = self.role_session_name - if self.role_session_expiration is not None: - result['roleSessionExpiration'] = self.role_session_expiration - if self.policy is not None: - result['policy'] = self.policy - if self.external_id is not None: - result['externalId'] = self.external_id - if self.sts_endpoint is not None: - result['stsEndpoint'] = self.sts_endpoint - if self.public_key_id is not None: - result['publicKeyId'] = self.public_key_id - if self.private_key_file is not None: - result['privateKeyFile'] = self.private_key_file - if self.role_name is not None: - result['roleName'] = self.role_name - if self.disable_imds_v1 is not None: - 
result['disableIMDSv1'] = self.disable_imds_v1 - if self.enable_imds_v2 is not None: - result['enableIMDSv2'] = self.enable_imds_v2 - if self.metadata_token_duration is not None: - result['metadataTokenDuration'] = self.metadata_token_duration - if self.credentials_uri is not None: - result['credentialsUri'] = self.credentials_uri - if self.host is not None: - result['host'] = self.host - if self.timeout is not None: - result['timeout'] = self.timeout - if self.connect_timeout is not None: - result['connectTimeout'] = self.connect_timeout - if self.proxy is not None: - result['proxy'] = self.proxy - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('type') is not None: - self.type = m.get('type') - if m.get('accessKeyId') is not None: - self.access_key_id = m.get('accessKeyId') - if m.get('accessKeySecret') is not None: - self.access_key_secret = m.get('accessKeySecret') - if m.get('securityToken') is not None: - self.security_token = m.get('securityToken') - if m.get('bearerToken') is not None: - self.bearer_token = m.get('bearerToken') - if m.get('durationSeconds') is not None: - self.duration_seconds = m.get('durationSeconds') - if m.get('roleArn') is not None: - self.role_arn = m.get('roleArn') - if m.get('oidcProviderArn') is not None: - self.oidc_provider_arn = m.get('oidcProviderArn') - if m.get('oidcTokenFilePath') is not None: - self.oidc_token_file_path = m.get('oidcTokenFilePath') - if m.get('roleSessionName') is not None: - self.role_session_name = m.get('roleSessionName') - if m.get('roleSessionExpiration') is not None: - self.role_session_expiration = m.get('roleSessionExpiration') - if m.get('policy') is not None: - self.policy = m.get('policy') - if m.get('externalId') is not None: - self.external_id = m.get('externalId') - if m.get('stsEndpoint') is not None: - self.sts_endpoint = m.get('stsEndpoint') - if m.get('publicKeyId') is not None: - self.public_key_id = m.get('publicKeyId') - if m.get('privateKeyFile') is not None: - self.private_key_file = m.get('privateKeyFile') - if m.get('roleName') is not None: - self.role_name = m.get('roleName') - if m.get('disableIMDSv1') is not None: - self.disable_imds_v1 = m.get('disableIMDSv1') - if m.get('enableIMDSv2') is not None: - self.enable_imds_v2 = m.get('enableIMDSv2') - if m.get('metadataTokenDuration') is not None: - self.metadata_token_duration = m.get('metadataTokenDuration') - if m.get('credentialsUri') is not None: - self.credentials_uri = m.get('credentialsUri') - if m.get('host') is not None: - self.host = m.get('host') - if m.get('timeout') is not None: - self.timeout = m.get('timeout') - if m.get('connectTimeout') is not None: - self.connect_timeout = m.get('connectTimeout') - if m.get('proxy') is not None: - self.proxy = m.get('proxy') - return self - - -class CredentialModel(TeaModel): - def __init__( - self, - access_key_id: str = None, - access_key_secret: str = None, - security_token: str = None, - bearer_token: str = None, - type: str = None, - provider_name: str = None, - ): - # accesskey id - self.access_key_id = access_key_id - # accesskey secret - self.access_key_secret = access_key_secret - # security token - self.security_token = security_token - # bearer token - self.bearer_token = bearer_token - # type - self.type = type - # provider name - self.provider_name = provider_name - - def validate(self): - pass - - def to_map(self) -> dict: - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_key_id is not None: - 
result['accessKeyId'] = self.access_key_id - if self.access_key_secret is not None: - result['accessKeySecret'] = self.access_key_secret - if self.security_token is not None: - result['securityToken'] = self.security_token - if self.bearer_token is not None: - result['bearerToken'] = self.bearer_token - if self.type is not None: - result['type'] = self.type - if self.provider_name is not None: - result['providerName'] = self.provider_name - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('accessKeyId') is not None: - self.access_key_id = m.get('accessKeyId') - if m.get('accessKeySecret') is not None: - self.access_key_secret = m.get('accessKeySecret') - if m.get('securityToken') is not None: - self.security_token = m.get('securityToken') - if m.get('bearerToken') is not None: - self.bearer_token = m.get('bearerToken') - if m.get('type') is not None: - self.type = m.get('type') - if m.get('providerName') is not None: - self.provider_name = m.get('providerName') - return self - - def get_access_key_id(self) -> str: - return self.access_key_id - - def get_access_key_secret(self) -> str: - return self.access_key_secret - - def get_security_token(self) -> str: - return self.security_token - - def get_bearer_token(self) -> str: - return self.bearer_token - - def get_type(self) -> str: - return self.type - - def get_provider_name(self) -> str: - return self.provider_name diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__init__.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/__init__.py deleted file mode 100644 index 60eaf60..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -from .static_ak import StaticAKCredentialsProvider -from .static_sts import StaticSTSCredentialsProvider -from .env import EnvironmentVariableCredentialsProvider -from .ecs_ram_role import EcsRamRoleCredentialsProvider -from .ram_role_arn import RamRoleArnCredentialsProvider -from .oidc import OIDCRoleArnCredentialsProvider -from .rsa_key_pair import RsaKeyPairCredentialsProvider -from .uri import URLCredentialsProvider -from .cli_profile import CLIProfileCredentialsProvider -from .profile import ProfileCredentialsProvider -from .default import DefaultCredentialsProvider -from .cloud_sso import CloudSSOCredentialsProvider -from .oauth import OAuthCredentialsProvider - -__all__ = [ - 'StaticAKCredentialsProvider', - 'StaticSTSCredentialsProvider', - 'EnvironmentVariableCredentialsProvider', - 'EcsRamRoleCredentialsProvider', - 'RamRoleArnCredentialsProvider', - 'OIDCRoleArnCredentialsProvider', - 'RsaKeyPairCredentialsProvider', - 'URLCredentialsProvider', - 'CLIProfileCredentialsProvider', - 'ProfileCredentialsProvider', - 'DefaultCredentialsProvider', - 'CloudSSOCredentialsProvider', - 'OAuthCredentialsProvider' -] diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c17eaa9..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/cli_profile.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/cli_profile.cpython-312.pyc deleted file mode 100644 index b516d87..0000000 Binary files 
a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/cli_profile.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/cloud_sso.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/cloud_sso.cpython-312.pyc deleted file mode 100644 index 3b7b56a..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/cloud_sso.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/default.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/default.cpython-312.pyc deleted file mode 100644 index 0f5bd9a..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/default.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/ecs_ram_role.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/ecs_ram_role.cpython-312.pyc deleted file mode 100644 index 280b612..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/ecs_ram_role.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/env.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/env.cpython-312.pyc deleted file mode 100644 index c968be0..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/env.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/oauth.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/oauth.cpython-312.pyc deleted file mode 100644 index 08bd32b..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/oauth.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/oidc.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/oidc.cpython-312.pyc deleted file mode 100644 index 47239f9..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/oidc.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/profile.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/profile.cpython-312.pyc deleted file mode 100644 index b26fb08..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/profile.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/ram_role_arn.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/ram_role_arn.cpython-312.pyc deleted file mode 100644 index d1b19f7..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/ram_role_arn.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/refreshable.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/refreshable.cpython-312.pyc deleted file mode 100644 index 3a31dc3..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/refreshable.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/rsa_key_pair.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/rsa_key_pair.cpython-312.pyc deleted file mode 100644 index 214748e..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/rsa_key_pair.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/static_ak.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/static_ak.cpython-312.pyc deleted file mode 100644 index 548936c..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/static_ak.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/static_sts.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/static_sts.cpython-312.pyc deleted file mode 100644 index 2e6c07a..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/static_sts.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/uri.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/uri.cpython-312.pyc deleted file mode 100644 index ce3d842..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/provider/__pycache__/uri.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/cli_profile.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/cli_profile.py deleted file mode 100644 index 1f1450b..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/cli_profile.py +++ /dev/null @@ -1,589 +0,0 @@ -import os -import json -import threading -import platform -from typing import Any, Dict - -import aiofiles - -# 跨平台文件锁支持 -if platform.system() == 'Windows': - # Windows平台使用msvcrt - import msvcrt - - HAS_MSVCRT = True - HAS_FCNTL = False -else: - # 其他平台尝试使用fcntl,如果不可用则不设文件锁 - HAS_MSVCRT = False - try: - import fcntl - - HAS_FCNTL = True - except ImportError: - HAS_FCNTL = False - -from .static_ak import StaticAKCredentialsProvider -from .ecs_ram_role import EcsRamRoleCredentialsProvider -from .ram_role_arn import RamRoleArnCredentialsProvider -from .oidc import OIDCRoleArnCredentialsProvider -from .static_sts import StaticSTSCredentialsProvider -from .cloud_sso import CloudSSOCredentialsProvider -from .oauth import OAuthCredentialsProvider, OAuthTokenUpdateCallback, OAuthTokenUpdateCallbackAsync -from .refreshable import Credentials -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_constant as ac -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.exceptions import CredentialException - - -async def _load_config_async(file_path: str) -> Any: - async with aiofiles.open(file_path, mode='r') as f: - content = await f.read() - return json.loads(content) - - -def _load_config(file_path: str) -> Any: - with open(file_path, mode='r') as f: - content = f.read() - return json.loads(content) - - -class CLIProfileCredentialsProvider(ICredentialsProvider): - - def __init__(self, *, - profile_name: str = None, - profile_file: str = None, - allow_config_force_rewrite: bool = False): - self._profile_file = profile_file or os.path.join(ac.HOME, ".aliyun/config.json") - self._profile_name = profile_name or 
au.environment_profile_name - self._allow_config_force_rewrite = allow_config_force_rewrite - self.__innerProvider = None - # 文件锁,用于并发安全 - self._file_lock = threading.RLock() - - def _should_reload_credentials_provider(self) -> bool: - if self.__innerProvider is None: - return True - return False - - def get_credentials(self) -> Credentials: - if au.environment_cli_profile_disabled.lower() == "true": - raise CredentialException('cli credentials file is disabled') - - if self._should_reload_credentials_provider(): - if not os.path.exists(self._profile_file) or not os.path.isfile(self._profile_file): - raise CredentialException(f'unable to open credentials file: {self._profile_file}') - try: - config = _load_config(self._profile_file) - except Exception as e: - raise CredentialException( - f'failed to parse credential form cli credentials file: {self._profile_file}') - if config is None: - raise CredentialException( - f'failed to parse credential form cli credentials file: {self._profile_file}') - - profile_name = self._profile_name - if self._profile_name is None or self._profile_name == '': - profile_name = config.get('current') - self.__innerProvider = self._get_credentials_provider(config, profile_name) - - cre = self.__innerProvider.get_credentials() - credentials = Credentials( - access_key_id=cre.get_access_key_id(), - access_key_secret=cre.get_access_key_secret(), - security_token=cre.get_security_token(), - provider_name=f'{self.get_provider_name()}/{cre.get_provider_name()}' - ) - return credentials - - async def get_credentials_async(self) -> Credentials: - if au.environment_cli_profile_disabled.lower() == "true": - raise CredentialException('cli credentials file is disabled') - - if self._should_reload_credentials_provider(): - if not os.path.exists(self._profile_file) or not os.path.isfile(self._profile_file): - raise CredentialException(f'unable to open credentials file: {self._profile_file}') - try: - config = await _load_config_async(self._profile_file) - except Exception as e: - raise CredentialException( - f'failed to parse credential form cli credentials file: {self._profile_file}') - if config is None: - raise CredentialException( - f'failed to parse credential form cli credentials file: {self._profile_file}') - - profile_name = self._profile_name - if self._profile_name is None or self._profile_name == '': - profile_name = config.get('current') - self.__innerProvider = self._get_credentials_provider(config, profile_name) - - cre = await self.__innerProvider.get_credentials_async() - credentials = Credentials( - access_key_id=cre.get_access_key_id(), - access_key_secret=cre.get_access_key_secret(), - security_token=cre.get_security_token(), - provider_name=f'{self.get_provider_name()}/{cre.get_provider_name()}' - ) - return credentials - - def _get_credentials_provider(self, config: Dict, profile_name: str) -> ICredentialsProvider: - if profile_name is None or profile_name == '': - raise CredentialException('invalid profile name') - - profiles = config.get('profiles', []) - - if not profiles: - raise CredentialException(f"unable to get profile with '{profile_name}' form cli credentials file.") - - for profile in profiles: - if profile.get('name') is not None and profile['name'] == profile_name: - mode = profile.get('mode') - if mode == "AK": - return StaticAKCredentialsProvider( - access_key_id=profile.get('access_key_id'), - access_key_secret=profile.get('access_key_secret') - ) - elif mode == "StsToken": - return StaticSTSCredentialsProvider( - 
access_key_id=profile.get('access_key_id'), - access_key_secret=profile.get('access_key_secret'), - security_token=profile.get('sts_token') - ) - elif mode == "RamRoleArn": - pre_provider = StaticAKCredentialsProvider( - access_key_id=profile.get('access_key_id'), - access_key_secret=profile.get('access_key_secret') - ) - return RamRoleArnCredentialsProvider( - credentials_provider=pre_provider, - role_arn=profile.get('ram_role_arn'), - role_session_name=profile.get('ram_session_name'), - duration_seconds=profile.get('expired_seconds'), - policy=profile.get('policy'), - external_id=profile.get('external_id'), - sts_region_id=profile.get('sts_region'), - enable_vpc=profile.get('enable_vpc'), - ) - elif mode == "EcsRamRole": - return EcsRamRoleCredentialsProvider( - role_name=profile.get('ram_role_name') - ) - elif mode == "OIDC": - return OIDCRoleArnCredentialsProvider( - role_arn=profile.get('ram_role_arn'), - oidc_provider_arn=profile.get('oidc_provider_arn'), - oidc_token_file_path=profile.get('oidc_token_file'), - role_session_name=profile.get('role_session_name'), - duration_seconds=profile.get('expired_seconds'), - policy=profile.get('policy'), - sts_region_id=profile.get('sts_region'), - enable_vpc=profile.get('enable_vpc'), - ) - elif mode == "ChainableRamRoleArn": - previous_provider = self._get_credentials_provider(config, profile.get('source_profile')) - return RamRoleArnCredentialsProvider( - credentials_provider=previous_provider, - role_arn=profile.get('ram_role_arn'), - role_session_name=profile.get('ram_session_name'), - duration_seconds=profile.get('expired_seconds'), - policy=profile.get('policy'), - external_id=profile.get('external_id'), - sts_region_id=profile.get('sts_region'), - enable_vpc=profile.get('enable_vpc'), - ) - elif mode == "CloudSSO": - return CloudSSOCredentialsProvider( - sign_in_url=profile.get('cloud_sso_sign_in_url'), - account_id=profile.get('cloud_sso_account_id'), - access_config=profile.get('cloud_sso_access_config'), - access_token=profile.get('access_token'), - access_token_expire=profile.get('cloud_sso_access_token_expire'), - ) - elif mode == "OAuth": - # 获取 OAuth 配置 - site_type = profile.get('oauth_site_type', 'CN') - oauth_base_url_map = { - 'CN': 'https://oauth.aliyun.com', - 'INTL': 'https://oauth.alibabacloud.com' - } - sign_in_url = oauth_base_url_map.get(site_type.upper()) - if not sign_in_url: - raise CredentialException('Invalid OAuth site type, support CN or INTL') - - oauth_client_map = { - 'CN': '4038181954557748008', - 'INTL': '4103531455503354461' - } - client_id = oauth_client_map.get(site_type.upper()) - if not client_id: - raise CredentialException('Invalid OAuth site type, support CN or INTL') - - return OAuthCredentialsProvider( - client_id=client_id, - sign_in_url=sign_in_url, - access_token=profile.get('oauth_access_token'), - access_token_expire=profile.get('oauth_access_token_expire'), - refresh_token=profile.get('oauth_refresh_token'), - token_update_callback=self._get_oauth_token_update_callback(), - token_update_callback_async=self._get_oauth_token_update_callback_async(), - ) - else: - raise CredentialException(f"unsupported profile mode '{mode}' form cli credentials file.") - - raise CredentialException(f"unable to get profile with '{profile_name}' form cli credentials file.") - - def get_provider_name(self) -> str: - return 'cli_profile' - - def _update_oauth_tokens(self, refresh_token: str, access_token: str, access_key: str, secret: str, - security_token: str, access_token_expire: int, sts_expire: int) -> None: - 
"""更新 OAuth 令牌并写回配置文件""" - - with self._file_lock: - try: - # 读取现有配置 - config = _load_config(self._profile_file) - - # 找到当前 profile 并更新 OAuth 令牌 - profile_name = self._profile_name - if not profile_name: - profile_name = config.get('current') - profiles = config.get('profiles', []) - profile_tag = False - for profile in profiles: - if profile.get('name') == profile_name: - profile_tag = True - # 更新 OAuth 令牌 - profile['oauth_refresh_token'] = refresh_token - profile['oauth_access_token'] = access_token - profile['oauth_access_token_expire'] = access_token_expire - # 更新 STS 凭据 - profile['access_key_id'] = access_key - profile['access_key_secret'] = secret - profile['sts_token'] = security_token - profile['sts_expiration'] = sts_expire - break - - # 写回配置文件 - if not profile_tag: - raise CredentialException(f"unable to get profile with '{profile_name}' form cli credentials file.") - - self._write_configuration_to_file_with_lock(self._profile_file, config) - - except Exception as e: - raise CredentialException(f"failed to update OAuth tokens in config file: {e}") - - def _write_configuration_to_file(self, config_path: str, config: Dict) -> None: - """将配置写入文件,使用原子写入确保数据完整性""" - # 获取原文件权限(如果存在) - file_mode = 0o644 - if os.path.exists(config_path): - file_mode = os.stat(config_path).st_mode - - # 创建唯一临时文件 - import time - temp_file = config_path + '.tmp-' + str(int(time.time() * 1000000)) # 微秒级时间戳 - backup_file = None - - try: - # 写入临时文件 - self._write_config_file(temp_file, file_mode, config) - - # 原子性重命名,Windows下需要特殊处理 - if platform.system() == 'Windows' and self._allow_config_force_rewrite: - # Windows下需要先删除目标文件,使用备份机制确保数据安全 - if os.path.exists(config_path): - backup_file = config_path + '.backup' - # 创建备份 - import shutil - shutil.copy2(config_path, backup_file) - # 删除原文件 - os.remove(config_path) - - os.rename(temp_file, config_path) - - # 成功后删除备份 - if backup_file and os.path.exists(backup_file): - os.remove(backup_file) - - except Exception as e: - # 恢复原文件(如果存在备份) - if backup_file and os.path.exists(backup_file): - try: - if not os.path.exists(config_path): - os.rename(backup_file, config_path) - except Exception as restore_error: - raise CredentialException( - f"Failed to restore original file after write error: {restore_error}. 
Original error: {e}") - - raise e - - def _write_config_file(self, filename: str, file_mode: int, config: Dict) -> None: - try: - with open(filename, 'w', encoding='utf-8') as f: - json.dump(config, f, indent=4, ensure_ascii=False) - - # 设置文件权限 - os.chmod(filename, file_mode) - - except Exception as e: - raise CredentialException(f"Failed to write config file: {e}") - - def _write_configuration_to_file_with_lock(self, config_path: str, config: Dict) -> None: - """使用操作系统级别的文件锁写入配置文件""" - # 获取原文件权限(如果存在) - file_mode = 0o644 - if os.path.exists(config_path): - file_mode = os.stat(config_path).st_mode - - backup_file = None - - try: - # 确保文件存在 - if not os.path.exists(config_path): - # 创建空文件 - with open(config_path, 'w') as f: - json.dump({}, f) - - # 在获取文件锁之前创建备份(Windows下需要) - if platform.system() == 'Windows' and self._allow_config_force_rewrite and os.path.exists(config_path): - backup_file = config_path + '.backup' - import shutil - shutil.copy2(config_path, backup_file) - - # 打开文件用于锁定 - with open(config_path, 'r+') as f: - # 获取独占锁(阻塞其他进程) - if HAS_MSVCRT: - # Windows使用msvcrt - msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, 1) - elif HAS_FCNTL: - # Unix/Linux使用fcntl - fcntl.flock(f.fileno(), fcntl.LOCK_EX) - # 如果都不支持,则跳过文件锁(仅进程内保护) - - try: - if platform.system() == 'Windows' and self._allow_config_force_rewrite: - # Windows下直接在锁定的文件中写入 - f.seek(0) - f.truncate() # 清空文件内容 - json.dump(config, f, indent=4, ensure_ascii=False) - f.flush() - else: - # 其他环境使用临时文件+rename(在文件锁内部进行原子操作) - import time - temp_file = config_path + '.tmp-' + str(int(time.time() * 1000000)) - self._write_config_file(temp_file, file_mode, config) - # 在文件锁内部进行原子重命名 - os.rename(temp_file, config_path) - - finally: - # 释放锁 - try: - if HAS_MSVCRT: - msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK, 1) - elif HAS_FCNTL: - fcntl.flock(f.fileno(), fcntl.LOCK_UN) - except (OSError, PermissionError): - # 在Windows下,如果文件被重命名,文件句柄可能已经无效 - # 这种情况下锁会自动释放,所以忽略错误 - pass - - # 成功后删除备份 - if backup_file and os.path.exists(backup_file): - os.remove(backup_file) - - except Exception as e: - # 恢复原文件(如果存在备份) - if backup_file and os.path.exists(backup_file): - try: - if not os.path.exists(config_path): - os.rename(backup_file, config_path) - except Exception as restore_error: - raise CredentialException( - f"Failed to restore original file after write error: {restore_error}. 
Original error: {e}") - - raise e - - def _get_oauth_token_update_callback(self) -> OAuthTokenUpdateCallback: - """获取 OAuth 令牌更新回调函数""" - return lambda refresh_token, access_token, access_key, secret, security_token, access_token_expire, sts_expire: self._update_oauth_tokens( - refresh_token, access_token, access_key, secret, security_token, access_token_expire, sts_expire - ) - - async def _write_configuration_to_file_async(self, config_path: str, config: Dict) -> None: - """异步将配置写入文件,使用原子写入确保数据完整性""" - # 获取原文件权限(如果存在) - file_mode = 0o644 - if os.path.exists(config_path): - file_mode = os.stat(config_path).st_mode - - # 创建唯一临时文件 - import time - temp_file = config_path + '.tmp-' + str(int(time.time() * 1000000)) # 微秒级时间戳 - backup_file = None - - try: - # 异步写入临时文件 - await self._write_config_file_async(temp_file, file_mode, config) - - # 原子性重命名,Windows下需要特殊处理 - if platform.system() == 'Windows' and self._allow_config_force_rewrite: - # Windows下需要先删除目标文件,使用备份机制确保数据安全 - if os.path.exists(config_path): - backup_file = config_path + '.backup' - # 创建备份 - import shutil - shutil.copy2(config_path, backup_file) - # 删除原文件 - os.remove(config_path) - - os.rename(temp_file, config_path) - - # 成功后删除备份 - if backup_file and os.path.exists(backup_file): - os.remove(backup_file) - - except Exception as e: - # 恢复原文件(如果存在备份) - if backup_file and os.path.exists(backup_file): - try: - if not os.path.exists(config_path): - os.rename(backup_file, config_path) - except Exception as restore_error: - raise CredentialException( - f"Failed to restore original file after write error: {restore_error}. Original error: {e}") - - raise e - - async def _write_config_file_async(self, filename: str, file_mode: int, config: Dict) -> None: - try: - async with aiofiles.open(filename, 'w', encoding='utf-8') as f: - await f.write(json.dumps(config, indent=4, ensure_ascii=False)) - - # 设置文件权限 - os.chmod(filename, file_mode) - - except Exception as e: - raise CredentialException(f"Failed to write config file: {e}") - - async def _write_configuration_to_file_with_lock_async(self, config_path: str, config: Dict) -> None: - """异步使用操作系统级别的文件锁写入配置文件""" - # 获取原文件权限(如果存在) - file_mode = 0o644 - if os.path.exists(config_path): - file_mode = os.stat(config_path).st_mode - - backup_file = None - - try: - # 确保文件存在 - if not os.path.exists(config_path): - # 创建空文件 - with open(config_path, 'w') as f: - json.dump({}, f) - - # 在获取文件锁之前创建备份(Windows下需要) - if platform.system() == 'Windows' and self._allow_config_force_rewrite and os.path.exists(config_path): - backup_file = config_path + '.backup' - import shutil - shutil.copy2(config_path, backup_file) - - # 打开文件用于锁定 - with open(config_path, 'r+') as f: - # 获取独占锁(阻塞其他进程) - if HAS_MSVCRT: - # Windows使用msvcrt - msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, 1) - elif HAS_FCNTL: - # Unix/Linux使用fcntl - fcntl.flock(f.fileno(), fcntl.LOCK_EX) - # 如果都不支持,则跳过文件锁(仅进程内保护) - - try: - if platform.system() == 'Windows' and self._allow_config_force_rewrite: - # Windows下直接在锁定的文件中写入 - f.seek(0) - f.truncate() # 清空文件内容 - json.dump(config, f, indent=4, ensure_ascii=False) - f.flush() - else: - # 其他环境使用临时文件+rename(在文件锁内部进行原子操作) - import time - temp_file = config_path + '.tmp-' + str(int(time.time() * 1000000)) - await self._write_config_file_async(temp_file, file_mode, config) - # 在文件锁内部进行原子重命名 - os.rename(temp_file, config_path) - - finally: - # 释放锁 - try: - if HAS_MSVCRT: - msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK, 1) - elif HAS_FCNTL: - fcntl.flock(f.fileno(), fcntl.LOCK_UN) - except (OSError, PermissionError): - # 
在Windows下,如果文件被重命名,文件句柄可能已经无效 - # 这种情况下锁会自动释放,所以忽略错误 - pass - - # 成功后删除备份 - if backup_file and os.path.exists(backup_file): - os.remove(backup_file) - - except Exception as e: - # 恢复原文件(如果存在备份) - if backup_file and os.path.exists(backup_file): - try: - if not os.path.exists(config_path): - os.rename(backup_file, config_path) - except Exception as restore_error: - raise CredentialException( - f"Failed to restore original file after write error: {restore_error}. Original error: {e}") - - raise e - - async def _update_oauth_tokens_async(self, refresh_token: str, access_token: str, access_key: str, secret: str, - security_token: str, access_token_expire: int, sts_expire: int) -> None: - """异步更新 OAuth 令牌并写回配置文件""" - - try: - with self._file_lock: - cfg_path = self._profile_file - conf = await _load_config_async(cfg_path) - - # 找到当前 profile 并更新 OAuth 令牌 - profile_name = self._profile_name - if not profile_name: - profile_name = conf.get('current') - profiles = conf.get('profiles', []) - profile_tag = False - for profile in profiles: - if profile.get('name') == profile_name: - profile_tag = True - # 更新 OAuth 相关字段 - profile['oauth_refresh_token'] = refresh_token - profile['oauth_access_token'] = access_token - profile['oauth_access_token_expire'] = access_token_expire - # 更新 STS 凭据 - profile['access_key_id'] = access_key - profile['access_key_secret'] = secret - profile['sts_token'] = security_token - profile['sts_expiration'] = sts_expire - break - - if not profile_tag: - raise CredentialException(f"Profile '{profile_name}' not found in config file") - - # 异步写回配置文件 - await self._write_configuration_to_file_with_lock_async(cfg_path, conf) - - except Exception as e: - raise CredentialException(f"failed to update OAuth tokens in config file: {e}") - - def _get_oauth_token_update_callback_async(self) -> OAuthTokenUpdateCallbackAsync: - """获取异步 OAuth 令牌更新回调函数""" - return lambda refresh_token, access_token, access_key, secret, security_token, access_token_expire, sts_expire: self._update_oauth_tokens_async( - refresh_token, access_token, access_key, secret, security_token, access_token_expire, sts_expire - ) diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/cloud_sso.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/cloud_sso.py deleted file mode 100644 index 77ff6f0..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/cloud_sso.py +++ /dev/null @@ -1,160 +0,0 @@ -import calendar -import json -import time -from urllib.parse import urlparse - -from alibabacloud_credentials.provider.refreshable import Credentials, RefreshResult, RefreshCachedSupplier -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - - -def _get_stale_time(expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - -class CloudSSOCredentialsProvider(ICredentialsProvider): - DEFAULT_CONNECT_TIMEOUT = 5000 - DEFAULT_READ_TIMEOUT = 10000 - - def __init__(self, *, - sign_in_url: str = None, - account_id: str = None, - access_config: str = None, - access_token: str = None, - access_token_expire: int = 0, - http_options: HttpOptions = None): - - self._sign_in_url = sign_in_url - self._account_id = account_id - self._access_config = access_config - self._access_token = access_token - 
self._access_token_expire = access_token_expire - - if self._access_token is None or self._access_token_expire == 0 or self._access_token_expire - int( - time.mktime(time.localtime())) <= 0: - raise ValueError( - 'CloudSSO access token is empty or expired, please re-login with cli') - if self._sign_in_url is None or self._account_id is None or self._access_config is None: - raise ValueError( - 'CloudSSO sign in url or account id or access config is empty') - - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else CloudSSOCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else CloudSSOCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpsProxy': self._http_options.proxy - } - self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _refresh_credentials(self) -> RefreshResult[Credentials]: - r = urlparse(self._sign_in_url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme - tea_request.method = 'POST' - tea_request.pathname = '/cloud-credentials' - - tea_request.body = json.dumps({ - 'AccountId': self._account_id, - 'AccessConfigurationId': self._access_config, - }) - - tea_request.headers['Accept'] = 'application/json' - tea_request.headers['Content-Type'] = 'application/json' - tea_request.headers['Authorization'] = f'Bearer {self._access_token}' - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from sso, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'CloudCredential' not in dic: - raise CredentialException( - f'error retrieving credentials from sso result: {response.body.decode("utf-8")}') - - cre = dic.get('CloudCredential') - if 'AccessKeyId' not in cre or 'AccessKeySecret' not in cre or 'SecurityToken' not in cre: - raise CredentialException( - f'error retrieving credentials from sso result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('AccessKeyId'), - access_key_secret=cre.get('AccessKeySecret'), - security_token=cre.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - async def _refresh_credentials_async(self) -> RefreshResult[Credentials]: - r = urlparse(self._sign_in_url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme - tea_request.method = 'POST' - tea_request.pathname = '/cloud-credentials' - - tea_request.body = json.dumps({ - 'AccountId': self._account_id, - 'AccessConfigurationId': self._access_config, - }) - - 
tea_request.headers['Accept'] = 'application/json' - tea_request.headers['Content-Type'] = 'application/json' - tea_request.headers['Authorization'] = f'Bearer {self._access_token}' - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from sso, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'CloudCredential' not in dic: - raise CredentialException( - f'error retrieving credentials from sso result: {response.body.decode("utf-8")}') - - cre = dic.get('CloudCredential') - if 'AccessKeyId' not in cre or 'AccessKeySecret' not in cre or 'SecurityToken' not in cre: - raise CredentialException( - f'error retrieving credentials from sso result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('AccessKeyId'), - access_key_secret=cre.get('AccessKeySecret'), - security_token=cre.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - def get_provider_name(self) -> str: - return 'cloud_sso' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/default.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/default.py deleted file mode 100644 index a2aa73a..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/default.py +++ /dev/null @@ -1,89 +0,0 @@ -from . import EnvironmentVariableCredentialsProvider, EcsRamRoleCredentialsProvider, \ - OIDCRoleArnCredentialsProvider, URLCredentialsProvider, CLIProfileCredentialsProvider, ProfileCredentialsProvider - -from alibabacloud_credentials.provider.refreshable import Credentials -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.exceptions import CredentialException - - -class DefaultCredentialsProvider(ICredentialsProvider): - - def __init__(self, *, - reuse_last_provider_enabled: bool = True): - - self.__reuse_last_provider_enabled = reuse_last_provider_enabled - self.__last_used_provider = None - - self.__providers_chain = [ - EnvironmentVariableCredentialsProvider() - ] - if au.enable_oidc_credential: - self.__providers_chain.append(OIDCRoleArnCredentialsProvider()) - - self.__providers_chain.append(CLIProfileCredentialsProvider()) - self.__providers_chain.append(ProfileCredentialsProvider()) - if au.environment_ecs_metadata_disabled.lower() != 'true': - self.__providers_chain.append(EcsRamRoleCredentialsProvider()) - - if au.environment_credentials_uri is not None and au.environment_credentials_uri != '': - self.__providers_chain.append(URLCredentialsProvider()) - - def get_credentials(self) -> Credentials: - if self.__reuse_last_provider_enabled and self.__last_used_provider is not None: - credentials = self.__last_used_provider.get_credentials() - return Credentials( - access_key_id=credentials.get_access_key_id(), - access_key_secret=credentials.get_access_key_secret(), - security_token=credentials.get_security_token(), - provider_name=f'{self.get_provider_name()}/{credentials.get_provider_name()}' - ) - - error_messages = [] - for provider in self.__providers_chain: - try: - credentials = 
provider.get_credentials() - if credentials is not None: - self.__last_used_provider = provider - return Credentials( - access_key_id=credentials.get_access_key_id(), - access_key_secret=credentials.get_access_key_secret(), - security_token=credentials.get_security_token(), - provider_name=f'{self.get_provider_name()}/{credentials.get_provider_name()}' - ) - except Exception as e: - error_messages.append(f'{type(provider).__name__}: {str(e)}') - - raise CredentialException( - f'unable to load credentials from any of the providers in the chain: {error_messages}') - - async def get_credentials_async(self) -> Credentials: - if self.__reuse_last_provider_enabled and self.__last_used_provider is not None: - credentials = await self.__last_used_provider.get_credentials_async() - return Credentials( - access_key_id=credentials.get_access_key_id(), - access_key_secret=credentials.get_access_key_secret(), - security_token=credentials.get_security_token(), - provider_name=f'{self.get_provider_name()}/{credentials.get_provider_name()}' - ) - - error_messages = [] - for provider in self.__providers_chain: - try: - credentials = await provider.get_credentials_async() - if credentials is not None: - self.__last_used_provider = provider - return Credentials( - access_key_id=credentials.get_access_key_id(), - access_key_secret=credentials.get_access_key_secret(), - security_token=credentials.get_security_token(), - provider_name=f'{self.get_provider_name()}/{credentials.get_provider_name()}' - ) - except Exception as e: - error_messages.append(f'{type(provider).__name__}: {str(e)}') - - raise CredentialException( - f'unable to load credentials from any of the providers in the chain: {error_messages}') - - def get_provider_name(self) -> str: - return 'default' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/ecs_ram_role.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/ecs_ram_role.py deleted file mode 100644 index a667cc1..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/ecs_ram_role.py +++ /dev/null @@ -1,253 +0,0 @@ -import calendar -import json -import time -import signal -import logging - -from alibabacloud_credentials.provider.refreshable import Credentials, RefreshResult, StaleValueBehavior, \ - RefreshCachedSupplier, NonBlocking -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from apscheduler.schedulers.background import BackgroundScheduler -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - -log = logging.getLogger('credentials') -log.setLevel(logging.INFO) -ch = logging.StreamHandler() -log.addHandler(ch) - - -class EcsRamRoleCredentialsProvider(ICredentialsProvider): - DEFAULT_METADATA_TOKEN_DURATION = 21600 - DEFAULT_CONNECT_TIMEOUT = 1000 - DEFAULT_READ_TIMEOUT = 1000 - - def __init__(self, *, - role_name: str = None, - disable_imds_v1: bool = None, - http_options: HttpOptions = None, - async_update_enabled: bool = True): - - if au.environment_ecs_metadata_disabled.lower() == 'true': - raise ValueError('IMDS credentials is disabled') - - self.__url_in_ecs_metadata = '/latest/meta-data/ram/security-credentials/' - self.__url_in_ecs_metadata_token = '/latest/api/token' - self.__ecs_metadata_fetch_error_msg = 'Failed to get RAM session credentials from ECS metadata service.' 
- self.__ecs_metadata_token_fetch_error_msg = 'Failed to get token from ECS Metadata Service.' - self.__metadata_service_host = '100.100.100.200' - self._should_refresh = False - - self._role_name = role_name if role_name is not None else au.environment_ecs_metadata - self._disable_imds_v1 = disable_imds_v1 if disable_imds_v1 is not None else au.environment_imds_v1_disabled.lower() == 'true' - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else EcsRamRoleCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else EcsRamRoleCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpProxy': self._http_options.proxy - } - - if async_update_enabled: - self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - stale_value_behavior=StaleValueBehavior.ALLOW, - prefetch_strategy=NonBlocking() - ) - - scheduler = BackgroundScheduler() - - def refresh_task(): - if self._should_refresh: - log.debug(f'Begin checking or refreshing credentials asynchronously') - self.get_credentials() - - scheduler.add_job(refresh_task, 'interval', minutes=1) - scheduler.start() - - def shutdown_handler(signum, frame): - log.debug(f'Shutting down scheduler...') - scheduler.shutdown(wait=False) - - signal.signal(signal.SIGINT, shutdown_handler) - signal.signal(signal.SIGTERM, shutdown_handler) - - else: - self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - stale_value_behavior=StaleValueBehavior.ALLOW - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _get_role_name(self, url: str = None) -> str: - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = self._get_metadata_token(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata - response = TeaCore.do_action(tea_request, self._runtime_options) - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + ' HttpCode=' + str(response.status_code)) - return response.body.decode('utf-8') - - async def _get_role_name_async(self, url: str = None) -> str: - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = await self._get_metadata_token_async(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + ' HttpCode=' + str(response.status_code)) - return response.body.decode('utf-8') - - def _get_metadata_token(self, url: str = None) -> str: - tea_request = ph.get_new_request() - tea_request.method = 'PUT' - tea_request.headers['host'] = url if url else self.__metadata_service_host - 
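# The ECS RAM role provider above talks to the instance metadata service in two steps:
# PUT /latest/api/token with a TTL header to obtain a short-lived metadata token, then a
# GET of the role's security credentials presenting that token. A standalone sketch of
# that flow with urllib, assuming it runs on an ECS instance that can reach
# 100.100.100.200; hosts, paths, and header names are taken from the code above.
import json
import urllib.request

METADATA_HOST = "http://100.100.100.200"


def fetch_ecs_credentials(role_name: str, token_ttl: int = 21600) -> dict:
    # Step 1: obtain a metadata token (hardened, IMDSv2-style access).
    token_req = urllib.request.Request(
        f"{METADATA_HOST}/latest/api/token",
        method="PUT",
        headers={"X-aliyun-ecs-metadata-token-ttl-seconds": str(token_ttl)},
    )
    with urllib.request.urlopen(token_req, timeout=1) as resp:
        token = resp.read().decode("utf-8")

    # Step 2: read the RAM session credentials for the role, presenting the token.
    cred_req = urllib.request.Request(
        f"{METADATA_HOST}/latest/meta-data/ram/security-credentials/{role_name}",
        headers={"X-aliyun-ecs-metadata-token": token},
    )
    with urllib.request.urlopen(cred_req, timeout=1) as resp:
        body = json.loads(resp.read().decode("utf-8"))

    if body.get("Code") != "Success":
        raise RuntimeError(f"metadata service returned: {body.get('Code')}")
    # body contains AccessKeyId, AccessKeySecret, SecurityToken and Expiration.
    return body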
tea_request.headers['X-aliyun-ecs-metadata-token-ttl-seconds'] = str( - EcsRamRoleCredentialsProvider.DEFAULT_METADATA_TOKEN_DURATION) - if not url: - tea_request.pathname = self.__url_in_ecs_metadata_token - try: - response = TeaCore.do_action(tea_request, self._runtime_options) - if response.status_code != 200: - raise CredentialException( - self.__ecs_metadata_token_fetch_error_msg + ' HttpCode=' + str(response.status_code)) - return response.body.decode('utf-8') - except Exception as e: - if self._disable_imds_v1: - raise e - return None - - async def _get_metadata_token_async(self, url: str = None) -> str: - tea_request = ph.get_new_request() - tea_request.method = 'PUT' - tea_request.headers['host'] = url if url else self.__metadata_service_host - tea_request.headers['X-aliyun-ecs-metadata-token-ttl-seconds'] = str( - EcsRamRoleCredentialsProvider.DEFAULT_METADATA_TOKEN_DURATION) - if not url: - tea_request.pathname = self.__url_in_ecs_metadata_token - try: - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - if response.status_code != 200: - raise CredentialException( - self.__ecs_metadata_token_fetch_error_msg + ' HttpCode=' + str(response.status_code)) - return response.body.decode('utf-8') - except Exception as e: - if self._disable_imds_v1: - raise e - return None - - def _refresh_credentials(self, url: str = None) -> RefreshResult[Credentials]: - role_name = self._role_name - if self._role_name is None or self._role_name == '': - role_name = self._get_role_name(url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = self._get_metadata_token(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata + role_name - # request - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + ' HttpCode=' + str(response.status_code)) - - dic = json.loads(response.body.decode('utf-8')) - content_code = dic.get('Code') - content_access_key_id = dic.get('AccessKeyId') - content_access_key_secret = dic.get('AccessKeySecret') - content_security_token = dic.get('SecurityToken') - content_expiration = dic.get('Expiration') - - if content_code != 'Success': - raise CredentialException(self.__ecs_metadata_fetch_error_msg) - - # 先转换为时间数组 - time_array = time.strptime(content_expiration, '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=content_access_key_id, - access_key_secret=content_access_key_secret, - security_token=content_security_token, - expiration=expiration, - provider_name=self.get_provider_name() - ) - self._should_refresh = True - return RefreshResult(value=credentials, - stale_time=self._get_stale_time(expiration), - prefetch_time=self._get_prefetch_time(expiration)) - - async def _refresh_credentials_async(self, url: str = None) -> RefreshResult[Credentials]: - role_name = self._role_name - if self._role_name is None: - role_name = await self._get_role_name_async(url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = await self._get_metadata_token_async(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = 
self.__url_in_ecs_metadata + role_name - - # request - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + ' HttpCode=' + str(response.status_code)) - - dic = json.loads(response.body.decode('utf-8')) - content_code = dic.get('Code') - content_access_key_id = dic.get('AccessKeyId') - content_access_key_secret = dic.get('AccessKeySecret') - content_security_token = dic.get('SecurityToken') - content_expiration = dic.get('Expiration') - - if content_code != 'Success': - raise CredentialException(self.__ecs_metadata_fetch_error_msg) - - # 先转换为时间数组 - time_array = time.strptime(content_expiration, '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=content_access_key_id, - access_key_secret=content_access_key_secret, - security_token=content_security_token, - expiration=expiration, - provider_name=self.get_provider_name() - ) - self._should_refresh = True - return RefreshResult(value=credentials, - stale_time=self._get_stale_time(expiration), - prefetch_time=self._get_prefetch_time(expiration)) - - def _get_stale_time(self, expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - def _get_prefetch_time(self, expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 5 * 60 - return int(time.mktime(time.localtime())) + 60 * 60 - - def get_provider_name(self) -> str: - return 'ecs_ram_role' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/env.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/env.py deleted file mode 100644 index a669480..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/env.py +++ /dev/null @@ -1,33 +0,0 @@ - -from alibabacloud_credentials.provider.refreshable import Credentials -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util -from alibabacloud_credentials.exceptions import CredentialException - - -class EnvironmentVariableCredentialsProvider(ICredentialsProvider): - - def get_credentials(self) -> Credentials: - - access_key_id = auth_util.environment_access_key_id - access_key_secret = auth_util.environment_access_key_secret - security_token = auth_util.environment_security_token - - if access_key_id is None or len(access_key_id) == 0: - raise CredentialException("Environment variable accessKeyId cannot be empty") - - if access_key_secret is None or len(access_key_secret) == 0: - raise CredentialException("Environment variable accessKeySecret cannot be empty") - - return Credentials( - access_key_id=access_key_id, - access_key_secret=access_key_secret, - security_token=security_token, - provider_name=self.get_provider_name() - ) - - async def get_credentials_async(self) -> Credentials: - return self.get_credentials() - - def get_provider_name(self) -> str: - return 'env' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/oauth.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/oauth.py deleted file mode 100644 index 81eb03e..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/oauth.py +++ /dev/null @@ -1,287 +0,0 @@ -import calendar -import json -import logging -import time -from urllib.parse import urlparse, urlencode -from typing import Callable, Optional - -from alibabacloud_credentials.provider.refreshable 
import Credentials, RefreshResult, RefreshCachedSupplier -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - -log = logging.getLogger('credentials') -log.setLevel(logging.INFO) -ch = logging.StreamHandler() -log.addHandler(ch) - -# OAuth 令牌更新回调函数类型 -OAuthTokenUpdateCallback = Callable[[str, str, str, str, str, int, int], None] -OAuthTokenUpdateCallbackAsync = Callable[[str, str, str, str, str, int, int], None] - - -def _get_stale_time(expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - -class OAuthCredentialsProvider(ICredentialsProvider): - DEFAULT_CONNECT_TIMEOUT = 5000 - DEFAULT_READ_TIMEOUT = 10000 - - def __init__(self, *, - client_id: str = None, - sign_in_url: str = None, - access_token: str = None, - access_token_expire: int = 0, - refresh_token: str = None, - http_options: HttpOptions = None, - token_update_callback: Optional[OAuthTokenUpdateCallback] = None, - token_update_callback_async: Optional[OAuthTokenUpdateCallbackAsync] = None): - - if not client_id: - raise ValueError('the ClientId is empty') - - if not sign_in_url: - raise ValueError('the url for sign-in is empty') - - if not refresh_token: - raise ValueError('OAuth access token is empty or expired, please re-login with cli') - - self._client_id = client_id - self._sign_in_url = sign_in_url - self._access_token = access_token - self._access_token_expire = access_token_expire - self._refresh_token = refresh_token - self._token_update_callback = token_update_callback - self._token_update_callback_async = token_update_callback_async - - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else OAuthCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else OAuthCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpsProxy': self._http_options.proxy - } - self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _try_refresh_oauth_token(self) -> None: - current_time = int(time.mktime(time.localtime())) - # 构建刷新令牌请求 - r = urlparse(self._sign_in_url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme - tea_request.method = 'POST' - tea_request.pathname = '/v1/token' - - # 设置请求体 - body_data = { - 'grant_type': 'refresh_token', - 'refresh_token': self._refresh_token, - 'client_id': self._client_id, - 'Timestamp': ph.get_iso_8061_date() - } - tea_request.body = urlencode(body_data) - tea_request.headers['Content-Type'] = 'application/x-www-form-urlencoded' - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException(f"failed to refresh OAuth token, status code: {response.status_code}, response: 
{response.body.decode('utf-8')}") - - # 解析响应 - dic = json.loads(response.body.decode('utf-8')) - if 'access_token' not in dic or 'refresh_token' not in dic: - raise CredentialException(f"failed to refresh OAuth token: {response.body.decode('utf-8')}") - - # 更新令牌 - new_access_token = dic.get('access_token') - new_refresh_token = dic.get('refresh_token') - expires_in = dic.get('expires_in', 3600) - new_access_token_expire = current_time + expires_in - - self._access_token = new_access_token - self._refresh_token = new_refresh_token - self._access_token_expire = new_access_token_expire - - async def _try_refresh_oauth_token_async(self) -> None: - current_time = int(time.mktime(time.localtime())) - # 构建刷新令牌请求 - r = urlparse(self._sign_in_url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme - tea_request.method = 'POST' - tea_request.pathname = '/v1/token' - - # 设置请求体 - body_data = { - 'grant_type': 'refresh_token', - 'refresh_token': self._refresh_token, - 'client_id': self._client_id, - 'Timestamp': ph.get_iso_8061_date() - } - tea_request.body = urlencode(body_data) - tea_request.headers['Content-Type'] = 'application/x-www-form-urlencoded' - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException(f"failed to refresh OAuth token, status code: {response.status_code}, response: {response.body.decode('utf-8')}") - - # 解析响应 - dic = json.loads(response.body.decode('utf-8')) - if 'access_token' not in dic or 'refresh_token' not in dic: - raise CredentialException(f"failed to refresh OAuth token: {response.body.decode('utf-8')}") - - # 更新令牌 - new_access_token = dic.get('access_token') - new_refresh_token = dic.get('refresh_token') - expires_in = dic.get('expires_in', 3600) - new_access_token_expire = current_time + expires_in - - self._access_token = new_access_token - self._refresh_token = new_refresh_token - self._access_token_expire = new_access_token_expire - - def _refresh_credentials(self) -> RefreshResult[Credentials]: - if self._access_token is None or self._access_token_expire <= 0 or self._access_token_expire - int( - time.mktime(time.localtime())) <= 180: - self._try_refresh_oauth_token() - - r = urlparse(self._sign_in_url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme - tea_request.method = 'POST' - tea_request.pathname = '/v1/exchange' - - tea_request.headers['Content-Type'] = 'application/json' - tea_request.headers['Authorization'] = f'Bearer {self._access_token}' - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f"error refreshing credentials from OAuth, http_code: {response.status_code}, result: {response.body.decode('utf-8')}") - - dic = json.loads(response.body.decode('utf-8')) - if 'error' in dic: - raise CredentialException( - f"error retrieving credentials from OAuth result: {response.body.decode('utf-8')}") - - if 'AccessKeyId' not in dic or 'AccessKeySecret' not in dic or 'SecurityToken' not in dic: - raise CredentialException( - f"error retrieving credentials from OAuth result: {response.body.decode('utf-8')}") - - # 先转换为时间数组 - time_array = time.strptime(dic.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - 
access_key_id=dic.get('AccessKeyId'), - access_key_secret=dic.get('AccessKeySecret'), - security_token=dic.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - - # 调用令牌更新回调函数 - if self._token_update_callback: - try: - self._token_update_callback( - self._refresh_token, - self._access_token, - credentials.get_access_key_id(), - credentials.get_access_key_secret(), - credentials.get_security_token(), - self._access_token_expire, - expiration - ) - except Exception as e: - log.warning(f'failed to update OAuth tokens in config file: {e}') - - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - async def _refresh_credentials_async(self) -> RefreshResult[Credentials]: - if self._access_token is None or self._access_token_expire <= 0 or self._access_token_expire - int( - time.mktime(time.localtime())) <= 180: - await self._try_refresh_oauth_token_async() - - r = urlparse(self._sign_in_url) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme - tea_request.method = 'POST' - tea_request.pathname = '/v1/exchange' - - tea_request.headers['Content-Type'] = 'application/json' - tea_request.headers['Authorization'] = f'Bearer {self._access_token}' - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f"error refreshing credentials from OAuth, http_code: {response.status_code}, result: {response.body.decode('utf-8')}") - - dic = json.loads(response.body.decode('utf-8')) - if 'error' in dic: - raise CredentialException( - f"error retrieving credentials from OAuth result: {response.body.decode('utf-8')}") - - if 'AccessKeyId' not in dic or 'AccessKeySecret' not in dic or 'SecurityToken' not in dic: - raise CredentialException( - f"error retrieving credentials from OAuth result: {response.body.decode('utf-8')}") - - # 先转换为时间数组 - time_array = time.strptime(dic.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=dic.get('AccessKeyId'), - access_key_secret=dic.get('AccessKeySecret'), - security_token=dic.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - - if self._token_update_callback_async: - try: - await self._token_update_callback_async( - self._refresh_token, - self._access_token, - credentials.get_access_key_id(), - credentials.get_access_key_secret(), - credentials.get_security_token(), - self._access_token_expire, - expiration - ) - except Exception as e: - log.warning(f'failed to update OAuth tokens in config file: {e}') - - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - def _get_client_id(self) -> str: - """获取客户端ID""" - return self._client_id - - def get_provider_name(self) -> str: - return 'oauth' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/oidc.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/oidc.py deleted file mode 100644 index 3c92555..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/oidc.py +++ /dev/null @@ -1,206 +0,0 @@ -import calendar -import json -import time -import aiofiles - -from alibabacloud_credentials.provider.refreshable import Credentials, RefreshResult, RefreshCachedSupplier -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from alibabacloud_credentials_api 
import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - - -async def _get_token_async(file_path: str) -> str: - async with aiofiles.open(file_path, mode='r') as file: - token = await file.read() - return token - - -def _get_token(file_path: str) -> str: - with open(file_path, mode='r') as file: - token = file.read() - return token - - -def _get_stale_time(expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - -class OIDCRoleArnCredentialsProvider(ICredentialsProvider): - DEFAULT_DURATION_SECONDS = 3600 - DEFAULT_CONNECT_TIMEOUT = 5000 - DEFAULT_READ_TIMEOUT = 10000 - - def __init__(self, *, - role_arn: str = None, - oidc_provider_arn: str = None, - oidc_token_file_path: str = None, - role_session_name: str = None, - duration_seconds: int = DEFAULT_DURATION_SECONDS, - policy: str = None, - sts_region_id: str = None, - sts_endpoint: str = None, - enable_vpc: bool = None, - http_options: HttpOptions = None): - - self._role_arn = role_arn or au.environment_role_arn - self._oidc_provider_arn = oidc_provider_arn or au.environment_oidc_provider_arn - self._oidc_token_file_path = oidc_token_file_path or au.environment_oidc_token_file - self._role_session_name = role_session_name or au.environment_role_session_name - self._duration_seconds = duration_seconds - self._policy = policy - - if self._role_session_name is None or self._role_session_name == '': - self._role_session_name = f'credentials-python-{str(int(time.mktime(time.localtime())))}' - if self._duration_seconds is None: - self._duration_seconds = self.DEFAULT_DURATION_SECONDS - if self._duration_seconds < 900: - raise ValueError('session duration should be in the range of 900s - max session duration') - if self._role_arn is None or self._role_arn == '': - raise ValueError('role_arn or environment variable ALIBABA_CLOUD_ROLE_ARN cannot be empty') - if self._oidc_provider_arn is None or self._oidc_provider_arn == '': - raise ValueError( - 'oidc_provider_arn or environment variable ALIBABA_CLOUD_OIDC_PROVIDER_ARN cannot be empty') - if self._oidc_token_file_path is None or self._oidc_token_file_path == '': - raise ValueError( - 'oidc_token_file_path or environment variable ALIBABA_CLOUD_OIDC_TOKEN_FILE cannot be empty') - - if sts_endpoint is not None and sts_endpoint != '': - self._sts_endpoint = sts_endpoint - else: - if enable_vpc is not None: - prefix = 'sts-vpc' if enable_vpc else 'sts' - else: - prefix = 'sts-vpc' if au.environment_enable_vpc.lower() == 'true' else 'sts' - if sts_region_id is not None and sts_region_id != '': - self._sts_endpoint = f'{prefix}.{sts_region_id}.aliyuncs.com' - elif au.environment_sts_region is not None and au.environment_sts_region != '': - self._sts_endpoint = f'{prefix}.{au.environment_sts_region}.aliyuncs.com' - else: - self._sts_endpoint = 'sts.aliyuncs.com' - - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else OIDCRoleArnCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else OIDCRoleArnCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpsProxy': self._http_options.proxy - } - self._credentials_cache = 
RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _refresh_credentials(self) -> RefreshResult[Credentials]: - token = _get_token(self._oidc_token_file_path) - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRoleWithOIDC', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self._duration_seconds), - 'RoleArn': self._role_arn, - 'OIDCProviderArn': self._oidc_provider_arn, - 'OIDCToken': token, - 'RoleSessionName': self._role_session_name, - 'Timestamp': ph.get_iso_8061_date() - } - - if self._policy is not None and self._policy != '': - tea_request.query['Policy'] = self._policy - - tea_request.protocol = 'https' - tea_request.headers['host'] = self._sts_endpoint - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from oidc_role_arn, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'Credentials' not in dic: - raise CredentialException( - f'error retrieving credentials from oidc_role_arn result: {response.body.decode("utf-8")}') - - cre = dic.get('Credentials') - if 'AccessKeyId' not in cre or 'AccessKeySecret' not in cre or 'SecurityToken' not in cre: - raise CredentialException( - f'error retrieving credentials from oidc_role_arn result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('AccessKeyId'), - access_key_secret=cre.get('AccessKeySecret'), - security_token=cre.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - async def _refresh_credentials_async(self) -> RefreshResult[Credentials]: - token = await _get_token_async(self._oidc_token_file_path) - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRoleWithOIDC', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self._duration_seconds), - 'RoleArn': self._role_arn, - 'OIDCProviderArn': self._oidc_provider_arn, - 'OIDCToken': token, - 'RoleSessionName': self._role_session_name, - 'Timestamp': ph.get_iso_8061_date() - } - - if self._policy is not None and self._policy != '': - tea_request.query['Policy'] = self._policy - - tea_request.protocol = 'https' - tea_request.headers['host'] = self._sts_endpoint - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from oidc_role_arn, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'Credentials' not in dic: - raise CredentialException( - f'error retrieving credentials from oidc_role_arn result: {response.body.decode("utf-8")}') - - cre = dic.get('Credentials') - if 'AccessKeyId' not in cre or 'AccessKeySecret' not in cre or 'SecurityToken' not in cre: - raise CredentialException( - f'error retrieving 
credentials from oidc_role_arn result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('AccessKeyId'), - access_key_secret=cre.get('AccessKeySecret'), - security_token=cre.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - def get_provider_name(self) -> str: - return 'oidc_role_arn' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/profile.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/profile.py deleted file mode 100644 index 67e2fa0..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/profile.py +++ /dev/null @@ -1,144 +0,0 @@ -import os -import configparser -from typing import Dict - -import aiofiles - -from alibabacloud_credentials.provider import StaticAKCredentialsProvider, EcsRamRoleCredentialsProvider, \ - RamRoleArnCredentialsProvider, OIDCRoleArnCredentialsProvider, RsaKeyPairCredentialsProvider -from alibabacloud_credentials.provider.refreshable import Credentials -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_constant as ac -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.exceptions import CredentialException - - -async def _load_ini_async(file_path: str) -> Dict[str, Dict[str, str]]: - config = configparser.ConfigParser() - async with aiofiles.open(file_path, mode='r') as f: - content = await f.read() - config.read_string(content) - ini_map = {} - for section in config.sections(): - option = {} - for key, value in config.items(section): - if '#' in value: - option[key] = value.split('#')[0].strip() - else: - option[key] = value.strip() - ini_map[section] = option - return ini_map - - -def _load_ini(file_path: str) -> Dict[str, Dict[str, str]]: - config = configparser.ConfigParser() - config.read(file_path, encoding='utf-8') - ini_map = {} - for section in config.sections(): - option = {} - for key, value in config.items(section): - if '#' in value: - option[key] = value.split('#')[0].strip() - else: - option[key] = value.strip() - ini_map[section] = option - return ini_map - - -def _get_default_file() -> str: - return os.path.join(ac.HOME, ".alibabacloud/credentials.ini") - - -class ProfileCredentialsProvider(ICredentialsProvider): - - def __init__(self, *, - profile_file: str = None, - profile_name: str = None): - self._profile_file = profile_file or au.environment_credentials_file - self._profile_name = profile_name or au.client_type - self.__innerProvider = None - - if self._profile_file is None or self._profile_file == '': - self._profile_file = _get_default_file() - - def _should_reload_credentials_provider(self) -> bool: - if self.__innerProvider is None: - return True - return False - - def get_credentials(self) -> Credentials: - if self._should_reload_credentials_provider(): - ini_map = _load_ini(self._profile_file) - section = ini_map.get(self._profile_name) - if section is None: - raise CredentialException(f'failed to get credential from credentials file: ${self._profile_file}') - self.__innerProvider = self._get_credentials_provider(section) - - cre = self.__innerProvider.get_credentials() - credentials = Credentials( - access_key_id=cre.get_access_key_id(), - 
access_key_secret=cre.get_access_key_secret(), - security_token=cre.get_security_token(), - provider_name=f'{self.get_provider_name()}/{cre.get_provider_name()}' - ) - return credentials - - async def get_credentials_async(self) -> Credentials: - if self._should_reload_credentials_provider(): - ini_map = await _load_ini_async(self._profile_file) - section = ini_map.get(self._profile_name) - if section is None: - raise CredentialException(f'failed to get credential from credentials file: ${self._profile_file}') - self.__innerProvider = self._get_credentials_provider(section) - - cre = await self.__innerProvider.get_credentials_async() - credentials = Credentials( - access_key_id=cre.get_access_key_id(), - access_key_secret=cre.get_access_key_secret(), - security_token=cre.get_security_token(), - provider_name=f'{self.get_provider_name()}/{cre.get_provider_name()}' - ) - return credentials - - def _get_credentials_provider(self, section: Dict) -> ICredentialsProvider: - - config_type = section.get(ac.INI_TYPE) - if 'access_key' == config_type: - return StaticAKCredentialsProvider( - access_key_id=section.get('access_key_id'), - access_key_secret=section.get('access_key_secret') - ) - elif 'ram_role_arn' == config_type: - pre_provider = StaticAKCredentialsProvider( - access_key_id=section.get('access_key_id'), - access_key_secret=section.get('access_key_secret') - ) - return RamRoleArnCredentialsProvider( - credentials_provider=pre_provider, - role_arn=section.get('role_arn'), - role_session_name=section.get('role_session_name'), - policy=section.get('policy') - ) - elif 'oidc_role_arn' == config_type: - return OIDCRoleArnCredentialsProvider( - role_arn=section.get('role_arn'), - oidc_provider_arn=section.get('oidc_provider_arn'), - oidc_token_file_path=section.get('oidc_token_file_path'), - role_session_name=section.get('role_session_name'), - policy=section.get('policy') - ) - elif 'ecs_ram_role' == config_type: - return EcsRamRoleCredentialsProvider( - role_name=section.get('role_name') - ) - elif 'rsa_key_pair' == config_type: - return RsaKeyPairCredentialsProvider( - public_key_id=section.get('public_key_id'), - private_key_file=section.get('private_key_file') - ) - else: - raise CredentialException( - f'unsupported credential type {config_type} from credentials file {self._profile_file}') - - def get_provider_name(self) -> str: - return 'profile' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/ram_role_arn.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/ram_role_arn.py deleted file mode 100644 index 29eb346..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/ram_role_arn.py +++ /dev/null @@ -1,234 +0,0 @@ -import calendar -import json -import time - -from alibabacloud_credentials.provider.refreshable import Credentials, RefreshResult, RefreshCachedSupplier -from alibabacloud_credentials.provider import StaticAKCredentialsProvider, StaticSTSCredentialsProvider -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - - -def _get_stale_time(expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - -class RamRoleArnCredentialsProvider(ICredentialsProvider): - 
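# Every provider in these deleted modules converts the STS 'Expiration' string to a Unix
# timestamp and treats the credential as stale 15 minutes before it expires. A small
# standalone helper extracting that recurring pattern (a sketch, not the library's API):
import calendar
import time


def expiration_to_stale_time(expiration_str: str) -> tuple[int, int]:
    """Return (expiration_ts, stale_ts) for a UTC timestamp like '2030-01-01T00:00:00Z'."""
    # The Expiration field is UTC, so parse with strptime and convert via calendar.timegm;
    # time.mktime would wrongly apply the local timezone.
    expiration = calendar.timegm(time.strptime(expiration_str, "%Y-%m-%dT%H:%M:%SZ"))
    stale_time = expiration - 15 * 60  # refresh 15 minutes before the credential expires
    return expiration, stale_time

# Usage sketch: expiration_to_stale_time("2030-01-01T00:00:00Z")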
DEFAULT_DURATION_SECONDS = 3600 - DEFAULT_CONNECT_TIMEOUT = 5000 - DEFAULT_READ_TIMEOUT = 10000 - - def __init__(self, *, - access_key_id: str = None, - access_key_secret: str = None, - security_token: str = None, - credentials_provider: ICredentialsProvider = None, - role_arn: str = None, - role_session_name: str = None, - duration_seconds: int = DEFAULT_DURATION_SECONDS, - policy: str = None, - external_id: str = None, - sts_region_id: str = None, - sts_endpoint: str = None, - enable_vpc: bool = None, - http_options: HttpOptions = None): - - if credentials_provider is not None: - self._credentials_provider = credentials_provider - elif security_token is not None and security_token != '': - self._credentials_provider = StaticSTSCredentialsProvider( - access_key_id=access_key_id, - access_key_secret=access_key_secret, - security_token=security_token - ) - else: - self._credentials_provider = StaticAKCredentialsProvider( - access_key_id=access_key_id, - access_key_secret=access_key_secret, - ) - - self._role_arn = role_arn or au.environment_role_arn - self._role_session_name = role_session_name or au.environment_role_session_name - self._duration_seconds = duration_seconds - self._policy = policy - self._external_id = external_id - - if self._role_session_name is None or self._role_session_name == '': - self._role_session_name = f'credentials-python-{str(int(time.mktime(time.localtime())))}' - if self._duration_seconds is None: - self._duration_seconds = self.DEFAULT_DURATION_SECONDS - if self._duration_seconds < 900: - raise ValueError('session duration should be in the range of 900s - max session duration') - if self._role_arn is None or self._role_arn == '': - raise ValueError('role_arn or environment variable ALIBABA_CLOUD_ROLE_ARN cannot be empty') - - if sts_endpoint is not None and sts_endpoint != '': - self._sts_endpoint = sts_endpoint - else: - if enable_vpc is not None: - prefix = 'sts-vpc' if enable_vpc else 'sts' - else: - prefix = 'sts-vpc' if au.environment_enable_vpc.lower() == 'true' else 'sts' - if sts_region_id is not None and sts_region_id != '': - self._sts_endpoint = f'{prefix}.{sts_region_id}.aliyuncs.com' - elif au.environment_sts_region is not None and au.environment_sts_region != '': - self._sts_endpoint = f'{prefix}.{au.environment_sts_region}.aliyuncs.com' - else: - self._sts_endpoint = 'sts.aliyuncs.com' - - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else RamRoleArnCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else RamRoleArnCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpsProxy': self._http_options.proxy - } - self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _refresh_credentials(self) -> RefreshResult[Credentials]: - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRole', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self._duration_seconds), - 'RoleArn': self._role_arn, - 'RoleSessionName': self._role_session_name, - 
'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0', - 'Timestamp': ph.get_iso_8061_date(), - 'SignatureNonce': ph.get_uuid() - } - - if self._policy is not None and self._policy != '': - tea_request.query['Policy'] = self._policy - - if self._external_id is not None and self._external_id != '': - tea_request.query['ExternalId'] = self._external_id - - pre_credentials = self._credentials_provider.get_credentials() - if pre_credentials is None: - raise CredentialException('unable to load original credentials from the provider in RAM role arn') - - tea_request.query['AccessKeyId'] = pre_credentials.get_access_key_id() - security_token = pre_credentials.get_security_token() - if security_token is not None and security_token != '': - tea_request.query['SecurityToken'] = security_token - - string_to_sign = ph.compose_string_to_sign('GET', tea_request.query) - signature = ph.sign_string(string_to_sign, pre_credentials.get_access_key_secret() + '&') - tea_request.query['Signature'] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = self._sts_endpoint - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from ram_role_arn, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'Credentials' not in dic: - raise CredentialException( - f'error retrieving credentials from ram_role_arn result: {response.body.decode("utf-8")}') - - cre = dic.get('Credentials') - if 'AccessKeyId' not in cre or 'AccessKeySecret' not in cre or 'SecurityToken' not in cre: - raise CredentialException( - f'error retrieving credentials from ram_role_arn result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('AccessKeyId'), - access_key_secret=cre.get('AccessKeySecret'), - security_token=cre.get('SecurityToken'), - expiration=expiration, - provider_name=f'{self.get_provider_name()}/{pre_credentials.get_provider_name()}' - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - async def _refresh_credentials_async(self) -> RefreshResult[Credentials]: - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRole', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self._duration_seconds), - 'RoleArn': self._role_arn, - 'RoleSessionName': self._role_session_name, - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0', - 'Timestamp': ph.get_iso_8061_date(), - 'SignatureNonce': ph.get_uuid() - } - - if self._policy is not None and self._policy != '': - tea_request.query['Policy'] = self._policy - - if self._external_id is not None and self._external_id != '': - tea_request.query['ExternalId'] = self._external_id - - pre_credentials = await self._credentials_provider.get_credentials_async() - if pre_credentials is None: - raise CredentialException('unable to load original credentials from the provider in RAM role arn') - - tea_request.query['AccessKeyId'] = pre_credentials.get_access_key_id() - security_token = pre_credentials.get_security_token() - if security_token is not None and security_token != '': - tea_request.query['SecurityToken'] = security_token - - string_to_sign = ph.compose_string_to_sign('GET', tea_request.query) - signature = 
ph.sign_string(string_to_sign, pre_credentials.get_access_key_secret() + '&') - tea_request.query['Signature'] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = self._sts_endpoint - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from ram_role_arn, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'Credentials' not in dic: - raise CredentialException( - f'error retrieving credentials from ram_role_arn result: {response.body.decode("utf-8")}') - - cre = dic.get('Credentials') - if 'AccessKeyId' not in cre or 'AccessKeySecret' not in cre or 'SecurityToken' not in cre: - raise CredentialException( - f'error retrieving credentials from ram_role_arn result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('AccessKeyId'), - access_key_secret=cre.get('AccessKeySecret'), - security_token=cre.get('SecurityToken'), - expiration=expiration, - provider_name=f'{self.get_provider_name()}/{pre_credentials.get_provider_name()}' - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - def get_provider_name(self) -> str: - return 'ram_role_arn' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/refreshable.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/refreshable.py deleted file mode 100644 index 967024e..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/refreshable.py +++ /dev/null @@ -1,279 +0,0 @@ -import random -import asyncio -import threading -import logging -import time -import atexit -from datetime import datetime -from enum import Enum -from typing import Callable, Generic, TypeVar, Coroutine, Any -from threading import Semaphore -from concurrent.futures.thread import ThreadPoolExecutor - -from alibabacloud_credentials.exceptions import CredentialException -from alibabacloud_credentials_api import ICredentials - -log = logging.getLogger('credentials') -log.setLevel(logging.INFO) -ch = logging.StreamHandler() -log.addHandler(ch) - -T = TypeVar('T') -INT64_MAX = 2 ** 63 - 1 -MAX_CONCURRENT_REFRESHES = 100 -CONCURRENT_REFRESH_LEASES = Semaphore(MAX_CONCURRENT_REFRESHES) -EXECUTOR = ThreadPoolExecutor(max_workers=INT64_MAX, thread_name_prefix='non-blocking-refresh') - - -def _shutdown_handler(): - EXECUTOR.shutdown(wait=False) - - -atexit.register(_shutdown_handler) - - -def _jitter_time(now: int, jitter_start: int, jitter_end: int) -> int: - jitter_amount = random.randint(jitter_start, jitter_end) - return now + jitter_amount - - -def _max_stale_failure_jitter(num_failures: int) -> int: - backoff_millis = max(10 * 1000, (1 << num_failures - 1) * 100) - return backoff_millis - - -class Credentials(ICredentials): - def __init__(self, *, - access_key_id: str = None, - access_key_secret: str = None, - security_token: str = None, - expiration: int = None, - provider_name: str = None): - self._access_key_id = access_key_id - self._access_key_secret = access_key_secret - self._security_token = security_token - self._expiration = expiration - self._provider_name = provider_name - - def get_access_key_id(self) -> str: - return self._access_key_id - - def get_access_key_secret(self) -> str: - return 
self._access_key_secret - - def get_security_token(self) -> str: - return self._security_token - - def get_expiration(self) -> int: - return self._expiration - - def get_provider_name(self) -> str: - return self._provider_name - - -class StaleValueBehavior(Enum): - """ - Strictly treat the stale time. Never return a stale cached value (except when the supplier returns an expired - value, in which case the supplier will return the value but only for a very short period of time to prevent - overloading the underlying supplier). - """ - STRICT = 0 - """ - Allow stale values to be returned from the cache. Value retrieval will never fail, as long as the cache has - succeeded when calling the underlying supplier at least once. - """ - ALLOW = 1 - - -class RefreshResult(Generic[T]): - def __init__(self, *, - value: T, - stale_time: int = INT64_MAX, - prefetch_time: int = INT64_MAX): - self._value = value - self._stale_time = stale_time - self._prefetch_time = prefetch_time - - def value(self) -> T: - return self._value - - def stale_time(self) -> int: - return self._stale_time - - def prefetch_time(self) -> int: - return self._prefetch_time - - -class PrefetchStrategy: - def prefetch(self, action: Callable): - raise NotImplementedError - - async def prefetch_async(self, action: Callable): - raise NotImplementedError - - -class NonBlocking(PrefetchStrategy): - - def prefetch(self, action: Callable): - if not CONCURRENT_REFRESH_LEASES.acquire(False): - log.warning('Skipping a background refresh task because there are too many other tasks running.') - return - - try: - EXECUTOR.submit(action) - except KeyboardInterrupt: - _shutdown_handler() - except Exception as t: - log.warning(f'Exception occurred when submitting background task.', exc_info=True) - finally: - CONCURRENT_REFRESH_LEASES.release() - - async def prefetch_async(self, action: Callable): - def run_asyncio_loop(): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - loop.run_until_complete(action()) - loop.close() - - self.prefetch(run_asyncio_loop) - - -class OneCallerBlocks(PrefetchStrategy): - def prefetch(self, action: Callable): - action() - - async def prefetch_async(self, action: Callable): - await action() - - -class RefreshCachedSupplier(Generic[T]): - STALE_TIME = 15 * 60 # seconds - REFRESH_BLOCKING_MAX_WAIT = 5 # seconds - - def __init__(self, refresh_callable: Callable[[], RefreshResult[T]], - refresh_callable_async: Callable[[], Coroutine[Any, Any, RefreshResult[T]]], - stale_value_behavior: StaleValueBehavior = StaleValueBehavior.STRICT, - prefetch_strategy: PrefetchStrategy = OneCallerBlocks()): - - self._refresh_callable = refresh_callable - self._refresh_callable_async = refresh_callable_async - self._stale_value_behavior = stale_value_behavior - self._prefetch_strategy = prefetch_strategy - self._consecutive_refresh_failures = 0 - self._cached_value = None - self._refresh_lock = threading.Lock() - - def _sync_call(self) -> T: - if self._cache_is_stale(): - log.debug('Refreshing synchronously') - self._refresh_cache() - elif self._should_initiate_cache_prefetch(): - log.debug(f'Prefetching using strategy: {self._prefetch_strategy.__class__.__name__}') - self._prefetch_cache() - return self._cached_value.value() - - async def _async_call(self) -> T: - if self._cache_is_stale(): - log.debug('Refreshing synchronously') - await self._refresh_cache_async() - elif self._should_initiate_cache_prefetch(): - log.debug(f'Prefetching using strategy: {self._prefetch_strategy.__class__.__name__}') - await 
self._prefetch_cache_async() - return self._cached_value.value() - - def _cache_is_stale(self) -> bool: - if self._cached_value is None: - return True - return int(time.mktime(time.localtime())) >= self._cached_value.stale_time() - - def _should_initiate_cache_prefetch(self) -> bool: - if self._cached_value is None: - return True - return int(time.mktime(time.localtime())) >= self._cached_value.prefetch_time() - - def _prefetch_cache(self): - self._prefetch_strategy.prefetch(self._refresh_cache) - - def _refresh_cache(self): - acquired = self._refresh_lock.acquire(timeout=RefreshCachedSupplier.REFRESH_BLOCKING_MAX_WAIT) - try: - if self._cache_is_stale() or self._should_initiate_cache_prefetch(): - try: - self._cached_value = self._handle_fetched_success(self._refresh_callable()) - except Exception as ex: - self._cached_value = self._handle_fetched_failure(ex) - finally: - if acquired: - self._refresh_lock.release() - - async def _prefetch_cache_async(self): - await self._prefetch_strategy.prefetch_async(self._refresh_cache_async) - - async def _refresh_cache_async(self): - acquired = self._refresh_lock.acquire(timeout=RefreshCachedSupplier.REFRESH_BLOCKING_MAX_WAIT) - try: - if self._cache_is_stale() or self._should_initiate_cache_prefetch(): - try: - self._cached_value = self._handle_fetched_success(await self._refresh_callable_async()) - except Exception as ex: - self._cached_value = self._handle_fetched_failure(ex) - finally: - if acquired: - self._refresh_lock.release() - - def _handle_fetched_success(self, value: RefreshResult[T]) -> RefreshResult[T]: - log.debug(f'Refresh credentials successfully, retrieved value is {value}, cached value is {self._cached_value}') - self._consecutive_refresh_failures = 0 - now = int(time.mktime(time.localtime())) - # 过期时间大于15分钟,不用管 - if now < value.stale_time(): - log.debug( - f'Retrieved value stale time is {datetime.fromtimestamp(value.stale_time())}. Using staleTime of {datetime.fromtimestamp(value.stale_time())}') - return value - # 不足或等于15分钟,但未过期,下次会再次刷新 - if now < value.stale_time() + RefreshCachedSupplier.STALE_TIME: - log.warning( - f'Retrieved value stale time is in the past ({datetime.fromtimestamp(value.stale_time())}). Using staleTime of {datetime.fromtimestamp(now)}') - return RefreshResult(value=value.value(), stale_time=now, prefetch_time=value.prefetch_time()) - - log.warning( - f'Retrieved value expiration time of the credential is in the past ({datetime.fromtimestamp(value.stale_time() + RefreshCachedSupplier.STALE_TIME)}). Trying use the cached value.') - # 已过期,看缓存,缓存若大于15分钟,返回缓存,若小于15分钟,则根据策略判断是立刻重试还是稍后重试 - if self._cached_value is None: - raise CredentialException('No cached value was found.') - elif now < self._cached_value.stale_time(): - log.warning( - f'Cached value staleTime is {datetime.fromtimestamp(self._cached_value.stale_time())}. Using staleTime of {datetime.fromtimestamp(self._cached_value.stale_time())}') - return self._cached_value - elif self._stale_value_behavior == StaleValueBehavior.STRICT: - log.warning( - f'Cached value expiration is in the past ({datetime.fromtimestamp(self._cached_value.stale_time())}). 
Using expiration of {datetime.fromtimestamp(now + 1)}') - return RefreshResult(value=self._cached_value.value(), stale_time=now + 1, - prefetch_time=self._cached_value.prefetch_time()) - else: # ALLOW - extended_stale_time = now + int((50 * 1000 + random.randint(0, 20 * 1000 + 1)) / 1000) - log.warning( - f'Cached value expiration has been extended to {datetime.fromtimestamp(extended_stale_time)} because the downstream service returned a time in the past: {datetime.fromtimestamp(self._cached_value.stale_time())}') - return RefreshResult(value=self._cached_value.value(), stale_time=extended_stale_time, - prefetch_time=self._cached_value.prefetch_time()) - - def _handle_fetched_failure(self, exception: Exception) -> RefreshResult[T]: - log.warning(f'Refresh credentials failed, cached value is {self._cached_value}, error: {exception}') - if not self._cached_value: - log.exception(exception) - raise exception - now = int(time.mktime(time.localtime())) - if now < self._cached_value.stale_time(): - return self._cached_value - - self._consecutive_refresh_failures += 1 - if self._stale_value_behavior == StaleValueBehavior.STRICT: - log.exception(exception) - raise exception - else: # ALLOW - new_stale_time = int( - _jitter_time(now * 1000, 1000, _max_stale_failure_jitter(self._consecutive_refresh_failures)) / 1000) - log.warning( - f'Cached value expiration has been extended to {datetime.fromtimestamp(new_stale_time)} because calling the downstream service failed (consecutive failures: {self._consecutive_refresh_failures}).') - return RefreshResult(value=self._cached_value.value(), stale_time=new_stale_time, - prefetch_time=self._cached_value.prefetch_time()) diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/rsa_key_pair.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/rsa_key_pair.py deleted file mode 100644 index be40c2f..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/rsa_key_pair.py +++ /dev/null @@ -1,186 +0,0 @@ -import calendar -import json -import time - -from alibabacloud_credentials.provider.refreshable import Credentials, RefreshResult, RefreshCachedSupplier -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - - -def _get_stale_time(expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - -def _get_content(file_path: str) -> str: - with open(file_path, mode='r') as file: - content = file.read() - return content - - -class RsaKeyPairCredentialsProvider(ICredentialsProvider): - DEFAULT_DURATION_SECONDS = 3600 - DEFAULT_CONNECT_TIMEOUT = 5000 - DEFAULT_READ_TIMEOUT = 10000 - - def __init__(self, *, - public_key_id: str = None, - private_key_file: str = None, - duration_seconds: int = DEFAULT_DURATION_SECONDS, - sts_region_id: str = None, - sts_endpoint: str = None, - enable_vpc: bool = None, - http_options: HttpOptions = None): - - self._public_key_id = public_key_id - self._private_key_file = private_key_file - self._duration_seconds = duration_seconds - - if self._duration_seconds is None: - self._duration_seconds = self.DEFAULT_DURATION_SECONDS - if self._duration_seconds < 900: - raise ValueError('session duration should be in the range of 900s - max session 
duration') - if self._public_key_id is None or self._public_key_id == '': - raise ValueError('public_key_id cannot be empty') - if self._private_key_file is None or self._private_key_file == '': - raise ValueError('private_key_file cannot be empty') - self._private_key = _get_content(self._private_key_file) - if self._private_key is None or self._private_key == '': - raise ValueError('private_key cannot be empty') - - if sts_endpoint is not None and sts_endpoint != '': - self._sts_endpoint = sts_endpoint - else: - if enable_vpc is not None: - prefix = 'sts-vpc' if enable_vpc else 'sts' - else: - prefix = 'sts-vpc' if au.environment_enable_vpc.lower() == 'true' else 'sts' - if sts_region_id is not None and sts_region_id != '': - self._sts_endpoint = f'{prefix}.{sts_region_id}.aliyuncs.com' - elif au.environment_sts_region is not None and au.environment_sts_region != '': - self._sts_endpoint = f'{prefix}.{au.environment_sts_region}.aliyuncs.com' - else: - self._sts_endpoint = 'sts.ap-northeast-1.aliyuncs.com' - - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else RsaKeyPairCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else RsaKeyPairCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpsProxy': self._http_options.proxy - } - self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _refresh_credentials(self) -> RefreshResult[Credentials]: - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'GenerateSessionAccessKey', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self._duration_seconds), - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0', - 'Timestamp': ph.get_iso_8061_date(), - 'SignatureNonce': ph.get_uuid(), - 'AccessKeyId': self._public_key_id, - } - - string_to_sign = ph.compose_string_to_sign('GET', tea_request.query) - signature = ph.sign_string(string_to_sign, self._private_key + '&') - tea_request.query['Signature'] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = self._sts_endpoint - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from rsa_key_pair, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'SessionAccessKey' not in dic: - raise CredentialException( - f'error retrieving credentials from rsa_key_pair result: {response.body.decode("utf-8")}') - - cre = dic.get('SessionAccessKey') - if 'SessionAccessKeyId' not in cre or 'SessionAccessKeySecret' not in cre: - raise CredentialException( - f'error retrieving credentials from rsa_key_pair result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('SessionAccessKeyId'), - 
access_key_secret=cre.get('SessionAccessKeySecret'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - async def _refresh_credentials_async(self) -> RefreshResult[Credentials]: - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'GenerateSessionAccessKey', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self._duration_seconds), - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0', - 'Timestamp': ph.get_iso_8061_date(), - 'SignatureNonce': ph.get_uuid(), - 'AccessKeyId': self._public_key_id, - } - - string_to_sign = ph.compose_string_to_sign('GET', tea_request.query) - signature = ph.sign_string(string_to_sign, self._private_key + '&') - tea_request.query['Signature'] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = self._sts_endpoint - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from rsa_key_pair, http_code: {response.status_code}, result: {response.body.decode("utf-8")}') - - dic = json.loads(response.body.decode('utf-8')) - if 'SessionAccessKey' not in dic: - raise CredentialException( - f'error retrieving credentials from rsa_key_pair result: {response.body.decode("utf-8")}') - - cre = dic.get('SessionAccessKey') - if 'SessionAccessKeyId' not in cre or 'SessionAccessKeySecret' not in cre: - raise CredentialException( - f'error retrieving credentials from rsa_key_pair result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(cre.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=cre.get('SessionAccessKeyId'), - access_key_secret=cre.get('SessionAccessKeySecret'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - def get_provider_name(self) -> str: - return 'rsa_key_pair' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/static_ak.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/static_ak.py deleted file mode 100644 index 6aa8c31..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/static_ak.py +++ /dev/null @@ -1,32 +0,0 @@ -from alibabacloud_credentials.provider.refreshable import Credentials -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util - - -class StaticAKCredentialsProvider(ICredentialsProvider): - - def __init__(self, *, - access_key_id: str = None, - access_key_secret: str = None): - - self.access_key_id = access_key_id or auth_util.environment_access_key_id - self.access_key_secret = access_key_secret or auth_util.environment_access_key_secret - - if self.access_key_id is None or self.access_key_id == '': - raise ValueError('the access key id is empty') - if self.access_key_secret is None or self.access_key_secret == '': - raise ValueError('the access key secret is empty') - - def get_credentials(self) -> Credentials: - - return Credentials( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - provider_name=self.get_provider_name() - ) - - async def get_credentials_async(self) -> Credentials: - return self.get_credentials() - - def get_provider_name(self) -> str: - return 'static_ak' diff --git 
a/venv/Lib/site-packages/alibabacloud_credentials/provider/static_sts.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/static_sts.py deleted file mode 100644 index a4b3c50..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/static_sts.py +++ /dev/null @@ -1,37 +0,0 @@ -from alibabacloud_credentials.provider.refreshable import Credentials -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util - - -class StaticSTSCredentialsProvider(ICredentialsProvider): - - def __init__(self, *, - access_key_id: str = None, - access_key_secret: str = None, - security_token: str = None): - - self.access_key_id = access_key_id or auth_util.environment_access_key_id - self.access_key_secret = access_key_secret or auth_util.environment_access_key_secret - self.security_token = security_token or auth_util.environment_security_token - - if self.access_key_id is None or self.access_key_id == '': - raise ValueError('the access key id is empty') - if self.access_key_secret is None or self.access_key_secret == '': - raise ValueError('the access key secret is empty') - if self.security_token is None or self.security_token == '': - raise ValueError('the security token is empty') - - def get_credentials(self) -> Credentials: - - return Credentials( - access_key_id=self.access_key_id, - access_key_secret=self.access_key_secret, - security_token=self.security_token, - provider_name=self.get_provider_name() - ) - - async def get_credentials_async(self) -> Credentials: - return self.get_credentials() - - def get_provider_name(self) -> str: - return 'static_sts' diff --git a/venv/Lib/site-packages/alibabacloud_credentials/provider/uri.py b/venv/Lib/site-packages/alibabacloud_credentials/provider/uri.py deleted file mode 100644 index 819f9f1..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/provider/uri.py +++ /dev/null @@ -1,135 +0,0 @@ -import calendar -import json -import time -from urllib.parse import urlparse, parse_qs - -from alibabacloud_credentials.provider.refreshable import Credentials, RefreshResult, RefreshCachedSupplier -from alibabacloud_credentials.http import HttpOptions -from Tea.core import TeaCore -from alibabacloud_credentials_api import ICredentialsProvider -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.utils import parameter_helper as ph -from alibabacloud_credentials.exceptions import CredentialException - - -def _get_stale_time(expiration: int) -> int: - if expiration < 0: - return int(time.mktime(time.localtime())) + 60 * 60 - return expiration - 15 * 60 - - -class URLCredentialsProvider(ICredentialsProvider): - DEFAULT_CONNECT_TIMEOUT = 5000 - DEFAULT_READ_TIMEOUT = 10000 - - def __init__(self, *, - uri: str = None, - protocol: str = 'http', - http_options: HttpOptions = None): - - self._uri = uri or au.environment_credentials_uri - if self._uri is None or self._uri == '': - raise ValueError('uri or environment variable ALIBABA_CLOUD_CREDENTIALS_URI cannot be empty') - self._protocol = protocol - - self._http_options = http_options if http_options is not None else HttpOptions() - self._runtime_options = { - 'connectTimeout': self._http_options.connect_timeout if self._http_options.connect_timeout is not None else URLCredentialsProvider.DEFAULT_CONNECT_TIMEOUT, - 'readTimeout': self._http_options.read_timeout if self._http_options.read_timeout is not None else URLCredentialsProvider.DEFAULT_READ_TIMEOUT, - 'httpsProxy': self._http_options.proxy - } - 
self._credentials_cache = RefreshCachedSupplier( - refresh_callable=self._refresh_credentials, - refresh_callable_async=self._refresh_credentials_async, - ) - - def get_credentials(self) -> Credentials: - return self._credentials_cache._sync_call() - - async def get_credentials_async(self) -> Credentials: - return await self._credentials_cache._async_call() - - def _refresh_credentials(self) -> RefreshResult[Credentials]: - r = urlparse(self._uri) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme or self._protocol or 'http' - tea_request.method = 'GET' - tea_request.pathname = r.path - for key, values in parse_qs(r.query).items(): - for value in values: - tea_request.query[key] = value - - response = TeaCore.do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from {self._uri}, http_code={str(response.status_code)}, result: {response.body.decode("utf-8")}') - - body = response.body.decode('utf-8') - - dic = json.loads(body) - content_code = dic.get('Code') - - if content_code != "Success" or 'AccessKeyId' not in dic or 'AccessKeySecret' not in dic or 'SecurityToken' not in dic or 'Expiration' not in dic: - raise CredentialException( - f'error retrieving credentials from {self._uri} result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(dic.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=dic.get('AccessKeyId'), - access_key_secret=dic.get('AccessKeySecret'), - security_token=dic.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - async def _refresh_credentials_async(self) -> RefreshResult[Credentials]: - r = urlparse(self._uri) - tea_request = ph.get_new_request() - tea_request.headers['host'] = r.hostname - tea_request.port = r.port - tea_request.protocol = r.scheme or self._protocol or 'http' - tea_request.method = 'GET' - tea_request.pathname = r.path - for key, values in parse_qs(r.query).items(): - for value in values: - tea_request.query[key] = value - - response = await TeaCore.async_do_action(tea_request, self._runtime_options) - - if response.status_code != 200: - raise CredentialException( - f'error refreshing credentials from {self._uri}, http_code={str(response.status_code)}, result: {response.body.decode("utf-8")}') - - body = response.body.decode('utf-8') - - dic = json.loads(body) - content_code = dic.get('Code') - - if content_code != "Success" or 'AccessKeyId' not in dic or 'AccessKeySecret' not in dic or 'SecurityToken' not in dic or 'Expiration' not in dic: - raise CredentialException( - f'error retrieving credentials from {self._uri} result: {response.body.decode("utf-8")}') - - # 先转换为时间数组 - time_array = time.strptime(dic.get('Expiration'), '%Y-%m-%dT%H:%M:%SZ') - # 转换为时间戳 - expiration = calendar.timegm(time_array) - credentials = Credentials( - access_key_id=dic.get('AccessKeyId'), - access_key_secret=dic.get('AccessKeySecret'), - security_token=dic.get('SecurityToken'), - expiration=expiration, - provider_name=self.get_provider_name() - ) - return RefreshResult(value=credentials, - stale_time=_get_stale_time(expiration)) - - def get_provider_name(self) -> str: - return 'credential_uri' diff --git 
a/venv/Lib/site-packages/alibabacloud_credentials/providers.py b/venv/Lib/site-packages/alibabacloud_credentials/providers.py deleted file mode 100644 index 9bfa16e..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/providers.py +++ /dev/null @@ -1,691 +0,0 @@ -import calendar -import configparser -import json -import os -import time - -import requests -from Tea.core import TeaCore - -from alibabacloud_credentials import credentials -from alibabacloud_credentials.exceptions import CredentialException -from alibabacloud_credentials.models import Config -from alibabacloud_credentials.utils import auth_constant as ac -from alibabacloud_credentials.utils import auth_util as au -from alibabacloud_credentials.utils import parameter_helper as ph - - -class AlibabaCloudCredentialsProvider: - """BaseProvider class""" - duration_seconds = 3600 - timeout = 3000 - - def __init__(self, config=None): - if isinstance(config, Config): - self.type = config.type - self.access_key_id = config.access_key_id - self.access_key_secret = config.access_key_secret - self.role_arn = config.role_arn - self.role_session_name = config.role_session_name - self.public_key_id = config.public_key_id - self.role_name = config.role_name - self.disable_imds_v1 = config.disable_imds_v1 - self.oidc_provider_arn = config.oidc_provider_arn - self.oidc_token_file_path = config.oidc_token_file_path - self.private_key_file = config.private_key_file - self.bearer_token = config.bearer_token - self.security_token = config.security_token - self.host = config.host - self.timeout = config.timeout or AlibabaCloudCredentialsProvider.timeout - self.connect_timeout = config.connect_timeout or AlibabaCloudCredentialsProvider.timeout - self.proxy = config.proxy - self.sts_endpoint = config.sts_endpoint - - def _set_arg(self, key, value): - if value is not None: - setattr(self, key, value) - - val = getattr(self, key, None) - if val is None: - setattr(self, key, None) - - def _verify_empty_args(self, *args, config): - if None in args and config is None: - raise CredentialException( - '"%s" needs to receive a "model.Config" object or other necessary args' % self.__class__ - ) - - def get_credentials(self): - raise NotImplementedError('get_credentials() must be overridden') - - -class DefaultCredentialsProvider(AlibabaCloudCredentialsProvider): - def __init__(self): - super().__init__() - self.user_configuration_providers = [ - EnvironmentVariableCredentialsProvider() - ] - if au.enable_oidc_credential: - self.user_configuration_providers.append(OIDCRoleArnCredentialProvider( - role_session_name=au.environment_role_session_name, - role_arn=au.environment_role_arn, - oidc_provider_arn=au.environment_oidc_provider_arn, - oidc_token_file_path=au.environment_oidc_token_file - )) - - self.user_configuration_providers.append(ProfileCredentialsProvider()) - role_name = au.environment_ECSMeta_data - - if role_name is not None: - self.user_configuration_providers.append(EcsRamRoleCredentialProvider(role_name)) - self.user_configuration_providers.append(CredentialsUriProvider()) - - def get_credentials(self): - for provider in self.user_configuration_providers: - credential = provider.get_credentials() - if credential is not None: - return credential - raise CredentialException("not found credentials") - - def add_credentials_provider(self, p): - self.user_configuration_providers.append(p) - - def remove_credentials_provider(self, p): - self.user_configuration_providers.remove(p) - - def contains_credentials_provider(self, p): - return 
self.user_configuration_providers.__contains__(p) - - def clear_credentials_provider(self): - self.user_configuration_providers.clear() - - -class EcsRamRoleCredentialProvider(AlibabaCloudCredentialsProvider): - """EcsRamRoleCredentialProvider""" - default_metadata_token_duration = 21600 - - def __init__(self, role_name=None, config=None): - self._verify_empty_args(role_name, config=config) - super().__init__(config) - self.__url_in_ecs_metadata = "/latest/meta-data/ram/security-credentials/" - self.__url_in_ecs_metadata_token = "/latest/api/token" - self.__ecs_metadata_fetch_error_msg = "Failed to get RAM session credentials from ECS metadata service." - self.__ecs_metadata_token_fetch_error_msg = "Failed to get token from ECS Metadata Service." - self.__metadata_service_host = "100.100.100.200" - self._set_arg('role_name', role_name) - self.disable_imds_v1 = au.environment_imds_v1_disabled and au.environment_imds_v1_disabled.lower() == 'true' - - if isinstance(config, Config): - self.disable_imds_v1 = config.disable_imds_v1 is not None and config.disable_imds_v1 == True - - def _get_role_name(self, url=None): - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = self._get_metadata_token(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata - response = TeaCore.do_action(tea_request) - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + " HttpCode=" + str(response.status_code)) - self.role_name = response.body.decode('utf-8') - - async def _get_role_name_async(self, url=None): - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = await self._get_metadata_token_async(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata - response = await TeaCore.async_do_action(tea_request) - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + " HttpCode=" + str(response.status_code)) - self.role_name = response.body.decode('utf-8') - - def _get_metadata_token(self, url=None): - tea_request = ph.get_new_request() - tea_request.method = 'PUT' - tea_request.headers['host'] = url if url else self.__metadata_service_host - tea_request.headers['X-aliyun-ecs-metadata-token-ttl-seconds'] = str(self.default_metadata_token_duration) - if not url: - tea_request.pathname = self.__url_in_ecs_metadata_token - try: - response = TeaCore.do_action(tea_request) - if response.status_code != 200: - raise CredentialException( - self.__ecs_metadata_token_fetch_error_msg + " HttpCode=" + str(response.status_code)) - return response.body.decode('utf-8') - except Exception as e: - if self.disable_imds_v1: - raise e - return None - - async def _get_metadata_token_async(self, url=None): - tea_request = ph.get_new_request() - tea_request.method = 'PUT' - tea_request.headers['host'] = url if url else self.__metadata_service_host - tea_request.headers['X-aliyun-ecs-metadata-token-ttl-seconds'] = str(self.default_metadata_token_duration) - if not url: - tea_request.pathname = self.__url_in_ecs_metadata_token - try: - response = await TeaCore.async_do_action(tea_request) - if response.status_code != 200: - raise CredentialException( - 
self.__ecs_metadata_token_fetch_error_msg + " HttpCode=" + str(response.status_code)) - return response.body.decode('utf-8') - except Exception as e: - if self.disable_imds_v1: - raise e - return None - - def _create_credential(self, url=None): - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = self._get_metadata_token(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata + self.role_name - # request - response = TeaCore.do_action(tea_request) - - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + " HttpCode=" + str(response.status_code)) - - dic = json.loads(response.body.decode('utf-8')) - content_code = dic.get('Code') - content_access_key_id = dic.get('AccessKeyId') - content_access_key_secret = dic.get('AccessKeySecret') - content_security_token = dic.get('SecurityToken') - content_expiration = dic.get('Expiration') - - if content_code != "Success": - raise CredentialException(self.__ecs_metadata_fetch_error_msg) - - # 先转换为时间数组 - time_array = time.strptime(content_expiration, "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - time_stamp = calendar.timegm(time_array) - return credentials.EcsRamRoleCredential(content_access_key_id, content_access_key_secret, - content_security_token, time_stamp, self) - - def get_credentials(self): - if self.role_name == "": - self._get_role_name() - return self._create_credential() - - async def _create_credential_async(self, url=None): - tea_request = ph.get_new_request() - tea_request.headers['host'] = url if url else self.__metadata_service_host - metadata_token = await self._get_metadata_token_async(url) - if metadata_token is not None: - tea_request.headers['X-aliyun-ecs-metadata-token'] = metadata_token - if not url: - tea_request.pathname = self.__url_in_ecs_metadata + self.role_name - - # request - response = await TeaCore.async_do_action(tea_request) - - if response.status_code != 200: - raise CredentialException(self.__ecs_metadata_fetch_error_msg + " HttpCode=" + str(response.status_code)) - - dic = json.loads(response.body.decode('utf-8')) - content_code = dic.get('Code') - content_access_key_id = dic.get('AccessKeyId') - content_access_key_secret = dic.get('AccessKeySecret') - content_security_token = dic.get('SecurityToken') - content_expiration = dic.get('Expiration') - - if content_code != "Success": - raise CredentialException(self.__ecs_metadata_fetch_error_msg) - - # 先转换为时间数组 - time_array = time.strptime(content_expiration, "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - time_stamp = calendar.timegm(time_array) - return credentials.EcsRamRoleCredential(content_access_key_id, content_access_key_secret, - content_security_token, time_stamp, self) - - async def get_credentials_async(self): - if self.role_name == "": - await self._get_role_name_async() - return await self._create_credential_async() - - -class RamRoleArnCredentialProvider(AlibabaCloudCredentialsProvider): - """RamRoleArnCredentialProvider""" - - def __init__(self, access_key_id=None, access_key_secret=None, role_session_name=None, role_arn=None, - region_id=None, - policy=None, config=None): - self._verify_empty_args(access_key_id, access_key_secret, config=config) - super().__init__(config) - self._set_arg('role_arn', role_arn) - self._set_arg('access_key_id', access_key_id) - self._set_arg('access_key_secret', access_key_secret) - 
self._set_arg('region_id', region_id) - self._set_arg('role_session_name', role_session_name) - self._set_arg('policy', policy) - if region_id is None and au.environment_sts_region is not None: - self._set_arg('region_id', au.environment_sts_region) - if self.region_id is not None: - self._set_arg('sts_endpoint', f'sts.{self.region_id}.aliyuncs.com') - else: - self._set_arg('sts_endpoint', - 'sts.aliyuncs.com' if config is None or config.sts_endpoint is None else config.sts_endpoint) - - def get_credentials(self): - return self._create_credentials() - - def _create_credentials(self): - # 获取credential 先实现签名用工具类 - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRole', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self.duration_seconds), - 'RoleArn': self.role_arn, - 'AccessKeyId': self.access_key_id, - 'RoleSessionName': self.role_session_name, - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0' - } - tea_request.query["Timestamp"] = ph.get_iso_8061_date() - tea_request.query["SignatureNonce"] = ph.get_uuid() - if self.policy is not None: - tea_request.query["Policy"] = self.policy - string_to_sign = ph.compose_string_to_sign("GET", tea_request.query) - signature = ph.sign_string(string_to_sign, self.access_key_secret + "&") - tea_request.query["Signature"] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = self.sts_endpoint - # request - response = TeaCore.do_action(tea_request) - if response.status_code == 200: - dic = json.loads(response.body.decode('utf-8')) - if "Credentials" in dic: - cre = dic.get("Credentials") - # 先转换为时间数组 - time_array = time.strptime(cre.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - expiration = calendar.timegm(time_array) - return credentials.RamRoleArnCredential(cre.get("AccessKeyId"), cre.get("AccessKeySecret"), - cre.get("SecurityToken"), expiration, self) - raise CredentialException(response.body.decode('utf-8')) - - async def get_credentials_async(self): - return await self._create_credentials_async() - - async def _create_credentials_async(self): - # 获取credential 先实现签名用工具类 - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRole', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self.duration_seconds), - 'RoleArn': self.role_arn, - 'AccessKeyId': self.access_key_id, - 'RoleSessionName': self.role_session_name, - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0' - } - tea_request.query["Timestamp"] = ph.get_iso_8061_date() - tea_request.query["SignatureNonce"] = ph.get_uuid() - if self.policy is not None: - tea_request.query["Policy"] = self.policy - string_to_sign = ph.compose_string_to_sign("GET", tea_request.query) - signature = ph.sign_string(string_to_sign, self.access_key_secret + "&") - tea_request.query["Signature"] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = self.sts_endpoint - # request - response = await TeaCore.async_do_action(tea_request) - if response.status_code == 200: - dic = json.loads(response.body.decode('utf-8')) - if "Credentials" in dic: - cre = dic.get("Credentials") - # 先转换为时间数组 - time_array = time.strptime(cre.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - expiration = calendar.timegm(time_array) - return credentials.RamRoleArnCredential(cre.get("AccessKeyId"), cre.get("AccessKeySecret"), - cre.get("SecurityToken"), expiration, self) - raise CredentialException(response.body.decode('utf-8')) - - -class 
OIDCRoleArnCredentialProvider(AlibabaCloudCredentialsProvider): - """OIDCRoleArnCredentialProvider""" - - def __init__(self, role_session_name=None, role_arn=None, - oidc_provider_arn=None, - oidc_token_file_path=None, - region_id=None, - policy=None, config=None): - self._verify_empty_args(role_arn, oidc_provider_arn, oidc_token_file_path, config=config) - super().__init__(config) - self._set_arg('role_arn', role_arn) - self._set_arg('oidc_provider_arn', oidc_provider_arn) - if oidc_token_file_path is not None: - self._set_arg('oidc_token_file_path', oidc_token_file_path) - elif config.oidc_token_file_path is not None: - self._set_arg('oidc_token_file_path', oidc_token_file_path) - elif au.environment_oidc_token_file is not None: - self._set_arg('oidc_token_file_path', au.environment_oidc_token_file) - else: - raise CredentialException( - 'The oidc_token_file_path does not exist and env ALIBABA_CLOUD_OIDC_TOKEN_FILE is none.') - self._set_arg('region_id', region_id) - self._set_arg('role_session_name', role_session_name) - self._set_arg('policy', policy) - if region_id is None and au.environment_sts_region is not None: - self._set_arg('region_id', au.environment_sts_region) - if self.region_id is not None: - self._set_arg('sts_endpoint', f'sts.{self.region_id}.aliyuncs.com') - else: - self._set_arg('sts_endpoint', - 'sts.aliyuncs.com' if config is None or config.sts_endpoint is None else config.sts_endpoint) - - def get_credentials(self): - return self._create_credentials() - - def _create_credentials(self): - # 获取credential 先实现签名用工具类 - oidc_token = au.get_private_key(self.oidc_token_file_path) - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRoleWithOIDC', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self.duration_seconds), - 'RoleArn': self.role_arn, - 'OIDCProviderArn': self.oidc_provider_arn, - 'OIDCToken': oidc_token, - 'RoleSessionName': self.role_session_name or 'defaultSessionName' - } - tea_request.query["Timestamp"] = ph.get_iso_8061_date() - tea_request.query["SignatureNonce"] = ph.get_uuid() - if self.policy is not None: - tea_request.query["Policy"] = self.policy - tea_request.protocol = 'https' - tea_request.headers['host'] = self.sts_endpoint - # request - response = TeaCore.do_action(tea_request) - if response.status_code == 200: - dic = json.loads(response.body.decode('utf-8')) - if "Credentials" in dic: - cre = dic.get("Credentials") - # 先转换为时间数组 - time_array = time.strptime(cre.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - expiration = calendar.timegm(time_array) - return credentials.OIDCRoleArnCredential(cre.get("AccessKeyId"), cre.get("AccessKeySecret"), - cre.get("SecurityToken"), expiration, self) - raise CredentialException(response.body.decode('utf-8')) - - async def get_credentials_async(self): - return await self._create_credentials_async() - - async def _create_credentials_async(self): - # 获取credential 先实现签名用工具类 - oidc_token = au.get_private_key(self.oidc_token_file_path) - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'AssumeRoleWithOIDC', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self.duration_seconds), - 'RoleArn': self.role_arn, - 'OIDCProviderArn': self.oidc_provider_arn, - 'OIDCToken': oidc_token, - 'RoleSessionName': self.role_session_name or 'defaultSessionName' - } - tea_request.query["Timestamp"] = ph.get_iso_8061_date() - tea_request.query["SignatureNonce"] = ph.get_uuid() - if self.policy is not None: - tea_request.query["Policy"] 
= self.policy - tea_request.protocol = 'https' - tea_request.headers['host'] = self.sts_endpoint - # request - response = await TeaCore.async_do_action(tea_request) - if response.status_code == 200: - dic = json.loads(response.body.decode('utf-8')) - if "Credentials" in dic: - cre = dic.get("Credentials") - # 先转换为时间数组 - time_array = time.strptime(cre.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ") - # 转换为时间戳 - expiration = calendar.timegm(time_array) - return credentials.OIDCRoleArnCredential(cre.get("AccessKeyId"), cre.get("AccessKeySecret"), - cre.get("SecurityToken"), expiration, self) - raise CredentialException(response.body.decode('utf-8')) - - -class RsaKeyPairCredentialProvider(AlibabaCloudCredentialsProvider): - - def __init__(self, access_key_id=None, access_key_secret=None, region_id=None, config=None): - self._verify_empty_args(access_key_id, access_key_secret, config=config) - super().__init__(config) - self._set_arg('access_key_id', access_key_id) - self._set_arg('access_key_secret', access_key_secret) - self._set_arg('region_id', region_id) - - async def get_credentials_async(self): - return await self._create_credential_async() - - async def _create_credential_async(self, turl=None): - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'GenerateSessionAccessKey', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self.duration_seconds), - 'AccessKeyId': self.access_key_id, - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0' - } - tea_request.query["Timestamp"] = ph.get_iso_8061_date() - tea_request.query["SignatureNonce"] = ph.get_uuid() - - str_to_sign = ph.compose_string_to_sign('GET', tea_request.query) - signature = ph.sign_string(str_to_sign, self.access_key_id + '&') - tea_request.query['Signature'] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = turl if turl else 'sts.aliyuncs.com' - # request - response = await TeaCore.async_do_action(tea_request) - if response.status_code == 200: - dic = json.loads(response.body.decode('utf-8')) - if "SessionAccessKey" in dic: - cre = dic.get("SessionAccessKey") - time_array = time.strptime(cre.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ") - expiration = calendar.timegm(time_array) - return credentials.RsaKeyPairCredential(cre.get("SessionAccessKeyId"), - cre.get("SessionAccessKeySecret"), - expiration, self) - raise CredentialException(response.body.decode('utf-8')) - - def get_credentials(self): - return self._create_credential() - - def _create_credential(self, turl=None): - tea_request = ph.get_new_request() - tea_request.query = { - 'Action': 'GenerateSessionAccessKey', - 'Format': 'JSON', - 'Version': '2015-04-01', - 'DurationSeconds': str(self.duration_seconds), - 'AccessKeyId': self.access_key_id, - 'SignatureMethod': 'HMAC-SHA1', - 'SignatureVersion': '1.0' - } - tea_request.query["Timestamp"] = ph.get_iso_8061_date() - tea_request.query["SignatureNonce"] = ph.get_uuid() - - str_to_sign = ph.compose_string_to_sign('GET', tea_request.query) - signature = ph.sign_string(str_to_sign, self.access_key_id + '&') - tea_request.query['Signature'] = signature - tea_request.protocol = 'https' - tea_request.headers['host'] = turl if turl else 'sts.aliyuncs.com' - # request - response = TeaCore.do_action(tea_request) - if response.status_code == 200: - dic = json.loads(response.body.decode('utf-8')) - if "SessionAccessKey" in dic: - cre = dic.get("SessionAccessKey") - time_array = time.strptime(cre.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ") - expiration = 
calendar.timegm(time_array) - return credentials.RsaKeyPairCredential(cre.get("SessionAccessKeyId"), - cre.get("SessionAccessKeySecret"), - expiration, self) - raise CredentialException(response.body.decode('utf-8')) - - -class ProfileCredentialsProvider(AlibabaCloudCredentialsProvider): - def __init__(self, path=None): - super().__init__() - self._set_arg('file_path', path) - - def parse_ini(self): - file_path = self.file_path if self.file_path else au.environment_credentials_file - if file_path is None: - if not ac.HOME: - return - if os.path.exists(os.path.join(ac.HOME, "/.alibabacloud/credentials.ini")): - # Support '/.alibabacloud/credentials.ini' is due to historical mistakes. - # Please try to use '~/.alibabacloud/credentials.ini'. - file_path = os.path.join(ac.HOME, "/.alibabacloud/credentials.ini") - elif os.path.exists(os.path.join(ac.HOME, ".alibabacloud/credentials.ini")): - file_path = os.path.join(ac.HOME, ".alibabacloud/credentials.ini") - if file_path is None: - return - elif len(file_path) == 0: - raise CredentialException("The specified credentials file is empty") - - # loads ini - conf = configparser.ConfigParser() - conf.read(file_path, encoding='utf-8') - ini_map = dict(conf._sections) - for k in dict(conf._sections): - option = dict(ini_map[k]) - for key, value in dict(ini_map[k]).items(): - if '#' in value: - option[key] = value.split('#')[0].strip() - else: - option[key] = value.strip() - ini_map[k] = option - client_config = ini_map.get(au.client_type) - return client_config - - def get_credentials(self): - client_config = self.parse_ini() - if client_config is None: - return - return self._create_credential(client_config) - - def _create_credential(self, config): - config_type = config.get(ac.INI_TYPE) - if not config_type: - raise CredentialException("The configured client type is empty") - elif ac.INI_TYPE_ARN == config_type: - return self._get_sts_assume_role_session_provider(config).get_credentials() - elif ac.INI_TYPE_OIDC == config_type: - return self._get_sts_oidc_role_session_provider(config).get_credentials() - elif ac.INI_TYPE_KEY_PAIR == config_type: - return self._get_sts_get_session_access_key_provider(config).get_credentials() - elif ac.INI_TYPE_RAM == config_type: - return self._get_instance_profile_provider(config).get_credentials() - - access_key_id = config.get(ac.INI_ACCESS_KEY_ID) - access_key_secret = config.get(ac.INI_ACCESS_KEY_IDSECRET) - if not access_key_id or not access_key_secret: - return - return credentials.AccessKeyCredential(access_key_id, access_key_secret) - - @staticmethod - def _get_sts_assume_role_session_provider(config): - access_key_id = config.get(ac.INI_ACCESS_KEY_ID) - access_key_secret = config.get(ac.INI_ACCESS_KEY_IDSECRET) - role_session_name = config.get(ac.INI_ROLE_SESSION_NAME) - role_arn = config.get(ac.INI_ROLE_ARN) - region_id = config.get(ac.DEFAULT_REGION) - policy = config.get(ac.INI_POLICY) - - if not access_key_id or not access_key_secret: - raise CredentialException("The configured access_key_id or access_key_secret is empty") - if not role_session_name or not role_arn: - raise CredentialException("The configured role_session_name or role_arn is empty") - return RamRoleArnCredentialProvider( - access_key_id, access_key_secret, role_session_name, role_arn, region_id, policy - ) - - @staticmethod - def _get_sts_oidc_role_session_provider(config): - role_session_name = config.get(ac.INI_ROLE_SESSION_NAME) - role_arn = config.get(ac.INI_ROLE_ARN) - oidc_provider_arn = config.get(ac.INI_OIDC_PROVIDER_ARN) - 
oidc_token_file_path = config.get(ac.INI_OIDC_TOKEN_FILE_PATH) - region_id = config.get(ac.DEFAULT_REGION) - policy = config.get(ac.INI_POLICY) - - if not role_arn: - raise CredentialException("The configured role_arn is empty") - if not oidc_provider_arn: - raise CredentialException("The configured oidc_provider_arn is empty") - return OIDCRoleArnCredentialProvider( - role_session_name, role_arn, oidc_provider_arn, oidc_token_file_path, - region_id, policy - ) - - @staticmethod - def _get_sts_get_session_access_key_provider(config): - public_key_id = config.get(ac.INI_PUBLIC_KEY_ID) - private_key_file = config.get(ac.INI_PRIVATE_KEY_FILE) - if not private_key_file: - raise CredentialException("The configured private_key_file is empty") - private_key = au.get_private_key(private_key_file) - if not public_key_id or not private_key: - raise CredentialException("The configured public_key_id or private_key_file content is empty") - - return RsaKeyPairCredentialProvider(public_key_id, private_key) - - @staticmethod - def _get_instance_profile_provider(config): - role_name = config.get(ac.INI_ROLE_NAME) - if not role_name: - raise CredentialException("The configured role_name is empty") - return EcsRamRoleCredentialProvider(role_name) - - -class EnvironmentVariableCredentialsProvider(AlibabaCloudCredentialsProvider): - def get_credentials(self): - if 'default' != au.client_type: - return - access_key_id = au.environment_access_key_id - access_key_secret = au.environment_access_key_secret - security_token = au.environment_security_token - - if access_key_id is None or access_key_secret is None: - return - - if len(access_key_id) == 0: - raise CredentialException("Environment variable accessKeyId cannot be empty") - - if len(access_key_secret) == 0: - raise CredentialException("Environment variable accessKeySecret cannot be empty") - - if security_token is not None and len(security_token) > 0: - return credentials.StsCredential(access_key_id, access_key_secret, security_token) - - return credentials.AccessKeyCredential(access_key_id, access_key_secret) - - -class CredentialsUriProvider(AlibabaCloudCredentialsProvider): - def get_credentials(self): - credentials_uri = os.environ.get('ALIBABA_CLOUD_CREDENTIALS_URI') - if credentials_uri is None: - return None - return credentials.CredentialsURICredential(credentials_uri) diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/__init__.py b/venv/Lib/site-packages/alibabacloud_credentials/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 33dfdcc..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/auth_constant.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/auth_constant.cpython-312.pyc deleted file mode 100644 index d830efd..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/auth_constant.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/auth_util.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/auth_util.cpython-312.pyc deleted file mode 100644 index 29b1282..0000000 
Binary files a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/auth_util.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/parameter_helper.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/parameter_helper.cpython-312.pyc deleted file mode 100644 index 9b15bf9..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials/utils/__pycache__/parameter_helper.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/auth_constant.py b/venv/Lib/site-packages/alibabacloud_credentials/utils/auth_constant.py deleted file mode 100644 index 0f92ef0..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/utils/auth_constant.py +++ /dev/null @@ -1,31 +0,0 @@ -from alibabacloud_credentials.utils import auth_util - -HOME = auth_util.get_home() - -INI_ACCESS_KEY_ID = "access_key_id" -INI_ACCESS_KEY_IDSECRET = "access_key_secret" -INI_TYPE = "type" -INI_TYPE_RAM = "ecs_ram_role" -INI_TYPE_ARN = "ram_role_arn" -INI_TYPE_OIDC = "oidc_role_arn" -INI_TYPE_KEY_PAIR = "rsa_key_pair" -INI_PUBLIC_KEY_ID = "public_key_id" -INI_PRIVATE_KEY_FILE = "private_key_file" -INI_PRIVATE_KEY = "private_key" -INI_ROLE_NAME = "role_name" -INI_ROLE_SESSION_NAME = "role_session_name" -INI_ROLE_ARN = "role_arn" -INI_POLICY = "policy" -INI_OIDC_PROVIDER_ARN = "oidc_provider_arn" -INI_OIDC_TOKEN_FILE_PATH = "oidc_token_file_path" -TSC_VALID_TIME_SECONDS = 3600 -DEFAULT_REGION = "region_id" -INI_ENABLE = "enable" -ACCESS_KEY = "access_key" -STS = "sts" -ECS_RAM_ROLE = "ecs_ram_role" -RAM_ROLE_ARN = "ram_role_arn" -OIDC_ROLE_ARN = "oidc_role_arn" -CREDENTIALS_URI = "credentials_uri" -RSA_KEY_PAIR = "rsa_key_pair" -BEARER = "bearer" diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/auth_util.py b/venv/Lib/site-packages/alibabacloud_credentials/utils/auth_util.py deleted file mode 100644 index 5ec9056..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/utils/auth_util.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -import platform -import re - -client_type = os.environ.get('ALIBABA_CLOUD_PROFILE', 'default') - -environment_access_key_id = os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_ID') -environment_access_key_secret = os.environ.get('ALIBABA_CLOUD_ACCESS_KEY_SECRET') -environment_security_token = os.environ.get('ALIBABA_CLOUD_SECURITY_TOKEN') - -environment_ECSMeta_data = os.environ.get('ALIBABA_CLOUD_ECS_METADATA') -environment_ecs_metadata = os.environ.get('ALIBABA_CLOUD_ECS_METADATA') -environment_imds_v1_disabled = os.environ.get('ALIBABA_CLOUD_IMDSV1_DISABLED', 'false') -environment_ecs_metadata_disabled = os.environ.get('ALIBABA_CLOUD_ECS_METADATA_DISABLED', 'false') - -environment_credentials_file = os.environ.get('ALIBABA_CLOUD_CREDENTIALS_FILE') -environment_profile_name = os.environ.get('ALIBABA_CLOUD_PROFILE') -environment_oidc_token_file = os.environ.get('ALIBABA_CLOUD_OIDC_TOKEN_FILE') -environment_role_arn = os.environ.get('ALIBABA_CLOUD_ROLE_ARN') -environment_oidc_provider_arn = os.environ.get('ALIBABA_CLOUD_OIDC_PROVIDER_ARN') -environment_role_session_name = os.environ.get('ALIBABA_CLOUD_ROLE_SESSION_NAME') - -environment_credentials_uri = os.environ.get('ALIBABA_CLOUD_CREDENTIALS_URI') - -environment_cli_profile_disabled = os.environ.get('ALIBABA_CLOUD_CLI_PROFILE_DISABLED', 'false') - -environment_sts_region = os.environ.get('ALIBABA_CLOUD_STS_REGION') -environment_enable_vpc = 
os.environ.get('ALIBABA_CLOUD_VPC_ENDPOINT_ENABLED', 'false') - -enable_oidc_credential = environment_oidc_token_file is not None and environment_oidc_token_file != '' \ - and environment_role_arn is not None and environment_role_arn != '' \ - and environment_oidc_provider_arn is not None and environment_oidc_provider_arn != '' -private_key = None - - -def get_private_key(file_path): - with open(file_path, encoding='utf-8') as f: - key = f.read() - return key - - -def get_home(): - if platform.system() == 'Windows': - home = os.getenv('HOME') - home_path = os.getenv('HOMEPATH') - home_drive = os.getenv('HOMEDRIVE') - if home: - return home - elif home_path: - has_drive_in_home_path = bool(re.match(r'^[A-Za-z]:', home_path)) - return home_path if has_drive_in_home_path else os.path.join(home_drive or '', home_path) - else: - return os.path.expanduser("~") - else: - return os.getenv('HOME') or os.getenv('HOMEPATH') or os.path.expanduser("~") diff --git a/venv/Lib/site-packages/alibabacloud_credentials/utils/parameter_helper.py b/venv/Lib/site-packages/alibabacloud_credentials/utils/parameter_helper.py deleted file mode 100644 index 4aa5f50..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials/utils/parameter_helper.py +++ /dev/null @@ -1,62 +0,0 @@ -from urllib.parse import quote_plus, urlencode - -import hmac -import hashlib -import base64 -import socket -import uuid -import datetime -import platform - -import alibabacloud_credentials -from Tea.request import TeaRequest - -TIME_ZONE = "UTC" -FORMAT_ISO_8601 = "yyyy-MM-dd'T'HH:mm:ss'Z'" -FORMAT_RFC_2616 = "%a, %d %b %Y %H:%M:%S GMT" -SEPARATOR = "&" -ENCODING = "UTF-8" -ALGORITHM_NAME = "HmacSHA1" - - -def get_new_request(): - request = TeaRequest() - request.headers['user-agent'] = f'AlibabaCloud ({platform.system()}; {platform.machine()}) ' \ - f'Python/{platform.python_version()} ' \ - f'Credentials/{alibabacloud_credentials.__version__} ' \ - f'TeaDSL/1' - return request - - -def get_uuid(): - name = socket.gethostname() + str(uuid.uuid1()) - namespace = uuid.NAMESPACE_URL - return str(uuid.uuid5(namespace, name)) - - -def get_iso_8061_date(): - return datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") - - -def compose_string_to_sign(method, queries): - sorted_key = sorted(list(queries.keys())) - canonicalized_query_string = '' - for key in sorted_key: - canonicalized_query_string += '&%s=%s' % ( - quote_plus(key), quote_plus(queries.get(key)) - ) - - string_to_sign = method + SEPARATOR + quote_plus('/') + SEPARATOR + quote_plus(canonicalized_query_string[1:]) - return string_to_sign - - -def sign_string(sign, secret): - hash_val = hmac.new(secret.encode(ENCODING), sign.encode(ENCODING), hashlib.sha1).digest() - signature = base64.encodebytes(hash_val).decode(ENCODING) - return signature.rstrip('\n') - - -def compose_url(endpoint, queries, protocol): - url = protocol + "://" + endpoint + "/?" 
- url += urlencode(queries) - return url diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/METADATA deleted file mode 100644 index 311c0de..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/METADATA +++ /dev/null @@ -1,34 +0,0 @@ -Metadata-Version: 2.4 -Name: alibabacloud-credentials-api -Version: 1.0.0 -Summary: Alibaba Cloud Gateway SPI SDK Library for Python -Home-page: https://github.com/aliyun/alibabacloud-gateway -Author: Alibaba Cloud SDK -Author-email: sdk-team@alibabacloud.com -License: Apache License 2.0 -Keywords: alibabacloud,credentials,api -Platform: any -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description-content-type -Dynamic: home-page -Dynamic: keywords -Dynamic: license -Dynamic: platform -Dynamic: requires-python -Dynamic: summary diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/RECORD deleted file mode 100644 index f0efdc3..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -alibabacloud_credentials_api-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_credentials_api-1.0.0.dist-info/METADATA,sha256=aSfvACGsABp4euDCwTHARD8tSRzJqF2zs76TVwv2g34,1224 -alibabacloud_credentials_api-1.0.0.dist-info/RECORD,, -alibabacloud_credentials_api-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_credentials_api-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -alibabacloud_credentials_api-1.0.0.dist-info/top_level.txt,sha256=dIv-StdUW0heWk7dXongOwOf0pfJobK5uFhkNl5D9AQ,29 -alibabacloud_credentials_api/__init__.py,sha256=lB4fbOMNpXj08gB_nt5NLijbjYlWmGRTFsk1OKPVF5M,140 -alibabacloud_credentials_api/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_credentials_api/__pycache__/models.cpython-312.pyc,, -alibabacloud_credentials_api/models.py,sha256=-DOj-xplEss-xal6eCq_iyyvUHpN_O4khoTKqmAjTqE,983 diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/WHEEL 
b/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/top_level.txt deleted file mode 100644 index 115b610..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api-1.0.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_credentials_api diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api/__init__.py b/venv/Lib/site-packages/alibabacloud_credentials_api/__init__.py deleted file mode 100644 index e50455e..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .models import ICredentials, ICredentialsProvider - -__version__ = "1.0.0" - -__all__ = [ - 'ICredentials', - 'ICredentialsProvider' -] diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials_api/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 56b9155..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials_api/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_credentials_api/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 355287e..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_credentials_api/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_credentials_api/models.py b/venv/Lib/site-packages/alibabacloud_credentials_api/models.py deleted file mode 100644 index fce8d0d..0000000 --- a/venv/Lib/site-packages/alibabacloud_credentials_api/models.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
- -class ICredentials: - def get_access_key_id(self) -> str: - raise NotImplementedError('get_access_key_id() must be overridden') - - def get_access_key_secret(self) -> str: - raise NotImplementedError('get_access_key_secret() must be overridden') - - def get_security_token(self) -> str: - raise NotImplementedError('get_security_token() must be overridden') - - def get_provider_name(self) -> str: - raise NotImplementedError('get_provider_name() must be overridden') - - -class ICredentialsProvider: - def get_credentials(self) -> ICredentials: - raise NotImplementedError('get_credentials() must be overridden') - - async def get_credentials_async(self) -> ICredentials: - raise NotImplementedError('get_credentials_async() must be overridden') - - def get_provider_name(self) -> str: - raise NotImplementedError('get_provider_name() must be overridden') \ No newline at end of file diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/LICENSE b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/LICENSE deleted file mode 100644 index ceacaf0..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2009-present, Alibaba Cloud All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
\ No newline at end of file diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/METADATA deleted file mode 100644 index c2a0b94..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/METADATA +++ /dev/null @@ -1,69 +0,0 @@ -Metadata-Version: 2.1 -Name: alibabacloud-dypnsapi20170525 -Version: 2.0.0 -Summary: Alibaba Cloud Dypnsapi (20170525) SDK Library for Python -Home-page: https://github.com/aliyun/alibabacloud-python-sdk -Author: Alibaba Cloud SDK -Author-email: sdk-team@alibabacloud.com -License: Apache License 2.0 -Keywords: alibabacloud,dypnsapi20170525 -Platform: any -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Topic :: Software Development -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: alibabacloud-endpoint-util (<1.0.0,>=0.0.4) -Requires-Dist: alibabacloud-openapi-util (<1.0.0,>=0.2.2) -Requires-Dist: alibabacloud-tea-openapi (<1.0.0,>=0.3.16) -Requires-Dist: alibabacloud-tea-util (<1.0.0,>=0.3.13) - -English | [简体中文](README-CN.md) -![](https://aliyunsdk-pages.alicdn.com/icons/AlibabaCloud.svg) - -## Alibaba Cloud Dypnsapi SDK for Python - -## Requirements - -- Python >= 3.7 - -## Installation - -- **Install with pip** - -Python SDK uses a common package management tool named `pip`. If pip is not installed, see the [pip user guide](https://pip.pypa.io/en/stable/installing/ "pip User Guide") to install pip. - -```bash -# Install the alibabacloud_dypnsapi20170525 -pip install alibabacloud_dypnsapi20170525 -``` - -## Issues - -[Opening an Issue](https://github.com/aliyun/alibabacloud-sdk/issues/new), Issues not conforming to the guidelines may be closed immediately. - -## Usage - -[Quick Examples](https://github.com/aliyun/alibabacloud-python-sdk/blob/master/docs/0-Usage-EN.md#quick-examples) - -## Changelog - -Detailed changes for each release are documented in the [release notes](https://github.com/aliyun/alibabacloud-python-sdk/blob/master/dypnsapi-20170525/ChangeLog.md). - -## References - -- [Latest Release](https://github.com/aliyun/alibabacloud-sdk/tree/master/python) - -## License - -[Apache-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Copyright (c) 2009-present, Alibaba Cloud All rights reserved. 
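
For reference, the credential providers removed above all sign their STS requests the same way through the deleted `parameter_helper` module: build a sorted, percent-encoded canonical query string, then HMAC-SHA1 it with the access key secret plus a trailing `&`. Below is a minimal, self-contained Python sketch of that flow, restating the removed helpers for readability; it is not the vendored module itself, and the example query and secret are placeholders.

```python
import base64
import hashlib
import hmac
from urllib.parse import quote_plus

SEPARATOR = "&"


def compose_string_to_sign(method: str, queries: dict) -> str:
    # Sort the query keys, percent-encode each key/value pair, then build the
    # canonical "METHOD&%2F&<encoded query>" string used by the RPC signature.
    canonicalized = ""
    for key in sorted(queries):
        canonicalized += "&%s=%s" % (quote_plus(key), quote_plus(queries[key]))
    return method + SEPARATOR + quote_plus("/") + SEPARATOR + quote_plus(canonicalized[1:])


def sign_string(string_to_sign: str, secret: str) -> str:
    # HMAC-SHA1 over the canonical string, base64-encoded, trailing newline stripped.
    digest = hmac.new(secret.encode("utf-8"), string_to_sign.encode("utf-8"), hashlib.sha1).digest()
    return base64.encodebytes(digest).decode("utf-8").rstrip("\n")


if __name__ == "__main__":
    # Placeholder values only; real requests also carry Timestamp, SignatureNonce, etc.
    query = {"Action": "AssumeRole", "Format": "JSON", "Version": "2015-04-01"}
    string_to_sign = compose_string_to_sign("GET", query)
    # The removed providers append "&" to the secret before signing, mirrored here.
    print(sign_string(string_to_sign, "example-access-key-secret" + "&"))
```
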
diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/RECORD deleted file mode 100644 index 936d8a3..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/RECORD +++ /dev/null @@ -1,13 +0,0 @@ -alibabacloud_dypnsapi20170525-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_dypnsapi20170525-2.0.0.dist-info/LICENSE,sha256=0CFItL6bHvxqS44T6vlLoW2R4Zaic304OO3WxN0oXF0,600 -alibabacloud_dypnsapi20170525-2.0.0.dist-info/METADATA,sha256=_Inh9QPu5UePijHplWsTRa8xLNu-9if8Jld4p4QZsAI,2347 -alibabacloud_dypnsapi20170525-2.0.0.dist-info/RECORD,, -alibabacloud_dypnsapi20170525-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_dypnsapi20170525-2.0.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 -alibabacloud_dypnsapi20170525-2.0.0.dist-info/top_level.txt,sha256=lwUhOD7yax3q5hEVJQItYrG_kW1Vy3O6FfZAA8Gy5Wg,30 -alibabacloud_dypnsapi20170525/__init__.py,sha256=8sGE31KO-DzdioQZ2J7tZ-GG2VwZjYHpfIX2_sBl6H0,21 -alibabacloud_dypnsapi20170525/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_dypnsapi20170525/__pycache__/client.cpython-312.pyc,, -alibabacloud_dypnsapi20170525/__pycache__/models.cpython-312.pyc,, -alibabacloud_dypnsapi20170525/client.py,sha256=xQaWzZZfj1Kow4t1Xh8yom2Qu8KA10JwgymJ7vMUhRM,131383 -alibabacloud_dypnsapi20170525/models.py,sha256=3SAowPbBj2NmuM17NOiZlPnKCvdHJopz5q8CC9cpOjo,148327 diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/WHEEL b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/WHEEL deleted file mode 100644 index 57e3d84..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/top_level.txt deleted file mode 100644 index c3531db..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525-2.0.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_dypnsapi20170525 diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__init__.py b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__init__.py deleted file mode 100644 index d97070a..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '2.0.0' \ No newline at end of file diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index e9169dc..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/client.cpython-312.pyc deleted file mode 100644 index e2601ab..0000000 Binary files 
a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 91b0f7c..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/client.py b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/client.py deleted file mode 100644 index c82edd2..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/client.py +++ /dev/null @@ -1,2646 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from typing import Dict -from Tea.core import TeaCore - -from alibabacloud_tea_openapi.client import Client as OpenApiClient -from alibabacloud_tea_openapi import models as open_api_models -from alibabacloud_tea_util.client import Client as UtilClient -from alibabacloud_endpoint_util.client import Client as EndpointUtilClient -from alibabacloud_dypnsapi20170525 import models as dypnsapi_20170525_models -from alibabacloud_tea_util import models as util_models -from alibabacloud_openapi_util.client import Client as OpenApiUtilClient - - -class Client(OpenApiClient): - """ - *\ - """ - def __init__( - self, - config: open_api_models.Config, - ): - super().__init__(config) - self._endpoint_rule = 'central' - self.check_config(config) - self._endpoint = self.get_endpoint('dypnsapi', self._region_id, self._endpoint_rule, self._network, self._suffix, self._endpoint_map, self._endpoint) - - def get_endpoint( - self, - product_id: str, - region_id: str, - endpoint_rule: str, - network: str, - suffix: str, - endpoint_map: Dict[str, str], - endpoint: str, - ) -> str: - if not UtilClient.empty(endpoint): - return endpoint - if not UtilClient.is_unset(endpoint_map) and not UtilClient.empty(endpoint_map.get(region_id)): - return endpoint_map.get(region_id) - return EndpointUtilClient.get_endpoint_rules(product_id, region_id, endpoint_rule, network, suffix) - - def check_sms_verify_code_with_options( - self, - request: dypnsapi_20170525_models.CheckSmsVerifyCodeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.CheckSmsVerifyCodeResponse: - """ - @summary Verifies SMS verification codes. 
- - @param request: CheckSmsVerifyCodeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: CheckSmsVerifyCodeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.case_auth_policy): - query['CaseAuthPolicy'] = request.case_auth_policy - if not UtilClient.is_unset(request.country_code): - query['CountryCode'] = request.country_code - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - if not UtilClient.is_unset(request.verify_code): - query['VerifyCode'] = request.verify_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='CheckSmsVerifyCode', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.CheckSmsVerifyCodeResponse(), - self.call_api(params, req, runtime) - ) - - async def check_sms_verify_code_with_options_async( - self, - request: dypnsapi_20170525_models.CheckSmsVerifyCodeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.CheckSmsVerifyCodeResponse: - """ - @summary Verifies SMS verification codes. 
- - @param request: CheckSmsVerifyCodeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: CheckSmsVerifyCodeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.case_auth_policy): - query['CaseAuthPolicy'] = request.case_auth_policy - if not UtilClient.is_unset(request.country_code): - query['CountryCode'] = request.country_code - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - if not UtilClient.is_unset(request.verify_code): - query['VerifyCode'] = request.verify_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='CheckSmsVerifyCode', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.CheckSmsVerifyCodeResponse(), - await self.call_api_async(params, req, runtime) - ) - - def check_sms_verify_code( - self, - request: dypnsapi_20170525_models.CheckSmsVerifyCodeRequest, - ) -> dypnsapi_20170525_models.CheckSmsVerifyCodeResponse: - """ - @summary Verifies SMS verification codes. - - @param request: CheckSmsVerifyCodeRequest - @return: CheckSmsVerifyCodeResponse - """ - runtime = util_models.RuntimeOptions() - return self.check_sms_verify_code_with_options(request, runtime) - - async def check_sms_verify_code_async( - self, - request: dypnsapi_20170525_models.CheckSmsVerifyCodeRequest, - ) -> dypnsapi_20170525_models.CheckSmsVerifyCodeResponse: - """ - @summary Verifies SMS verification codes. - - @param request: CheckSmsVerifyCodeRequest - @return: CheckSmsVerifyCodeResponse - """ - runtime = util_models.RuntimeOptions() - return await self.check_sms_verify_code_with_options_async(request, runtime) - - def create_scheme_config_with_options( - self, - request: dypnsapi_20170525_models.CreateSchemeConfigRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.CreateSchemeConfigResponse: - """ - @summary Creates a code for a converged communication authentication service. 
- - @param request: CreateSchemeConfigRequest - @param runtime: runtime options for this request RuntimeOptions - @return: CreateSchemeConfigResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.android_package_name): - query['AndroidPackageName'] = request.android_package_name - if not UtilClient.is_unset(request.android_package_sign): - query['AndroidPackageSign'] = request.android_package_sign - if not UtilClient.is_unset(request.app_name): - query['AppName'] = request.app_name - if not UtilClient.is_unset(request.h_5origin): - query['H5Origin'] = request.h_5origin - if not UtilClient.is_unset(request.h_5url): - query['H5Url'] = request.h_5url - if not UtilClient.is_unset(request.ios_bundle_id): - query['IosBundleId'] = request.ios_bundle_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.platform): - query['Platform'] = request.platform - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='CreateSchemeConfig', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.CreateSchemeConfigResponse(), - self.call_api(params, req, runtime) - ) - - async def create_scheme_config_with_options_async( - self, - request: dypnsapi_20170525_models.CreateSchemeConfigRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.CreateSchemeConfigResponse: - """ - @summary Creates a code for a converged communication authentication service. 
- - @param request: CreateSchemeConfigRequest - @param runtime: runtime options for this request RuntimeOptions - @return: CreateSchemeConfigResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.android_package_name): - query['AndroidPackageName'] = request.android_package_name - if not UtilClient.is_unset(request.android_package_sign): - query['AndroidPackageSign'] = request.android_package_sign - if not UtilClient.is_unset(request.app_name): - query['AppName'] = request.app_name - if not UtilClient.is_unset(request.h_5origin): - query['H5Origin'] = request.h_5origin - if not UtilClient.is_unset(request.h_5url): - query['H5Url'] = request.h_5url - if not UtilClient.is_unset(request.ios_bundle_id): - query['IosBundleId'] = request.ios_bundle_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.platform): - query['Platform'] = request.platform - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='CreateSchemeConfig', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.CreateSchemeConfigResponse(), - await self.call_api_async(params, req, runtime) - ) - - def create_scheme_config( - self, - request: dypnsapi_20170525_models.CreateSchemeConfigRequest, - ) -> dypnsapi_20170525_models.CreateSchemeConfigResponse: - """ - @summary Creates a code for a converged communication authentication service. - - @param request: CreateSchemeConfigRequest - @return: CreateSchemeConfigResponse - """ - runtime = util_models.RuntimeOptions() - return self.create_scheme_config_with_options(request, runtime) - - async def create_scheme_config_async( - self, - request: dypnsapi_20170525_models.CreateSchemeConfigRequest, - ) -> dypnsapi_20170525_models.CreateSchemeConfigResponse: - """ - @summary Creates a code for a converged communication authentication service. - - @param request: CreateSchemeConfigRequest - @return: CreateSchemeConfigResponse - """ - runtime = util_models.RuntimeOptions() - return await self.create_scheme_config_with_options_async(request, runtime) - - def create_verify_scheme_with_options( - self, - request: dypnsapi_20170525_models.CreateVerifySchemeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.CreateVerifySchemeResponse: - """ - @summary Creates a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: CreateVerifySchemeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: CreateVerifySchemeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.app_name): - query['AppName'] = request.app_name - if not UtilClient.is_unset(request.auth_type): - query['AuthType'] = request.auth_type - if not UtilClient.is_unset(request.bundle_id): - query['BundleId'] = request.bundle_id - if not UtilClient.is_unset(request.cm_api_code): - query['CmApiCode'] = request.cm_api_code - if not UtilClient.is_unset(request.ct_api_code): - query['CtApiCode'] = request.ct_api_code - if not UtilClient.is_unset(request.cu_api_code): - query['CuApiCode'] = request.cu_api_code - if not UtilClient.is_unset(request.email): - query['Email'] = request.email - if not UtilClient.is_unset(request.hm_app_identifier): - query['HmAppIdentifier'] = request.hm_app_identifier - if not UtilClient.is_unset(request.hm_package_name): - query['HmPackageName'] = request.hm_package_name - if not UtilClient.is_unset(request.hm_sign_name): - query['HmSignName'] = request.hm_sign_name - if not UtilClient.is_unset(request.ip_white_list): - query['IpWhiteList'] = request.ip_white_list - if not UtilClient.is_unset(request.origin): - query['Origin'] = request.origin - if not UtilClient.is_unset(request.os_type): - query['OsType'] = request.os_type - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.pack_name): - query['PackName'] = request.pack_name - if not UtilClient.is_unset(request.pack_sign): - query['PackSign'] = request.pack_sign - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scene_type): - query['SceneType'] = request.scene_type - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - if not UtilClient.is_unset(request.sms_sign_name): - query['SmsSignName'] = request.sms_sign_name - if not UtilClient.is_unset(request.url): - query['Url'] = request.url - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='CreateVerifyScheme', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.CreateVerifySchemeResponse(), - self.call_api(params, req, runtime) - ) - - async def create_verify_scheme_with_options_async( - self, - request: dypnsapi_20170525_models.CreateVerifySchemeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.CreateVerifySchemeResponse: - """ - @summary Creates a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: CreateVerifySchemeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: CreateVerifySchemeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.app_name): - query['AppName'] = request.app_name - if not UtilClient.is_unset(request.auth_type): - query['AuthType'] = request.auth_type - if not UtilClient.is_unset(request.bundle_id): - query['BundleId'] = request.bundle_id - if not UtilClient.is_unset(request.cm_api_code): - query['CmApiCode'] = request.cm_api_code - if not UtilClient.is_unset(request.ct_api_code): - query['CtApiCode'] = request.ct_api_code - if not UtilClient.is_unset(request.cu_api_code): - query['CuApiCode'] = request.cu_api_code - if not UtilClient.is_unset(request.email): - query['Email'] = request.email - if not UtilClient.is_unset(request.hm_app_identifier): - query['HmAppIdentifier'] = request.hm_app_identifier - if not UtilClient.is_unset(request.hm_package_name): - query['HmPackageName'] = request.hm_package_name - if not UtilClient.is_unset(request.hm_sign_name): - query['HmSignName'] = request.hm_sign_name - if not UtilClient.is_unset(request.ip_white_list): - query['IpWhiteList'] = request.ip_white_list - if not UtilClient.is_unset(request.origin): - query['Origin'] = request.origin - if not UtilClient.is_unset(request.os_type): - query['OsType'] = request.os_type - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.pack_name): - query['PackName'] = request.pack_name - if not UtilClient.is_unset(request.pack_sign): - query['PackSign'] = request.pack_sign - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scene_type): - query['SceneType'] = request.scene_type - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - if not UtilClient.is_unset(request.sms_sign_name): - query['SmsSignName'] = request.sms_sign_name - if not UtilClient.is_unset(request.url): - query['Url'] = request.url - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='CreateVerifyScheme', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.CreateVerifySchemeResponse(), - await self.call_api_async(params, req, runtime) - ) - - def create_verify_scheme( - self, - request: dypnsapi_20170525_models.CreateVerifySchemeRequest, - ) -> dypnsapi_20170525_models.CreateVerifySchemeResponse: - """ - @summary Creates a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: CreateVerifySchemeRequest - @return: CreateVerifySchemeResponse - """ - runtime = util_models.RuntimeOptions() - return self.create_verify_scheme_with_options(request, runtime) - - async def create_verify_scheme_async( - self, - request: dypnsapi_20170525_models.CreateVerifySchemeRequest, - ) -> dypnsapi_20170525_models.CreateVerifySchemeResponse: - """ - @summary Creates a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: CreateVerifySchemeRequest - @return: CreateVerifySchemeResponse - """ - runtime = util_models.RuntimeOptions() - return await self.create_verify_scheme_with_options_async(request, runtime) - - def delete_verify_scheme_with_options( - self, - request: dypnsapi_20170525_models.DeleteVerifySchemeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.DeleteVerifySchemeResponse: - """ - @summary Deletes a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: DeleteVerifySchemeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: DeleteVerifySchemeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.customer_id): - query['CustomerId'] = request.customer_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_code): - query['SchemeCode'] = request.scheme_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='DeleteVerifyScheme', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.DeleteVerifySchemeResponse(), - self.call_api(params, req, runtime) - ) - - async def delete_verify_scheme_with_options_async( - self, - request: dypnsapi_20170525_models.DeleteVerifySchemeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.DeleteVerifySchemeResponse: - """ - @summary Deletes a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: DeleteVerifySchemeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: DeleteVerifySchemeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.customer_id): - query['CustomerId'] = request.customer_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_code): - query['SchemeCode'] = request.scheme_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='DeleteVerifyScheme', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.DeleteVerifySchemeResponse(), - await self.call_api_async(params, req, runtime) - ) - - def delete_verify_scheme( - self, - request: dypnsapi_20170525_models.DeleteVerifySchemeRequest, - ) -> dypnsapi_20170525_models.DeleteVerifySchemeResponse: - """ - @summary Deletes a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: DeleteVerifySchemeRequest - @return: DeleteVerifySchemeResponse - """ - runtime = util_models.RuntimeOptions() - return self.delete_verify_scheme_with_options(request, runtime) - - async def delete_verify_scheme_async( - self, - request: dypnsapi_20170525_models.DeleteVerifySchemeRequest, - ) -> dypnsapi_20170525_models.DeleteVerifySchemeResponse: - """ - @summary Deletes a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: DeleteVerifySchemeRequest - @return: DeleteVerifySchemeResponse - """ - runtime = util_models.RuntimeOptions() - return await self.delete_verify_scheme_with_options_async(request, runtime) - - def describe_verify_scheme_with_options( - self, - request: dypnsapi_20170525_models.DescribeVerifySchemeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.DescribeVerifySchemeResponse: - """ - @summary Queries the details of a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: DescribeVerifySchemeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: DescribeVerifySchemeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.customer_id): - query['CustomerId'] = request.customer_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_code): - query['SchemeCode'] = request.scheme_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='DescribeVerifyScheme', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.DescribeVerifySchemeResponse(), - self.call_api(params, req, runtime) - ) - - async def describe_verify_scheme_with_options_async( - self, - request: dypnsapi_20170525_models.DescribeVerifySchemeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.DescribeVerifySchemeResponse: - """ - @summary Queries the details of a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: DescribeVerifySchemeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: DescribeVerifySchemeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.customer_id): - query['CustomerId'] = request.customer_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_code): - query['SchemeCode'] = request.scheme_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='DescribeVerifyScheme', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.DescribeVerifySchemeResponse(), - await self.call_api_async(params, req, runtime) - ) - - def describe_verify_scheme( - self, - request: dypnsapi_20170525_models.DescribeVerifySchemeRequest, - ) -> dypnsapi_20170525_models.DescribeVerifySchemeResponse: - """ - @summary Queries the details of a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: DescribeVerifySchemeRequest - @return: DescribeVerifySchemeResponse - """ - runtime = util_models.RuntimeOptions() - return self.describe_verify_scheme_with_options(request, runtime) - - async def describe_verify_scheme_async( - self, - request: dypnsapi_20170525_models.DescribeVerifySchemeRequest, - ) -> dypnsapi_20170525_models.DescribeVerifySchemeResponse: - """ - @summary Queries the details of a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 100 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: DescribeVerifySchemeRequest - @return: DescribeVerifySchemeResponse - """ - runtime = util_models.RuntimeOptions() - return await self.describe_verify_scheme_with_options_async(request, runtime) - - def get_auth_token_with_options( - self, - request: dypnsapi_20170525_models.GetAuthTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetAuthTokenResponse: - """ - @summary Obtains the authorization token used for the authentication of the phone number verification for HTML5 pages. You can obtain AccessToken and JwtToken after a successful call. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: GetAuthTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetAuthTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.biz_type): - query['BizType'] = request.biz_type - if not UtilClient.is_unset(request.cm_api_code): - query['CmApiCode'] = request.cm_api_code - if not UtilClient.is_unset(request.ct_api_code): - query['CtApiCode'] = request.ct_api_code - if not UtilClient.is_unset(request.cu_api_code): - query['CuApiCode'] = request.cu_api_code - if not UtilClient.is_unset(request.origin): - query['Origin'] = request.origin - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scene_code): - query['SceneCode'] = request.scene_code - if not UtilClient.is_unset(request.url): - query['Url'] = request.url - if not UtilClient.is_unset(request.version): - query['Version'] = request.version - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetAuthToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetAuthTokenResponse(), - self.call_api(params, req, runtime) - ) - - async def get_auth_token_with_options_async( - self, - request: dypnsapi_20170525_models.GetAuthTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetAuthTokenResponse: - """ - @summary Obtains the authorization token used for the authentication of the phone number verification for HTML5 pages. You can obtain AccessToken and JwtToken after a successful call. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: GetAuthTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetAuthTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.biz_type): - query['BizType'] = request.biz_type - if not UtilClient.is_unset(request.cm_api_code): - query['CmApiCode'] = request.cm_api_code - if not UtilClient.is_unset(request.ct_api_code): - query['CtApiCode'] = request.ct_api_code - if not UtilClient.is_unset(request.cu_api_code): - query['CuApiCode'] = request.cu_api_code - if not UtilClient.is_unset(request.origin): - query['Origin'] = request.origin - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scene_code): - query['SceneCode'] = request.scene_code - if not UtilClient.is_unset(request.url): - query['Url'] = request.url - if not UtilClient.is_unset(request.version): - query['Version'] = request.version - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetAuthToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetAuthTokenResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_auth_token( - self, - request: dypnsapi_20170525_models.GetAuthTokenRequest, - ) -> dypnsapi_20170525_models.GetAuthTokenResponse: - """ - @summary Obtains the authorization token used for the authentication of the phone number verification for HTML5 pages. You can obtain AccessToken and JwtToken after a successful call. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetAuthTokenRequest - @return: GetAuthTokenResponse - """ - runtime = util_models.RuntimeOptions() - return self.get_auth_token_with_options(request, runtime) - - async def get_auth_token_async( - self, - request: dypnsapi_20170525_models.GetAuthTokenRequest, - ) -> dypnsapi_20170525_models.GetAuthTokenResponse: - """ - @summary Obtains the authorization token used for the authentication of the phone number verification for HTML5 pages. You can obtain AccessToken and JwtToken after a successful call. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. 
If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetAuthTokenRequest - @return: GetAuthTokenResponse - """ - runtime = util_models.RuntimeOptions() - return await self.get_auth_token_with_options_async(request, runtime) - - def get_authorization_url_with_options( - self, - request: dypnsapi_20170525_models.GetAuthorizationUrlRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetAuthorizationUrlResponse: - """ - @summary Obtains the URL for the Alipay account authorization. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account and obtain an Alibaba Cloud AccessKey pair. For more information, see [Process of communication authorization](https://help.aliyun.com/document_detail/196922.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetAuthorizationUrlRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetAuthorizationUrlResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.end_date): - query['EndDate'] = request.end_date - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_no): - query['PhoneNo'] = request.phone_no - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_id): - query['SchemeId'] = request.scheme_id - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetAuthorizationUrl', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetAuthorizationUrlResponse(), - self.call_api(params, req, runtime) - ) - - async def get_authorization_url_with_options_async( - self, - request: dypnsapi_20170525_models.GetAuthorizationUrlRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetAuthorizationUrlResponse: - """ - @summary Obtains the URL for the Alipay account authorization. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account and obtain an Alibaba Cloud AccessKey pair. For more information, see [Process of communication authorization](https://help.aliyun.com/document_detail/196922.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: GetAuthorizationUrlRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetAuthorizationUrlResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.end_date): - query['EndDate'] = request.end_date - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_no): - query['PhoneNo'] = request.phone_no - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_id): - query['SchemeId'] = request.scheme_id - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetAuthorizationUrl', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetAuthorizationUrlResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_authorization_url( - self, - request: dypnsapi_20170525_models.GetAuthorizationUrlRequest, - ) -> dypnsapi_20170525_models.GetAuthorizationUrlResponse: - """ - @summary Obtains the URL for the Alipay account authorization. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account and obtain an Alibaba Cloud AccessKey pair. For more information, see [Process of communication authorization](https://help.aliyun.com/document_detail/196922.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetAuthorizationUrlRequest - @return: GetAuthorizationUrlResponse - """ - runtime = util_models.RuntimeOptions() - return self.get_authorization_url_with_options(request, runtime) - - async def get_authorization_url_async( - self, - request: dypnsapi_20170525_models.GetAuthorizationUrlRequest, - ) -> dypnsapi_20170525_models.GetAuthorizationUrlResponse: - """ - @summary Obtains the URL for the Alipay account authorization. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account and obtain an Alibaba Cloud AccessKey pair. For more information, see [Process of communication authorization](https://help.aliyun.com/document_detail/196922.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetAuthorizationUrlRequest - @return: GetAuthorizationUrlResponse - """ - runtime = util_models.RuntimeOptions() - return await self.get_authorization_url_with_options_async(request, runtime) - - def get_fusion_auth_token_with_options( - self, - request: dypnsapi_20170525_models.GetFusionAuthTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the token that is obtained from the client SDKs. 
- - @param request: GetFusionAuthTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetFusionAuthTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.bundle_id): - query['BundleId'] = request.bundle_id - if not UtilClient.is_unset(request.duration_seconds): - query['DurationSeconds'] = request.duration_seconds - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.package_name): - query['PackageName'] = request.package_name - if not UtilClient.is_unset(request.package_sign): - query['PackageSign'] = request.package_sign - if not UtilClient.is_unset(request.platform): - query['Platform'] = request.platform - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_code): - query['SchemeCode'] = request.scheme_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetFusionAuthToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetFusionAuthTokenResponse(), - self.call_api(params, req, runtime) - ) - - async def get_fusion_auth_token_with_options_async( - self, - request: dypnsapi_20170525_models.GetFusionAuthTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the token that is obtained from the client SDKs. 
- - @param request: GetFusionAuthTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetFusionAuthTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.bundle_id): - query['BundleId'] = request.bundle_id - if not UtilClient.is_unset(request.duration_seconds): - query['DurationSeconds'] = request.duration_seconds - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.package_name): - query['PackageName'] = request.package_name - if not UtilClient.is_unset(request.package_sign): - query['PackageSign'] = request.package_sign - if not UtilClient.is_unset(request.platform): - query['Platform'] = request.platform - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scheme_code): - query['SchemeCode'] = request.scheme_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetFusionAuthToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetFusionAuthTokenResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_fusion_auth_token( - self, - request: dypnsapi_20170525_models.GetFusionAuthTokenRequest, - ) -> dypnsapi_20170525_models.GetFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the token that is obtained from the client SDKs. - - @param request: GetFusionAuthTokenRequest - @return: GetFusionAuthTokenResponse - """ - runtime = util_models.RuntimeOptions() - return self.get_fusion_auth_token_with_options(request, runtime) - - async def get_fusion_auth_token_async( - self, - request: dypnsapi_20170525_models.GetFusionAuthTokenRequest, - ) -> dypnsapi_20170525_models.GetFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the token that is obtained from the client SDKs. - - @param request: GetFusionAuthTokenRequest - @return: GetFusionAuthTokenResponse - """ - runtime = util_models.RuntimeOptions() - return await self.get_fusion_auth_token_with_options_async(request, runtime) - - def get_mobile_with_options( - self, - request: dypnsapi_20170525_models.GetMobileRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetMobileResponse: - """ - @summary Obtains a phone number for one-click logon. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration. You can call this operation only after you confirm the authorization on the authorization page provided by the SDK for one-click logon. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. 
If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetMobileRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetMobileResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.access_token): - query['AccessToken'] = request.access_token - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetMobile', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetMobileResponse(), - self.call_api(params, req, runtime) - ) - - async def get_mobile_with_options_async( - self, - request: dypnsapi_20170525_models.GetMobileRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetMobileResponse: - """ - @summary Obtains a phone number for one-click logon. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration. You can call this operation only after you confirm the authorization on the authorization page provided by the SDK for one-click logon. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: GetMobileRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetMobileResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.access_token): - query['AccessToken'] = request.access_token - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetMobile', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetMobileResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_mobile( - self, - request: dypnsapi_20170525_models.GetMobileRequest, - ) -> dypnsapi_20170525_models.GetMobileResponse: - """ - @summary Obtains a phone number for one-click logon. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration. You can call this operation only after you confirm the authorization on the authorization page provided by the SDK for one-click logon. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetMobileRequest - @return: GetMobileResponse - """ - runtime = util_models.RuntimeOptions() - return self.get_mobile_with_options(request, runtime) - - async def get_mobile_async( - self, - request: dypnsapi_20170525_models.GetMobileRequest, - ) -> dypnsapi_20170525_models.GetMobileResponse: - """ - @summary Obtains a phone number for one-click logon. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration. You can call this operation only after you confirm the authorization on the authorization page provided by the SDK for one-click logon. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. 
We recommend that you take note of the limit when you call this operation. - - @param request: GetMobileRequest - @return: GetMobileResponse - """ - runtime = util_models.RuntimeOptions() - return await self.get_mobile_with_options_async(request, runtime) - - def get_phone_with_token_with_options( - self, - request: dypnsapi_20170525_models.GetPhoneWithTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetPhoneWithTokenResponse: - """ - @summary Obtains a phone number for one-click logon. This operation is exclusive to HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration in HTML5 pages. You can call this operation only after you confirm the authorization on the authorization page provided by the JavaScript SDK. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetPhoneWithTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetPhoneWithTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.sp_token): - query['SpToken'] = request.sp_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetPhoneWithToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetPhoneWithTokenResponse(), - self.call_api(params, req, runtime) - ) - - async def get_phone_with_token_with_options_async( - self, - request: dypnsapi_20170525_models.GetPhoneWithTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetPhoneWithTokenResponse: - """ - @summary Obtains a phone number for one-click logon. This operation is exclusive to HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration in HTML5 pages. You can call this operation only after you confirm the authorization on the authorization page provided by the JavaScript SDK. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. 
- ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetPhoneWithTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetPhoneWithTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.sp_token): - query['SpToken'] = request.sp_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetPhoneWithToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetPhoneWithTokenResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_phone_with_token( - self, - request: dypnsapi_20170525_models.GetPhoneWithTokenRequest, - ) -> dypnsapi_20170525_models.GetPhoneWithTokenResponse: - """ - @summary Obtains a phone number for one-click logon. This operation is exclusive to HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration in HTML5 pages. You can call this operation only after you confirm the authorization on the authorization page provided by the JavaScript SDK. You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetPhoneWithTokenRequest - @return: GetPhoneWithTokenResponse - """ - runtime = util_models.RuntimeOptions() - return self.get_phone_with_token_with_options(request, runtime) - - async def get_phone_with_token_async( - self, - request: dypnsapi_20170525_models.GetPhoneWithTokenRequest, - ) -> dypnsapi_20170525_models.GetPhoneWithTokenResponse: - """ - @summary Obtains a phone number for one-click logon. This operation is exclusive to HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable only to one-click logon or registration in HTML5 pages. You can call this operation only after you confirm the authorization on the authorization page provided by the JavaScript SDK. 
You are prohibited from simulating or bypassing the authorization process. Alibaba Cloud reserves the right to terminate our services and take legal actions against such violations. - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetPhoneWithTokenRequest - @return: GetPhoneWithTokenResponse - """ - runtime = util_models.RuntimeOptions() - return await self.get_phone_with_token_with_options_async(request, runtime) - - def get_sms_auth_tokens_with_options( - self, - request: dypnsapi_20170525_models.GetSmsAuthTokensRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetSmsAuthTokensResponse: - """ - @summary Obtains the authorization token for an SMS verification code. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetSmsAuthTokensRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetSmsAuthTokensResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.bundle_id): - query['BundleId'] = request.bundle_id - if not UtilClient.is_unset(request.expire): - query['Expire'] = request.expire - if not UtilClient.is_unset(request.os_type): - query['OsType'] = request.os_type - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.package_name): - query['PackageName'] = request.package_name - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scene_code): - query['SceneCode'] = request.scene_code - if not UtilClient.is_unset(request.sign_name): - query['SignName'] = request.sign_name - if not UtilClient.is_unset(request.sms_code_expire): - query['SmsCodeExpire'] = request.sms_code_expire - if not UtilClient.is_unset(request.sms_template_code): - query['SmsTemplateCode'] = request.sms_template_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetSmsAuthTokens', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetSmsAuthTokensResponse(), - self.call_api(params, req, runtime) - ) - - async def get_sms_auth_tokens_with_options_async( - self, - request: dypnsapi_20170525_models.GetSmsAuthTokensRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.GetSmsAuthTokensResponse: - """ - @summary Obtains the authorization token for an SMS verification code. 
- - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetSmsAuthTokensRequest - @param runtime: runtime options for this request RuntimeOptions - @return: GetSmsAuthTokensResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.bundle_id): - query['BundleId'] = request.bundle_id - if not UtilClient.is_unset(request.expire): - query['Expire'] = request.expire - if not UtilClient.is_unset(request.os_type): - query['OsType'] = request.os_type - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.package_name): - query['PackageName'] = request.package_name - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.scene_code): - query['SceneCode'] = request.scene_code - if not UtilClient.is_unset(request.sign_name): - query['SignName'] = request.sign_name - if not UtilClient.is_unset(request.sms_code_expire): - query['SmsCodeExpire'] = request.sms_code_expire - if not UtilClient.is_unset(request.sms_template_code): - query['SmsTemplateCode'] = request.sms_template_code - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetSmsAuthTokens', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.GetSmsAuthTokensResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_sms_auth_tokens( - self, - request: dypnsapi_20170525_models.GetSmsAuthTokensRequest, - ) -> dypnsapi_20170525_models.GetSmsAuthTokensResponse: - """ - @summary Obtains the authorization token for an SMS verification code. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetSmsAuthTokensRequest - @return: GetSmsAuthTokensResponse - """ - runtime = util_models.RuntimeOptions() - return self.get_sms_auth_tokens_with_options(request, runtime) - - async def get_sms_auth_tokens_async( - self, - request: dypnsapi_20170525_models.GetSmsAuthTokensRequest, - ) -> dypnsapi_20170525_models.GetSmsAuthTokensResponse: - """ - @summary Obtains the authorization token for an SMS verification code. 
- - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: GetSmsAuthTokensRequest - @return: GetSmsAuthTokensResponse - """ - runtime = util_models.RuntimeOptions() - return await self.get_sms_auth_tokens_with_options_async(request, runtime) - - def query_gate_verify_billing_public_with_options( - self, - request: dypnsapi_20170525_models.QueryGateVerifyBillingPublicRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.QueryGateVerifyBillingPublicResponse: - """ - @summary Queries the fees generated by a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: QueryGateVerifyBillingPublicRequest - @param runtime: runtime options for this request RuntimeOptions - @return: QueryGateVerifyBillingPublicResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.authentication_type): - query['AuthenticationType'] = request.authentication_type - if not UtilClient.is_unset(request.month): - query['Month'] = request.month - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='QueryGateVerifyBillingPublic', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.QueryGateVerifyBillingPublicResponse(), - self.call_api(params, req, runtime) - ) - - async def query_gate_verify_billing_public_with_options_async( - self, - request: dypnsapi_20170525_models.QueryGateVerifyBillingPublicRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.QueryGateVerifyBillingPublicResponse: - """ - @summary Queries the fees generated by a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
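GetSmsAuthTokens, removed above, returns the token bundle that the client-side SDK needs before it can send and check SMS verification codes. A rough usage sketch follows, reusing the client constructed in the GetMobile sketch earlier in this diff; the field names match the deleted request mapping, and every value is a placeholder:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` is the Client built in the GetMobile sketch above.
# Field names come from the deleted GetSmsAuthTokensRequest; the values (and
# their exact types/units) are assumptions, not taken from this diff.
request = dypnsapi_models.GetSmsAuthTokensRequest(
    os_type='Android',
    package_name='com.example.app',
    sign_name='ExampleSign',
    sms_template_code='SMS_0000000',
    expire=3600,
    sms_code_expire=300,
)
print(client.get_sms_auth_tokens(request).body.to_map())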
- - @param request: QueryGateVerifyBillingPublicRequest - @param runtime: runtime options for this request RuntimeOptions - @return: QueryGateVerifyBillingPublicResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.authentication_type): - query['AuthenticationType'] = request.authentication_type - if not UtilClient.is_unset(request.month): - query['Month'] = request.month - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='QueryGateVerifyBillingPublic', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.QueryGateVerifyBillingPublicResponse(), - await self.call_api_async(params, req, runtime) - ) - - def query_gate_verify_billing_public( - self, - request: dypnsapi_20170525_models.QueryGateVerifyBillingPublicRequest, - ) -> dypnsapi_20170525_models.QueryGateVerifyBillingPublicResponse: - """ - @summary Queries the fees generated by a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: QueryGateVerifyBillingPublicRequest - @return: QueryGateVerifyBillingPublicResponse - """ - runtime = util_models.RuntimeOptions() - return self.query_gate_verify_billing_public_with_options(request, runtime) - - async def query_gate_verify_billing_public_async( - self, - request: dypnsapi_20170525_models.QueryGateVerifyBillingPublicRequest, - ) -> dypnsapi_20170525_models.QueryGateVerifyBillingPublicResponse: - """ - @summary Queries the fees generated by a verification service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: QueryGateVerifyBillingPublicRequest - @return: QueryGateVerifyBillingPublicResponse - """ - runtime = util_models.RuntimeOptions() - return await self.query_gate_verify_billing_public_with_options_async(request, runtime) - - def query_gate_verify_statistic_public_with_options( - self, - request: dypnsapi_20170525_models.QueryGateVerifyStatisticPublicRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.QueryGateVerifyStatisticPublicResponse: - """ - @summary Queries the calls of Phone Number Verification Service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
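QueryGateVerifyBillingPublic, whose wrappers are deleted above, reports the fees a verification service generated in a given month. A minimal sketch, reusing the client from the GetMobile example; the yyyyMM month format is an assumption, not something stated in this diff:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` as constructed in the GetMobile sketch above.
# Month format (yyyyMM) is assumed; check the product documentation.
request = dypnsapi_models.QueryGateVerifyBillingPublicRequest(month='202401')
print(client.query_gate_verify_billing_public(request).body.to_map())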
- - @param request: QueryGateVerifyStatisticPublicRequest - @param runtime: runtime options for this request RuntimeOptions - @return: QueryGateVerifyStatisticPublicResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.authentication_type): - query['AuthenticationType'] = request.authentication_type - if not UtilClient.is_unset(request.end_date): - query['EndDate'] = request.end_date - if not UtilClient.is_unset(request.os_type): - query['OsType'] = request.os_type - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.scene_code): - query['SceneCode'] = request.scene_code - if not UtilClient.is_unset(request.start_date): - query['StartDate'] = request.start_date - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='QueryGateVerifyStatisticPublic', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.QueryGateVerifyStatisticPublicResponse(), - self.call_api(params, req, runtime) - ) - - async def query_gate_verify_statistic_public_with_options_async( - self, - request: dypnsapi_20170525_models.QueryGateVerifyStatisticPublicRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.QueryGateVerifyStatisticPublicResponse: - """ - @summary Queries the calls of Phone Number Verification Service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: QueryGateVerifyStatisticPublicRequest - @param runtime: runtime options for this request RuntimeOptions - @return: QueryGateVerifyStatisticPublicResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.authentication_type): - query['AuthenticationType'] = request.authentication_type - if not UtilClient.is_unset(request.end_date): - query['EndDate'] = request.end_date - if not UtilClient.is_unset(request.os_type): - query['OsType'] = request.os_type - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.scene_code): - query['SceneCode'] = request.scene_code - if not UtilClient.is_unset(request.start_date): - query['StartDate'] = request.start_date - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='QueryGateVerifyStatisticPublic', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.QueryGateVerifyStatisticPublicResponse(), - await self.call_api_async(params, req, runtime) - ) - - def query_gate_verify_statistic_public( - self, - request: dypnsapi_20170525_models.QueryGateVerifyStatisticPublicRequest, - ) -> dypnsapi_20170525_models.QueryGateVerifyStatisticPublicResponse: - """ - @summary Queries the calls of Phone Number Verification Service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: QueryGateVerifyStatisticPublicRequest - @return: QueryGateVerifyStatisticPublicResponse - """ - runtime = util_models.RuntimeOptions() - return self.query_gate_verify_statistic_public_with_options(request, runtime) - - async def query_gate_verify_statistic_public_async( - self, - request: dypnsapi_20170525_models.QueryGateVerifyStatisticPublicRequest, - ) -> dypnsapi_20170525_models.QueryGateVerifyStatisticPublicResponse: - """ - @summary Queries the calls of Phone Number Verification Service. - - @description ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: QueryGateVerifyStatisticPublicRequest - @return: QueryGateVerifyStatisticPublicResponse - """ - runtime = util_models.RuntimeOptions() - return await self.query_gate_verify_statistic_public_with_options_async(request, runtime) - - def query_send_details_with_options( - self, - request: dypnsapi_20170525_models.QuerySendDetailsRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.QuerySendDetailsResponse: - """ - @deprecated OpenAPI QuerySendDetails is deprecated - - @summary Queries the delivery status of the SMS verification code. You can query only the delivery status of the SMS verification code that is sent by calling corresponding API operations. 
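QueryGateVerifyStatisticPublic, removed above, returns call statistics for Phone Number Verification Service over a date range. A rough sketch follows; the deleted code only shows the StartDate/EndDate parameter names, so the yyyyMMdd string format and the values are assumptions:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` as constructed in the GetMobile sketch above.
# Date format is assumed to be yyyyMMdd strings; adjust if your SDK version
# expects a different representation.
request = dypnsapi_models.QueryGateVerifyStatisticPublicRequest(
    start_date='20240101',
    end_date='20240131',
)
print(client.query_gate_verify_statistic_public(request).body.to_map())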
- - @param request: QuerySendDetailsRequest - @param runtime: runtime options for this request RuntimeOptions - @return: QuerySendDetailsResponse - Deprecated - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.biz_id): - query['BizId'] = request.biz_id - if not UtilClient.is_unset(request.current_page): - query['CurrentPage'] = request.current_page - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.page_size): - query['PageSize'] = request.page_size - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.send_date): - query['SendDate'] = request.send_date - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='QuerySendDetails', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.QuerySendDetailsResponse(), - self.call_api(params, req, runtime) - ) - - async def query_send_details_with_options_async( - self, - request: dypnsapi_20170525_models.QuerySendDetailsRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.QuerySendDetailsResponse: - """ - @deprecated OpenAPI QuerySendDetails is deprecated - - @summary Queries the delivery status of the SMS verification code. You can query only the delivery status of the SMS verification code that is sent by calling corresponding API operations. 
- - @param request: QuerySendDetailsRequest - @param runtime: runtime options for this request RuntimeOptions - @return: QuerySendDetailsResponse - Deprecated - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.biz_id): - query['BizId'] = request.biz_id - if not UtilClient.is_unset(request.current_page): - query['CurrentPage'] = request.current_page - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.page_size): - query['PageSize'] = request.page_size - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.send_date): - query['SendDate'] = request.send_date - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='QuerySendDetails', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.QuerySendDetailsResponse(), - await self.call_api_async(params, req, runtime) - ) - - def query_send_details( - self, - request: dypnsapi_20170525_models.QuerySendDetailsRequest, - ) -> dypnsapi_20170525_models.QuerySendDetailsResponse: - """ - @deprecated OpenAPI QuerySendDetails is deprecated - - @summary Queries the delivery status of the SMS verification code. You can query only the delivery status of the SMS verification code that is sent by calling corresponding API operations. - - @param request: QuerySendDetailsRequest - @return: QuerySendDetailsResponse - Deprecated - """ - runtime = util_models.RuntimeOptions() - return self.query_send_details_with_options(request, runtime) - - async def query_send_details_async( - self, - request: dypnsapi_20170525_models.QuerySendDetailsRequest, - ) -> dypnsapi_20170525_models.QuerySendDetailsResponse: - """ - @deprecated OpenAPI QuerySendDetails is deprecated - - @summary Queries the delivery status of the SMS verification code. You can query only the delivery status of the SMS verification code that is sent by calling corresponding API operations. - - @param request: QuerySendDetailsRequest - @return: QuerySendDetailsResponse - Deprecated - """ - runtime = util_models.RuntimeOptions() - return await self.query_send_details_with_options_async(request, runtime) - - def send_sms_verify_code_with_options( - self, - request: dypnsapi_20170525_models.SendSmsVerifyCodeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.SendSmsVerifyCodeResponse: - """ - @summary Sends SMS verification codes. 
- - @param request: SendSmsVerifyCodeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: SendSmsVerifyCodeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.auto_retry): - query['AutoRetry'] = request.auto_retry - if not UtilClient.is_unset(request.code_length): - query['CodeLength'] = request.code_length - if not UtilClient.is_unset(request.code_type): - query['CodeType'] = request.code_type - if not UtilClient.is_unset(request.country_code): - query['CountryCode'] = request.country_code - if not UtilClient.is_unset(request.duplicate_policy): - query['DuplicatePolicy'] = request.duplicate_policy - if not UtilClient.is_unset(request.interval): - query['Interval'] = request.interval - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.return_verify_code): - query['ReturnVerifyCode'] = request.return_verify_code - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - if not UtilClient.is_unset(request.sign_name): - query['SignName'] = request.sign_name - if not UtilClient.is_unset(request.sms_up_extend_code): - query['SmsUpExtendCode'] = request.sms_up_extend_code - if not UtilClient.is_unset(request.template_code): - query['TemplateCode'] = request.template_code - if not UtilClient.is_unset(request.template_param): - query['TemplateParam'] = request.template_param - if not UtilClient.is_unset(request.valid_time): - query['ValidTime'] = request.valid_time - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='SendSmsVerifyCode', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.SendSmsVerifyCodeResponse(), - self.call_api(params, req, runtime) - ) - - async def send_sms_verify_code_with_options_async( - self, - request: dypnsapi_20170525_models.SendSmsVerifyCodeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.SendSmsVerifyCodeResponse: - """ - @summary Sends SMS verification codes. 
- - @param request: SendSmsVerifyCodeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: SendSmsVerifyCodeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.auto_retry): - query['AutoRetry'] = request.auto_retry - if not UtilClient.is_unset(request.code_length): - query['CodeLength'] = request.code_length - if not UtilClient.is_unset(request.code_type): - query['CodeType'] = request.code_type - if not UtilClient.is_unset(request.country_code): - query['CountryCode'] = request.country_code - if not UtilClient.is_unset(request.duplicate_policy): - query['DuplicatePolicy'] = request.duplicate_policy - if not UtilClient.is_unset(request.interval): - query['Interval'] = request.interval - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.return_verify_code): - query['ReturnVerifyCode'] = request.return_verify_code - if not UtilClient.is_unset(request.scheme_name): - query['SchemeName'] = request.scheme_name - if not UtilClient.is_unset(request.sign_name): - query['SignName'] = request.sign_name - if not UtilClient.is_unset(request.sms_up_extend_code): - query['SmsUpExtendCode'] = request.sms_up_extend_code - if not UtilClient.is_unset(request.template_code): - query['TemplateCode'] = request.template_code - if not UtilClient.is_unset(request.template_param): - query['TemplateParam'] = request.template_param - if not UtilClient.is_unset(request.valid_time): - query['ValidTime'] = request.valid_time - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='SendSmsVerifyCode', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.SendSmsVerifyCodeResponse(), - await self.call_api_async(params, req, runtime) - ) - - def send_sms_verify_code( - self, - request: dypnsapi_20170525_models.SendSmsVerifyCodeRequest, - ) -> dypnsapi_20170525_models.SendSmsVerifyCodeResponse: - """ - @summary Sends SMS verification codes. - - @param request: SendSmsVerifyCodeRequest - @return: SendSmsVerifyCodeResponse - """ - runtime = util_models.RuntimeOptions() - return self.send_sms_verify_code_with_options(request, runtime) - - async def send_sms_verify_code_async( - self, - request: dypnsapi_20170525_models.SendSmsVerifyCodeRequest, - ) -> dypnsapi_20170525_models.SendSmsVerifyCodeResponse: - """ - @summary Sends SMS verification codes. - - @param request: SendSmsVerifyCodeRequest - @return: SendSmsVerifyCodeResponse - """ - runtime = util_models.RuntimeOptions() - return await self.send_sms_verify_code_with_options_async(request, runtime) - - def verify_mobile_with_options( - self, - request: dypnsapi_20170525_models.VerifyMobileRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifyMobileResponse: - """ - @summary Verifies the phone number that you use. 
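SendSmsVerifyCode, whose wrappers are deleted above, lets the service generate and deliver the verification code itself (the CodeLength, CodeType, and ReturnVerifyCode parameters in the deleted mapping reflect this). A minimal sketch, reusing the client from the GetMobile example; the sign name, template code, and the ##code## placeholder convention in TemplateParam are assumptions based on the product documentation, not on this diff:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` as constructed in the GetMobile sketch above. All values below are
# placeholders; ValidTime is assumed to be in seconds.
request = dypnsapi_models.SendSmsVerifyCodeRequest(
    phone_number='13800000000',
    sign_name='ExampleSign',
    template_code='SMS_0000000',
    template_param='{"code":"##code##"}',
    valid_time=300,
)
print(client.send_sms_verify_code(request).body.to_map())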
- - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable to only the verification of thephone number that you use. To obtain a phone number for one-click logon, call [GetMobile](https://help.aliyun.com/document_detail/189865.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifyMobileRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifyMobileResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.access_code): - query['AccessCode'] = request.access_code - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifyMobile', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifyMobileResponse(), - self.call_api(params, req, runtime) - ) - - async def verify_mobile_with_options_async( - self, - request: dypnsapi_20170525_models.VerifyMobileRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifyMobileResponse: - """ - @summary Verifies the phone number that you use. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable to only the verification of thephone number that you use. To obtain a phone number for one-click logon, call [GetMobile](https://help.aliyun.com/document_detail/189865.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: VerifyMobileRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifyMobileResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.access_code): - query['AccessCode'] = request.access_code - if not UtilClient.is_unset(request.out_id): - query['OutId'] = request.out_id - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifyMobile', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifyMobileResponse(), - await self.call_api_async(params, req, runtime) - ) - - def verify_mobile( - self, - request: dypnsapi_20170525_models.VerifyMobileRequest, - ) -> dypnsapi_20170525_models.VerifyMobileResponse: - """ - @summary Verifies the phone number that you use. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable to only the verification of thephone number that you use. To obtain a phone number for one-click logon, call [GetMobile](https://help.aliyun.com/document_detail/189865.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifyMobileRequest - @return: VerifyMobileResponse - """ - runtime = util_models.RuntimeOptions() - return self.verify_mobile_with_options(request, runtime) - - async def verify_mobile_async( - self, - request: dypnsapi_20170525_models.VerifyMobileRequest, - ) -> dypnsapi_20170525_models.VerifyMobileResponse: - """ - @summary Verifies the phone number that you use. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Getting Started](https://help.aliyun.com/document_detail/84541.html). - > This operation is applicable to only the verification of thephone number that you use. To obtain a phone number for one-click logon, call [GetMobile](https://help.aliyun.com/document_detail/189865.html). - ### [](#qps)QPS limits - You can call this operation up to 5,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
- - @param request: VerifyMobileRequest - @return: VerifyMobileResponse - """ - runtime = util_models.RuntimeOptions() - return await self.verify_mobile_with_options_async(request, runtime) - - def verify_phone_with_token_with_options( - self, - request: dypnsapi_20170525_models.VerifyPhoneWithTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifyPhoneWithTokenResponse: - """ - @summary Verifies the phone number used in HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifyPhoneWithTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifyPhoneWithTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.sp_token): - query['SpToken'] = request.sp_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifyPhoneWithToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifyPhoneWithTokenResponse(), - self.call_api(params, req, runtime) - ) - - async def verify_phone_with_token_with_options_async( - self, - request: dypnsapi_20170525_models.VerifyPhoneWithTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifyPhoneWithTokenResponse: - """ - @summary Verifies the phone number used in HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. 
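VerifyMobile, deleted above, checks the phone number the user supplied against the number they are currently using, based on the AccessCode obtained from the on-device number-authentication SDK. A minimal sketch, reusing the client from the GetMobile example; both values are placeholders:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` as constructed in the GetMobile sketch above. AccessCode comes from
# the client-side SDK; the phone number is the one the user entered.
request = dypnsapi_models.VerifyMobileRequest(
    access_code='<sdk-access-code>',
    phone_number='13800000000',
)
print(client.verify_mobile(request).body.to_map())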
- - @param request: VerifyPhoneWithTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifyPhoneWithTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.sp_token): - query['SpToken'] = request.sp_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifyPhoneWithToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifyPhoneWithTokenResponse(), - await self.call_api_async(params, req, runtime) - ) - - def verify_phone_with_token( - self, - request: dypnsapi_20170525_models.VerifyPhoneWithTokenRequest, - ) -> dypnsapi_20170525_models.VerifyPhoneWithTokenResponse: - """ - @summary Verifies the phone number used in HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifyPhoneWithTokenRequest - @return: VerifyPhoneWithTokenResponse - """ - runtime = util_models.RuntimeOptions() - return self.verify_phone_with_token_with_options(request, runtime) - - async def verify_phone_with_token_async( - self, - request: dypnsapi_20170525_models.VerifyPhoneWithTokenRequest, - ) -> dypnsapi_20170525_models.VerifyPhoneWithTokenResponse: - """ - @summary Verifies the phone number used in HTML5 pages. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the phone number verification feature for HTML5 pages](https://help.aliyun.com/document_detail/169786.html). - ### [](#qps)QPS limits - You can call this operation up to 1,000 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifyPhoneWithTokenRequest - @return: VerifyPhoneWithTokenResponse - """ - runtime = util_models.RuntimeOptions() - return await self.verify_phone_with_token_with_options_async(request, runtime) - - def verify_sms_code_with_options( - self, - request: dypnsapi_20170525_models.VerifySmsCodeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifySmsCodeResponse: - """ - @summary Verifies SMS verification codes. 
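VerifyPhoneWithToken, removed above, is the HTML5 counterpart: the page obtains an SpToken through the JavaScript SDK and the backend passes it along with the user-supplied number. A minimal sketch, reusing the client from the GetMobile example; both values are placeholders:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` as constructed in the GetMobile sketch above. SpToken is issued by
# the H5 JavaScript SDK after the user confirms authorization.
request = dypnsapi_models.VerifyPhoneWithTokenRequest(
    phone_number='13800000000',
    sp_token='<h5-sp-token>',
)
print(client.verify_phone_with_token(request).body.to_map())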
- - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifySmsCodeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifySmsCodeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.sms_code): - query['SmsCode'] = request.sms_code - if not UtilClient.is_unset(request.sms_token): - query['SmsToken'] = request.sms_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifySmsCode', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifySmsCodeResponse(), - self.call_api(params, req, runtime) - ) - - async def verify_sms_code_with_options_async( - self, - request: dypnsapi_20170525_models.VerifySmsCodeRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifySmsCodeResponse: - """ - @summary Verifies SMS verification codes. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifySmsCodeRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifySmsCodeResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.phone_number): - query['PhoneNumber'] = request.phone_number - if not UtilClient.is_unset(request.sms_code): - query['SmsCode'] = request.sms_code - if not UtilClient.is_unset(request.sms_token): - query['SmsToken'] = request.sms_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifySmsCode', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifySmsCodeResponse(), - await self.call_api_async(params, req, runtime) - ) - - def verify_sms_code( - self, - request: dypnsapi_20170525_models.VerifySmsCodeRequest, - ) -> dypnsapi_20170525_models.VerifySmsCodeResponse: - """ - @summary Verifies SMS verification codes. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. 
For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifySmsCodeRequest - @return: VerifySmsCodeResponse - """ - runtime = util_models.RuntimeOptions() - return self.verify_sms_code_with_options(request, runtime) - - async def verify_sms_code_async( - self, - request: dypnsapi_20170525_models.VerifySmsCodeRequest, - ) -> dypnsapi_20170525_models.VerifySmsCodeResponse: - """ - @summary Verifies SMS verification codes. - - @description ### [](#)Preparations - You must register an Alibaba Cloud account, obtain an Alibaba Cloud AccessKey pair, and create a verification service. For more information, see [Use the SMS verification feature](https://help.aliyun.com/document_detail/313209.html). - ### [](#qps)QPS limits - You can call this operation up to 500 times per second per account. If the number of calls per second exceeds the limit, throttling is triggered. As a result, your business may be affected. We recommend that you take note of the limit when you call this operation. - - @param request: VerifySmsCodeRequest - @return: VerifySmsCodeResponse - """ - runtime = util_models.RuntimeOptions() - return await self.verify_sms_code_with_options_async(request, runtime) - - def verify_with_fusion_auth_token_with_options( - self, - request: dypnsapi_20170525_models.VerifyWithFusionAuthTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifyWithFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the verification token that is obtained by using the authentication token. - - @param request: VerifyWithFusionAuthTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifyWithFusionAuthTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.verify_token): - query['VerifyToken'] = request.verify_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifyWithFusionAuthToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifyWithFusionAuthTokenResponse(), - self.call_api(params, req, runtime) - ) - - async def verify_with_fusion_auth_token_with_options_async( - self, - request: dypnsapi_20170525_models.VerifyWithFusionAuthTokenRequest, - runtime: util_models.RuntimeOptions, - ) -> dypnsapi_20170525_models.VerifyWithFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the verification token that is obtained by using the authentication token. 
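VerifySmsCode, whose wrappers are deleted above, checks a code the user typed against one previously delivered through the SMS verification flow. A minimal sketch, reusing the client from the GetMobile example; the origin of SmsToken is not shown in this diff, so treat it as a placeholder tied to the earlier send step:

from alibabacloud_dypnsapi20170525 import models as dypnsapi_models

# `client` as constructed in the GetMobile sketch above. All three values are
# placeholders; SmsToken is assumed to be the token associated with the send
# step of the SMS verification flow.
request = dypnsapi_models.VerifySmsCodeRequest(
    phone_number='13800000000',
    sms_code='123456',
    sms_token='<sms-token>',
)
print(client.verify_sms_code(request).body.to_map())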
- - @param request: VerifyWithFusionAuthTokenRequest - @param runtime: runtime options for this request RuntimeOptions - @return: VerifyWithFusionAuthTokenResponse - """ - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.owner_id): - query['OwnerId'] = request.owner_id - if not UtilClient.is_unset(request.resource_owner_account): - query['ResourceOwnerAccount'] = request.resource_owner_account - if not UtilClient.is_unset(request.resource_owner_id): - query['ResourceOwnerId'] = request.resource_owner_id - if not UtilClient.is_unset(request.verify_token): - query['VerifyToken'] = request.verify_token - req = open_api_models.OpenApiRequest( - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='VerifyWithFusionAuthToken', - version='2017-05-25', - protocol='HTTPS', - pathname='/', - method='POST', - auth_type='AK', - style='RPC', - req_body_type='formData', - body_type='json' - ) - return TeaCore.from_map( - dypnsapi_20170525_models.VerifyWithFusionAuthTokenResponse(), - await self.call_api_async(params, req, runtime) - ) - - def verify_with_fusion_auth_token( - self, - request: dypnsapi_20170525_models.VerifyWithFusionAuthTokenRequest, - ) -> dypnsapi_20170525_models.VerifyWithFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the verification token that is obtained by using the authentication token. - - @param request: VerifyWithFusionAuthTokenRequest - @return: VerifyWithFusionAuthTokenResponse - """ - runtime = util_models.RuntimeOptions() - return self.verify_with_fusion_auth_token_with_options(request, runtime) - - async def verify_with_fusion_auth_token_async( - self, - request: dypnsapi_20170525_models.VerifyWithFusionAuthTokenRequest, - ) -> dypnsapi_20170525_models.VerifyWithFusionAuthTokenResponse: - """ - @summary Obtains the verification results by using the verification token that is obtained by using the authentication token. - - @param request: VerifyWithFusionAuthTokenRequest - @return: VerifyWithFusionAuthTokenResponse - """ - runtime = util_models.RuntimeOptions() - return await self.verify_with_fusion_auth_token_with_options_async(request, runtime) diff --git a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/models.py b/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/models.py deleted file mode 100644 index ac52dff..0000000 --- a/venv/Lib/site-packages/alibabacloud_dypnsapi20170525/models.py +++ /dev/null @@ -1,4222 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from Tea.model import TeaModel -from typing import Dict, List - - -class CheckSmsVerifyCodeRequest(TeaModel): - def __init__( - self, - case_auth_policy: int = None, - country_code: str = None, - out_id: str = None, - owner_id: int = None, - phone_number: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scheme_name: str = None, - verify_code: str = None, - ): - # The verification policy for uppercase and lowercase letters of the verification code. Valid values: - # - # * 1: The verification policy does not distinguish uppercase and lowercase letters. - # * 2: The verification policy distinguishes uppercase and lowercase letters. - self.case_auth_policy = case_auth_policy - # The country code of the phone number. Default value: 86. - self.country_code = country_code - # The external ID. - self.out_id = out_id - self.owner_id = owner_id - # The phone number. - # - # This parameter is required. 
- self.phone_number = phone_number - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The verification service name. If this parameter is not specified, the default service is used. The name can be up to 20 characters in length. - self.scheme_name = scheme_name - # The verification code. - # - # This parameter is required. - self.verify_code = verify_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.case_auth_policy is not None: - result['CaseAuthPolicy'] = self.case_auth_policy - if self.country_code is not None: - result['CountryCode'] = self.country_code - if self.out_id is not None: - result['OutId'] = self.out_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scheme_name is not None: - result['SchemeName'] = self.scheme_name - if self.verify_code is not None: - result['VerifyCode'] = self.verify_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('CaseAuthPolicy') is not None: - self.case_auth_policy = m.get('CaseAuthPolicy') - if m.get('CountryCode') is not None: - self.country_code = m.get('CountryCode') - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SchemeName') is not None: - self.scheme_name = m.get('SchemeName') - if m.get('VerifyCode') is not None: - self.verify_code = m.get('VerifyCode') - return self - - -class CheckSmsVerifyCodeResponseBodyModel(TeaModel): - def __init__( - self, - out_id: str = None, - verify_result: str = None, - ): - # The external ID. - self.out_id = out_id - # The verification results. Valid values: - # - # * PASS: The verification is successful. - # * UNKNOWN: The verification failed. - self.verify_result = verify_result - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.out_id is not None: - result['OutId'] = self.out_id - if self.verify_result is not None: - result['VerifyResult'] = self.verify_result - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('VerifyResult') is not None: - self.verify_result = m.get('VerifyResult') - return self - - -class CheckSmsVerifyCodeResponseBody(TeaModel): - def __init__( - self, - access_denied_detail: str = None, - code: str = None, - message: str = None, - model: CheckSmsVerifyCodeResponseBodyModel = None, - success: bool = None, - ): - # The details about the access denial. - self.access_denied_detail = access_denied_detail - # The response code. - # - # * If OK is returned, the request is successful. 
- # * For more information about other error codes, see [Response codes](https://help.aliyun.com/zh/pnvs/developer-reference/api-return-code?spm=a2c4g.11174283.0.0.70c5616bkj38Wa). - self.code = code - # The returned message. - self.message = message - # The returned data. - self.model = model - # Indicates whether the request is successful. Valid values: - # - # * true - # * false - self.success = success - - def validate(self): - if self.model: - self.model.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_denied_detail is not None: - result['AccessDeniedDetail'] = self.access_denied_detail - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.model is not None: - result['Model'] = self.model.to_map() - if self.success is not None: - result['Success'] = self.success - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AccessDeniedDetail') is not None: - self.access_denied_detail = m.get('AccessDeniedDetail') - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('Model') is not None: - temp_model = CheckSmsVerifyCodeResponseBodyModel() - self.model = temp_model.from_map(m['Model']) - if m.get('Success') is not None: - self.success = m.get('Success') - return self - - -class CheckSmsVerifyCodeResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CheckSmsVerifyCodeResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CheckSmsVerifyCodeResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class CreateSchemeConfigRequest(TeaModel): - def __init__( - self, - android_package_name: str = None, - android_package_sign: str = None, - app_name: str = None, - h_5origin: str = None, - h_5url: str = None, - ios_bundle_id: str = None, - owner_id: int = None, - platform: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scheme_name: str = None, - ): - # The package name. This parameter is required when Platform is set to Android. The name must be 1 to 128 characters in length and can contain digits, letters, hyphens (-), underscores (_), and periods (.). - self.android_package_name = android_package_name - # The package signature. This parameter is required when Platform is set to Android. The signature must be 32 characters in length and can contain digits and letters. - self.android_package_sign = android_package_sign - # The app name, which can be up to 20 characters in length and can contain letters. - self.app_name = app_name - # The reserved field. HTML5 apps are not supported. 
- self.h_5origin = h_5origin - # The reserved field. HTML5 apps are not supported. - self.h_5url = h_5url - # The bundle ID. This parameter is required when OsType is set to iOS. The bundle ID must be 1 to 128 characters in length and can contain digits, letters, hyphens (-), underscores (_), and periods (.). - self.ios_bundle_id = ios_bundle_id - self.owner_id = owner_id - # The app platform. - # - # Valid values: - # - # * Android - # * iOS - # - # This parameter is required. - self.platform = platform - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The service name, which can be up to 10 characters in length and can contain letters. - # - # This parameter is required. - self.scheme_name = scheme_name - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.android_package_name is not None: - result['AndroidPackageName'] = self.android_package_name - if self.android_package_sign is not None: - result['AndroidPackageSign'] = self.android_package_sign - if self.app_name is not None: - result['AppName'] = self.app_name - if self.h_5origin is not None: - result['H5Origin'] = self.h_5origin - if self.h_5url is not None: - result['H5Url'] = self.h_5url - if self.ios_bundle_id is not None: - result['IosBundleId'] = self.ios_bundle_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.platform is not None: - result['Platform'] = self.platform - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scheme_name is not None: - result['SchemeName'] = self.scheme_name - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AndroidPackageName') is not None: - self.android_package_name = m.get('AndroidPackageName') - if m.get('AndroidPackageSign') is not None: - self.android_package_sign = m.get('AndroidPackageSign') - if m.get('AppName') is not None: - self.app_name = m.get('AppName') - if m.get('H5Origin') is not None: - self.h_5origin = m.get('H5Origin') - if m.get('H5Url') is not None: - self.h_5url = m.get('H5Url') - if m.get('IosBundleId') is not None: - self.ios_bundle_id = m.get('IosBundleId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('Platform') is not None: - self.platform = m.get('Platform') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SchemeName') is not None: - self.scheme_name = m.get('SchemeName') - return self - - -class CreateSchemeConfigResponseBodyModel(TeaModel): - def __init__( - self, - scheme_code: str = None, - ): - # The service code. 
- self.scheme_code = scheme_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.scheme_code is not None: - result['SchemeCode'] = self.scheme_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('SchemeCode') is not None: - self.scheme_code = m.get('SchemeCode') - return self - - -class CreateSchemeConfigResponseBody(TeaModel): - def __init__( - self, - code: str = None, - message: str = None, - model: CreateSchemeConfigResponseBodyModel = None, - request_id: str = None, - success: bool = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/zh/pnvs/developer-reference/api-return-code?spm=a2c4g.11186623.0.0.5c3a662fbgeAuk). - self.code = code - # The returned message. - self.message = message - # The returned results. - self.model = model - # The request ID. - self.request_id = request_id - # Indicates whether the request is successful. Valid values: - # - # * **true**\ - # * **false**\ - self.success = success - - def validate(self): - if self.model: - self.model.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.model is not None: - result['Model'] = self.model.to_map() - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.success is not None: - result['Success'] = self.success - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('Model') is not None: - temp_model = CreateSchemeConfigResponseBodyModel() - self.model = temp_model.from_map(m['Model']) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Success') is not None: - self.success = m.get('Success') - return self - - -class CreateSchemeConfigResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CreateSchemeConfigResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CreateSchemeConfigResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class CreateVerifySchemeRequest(TeaModel): - def __init__( - self, - app_name: str = None, - auth_type: str = None, - bundle_id: str = None, - cm_api_code: int = None, - ct_api_code: int = None, - cu_api_code: int = None, - email: str = None, - hm_app_identifier: str = None, - hm_package_name: str = None, - hm_sign_name: str = None, - ip_white_list: 
str = None, - origin: str = None, - os_type: str = None, - owner_id: int = None, - pack_name: str = None, - pack_sign: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scene_type: str = None, - scheme_name: str = None, - sms_sign_name: str = None, - url: str = None, - ): - # The app name. - # - # This parameter is required. - self.app_name = app_name - # The verification type. You can select multiple types only when the phone number verification is supported. Separate multiple types with commas (,). - # - # * **1**: phone number verification - # * **2**: SMS verification - self.auth_type = auth_type - # The bundle ID. This parameter is required when OsType is set to iOS. The bundle ID must be 1 to 128 characters in length and can contain digits, letters, hyphens (-), underscores (_), and periods (.). - self.bundle_id = bundle_id - # The channel code of China Mobile. - self.cm_api_code = cm_api_code - # The channel code of China Telecom. - self.ct_api_code = ct_api_code - # The channel code of China Unicom. - self.cu_api_code = cu_api_code - # The email address that receives the key. - self.email = email - self.hm_app_identifier = hm_app_identifier - self.hm_package_name = hm_package_name - self.hm_sign_name = hm_sign_name - # The IP address whitelist. - self.ip_white_list = ip_white_list - # The source URL of the HTML5 app page. We recommend that you specify this parameter as a domain name. - self.origin = origin - # The type of the operating system for the terminal. Valid values: iOS and Android. - # - # This parameter is required. - self.os_type = os_type - self.owner_id = owner_id - # The package name. This parameter is required when OsType is set to Android. The name must be 1 to 128 characters in length and can contain digits, letters, hyphens (-), underscores (_), and periods (.). - self.pack_name = pack_name - # The package signature. This parameter is required when OsType is set to Android. The signature must be 32 characters in length and can contain digits and letters. - self.pack_sign = pack_sign - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The service type. - self.scene_type = scene_type - # The service name. - # - # This parameter is required. - self.scheme_name = scheme_name - # The bound SMS signature. This parameter is valid only when AuthType is set to 2. The signature must be approved. - self.sms_sign_name = sms_sign_name - # The URL of the HTML5 app page. 
- self.url = url - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.app_name is not None: - result['AppName'] = self.app_name - if self.auth_type is not None: - result['AuthType'] = self.auth_type - if self.bundle_id is not None: - result['BundleId'] = self.bundle_id - if self.cm_api_code is not None: - result['CmApiCode'] = self.cm_api_code - if self.ct_api_code is not None: - result['CtApiCode'] = self.ct_api_code - if self.cu_api_code is not None: - result['CuApiCode'] = self.cu_api_code - if self.email is not None: - result['Email'] = self.email - if self.hm_app_identifier is not None: - result['HmAppIdentifier'] = self.hm_app_identifier - if self.hm_package_name is not None: - result['HmPackageName'] = self.hm_package_name - if self.hm_sign_name is not None: - result['HmSignName'] = self.hm_sign_name - if self.ip_white_list is not None: - result['IpWhiteList'] = self.ip_white_list - if self.origin is not None: - result['Origin'] = self.origin - if self.os_type is not None: - result['OsType'] = self.os_type - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.pack_name is not None: - result['PackName'] = self.pack_name - if self.pack_sign is not None: - result['PackSign'] = self.pack_sign - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scene_type is not None: - result['SceneType'] = self.scene_type - if self.scheme_name is not None: - result['SchemeName'] = self.scheme_name - if self.sms_sign_name is not None: - result['SmsSignName'] = self.sms_sign_name - if self.url is not None: - result['Url'] = self.url - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AppName') is not None: - self.app_name = m.get('AppName') - if m.get('AuthType') is not None: - self.auth_type = m.get('AuthType') - if m.get('BundleId') is not None: - self.bundle_id = m.get('BundleId') - if m.get('CmApiCode') is not None: - self.cm_api_code = m.get('CmApiCode') - if m.get('CtApiCode') is not None: - self.ct_api_code = m.get('CtApiCode') - if m.get('CuApiCode') is not None: - self.cu_api_code = m.get('CuApiCode') - if m.get('Email') is not None: - self.email = m.get('Email') - if m.get('HmAppIdentifier') is not None: - self.hm_app_identifier = m.get('HmAppIdentifier') - if m.get('HmPackageName') is not None: - self.hm_package_name = m.get('HmPackageName') - if m.get('HmSignName') is not None: - self.hm_sign_name = m.get('HmSignName') - if m.get('IpWhiteList') is not None: - self.ip_white_list = m.get('IpWhiteList') - if m.get('Origin') is not None: - self.origin = m.get('Origin') - if m.get('OsType') is not None: - self.os_type = m.get('OsType') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PackName') is not None: - self.pack_name = m.get('PackName') - if m.get('PackSign') is not None: - self.pack_sign = m.get('PackSign') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SceneType') is not None: - self.scene_type = m.get('SceneType') - if m.get('SchemeName') is not None: - self.scheme_name = m.get('SchemeName') - if m.get('SmsSignName') is not None: - self.sms_sign_name = m.get('SmsSignName') - if 
m.get('Url') is not None: - self.url = m.get('Url') - return self - - -class CreateVerifySchemeResponseBodyGateVerifySchemeDTO(TeaModel): - def __init__( - self, - scheme_code: str = None, - ): - # The service code. - self.scheme_code = scheme_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.scheme_code is not None: - result['SchemeCode'] = self.scheme_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('SchemeCode') is not None: - self.scheme_code = m.get('SchemeCode') - return self - - -class CreateVerifySchemeResponseBody(TeaModel): - def __init__( - self, - code: str = None, - gate_verify_scheme_dto: CreateVerifySchemeResponseBodyGateVerifySchemeDTO = None, - http_status_code: int = None, - message: str = None, - request_id: str = None, - success: bool = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.gate_verify_scheme_dto = gate_verify_scheme_dto - # The HTTP status code. - self.http_status_code = http_status_code - # The returned message. - self.message = message - # The request ID. - self.request_id = request_id - # Indicates whether the request is successful. Valid values: - # - # * **true**\ - # * **false**\ - self.success = success - - def validate(self): - if self.gate_verify_scheme_dto: - self.gate_verify_scheme_dto.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.gate_verify_scheme_dto is not None: - result['GateVerifySchemeDTO'] = self.gate_verify_scheme_dto.to_map() - if self.http_status_code is not None: - result['HttpStatusCode'] = self.http_status_code - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.success is not None: - result['Success'] = self.success - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('GateVerifySchemeDTO') is not None: - temp_model = CreateVerifySchemeResponseBodyGateVerifySchemeDTO() - self.gate_verify_scheme_dto = temp_model.from_map(m['GateVerifySchemeDTO']) - if m.get('HttpStatusCode') is not None: - self.http_status_code = m.get('HttpStatusCode') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Success') is not None: - self.success = m.get('Success') - return self - - -class CreateVerifySchemeResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CreateVerifySchemeResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m 
or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CreateVerifySchemeResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class DeleteVerifySchemeRequest(TeaModel): - def __init__( - self, - customer_id: int = None, - owner_id: int = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scheme_code: str = None, - ): - # The user ID. - self.customer_id = customer_id - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The service code. - # - # This parameter is required. - self.scheme_code = scheme_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.customer_id is not None: - result['CustomerId'] = self.customer_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scheme_code is not None: - result['SchemeCode'] = self.scheme_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('CustomerId') is not None: - self.customer_id = m.get('CustomerId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SchemeCode') is not None: - self.scheme_code = m.get('SchemeCode') - return self - - -class DeleteVerifySchemeResponseBody(TeaModel): - def __init__( - self, - code: str = None, - message: str = None, - request_id: str = None, - result: bool = None, - ): - # The request is successful. For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The returned message. - self.message = message - # The request ID. - self.request_id = request_id - # The result of the operation. Valid values: - # - # * **true**: The verification service is deleted. - # * **false**: The verification service failed to be deleted. 
- self.result = result - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.result is not None: - result['Result'] = self.result - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Result') is not None: - self.result = m.get('Result') - return self - - -class DeleteVerifySchemeResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: DeleteVerifySchemeResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = DeleteVerifySchemeResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class DescribeVerifySchemeRequest(TeaModel): - def __init__( - self, - customer_id: int = None, - owner_id: int = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scheme_code: str = None, - ): - # The user ID. - self.customer_id = customer_id - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The service code. - # - # This parameter is required. 
- self.scheme_code = scheme_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.customer_id is not None: - result['CustomerId'] = self.customer_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scheme_code is not None: - result['SchemeCode'] = self.scheme_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('CustomerId') is not None: - self.customer_id = m.get('CustomerId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SchemeCode') is not None: - self.scheme_code = m.get('SchemeCode') - return self - - -class DescribeVerifySchemeResponseBodySchemeQueryResultDTO(TeaModel): - def __init__( - self, - app_encrypt_info: str = None, - ): - # The key generated when you create a service in the console. - self.app_encrypt_info = app_encrypt_info - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.app_encrypt_info is not None: - result['AppEncryptInfo'] = self.app_encrypt_info - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AppEncryptInfo') is not None: - self.app_encrypt_info = m.get('AppEncryptInfo') - return self - - -class DescribeVerifySchemeResponseBody(TeaModel): - def __init__( - self, - code: str = None, - message: str = None, - request_id: str = None, - scheme_query_result_dto: DescribeVerifySchemeResponseBodySchemeQueryResultDTO = None, - ): - # The response code. OK indicates that the request is successful. For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The returned message. - self.message = message - # The request ID. - self.request_id = request_id - # The response parameters. 
- self.scheme_query_result_dto = scheme_query_result_dto - - def validate(self): - if self.scheme_query_result_dto: - self.scheme_query_result_dto.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.scheme_query_result_dto is not None: - result['SchemeQueryResultDTO'] = self.scheme_query_result_dto.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('SchemeQueryResultDTO') is not None: - temp_model = DescribeVerifySchemeResponseBodySchemeQueryResultDTO() - self.scheme_query_result_dto = temp_model.from_map(m['SchemeQueryResultDTO']) - return self - - -class DescribeVerifySchemeResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: DescribeVerifySchemeResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = DescribeVerifySchemeResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetAuthTokenRequest(TeaModel): - def __init__( - self, - biz_type: int = None, - cm_api_code: int = None, - ct_api_code: int = None, - cu_api_code: int = None, - origin: str = None, - owner_id: int = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scene_code: str = None, - url: str = None, - version: str = None, - ): - self.biz_type = biz_type - self.cm_api_code = cm_api_code - self.ct_api_code = ct_api_code - self.cu_api_code = cu_api_code - # The requested domain name. - # - # This parameter is required. - self.origin = origin - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - self.scene_code = scene_code - # The URL of the requested web page. - # - # This parameter is required. 
- self.url = url - self.version = version - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.biz_type is not None: - result['BizType'] = self.biz_type - if self.cm_api_code is not None: - result['CmApiCode'] = self.cm_api_code - if self.ct_api_code is not None: - result['CtApiCode'] = self.ct_api_code - if self.cu_api_code is not None: - result['CuApiCode'] = self.cu_api_code - if self.origin is not None: - result['Origin'] = self.origin - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scene_code is not None: - result['SceneCode'] = self.scene_code - if self.url is not None: - result['Url'] = self.url - if self.version is not None: - result['Version'] = self.version - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('BizType') is not None: - self.biz_type = m.get('BizType') - if m.get('CmApiCode') is not None: - self.cm_api_code = m.get('CmApiCode') - if m.get('CtApiCode') is not None: - self.ct_api_code = m.get('CtApiCode') - if m.get('CuApiCode') is not None: - self.cu_api_code = m.get('CuApiCode') - if m.get('Origin') is not None: - self.origin = m.get('Origin') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SceneCode') is not None: - self.scene_code = m.get('SceneCode') - if m.get('Url') is not None: - self.url = m.get('Url') - if m.get('Version') is not None: - self.version = m.get('Version') - return self - - -class GetAuthTokenResponseBodyTokenInfo(TeaModel): - def __init__( - self, - access_token: str = None, - jwt_token: str = None, - ): - # The business authentication token. - # - # > AccessToken is valid for 10 minutes and can be used repeatedly within its validity period. - self.access_token = access_token - # The API authentication token. - # - # > JwtToken is valid for 1 hour and can be used repeatedly within its validity period. - self.jwt_token = jwt_token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_token is not None: - result['AccessToken'] = self.access_token - if self.jwt_token is not None: - result['JwtToken'] = self.jwt_token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AccessToken') is not None: - self.access_token = m.get('AccessToken') - if m.get('JwtToken') is not None: - self.jwt_token = m.get('JwtToken') - return self - - -class GetAuthTokenResponseBody(TeaModel): - def __init__( - self, - code: str = None, - message: str = None, - request_id: str = None, - token_info: GetAuthTokenResponseBodyTokenInfo = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The returned message. - self.message = message - # The request ID. - self.request_id = request_id - # The response parameters. 
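# Sketch for GetAuthToken (annotation, not part of the generated SDK): the
# TokenInfo described above pairs AccessToken (valid ~10 minutes) with JwtToken
# (valid ~1 hour). The wrapper name get_auth_token mirrors the generated naming
# convention and, like the Origin/Url values, is an assumption; `client` and
# `dypnsapi_models` are set up as in the sketch further up.
request = dypnsapi_models.GetAuthTokenRequest(
    origin='https://example.com',     # required: requested domain name (example value)
    url='https://example.com/login',  # required: URL of the requested web page (example value)
)
response = client.get_auth_token(request)
token_info = response.body.token_info
access_token = token_info.access_token  # business authentication token, ~10 min validity
jwt_token = token_info.jwt_token        # API authentication token, ~1 h validity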
- self.token_info = token_info - - def validate(self): - if self.token_info: - self.token_info.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.token_info is not None: - result['TokenInfo'] = self.token_info.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TokenInfo') is not None: - temp_model = GetAuthTokenResponseBodyTokenInfo() - self.token_info = temp_model.from_map(m['TokenInfo']) - return self - - -class GetAuthTokenResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetAuthTokenResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetAuthTokenResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetAuthorizationUrlRequest(TeaModel): - def __init__( - self, - end_date: str = None, - owner_id: int = None, - phone_no: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scheme_id: int = None, - ): - # The authorization end date, which is in the yyyy-MM-dd format. This parameter is required for services of contract type. - self.end_date = end_date - self.owner_id = owner_id - # The phone number. - # - # This parameter is required. - self.phone_no = phone_no - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The ID of the authorization scenario. You can view the ID of the authorization scenario on the **Authorization Scenario Management** page in the **Phone Number Verification Service console**. - # - # This parameter is required. 
- self.scheme_id = scheme_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.end_date is not None: - result['EndDate'] = self.end_date - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.phone_no is not None: - result['PhoneNo'] = self.phone_no - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scheme_id is not None: - result['SchemeId'] = self.scheme_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EndDate') is not None: - self.end_date = m.get('EndDate') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PhoneNo') is not None: - self.phone_no = m.get('PhoneNo') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SchemeId') is not None: - self.scheme_id = m.get('SchemeId') - return self - - -class GetAuthorizationUrlResponseBodyData(TeaModel): - def __init__( - self, - authorization_url: str = None, - ): - # The authorization URL. - self.authorization_url = authorization_url - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.authorization_url is not None: - result['AuthorizationUrl'] = self.authorization_url - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AuthorizationUrl') is not None: - self.authorization_url = m.get('AuthorizationUrl') - return self - - -class GetAuthorizationUrlResponseBody(TeaModel): - def __init__( - self, - code: str = None, - data: GetAuthorizationUrlResponseBodyData = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.data = data - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.data: - self.data.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.data is not None: - result['Data'] = self.data.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Data') is not None: - temp_model = GetAuthorizationUrlResponseBodyData() - self.data = temp_model.from_map(m['Data']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class GetAuthorizationUrlResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetAuthorizationUrlResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetAuthorizationUrlResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetFusionAuthTokenRequest(TeaModel): - def __init__( - self, - bundle_id: str = None, - duration_seconds: int = None, - owner_id: int = None, - package_name: str = None, - package_sign: str = None, - platform: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scheme_code: str = None, - ): - # The bundle ID of the app. This parameter is required when Platform is set to iOS. - self.bundle_id = bundle_id - # The validity period of the token. Unit: seconds. Valid values: 900 to 43200. - # - # This parameter is required. - self.duration_seconds = duration_seconds - self.owner_id = owner_id - # The package name of the app. This parameter is required when Platform is set to Android. - self.package_name = package_name - # The package signature of the app. This parameter is required when Platform is set to Android. - self.package_sign = package_sign - # The platform type. Valid values: Android and iOS. - # - # This parameter is required. - self.platform = platform - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The service code. - # - # This parameter is required. 
- self.scheme_code = scheme_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.bundle_id is not None: - result['BundleId'] = self.bundle_id - if self.duration_seconds is not None: - result['DurationSeconds'] = self.duration_seconds - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.package_name is not None: - result['PackageName'] = self.package_name - if self.package_sign is not None: - result['PackageSign'] = self.package_sign - if self.platform is not None: - result['Platform'] = self.platform - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scheme_code is not None: - result['SchemeCode'] = self.scheme_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('BundleId') is not None: - self.bundle_id = m.get('BundleId') - if m.get('DurationSeconds') is not None: - self.duration_seconds = m.get('DurationSeconds') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PackageName') is not None: - self.package_name = m.get('PackageName') - if m.get('PackageSign') is not None: - self.package_sign = m.get('PackageSign') - if m.get('Platform') is not None: - self.platform = m.get('Platform') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SchemeCode') is not None: - self.scheme_code = m.get('SchemeCode') - return self - - -class GetFusionAuthTokenResponseBody(TeaModel): - def __init__( - self, - code: str = None, - message: str = None, - model: str = None, - request_id: str = None, - success: bool = None, - ): - # The response code. If OK is returned, the request is successful. Other values indicate that the request failed. For more information, see Error codes. - self.code = code - # The returned message. - self.message = message - # The authentication code. The value of this parameter is a string. - self.model = model - # The request ID, which is used to locate and troubleshoot issues. - self.request_id = request_id - # Indicates whether the request is successful. 
Valid values: true false - self.success = success - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.model is not None: - result['Model'] = self.model - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.success is not None: - result['Success'] = self.success - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('Model') is not None: - self.model = m.get('Model') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Success') is not None: - self.success = m.get('Success') - return self - - -class GetFusionAuthTokenResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetFusionAuthTokenResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetFusionAuthTokenResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetMobileRequest(TeaModel): - def __init__( - self, - access_token: str = None, - out_id: str = None, - owner_id: int = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - ): - # The logon token obtained by the SDK for your app. - # - # This parameter is required. - self.access_token = access_token - # The external ID. 
- self.out_id = out_id - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_token is not None: - result['AccessToken'] = self.access_token - if self.out_id is not None: - result['OutId'] = self.out_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AccessToken') is not None: - self.access_token = m.get('AccessToken') - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - return self - - -class GetMobileResponseBodyGetMobileResultDTO(TeaModel): - def __init__( - self, - mobile: str = None, - ): - # The phone number, - self.mobile = mobile - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.mobile is not None: - result['Mobile'] = self.mobile - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Mobile') is not None: - self.mobile = m.get('Mobile') - return self - - -class GetMobileResponseBody(TeaModel): - def __init__( - self, - code: str = None, - get_mobile_result_dto: GetMobileResponseBodyGetMobileResultDTO = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.get_mobile_result_dto = get_mobile_result_dto - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.get_mobile_result_dto: - self.get_mobile_result_dto.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.get_mobile_result_dto is not None: - result['GetMobileResultDTO'] = self.get_mobile_result_dto.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('GetMobileResultDTO') is not None: - temp_model = GetMobileResponseBodyGetMobileResultDTO() - self.get_mobile_result_dto = temp_model.from_map(m['GetMobileResultDTO']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class GetMobileResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetMobileResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetMobileResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetPhoneWithTokenRequest(TeaModel): - def __init__( - self, - owner_id: int = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - sp_token: str = None, - ): - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The token for phone number verification that is obtained by the JavaScript SDK. The validity period of the token is 10 minutes for China Telecom, 30 minutes for China Unicom, and 2 minutes for China Mobile. The token can be used only once. - # - # This parameter is required. 
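# Sketch for GetPhoneWithToken (annotation, not part of the generated SDK): the
# single-use SpToken obtained by the JavaScript SDK is exchanged for the phone
# number. The wrapper name get_phone_with_token is assumed from the generated
# naming convention; `client` and `dypnsapi_models` as in the sketch further up.
request = dypnsapi_models.GetPhoneWithTokenRequest(
    sp_token='<single-use token obtained from the JavaScript SDK>',
)
response = client.get_phone_with_token(request)
if response.body.code == 'OK':
    mobile = response.body.data.mobile  # the verified phone number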
- self.sp_token = sp_token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.sp_token is not None: - result['SpToken'] = self.sp_token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SpToken') is not None: - self.sp_token = m.get('SpToken') - return self - - -class GetPhoneWithTokenResponseBodyData(TeaModel): - def __init__( - self, - mobile: str = None, - ): - # The phone number. - self.mobile = mobile - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.mobile is not None: - result['Mobile'] = self.mobile - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Mobile') is not None: - self.mobile = m.get('Mobile') - return self - - -class GetPhoneWithTokenResponseBody(TeaModel): - def __init__( - self, - code: str = None, - data: GetPhoneWithTokenResponseBodyData = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.data = data - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.data: - self.data.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.data is not None: - result['Data'] = self.data.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Data') is not None: - temp_model = GetPhoneWithTokenResponseBodyData() - self.data = temp_model.from_map(m['Data']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class GetPhoneWithTokenResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetPhoneWithTokenResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetPhoneWithTokenResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetSmsAuthTokensRequest(TeaModel): - def __init__( - self, - bundle_id: str = None, - expire: int = None, - os_type: str = None, - owner_id: int = None, - package_name: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - scene_code: str = None, - sign_name: str = None, - sms_code_expire: int = None, - sms_template_code: str = None, - ): - # The ID of the iOS application. This parameter is required if OsType is set to **iOS**. - self.bundle_id = bundle_id - # The validity period of the token. Unit: seconds. Valid values: 900 to 43200. - # - # This parameter is required. - self.expire = expire - # The type of the operating system. Valid values: **Android** and **iOS**. - # - # This parameter is required. - self.os_type = os_type - self.owner_id = owner_id - # The package name. This parameter is required if OsType is set to **Android**. - self.package_name = package_name - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The service code. - # - # This parameter is required. - self.scene_code = scene_code - # The signature. This parameter is required if OsType is set to **Android**. - self.sign_name = sign_name - # The validity period of the SMS verification code. Unit: seconds. Default value: 180. - self.sms_code_expire = sms_code_expire - # The code of the text message template. - # - # This parameter is required. 
- self.sms_template_code = sms_template_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.bundle_id is not None: - result['BundleId'] = self.bundle_id - if self.expire is not None: - result['Expire'] = self.expire - if self.os_type is not None: - result['OsType'] = self.os_type - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.package_name is not None: - result['PackageName'] = self.package_name - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.scene_code is not None: - result['SceneCode'] = self.scene_code - if self.sign_name is not None: - result['SignName'] = self.sign_name - if self.sms_code_expire is not None: - result['SmsCodeExpire'] = self.sms_code_expire - if self.sms_template_code is not None: - result['SmsTemplateCode'] = self.sms_template_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('BundleId') is not None: - self.bundle_id = m.get('BundleId') - if m.get('Expire') is not None: - self.expire = m.get('Expire') - if m.get('OsType') is not None: - self.os_type = m.get('OsType') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PackageName') is not None: - self.package_name = m.get('PackageName') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SceneCode') is not None: - self.scene_code = m.get('SceneCode') - if m.get('SignName') is not None: - self.sign_name = m.get('SignName') - if m.get('SmsCodeExpire') is not None: - self.sms_code_expire = m.get('SmsCodeExpire') - if m.get('SmsTemplateCode') is not None: - self.sms_template_code = m.get('SmsTemplateCode') - return self - - -class GetSmsAuthTokensResponseBodyData(TeaModel): - def __init__( - self, - biz_token: str = None, - expire_time: int = None, - sts_access_key_id: str = None, - sts_access_key_secret: str = None, - sts_token: str = None, - ): - # The business token. - self.biz_token = biz_token - # The time when the token expired. This value is a UNIX timestamp representing the number of milliseconds that have elapsed since January 1, 1970, 00:00:00 UTC. - self.expire_time = expire_time - # The AccessKey ID. - self.sts_access_key_id = sts_access_key_id - # The AccessKey secret. - self.sts_access_key_secret = sts_access_key_secret - # The security token. 
- self.sts_token = sts_token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.biz_token is not None: - result['BizToken'] = self.biz_token - if self.expire_time is not None: - result['ExpireTime'] = self.expire_time - if self.sts_access_key_id is not None: - result['StsAccessKeyId'] = self.sts_access_key_id - if self.sts_access_key_secret is not None: - result['StsAccessKeySecret'] = self.sts_access_key_secret - if self.sts_token is not None: - result['StsToken'] = self.sts_token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('BizToken') is not None: - self.biz_token = m.get('BizToken') - if m.get('ExpireTime') is not None: - self.expire_time = m.get('ExpireTime') - if m.get('StsAccessKeyId') is not None: - self.sts_access_key_id = m.get('StsAccessKeyId') - if m.get('StsAccessKeySecret') is not None: - self.sts_access_key_secret = m.get('StsAccessKeySecret') - if m.get('StsToken') is not None: - self.sts_token = m.get('StsToken') - return self - - -class GetSmsAuthTokensResponseBody(TeaModel): - def __init__( - self, - code: str = None, - data: GetSmsAuthTokensResponseBodyData = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.data = data - # The returned message. - self.message = message - # The request ID. - self.request_id = request_id - - def validate(self): - if self.data: - self.data.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.data is not None: - result['Data'] = self.data.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Data') is not None: - temp_model = GetSmsAuthTokensResponseBodyData() - self.data = temp_model.from_map(m['Data']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class GetSmsAuthTokensResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetSmsAuthTokensResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetSmsAuthTokensResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class 
QueryGateVerifyBillingPublicRequest(TeaModel): - def __init__( - self, - authentication_type: int = None, - month: str = None, - owner_id: int = None, - resource_owner_account: str = None, - ): - # The verification method. Valid values: - # - # * **0**: phone number verification - # * **1**: one-click logon - # * **2**: all - # * **3**: facial recognition - # * **4**: SMS verification - # - # This parameter is required. - self.authentication_type = authentication_type - # The month in which the bill is generated. Specify this parameter in the YYYYMM format. Example: 202111. - # - # This parameter is required. - self.month = month - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.authentication_type is not None: - result['AuthenticationType'] = self.authentication_type - if self.month is not None: - result['Month'] = self.month - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AuthenticationType') is not None: - self.authentication_type = m.get('AuthenticationType') - if m.get('Month') is not None: - self.month = m.get('Month') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - return self - - -class QueryGateVerifyBillingPublicResponseBodyDataSceneBillingList(TeaModel): - def __init__( - self, - add: str = None, - amount: str = None, - app_name: str = None, - item_name: str = None, - scene_code: str = None, - scene_name: str = None, - single_price: str = None, - ): - # The billable items. - self.add = add - # The fees generated for the verification service. Unit: CNY. - self.amount = amount - # The application name. - self.app_name = app_name - # The verification method. - self.item_name = item_name - # The service code. - self.scene_code = scene_code - # The service name. - self.scene_name = scene_name - # The unit price. Unit: CNY. 
- self.single_price = single_price - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.add is not None: - result['Add'] = self.add - if self.amount is not None: - result['Amount'] = self.amount - if self.app_name is not None: - result['AppName'] = self.app_name - if self.item_name is not None: - result['ItemName'] = self.item_name - if self.scene_code is not None: - result['SceneCode'] = self.scene_code - if self.scene_name is not None: - result['SceneName'] = self.scene_name - if self.single_price is not None: - result['SinglePrice'] = self.single_price - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Add') is not None: - self.add = m.get('Add') - if m.get('Amount') is not None: - self.amount = m.get('Amount') - if m.get('AppName') is not None: - self.app_name = m.get('AppName') - if m.get('ItemName') is not None: - self.item_name = m.get('ItemName') - if m.get('SceneCode') is not None: - self.scene_code = m.get('SceneCode') - if m.get('SceneName') is not None: - self.scene_name = m.get('SceneName') - if m.get('SinglePrice') is not None: - self.single_price = m.get('SinglePrice') - return self - - -class QueryGateVerifyBillingPublicResponseBodyData(TeaModel): - def __init__( - self, - amount_sum: str = None, - scene_billing_list: List[QueryGateVerifyBillingPublicResponseBodyDataSceneBillingList] = None, - ): - # The fees generated for all verification services. Unit: CNY. - self.amount_sum = amount_sum - # The details of fees. - self.scene_billing_list = scene_billing_list - - def validate(self): - if self.scene_billing_list: - for k in self.scene_billing_list: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.amount_sum is not None: - result['AmountSum'] = self.amount_sum - result['SceneBillingList'] = [] - if self.scene_billing_list is not None: - for k in self.scene_billing_list: - result['SceneBillingList'].append(k.to_map() if k else None) - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AmountSum') is not None: - self.amount_sum = m.get('AmountSum') - self.scene_billing_list = [] - if m.get('SceneBillingList') is not None: - for k in m.get('SceneBillingList'): - temp_model = QueryGateVerifyBillingPublicResponseBodyDataSceneBillingList() - self.scene_billing_list.append(temp_model.from_map(k)) - return self - - -class QueryGateVerifyBillingPublicResponseBody(TeaModel): - def __init__( - self, - code: str = None, - data: QueryGateVerifyBillingPublicResponseBodyData = None, - message: str = None, - request_id: str = None, - ): - # The response code. Valid values: - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The billing information about each verification service. - self.data = data - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.data: - self.data.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.data is not None: - result['Data'] = self.data.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Data') is not None: - temp_model = QueryGateVerifyBillingPublicResponseBodyData() - self.data = temp_model.from_map(m['Data']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class QueryGateVerifyBillingPublicResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: QueryGateVerifyBillingPublicResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = QueryGateVerifyBillingPublicResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class QueryGateVerifyStatisticPublicRequest(TeaModel): - def __init__( - self, - authentication_type: int = None, - end_date: str = None, - os_type: str = None, - owner_id: int = None, - resource_owner_account: str = None, - scene_code: str = None, - start_date: str = None, - ): - # The verification method. Valid values: - # - # * **1**: one-click logon - # * **2**: phone number verification, including the verification of the phone number used in HTML5 pages - # * **3**: SMS verification - # * **4**: facial recognition - # - # This parameter is required. - self.authentication_type = authentication_type - # The end date. Specify this parameter in the YYYYMMDD format. Example: 20220106. - # - # This parameter is required. - self.end_date = end_date - # The type of the operating system. Valid values: - # - # * **Android**\ - # * **iOS**\ - self.os_type = os_type - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - # The service code. - self.scene_code = scene_code - # The start date. Specify this parameter in the YYYYMMDD format. Example: 20220101. - # - # This parameter is required. 
- self.start_date = start_date - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.authentication_type is not None: - result['AuthenticationType'] = self.authentication_type - if self.end_date is not None: - result['EndDate'] = self.end_date - if self.os_type is not None: - result['OsType'] = self.os_type - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.scene_code is not None: - result['SceneCode'] = self.scene_code - if self.start_date is not None: - result['StartDate'] = self.start_date - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AuthenticationType') is not None: - self.authentication_type = m.get('AuthenticationType') - if m.get('EndDate') is not None: - self.end_date = m.get('EndDate') - if m.get('OsType') is not None: - self.os_type = m.get('OsType') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('SceneCode') is not None: - self.scene_code = m.get('SceneCode') - if m.get('StartDate') is not None: - self.start_date = m.get('StartDate') - return self - - -class QueryGateVerifyStatisticPublicResponseBodyDataDayStatistic(TeaModel): - def __init__( - self, - statistic_date_str: str = None, - total_fail: int = None, - total_success: int = None, - total_unknown: int = None, - ): - # The date. This field is accurate to the day. The value of this field is in the YYYYMMDD format. Example: 20220103. - self.statistic_date_str = statistic_date_str - # The failed calls on the day. - self.total_fail = total_fail - # The successful calls on the day. - self.total_success = total_success - # The unknown calls on the day. - self.total_unknown = total_unknown - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.statistic_date_str is not None: - result['StatisticDateStr'] = self.statistic_date_str - if self.total_fail is not None: - result['TotalFail'] = self.total_fail - if self.total_success is not None: - result['TotalSuccess'] = self.total_success - if self.total_unknown is not None: - result['TotalUnknown'] = self.total_unknown - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('StatisticDateStr') is not None: - self.statistic_date_str = m.get('StatisticDateStr') - if m.get('TotalFail') is not None: - self.total_fail = m.get('TotalFail') - if m.get('TotalSuccess') is not None: - self.total_success = m.get('TotalSuccess') - if m.get('TotalUnknown') is not None: - self.total_unknown = m.get('TotalUnknown') - return self - - -class QueryGateVerifyStatisticPublicResponseBodyData(TeaModel): - def __init__( - self, - day_statistic: List[QueryGateVerifyStatisticPublicResponseBodyDataDayStatistic] = None, - total: int = None, - total_fail: int = None, - total_success: int = None, - total_unknown: int = None, - ): - # The information about the daily calls. - self.day_statistic = day_statistic - # The total calls. - self.total = total - # The failed calls. - self.total_fail = total_fail - # The successful calls. - self.total_success = total_success - # The unknown calls. 
- self.total_unknown = total_unknown - - def validate(self): - if self.day_statistic: - for k in self.day_statistic: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - result['DayStatistic'] = [] - if self.day_statistic is not None: - for k in self.day_statistic: - result['DayStatistic'].append(k.to_map() if k else None) - if self.total is not None: - result['Total'] = self.total - if self.total_fail is not None: - result['TotalFail'] = self.total_fail - if self.total_success is not None: - result['TotalSuccess'] = self.total_success - if self.total_unknown is not None: - result['TotalUnknown'] = self.total_unknown - return result - - def from_map(self, m: dict = None): - m = m or dict() - self.day_statistic = [] - if m.get('DayStatistic') is not None: - for k in m.get('DayStatistic'): - temp_model = QueryGateVerifyStatisticPublicResponseBodyDataDayStatistic() - self.day_statistic.append(temp_model.from_map(k)) - if m.get('Total') is not None: - self.total = m.get('Total') - if m.get('TotalFail') is not None: - self.total_fail = m.get('TotalFail') - if m.get('TotalSuccess') is not None: - self.total_success = m.get('TotalSuccess') - if m.get('TotalUnknown') is not None: - self.total_unknown = m.get('TotalUnknown') - return self - - -class QueryGateVerifyStatisticPublicResponseBody(TeaModel): - def __init__( - self, - code: str = None, - data: QueryGateVerifyStatisticPublicResponseBodyData = None, - message: str = None, - request_id: str = None, - ): - # The response code. Valid values: - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The information about the calls of Phone Number Verification Service, including the total calls, the successful calls, failed calls, unknown calls, and daily calls within the statistical date range. - self.data = data - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.data: - self.data.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.data is not None: - result['Data'] = self.data.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Data') is not None: - temp_model = QueryGateVerifyStatisticPublicResponseBodyData() - self.data = temp_model.from_map(m['Data']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class QueryGateVerifyStatisticPublicResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: QueryGateVerifyStatisticPublicResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = QueryGateVerifyStatisticPublicResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class QuerySendDetailsRequest(TeaModel): - def __init__( - self, - biz_id: str = None, - current_page: int = None, - owner_id: int = None, - page_size: int = None, - phone_number: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - send_date: str = None, - ): - # The unique ID of the business, which is provided by Alibaba Cloud. - self.biz_id = biz_id - # The number of the page on which you are reading the text message. Pages start from page 1. The value of this parameter cannot exceed the maximum page number. - # - # This parameter is required. - self.current_page = current_page - self.owner_id = owner_id - # The number of entries per page. - # - # This parameter is required. - self.page_size = page_size - # The phone number. - # - # This parameter is required. - self.phone_number = phone_number - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The date when the text message was sent. You can query text messages that were sent within the last 30 days. - # - # Specify the date in the yyyyMMdd format. Example: 20181225. - # - # This parameter is required. 
- self.send_date = send_date - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.biz_id is not None: - result['BizId'] = self.biz_id - if self.current_page is not None: - result['CurrentPage'] = self.current_page - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.send_date is not None: - result['SendDate'] = self.send_date - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('BizId') is not None: - self.biz_id = m.get('BizId') - if m.get('CurrentPage') is not None: - self.current_page = m.get('CurrentPage') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SendDate') is not None: - self.send_date = m.get('SendDate') - return self - - -class QuerySendDetailsResponseBodyModel(TeaModel): - def __init__( - self, - content: str = None, - err_code: str = None, - out_id: str = None, - phone_num: str = None, - receive_date: str = None, - send_date: str = None, - send_status: int = None, - template_code: str = None, - ): - # The content of the text message. - self.content = content - # The status code returned by the carrier. - # - # * If the text message was delivered, "DELIVERED" is returned. - # * If the text message failed to be sent, see [Error codes](https://help.aliyun.com/document_detail/101347.html?spm=a2c4g.419277.0.i8) for more information. - self.err_code = err_code - # The extension field. - self.out_id = out_id - # The phone number. - self.phone_num = phone_num - # The date and time when the text message was received. - self.receive_date = receive_date - # The date when the text message was sent. You can query text messages that were sent within the last 30 days. - # - # The date is in the yyyyMMdd format. Example: 20181225. - self.send_date = send_date - # The delivery status of the text message. - # - # * 1: A delivery receipt is to be sent. - # * 2: The text message failed to be sent. - # * 3: The text message was sent. - self.send_status = send_status - # The code of the text message template. - # - # Log on to the SMS console. In the left-side navigation pane, click **Go China** or **Go Globe**. You can view the text message template code in the **Template Code** column on the **Message Templates** tab. - # - # > The text message templates must be created on the Go Globe page and approved. 
- self.template_code = template_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.content is not None: - result['Content'] = self.content - if self.err_code is not None: - result['ErrCode'] = self.err_code - if self.out_id is not None: - result['OutId'] = self.out_id - if self.phone_num is not None: - result['PhoneNum'] = self.phone_num - if self.receive_date is not None: - result['ReceiveDate'] = self.receive_date - if self.send_date is not None: - result['SendDate'] = self.send_date - if self.send_status is not None: - result['SendStatus'] = self.send_status - if self.template_code is not None: - result['TemplateCode'] = self.template_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Content') is not None: - self.content = m.get('Content') - if m.get('ErrCode') is not None: - self.err_code = m.get('ErrCode') - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('PhoneNum') is not None: - self.phone_num = m.get('PhoneNum') - if m.get('ReceiveDate') is not None: - self.receive_date = m.get('ReceiveDate') - if m.get('SendDate') is not None: - self.send_date = m.get('SendDate') - if m.get('SendStatus') is not None: - self.send_status = m.get('SendStatus') - if m.get('TemplateCode') is not None: - self.template_code = m.get('TemplateCode') - return self - - -class QuerySendDetailsResponseBody(TeaModel): - def __init__( - self, - access_denied_detail: str = None, - code: str = None, - message: str = None, - model: List[QuerySendDetailsResponseBodyModel] = None, - success: bool = None, - total_count: int = None, - ): - # The details about the access denial. - self.access_denied_detail = access_denied_detail - # The response code. - # - # If OK is returned, the request is successful. Other values indicate that the request failed. For more information, see [Error codes](https://help.aliyun.com/document_detail/101346.html?spm=a2c4g.419277.0.i11). - self.code = code - # The returned message. - self.message = message - # The returned data. - self.model = model - # Indicates whether the request is successful. Valid values: - # - # * true - # * false - self.success = success - # The total number of entries in the list. 
- self.total_count = total_count - - def validate(self): - if self.model: - for k in self.model: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_denied_detail is not None: - result['AccessDeniedDetail'] = self.access_denied_detail - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - result['Model'] = [] - if self.model is not None: - for k in self.model: - result['Model'].append(k.to_map() if k else None) - if self.success is not None: - result['Success'] = self.success - if self.total_count is not None: - result['TotalCount'] = self.total_count - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AccessDeniedDetail') is not None: - self.access_denied_detail = m.get('AccessDeniedDetail') - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - self.model = [] - if m.get('Model') is not None: - for k in m.get('Model'): - temp_model = QuerySendDetailsResponseBodyModel() - self.model.append(temp_model.from_map(k)) - if m.get('Success') is not None: - self.success = m.get('Success') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') - return self - - -class QuerySendDetailsResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: QuerySendDetailsResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = QuerySendDetailsResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class SendSmsVerifyCodeRequest(TeaModel): - def __init__( - self, - auto_retry: int = None, - code_length: int = None, - code_type: int = None, - country_code: str = None, - duplicate_policy: int = None, - interval: int = None, - out_id: str = None, - owner_id: int = None, - phone_number: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - return_verify_code: bool = None, - scheme_name: str = None, - sign_name: str = None, - sms_up_extend_code: str = None, - template_code: str = None, - template_param: str = None, - valid_time: int = None, - ): - self.auto_retry = auto_retry - # The length of the verification code. Default value: 4. Valid values: 4 to 8. - self.code_length = code_length - # The type of the generated verification code. Default value: 1. Valid values: - # - # * 1: digits only - # * 2: uppercase letters only - # * 3: lowercase letters only - # * 4: uppercase and lowercase letters - # * 5: digits and uppercase letters - # * 6: digits and lowercase letters - # * 7: digits and uppercase and lowercase letters - self.code_type = code_type - # The country code of the phone number. 
SMS verification codes can be sent only by using phone numbers in the Chinese mainland. Default value: 86. - self.country_code = country_code - # Specifies how to handle the verification codes received earlier in a case where verification codes are sent to the same phone number for the same scenario within the validity period. - # - # * 1 (default): The latest verification code overwrites the verification codes received earlier. In this case, verification codes received earlier expire. - # * 2: Verification codes within their validity period are valid and can be used for verification. - self.duplicate_policy = duplicate_policy - # The time interval. Unit: seconds. Default value: 60. This parameter specifies how often you can send a verification code. - self.interval = interval - # The external ID. - self.out_id = out_id - self.owner_id = owner_id - # The phone number. - # - # This parameter is required. - self.phone_number = phone_number - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # Specifies whether to return a verification code. - # - # * **true**\ - # * **false**\ - self.return_verify_code = return_verify_code - # The verification service name. If this parameter is not specified, the default service is used. The name can be up to 20 characters in length. - self.scheme_name = scheme_name - # The signature. - # - # This parameter is required. - self.sign_name = sign_name - # The extension code of the upstream text message. Upstream text messages are text messages sent to the communication service provider. Upstream text messages are used to customize a service, complete an inquiry, or send a request. You are charged for sending upstream text messages based on the billing standards of the service provider. - # - # > The extension code is automatically generated by the system when the signature is generated. You do not need to specify the extension code. You can skip this parameter based on your business requirements. If you want to use custom extension codes, contact your account manager. - self.sms_up_extend_code = sms_up_extend_code - # The code of the text message template. - # - # Log on to the [SMS console](https://dysms.console.aliyun.com/dysms.htm?spm=5176.12818093.categories-n-products.ddysms.3b2816d0xml2NA#/overview). In the left-side navigation pane, click **Go China** or **Go Globe**. You can view the text message template code in the **Template Code** column on the **Message Templates** tab. - # - # > The text message templates must be created on the Go Globe page and approved. - # - # This parameter is required. - self.template_code = template_code - # The value of the variable in the text message template. The verification code is replaced with "##code##". - # - # Example 1: For a system-defined template that contains variables, if the template content is "Your verification code is ${code} and valid for 5 minutes. Do not disclose the verification code to others.", specify the value of this parameter as {"code":"##code##"} - # - # Example 2: For a custom template, if the template content is ${content}, specify the value of this parameter as {"content":"Your verification code is ##code## and must be used within 5 minutes."}. - # - # > - # - # * If line breaks are required in JSON-formatted data, they must meet the relevant requirements that are specified in the standard JSON protocol. 
- # - # * For more information about template variables, see [SMS template specifications](https://help.aliyun.com/document_detail/108253.html). - # - # This parameter is required. - self.template_param = template_param - # The validity period of the verification code. Unit: seconds. Default value: 300. - self.valid_time = valid_time - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.auto_retry is not None: - result['AutoRetry'] = self.auto_retry - if self.code_length is not None: - result['CodeLength'] = self.code_length - if self.code_type is not None: - result['CodeType'] = self.code_type - if self.country_code is not None: - result['CountryCode'] = self.country_code - if self.duplicate_policy is not None: - result['DuplicatePolicy'] = self.duplicate_policy - if self.interval is not None: - result['Interval'] = self.interval - if self.out_id is not None: - result['OutId'] = self.out_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.return_verify_code is not None: - result['ReturnVerifyCode'] = self.return_verify_code - if self.scheme_name is not None: - result['SchemeName'] = self.scheme_name - if self.sign_name is not None: - result['SignName'] = self.sign_name - if self.sms_up_extend_code is not None: - result['SmsUpExtendCode'] = self.sms_up_extend_code - if self.template_code is not None: - result['TemplateCode'] = self.template_code - if self.template_param is not None: - result['TemplateParam'] = self.template_param - if self.valid_time is not None: - result['ValidTime'] = self.valid_time - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AutoRetry') is not None: - self.auto_retry = m.get('AutoRetry') - if m.get('CodeLength') is not None: - self.code_length = m.get('CodeLength') - if m.get('CodeType') is not None: - self.code_type = m.get('CodeType') - if m.get('CountryCode') is not None: - self.country_code = m.get('CountryCode') - if m.get('DuplicatePolicy') is not None: - self.duplicate_policy = m.get('DuplicatePolicy') - if m.get('Interval') is not None: - self.interval = m.get('Interval') - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('ReturnVerifyCode') is not None: - self.return_verify_code = m.get('ReturnVerifyCode') - if m.get('SchemeName') is not None: - self.scheme_name = m.get('SchemeName') - if m.get('SignName') is not None: - self.sign_name = m.get('SignName') - if m.get('SmsUpExtendCode') is not None: - self.sms_up_extend_code = m.get('SmsUpExtendCode') - if m.get('TemplateCode') is not None: - self.template_code = m.get('TemplateCode') - if m.get('TemplateParam') is not None: - self.template_param = m.get('TemplateParam') - if m.get('ValidTime') is not None: - self.valid_time = m.get('ValidTime') - return self - - -class 
SendSmsVerifyCodeResponseBodyModel(TeaModel): - def __init__( - self, - biz_id: str = None, - out_id: str = None, - request_id: str = None, - verify_code: str = None, - ): - # The business ID. - self.biz_id = biz_id - # The external ID. - self.out_id = out_id - # The request ID. - self.request_id = request_id - # The verification code. - self.verify_code = verify_code - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.biz_id is not None: - result['BizId'] = self.biz_id - if self.out_id is not None: - result['OutId'] = self.out_id - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.verify_code is not None: - result['VerifyCode'] = self.verify_code - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('BizId') is not None: - self.biz_id = m.get('BizId') - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('VerifyCode') is not None: - self.verify_code = m.get('VerifyCode') - return self - - -class SendSmsVerifyCodeResponseBody(TeaModel): - def __init__( - self, - access_denied_detail: str = None, - code: str = None, - message: str = None, - model: SendSmsVerifyCodeResponseBodyModel = None, - request_id: str = None, - success: bool = None, - ): - # The details about the access denial. - self.access_denied_detail = access_denied_detail - # The response code. If OK is returned, the request is successful. For more information, see [Response codes](https://help.aliyun.com/zh/pnvs/developer-reference/api-return-code?spm=a2c4g.11174283.0.0.70c5616bkj38Wa). - self.code = code - # The returned message. - self.message = message - # The returned data. - self.model = model - self.request_id = request_id - # Indicates whether the request is successful. 
Valid values: - # - # * **true**\ - # * **false**\ - self.success = success - - def validate(self): - if self.model: - self.model.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_denied_detail is not None: - result['AccessDeniedDetail'] = self.access_denied_detail - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.model is not None: - result['Model'] = self.model.to_map() - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.success is not None: - result['Success'] = self.success - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AccessDeniedDetail') is not None: - self.access_denied_detail = m.get('AccessDeniedDetail') - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('Model') is not None: - temp_model = SendSmsVerifyCodeResponseBodyModel() - self.model = temp_model.from_map(m['Model']) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Success') is not None: - self.success = m.get('Success') - return self - - -class SendSmsVerifyCodeResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: SendSmsVerifyCodeResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = SendSmsVerifyCodeResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class VerifyMobileRequest(TeaModel): - def __init__( - self, - access_code: str = None, - out_id: str = None, - owner_id: int = None, - phone_number: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - ): - # The token obtained by the SDK for your app. - # - # This parameter is required. - self.access_code = access_code - # The external ID. - self.out_id = out_id - self.owner_id = owner_id - # The phone number. - # - # This parameter is required. 
- self.phone_number = phone_number - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_code is not None: - result['AccessCode'] = self.access_code - if self.out_id is not None: - result['OutId'] = self.out_id - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AccessCode') is not None: - self.access_code = m.get('AccessCode') - if m.get('OutId') is not None: - self.out_id = m.get('OutId') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - return self - - -class VerifyMobileResponseBodyGateVerifyResultDTO(TeaModel): - def __init__( - self, - verify_id: str = None, - verify_result: str = None, - ): - # The verification ID. - self.verify_id = verify_id - # The verification results. Valid values: - # - # * **PASS: The input phone number is consistent with the phone number that you use.**\ - # * **REJECT: The input phone number is different from the phone number that you use.**\ - # * **UNKNOWN: The system cannot judge whether the input phone number is consistent with the phone number that you use. - self.verify_result = verify_result - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.verify_id is not None: - result['VerifyId'] = self.verify_id - if self.verify_result is not None: - result['VerifyResult'] = self.verify_result - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('VerifyId') is not None: - self.verify_id = m.get('VerifyId') - if m.get('VerifyResult') is not None: - self.verify_result = m.get('VerifyResult') - return self - - -class VerifyMobileResponseBody(TeaModel): - def __init__( - self, - code: str = None, - gate_verify_result_dto: VerifyMobileResponseBodyGateVerifyResultDTO = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.gate_verify_result_dto = gate_verify_result_dto - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.gate_verify_result_dto: - self.gate_verify_result_dto.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.gate_verify_result_dto is not None: - result['GateVerifyResultDTO'] = self.gate_verify_result_dto.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('GateVerifyResultDTO') is not None: - temp_model = VerifyMobileResponseBodyGateVerifyResultDTO() - self.gate_verify_result_dto = temp_model.from_map(m['GateVerifyResultDTO']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class VerifyMobileResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: VerifyMobileResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = VerifyMobileResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class VerifyPhoneWithTokenRequest(TeaModel): - def __init__( - self, - owner_id: int = None, - phone_number: str = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - sp_token: str = None, - ): - self.owner_id = owner_id - # The phone number. - # - # This parameter is required. - self.phone_number = phone_number - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The token for phone number verification that is obtained by the JavaScript SDK. - # - # This parameter is required. 
- self.sp_token = sp_token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.sp_token is not None: - result['SpToken'] = self.sp_token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('SpToken') is not None: - self.sp_token = m.get('SpToken') - return self - - -class VerifyPhoneWithTokenResponseBodyGateVerify(TeaModel): - def __init__( - self, - verify_id: str = None, - verify_result: str = None, - ): - # The external ID. - self.verify_id = verify_id - # The verification results. Valid values: - # - # * PASS: The input phone number is consistent with the phone number used in HTML5 pages. - # * REJECT: The input phone number is different from the phone number used in HTML5 pages. - # * UNKNOWN: The system cannot judge whether the input phone number is consistent with the phone number used in HTML5 pages. - self.verify_result = verify_result - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.verify_id is not None: - result['VerifyId'] = self.verify_id - if self.verify_result is not None: - result['VerifyResult'] = self.verify_result - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('VerifyId') is not None: - self.verify_id = m.get('VerifyId') - if m.get('VerifyResult') is not None: - self.verify_result = m.get('VerifyResult') - return self - - -class VerifyPhoneWithTokenResponseBody(TeaModel): - def __init__( - self, - code: str = None, - gate_verify: VerifyPhoneWithTokenResponseBodyGateVerify = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # The response parameters. - self.gate_verify = gate_verify - # The returned message. - self.message = message - # The request ID. 
- self.request_id = request_id - - def validate(self): - if self.gate_verify: - self.gate_verify.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.gate_verify is not None: - result['GateVerify'] = self.gate_verify.to_map() - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('GateVerify') is not None: - temp_model = VerifyPhoneWithTokenResponseBodyGateVerify() - self.gate_verify = temp_model.from_map(m['GateVerify']) - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class VerifyPhoneWithTokenResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: VerifyPhoneWithTokenResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = VerifyPhoneWithTokenResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class VerifySmsCodeRequest(TeaModel): - def __init__( - self, - phone_number: str = None, - sms_code: str = None, - sms_token: str = None, - ): - # The phone number, which is used to receive SMS verification codes. - # - # This parameter is required. - self.phone_number = phone_number - # The SMS verification code. - # - # This parameter is required. - self.sms_code = sms_code - # The text message verification code. After you successfully call the corresponding API operation to send the SMS verification code, the end users receive the SMS verification code. SmsToken is returned by the SDK for SMS verification for you to verify the text message verification code. For an Android client, sendVerifyCode is called to send the verification code. For an iOS client, sendVerifyCodeWithTimeout is called to send the verification code. For more information, see [Overview](https://help.aliyun.com/document_detail/400434.html). - # - # This parameter is required. 
- self.sms_token = sms_token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.sms_code is not None: - result['SmsCode'] = self.sms_code - if self.sms_token is not None: - result['SmsToken'] = self.sms_token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('SmsCode') is not None: - self.sms_code = m.get('SmsCode') - if m.get('SmsToken') is not None: - self.sms_token = m.get('SmsToken') - return self - - -class VerifySmsCodeResponseBody(TeaModel): - def __init__( - self, - code: str = None, - data: bool = None, - message: str = None, - request_id: str = None, - ): - # The response code. - # - # * If OK is returned, the request is successful. - # * For more information about other error codes, see [API response codes](https://help.aliyun.com/document_detail/85198.html). - self.code = code - # Indicates whether the request is successful. Valid values: - # - # * **true**\ - # * **false**\ - self.data = data - # The returned message. - self.message = message - # The request ID. - self.request_id = request_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.data is not None: - result['Data'] = self.data - if self.message is not None: - result['Message'] = self.message - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Data') is not None: - self.data = m.get('Data') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class VerifySmsCodeResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: VerifySmsCodeResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = VerifySmsCodeResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class VerifyWithFusionAuthTokenRequest(TeaModel): - def __init__( - self, - owner_id: int = None, - resource_owner_account: str = None, - resource_owner_id: int = None, - verify_token: str = None, - ): - self.owner_id = owner_id - self.resource_owner_account = resource_owner_account - self.resource_owner_id = resource_owner_id - # The unified verification token that is returned by the client SDKs. - # - # This parameter is required. 
- self.verify_token = verify_token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.resource_owner_account is not None: - result['ResourceOwnerAccount'] = self.resource_owner_account - if self.resource_owner_id is not None: - result['ResourceOwnerId'] = self.resource_owner_id - if self.verify_token is not None: - result['VerifyToken'] = self.verify_token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ResourceOwnerAccount') is not None: - self.resource_owner_account = m.get('ResourceOwnerAccount') - if m.get('ResourceOwnerId') is not None: - self.resource_owner_id = m.get('ResourceOwnerId') - if m.get('VerifyToken') is not None: - self.verify_token = m.get('VerifyToken') - return self - - -class VerifyWithFusionAuthTokenResponseBodyModel(TeaModel): - def __init__( - self, - phone_number: str = None, - phone_score: int = None, - verify_result: str = None, - ): - # The phone number, which is returned when the verification is successful. - self.phone_number = phone_number - # The phone number score, which is generated only after the phone number scoring node is enabled and the verification is successful. The higher the score, the more risky the phone number. Valid values: 0 to 100. - self.phone_score = phone_score - # The verification result. Valid values: PASS and UNKNOWN. - self.verify_result = verify_result - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.phone_number is not None: - result['PhoneNumber'] = self.phone_number - if self.phone_score is not None: - result['PhoneScore'] = self.phone_score - if self.verify_result is not None: - result['VerifyResult'] = self.verify_result - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('PhoneNumber') is not None: - self.phone_number = m.get('PhoneNumber') - if m.get('PhoneScore') is not None: - self.phone_score = m.get('PhoneScore') - if m.get('VerifyResult') is not None: - self.verify_result = m.get('VerifyResult') - return self - - -class VerifyWithFusionAuthTokenResponseBody(TeaModel): - def __init__( - self, - code: str = None, - message: str = None, - model: VerifyWithFusionAuthTokenResponseBodyModel = None, - request_id: str = None, - success: bool = None, - ): - # The response code. If OK is returned, the request is successful. Other values indicate that the request failed. For more information, see Error codes. - self.code = code - # The returned message. - self.message = message - # The returned data. - self.model = model - # The request ID, which is used to troubleshoot issues. - self.request_id = request_id - # Indicates whether the request is successful. 
Valid values: true false - self.success = success - - def validate(self): - if self.model: - self.model.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.code is not None: - result['Code'] = self.code - if self.message is not None: - result['Message'] = self.message - if self.model is not None: - result['Model'] = self.model.to_map() - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.success is not None: - result['Success'] = self.success - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - if m.get('Model') is not None: - temp_model = VerifyWithFusionAuthTokenResponseBodyModel() - self.model = temp_model.from_map(m['Model']) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Success') is not None: - self.success = m.get('Success') - return self - - -class VerifyWithFusionAuthTokenResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: VerifyWithFusionAuthTokenResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = VerifyWithFusionAuthTokenResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/METADATA deleted file mode 100644 index c93e403..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/METADATA +++ /dev/null @@ -1,59 +0,0 @@ -Metadata-Version: 2.4 -Name: alibabacloud_endpoint_util -Version: 0.0.4 -Summary: The endpoint-util module of alibabaCloud Python SDK. 
-Home-page: https://github.com/aliyun/endpoint-util/tree/master/python -Author: Alibaba Cloud -Author-email: alibaba-cloud-sdk-dev-team@list.alibaba-inc.com -License: Apache License 2.0 -Keywords: alibabacloud,sdk,tea -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Topic :: Software Development -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: keywords -Dynamic: license -Dynamic: platform -Dynamic: summary - -English | [简体中文](README-CN.md) -![](https://aliyunsdk-pages.alicdn.com/icons/AlibabaCloud.svg) - -## Alibaba Cloud Endpoint for Python - -## Installation -- **Install with pip** - -Python SDK uses a common package management tool named `pip`. If pip is not installed, see the [pip user guide](https://pip.pypa.io/en/stable/installing/ "pip User Guide") to install pip. - -```bash -# Install the endpoint-util -pip install alibabacloud_endpoint_util -``` - -## Issues -[Opening an Issue](https://github.com/aliyun/endpoint-util/issues/new), Issues not conforming to the guidelines may be closed immediately. - -## Changelog -Detailed changes for each release are documented in the [release notes](./ChangeLog.md). - -## References -* [Latest Release](https://github.com/aliyun/endpoint-util/tree/master/python) - -## License -[Apache-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Copyright (c) 2009-present, Alibaba Cloud All rights reserved. 
diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/RECORD deleted file mode 100644 index 11f989d..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -alibabacloud_endpoint_util-0.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_endpoint_util-0.0.4.dist-info/METADATA,sha256=bqrkoK0l3kgkI7vfjV15CubNj5n08S65KGC0stF0kxw,2069 -alibabacloud_endpoint_util-0.0.4.dist-info/RECORD,, -alibabacloud_endpoint_util-0.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_endpoint_util-0.0.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -alibabacloud_endpoint_util-0.0.4.dist-info/top_level.txt,sha256=SxgZWIi9gxV6ujIETxJc180437X1tKT1-mkqnUPqMQs,27 -alibabacloud_endpoint_util/__init__.py,sha256=b8Tfyn46wV92NWm_uziWVxCW3zTxPV8EyFKcI9HhXcA,23 -alibabacloud_endpoint_util/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_endpoint_util/__pycache__/client.cpython-312.pyc,, -alibabacloud_endpoint_util/client.py,sha256=ZdoaGLvMhUnLK1Vt03mw8V4gMp1mVZ5dGqebbgttsJY,850 diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/WHEEL b/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/top_level.txt deleted file mode 100644 index 4b73633..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util-0.0.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_endpoint_util diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util/__init__.py b/venv/Lib/site-packages/alibabacloud_endpoint_util/__init__.py deleted file mode 100644 index 81f0fde..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.0.4" diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_endpoint_util/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c7d4b22..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_endpoint_util/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_endpoint_util/__pycache__/client.cpython-312.pyc deleted file mode 100644 index 50d458e..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_endpoint_util/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_endpoint_util/client.py b/venv/Lib/site-packages/alibabacloud_endpoint_util/client.py deleted file mode 100644 index e915621..0000000 --- a/venv/Lib/site-packages/alibabacloud_endpoint_util/client.py +++ /dev/null @@ -1,20 +0,0 @@ -class Client: - @staticmethod - 
def get_endpoint_rules(product, region_id, endpoint_type, network, suffix=None): - product = product or "" - network = network or "" - if endpoint_type == "regional": - if region_id is None or region_id == "": - raise RuntimeError( - "RegionId is empty, please set a valid RegionId") - result = "<product><network>.<region_id>.aliyuncs.com".replace( - "<region_id>", region_id) - else: - result = "<product><network>.aliyuncs.com" - - result = result.replace("<product>", product.lower()) - if network == "" or network == "public": - result = result.replace("<network>", "") - else: - result = result.replace("<network>", "-"+network) - return result diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/METADATA deleted file mode 100644 index 5fafd39..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/METADATA +++ /dev/null @@ -1,34 +0,0 @@ -Metadata-Version: 2.4 -Name: alibabacloud_gateway_spi -Version: 0.0.3 -Summary: Alibaba Cloud Gateway SPI SDK Library for Python -Home-page: https://github.com/aliyun/alibabacloud-gateway -Author: Alibaba Cloud SDK -Author-email: sdk-team@alibabacloud.com -License: Apache License 2.0 -Keywords: alibabacloud,gateway,spi -Platform: any -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Topic :: Software Development -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -Requires-Dist: alibabacloud_credentials>=0.3.4 -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description-content-type -Dynamic: home-page -Dynamic: keywords -Dynamic: license -Dynamic: platform -Dynamic: requires-dist -Dynamic: requires-python -Dynamic: summary diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/RECORD deleted file mode 100644 index 6f916e2..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -alibabacloud_gateway_spi-0.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_gateway_spi-0.0.3.dist-info/METADATA,sha256=WEphVSwbGrBkLkcz9WAAnOrk-7Ycs0MimPyoZND4_QY,1183 -alibabacloud_gateway_spi-0.0.3.dist-info/RECORD,, -alibabacloud_gateway_spi-0.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_gateway_spi-0.0.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -alibabacloud_gateway_spi-0.0.3.dist-info/top_level.txt,sha256=511ROoT0u4oP62ojq0HVNV0pPDXQQuCtjypzurQn_lg,25 -alibabacloud_gateway_spi/__init__.py,sha256=4GZKi13lDTD25YBkGakhZyEQZWTER_OWQMNPoH_UM2c,22 -alibabacloud_gateway_spi/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_gateway_spi/__pycache__/client.cpython-312.pyc,, 
-alibabacloud_gateway_spi/__pycache__/models.cpython-312.pyc,, -alibabacloud_gateway_spi/client.py,sha256=cNULImzv6_3zhKwPIA8P9HgyayF6_U3XWYd_9PGViQ0,1495 -alibabacloud_gateway_spi/models.py,sha256=QEpI-h1Zj_zZF53G1-gG5tu2nJ7N2Rnb24BuiwZiHcs,12117 diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/WHEEL b/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/top_level.txt deleted file mode 100644 index db2f9c9..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi-0.0.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_gateway_spi diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi/__init__.py b/venv/Lib/site-packages/alibabacloud_gateway_spi/__init__.py deleted file mode 100644 index 27fdca4..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.0.3" diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index f772ce8..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/client.cpython-312.pyc deleted file mode 100644 index 556f73a..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/models.cpython-312.pyc deleted file mode 100644 index cf2f516..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_gateway_spi/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi/client.py b/venv/Lib/site-packages/alibabacloud_gateway_spi/client.py deleted file mode 100644 index 35e8ab8..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi/client.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from alibabacloud_gateway_spi import models as gateway_spi_models - - -class Client: - def __init__(self): - pass - - def modify_configuration( - self, - context: gateway_spi_models.InterceptorContext, - attribute_map: gateway_spi_models.AttributeMap, - ) -> None: - raise Exception('Un-implemented') - - async def modify_configuration_async( - self, - context: gateway_spi_models.InterceptorContext, - attribute_map: gateway_spi_models.AttributeMap, - ) -> None: - raise Exception('Un-implemented') - - def modify_request( - self, - context: gateway_spi_models.InterceptorContext, - attribute_map: gateway_spi_models.AttributeMap, - ) -> None: - raise Exception('Un-implemented') - - async def modify_request_async( - self, - context: gateway_spi_models.InterceptorContext, - attribute_map: gateway_spi_models.AttributeMap, - ) -> None: - raise Exception('Un-implemented') - - def modify_response( - self, - context: gateway_spi_models.InterceptorContext, - attribute_map: gateway_spi_models.AttributeMap, - ) -> None: - raise Exception('Un-implemented') - - async def modify_response_async( - self, - context: gateway_spi_models.InterceptorContext, - attribute_map: gateway_spi_models.AttributeMap, - ) -> None: - raise Exception('Un-implemented') diff --git a/venv/Lib/site-packages/alibabacloud_gateway_spi/models.py b/venv/Lib/site-packages/alibabacloud_gateway_spi/models.py deleted file mode 100644 index 4d910d8..0000000 --- a/venv/Lib/site-packages/alibabacloud_gateway_spi/models.py +++ /dev/null @@ -1,345 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from Tea.model import TeaModel -from typing import Dict, Any, BinaryIO - -from alibabacloud_credentials.client import Client as CredentialClient - - -class InterceptorContextRequest(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - query: Dict[str, str] = None, - body: Any = None, - stream: BinaryIO = None, - host_map: Dict[str, str] = None, - pathname: str = None, - product_id: str = None, - action: str = None, - version: str = None, - protocol: str = None, - method: str = None, - auth_type: str = None, - body_type: str = None, - req_body_type: str = None, - style: str = None, - credential: CredentialClient = None, - signature_version: str = None, - signature_algorithm: str = None, - user_agent: str = None, - ): - self.headers = headers - self.query = query - self.body = body - self.stream = stream - self.host_map = host_map - self.pathname = pathname - self.product_id = product_id - self.action = action - self.version = version - self.protocol = protocol - self.method = method - self.auth_type = auth_type - self.body_type = body_type - self.req_body_type = req_body_type - self.style = style - self.credential = credential - self.signature_version = signature_version - self.signature_algorithm = signature_algorithm - self.user_agent = user_agent - - def validate(self): - self.validate_required(self.pathname, 'pathname') - self.validate_required(self.product_id, 'product_id') - self.validate_required(self.action, 'action') - self.validate_required(self.version, 'version') - self.validate_required(self.protocol, 'protocol') - self.validate_required(self.method, 'method') - self.validate_required(self.auth_type, 'auth_type') - self.validate_required(self.body_type, 'body_type') - self.validate_required(self.req_body_type, 'req_body_type') - self.validate_required(self.credential, 'credential') - self.validate_required(self.user_agent, 'user_agent') - - def to_map(self): - _map = super().to_map() 
- if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.query is not None: - result['query'] = self.query - if self.body is not None: - result['body'] = self.body - if self.stream is not None: - result['stream'] = self.stream - if self.host_map is not None: - result['hostMap'] = self.host_map - if self.pathname is not None: - result['pathname'] = self.pathname - if self.product_id is not None: - result['productId'] = self.product_id - if self.action is not None: - result['action'] = self.action - if self.version is not None: - result['version'] = self.version - if self.protocol is not None: - result['protocol'] = self.protocol - if self.method is not None: - result['method'] = self.method - if self.auth_type is not None: - result['authType'] = self.auth_type - if self.body_type is not None: - result['bodyType'] = self.body_type - if self.req_body_type is not None: - result['reqBodyType'] = self.req_body_type - if self.style is not None: - result['style'] = self.style - if self.credential is not None: - result['credential'] = self.credential - if self.signature_version is not None: - result['signatureVersion'] = self.signature_version - if self.signature_algorithm is not None: - result['signatureAlgorithm'] = self.signature_algorithm - if self.user_agent is not None: - result['userAgent'] = self.user_agent - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('query') is not None: - self.query = m.get('query') - if m.get('body') is not None: - self.body = m.get('body') - if m.get('stream') is not None: - self.stream = m.get('stream') - if m.get('hostMap') is not None: - self.host_map = m.get('hostMap') - if m.get('pathname') is not None: - self.pathname = m.get('pathname') - if m.get('productId') is not None: - self.product_id = m.get('productId') - if m.get('action') is not None: - self.action = m.get('action') - if m.get('version') is not None: - self.version = m.get('version') - if m.get('protocol') is not None: - self.protocol = m.get('protocol') - if m.get('method') is not None: - self.method = m.get('method') - if m.get('authType') is not None: - self.auth_type = m.get('authType') - if m.get('bodyType') is not None: - self.body_type = m.get('bodyType') - if m.get('reqBodyType') is not None: - self.req_body_type = m.get('reqBodyType') - if m.get('style') is not None: - self.style = m.get('style') - if m.get('credential') is not None: - self.credential = m.get('credential') - if m.get('signatureVersion') is not None: - self.signature_version = m.get('signatureVersion') - if m.get('signatureAlgorithm') is not None: - self.signature_algorithm = m.get('signatureAlgorithm') - if m.get('userAgent') is not None: - self.user_agent = m.get('userAgent') - return self - - -class InterceptorContextConfiguration(TeaModel): - def __init__( - self, - region_id: str = None, - endpoint: str = None, - endpoint_rule: str = None, - endpoint_map: Dict[str, str] = None, - endpoint_type: str = None, - network: str = None, - suffix: str = None, - ): - self.region_id = region_id - self.endpoint = endpoint - self.endpoint_rule = endpoint_rule - self.endpoint_map = endpoint_map - self.endpoint_type = endpoint_type - self.network = network - self.suffix = suffix - - def validate(self): - self.validate_required(self.region_id, 'region_id') - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = 
dict() - if self.region_id is not None: - result['regionId'] = self.region_id - if self.endpoint is not None: - result['endpoint'] = self.endpoint - if self.endpoint_rule is not None: - result['endpointRule'] = self.endpoint_rule - if self.endpoint_map is not None: - result['endpointMap'] = self.endpoint_map - if self.endpoint_type is not None: - result['endpointType'] = self.endpoint_type - if self.network is not None: - result['network'] = self.network - if self.suffix is not None: - result['suffix'] = self.suffix - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('regionId') is not None: - self.region_id = m.get('regionId') - if m.get('endpoint') is not None: - self.endpoint = m.get('endpoint') - if m.get('endpointRule') is not None: - self.endpoint_rule = m.get('endpointRule') - if m.get('endpointMap') is not None: - self.endpoint_map = m.get('endpointMap') - if m.get('endpointType') is not None: - self.endpoint_type = m.get('endpointType') - if m.get('network') is not None: - self.network = m.get('network') - if m.get('suffix') is not None: - self.suffix = m.get('suffix') - return self - - -class InterceptorContextResponse(TeaModel): - def __init__( - self, - status_code: int = None, - headers: Dict[str, str] = None, - body: BinaryIO = None, - deserialized_body: Any = None, - ): - self.status_code = status_code - self.headers = headers - self.body = body - self.deserialized_body = deserialized_body - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.headers is not None: - result['headers'] = self.headers - if self.body is not None: - result['body'] = self.body - if self.deserialized_body is not None: - result['deserializedBody'] = self.deserialized_body - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('body') is not None: - self.body = m.get('body') - if m.get('deserializedBody') is not None: - self.deserialized_body = m.get('deserializedBody') - return self - - -class InterceptorContext(TeaModel): - def __init__( - self, - request: InterceptorContextRequest = None, - configuration: InterceptorContextConfiguration = None, - response: InterceptorContextResponse = None, - ): - self.request = request - self.configuration = configuration - self.response = response - - def validate(self): - self.validate_required(self.request, 'request') - if self.request: - self.request.validate() - self.validate_required(self.configuration, 'configuration') - if self.configuration: - self.configuration.validate() - self.validate_required(self.response, 'response') - if self.response: - self.response.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.request is not None: - result['request'] = self.request.to_map() - if self.configuration is not None: - result['configuration'] = self.configuration.to_map() - if self.response is not None: - result['response'] = self.response.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('request') is not None: - temp_model = InterceptorContextRequest() - self.request = temp_model.from_map(m['request']) - if m.get('configuration') is not None: - temp_model = 
InterceptorContextConfiguration() - self.configuration = temp_model.from_map(m['configuration']) - if m.get('response') is not None: - temp_model = InterceptorContextResponse() - self.response = temp_model.from_map(m['response']) - return self - - -class AttributeMap(TeaModel): - def __init__( - self, - attributes: Dict[str, Any] = None, - key: Dict[str, str] = None, - ): - self.attributes = attributes - self.key = key - - def validate(self): - self.validate_required(self.attributes, 'attributes') - self.validate_required(self.key, 'key') - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.attributes is not None: - result['attributes'] = self.attributes - if self.key is not None: - result['key'] = self.key - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('attributes') is not None: - self.attributes = m.get('attributes') - if m.get('key') is not None: - self.key = m.get('key') - return self - - diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/METADATA deleted file mode 100644 index b844897..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.4 -Name: alibabacloud_openapi_util -Version: 0.2.2 -Summary: Aliyun Tea OpenApi Library for Python -Home-page: https://github.com/aliyun/darabonba-openapi-util -Author: Alibaba Cloud -Author-email: alibaba-cloud-sdk-dev-team@list.alibaba-inc.com -License: Apache License 2.0 -Keywords: alibabacloud_openapi_util -Platform: any -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Topic :: Software Development -Requires-Dist: alibabacloud_tea_util>=0.0.2 -Requires-Dist: cryptography>=3.0.0 -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: keywords -Dynamic: license -Dynamic: platform -Dynamic: requires-dist -Dynamic: summary - -English | [简体中文](README-CN.md) -![](https://aliyunsdk-pages.alicdn.com/icons/AlibabaCloud.svg) - -## Alibaba Cloud OpenApi Util Client for Python - -## Installation -- **Install with pip** - -Python SDK uses a common package management tool named `pip`. If pip is not installed, see the [pip user guide](https://pip.pypa.io/en/stable/installing/ "pip User Guide") to install pip. - -```bash -# Install the alibabacloud_openapi_util -pip install alibabacloud_openapi_util -``` - -## Issues -[Opening an Issue](https://github.com/aliyun/darabonba-openapi-util/issues/new), Issues not conforming to the guidelines may be closed immediately. - -## Changelog -Detailed changes for each release are documented in the [release notes](./ChangeLog.md). 
- -## References -* [Latest Release](https://github.com/aliyun/darabonba-openapi-util) - -## License -[Apache-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Copyright (c) 2009-present, Alibaba Cloud All rights reserved. diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/RECORD deleted file mode 100644 index d2975d2..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -alibabacloud_openapi_util-0.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_openapi_util-0.2.2.dist-info/METADATA,sha256=BxeKkAhw9hQu2XsgGOzFIGUJc_pYOz6ONqSKpWnf_nw,2052 -alibabacloud_openapi_util-0.2.2.dist-info/RECORD,, -alibabacloud_openapi_util-0.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_openapi_util-0.2.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -alibabacloud_openapi_util-0.2.2.dist-info/top_level.txt,sha256=Jk2YmRHG2WRAq2vkmIbshlKi2Du-ol5QFs7S_gqy62M,26 -alibabacloud_openapi_util/__init__.py,sha256=Mgz08fr8tY-8mhsBCgrKWOz7pu_-v4kgS4rNrtcwGxM,23 -alibabacloud_openapi_util/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_openapi_util/__pycache__/client.cpython-312.pyc,, -alibabacloud_openapi_util/__pycache__/sm3.cpython-312.pyc,, -alibabacloud_openapi_util/client.py,sha256=VRpy41kjyqYcFjD3ehBpw4iqisbXdYsw6x_D1A-Ftto,13704 -alibabacloud_openapi_util/sm3.py,sha256=UQAALAv9q7HO7nAsZBZiMi5KHNZB8rCKLzyyhtdp5kw,4463 diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/WHEEL b/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/top_level.txt deleted file mode 100644 index 7c31536..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util-0.2.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_openapi_util diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util/__init__.py b/venv/Lib/site-packages/alibabacloud_openapi_util/__init__.py deleted file mode 100644 index 020ed73..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '0.2.2' diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 2a2523d..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/client.cpython-312.pyc deleted file mode 100644 index 9a9aaba..0000000 Binary files 
a/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/sm3.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/sm3.cpython-312.pyc deleted file mode 100644 index b03ece3..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_openapi_util/__pycache__/sm3.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util/client.py b/venv/Lib/site-packages/alibabacloud_openapi_util/client.py deleted file mode 100644 index a7bc5db..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util/client.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -import binascii -import datetime -import hashlib -import hmac -import base64 -import copy -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives.serialization import load_pem_private_key - -from urllib.parse import quote_plus, quote - -from .sm3 import hash_sm3, Sm3 -from alibabacloud_tea_util.client import Client as Util -from Tea.stream import STREAM_CLASS -from Tea.model import TeaModel - - -def to_str(val): - if val is None: - return val - - if isinstance(val, bytes): - return str(val, encoding='utf-8') - else: - return str(val) - - -def rsa_sign(plaintext, secret): - if not secret.startswith(b'-----BEGIN RSA PRIVATE KEY-----'): - secret = b'-----BEGIN RSA PRIVATE KEY-----\n%s' % secret - if not secret.endswith(b'-----END RSA PRIVATE KEY-----'): - secret = b'%s\n-----END RSA PRIVATE KEY-----' % secret - - key = load_pem_private_key(secret, password=None, backend=default_backend()) - return key.sign(plaintext, padding.PKCS1v15(), hashes.SHA256()) - - -def signature_method(secret, source, sign_type): - source = source.encode('utf-8') - secret = secret.encode('utf-8') - if sign_type == 'ACS3-HMAC-SHA256': - return hmac.new(secret, source, hashlib.sha256).digest() - elif sign_type == 'ACS3-HMAC-SM3': - return hmac.new(secret, source, Sm3).digest() - elif sign_type == 'ACS3-RSA-SHA256': - return rsa_sign(source, secret) - - -def get_canonical_query_string(query): - if query is None or len(query) <= 0: - return '' - canon_keys = [] - for k, v in query.items(): - if v is not None: - canon_keys.append(k) - - canon_keys.sort() - query_string = '' - for key in canon_keys: - value = quote(query[key], safe='~', encoding='utf-8') - if value is None: - s = f'{key}&' - else: - s = f'{key}={value}&' - query_string += s - return query_string[:-1] - - -def get_canonicalized_headers(headers): - canon_keys = [] - tmp_headers = {} - for k, v in headers.items(): - if v is not None: - if k.lower() not in canon_keys: - canon_keys.append(k.lower()) - tmp_headers[k.lower()] = [to_str(v).strip()] - else: - tmp_headers[k.lower()].append(to_str(v).strip()) - - canon_keys.sort() - canonical_headers = '' - for key in canon_keys: - header_entry = ','.join(sorted(tmp_headers[key])) - s = f'{key}:{header_entry}\n' - canonical_headers += s - return canonical_headers, ';'.join(canon_keys) - - -class Client(object): - """ - This is for OpenApi Util - """ - - @staticmethod - def convert(body, content): - """ - Convert all params of body other than type of readable into content - - @param body: source Model - - @param content: target Model - - @return: void - """ - body_map = Client._except_stream(body.to_map()) - 
content.from_map(body_map) - - @staticmethod - def _except_stream(val): - if isinstance(val, dict): - result = {} - for k, v in val.items(): - result[k] = Client._except_stream(v) - return result - elif isinstance(val, list): - result = [] - for i in val: - if i is not None: - item = Client._except_stream(i) - if item is not None: - result.append(item) - else: - result.append(Client._except_stream(i)) - return result - elif isinstance(val, STREAM_CLASS): - return None - return val - - @staticmethod - def _get_canonicalized_headers(headers): - canon_keys = [] - for k in headers: - if k.startswith('x-acs-'): - canon_keys.append(k) - canon_keys = sorted(canon_keys) - canon_header = '' - for k in canon_keys: - canon_header += '%s:%s\n' % (k, headers[k]) - return canon_header - - @staticmethod - def _get_canonicalized_resource(pathname, query): - if len(query) <= 0: - return pathname - resource = '%s?' % pathname - query_list = sorted(list(query)) - for key in query_list: - if query[key] is not None: - if query[key] == '': - s = '%s&' % key - else: - s = '%s=%s&' % (key, query[key]) - resource += s - return resource[:-1] - - @staticmethod - def get_string_to_sign(request): - """ - Get the string to be signed according to request - - @param request: which contains signed messages - - @return: the signed string - """ - method, pathname, headers, query = request.method, request.pathname, request.headers, request.query - - accept = '' if headers.get('accept') is None else headers.get('accept') - content_md5 = '' if headers.get('content-md5') is None else headers.get('content-md5') - content_type = '' if headers.get('content-type') is None else headers.get('content-type') - date = '' if headers.get('date') is None else headers.get('date') - - header = '%s\n%s\n%s\n%s\n%s\n' % (method, accept, content_md5, content_type, date) - canon_headers = Client._get_canonicalized_headers(headers) - canon_resource = Client._get_canonicalized_resource(pathname, query) - sign_str = header + canon_headers + canon_resource - return sign_str - - @staticmethod - def get_roasignature(string_to_sign, secret): - """ - Get signature according to stringToSign, secret - - @type string_to_sign: str - @param string_to_sign: the signed string - - @type secret: str - @param secret: accesskey secret - - @return: the signature - """ - hash_val = hmac.new(secret.encode('utf-8'), string_to_sign.encode('utf-8'), hashlib.sha1).digest() - signature = base64.b64encode(hash_val).decode('utf-8') - return signature - - @staticmethod - def _object_handler(key, value, out): - if value is None: - return - - if isinstance(value, dict): - for k, v in value.items(): - Client._object_handler('%s.%s' % (key, k), v, out) - elif isinstance(value, TeaModel): - for k, v in value.to_map().items(): - Client._object_handler('%s.%s' % (key, k), v, out) - elif isinstance(value, (list, tuple)): - for index, val in enumerate(value): - Client._object_handler('%s.%s' % (key, index + 1), val, out) - else: - if key.startswith('.'): - key = key[1:] - if isinstance(value, bytes): - out[key] = str(value, encoding='utf-8') - elif not isinstance(value, STREAM_CLASS): - out[key] = str(value) - - @staticmethod - def to_form(filter): - """ - Parse filter into a form string - - @type filter: dict - @param filter: object - - @return: the string - """ - result = {} - if filter: - Client._object_handler('', filter, result) - return Util.to_form_string( - Util.anyify_map_value(result) - ) - - @staticmethod - def get_timestamp(): - """ - Get timestamp - - @return: the 
timestamp string - """ - return datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") - - @staticmethod - def query(filter): - """ - Parse filter into a object which's type is map[string]string - - @type filter: dict - @param filter: query param - - @return: the object - """ - out_dict = {} - if filter: - Client._object_handler('', filter, out_dict) - return out_dict - - @staticmethod - def get_rpcsignature(signed_params, method, secret): - """ - Get signature according to signedParams, method and secret - - @type signed_params: dict - @param signed_params: params which need to be signed - - @type method: str - @param method: http method e.g. GET - - @type secret: str - @param secret: AccessKeySecret - - @return: the signature - """ - queries = signed_params.copy() - keys = list(queries.keys()) - keys.sort() - - canonicalized_query_string = "" - - for k in keys: - if queries[k] is not None: - canonicalized_query_string += f'&{quote(k, safe="~", encoding="utf-8")}=' \ - f'{quote(queries[k], safe="~", encoding="utf-8")}' - - string_to_sign = f'{method}&%2F&{quote_plus(canonicalized_query_string[1:], safe="~", encoding="utf-8")}' - - digest_maker = hmac.new(bytes(secret + '&', encoding="utf-8"), - bytes(string_to_sign, encoding="utf-8"), - digestmod=hashlib.sha1) - hash_bytes = digest_maker.digest() - signed_str = str(base64.b64encode(hash_bytes), encoding="utf-8") - - return signed_str - - @staticmethod - def array_to_string_with_specified_style(array, prefix, style): - """ - Parse array into a string with specified style - - @type array: any - @param array: the array - - @type prefix: str - @param prefix: the prefix string - - @param style: specified style e.g. repeatList - - @return: the string - """ - if array is None: - return '' - - if style == 'repeatList': - return Client._flat_repeat_list({prefix: array}) - elif style == 'simple': - return ','.join(map(str, array)) - elif style == 'spaceDelimited': - return ' '.join(map(str, array)) - elif style == 'pipeDelimited': - return '|'.join(map(str, array)) - elif style == 'json': - return Util.to_jsonstring(Client._parse_to_dict(array)) - else: - return '' - - @staticmethod - def _flat_repeat_list(dic): - query = {} - if dic: - Client._object_handler('', dic, query) - - l = [] - q = sorted(query) - for i in q: - k = quote_plus(i, encoding='utf-8') - v = quote_plus(query[i], encoding='utf-8') - l.append(k + '=' + v) - return '&&'.join(l) - - @staticmethod - def parse_to_map(inp): - """ - Transform input as map. 
- """ - try: - result = Client._parse_to_dict(inp) - return copy.deepcopy(result) - except TypeError: - return - - @staticmethod - def _parse_to_dict(val): - if isinstance(val, dict): - result = {} - for k, v in val.items(): - if isinstance(v, (list, dict, TeaModel)): - result[k] = Client._parse_to_dict(v) - else: - result[k] = v - return result - elif isinstance(val, list): - result = [] - for i in val: - if isinstance(i, (list, dict, TeaModel)): - result.append(Client._parse_to_dict(i)) - else: - result.append(i) - return result - elif isinstance(val, TeaModel): - return val.to_map() - - @staticmethod - def get_endpoint(endpoint, server_use, endpoint_type): - """ - If endpointType is internal, use internal endpoint - If serverUse is true and endpointType is accelerate, use accelerate endpoint - Default return endpoint - @param server_use whether use accelerate endpoint - @param endpoint_type value must be internal or accelerate - @return the final endpoint - """ - if endpoint_type == "internal": - str_split = endpoint.split('.') - str_split[0] += "-internal" - endpoint = ".".join(str_split) - - if server_use and endpoint_type == "accelerate": - return "oss-accelerate.aliyuncs.com" - - return endpoint - - @staticmethod - def hash(raw, sign_type): - if sign_type == 'ACS3-HMAC-SHA256' or sign_type == 'ACS3-RSA-SHA256': - return hashlib.sha256(raw).digest() - elif sign_type == 'ACS3-HMAC-SM3': - return hash_sm3(raw) - - @staticmethod - def hex_encode(raw): - if raw: - return binascii.b2a_hex(raw).decode('utf-8') - - @staticmethod - def get_authorization(request, sign_type, payload, ak, secret): - canonical_uri = request.pathname if request.pathname else '/' - canonicalized_query = get_canonical_query_string(request.query) - canonicalized_headers, signed_headers = get_canonicalized_headers(request.headers) - - canonical_request = f'{request.method}\n' \ - f'{canonical_uri}\n' \ - f'{canonicalized_query}\n' \ - f'{canonicalized_headers}\n' \ - f'{signed_headers}\n' \ - f'{payload}' - - str_to_sign = f'{sign_type}\n{Client.hex_encode(Client.hash(canonical_request.encode("utf-8"), sign_type))}' - signature = Client.hex_encode(signature_method(secret, str_to_sign, sign_type)) - auth = f'{sign_type} Credential={ak},SignedHeaders={signed_headers},Signature={signature}' - return auth - - @staticmethod - def get_encode_path(path): - return quote(path, safe='/~', encoding="utf-8") - - @staticmethod - def get_encode_param(param): - return quote(param, safe='~', encoding="utf-8") diff --git a/venv/Lib/site-packages/alibabacloud_openapi_util/sm3.py b/venv/Lib/site-packages/alibabacloud_openapi_util/sm3.py deleted file mode 100644 index b4cc7fd..0000000 --- a/venv/Lib/site-packages/alibabacloud_openapi_util/sm3.py +++ /dev/null @@ -1,189 +0,0 @@ -import binascii -import copy - -IV = "7380166f 4914b2b9 172442d7 da8a0600 a96f30bc 163138aa e38dee4d b0fb0e4e" -IV = int(IV.replace(" ", ""), 16) -a = [] -for i in range(0, 8): - a.append(0) - a[i] = (IV >> ((7 - i) * 32)) & 0xFFFFFFFF -IV = a - -T_j = [] -for i in range(0, 16): - T_j.append(0) - T_j[i] = 0x79cc4519 -for i in range(16, 64): - T_j.append(0) - T_j[i] = 0x7a879d8a - - -def rotate_left(a, k): - k = k % 32 - return ((a << k) & 0xFFFFFFFF) | ((a & 0xFFFFFFFF) >> (32 - k)) - - -def FF_j(X, Y, Z, j): - if 0 <= j < 16: - ret = X ^ Y ^ Z - elif 16 <= j < 64: - ret = (X & Y) | (X & Z) | (Y & Z) - return ret - - -def GG_j(X, Y, Z, j): - if 0 <= j < 16: - ret = X ^ Y ^ Z - elif 16 <= j < 64: - # ret = (X | Y) & ((2 ** 32 - 1 - X) | Z) - ret = (X & Y) | ((~ X) 
& Z) - return ret - - -def P_0(X): - return X ^ (rotate_left(X, 9)) ^ (rotate_left(X, 17)) - - -def P_1(X): - return X ^ (rotate_left(X, 15)) ^ (rotate_left(X, 23)) - - -def CF(V_i, B_i): - W = [] - for i in range(16): - weight = 0x1000000 - data = 0 - for k in range(i * 4, (i + 1) * 4): - data = data + B_i[k] * weight - weight = int(weight / 0x100) - W.append(data) - - for j in range(16, 68): - W.append(0) - W[j] = P_1(W[j - 16] ^ W[j - 9] ^ (rotate_left(W[j - 3], 15))) ^ (rotate_left(W[j - 13], 7)) ^ W[j - 6] - str1 = "%08x" % W[j] - W_1 = [] - for j in range(0, 64): - W_1.append(0) - W_1[j] = W[j] ^ W[j + 4] - str1 = "%08x" % W_1[j] - - A, B, C, D, E, F, G, H = V_i - """ - print "00", - out_hex([A, B, C, D, E, F, G, H]) - """ - for j in range(0, 64): - SS1 = rotate_left(((rotate_left(A, 12)) + E + (rotate_left(T_j[j], j))) & 0xFFFFFFFF, 7) - SS2 = SS1 ^ (rotate_left(A, 12)) - TT1 = (FF_j(A, B, C, j) + D + SS2 + W_1[j]) & 0xFFFFFFFF - TT2 = (GG_j(E, F, G, j) + H + SS1 + W[j]) & 0xFFFFFFFF - D = C - C = rotate_left(B, 9) - B = A - A = TT1 - H = G - G = rotate_left(F, 19) - F = E - E = P_0(TT2) - - A = A & 0xFFFFFFFF - B = B & 0xFFFFFFFF - C = C & 0xFFFFFFFF - D = D & 0xFFFFFFFF - E = E & 0xFFFFFFFF - F = F & 0xFFFFFFFF - G = G & 0xFFFFFFFF - H = H & 0xFFFFFFFF - """ - str1 = "%02d" % j - if str1[0] == "0": - str1 = ' ' + str1[1:] - out_hex([A, B, C, D, E, F, G, H]) - """ - - V_i_1 = [ - A ^ V_i[0], - B ^ V_i[1], - C ^ V_i[2], - D ^ V_i[3], - E ^ V_i[4], - F ^ V_i[5], - G ^ V_i[6], - H ^ V_i[7] - ] - return V_i_1 - - -def hash_msg(msg): - len1 = len(msg) - reserve1 = len1 % 64 - msg.append(0x80) - reserve1 = reserve1 + 1 - # 56-64, add 64 byte - range_end = 56 - if reserve1 > range_end: - range_end = range_end + 64 - - for i in range(reserve1, range_end): - msg.append(0x00) - - bit_length = (len1) * 8 - bit_length_str = [bit_length % 0x100] - for i in range(7): - bit_length = int(bit_length / 0x100) - bit_length_str.append(bit_length % 0x100) - for i in range(8): - msg.append(bit_length_str[7 - i]) - - group_count = round(len(msg) / 64) - - b = [] - for i in range(0, group_count): - b.append(msg[i * 64:(i + 1) * 64]) - - v = [IV] - for i in range(0, group_count): - v.append(CF(v[i], b[i])) - - y = v[i + 1] - result = "" - for i in y: - result = '%s%08x' % (result, i) - return result - - -def to_byte_array(msg): # 转换成byte数组 - ml = len(msg) - msg_byte = [] - for i in range(ml): - msg_byte.append(msg[i]) - return msg_byte - - -def hash_sm3(msg): - msg_byte = to_byte_array(msg) - return binascii.a2b_hex(hash_msg(msg_byte)) - - -class Sm3: - block_size = 64 - - def __init__(self, msg_byte=b''): - self.byte_array = to_byte_array(msg_byte) - - def update(self, data): - self.byte_array.extend(to_byte_array(data)) - - def digest(self): - return binascii.a2b_hex(hash_msg(self.byte_array)) - - def hexdigest(self): - return hash_msg(self.byte_array) - - @property - def digest_size(self): - return len(self.byte_array) - - def copy(self): - return copy.deepcopy(self) diff --git a/venv/Lib/site-packages/alibabacloud_tea-0.4.3.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_tea-0.4.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea-0.4.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_tea-0.4.3.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_tea-0.4.3.dist-info/METADATA deleted file mode 100644 index ade06e8..0000000 --- 
'user-agent': Utils.get_user_agent(self._user_agent), - 'x-acs-date': Utils.get_timestamp(), - 'x-acs-signature-nonce': Utils.get_nonce(), - 'accept': 'application/json', - }, global_headers, extends_headers, request.headers) - if params.style == 'RPC': - headers = self.get_rpc_headers() - if not DaraCore.is_null(headers): - _request.headers = DaraCore.merge({}, _request.headers, headers) - signature_algorithm = self._signature_algorithm or 'ACS3-HMAC-SHA256' - hashed_request_payload = Utils.hash(DaraBytes.from_('', 'utf-8'), signature_algorithm) - if not DaraCore.is_null(request.stream): - tmp = DaraStream.read_as_bytes(request.stream) - hashed_request_payload = Utils.hash(tmp, signature_algorithm) - _request.body = tmp - _request.headers["content-type"] = 'application/octet-stream' - else: - if not DaraCore.is_null(request.body): - if params.req_body_type == 'byte': - byte_obj = bytes(request.body) - hashed_request_payload = Utils.hash(byte_obj, signature_algorithm) - _request.body = byte_obj - elif params.req_body_type == 'json': - json_obj = DaraCore.to_json_string(request.body) - hashed_request_payload = Utils.hash(json_obj.encode('utf-8'), signature_algorithm) - _request.body = json_obj - _request.headers["content-type"] = 'application/json; charset=utf-8' - else: - m = request.body - form_obj = Utils.to_form(m) - hashed_request_payload = Utils.hash(form_obj.encode('utf-8'), signature_algorithm) - _request.body = form_obj - _request.headers["content-type"] = 'application/x-www-form-urlencoded' - - - _request.headers["x-acs-content-sha256"] = hashed_request_payload.hex() - if params.auth_type != 'Anonymous': - if DaraCore.is_null(self._credential): - raise main_exceptions.ClientException( - code = f'InvalidCredentials', - message = f'Please set up the credentials correctly. If you are setting them through environment variables, please ensure that ALIBABA_CLOUD_ACCESS_KEY_ID and ALIBABA_CLOUD_ACCESS_KEY_SECRET are set correctly. See https://help.aliyun.com/zh/sdk/developer-reference/configure-the-alibaba-cloud-accesskey-environment-variable-on-linux-macos-and-windows-systems for more details.' 
- ) - credential_model = self._credential.get_credential() - if not DaraCore.is_null(credential_model.provider_name): - _request.headers["x-acs-credentials-provider"] = credential_model.provider_name - auth_type = credential_model.type - if auth_type == 'bearer': - bearer_token = credential_model.bearer_token - _request.headers["x-acs-bearer-token"] = bearer_token - if params.style == 'RPC': - _request.query["SignatureType"] = 'BEARERTOKEN' - else: - _request.headers["x-acs-signature-type"] = 'BEARERTOKEN' - - elif auth_type == 'id_token': - id_token = credential_model.security_token - _request.headers["x-acs-zero-trust-idtoken"] = id_token - else: - access_key_id = credential_model.access_key_id - access_key_secret = credential_model.access_key_secret - security_token = credential_model.security_token - if not DaraCore.is_null(security_token) and security_token != '': - _request.headers["x-acs-accesskey-id"] = access_key_id - _request.headers["x-acs-security-token"] = security_token - _request.headers["Authorization"] = Utils.get_authorization(_request, signature_algorithm, hashed_request_payload.hex(), access_key_id, access_key_secret) - - _last_request = _request - _response = DaraCore.do_action(_request, _runtime) - _last_response = _response - if (_response.status_code >= 400) and (_response.status_code < 600): - err = {} - if not DaraCore.is_null(_response.headers.get("content-type")) and _response.headers.get("content-type") == 'text/xml;charset=utf-8': - _str = DaraStream.read_as_string(_response.body) - resp_map = DaraXML.parse_xml(_str, None) - err = resp_map.get("Error") - else: - _res = DaraStream.read_as_json(_response.body) - err = _res - - request_id = err.get("RequestId") or err.get("requestId") - code = err.get("Code") or err.get("code") - if (f'{code}' == 'Throttling') or (f'{code}' == 'Throttling.User') or (f'{code}' == 'Throttling.Api'): - raise main_exceptions.ThrottlingException( - status_code = _response.status_code, - code = f'{code}', - message = f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {request_id}', - description = f'{err.get("Description") or err.get("description")}', - retry_after = Utils.get_throttling_time_left(_response.headers), - data = err, - request_id = f'{request_id}' - ) - elif (_response.status_code >= 400) and (_response.status_code < 500): - raise main_exceptions.ClientException( - status_code = _response.status_code, - code = f'{code}', - message = f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {request_id}', - description = f'{err.get("Description") or err.get("description")}', - data = err, - access_denied_detail = self.get_access_denied_detail(err), - request_id = f'{request_id}' - ) - else: - raise main_exceptions.ServerException( - status_code = _response.status_code, - code = f'{code}', - message = f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {request_id}', - description = f'{err.get("Description") or err.get("description")}', - data = err, - request_id = f'{request_id}' - ) - - if params.body_type == 'binary': - resp = { - 'body': _response.body, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - return resp - elif params.body_type == 'byte': - byt = DaraStream.read_as_bytes(_response.body) - return { - 'body': byt, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - elif params.body_type == 'string': - resp_str = DaraStream.read_as_string(_response.body) - return 
{ - 'body': resp_str, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - elif params.body_type == 'json': - obj = DaraStream.read_as_json(_response.body) - res = obj - return { - 'body': res, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - elif params.body_type == 'array': - arr = DaraStream.read_as_json(_response.body) - return { - 'body': arr, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - else: - anything = DaraStream.read_as_string(_response.body) - return { - 'body': anything, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - - except Exception as e: - _context = RetryPolicyContext( - retries_attempted= _retries_attempted, - http_request = _last_request, - http_response = _last_response, - exception = e - ) - continue - raise UnretryableException(_context) - - async def do_request_async( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> dict: - _runtime = { - 'key': runtime.key or self._key, - 'cert': runtime.cert or self._cert, - 'ca': runtime.ca or self._ca, - 'readTimeout': DaraCore.to_number(runtime.read_timeout or self._read_timeout), - 'connectTimeout': DaraCore.to_number(runtime.connect_timeout or self._connect_timeout), - 'httpProxy': runtime.http_proxy or self._http_proxy, - 'httpsProxy': runtime.https_proxy or self._https_proxy, - 'noProxy': runtime.no_proxy or self._no_proxy, - 'socks5Proxy': runtime.socks_5proxy or self._socks_5proxy, - 'socks5NetWork': runtime.socks_5net_work or self._socks_5net_work, - 'maxIdleConns': DaraCore.to_number(runtime.max_idle_conns or self._max_idle_conns), - 'retryOptions': self._retry_options, - 'ignoreSSL': runtime.ignore_ssl, - 'tlsMinVersion': self._tls_min_version, - } - _last_request = None - _last_response = None - _retries_attempted = 0 - _context = RetryPolicyContext( - retries_attempted= _retries_attempted - ) - while DaraCore.should_retry(_runtime.get('retryOptions'), _context): - if _retries_attempted > 0: - _backoff_time = DaraCore.get_backoff_time(_runtime.get('retryOptions'), _context) - if _backoff_time > 0: - DaraCore.sleep(_backoff_time) - _retries_attempted = _retries_attempted + 1 - try: - _request = DaraRequest() - _request.protocol = self._protocol or params.protocol - _request.method = params.method - _request.pathname = params.pathname - global_queries = {} - global_headers = {} - if not DaraCore.is_null(self._global_parameters): - global_params = self._global_parameters - if not DaraCore.is_null(global_params.queries): - global_queries = global_params.queries - if not DaraCore.is_null(global_params.headers): - global_headers = global_params.headers - extends_headers = {} - extends_queries = {} - if not DaraCore.is_null(runtime.extends_parameters): - extends_parameters = runtime.extends_parameters - if not DaraCore.is_null(extends_parameters.headers): - extends_headers = extends_parameters.headers - if not DaraCore.is_null(extends_parameters.queries): - extends_queries = extends_parameters.queries - _request.query = DaraCore.merge({}, global_queries, extends_queries, request.query) - # endpoint is setted in product client - _request.headers = DaraCore.merge({ - 'host': self._endpoint, - 'x-acs-version': params.version, - 'x-acs-action': params.action, - 'user-agent': Utils.get_user_agent(self._user_agent), - 'x-acs-date': Utils.get_timestamp(), - 'x-acs-signature-nonce': Utils.get_nonce(), - 'accept': 'application/json', - }, 
global_headers, extends_headers, request.headers) - if params.style == 'RPC': - headers = self.get_rpc_headers() - if not DaraCore.is_null(headers): - _request.headers = DaraCore.merge({}, _request.headers, headers) - signature_algorithm = self._signature_algorithm or 'ACS3-HMAC-SHA256' - hashed_request_payload = Utils.hash(DaraBytes.from_('', 'utf-8'), signature_algorithm) - if not DaraCore.is_null(request.stream): - tmp = await DaraStream.read_as_bytes_async(request.stream) - hashed_request_payload = Utils.hash(tmp, signature_algorithm) - _request.body = tmp - _request.headers["content-type"] = 'application/octet-stream' - else: - if not DaraCore.is_null(request.body): - if params.req_body_type == 'byte': - byte_obj = bytes(request.body) - hashed_request_payload = Utils.hash(byte_obj, signature_algorithm) - _request.body = byte_obj - elif params.req_body_type == 'json': - json_obj = DaraCore.to_json_string(request.body) - hashed_request_payload = Utils.hash(json_obj.encode('utf-8'), signature_algorithm) - _request.body = json_obj - _request.headers["content-type"] = 'application/json; charset=utf-8' - else: - m = request.body - form_obj = Utils.to_form(m) - hashed_request_payload = Utils.hash(form_obj.encode('utf-8'), signature_algorithm) - _request.body = form_obj - _request.headers["content-type"] = 'application/x-www-form-urlencoded' - - - _request.headers["x-acs-content-sha256"] = hashed_request_payload.hex() - if params.auth_type != 'Anonymous': - if DaraCore.is_null(self._credential): - raise main_exceptions.ClientException( - code = f'InvalidCredentials', - message = f'Please set up the credentials correctly. If you are setting them through environment variables, please ensure that ALIBABA_CLOUD_ACCESS_KEY_ID and ALIBABA_CLOUD_ACCESS_KEY_SECRET are set correctly. See https://help.aliyun.com/zh/sdk/developer-reference/configure-the-alibaba-cloud-accesskey-environment-variable-on-linux-macos-and-windows-systems for more details.' 
- ) - credential_model = await self._credential.get_credential_async() - if not DaraCore.is_null(credential_model.provider_name): - _request.headers["x-acs-credentials-provider"] = credential_model.provider_name - auth_type = credential_model.type - if auth_type == 'bearer': - bearer_token = credential_model.bearer_token - _request.headers["x-acs-bearer-token"] = bearer_token - if params.style == 'RPC': - _request.query["SignatureType"] = 'BEARERTOKEN' - else: - _request.headers["x-acs-signature-type"] = 'BEARERTOKEN' - - elif auth_type == 'id_token': - id_token = credential_model.security_token - _request.headers["x-acs-zero-trust-idtoken"] = id_token - else: - access_key_id = credential_model.access_key_id - access_key_secret = credential_model.access_key_secret - security_token = credential_model.security_token - if not DaraCore.is_null(security_token) and security_token != '': - _request.headers["x-acs-accesskey-id"] = access_key_id - _request.headers["x-acs-security-token"] = security_token - _request.headers["Authorization"] = Utils.get_authorization(_request, signature_algorithm, hashed_request_payload.hex(), access_key_id, access_key_secret) - - _last_request = _request - _response = await DaraCore.async_do_action(_request, _runtime) - _last_response = _response - if (_response.status_code >= 400) and (_response.status_code < 600): - err = {} - if not DaraCore.is_null(_response.headers.get("content-type")) and _response.headers.get("content-type") == 'text/xml;charset=utf-8': - _str = await DaraStream.read_as_string_async(_response.body) - resp_map = DaraXML.parse_xml(_str, None) - err = resp_map.get("Error") - else: - _res = await DaraStream.read_as_json_async(_response.body) - err = _res - - request_id = err.get("RequestId") or err.get("requestId") - code = err.get("Code") or err.get("code") - if (f'{code}' == 'Throttling') or (f'{code}' == 'Throttling.User') or (f'{code}' == 'Throttling.Api'): - raise main_exceptions.ThrottlingException( - status_code = _response.status_code, - code = f'{code}', - message = f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {request_id}', - description = f'{err.get("Description") or err.get("description")}', - retry_after = Utils.get_throttling_time_left(_response.headers), - data = err, - request_id = f'{request_id}' - ) - elif (_response.status_code >= 400) and (_response.status_code < 500): - raise main_exceptions.ClientException( - status_code = _response.status_code, - code = f'{code}', - message = f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {request_id}', - description = f'{err.get("Description") or err.get("description")}', - data = err, - access_denied_detail = self.get_access_denied_detail(err), - request_id = f'{request_id}' - ) - else: - raise main_exceptions.ServerException( - status_code = _response.status_code, - code = f'{code}', - message = f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {request_id}', - description = f'{err.get("Description") or err.get("description")}', - data = err, - request_id = f'{request_id}' - ) - - if params.body_type == 'binary': - resp = { - 'body': _response.body, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - return resp - elif params.body_type == 'byte': - byt = await DaraStream.read_as_bytes_async(_response.body) - return { - 'body': byt, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - elif params.body_type == 'string': - 
resp_str = await DaraStream.read_as_string_async(_response.body) - return { - 'body': resp_str, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - elif params.body_type == 'json': - obj = await DaraStream.read_as_json_async(_response.body) - res = obj - return { - 'body': res, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - elif params.body_type == 'array': - arr = await DaraStream.read_as_json_async(_response.body) - return { - 'body': arr, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - else: - anything = await DaraStream.read_as_string_async(_response.body) - return { - 'body': anything, - 'headers': _response.headers, - 'statusCode': _response.status_code - } - - except Exception as e: - _context = RetryPolicyContext( - retries_attempted= _retries_attempted, - http_request = _last_request, - http_response = _last_response, - exception = e - ) - continue - raise UnretryableException(_context) - - """ - * @remarks - * Encapsulate the request and invoke the network - * - * @param action - api name - * @param version - product version - * @param protocol - http or https - * @param method - e.g. GET - * @param authType - authorization type e.g. AK - * @param bodyType - response body type e.g. String - * @param request - object of OpenApiRequest - * @param runtime - which controls some details of call api, such as retry times - * @returns the response - """ - def execute( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> dict: - _runtime = { - 'key': runtime.key or self._key, - 'cert': runtime.cert or self._cert, - 'ca': runtime.ca or self._ca, - 'readTimeout': DaraCore.to_number(runtime.read_timeout or self._read_timeout), - 'connectTimeout': DaraCore.to_number(runtime.connect_timeout or self._connect_timeout), - 'httpProxy': runtime.http_proxy or self._http_proxy, - 'httpsProxy': runtime.https_proxy or self._https_proxy, - 'noProxy': runtime.no_proxy or self._no_proxy, - 'socks5Proxy': runtime.socks_5proxy or self._socks_5proxy, - 'socks5NetWork': runtime.socks_5net_work or self._socks_5net_work, - 'maxIdleConns': DaraCore.to_number(runtime.max_idle_conns or self._max_idle_conns), - 'retryOptions': self._retry_options, - 'ignoreSSL': runtime.ignore_ssl, - 'tlsMinVersion': self._tls_min_version, - 'disableHttp2': bool(self._disable_http_2 or False), - } - _last_request = None - _last_response = None - _retries_attempted = 0 - _context = RetryPolicyContext( - retries_attempted= _retries_attempted - ) - while DaraCore.should_retry(_runtime.get('retryOptions'), _context): - if _retries_attempted > 0: - _backoff_time = DaraCore.get_backoff_time(_runtime.get('retryOptions'), _context) - if _backoff_time > 0: - DaraCore.sleep(_backoff_time) - _retries_attempted = _retries_attempted + 1 - try: - _request = DaraRequest() - # spi = new Gateway();//Gateway implements SPI,这一步在产品 SDK 中实例化 - headers = self.get_rpc_headers() - global_queries = {} - global_headers = {} - if not DaraCore.is_null(self._global_parameters): - global_params = self._global_parameters - if not DaraCore.is_null(global_params.queries): - global_queries = global_params.queries - if not DaraCore.is_null(global_params.headers): - global_headers = global_params.headers - extends_headers = {} - extends_queries = {} - if not DaraCore.is_null(runtime.extends_parameters): - extends_parameters = runtime.extends_parameters - if not DaraCore.is_null(extends_parameters.headers): - 
extends_headers = extends_parameters.headers - if not DaraCore.is_null(extends_parameters.queries): - extends_queries = extends_parameters.queries - request_context = spi_models.InterceptorContextRequest( - headers = DaraCore.merge({}, global_headers, extends_headers, request.headers, headers), - query = DaraCore.merge({}, global_queries, extends_queries, request.query), - body = request.body, - stream = request.stream, - host_map = request.host_map, - pathname = params.pathname, - product_id = self._product_id, - action = params.action, - version = params.version, - protocol = self._protocol or params.protocol, - method = self._method or params.method, - auth_type = params.auth_type, - body_type = params.body_type, - req_body_type = params.req_body_type, - style = params.style, - credential = self._credential, - signature_version = self._signature_version, - signature_algorithm = self._signature_algorithm, - user_agent = Utils.get_user_agent(self._user_agent) - ) - configuration_context = spi_models.InterceptorContextConfiguration( - region_id = self._region_id, - endpoint = request.endpoint_override or self._endpoint, - endpoint_rule = self._endpoint_rule, - endpoint_map = self._endpoint_map, - endpoint_type = self._endpoint_type, - network = self._network, - suffix = self._suffix - ) - interceptor_context = spi_models.InterceptorContext( - request = request_context, - configuration = configuration_context - ) - attribute_map = spi_models.AttributeMap() - if not DaraCore.is_null(self._attribute_map): - attribute_map = self._attribute_map - # 1. spi.modifyConfiguration(context: SPI.InterceptorContext, attributeMap: SPI.AttributeMap); - self._spi.modify_configuration(interceptor_context, attribute_map) - # 2. spi.modifyRequest(context: SPI.InterceptorContext, attributeMap: SPI.AttributeMap); - self._spi.modify_request(interceptor_context, attribute_map) - _request.protocol = interceptor_context.request.protocol - _request.method = interceptor_context.request.method - _request.pathname = interceptor_context.request.pathname - _request.query = interceptor_context.request.query - _request.body = interceptor_context.request.stream - _request.headers = interceptor_context.request.headers - _last_request = _request - _response = DaraCore.do_action(_request, _runtime) - _last_response = _response - response_context = spi_models.InterceptorContextResponse( - status_code = _response.status_code, - headers = _response.headers, - body = _response.body - ) - interceptor_context.response = response_context - # 3. 
spi.modifyResponse(context: SPI.InterceptorContext, attributeMap: SPI.AttributeMap); - self._spi.modify_response(interceptor_context, attribute_map) - return { - 'headers': interceptor_context.response.headers, - 'statusCode': interceptor_context.response.status_code, - 'body': interceptor_context.response.deserialized_body - } - except Exception as e: - _context = RetryPolicyContext( - retries_attempted= _retries_attempted, - http_request = _last_request, - http_response = _last_response, - exception = e - ) - continue - raise UnretryableException(_context) - - async def execute_async( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> dict: - _runtime = { - 'key': runtime.key or self._key, - 'cert': runtime.cert or self._cert, - 'ca': runtime.ca or self._ca, - 'readTimeout': DaraCore.to_number(runtime.read_timeout or self._read_timeout), - 'connectTimeout': DaraCore.to_number(runtime.connect_timeout or self._connect_timeout), - 'httpProxy': runtime.http_proxy or self._http_proxy, - 'httpsProxy': runtime.https_proxy or self._https_proxy, - 'noProxy': runtime.no_proxy or self._no_proxy, - 'socks5Proxy': runtime.socks_5proxy or self._socks_5proxy, - 'socks5NetWork': runtime.socks_5net_work or self._socks_5net_work, - 'maxIdleConns': DaraCore.to_number(runtime.max_idle_conns or self._max_idle_conns), - 'retryOptions': self._retry_options, - 'ignoreSSL': runtime.ignore_ssl, - 'tlsMinVersion': self._tls_min_version, - 'disableHttp2': bool(self._disable_http_2 or False), - } - _last_request = None - _last_response = None - _retries_attempted = 0 - _context = RetryPolicyContext( - retries_attempted= _retries_attempted - ) - while DaraCore.should_retry(_runtime.get('retryOptions'), _context): - if _retries_attempted > 0: - _backoff_time = DaraCore.get_backoff_time(_runtime.get('retryOptions'), _context) - if _backoff_time > 0: - DaraCore.sleep(_backoff_time) - _retries_attempted = _retries_attempted + 1 - try: - _request = DaraRequest() - # spi = new Gateway();//Gateway implements SPI,这一步在产品 SDK 中实例化 - headers = self.get_rpc_headers() - global_queries = {} - global_headers = {} - if not DaraCore.is_null(self._global_parameters): - global_params = self._global_parameters - if not DaraCore.is_null(global_params.queries): - global_queries = global_params.queries - if not DaraCore.is_null(global_params.headers): - global_headers = global_params.headers - extends_headers = {} - extends_queries = {} - if not DaraCore.is_null(runtime.extends_parameters): - extends_parameters = runtime.extends_parameters - if not DaraCore.is_null(extends_parameters.headers): - extends_headers = extends_parameters.headers - if not DaraCore.is_null(extends_parameters.queries): - extends_queries = extends_parameters.queries - request_context = spi_models.InterceptorContextRequest( - headers = DaraCore.merge({}, global_headers, extends_headers, request.headers, headers), - query = DaraCore.merge({}, global_queries, extends_queries, request.query), - body = request.body, - stream = request.stream, - host_map = request.host_map, - pathname = params.pathname, - product_id = self._product_id, - action = params.action, - version = params.version, - protocol = self._protocol or params.protocol, - method = self._method or params.method, - auth_type = params.auth_type, - body_type = params.body_type, - req_body_type = params.req_body_type, - style = params.style, - credential = self._credential, - signature_version = self._signature_version, - 
signature_algorithm = self._signature_algorithm, - user_agent = Utils.get_user_agent(self._user_agent) - ) - configuration_context = spi_models.InterceptorContextConfiguration( - region_id = self._region_id, - endpoint = request.endpoint_override or self._endpoint, - endpoint_rule = self._endpoint_rule, - endpoint_map = self._endpoint_map, - endpoint_type = self._endpoint_type, - network = self._network, - suffix = self._suffix - ) - interceptor_context = spi_models.InterceptorContext( - request = request_context, - configuration = configuration_context - ) - attribute_map = spi_models.AttributeMap() - if not DaraCore.is_null(self._attribute_map): - attribute_map = self._attribute_map - # 1. spi.modifyConfiguration(context: SPI.InterceptorContext, attributeMap: SPI.AttributeMap); - await self._spi.modify_configuration_async(interceptor_context, attribute_map) - # 2. spi.modifyRequest(context: SPI.InterceptorContext, attributeMap: SPI.AttributeMap); - await self._spi.modify_request_async(interceptor_context, attribute_map) - _request.protocol = interceptor_context.request.protocol - _request.method = interceptor_context.request.method - _request.pathname = interceptor_context.request.pathname - _request.query = interceptor_context.request.query - _request.body = interceptor_context.request.stream - _request.headers = interceptor_context.request.headers - _last_request = _request - _response = await DaraCore.async_do_action(_request, _runtime) - _last_response = _response - response_context = spi_models.InterceptorContextResponse( - status_code = _response.status_code, - headers = _response.headers, - body = _response.body - ) - interceptor_context.response = response_context - # 3. spi.modifyResponse(context: SPI.InterceptorContext, attributeMap: SPI.AttributeMap); - await self._spi.modify_response_async(interceptor_context, attribute_map) - return { - 'headers': interceptor_context.response.headers, - 'statusCode': interceptor_context.response.status_code, - 'body': interceptor_context.response.deserialized_body - } - except Exception as e: - _context = RetryPolicyContext( - retries_attempted= _retries_attempted, - http_request = _last_request, - http_response = _last_response, - exception = e - ) - continue - raise UnretryableException(_context) - - def call_sseapi( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> Generator[main_models.SSEResponse, None, None]: - _runtime = { - 'key': runtime.key or self._key, - 'cert': runtime.cert or self._cert, - 'ca': runtime.ca or self._ca, - 'readTimeout': DaraCore.to_number(runtime.read_timeout or self._read_timeout), - 'connectTimeout': DaraCore.to_number(runtime.connect_timeout or self._connect_timeout), - 'httpProxy': runtime.http_proxy or self._http_proxy, - 'httpsProxy': runtime.https_proxy or self._https_proxy, - 'noProxy': runtime.no_proxy or self._no_proxy, - 'socks5Proxy': runtime.socks_5proxy or self._socks_5proxy, - 'socks5NetWork': runtime.socks_5net_work or self._socks_5net_work, - 'maxIdleConns': DaraCore.to_number(runtime.max_idle_conns or self._max_idle_conns), - 'retryOptions': self._retry_options, - 'ignoreSSL': runtime.ignore_ssl, - 'tlsMinVersion': self._tls_min_version, - } - _last_request = None - _last_response = None - _retries_attempted = 0 - _context = RetryPolicyContext( - retries_attempted= _retries_attempted - ) - while DaraCore.should_retry(_runtime.get('retryOptions'), _context): - if _retries_attempted > 0: - _backoff_time = 
DaraCore.get_backoff_time(_runtime.get('retryOptions'), _context) - if _backoff_time > 0: - DaraCore.sleep(_backoff_time) - _retries_attempted = _retries_attempted + 1 - try: - _request = DaraRequest() - _request.protocol = self._protocol or params.protocol - _request.method = params.method - _request.pathname = params.pathname - global_queries = {} - global_headers = {} - if not DaraCore.is_null(self._global_parameters): - global_params = self._global_parameters - if not DaraCore.is_null(global_params.queries): - global_queries = global_params.queries - if not DaraCore.is_null(global_params.headers): - global_headers = global_params.headers - extends_headers = {} - extends_queries = {} - if not DaraCore.is_null(runtime.extends_parameters): - extends_parameters = runtime.extends_parameters - if not DaraCore.is_null(extends_parameters.headers): - extends_headers = extends_parameters.headers - if not DaraCore.is_null(extends_parameters.queries): - extends_queries = extends_parameters.queries - _request.query = DaraCore.merge({}, global_queries, extends_queries, request.query) - # endpoint is setted in product client - _request.headers = DaraCore.merge({ - 'host': self._endpoint, - 'x-acs-version': params.version, - 'x-acs-action': params.action, - 'user-agent': Utils.get_user_agent(self._user_agent), - 'x-acs-date': Utils.get_timestamp(), - 'x-acs-signature-nonce': Utils.get_nonce(), - 'accept': 'application/json', - }, extends_headers, global_headers, request.headers) - if params.style == 'RPC': - headers = self.get_rpc_headers() - if not DaraCore.is_null(headers): - _request.headers = DaraCore.merge({}, _request.headers, headers) - signature_algorithm = self._signature_algorithm or 'ACS3-HMAC-SHA256' - hashed_request_payload = Utils.hash(DaraBytes.from_('', 'utf-8'), signature_algorithm) - if not DaraCore.is_null(request.stream): - tmp = DaraStream.read_as_bytes(request.stream) - hashed_request_payload = Utils.hash(tmp, signature_algorithm) - _request.body = tmp - _request.headers["content-type"] = 'application/octet-stream' - else: - if not DaraCore.is_null(request.body): - if params.req_body_type == 'byte': - byte_obj = bytes(request.body) - hashed_request_payload = Utils.hash(byte_obj, signature_algorithm) - _request.body = byte_obj - elif params.req_body_type == 'json': - json_obj = DaraCore.to_json_string(request.body) - hashed_request_payload = Utils.hash(json_obj.encode('utf-8'), signature_algorithm) - _request.body = json_obj - _request.headers["content-type"] = 'application/json; charset=utf-8' - else: - m = request.body - form_obj = Utils.to_form(m) - hashed_request_payload = Utils.hash(form_obj.encode('utf-8'), signature_algorithm) - _request.body = form_obj - _request.headers["content-type"] = 'application/x-www-form-urlencoded' - - - _request.headers["x-acs-content-sha256"] = hashed_request_payload.hex() - if params.auth_type != 'Anonymous': - credential_model = self._credential.get_credential() - if not DaraCore.is_null(credential_model.provider_name): - _request.headers["x-acs-credentials-provider"] = credential_model.provider_name - auth_type = credential_model.type - if auth_type == 'bearer': - bearer_token = credential_model.bearer_token - _request.headers["x-acs-bearer-token"] = bearer_token - elif auth_type == 'id_token': - id_token = credential_model.security_token - _request.headers["x-acs-zero-trust-idtoken"] = id_token - else: - access_key_id = credential_model.access_key_id - access_key_secret = credential_model.access_key_secret - security_token = 
credential_model.security_token - if not DaraCore.is_null(security_token) and security_token != '': - _request.headers["x-acs-accesskey-id"] = access_key_id - _request.headers["x-acs-security-token"] = security_token - _request.headers["Authorization"] = Utils.get_authorization(_request, signature_algorithm, hashed_request_payload.hex(), access_key_id, access_key_secret) - - _last_request = _request - _response = DaraCore.do_sse_action(_request, _runtime) - _last_response = _response - if (_response.status_code >= 400) and (_response.status_code < 600): - err = {} - if not DaraCore.is_null(_response.headers.get("content-type")) and _response.headers.get("content-type") == 'text/xml;charset=utf-8': - _str = DaraStream.read_as_string(_response.body) - resp_map = DaraXML.parse_xml(_str, None) - err = resp_map.get("Error") - else: - _res = DaraStream.read_as_json(_response.body) - err = _res - - err["statusCode"] = _response.status_code - raise DaraException({ - 'code': f'{err.get("Code") or err.get("code")}', - 'message': f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {err.get("RequestId") or err.get("requestId")}', - 'data': err, - 'description': f'{err.get("Description") or err.get("description")}', - 'accessDeniedDetail': err.get("AccessDeniedDetail") or err.get("accessDeniedDetail") - }) - events = DaraStream.read_as_sse(_response.body) - for event in events: - yield main_models.SSEResponse( - status_code = _response.status_code, - headers = _response.headers, - event = event - ) - return - except Exception as e: - _context = RetryPolicyContext( - retries_attempted= _retries_attempted, - http_request = _last_request, - http_response = _last_response, - exception = e - ) - continue - raise UnretryableException(_context) - - async def call_sseapi_async( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> AsyncGenerator[main_models.SSEResponse, None, None]: - _runtime = { - 'key': runtime.key or self._key, - 'cert': runtime.cert or self._cert, - 'ca': runtime.ca or self._ca, - 'readTimeout': DaraCore.to_number(runtime.read_timeout or self._read_timeout), - 'connectTimeout': DaraCore.to_number(runtime.connect_timeout or self._connect_timeout), - 'httpProxy': runtime.http_proxy or self._http_proxy, - 'httpsProxy': runtime.https_proxy or self._https_proxy, - 'noProxy': runtime.no_proxy or self._no_proxy, - 'socks5Proxy': runtime.socks_5proxy or self._socks_5proxy, - 'socks5NetWork': runtime.socks_5net_work or self._socks_5net_work, - 'maxIdleConns': DaraCore.to_number(runtime.max_idle_conns or self._max_idle_conns), - 'retryOptions': self._retry_options, - 'ignoreSSL': runtime.ignore_ssl, - 'tlsMinVersion': self._tls_min_version, - } - _last_request = None - _last_response = None - _retries_attempted = 0 - _context = RetryPolicyContext( - retries_attempted= _retries_attempted - ) - while DaraCore.should_retry(_runtime.get('retryOptions'), _context): - if _retries_attempted > 0: - _backoff_time = DaraCore.get_backoff_time(_runtime.get('retryOptions'), _context) - if _backoff_time > 0: - DaraCore.sleep(_backoff_time) - _retries_attempted = _retries_attempted + 1 - try: - _request = DaraRequest() - _request.protocol = self._protocol or params.protocol - _request.method = params.method - _request.pathname = params.pathname - global_queries = {} - global_headers = {} - if not DaraCore.is_null(self._global_parameters): - global_params = self._global_parameters - if not 
DaraCore.is_null(global_params.queries): - global_queries = global_params.queries - if not DaraCore.is_null(global_params.headers): - global_headers = global_params.headers - extends_headers = {} - extends_queries = {} - if not DaraCore.is_null(runtime.extends_parameters): - extends_parameters = runtime.extends_parameters - if not DaraCore.is_null(extends_parameters.headers): - extends_headers = extends_parameters.headers - if not DaraCore.is_null(extends_parameters.queries): - extends_queries = extends_parameters.queries - _request.query = DaraCore.merge({}, global_queries, extends_queries, request.query) - # endpoint is setted in product client - _request.headers = DaraCore.merge({ - 'host': self._endpoint, - 'x-acs-version': params.version, - 'x-acs-action': params.action, - 'user-agent': Utils.get_user_agent(self._user_agent), - 'x-acs-date': Utils.get_timestamp(), - 'x-acs-signature-nonce': Utils.get_nonce(), - 'accept': 'application/json', - }, extends_headers, global_headers, request.headers) - if params.style == 'RPC': - headers = self.get_rpc_headers() - if not DaraCore.is_null(headers): - _request.headers = DaraCore.merge({}, _request.headers, headers) - signature_algorithm = self._signature_algorithm or 'ACS3-HMAC-SHA256' - hashed_request_payload = Utils.hash(DaraBytes.from_('', 'utf-8'), signature_algorithm) - if not DaraCore.is_null(request.stream): - tmp = await DaraStream.read_as_bytes_async(request.stream) - hashed_request_payload = Utils.hash(tmp, signature_algorithm) - _request.body = tmp - _request.headers["content-type"] = 'application/octet-stream' - else: - if not DaraCore.is_null(request.body): - if params.req_body_type == 'byte': - byte_obj = bytes(request.body) - hashed_request_payload = Utils.hash(byte_obj, signature_algorithm) - _request.body = byte_obj - elif params.req_body_type == 'json': - json_obj = DaraCore.to_json_string(request.body) - hashed_request_payload = Utils.hash(json_obj.encode('utf-8'), signature_algorithm) - _request.body = json_obj - _request.headers["content-type"] = 'application/json; charset=utf-8' - else: - m = request.body - form_obj = Utils.to_form(m) - hashed_request_payload = Utils.hash(form_obj.encode('utf-8'), signature_algorithm) - _request.body = form_obj - _request.headers["content-type"] = 'application/x-www-form-urlencoded' - - - _request.headers["x-acs-content-sha256"] = hashed_request_payload.hex() - if params.auth_type != 'Anonymous': - credential_model = await self._credential.get_credential_async() - if not DaraCore.is_null(credential_model.provider_name): - _request.headers["x-acs-credentials-provider"] = credential_model.provider_name - auth_type = credential_model.type - if auth_type == 'bearer': - bearer_token = credential_model.bearer_token - _request.headers["x-acs-bearer-token"] = bearer_token - elif auth_type == 'id_token': - id_token = credential_model.security_token - _request.headers["x-acs-zero-trust-idtoken"] = id_token - else: - access_key_id = credential_model.access_key_id - access_key_secret = credential_model.access_key_secret - security_token = credential_model.security_token - if not DaraCore.is_null(security_token) and security_token != '': - _request.headers["x-acs-accesskey-id"] = access_key_id - _request.headers["x-acs-security-token"] = security_token - _request.headers["Authorization"] = Utils.get_authorization(_request, signature_algorithm, hashed_request_payload.hex(), access_key_id, access_key_secret) - - _last_request = _request - _response = await DaraCore.async_do_sse_action(_request, 
_runtime) - _last_response = _response - if (_response.status_code >= 400) and (_response.status_code < 600): - err = {} - if not DaraCore.is_null(_response.headers.get("content-type")) and _response.headers.get("content-type") == 'text/xml;charset=utf-8': - _str = await DaraStream.read_as_string_async(_response.body) - resp_map = DaraXML.parse_xml(_str, None) - err = resp_map.get("Error") - else: - _res = await DaraStream.read_as_json_async(_response.body) - err = _res - - err["statusCode"] = _response.status_code - raise DaraException({ - 'code': f'{err.get("Code") or err.get("code")}', - 'message': f'code: {_response.status_code}, {err.get("Message") or err.get("message")} request id: {err.get("RequestId") or err.get("requestId")}', - 'data': err, - 'description': f'{err.get("Description") or err.get("description")}', - 'accessDeniedDetail': err.get("AccessDeniedDetail") or err.get("accessDeniedDetail") - }) - events = DaraStream.read_as_sse_async(_response.body) - async for event in events: - yield main_models.SSEResponse( - status_code = _response.status_code, - headers = _response.headers, - event = event - ) - return - except Exception as e: - _context = RetryPolicyContext( - retries_attempted= _retries_attempted, - http_request = _last_request, - http_response = _last_response, - exception = e - ) - continue - raise UnretryableException(_context) - - def call_api( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> dict: - if DaraCore.is_null(params): - raise main_exceptions.ClientException( - code = 'ParameterMissing', - message = '\'params\' can not be unset' - ) - if DaraCore.is_null(self._signature_version) or self._signature_version != 'v4': - if DaraCore.is_null(self._signature_algorithm) or self._signature_algorithm != 'v2': - return self.do_request(params, request, runtime) - elif (params.style == 'ROA') and (params.req_body_type == 'json'): - return self.do_roarequest(params.action, params.version, params.protocol, params.method, params.auth_type, params.pathname, params.body_type, request, runtime) - elif params.style == 'ROA': - return self.do_roarequest_with_form(params.action, params.version, params.protocol, params.method, params.auth_type, params.pathname, params.body_type, request, runtime) - else: - return self.do_rpcrequest(params.action, params.version, params.protocol, params.method, params.auth_type, params.body_type, request, runtime) - - else: - return self.execute(params, request, runtime) - - - async def call_api_async( - self, - params: open_api_util_models.Params, - request: open_api_util_models.OpenApiRequest, - runtime: RuntimeOptions, - ) -> dict: - if DaraCore.is_null(params): - raise main_exceptions.ClientException( - code = 'ParameterMissing', - message = '\'params\' can not be unset' - ) - if DaraCore.is_null(self._signature_version) or self._signature_version != 'v4': - if DaraCore.is_null(self._signature_algorithm) or self._signature_algorithm != 'v2': - return await self.do_request_async(params, request, runtime) - elif (params.style == 'ROA') and (params.req_body_type == 'json'): - return await self.do_roarequest_async(params.action, params.version, params.protocol, params.method, params.auth_type, params.pathname, params.body_type, request, runtime) - elif params.style == 'ROA': - return await self.do_roarequest_with_form_async(params.action, params.version, params.protocol, params.method, params.auth_type, params.pathname, params.body_type, request, runtime) - else: 
- return await self.do_rpcrequest_async(params.action, params.version, params.protocol, params.method, params.auth_type, params.body_type, request, runtime) - - else: - return await self.execute_async(params, request, runtime) - - - def get_access_key_id(self) -> str: - if DaraCore.is_null(self._credential): - return '' - access_key_id = self._credential.get_access_key_id() - return access_key_id - - async def get_access_key_id_async(self) -> str: - if DaraCore.is_null(self._credential): - return '' - access_key_id = await self._credential.get_access_key_id_async() - return access_key_id - - def get_access_key_secret(self) -> str: - if DaraCore.is_null(self._credential): - return '' - secret = self._credential.get_access_key_secret() - return secret - - async def get_access_key_secret_async(self) -> str: - if DaraCore.is_null(self._credential): - return '' - secret = await self._credential.get_access_key_secret_async() - return secret - - def get_security_token(self) -> str: - if DaraCore.is_null(self._credential): - return '' - token = self._credential.get_security_token() - return token - - async def get_security_token_async(self) -> str: - if DaraCore.is_null(self._credential): - return '' - token = await self._credential.get_security_token_async() - return token - - def get_bearer_token(self) -> str: - if DaraCore.is_null(self._credential): - return '' - token = self._credential.get_bearer_token() - return token - - async def get_bearer_token_async(self) -> str: - if DaraCore.is_null(self._credential): - return '' - token = self._credential.get_bearer_token() - return token - - def get_type(self) -> str: - if DaraCore.is_null(self._credential): - return '' - auth_type = self._credential.get_type() - return auth_type - - async def get_type_async(self) -> str: - if DaraCore.is_null(self._credential): - return '' - auth_type = self._credential.get_type() - return auth_type - - def check_config( - self, - config: open_api_util_models.Config, - ) -> None: - if DaraCore.is_null(self._endpoint_rule) and DaraCore.is_null(config.endpoint): - raise main_exceptions.ClientException( - code = 'ParameterMissing', - message = '\'config.endpoint\' can not be empty' - ) - - def set_gateway_client( - self, - spi: SPIClient, - ) -> None: - self._spi = spi - - def set_rpc_headers( - self, - headers: Dict[str, str], - ) -> None: - self._headers = headers - - def get_rpc_headers(self) -> Dict[str, str]: - headers = self._headers - self._headers = None - return headers - - def default_any( - input_value: Any, - default_value: Any, - ) -> Any: - if DaraCore.is_null(input_value): - return default_value - return input_value - - def get_access_denied_detail( - self, - err: Dict[str, Any], - ) -> Dict[str, Any]: - access_denied_detail = None - if not DaraCore.is_null(err.get("AccessDeniedDetail")): - detail_1 = err.get("AccessDeniedDetail") - access_denied_detail = detail_1 - elif not DaraCore.is_null(err.get("accessDeniedDetail")): - detail_2 = err.get("accessDeniedDetail") - access_denied_detail = detail_2 - return access_denied_detail diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__init__.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__init__.py deleted file mode 100644 index 91f50c0..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations - -from ._alibaba_cloud import AlibabaCloudException -from ._client import ClientException -from ._server import ServerException -from ._throttling import ThrottlingException - -__all__ = [ - AlibabaCloudException, - ClientException, - ServerException, - ThrottlingException -] diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c9c2092..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_alibaba_cloud.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_alibaba_cloud.cpython-312.pyc deleted file mode 100644 index 0dd45fe..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_alibaba_cloud.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_client.cpython-312.pyc deleted file mode 100644 index 36240db..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_server.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_server.cpython-312.pyc deleted file mode 100644 index 401768b..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_server.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_throttling.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_throttling.cpython-312.pyc deleted file mode 100644 index cf44ab2..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/__pycache__/_throttling.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_alibaba_cloud.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_alibaba_cloud.py deleted file mode 100644 index e4eae82..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_alibaba_cloud.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations - -from typing import Dict, Any - -from darabonba.exceptions import ResponseException - -class AlibabaCloudException(ResponseException): - def __init__( - self, *, - retry_after: int = None, - data: Dict[str, Any] = None, - access_denied_detail: Dict[str, Any] = None, - stack: str = None, - status_code: int = None, - code: str = None, - message: str = None, - description: str = None, - request_id: str = None, - ): - super().__init__( - status_code = status_code, - retry_after = retry_after, - description = description, - data = data, - access_denied_detail = access_denied_detail, - message = message, - code = code, - stack = stack, - ) - self.name = 'AlibabaCloudException' - self.status_code = status_code - self.code = code - self.message = message - self.description = description - self.request_id = request_id - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_client.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_client.py deleted file mode 100644 index ef72987..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_client.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from __future__ import annotations - -from typing import Dict, Any - -from alibabacloud_tea_openapi import exceptions as main_exceptions - -class ClientException(main_exceptions.AlibabaCloudException): - def __init__( - self, *, - status_code: int = None, - code: str = None, - message: str = None, - description: str = None, - request_id: str = None, - retry_after: int = None, - data: Dict[str, Any] = None, - stack: str = None, - access_denied_detail: Dict[str, Any] = None, - ): - super().__init__( - status_code = status_code, - code = code, - message = message, - description = description, - request_id = request_id, - retry_after = retry_after, - data = data, - access_denied_detail = access_denied_detail, - stack = stack, - ) - self.name = 'ClientException' - self.access_denied_detail = access_denied_detail - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_server.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_server.py deleted file mode 100644 index b5ab80a..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_server.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations - -from typing import Dict, Any - -from alibabacloud_tea_openapi import exceptions as main_exceptions - -class ServerException(main_exceptions.AlibabaCloudException): - def __init__( - self, *, - status_code: int = None, - code: str = None, - message: str = None, - description: str = None, - request_id: str = None, - retry_after: int = None, - data: Dict[str, Any] = None, - access_denied_detail: Dict[str, Any] = None, - stack: str = None, - ): - super().__init__( - status_code = status_code, - code = code, - message = message, - description = description, - request_id = request_id, - retry_after = retry_after, - data = data, - access_denied_detail = access_denied_detail, - stack = stack, - ) - self.name = 'ServerException' - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_throttling.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_throttling.py deleted file mode 100644 index 8e5cc9e..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/exceptions/_throttling.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from __future__ import annotations - -from typing import Dict, Any - -from alibabacloud_tea_openapi import exceptions as main_exceptions - -class ThrottlingException(main_exceptions.AlibabaCloudException): - def __init__( - self, *, - status_code: int = None, - code: str = None, - message: str = None, - description: str = None, - request_id: str = None, - data: Dict[str, Any] = None, - access_denied_detail: Dict[str, Any] = None, - stack: str = None, - retry_after: int = None, - ): - super().__init__( - status_code = status_code, - code = code, - message = message, - description = description, - request_id = request_id, - retry_after = retry_after, - data = data, - access_denied_detail = access_denied_detail, - stack = stack, - ) - self.name = 'ThrottlingException' - self.retry_after = retry_after - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__init__.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__init__.py deleted file mode 100644 index e2b32e7..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations - -from ..utils_models import Params, Config, GlobalParameters, OpenApiRequest -from ._sseresponse import SSEResponse - -__all__ = [ - SSEResponse, - Params, - Config, - GlobalParameters, - OpenApiRequest -] diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 2e4544a..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__pycache__/_sseresponse.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__pycache__/_sseresponse.cpython-312.pyc deleted file mode 100644 index 3b068f6..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/__pycache__/_sseresponse.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/_sseresponse.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/models/_sseresponse.py deleted file mode 100644 index e90306b..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/models/_sseresponse.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from __future__ import annotations - -from typing import Dict - -from darabonba.event import Event as SSEEvent -from darabonba.model import DaraModel - - - -class SSEResponse(DaraModel): - def __init__( - self, *, - headers: Dict[str, str] = None, - status_code: int = None, - event: SSEEvent = None, - ): - self.headers = headers - # HTTP Status Code - self.status_code = status_code - self.event = event - - def validate(self): - self.validate_required(self.headers, 'headers') - self.validate_required(self.status_code, 'status_code') - self.validate_required(self.event, 'event') - - def to_map(self): - result = dict() - _map = super().to_map() - if _map is not None: - result = _map - if self.headers is not None: - result['headers'] = self.headers - - if self.status_code is not None: - result['statusCode'] = self.status_code - - if self.event is not None: - result['event'] = self.event.to_map() - - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - - if m.get('event') is not None: - temp_model = SSEEvent() - self.event = temp_model.from_map(m.get('event')) - - return self - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/sm3.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/sm3.py deleted file mode 100644 index d38777b..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/sm3.py +++ /dev/null @@ -1,189 +0,0 @@ -import binascii -import copy - -IV = "7380166f 4914b2b9 172442d7 da8a0600 a96f30bc 163138aa e38dee4d b0fb0e4e" -IV = int(IV.replace(" ", ""), 16) -a = [] -for i in range(0, 8): - a.append(0) - a[i] = (IV >> ((7 - i) * 32)) & 0xFFFFFFFF -IV = a - -T_j = [] -for i in range(0, 16): - T_j.append(0) - T_j[i] = 0x79cc4519 -for i in range(16, 64): - T_j.append(0) - T_j[i] = 0x7a879d8a - - -def rotate_left(a, k): - k = k % 32 - return ((a << k) & 0xFFFFFFFF) | ((a & 0xFFFFFFFF) >> (32 - k)) - - -def FF_j(X, Y, Z, j): - if 0 <= j < 16: - ret = X ^ Y ^ Z - elif 16 <= j < 64: - ret = (X & Y) | (X & Z) | (Y & Z) - return ret - - 
-def GG_j(X, Y, Z, j): - if 0 <= j < 16: - ret = X ^ Y ^ Z - elif 16 <= j < 64: - # ret = (X | Y) & ((2 ** 32 - 1 - X) | Z) - ret = (X & Y) | ((~ X) & Z) - return ret - - -def P_0(X): - return X ^ (rotate_left(X, 9)) ^ (rotate_left(X, 17)) - - -def P_1(X): - return X ^ (rotate_left(X, 15)) ^ (rotate_left(X, 23)) - - -def CF(V_i, B_i): - W = [] - for i in range(16): - weight = 0x1000000 - data = 0 - for k in range(i * 4, (i + 1) * 4): - data = data + B_i[k] * weight - weight = int(weight / 0x100) - W.append(data) - - for j in range(16, 68): - W.append(0) - W[j] = P_1(W[j - 16] ^ W[j - 9] ^ (rotate_left(W[j - 3], 15))) ^ (rotate_left(W[j - 13], 7)) ^ W[j - 6] - str1 = "%08x" % W[j] - W_1 = [] - for j in range(0, 64): - W_1.append(0) - W_1[j] = W[j] ^ W[j + 4] - str1 = "%08x" % W_1[j] - - A, B, C, D, E, F, G, H = V_i - """ - print "00", - out_hex([A, B, C, D, E, F, G, H]) - """ - for j in range(0, 64): - SS1 = rotate_left(((rotate_left(A, 12)) + E + (rotate_left(T_j[j], j))) & 0xFFFFFFFF, 7) - SS2 = SS1 ^ (rotate_left(A, 12)) - TT1 = (FF_j(A, B, C, j) + D + SS2 + W_1[j]) & 0xFFFFFFFF - TT2 = (GG_j(E, F, G, j) + H + SS1 + W[j]) & 0xFFFFFFFF - D = C - C = rotate_left(B, 9) - B = A - A = TT1 - H = G - G = rotate_left(F, 19) - F = E - E = P_0(TT2) - - A = A & 0xFFFFFFFF - B = B & 0xFFFFFFFF - C = C & 0xFFFFFFFF - D = D & 0xFFFFFFFF - E = E & 0xFFFFFFFF - F = F & 0xFFFFFFFF - G = G & 0xFFFFFFFF - H = H & 0xFFFFFFFF - """ - str1 = "%02d" % j - if str1[0] == "0": - str1 = ' ' + str1[1:] - out_hex([A, B, C, D, E, F, G, H]) - """ - - V_i_1 = [ - A ^ V_i[0], - B ^ V_i[1], - C ^ V_i[2], - D ^ V_i[3], - E ^ V_i[4], - F ^ V_i[5], - G ^ V_i[6], - H ^ V_i[7] - ] - return V_i_1 - - -def hash_msg(msg): - len1 = len(msg) - reserve1 = len1 % 64 - msg.append(0x80) - reserve1 = reserve1 + 1 - # 56-64, add 64 byte - range_end = 56 - if reserve1 > range_end: - range_end = range_end + 64 - - for i in range(reserve1, range_end): - msg.append(0x00) - - bit_length = (len1) * 8 - bit_length_str = [bit_length % 0x100] - for i in range(7): - bit_length = int(bit_length / 0x100) - bit_length_str.append(bit_length % 0x100) - for i in range(8): - msg.append(bit_length_str[7 - i]) - - group_count = round(len(msg) / 64) - - b = [] - for i in range(0, group_count): - b.append(msg[i * 64:(i + 1) * 64]) - - v = [IV] - for i in range(0, group_count): - v.append(CF(v[i], b[i])) - - y = v[i + 1] - result = "" - for i in y: - result = '%s%08x' % (result, i) - return result - - -def to_byte_array(msg): # 转换成byte数组 - ml = len(msg) - msg_byte = [] - for i in range(ml): - msg_byte.append(msg[i]) - return msg_byte - - -def hash_sm3(msg): - msg_byte = to_byte_array(msg) - return binascii.a2b_hex(hash_msg(msg_byte)) - - -class Sm3: - block_size = 64 - - def __init__(self, msg_byte=b''): - self.byte_array = to_byte_array(msg_byte) - - def update(self, data): - self.byte_array.extend(to_byte_array(data)) - - def digest(self): - return binascii.a2b_hex(hash_msg(self.byte_array)) - - def hexdigest(self): - return hash_msg(self.byte_array) - - @property - def digest_size(self): - return len(self.byte_array) - - def copy(self): - return copy.deepcopy(self) \ No newline at end of file diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils.py deleted file mode 100644 index 68d4633..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils.py +++ /dev/null @@ -1,630 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations -from darabonba.model import DaraModel -from typing import Dict, Any, List -import binascii -import datetime -import hashlib -import hmac -import base64 -import copy -import platform -import time -import Tea -import threading -import random -import hashlib -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives.serialization import load_pem_private_key -from urllib.parse import quote_plus, quote -from darabonba.utils.stream import STREAM_CLASS -from darabonba.utils.form import Form -from darabonba.core import DaraCore -from datetime import datetime -from typing import Any, Dict, List -from .sm3 import hash_sm3, Sm3 - -_process_start_time = int(time.time() * 1000) -_seqId = 0 - -def to_str(val): - if val is None: - return val - - if isinstance(val, bytes): - return str(val, encoding='utf-8') - else: - return str(val) - - -def rsa_sign(plaintext, secret): - if not secret.startswith(b'-----BEGIN RSA PRIVATE KEY-----'): - secret = b'-----BEGIN RSA PRIVATE KEY-----\n%s' % secret - if not secret.endswith(b'-----END RSA PRIVATE KEY-----'): - secret = b'%s\n-----END RSA PRIVATE KEY-----' % secret - - key = load_pem_private_key(secret, password=None, backend=default_backend()) - return key.sign(plaintext, padding.PKCS1v15(), hashes.SHA256()) - - -def signature_method(secret, source, sign_type): - source = source.encode('utf-8') - secret = secret.encode('utf-8') - if sign_type == 'ACS3-HMAC-SHA256': - return hmac.new(secret, source, hashlib.sha256).digest() - elif sign_type == 'ACS3-HMAC-SM3': - return hmac.new(secret, source, Sm3).digest() - elif sign_type == 'ACS3-RSA-SHA256': - return rsa_sign(source, secret) - - -def get_canonical_query_string(query): - if query is None or len(query) <= 0: - return '' - canon_keys = [] - for k, v in query.items(): - if v is not None: - canon_keys.append(k) - - canon_keys.sort() - query_string = '' - for key in canon_keys: - value = quote(query[key], safe='~', encoding='utf-8') - if value is None: - s = f'{key}&' - else: - s = f'{key}={value}&' - query_string += s - return query_string[:-1] - - -def get_canonicalized_headers(headers): - canon_keys = [] - tmp_headers = {} - for k, v in headers.items(): - if v is not None: - if k.lower() not in canon_keys: - canon_keys.append(k.lower()) - tmp_headers[k.lower()] = [to_str(v).strip()] - else: - tmp_headers[k.lower()].append(to_str(v).strip()) - - canon_keys.sort() - canonical_headers = '' - for key in canon_keys: - header_entry = ','.join(sorted(tmp_headers[key])) - s = f'{key}:{header_entry}\n' - canonical_headers += s - return canonical_headers, ';'.join(canon_keys) - - -class Utils(object): - """ - This is for OpenApi Util - """ - - @staticmethod - def convert(body, content): - """ - Convert all params of body other than type of readable into content - - @param body: source Model - - @param content: target Model - - @return: void - """ - body_map = Utils._except_stream(body.to_map()) - content.from_map(body_map) - - @staticmethod - def _except_stream(val): - if isinstance(val, dict): - result = {} - for k, v in val.items(): - result[k] = Utils._except_stream(v) - return result - elif isinstance(val, list): - result = [] - for i in val: - if i is not None: - item = Utils._except_stream(i) - if item is not None: - result.append(item) - else: - result.append(Utils._except_stream(i)) - return result - elif isinstance(val, 
STREAM_CLASS): - return None - return val - - @staticmethod - def _get_canonicalized_headers(headers): - canon_keys = [] - for k in headers: - if k.startswith('x-acs-'): - canon_keys.append(k) - canon_keys = sorted(canon_keys) - canon_header = '' - for k in canon_keys: - canon_header += '%s:%s\n' % (k, headers[k]) - return canon_header - - @staticmethod - def _get_canonicalized_resource(pathname, query): - if len(query) <= 0: - return pathname - resource = '%s?' % pathname - query_list = sorted(list(query)) - for key in query_list: - if query[key] is not None: - if query[key] == '': - s = '%s&' % key - else: - s = '%s=%s&' % (key, query[key]) - resource += s - return resource[:-1] - - @staticmethod - def get_string_to_sign(request): - """ - Get the string to be signed according to request - - @param request: which contains signed messages - - @return: the signed string - """ - method, pathname, headers, query = request.method, request.pathname, request.headers, request.query - - accept = '' if headers.get('accept') is None else headers.get('accept') - content_md5 = '' if headers.get('content-md5') is None else headers.get('content-md5') - content_type = '' if headers.get('content-type') is None else headers.get('content-type') - date = '' if headers.get('date') is None else headers.get('date') - - header = '%s\n%s\n%s\n%s\n%s\n' % (method, accept, content_md5, content_type, date) - canon_headers = Utils._get_canonicalized_headers(headers) - canon_resource = Utils._get_canonicalized_resource(pathname, query) - sign_str = header + canon_headers + canon_resource - return sign_str - - @staticmethod - def get_roasignature(string_to_sign, secret): - """ - Get signature according to stringToSign, secret - - @type string_to_sign: str - @param string_to_sign: the signed string - - @type secret: str - @param secret: accesskey secret - - @return: the signature - """ - hash_val = hmac.new(secret.encode('utf-8'), string_to_sign.encode('utf-8'), hashlib.sha1).digest() - signature = base64.b64encode(hash_val).decode('utf-8') - return signature - - @staticmethod - def _object_handler(key, value, out): - if value is None: - return - - if isinstance(value, dict): - for k, v in value.items(): - Utils._object_handler('%s.%s' % (key, k), v, out) - elif isinstance(value, DaraModel): - for k, v in value.to_map().items(): - Utils._object_handler('%s.%s' % (key, k), v, out) - elif isinstance(value, (list, tuple)): - for index, val in enumerate(value): - Utils._object_handler('%s.%s' % (key, index + 1), val, out) - else: - if key.startswith('.'): - key = key[1:] - if isinstance(value, bytes): - out[key] = str(value, encoding='utf-8') - elif not isinstance(value, STREAM_CLASS): - out[key] = str(value) - - @staticmethod - def anyify_map_value( - m: Dict[str, str], - ) -> Dict[str, Any]: - """ - Anyify the value of map - @return: the new anyfied map - """ - return m - @staticmethod - def to_form(filter): - """ - Parse filter into a form string - - @type filter: dict - @param filter: object - - @return: the string - """ - result = {} - if filter: - Utils._object_handler('', filter, result) - return Form.to_form_string(result) - - @staticmethod - def get_timestamp(): - """ - Get timestamp - - @return: the timestamp string - """ - return datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") - - @staticmethod - def query(filter): - """ - Parse filter into a object which's type is map[string]string - - @type filter: dict - @param filter: query param - - @return: the object - """ - out_dict = {} - if filter: - 
Utils._object_handler('', filter, out_dict) - return out_dict - - @staticmethod - def get_rpcsignature(signed_params, method, secret): - """ - Get signature according to signedParams, method and secret - - @type signed_params: dict - @param signed_params: params which need to be signed - - @type method: str - @param method: http method e.g. GET - - @type secret: str - @param secret: AccessKeySecret - - @return: the signature - """ - queries = signed_params.copy() - keys = list(queries.keys()) - keys.sort() - - canonicalized_query_string = "" - - for k in keys: - if queries[k] is not None: - canonicalized_query_string += f'&{quote(k, safe="~", encoding="utf-8")}=' \ - f'{quote(queries[k], safe="~", encoding="utf-8")}' - - string_to_sign = f'{method}&%2F&{quote_plus(canonicalized_query_string[1:], safe="~", encoding="utf-8")}' - - digest_maker = hmac.new(bytes(secret + '&', encoding="utf-8"), - bytes(string_to_sign, encoding="utf-8"), - digestmod=hashlib.sha1) - hash_bytes = digest_maker.digest() - signed_str = str(base64.b64encode(hash_bytes), encoding="utf-8") - - return signed_str - - @staticmethod - def array_to_string_with_specified_style(array, prefix, style): - """ - Parse array into a string with specified style - - @type array: any - @param array: the array - - @type prefix: str - @param prefix: the prefix string - - @param style: specified style e.g. repeatList - - @return: the string - """ - if array is None: - return '' - - if style == 'repeatList': - return Utils._flat_repeat_list({prefix: array}) - elif style == 'simple': - return ','.join(map(str, array)) - elif style == 'spaceDelimited': - return ' '.join(map(str, array)) - elif style == 'pipeDelimited': - return '|'.join(map(str, array)) - elif style == 'json': - return DaraCore.to_json_string(Utils._parse_to_dict(array)) - else: - return '' - - @staticmethod - def _flat_repeat_list(dic): - query = {} - if dic: - Utils._object_handler('', dic, query) - - l = [] - q = sorted(query) - for i in q: - k = quote_plus(i, encoding='utf-8') - v = quote_plus(query[i], encoding='utf-8') - l.append(k + '=' + v) - return '&&'.join(l) - - @staticmethod - def parse_to_map(inp): - """ - Transform input as map. 
- """ - try: - result = Utils._parse_to_dict(inp) - return copy.deepcopy(result) - except TypeError: - return - - @staticmethod - def _parse_to_dict(val): - if isinstance(val, dict): - result = {} - for k, v in val.items(): - if isinstance(v, (list, dict, DaraModel)): - result[k] = Utils._parse_to_dict(v) - else: - result[k] = v - return result - elif isinstance(val, list): - result = [] - for i in val: - if isinstance(i, (list, dict, DaraModel)): - result.append(Utils._parse_to_dict(i)) - else: - result.append(i) - return result - elif isinstance(val, DaraModel): - return val.to_map() - - @staticmethod - def get_endpoint(endpoint, server_use, endpoint_type): - """ - If endpointType is internal, use internal endpoint - If serverUse is true and endpointType is accelerate, use accelerate endpoint - Default return endpoint - @param server_use whether use accelerate endpoint - @param endpoint_type value must be internal or accelerate - @return the final endpoint - """ - if endpoint_type == "internal": - str_split = endpoint.split('.') - str_split[0] += "-internal" - endpoint = ".".join(str_split) - - if server_use and endpoint_type == "accelerate": - return "oss-accelerate.aliyuncs.com" - - return endpoint - - @staticmethod - def hash(raw, sign_type): - if sign_type == 'ACS3-HMAC-SHA256' or sign_type == 'ACS3-RSA-SHA256': - return hashlib.sha256(raw).digest() - elif sign_type == 'ACS3-HMAC-SM3': - return hash_sm3(raw) - - @staticmethod - def hex_encode(raw): - if raw: - return binascii.b2a_hex(raw).decode('utf-8') - - @staticmethod - def get_authorization(request, sign_type, payload, ak, secret): - canonical_uri = request.pathname if request.pathname else '/' - canonicalized_query = get_canonical_query_string(request.query) - canonicalized_headers, signed_headers = get_canonicalized_headers(request.headers) - - canonical_request = f'{request.method}\n' \ - f'{canonical_uri}\n' \ - f'{canonicalized_query}\n' \ - f'{canonicalized_headers}\n' \ - f'{signed_headers}\n' \ - f'{payload}' - - str_to_sign = f'{sign_type}\n{Utils.hex_encode(Utils.hash(canonical_request.encode("utf-8"), sign_type))}' - signature = Utils.hex_encode(signature_method(secret, str_to_sign, sign_type)) - auth = f'{sign_type} Credential={ak},SignedHeaders={signed_headers},Signature={signature}' - return auth - - @staticmethod - def get_encode_path(path): - return quote(path, safe='/~', encoding="utf-8") - - @staticmethod - def get_encode_param(param): - return quote(param, safe='~', encoding="utf-8") - - - @staticmethod - def get_nonce() -> str: - """ - Generate a nonce string - @return: the nonce string - """ - global _seqId - thread_id = threading.get_ident() - current_time = int(time.time() * 1000) - seq = _seqId - _seqId += 1 - randNum = random.getrandbits(64) - _process_start_time = int(time.time() * 1000) - msg = f'{_process_start_time}-{thread_id}-{current_time}-{seq}-{randNum}' - md5 = hashlib.md5() - md5.update(msg.encode('utf-8')) - return md5.hexdigest() - - @staticmethod - def get_date_utcstring() -> str: - """ - Get an UTC format string by current date, e.g. 
'Thu, 06 Feb 2020 07:32:54 GMT' - @return: the UTC format string - """ - return datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') - - @staticmethod - def stringify_map_value( - m: Dict[str, Any], - ) -> Dict[str, str]: - """ - Stringify the value of map - @return: the new stringified map - """ - if m is None: - return {} - - dic_result = {} - for k, v in m.items(): - if v is not None: - if isinstance(v, bytes): - v = v.decode('utf-8') - else: - v = str(v) - dic_result[k] = v - return dic_result - - @staticmethod - def to_array( - input: Any, - ) -> List[Dict[str, Any]]: - """ - Transform input as array. - """ - if input is None: - return [] - - out = [] - for i in input: - if isinstance(i, DaraModel): - out.append(i.to_map()) - else: - out.append(i) - return out - - @staticmethod - def __get_default_agent(): - return f'AlibabaCloud ({platform.system()}; {platform.machine()}) ' \ - f'Python/{platform.python_version()} Core/{Tea.__version__} TeaDSL/2' - - @staticmethod - def get_user_agent( - user_agent: str, - ) -> str: - """ - Get user agent, if it userAgent is not null, splice it with defaultUserAgent and return, otherwise return defaultUserAgent - @return: the string value - """ - if user_agent: - return f'{Utils.__get_default_agent()} {user_agent}' - return Utils.__get_default_agent() - - @staticmethod - def get_endpoint_rules(product, region_id, endpoint_type, network, suffix=None): - product = product or "" - network = network or "" - if endpoint_type == "regional": - if region_id is None or region_id == "": - raise RuntimeError( - "RegionId is empty, please set a valid RegionId") - result = "<product><network>.<region_id>.aliyuncs.com".replace( - "<region_id>", region_id) - else: - result = "<product><network>.aliyuncs.com" - - result = result.replace("<product>", product.lower()) - if network == "" or network == "public": - result = result.replace("<network>", "") - else: - result = result.replace("<network>", "-"+network) - return result - - - @staticmethod - def get_throttling_time_left(headers: Dict[str, str]) -> int: - """ - Get throttling time left based on the response headers - - @param headers: The response headers - @return: Remaining time in milliseconds before the throttle is lifted - """ - rate_limit_user_api = headers.get("x-ratelimit-user-api") - rate_limit_user = headers.get("x-ratelimit-user") - - time_left_user_api = Utils._get_time_left(rate_limit_user_api) - time_left_user = Utils._get_time_left(rate_limit_user) - - return max(time_left_user_api, time_left_user) - - @staticmethod - def _get_time_left(rate_limit: str) -> int: - """ - Extract time left from rate limit string - - @param rate_limit: Rate limit string from headers - @return: Time left in milliseconds - """ - if rate_limit: - pairs = rate_limit.split(',') - for pair in pairs: - key, value = pair.split(':') - if key.strip() == 'TimeLeft': - return int(value.strip()) - return 0 - - @staticmethod - def flat_map(params: Dict[str, Any], prefix: str = '') -> Dict[str, str]: - """ - Flatten the dictionary with a given prefix - - @param params: Dictionary to flatten - @param prefix: Prefix for keys in the flattened dictionary - @return: A flattened dictionary - """ - flat_result = {} - - def _flatten(current_params, current_prefix): - if isinstance(current_params, dict): - for k, v in current_params.items(): - new_key = f"{current_prefix}.{k}" if current_prefix else k - _flatten(v, new_key) - elif isinstance(current_params, list): - for index, item in enumerate(current_params): - new_key = f"{current_prefix}.{index + 1}" - _flatten(item, new_key) - else: - flat_result[current_prefix] = 
str(current_params) - - _flatten(params, prefix) - return flat_result - - @staticmethod - def map_to_flat_style(input: Any) -> Any: - """ - Convert input to a flat style - - @param input: Input to convert - @return: A flat representation of the input - """ - if isinstance(input, dict): - return Utils.flat_map(input) - elif isinstance(input, list): - flat_result = {} - for index, item in enumerate(input): - flat_result[index + 1] = str(item) - return flat_result - else: - return str(input) \ No newline at end of file diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__init__.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__init__.py deleted file mode 100644 index 41f469a..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from __future__ import annotations - - - -from ._global_parameters import GlobalParameters -from ._config import Config -from ._params import Params -from ._open_api_request import OpenApiRequest - -__all__ = [ - GlobalParameters, - Config, - Params, - OpenApiRequest -] diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index eae7df4..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_config.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_config.cpython-312.pyc deleted file mode 100644 index 0f7df48..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_config.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_global_parameters.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_global_parameters.cpython-312.pyc deleted file mode 100644 index 21f6035..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_global_parameters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_open_api_request.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_open_api_request.cpython-312.pyc deleted file mode 100644 index 30b27dd..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_open_api_request.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_params.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_params.cpython-312.pyc deleted file mode 100644 index 1077609..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/__pycache__/_params.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_config.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_config.py deleted file mode 100644 index a2e058b..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_config.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# 
This file is auto-generated, don't edit it. Thanks. -from __future__ import annotations -from darabonba.model import DaraModel -from alibabacloud_credentials.client import Client -from alibabacloud_tea_openapi import utils_models as main_models -from darabonba.policy.retry import RetryOptions - - -""" - * @remarks - * Model for initing client -""" -class Config(DaraModel): - def __init__( - self, - access_key_id: str = None, - access_key_secret: str = None, - security_token: str = None, - bearer_token: str = None, - protocol: str = None, - method: str = None, - region_id: str = None, - read_timeout: int = None, - connect_timeout: int = None, - http_proxy: str = None, - https_proxy: str = None, - credential: Client = None, - endpoint: str = None, - no_proxy: str = None, - max_idle_conns: int = None, - network: str = None, - user_agent: str = None, - suffix: str = None, - socks_5proxy: str = None, - socks_5net_work: str = None, - endpoint_type: str = None, - open_platform_endpoint: str = None, - type: str = None, - signature_version: str = None, - signature_algorithm: str = None, - global_parameters: main_models.GlobalParameters = None, - key: str = None, - cert: str = None, - ca: str = None, - disable_http_2: bool = None, - retry_options: RetryOptions = None, - tls_min_version: str = None, - ): - # accesskey id - self.access_key_id = access_key_id - # accesskey secret - self.access_key_secret = access_key_secret - # security token - self.security_token = security_token - # bearer token - self.bearer_token = bearer_token - # http protocol - self.protocol = protocol - # http method - self.method = method - # region id - self.region_id = region_id - # read timeout - self.read_timeout = read_timeout - # connect timeout - self.connect_timeout = connect_timeout - # http proxy - self.http_proxy = http_proxy - # https proxy - self.https_proxy = https_proxy - # credential - self.credential = credential - # endpoint - self.endpoint = endpoint - # proxy white list - self.no_proxy = no_proxy - # max idle conns - self.max_idle_conns = max_idle_conns - # network for endpoint - self.network = network - # user agent - self.user_agent = user_agent - # suffix for endpoint - self.suffix = suffix - # socks5 proxy - self.socks_5proxy = socks_5proxy - # socks5 network - self.socks_5net_work = socks_5net_work - # endpoint type - self.endpoint_type = endpoint_type - # OpenPlatform endpoint - self.open_platform_endpoint = open_platform_endpoint - # credential type - self.type = type - # Signature Version - self.signature_version = signature_version - # Signature Algorithm - self.signature_algorithm = signature_algorithm - # Global Parameters - self.global_parameters = global_parameters - # privite key for client certificate - self.key = key - # client certificate - self.cert = cert - # server certificate - self.ca = ca - # disable HTTP/2 - self.disable_http_2 = disable_http_2 - # retry options - self.retry_options = retry_options - # TLS Minimum Version - self.tls_min_version = tls_min_version - - def validate(self): - if self.global_parameters: - self.global_parameters.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.access_key_id is not None: - result['accessKeyId'] = self.access_key_id - if self.access_key_secret is not None: - result['accessKeySecret'] = self.access_key_secret - if self.security_token is not None: - result['securityToken'] = self.security_token - if self.bearer_token is not None: - result['bearerToken'] = 
self.bearer_token - if self.protocol is not None: - result['protocol'] = self.protocol - if self.method is not None: - result['method'] = self.method - if self.region_id is not None: - result['regionId'] = self.region_id - if self.read_timeout is not None: - result['readTimeout'] = self.read_timeout - if self.connect_timeout is not None: - result['connectTimeout'] = self.connect_timeout - if self.http_proxy is not None: - result['httpProxy'] = self.http_proxy - if self.https_proxy is not None: - result['httpsProxy'] = self.https_proxy - if self.credential is not None: - result['credential'] = self.credential - if self.endpoint is not None: - result['endpoint'] = self.endpoint - if self.no_proxy is not None: - result['noProxy'] = self.no_proxy - if self.max_idle_conns is not None: - result['maxIdleConns'] = self.max_idle_conns - if self.network is not None: - result['network'] = self.network - if self.user_agent is not None: - result['userAgent'] = self.user_agent - if self.suffix is not None: - result['suffix'] = self.suffix - if self.socks_5proxy is not None: - result['socks5Proxy'] = self.socks_5proxy - if self.socks_5net_work is not None: - result['socks5NetWork'] = self.socks_5net_work - if self.endpoint_type is not None: - result['endpointType'] = self.endpoint_type - if self.open_platform_endpoint is not None: - result['openPlatformEndpoint'] = self.open_platform_endpoint - if self.type is not None: - result['type'] = self.type - if self.signature_version is not None: - result['signatureVersion'] = self.signature_version - if self.signature_algorithm is not None: - result['signatureAlgorithm'] = self.signature_algorithm - if self.global_parameters is not None: - result['globalParameters'] = self.global_parameters.to_map() - - if self.key is not None: - result['key'] = self.key - if self.cert is not None: - result['cert'] = self.cert - if self.ca is not None: - result['ca'] = self.ca - if self.disable_http_2 is not None: - result['disableHttp2'] = self.disable_http_2 - if self.retry_options is not None: - result['retryOptions'] = self.retry_options.to_map() - - if self.tls_min_version is not None: - result['tlsMinVersion'] = self.tls_min_version - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('accessKeyId') is not None: - self.access_key_id = m.get('accessKeyId') - if m.get('accessKeySecret') is not None: - self.access_key_secret = m.get('accessKeySecret') - if m.get('securityToken') is not None: - self.security_token = m.get('securityToken') - if m.get('bearerToken') is not None: - self.bearer_token = m.get('bearerToken') - if m.get('protocol') is not None: - self.protocol = m.get('protocol') - if m.get('method') is not None: - self.method = m.get('method') - if m.get('regionId') is not None: - self.region_id = m.get('regionId') - if m.get('readTimeout') is not None: - self.read_timeout = m.get('readTimeout') - if m.get('connectTimeout') is not None: - self.connect_timeout = m.get('connectTimeout') - if m.get('httpProxy') is not None: - self.http_proxy = m.get('httpProxy') - if m.get('httpsProxy') is not None: - self.https_proxy = m.get('httpsProxy') - if m.get('credential') is not None: - self.credential = m.get('credential') - if m.get('endpoint') is not None: - self.endpoint = m.get('endpoint') - if m.get('noProxy') is not None: - self.no_proxy = m.get('noProxy') - if m.get('maxIdleConns') is not None: - self.max_idle_conns = m.get('maxIdleConns') - if m.get('network') is not None: - self.network = m.get('network') - if m.get('userAgent') is 
not None: - self.user_agent = m.get('userAgent') - if m.get('suffix') is not None: - self.suffix = m.get('suffix') - if m.get('socks5Proxy') is not None: - self.socks_5proxy = m.get('socks5Proxy') - if m.get('socks5NetWork') is not None: - self.socks_5net_work = m.get('socks5NetWork') - if m.get('endpointType') is not None: - self.endpoint_type = m.get('endpointType') - if m.get('openPlatformEndpoint') is not None: - self.open_platform_endpoint = m.get('openPlatformEndpoint') - if m.get('type') is not None: - self.type = m.get('type') - if m.get('signatureVersion') is not None: - self.signature_version = m.get('signatureVersion') - if m.get('signatureAlgorithm') is not None: - self.signature_algorithm = m.get('signatureAlgorithm') - if m.get('globalParameters') is not None: - temp_model = main_models.GlobalParameters() - self.global_parameters = temp_model.from_map(m.get('globalParameters')) - - if m.get('key') is not None: - self.key = m.get('key') - if m.get('cert') is not None: - self.cert = m.get('cert') - if m.get('ca') is not None: - self.ca = m.get('ca') - if m.get('disableHttp2') is not None: - self.disable_http_2 = m.get('disableHttp2') - if m.get('retryOptions') is not None: - temp_model = RetryOptions() - self.retry_options = temp_model.from_map(m.get('retryOptions')) - - if m.get('tlsMinVersion') is not None: - self.tls_min_version = m.get('tlsMinVersion') - return self - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_global_parameters.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_global_parameters.py deleted file mode 100644 index d9b7068..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_global_parameters.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. -from __future__ import annotations -from darabonba.model import DaraModel -from typing import Dict - - - - -class GlobalParameters(DaraModel): - def __init__( - self, - headers: Dict[str, str] = None, - queries: Dict[str, str] = None, - ): - self.headers = headers - self.queries = queries - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.queries is not None: - result['queries'] = self.queries - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('queries') is not None: - self.queries = m.get('queries') - return self - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_open_api_request.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_open_api_request.py deleted file mode 100644 index cca2da8..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_open_api_request.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations -from darabonba.model import DaraModel -from typing import Dict, Any, BinaryIO - - -class OpenApiRequest(DaraModel): - def __init__( - self, - headers: Dict[str, str] = None, - query: Dict[str, str] = None, - body: Any = None, - stream: BinaryIO = None, - host_map: Dict[str, str] = None, - endpoint_override: str = None, - ): - self.headers = headers - self.query = query - self.body = body - self.stream = stream - self.host_map = host_map - self.endpoint_override = endpoint_override - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.query is not None: - result['query'] = self.query - if self.body is not None: - result['body'] = self.body - if self.stream is not None: - result['stream'] = self.stream - if self.host_map is not None: - result['hostMap'] = self.host_map - if self.endpoint_override is not None: - result['endpointOverride'] = self.endpoint_override - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('query') is not None: - self.query = m.get('query') - if m.get('body') is not None: - self.body = m.get('body') - if m.get('stream') is not None: - self.stream = m.get('stream') - if m.get('hostMap') is not None: - self.host_map = m.get('hostMap') - if m.get('endpointOverride') is not None: - self.endpoint_override = m.get('endpointOverride') - return self - diff --git a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_params.py b/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_params.py deleted file mode 100644 index 9eb196c..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_openapi/utils_models/_params.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from __future__ import annotations -from darabonba.model import DaraModel - - -class Params(DaraModel): - def __init__( - self, - action: str = None, - version: str = None, - protocol: str = None, - pathname: str = None, - method: str = None, - auth_type: str = None, - body_type: str = None, - req_body_type: str = None, - style: str = None, - ): - self.action = action - self.version = version - self.protocol = protocol - self.pathname = pathname - self.method = method - self.auth_type = auth_type - self.body_type = body_type - self.req_body_type = req_body_type - self.style = style - - def validate(self): - self.validate_required(self.action, 'action') - self.validate_required(self.version, 'version') - self.validate_required(self.protocol, 'protocol') - self.validate_required(self.pathname, 'pathname') - self.validate_required(self.method, 'method') - self.validate_required(self.auth_type, 'auth_type') - self.validate_required(self.body_type, 'body_type') - self.validate_required(self.req_body_type, 'req_body_type') - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.action is not None: - result['action'] = self.action - if self.version is not None: - result['version'] = self.version - if self.protocol is not None: - result['protocol'] = self.protocol - if self.pathname is not None: - result['pathname'] = self.pathname - if self.method is not None: - result['method'] = self.method - if self.auth_type is not None: - result['authType'] = self.auth_type - if self.body_type is not None: - result['bodyType'] = self.body_type - if self.req_body_type is not None: - result['reqBodyType'] = self.req_body_type - if self.style is not None: - result['style'] = self.style - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('action') is not None: - self.action = m.get('action') - if m.get('version') is not None: - self.version = m.get('version') - if m.get('protocol') is not None: - self.protocol = m.get('protocol') - if m.get('pathname') is not None: - self.pathname = m.get('pathname') - if m.get('method') is not None: - self.method = m.get('method') - if m.get('authType') is not None: - self.auth_type = m.get('authType') - if m.get('bodyType') is not None: - self.body_type = m.get('bodyType') - if m.get('reqBodyType') is not None: - self.req_body_type = m.get('reqBodyType') - if m.get('style') is not None: - self.style = m.get('style') - return self - diff --git a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/INSTALLER b/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/METADATA b/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/METADATA deleted file mode 100644 index f5da4fd..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/METADATA +++ /dev/null @@ -1,53 +0,0 @@ -Metadata-Version: 2.1 -Name: alibabacloud-tea-util -Version: 0.3.14 -Summary: The tea-util module of alibabaCloud Python SDK. 
-Home-page: https://github.com/aliyun/tea-util/tree/master/python -Author: Alibaba Cloud -Author-email: alibaba-cloud-sdk-dev-team@list.alibaba-inc.com -License: Apache License 2.0 -Keywords: alibabacloud,sdk,tea -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development -Requires-Python: >=3.6 -Requires-Dist: alibabacloud-tea (>=0.3.3) - -English | [简体中文](README-CN.md) -![](https://aliyunsdk-pages.alicdn.com/icons/AlibabaCloud.svg) - -## Alibaba Cloud Tea Util for Python - -## Installation -- **Install with pip** - -Python SDK uses a common package management tool named `pip`. If pip is not installed, see the [pip user guide](https://pip.pypa.io/en/stable/installing/?spm=5176.doc53090.2.7.zHDiNV "pip User Guide") to install pip. - -```bash -# Install the tea-util -pip install alibabacloud_tea_util -``` - -## Issues -[Opening an Issue](https://github.com/aliyun/tea-util/issues/new), Issues not conforming to the guidelines may be closed immediately. - -## Changelog -Detailed changes for each release are documented in the [release notes](./ChangeLog.md). - -## References -* [Latest Release](https://github.com/aliyun/tea-util/tree/master/python) - -## License -[Apache-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Copyright (c) 2009-present, Alibaba Cloud All rights reserved. 
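The README above (from the deleted package metadata) explains installing `alibabacloud_tea_util` from pip. For context on what this vendored copy provided, here is a minimal usage sketch built only from helpers visible in the deleted `client.py` further down in this diff; the import path and method names come from that source, while the sample dictionary values are arbitrary illustrations:

```python
from alibabacloud_tea_util.client import Client

# Coerce mixed-type query values to strings, the way generated SDK clients do
params = Client.stringify_map_value({"PageSize": 20, "Verbose": True})
print(params)  # {'PageSize': '20', 'Verbose': 'True'}

# Status-code range helpers used when deciding how to handle a response
assert Client.is_2xx(200) and not Client.is_5xx(404)
```

Since the committed venv is removed in this diff, the package would presumably be installed via `pip install alibabacloud_tea_util` (as the README suggests) rather than vendored in the repository.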
diff --git a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/RECORD b/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/RECORD deleted file mode 100644 index b379375..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -alibabacloud_tea_util-0.3.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alibabacloud_tea_util-0.3.14.dist-info/METADATA,sha256=BeT71iPPMgtjSSn6CoIxmsDHZUcIuxlDpiFBceREv0I,1955 -alibabacloud_tea_util-0.3.14.dist-info/RECORD,, -alibabacloud_tea_util-0.3.14.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alibabacloud_tea_util-0.3.14.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 -alibabacloud_tea_util-0.3.14.dist-info/top_level.txt,sha256=4-v1E-bE9bLbQAkXqJJSGMZ4C9IMCrLGozlm1O5cHJo,22 -alibabacloud_tea_util/__init__.py,sha256=26qYQ26Uogib24M1FDFsoHIUHN7iYRjaLHbs851DYZM,24 -alibabacloud_tea_util/__pycache__/__init__.cpython-312.pyc,, -alibabacloud_tea_util/__pycache__/client.cpython-312.pyc,, -alibabacloud_tea_util/__pycache__/models.cpython-312.pyc,, -alibabacloud_tea_util/client.py,sha256=5AUESXcbWxMXPnUP5sxDeMvNcqWr8SKeWTSa38J2RG8,13598 -alibabacloud_tea_util/models.py,sha256=vvrh7bAloGUi6If1o3lcO7zTWhxS0xzOdDyp8oNM-0s,6983 diff --git a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/REQUESTED b/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/WHEEL b/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/WHEEL deleted file mode 100644 index 57e3d84..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/top_level.txt b/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/top_level.txt deleted file mode 100644 index 7c58285..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util-0.3.14.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alibabacloud_tea_util diff --git a/venv/Lib/site-packages/alibabacloud_tea_util/__init__.py b/venv/Lib/site-packages/alibabacloud_tea_util/__init__.py deleted file mode 100644 index dc1bba8..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.3.14" diff --git a/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 6942b74..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/client.cpython-312.pyc deleted file mode 100644 index b313296..0000000 Binary files a/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 92ba1a0..0000000 Binary files 
a/venv/Lib/site-packages/alibabacloud_tea_util/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alibabacloud_tea_util/client.py b/venv/Lib/site-packages/alibabacloud_tea_util/client.py deleted file mode 100644 index 08f89a6..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util/client.py +++ /dev/null @@ -1,530 +0,0 @@ -import json -import platform -import time -import Tea -import asyncio -import threading -import random -import hashlib - -from datetime import datetime -from urllib.parse import urlencode -from io import BytesIO - -from Tea.model import TeaModel -from Tea.stream import READABLE -from typing import Any, BinaryIO, Dict, List - -_process_start_time = int(time.time() * 1000) -_seqId = 0 - -class Client: - """ - This is a utility module - """ - - class __ModelEncoder(json.JSONEncoder): - def default(self, o: Any) -> Any: - if isinstance(o, TeaModel): - return o.to_map() - elif isinstance(o, bytes): - return o.decode('utf-8') - super().default(o) - - @staticmethod - def __read_part(f, size=1024): - while True: - part = f.read(size) - if part: - yield part - else: - return - - @staticmethod - def __get_default_agent(): - return f'AlibabaCloud ({platform.system()}; {platform.machine()}) ' \ - f'Python/{platform.python_version()} Core/{Tea.__version__} TeaDSL/1' - - @staticmethod - def to_bytes( - val: str, - ) -> bytes: - """ - Convert a string(utf8) to bytes - @return: the return bytes - """ - if isinstance(val, bytes): - return val - elif isinstance(val, str): - return val.encode(encoding="utf-8") - else: - return str(val).encode(encoding="utf-8") - - - @staticmethod - def to_string( - val: bytes, - ) -> str: - """ - Convert a bytes to string(utf8) - @return: the return string - """ - if isinstance(val, str): - return val - elif isinstance(val, bytes): - return val.decode('utf-8') - else: - return str(val) - - @staticmethod - def parse_json( - val: str, - ) -> Any: - """ - Parse it by JSON format - @return: the parsed result - """ - try: - return json.loads(val) - except ValueError: - raise RuntimeError(f'Failed to parse the value as json format, Value: "{val}".') - - @staticmethod - async def read_as_bytes_async(stream) -> bytes: - """ - Read data from a readable stream, and compose it to a bytes - @param stream: the readable stream - @return: the bytes result - """ - if isinstance(stream, bytes): - return stream - elif isinstance(stream, str): - return bytes(stream, encoding='utf-8') - else: - return await stream.read() - - @staticmethod - async def read_as_string_async(stream) -> str: - """ - Read data from a readable stream, and compose it to a string - @param stream: the readable stream - @return: the string result - """ - buff = await Client.read_as_bytes_async(stream) - return Client.to_string(buff) - - @staticmethod - async def read_as_json_async(stream) -> Any: - """ - Read data from a readable stream, and parse it by JSON format - @param stream: the readable stream - @return: the parsed result - """ - return Client.parse_json( - await Client.read_as_string_async(stream) - ) - - @staticmethod - def read_as_bytes(stream) -> bytes: - """ - Read data from a readable stream, and compose it to a bytes - @param stream: the readable stream - @return: the bytes result - """ - if isinstance(stream, READABLE): - b = b'' - for part in Client.__read_part(stream, 1024): - b += part - return b - elif isinstance(stream, bytes): - return stream - else: - return bytes(stream, encoding='utf-8') - - @staticmethod - def read_as_string(stream) 
-> str: - """ - Read data from a readable stream, and compose it to a string - @param stream: the readable stream - @return: the string result - """ - buff = Client.read_as_bytes(stream) - return Client.to_string(buff) - - @staticmethod - def read_as_json(stream) -> Any: - """ - Read data from a readable stream, and parse it by JSON format - @param stream: the readable stream - @return: the parsed result - """ - return Client.parse_json(Client.read_as_string(stream)) - - @staticmethod - def get_nonce() -> str: - """ - Generate a nonce string - @return: the nonce string - """ - global _seqId - thread_id = threading.get_ident() - current_time = int(time.time() * 1000) - seq = _seqId - _seqId += 1 - randNum = random.getrandbits(64) - msg = f'{_process_start_time}-{thread_id}-{current_time}-{seq}-{randNum}' - md5 = hashlib.md5() - md5.update(msg.encode('utf-8')) - return md5.hexdigest() - - @staticmethod - def get_date_utcstring() -> str: - """ - Get an UTC format string by current date, e.g. 'Thu, 06 Feb 2020 07:32:54 GMT' - @return: the UTC format string - """ - return datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') - - @staticmethod - def default_string( - real: str, - default: str, - ) -> str: - """ - If not set the real, use default value - @return: the return string - """ - return real if real is not None else default - - @staticmethod - def default_number( - real: int, - default: int, - ) -> int: - """ - If not set the real, use default value - @return: the return number - """ - return real if real is not None else default - - @staticmethod - def to_form_string( - val: dict, - ) -> str: - """ - Format a map to form string, like a=a%20b%20c - @return: the form string - """ - if not val: - return "" - keys = sorted(list(val)) - dic = {k: val[k] for k in keys if not isinstance(val[k], READABLE)} - return urlencode(dic) - - @staticmethod - def to_jsonstring( - val: Any, - ) -> str: - """ - Stringify a value by JSON format - @return: the JSON format string - """ - if isinstance(val, str): - return str(val) - return json.dumps( - val, cls=Client.__ModelEncoder, ensure_ascii=False, separators=(",", ":") - ) - - @staticmethod - def empty( - val: str, - ) -> bool: - """ - Check the string is empty? - @return: if string is null or zero length, return true - """ - return not val - - @staticmethod - def equal_string( - val1: str, - val2: str, - ) -> bool: - """ - Check one string equals another one? - @return: if equals, return true - """ - return val1 == val2 - - @staticmethod - def equal_number( - val1: int, - val2: int, - ) -> bool: - """ - Check one number equals another one? 
- @return: if equals, return true - """ - return val1 == val2 - - @staticmethod - def is_unset( - value: Any, - ) -> bool: - """ - Check one value is unset - @return: if unset, return true - """ - return value is None - - @staticmethod - def stringify_map_value( - m: Dict[str, Any], - ) -> Dict[str, str]: - """ - Stringify the value of map - @return: the new stringified map - """ - if m is None: - return {} - - dic_result = {} - for k, v in m.items(): - if v is not None: - if isinstance(v, bytes): - v = v.decode('utf-8') - else: - v = str(v) - dic_result[k] = v - return dic_result - - @staticmethod - def anyify_map_value( - m: Dict[str, str], - ) -> Dict[str, Any]: - """ - Anyify the value of map - @return: the new anyfied map - """ - return m - - @staticmethod - def assert_as_boolean( - value: Any, - ) -> bool: - """ - Assert a value, if it is a boolean, return it, otherwise throws - @return: the boolean value - """ - if not isinstance(value, bool): - raise ValueError(f'{value} is not a bool') - return value - - @staticmethod - def assert_as_string( - value: Any, - ) -> str: - """ - Assert a value, if it is a string, return it, otherwise throws - @return: the string value - """ - if not isinstance(value, str): - raise ValueError(f'{value} is not a str') - return value - - @staticmethod - def assert_as_bytes( - value: Any, - ) -> bytes: - """ - Assert a value, if it is a bytes, return it, otherwise throws - @return: the bytes value - """ - if not isinstance(value, bytes): - raise ValueError(f'{value} is not a bytes') - return value - - @staticmethod - def assert_as_number( - value: Any, - ) -> int: - """ - Assert a value, if it is a number, return it, otherwise throws - @return: the number value - """ - if not isinstance(value, (int, float)): - raise ValueError(f'{value} is not a number') - return value - - @staticmethod - def assert_as_integer( - value: Any, - ) -> int: - """ - Assert a value, if it is a integer, return it, otherwise throws - @return: the integer value - """ - if not isinstance(value, int): - raise ValueError(f'{value} is not a int number') - return value - - @staticmethod - def assert_as_map( - value: Any, - ) -> Dict[str, Any]: - """ - Assert a value, if it is a map, return it, otherwise throws - @return: the map value - """ - if not isinstance(value, dict): - raise ValueError(f'{value} is not a dict') - return value - - @staticmethod - def get_user_agent( - user_agent: str, - ) -> str: - """ - Get user agent, if it userAgent is not null, splice it with defaultUserAgent and return, otherwise return defaultUserAgent - @return: the string value - """ - if user_agent: - return f'{Client.__get_default_agent()} {user_agent}' - return Client.__get_default_agent() - - @staticmethod - def is_2xx( - code: int, - ) -> bool: - """ - If the code between 200 and 300, return true, or return false - @return: boolean - """ - return 200 <= code < 300 - - @staticmethod - def is_3xx( - code: int, - ) -> bool: - """ - If the code between 300 and 400, return true, or return false - @return: boolean - """ - return 300 <= code < 400 - - @staticmethod - def is_4xx( - code: int, - ) -> bool: - """ - If the code between 400 and 500, return true, or return false - @return: boolean - """ - return 400 <= code < 500 - - @staticmethod - def is_5xx( - code: int, - ) -> bool: - """ - If the code between 500 and 600, return true, or return false - @return: boolean - """ - return 500 <= code < 600 - - @staticmethod - def validate_model( - m: TeaModel, - ) -> None: - """ - Validate model - @return: void - 
""" - if isinstance(m, TeaModel): - m.validate() - - @staticmethod - def to_map( - in_: TeaModel, - ) -> Dict[str, Any]: - """ - Model transforms to map[string]any - @return: map[string]any - """ - if isinstance(in_, TeaModel): - return in_.to_map() - else: - return in_ - - @staticmethod - def sleep( - millisecond: int, - ) -> None: - """ - Suspends the current thread for the specified number of milliseconds. - """ - time.sleep(millisecond / 1000) - - @staticmethod - async def sleep_async( - millisecond: int, - ) -> None: - """ - Suspends the current thread for the specified number of milliseconds. - """ - await asyncio.sleep(millisecond / 1000) - - @staticmethod - def to_array( - input: Any, - ) -> List[Dict[str, Any]]: - """ - Transform input as array. - """ - if input is None: - return [] - - out = [] - for i in input: - if isinstance(i, TeaModel): - out.append(i.to_map()) - else: - out.append(i) - return out - - @staticmethod - def assert_as_readable( - value: Any, - ) -> BinaryIO: - """ - Assert a value, if it is a readable, return it, otherwise throws - @return: the readable value - """ - if isinstance(value, str): - value = value.encode('utf-8') - - if isinstance(value, bytes): - value = BytesIO(value) - elif not isinstance(value, READABLE): - raise ValueError(f'The value is not a readable') - return value - - @staticmethod - def assert_as_array( - value: Any, - ) -> list: - if not isinstance(value, list): - raise ValueError('The value is not a list') - return value - - @staticmethod - def get_host_name() -> str: - """ - Get hostname of current machine - @return: the string value - """ - import socket - try: - return socket.gethostname() - except Exception: - return '' diff --git a/venv/Lib/site-packages/alibabacloud_tea_util/models.py b/venv/Lib/site-packages/alibabacloud_tea_util/models.py deleted file mode 100644 index bf3572a..0000000 --- a/venv/Lib/site-packages/alibabacloud_tea_util/models.py +++ /dev/null @@ -1,198 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is auto-generated, don't edit it. Thanks. 
-from Tea.model import TeaModel -from typing import Dict - - -class ExtendsParameters(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - queries: Dict[str, str] = None, - ): - self.headers = headers - self.queries = queries - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.queries is not None: - result['queries'] = self.queries - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('queries') is not None: - self.queries = m.get('queries') - return self - - -class RuntimeOptions(TeaModel): - """ - The common runtime options model - """ - def __init__( - self, - autoretry: bool = None, - ignore_ssl: bool = None, - max_attempts: int = None, - backoff_policy: str = None, - backoff_period: int = None, - read_timeout: int = None, - connect_timeout: int = None, - http_proxy: str = None, - https_proxy: str = None, - no_proxy: str = None, - max_idle_conns: int = None, - local_addr: str = None, - socks_5proxy: str = None, - socks_5net_work: str = None, - keep_alive: bool = None, - key: str = None, - cert: str = None, - ca: str = None, - extends_parameters: ExtendsParameters = None, - ): - # whether to try again - self.autoretry = autoretry - # ignore SSL validation - self.ignore_ssl = ignore_ssl - # privite key for client certificate - self.key = key - # client certificate - self.cert = cert - # server certificate - self.ca = ca - # maximum number of retries - self.max_attempts = max_attempts - # backoff policy - self.backoff_policy = backoff_policy - # backoff period - self.backoff_period = backoff_period - # read timeout - self.read_timeout = read_timeout - # connect timeout - self.connect_timeout = connect_timeout - # http proxy url - self.http_proxy = http_proxy - # https Proxy url - self.https_proxy = https_proxy - # agent blacklist - self.no_proxy = no_proxy - # maximum number of connections - self.max_idle_conns = max_idle_conns - # local addr - self.local_addr = local_addr - # SOCKS5 proxy - self.socks_5proxy = socks_5proxy - # SOCKS5 netWork - self.socks_5net_work = socks_5net_work - # whether to enable keep-alive - self.keep_alive = keep_alive - # Extends Parameters - self.extends_parameters = extends_parameters - - def validate(self): - if self.extends_parameters: - self.extends_parameters.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.autoretry is not None: - result['autoretry'] = self.autoretry - if self.ignore_ssl is not None: - result['ignoreSSL'] = self.ignore_ssl - if self.key is not None: - result['key'] = self.key - if self.cert is not None: - result['cert'] = self.cert - if self.ca is not None: - result['ca'] = self.ca - if self.max_attempts is not None: - result['max_attempts'] = self.max_attempts - if self.backoff_policy is not None: - result['backoff_policy'] = self.backoff_policy - if self.backoff_period is not None: - result['backoff_period'] = self.backoff_period - if self.read_timeout is not None: - result['readTimeout'] = self.read_timeout - if self.connect_timeout is not None: - result['connectTimeout'] = self.connect_timeout - if self.http_proxy is not None: - result['httpProxy'] = self.http_proxy - if self.https_proxy is not None: - result['httpsProxy'] = self.https_proxy - if self.no_proxy is not None: - 
result['noProxy'] = self.no_proxy - if self.max_idle_conns is not None: - result['maxIdleConns'] = self.max_idle_conns - if self.local_addr is not None: - result['localAddr'] = self.local_addr - if self.socks_5proxy is not None: - result['socks5Proxy'] = self.socks_5proxy - if self.socks_5net_work is not None: - result['socks5NetWork'] = self.socks_5net_work - if self.keep_alive is not None: - result['keepAlive'] = self.keep_alive - if self.extends_parameters is not None: - result['extendsParameters'] = self.extends_parameters.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('autoretry') is not None: - self.autoretry = m.get('autoretry') - if m.get('ignoreSSL') is not None: - self.ignore_ssl = m.get('ignoreSSL') - if m.get('key') is not None: - self.key = m.get('key') - if m.get('cert') is not None: - self.cert = m.get('cert') - if m.get('ca') is not None: - self.ca = m.get('ca') - if m.get('max_attempts') is not None: - self.max_attempts = m.get('max_attempts') - if m.get('backoff_policy') is not None: - self.backoff_policy = m.get('backoff_policy') - if m.get('backoff_period') is not None: - self.backoff_period = m.get('backoff_period') - if m.get('readTimeout') is not None: - self.read_timeout = m.get('readTimeout') - if m.get('connectTimeout') is not None: - self.connect_timeout = m.get('connectTimeout') - if m.get('httpProxy') is not None: - self.http_proxy = m.get('httpProxy') - if m.get('httpsProxy') is not None: - self.https_proxy = m.get('httpsProxy') - if m.get('noProxy') is not None: - self.no_proxy = m.get('noProxy') - if m.get('maxIdleConns') is not None: - self.max_idle_conns = m.get('maxIdleConns') - if m.get('localAddr') is not None: - self.local_addr = m.get('localAddr') - if m.get('socks5Proxy') is not None: - self.socks_5proxy = m.get('socks5Proxy') - if m.get('socks5NetWork') is not None: - self.socks_5net_work = m.get('socks5NetWork') - if m.get('keepAlive') is not None: - self.keep_alive = m.get('keepAlive') - if m.get('extendsParameters') is not None: - temp_model = ExtendsParameters() - self.extends_parameters = temp_model.from_map(m['extendsParameters']) - return self - - diff --git a/venv/Lib/site-packages/alipay/__init__.py b/venv/Lib/site-packages/alipay/__init__.py deleted file mode 100644 index adf75d1..0000000 --- a/venv/Lib/site-packages/alipay/__init__.py +++ /dev/null @@ -1,890 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 -""" - __init__.py - ~~~~~~~~~~ -""" -import json -from datetime import datetime -from functools import partial - -import hashlib -import OpenSSL -from Cryptodome.Hash import SHA, SHA256 -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import PKCS1_v1_5 - -from .compat import decodebytes, encodebytes, quote_plus, urlopen -from .exceptions import AliPayException, AliPayValidationError -from .utils import AliPayConfig -from .loggers import logger - - -# 常见加密算法 -CryptoAlgSet = ( - b'rsaEncryption', - b'md2WithRSAEncryption', - b'md5WithRSAEncryption', - b'sha1WithRSAEncryption', - b'sha256WithRSAEncryption', - b'sha384WithRSAEncryption', - b'sha512WithRSAEncryption' -) - - -class BaseAliPay: - @property - def appid(self): - return self._appid - - @property - def sign_type(self): - return self._sign_type - - @property - def app_private_key(self): - """签名用""" - return self._app_private_key - - @property - def alipay_public_key(self): - """验证签名用""" - return self._alipay_public_key - - def __init__( - self, - appid, - app_notify_url=None, - app_private_key_string=None, - 
alipay_public_key_string=None, - sign_type="RSA2", - debug=False, - verbose=False, - config=None - ): - """ - 初始化: - alipay = AliPay( - appid="", - app_notify_url="http://example.com", - sign_type="RSA2" - ) - """ - self._appid = str(appid) - self._app_notify_url = app_notify_url - self._app_private_key_string = app_private_key_string - self._alipay_public_key_string = alipay_public_key_string - self._verbose = verbose - self._config = config or AliPayConfig() - - self._app_private_key = None - self._alipay_public_key = None - if sign_type not in ("RSA", "RSA2"): - message = "Unsupported sign type {}".format(sign_type) - raise AliPayException(None, message) - self._sign_type = sign_type - - if debug: - # self._gateway = "https://openapi.alipaydev.com/gateway.do" - self._gateway = "https://openapi-sandbox.dl.alipaydev.com/gateway.do" - else: - self._gateway = "https://openapi.alipay.com/gateway.do" - - # load key file immediately - self._load_key() - - def _load_key(self): - # load private key - content = self._app_private_key_string - self._app_private_key = RSA.importKey(content) - - # load public key - content = self._alipay_public_key_string - self._alipay_public_key = RSA.importKey(content) - - def _sign(self, unsigned_string): - """ - 通过如下方法调试签名 - 方法1 - key = rsa.PrivateKey.load_pkcs1(open(self._app_private_key_string).read()) - sign = rsa.sign(unsigned_string.encode(), key, "SHA-1") - # base64 编码,转换为 unicode 表示并移除回车 - sign = base64.encodebytes(sign).decode().replace("\n", "") - 方法2 - key = RSA.importKey(open(self._app_private_key_string).read()) - signer = PKCS1_v1_5.new(key) - signature = signer.sign(SHA.new(unsigned_string.encode())) - # base64 编码,转换为 unicode 表示并移除回车 - sign = base64.encodebytes(signature).decode().replace("\n", "") - 方法3 - echo "abc" | openssl sha1 -sign alipay.key | openssl base64 - """ - # 开始计算签名 - key = self.app_private_key - signer = PKCS1_v1_5.new(key) - if self._sign_type == "RSA": - signature = signer.sign(SHA.new(unsigned_string.encode())) - else: - signature = signer.sign(SHA256.new(unsigned_string.encode())) - # base64 编码,转换为 unicode 表示并移除回车 - sign = encodebytes(signature).decode().replace("\n", "") - return sign - - def _ordered_data(self, data): - for k, v in data.items(): - if isinstance(v, dict): - # 将字典类型的数据dump出来 - data[k] = json.dumps(v, separators=(',', ':')) - return sorted(data.items()) - - def build_body(self, method, biz_content=None, **kwargs): - if not biz_content: - biz_content = {} - - data = { - "app_id": self._appid, - "method": method, - "charset": "utf-8", - "sign_type": self._sign_type, - "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), - "version": "1.0", - "biz_content": biz_content - } - data.update(kwargs) - - if method in ( - "alipay.trade.app.pay", "alipay.trade.wap.pay", - "alipay.trade.page.pay", "alipay.trade.pay", - "alipay.trade.precreate", "alipay.trade.create" - ) and not data.get("notify_url") and self._app_notify_url: - data["notify_url"] = self._app_notify_url - - # the following keys are optional, and should be removed if it's empty - keys = ("notify_url", "return_url") - for key in keys: - if key in data and not data.get(key, None): - data.pop(key, None) - - if self._verbose: - logger.debug("data to be signed") - logger.debug(data) - return data - - def sign_data(self, data): - # 排序后的字符串 - ordered_items = self._ordered_data(data) - raw_string = "&".join("{}={}".format(k, v) for k, v in ordered_items) - sign = self._sign(raw_string) - unquoted_items = ordered_items + [('sign', sign)] - - # 获得最终的订单信息字符串 - 
signed_string = "&".join("{}={}".format(k, quote_plus(v)) for k, v in unquoted_items) - if self._verbose: - logger.debug("signed srtring") - logger.debug(signed_string) - return signed_string - - def _verify(self, raw_content, signature): - # 开始计算签名 - key = self.alipay_public_key - signer = PKCS1_v1_5.new(key) - if self._sign_type == "RSA": - digest = SHA.new() - else: - digest = SHA256.new() - digest.update(raw_content.encode()) - return bool(signer.verify(digest, decodebytes(signature.encode()))) - - def verify(self, data, signature): - if "sign_type" in data: - sign_type = data.pop("sign_type") - if sign_type != self._sign_type: - raise AliPayException(None, "Unknown sign type: {}".format(sign_type)) - # 排序后的字符串 - unsigned_items = self._ordered_data(data) - message = "&".join(u"{}={}".format(k, v) for k, v in unsigned_items) - return self._verify(message, signature) - - def client_api(self, api_name, biz_content=None, **kwargs): - """ - alipay api without http request - """ - if not biz_content: - biz_content = {} - - data = self.build_body(api_name, biz_content, **kwargs) - return self.sign_data(data) - - def server_api(self, api_name, biz_content=None, **kwargs): - """ - alipay api with http request - """ - if not biz_content: - biz_content = {} - - data = self.build_body(api_name, biz_content, **kwargs) - # alipay.trade.query => alipay_trade_query_response - response_type = api_name.replace(".", "_") + "_response" - # print(data) - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_wap_pay( - self, subject, out_trade_no, total_amount, - return_url=None, notify_url=None, **kwargs - ): - biz_content = { - "subject": subject, - "out_trade_no": out_trade_no, - "total_amount": total_amount, - "product_code": "QUICK_WAP_PAY" - } - biz_content.update(kwargs) - data = self.build_body( - "alipay.trade.wap.pay", - biz_content, - return_url=return_url, - notify_url=notify_url - ) - return self.sign_data(data) - - def api_alipay_trade_app_pay( - self, subject, out_trade_no, total_amount, notify_url=None, **kwargs - ): - biz_content = { - "subject": subject, - "out_trade_no": out_trade_no, - "total_amount": total_amount, - "product_code": "QUICK_MSECURITY_PAY" - } - biz_content.update(kwargs) - data = self.build_body("alipay.trade.app.pay", biz_content, notify_url=notify_url) - return self.sign_data(data) - - def api_alipay_trade_page_pay(self, subject, out_trade_no, total_amount, - return_url=None, notify_url=None, **kwargs): - biz_content = { - "subject": subject, - "out_trade_no": out_trade_no, - "total_amount": total_amount, - "product_code": "FAST_INSTANT_TRADE_PAY" - } - - biz_content.update(kwargs) - data = self.build_body( - "alipay.trade.page.pay", - biz_content, - return_url=return_url, - notify_url=notify_url - ) - return self.sign_data(data) - - def api_alipay_trade_query(self, out_trade_no=None, trade_no=None): - """ - response = { - "alipay_trade_query_response": { - "trade_no": "2017032121001004070200176844", - "code": "10000", - "invoice_amount": "20.00", - "open_id": "20880072506750308812798160715407", - "fund_bill_list": [ - { - "amount": "20.00", - "fund_channel": "ALIPAYACCOUNT" - } - ], - "buyer_logon_id": "csq***@sandbox.com", - "send_pay_date": "2017-03-21 13:29:17", - "receipt_amount": "20.00", - "out_trade_no": "out_trade_no15", - "buyer_pay_amount": "20.00", - "buyer_user_id": "2088102169481075", - "msg": "Success", - "point_amount": "0.00", - "trade_status": "TRADE_SUCCESS", - "total_amount": "20.00" - }, - "sign": "" - } - """ - assert 
(out_trade_no is not None) or (trade_no is not None),\ - "Both trade_no and out_trade_no are None" - - biz_content = {} - if out_trade_no: - biz_content["out_trade_no"] = out_trade_no - if trade_no: - biz_content["trade_no"] = trade_no - data = self.build_body("alipay.trade.query", biz_content) - response_type = "alipay_trade_query_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_pay( - self, out_trade_no, scene, auth_code, subject, notify_url=None, **kwargs - ): - """ - eg: - self.api_alipay_trade_pay( - out_trade_no, - "bar_code/wave_code", - auth_code, - subject, - total_amount=12, - discountable_amount=10 - ) - - failed response = { - "alipay_trade_pay_response": { - "code": "40004", - "msg": "Business Failed", - "sub_code": "ACQ.INVALID_PARAMETER", - "sub_msg": "", - "buyer_pay_amount": "0.00", - "invoice_amount": "0.00", - "point_amount": "0.00", - "receipt_amount": "0.00" - }, - "sign": "" - } - succeeded response = { - "alipay_trade_pay_response": { - "trade_no": "2017032121001004070200176846", - "code": "10000", - "invoice_amount": "20.00", - "open_id": "20880072506750308812798160715407", - "fund_bill_list": [ - { - "amount": "20.00", - "fund_channel": "ALIPAYACCOUNT" - } - ], - "buyer_logon_id": "csq***@sandbox.com", - "receipt_amount": "20.00", - "out_trade_no": "out_trade_no18", - "buyer_pay_amount": "20.00", - "buyer_user_id": "2088102169481075", - "msg": "Success", - "point_amount": "0.00", - "gmt_payment": "2017-03-21 15:07:29", - "total_amount": "20.00" - }, - "sign": "" - } - """ - assert scene in ("bar_code", "wave_code"), 'scene not in ("bar_code", "wave_code")' - - biz_content = { - "out_trade_no": out_trade_no, - "scene": scene, - "auth_code": auth_code, - "subject": subject - } - biz_content.update(**kwargs) - data = self.build_body("alipay.trade.pay", biz_content, notify_url=notify_url) - response_type = "alipay_trade_pay_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_refund(self, refund_amount, out_trade_no=None, trade_no=None, **kwargs): - biz_content = { - "refund_amount": refund_amount - } - biz_content.update(**kwargs) - if out_trade_no: - biz_content["out_trade_no"] = out_trade_no - if trade_no: - biz_content["trade_no"] = trade_no - - data = self.build_body("alipay.trade.refund", biz_content) - response_type = "alipay_trade_refund_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_cancel(self, out_trade_no=None, trade_no=None): - """ - response = { - "alipay_trade_cancel_response": { - "msg": "Success", - "out_trade_no": "out_trade_no15", - "code": "10000", - "retry_flag": "N" - } - } - """ - - assert (out_trade_no is not None) or (trade_no is not None),\ - "Both trade_no and out_trade_no are None" - - biz_content = {} - if out_trade_no: - biz_content["out_trade_no"] = out_trade_no - if trade_no: - biz_content["trade_no"] = trade_no - - data = self.build_body("alipay.trade.cancel", biz_content) - response_type = "alipay_trade_cancel_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_close(self, out_trade_no=None, trade_no=None, operator_id=None): - """ - response = { - "alipay_trade_close_response": { - "code": "10000", - "msg": "Success", - "trade_no": "2013112111001004500000675971", - "out_trade_no": "YX_001"a - } - } - """ - - assert (out_trade_no is not None) or (trade_no is not None),\ - "Both trade_no and out_trade_no are None" - - biz_content = {} - if out_trade_no: - 
biz_content["out_trade_no"] = out_trade_no - if trade_no: - biz_content["trade_no"] = trade_no - if operator_id: - biz_content["operator_id"] = operator_id - - data = self.build_body("alipay.trade.close", biz_content) - response_type = "alipay_trade_close_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_create( - self, subject, out_trade_no, total_amount, notify_url=None, **kwargs - ): - biz_content = { - "subject": subject, - "out_trade_no": out_trade_no, - "total_amount": total_amount - } - biz_content.update(kwargs) - data = self.build_body("alipay.trade.create", biz_content, notify_url=notify_url) - response_type = "alipay_trade_create" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_precreate( - self, subject, out_trade_no, total_amount, notify_url=None, **kwargs - ): - """ - success response = { - "alipay_trade_precreate_response": { - "msg": "Success", - "out_trade_no": "out_trade_no17", - "code": "10000", - "qr_code": "https://qr.alipay.com/bax03431ljhokirwl38f00a7" - }, - "sign": "" - } - - - failed response = { - "alipay_trade_precreate_response": { - "msg": "Business Failed", - "sub_code": "ACQ.TOTAL_FEE_EXCEED", - "code": "40004", - "sub_msg": "订单金额超过限额" - }, - "sign": "" - } - """ - biz_content = { - "out_trade_no": out_trade_no, - "total_amount": total_amount, - "subject": subject - } - biz_content.update(**kwargs) - data = self.build_body("alipay.trade.precreate", biz_content, notify_url=notify_url) - response_type = "alipay_trade_precreate_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_fastpay_refund_query( - self, out_request_no, trade_no=None, out_trade_no=None - ): - assert (out_trade_no is not None) or (trade_no is not None),\ - "Both trade_no and out_trade_no are None" - - biz_content = {"out_request_no": out_request_no} - if trade_no: - biz_content["trade_no"] = trade_no - else: - biz_content["out_trade_no"] = out_trade_no - - data = self.build_body("alipay.trade.fastpay.refund.query", biz_content) - response_type = "alipay_trade_fastpay_refund_query_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_fund_trans_toaccount_transfer( - self, out_biz_no, payee_type, payee_account, amount, **kwargs - ): - assert payee_type in ("ALIPAY_USERID", "ALIPAY_LOGONID"), "unknown payee type" - biz_content = { - "out_biz_no": out_biz_no, - "payee_type": payee_type, - "payee_account": payee_account, - "amount": amount - } - biz_content.update(kwargs) - data = self.build_body("alipay.fund.trans.toaccount.transfer", biz_content) - response_type = "alipay_fund_trans_toaccount_transfer_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_fund_trans_order_query(self, out_biz_no=None, order_id=None): - if out_biz_no is None and order_id is None: - raise Exception("Both out_biz_no and order_id are None!") - - biz_content = {} - if out_biz_no: - biz_content["out_biz_no"] = out_biz_no - if order_id: - biz_content["order_id"] = order_id - - data = self.build_body("alipay.fund.trans.order.query", biz_content) - response_type = "alipay_fund_trans_order_query_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_trade_order_settle( - self, - out_request_no, - trade_no, - royalty_parameters, - **kwargs - ): - biz_content = { - "out_request_no": out_request_no, - "trade_no": trade_no, - "royalty_parameters": royalty_parameters, - } - biz_content.update(kwargs) - data = 
self.build_body("alipay.trade.order.settle", biz_content) - response_type = "alipay_trade_order_settle_response" - return self.verified_sync_response(data, response_type) - - def api_alipay_ebpp_invoice_token_batchquery(self, invoice_token=None, scene=None): - if scene is None: - scene = "INVOICE_EXPENSE" - if invoice_token is None: - raise Exception("invoice_token is None!") - - biz_content = { - "invoice_token": invoice_token, - "scene": scene - } - data = self.build_body("alipay.ebpp.invoice.token.batchquery", biz_content) - response_type = "alipay_ebpp_invoice_token_batchquery_response" - return self.verified_sync_response(data, response_type) - - def _verify_and_return_sync_response(self, raw_string, response_type): - """ - return response if verification succeeded, raise exception if not - - As to issue #69, json.loads(raw_string)[response_type] should not be returned directly, - use json.loads(plain_content) instead - - failed response is like this - { - "alipay_trade_query_response": { - "sub_code": "isv.invalid-app-id", - "code": "40002", - "sub_msg": "无效的AppID参数", - "msg": "Invalid Arguments" - } - } - or - { - "error_response": { - "msg": "Invalid Arguments", - "code": "40002", - "sub_msg": "授权码code无效", - "sub_code": "isv.code-invalid" - }, - "alipay_cert_sn": "a5b59edf65dcda9ca26e071ab6f5a0a7", - "sign": "" - } - - """ - response = json.loads(raw_string) - if "sign" not in response.keys() or "error_response" in response.keys(): - result = response.get(response_type, None) or response.get("error_response") - raise AliPayException( - code=result.get("code", "0"), - message=raw_string - ) - - sign = response["sign"] - - # locate string to be signed - plain_content = self._get_string_to_be_signed(raw_string, response_type) - - if not self._verify(plain_content, sign): - raise AliPayValidationError - return json.loads(plain_content) - - def verified_sync_response(self, data, response_type): - url = self._gateway + "?" 
+ self.sign_data(data) - raw_string = urlopen(url, timeout=self._config.timeout).read().decode() - return self._verify_and_return_sync_response(raw_string, response_type) - - def _get_string_to_be_signed(self, raw_string, response_type): - """ - https://docs.open.alipay.com/200/106120 - 从同步返回的接口里面找到待签名的字符串 - """ - balance = 0 - start = end = raw_string.find("{", raw_string.find(response_type)) - # 从response_type之后的第一个{的下一位开始匹配, - # 如果是{则balance加1; 如果是}而且balance=0,就是待验签字符串的终点 - for i, c in enumerate(raw_string[start + 1:], start + 1): - if c == "{": - balance += 1 - elif c == "}": - if balance == 0: - end = i + 1 - break - balance -= 1 - return raw_string[start:end] - - -class AliPay(BaseAliPay): - pass - - -class DCAliPay(BaseAliPay): - """ - 数字证书 (digital certificate) 版本 - """ - - def __init__( - self, - appid, - app_private_key_string, - app_public_key_cert_string, - alipay_public_key_cert_string, - alipay_root_cert_string, - app_notify_url=None, - sign_type="RSA2", - debug=False, - verbose=False - ): - """ - 初始化 - DCAlipay( - appid='', - app_notify_url='http://example.com', - app_private_key_string='', - app_public_key_cert_string='', - alipay_public_key_cert_sring='', - aplipay_root_cert_string='', - ) - """ - self._app_public_key_cert_string = app_public_key_cert_string - self._alipay_public_key_cert_string = alipay_public_key_cert_string - self._alipay_root_cert_string = alipay_root_cert_string - alipay_public_key_string = self.load_alipay_public_key_string() - super().__init__( - appid=appid, - app_notify_url=app_notify_url, - app_private_key_string=app_private_key_string, - alipay_public_key_string=alipay_public_key_string, - sign_type=sign_type, - debug=debug, - verbose=verbose - ) - - def api_alipay_open_app_alipaycert_download(self, alipay_cert_sn): - """ - 下载支付宝证书 - 验签使用,支付宝公钥证书无感知升级机制 - """ - biz_content = { - "alipay_cert_sn": alipay_cert_sn - } - data = self.build_body("alipay.open.app.alipaycert.download", biz_content) - return self.sign_data(data) - - def build_body(self, *args, **kwargs): - data = super().build_body(*args, **kwargs) - data["app_cert_sn"] = self.app_cert_sn - data["alipay_root_cert_sn"] = self.alipay_root_cert_sn - if self._verbose: - logger.debug("data to be signed") - logger.debug(data) - return data - - def load_alipay_public_key_string(self): - cert = OpenSSL.crypto.load_certificate( - OpenSSL.crypto.FILETYPE_PEM, self._alipay_public_key_cert_string - ) - return OpenSSL.crypto.dump_publickey( - OpenSSL.crypto.FILETYPE_PEM, cert.get_pubkey() - ).decode("utf-8") - - @staticmethod - def get_cert_sn(cert): - """ - 获取证书 SN 算法 - """ - cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert) - certIssue = cert.get_issuer() - name = 'CN={},OU={},O={},C={}'.format(certIssue.CN, certIssue.OU, certIssue.O, certIssue.C) - string = name + str(cert.get_serial_number()) - return hashlib.md5(string.encode()).hexdigest() - - @staticmethod - def read_pem_cert_chain(certContent): - """解析根证书""" - # 根证书中,每个 cert 中间有两个回车间隔 - items = [i for i in certContent.split('\n\n') if i] - load_cert = partial(OpenSSL.crypto.load_certificate, OpenSSL.crypto.FILETYPE_PEM) - return [load_cert(c) for c in items] - - @staticmethod - def get_root_cert_sn(rootCert): - """ 根证书 SN 算法""" - certs = DCAliPay.read_pem_cert_chain(rootCert) - rootCertSN = None - for cert in certs: - try: - sigAlg = cert.get_signature_algorithm() - except ValueError: - continue - if sigAlg in CryptoAlgSet: - certIssue = cert.get_issuer() - name = 'CN={},OU={},O={},C={}'.format( - certIssue.CN, 
certIssue.OU, certIssue.O, certIssue.C - ) - string = name + str(cert.get_serial_number()) - certSN = hashlib.md5(string.encode()).hexdigest() - if not rootCertSN: - rootCertSN = certSN - else: - rootCertSN = rootCertSN + '_' + certSN - return rootCertSN - - @property - def app_cert_sn(self): - if not hasattr(self, "_app_cert_sn"): - self._app_cert_sn = self.get_cert_sn(self._app_public_key_cert_string) - return getattr(self, "_app_cert_sn") - - @property - def alipay_root_cert_sn(self): - if not hasattr(self, "_alipay_root_cert_sn"): - self._alipay_root_cert_sn = self.get_root_cert_sn(self._alipay_root_cert_string) - return getattr(self, "_alipay_root_cert_sn") - - def api_alipay_fund_trans_uni_transfer( - self, out_biz_no, identity_type, identity, trans_amount, name=None, **kwargs - ): - """ - 单笔转账接口, 只支持公钥证书模式 - 文档地址: https://opendocs.alipay.com/apis/api_28/alipay.fund.trans.uni.transfer - """ - assert identity_type in ("ALIPAY_USER_ID", "ALIPAY_LOGON_ID"), "unknown identity type" - - biz_content = { - "payee_info": { - "identity": identity, - "identity_type": identity_type, - }, - "out_biz_no": out_biz_no, - "trans_amount": trans_amount, - "product_code": "TRANS_ACCOUNT_NO_PWD", - "biz_scene": "DIRECT_TRANSFER", - } - biz_content["payee_info"]["name"] = name if name else None - biz_content.update(kwargs) - - response_type = "alipay_fund_trans_uni_transfer_response" - data = self.build_body("alipay.fund.trans.uni.transfer", biz_content) - return self.verified_sync_response(data, response_type) - - -class ISVAliPay(BaseAliPay): - - def __init__( - self, - appid, - app_notify_url=None, - app_private_key_string=None, - alipay_public_key_string=None, - sign_type="RSA2", - debug=False, - verbose=False, - app_auth_token=None, - app_auth_code=None - ): - if not app_auth_token and not app_auth_code: - raise Exception("Both app_auth_code and app_auth_token are None !!!") - - self._app_auth_token = app_auth_token - self._app_auth_code = app_auth_code - super().__init__( - appid, - app_notify_url, - app_private_key_string=app_private_key_string, - alipay_public_key_string=alipay_public_key_string, - sign_type=sign_type, - debug=debug, - verbose=verbose - ) - - @property - def app_auth_token(self): - # 没有则换取token - if not self._app_auth_token: - result = self.api_alipay_open_auth_token_app(self._app_auth_code) - self._app_auth_token = result.get("app_auth_token") - if not self._app_auth_token: - msg = "Get auth token by auth code failed: {}" - raise Exception(msg.format(self._app_auth_code)) - return self._app_auth_token - - def build_body(self, *args, **kwargs): - data = super().build_body(*args, **kwargs) - if self._app_auth_token: - data["app_auth_token"] = self._app_auth_token - if self._verbose: - logger.debug("data to be signed") - logger.debug(data) - - return data - - def api_alipay_open_auth_token_app(self, refresh_token=None): - """ - response = { - "code": "10000", - "msg": "Success", - "app_auth_token": "201708BB28623ce3d10f4f62875e9ef5cbeebX07", - "app_refresh_token": "201708BB108a270d8bb6409890d16175a04a7X07", - "auth_app_id": "appid", - "expires_in": 31536000, - "re_expires_in": 32140800, - "user_id": "2088xxxxx - } - """ - - if refresh_token: - biz_content = { - "grant_type": "refresh_token", - "refresh_token": refresh_token - } - else: - biz_content = { - "grant_type": "authorization_code", - "code": self._app_auth_code - } - data = self.build_body( - "alipay.open.auth.token.app", - biz_content, - ) - response_type = "alipay_open_auth_token_app_response" - return 
self.verified_sync_response(data, response_type) - - def api_alipay_open_auth_token_app_query(self): - biz_content = {"app_auth_token": self.app_auth_token} - data = self.build_body( - "alipay.open.auth.token.app.query", - biz_content, - ) - response_type = "alipay_open_auth_token_app_query_response" - return self.verified_sync_response(data, response_type) diff --git a/venv/Lib/site-packages/alipay/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/alipay/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 7422f87..0000000 Binary files a/venv/Lib/site-packages/alipay/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alipay/__pycache__/compat.cpython-312.pyc b/venv/Lib/site-packages/alipay/__pycache__/compat.cpython-312.pyc deleted file mode 100644 index 5f84302..0000000 Binary files a/venv/Lib/site-packages/alipay/__pycache__/compat.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alipay/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/alipay/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 51ce22f..0000000 Binary files a/venv/Lib/site-packages/alipay/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alipay/__pycache__/loggers.cpython-312.pyc b/venv/Lib/site-packages/alipay/__pycache__/loggers.cpython-312.pyc deleted file mode 100644 index 6017065..0000000 Binary files a/venv/Lib/site-packages/alipay/__pycache__/loggers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alipay/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/alipay/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 37efefa..0000000 Binary files a/venv/Lib/site-packages/alipay/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/alipay/compat.py b/venv/Lib/site-packages/alipay/compat.py deleted file mode 100644 index 914fb44..0000000 --- a/venv/Lib/site-packages/alipay/compat.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 -""" - compat.py - ~~~~~~~~~~ -""" -from urllib.parse import quote_plus -from urllib.request import urlopen -from base64 import decodebytes, encodebytes diff --git a/venv/Lib/site-packages/alipay/exceptions.py b/venv/Lib/site-packages/alipay/exceptions.py deleted file mode 100644 index 2e59f44..0000000 --- a/venv/Lib/site-packages/alipay/exceptions.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 -""" - exceptions.py - ~~~~~~~~~~ -""" - - -class AliPayException(Exception): - def __init__(self, code, message): - self.__code = code - self.__message = message - - def to_unicode(self): - return "AliPayException: code:{}, message:{}".format(self.__code, self.__message) - - def __str__(self): - return self.to_unicode() - - def __repr__(self): - return self.to_unicode() - - -class AliPayValidationError(Exception): - pass diff --git a/venv/Lib/site-packages/alipay/loggers.py b/venv/Lib/site-packages/alipay/loggers.py deleted file mode 100644 index 4a564ac..0000000 --- a/venv/Lib/site-packages/alipay/loggers.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging -import logging.config - -logging.config.dictConfig({ - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - }, - "handlers": { - "console": { - "level": "DEBUG", - "formatter": "standard", - "class": "logging.StreamHandler", - }, - }, - "loggers": { - 
"python-alipay-sdk": { - "handlers": ["console"], - "level": "DEBUG", - } - } -}) -logger = logging.getLogger("python-alipay-sdk") diff --git a/venv/Lib/site-packages/alipay/utils.py b/venv/Lib/site-packages/alipay/utils.py deleted file mode 100644 index 19871d8..0000000 --- a/venv/Lib/site-packages/alipay/utils.py +++ /dev/null @@ -1,9 +0,0 @@ -""" - alipay/utils.py - ~~~~~~~~~~ -""" - - -class AliPayConfig: - def __init__(self, timeout=15): - self.timeout = timeout diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/INSTALLER b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/METADATA b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/METADATA deleted file mode 100644 index a7e0cce..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/METADATA +++ /dev/null @@ -1,149 +0,0 @@ -Metadata-Version: 2.4 -Name: APScheduler -Version: 3.11.2 -Summary: In-process task scheduler with Cron-like capabilities -Author-email: Alex Grönholm -License: MIT -Project-URL: Documentation, https://apscheduler.readthedocs.io/en/3.x/ -Project-URL: Changelog, https://apscheduler.readthedocs.io/en/3.x/versionhistory.html -Project-URL: Source code, https://github.com/agronholm/apscheduler -Project-URL: Issue tracker, https://github.com/agronholm/apscheduler/issues -Keywords: scheduling,cron -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE.txt -Requires-Dist: tzlocal>=3.0 -Requires-Dist: backports.zoneinfo; python_version < "3.9" -Provides-Extra: etcd -Requires-Dist: etcd3; extra == "etcd" -Requires-Dist: protobuf<=3.21.0; extra == "etcd" -Provides-Extra: gevent -Requires-Dist: gevent; extra == "gevent" -Provides-Extra: mongodb -Requires-Dist: pymongo>=3.0; extra == "mongodb" -Provides-Extra: redis -Requires-Dist: redis>=3.0; extra == "redis" -Provides-Extra: rethinkdb -Requires-Dist: rethinkdb>=2.4.0; extra == "rethinkdb" -Provides-Extra: sqlalchemy -Requires-Dist: sqlalchemy>=1.4; extra == "sqlalchemy" -Provides-Extra: tornado -Requires-Dist: tornado>=4.3; extra == "tornado" -Provides-Extra: twisted -Requires-Dist: twisted; extra == "twisted" -Provides-Extra: zookeeper -Requires-Dist: kazoo; extra == "zookeeper" -Provides-Extra: test -Requires-Dist: APScheduler[etcd,mongodb,redis,rethinkdb,sqlalchemy,tornado,zookeeper]; extra == "test" -Requires-Dist: pytest; extra == "test" -Requires-Dist: pytest-timeout; extra == "test" -Requires-Dist: anyio>=4.5.2; extra == "test" -Requires-Dist: PySide6; (platform_python_implementation == "CPython" and python_version < "3.14") and extra == "test" -Requires-Dist: gevent; python_version < "3.14" and extra == "test" -Requires-Dist: pytz; extra == "test" -Requires-Dist: twisted; python_version < "3.14" and extra == "test" -Provides-Extra: 
doc -Requires-Dist: packaging; extra == "doc" -Requires-Dist: sphinx; extra == "doc" -Requires-Dist: sphinx-rtd-theme>=1.3.0; extra == "doc" -Dynamic: license-file - -.. image:: https://github.com/agronholm/apscheduler/workflows/Python%20codeqa/test/badge.svg?branch=3.x - :target: https://github.com/agronholm/apscheduler/actions?query=workflow%3A%22Python+codeqa%2Ftest%22+branch%3A3.x - :alt: Build Status -.. image:: https://coveralls.io/repos/github/agronholm/apscheduler/badge.svg?branch=3.x - :target: https://coveralls.io/github/agronholm/apscheduler?branch=3.x - :alt: Code Coverage -.. image:: https://readthedocs.org/projects/apscheduler/badge/?version=3.x - :target: https://apscheduler.readthedocs.io/en/master/?badge=3.x - :alt: Documentation - -Advanced Python Scheduler (APScheduler) is a Python library that lets you schedule your Python code -to be executed later, either just once or periodically. You can add new jobs or remove old ones on -the fly as you please. If you store your jobs in a database, they will also survive scheduler -restarts and maintain their state. When the scheduler is restarted, it will then run all the jobs -it should have run while it was offline [#f1]_. - -Among other things, APScheduler can be used as a cross-platform, application specific replacement -to platform specific schedulers, such as the cron daemon or the Windows task scheduler. Please -note, however, that APScheduler is **not** a daemon or service itself, nor does it come with any -command line tools. It is primarily meant to be run inside existing applications. That said, -APScheduler does provide some building blocks for you to build a scheduler service or to run a -dedicated scheduler process. - -APScheduler has three built-in scheduling systems you can use: - -* Cron-style scheduling (with optional start/end times) -* Interval-based execution (runs jobs on even intervals, with optional start/end times) -* One-off delayed execution (runs jobs once, on a set date/time) - -You can mix and match scheduling systems and the backends where the jobs are stored any way you -like. Supported backends for storing jobs include: - -* Memory -* `SQLAlchemy `_ (any RDBMS supported by SQLAlchemy works) -* `MongoDB `_ -* `Redis `_ -* `RethinkDB `_ -* `ZooKeeper `_ -* `Etcd `_ - -APScheduler also integrates with several common Python frameworks, like: - -* `asyncio `_ (:pep:`3156`) -* `gevent `_ -* `Tornado `_ -* `Twisted `_ -* `Qt `_ (using either - `PyQt `_ , - `PySide6 `_ , - `PySide2 `_ or - `PySide `_) - -There are third party solutions for integrating APScheduler with other frameworks: - -* `Django `_ -* `Flask `_ - - -.. [#f1] The cutoff period for this is also configurable. - - -Documentation -------------- - -Documentation can be found `here `_. - - -Source ------- - -The source can be browsed at `Github `_. - - -Reporting bugs --------------- - -A `bug tracker `_ is provided by Github. 
- - -Getting help ------------- - -If you have problems or other questions, you can either: - -* Ask in the `apscheduler `_ room on Gitter -* Ask on the `APScheduler GitHub discussion forum `_, or -* Ask on `StackOverflow `_ and tag your - question with the ``apscheduler`` tag diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/RECORD b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/RECORD deleted file mode 100644 index 81e345d..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/RECORD +++ /dev/null @@ -1,86 +0,0 @@ -apscheduler-3.11.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -apscheduler-3.11.2.dist-info/METADATA,sha256=isrjIgfQXa2C9Ky7hBjOtp21BjnfMw4nIUkd6JeXbgA,6443 -apscheduler-3.11.2.dist-info/RECORD,, -apscheduler-3.11.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler-3.11.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -apscheduler-3.11.2.dist-info/entry_points.txt,sha256=HSDTxgulLTgymfXK2UNCPP1ib5rlQSFgZJEg72vto3s,1181 -apscheduler-3.11.2.dist-info/licenses/LICENSE.txt,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130 -apscheduler-3.11.2.dist-info/top_level.txt,sha256=O3oMCWxG-AHkecUoO6Ze7-yYjWrttL95uHO8-RFdYvE,12 -apscheduler/__init__.py,sha256=hOpI9oJuk5l5I_VtdsHPous2Qr-ZDX573e7NaYRWFUs,380 -apscheduler/__pycache__/__init__.cpython-312.pyc,, -apscheduler/__pycache__/events.cpython-312.pyc,, -apscheduler/__pycache__/job.cpython-312.pyc,, -apscheduler/__pycache__/util.cpython-312.pyc,, -apscheduler/events.py,sha256=a6bVF-idiP_sCwkhXad421ChE0B8tV7zJEOolV3r7j4,3583 -apscheduler/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler/executors/__pycache__/__init__.cpython-312.pyc,, -apscheduler/executors/__pycache__/asyncio.cpython-312.pyc,, -apscheduler/executors/__pycache__/base.cpython-312.pyc,, -apscheduler/executors/__pycache__/debug.cpython-312.pyc,, -apscheduler/executors/__pycache__/gevent.cpython-312.pyc,, -apscheduler/executors/__pycache__/pool.cpython-312.pyc,, -apscheduler/executors/__pycache__/tornado.cpython-312.pyc,, -apscheduler/executors/__pycache__/twisted.cpython-312.pyc,, -apscheduler/executors/asyncio.py,sha256=g0ArcxefoTnEqtyr_IRc-M3dcj0bhuvHcxwRp2s3nDE,1768 -apscheduler/executors/base.py,sha256=Cug2WKRhcJIGJP90EfsjrKUIXlox_azq0fn_SzmcLtE,7114 -apscheduler/executors/debug.py,sha256=15_ogSBzl8RRCfBYDnkIV2uMH8cLk1KImYmBa_NVGpc,573 -apscheduler/executors/gevent.py,sha256=_ZFpbn7-tH5_lAeL4sxEyPhxyUTtUUSrH8s42EHGQ2w,761 -apscheduler/executors/pool.py,sha256=q_shxnvXLjdcwhtKyPvQSYngOjAeKQO8KCvZeb19RSQ,2683 -apscheduler/executors/tornado.py,sha256=lb6mshRj7GMLz3d8StwESnlZsAfrNmW78Wokcg__Lk8,1581 -apscheduler/executors/twisted.py,sha256=YUEDnaPbP_M0lXCmNAW_yPiLKwbO9vD3KMiBFQ2D4h0,726 -apscheduler/job.py,sha256=tudKpRxBn9BBFCIEHWrHJPK2WctGDmFawvt1imd0wmA,11235 -apscheduler/jobstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler/jobstores/__pycache__/__init__.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/base.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/etcd.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/memory.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/mongodb.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/redis.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/rethinkdb.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/sqlalchemy.cpython-312.pyc,, -apscheduler/jobstores/__pycache__/zookeeper.cpython-312.pyc,, 
-apscheduler/jobstores/base.py,sha256=ZDOgMtHLaF3TPUOQwmkBIDcpnHU0aUhtzZOGmMGaJn8,4416 -apscheduler/jobstores/etcd.py,sha256=O7C40CGlnn3cPinchJEs2sWcqnzEZQt3c6WnhgPRSdQ,5703 -apscheduler/jobstores/memory.py,sha256=HmOs7FbrOoQNywz-yfq2v5esGDHeKE_mvMNFDeGZ31E,3595 -apscheduler/jobstores/mongodb.py,sha256=mCIwcKiWcicM2qdAQn51QBEkGlNfbk_73Oi6soShNcM,5319 -apscheduler/jobstores/redis.py,sha256=El-H2eUfZjPZca7vwy10B9gZv5RzRucbkDu7Ti07vyM,5482 -apscheduler/jobstores/rethinkdb.py,sha256=SdT3jPrhxnmBoL4IClDfHsez4DpREnYEsHndIP8idHA,5922 -apscheduler/jobstores/sqlalchemy.py,sha256=2jaq3ZcoXEyIqqvYf3eloaP-_ZAqojt0EuWWvQ2LMRg,6799 -apscheduler/jobstores/zookeeper.py,sha256=32bEZNJNniPwmYXBITZ3eSRBq6hipqPKDqh4q4NiZvc,6439 -apscheduler/schedulers/__init__.py,sha256=POEy7n3BZgccZ44atMvxj0w5PejN55g-55NduZUZFqQ,406 -apscheduler/schedulers/__pycache__/__init__.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/asyncio.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/background.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/base.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/blocking.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/gevent.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/qt.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/tornado.cpython-312.pyc,, -apscheduler/schedulers/__pycache__/twisted.cpython-312.pyc,, -apscheduler/schedulers/asyncio.py,sha256=JZYgLBCgTn6EGgdV8vDzjvqKrizCoqM3shjNivIEs_M,2154 -apscheduler/schedulers/background.py,sha256=sRNrikUhpyblvA5RCpKC5Djvf3-b6NHvnXTblxlqIaM,1476 -apscheduler/schedulers/base.py,sha256=hvnvcI1DOC9bmvrFk8UiLlGxsXKHtMpEHLDEe63mQ_s,48342 -apscheduler/schedulers/blocking.py,sha256=138rf9X1C-ZxWVTVAO_pyfYMBKhkqO2qZqJoyGInv5c,872 -apscheduler/schedulers/gevent.py,sha256=zS5nHQUkQMrn0zKOaFnUyiG0fXTE01yE9GXVNCdrd90,987 -apscheduler/schedulers/qt.py,sha256=6BHOCi8e6L3wXTWwQDjNl8w_GJF_dY6iiO3gEtCJgmI,1241 -apscheduler/schedulers/tornado.py,sha256=uD5el752Qpbnn6KbImn65600nSUYQwGAPKjJ-L4Qzow,2166 -apscheduler/schedulers/twisted.py,sha256=qzFnW36Qow8dK4rp1-iPZQD4jDG9EN7bvXIIWbp3vm4,1977 -apscheduler/triggers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler/triggers/__pycache__/__init__.cpython-312.pyc,, -apscheduler/triggers/__pycache__/base.cpython-312.pyc,, -apscheduler/triggers/__pycache__/calendarinterval.cpython-312.pyc,, -apscheduler/triggers/__pycache__/combining.cpython-312.pyc,, -apscheduler/triggers/__pycache__/date.cpython-312.pyc,, -apscheduler/triggers/__pycache__/interval.cpython-312.pyc,, -apscheduler/triggers/base.py,sha256=8iKllubaexF456IK9jfi56QTrVIfDDPLavUc8wTlnL0,1333 -apscheduler/triggers/calendarinterval.py,sha256=tuyAhOvxbQUaPCycFK4s24kLIaH2woaC1SSmq7TZHLY,7411 -apscheduler/triggers/combining.py,sha256=OVUUVJb_PYnJQZP9Ij-9FtmX5vRo_FYIirCHkBNzCL0,4044 -apscheduler/triggers/cron/__init__.py,sha256=MCYF0Xdx7l3HImBM3eBS1y75snzkOHdh0gurISz_Qcw,10082 -apscheduler/triggers/cron/__pycache__/__init__.cpython-312.pyc,, -apscheduler/triggers/cron/__pycache__/expressions.cpython-312.pyc,, -apscheduler/triggers/cron/__pycache__/fields.cpython-312.pyc,, -apscheduler/triggers/cron/expressions.py,sha256=fN3heBgp6SEeFnwr1bfc6BfBO0sbwzxwaxvKv3KEuSo,9044 -apscheduler/triggers/cron/fields.py,sha256=itUk3ZQ57a_2BQtssXNjjC3ogCoCrtxjQNFWjCT3ROs,3618 -apscheduler/triggers/date.py,sha256=ZS_TMjUCSldqlZsUUjlwvuWeMKeDXqqAMcZVFGYpam4,1698 -apscheduler/triggers/interval.py,sha256=R0rAtygqUX3omGpKV6DxEQB5BGQrDNBllN4tAnHFRSQ,4674 -apscheduler/util.py,sha256=-X3qCy9dqdb_y7wY6_p8ANz29qRcA__-60KsS7aBPJo,14223 
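(Editor's note, not part of the diff: the APScheduler METADATA removed above describes three built-in scheduling systems, cron-style, interval-based, and one-off date triggers. As a purely illustrative sketch rather than code from this repository, a background scheduler using the interval trigger could be wired up as follows; the job function name is hypothetical.)

from apscheduler.schedulers.background import BackgroundScheduler

def close_stale_orders():
    # Hypothetical periodic task; stands in for whatever job the application would schedule.
    pass

scheduler = BackgroundScheduler()
# 'interval' is one of the trigger aliases shipped with APScheduler;
# 'cron' and 'date' work the same way with their own keyword arguments.
scheduler.add_job(close_stale_orders, 'interval', minutes=5)
scheduler.start()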
diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/REQUESTED b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/WHEEL b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/entry_points.txt b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/entry_points.txt deleted file mode 100644 index 5890251..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/entry_points.txt +++ /dev/null @@ -1,25 +0,0 @@ -[apscheduler.executors] -asyncio = apscheduler.executors.asyncio:AsyncIOExecutor -debug = apscheduler.executors.debug:DebugExecutor -gevent = apscheduler.executors.gevent:GeventExecutor -processpool = apscheduler.executors.pool:ProcessPoolExecutor -threadpool = apscheduler.executors.pool:ThreadPoolExecutor -tornado = apscheduler.executors.tornado:TornadoExecutor -twisted = apscheduler.executors.twisted:TwistedExecutor - -[apscheduler.jobstores] -etcd = apscheduler.jobstores.etcd:EtcdJobStore -memory = apscheduler.jobstores.memory:MemoryJobStore -mongodb = apscheduler.jobstores.mongodb:MongoDBJobStore -redis = apscheduler.jobstores.redis:RedisJobStore -rethinkdb = apscheduler.jobstores.rethinkdb:RethinkDBJobStore -sqlalchemy = apscheduler.jobstores.sqlalchemy:SQLAlchemyJobStore -zookeeper = apscheduler.jobstores.zookeeper:ZooKeeperJobStore - -[apscheduler.triggers] -and = apscheduler.triggers.combining:AndTrigger -calendarinterval = apscheduler.triggers.calendarinterval:CalendarIntervalTrigger -cron = apscheduler.triggers.cron:CronTrigger -date = apscheduler.triggers.date:DateTrigger -interval = apscheduler.triggers.interval:IntervalTrigger -or = apscheduler.triggers.combining:OrTrigger diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/licenses/LICENSE.txt b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 07806f8..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,19 +0,0 @@ -This is the MIT license: http://www.opensource.org/licenses/mit-license.php - -Copyright (c) Alex Grönholm - -Permission is hereby granted, free of charge, to any person obtaining a copy of this -software and associated documentation files (the "Software"), to deal in the Software -without restriction, including without limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons -to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or -substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR -PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE -FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
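(Editor's note, not part of the diff: the entry_points.txt removed just above maps alias strings such as 'threadpool', 'memory', and 'cron' to executor, job store, and trigger classes; those aliases are what the scheduler constructor and add_job accept. A hedged sketch of configuring a scheduler with explicit executor and job store instances, with all values chosen for illustration only.)

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.jobstores.memory import MemoryJobStore

scheduler = BackgroundScheduler(
    jobstores={'default': MemoryJobStore()},          # class behind the 'memory' entry point
    executors={'default': ThreadPoolExecutor(10)},    # class behind the 'threadpool' entry point
    job_defaults={'coalesce': True, 'max_instances': 1},
)
scheduler.add_job(lambda: None, 'cron', hour=3)       # 'cron' trigger alias from the entry points
scheduler.start()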
diff --git a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/top_level.txt b/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/top_level.txt deleted file mode 100644 index d31d10d..0000000 --- a/venv/Lib/site-packages/apscheduler-3.11.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -apscheduler diff --git a/venv/Lib/site-packages/apscheduler/__init__.py b/venv/Lib/site-packages/apscheduler/__init__.py deleted file mode 100644 index 475135f..0000000 --- a/venv/Lib/site-packages/apscheduler/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -import importlib.metadata as importlib_metadata -import sys - -try: - release = importlib_metadata.version("APScheduler").split("-")[0] -except importlib_metadata.PackageNotFoundError: - release = "3.5.0" - -version_info = tuple(int(x) if x.isdigit() else x for x in release.split(".")) -version = __version__ = ".".join(str(x) for x in version_info[:3]) -del sys, importlib_metadata diff --git a/venv/Lib/site-packages/apscheduler/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d6479ae..0000000 Binary files a/venv/Lib/site-packages/apscheduler/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/__pycache__/events.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/__pycache__/events.cpython-312.pyc deleted file mode 100644 index 646a9c2..0000000 Binary files a/venv/Lib/site-packages/apscheduler/__pycache__/events.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/__pycache__/job.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/__pycache__/job.cpython-312.pyc deleted file mode 100644 index 559191d..0000000 Binary files a/venv/Lib/site-packages/apscheduler/__pycache__/job.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/__pycache__/util.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/__pycache__/util.cpython-312.pyc deleted file mode 100644 index 82c416f..0000000 Binary files a/venv/Lib/site-packages/apscheduler/__pycache__/util.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/events.py b/venv/Lib/site-packages/apscheduler/events.py deleted file mode 100644 index a4c11aa..0000000 --- a/venv/Lib/site-packages/apscheduler/events.py +++ /dev/null @@ -1,134 +0,0 @@ -__all__ = ( - "EVENT_ALL", - "EVENT_ALL_JOBS_REMOVED", - "EVENT_EXECUTOR_ADDED", - "EVENT_EXECUTOR_REMOVED", - "EVENT_JOBSTORE_ADDED", - "EVENT_JOBSTORE_REMOVED", - "EVENT_JOB_ADDED", - "EVENT_JOB_ERROR", - "EVENT_JOB_EXECUTED", - "EVENT_JOB_MAX_INSTANCES", - "EVENT_JOB_MISSED", - "EVENT_JOB_MODIFIED", - "EVENT_JOB_REMOVED", - "EVENT_JOB_SUBMITTED", - "EVENT_SCHEDULER_PAUSED", - "EVENT_SCHEDULER_RESUMED", - "EVENT_SCHEDULER_SHUTDOWN", - "EVENT_SCHEDULER_STARTED", - "JobEvent", - "JobExecutionEvent", - "JobSubmissionEvent", - "SchedulerEvent", -) - - -EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2**0 -EVENT_SCHEDULER_SHUTDOWN = 2**1 -EVENT_SCHEDULER_PAUSED = 2**2 -EVENT_SCHEDULER_RESUMED = 2**3 -EVENT_EXECUTOR_ADDED = 2**4 -EVENT_EXECUTOR_REMOVED = 2**5 -EVENT_JOBSTORE_ADDED = 2**6 -EVENT_JOBSTORE_REMOVED = 2**7 -EVENT_ALL_JOBS_REMOVED = 2**8 -EVENT_JOB_ADDED = 2**9 -EVENT_JOB_REMOVED = 2**10 -EVENT_JOB_MODIFIED = 2**11 -EVENT_JOB_EXECUTED = 2**12 -EVENT_JOB_ERROR = 2**13 -EVENT_JOB_MISSED = 2**14 -EVENT_JOB_SUBMITTED = 2**15 -EVENT_JOB_MAX_INSTANCES = 2**16 -EVENT_ALL = ( - EVENT_SCHEDULER_STARTED - | EVENT_SCHEDULER_SHUTDOWN - | 
EVENT_SCHEDULER_PAUSED - | EVENT_SCHEDULER_RESUMED - | EVENT_EXECUTOR_ADDED - | EVENT_EXECUTOR_REMOVED - | EVENT_JOBSTORE_ADDED - | EVENT_JOBSTORE_REMOVED - | EVENT_ALL_JOBS_REMOVED - | EVENT_JOB_ADDED - | EVENT_JOB_REMOVED - | EVENT_JOB_MODIFIED - | EVENT_JOB_EXECUTED - | EVENT_JOB_ERROR - | EVENT_JOB_MISSED - | EVENT_JOB_SUBMITTED - | EVENT_JOB_MAX_INSTANCES -) - - -class SchedulerEvent: - """ - An event that concerns the scheduler itself. - - :ivar code: the type code of this event - :ivar alias: alias of the job store or executor that was added or removed (if applicable) - """ - - def __init__(self, code, alias=None): - super().__init__() - self.code = code - self.alias = alias - - def __repr__(self): - return f"" - - -class JobEvent(SchedulerEvent): - """ - An event that concerns a job. - - :ivar code: the type code of this event - :ivar job_id: identifier of the job in question - :ivar jobstore: alias of the job store containing the job in question - """ - - def __init__(self, code, job_id, jobstore): - super().__init__(code) - self.code = code - self.job_id = job_id - self.jobstore = jobstore - - -class JobSubmissionEvent(JobEvent): - """ - An event that concerns the submission of a job to its executor. - - :ivar scheduled_run_times: a list of datetimes when the job was intended to run - """ - - def __init__(self, code, job_id, jobstore, scheduled_run_times): - super().__init__(code, job_id, jobstore) - self.scheduled_run_times = scheduled_run_times - - -class JobExecutionEvent(JobEvent): - """ - An event that concerns the running of a job within its executor. - - :ivar scheduled_run_time: the time when the job was scheduled to be run - :ivar retval: the return value of the successfully executed job - :ivar exception: the exception raised by the job - :ivar traceback: a formatted traceback for the exception - """ - - def __init__( - self, - code, - job_id, - jobstore, - scheduled_run_time, - retval=None, - exception=None, - traceback=None, - ): - super().__init__(code, job_id, jobstore) - self.scheduled_run_time = scheduled_run_time - self.retval = retval - self.exception = exception - self.traceback = traceback diff --git a/venv/Lib/site-packages/apscheduler/executors/__init__.py b/venv/Lib/site-packages/apscheduler/executors/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 8a9d198..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/asyncio.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/asyncio.cpython-312.pyc deleted file mode 100644 index 5f07578..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/asyncio.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/base.cpython-312.pyc deleted file mode 100644 index f6c1d3f..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/debug.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/debug.cpython-312.pyc deleted file mode 100644 
index 9d6ec0f..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/debug.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/gevent.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/gevent.cpython-312.pyc deleted file mode 100644 index c742c56..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/gevent.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/pool.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/pool.cpython-312.pyc deleted file mode 100644 index 05f9534..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/pool.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/tornado.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/tornado.cpython-312.pyc deleted file mode 100644 index 371c2e8..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/tornado.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/__pycache__/twisted.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/executors/__pycache__/twisted.cpython-312.pyc deleted file mode 100644 index 4358a7a..0000000 Binary files a/venv/Lib/site-packages/apscheduler/executors/__pycache__/twisted.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/executors/asyncio.py b/venv/Lib/site-packages/apscheduler/executors/asyncio.py deleted file mode 100644 index cc7eb65..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/asyncio.py +++ /dev/null @@ -1,52 +0,0 @@ -import sys - -from apscheduler.executors.base import BaseExecutor, run_coroutine_job, run_job -from apscheduler.util import iscoroutinefunction_partial - - -class AsyncIOExecutor(BaseExecutor): - """ - Runs jobs in the default executor of the event loop. - - If the job function is a native coroutine function, it is scheduled to be run directly in the - event loop as soon as possible. All other functions are run in the event loop's default - executor which is usually a thread pool. 
- - Plugin alias: ``asyncio`` - """ - - def start(self, scheduler, alias): - super().start(scheduler, alias) - self._eventloop = scheduler._eventloop - self._pending_futures = set() - - def shutdown(self, wait=True): - # There is no way to honor wait=True without converting this method into a coroutine method - for f in self._pending_futures: - if not f.done(): - f.cancel() - - self._pending_futures.clear() - - def _do_submit_job(self, job, run_times): - def callback(f): - self._pending_futures.discard(f) - try: - events = f.result() - except BaseException: - self._run_job_error(job.id, *sys.exc_info()[1:]) - else: - self._run_job_success(job.id, events) - - if iscoroutinefunction_partial(job.func): - coro = run_coroutine_job( - job, job._jobstore_alias, run_times, self._logger.name - ) - f = self._eventloop.create_task(coro) - else: - f = self._eventloop.run_in_executor( - None, run_job, job, job._jobstore_alias, run_times, self._logger.name - ) - - f.add_done_callback(callback) - self._pending_futures.add(f) diff --git a/venv/Lib/site-packages/apscheduler/executors/base.py b/venv/Lib/site-packages/apscheduler/executors/base.py deleted file mode 100644 index 6decbb5..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/base.py +++ /dev/null @@ -1,205 +0,0 @@ -import logging -import sys -import traceback -from abc import ABCMeta, abstractmethod -from collections import defaultdict -from datetime import datetime, timedelta, timezone -from traceback import format_tb - -from apscheduler.events import ( - EVENT_JOB_ERROR, - EVENT_JOB_EXECUTED, - EVENT_JOB_MISSED, - JobExecutionEvent, -) - - -class MaxInstancesReachedError(Exception): - def __init__(self, job): - super().__init__( - f'Job "{job.id}" has already reached its maximum number of instances ' - f"({job.max_instances})" - ) - - -class BaseExecutor(metaclass=ABCMeta): - """Abstract base class that defines the interface that every executor must implement.""" - - _scheduler = None - _lock = None - _logger = logging.getLogger("apscheduler.executors") - - def __init__(self): - super().__init__() - self._instances = defaultdict(lambda: 0) - - def start(self, scheduler, alias): - """ - Called by the scheduler when the scheduler is being started or when the executor is being - added to an already running scheduler. - - :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting - this executor - :param str|unicode alias: alias of this executor as it was assigned to the scheduler - - """ - self._scheduler = scheduler - self._lock = scheduler._create_lock() - self._logger = logging.getLogger(f"apscheduler.executors.{alias}") - - def shutdown(self, wait=True): - """ - Shuts down this executor. - - :param bool wait: ``True`` to wait until all submitted jobs - have been executed - """ - - def submit_job(self, job, run_times): - """ - Submits job for execution. 
- - :param Job job: job to execute - :param list[datetime] run_times: list of datetimes specifying - when the job should have been run - :raises MaxInstancesReachedError: if the maximum number of - allowed instances for this job has been reached - - """ - assert self._lock is not None, "This executor has not been started yet" - with self._lock: - if self._instances[job.id] >= job.max_instances: - raise MaxInstancesReachedError(job) - - self._do_submit_job(job, run_times) - self._instances[job.id] += 1 - - @abstractmethod - def _do_submit_job(self, job, run_times): - """Performs the actual task of scheduling `run_job` to be called.""" - - def _run_job_success(self, job_id, events): - """ - Called by the executor with the list of generated events when :func:`run_job` has been - successfully called. - - """ - with self._lock: - self._instances[job_id] -= 1 - if self._instances[job_id] == 0: - del self._instances[job_id] - - for event in events: - self._scheduler._dispatch_event(event) - - def _run_job_error(self, job_id, exc, traceback=None): - """Called by the executor with the exception if there is an error calling `run_job`.""" - with self._lock: - self._instances[job_id] -= 1 - if self._instances[job_id] == 0: - del self._instances[job_id] - - exc_info = (exc.__class__, exc, traceback) - self._logger.error("Error running job %s", job_id, exc_info=exc_info) - - -def run_job(job, jobstore_alias, run_times, logger_name): - """ - Called by executors to run the job. Returns a list of scheduler events to be dispatched by the - scheduler. - - """ - events = [] - logger = logging.getLogger(logger_name) - for run_time in run_times: - # See if the job missed its run time window, and handle - # possible misfires accordingly - if job.misfire_grace_time is not None: - difference = datetime.now(timezone.utc) - run_time - grace_time = timedelta(seconds=job.misfire_grace_time) - if difference > grace_time: - events.append( - JobExecutionEvent( - EVENT_JOB_MISSED, job.id, jobstore_alias, run_time - ) - ) - logger.warning('Run time of job "%s" was missed by %s', job, difference) - continue - - logger.info('Running job "%s" (scheduled at %s)', job, run_time) - try: - retval = job.func(*job.args, **job.kwargs) - except BaseException: - exc, tb = sys.exc_info()[1:] - formatted_tb = "".join(format_tb(tb)) - events.append( - JobExecutionEvent( - EVENT_JOB_ERROR, - job.id, - jobstore_alias, - run_time, - exception=exc, - traceback=formatted_tb, - ) - ) - logger.exception('Job "%s" raised an exception', job) - - # This is to prevent cyclic references that would lead to memory leaks - traceback.clear_frames(tb) - del tb - else: - events.append( - JobExecutionEvent( - EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, retval=retval - ) - ) - logger.info('Job "%s" executed successfully', job) - - return events - - -async def run_coroutine_job(job, jobstore_alias, run_times, logger_name): - """Coroutine version of run_job().""" - events = [] - logger = logging.getLogger(logger_name) - for run_time in run_times: - # See if the job missed its run time window, and handle possible misfires accordingly - if job.misfire_grace_time is not None: - difference = datetime.now(timezone.utc) - run_time - grace_time = timedelta(seconds=job.misfire_grace_time) - if difference > grace_time: - events.append( - JobExecutionEvent( - EVENT_JOB_MISSED, job.id, jobstore_alias, run_time - ) - ) - logger.warning('Run time of job "%s" was missed by %s', job, difference) - continue - - logger.info('Running job "%s" (scheduled at %s)', job, 
run_time) - try: - retval = await job.func(*job.args, **job.kwargs) - except BaseException: - exc, tb = sys.exc_info()[1:] - formatted_tb = "".join(format_tb(tb)) - events.append( - JobExecutionEvent( - EVENT_JOB_ERROR, - job.id, - jobstore_alias, - run_time, - exception=exc, - traceback=formatted_tb, - ) - ) - logger.exception('Job "%s" raised an exception', job) - traceback.clear_frames(tb) - else: - events.append( - JobExecutionEvent( - EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, retval=retval - ) - ) - logger.info('Job "%s" executed successfully', job) - - return events diff --git a/venv/Lib/site-packages/apscheduler/executors/debug.py b/venv/Lib/site-packages/apscheduler/executors/debug.py deleted file mode 100644 index ac739ae..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/debug.py +++ /dev/null @@ -1,20 +0,0 @@ -import sys - -from apscheduler.executors.base import BaseExecutor, run_job - - -class DebugExecutor(BaseExecutor): - """ - A special executor that executes the target callable directly instead of deferring it to a - thread or process. - - Plugin alias: ``debug`` - """ - - def _do_submit_job(self, job, run_times): - try: - events = run_job(job, job._jobstore_alias, run_times, self._logger.name) - except BaseException: - self._run_job_error(job.id, *sys.exc_info()[1:]) - else: - self._run_job_success(job.id, events) diff --git a/venv/Lib/site-packages/apscheduler/executors/gevent.py b/venv/Lib/site-packages/apscheduler/executors/gevent.py deleted file mode 100644 index 385be40..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/gevent.py +++ /dev/null @@ -1,29 +0,0 @@ -import sys - -from apscheduler.executors.base import BaseExecutor, run_job - -try: - import gevent -except ImportError as exc: # pragma: nocover - raise ImportError("GeventExecutor requires gevent installed") from exc - - -class GeventExecutor(BaseExecutor): - """ - Runs jobs as greenlets. - - Plugin alias: ``gevent`` - """ - - def _do_submit_job(self, job, run_times): - def callback(greenlet): - try: - events = greenlet.get() - except BaseException: - self._run_job_error(job.id, *sys.exc_info()[1:]) - else: - self._run_job_success(job.id, events) - - gevent.spawn( - run_job, job, job._jobstore_alias, run_times, self._logger.name - ).link(callback) diff --git a/venv/Lib/site-packages/apscheduler/executors/pool.py b/venv/Lib/site-packages/apscheduler/executors/pool.py deleted file mode 100644 index 166de7c..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/pool.py +++ /dev/null @@ -1,82 +0,0 @@ -import concurrent.futures -import multiprocessing -from abc import abstractmethod -from concurrent.futures.process import BrokenProcessPool - -from apscheduler.executors.base import BaseExecutor, run_job - - -class BasePoolExecutor(BaseExecutor): - @abstractmethod - def __init__(self, pool): - super().__init__() - self._pool = pool - - def _do_submit_job(self, job, run_times): - def callback(f): - exc, tb = ( - f.exception_info() - if hasattr(f, "exception_info") - else (f.exception(), getattr(f.exception(), "__traceback__", None)) - ) - if exc: - self._run_job_error(job.id, exc, tb) - else: - self._run_job_success(job.id, f.result()) - - f = self._pool.submit( - run_job, job, job._jobstore_alias, run_times, self._logger.name - ) - f.add_done_callback(callback) - - def shutdown(self, wait=True): - self._pool.shutdown(wait) - - -class ThreadPoolExecutor(BasePoolExecutor): - """ - An executor that runs jobs in a concurrent.futures thread pool. 
- - Plugin alias: ``threadpool`` - - :param max_workers: the maximum number of spawned threads. - :param pool_kwargs: dict of keyword arguments to pass to the underlying - ThreadPoolExecutor constructor - """ - - def __init__(self, max_workers=10, pool_kwargs=None): - pool_kwargs = pool_kwargs or {} - pool = concurrent.futures.ThreadPoolExecutor(int(max_workers), **pool_kwargs) - super().__init__(pool) - - -class ProcessPoolExecutor(BasePoolExecutor): - """ - An executor that runs jobs in a concurrent.futures process pool. - - Plugin alias: ``processpool`` - - :param max_workers: the maximum number of spawned processes. - :param pool_kwargs: dict of keyword arguments to pass to the underlying - ProcessPoolExecutor constructor - """ - - def __init__(self, max_workers=10, pool_kwargs=None): - self.pool_kwargs = pool_kwargs or {} - self.pool_kwargs.setdefault("mp_context", multiprocessing.get_context("spawn")) - pool = concurrent.futures.ProcessPoolExecutor( - int(max_workers), **self.pool_kwargs - ) - super().__init__(pool) - - def _do_submit_job(self, job, run_times): - try: - super()._do_submit_job(job, run_times) - except BrokenProcessPool: - self._logger.warning( - "Process pool is broken; replacing pool with a fresh instance" - ) - self._pool = self._pool.__class__( - self._pool._max_workers, **self.pool_kwargs - ) - super()._do_submit_job(job, run_times) diff --git a/venv/Lib/site-packages/apscheduler/executors/tornado.py b/venv/Lib/site-packages/apscheduler/executors/tornado.py deleted file mode 100644 index 46789bd..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/tornado.py +++ /dev/null @@ -1,49 +0,0 @@ -import sys -from concurrent.futures import ThreadPoolExecutor - -from tornado.gen import convert_yielded - -from apscheduler.executors.base import BaseExecutor, run_coroutine_job, run_job -from apscheduler.util import iscoroutinefunction_partial - - -class TornadoExecutor(BaseExecutor): - """ - Runs jobs either in a thread pool or directly on the I/O loop. - - If the job function is a native coroutine function, it is scheduled to be run directly in the - I/O loop as soon as possible. All other functions are run in a thread pool. - - Plugin alias: ``tornado`` - - :param int max_workers: maximum number of worker threads in the thread pool - """ - - def __init__(self, max_workers=10): - super().__init__() - self.executor = ThreadPoolExecutor(max_workers) - - def start(self, scheduler, alias): - super().start(scheduler, alias) - self._ioloop = scheduler._ioloop - - def _do_submit_job(self, job, run_times): - def callback(f): - try: - events = f.result() - except BaseException: - self._run_job_error(job.id, *sys.exc_info()[1:]) - else: - self._run_job_success(job.id, events) - - if iscoroutinefunction_partial(job.func): - f = run_coroutine_job( - job, job._jobstore_alias, run_times, self._logger.name - ) - else: - f = self.executor.submit( - run_job, job, job._jobstore_alias, run_times, self._logger.name - ) - - f = convert_yielded(f) - f.add_done_callback(callback) diff --git a/venv/Lib/site-packages/apscheduler/executors/twisted.py b/venv/Lib/site-packages/apscheduler/executors/twisted.py deleted file mode 100644 index 710b20a..0000000 --- a/venv/Lib/site-packages/apscheduler/executors/twisted.py +++ /dev/null @@ -1,24 +0,0 @@ -from apscheduler.executors.base import BaseExecutor, run_job - - -class TwistedExecutor(BaseExecutor): - """ - Runs jobs in the reactor's thread pool. 
- - Plugin alias: ``twisted`` - """ - - def start(self, scheduler, alias): - super().start(scheduler, alias) - self._reactor = scheduler._reactor - - def _do_submit_job(self, job, run_times): - def callback(success, result): - if success: - self._run_job_success(job.id, result) - else: - self._run_job_error(job.id, result.value, result.tb) - - self._reactor.getThreadPool().callInThreadWithCallback( - callback, run_job, job, job._jobstore_alias, run_times, self._logger.name - ) diff --git a/venv/Lib/site-packages/apscheduler/job.py b/venv/Lib/site-packages/apscheduler/job.py deleted file mode 100644 index 38ac305..0000000 --- a/venv/Lib/site-packages/apscheduler/job.py +++ /dev/null @@ -1,333 +0,0 @@ -from collections.abc import Iterable, Mapping -from datetime import timezone -from inspect import isclass, ismethod -from uuid import uuid4 - -from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import ( - check_callable_args, - convert_to_datetime, - datetime_repr, - get_callable_name, - obj_to_ref, - ref_to_obj, -) - -UTC = timezone.utc - - -class Job: - """ - Contains the options given when scheduling callables and its current schedule and other state. - This class should never be instantiated by the user. - - :var str id: the unique identifier of this job - :var str name: the description of this job - :var func: the callable to execute - :var tuple|list args: positional arguments to the callable - :var dict kwargs: keyword arguments to the callable - :var bool coalesce: whether to only run the job once when several run times are due - :var trigger: the trigger object that controls the schedule of this job - :var str executor: the name of the executor that will run this job - :var int misfire_grace_time: the time (in seconds) how much this job's execution is allowed to - be late (``None`` means "allow the job to run no matter how late it is") - :var int max_instances: the maximum number of concurrently executing instances allowed for this - job - :var datetime.datetime next_run_time: the next scheduled run time of this job - - .. note:: - The ``misfire_grace_time`` has some non-obvious effects on job execution. See the - :ref:`missed-job-executions` section in the documentation for an in-depth explanation. - """ - - __slots__ = ( - "__weakref__", - "_jobstore_alias", - "_scheduler", - "args", - "coalesce", - "executor", - "func", - "func_ref", - "id", - "kwargs", - "max_instances", - "misfire_grace_time", - "name", - "next_run_time", - "trigger", - ) - - def __init__(self, scheduler, id=None, **kwargs): - super().__init__() - self._scheduler = scheduler - self._jobstore_alias = None - self._modify(id=id or uuid4().hex, **kwargs) - - def modify(self, **changes): - """ - Makes the given changes to this job and saves it in the associated job store. - - Accepted keyword arguments are the same as the variables on this class. - - .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.modify_job` - - :return Job: this job instance - - """ - self._scheduler.modify_job(self.id, self._jobstore_alias, **changes) - return self - - def reschedule(self, trigger, **trigger_args): - """ - Shortcut for switching the trigger on this job. - - .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.reschedule_job` - - :return Job: this job instance - - """ - self._scheduler.reschedule_job( - self.id, self._jobstore_alias, trigger, **trigger_args - ) - return self - - def pause(self): - """ - Temporarily suspend the execution of this job. - - .. 
seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.pause_job` - - :return Job: this job instance - - """ - self._scheduler.pause_job(self.id, self._jobstore_alias) - return self - - def resume(self): - """ - Resume the schedule of this job if previously paused. - - .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.resume_job` - - :return Job: this job instance - - """ - self._scheduler.resume_job(self.id, self._jobstore_alias) - return self - - def remove(self): - """ - Unschedules this job and removes it from its associated job store. - - .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.remove_job` - - """ - self._scheduler.remove_job(self.id, self._jobstore_alias) - - @property - def pending(self): - """ - Returns ``True`` if the referenced job is still waiting to be added to its designated job - store. - - """ - return self._jobstore_alias is None - - # - # Private API - # - - def _get_run_times(self, now): - """ - Computes the scheduled run times between ``next_run_time`` and ``now`` (inclusive). - - :type now: datetime.datetime - :rtype: list[datetime.datetime] - - """ - run_times = [] - next_run_time = self.next_run_time - while next_run_time and next_run_time.astimezone(UTC) <= now.astimezone(UTC): - run_times.append(next_run_time) - next_run_time = self.trigger.get_next_fire_time(next_run_time, now) - - return run_times - - def _modify(self, **changes): - """ - Validates the changes to the Job and makes the modifications if and only if all of them - validate. - - """ - approved = {} - - if "id" in changes: - value = changes.pop("id") - if not isinstance(value, str): - raise TypeError("id must be a nonempty string") - if hasattr(self, "id"): - raise ValueError("The job ID may not be changed") - approved["id"] = value - - if "func" in changes or "args" in changes or "kwargs" in changes: - func = changes.pop("func") if "func" in changes else self.func - args = changes.pop("args") if "args" in changes else self.args - kwargs = changes.pop("kwargs") if "kwargs" in changes else self.kwargs - - if isinstance(func, str): - func_ref = func - func = ref_to_obj(func) - elif callable(func): - try: - func_ref = obj_to_ref(func) - except ValueError: - # If this happens, this Job won't be serializable - func_ref = None - else: - raise TypeError("func must be a callable or a textual reference to one") - - if not hasattr(self, "name") and changes.get("name", None) is None: - changes["name"] = get_callable_name(func) - - if isinstance(args, str) or not isinstance(args, Iterable): - raise TypeError("args must be a non-string iterable") - if isinstance(kwargs, str) or not isinstance(kwargs, Mapping): - raise TypeError("kwargs must be a dict-like object") - - check_callable_args(func, args, kwargs) - - approved["func"] = func - approved["func_ref"] = func_ref - approved["args"] = args - approved["kwargs"] = kwargs - - if "name" in changes: - value = changes.pop("name") - if not value or not isinstance(value, str): - raise TypeError("name must be a nonempty string") - approved["name"] = value - - if "misfire_grace_time" in changes: - value = changes.pop("misfire_grace_time") - if value is not None and (not isinstance(value, int) or value <= 0): - raise TypeError( - "misfire_grace_time must be either None or a positive integer" - ) - approved["misfire_grace_time"] = value - - if "coalesce" in changes: - value = bool(changes.pop("coalesce")) - approved["coalesce"] = value - - if "max_instances" in changes: - value = changes.pop("max_instances") - if not 
isinstance(value, int) or value <= 0: - raise TypeError("max_instances must be a positive integer") - approved["max_instances"] = value - - if "trigger" in changes: - trigger = changes.pop("trigger") - if not isinstance(trigger, BaseTrigger): - raise TypeError( - f"Expected a trigger instance, got {trigger.__class__.__name__} instead" - ) - - approved["trigger"] = trigger - - if "executor" in changes: - value = changes.pop("executor") - if not isinstance(value, str): - raise TypeError("executor must be a string") - approved["executor"] = value - - if "next_run_time" in changes: - value = changes.pop("next_run_time") - approved["next_run_time"] = convert_to_datetime( - value, self._scheduler.timezone, "next_run_time" - ) - - if changes: - raise AttributeError( - "The following are not modifiable attributes of Job: {}".format( - ", ".join(changes) - ) - ) - - for key, value in approved.items(): - setattr(self, key, value) - - def __getstate__(self): - # Don't allow this Job to be serialized if the function reference could not be determined - if not self.func_ref: - raise ValueError( - f"This Job cannot be serialized since the reference to its callable ({self.func!r}) could not " - "be determined. Consider giving a textual reference (module:function name) " - "instead." - ) - - # Instance methods cannot survive serialization as-is, so store the "self" argument - # explicitly - func = self.func - if ( - ismethod(func) - and not isclass(func.__self__) - and obj_to_ref(func) == self.func_ref - ): - args = (func.__self__,) + tuple(self.args) - else: - args = self.args - - return { - "version": 1, - "id": self.id, - "func": self.func_ref, - "trigger": self.trigger, - "executor": self.executor, - "args": args, - "kwargs": self.kwargs, - "name": self.name, - "misfire_grace_time": self.misfire_grace_time, - "coalesce": self.coalesce, - "max_instances": self.max_instances, - "next_run_time": self.next_run_time, - } - - def __setstate__(self, state): - if state.get("version", 1) > 1: - raise ValueError( - f"Job has version {state['version']}, but only version 1 can be handled" - ) - - self.id = state["id"] - self.func_ref = state["func"] - self.func = ref_to_obj(self.func_ref) - self.trigger = state["trigger"] - self.executor = state["executor"] - self.args = state["args"] - self.kwargs = state["kwargs"] - self.name = state["name"] - self.misfire_grace_time = state["misfire_grace_time"] - self.coalesce = state["coalesce"] - self.max_instances = state["max_instances"] - self.next_run_time = state["next_run_time"] - - def __eq__(self, other): - if isinstance(other, Job): - return self.id == other.id - return NotImplemented - - def __repr__(self): - return f"<Job (id={self.id} name={self.name})>" - - def __str__(self): - if hasattr(self, "next_run_time"): - status = ( - "next run at: " + datetime_repr(self.next_run_time) - if self.next_run_time - else "paused" - ) - else: - status = "pending" - - return f"{self.name} (trigger: {self.trigger}, {status})" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__init__.py b/venv/Lib/site-packages/apscheduler/jobstores/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4be2293..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 2a2f6c2..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/etcd.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/etcd.cpython-312.pyc deleted file mode 100644 index 36af4cd..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/etcd.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/memory.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/memory.cpython-312.pyc deleted file mode 100644 index 7273941..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/memory.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/mongodb.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/mongodb.cpython-312.pyc deleted file mode 100644 index 175111b..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/mongodb.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/redis.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/redis.cpython-312.pyc deleted file mode 100644 index 5fd2865..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/redis.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/rethinkdb.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/rethinkdb.cpython-312.pyc deleted file mode 100644 index 9622a71..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/rethinkdb.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/sqlalchemy.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/sqlalchemy.cpython-312.pyc deleted file mode 100644 index b4a8e3e..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/sqlalchemy.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/zookeeper.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/zookeeper.cpython-312.pyc deleted file mode 100644 index 06b5475..0000000 Binary files a/venv/Lib/site-packages/apscheduler/jobstores/__pycache__/zookeeper.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/jobstores/base.py b/venv/Lib/site-packages/apscheduler/jobstores/base.py deleted file mode 100644 index 01cabd1..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/base.py +++ /dev/null @@ -1,141 +0,0 @@ -import logging -from abc import ABCMeta, abstractmethod - - -class JobLookupError(KeyError): - """Raised when the job store cannot find a job for update or removal.""" - - def __init__(self, job_id): - super().__init__(f"No job by the id of {job_id} was found") - - -class ConflictingIdError(KeyError): - """Raised when the uniqueness of job IDs is being violated.""" - - def __init__(self, job_id): - super().__init__(f"Job identifier ({job_id}) conflicts with an existing job") - - -class TransientJobError(ValueError): - """ - Raised when an attempt to add transient 
(with no func_ref) job to a persistent job store is - detected. - """ - - def __init__(self, job_id): - super().__init__( - f"Job ({job_id}) cannot be added to this job store because a reference to the callable " - "could not be determined." - ) - - -class BaseJobStore(metaclass=ABCMeta): - """Abstract base class that defines the interface that every job store must implement.""" - - _scheduler = None - _alias = None - _logger = logging.getLogger("apscheduler.jobstores") - - def start(self, scheduler, alias): - """ - Called by the scheduler when the scheduler is being started or when the job store is being - added to an already running scheduler. - - :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting - this job store - :param str|unicode alias: alias of this job store as it was assigned to the scheduler - """ - - self._scheduler = scheduler - self._alias = alias - self._logger = logging.getLogger(f"apscheduler.jobstores.{alias}") - - def shutdown(self): - """Frees any resources still bound to this job store.""" - - def _fix_paused_jobs_sorting(self, jobs): - for i, job in enumerate(jobs): - if job.next_run_time is not None: - if i > 0: - paused_jobs = jobs[:i] - del jobs[:i] - jobs.extend(paused_jobs) - break - - @abstractmethod - def lookup_job(self, job_id): - """ - Returns a specific job, or ``None`` if it isn't found.. - - The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of - the returned job to point to the scheduler and itself, respectively. - - :param str|unicode job_id: identifier of the job - :rtype: Job - """ - - @abstractmethod - def get_due_jobs(self, now): - """ - Returns the list of jobs that have ``next_run_time`` earlier or equal to ``now``. - The returned jobs must be sorted by next run time (ascending). - - :param datetime.datetime now: the current (timezone aware) datetime - :rtype: list[Job] - """ - - @abstractmethod - def get_next_run_time(self): - """ - Returns the earliest run time of all the jobs stored in this job store, or ``None`` if - there are no active jobs. - - :rtype: datetime.datetime - """ - - @abstractmethod - def get_all_jobs(self): - """ - Returns a list of all jobs in this job store. - The returned jobs should be sorted by next run time (ascending). - Paused jobs (next_run_time == None) should be sorted last. - - The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of - the returned jobs to point to the scheduler and itself, respectively. - - :rtype: list[Job] - """ - - @abstractmethod - def add_job(self, job): - """ - Adds the given job to this store. - - :param Job job: the job to add - :raises ConflictingIdError: if there is another job in this store with the same ID - """ - - @abstractmethod - def update_job(self, job): - """ - Replaces the job in the store with the given newer version. - - :param Job job: the job to update - :raises JobLookupError: if the job does not exist - """ - - @abstractmethod - def remove_job(self, job_id): - """ - Removes the given job from this store. 
- - :param str|unicode job_id: identifier of the job - :raises JobLookupError: if the job does not exist - """ - - @abstractmethod - def remove_all_jobs(self): - """Removes all jobs from this store.""" - - def __repr__(self): - return f"<{self.__class__.__name__}>" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/etcd.py b/venv/Lib/site-packages/apscheduler/jobstores/etcd.py deleted file mode 100644 index 3fe74ff..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/etcd.py +++ /dev/null @@ -1,170 +0,0 @@ -import pickle -from datetime import datetime, timezone - -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import ( - datetime_to_utc_timestamp, - maybe_ref, - utc_timestamp_to_datetime, -) - -try: - from etcd3 import Etcd3Client -except ImportError as exc: # pragma: nocover - raise ImportError("EtcdJobStore requires etcd3 be installed") from exc - - -class EtcdJobStore(BaseJobStore): - """ - Stores jobs in a etcd. Any leftover keyword arguments are directly passed to - etcd3's `etcd3.client - `_. - - Plugin alias: ``etcd`` - - :param str path: path to store jobs in - :param client: a :class:`~etcd3.client.etcd3` instance to use instead of - providing connection arguments - :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the - highest available - """ - - def __init__( - self, - path="/apscheduler", - client=None, - close_connection_on_exit=False, - pickle_protocol=pickle.DEFAULT_PROTOCOL, - **connect_args, - ): - super().__init__() - self.pickle_protocol = pickle_protocol - self.close_connection_on_exit = close_connection_on_exit - - if not path: - raise ValueError('The "path" parameter must not be empty') - - self.path = path - - if client: - self.client = maybe_ref(client) - else: - self.client = Etcd3Client(**connect_args) - - def lookup_job(self, job_id): - node_path = self.path + "/" + str(job_id) - try: - content, _ = self.client.get(node_path) - content = pickle.loads(content) - job = self._reconstitute_job(content["job_state"]) - return job - except BaseException: - return None - - def get_due_jobs(self, now): - timestamp = datetime_to_utc_timestamp(now) - jobs = [ - job_record["job"] - for job_record in self._get_jobs() - if job_record["next_run_time"] is not None - and job_record["next_run_time"] <= timestamp - ] - return jobs - - def get_next_run_time(self): - next_runs = [ - job_record["next_run_time"] - for job_record in self._get_jobs() - if job_record["next_run_time"] is not None - ] - return utc_timestamp_to_datetime(min(next_runs)) if len(next_runs) > 0 else None - - def get_all_jobs(self): - jobs = [job_record["job"] for job_record in self._get_jobs()] - self._fix_paused_jobs_sorting(jobs) - return jobs - - def add_job(self, job): - node_path = self.path + "/" + str(job.id) - value = { - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": job.__getstate__(), - } - data = pickle.dumps(value, self.pickle_protocol) - status = self.client.put_if_not_exists(node_path, value=data) - if not status: - raise ConflictingIdError(job.id) - - def update_job(self, job): - node_path = self.path + "/" + str(job.id) - changes = { - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": job.__getstate__(), - } - data = pickle.dumps(changes, self.pickle_protocol) - status, _ = self.client.transaction( - compare=[self.client.transactions.version(node_path) > 0], - 
success=[self.client.transactions.put(node_path, value=data)], - failure=[], - ) - if not status: - raise JobLookupError(job.id) - - def remove_job(self, job_id): - node_path = self.path + "/" + str(job_id) - status, _ = self.client.transaction( - compare=[self.client.transactions.version(node_path) > 0], - success=[self.client.transactions.delete(node_path)], - failure=[], - ) - if not status: - raise JobLookupError(job_id) - - def remove_all_jobs(self): - self.client.delete_prefix(self.path) - - def shutdown(self): - self.client.close() - - def _reconstitute_job(self, job_state): - job_state = job_state - job = Job.__new__(Job) - job.__setstate__(job_state) - job._scheduler = self._scheduler - job._jobstore_alias = self._alias - return job - - def _get_jobs(self): - jobs = [] - failed_job_ids = [] - all_ids = list(self.client.get_prefix(self.path)) - - for doc, _ in all_ids: - try: - content = pickle.loads(doc) - job_record = { - "next_run_time": content["next_run_time"], - "job": self._reconstitute_job(content["job_state"]), - } - jobs.append(job_record) - except BaseException: - content = pickle.loads(doc) - failed_id = content["job_state"]["id"] - failed_job_ids.append(failed_id) - self._logger.exception( - 'Unable to restore job "%s" -- removing it', failed_id - ) - - if failed_job_ids: - for failed_id in failed_job_ids: - self.remove_job(failed_id) - paused_sort_key = datetime(9999, 12, 31, tzinfo=timezone.utc) - return sorted( - jobs, - key=lambda job_record: job_record["job"].next_run_time or paused_sort_key, - ) - - def __repr__(self): - self._logger.exception("<%s (client=%s)>", self.__class__.__name__, self.client) - return f"<{self.__class__.__name__} (client={self.client})>" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/memory.py b/venv/Lib/site-packages/apscheduler/jobstores/memory.py deleted file mode 100644 index 8103cfd..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/memory.py +++ /dev/null @@ -1,106 +0,0 @@ -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import datetime_to_utc_timestamp - - -class MemoryJobStore(BaseJobStore): - """ - Stores jobs in an array in RAM. Provides no persistence support. - - Plugin alias: ``memory`` - """ - - def __init__(self): - super().__init__() - # list of (job, timestamp), sorted by next_run_time and job id (ascending) - self._jobs = [] - self._jobs_index = {} # id -> (job, timestamp) lookup table - - def lookup_job(self, job_id): - return self._jobs_index.get(job_id, (None, None))[0] - - def get_due_jobs(self, now): - now_timestamp = datetime_to_utc_timestamp(now) - pending = [] - for job, timestamp in self._jobs: - if timestamp is None or timestamp > now_timestamp: - break - pending.append(job) - - return pending - - def get_next_run_time(self): - return self._jobs[0][0].next_run_time if self._jobs else None - - def get_all_jobs(self): - return [j[0] for j in self._jobs] - - def add_job(self, job): - if job.id in self._jobs_index: - raise ConflictingIdError(job.id) - - timestamp = datetime_to_utc_timestamp(job.next_run_time) - index = self._get_job_index(timestamp, job.id) - self._jobs.insert(index, (job, timestamp)) - self._jobs_index[job.id] = (job, timestamp) - - def update_job(self, job): - old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) - if old_job is None: - raise JobLookupError(job.id) - - # If the next run time has not changed, simply replace the job in its present index. 
- # Otherwise, reinsert the job to the list to preserve the ordering. - old_index = self._get_job_index(old_timestamp, old_job.id) - new_timestamp = datetime_to_utc_timestamp(job.next_run_time) - if old_timestamp == new_timestamp: - self._jobs[old_index] = (job, new_timestamp) - else: - del self._jobs[old_index] - new_index = self._get_job_index(new_timestamp, job.id) - self._jobs.insert(new_index, (job, new_timestamp)) - - self._jobs_index[old_job.id] = (job, new_timestamp) - - def remove_job(self, job_id): - job, timestamp = self._jobs_index.get(job_id, (None, None)) - if job is None: - raise JobLookupError(job_id) - - index = self._get_job_index(timestamp, job_id) - del self._jobs[index] - del self._jobs_index[job.id] - - def remove_all_jobs(self): - self._jobs = [] - self._jobs_index = {} - - def shutdown(self): - self.remove_all_jobs() - - def _get_job_index(self, timestamp, job_id): - """ - Returns the index of the given job, or if it's not found, the index where the job should be - inserted based on the given timestamp. - - :type timestamp: int - :type job_id: str - - """ - lo, hi = 0, len(self._jobs) - timestamp = float("inf") if timestamp is None else timestamp - while lo < hi: - mid = (lo + hi) // 2 - mid_job, mid_timestamp = self._jobs[mid] - mid_timestamp = float("inf") if mid_timestamp is None else mid_timestamp - if mid_timestamp > timestamp: - hi = mid - elif mid_timestamp < timestamp: - lo = mid + 1 - elif mid_job.id > job_id: - hi = mid - elif mid_job.id < job_id: - lo = mid + 1 - else: - return mid - - return lo diff --git a/venv/Lib/site-packages/apscheduler/jobstores/mongodb.py b/venv/Lib/site-packages/apscheduler/jobstores/mongodb.py deleted file mode 100644 index 102c0bd..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/mongodb.py +++ /dev/null @@ -1,158 +0,0 @@ -import pickle -import warnings - -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import ( - datetime_to_utc_timestamp, - maybe_ref, - utc_timestamp_to_datetime, -) - -try: - from bson.binary import Binary - from pymongo import ASCENDING, MongoClient - from pymongo.errors import DuplicateKeyError -except ImportError as exc: # pragma: nocover - raise ImportError("MongoDBJobStore requires PyMongo installed") from exc - - -class MongoDBJobStore(BaseJobStore): - """ - Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to - pymongo's `MongoClient - `_. 
- - Plugin alias: ``mongodb`` - - :param str database: database to store jobs in - :param str collection: collection to store jobs in - :param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of - providing connection arguments - :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the - highest available - """ - - def __init__( - self, - database="apscheduler", - collection="jobs", - client=None, - pickle_protocol=pickle.HIGHEST_PROTOCOL, - **connect_args, - ): - super().__init__() - self.pickle_protocol = pickle_protocol - - if not database: - raise ValueError('The "database" parameter must not be empty') - if not collection: - raise ValueError('The "collection" parameter must not be empty') - - if client: - self.client = maybe_ref(client) - else: - connect_args.setdefault("w", 1) - self.client = MongoClient(**connect_args) - - self.collection = self.client[database][collection] - - def start(self, scheduler, alias): - super().start(scheduler, alias) - self.collection.create_index("next_run_time", sparse=True) - - @property - def connection(self): - warnings.warn( - 'The "connection" member is deprecated -- use "client" instead', - DeprecationWarning, - ) - return self.client - - def lookup_job(self, job_id): - document = self.collection.find_one(job_id, ["job_state"]) - return self._reconstitute_job(document["job_state"]) if document else None - - def get_due_jobs(self, now): - timestamp = datetime_to_utc_timestamp(now) - return self._get_jobs({"next_run_time": {"$lte": timestamp}}) - - def get_next_run_time(self): - document = self.collection.find_one( - {"next_run_time": {"$ne": None}}, - projection=["next_run_time"], - sort=[("next_run_time", ASCENDING)], - ) - return ( - utc_timestamp_to_datetime(document["next_run_time"]) if document else None - ) - - def get_all_jobs(self): - jobs = self._get_jobs({}) - self._fix_paused_jobs_sorting(jobs) - return jobs - - def add_job(self, job): - try: - self.collection.insert_one( - { - "_id": job.id, - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": Binary( - pickle.dumps(job.__getstate__(), self.pickle_protocol) - ), - } - ) - except DuplicateKeyError: - raise ConflictingIdError(job.id) - - def update_job(self, job): - changes = { - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)), - } - result = self.collection.update_one({"_id": job.id}, {"$set": changes}) - if result and result.matched_count == 0: - raise JobLookupError(job.id) - - def remove_job(self, job_id): - result = self.collection.delete_one({"_id": job_id}) - if result and result.deleted_count == 0: - raise JobLookupError(job_id) - - def remove_all_jobs(self): - self.collection.delete_many({}) - - def shutdown(self): - self.client.close() - - def _reconstitute_job(self, job_state): - job_state = pickle.loads(job_state) - job = Job.__new__(Job) - job.__setstate__(job_state) - job._scheduler = self._scheduler - job._jobstore_alias = self._alias - return job - - def _get_jobs(self, conditions): - jobs = [] - failed_job_ids = [] - for document in self.collection.find( - conditions, ["_id", "job_state"], sort=[("next_run_time", ASCENDING)] - ): - try: - jobs.append(self._reconstitute_job(document["job_state"])) - except BaseException: - self._logger.exception( - 'Unable to restore job "%s" -- removing it', document["_id"] - ) - failed_job_ids.append(document["_id"]) - - # Remove all the jobs we failed to 
restore - if failed_job_ids: - self.collection.delete_many({"_id": {"$in": failed_job_ids}}) - - return jobs - - def __repr__(self): - return f"<{self.__class__.__name__} (client={self.client})>" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/redis.py b/venv/Lib/site-packages/apscheduler/jobstores/redis.py deleted file mode 100644 index 528285f..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/redis.py +++ /dev/null @@ -1,160 +0,0 @@ -import pickle -from datetime import datetime, timezone - -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import datetime_to_utc_timestamp, utc_timestamp_to_datetime - -try: - from redis import Redis -except ImportError as exc: # pragma: nocover - raise ImportError("RedisJobStore requires redis installed") from exc - - -class RedisJobStore(BaseJobStore): - """ - Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's - :class:`~redis.StrictRedis`. - - Plugin alias: ``redis`` - - :param int db: the database number to store jobs in - :param str jobs_key: key to store jobs in - :param str run_times_key: key to store the jobs' run times in - :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the - highest available - """ - - def __init__( - self, - db=0, - jobs_key="apscheduler.jobs", - run_times_key="apscheduler.run_times", - pickle_protocol=pickle.HIGHEST_PROTOCOL, - **connect_args, - ): - super().__init__() - - if db is None: - raise ValueError('The "db" parameter must not be empty') - if not jobs_key: - raise ValueError('The "jobs_key" parameter must not be empty') - if not run_times_key: - raise ValueError('The "run_times_key" parameter must not be empty') - - self.pickle_protocol = pickle_protocol - self.jobs_key = jobs_key - self.run_times_key = run_times_key - self.redis = Redis(db=int(db), **connect_args) - - def lookup_job(self, job_id): - job_state = self.redis.hget(self.jobs_key, job_id) - return self._reconstitute_job(job_state) if job_state else None - - def get_due_jobs(self, now): - timestamp = datetime_to_utc_timestamp(now) - job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp) - if job_ids: - job_states = self.redis.hmget(self.jobs_key, *job_ids) - return self._reconstitute_jobs(zip(job_ids, job_states)) - return [] - - def get_next_run_time(self): - next_run_time = self.redis.zrange(self.run_times_key, 0, 0, withscores=True) - if next_run_time: - return utc_timestamp_to_datetime(next_run_time[0][1]) - - def get_all_jobs(self): - job_states = self.redis.hgetall(self.jobs_key) - jobs = self._reconstitute_jobs(job_states.items()) - paused_sort_key = datetime(9999, 12, 31, tzinfo=timezone.utc) - return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key) - - def add_job(self, job): - if self.redis.hexists(self.jobs_key, job.id): - raise ConflictingIdError(job.id) - - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.hset( - self.jobs_key, - job.id, - pickle.dumps(job.__getstate__(), self.pickle_protocol), - ) - if job.next_run_time: - pipe.zadd( - self.run_times_key, - {job.id: datetime_to_utc_timestamp(job.next_run_time)}, - ) - - pipe.execute() - - def update_job(self, job): - if not self.redis.hexists(self.jobs_key, job.id): - raise JobLookupError(job.id) - - with self.redis.pipeline() as pipe: - pipe.hset( - self.jobs_key, - job.id, - pickle.dumps(job.__getstate__(), self.pickle_protocol), - ) - if 
job.next_run_time: - pipe.zadd( - self.run_times_key, - {job.id: datetime_to_utc_timestamp(job.next_run_time)}, - ) - else: - pipe.zrem(self.run_times_key, job.id) - - pipe.execute() - - def remove_job(self, job_id): - if not self.redis.hexists(self.jobs_key, job_id): - raise JobLookupError(job_id) - - with self.redis.pipeline() as pipe: - pipe.hdel(self.jobs_key, job_id) - pipe.zrem(self.run_times_key, job_id) - pipe.execute() - - def remove_all_jobs(self): - with self.redis.pipeline() as pipe: - pipe.delete(self.jobs_key) - pipe.delete(self.run_times_key) - pipe.execute() - - def shutdown(self): - self.redis.connection_pool.disconnect() - - def _reconstitute_job(self, job_state): - job_state = pickle.loads(job_state) - job = Job.__new__(Job) - job.__setstate__(job_state) - job._scheduler = self._scheduler - job._jobstore_alias = self._alias - return job - - def _reconstitute_jobs(self, job_states): - jobs = [] - failed_job_ids = [] - for job_id, job_state in job_states: - try: - jobs.append(self._reconstitute_job(job_state)) - except BaseException: - self._logger.exception( - 'Unable to restore job "%s" -- removing it', job_id - ) - failed_job_ids.append(job_id) - - # Remove all the jobs we failed to restore - if failed_job_ids: - with self.redis.pipeline() as pipe: - pipe.hdel(self.jobs_key, *failed_job_ids) - pipe.zrem(self.run_times_key, *failed_job_ids) - pipe.execute() - - return jobs - - def __repr__(self): - return f"<{self.__class__.__name__}>" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/rethinkdb.py b/venv/Lib/site-packages/apscheduler/jobstores/rethinkdb.py deleted file mode 100644 index d78290b..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/rethinkdb.py +++ /dev/null @@ -1,173 +0,0 @@ -import pickle - -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import ( - datetime_to_utc_timestamp, - maybe_ref, - utc_timestamp_to_datetime, -) - -try: - from rethinkdb import RethinkDB -except ImportError as exc: # pragma: nocover - raise ImportError("RethinkDBJobStore requires rethinkdb installed") from exc - - -class RethinkDBJobStore(BaseJobStore): - """ - Stores jobs in a RethinkDB database. Any leftover keyword arguments are directly passed to - rethinkdb's `RethinkdbClient `_. 
- - Plugin alias: ``rethinkdb`` - - :param str database: database to store jobs in - :param str collection: collection to store jobs in - :param client: a :class:`rethinkdb.net.Connection` instance to use instead of providing - connection arguments - :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the - highest available - """ - - def __init__( - self, - database="apscheduler", - table="jobs", - client=None, - pickle_protocol=pickle.HIGHEST_PROTOCOL, - **connect_args, - ): - super().__init__() - - if not database: - raise ValueError('The "database" parameter must not be empty') - if not table: - raise ValueError('The "table" parameter must not be empty') - - self.database = database - self.table_name = table - self.table = None - self.client = client - self.pickle_protocol = pickle_protocol - self.connect_args = connect_args - self.r = RethinkDB() - self.conn = None - - def start(self, scheduler, alias): - super().start(scheduler, alias) - - if self.client: - self.conn = maybe_ref(self.client) - else: - self.conn = self.r.connect(db=self.database, **self.connect_args) - - if self.database not in self.r.db_list().run(self.conn): - self.r.db_create(self.database).run(self.conn) - - if self.table_name not in self.r.table_list().run(self.conn): - self.r.table_create(self.table_name).run(self.conn) - - if "next_run_time" not in self.r.table(self.table_name).index_list().run( - self.conn - ): - self.r.table(self.table_name).index_create("next_run_time").run(self.conn) - - self.table = self.r.db(self.database).table(self.table_name) - - def lookup_job(self, job_id): - results = list(self.table.get_all(job_id).pluck("job_state").run(self.conn)) - return self._reconstitute_job(results[0]["job_state"]) if results else None - - def get_due_jobs(self, now): - return self._get_jobs( - self.r.row["next_run_time"] <= datetime_to_utc_timestamp(now) - ) - - def get_next_run_time(self): - results = list( - self.table.filter(self.r.row["next_run_time"] != None) - .order_by(self.r.asc("next_run_time")) - .map(lambda x: x["next_run_time"]) - .limit(1) - .run(self.conn) - ) - return utc_timestamp_to_datetime(results[0]) if results else None - - def get_all_jobs(self): - jobs = self._get_jobs() - self._fix_paused_jobs_sorting(jobs) - return jobs - - def add_job(self, job): - job_dict = { - "id": job.id, - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": self.r.binary( - pickle.dumps(job.__getstate__(), self.pickle_protocol) - ), - } - results = self.table.insert(job_dict).run(self.conn) - if results["errors"] > 0: - raise ConflictingIdError(job.id) - - def update_job(self, job): - changes = { - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": self.r.binary( - pickle.dumps(job.__getstate__(), self.pickle_protocol) - ), - } - results = self.table.get_all(job.id).update(changes).run(self.conn) - skipped = False in map(lambda x: results[x] == 0, results.keys()) - if results["skipped"] > 0 or results["errors"] > 0 or not skipped: - raise JobLookupError(job.id) - - def remove_job(self, job_id): - results = self.table.get_all(job_id).delete().run(self.conn) - if results["deleted"] + results["skipped"] != 1: - raise JobLookupError(job_id) - - def remove_all_jobs(self): - self.table.delete().run(self.conn) - - def shutdown(self): - self.conn.close() - - def _reconstitute_job(self, job_state): - job_state = pickle.loads(job_state) - job = Job.__new__(Job) - job.__setstate__(job_state) - job._scheduler = self._scheduler - 
job._jobstore_alias = self._alias - return job - - def _get_jobs(self, predicate=None): - jobs = [] - failed_job_ids = [] - query = ( - self.table.filter(self.r.row["next_run_time"] != None).filter(predicate) - if predicate - else self.table - ) - query = query.order_by("next_run_time", "id").pluck("id", "job_state") - - for document in query.run(self.conn): - try: - jobs.append(self._reconstitute_job(document["job_state"])) - except Exception: - self._logger.exception( - 'Unable to restore job "%s" -- removing it', document["id"] - ) - failed_job_ids.append(document["id"]) - - # Remove all the jobs we failed to restore - if failed_job_ids: - self.r.expr(failed_job_ids).for_each( - lambda job_id: self.table.get_all(job_id).delete() - ).run(self.conn) - - return jobs - - def __repr__(self): - connection = self.conn - return f"<{self.__class__.__name__} (connection={connection})>" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/sqlalchemy.py b/venv/Lib/site-packages/apscheduler/jobstores/sqlalchemy.py deleted file mode 100644 index 9866acf..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/sqlalchemy.py +++ /dev/null @@ -1,194 +0,0 @@ -import pickle - -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import ( - datetime_to_utc_timestamp, - maybe_ref, - utc_timestamp_to_datetime, -) - -try: - from sqlalchemy import ( - Column, - Float, - LargeBinary, - MetaData, - Table, - Unicode, - and_, - create_engine, - select, - ) - from sqlalchemy.exc import IntegrityError - from sqlalchemy.sql.expression import null -except ImportError as exc: # pragma: nocover - raise ImportError("SQLAlchemyJobStore requires SQLAlchemy installed") from exc - - -class SQLAlchemyJobStore(BaseJobStore): - """ - Stores jobs in a database table using SQLAlchemy. - The table will be created if it doesn't exist in the database. 
- - Plugin alias: ``sqlalchemy`` - - :param str url: connection string (see - :ref:`SQLAlchemy documentation ` on this) - :param engine: an SQLAlchemy :class:`~sqlalchemy.engine.Engine` to use instead of creating a - new one based on ``url`` - :param str tablename: name of the table to store jobs in - :param metadata: a :class:`~sqlalchemy.schema.MetaData` instance to use instead of creating a - new one - :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the - highest available - :param str tableschema: name of the (existing) schema in the target database where the table - should be - :param dict engine_options: keyword arguments to :func:`~sqlalchemy.create_engine` - (ignored if ``engine`` is given) - """ - - def __init__( - self, - url=None, - engine=None, - tablename="apscheduler_jobs", - metadata=None, - pickle_protocol=pickle.HIGHEST_PROTOCOL, - tableschema=None, - engine_options=None, - ): - super().__init__() - self.pickle_protocol = pickle_protocol - metadata = maybe_ref(metadata) or MetaData() - - if engine: - self.engine = maybe_ref(engine) - elif url: - self.engine = create_engine(url, **(engine_options or {})) - else: - raise ValueError('Need either "engine" or "url" defined') - - # 191 = max key length in MySQL for InnoDB/utf8mb4 tables, - # 25 = precision that translates to an 8-byte float - self.jobs_t = Table( - tablename, - metadata, - Column("id", Unicode(191), primary_key=True), - Column("next_run_time", Float(25), index=True), - Column("job_state", LargeBinary, nullable=False), - schema=tableschema, - ) - - def start(self, scheduler, alias): - super().start(scheduler, alias) - self.jobs_t.create(self.engine, True) - - def lookup_job(self, job_id): - selectable = select(self.jobs_t.c.job_state).where(self.jobs_t.c.id == job_id) - with self.engine.begin() as connection: - job_state = connection.execute(selectable).scalar() - return self._reconstitute_job(job_state) if job_state else None - - def get_due_jobs(self, now): - timestamp = datetime_to_utc_timestamp(now) - return self._get_jobs(self.jobs_t.c.next_run_time <= timestamp) - - def get_next_run_time(self): - selectable = ( - select(self.jobs_t.c.next_run_time) - .where(self.jobs_t.c.next_run_time != null()) - .order_by(self.jobs_t.c.next_run_time) - .limit(1) - ) - with self.engine.begin() as connection: - next_run_time = connection.execute(selectable).scalar() - return utc_timestamp_to_datetime(next_run_time) - - def get_all_jobs(self): - jobs = self._get_jobs() - self._fix_paused_jobs_sorting(jobs) - return jobs - - def add_job(self, job): - insert = self.jobs_t.insert().values( - **{ - "id": job.id, - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": pickle.dumps(job.__getstate__(), self.pickle_protocol), - } - ) - with self.engine.begin() as connection: - try: - connection.execute(insert) - except IntegrityError: - raise ConflictingIdError(job.id) - - def update_job(self, job): - update = ( - self.jobs_t.update() - .values( - **{ - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": pickle.dumps(job.__getstate__(), self.pickle_protocol), - } - ) - .where(self.jobs_t.c.id == job.id) - ) - with self.engine.begin() as connection: - result = connection.execute(update) - if result.rowcount == 0: - raise JobLookupError(job.id) - - def remove_job(self, job_id): - delete = self.jobs_t.delete().where(self.jobs_t.c.id == job_id) - with self.engine.begin() as connection: - result = connection.execute(delete) - if result.rowcount == 
0: - raise JobLookupError(job_id) - - def remove_all_jobs(self): - delete = self.jobs_t.delete() - with self.engine.begin() as connection: - connection.execute(delete) - - def shutdown(self): - self.engine.dispose() - - def _reconstitute_job(self, job_state): - job_state = pickle.loads(job_state) - job_state["jobstore"] = self - job = Job.__new__(Job) - job.__setstate__(job_state) - job._scheduler = self._scheduler - job._jobstore_alias = self._alias - return job - - def _get_jobs(self, *conditions): - jobs = [] - selectable = select(self.jobs_t.c.id, self.jobs_t.c.job_state).order_by( - self.jobs_t.c.next_run_time - ) - selectable = selectable.where(and_(*conditions)) if conditions else selectable - failed_job_ids = set() - with self.engine.begin() as connection: - for row in connection.execute(selectable): - try: - jobs.append(self._reconstitute_job(row.job_state)) - except BaseException: - self._logger.exception( - 'Unable to restore job "%s" -- removing it', row.id - ) - failed_job_ids.add(row.id) - - # Remove all the jobs we failed to restore - if failed_job_ids: - delete = self.jobs_t.delete().where( - self.jobs_t.c.id.in_(failed_job_ids) - ) - connection.execute(delete) - - return jobs - - def __repr__(self): - return f"<{self.__class__.__name__} (url={self.engine.url})>" diff --git a/venv/Lib/site-packages/apscheduler/jobstores/zookeeper.py b/venv/Lib/site-packages/apscheduler/jobstores/zookeeper.py deleted file mode 100644 index 687fbc2..0000000 --- a/venv/Lib/site-packages/apscheduler/jobstores/zookeeper.py +++ /dev/null @@ -1,197 +0,0 @@ -import pickle -from datetime import datetime, timezone - -from kazoo.exceptions import NodeExistsError, NoNodeError - -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.util import ( - datetime_to_utc_timestamp, - maybe_ref, - utc_timestamp_to_datetime, -) - -try: - from kazoo.client import KazooClient -except ImportError as exc: # pragma: nocover - raise ImportError("ZooKeeperJobStore requires Kazoo installed") from exc - - -class ZooKeeperJobStore(BaseJobStore): - """ - Stores jobs in a ZooKeeper tree. Any leftover keyword arguments are directly passed to - kazoo's `KazooClient - `_. 
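# NOTE (editor): illustrative sketch, not part of the diff above. It shows how the
# SQLAlchemyJobStore removed in this commit is normally attached to a scheduler,
# either as an instance or via the "sqlalchemy" plugin alias from its docstring.
# The SQLite URL is a placeholder; any SQLAlchemy-supported URL would do.
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore

scheduler = BackgroundScheduler()
scheduler.add_jobstore(SQLAlchemyJobStore(url="sqlite:///jobs.sqlite"))
# equivalent, letting the scheduler resolve the plugin alias and build the store:
# scheduler.add_jobstore("sqlalchemy", url="sqlite:///jobs.sqlite")
scheduler.start()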
- - Plugin alias: ``zookeeper`` - - :param str path: path to store jobs in - :param client: a :class:`~kazoo.client.KazooClient` instance to use instead of - providing connection arguments - :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the - highest available - """ - - def __init__( - self, - path="/apscheduler", - client=None, - close_connection_on_exit=False, - pickle_protocol=pickle.HIGHEST_PROTOCOL, - **connect_args, - ): - super().__init__() - self.pickle_protocol = pickle_protocol - self.close_connection_on_exit = close_connection_on_exit - - if not path: - raise ValueError('The "path" parameter must not be empty') - - self.path = path - - if client: - self.client = maybe_ref(client) - else: - self.client = KazooClient(**connect_args) - self._ensured_path = False - - def _ensure_paths(self): - if not self._ensured_path: - self.client.ensure_path(self.path) - self._ensured_path = True - - def start(self, scheduler, alias): - super().start(scheduler, alias) - if not self.client.connected: - self.client.start() - - def lookup_job(self, job_id): - self._ensure_paths() - node_path = self.path + "/" + str(job_id) - try: - content, _ = self.client.get(node_path) - doc = pickle.loads(content) - job = self._reconstitute_job(doc["job_state"]) - return job - except BaseException: - return None - - def get_due_jobs(self, now): - timestamp = datetime_to_utc_timestamp(now) - jobs = [ - job_def["job"] - for job_def in self._get_jobs() - if job_def["next_run_time"] is not None - and job_def["next_run_time"] <= timestamp - ] - return jobs - - def get_next_run_time(self): - next_runs = [ - job_def["next_run_time"] - for job_def in self._get_jobs() - if job_def["next_run_time"] is not None - ] - return utc_timestamp_to_datetime(min(next_runs)) if len(next_runs) > 0 else None - - def get_all_jobs(self): - jobs = [job_def["job"] for job_def in self._get_jobs()] - self._fix_paused_jobs_sorting(jobs) - return jobs - - def add_job(self, job): - self._ensure_paths() - node_path = self.path + "/" + str(job.id) - value = { - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": job.__getstate__(), - } - data = pickle.dumps(value, self.pickle_protocol) - try: - self.client.create(node_path, value=data) - except NodeExistsError: - raise ConflictingIdError(job.id) - - def update_job(self, job): - self._ensure_paths() - node_path = self.path + "/" + str(job.id) - changes = { - "next_run_time": datetime_to_utc_timestamp(job.next_run_time), - "job_state": job.__getstate__(), - } - data = pickle.dumps(changes, self.pickle_protocol) - try: - self.client.set(node_path, value=data) - except NoNodeError: - raise JobLookupError(job.id) - - def remove_job(self, job_id): - self._ensure_paths() - node_path = self.path + "/" + str(job_id) - try: - self.client.delete(node_path) - except NoNodeError: - raise JobLookupError(job_id) - - def remove_all_jobs(self): - try: - self.client.delete(self.path, recursive=True) - except NoNodeError: - pass - self._ensured_path = False - - def shutdown(self): - if self.close_connection_on_exit: - self.client.stop() - self.client.close() - - def _reconstitute_job(self, job_state): - job_state = job_state - job = Job.__new__(Job) - job.__setstate__(job_state) - job._scheduler = self._scheduler - job._jobstore_alias = self._alias - return job - - def _get_jobs(self): - self._ensure_paths() - jobs = [] - failed_job_ids = [] - all_ids = self.client.get_children(self.path) - for node_name in all_ids: - try: - node_path = 
self.path + "/" + node_name - content, _ = self.client.get(node_path) - doc = pickle.loads(content) - job_def = { - "job_id": node_name, - "next_run_time": doc["next_run_time"] - if doc["next_run_time"] - else None, - "job_state": doc["job_state"], - "job": self._reconstitute_job(doc["job_state"]), - "creation_time": _.ctime, - } - jobs.append(job_def) - except BaseException: - self._logger.exception( - 'Unable to restore job "%s" -- removing it', node_name - ) - failed_job_ids.append(node_name) - - # Remove all the jobs we failed to restore - if failed_job_ids: - for failed_id in failed_job_ids: - self.remove_job(failed_id) - paused_sort_key = datetime(9999, 12, 31, tzinfo=timezone.utc) - return sorted( - jobs, - key=lambda job_def: ( - job_def["job"].next_run_time or paused_sort_key, - job_def["creation_time"], - ), - ) - - def __repr__(self): - self._logger.exception("<%s (client=%s)>", self.__class__.__name__, self.client) - return f"<{self.__class__.__name__} (client={self.client})>" diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__init__.py b/venv/Lib/site-packages/apscheduler/schedulers/__init__.py deleted file mode 100644 index c17cc29..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -class SchedulerAlreadyRunningError(Exception): - """Raised when attempting to start or configure the scheduler when it's already running.""" - - def __str__(self): - return "Scheduler is already running" - - -class SchedulerNotRunningError(Exception): - """Raised when attempting to shutdown the scheduler when it's not running.""" - - def __str__(self): - return "Scheduler is not running" diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 0e36379..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/asyncio.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/asyncio.cpython-312.pyc deleted file mode 100644 index ad836c0..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/asyncio.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/background.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/background.cpython-312.pyc deleted file mode 100644 index 323ebe0..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/background.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/base.cpython-312.pyc deleted file mode 100644 index d9febdc..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/blocking.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/blocking.cpython-312.pyc deleted file mode 100644 index 12bb5a8..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/blocking.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/gevent.cpython-312.pyc 
b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/gevent.cpython-312.pyc deleted file mode 100644 index 45588bf..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/gevent.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/qt.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/qt.cpython-312.pyc deleted file mode 100644 index e1ec910..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/qt.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/tornado.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/tornado.cpython-312.pyc deleted file mode 100644 index 1b9dad5..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/tornado.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/twisted.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/twisted.cpython-312.pyc deleted file mode 100644 index d1f54c5..0000000 Binary files a/venv/Lib/site-packages/apscheduler/schedulers/__pycache__/twisted.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/schedulers/asyncio.py b/venv/Lib/site-packages/apscheduler/schedulers/asyncio.py deleted file mode 100644 index 7fb3fa6..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/asyncio.py +++ /dev/null @@ -1,75 +0,0 @@ -import asyncio -from functools import partial, wraps - -from apscheduler.schedulers import SchedulerNotRunningError -from apscheduler.schedulers.base import BaseScheduler -from apscheduler.util import maybe_ref - - -def run_in_event_loop(func): - @wraps(func) - def wrapper(self, *args, **kwargs): - wrapped = partial(func, self, *args, **kwargs) - self._eventloop.call_soon_threadsafe(wrapped) - - return wrapper - - -class AsyncIOScheduler(BaseScheduler): - """ - A scheduler that runs on an asyncio (:pep:`3156`) event loop. - - The default executor can run jobs based on native coroutines (``async def``). 
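# NOTE (editor): illustrative sketch, not part of the diff. AsyncIOScheduler
# (introduced above, implementation continues below) grabs the running event loop in
# start(), so it has to be started from inside one; the job may be a native coroutine.
import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler

async def tick():
    print("tick")

async def main():
    scheduler = AsyncIOScheduler()
    scheduler.add_job(tick, "interval", seconds=5)
    scheduler.start()            # must be called while an event loop is running
    await asyncio.sleep(20)      # keep the loop alive long enough for a few runs
    scheduler.shutdown()

asyncio.run(main())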
- - Extra options: - - ============== ============================================================= - ``event_loop`` AsyncIO event loop to use (defaults to the global event loop) - ============== ============================================================= - """ - - _eventloop = None - _timeout = None - - def start(self, paused=False): - if not self._eventloop or self._eventloop.is_closed(): - self._eventloop = asyncio.get_running_loop() - - super().start(paused) - - @run_in_event_loop - def _shutdown(self, wait=True): - super().shutdown(wait) - self._stop_timer() - self._eventloop = None - - def shutdown(self, wait=True): - if not self.running: - raise SchedulerNotRunningError - - self._shutdown(wait) - - def _configure(self, config): - self._eventloop = maybe_ref(config.pop("event_loop", None)) - super()._configure(config) - - def _start_timer(self, wait_seconds): - self._stop_timer() - if wait_seconds is not None: - self._timeout = self._eventloop.call_later(wait_seconds, self.wakeup) - - def _stop_timer(self): - if self._timeout: - self._timeout.cancel() - del self._timeout - - @run_in_event_loop - def wakeup(self): - self._stop_timer() - wait_seconds = self._process_jobs() - self._start_timer(wait_seconds) - - def _create_default_executor(self): - from apscheduler.executors.asyncio import AsyncIOExecutor - - return AsyncIOExecutor() diff --git a/venv/Lib/site-packages/apscheduler/schedulers/background.py b/venv/Lib/site-packages/apscheduler/schedulers/background.py deleted file mode 100644 index 7d8d1bc..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/background.py +++ /dev/null @@ -1,42 +0,0 @@ -from threading import Event, Thread - -from apscheduler.schedulers.base import BaseScheduler -from apscheduler.schedulers.blocking import BlockingScheduler -from apscheduler.util import asbool - - -class BackgroundScheduler(BlockingScheduler): - """ - A scheduler that runs in the background using a separate thread - (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will return immediately). 
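# NOTE (editor): illustrative sketch, not part of the diff. BackgroundScheduler
# (introduced above, implementation continues below) runs its main loop on a separate
# thread; the "daemon" option is consumed by _configure() and applied to that thread.
from time import sleep
from apscheduler.schedulers.background import BackgroundScheduler

def heartbeat():
    print("still alive")

scheduler = BackgroundScheduler(daemon=True)
scheduler.add_job(heartbeat, "interval", seconds=10)
scheduler.start()     # returns immediately; jobs run on the APScheduler thread
sleep(35)
scheduler.shutdown()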
- - Extra options: - - ========== ============================================================================= - ``daemon`` Set the ``daemon`` option in the background thread (defaults to ``True``, see - `the documentation - `_ - for further details) - ========== ============================================================================= - """ - - _thread = None - - def _configure(self, config): - self._daemon = asbool(config.pop("daemon", True)) - super()._configure(config) - - def start(self, *args, **kwargs): - if self._event is None or self._event.is_set(): - self._event = Event() - - BaseScheduler.start(self, *args, **kwargs) - self._thread = Thread( - target=self._main_loop, name="APScheduler", daemon=self._daemon - ) - self._thread.start() - - def shutdown(self, *args, **kwargs): - super().shutdown(*args, **kwargs) - self._thread.join() - del self._thread diff --git a/venv/Lib/site-packages/apscheduler/schedulers/base.py b/venv/Lib/site-packages/apscheduler/schedulers/base.py deleted file mode 100644 index 7d713c7..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/base.py +++ /dev/null @@ -1,1264 +0,0 @@ -import sys -import warnings -from abc import ABCMeta, abstractmethod -from collections.abc import Mapping, MutableMapping -from contextlib import ExitStack -from datetime import datetime, timedelta -from importlib.metadata import entry_points -from logging import getLogger -from threading import TIMEOUT_MAX, RLock - -from tzlocal import get_localzone - -from apscheduler.events import ( - EVENT_ALL, - EVENT_ALL_JOBS_REMOVED, - EVENT_EXECUTOR_ADDED, - EVENT_EXECUTOR_REMOVED, - EVENT_JOB_ADDED, - EVENT_JOB_MAX_INSTANCES, - EVENT_JOB_MODIFIED, - EVENT_JOB_REMOVED, - EVENT_JOB_SUBMITTED, - EVENT_JOBSTORE_ADDED, - EVENT_JOBSTORE_REMOVED, - EVENT_SCHEDULER_PAUSED, - EVENT_SCHEDULER_RESUMED, - EVENT_SCHEDULER_SHUTDOWN, - EVENT_SCHEDULER_STARTED, - JobEvent, - JobSubmissionEvent, - SchedulerEvent, -) -from apscheduler.executors.base import BaseExecutor, MaxInstancesReachedError -from apscheduler.executors.pool import ThreadPoolExecutor -from apscheduler.job import Job -from apscheduler.jobstores.base import BaseJobStore, ConflictingIdError, JobLookupError -from apscheduler.jobstores.memory import MemoryJobStore -from apscheduler.schedulers import ( - SchedulerAlreadyRunningError, - SchedulerNotRunningError, -) -from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import ( - asbool, - asint, - astimezone, - maybe_ref, - obj_to_ref, - ref_to_obj, - undefined, -) - -#: constant indicating a scheduler's stopped state -STATE_STOPPED = 0 -#: constant indicating a scheduler's running state (started and processing jobs) -STATE_RUNNING = 1 -#: constant indicating a scheduler's paused state (started but not processing jobs) -STATE_PAUSED = 2 - - -class BaseScheduler(metaclass=ABCMeta): - """ - Abstract base class for all schedulers. 
- - Takes the following keyword arguments: - - :param str|logging.Logger logger: logger to use for the scheduler's logging (defaults to - apscheduler.scheduler) - :param str|datetime.tzinfo timezone: the default time zone (defaults to the local timezone) - :param int|float jobstore_retry_interval: the minimum number of seconds to wait between - retries in the scheduler's main loop if the job store raises an exception when getting - the list of due jobs - :param dict job_defaults: default values for newly added jobs - :param dict jobstores: a dictionary of job store alias -> job store instance or configuration - dict - :param dict executors: a dictionary of executor alias -> executor instance or configuration - dict - - :ivar int state: current running state of the scheduler (one of the following constants from - ``apscheduler.schedulers.base``: ``STATE_STOPPED``, ``STATE_RUNNING``, ``STATE_PAUSED``) - - .. seealso:: :ref:`scheduler-config` - """ - - # The `group=...` API is only available in the backport, used in <=3.7, and in std>=3.10. - if (3, 8) <= sys.version_info < (3, 10): - _trigger_plugins = { - ep.name: ep for ep in entry_points()["apscheduler.triggers"] - } - _executor_plugins = { - ep.name: ep for ep in entry_points()["apscheduler.executors"] - } - _jobstore_plugins = { - ep.name: ep for ep in entry_points()["apscheduler.jobstores"] - } - else: - _trigger_plugins = { - ep.name: ep for ep in entry_points(group="apscheduler.triggers") - } - _executor_plugins = { - ep.name: ep for ep in entry_points(group="apscheduler.executors") - } - _jobstore_plugins = { - ep.name: ep for ep in entry_points(group="apscheduler.jobstores") - } - - _trigger_classes = {} - _executor_classes = {} - _jobstore_classes = {} - - # - # Public API - # - - def __init__(self, gconfig={}, **options): - super().__init__() - self._executors = {} - self._executors_lock = self._create_lock() - self._jobstores = {} - self._jobstores_lock = self._create_lock() - self._listeners = [] - self._listeners_lock = self._create_lock() - self._pending_jobs = [] - self.state = STATE_STOPPED - self.configure(gconfig, **options) - - def __getstate__(self): - raise TypeError( - "Schedulers cannot be serialized. Ensure that you are not passing a " - "scheduler instance as an argument to a job, or scheduling an instance " - "method where the instance contains a scheduler as an attribute." - ) - - def configure(self, gconfig={}, prefix="apscheduler.", **options): - """ - Reconfigures the scheduler with the given options. - - Can only be done when the scheduler isn't running. - - :param dict gconfig: a "global" configuration dictionary whose values can be overridden by - keyword arguments to this method - :param str|unicode prefix: pick only those keys from ``gconfig`` that are prefixed with - this string (pass an empty string or ``None`` to use all keys) - :raises SchedulerAlreadyRunningError: if the scheduler is already running - - """ - if self.state != STATE_STOPPED: - raise SchedulerAlreadyRunningError - - # If a non-empty prefix was given, strip it from the keys in the - # global configuration dict - if prefix: - prefixlen = len(prefix) - gconfig = dict( - (key[prefixlen:], value) - for key, value in gconfig.items() - if key.startswith(prefix) - ) - - # Create a structure from the dotted options - # (e.g. 
"a.b.c = d" -> {'a': {'b': {'c': 'd'}}}) - config = {} - for key, value in gconfig.items(): - parts = key.split(".") - parent = config - key = parts.pop(0) - while parts: - parent = parent.setdefault(key, {}) - key = parts.pop(0) - parent[key] = value - - # Override any options with explicit keyword arguments - config.update(options) - self._configure(config) - - def start(self, paused=False): - """ - Start the configured executors and job stores and begin processing scheduled jobs. - - :param bool paused: if ``True``, don't start job processing until :meth:`resume` is called - :raises SchedulerAlreadyRunningError: if the scheduler is already running - :raises RuntimeError: if running under uWSGI with threads disabled - - """ - if self.state != STATE_STOPPED: - raise SchedulerAlreadyRunningError - - self._check_uwsgi() - - with self._executors_lock: - # Create a default executor if nothing else is configured - if "default" not in self._executors: - self.add_executor(self._create_default_executor(), "default") - - # Start all the executors - for alias, executor in self._executors.items(): - executor.start(self, alias) - - with self._jobstores_lock: - # Create a default job store if nothing else is configured - if "default" not in self._jobstores: - self.add_jobstore(self._create_default_jobstore(), "default") - - # Start all the job stores - for alias, store in self._jobstores.items(): - store.start(self, alias) - - # Schedule all pending jobs - for job, jobstore_alias, replace_existing in self._pending_jobs: - self._real_add_job(job, jobstore_alias, replace_existing) - del self._pending_jobs[:] - - self.state = STATE_PAUSED if paused else STATE_RUNNING - self._logger.info("Scheduler started") - self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_STARTED)) - - if not paused: - self.wakeup() - - @abstractmethod - def shutdown(self, wait=True): - """ - Shuts down the scheduler, along with its executors and job stores. - - Does not interrupt any currently running jobs. - - :param bool wait: ``True`` to wait until all currently executing jobs have finished - :raises SchedulerNotRunningError: if the scheduler has not been started yet - - """ - if self.state == STATE_STOPPED: - raise SchedulerNotRunningError - - self.state = STATE_STOPPED - - # Shut down all executors - with self._executors_lock, self._jobstores_lock: - for executor in self._executors.values(): - executor.shutdown(wait) - - # Shut down all job stores - for jobstore in self._jobstores.values(): - jobstore.shutdown() - - self._logger.info("Scheduler has been shut down") - self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN)) - - def pause(self): - """ - Pause job processing in the scheduler. - - This will prevent the scheduler from waking up to do job processing until :meth:`resume` - is called. It will not however stop any already running job processing. 
- - """ - if self.state == STATE_STOPPED: - raise SchedulerNotRunningError - elif self.state == STATE_RUNNING: - self.state = STATE_PAUSED - self._logger.info("Paused scheduler job processing") - self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_PAUSED)) - - def resume(self): - """Resume job processing in the scheduler.""" - if self.state == STATE_STOPPED: - raise SchedulerNotRunningError - elif self.state == STATE_PAUSED: - self.state = STATE_RUNNING - self._logger.info("Resumed scheduler job processing") - self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_RESUMED)) - self.wakeup() - - @property - def running(self): - """ - Return ``True`` if the scheduler has been started. - - This is a shortcut for ``scheduler.state != STATE_STOPPED``. - - """ - return self.state != STATE_STOPPED - - def add_executor(self, executor, alias="default", **executor_opts): - """ - Adds an executor to this scheduler. - - Any extra keyword arguments will be passed to the executor plugin's constructor, assuming - that the first argument is the name of an executor plugin. - - :param str|unicode|apscheduler.executors.base.BaseExecutor executor: either an executor - instance or the name of an executor plugin - :param str|unicode alias: alias for the scheduler - :raises ValueError: if there is already an executor by the given alias - - """ - with self._executors_lock: - if alias in self._executors: - raise ValueError( - f'This scheduler already has an executor by the alias of "{alias}"' - ) - - if isinstance(executor, BaseExecutor): - self._executors[alias] = executor - elif isinstance(executor, str): - self._executors[alias] = executor = self._create_plugin_instance( - "executor", executor, executor_opts - ) - else: - raise TypeError( - f"Expected an executor instance or a string, got {executor.__class__.__name__} instead" - ) - - # Start the executor right away if the scheduler is running - if self.state != STATE_STOPPED: - executor.start(self, alias) - - self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_ADDED, alias)) - - def remove_executor(self, alias, shutdown=True): - """ - Removes the executor by the given alias from this scheduler. - - :param str|unicode alias: alias of the executor - :param bool shutdown: ``True`` to shut down the executor after - removing it - - """ - with self._executors_lock: - executor = self._lookup_executor(alias) - del self._executors[alias] - - if shutdown: - executor.shutdown() - - self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_REMOVED, alias)) - - def add_jobstore(self, jobstore, alias="default", **jobstore_opts): - """ - Adds a job store to this scheduler. - - Any extra keyword arguments will be passed to the job store plugin's constructor, assuming - that the first argument is the name of a job store plugin. 
- - :param str|unicode|apscheduler.jobstores.base.BaseJobStore jobstore: job store to be added - :param str|unicode alias: alias for the job store - :raises ValueError: if there is already a job store by the given alias - - """ - with self._jobstores_lock: - if alias in self._jobstores: - raise ValueError( - f'This scheduler already has a job store by the alias of "{alias}"' - ) - - if isinstance(jobstore, BaseJobStore): - self._jobstores[alias] = jobstore - elif isinstance(jobstore, str): - self._jobstores[alias] = jobstore = self._create_plugin_instance( - "jobstore", jobstore, jobstore_opts - ) - else: - raise TypeError( - f"Expected a job store instance or a string, got {jobstore.__class__.__name__} instead" - ) - - # Start the job store right away if the scheduler isn't stopped - if self.state != STATE_STOPPED: - jobstore.start(self, alias) - - # Notify listeners that a new job store has been added - self._dispatch_event(SchedulerEvent(EVENT_JOBSTORE_ADDED, alias)) - - # Notify the scheduler so it can scan the new job store for jobs - if self.state != STATE_STOPPED: - self.wakeup() - - def remove_jobstore(self, alias, shutdown=True): - """ - Removes the job store by the given alias from this scheduler. - - :param str|unicode alias: alias of the job store - :param bool shutdown: ``True`` to shut down the job store after removing it - - """ - with self._jobstores_lock: - jobstore = self._lookup_jobstore(alias) - del self._jobstores[alias] - - if shutdown: - jobstore.shutdown() - - self._dispatch_event(SchedulerEvent(EVENT_JOBSTORE_REMOVED, alias)) - - def add_listener(self, callback, mask=EVENT_ALL): - """ - add_listener(callback, mask=EVENT_ALL) - - Adds a listener for scheduler events. - - When a matching event occurs, ``callback`` is executed with the event object as its - sole argument. If the ``mask`` parameter is not provided, the callback will receive events - of all types. - - :param callback: any callable that takes one argument - :param int mask: bitmask that indicates which events should be - listened to - - .. seealso:: :mod:`apscheduler.events` - .. seealso:: :ref:`scheduler-events` - - """ - with self._listeners_lock: - self._listeners.append((callback, mask)) - - def remove_listener(self, callback): - """Removes a previously added event listener.""" - - with self._listeners_lock: - for i, (cb, _) in enumerate(self._listeners): - if callback == cb: - del self._listeners[i] - - def add_job( - self, - func, - trigger=None, - args=None, - kwargs=None, - id=None, - name=None, - misfire_grace_time=undefined, - coalesce=undefined, - max_instances=undefined, - next_run_time=undefined, - jobstore="default", - executor="default", - replace_existing=False, - **trigger_args, - ): - """ - add_job(func, trigger=None, args=None, kwargs=None, id=None, \ - name=None, misfire_grace_time=undefined, coalesce=undefined, \ - max_instances=undefined, next_run_time=undefined, \ - jobstore='default', executor='default', \ - replace_existing=False, **trigger_args) - - Adds the given job to the job list and wakes up the scheduler if it's already running. - - Any option that defaults to ``undefined`` will be replaced with the corresponding default - value when the job is scheduled (which happens when the scheduler is started, or - immediately if the scheduler is already running). 
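# NOTE (editor): illustrative sketch, not part of the diff. It shows the two forms of
# the ``func`` argument accepted by add_job(): a callable, and a "module:callable"
# textual reference. "myapp.tasks:send_report", the recipient address and the job ids
# are hypothetical placeholders.
from apscheduler.schedulers.background import BackgroundScheduler

def send_report(recipient):
    print(f"reporting to {recipient}")

scheduler = BackgroundScheduler()
scheduler.start()

# trigger alias plus trigger kwargs; replace_existing avoids ConflictingIdError on restart
scheduler.add_job(send_report, "cron", hour=7, minute=30,
                  args=["ops@example.com"], id="daily-report", replace_existing=True)

# a textual reference ("package.module:callable") also works and stays serializable
# in persistent job stores, e.g.:
# scheduler.add_job("myapp.tasks:send_report", "interval", minutes=15, id="interval-report")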
- - The ``func`` argument can be given either as a callable object or a textual reference in - the ``package.module:some.object`` format, where the first half (separated by ``:``) is an - importable module and the second half is a reference to the callable object, relative to - the module. - - The ``trigger`` argument can either be: - #. the alias name of the trigger (e.g. ``date``, ``interval`` or ``cron``), in which case - any extra keyword arguments to this method are passed on to the trigger's constructor - #. an instance of a trigger class - - :param func: callable (or a textual reference to one) to run at the given time - :param str|apscheduler.triggers.base.BaseTrigger trigger: trigger that determines when - ``func`` is called - :param list|tuple args: list of positional arguments to call func with - :param dict kwargs: dict of keyword arguments to call func with - :param str|unicode id: explicit identifier for the job (for modifying it later) - :param str|unicode name: textual description of the job - :param int misfire_grace_time: seconds after the designated runtime that the job is still - allowed to be run (or ``None`` to allow the job to run no matter how late it is) - :param bool coalesce: run once instead of many times if the scheduler determines that the - job should be run more than once in succession - :param int max_instances: maximum number of concurrently running instances allowed for this - job - :param datetime next_run_time: when to first run the job, regardless of the trigger (pass - ``None`` to add the job as paused) - :param str|unicode jobstore: alias of the job store to store the job in - :param str|unicode executor: alias of the executor to run the job with - :param bool replace_existing: ``True`` to replace an existing job with the same ``id`` - (but retain the number of runs from the existing one) - :rtype: Job - - """ - job_kwargs = { - "trigger": self._create_trigger(trigger, trigger_args), - "executor": executor, - "func": func, - "args": tuple(args) if args is not None else (), - "kwargs": dict(kwargs) if kwargs is not None else {}, - "id": id, - "name": name, - "misfire_grace_time": misfire_grace_time, - "coalesce": coalesce, - "max_instances": max_instances, - "next_run_time": next_run_time, - } - job_kwargs = dict( - (key, value) for key, value in job_kwargs.items() if value is not undefined - ) - job = Job(self, **job_kwargs) - - # Don't really add jobs to job stores before the scheduler is up and running - with self._jobstores_lock: - if self.state == STATE_STOPPED: - self._pending_jobs.append((job, jobstore, replace_existing)) - self._logger.info( - "Adding job tentatively -- it will be properly scheduled when " - "the scheduler starts" - ) - else: - self._real_add_job(job, jobstore, replace_existing) - - return job - - def scheduled_job( - self, - trigger, - args=None, - kwargs=None, - id=None, - name=None, - misfire_grace_time=undefined, - coalesce=undefined, - max_instances=undefined, - next_run_time=undefined, - jobstore="default", - executor="default", - **trigger_args, - ): - """ - scheduled_job(trigger, args=None, kwargs=None, id=None, \ - name=None, misfire_grace_time=undefined, \ - coalesce=undefined, max_instances=undefined, \ - next_run_time=undefined, jobstore='default', \ - executor='default',**trigger_args) - - A decorator version of :meth:`add_job`, except that ``replace_existing`` is always - ``True``. - - .. important:: The ``id`` argument must be given if scheduling a job in a persistent job - store. 
The scheduler cannot, however, enforce this requirement. - - """ - - def inner(func): - self.add_job( - func, - trigger, - args, - kwargs, - id, - name, - misfire_grace_time, - coalesce, - max_instances, - next_run_time, - jobstore, - executor, - True, - **trigger_args, - ) - return func - - return inner - - def modify_job(self, job_id, jobstore=None, **changes): - """ - Modifies the properties of a single job. - - Modifications are passed to this method as extra keyword arguments. - - :param str|unicode job_id: the identifier of the job - :param str|unicode jobstore: alias of the job store that contains the job - :return Job: the relevant job instance - - """ - with self._jobstores_lock: - job, jobstore = self._lookup_job(job_id, jobstore) - job._modify(**changes) - if jobstore: - self._lookup_jobstore(jobstore).update_job(job) - - self._dispatch_event(JobEvent(EVENT_JOB_MODIFIED, job_id, jobstore)) - - # Wake up the scheduler since the job's next run time may have been changed - if self.state == STATE_RUNNING: - self.wakeup() - - return job - - def reschedule_job(self, job_id, jobstore=None, trigger=None, **trigger_args): - """ - Constructs a new trigger for a job and updates its next run time. - - Extra keyword arguments are passed directly to the trigger's constructor. - - :param str|unicode job_id: the identifier of the job - :param str|unicode jobstore: alias of the job store that contains the job - :param trigger: alias of the trigger type or a trigger instance - :return Job: the relevant job instance - - """ - trigger = self._create_trigger(trigger, trigger_args) - now = datetime.now(self.timezone) - next_run_time = trigger.get_next_fire_time(None, now) - return self.modify_job( - job_id, jobstore, trigger=trigger, next_run_time=next_run_time - ) - - def pause_job(self, job_id, jobstore=None): - """ - Causes the given job not to be executed until it is explicitly resumed. - - :param str|unicode job_id: the identifier of the job - :param str|unicode jobstore: alias of the job store that contains the job - :return Job: the relevant job instance - - """ - return self.modify_job(job_id, jobstore, next_run_time=None) - - def resume_job(self, job_id, jobstore=None): - """ - Resumes the schedule of the given job, or removes the job if its schedule is finished. - - :param str|unicode job_id: the identifier of the job - :param str|unicode jobstore: alias of the job store that contains the job - :return Job|None: the relevant job instance if the job was rescheduled, or ``None`` if no - next run time could be calculated and the job was removed - - """ - with self._jobstores_lock: - job, jobstore = self._lookup_job(job_id, jobstore) - now = datetime.now(self.timezone) - next_run_time = job.trigger.get_next_fire_time(None, now) - if next_run_time: - return self.modify_job(job_id, jobstore, next_run_time=next_run_time) - else: - self.remove_job(job.id, jobstore) - - def get_jobs(self, jobstore=None, pending=None): - """ - Returns a list of pending jobs (if the scheduler hasn't been started yet) and scheduled - jobs, either from a specific job store or from all of them. - - If the scheduler has not been started yet, only pending jobs can be returned because the - job stores haven't been started yet either. 
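# NOTE (editor): illustrative sketch, not part of the diff, exercising the job
# management calls defined above (pause_job, resume_job, reschedule_job). The job id
# and schedule values are placeholders.
from apscheduler.schedulers.background import BackgroundScheduler

def cleanup():
    print("cleaning up")

scheduler = BackgroundScheduler()
scheduler.start()
scheduler.add_job(cleanup, "interval", minutes=15, id="cleanup")

scheduler.pause_job("cleanup")                               # next_run_time becomes None
scheduler.resume_job("cleanup")                              # recomputed from the trigger
scheduler.reschedule_job("cleanup", trigger="cron", hour=3)  # swap in a new trigger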
- - :param str|unicode jobstore: alias of the job store - :param bool pending: **DEPRECATED** - :rtype: list[Job] - - """ - if pending is not None: - warnings.warn( - 'The "pending" option is deprecated -- get_jobs() always returns ' - "scheduled jobs if the scheduler has been started and pending jobs " - "otherwise", - DeprecationWarning, - ) - - with self._jobstores_lock: - jobs = [] - if self.state == STATE_STOPPED: - for job, alias, replace_existing in self._pending_jobs: - if jobstore is None or alias == jobstore: - jobs.append(job) - else: - for alias, store in self._jobstores.items(): - if jobstore is None or alias == jobstore: - jobs.extend(store.get_all_jobs()) - - return jobs - - def get_job(self, job_id, jobstore=None): - """ - Returns the Job that matches the given ``job_id``. - - :param str|unicode job_id: the identifier of the job - :param str|unicode jobstore: alias of the job store that most likely contains the job - :return: the Job by the given ID, or ``None`` if it wasn't found - :rtype: Job - - """ - with self._jobstores_lock: - try: - return self._lookup_job(job_id, jobstore)[0] - except JobLookupError: - return - - def remove_job(self, job_id, jobstore=None): - """ - Removes a job, preventing it from being run any more. - - :param str|unicode job_id: the identifier of the job - :param str|unicode jobstore: alias of the job store that contains the job - :raises JobLookupError: if the job was not found - - """ - jobstore_alias = None - with self._jobstores_lock: - # Check if the job is among the pending jobs - if self.state == STATE_STOPPED: - for i, (job, alias, replace_existing) in enumerate(self._pending_jobs): - if job.id == job_id and jobstore in (None, alias): - del self._pending_jobs[i] - jobstore_alias = alias - break - else: - # Otherwise, try to remove it from each store until it succeeds or we run out of - # stores to check - for alias, store in self._jobstores.items(): - if jobstore in (None, alias): - try: - store.remove_job(job_id) - jobstore_alias = alias - break - except JobLookupError: - continue - - if jobstore_alias is None: - raise JobLookupError(job_id) - - # Notify listeners that a job has been removed - event = JobEvent(EVENT_JOB_REMOVED, job_id, jobstore_alias) - self._dispatch_event(event) - - self._logger.info("Removed job %s", job_id) - - def remove_all_jobs(self, jobstore=None): - """ - Removes all jobs from the specified job store, or all job stores if none is given. - - :param str|unicode jobstore: alias of the job store - - """ - with self._jobstores_lock: - if self.state == STATE_STOPPED: - if jobstore: - self._pending_jobs = [ - pending - for pending in self._pending_jobs - if pending[1] != jobstore - ] - else: - self._pending_jobs = [] - else: - for alias, store in self._jobstores.items(): - if jobstore in (None, alias): - store.remove_all_jobs() - - self._dispatch_event(SchedulerEvent(EVENT_ALL_JOBS_REMOVED, jobstore)) - - def print_jobs(self, jobstore=None, out=None): - """ - print_jobs(jobstore=None, out=sys.stdout) - - Prints out a textual listing of all jobs currently scheduled on either all job stores or - just a specific one. 
- - :param str|unicode jobstore: alias of the job store, ``None`` to list jobs from all stores - :param file out: a file-like object to print to (defaults to **sys.stdout** if nothing is - given) - - """ - out = out or sys.stdout - with self._jobstores_lock: - if self.state == STATE_STOPPED: - print("Pending jobs:", file=out) - if self._pending_jobs: - for job, jobstore_alias, replace_existing in self._pending_jobs: - if jobstore in (None, jobstore_alias): - print(f" {job}", file=out) - else: - print(" No pending jobs", file=out) - else: - for alias, store in sorted(self._jobstores.items()): - if jobstore in (None, alias): - print(f"Jobstore {alias}:", file=out) - jobs = store.get_all_jobs() - if jobs: - for job in jobs: - print(f" {job}", file=out) - else: - print(" No scheduled jobs", file=out) - - def export_jobs(self, outfile, jobstore=None): - """ - Export stored jobs as JSON. - - :param outfile: either a file object opened in text write mode ("w"), or a path - to the target file - :param jobstore: alias of the job store to export jobs from (if omitted, export - from all configured job stores) - - """ - import json - import pickle - from base64 import b64encode - - from apscheduler import version - - if self.state == STATE_STOPPED: - raise RuntimeError( - "the scheduler must have been started for job export to work" - ) - - def encode_with_pickle(obj): - return b64encode(pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)).decode("ascii") - - def json_default(obj): - if hasattr(obj, "__getstate__") and hasattr(obj, "__setstate__"): - state = obj.__getstate__() - if isinstance(state, Mapping): - return { - "__apscheduler_class__": obj_to_ref(obj.__class__), - "__apscheduler_state__": state, - } - - return {"__apscheduler_pickle__": encode_with_pickle(obj)} - - with self._jobstores_lock: - all_jobs = [ - job - for alias, store in self._jobstores.items() - for job in store.get_all_jobs() - if jobstore in (None, alias) - ] - - with ExitStack() as stack: - if not hasattr(outfile, "write"): - outfile = stack.enter_context(open(outfile, "w")) - - json.dump( - { - "version": 1, - "scheduler_version": version, - "jobs": [job.__getstate__() for job in all_jobs], - }, - outfile, - default=json_default, - ) - - def import_jobs(self, infile, jobstore="default"): - """ - Import jobs previously exported via :meth:`export_jobs. 
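# NOTE (editor): illustrative sketch, not part of the diff. export_jobs() (above) and
# import_jobs() (continuing below) round-trip scheduled jobs through a JSON file; the
# scheduler must already be started before exporting, and the file name is a placeholder.
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()
scheduler.start()
scheduler.add_job(print, "interval", minutes=30, args=["ping"], id="ping")
scheduler.export_jobs("jobs-backup.json")      # dumps jobs from every configured store

# on another (fresh) scheduler instance, restore them into its default job store:
# restored = BackgroundScheduler(); restored.start()
# restored.import_jobs("jobs-backup.json", jobstore="default")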
- - :param infile: either a file object opened in text read mode ("r") or a path to - a JSON file containing previously exported jobs - :param jobstore: the alias of the job store to import the jobs to - - """ - import json - import pickle - from base64 import b64decode - - def json_object_hook(dct): - if pickle_data := dct.get("__apscheduler_pickle__"): - return pickle.loads(b64decode(pickle_data)) - - if obj_class := dct.get("__apscheduler_class__"): - if obj_state := dct.get("__apscheduler_state__"): - obj_class = ref_to_obj(obj_class) - obj = object.__new__(obj_class) - obj.__setstate__(obj_state) - return obj - - return dct - - jobstore = self._jobstores[jobstore] - with ExitStack() as stack: - if not hasattr(infile, "read"): - infile = stack.enter_context(open(infile)) - - data = json.load(infile, object_hook=json_object_hook) - if not isinstance(data, dict): - raise ValueError() - - if (version := data.get("version", None)) != 1: - raise ValueError(f"unrecognized version: {version}") - - for job_state in data["jobs"]: - job = object.__new__(Job) - job.__setstate__(job_state) - jobstore.add_job(job) - - @abstractmethod - def wakeup(self): - """ - Notifies the scheduler that there may be jobs due for execution. - Triggers :meth:`_process_jobs` to be run in an implementation specific manner. - """ - - # - # Private API - # - - def _configure(self, config): - # Set general options - self._logger = maybe_ref(config.pop("logger", None)) or getLogger( - "apscheduler.scheduler" - ) - self.timezone = astimezone(config.pop("timezone", None)) or get_localzone() - self.jobstore_retry_interval = float(config.pop("jobstore_retry_interval", 10)) - - # Set the job defaults - job_defaults = config.get("job_defaults", {}) - self._job_defaults = { - "misfire_grace_time": asint(job_defaults.get("misfire_grace_time", 1)), - "coalesce": asbool(job_defaults.get("coalesce", True)), - "max_instances": asint(job_defaults.get("max_instances", 1)), - } - - # Configure executors - self._executors.clear() - for alias, value in config.get("executors", {}).items(): - if isinstance(value, BaseExecutor): - self.add_executor(value, alias) - elif isinstance(value, MutableMapping): - executor_class = value.pop("class", None) - plugin = value.pop("type", None) - if plugin: - executor = self._create_plugin_instance("executor", plugin, value) - elif executor_class: - cls = maybe_ref(executor_class) - executor = cls(**value) - else: - raise ValueError( - f'Cannot create executor "{alias}" -- either "type" or "class" must be defined' - ) - - self.add_executor(executor, alias) - else: - raise TypeError( - f"Expected executor instance or dict for executors['{alias}'], got {value.__class__.__name__} instead" - ) - - # Configure job stores - self._jobstores.clear() - for alias, value in config.get("jobstores", {}).items(): - if isinstance(value, BaseJobStore): - self.add_jobstore(value, alias) - elif isinstance(value, MutableMapping): - jobstore_class = value.pop("class", None) - plugin = value.pop("type", None) - if plugin: - jobstore = self._create_plugin_instance("jobstore", plugin, value) - elif jobstore_class: - cls = maybe_ref(jobstore_class) - jobstore = cls(**value) - else: - raise ValueError( - f'Cannot create job store "{alias}" -- either "type" or "class" must be ' - "defined" - ) - - self.add_jobstore(jobstore, alias) - else: - raise TypeError( - f"Expected job store instance or dict for jobstores['{alias}'], got {value.__class__.__name__} instead" - ) - - def _create_default_executor(self): - """Creates a default 
executor store, specific to the particular scheduler type.""" - return ThreadPoolExecutor() - - def _create_default_jobstore(self): - """Creates a default job store, specific to the particular scheduler type.""" - return MemoryJobStore() - - def _lookup_executor(self, alias): - """ - Returns the executor instance by the given name from the list of executors that were added - to this scheduler. - - :type alias: str - :raises KeyError: if no executor by the given alias is not found - - """ - try: - return self._executors[alias] - except KeyError: - raise KeyError(f"No such executor: {alias}") - - def _lookup_jobstore(self, alias): - """ - Returns the job store instance by the given name from the list of job stores that were - added to this scheduler. - - :type alias: str - :raises KeyError: if no job store by the given alias is not found - - """ - try: - return self._jobstores[alias] - except KeyError: - raise KeyError(f"No such job store: {alias}") - - def _lookup_job(self, job_id, jobstore_alias): - """ - Finds a job by its ID. - - :type job_id: str - :param str jobstore_alias: alias of a job store to look in - :return tuple[Job, str]: a tuple of job, jobstore alias (jobstore alias is None in case of - a pending job) - :raises JobLookupError: if no job by the given ID is found. - - """ - if self.state == STATE_STOPPED: - # Check if the job is among the pending jobs - for job, alias, replace_existing in self._pending_jobs: - if job.id == job_id: - return job, None - else: - # Look in all job stores - for alias, store in self._jobstores.items(): - if jobstore_alias in (None, alias): - job = store.lookup_job(job_id) - if job is not None: - return job, alias - - raise JobLookupError(job_id) - - def _dispatch_event(self, event): - """ - Dispatches the given event to interested listeners. - - :param SchedulerEvent event: the event to send - - """ - with self._listeners_lock: - listeners = tuple(self._listeners) - - for cb, mask in listeners: - if event.code & mask: - try: - cb(event) - except BaseException: - self._logger.exception("Error notifying listener") - - def _check_uwsgi(self): - """Check if we're running under uWSGI with threads disabled.""" - uwsgi_module = sys.modules.get("uwsgi") - if not getattr(uwsgi_module, "has_threads", True): - raise RuntimeError( - "The scheduler seems to be running under uWSGI, but threads have " - "been disabled. You must run uWSGI with the --enable-threads " - "option for the scheduler to work." 
- ) - - def _real_add_job(self, job, jobstore_alias, replace_existing): - """ - :param Job job: the job to add - :param bool replace_existing: ``True`` to use update_job() in case the job already exists - in the store - - """ - # Fill in undefined values with defaults - replacements = {} - for key, value in self._job_defaults.items(): - if not hasattr(job, key): - replacements[key] = value - - # Calculate the next run time if there is none defined - if not hasattr(job, "next_run_time"): - now = datetime.now(self.timezone) - replacements["next_run_time"] = job.trigger.get_next_fire_time(None, now) - - # Apply any replacements - job._modify(**replacements) - - # Add the job to the given job store - store = self._lookup_jobstore(jobstore_alias) - try: - store.add_job(job) - except ConflictingIdError: - if replace_existing: - store.update_job(job) - else: - raise - - # Mark the job as no longer pending - job._jobstore_alias = jobstore_alias - - # Notify listeners that a new job has been added - event = JobEvent(EVENT_JOB_ADDED, job.id, jobstore_alias) - self._dispatch_event(event) - - self._logger.info('Added job "%s" to job store "%s"', job.name, jobstore_alias) - - # Notify the scheduler about the new job - if self.state == STATE_RUNNING: - self.wakeup() - - def _create_plugin_instance(self, type_, alias, constructor_kwargs): - """Creates an instance of the given plugin type, loading the plugin first if necessary.""" - plugin_container, class_container, base_class = { - "trigger": (self._trigger_plugins, self._trigger_classes, BaseTrigger), - "jobstore": (self._jobstore_plugins, self._jobstore_classes, BaseJobStore), - "executor": (self._executor_plugins, self._executor_classes, BaseExecutor), - }[type_] - - try: - plugin_cls = class_container[alias] - except KeyError: - if alias in plugin_container: - plugin_cls = class_container[alias] = plugin_container[alias].load() - if not issubclass(plugin_cls, base_class): - raise TypeError( - f"The {type_} entry point does not point to a {type_} class" - ) - else: - raise LookupError(f'No {type_} by the name "{alias}" was found') - - return plugin_cls(**constructor_kwargs) - - def _create_trigger(self, trigger, trigger_args): - if isinstance(trigger, BaseTrigger): - return trigger - elif trigger is None: - trigger = "date" - elif not isinstance(trigger, str): - raise TypeError( - f"Expected a trigger instance or string, got {trigger.__class__.__name__} instead" - ) - - # Use the scheduler's time zone if nothing else is specified - trigger_args.setdefault("timezone", self.timezone) - - # Instantiate the trigger class - return self._create_plugin_instance("trigger", trigger, trigger_args) - - def _create_lock(self): - """Creates a reentrant lock object.""" - return RLock() - - def _process_jobs(self): - """ - Iterates through jobs in every jobstore, starts jobs that are due and figures out how long - to wait for the next round. - - If the ``get_due_jobs()`` call raises an exception, a new wakeup is scheduled in at least - ``jobstore_retry_interval`` seconds. 
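# NOTE (editor): illustrative sketch, not part of the diff. _process_jobs() (below)
# dispatches EVENT_JOB_SUBMITTED / EVENT_JOB_MAX_INSTANCES events; add_listener()
# lets application code observe them.
from apscheduler.events import EVENT_JOB_MAX_INSTANCES, EVENT_JOB_SUBMITTED
from apscheduler.schedulers.background import BackgroundScheduler

def on_submission(event):
    print(f"job {event.job_id} submitted for run times {event.scheduled_run_times}")

scheduler = BackgroundScheduler()
scheduler.add_listener(on_submission, EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)
scheduler.start()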
- - """ - if self.state == STATE_PAUSED: - self._logger.debug("Scheduler is paused -- not processing jobs") - return None - - self._logger.debug("Looking for jobs to run") - now = datetime.now(self.timezone) - next_wakeup_time = None - events = [] - - with self._jobstores_lock: - for jobstore_alias, jobstore in self._jobstores.items(): - try: - due_jobs = jobstore.get_due_jobs(now) - except Exception as e: - # Schedule a wakeup at least in jobstore_retry_interval seconds - self._logger.warning( - "Error getting due jobs from job store %r: %s", - jobstore_alias, - e, - ) - retry_wakeup_time = now + timedelta( - seconds=self.jobstore_retry_interval - ) - if not next_wakeup_time or next_wakeup_time > retry_wakeup_time: - next_wakeup_time = retry_wakeup_time - - continue - - for job in due_jobs: - # Look up the job's executor - try: - executor = self._lookup_executor(job.executor) - except BaseException: - self._logger.error( - 'Executor lookup ("%s") failed for job "%s" -- removing it from the ' - "job store", - job.executor, - job, - ) - self.remove_job(job.id, jobstore_alias) - continue - - run_times = job._get_run_times(now) - run_times = ( - run_times[-1:] if run_times and job.coalesce else run_times - ) - if run_times: - try: - executor.submit_job(job, run_times) - except MaxInstancesReachedError: - self._logger.warning( - 'Execution of job "%s" skipped: maximum number of running ' - "instances reached (%d)", - job, - job.max_instances, - ) - event = JobSubmissionEvent( - EVENT_JOB_MAX_INSTANCES, - job.id, - jobstore_alias, - run_times, - ) - events.append(event) - except BaseException: - self._logger.exception( - 'Error submitting job "%s" to executor "%s"', - job, - job.executor, - ) - else: - event = JobSubmissionEvent( - EVENT_JOB_SUBMITTED, job.id, jobstore_alias, run_times - ) - events.append(event) - - # Update the job if it has a next execution time. - # Otherwise remove it from the job store. 
- job_next_run = job.trigger.get_next_fire_time( - run_times[-1], now - ) - if job_next_run: - job._modify(next_run_time=job_next_run) - jobstore.update_job(job) - else: - self.remove_job(job.id, jobstore_alias) - - # Set a new next wakeup time if there isn't one yet or - # the jobstore has an even earlier one - jobstore_next_run_time = jobstore.get_next_run_time() - if jobstore_next_run_time and ( - next_wakeup_time is None - or jobstore_next_run_time < next_wakeup_time - ): - next_wakeup_time = jobstore_next_run_time.astimezone(self.timezone) - - # Dispatch collected events - for event in events: - self._dispatch_event(event) - - # Determine the delay until this method should be called again - if self.state == STATE_PAUSED: - wait_seconds = None - self._logger.debug("Scheduler is paused; waiting until resume() is called") - elif next_wakeup_time is None: - wait_seconds = None - self._logger.debug("No jobs; waiting until a job is added") - else: - now = datetime.now(self.timezone) - wait_seconds = min( - max((next_wakeup_time - now).total_seconds(), 0), TIMEOUT_MAX - ) - self._logger.debug( - "Next wakeup is due at %s (in %f seconds)", - next_wakeup_time, - wait_seconds, - ) - - return wait_seconds diff --git a/venv/Lib/site-packages/apscheduler/schedulers/blocking.py b/venv/Lib/site-packages/apscheduler/schedulers/blocking.py deleted file mode 100644 index 0330885..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/blocking.py +++ /dev/null @@ -1,33 +0,0 @@ -from threading import TIMEOUT_MAX, Event - -from apscheduler.schedulers.base import STATE_STOPPED, BaseScheduler - - -class BlockingScheduler(BaseScheduler): - """ - A scheduler that runs in the foreground - (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will block). - """ - - _event = None - - def start(self, *args, **kwargs): - if self._event is None or self._event.is_set(): - self._event = Event() - - super().start(*args, **kwargs) - self._main_loop() - - def shutdown(self, wait=True): - super().shutdown(wait) - self._event.set() - - def _main_loop(self): - wait_seconds = TIMEOUT_MAX - while self.state != STATE_STOPPED: - self._event.wait(wait_seconds) - self._event.clear() - wait_seconds = self._process_jobs() - - def wakeup(self): - self._event.set() diff --git a/venv/Lib/site-packages/apscheduler/schedulers/gevent.py b/venv/Lib/site-packages/apscheduler/schedulers/gevent.py deleted file mode 100644 index e32ad64..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/gevent.py +++ /dev/null @@ -1,34 +0,0 @@ -from apscheduler.schedulers.base import BaseScheduler -from apscheduler.schedulers.blocking import BlockingScheduler - -try: - import gevent - from gevent.event import Event - from gevent.lock import RLock -except ImportError as exc: # pragma: nocover - raise ImportError("GeventScheduler requires gevent installed") from exc - - -class GeventScheduler(BlockingScheduler): - """A scheduler that runs as a Gevent greenlet.""" - - _greenlet = None - - def start(self, *args, **kwargs): - self._event = Event() - BaseScheduler.start(self, *args, **kwargs) - self._greenlet = gevent.spawn(self._main_loop) - return self._greenlet - - def shutdown(self, *args, **kwargs): - super().shutdown(*args, **kwargs) - self._greenlet.join() - del self._greenlet - - def _create_lock(self): - return RLock() - - def _create_default_executor(self): - from apscheduler.executors.gevent import GeventExecutor - - return GeventExecutor() diff --git a/venv/Lib/site-packages/apscheduler/schedulers/qt.py 
b/venv/Lib/site-packages/apscheduler/schedulers/qt.py deleted file mode 100644 index 457b539..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/qt.py +++ /dev/null @@ -1,44 +0,0 @@ -from importlib import import_module -from itertools import product - -from apscheduler.schedulers.base import BaseScheduler - -for version, pkgname in product(range(6, 1, -1), ("PySide", "PyQt")): - try: - qtcore = import_module(pkgname + str(version) + ".QtCore") - except ImportError: - pass - else: - QTimer = qtcore.QTimer - break -else: - raise ImportError("QtScheduler requires either PySide/PyQt (v6 to v2) installed") - - -class QtScheduler(BaseScheduler): - """A scheduler that runs in a Qt event loop.""" - - _timer = None - - def shutdown(self, *args, **kwargs): - super().shutdown(*args, **kwargs) - self._stop_timer() - - def _start_timer(self, wait_seconds): - self._stop_timer() - if wait_seconds is not None: - wait_time = min(int(wait_seconds * 1000), 2147483647) - self._timer = QTimer.singleShot(wait_time, self._process_jobs) - - def _stop_timer(self): - if self._timer: - if self._timer.isActive(): - self._timer.stop() - del self._timer - - def wakeup(self): - self._start_timer(0) - - def _process_jobs(self): - wait_seconds = super()._process_jobs() - self._start_timer(wait_seconds) diff --git a/venv/Lib/site-packages/apscheduler/schedulers/tornado.py b/venv/Lib/site-packages/apscheduler/schedulers/tornado.py deleted file mode 100644 index a25764d..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/tornado.py +++ /dev/null @@ -1,75 +0,0 @@ -from datetime import timedelta -from functools import wraps - -from apscheduler.schedulers import SchedulerNotRunningError -from apscheduler.schedulers.base import BaseScheduler -from apscheduler.util import maybe_ref - -try: - from tornado.ioloop import IOLoop -except ImportError as exc: # pragma: nocover - raise ImportError("TornadoScheduler requires tornado installed") from exc - - -def run_in_ioloop(func): - @wraps(func) - def wrapper(self, *args, **kwargs): - if self._ioloop is None: - raise SchedulerNotRunningError - - self._ioloop.add_callback(func, self, *args, **kwargs) - - return wrapper - - -class TornadoScheduler(BaseScheduler): - """ - A scheduler that runs on a Tornado IOLoop. - - The default executor can run jobs based on native coroutines (``async def``). 
- - =========== =============================================================== - ``io_loop`` Tornado IOLoop instance to use (defaults to the global IO loop) - =========== =============================================================== - """ - - _ioloop = None - _timeout = None - - @run_in_ioloop - def _shutdown(self, wait=True): - super().shutdown(wait) - self._stop_timer() - - def shutdown(self, wait=True): - if not self.running: - raise SchedulerNotRunningError - - self._shutdown(wait) - - def _configure(self, config): - self._ioloop = maybe_ref(config.pop("io_loop", None)) or IOLoop.current() - super()._configure(config) - - def _start_timer(self, wait_seconds): - self._stop_timer() - if wait_seconds is not None: - self._timeout = self._ioloop.add_timeout( - timedelta(seconds=wait_seconds), self.wakeup - ) - - def _stop_timer(self): - if self._timeout: - self._ioloop.remove_timeout(self._timeout) - del self._timeout - - def _create_default_executor(self): - from apscheduler.executors.tornado import TornadoExecutor - - return TornadoExecutor() - - @run_in_ioloop - def wakeup(self): - self._stop_timer() - wait_seconds = self._process_jobs() - self._start_timer(wait_seconds) diff --git a/venv/Lib/site-packages/apscheduler/schedulers/twisted.py b/venv/Lib/site-packages/apscheduler/schedulers/twisted.py deleted file mode 100644 index 8a91831..0000000 --- a/venv/Lib/site-packages/apscheduler/schedulers/twisted.py +++ /dev/null @@ -1,69 +0,0 @@ -from functools import wraps - -from apscheduler.schedulers import SchedulerNotRunningError -from apscheduler.schedulers.base import BaseScheduler -from apscheduler.util import maybe_ref - -try: - from twisted.internet import reactor as default_reactor -except ImportError as exc: # pragma: nocover - raise ImportError("TwistedScheduler requires Twisted installed") from exc - - -def run_in_reactor(func): - @wraps(func) - def wrapper(self, *args, **kwargs): - self._reactor.callFromThread(func, self, *args, **kwargs) - - return wrapper - - -class TwistedScheduler(BaseScheduler): - """ - A scheduler that runs on a Twisted reactor. 
- - Extra options: - - =========== ======================================================== - ``reactor`` Reactor instance to use (defaults to the global reactor) - =========== ======================================================== - """ - - _reactor = None - _delayedcall = None - - def _configure(self, config): - self._reactor = maybe_ref(config.pop("reactor", default_reactor)) - super()._configure(config) - - @run_in_reactor - def _shutdown(self, wait=True): - super().shutdown(wait) - self._stop_timer() - - def shutdown(self, wait=True): - if not self.running: - raise SchedulerNotRunningError - - self._shutdown(wait) - - def _start_timer(self, wait_seconds): - self._stop_timer() - if wait_seconds is not None: - self._delayedcall = self._reactor.callLater(wait_seconds, self.wakeup) - - def _stop_timer(self): - if self._delayedcall and self._delayedcall.active(): - self._delayedcall.cancel() - del self._delayedcall - - @run_in_reactor - def wakeup(self): - self._stop_timer() - wait_seconds = self._process_jobs() - self._start_timer(wait_seconds) - - def _create_default_executor(self): - from apscheduler.executors.twisted import TwistedExecutor - - return TwistedExecutor() diff --git a/venv/Lib/site-packages/apscheduler/triggers/__init__.py b/venv/Lib/site-packages/apscheduler/triggers/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ec65b87..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/__pycache__/base.cpython-312.pyc deleted file mode 100644 index e0ea74c..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/calendarinterval.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/__pycache__/calendarinterval.cpython-312.pyc deleted file mode 100644 index 0025fd1..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/calendarinterval.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/combining.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/__pycache__/combining.cpython-312.pyc deleted file mode 100644 index d7db2a2..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/combining.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/date.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/__pycache__/date.cpython-312.pyc deleted file mode 100644 index b6aabb1..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/date.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/interval.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/__pycache__/interval.cpython-312.pyc deleted file mode 100644 index d439e4f..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/__pycache__/interval.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/base.py b/venv/Lib/site-packages/apscheduler/triggers/base.py deleted file 
mode 100644 index 917af8c..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/base.py +++ /dev/null @@ -1,35 +0,0 @@ -import random -from abc import ABCMeta, abstractmethod -from datetime import timedelta - - -class BaseTrigger(metaclass=ABCMeta): - """Abstract base class that defines the interface that every trigger must implement.""" - - __slots__ = () - - @abstractmethod - def get_next_fire_time(self, previous_fire_time, now): - """ - Returns the next datetime to fire on, If no such datetime can be calculated, returns - ``None``. - - :param datetime.datetime previous_fire_time: the previous time the trigger was fired - :param datetime.datetime now: current datetime - """ - - def _apply_jitter(self, next_fire_time, jitter, now): - """ - Randomize ``next_fire_time`` by adding a random value (the jitter). - - :param datetime.datetime|None next_fire_time: next fire time without jitter applied. If - ``None``, returns ``None``. - :param int|None jitter: maximum number of seconds to add to ``next_fire_time`` - (if ``None`` or ``0``, returns ``next_fire_time``) - :param datetime.datetime now: current datetime - :return datetime.datetime|None: next fire time with a jitter. - """ - if next_fire_time is None or not jitter: - return next_fire_time - - return next_fire_time + timedelta(seconds=random.uniform(0, jitter)) diff --git a/venv/Lib/site-packages/apscheduler/triggers/calendarinterval.py b/venv/Lib/site-packages/apscheduler/triggers/calendarinterval.py deleted file mode 100644 index 114e9ea..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/calendarinterval.py +++ /dev/null @@ -1,186 +0,0 @@ -from __future__ import annotations - -from datetime import date, datetime, time, timedelta, tzinfo -from typing import Any - -from tzlocal import get_localzone - -from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import ( - asdate, - astimezone, - timezone_repr, -) - - -class CalendarIntervalTrigger(BaseTrigger): - """ - Runs the task on specified calendar-based intervals always at the same exact time of - day. - - When calculating the next date, the ``years`` and ``months`` parameters are first - added to the previous date while keeping the day of the month constant. This is - repeated until the resulting date is valid. After that, the ``weeks`` and ``days`` - parameters are added to that date. Finally, the date is combined with the given time - (hour, minute, second) to form the final datetime. - - This means that if the ``days`` or ``weeks`` parameters are not used, the task will - always be executed on the same day of the month at the same wall clock time, - assuming the date and time are valid. - - If the resulting datetime is invalid due to a daylight saving forward shift, the - date is discarded and the process moves on to the next date. If instead the datetime - is ambiguous due to a backward DST shift, the earlier of the two resulting datetimes - is used. - - If no previous run time is specified when requesting a new run time (like when - starting for the first time or resuming after being paused), ``start_date`` is used - as a reference and the next valid datetime equal to or later than the current time - will be returned. Otherwise, the next valid datetime starting from the previous run - time is returned, even if it's in the past. - - .. warning:: Be wary of setting a start date near the end of the month (29. – 31.) - if you have ``months`` specified in your interval, as this will skip the months - when those days do not exist. 
Likewise, setting the start date on the leap day - (February 29th) and having ``years`` defined may cause some years to be skipped. - - Users are also discouraged from using a time inside the target timezone's DST - switching period (typically around 2 am) since a date could either be skipped or - repeated due to the specified wall clock time either occurring twice or not at - all. - - :param years: number of years to wait - :param months: number of months to wait - :param weeks: number of weeks to wait - :param days: number of days to wait - :param hour: hour to run the task at - :param minute: minute to run the task at - :param second: second to run the task at - :param start_date: first date to trigger on (defaults to current date if omitted) - :param end_date: latest possible date to trigger on - :param timezone: time zone to use for calculating the next fire time (defaults - to scheduler timezone if created via the scheduler, otherwise the local time - zone) - :param jitter: delay the job execution by ``jitter`` seconds at most - """ - - __slots__ = ( - "_time", - "days", - "end_date", - "jitter", - "months", - "start_date", - "timezone", - "weeks", - "years", - ) - - def __init__( - self, - *, - years: int = 0, - months: int = 0, - weeks: int = 0, - days: int = 0, - hour: int = 0, - minute: int = 0, - second: int = 0, - start_date: date | str | None = None, - end_date: date | str | None = None, - timezone: str | tzinfo | None = None, - jitter: int | None = None, - ): - if timezone: - self.timezone = astimezone(timezone) - else: - self.timezone = astimezone(get_localzone()) - - self.years = years - self.months = months - self.weeks = weeks - self.days = days - self.start_date = asdate(start_date) or date.today() - self.end_date = asdate(end_date) - self.jitter = jitter - self._time = time(hour, minute, second, tzinfo=self.timezone) - - if self.years == self.months == self.weeks == self.days == 0: - raise ValueError("interval must be at least 1 day long") - - if self.end_date and self.start_date > self.end_date: - raise ValueError("end_date cannot be earlier than start_date") - - def get_next_fire_time( - self, previous_fire_time: datetime | None, now: datetime - ) -> datetime | None: - while True: - if previous_fire_time: - year, month = previous_fire_time.year, previous_fire_time.month - while True: - month += self.months - year += self.years + (month - 1) // 12 - month = (month - 1) % 12 + 1 - try: - next_date = date(year, month, previous_fire_time.day) - except ValueError: - pass # Nonexistent date - else: - next_date += timedelta(self.days + self.weeks * 7) - break - else: - next_date = self.start_date - - # Don't return any date past end_date - if self.end_date and next_date > self.end_date: - return None - - # Combine the date with the designated time and normalize the result - timestamp = datetime.combine(next_date, self._time).timestamp() - next_time = datetime.fromtimestamp(timestamp, self.timezone) - - # Check if the time is off due to normalization and a forward DST shift - if next_time.timetz() != self._time: - previous_fire_time = next_time.date() - else: - return self._apply_jitter(next_time, self.jitter, now) - - def __getstate__(self) -> dict[str, Any]: - return { - "version": 1, - "interval": [self.years, self.months, self.weeks, self.days], - "time": [self._time.hour, self._time.minute, self._time.second], - "start_date": self.start_date, - "end_date": self.end_date, - "timezone": self.timezone, - "jitter": self.jitter, - } - - def __setstate__(self, state: dict[str, 
Any]) -> None: - if state.get("version", 1) > 1: - raise ValueError( - f"Got serialized data for version {state['version']} of " - f"{self.__class__.__name__}, but only versions up to 1 can be handled" - ) - - self.years, self.months, self.weeks, self.days = state["interval"] - self.start_date = state["start_date"] - self.end_date = state["end_date"] - self.timezone = state["timezone"] - self.jitter = state["jitter"] - self._time = time(*state["time"], tzinfo=self.timezone) - - def __repr__(self) -> str: - fields = [] - for field in "years", "months", "weeks", "days": - value = getattr(self, field) - if value > 0: - fields.append(f"{field}={value}") - - fields.append(f"time={self._time.isoformat()!r}") - fields.append(f"start_date='{self.start_date}'") - if self.end_date: - fields.append(f"end_date='{self.end_date}'") - - fields.append(f"timezone={timezone_repr(self.timezone)!r}") - return f"{self.__class__.__name__}({', '.join(fields)})" diff --git a/venv/Lib/site-packages/apscheduler/triggers/combining.py b/venv/Lib/site-packages/apscheduler/triggers/combining.py deleted file mode 100644 index 7d526e9..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/combining.py +++ /dev/null @@ -1,114 +0,0 @@ -from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import obj_to_ref, ref_to_obj - - -class BaseCombiningTrigger(BaseTrigger): - __slots__ = ("jitter", "triggers") - - def __init__(self, triggers, jitter=None): - self.triggers = triggers - self.jitter = jitter - - def __getstate__(self): - return { - "version": 1, - "triggers": [ - (obj_to_ref(trigger.__class__), trigger.__getstate__()) - for trigger in self.triggers - ], - "jitter": self.jitter, - } - - def __setstate__(self, state): - if state.get("version", 1) > 1: - raise ValueError( - f"Got serialized data for version {state['version']} of " - f"{self.__class__.__name__}, but only versions up to 1 can be handled" - ) - - self.jitter = state["jitter"] - self.triggers = [] - for clsref, state in state["triggers"]: - cls = ref_to_obj(clsref) - trigger = cls.__new__(cls) - trigger.__setstate__(state) - self.triggers.append(trigger) - - def __repr__(self): - return "<{}({}{})>".format( - self.__class__.__name__, - self.triggers, - f", jitter={self.jitter}" if self.jitter else "", - ) - - -class AndTrigger(BaseCombiningTrigger): - """ - Always returns the earliest next fire time that all the given triggers can agree on. - The trigger is considered to be finished when any of the given triggers has finished its - schedule. - - Trigger alias: ``and`` - - .. warning:: This trigger should only be used to combine triggers that fire on - specific times of day, such as - :class:`~apscheduler.triggers.cron.CronTrigger` and - class:`~apscheduler.triggers.calendarinterval.CalendarIntervalTrigger`. - Attempting to use it with - :class:`~apscheduler.triggers.interval.IntervalTrigger` will likely result in - the scheduler hanging as it tries to find a fire time that matches exactly - between fire times produced by all the given triggers. 
- - :param list triggers: triggers to combine - :param int|None jitter: delay the job execution by ``jitter`` seconds at most - """ - - __slots__ = () - - def get_next_fire_time(self, previous_fire_time, now): - while True: - fire_times = [ - trigger.get_next_fire_time(previous_fire_time, now) - for trigger in self.triggers - ] - if None in fire_times: - return None - elif min(fire_times) == max(fire_times): - return self._apply_jitter(fire_times[0], self.jitter, now) - else: - now = max(fire_times) - - def __str__(self): - return "and[{}]".format(", ".join(str(trigger) for trigger in self.triggers)) - - -class OrTrigger(BaseCombiningTrigger): - """ - Always returns the earliest next fire time produced by any of the given triggers. - The trigger is considered finished when all the given triggers have finished their schedules. - - Trigger alias: ``or`` - - :param list triggers: triggers to combine - :param int|None jitter: delay the job execution by ``jitter`` seconds at most - - .. note:: Triggers that depends on the previous fire time, such as the interval trigger, may - seem to behave strangely since they are always passed the previous fire time produced by - any of the given triggers. - """ - - __slots__ = () - - def get_next_fire_time(self, previous_fire_time, now): - fire_times = [ - trigger.get_next_fire_time(previous_fire_time, now) - for trigger in self.triggers - ] - fire_times = [fire_time for fire_time in fire_times if fire_time is not None] - if fire_times: - return self._apply_jitter(min(fire_times), self.jitter, now) - else: - return None - - def __str__(self): - return "or[{}]".format(", ".join(str(trigger) for trigger in self.triggers)) diff --git a/venv/Lib/site-packages/apscheduler/triggers/cron/__init__.py b/venv/Lib/site-packages/apscheduler/triggers/cron/__init__.py deleted file mode 100644 index 19219b8..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/cron/__init__.py +++ /dev/null @@ -1,301 +0,0 @@ -from datetime import datetime, timedelta, timezone - -from tzlocal import get_localzone - -from apscheduler.triggers.base import BaseTrigger -from apscheduler.triggers.cron.fields import ( - DEFAULT_VALUES, - BaseField, - DayOfMonthField, - DayOfWeekField, - MonthField, - WeekField, -) -from apscheduler.util import ( - astimezone, - convert_to_datetime, - datetime_ceil, - datetime_repr, - datetime_utc_add, -) - -UTC = timezone.utc - - -class CronTrigger(BaseTrigger): - """ - Triggers when current time matches all specified time constraints, - similarly to how the UNIX cron scheduler works. - - :param int|str year: 4-digit year - :param int|str month: month (1-12) - :param int|str day: day of month (1-31) - :param int|str week: ISO week (1-53) - :param int|str day_of_week: number or name of weekday (0-6 or mon,tue,wed,thu,fri,sat,sun) - :param int|str hour: hour (0-23) - :param int|str minute: minute (0-59) - :param int|str second: second (0-59) - :param datetime|str start_date: earliest possible date/time to trigger on (inclusive) - :param datetime|str end_date: latest possible date/time to trigger on (inclusive) - :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (defaults - to scheduler timezone) - :param int|None jitter: delay the job execution by ``jitter`` seconds at most - - .. note:: The first weekday is always **monday**. 
- """ - - FIELD_NAMES = ( - "year", - "month", - "day", - "week", - "day_of_week", - "hour", - "minute", - "second", - ) - FIELDS_MAP = { - "year": BaseField, - "month": MonthField, - "week": WeekField, - "day": DayOfMonthField, - "day_of_week": DayOfWeekField, - "hour": BaseField, - "minute": BaseField, - "second": BaseField, - } - - __slots__ = "end_date", "fields", "jitter", "start_date", "timezone" - - def __init__( - self, - year=None, - month=None, - day=None, - week=None, - day_of_week=None, - hour=None, - minute=None, - second=None, - start_date=None, - end_date=None, - timezone=None, - jitter=None, - ): - if timezone: - self.timezone = astimezone(timezone) - elif isinstance(start_date, datetime) and start_date.tzinfo: - self.timezone = astimezone(start_date.tzinfo) - elif isinstance(end_date, datetime) and end_date.tzinfo: - self.timezone = astimezone(end_date.tzinfo) - else: - self.timezone = get_localzone() - - self.start_date = convert_to_datetime(start_date, self.timezone, "start_date") - self.end_date = convert_to_datetime(end_date, self.timezone, "end_date") - - self.jitter = jitter - - values = dict( - (key, value) - for (key, value) in locals().items() - if key in self.FIELD_NAMES and value is not None - ) - self.fields = [] - assign_defaults = False - for field_name in self.FIELD_NAMES: - if field_name in values: - exprs = values.pop(field_name) - is_default = False - assign_defaults = not values - elif assign_defaults: - exprs = DEFAULT_VALUES[field_name] - is_default = True - else: - exprs = "*" - is_default = True - - field_class = self.FIELDS_MAP[field_name] - field = field_class(field_name, exprs, is_default) - self.fields.append(field) - - @classmethod - def from_crontab(cls, expr, timezone=None): - """ - Create a :class:`~CronTrigger` from a standard crontab expression. - - See https://en.wikipedia.org/wiki/Cron for more information on the format accepted here. - - :param expr: minute, hour, day of month, month, day of week - :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations ( - defaults to scheduler timezone) - :return: a :class:`~CronTrigger` instance - - """ - values = expr.split() - if len(values) != 5: - raise ValueError(f"Wrong number of fields; got {len(values)}, expected 5") - - return cls( - minute=values[0], - hour=values[1], - day=values[2], - month=values[3], - day_of_week=values[4], - timezone=timezone, - ) - - def _increment_field_value(self, dateval, fieldnum): - """ - Increments the designated field and resets all less significant fields to their minimum - values. 
- - :type dateval: datetime - :type fieldnum: int - :return: a tuple containing the new date, and the number of the field that was actually - incremented - :rtype: tuple - """ - - values = {} - i = 0 - while i < len(self.fields): - field = self.fields[i] - if not field.REAL: - if i == fieldnum: - fieldnum -= 1 - i -= 1 - else: - i += 1 - continue - - if i < fieldnum: - values[field.name] = field.get_value(dateval) - i += 1 - elif i > fieldnum: - values[field.name] = field.get_min(dateval) - i += 1 - else: - value = field.get_value(dateval) - maxval = field.get_max(dateval) - if value == maxval: - fieldnum -= 1 - i -= 1 - else: - values[field.name] = value + 1 - i += 1 - - difference = datetime(**values) - dateval.replace(tzinfo=None) - dateval = datetime_utc_add(dateval, difference) - return dateval, fieldnum - - def _set_field_value(self, dateval, fieldnum, new_value): - values = {} - for i, field in enumerate(self.fields): - if field.REAL: - if i < fieldnum: - values[field.name] = field.get_value(dateval) - elif i > fieldnum: - values[field.name] = field.get_min(dateval) - else: - values[field.name] = new_value - - return datetime(**values, tzinfo=self.timezone, fold=dateval.fold) - - def get_next_fire_time(self, previous_fire_time, now): - if previous_fire_time: - start_date = min( - now.astimezone(UTC), - datetime_utc_add( - previous_fire_time, timedelta(microseconds=1) - ).astimezone(UTC), - ).astimezone(self.timezone) - if start_date == previous_fire_time: - start_date = datetime_utc_add(start_date, timedelta(microseconds=1)) - else: - start_date = ( - max(now.astimezone(UTC), self.start_date.astimezone(UTC)).astimezone( - self.timezone - ) - if self.start_date - else now - ) - - fieldnum = 0 - next_date = datetime_ceil(start_date).astimezone(self.timezone) - while 0 <= fieldnum < len(self.fields): - field = self.fields[fieldnum] - curr_value = field.get_value(next_date) - next_value = field.get_next_value(next_date) - - if next_value is None: - # No valid value was found - next_date, fieldnum = self._increment_field_value( - next_date, fieldnum - 1 - ) - elif next_value > curr_value: - # A valid, but higher than the starting value, was found - if field.REAL: - next_date = self._set_field_value(next_date, fieldnum, next_value) - fieldnum += 1 - else: - next_date, fieldnum = self._increment_field_value( - next_date, fieldnum - ) - else: - # A valid value was found, no changes necessary - fieldnum += 1 - - # Return if the date has rolled past the end date - if self.end_date and next_date > self.end_date: - return None - - if fieldnum >= 0: - next_date = self._apply_jitter(next_date, self.jitter, now) - return min(next_date, self.end_date) if self.end_date else next_date - - def __getstate__(self): - return { - "version": 2, - "timezone": self.timezone, - "start_date": self.start_date, - "end_date": self.end_date, - "fields": self.fields, - "jitter": self.jitter, - } - - def __setstate__(self, state): - # This is for compatibility with APScheduler 3.0.x - if isinstance(state, tuple): - state = state[1] - - if state.get("version", 1) > 2: - raise ValueError( - f"Got serialized data for version {state['version']} of " - f"{self.__class__.__name__}, but only versions up to 2 can be handled" - ) - - self.timezone = astimezone(state["timezone"]) - self.start_date = state["start_date"] - self.end_date = state["end_date"] - self.fields = state["fields"] - self.jitter = state.get("jitter") - - def __str__(self): - options = [f"{f.name}='{f}'" for f in self.fields if not f.is_default] - return 
"cron[{}]".format(", ".join(options)) - - def __repr__(self): - options = [f"{f.name}='{f}'" for f in self.fields if not f.is_default] - if self.start_date: - options.append(f"start_date={datetime_repr(self.start_date)!r}") - if self.end_date: - options.append(f"end_date={datetime_repr(self.end_date)!r}") - if self.jitter: - options.append(f"jitter={self.jitter}") - - return "<{} ({}, timezone='{}')>".format( - self.__class__.__name__, - ", ".join(options), - self.timezone, - ) diff --git a/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4854362..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/expressions.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/expressions.cpython-312.pyc deleted file mode 100644 index ff63b8a..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/expressions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/fields.cpython-312.pyc b/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/fields.cpython-312.pyc deleted file mode 100644 index f07f4cd..0000000 Binary files a/venv/Lib/site-packages/apscheduler/triggers/cron/__pycache__/fields.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/apscheduler/triggers/cron/expressions.py b/venv/Lib/site-packages/apscheduler/triggers/cron/expressions.py deleted file mode 100644 index 74044da..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/cron/expressions.py +++ /dev/null @@ -1,285 +0,0 @@ -"""This module contains the expressions applicable for CronTrigger's fields.""" - -__all__ = ( - "AllExpression", - "LastDayOfMonthExpression", - "RangeExpression", - "WeekdayPositionExpression", - "WeekdayRangeExpression", -) - -import re -from calendar import monthrange - -from apscheduler.util import asint - -WEEKDAYS = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"] -MONTHS = [ - "jan", - "feb", - "mar", - "apr", - "may", - "jun", - "jul", - "aug", - "sep", - "oct", - "nov", - "dec", -] - - -class AllExpression: - value_re = re.compile(r"\*(?:/(?P\d+))?$") - - def __init__(self, step=None): - self.step = asint(step) - if self.step == 0: - raise ValueError("Increment must be higher than 0") - - def validate_range(self, field_name): - from apscheduler.triggers.cron.fields import MAX_VALUES, MIN_VALUES - - value_range = MAX_VALUES[field_name] - MIN_VALUES[field_name] - if self.step and self.step > value_range: - raise ValueError( - f"the step value ({self.step}) is higher than the total range of the " - f"expression ({value_range})" - ) - - def get_next_value(self, date, field): - start = field.get_value(date) - minval = field.get_min(date) - maxval = field.get_max(date) - start = max(start, minval) - - if not self.step: - next = start - else: - distance_to_next = (self.step - (start - minval)) % self.step - next = start + distance_to_next - - if next <= maxval: - return next - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.step == other.step - - def __str__(self): - if self.step: - return f"*/{self.step}" - return "*" - - def __repr__(self): - return f"{self.__class__.__name__}({self.step})" - - -class RangeExpression(AllExpression): - 
value_re = re.compile(r"(?P<first>\d+)(?:-(?P<last>\d+))?(?:/(?P<step>\d+))?$") - - def __init__(self, first, last=None, step=None): - super().__init__(step) - first = asint(first) - last = asint(last) - if last is None and step is None: - last = first - if last is not None and first > last: - raise ValueError( - "The minimum value in a range must not be higher than the maximum" - ) - self.first = first - self.last = last - - def validate_range(self, field_name): - from apscheduler.triggers.cron.fields import MAX_VALUES, MIN_VALUES - - super().validate_range(field_name) - if self.first < MIN_VALUES[field_name]: - raise ValueError( - f"the first value ({self.first}) is lower than the minimum value ({MIN_VALUES[field_name]})" - ) - if self.last is not None and self.last > MAX_VALUES[field_name]: - raise ValueError( - f"the last value ({self.last}) is higher than the maximum value ({MAX_VALUES[field_name]})" - ) - value_range = (self.last or MAX_VALUES[field_name]) - self.first - if self.step and self.step > value_range: - raise ValueError( - f"the step value ({self.step}) is higher than the total range of the " - f"expression ({value_range})" - ) - - def get_next_value(self, date, field): - startval = field.get_value(date) - minval = field.get_min(date) - maxval = field.get_max(date) - - # Apply range limits - minval = max(minval, self.first) - maxval = min(maxval, self.last) if self.last is not None else maxval - nextval = max(minval, startval) - - # Apply the step if defined - if self.step: - distance_to_next = (self.step - (nextval - minval)) % self.step - nextval += distance_to_next - - return nextval if nextval <= maxval else None - - def __eq__(self, other): - return ( - isinstance(other, self.__class__) - and self.first == other.first - and self.last == other.last - ) - - def __str__(self): - if self.last != self.first and self.last is not None: - range = f"{self.first}-{self.last}" - else: - range = str(self.first) - - if self.step: - return f"{range}/{self.step}" - - return range - - def __repr__(self): - args = [str(self.first)] - if (self.last != self.first and self.last is not None) or self.step: - args.append(str(self.last)) - - if self.step: - args.append(str(self.step)) - - return "{}({})".format(self.__class__.__name__, ", ".join(args)) - - -class MonthRangeExpression(RangeExpression): - value_re = re.compile(r"(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?", re.IGNORECASE) - - def __init__(self, first, last=None): - try: - first_num = MONTHS.index(first.lower()) + 1 - except ValueError: - raise ValueError(f'Invalid month name "{first}"') - - if last: - try: - last_num = MONTHS.index(last.lower()) + 1 - except ValueError: - raise ValueError(f'Invalid month name "{last}"') - else: - last_num = None - - super().__init__(first_num, last_num) - - def __str__(self): - if self.last != self.first and self.last is not None: - return f"{MONTHS[self.first - 1]}-{MONTHS[self.last - 1]}" - return MONTHS[self.first - 1] - - def __repr__(self): - args = [f"'{MONTHS[self.first - 1]}'"] - if self.last != self.first and self.last is not None: - args.append(f"'{MONTHS[self.last - 1]}'") - return "{}({})".format(self.__class__.__name__, ", ".join(args)) - - -class WeekdayRangeExpression(RangeExpression): - value_re = re.compile(r"(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?", re.IGNORECASE) - - def __init__(self, first, last=None): - try: - first_num = WEEKDAYS.index(first.lower()) - except ValueError: - raise ValueError(f'Invalid weekday name "{first}"') - - if last: - try: - last_num = WEEKDAYS.index(last.lower()) - except ValueError: - raise
ValueError(f'Invalid weekday name "{last}"') - else: - last_num = None - - super().__init__(first_num, last_num) - - def __str__(self): - if self.last != self.first and self.last is not None: - return f"{WEEKDAYS[self.first]}-{WEEKDAYS[self.last]}" - return WEEKDAYS[self.first] - - def __repr__(self): - args = [f"'{WEEKDAYS[self.first]}'"] - if self.last != self.first and self.last is not None: - args.append(f"'{WEEKDAYS[self.last]}'") - return "{}({})".format(self.__class__.__name__, ", ".join(args)) - - -class WeekdayPositionExpression(AllExpression): - options = ["1st", "2nd", "3rd", "4th", "5th", "last"] - value_re = re.compile( - r"(?P<option_name>{}) +(?P<weekday_name>(?:\d+|\w+))".format("|".join(options)), - re.IGNORECASE, - ) - - def __init__(self, option_name, weekday_name): - super().__init__(None) - try: - self.option_num = self.options.index(option_name.lower()) - except ValueError: - raise ValueError(f'Invalid weekday position "{option_name}"') - - try: - self.weekday = WEEKDAYS.index(weekday_name.lower()) - except ValueError: - raise ValueError(f'Invalid weekday name "{weekday_name}"') - - def get_next_value(self, date, field): - # Figure out the weekday of the month's first day and the number of days in that month - first_day_wday, last_day = monthrange(date.year, date.month) - - # Calculate which day of the month is the first of the target weekdays - first_hit_day = self.weekday - first_day_wday + 1 - if first_hit_day <= 0: - first_hit_day += 7 - - # Calculate what day of the month the target weekday would be - if self.option_num < 5: - target_day = first_hit_day + self.option_num * 7 - else: - target_day = first_hit_day + ((last_day - first_hit_day) // 7) * 7 - - if target_day <= last_day and target_day >= date.day: - return target_day - - def __eq__(self, other): - return ( - super().__eq__(other) - and self.option_num == other.option_num - and self.weekday == other.weekday - ) - - def __str__(self): - return f"{self.options[self.option_num]} {WEEKDAYS[self.weekday]}" - - def __repr__(self): - return f"{self.__class__.__name__}('{self.options[self.option_num]}', '{WEEKDAYS[self.weekday]}')" - - -class LastDayOfMonthExpression(AllExpression): - value_re = re.compile(r"last", re.IGNORECASE) - - def __init__(self): - super().__init__(None) - - def get_next_value(self, date, field): - return monthrange(date.year, date.month)[1] - - def __str__(self): - return "last" - - def __repr__(self): - return f"{self.__class__.__name__}()" diff --git a/venv/Lib/site-packages/apscheduler/triggers/cron/fields.py b/venv/Lib/site-packages/apscheduler/triggers/cron/fields.py deleted file mode 100644 index 2133fdb..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/cron/fields.py +++ /dev/null @@ -1,149 +0,0 @@ -"""Fields represent CronTrigger options which map to :class:`~datetime.datetime` fields.""" - -__all__ = ( - "DEFAULT_VALUES", - "MAX_VALUES", - "MIN_VALUES", - "BaseField", - "DayOfMonthField", - "DayOfWeekField", - "WeekField", -) - -import re -from calendar import monthrange - -from apscheduler.triggers.cron.expressions import ( - AllExpression, - LastDayOfMonthExpression, - MonthRangeExpression, - RangeExpression, - WeekdayPositionExpression, - WeekdayRangeExpression, -) - -MIN_VALUES = { - "year": 1970, - "month": 1, - "day": 1, - "week": 1, - "day_of_week": 0, - "hour": 0, - "minute": 0, - "second": 0, -} -MAX_VALUES = { - "year": 9999, - "month": 12, - "day": 31, - "week": 53, - "day_of_week": 6, - "hour": 23, - "minute": 59, - "second": 59, -} -DEFAULT_VALUES = { - "year": "*", - "month": 1, -
"day": 1, - "week": "*", - "day_of_week": "*", - "hour": 0, - "minute": 0, - "second": 0, -} -SEPARATOR = re.compile(" *, *") - - -class BaseField: - REAL = True - COMPILERS = [AllExpression, RangeExpression] - - def __init__(self, name, exprs, is_default=False): - self.name = name - self.is_default = is_default - self.compile_expressions(exprs) - - def get_min(self, dateval): - return MIN_VALUES[self.name] - - def get_max(self, dateval): - return MAX_VALUES[self.name] - - def get_value(self, dateval): - return getattr(dateval, self.name) - - def get_next_value(self, dateval): - smallest = None - for expr in self.expressions: - value = expr.get_next_value(dateval, self) - if smallest is None or (value is not None and value < smallest): - smallest = value - - return smallest - - def compile_expressions(self, exprs): - self.expressions = [] - - # Split a comma-separated expression list, if any - for expr in SEPARATOR.split(str(exprs).strip()): - self.compile_expression(expr) - - def compile_expression(self, expr): - for compiler in self.COMPILERS: - match = compiler.value_re.match(expr) - if match: - compiled_expr = compiler(**match.groupdict()) - - try: - compiled_expr.validate_range(self.name) - except ValueError as e: - raise ValueError( - f"Error validating expression {expr!r}: {e}" - ) from None - - self.expressions.append(compiled_expr) - return - - raise ValueError(f'Unrecognized expression "{expr}" for field "{self.name}"') - - def __eq__(self, other): - return ( - isinstance(self, self.__class__) and self.expressions == other.expressions - ) - - def __str__(self): - expr_strings = (str(e) for e in self.expressions) - return ",".join(expr_strings) - - def __repr__(self): - return f"{self.__class__.__name__}('{self.name}', '{self}')" - - -class WeekField(BaseField): - REAL = False - - def get_value(self, dateval): - return dateval.isocalendar()[1] - - -class DayOfMonthField(BaseField): - COMPILERS = BaseField.COMPILERS + [ - WeekdayPositionExpression, - LastDayOfMonthExpression, - ] - - def get_max(self, dateval): - return monthrange(dateval.year, dateval.month)[1] - - -class DayOfWeekField(BaseField): - REAL = False - COMPILERS = BaseField.COMPILERS + [WeekdayRangeExpression] - - def get_value(self, dateval): - return dateval.weekday() - - -class MonthField(BaseField): - COMPILERS = BaseField.COMPILERS + [MonthRangeExpression] diff --git a/venv/Lib/site-packages/apscheduler/triggers/date.py b/venv/Lib/site-packages/apscheduler/triggers/date.py deleted file mode 100644 index a9302da..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/date.py +++ /dev/null @@ -1,51 +0,0 @@ -from datetime import datetime - -from tzlocal import get_localzone - -from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import astimezone, convert_to_datetime, datetime_repr - - -class DateTrigger(BaseTrigger): - """ - Triggers once on the given datetime. If ``run_date`` is left empty, current time is used. 
- - :param datetime|str run_date: the date/time to run the job at - :param datetime.tzinfo|str timezone: time zone for ``run_date`` if it doesn't have one already - """ - - __slots__ = "run_date" - - def __init__(self, run_date=None, timezone=None): - timezone = astimezone(timezone) or get_localzone() - if run_date is not None: - self.run_date = convert_to_datetime(run_date, timezone, "run_date") - else: - self.run_date = datetime.now(timezone) - - def get_next_fire_time(self, previous_fire_time, now): - return self.run_date if previous_fire_time is None else None - - def __getstate__(self): - return {"version": 1, "run_date": self.run_date} - - def __setstate__(self, state): - # This is for compatibility with APScheduler 3.0.x - if isinstance(state, tuple): - state = state[1] - - if state.get("version", 1) > 1: - raise ValueError( - f"Got serialized data for version {state['version']} of " - f"{self.__class__.__name__}, but only version 1 can be handled" - ) - - self.run_date = state["run_date"] - - def __str__(self): - return f"date[{datetime_repr(self.run_date)}]" - - def __repr__(self): - return ( - f"<{self.__class__.__name__} (run_date='{datetime_repr(self.run_date)}')>" - ) diff --git a/venv/Lib/site-packages/apscheduler/triggers/interval.py b/venv/Lib/site-packages/apscheduler/triggers/interval.py deleted file mode 100644 index 0e4b24a..0000000 --- a/venv/Lib/site-packages/apscheduler/triggers/interval.py +++ /dev/null @@ -1,138 +0,0 @@ -import random -from datetime import datetime, timedelta -from math import ceil - -from tzlocal import get_localzone - -from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import ( - astimezone, - convert_to_datetime, - datetime_repr, -) - - -class IntervalTrigger(BaseTrigger): - """ - Triggers on specified intervals, starting on ``start_date`` if specified, ``datetime.now()`` + - interval otherwise. 
- - :param int weeks: number of weeks to wait - :param int days: number of days to wait - :param int hours: number of hours to wait - :param int minutes: number of minutes to wait - :param int seconds: number of seconds to wait - :param datetime|str start_date: starting point for the interval calculation - :param datetime|str end_date: latest possible date/time to trigger on - :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations - :param int|None jitter: delay the job execution by ``jitter`` seconds at most - """ - - __slots__ = ( - "end_date", - "interval", - "interval_length", - "jitter", - "start_date", - "timezone", - ) - - def __init__( - self, - weeks=0, - days=0, - hours=0, - minutes=0, - seconds=0, - start_date=None, - end_date=None, - timezone=None, - jitter=None, - ): - self.interval = timedelta( - weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds - ) - self.interval_length = self.interval.total_seconds() - if self.interval_length == 0: - self.interval = timedelta(seconds=1) - self.interval_length = 1 - - if timezone: - self.timezone = astimezone(timezone) - elif isinstance(start_date, datetime) and start_date.tzinfo: - self.timezone = astimezone(start_date.tzinfo) - elif isinstance(end_date, datetime) and end_date.tzinfo: - self.timezone = astimezone(end_date.tzinfo) - else: - self.timezone = get_localzone() - - start_date = start_date or (datetime.now(self.timezone) + self.interval) - self.start_date = convert_to_datetime(start_date, self.timezone, "start_date") - self.end_date = convert_to_datetime(end_date, self.timezone, "end_date") - - self.jitter = jitter - - def get_next_fire_time(self, previous_fire_time, now): - if previous_fire_time: - next_fire_time = previous_fire_time.timestamp() + self.interval_length - elif self.start_date > now: - next_fire_time = self.start_date.timestamp() - else: - timediff = now.timestamp() - self.start_date.timestamp() - next_interval_num = ceil(timediff / self.interval_length) - next_fire_time = ( - self.start_date.timestamp() + self.interval_length * next_interval_num - ) - - if self.jitter is not None: - next_fire_time += random.uniform(0, self.jitter) - - if not self.end_date or next_fire_time <= self.end_date.timestamp(): - return datetime.fromtimestamp(next_fire_time, tz=self.timezone) - - def __getstate__(self): - return { - "version": 2, - "timezone": astimezone(self.timezone), - "start_date": self.start_date, - "end_date": self.end_date, - "interval": self.interval, - "jitter": self.jitter, - } - - def __setstate__(self, state): - # This is for compatibility with APScheduler 3.0.x - if isinstance(state, tuple): - state = state[1] - - if state.get("version", 1) > 2: - raise ValueError( - f"Got serialized data for version {state['version']} of " - f"{self.__class__.__name__}, but only versions up to 2 can be handled" - ) - - self.timezone = state["timezone"] - self.start_date = state["start_date"] - self.end_date = state["end_date"] - self.interval = state["interval"] - self.interval_length = self.interval.total_seconds() - self.jitter = state.get("jitter") - - def __str__(self): - return f"interval[{self.interval!s}]" - - def __repr__(self): - options = [ - f"interval={self.interval!r}", - f"start_date={datetime_repr(self.start_date)!r}", - ] - if self.end_date: - options.append(f"end_date={datetime_repr(self.end_date)!r}") - if self.jitter: - options.append(f"jitter={self.jitter}") - - return "<{} ({}, timezone='{}')>".format( - self.__class__.__name__, - ", ".join(options), - 
self.timezone, - ) diff --git a/venv/Lib/site-packages/apscheduler/util.py b/venv/Lib/site-packages/apscheduler/util.py deleted file mode 100644 index caf56b8..0000000 --- a/venv/Lib/site-packages/apscheduler/util.py +++ /dev/null @@ -1,485 +0,0 @@ -"""This module contains several handy functions primarily meant for internal use.""" - -__all__ = ( - "asbool", - "asint", - "astimezone", - "check_callable_args", - "convert_to_datetime", - "datetime_ceil", - "datetime_to_utc_timestamp", - "get_callable_name", - "localize", - "maybe_ref", - "normalize", - "obj_to_ref", - "ref_to_obj", - "undefined", - "utc_timestamp_to_datetime", -) - -import re -import sys -from calendar import timegm -from datetime import date, datetime, time, timedelta, timezone, tzinfo -from functools import partial -from inspect import isbuiltin, isclass, isfunction, ismethod, signature - -if sys.version_info < (3, 14): - from asyncio import iscoroutinefunction -else: - from inspect import iscoroutinefunction - -if sys.version_info < (3, 9): - from backports.zoneinfo import ZoneInfo -else: - from zoneinfo import ZoneInfo - -UTC = timezone.utc - - -class _Undefined: - def __nonzero__(self): - return False - - def __bool__(self): - return False - - def __repr__(self): - return "<undefined>" - - -undefined = ( - _Undefined() -) #: a unique object that only signifies that no value is defined - - -def asint(text): - """ - Safely converts a string to an integer, returning ``None`` if the string is ``None``. - - :type text: str - :rtype: int - - """ - if text is not None: - return int(text) - - -def asbool(obj): - """ - Interprets an object as a boolean value. - - :rtype: bool - - """ - if isinstance(obj, str): - obj = obj.strip().lower() - if obj in ("true", "yes", "on", "y", "t", "1"): - return True - - if obj in ("false", "no", "off", "n", "f", "0"): - return False - - raise ValueError(f'Unable to interpret value "{obj}" as boolean') - - return bool(obj) - - -def astimezone(obj): - """ - Interprets an object as a timezone. - - :rtype: tzinfo - - """ - if isinstance(obj, str): - if obj == "UTC": - return timezone.utc - - return ZoneInfo(obj) - - if isinstance(obj, tzinfo): - if obj.tzname(None) == "local": - raise ValueError( - "Unable to determine the name of the local timezone -- you must " - "explicitly specify the name of the local timezone. Please refrain " - "from using timezones like EST to prevent problems with daylight " - "saving time. Instead, use a locale based timezone name (such as " - "Europe/Helsinki)." - ) - elif isinstance(obj, ZoneInfo): - return obj - elif hasattr(obj, "zone"): - # pytz timezones - if obj.zone: - return ZoneInfo(obj.zone) - - return timezone(obj._offset) - - return obj - - if obj is not None: - raise TypeError(f"Expected tzinfo, got {obj.__class__.__name__} instead") - - -def asdate(obj): - if isinstance(obj, str): - return date.fromisoformat(obj) - - return obj - - -_DATE_REGEX = re.compile( - r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})" - r"(?:[ T](?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})" - r"(?:\.(?P<microsecond>\d{1,6}))?" - r"(?P<timezone>Z|[+-]\d\d:\d\d)?)?$" -) - - -def convert_to_datetime(input, tz, arg_name): - """ - Converts the given object to a timezone aware datetime object. - - If a timezone aware datetime object is passed, it is returned unmodified. - If a native datetime object is passed, it is given the specified timezone. - If the input is a string, it is parsed as a datetime with the given timezone.
- - Date strings are accepted in three different forms: date only (Y-m-d), date with - time (Y-m-d H:M:S) or with date+time with microseconds (Y-m-d H:M:S.micro). - Additionally you can override the time zone by giving a specific offset in the - format specified by ISO 8601: Z (UTC), +HH:MM or -HH:MM. - - :param str|datetime input: the datetime or string to convert to a timezone aware - datetime - :param datetime.tzinfo tz: timezone to interpret ``input`` in - :param str arg_name: the name of the argument (used in an error message) - :rtype: datetime - - """ - if input is None: - return - elif isinstance(input, datetime): - datetime_ = input - elif isinstance(input, date): - datetime_ = datetime.combine(input, time()) - elif isinstance(input, str): - m = _DATE_REGEX.match(input) - if not m: - raise ValueError("Invalid date string") - - values = m.groupdict() - tzname = values.pop("timezone") - if tzname == "Z": - tz = timezone.utc - elif tzname: - hours, minutes = (int(x) for x in tzname[1:].split(":")) - sign = 1 if tzname[0] == "+" else -1 - tz = timezone(sign * timedelta(hours=hours, minutes=minutes)) - - values = {k: int(v or 0) for k, v in values.items()} - datetime_ = datetime(**values) - else: - raise TypeError(f"Unsupported type for {arg_name}: {input.__class__.__name__}") - - if datetime_.tzinfo is not None: - return datetime_ - if tz is None: - raise ValueError( - f'The "tz" argument must be specified if {arg_name} has no timezone information' - ) - if isinstance(tz, str): - tz = astimezone(tz) - - return localize(datetime_, tz) - - -def datetime_to_utc_timestamp(timeval): - """ - Converts a datetime instance to a timestamp. - - :type timeval: datetime - :rtype: float - - """ - if timeval is not None: - return timegm(timeval.utctimetuple()) + timeval.microsecond / 1000000 - - -def utc_timestamp_to_datetime(timestamp): - """ - Converts the given timestamp to a datetime instance. - - :type timestamp: float - :rtype: datetime - - """ - if timestamp is not None: - return datetime.fromtimestamp(timestamp, timezone.utc) - - -def timedelta_seconds(delta): - """ - Converts the given timedelta to seconds. - - :type delta: timedelta - :rtype: float - - """ - return delta.days * 24 * 60 * 60 + delta.seconds + delta.microseconds / 1000000.0 - - -def datetime_ceil(dateval): - """ - Rounds the given datetime object upwards. - - :type dateval: datetime - - """ - if dateval.microsecond > 0: - return datetime_utc_add( - dateval, timedelta(seconds=1, microseconds=-dateval.microsecond) - ) - - return dateval - - -def datetime_utc_add(dateval: datetime, tdelta: timedelta) -> datetime: - """ - Adds an timedelta to a datetime in UTC for correct datetime arithmetic across - Daylight Saving Time changes - - :param dateval: The date to add to - :type dateval: datetime - :param operand: The timedelta to add to the datetime - :type operand: timedelta - :return: The sum of the datetime and the timedelta - :rtype: datetime - """ - original_tz = dateval.tzinfo - if original_tz is None: - return dateval + tdelta - - return (dateval.astimezone(UTC) + tdelta).astimezone(original_tz) - - -def datetime_repr(dateval): - return dateval.strftime("%Y-%m-%d %H:%M:%S %Z") if dateval else "None" - - -def timezone_repr(timezone: tzinfo) -> str: - if isinstance(timezone, ZoneInfo): - return timezone.key - - return repr(timezone) - - -def get_callable_name(func): - """ - Returns the best available display name for the given function/callable. 
- - :rtype: str - - """ - if ismethod(func): - self = func.__self__ - cls = self if isclass(self) else type(self) - return f"{cls.__qualname__}.{func.__name__}" - elif isclass(func) or isfunction(func) or isbuiltin(func): - return func.__qualname__ - elif hasattr(func, "__call__") and callable(func.__call__): - # instance of a class with a __call__ method - return type(func).__qualname__ - - raise TypeError( - f"Unable to determine a name for {func!r} -- maybe it is not a callable?" - ) - - -def obj_to_ref(obj): - """ - Returns the path to the given callable. - - :rtype: str - :raises TypeError: if the given object is not callable - :raises ValueError: if the given object is a :class:`~functools.partial`, lambda or a nested - function - - """ - if isinstance(obj, partial): - raise ValueError("Cannot create a reference to a partial()") - - name = get_callable_name(obj) - if "<lambda>" in name: - raise ValueError("Cannot create a reference to a lambda") - if "<locals>" in name: - raise ValueError("Cannot create a reference to a nested function") - - if ismethod(obj): - module = obj.__self__.__module__ - else: - module = obj.__module__ - - return f"{module}:{name}" - - -def ref_to_obj(ref): - """ - Returns the object pointed to by ``ref``. - - :type ref: str - - """ - if not isinstance(ref, str): - raise TypeError("References must be strings") - if ":" not in ref: - raise ValueError("Invalid reference") - - modulename, rest = ref.split(":", 1) - try: - obj = __import__(modulename, fromlist=[rest]) - except ImportError as exc: - raise LookupError( - f"Error resolving reference {ref}: could not import module" - ) from exc - - try: - for name in rest.split("."): - obj = getattr(obj, name) - return obj - except Exception: - raise LookupError(f"Error resolving reference {ref}: error looking up object") - - -def maybe_ref(ref): - """ - Returns the object that the given reference points to, if it is indeed a reference. - If it is not a reference, the object is returned as-is. - - """ - if not isinstance(ref, str): - return ref - return ref_to_obj(ref) - - -def check_callable_args(func, args, kwargs): - """ - Ensures that the given callable can be called with the given arguments.
- - :type args: tuple - :type kwargs: dict - - """ - pos_kwargs_conflicts = [] # parameters that have a match in both args and kwargs - positional_only_kwargs = [] # positional-only parameters that have a match in kwargs - unsatisfied_args = [] # parameters in signature that don't have a match in args or kwargs - unsatisfied_kwargs = [] # keyword-only arguments that don't have a match in kwargs - unmatched_args = list( - args - ) # args that didn't match any of the parameters in the signature - # kwargs that didn't match any of the parameters in the signature - unmatched_kwargs = list(kwargs) - # indicates if the signature defines *args and **kwargs respectively - has_varargs = has_var_kwargs = False - - try: - sig = signature(func, follow_wrapped=False) - except ValueError: - # signature() doesn't work against every kind of callable - return - - for param in sig.parameters.values(): - if param.kind == param.POSITIONAL_OR_KEYWORD: - if param.name in unmatched_kwargs and unmatched_args: - pos_kwargs_conflicts.append(param.name) - elif unmatched_args: - del unmatched_args[0] - elif param.name in unmatched_kwargs: - unmatched_kwargs.remove(param.name) - elif param.default is param.empty: - unsatisfied_args.append(param.name) - elif param.kind == param.POSITIONAL_ONLY: - if unmatched_args: - del unmatched_args[0] - elif param.name in unmatched_kwargs: - unmatched_kwargs.remove(param.name) - positional_only_kwargs.append(param.name) - elif param.default is param.empty: - unsatisfied_args.append(param.name) - elif param.kind == param.KEYWORD_ONLY: - if param.name in unmatched_kwargs: - unmatched_kwargs.remove(param.name) - elif param.default is param.empty: - unsatisfied_kwargs.append(param.name) - elif param.kind == param.VAR_POSITIONAL: - has_varargs = True - elif param.kind == param.VAR_KEYWORD: - has_var_kwargs = True - - # Make sure there are no conflicts between args and kwargs - if pos_kwargs_conflicts: - raise ValueError( - "The following arguments are supplied in both args and kwargs: {}".format( - ", ".join(pos_kwargs_conflicts) - ) - ) - - # Check if keyword arguments are being fed to positional-only parameters - if positional_only_kwargs: - raise ValueError( - "The following arguments cannot be given as keyword arguments: {}".format( - ", ".join(positional_only_kwargs) - ) - ) - - # Check that the number of positional arguments minus the number of matched kwargs - # matches the argspec - if unsatisfied_args: - raise ValueError( - "The following arguments have not been supplied: {}".format( - ", ".join(unsatisfied_args) - ) - ) - - # Check that all keyword-only arguments have been supplied - if unsatisfied_kwargs: - raise ValueError( - "The following keyword-only arguments have not been supplied in kwargs: " - "{}".format(", ".join(unsatisfied_kwargs)) - ) - - # Check that the callable can accept the given number of positional arguments - if not has_varargs and unmatched_args: - raise ValueError( - f"The list of positional arguments is longer than the target callable can " - f"handle (allowed: {len(args) - len(unmatched_args)}, given in args: " - f"{len(args)})" - ) - - # Check that the callable can accept the given keyword arguments - if not has_var_kwargs and unmatched_kwargs: - raise ValueError( - "The target callable does not accept the following keyword arguments: " - "{}".format(", ".join(unmatched_kwargs)) - ) - - -def iscoroutinefunction_partial(f): - while isinstance(f, partial): - f = f.func - - # The asyncio version of iscoroutinefunction includes testing for @coroutine - # 
decorations vs. the inspect version which does not. - return iscoroutinefunction(f) - - -def normalize(dt): - return datetime.fromtimestamp(dt.timestamp(), dt.tzinfo) - - -def localize(dt, tzinfo): - if hasattr(tzinfo, "localize"): - return tzinfo.localize(dt) - - return normalize(dt.replace(tzinfo=tzinfo)) diff --git a/venv/Lib/site-packages/attr/__init__.py b/venv/Lib/site-packages/attr/__init__.py deleted file mode 100644 index 5c6e065..0000000 --- a/venv/Lib/site-packages/attr/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Classes Without Boilerplate -""" - -from functools import partial -from typing import Callable, Literal, Protocol - -from . import converters, exceptions, filters, setters, validators -from ._cmp import cmp_using -from ._config import get_run_validators, set_run_validators -from ._funcs import asdict, assoc, astuple, has, resolve_types -from ._make import ( - NOTHING, - Attribute, - Converter, - Factory, - _Nothing, - attrib, - attrs, - evolve, - fields, - fields_dict, - make_class, - validate, -) -from ._next_gen import define, field, frozen, mutable -from ._version_info import VersionInfo - - -s = attributes = attrs -ib = attr = attrib -dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) - - -class AttrsInstance(Protocol): - pass - - -NothingType = Literal[_Nothing.NOTHING] - -__all__ = [ - "NOTHING", - "Attribute", - "AttrsInstance", - "Converter", - "Factory", - "NothingType", - "asdict", - "assoc", - "astuple", - "attr", - "attrib", - "attributes", - "attrs", - "cmp_using", - "converters", - "define", - "evolve", - "exceptions", - "field", - "fields", - "fields_dict", - "filters", - "frozen", - "get_run_validators", - "has", - "ib", - "make_class", - "mutable", - "resolve_types", - "s", - "set_run_validators", - "setters", - "validate", - "validators", -] - - -def _make_getattr(mod_name: str) -> Callable: - """ - Create a metadata proxy for packaging information that uses *mod_name* in - its warnings and errors. - """ - - def __getattr__(name: str) -> str: - if name not in ("__version__", "__version_info__"): - msg = f"module {mod_name} has no attribute {name}" - raise AttributeError(msg) - - from importlib.metadata import metadata - - meta = metadata("attrs") - - if name == "__version_info__": - return VersionInfo._from_version_string(meta["version"]) - - return meta["version"] - - return __getattr__ - - -__getattr__ = _make_getattr(__name__) diff --git a/venv/Lib/site-packages/attr/__init__.pyi b/venv/Lib/site-packages/attr/__init__.pyi deleted file mode 100644 index 8d78fa1..0000000 --- a/venv/Lib/site-packages/attr/__init__.pyi +++ /dev/null @@ -1,389 +0,0 @@ -import enum -import sys - -from typing import ( - Any, - Callable, - Generic, - Literal, - Mapping, - Protocol, - Sequence, - TypeVar, - overload, -) - -# `import X as X` is required to make these public -from . import converters as converters -from . import exceptions as exceptions -from . import filters as filters -from . import setters as setters -from . 
import validators as validators -from ._cmp import cmp_using as cmp_using -from ._typing_compat import AttrsInstance_ -from ._version_info import VersionInfo -from attrs import ( - define as define, - field as field, - mutable as mutable, - frozen as frozen, - _EqOrderType, - _ValidatorType, - _ConverterType, - _ReprArgType, - _OnSetAttrType, - _OnSetAttrArgType, - _FieldTransformer, - _ValidatorArgType, -) - -if sys.version_info >= (3, 10): - from typing import TypeGuard, TypeAlias -else: - from typing_extensions import TypeGuard, TypeAlias - -if sys.version_info >= (3, 11): - from typing import dataclass_transform -else: - from typing_extensions import dataclass_transform - -__version__: str -__version_info__: VersionInfo -__title__: str -__description__: str -__url__: str -__uri__: str -__author__: str -__email__: str -__license__: str -__copyright__: str - -_T = TypeVar("_T") -_C = TypeVar("_C", bound=type) - -_FilterType = Callable[["Attribute[_T]", _T], bool] - -# We subclass this here to keep the protocol's qualified name clean. -class AttrsInstance(AttrsInstance_, Protocol): - pass - -_A = TypeVar("_A", bound=type[AttrsInstance]) - -class _Nothing(enum.Enum): - NOTHING = enum.auto() - -NOTHING = _Nothing.NOTHING -NothingType: TypeAlias = Literal[_Nothing.NOTHING] - -# NOTE: Factory lies about its return type to make this possible: -# `x: List[int] # = Factory(list)` -# Work around mypy issue #4554 in the common case by using an overload. - -@overload -def Factory(factory: Callable[[], _T]) -> _T: ... -@overload -def Factory( - factory: Callable[[Any], _T], - takes_self: Literal[True], -) -> _T: ... -@overload -def Factory( - factory: Callable[[], _T], - takes_self: Literal[False], -) -> _T: ... - -In = TypeVar("In") -Out = TypeVar("Out") - -class Converter(Generic[In, Out]): - @overload - def __init__(self, converter: Callable[[In], Out]) -> None: ... - @overload - def __init__( - self, - converter: Callable[[In, AttrsInstance, Attribute], Out], - *, - takes_self: Literal[True], - takes_field: Literal[True], - ) -> None: ... - @overload - def __init__( - self, - converter: Callable[[In, Attribute], Out], - *, - takes_field: Literal[True], - ) -> None: ... - @overload - def __init__( - self, - converter: Callable[[In, AttrsInstance], Out], - *, - takes_self: Literal[True], - ) -> None: ... - -class Attribute(Generic[_T]): - name: str - default: _T | None - validator: _ValidatorType[_T] | None - repr: _ReprArgType - cmp: _EqOrderType - eq: _EqOrderType - order: _EqOrderType - hash: bool | None - init: bool - converter: Converter | None - metadata: dict[Any, Any] - type: type[_T] | None - kw_only: bool - on_setattr: _OnSetAttrType - alias: str | None - - def evolve(self, **changes: Any) -> "Attribute[Any]": ... - -# NOTE: We had several choices for the annotation to use for type arg: -# 1) Type[_T] -# - Pros: Handles simple cases correctly -# - Cons: Might produce less informative errors in the case of conflicting -# TypeVars e.g. `attr.ib(default='bad', type=int)` -# 2) Callable[..., _T] -# - Pros: Better error messages than #1 for conflicting TypeVars -# - Cons: Terrible error messages for validator checks. -# e.g. attr.ib(type=int, validator=validate_str) -# -> error: Cannot infer function type argument -# 3) type (and do all of the work in the mypy plugin) -# - Pros: Simple here, and we could customize the plugin with our own errors. -# - Cons: Would need to write mypy plugin code to handle all the cases. -# We chose option #1. 
- -# `attr` lies about its return type to make the following possible: -# attr() -> Any -# attr(8) -> int -# attr(validator=) -> Whatever the callable expects. -# This makes this type of assignments possible: -# x: int = attr(8) -# -# This form catches explicit None or no default but with no other arguments -# returns Any. -@overload -def attrib( - default: None = ..., - validator: None = ..., - repr: _ReprArgType = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - type: None = ..., - converter: None = ..., - factory: None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., -) -> Any: ... - -# This form catches an explicit None or no default and infers the type from the -# other arguments. -@overload -def attrib( - default: None = ..., - validator: _ValidatorArgType[_T] | None = ..., - repr: _ReprArgType = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - type: type[_T] | None = ..., - converter: _ConverterType - | list[_ConverterType] - | tuple[_ConverterType] - | None = ..., - factory: Callable[[], _T] | None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., -) -> _T: ... - -# This form catches an explicit default argument. -@overload -def attrib( - default: _T, - validator: _ValidatorArgType[_T] | None = ..., - repr: _ReprArgType = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - type: type[_T] | None = ..., - converter: _ConverterType - | list[_ConverterType] - | tuple[_ConverterType] - | None = ..., - factory: Callable[[], _T] | None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., -) -> _T: ... - -# This form covers type=non-Type: e.g. forward references (str), Any -@overload -def attrib( - default: _T | None = ..., - validator: _ValidatorArgType[_T] | None = ..., - repr: _ReprArgType = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - type: object = ..., - converter: _ConverterType - | list[_ConverterType] - | tuple[_ConverterType] - | None = ..., - factory: Callable[[], _T] | None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., -) -> Any: ... 
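For context, the overloads above are what let a static type checker infer a field's type from the default or validator passed to attr.ib, which is the trade-off described in the NOTE earlier in this stub file. A minimal sketch of the behaviour they describe (the class and field names are illustrative only and assume the attrs package is installed; this snippet is not part of the patch):

import attr

@attr.s
class Point:
    x = attr.ib(default=0)      # inferred as int via the `default: _T` overload
    y = attr.ib(default=0.0)    # inferred as float
    label = attr.ib(default="", validator=attr.validators.instance_of(str))

p = Point(x=1, y=2.5, label="origin")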
-@overload -@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) -def attrs( - maybe_cls: _C, - these: dict[str, Any] | None = ..., - repr_ns: str | None = ..., - repr: bool = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - auto_detect: bool = ..., - collect_by_mro: bool = ..., - getstate_setstate: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., - match_args: bool = ..., - unsafe_hash: bool | None = ..., -) -> _C: ... -@overload -@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) -def attrs( - maybe_cls: None = ..., - these: dict[str, Any] | None = ..., - repr_ns: str | None = ..., - repr: bool = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - auto_detect: bool = ..., - collect_by_mro: bool = ..., - getstate_setstate: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., - match_args: bool = ..., - unsafe_hash: bool | None = ..., -) -> Callable[[_C], _C]: ... -def fields(cls: type[AttrsInstance]) -> Any: ... -def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ... -def validate(inst: AttrsInstance) -> None: ... -def resolve_types( - cls: _A, - globalns: dict[str, Any] | None = ..., - localns: dict[str, Any] | None = ..., - attribs: list[Attribute[Any]] | None = ..., - include_extras: bool = ..., -) -> _A: ... - -# TODO: add support for returning a proper attrs class from the mypy plugin -# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', -# [attr.ib()])` is valid -def make_class( - name: str, - attrs: list[str] | tuple[str, ...] | dict[str, Any], - bases: tuple[type, ...] = ..., - class_body: dict[str, Any] | None = ..., - repr_ns: str | None = ..., - repr: bool = ..., - cmp: _EqOrderType | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - collect_by_mro: bool = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., -) -> type: ... - -# _funcs -- - -# TODO: add support for returning TypedDict from the mypy plugin -# FIXME: asdict/astuple do not honor their factory args. Waiting on one of -# these: -# https://github.com/python/mypy/issues/4236 -# https://github.com/python/typing/issues/253 -# XXX: remember to fix attrs.asdict/astuple too! -def asdict( - inst: AttrsInstance, - recurse: bool = ..., - filter: _FilterType[Any] | None = ..., - dict_factory: type[Mapping[Any, Any]] = ..., - retain_collection_types: bool = ..., - value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ..., - tuple_keys: bool | None = ..., -) -> dict[str, Any]: ... 
- -# TODO: add support for returning NamedTuple from the mypy plugin -def astuple( - inst: AttrsInstance, - recurse: bool = ..., - filter: _FilterType[Any] | None = ..., - tuple_factory: type[Sequence[Any]] = ..., - retain_collection_types: bool = ..., -) -> tuple[Any, ...]: ... -def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ... -def assoc(inst: _T, **changes: Any) -> _T: ... -def evolve(inst: _T, **changes: Any) -> _T: ... - -# _config -- - -def set_run_validators(run: bool) -> None: ... -def get_run_validators() -> bool: ... - -# aliases -- - -s = attributes = attrs -ib = attr = attrib -dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/venv/Lib/site-packages/attr/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ab29b73..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-312.pyc deleted file mode 100644 index 6a9440b..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_compat.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_compat.cpython-312.pyc deleted file mode 100644 index dc48665..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_compat.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_config.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_config.cpython-312.pyc deleted file mode 100644 index 5bef360..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_config.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-312.pyc deleted file mode 100644 index dc3c27b..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_make.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_make.cpython-312.pyc deleted file mode 100644 index 35f9f15..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_make.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc deleted file mode 100644 index e43f0ed..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-312.pyc deleted file mode 100644 index a8bcdb0..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/converters.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/converters.cpython-312.pyc deleted file mode 100644 index fc457c7..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/converters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 9919f5f..0000000 Binary files 
a/venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/filters.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/filters.cpython-312.pyc deleted file mode 100644 index adbe806..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/filters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/setters.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/setters.cpython-312.pyc deleted file mode 100644 index d3ef551..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/setters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/__pycache__/validators.cpython-312.pyc b/venv/Lib/site-packages/attr/__pycache__/validators.cpython-312.pyc deleted file mode 100644 index c98e0ba..0000000 Binary files a/venv/Lib/site-packages/attr/__pycache__/validators.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attr/_cmp.py b/venv/Lib/site-packages/attr/_cmp.py deleted file mode 100644 index 09bab49..0000000 --- a/venv/Lib/site-packages/attr/_cmp.py +++ /dev/null @@ -1,160 +0,0 @@ -# SPDX-License-Identifier: MIT - - -import functools -import types - -from ._make import __ne__ - - -_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} - - -def cmp_using( - eq=None, - lt=None, - le=None, - gt=None, - ge=None, - require_same_type=True, - class_name="Comparable", -): - """ - Create a class that can be passed into `attrs.field`'s ``eq``, ``order``, - and ``cmp`` arguments to customize field comparison. - - The resulting class will have a full set of ordering methods if at least - one of ``{lt, le, gt, ge}`` and ``eq`` are provided. - - Args: - eq (typing.Callable | None): - Callable used to evaluate equality of two objects. - - lt (typing.Callable | None): - Callable used to evaluate whether one object is less than another - object. - - le (typing.Callable | None): - Callable used to evaluate whether one object is less than or equal - to another object. - - gt (typing.Callable | None): - Callable used to evaluate whether one object is greater than - another object. - - ge (typing.Callable | None): - Callable used to evaluate whether one object is greater than or - equal to another object. - - require_same_type (bool): - When `True`, equality and ordering methods will return - `NotImplemented` if objects are not of the same type. - - class_name (str | None): Name of class. Defaults to "Comparable". - - See `comparison` for more details. - - .. versionadded:: 21.1.0 - """ - - body = { - "__slots__": ["value"], - "__init__": _make_init(), - "_requirements": [], - "_is_comparable_to": _is_comparable_to, - } - - # Add operations. - num_order_functions = 0 - has_eq_function = False - - if eq is not None: - has_eq_function = True - body["__eq__"] = _make_operator("eq", eq) - body["__ne__"] = __ne__ - - if lt is not None: - num_order_functions += 1 - body["__lt__"] = _make_operator("lt", lt) - - if le is not None: - num_order_functions += 1 - body["__le__"] = _make_operator("le", le) - - if gt is not None: - num_order_functions += 1 - body["__gt__"] = _make_operator("gt", gt) - - if ge is not None: - num_order_functions += 1 - body["__ge__"] = _make_operator("ge", ge) - - type_ = types.new_class( - class_name, (object,), {}, lambda ns: ns.update(body) - ) - - # Add same type requirement. 
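For reference, the cmp_using helper being removed here builds small comparator classes that can be handed to a field's eq/order arguments, as its docstring above describes. A minimal sketch of that usage pattern (ByAbs and Measurement are illustrative names, assuming the attrs package is installed; not part of the patch):

import attr
from attr import cmp_using

# Compare this field by absolute value instead of plain equality.
ByAbs = cmp_using(eq=lambda a, b: abs(a) == abs(b), class_name="ByAbs")

@attr.s
class Measurement:
    value = attr.ib(eq=ByAbs)

assert Measurement(value=-3.0) == Measurement(value=3.0)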
- if require_same_type: - type_._requirements.append(_check_same_type) - - # Add total ordering if at least one operation was defined. - if 0 < num_order_functions < 4: - if not has_eq_function: - # functools.total_ordering requires __eq__ to be defined, - # so raise early error here to keep a nice stack. - msg = "eq must be define is order to complete ordering from lt, le, gt, ge." - raise ValueError(msg) - type_ = functools.total_ordering(type_) - - return type_ - - -def _make_init(): - """ - Create __init__ method. - """ - - def __init__(self, value): - """ - Initialize object with *value*. - """ - self.value = value - - return __init__ - - -def _make_operator(name, func): - """ - Create operator method. - """ - - def method(self, other): - if not self._is_comparable_to(other): - return NotImplemented - - result = func(self.value, other.value) - if result is NotImplemented: - return NotImplemented - - return result - - method.__name__ = f"__{name}__" - method.__doc__ = ( - f"Return a {_operation_names[name]} b. Computed by attrs." - ) - - return method - - -def _is_comparable_to(self, other): - """ - Check whether `other` is comparable to `self`. - """ - return all(func(self, other) for func in self._requirements) - - -def _check_same_type(self, other): - """ - Return True if *self* and *other* are of the same type, False otherwise. - """ - return other.value.__class__ is self.value.__class__ diff --git a/venv/Lib/site-packages/attr/_cmp.pyi b/venv/Lib/site-packages/attr/_cmp.pyi deleted file mode 100644 index cc7893b..0000000 --- a/venv/Lib/site-packages/attr/_cmp.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Any, Callable - -_CompareWithType = Callable[[Any, Any], bool] - -def cmp_using( - eq: _CompareWithType | None = ..., - lt: _CompareWithType | None = ..., - le: _CompareWithType | None = ..., - gt: _CompareWithType | None = ..., - ge: _CompareWithType | None = ..., - require_same_type: bool = ..., - class_name: str = ..., -) -> type: ... diff --git a/venv/Lib/site-packages/attr/_compat.py b/venv/Lib/site-packages/attr/_compat.py deleted file mode 100644 index bc68ed9..0000000 --- a/venv/Lib/site-packages/attr/_compat.py +++ /dev/null @@ -1,99 +0,0 @@ -# SPDX-License-Identifier: MIT - -import inspect -import platform -import sys -import threading - -from collections.abc import Mapping, Sequence # noqa: F401 -from typing import _GenericAlias - - -PYPY = platform.python_implementation() == "PyPy" -PY_3_10_PLUS = sys.version_info[:2] >= (3, 10) -PY_3_11_PLUS = sys.version_info[:2] >= (3, 11) -PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) -PY_3_13_PLUS = sys.version_info[:2] >= (3, 13) -PY_3_14_PLUS = sys.version_info[:2] >= (3, 14) - - -if PY_3_14_PLUS: - import annotationlib - - # We request forward-ref annotations to not break in the presence of - # forward references. - - def _get_annotations(cls): - return annotationlib.get_annotations( - cls, format=annotationlib.Format.FORWARDREF - ) - -else: - - def _get_annotations(cls): - """ - Get annotations for *cls*. - """ - return cls.__dict__.get("__annotations__", {}) - - -class _AnnotationExtractor: - """ - Extract type annotations from a callable, returning None whenever there - is none. - """ - - __slots__ = ["sig"] - - def __init__(self, callable): - try: - self.sig = inspect.signature(callable) - except (ValueError, TypeError): # inspect failed - self.sig = None - - def get_first_param_type(self): - """ - Return the type annotation of the first argument if it's not empty. 
- """ - if not self.sig: - return None - - params = list(self.sig.parameters.values()) - if params and params[0].annotation is not inspect.Parameter.empty: - return params[0].annotation - - return None - - def get_return_type(self): - """ - Return the return type if it's not empty. - """ - if ( - self.sig - and self.sig.return_annotation is not inspect.Signature.empty - ): - return self.sig.return_annotation - - return None - - -# Thread-local global to track attrs instances which are already being repr'd. -# This is needed because there is no other (thread-safe) way to pass info -# about the instances that are already being repr'd through the call stack -# in order to ensure we don't perform infinite recursion. -# -# For instance, if an instance contains a dict which contains that instance, -# we need to know that we're already repr'ing the outside instance from within -# the dict's repr() call. -# -# This lives here rather than in _make.py so that the functions in _make.py -# don't have a direct reference to the thread-local in their globals dict. -# If they have such a reference, it breaks cloudpickle. -repr_context = threading.local() - - -def get_generic_base(cl): - """If this is a generic class (A[str]), return the generic base for it.""" - if cl.__class__ is _GenericAlias: - return cl.__origin__ - return None diff --git a/venv/Lib/site-packages/attr/_config.py b/venv/Lib/site-packages/attr/_config.py deleted file mode 100644 index 4b25772..0000000 --- a/venv/Lib/site-packages/attr/_config.py +++ /dev/null @@ -1,31 +0,0 @@ -# SPDX-License-Identifier: MIT - -__all__ = ["get_run_validators", "set_run_validators"] - -_run_validators = True - - -def set_run_validators(run): - """ - Set whether or not validators are run. By default, they are run. - - .. deprecated:: 21.3.0 It will not be removed, but it also will not be - moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` - instead. - """ - if not isinstance(run, bool): - msg = "'run' must be bool." - raise TypeError(msg) - global _run_validators - _run_validators = run - - -def get_run_validators(): - """ - Return whether or not validators are run. - - .. deprecated:: 21.3.0 It will not be removed, but it also will not be - moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` - instead. - """ - return _run_validators diff --git a/venv/Lib/site-packages/attr/_funcs.py b/venv/Lib/site-packages/attr/_funcs.py deleted file mode 100644 index 1adb500..0000000 --- a/venv/Lib/site-packages/attr/_funcs.py +++ /dev/null @@ -1,497 +0,0 @@ -# SPDX-License-Identifier: MIT - - -import copy - -from ._compat import get_generic_base -from ._make import _OBJ_SETATTR, NOTHING, fields -from .exceptions import AttrsAttributeNotFoundError - - -_ATOMIC_TYPES = frozenset( - { - type(None), - bool, - int, - float, - str, - complex, - bytes, - type(...), - type, - range, - property, - } -) - - -def asdict( - inst, - recurse=True, - filter=None, - dict_factory=dict, - retain_collection_types=False, - value_serializer=None, -): - """ - Return the *attrs* attribute values of *inst* as a dict. - - Optionally recurse into other *attrs*-decorated classes. - - Args: - inst: Instance of an *attrs*-decorated class. - - recurse (bool): Recurse into classes that are also *attrs*-decorated. - - filter (~typing.Callable): - A callable whose return code determines whether an attribute or - element is included (`True`) or dropped (`False`). 
Is called with - the `attrs.Attribute` as the first argument and the value as the - second argument. - - dict_factory (~typing.Callable): - A callable to produce dictionaries from. For example, to produce - ordered dictionaries instead of normal Python dictionaries, pass in - ``collections.OrderedDict``. - - retain_collection_types (bool): - Do not convert to `list` when encountering an attribute whose type - is `tuple` or `set`. Only meaningful if *recurse* is `True`. - - value_serializer (typing.Callable | None): - A hook that is called for every attribute or dict key/value. It - receives the current instance, field and value and must return the - (updated) value. The hook is run *after* the optional *filter* has - been applied. - - Returns: - Return type of *dict_factory*. - - Raises: - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class. - - .. versionadded:: 16.0.0 *dict_factory* - .. versionadded:: 16.1.0 *retain_collection_types* - .. versionadded:: 20.3.0 *value_serializer* - .. versionadded:: 21.3.0 - If a dict has a collection for a key, it is serialized as a tuple. - """ - attrs = fields(inst.__class__) - rv = dict_factory() - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - - if value_serializer is not None: - v = value_serializer(inst, a, v) - - if recurse is True: - value_type = type(v) - if value_type in _ATOMIC_TYPES: - rv[a.name] = v - elif has(value_type): - rv[a.name] = asdict( - v, - recurse=True, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - elif issubclass(value_type, (tuple, list, set, frozenset)): - cf = value_type if retain_collection_types is True else list - items = [ - _asdict_anything( - i, - is_key=False, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - for i in v - ] - try: - rv[a.name] = cf(items) - except TypeError: - if not issubclass(cf, tuple): - raise - # Workaround for TypeError: cf.__new__() missing 1 required - # positional argument (which appears, for a namedturle) - rv[a.name] = cf(*items) - elif issubclass(value_type, dict): - df = dict_factory - rv[a.name] = df( - ( - _asdict_anything( - kk, - is_key=True, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - _asdict_anything( - vv, - is_key=False, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - ) - for kk, vv in v.items() - ) - else: - rv[a.name] = v - else: - rv[a.name] = v - return rv - - -def _asdict_anything( - val, - is_key, - filter, - dict_factory, - retain_collection_types, - value_serializer, -): - """ - ``asdict`` only works on attrs instances, this works on anything. - """ - val_type = type(val) - if val_type in _ATOMIC_TYPES: - rv = val - if value_serializer is not None: - rv = value_serializer(None, None, rv) - elif getattr(val_type, "__attrs_attrs__", None) is not None: - # Attrs class. 
- rv = asdict( - val, - recurse=True, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - elif issubclass(val_type, (tuple, list, set, frozenset)): - if retain_collection_types is True: - cf = val.__class__ - elif is_key: - cf = tuple - else: - cf = list - - rv = cf( - [ - _asdict_anything( - i, - is_key=False, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - for i in val - ] - ) - elif issubclass(val_type, dict): - df = dict_factory - rv = df( - ( - _asdict_anything( - kk, - is_key=True, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - _asdict_anything( - vv, - is_key=False, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - ) - for kk, vv in val.items() - ) - else: - rv = val - if value_serializer is not None: - rv = value_serializer(None, None, rv) - - return rv - - -def astuple( - inst, - recurse=True, - filter=None, - tuple_factory=tuple, - retain_collection_types=False, -): - """ - Return the *attrs* attribute values of *inst* as a tuple. - - Optionally recurse into other *attrs*-decorated classes. - - Args: - inst: Instance of an *attrs*-decorated class. - - recurse (bool): - Recurse into classes that are also *attrs*-decorated. - - filter (~typing.Callable): - A callable whose return code determines whether an attribute or - element is included (`True`) or dropped (`False`). Is called with - the `attrs.Attribute` as the first argument and the value as the - second argument. - - tuple_factory (~typing.Callable): - A callable to produce tuples from. For example, to produce lists - instead of tuples. - - retain_collection_types (bool): - Do not convert to `list` or `dict` when encountering an attribute - which type is `tuple`, `dict` or `set`. Only meaningful if - *recurse* is `True`. - - Returns: - Return type of *tuple_factory* - - Raises: - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class. - - .. versionadded:: 16.2.0 - """ - attrs = fields(inst.__class__) - rv = [] - retain = retain_collection_types # Very long. 
:/ - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - value_type = type(v) - if recurse is True: - if value_type in _ATOMIC_TYPES: - rv.append(v) - elif has(value_type): - rv.append( - astuple( - v, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - ) - elif issubclass(value_type, (tuple, list, set, frozenset)): - cf = v.__class__ if retain is True else list - items = [ - ( - astuple( - j, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(j.__class__) - else j - ) - for j in v - ] - try: - rv.append(cf(items)) - except TypeError: - if not issubclass(cf, tuple): - raise - # Workaround for TypeError: cf.__new__() missing 1 required - # positional argument (which appears, for a namedturle) - rv.append(cf(*items)) - elif issubclass(value_type, dict): - df = value_type if retain is True else dict - rv.append( - df( - ( - ( - astuple( - kk, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(kk.__class__) - else kk - ), - ( - astuple( - vv, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(vv.__class__) - else vv - ), - ) - for kk, vv in v.items() - ) - ) - else: - rv.append(v) - else: - rv.append(v) - - return rv if tuple_factory is list else tuple_factory(rv) - - -def has(cls): - """ - Check whether *cls* is a class with *attrs* attributes. - - Args: - cls (type): Class to introspect. - - Raises: - TypeError: If *cls* is not a class. - - Returns: - bool: - """ - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is not None: - return True - - # No attrs, maybe it's a specialized generic (A[str])? - generic_base = get_generic_base(cls) - if generic_base is not None: - generic_attrs = getattr(generic_base, "__attrs_attrs__", None) - if generic_attrs is not None: - # Stick it on here for speed next time. - cls.__attrs_attrs__ = generic_attrs - return generic_attrs is not None - return False - - -def assoc(inst, **changes): - """ - Copy *inst* and apply *changes*. - - This is different from `evolve` that applies the changes to the arguments - that create the new instance. - - `evolve`'s behavior is preferable, but there are `edge cases`_ where it - doesn't work. Therefore `assoc` is deprecated, but will not be removed. - - .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251 - - Args: - inst: Instance of a class with *attrs* attributes. - - changes: Keyword changes in the new copy. - - Returns: - A copy of inst with *changes* incorporated. - - Raises: - attrs.exceptions.AttrsAttributeNotFoundError: - If *attr_name* couldn't be found on *cls*. - - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class. - - .. deprecated:: 17.1.0 - Use `attrs.evolve` instead if you can. This function will not be - removed du to the slightly different approach compared to - `attrs.evolve`, though. - """ - new = copy.copy(inst) - attrs = fields(inst.__class__) - for k, v in changes.items(): - a = getattr(attrs, k, NOTHING) - if a is NOTHING: - msg = f"{k} is not an attrs attribute on {new.__class__}." - raise AttrsAttributeNotFoundError(msg) - _OBJ_SETATTR(new, k, v) - return new - - -def resolve_types( - cls, globalns=None, localns=None, attribs=None, include_extras=True -): - """ - Resolve any strings and forward annotations in type annotations. - - This is only required if you need concrete types in :class:`Attribute`'s - *type* field. 
In other words, you don't need to resolve your types if you - only use them for static type checking. - - With no arguments, names will be looked up in the module in which the class - was created. If this is not what you want, for example, if the name only - exists inside a method, you may pass *globalns* or *localns* to specify - other dictionaries in which to look up these names. See the docs of - `typing.get_type_hints` for more details. - - Args: - cls (type): Class to resolve. - - globalns (dict | None): Dictionary containing global variables. - - localns (dict | None): Dictionary containing local variables. - - attribs (list | None): - List of attribs for the given class. This is necessary when calling - from inside a ``field_transformer`` since *cls* is not an *attrs* - class yet. - - include_extras (bool): - Resolve more accurately, if possible. Pass ``include_extras`` to - ``typing.get_hints``, if supported by the typing module. On - supported Python versions (3.9+), this resolves the types more - accurately. - - Raises: - TypeError: If *cls* is not a class. - - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class and you didn't pass any attribs. - - NameError: If types cannot be resolved because of missing variables. - - Returns: - *cls* so you can use this function also as a class decorator. Please - note that you have to apply it **after** `attrs.define`. That means the - decorator has to come in the line **before** `attrs.define`. - - .. versionadded:: 20.1.0 - .. versionadded:: 21.1.0 *attribs* - .. versionadded:: 23.1.0 *include_extras* - """ - # Since calling get_type_hints is expensive we cache whether we've - # done it already. - if getattr(cls, "__attrs_types_resolved__", None) != cls: - import typing - - kwargs = { - "globalns": globalns, - "localns": localns, - "include_extras": include_extras, - } - - hints = typing.get_type_hints(cls, **kwargs) - for field in fields(cls) if attribs is None else attribs: - if field.name in hints: - # Since fields have been frozen we must work around it. - _OBJ_SETATTR(field, "type", hints[field.name]) - # We store the class we resolved so that subclasses know they haven't - # been resolved. - cls.__attrs_types_resolved__ = cls - - # Return the class so you can use it as a decorator too. - return cls diff --git a/venv/Lib/site-packages/attr/_make.py b/venv/Lib/site-packages/attr/_make.py deleted file mode 100644 index d24d9ba..0000000 --- a/venv/Lib/site-packages/attr/_make.py +++ /dev/null @@ -1,3362 +0,0 @@ -# SPDX-License-Identifier: MIT - -from __future__ import annotations - -import abc -import contextlib -import copy -import enum -import inspect -import itertools -import linecache -import sys -import types -import unicodedata -import weakref - -from collections.abc import Callable, Mapping -from functools import cached_property -from typing import Any, NamedTuple, TypeVar - -# We need to import _compat itself in addition to the _compat members to avoid -# having the thread-local in the globals here. -from . import _compat, _config, setters -from ._compat import ( - PY_3_10_PLUS, - PY_3_11_PLUS, - PY_3_13_PLUS, - _AnnotationExtractor, - _get_annotations, - get_generic_base, -) -from .exceptions import ( - DefaultAlreadySetError, - FrozenInstanceError, - NotAnAttrsClassError, - UnannotatedAttributeError, -) - - -# This is used at least twice, so cache it here. 
-_OBJ_SETATTR = object.__setattr__ -_INIT_FACTORY_PAT = "__attr_factory_%s" -_CLASSVAR_PREFIXES = ( - "typing.ClassVar", - "t.ClassVar", - "ClassVar", - "typing_extensions.ClassVar", -) -# we don't use a double-underscore prefix because that triggers -# name mangling when trying to create a slot for the field -# (when slots=True) -_HASH_CACHE_FIELD = "_attrs_cached_hash" - -_EMPTY_METADATA_SINGLETON = types.MappingProxyType({}) - -# Unique object for unequivocal getattr() defaults. -_SENTINEL = object() - -_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate) - - -class _Nothing(enum.Enum): - """ - Sentinel to indicate the lack of a value when `None` is ambiguous. - - If extending attrs, you can use ``typing.Literal[NOTHING]`` to show - that a value may be ``NOTHING``. - - .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. - .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. - """ - - NOTHING = enum.auto() - - def __repr__(self): - return "NOTHING" - - def __bool__(self): - return False - - -NOTHING = _Nothing.NOTHING -""" -Sentinel to indicate the lack of a value when `None` is ambiguous. - -When using in 3rd party code, use `attrs.NothingType` for type annotations. -""" - - -class _CacheHashWrapper(int): - """ - An integer subclass that pickles / copies as None - - This is used for non-slots classes with ``cache_hash=True``, to avoid - serializing a potentially (even likely) invalid hash value. Since `None` - is the default value for uncalculated hashes, whenever this is copied, - the copy's value for the hash should automatically reset. - - See GH #613 for more details. - """ - - def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 - return _none_constructor, _args - - -def attrib( - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=None, - eq=None, - order=None, - on_setattr=None, - alias=None, -): - """ - Create a new field / attribute on a class. - - Identical to `attrs.field`, except it's not keyword-only. - - Consider using `attrs.field` in new code (``attr.ib`` will *never* go away, - though). - - .. warning:: - - Does **nothing** unless the class is also decorated with - `attr.s` (or similar)! - - - .. versionadded:: 15.2.0 *convert* - .. versionadded:: 16.3.0 *metadata* - .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. - .. versionchanged:: 17.1.0 - *hash* is `None` and therefore mirrors *eq* by default. - .. versionadded:: 17.3.0 *type* - .. deprecated:: 17.4.0 *convert* - .. versionadded:: 17.4.0 - *converter* as a replacement for the deprecated *convert* to achieve - consistency with other noun-based arguments. - .. versionadded:: 18.1.0 - ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. - .. versionadded:: 18.2.0 *kw_only* - .. versionchanged:: 19.2.0 *convert* keyword argument removed. - .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 - .. versionchanged:: 21.1.0 - *eq*, *order*, and *cmp* also accept a custom callable - .. versionchanged:: 21.1.0 *cmp* undeprecated - .. versionadded:: 22.2.0 *alias* - .. versionchanged:: 25.4.0 - *kw_only* can now be None, and its default is also changed from False to - None. 
- """ - eq, eq_key, order, order_key = _determine_attrib_eq_order( - cmp, eq, order, True - ) - - if hash is not None and hash is not True and hash is not False: - msg = "Invalid value for hash. Must be True, False, or None." - raise TypeError(msg) - - if factory is not None: - if default is not NOTHING: - msg = ( - "The `default` and `factory` arguments are mutually exclusive." - ) - raise ValueError(msg) - if not callable(factory): - msg = "The `factory` argument must be a callable." - raise ValueError(msg) - default = Factory(factory) - - if metadata is None: - metadata = {} - - # Apply syntactic sugar by auto-wrapping. - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - if validator and isinstance(validator, (list, tuple)): - validator = and_(*validator) - - if converter and isinstance(converter, (list, tuple)): - converter = pipe(*converter) - - return _CountingAttr( - default=default, - validator=validator, - repr=repr, - cmp=None, - hash=hash, - init=init, - converter=converter, - metadata=metadata, - type=type, - kw_only=kw_only, - eq=eq, - eq_key=eq_key, - order=order, - order_key=order_key, - on_setattr=on_setattr, - alias=alias, - ) - - -def _compile_and_eval( - script: str, - globs: dict[str, Any] | None, - locs: Mapping[str, object] | None = None, - filename: str = "", -) -> None: - """ - Evaluate the script with the given global (globs) and local (locs) - variables. - """ - bytecode = compile(script, filename, "exec") - eval(bytecode, globs, locs) - - -def _linecache_and_compile( - script: str, - filename: str, - globs: dict[str, Any] | None, - locals: Mapping[str, object] | None = None, -) -> dict[str, Any]: - """ - Cache the script with _linecache_, compile it and return the _locals_. - """ - - locs = {} if locals is None else locals - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - count = 1 - base_filename = filename - while True: - linecache_tuple = ( - len(script), - None, - script.splitlines(True), - filename, - ) - old_val = linecache.cache.setdefault(filename, linecache_tuple) - if old_val == linecache_tuple: - break - - filename = f"{base_filename[:-1]}-{count}>" - count += 1 - - _compile_and_eval(script, globs, locs, filename) - - return locs - - -def _make_attr_tuple_class(cls_name: str, attr_names: list[str]) -> type: - """ - Create a tuple subclass to hold `Attribute`s for an `attrs` class. - - The subclass is a bare tuple with properties for names. - - class MyClassAttributes(tuple): - __slots__ = () - x = property(itemgetter(0)) - """ - attr_class_name = f"{cls_name}Attributes" - body = {} - for i, attr_name in enumerate(attr_names): - - def getter(self, i=i): - return self[i] - - body[attr_name] = property(getter) - return type(attr_class_name, (tuple,), body) - - -# Tuple class for extracted attributes from a class definition. -# `base_attrs` is a subset of `attrs`. -class _Attributes(NamedTuple): - attrs: type - base_attrs: list[Attribute] - base_attrs_map: dict[str, type] - - -def _is_class_var(annot): - """ - Check whether *annot* is a typing.ClassVar. - - The string comparison hack is used to avoid evaluating all string - annotations which would put attrs-based classes at a performance - disadvantage compared to plain old classes. - """ - annot = str(annot) - - # Annotation can be quoted. 
- if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): - annot = annot[1:-1] - - return annot.startswith(_CLASSVAR_PREFIXES) - - -def _has_own_attribute(cls, attrib_name): - """ - Check whether *cls* defines *attrib_name* (and doesn't just inherit it). - """ - return attrib_name in cls.__dict__ - - -def _collect_base_attrs( - cls, taken_attr_names -) -> tuple[list[Attribute], dict[str, type]]: - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in reversed(cls.__mro__[1:-1]): - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.inherited or a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) # noqa: PLW2901 - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - # For each name, only keep the freshest definition i.e. the furthest at the - # back. base_attr_map is fine because it gets overwritten with every new - # instance. - filtered = [] - seen = set() - for a in reversed(base_attrs): - if a.name in seen: - continue - filtered.insert(0, a) - seen.add(a.name) - - return filtered, base_attr_map - - -def _collect_base_attrs_broken(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - - N.B. *taken_attr_names* will be mutated. - - Adhere to the old incorrect behavior. - - Notably it collects from the front and considers inherited attributes which - leads to the buggy behavior reported in #428. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in cls.__mro__[1:-1]: - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) # noqa: PLW2901 - taken_attr_names.add(a.name) - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - return base_attrs, base_attr_map - - -def _transform_attrs( - cls, - these, - auto_attribs, - kw_only, - collect_by_mro, - field_transformer, -) -> _Attributes: - """ - Transform all `_CountingAttr`s on a class into `Attribute`s. - - If *these* is passed, use that and don't look for them on the class. - - If *collect_by_mro* is True, collect them in the correct MRO order, - otherwise use the old -- incorrect -- order. See #428. - - Return an `_Attributes`. - """ - cd = cls.__dict__ - anns = _get_annotations(cls) - - if these is not None: - ca_list = list(these.items()) - elif auto_attribs is True: - ca_names = { - name - for name, attr in cd.items() - if attr.__class__ is _CountingAttr - } - ca_list = [] - annot_names = set() - for attr_name, type in anns.items(): - if _is_class_var(type): - continue - annot_names.add(attr_name) - a = cd.get(attr_name, NOTHING) - - if a.__class__ is not _CountingAttr: - a = attrib(a) - ca_list.append((attr_name, a)) - - unannotated = ca_names - annot_names - if unannotated: - raise UnannotatedAttributeError( - "The following `attr.ib`s lack a type annotation: " - + ", ".join( - sorted(unannotated, key=lambda n: cd.get(n).counter) - ) - + "." 
- ) - else: - ca_list = sorted( - ( - (name, attr) - for name, attr in cd.items() - if attr.__class__ is _CountingAttr - ), - key=lambda e: e[1].counter, - ) - - fca = Attribute.from_counting_attr - no = ClassProps.KeywordOnly.NO - own_attrs = [ - fca( - attr_name, - ca, - kw_only is not no, - anns.get(attr_name), - ) - for attr_name, ca in ca_list - ] - - if collect_by_mro: - base_attrs, base_attr_map = _collect_base_attrs( - cls, {a.name for a in own_attrs} - ) - else: - base_attrs, base_attr_map = _collect_base_attrs_broken( - cls, {a.name for a in own_attrs} - ) - - if kw_only is ClassProps.KeywordOnly.FORCE: - own_attrs = [a.evolve(kw_only=True) for a in own_attrs] - base_attrs = [a.evolve(kw_only=True) for a in base_attrs] - - attrs = base_attrs + own_attrs - - if field_transformer is not None: - attrs = tuple(field_transformer(cls, attrs)) - - # Check attr order after executing the field_transformer. - # Mandatory vs non-mandatory attr order only matters when they are part of - # the __init__ signature and when they aren't kw_only (which are moved to - # the end and can be mandatory or non-mandatory in any order, as they will - # be specified as keyword args anyway). Check the order of those attrs: - had_default = False - for a in (a for a in attrs if a.init is not False and a.kw_only is False): - if had_default is True and a.default is NOTHING: - msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" - raise ValueError(msg) - - if had_default is False and a.default is not NOTHING: - had_default = True - - # Resolve default field alias after executing field_transformer. - # This allows field_transformer to differentiate between explicit vs - # default aliases and supply their own defaults. - for a in attrs: - if not a.alias: - # Evolve is very slow, so we hold our nose and do it dirty. - _OBJ_SETATTR.__get__(a)("alias", _default_init_alias_for(a.name)) - - # Create AttrsClass *after* applying the field_transformer since it may - # add or remove attributes! - attr_names = [a.name for a in attrs] - AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - - return _Attributes(AttrsClass(attrs), base_attrs, base_attr_map) - - -def _make_cached_property_getattr(cached_properties, original_getattr, cls): - lines = [ - # Wrapped to get `__class__` into closure cell for super() - # (It will be replaced with the newly constructed class after construction). 
- "def wrapper(_cls):", - " __class__ = _cls", - " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", - " func = cached_properties.get(item)", - " if func is not None:", - " result = func(self)", - " _setter = _cached_setattr_get(self)", - " _setter(item, result)", - " return result", - ] - if original_getattr is not None: - lines.append( - " return original_getattr(self, item)", - ) - else: - lines.extend( - [ - " try:", - " return super().__getattribute__(item)", - " except AttributeError:", - " if not hasattr(super(), '__getattr__'):", - " raise", - " return super().__getattr__(item)", - " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", - " raise AttributeError(original_error)", - ] - ) - - lines.extend( - [ - " return __getattr__", - "__getattr__ = wrapper(_cls)", - ] - ) - - unique_filename = _generate_unique_filename(cls, "getattr") - - glob = { - "cached_properties": cached_properties, - "_cached_setattr_get": _OBJ_SETATTR.__get__, - "original_getattr": original_getattr, - } - - return _linecache_and_compile( - "\n".join(lines), unique_filename, glob, locals={"_cls": cls} - )["__getattr__"] - - -def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. - """ - if isinstance(self, BaseException) and name in ( - "__cause__", - "__context__", - "__traceback__", - "__suppress_context__", - "__notes__", - ): - BaseException.__setattr__(self, name, value) - return - - raise FrozenInstanceError - - -def _frozen_delattrs(self, name): - """ - Attached to frozen classes as __delattr__. - """ - if isinstance(self, BaseException) and name in ("__notes__",): - BaseException.__delattr__(self, name) - return - - raise FrozenInstanceError - - -def evolve(*args, **changes): - """ - Create a new instance, based on the first positional argument with - *changes* applied. - - .. tip:: - - On Python 3.13 and later, you can also use `copy.replace` instead. - - Args: - - inst: - Instance of a class with *attrs* attributes. *inst* must be passed - as a positional argument. - - changes: - Keyword changes in the new copy. - - Returns: - A copy of inst with *changes* incorporated. - - Raises: - TypeError: - If *attr_name* couldn't be found in the class ``__init__``. - - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class. - - .. versionadded:: 17.1.0 - .. deprecated:: 23.1.0 - It is now deprecated to pass the instance using the keyword argument - *inst*. It will raise a warning until at least April 2024, after which - it will become an error. Always pass the instance as a positional - argument. - .. versionchanged:: 24.1.0 - *inst* can't be passed as a keyword argument anymore. - """ - try: - (inst,) = args - except ValueError: - msg = ( - f"evolve() takes 1 positional argument, but {len(args)} were given" - ) - raise TypeError(msg) from None - - cls = inst.__class__ - attrs = fields(cls) - for a in attrs: - if not a.init: - continue - attr_name = a.name # To deal with private attributes. - init_name = a.alias - if init_name not in changes: - changes[init_name] = getattr(inst, attr_name) - - return cls(**changes) - - -class _ClassBuilder: - """ - Iteratively build *one* class. 
- """ - - __slots__ = ( - "_add_method_dunders", - "_attr_names", - "_attrs", - "_base_attr_map", - "_base_names", - "_cache_hash", - "_cls", - "_cls_dict", - "_delete_attribs", - "_frozen", - "_has_custom_setattr", - "_has_post_init", - "_has_pre_init", - "_is_exc", - "_on_setattr", - "_pre_init_has_args", - "_repr_added", - "_script_snippets", - "_slots", - "_weakref_slot", - "_wrote_own_setattr", - ) - - def __init__( - self, - cls: type, - these, - auto_attribs: bool, - props: ClassProps, - has_custom_setattr: bool, - ): - attrs, base_attrs, base_map = _transform_attrs( - cls, - these, - auto_attribs, - props.kw_only, - props.collected_fields_by_mro, - props.field_transformer, - ) - - self._cls = cls - self._cls_dict = dict(cls.__dict__) if props.is_slotted else {} - self._attrs = attrs - self._base_names = {a.name for a in base_attrs} - self._base_attr_map = base_map - self._attr_names = tuple(a.name for a in attrs) - self._slots = props.is_slotted - self._frozen = props.is_frozen - self._weakref_slot = props.has_weakref_slot - self._cache_hash = ( - props.hashability is ClassProps.Hashability.HASHABLE_CACHED - ) - self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) - self._pre_init_has_args = False - if self._has_pre_init: - # Check if the pre init method has more arguments than just `self` - # We want to pass arguments if pre init expects arguments - pre_init_func = cls.__attrs_pre_init__ - pre_init_signature = inspect.signature(pre_init_func) - self._pre_init_has_args = len(pre_init_signature.parameters) > 1 - self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) - self._delete_attribs = not bool(these) - self._is_exc = props.is_exception - self._on_setattr = props.on_setattr_hook - - self._has_custom_setattr = has_custom_setattr - self._wrote_own_setattr = False - - self._cls_dict["__attrs_attrs__"] = self._attrs - self._cls_dict["__attrs_props__"] = props - - if props.is_frozen: - self._cls_dict["__setattr__"] = _frozen_setattrs - self._cls_dict["__delattr__"] = _frozen_delattrs - - self._wrote_own_setattr = True - elif self._on_setattr in ( - _DEFAULT_ON_SETATTR, - setters.validate, - setters.convert, - ): - has_validator = has_converter = False - for a in attrs: - if a.validator is not None: - has_validator = True - if a.converter is not None: - has_converter = True - - if has_validator and has_converter: - break - if ( - ( - self._on_setattr == _DEFAULT_ON_SETATTR - and not (has_validator or has_converter) - ) - or (self._on_setattr == setters.validate and not has_validator) - or (self._on_setattr == setters.convert and not has_converter) - ): - # If class-level on_setattr is set to convert + validate, but - # there's no field to convert or validate, pretend like there's - # no on_setattr. - self._on_setattr = None - - if props.added_pickling: - ( - self._cls_dict["__getstate__"], - self._cls_dict["__setstate__"], - ) = self._make_getstate_setstate() - - # tuples of script, globs, hook - self._script_snippets: list[ - tuple[str, dict, Callable[[dict, dict], Any]] - ] = [] - self._repr_added = False - - # We want to only do this check once; in 99.9% of cases these - # exist. 
- if not hasattr(self._cls, "__module__") or not hasattr( - self._cls, "__qualname__" - ): - self._add_method_dunders = self._add_method_dunders_safe - else: - self._add_method_dunders = self._add_method_dunders_unsafe - - def __repr__(self): - return f"<_ClassBuilder(cls={self._cls.__name__})>" - - def _eval_snippets(self) -> None: - """ - Evaluate any registered snippets in one go. - """ - script = "\n".join([snippet[0] for snippet in self._script_snippets]) - globs = {} - for _, snippet_globs, _ in self._script_snippets: - globs.update(snippet_globs) - - locs = _linecache_and_compile( - script, - _generate_unique_filename(self._cls, "methods"), - globs, - ) - - for _, _, hook in self._script_snippets: - hook(self._cls_dict, locs) - - def build_class(self): - """ - Finalize class based on the accumulated configuration. - - Builder cannot be used after calling this method. - """ - self._eval_snippets() - if self._slots is True: - cls = self._create_slots_class() - self._cls.__attrs_base_of_slotted__ = weakref.ref(cls) - else: - cls = self._patch_original_class() - if PY_3_10_PLUS: - cls = abc.update_abstractmethods(cls) - - # The method gets only called if it's not inherited from a base class. - # _has_own_attribute does NOT work properly for classmethods. - if ( - getattr(cls, "__attrs_init_subclass__", None) - and "__attrs_init_subclass__" not in cls.__dict__ - ): - cls.__attrs_init_subclass__() - - return cls - - def _patch_original_class(self): - """ - Apply accumulated methods and return the class. - """ - cls = self._cls - base_names = self._base_names - - # Clean class of attribute definitions (`attr.ib()`s). - if self._delete_attribs: - for name in self._attr_names: - if ( - name not in base_names - and getattr(cls, name, _SENTINEL) is not _SENTINEL - ): - # An AttributeError can happen if a base class defines a - # class variable and we want to set an attribute with the - # same name by using only a type annotation. - with contextlib.suppress(AttributeError): - delattr(cls, name) - - # Attach our dunder methods. - for name, value in self._cls_dict.items(): - setattr(cls, name, value) - - # If we've inherited an attrs __setattr__ and don't write our own, - # reset it to object's. - if not self._wrote_own_setattr and getattr( - cls, "__attrs_own_setattr__", False - ): - cls.__attrs_own_setattr__ = False - - if not self._has_custom_setattr: - cls.__setattr__ = _OBJ_SETATTR - - return cls - - def _create_slots_class(self): - """ - Build and return a new class with a `__slots__` attribute. - """ - cd = { - k: v - for k, v in self._cls_dict.items() - if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") - } - - # 3.14.0rc2+ - if hasattr(sys, "_clear_type_descriptors"): - sys._clear_type_descriptors(self._cls) - - # If our class doesn't have its own implementation of __setattr__ - # (either from the user or by us), check the bases, if one of them has - # an attrs-made __setattr__, that needs to be reset. We don't walk the - # MRO because we only care about our immediate base classes. - # XXX: This can be confused by subclassing a slotted attrs class with - # XXX: a non-attrs class and subclass the resulting class with an attrs - # XXX: class. See `test_slotted_confused` for details. For now that's - # XXX: OK with us. 
- if not self._wrote_own_setattr: - cd["__attrs_own_setattr__"] = False - - if not self._has_custom_setattr: - for base_cls in self._cls.__bases__: - if base_cls.__dict__.get("__attrs_own_setattr__", False): - cd["__setattr__"] = _OBJ_SETATTR - break - - # Traverse the MRO to collect existing slots - # and check for an existing __weakref__. - existing_slots = {} - weakref_inherited = False - for base_cls in self._cls.__mro__[1:-1]: - if base_cls.__dict__.get("__weakref__", None) is not None: - weakref_inherited = True - existing_slots.update( - { - name: getattr(base_cls, name) - for name in getattr(base_cls, "__slots__", []) - } - ) - - base_names = set(self._base_names) - - names = self._attr_names - if ( - self._weakref_slot - and "__weakref__" not in getattr(self._cls, "__slots__", ()) - and "__weakref__" not in names - and not weakref_inherited - ): - names += ("__weakref__",) - - cached_properties = { - name: cached_prop.func - for name, cached_prop in cd.items() - if isinstance(cached_prop, cached_property) - } - - # Collect methods with a `__class__` reference that are shadowed in the new class. - # To know to update them. - additional_closure_functions_to_update = [] - if cached_properties: - class_annotations = _get_annotations(self._cls) - for name, func in cached_properties.items(): - # Add cached properties to names for slotting. - names += (name,) - # Clear out function from class to avoid clashing. - del cd[name] - additional_closure_functions_to_update.append(func) - annotation = inspect.signature(func).return_annotation - if annotation is not inspect.Parameter.empty: - class_annotations[name] = annotation - - original_getattr = cd.get("__getattr__") - if original_getattr is not None: - additional_closure_functions_to_update.append(original_getattr) - - cd["__getattr__"] = _make_cached_property_getattr( - cached_properties, original_getattr, self._cls - ) - - # We only add the names of attributes that aren't inherited. - # Setting __slots__ to inherited attributes wastes memory. - slot_names = [name for name in names if name not in base_names] - - # There are slots for attributes from current class - # that are defined in parent classes. - # As their descriptors may be overridden by a child class, - # we collect them here and update the class dict - reused_slots = { - slot: slot_descriptor - for slot, slot_descriptor in existing_slots.items() - if slot in slot_names - } - slot_names = [name for name in slot_names if name not in reused_slots] - cd.update(reused_slots) - if self._cache_hash: - slot_names.append(_HASH_CACHE_FIELD) - - cd["__slots__"] = tuple(slot_names) - - cd["__qualname__"] = self._cls.__qualname__ - - # Create new class based on old class and our methods. - cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) - - # The following is a fix for - # . - # If a method mentions `__class__` or uses the no-arg super(), the - # compiler will bake a reference to the class in the method itself - # as `method.__closure__`. Since we replace the class with a - # clone, we rewrite these references so it keeps working. - for item in itertools.chain( - cls.__dict__.values(), additional_closure_functions_to_update - ): - if isinstance(item, (classmethod, staticmethod)): - # Class- and staticmethods hide their functions inside. - # These might need to be rewritten as well. - closure_cells = getattr(item.__func__, "__closure__", None) - elif isinstance(item, property): - # Workaround for property `super()` shortcut (PY3-only). 
- # There is no universal way for other descriptors. - closure_cells = getattr(item.fget, "__closure__", None) - else: - closure_cells = getattr(item, "__closure__", None) - - if not closure_cells: # Catch None or the empty list. - continue - for cell in closure_cells: - try: - match = cell.cell_contents is self._cls - except ValueError: # noqa: PERF203 - # ValueError: Cell is empty - pass - else: - if match: - cell.cell_contents = cls - return cls - - def add_repr(self, ns): - script, globs = _make_repr_script(self._attrs, ns) - - def _attach_repr(cls_dict, globs): - cls_dict["__repr__"] = self._add_method_dunders(globs["__repr__"]) - - self._script_snippets.append((script, globs, _attach_repr)) - self._repr_added = True - return self - - def add_str(self): - if not self._repr_added: - msg = "__str__ can only be generated if a __repr__ exists." - raise ValueError(msg) - - def __str__(self): - return self.__repr__() - - self._cls_dict["__str__"] = self._add_method_dunders(__str__) - return self - - def _make_getstate_setstate(self): - """ - Create custom __setstate__ and __getstate__ methods. - """ - # __weakref__ is not writable. - state_attr_names = tuple( - an for an in self._attr_names if an != "__weakref__" - ) - - def slots_getstate(self): - """ - Automatically created by attrs. - """ - return {name: getattr(self, name) for name in state_attr_names} - - hash_caching_enabled = self._cache_hash - - def slots_setstate(self, state): - """ - Automatically created by attrs. - """ - __bound_setattr = _OBJ_SETATTR.__get__(self) - if isinstance(state, tuple): - # Backward compatibility with attrs instances pickled with - # attrs versions before v22.2.0 which stored tuples. - for name, value in zip(state_attr_names, state): - __bound_setattr(name, value) - else: - for name in state_attr_names: - if name in state: - __bound_setattr(name, state[name]) - - # The hash code cache is not included when the object is - # serialized, but it still needs to be initialized to None to - # indicate that the first call to __hash__ should be a cache - # miss. 
- if hash_caching_enabled: - __bound_setattr(_HASH_CACHE_FIELD, None) - - return slots_getstate, slots_setstate - - def make_unhashable(self): - self._cls_dict["__hash__"] = None - return self - - def add_hash(self): - script, globs = _make_hash_script( - self._cls, - self._attrs, - frozen=self._frozen, - cache_hash=self._cache_hash, - ) - - def attach_hash(cls_dict: dict, locs: dict) -> None: - cls_dict["__hash__"] = self._add_method_dunders(locs["__hash__"]) - - self._script_snippets.append((script, globs, attach_hash)) - - return self - - def add_init(self): - script, globs, annotations = _make_init_script( - self._cls, - self._attrs, - self._has_pre_init, - self._pre_init_has_args, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr, - attrs_init=False, - ) - - def _attach_init(cls_dict, globs): - init = globs["__init__"] - init.__annotations__ = annotations - cls_dict["__init__"] = self._add_method_dunders(init) - - self._script_snippets.append((script, globs, _attach_init)) - - return self - - def add_replace(self): - self._cls_dict["__replace__"] = self._add_method_dunders( - lambda self, **changes: evolve(self, **changes) - ) - return self - - def add_match_args(self): - self._cls_dict["__match_args__"] = tuple( - field.name - for field in self._attrs - if field.init and not field.kw_only - ) - - def add_attrs_init(self): - script, globs, annotations = _make_init_script( - self._cls, - self._attrs, - self._has_pre_init, - self._pre_init_has_args, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr, - attrs_init=True, - ) - - def _attach_attrs_init(cls_dict, globs): - init = globs["__attrs_init__"] - init.__annotations__ = annotations - cls_dict["__attrs_init__"] = self._add_method_dunders(init) - - self._script_snippets.append((script, globs, _attach_attrs_init)) - - return self - - def add_eq(self): - cd = self._cls_dict - - script, globs = _make_eq_script(self._attrs) - - def _attach_eq(cls_dict, globs): - cls_dict["__eq__"] = self._add_method_dunders(globs["__eq__"]) - - self._script_snippets.append((script, globs, _attach_eq)) - - cd["__ne__"] = __ne__ - - return self - - def add_order(self): - cd = self._cls_dict - - cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( - self._add_method_dunders(meth) - for meth in _make_order(self._cls, self._attrs) - ) - - return self - - def add_setattr(self): - sa_attrs = {} - for a in self._attrs: - on_setattr = a.on_setattr or self._on_setattr - if on_setattr and on_setattr is not setters.NO_OP: - sa_attrs[a.name] = a, on_setattr - - if not sa_attrs: - return self - - if self._has_custom_setattr: - # We need to write a __setattr__ but there already is one! - msg = "Can't combine custom __setattr__ with on_setattr hooks." - raise ValueError(msg) - - # docstring comes from _add_method_dunders - def __setattr__(self, name, val): - try: - a, hook = sa_attrs[name] - except KeyError: - nval = val - else: - nval = hook(self, a, val) - - _OBJ_SETATTR(self, name, nval) - - self._cls_dict["__attrs_own_setattr__"] = True - self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) - self._wrote_own_setattr = True - - return self - - def _add_method_dunders_unsafe(self, method: Callable) -> Callable: - """ - Add __module__ and __qualname__ to a *method*. 
- """ - method.__module__ = self._cls.__module__ - - method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" - - method.__doc__ = ( - f"Method generated by attrs for class {self._cls.__qualname__}." - ) - - return method - - def _add_method_dunders_safe(self, method: Callable) -> Callable: - """ - Add __module__ and __qualname__ to a *method* if possible. - """ - with contextlib.suppress(AttributeError): - method.__module__ = self._cls.__module__ - - with contextlib.suppress(AttributeError): - method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" - - with contextlib.suppress(AttributeError): - method.__doc__ = f"Method generated by attrs for class {self._cls.__qualname__}." - - return method - - -def _determine_attrs_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - msg = "Don't mix `cmp` with `eq' and `order`." - raise ValueError(msg) - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - return cmp, cmp - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq = default_eq - - if order is None: - order = eq - - if eq is False and order is True: - msg = "`order` can only be True if `eq` is True too." - raise ValueError(msg) - - return eq, order - - -def _determine_attrib_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - msg = "Don't mix `cmp` with `eq' and `order`." - raise ValueError(msg) - - def decide_callable_or_boolean(value): - """ - Decide whether a key function is used. - """ - if callable(value): - value, key = True, value - else: - key = None - return value, key - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - cmp, cmp_key = decide_callable_or_boolean(cmp) - return cmp, cmp_key, cmp, cmp_key - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq, eq_key = default_eq, None - else: - eq, eq_key = decide_callable_or_boolean(eq) - - if order is None: - order, order_key = eq, eq_key - else: - order, order_key = decide_callable_or_boolean(order) - - if eq is False and order is True: - msg = "`order` can only be True if `eq` is True too." - raise ValueError(msg) - - return eq, eq_key, order, order_key - - -def _determine_whether_to_implement( - cls, flag, auto_detect, dunders, default=True -): - """ - Check whether we should implement a set of methods for *cls*. - - *flag* is the argument passed into @attr.s like 'init', *auto_detect* the - same as passed into @attr.s and *dunders* is a tuple of attribute names - whose presence signal that the user has implemented it themselves. - - Return *default* if no reason for either for or against is found. - """ - if flag is True or flag is False: - return flag - - if flag is None and auto_detect is False: - return default - - # Logically, flag is None and auto_detect is True here. 
- for dunder in dunders: - if _has_own_attribute(cls, dunder): - return False - - return default - - -def attrs( - maybe_cls=None, - these=None, - repr_ns=None, - repr=None, - cmp=None, - hash=None, - init=None, - slots=False, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=False, - kw_only=False, - cache_hash=False, - auto_exc=False, - eq=None, - order=None, - auto_detect=False, - collect_by_mro=False, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, - match_args=True, - unsafe_hash=None, - force_kw_only=True, -): - r""" - A class decorator that adds :term:`dunder methods` according to the - specified attributes using `attr.ib` or the *these* argument. - - Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will - *never* go away, though). - - Args: - repr_ns (str): - When using nested classes, there was no way in Python 2 to - automatically detect that. This argument allows to set a custom - name for a more meaningful ``repr`` output. This argument is - pointless in Python 3 and is therefore deprecated. - - .. caution:: - Refer to `attrs.define` for the rest of the parameters, but note that they - can have different defaults. - - Notably, leaving *on_setattr* as `None` will **not** add any hooks. - - .. versionadded:: 16.0.0 *slots* - .. versionadded:: 16.1.0 *frozen* - .. versionadded:: 16.3.0 *str* - .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. - .. versionchanged:: 17.1.0 - *hash* supports `None` as value which is also the default now. - .. versionadded:: 17.3.0 *auto_attribs* - .. versionchanged:: 18.1.0 - If *these* is passed, no attributes are deleted from the class body. - .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. - .. versionadded:: 18.2.0 *weakref_slot* - .. deprecated:: 18.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a - `DeprecationWarning` if the classes compared are subclasses of - each other. ``__eq`` and ``__ne__`` never tried to compared subclasses - to each other. - .. versionchanged:: 19.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider - subclasses comparable anymore. - .. versionadded:: 18.2.0 *kw_only* - .. versionadded:: 18.2.0 *cache_hash* - .. versionadded:: 19.1.0 *auto_exc* - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *auto_detect* - .. versionadded:: 20.1.0 *collect_by_mro* - .. versionadded:: 20.1.0 *getstate_setstate* - .. versionadded:: 20.1.0 *on_setattr* - .. versionadded:: 20.3.0 *field_transformer* - .. versionchanged:: 21.1.0 - ``init=False`` injects ``__attrs_init__`` - .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` - .. versionchanged:: 21.1.0 *cmp* undeprecated - .. versionadded:: 21.3.0 *match_args* - .. versionadded:: 22.2.0 - *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). - .. deprecated:: 24.1.0 *repr_ns* - .. versionchanged:: 24.1.0 - Instances are not compared as tuples of attributes anymore, but using a - big ``and`` condition. This is faster and has more correct behavior for - uncomparable values like `math.nan`. - .. versionadded:: 24.1.0 - If a class has an *inherited* classmethod called - ``__attrs_init_subclass__``, it is executed after the class is created. - .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*. - .. 
versionchanged:: 25.4.0 - *kw_only* now only applies to attributes defined in the current class, - and respects attribute-level ``kw_only=False`` settings. - .. versionadded:: 25.4.0 *force_kw_only* - """ - if repr_ns is not None: - import warnings - - warnings.warn( - DeprecationWarning( - "The `repr_ns` argument is deprecated and will be removed in or after August 2025." - ), - stacklevel=2, - ) - - eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) - - # unsafe_hash takes precedence due to PEP 681. - if unsafe_hash is not None: - hash = unsafe_hash - - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - def wrap(cls): - nonlocal hash - is_frozen = frozen or _has_frozen_base_class(cls) - is_exc = auto_exc is True and issubclass(cls, BaseException) - has_own_setattr = auto_detect and _has_own_attribute( - cls, "__setattr__" - ) - - if has_own_setattr and is_frozen: - msg = "Can't freeze a class with a custom __setattr__." - raise ValueError(msg) - - eq = not is_exc and _determine_whether_to_implement( - cls, eq_, auto_detect, ("__eq__", "__ne__") - ) - - Hashability = ClassProps.Hashability - - if is_exc: - hashability = Hashability.LEAVE_ALONE - elif hash is True: - hashability = ( - Hashability.HASHABLE_CACHED - if cache_hash - else Hashability.HASHABLE - ) - elif hash is False: - hashability = Hashability.LEAVE_ALONE - elif hash is None: - if auto_detect is True and _has_own_attribute(cls, "__hash__"): - hashability = Hashability.LEAVE_ALONE - elif eq is True and is_frozen is True: - hashability = ( - Hashability.HASHABLE_CACHED - if cache_hash - else Hashability.HASHABLE - ) - elif eq is False: - hashability = Hashability.LEAVE_ALONE - else: - hashability = Hashability.UNHASHABLE - else: - msg = "Invalid value for hash. Must be True, False, or None." - raise TypeError(msg) - - KeywordOnly = ClassProps.KeywordOnly - if kw_only: - kwo = KeywordOnly.FORCE if force_kw_only else KeywordOnly.YES - else: - kwo = KeywordOnly.NO - - props = ClassProps( - is_exception=is_exc, - is_frozen=is_frozen, - is_slotted=slots, - collected_fields_by_mro=collect_by_mro, - added_init=_determine_whether_to_implement( - cls, init, auto_detect, ("__init__",) - ), - added_repr=_determine_whether_to_implement( - cls, repr, auto_detect, ("__repr__",) - ), - added_eq=eq, - added_ordering=not is_exc - and _determine_whether_to_implement( - cls, - order_, - auto_detect, - ("__lt__", "__le__", "__gt__", "__ge__"), - ), - hashability=hashability, - added_match_args=match_args, - kw_only=kwo, - has_weakref_slot=weakref_slot, - added_str=str, - added_pickling=_determine_whether_to_implement( - cls, - getstate_setstate, - auto_detect, - ("__getstate__", "__setstate__"), - default=slots, - ), - on_setattr_hook=on_setattr, - field_transformer=field_transformer, - ) - - if not props.is_hashable and cache_hash: - msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." 
- raise TypeError(msg) - - builder = _ClassBuilder( - cls, - these, - auto_attribs=auto_attribs, - props=props, - has_custom_setattr=has_own_setattr, - ) - - if props.added_repr: - builder.add_repr(repr_ns) - - if props.added_str: - builder.add_str() - - if props.added_eq: - builder.add_eq() - if props.added_ordering: - builder.add_order() - - if not frozen: - builder.add_setattr() - - if props.is_hashable: - builder.add_hash() - elif props.hashability is Hashability.UNHASHABLE: - builder.make_unhashable() - - if props.added_init: - builder.add_init() - else: - builder.add_attrs_init() - if cache_hash: - msg = "Invalid value for cache_hash. To use hash caching, init must be True." - raise TypeError(msg) - - if PY_3_13_PLUS and not _has_own_attribute(cls, "__replace__"): - builder.add_replace() - - if ( - PY_3_10_PLUS - and match_args - and not _has_own_attribute(cls, "__match_args__") - ): - builder.add_match_args() - - return builder.build_class() - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but `None` if used as `@attrs()`. - if maybe_cls is None: - return wrap - - return wrap(maybe_cls) - - -_attrs = attrs -""" -Internal alias so we can use it in functions that take an argument called -*attrs*. -""" - - -def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return cls.__setattr__ is _frozen_setattrs - - -def _generate_unique_filename(cls: type, func_name: str) -> str: - """ - Create a "filename" suitable for a function being generated. - """ - return ( - f"" - ) - - -def _make_hash_script( - cls: type, attrs: list[Attribute], frozen: bool, cache_hash: bool -) -> tuple[str, dict]: - attrs = tuple( - a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) - ) - - tab = " " - - type_hash = hash(_generate_unique_filename(cls, "hash")) - # If eq is custom generated, we need to include the functions in globs - globs = {} - - hash_def = "def __hash__(self" - hash_func = "hash((" - closing_braces = "))" - if not cache_hash: - hash_def += "):" - else: - hash_def += ", *" - - hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" - hash_func = "_cache_wrapper(" + hash_func - closing_braces += ")" - - method_lines = [hash_def] - - def append_hash_computation_lines(prefix, indent): - """ - Generate the code for actually computing the hash code. 
- Below this will either be returned directly or used to compute - a value which is then cached, depending on the value of cache_hash - """ - - method_lines.extend( - [ - indent + prefix + hash_func, - indent + f" {type_hash},", - ] - ) - - for a in attrs: - if a.eq_key: - cmp_name = f"_{a.name}_key" - globs[cmp_name] = a.eq_key - method_lines.append( - indent + f" {cmp_name}(self.{a.name})," - ) - else: - method_lines.append(indent + f" self.{a.name},") - - method_lines.append(indent + " " + closing_braces) - - if cache_hash: - method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:") - if frozen: - append_hash_computation_lines( - f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2 - ) - method_lines.append(tab * 2 + ")") # close __setattr__ - else: - append_hash_computation_lines( - f"self.{_HASH_CACHE_FIELD} = ", tab * 2 - ) - method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}") - else: - append_hash_computation_lines("return ", tab) - - script = "\n".join(method_lines) - return script, globs - - -def _add_hash(cls: type, attrs: list[Attribute]): - """ - Add a hash method to *cls*. - """ - script, globs = _make_hash_script( - cls, attrs, frozen=False, cache_hash=False - ) - _compile_and_eval( - script, globs, filename=_generate_unique_filename(cls, "__hash__") - ) - cls.__hash__ = globs["__hash__"] - return cls - - -def __ne__(self, other): - """ - Check equality and either forward a NotImplemented or - return the result negated. - """ - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - - return not result - - -def _make_eq_script(attrs: list) -> tuple[str, dict]: - """ - Create __eq__ method for *cls* with *attrs*. - """ - attrs = [a for a in attrs if a.eq] - - lines = [ - "def __eq__(self, other):", - " if other.__class__ is not self.__class__:", - " return NotImplemented", - ] - - globs = {} - if attrs: - lines.append(" return (") - for a in attrs: - if a.eq_key: - cmp_name = f"_{a.name}_key" - # Add the key function to the global namespace - # of the evaluated function. - globs[cmp_name] = a.eq_key - lines.append( - f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})" - ) - else: - lines.append(f" self.{a.name} == other.{a.name}") - if a is not attrs[-1]: - lines[-1] = f"{lines[-1]} and" - lines.append(" )") - else: - lines.append(" return True") - - script = "\n".join(lines) - - return script, globs - - -def _make_order(cls, attrs): - """ - Create ordering methods for *cls* with *attrs*. - """ - attrs = [a for a in attrs if a.order] - - def attrs_to_tuple(obj): - """ - Save us some typing. - """ - return tuple( - key(value) if key else value - for value, key in ( - (getattr(obj, a.name), a.order_key) for a in attrs - ) - ) - - def __lt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) < attrs_to_tuple(other) - - return NotImplemented - - def __le__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) <= attrs_to_tuple(other) - - return NotImplemented - - def __gt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) > attrs_to_tuple(other) - - return NotImplemented - - def __ge__(self, other): - """ - Automatically created by attrs. 
- """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) >= attrs_to_tuple(other) - - return NotImplemented - - return __lt__, __le__, __gt__, __ge__ - - -def _add_eq(cls, attrs=None): - """ - Add equality methods to *cls* with *attrs*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - script, globs = _make_eq_script(attrs) - _compile_and_eval( - script, globs, filename=_generate_unique_filename(cls, "__eq__") - ) - cls.__eq__ = globs["__eq__"] - cls.__ne__ = __ne__ - - return cls - - -def _make_repr_script(attrs, ns) -> tuple[str, dict]: - """ - Create the source and globs for a __repr__ and return it. - """ - # Figure out which attributes to include, and which function to use to - # format them. The a.repr value can be either bool or a custom - # callable. - attr_names_with_reprs = tuple( - (a.name, (repr if a.repr is True else a.repr), a.init) - for a in attrs - if a.repr is not False - ) - globs = { - name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr - } - globs["_compat"] = _compat - globs["AttributeError"] = AttributeError - globs["NOTHING"] = NOTHING - attribute_fragments = [] - for name, r, i in attr_names_with_reprs: - accessor = ( - "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' - ) - fragment = ( - "%s={%s!r}" % (name, accessor) - if r == repr - else "%s={%s_repr(%s)}" % (name, name, accessor) - ) - attribute_fragments.append(fragment) - repr_fragment = ", ".join(attribute_fragments) - - if ns is None: - cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' - else: - cls_name_fragment = ns + ".{self.__class__.__name__}" - - lines = [ - "def __repr__(self):", - " try:", - " already_repring = _compat.repr_context.already_repring", - " except AttributeError:", - " already_repring = {id(self),}", - " _compat.repr_context.already_repring = already_repring", - " else:", - " if id(self) in already_repring:", - " return '...'", - " else:", - " already_repring.add(id(self))", - " try:", - f" return f'{cls_name_fragment}({repr_fragment})'", - " finally:", - " already_repring.remove(id(self))", - ] - - return "\n".join(lines), globs - - -def _add_repr(cls, ns=None, attrs=None): - """ - Add a repr method to *cls*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - script, globs = _make_repr_script(attrs, ns) - _compile_and_eval( - script, globs, filename=_generate_unique_filename(cls, "__repr__") - ) - cls.__repr__ = globs["__repr__"] - return cls - - -def fields(cls): - """ - Return the tuple of *attrs* attributes for a class. - - The tuple also allows accessing the fields by their names (see below for - examples). - - Args: - cls (type): Class to introspect. - - Raises: - TypeError: If *cls* is not a class. - - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class. - - Returns: - tuple (with name accessors) of `attrs.Attribute` - - .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields - by name. - .. versionchanged:: 23.1.0 Add support for generic classes. - """ - generic_base = get_generic_base(cls) - - if generic_base is None and not isinstance(cls, type): - msg = "Passed object must be a class." - raise TypeError(msg) - - attrs = getattr(cls, "__attrs_attrs__", None) - - if attrs is None: - if generic_base is not None: - attrs = getattr(generic_base, "__attrs_attrs__", None) - if attrs is not None: - # Even though this is global state, stick it on here to speed - # it up. We rely on `cls` being cached for this to be - # efficient. 
- cls.__attrs_attrs__ = attrs - return attrs - msg = f"{cls!r} is not an attrs-decorated class." - raise NotAnAttrsClassError(msg) - - return attrs - - -def fields_dict(cls): - """ - Return an ordered dictionary of *attrs* attributes for a class, whose keys - are the attribute names. - - Args: - cls (type): Class to introspect. - - Raises: - TypeError: If *cls* is not a class. - - attrs.exceptions.NotAnAttrsClassError: - If *cls* is not an *attrs* class. - - Returns: - dict[str, attrs.Attribute]: Dict of attribute name to definition - - .. versionadded:: 18.1.0 - """ - if not isinstance(cls, type): - msg = "Passed object must be a class." - raise TypeError(msg) - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - msg = f"{cls!r} is not an attrs-decorated class." - raise NotAnAttrsClassError(msg) - return {a.name: a for a in attrs} - - -def validate(inst): - """ - Validate all attributes on *inst* that have a validator. - - Leaves all exceptions through. - - Args: - inst: Instance of a class with *attrs* attributes. - """ - if _config._run_validators is False: - return - - for a in fields(inst.__class__): - v = a.validator - if v is not None: - v(inst, a, getattr(inst, a.name)) - - -def _is_slot_attr(a_name, base_attr_map): - """ - Check if the attribute name comes from a slot class. - """ - cls = base_attr_map.get(a_name) - return cls and "__slots__" in cls.__dict__ - - -def _make_init_script( - cls, - attrs, - pre_init, - pre_init_has_args, - post_init, - frozen, - slots, - cache_hash, - base_attr_map, - is_exc, - cls_on_setattr, - attrs_init, -) -> tuple[str, dict, dict]: - has_cls_on_setattr = ( - cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP - ) - - if frozen and has_cls_on_setattr: - msg = "Frozen classes can't use on_setattr." - raise ValueError(msg) - - needs_cached_setattr = cache_hash or frozen - filtered_attrs = [] - attr_dict = {} - for a in attrs: - if not a.init and a.default is NOTHING: - continue - - filtered_attrs.append(a) - attr_dict[a.name] = a - - if a.on_setattr is not None: - if frozen is True: - msg = "Frozen classes can't use on_setattr." - raise ValueError(msg) - - needs_cached_setattr = True - elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: - needs_cached_setattr = True - - script, globs, annotations = _attrs_to_init_script( - filtered_attrs, - frozen, - slots, - pre_init, - pre_init_has_args, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_cls_on_setattr, - "__attrs_init__" if attrs_init else "__init__", - ) - if cls.__module__ in sys.modules: - # This makes typing.get_type_hints(CLS.__init__) resolve string types. - globs.update(sys.modules[cls.__module__].__dict__) - - globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) - - if needs_cached_setattr: - # Save the lookup overhead in __init__ if we need to circumvent - # setattr hooks. - globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__ - - return script, globs, annotations - - -def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str: - """ - Use the cached object.setattr to set *attr_name* to *value_var*. - """ - return f"_setattr('{attr_name}', {value_var})" - - -def _setattr_with_converter( - attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter -) -> str: - """ - Use the cached object.setattr to set *attr_name* to *value_var*, but run - its converter first. 
- """ - return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})" - - -def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str: - """ - Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise - relegate to _setattr. - """ - if has_on_setattr: - return _setattr(attr_name, value, True) - - return f"self.{attr_name} = {value}" - - -def _assign_with_converter( - attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter -) -> str: - """ - Unless *attr_name* has an on_setattr hook, use normal assignment after - conversion. Otherwise relegate to _setattr_with_converter. - """ - if has_on_setattr: - return _setattr_with_converter(attr_name, value_var, True, converter) - - return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}" - - -def _determine_setters( - frozen: bool, slots: bool, base_attr_map: dict[str, type] -): - """ - Determine the correct setter functions based on whether a class is frozen - and/or slotted. - """ - if frozen is True: - if slots is True: - return (), _setattr, _setattr_with_converter - - # Dict frozen classes assign directly to __dict__. - # But only if the attribute doesn't come from an ancestor slot - # class. - # Note _inst_dict will be used again below if cache_hash is True - - def fmt_setter( - attr_name: str, value_var: str, has_on_setattr: bool - ) -> str: - if _is_slot_attr(attr_name, base_attr_map): - return _setattr(attr_name, value_var, has_on_setattr) - - return f"_inst_dict['{attr_name}'] = {value_var}" - - def fmt_setter_with_converter( - attr_name: str, - value_var: str, - has_on_setattr: bool, - converter: Converter, - ) -> str: - if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): - return _setattr_with_converter( - attr_name, value_var, has_on_setattr, converter - ) - - return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}" - - return ( - ("_inst_dict = self.__dict__",), - fmt_setter, - fmt_setter_with_converter, - ) - - # Not frozen -- we can just assign directly. - return (), _assign, _assign_with_converter - - -def _attrs_to_init_script( - attrs: list[Attribute], - is_frozen: bool, - is_slotted: bool, - call_pre_init: bool, - pre_init_has_args: bool, - call_post_init: bool, - does_cache_hash: bool, - base_attr_map: dict[str, type], - is_exc: bool, - needs_cached_setattr: bool, - has_cls_on_setattr: bool, - method_name: str, -) -> tuple[str, dict, dict]: - """ - Return a script of an initializer for *attrs*, a dict of globals, and - annotations for the initializer. - - The globals are required by the generated script. - """ - lines = ["self.__attrs_pre_init__()"] if call_pre_init else [] - - if needs_cached_setattr: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. Note _setattr will be used again below if - # does_cache_hash is True. - "_setattr = _cached_setattr_get(self)" - ) - - extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters( - is_frozen, is_slotted, base_attr_map - ) - lines.extend(extra_lines) - - args = [] # Parameters in the definition of __init__ - pre_init_args = [] # Parameters in the call to __attrs_pre_init__ - kw_only_args = [] # Used for both 'args' and 'pre_init_args' above - attrs_to_validate = [] - - # This is a dictionary of names to validator and converter callables. - # Injecting this into __init__ globals lets us avoid lookups. 
- names_for_globals = {} - annotations = {"return": None} - - for a in attrs: - if a.validator: - attrs_to_validate.append(a) - - attr_name = a.name - has_on_setattr = a.on_setattr is not None or ( - a.on_setattr is not setters.NO_OP and has_cls_on_setattr - ) - # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not - # explicitly provided - arg_name = a.alias - - has_factory = isinstance(a.default, Factory) - maybe_self = "self" if has_factory and a.default.takes_self else "" - - if a.converter is not None and not isinstance(a.converter, Converter): - converter = Converter(a.converter) - else: - converter = a.converter - - if a.init is False: - if has_factory: - init_factory_name = _INIT_FACTORY_PAT % (a.name,) - if converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - init_factory_name + f"({maybe_self})", - has_on_setattr, - converter, - ) - ) - names_for_globals[converter._get_global_name(a.name)] = ( - converter.converter - ) - else: - lines.append( - fmt_setter( - attr_name, - init_factory_name + f"({maybe_self})", - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - elif converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - f"attr_dict['{attr_name}'].default", - has_on_setattr, - converter, - ) - ) - names_for_globals[converter._get_global_name(a.name)] = ( - converter.converter - ) - else: - lines.append( - fmt_setter( - attr_name, - f"attr_dict['{attr_name}'].default", - has_on_setattr, - ) - ) - elif a.default is not NOTHING and not has_factory: - arg = f"{arg_name}=attr_dict['{attr_name}'].default" - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - pre_init_args.append(arg_name) - - if converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr, converter - ) - ) - names_for_globals[converter._get_global_name(a.name)] = ( - converter.converter - ) - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - elif has_factory: - arg = f"{arg_name}=NOTHING" - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - pre_init_args.append(arg_name) - lines.append(f"if {arg_name} is not NOTHING:") - - init_factory_name = _INIT_FACTORY_PAT % (a.name,) - if converter is not None: - lines.append( - " " - + fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr, converter - ) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter_with_converter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - converter, - ) - ) - names_for_globals[converter._get_global_name(a.name)] = ( - converter.converter - ) - else: - lines.append( - " " + fmt_setter(attr_name, arg_name, has_on_setattr) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.kw_only: - kw_only_args.append(arg_name) - else: - args.append(arg_name) - pre_init_args.append(arg_name) - - if converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr, converter - ) - ) - names_for_globals[converter._get_global_name(a.name)] = ( - converter.converter - ) - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - if a.init is True: - if a.type is not None and converter is None: - annotations[arg_name] = a.type - elif converter is not None and 
converter._first_param_type: - # Use the type from the converter if present. - annotations[arg_name] = converter._first_param_type - - if attrs_to_validate: # we can skip this if there are no validators. - names_for_globals["_config"] = _config - lines.append("if _config._run_validators is True:") - for a in attrs_to_validate: - val_name = "__attr_validator_" + a.name - attr_name = "__attr_" + a.name - lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") - names_for_globals[val_name] = a.validator - names_for_globals[attr_name] = a - - if call_post_init: - lines.append("self.__attrs_post_init__()") - - # Because this is set only after __attrs_post_init__ is called, a crash - # will result if post-init tries to access the hash code. This seemed - # preferable to setting this beforehand, in which case alteration to field - # values during post-init combined with post-init accessing the hash code - # would result in silent bugs. - if does_cache_hash: - if is_frozen: - if is_slotted: - init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)" - else: - init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None" - else: - init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None" - lines.append(init_hash_cache) - - # For exceptions we rely on BaseException.__init__ for proper - # initialization. - if is_exc: - vals = ",".join(f"self.{a.name}" for a in attrs if a.init) - - lines.append(f"BaseException.__init__(self, {vals})") - - args = ", ".join(args) - pre_init_args = ", ".join(pre_init_args) - if kw_only_args: - # leading comma & kw_only args - args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}" - pre_init_kw_only_args = ", ".join( - [ - f"{kw_arg_name}={kw_arg_name}" - # We need to remove the defaults from the kw_only_args. - for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args) - ] - ) - pre_init_args += ", " if pre_init_args else "" - pre_init_args += pre_init_kw_only_args - - if call_pre_init and pre_init_has_args: - # If pre init method has arguments, pass the values given to __init__. - lines[0] = f"self.__attrs_pre_init__({pre_init_args})" - - # Python <3.12 doesn't allow backslashes in f-strings. - NL = "\n " - return ( - f"""def {method_name}(self, {args}): - {NL.join(lines) if lines else "pass"} -""", - names_for_globals, - annotations, - ) - - -def _default_init_alias_for(name: str) -> str: - """ - The default __init__ parameter name for a field. - - This performs private-name adjustment via leading-unscore stripping, - and is the default value of Attribute.alias if not provided. - """ - - return name.lstrip("_") - - -class Attribute: - """ - *Read-only* representation of an attribute. - - .. warning:: - - You should never instantiate this class yourself. - - The class has *all* arguments of `attr.ib` (except for ``factory`` which is - only syntactic sugar for ``default=Factory(...)`` plus the following: - - - ``name`` (`str`): The name of the attribute. - - ``alias`` (`str`): The __init__ parameter name of the attribute, after - any explicit overrides and default private-attribute-name handling. - - ``inherited`` (`bool`): Whether or not that attribute has been inherited - from a base class. - - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The - callables that are used for comparing and ordering objects by this - attribute, respectively. These are set by passing a callable to - `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also - :ref:`comparison customization `. 
- - Instances of this class are frequently used for introspection purposes - like: - - - `fields` returns a tuple of them. - - Validators get them passed as the first argument. - - The :ref:`field transformer ` hook receives a list of - them. - - The ``alias`` property exposes the __init__ parameter name of the field, - with any overrides and default private-attribute handling applied. - - - .. versionadded:: 20.1.0 *inherited* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.2.0 *inherited* is not taken into account for - equality checks and hashing anymore. - .. versionadded:: 21.1.0 *eq_key* and *order_key* - .. versionadded:: 22.2.0 *alias* - - For the full version history of the fields, see `attr.ib`. - """ - - # These slots must NOT be reordered because we use them later for - # instantiation. - __slots__ = ( # noqa: RUF023 - "name", - "default", - "validator", - "repr", - "eq", - "eq_key", - "order", - "order_key", - "hash", - "init", - "metadata", - "type", - "converter", - "kw_only", - "inherited", - "on_setattr", - "alias", - ) - - def __init__( - self, - name, - default, - validator, - repr, - cmp, # XXX: unused, remove along with other cmp code. - hash, - init, - inherited, - metadata=None, - type=None, - converter=None, - kw_only=False, - eq=None, - eq_key=None, - order=None, - order_key=None, - on_setattr=None, - alias=None, - ): - eq, eq_key, order, order_key = _determine_attrib_eq_order( - cmp, eq_key or eq, order_key or order, True - ) - - # Cache this descriptor here to speed things up later. - bound_setattr = _OBJ_SETATTR.__get__(self) - - # Despite the big red warning, people *do* instantiate `Attribute` - # themselves. - bound_setattr("name", name) - bound_setattr("default", default) - bound_setattr("validator", validator) - bound_setattr("repr", repr) - bound_setattr("eq", eq) - bound_setattr("eq_key", eq_key) - bound_setattr("order", order) - bound_setattr("order_key", order_key) - bound_setattr("hash", hash) - bound_setattr("init", init) - bound_setattr("converter", converter) - bound_setattr( - "metadata", - ( - types.MappingProxyType(dict(metadata)) # Shallow copy - if metadata - else _EMPTY_METADATA_SINGLETON - ), - ) - bound_setattr("type", type) - bound_setattr("kw_only", kw_only) - bound_setattr("inherited", inherited) - bound_setattr("on_setattr", on_setattr) - bound_setattr("alias", alias) - - def __setattr__(self, name, value): - raise FrozenInstanceError - - @classmethod - def from_counting_attr( - cls, name: str, ca: _CountingAttr, kw_only: bool, type=None - ): - # The 'kw_only' argument is the class-level setting, and is used if the - # attribute itself does not explicitly set 'kw_only'. - # type holds the annotated value. deal with conflicts: - if type is None: - type = ca.type - elif ca.type is not None: - msg = f"Type annotation and type argument cannot both be present for '{name}'." - raise ValueError(msg) - return cls( - name, - ca._default, - ca._validator, - ca.repr, - None, - ca.hash, - ca.init, - False, - ca.metadata, - type, - ca.converter, - kw_only if ca.kw_only is None else ca.kw_only, - ca.eq, - ca.eq_key, - ca.order, - ca.order_key, - ca.on_setattr, - ca.alias, - ) - - # Don't use attrs.evolve since fields(Attribute) doesn't work - def evolve(self, **changes): - """ - Copy *self* and apply *changes*. - - This works similarly to `attrs.evolve` but that function does not work - with :class:`attrs.Attribute`. - - It is mainly meant to be used for `transform-fields`. - - .. 
versionadded:: 20.3.0 - """ - new = copy.copy(self) - - new._setattrs(changes.items()) - - return new - - # Don't use _add_pickle since fields(Attribute) doesn't work - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple( - getattr(self, name) if name != "metadata" else dict(self.metadata) - for name in self.__slots__ - ) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - self._setattrs(zip(self.__slots__, state)) - - def _setattrs(self, name_values_pairs): - bound_setattr = _OBJ_SETATTR.__get__(self) - for name, value in name_values_pairs: - if name != "metadata": - bound_setattr(name, value) - else: - bound_setattr( - name, - ( - types.MappingProxyType(dict(value)) - if value - else _EMPTY_METADATA_SINGLETON - ), - ) - - -_a = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=(name != "metadata"), - init=True, - inherited=False, - alias=_default_init_alias_for(name), - ) - for name in Attribute.__slots__ -] - -Attribute = _add_hash( - _add_eq( - _add_repr(Attribute, attrs=_a), - attrs=[a for a in _a if a.name != "inherited"], - ), - attrs=[a for a in _a if a.hash and a.name != "inherited"], -) - - -class _CountingAttr: - """ - Intermediate representation of attributes that uses a counter to preserve - the order in which the attributes have been defined. - - *Internal* data structure of the attrs library. Running into is most - likely the result of a bug like a forgotten `@attr.s` decorator. - """ - - __slots__ = ( - "_default", - "_validator", - "alias", - "converter", - "counter", - "eq", - "eq_key", - "hash", - "init", - "kw_only", - "metadata", - "on_setattr", - "order", - "order_key", - "repr", - "type", - ) - __attrs_attrs__ = ( - *tuple( - Attribute( - name=name, - alias=_default_init_alias_for(name), - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=True, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ) - for name in ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "on_setattr", - "alias", - ) - ), - Attribute( - name="metadata", - alias="metadata", - default=None, - validator=None, - repr=True, - cmp=None, - hash=False, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ), - ) - cls_counter = 0 - - def __init__( - self, - default, - validator, - repr, - cmp, - hash, - init, - converter, - metadata, - type, - kw_only, - eq, - eq_key, - order, - order_key, - on_setattr, - alias, - ): - _CountingAttr.cls_counter += 1 - self.counter = _CountingAttr.cls_counter - self._default = default - self._validator = validator - self.converter = converter - self.repr = repr - self.eq = eq - self.eq_key = eq_key - self.order = order - self.order_key = order_key - self.hash = hash - self.init = init - self.metadata = metadata - self.type = type - self.kw_only = kw_only - self.on_setattr = on_setattr - self.alias = alias - - def validator(self, meth): - """ - Decorator that adds *meth* to the list of validators. - - Returns *meth* unchanged. - - .. versionadded:: 17.1.0 - """ - if self._validator is None: - self._validator = meth - else: - self._validator = and_(self._validator, meth) - return meth - - def default(self, meth): - """ - Decorator that allows to set the default for an attribute. - - Returns *meth* unchanged. 
- - Raises: - DefaultAlreadySetError: If default has been set before. - - .. versionadded:: 17.1.0 - """ - if self._default is not NOTHING: - raise DefaultAlreadySetError - - self._default = Factory(meth, takes_self=True) - - return meth - - -_CountingAttr = _add_eq(_add_repr(_CountingAttr)) - - -class ClassProps: - """ - Effective class properties as derived from parameters to `attr.s()` or - `define()` decorators. - - This is the same data structure that *attrs* uses internally to decide how - to construct the final class. - - Warning: - - This feature is currently **experimental** and is not covered by our - strict backwards-compatibility guarantees. - - - Attributes: - is_exception (bool): - Whether the class is treated as an exception class. - - is_slotted (bool): - Whether the class is `slotted `. - - has_weakref_slot (bool): - Whether the class has a slot for weak references. - - is_frozen (bool): - Whether the class is frozen. - - kw_only (KeywordOnly): - Whether / how the class enforces keyword-only arguments on the - ``__init__`` method. - - collected_fields_by_mro (bool): - Whether the class fields were collected by method resolution order. - That is, correctly but unlike `dataclasses`. - - added_init (bool): - Whether the class has an *attrs*-generated ``__init__`` method. - - added_repr (bool): - Whether the class has an *attrs*-generated ``__repr__`` method. - - added_eq (bool): - Whether the class has *attrs*-generated equality methods. - - added_ordering (bool): - Whether the class has *attrs*-generated ordering methods. - - hashability (Hashability): How `hashable ` the class is. - - added_match_args (bool): - Whether the class supports positional `match ` over its - fields. - - added_str (bool): - Whether the class has an *attrs*-generated ``__str__`` method. - - added_pickling (bool): - Whether the class has *attrs*-generated ``__getstate__`` and - ``__setstate__`` methods for `pickle`. - - on_setattr_hook (Callable[[Any, Attribute[Any], Any], Any] | None): - The class's ``__setattr__`` hook. - - field_transformer (Callable[[Attribute[Any]], Attribute[Any]] | None): - The class's `field transformers `. - - .. versionadded:: 25.4.0 - """ - - class Hashability(enum.Enum): - """ - The hashability of a class. - - .. versionadded:: 25.4.0 - """ - - HASHABLE = "hashable" - """Write a ``__hash__``.""" - HASHABLE_CACHED = "hashable_cache" - """Write a ``__hash__`` and cache the hash.""" - UNHASHABLE = "unhashable" - """Set ``__hash__`` to ``None``.""" - LEAVE_ALONE = "leave_alone" - """Don't touch ``__hash__``.""" - - class KeywordOnly(enum.Enum): - """ - How attributes should be treated regarding keyword-only parameters. - - .. 
versionadded:: 25.4.0 - """ - - NO = "no" - """Attributes are not keyword-only.""" - YES = "yes" - """Attributes in current class without kw_only=False are keyword-only.""" - FORCE = "force" - """All attributes are keyword-only.""" - - __slots__ = ( # noqa: RUF023 -- order matters for __init__ - "is_exception", - "is_slotted", - "has_weakref_slot", - "is_frozen", - "kw_only", - "collected_fields_by_mro", - "added_init", - "added_repr", - "added_eq", - "added_ordering", - "hashability", - "added_match_args", - "added_str", - "added_pickling", - "on_setattr_hook", - "field_transformer", - ) - - def __init__( - self, - is_exception, - is_slotted, - has_weakref_slot, - is_frozen, - kw_only, - collected_fields_by_mro, - added_init, - added_repr, - added_eq, - added_ordering, - hashability, - added_match_args, - added_str, - added_pickling, - on_setattr_hook, - field_transformer, - ): - self.is_exception = is_exception - self.is_slotted = is_slotted - self.has_weakref_slot = has_weakref_slot - self.is_frozen = is_frozen - self.kw_only = kw_only - self.collected_fields_by_mro = collected_fields_by_mro - self.added_init = added_init - self.added_repr = added_repr - self.added_eq = added_eq - self.added_ordering = added_ordering - self.hashability = hashability - self.added_match_args = added_match_args - self.added_str = added_str - self.added_pickling = added_pickling - self.on_setattr_hook = on_setattr_hook - self.field_transformer = field_transformer - - @property - def is_hashable(self): - return ( - self.hashability is ClassProps.Hashability.HASHABLE - or self.hashability is ClassProps.Hashability.HASHABLE_CACHED - ) - - -_cas = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=True, - init=True, - inherited=False, - alias=_default_init_alias_for(name), - ) - for name in ClassProps.__slots__ -] - -ClassProps = _add_eq(_add_repr(ClassProps, attrs=_cas), attrs=_cas) - - -class Factory: - """ - Stores a factory callable. - - If passed as the default value to `attrs.field`, the factory is used to - generate a new value. - - Args: - factory (typing.Callable): - A callable that takes either none or exactly one mandatory - positional argument depending on *takes_self*. - - takes_self (bool): - Pass the partially initialized instance that is being initialized - as a positional argument. - - .. versionadded:: 17.1.0 *takes_self* - """ - - __slots__ = ("factory", "takes_self") - - def __init__(self, factory, takes_self=False): - self.factory = factory - self.takes_self = takes_self - - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple(getattr(self, name) for name in self.__slots__) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - for name, value in zip(self.__slots__, state): - setattr(self, name, value) - - -_f = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=True, - init=True, - inherited=False, - ) - for name in Factory.__slots__ -] - -Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) - - -class Converter: - """ - Stores a converter callable. - - Allows for the wrapped converter to take additional arguments. The - arguments are passed in the order they are documented. - - Args: - converter (Callable): A callable that converts the passed value. - - takes_self (bool): - Pass the partially initialized instance that is being initialized - as a positional argument. 
(default: `False`) - - takes_field (bool): - Pass the field definition (an :class:`Attribute`) into the - converter as a positional argument. (default: `False`) - - .. versionadded:: 24.1.0 - """ - - __slots__ = ( - "__call__", - "_first_param_type", - "_global_name", - "converter", - "takes_field", - "takes_self", - ) - - def __init__(self, converter, *, takes_self=False, takes_field=False): - self.converter = converter - self.takes_self = takes_self - self.takes_field = takes_field - - ex = _AnnotationExtractor(converter) - self._first_param_type = ex.get_first_param_type() - - if not (self.takes_self or self.takes_field): - self.__call__ = lambda value, _, __: self.converter(value) - elif self.takes_self and not self.takes_field: - self.__call__ = lambda value, instance, __: self.converter( - value, instance - ) - elif not self.takes_self and self.takes_field: - self.__call__ = lambda value, __, field: self.converter( - value, field - ) - else: - self.__call__ = lambda value, instance, field: self.converter( - value, instance, field - ) - - rt = ex.get_return_type() - if rt is not None: - self.__call__.__annotations__["return"] = rt - - @staticmethod - def _get_global_name(attr_name: str) -> str: - """ - Return the name that a converter for an attribute name *attr_name* - would have. - """ - return f"__attr_converter_{attr_name}" - - def _fmt_converter_call(self, attr_name: str, value_var: str) -> str: - """ - Return a string that calls the converter for an attribute name - *attr_name* and the value in variable named *value_var* according to - `self.takes_self` and `self.takes_field`. - """ - if not (self.takes_self or self.takes_field): - return f"{self._get_global_name(attr_name)}({value_var})" - - if self.takes_self and self.takes_field: - return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])" - - if self.takes_self: - return f"{self._get_global_name(attr_name)}({value_var}, self)" - - return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])" - - def __getstate__(self): - """ - Return a dict containing only converter and takes_self -- the rest gets - computed when loading. - """ - return { - "converter": self.converter, - "takes_self": self.takes_self, - "takes_field": self.takes_field, - } - - def __setstate__(self, state): - """ - Load instance from state. - """ - self.__init__(**state) - - -_f = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=True, - init=True, - inherited=False, - ) - for name in ("converter", "takes_self", "takes_field") -] - -Converter = _add_hash( - _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f -) - - -def make_class( - name, attrs, bases=(object,), class_body=None, **attributes_arguments -): - r""" - A quick way to create a new class called *name* with *attrs*. - - .. note:: - - ``make_class()`` is a thin wrapper around `attr.s`, not `attrs.define` - which means that it doesn't come with some of the improved defaults. - - For example, if you want the same ``on_setattr`` behavior as in - `attrs.define`, you have to pass the hooks yourself: ``make_class(..., - on_setattr=setters.pipe(setters.convert, setters.validate)`` - - .. warning:: - - It is *your* duty to ensure that the class name and the attribute names - are valid identifiers. ``make_class()`` will *not* validate them for - you. - - Args: - name (str): The name for the new class. 
- - attrs (list | dict): - A list of names or a dictionary of mappings of names to `attr.ib`\ - s / `attrs.field`\ s. - - The order is deduced from the order of the names or attributes - inside *attrs*. Otherwise the order of the definition of the - attributes is used. - - bases (tuple[type, ...]): Classes that the new class will subclass. - - class_body (dict): - An optional dictionary of class attributes for the new class. - - attributes_arguments: Passed unmodified to `attr.s`. - - Returns: - type: A new class with *attrs*. - - .. versionadded:: 17.1.0 *bases* - .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. - .. versionchanged:: 23.2.0 *class_body* - .. versionchanged:: 25.2.0 Class names can now be unicode. - """ - # Class identifiers are converted into the normal form NFKC while parsing - name = unicodedata.normalize("NFKC", name) - - if isinstance(attrs, dict): - cls_dict = attrs - elif isinstance(attrs, (list, tuple)): - cls_dict = {a: attrib() for a in attrs} - else: - msg = "attrs argument must be a dict or a list." - raise TypeError(msg) - - pre_init = cls_dict.pop("__attrs_pre_init__", None) - post_init = cls_dict.pop("__attrs_post_init__", None) - user_init = cls_dict.pop("__init__", None) - - body = {} - if class_body is not None: - body.update(class_body) - if pre_init is not None: - body["__attrs_pre_init__"] = pre_init - if post_init is not None: - body["__attrs_post_init__"] = post_init - if user_init is not None: - body["__init__"] = user_init - - type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) - - # For pickling to work, the __module__ variable needs to be set to the - # frame where the class is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - with contextlib.suppress(AttributeError, ValueError): - type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__" - ) - - # We do it here for proper warnings with meaningful stacklevel. - cmp = attributes_arguments.pop("cmp", None) - ( - attributes_arguments["eq"], - attributes_arguments["order"], - ) = _determine_attrs_eq_order( - cmp, - attributes_arguments.get("eq"), - attributes_arguments.get("order"), - True, - ) - - cls = _attrs(these=cls_dict, **attributes_arguments)(type_) - # Only add type annotations now or "_attrs()" will complain: - cls.__annotations__ = { - k: v.type for k, v in cls_dict.items() if v.type is not None - } - return cls - - -# These are required by within this module so we define them here and merely -# import into .validators / .converters. - - -@attrs(slots=True, unsafe_hash=True) -class _AndValidator: - """ - Compose many validators to a single one. - """ - - _validators = attrib() - - def __call__(self, inst, attr, value): - for v in self._validators: - v(inst, attr, value) - - -def and_(*validators): - """ - A validator that composes multiple validators into one. - - When called on a value, it runs all wrapped validators. - - Args: - validators (~collections.abc.Iterable[typing.Callable]): - Arbitrary number of validators. - - .. versionadded:: 17.1.0 - """ - vals = [] - for validator in validators: - vals.extend( - validator._validators - if isinstance(validator, _AndValidator) - else [validator] - ) - - return _AndValidator(tuple(vals)) - - -def pipe(*converters): - """ - A converter that composes multiple converters into one. 
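For readers skimming this removal, a minimal sketch of how `attr.make_class` builds a class at runtime; the `Point` and `Account` names and fields are illustrative, not part of this project:

```python
import attr

# From a list of names: each entry becomes a plain attr.ib().
Point = attr.make_class("Point", ["x", "y"])
print(Point(1, 2))  # Point(x=1, y=2)

# From a dict: full control over each attribute; extra keyword
# arguments (here frozen=True) are passed through to attr.s.
Account = attr.make_class(
    "Account",
    {
        "owner": attr.ib(),
        "balance": attr.ib(default=0, validator=attr.validators.instance_of(int)),
    },
    frozen=True,
)
print(Account("alice"))  # Account(owner='alice', balance=0)
```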
- - When called on a value, it runs all wrapped converters, returning the - *last* value. - - Type annotations will be inferred from the wrapped converters', if they - have any. - - converters (~collections.abc.Iterable[typing.Callable]): - Arbitrary number of converters. - - .. versionadded:: 20.1.0 - """ - - return_instance = any(isinstance(c, Converter) for c in converters) - - if return_instance: - - def pipe_converter(val, inst, field): - for c in converters: - val = ( - c(val, inst, field) if isinstance(c, Converter) else c(val) - ) - - return val - - else: - - def pipe_converter(val): - for c in converters: - val = c(val) - - return val - - if not converters: - # If the converter list is empty, pipe_converter is the identity. - A = TypeVar("A") - pipe_converter.__annotations__.update({"val": A, "return": A}) - else: - # Get parameter type from first converter. - t = _AnnotationExtractor(converters[0]).get_first_param_type() - if t: - pipe_converter.__annotations__["val"] = t - - last = converters[-1] - if not PY_3_11_PLUS and isinstance(last, Converter): - last = last.__call__ - - # Get return type from last converter. - rt = _AnnotationExtractor(last).get_return_type() - if rt: - pipe_converter.__annotations__["return"] = rt - - if return_instance: - return Converter(pipe_converter, takes_self=True, takes_field=True) - return pipe_converter diff --git a/venv/Lib/site-packages/attr/_next_gen.py b/venv/Lib/site-packages/attr/_next_gen.py deleted file mode 100644 index 4ccd0da..0000000 --- a/venv/Lib/site-packages/attr/_next_gen.py +++ /dev/null @@ -1,674 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -These are keyword-only APIs that call `attr.s` and `attr.ib` with different -default values. -""" - -from functools import partial - -from . import setters -from ._funcs import asdict as _asdict -from ._funcs import astuple as _astuple -from ._make import ( - _DEFAULT_ON_SETATTR, - NOTHING, - _frozen_setattrs, - attrib, - attrs, -) -from .exceptions import NotAnAttrsClassError, UnannotatedAttributeError - - -def define( - maybe_cls=None, - *, - these=None, - repr=None, - unsafe_hash=None, - hash=None, - init=None, - slots=True, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=None, - kw_only=False, - cache_hash=False, - auto_exc=True, - eq=None, - order=False, - auto_detect=True, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, - match_args=True, - force_kw_only=False, -): - r""" - A class decorator that adds :term:`dunder methods` according to - :term:`fields ` specified using :doc:`type annotations `, - `field()` calls, or the *these* argument. - - Since *attrs* patches or replaces an existing class, you cannot use - `object.__init_subclass__` with *attrs* classes, because it runs too early. - As a replacement, you can define ``__attrs_init_subclass__`` on your class. - It will be called by *attrs* classes that subclass it after they're - created. See also :ref:`init-subclass`. - - Args: - slots (bool): - Create a :term:`slotted class ` that's more - memory-efficient. Slotted classes are generally superior to the - default dict classes, but have some gotchas you should know about, - so we encourage you to read the :term:`glossary entry `. - - auto_detect (bool): - Instead of setting the *init*, *repr*, *eq*, and *hash* arguments - explicitly, assume they are set to True **unless any** of the - involved methods for one of the arguments is implemented in the - *current* class (meaning, it is *not* inherited from some base - class). 
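A short sketch of composing converters with `pipe`, using only the public `attrs` API; the `Port` class is made up for illustration:

```python
from attrs import converters, define, field

# Run str.strip first, then int -- pipe() returns a single converter.
to_stripped_int = converters.pipe(str.strip, int)

@define
class Port:
    number: int = field(converter=to_stripped_int)

print(Port(" 8080 "))  # Port(number=8080)
```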
- - So, for example by implementing ``__eq__`` on a class yourself, - *attrs* will deduce ``eq=False`` and will create *neither* - ``__eq__`` *nor* ``__ne__`` (but Python classes come with a - sensible ``__ne__`` by default, so it *should* be enough to only - implement ``__eq__`` in most cases). - - Passing :data:`True` or :data:`False` to *init*, *repr*, *eq*, or *hash* - overrides whatever *auto_detect* would determine. - - auto_exc (bool): - If the class subclasses `BaseException` (which implicitly includes - any subclass of any exception), the following happens to behave - like a well-behaved Python exception class: - - - the values for *eq*, *order*, and *hash* are ignored and the - instances compare and hash by the instance's ids [#]_ , - - all attributes that are either passed into ``__init__`` or have a - default value are additionally available as a tuple in the - ``args`` attribute, - - the value of *str* is ignored leaving ``__str__`` to base - classes. - - .. [#] - Note that *attrs* will *not* remove existing implementations of - ``__hash__`` or the equality methods. It just won't add own - ones. - - on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]): - A callable that is run whenever the user attempts to set an - attribute (either by assignment like ``i.x = 42`` or by using - `setattr` like ``setattr(i, "x", 42)``). It receives the same - arguments as validators: the instance, the attribute that is being - modified, and the new value. - - If no exception is raised, the attribute is set to the return value - of the callable. - - If a list of callables is passed, they're automatically wrapped in - an `attrs.setters.pipe`. - - If left None, the default behavior is to run converters and - validators whenever an attribute is set. - - init (bool): - Create a ``__init__`` method that initializes the *attrs* - attributes. Leading underscores are stripped for the argument name, - unless an alias is set on the attribute. - - .. seealso:: - `init` shows advanced ways to customize the generated - ``__init__`` method, including executing code before and after. - - repr(bool): - Create a ``__repr__`` method with a human readable representation - of *attrs* attributes. - - str (bool): - Create a ``__str__`` method that is identical to ``__repr__``. This - is usually not necessary except for `Exception`\ s. - - eq (bool | None): - If True or None (default), add ``__eq__`` and ``__ne__`` methods - that check two instances for equality. - - .. seealso:: - `comparison` describes how to customize the comparison behavior - going as far comparing NumPy arrays. - - order (bool | None): - If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` - methods that behave like *eq* above and allow instances to be - ordered. - - They compare the instances as if they were tuples of their *attrs* - attributes if and only if the types of both classes are - *identical*. - - If `None` mirror value of *eq*. - - .. seealso:: `comparison` - - unsafe_hash (bool | None): - If None (default), the ``__hash__`` method is generated according - how *eq* and *frozen* are set. - - 1. If *both* are True, *attrs* will generate a ``__hash__`` for - you. - 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set - to None, marking it unhashable (which it is). - 3. If *eq* is False, ``__hash__`` will be left untouched meaning - the ``__hash__`` method of the base class will be used. 
If the - base class is `object`, this means it will fall back to id-based - hashing. - - Although not recommended, you can decide for yourself and force - *attrs* to create one (for example, if the class is immutable even - though you didn't freeze it programmatically) by passing True or - not. Both of these cases are rather special and should be used - carefully. - - .. seealso:: - - - Our documentation on `hashing`, - - Python's documentation on `object.__hash__`, - - and the `GitHub issue that led to the default \ behavior - `_ for more - details. - - hash (bool | None): - Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence. - - cache_hash (bool): - Ensure that the object's hash code is computed only once and stored - on the object. If this is set to True, hashing must be either - explicitly or implicitly enabled for this class. If the hash code - is cached, avoid any reassignments of fields involved in hash code - computation or mutations of the objects those fields point to after - object creation. If such changes occur, the behavior of the - object's hash code is undefined. - - frozen (bool): - Make instances immutable after initialization. If someone attempts - to modify a frozen instance, `attrs.exceptions.FrozenInstanceError` - is raised. - - .. note:: - - 1. This is achieved by installing a custom ``__setattr__`` - method on your class, so you can't implement your own. - - 2. True immutability is impossible in Python. - - 3. This *does* have a minor a runtime performance `impact - ` when initializing new instances. In other - words: ``__init__`` is slightly slower with ``frozen=True``. - - 4. If a class is frozen, you cannot modify ``self`` in - ``__attrs_post_init__`` or a self-written ``__init__``. You - can circumvent that limitation by using - ``object.__setattr__(self, "attribute_name", value)``. - - 5. Subclasses of a frozen class are frozen too. - - kw_only (bool): - Make attributes keyword-only in the generated ``__init__`` (if - *init* is False, this parameter is ignored). Attributes that - explicitly set ``kw_only=False`` are not affected; base class - attributes are also not affected. - - Also see *force_kw_only*. - - weakref_slot (bool): - Make instances weak-referenceable. This has no effect unless - *slots* is True. - - field_transformer (~typing.Callable | None): - A function that is called with the original class object and all - fields right before *attrs* finalizes the class. You can use this, - for example, to automatically add converters or validators to - fields based on their types. - - .. seealso:: `transform-fields` - - match_args (bool): - If True (default), set ``__match_args__`` on the class to support - :pep:`634` (*Structural Pattern Matching*). It is a tuple of all - non-keyword-only ``__init__`` parameter names on Python 3.10 and - later. Ignored on older Python versions. - - collect_by_mro (bool): - If True, *attrs* collects attributes from base classes correctly - according to the `method resolution order - `_. If False, *attrs* - will mimic the (wrong) behavior of `dataclasses` and :pep:`681`. - - See also `issue #428 - `_. - - force_kw_only (bool): - A back-compat flag for restoring pre-25.4.0 behavior. If True and - ``kw_only=True``, all attributes are made keyword-only, including - base class attributes, and those set to ``kw_only=False`` at the - attribute level. Defaults to False. - - See also `issue #980 - `_. - - getstate_setstate (bool | None): - .. 
note:: - - This is usually only interesting for slotted classes and you - should probably just set *auto_detect* to True. - - If True, ``__getstate__`` and ``__setstate__`` are generated and - attached to the class. This is necessary for slotted classes to be - pickleable. If left None, it's True by default for slotted classes - and False for dict classes. - - If *auto_detect* is True, and *getstate_setstate* is left None, and - **either** ``__getstate__`` or ``__setstate__`` is detected - directly on the class (meaning: not inherited), it is set to False - (this is usually what you want). - - auto_attribs (bool | None): - If True, look at type annotations to determine which attributes to - use, like `dataclasses`. If False, it will only look for explicit - :func:`field` class attributes, like classic *attrs*. - - If left None, it will guess: - - 1. If any attributes are annotated and no unannotated - `attrs.field`\ s are found, it assumes *auto_attribs=True*. - 2. Otherwise it assumes *auto_attribs=False* and tries to collect - `attrs.field`\ s. - - If *attrs* decides to look at type annotations, **all** fields - **must** be annotated. If *attrs* encounters a field that is set to - a :func:`field` / `attr.ib` but lacks a type annotation, an - `attrs.exceptions.UnannotatedAttributeError` is raised. Use - ``field_name: typing.Any = field(...)`` if you don't want to set a - type. - - .. warning:: - - For features that use the attribute name to create decorators - (for example, :ref:`validators `), you still *must* - assign :func:`field` / `attr.ib` to them. Otherwise Python will - either not find the name or try to use the default value to - call, for example, ``validator`` on it. - - Attributes annotated as `typing.ClassVar`, and attributes that are - neither annotated nor set to an `field()` are **ignored**. - - these (dict[str, object]): - A dictionary of name to the (private) return value of `field()` - mappings. This is useful to avoid the definition of your attributes - within the class body because you can't (for example, if you want - to add ``__repr__`` methods to Django models) or don't want to. - - If *these* is not `None`, *attrs* will *not* search the class body - for attributes and will *not* remove any attributes from it. - - The order is deduced from the order of the attributes inside - *these*. - - Arguably, this is a rather obscure feature. - - .. versionadded:: 20.1.0 - .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. - .. versionadded:: 22.2.0 - *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). - .. versionchanged:: 24.1.0 - Instances are not compared as tuples of attributes anymore, but using a - big ``and`` condition. This is faster and has more correct behavior for - uncomparable values like `math.nan`. - .. versionadded:: 24.1.0 - If a class has an *inherited* classmethod called - ``__attrs_init_subclass__``, it is executed after the class is created. - .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*. - .. versionadded:: 24.3.0 - Unless already present, a ``__replace__`` method is automatically - created for `copy.replace` (Python 3.13+ only). - .. versionchanged:: 25.4.0 - *kw_only* now only applies to attributes defined in the current class, - and respects attribute-level ``kw_only=False`` settings. - .. versionadded:: 25.4.0 - Added *force_kw_only* to go back to the previous *kw_only* behavior. - - .. 
note:: - - The main differences to the classic `attr.s` are: - - - Automatically detect whether or not *auto_attribs* should be `True` - (c.f. *auto_attribs* parameter). - - Converters and validators run when attributes are set by default -- - if *frozen* is `False`. - - *slots=True* - - Usually, this has only upsides and few visible effects in everyday - programming. But it *can* lead to some surprising behaviors, so - please make sure to read :term:`slotted classes`. - - - *auto_exc=True* - - *auto_detect=True* - - *order=False* - - *force_kw_only=False* - - Some options that were only relevant on Python 2 or were kept around - for backwards-compatibility have been removed. - - """ - - def do_it(cls, auto_attribs): - return attrs( - maybe_cls=cls, - these=these, - repr=repr, - hash=hash, - unsafe_hash=unsafe_hash, - init=init, - slots=slots, - frozen=frozen, - weakref_slot=weakref_slot, - str=str, - auto_attribs=auto_attribs, - kw_only=kw_only, - cache_hash=cache_hash, - auto_exc=auto_exc, - eq=eq, - order=order, - auto_detect=auto_detect, - collect_by_mro=True, - getstate_setstate=getstate_setstate, - on_setattr=on_setattr, - field_transformer=field_transformer, - match_args=match_args, - force_kw_only=force_kw_only, - ) - - def wrap(cls): - """ - Making this a wrapper ensures this code runs during class creation. - - We also ensure that frozen-ness of classes is inherited. - """ - nonlocal frozen, on_setattr - - had_on_setattr = on_setattr not in (None, setters.NO_OP) - - # By default, mutable classes convert & validate on setattr. - if frozen is False and on_setattr is None: - on_setattr = _DEFAULT_ON_SETATTR - - # However, if we subclass a frozen class, we inherit the immutability - # and disable on_setattr. - for base_cls in cls.__bases__: - if base_cls.__setattr__ is _frozen_setattrs: - if had_on_setattr: - msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." - raise ValueError(msg) - - on_setattr = setters.NO_OP - break - - if auto_attribs is not None: - return do_it(cls, auto_attribs) - - try: - return do_it(cls, True) - except UnannotatedAttributeError: - return do_it(cls, False) - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but `None` if used as `@attrs()`. - if maybe_cls is None: - return wrap - - return wrap(maybe_cls) - - -mutable = define -frozen = partial(define, frozen=True, on_setattr=None) - - -def field( - *, - default=NOTHING, - validator=None, - repr=True, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=None, - eq=None, - order=None, - on_setattr=None, - alias=None, -): - """ - Create a new :term:`field` / :term:`attribute` on a class. - - .. warning:: - - Does **nothing** unless the class is also decorated with - `attrs.define` (or similar)! - - Args: - default: - A value that is used if an *attrs*-generated ``__init__`` is used - and no value is passed while instantiating or the attribute is - excluded using ``init=False``. - - If the value is an instance of `attrs.Factory`, its callable will - be used to construct a new value (useful for mutable data types - like lists or dicts). - - If a default is not set (or set manually to `attrs.NOTHING`), a - value *must* be supplied when instantiating; otherwise a - `TypeError` will be raised. - - .. seealso:: `defaults` - - factory (~typing.Callable): - Syntactic sugar for ``default=attr.Factory(factory)``. 
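To illustrate the `factory` shorthand mentioned just above, a small sketch (the `Basket` class is hypothetical):

```python
from attrs import Factory, define, field

@define
class Basket:
    items: list = field(factory=list)          # shorthand for the line below
    tags: list = field(default=Factory(list))  # explicit Factory default

a, b = Basket(), Basket()
a.items.append("apple")
print(a.items, b.items)  # ['apple'] [] -- each instance gets its own list
```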
- - validator (~typing.Callable | list[~typing.Callable]): - Callable that is called by *attrs*-generated ``__init__`` methods - after the instance has been initialized. They receive the - initialized instance, the :func:`~attrs.Attribute`, and the passed - value. - - The return value is *not* inspected so the validator has to throw - an exception itself. - - If a `list` is passed, its items are treated as validators and must - all pass. - - Validators can be globally disabled and re-enabled using - `attrs.validators.get_disabled` / `attrs.validators.set_disabled`. - - The validator can also be set using decorator notation as shown - below. - - .. seealso:: :ref:`validators` - - repr (bool | ~typing.Callable): - Include this attribute in the generated ``__repr__`` method. If - True, include the attribute; if False, omit it. By default, the - built-in ``repr()`` function is used. To override how the attribute - value is formatted, pass a ``callable`` that takes a single value - and returns a string. Note that the resulting string is used as-is, - which means it will be used directly *instead* of calling - ``repr()`` (the default). - - eq (bool | ~typing.Callable): - If True (default), include this attribute in the generated - ``__eq__`` and ``__ne__`` methods that check two instances for - equality. To override how the attribute value is compared, pass a - callable that takes a single value and returns the value to be - compared. - - .. seealso:: `comparison` - - order (bool | ~typing.Callable): - If True (default), include this attributes in the generated - ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To - override how the attribute value is ordered, pass a callable that - takes a single value and returns the value to be ordered. - - .. seealso:: `comparison` - - hash (bool | None): - Include this attribute in the generated ``__hash__`` method. If - None (default), mirror *eq*'s value. This is the correct behavior - according the Python spec. Setting this value to anything else - than None is *discouraged*. - - .. seealso:: `hashing` - - init (bool): - Include this attribute in the generated ``__init__`` method. - - It is possible to set this to False and set a default value. In - that case this attributed is unconditionally initialized with the - specified default value or factory. - - .. seealso:: `init` - - converter (typing.Callable | Converter): - A callable that is called by *attrs*-generated ``__init__`` methods - to convert attribute's value to the desired format. - - If a vanilla callable is passed, it is given the passed-in value as - the only positional argument. It is possible to receive additional - arguments by wrapping the callable in a `Converter`. - - Either way, the returned value will be used as the new value of the - attribute. The value is converted before being passed to the - validator, if any. - - .. seealso:: :ref:`converters` - - metadata (dict | None): - An arbitrary mapping, to be used by third-party code. - - .. seealso:: `extending-metadata`. - - type (type): - The type of the attribute. Nowadays, the preferred method to - specify the type is using a variable annotation (see :pep:`526`). - This argument is provided for backwards-compatibility and for usage - with `make_class`. Regardless of the approach used, the type will - be stored on ``Attribute.type``. - - Please note that *attrs* doesn't do anything with this metadata by - itself. You can use it as part of your own code or for `static type - checking `. 
- - kw_only (bool | None): - Make this attribute keyword-only in the generated ``__init__`` (if - *init* is False, this parameter is ignored). If None (default), - mirror the setting from `attrs.define`. - - on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]): - Allows to overwrite the *on_setattr* setting from `attr.s`. If left - None, the *on_setattr* value from `attr.s` is used. Set to - `attrs.setters.NO_OP` to run **no** `setattr` hooks for this - attribute -- regardless of the setting in `define()`. - - alias (str | None): - Override this attribute's parameter name in the generated - ``__init__`` method. If left None, default to ``name`` stripped - of leading underscores. See `private-attributes`. - - .. versionadded:: 20.1.0 - .. versionchanged:: 21.1.0 - *eq*, *order*, and *cmp* also accept a custom callable - .. versionadded:: 22.2.0 *alias* - .. versionadded:: 23.1.0 - The *type* parameter has been re-added; mostly for `attrs.make_class`. - Please note that type checkers ignore this metadata. - .. versionchanged:: 25.4.0 - *kw_only* can now be None, and its default is also changed from False to - None. - - .. seealso:: - - `attr.ib` - """ - return attrib( - default=default, - validator=validator, - repr=repr, - hash=hash, - init=init, - metadata=metadata, - type=type, - converter=converter, - factory=factory, - kw_only=kw_only, - eq=eq, - order=order, - on_setattr=on_setattr, - alias=alias, - ) - - -def asdict(inst, *, recurse=True, filter=None, value_serializer=None): - """ - Same as `attr.asdict`, except that collections types are always retained - and dict is always used as *dict_factory*. - - .. versionadded:: 21.3.0 - """ - return _asdict( - inst=inst, - recurse=recurse, - filter=filter, - value_serializer=value_serializer, - retain_collection_types=True, - ) - - -def astuple(inst, *, recurse=True, filter=None): - """ - Same as `attr.astuple`, except that collections types are always retained - and `tuple` is always used as the *tuple_factory*. - - .. versionadded:: 21.3.0 - """ - return _astuple( - inst=inst, recurse=recurse, filter=filter, retain_collection_types=True - ) - - -def inspect(cls): - """ - Inspect the class and return its effective build parameters. - - Warning: - This feature is currently **experimental** and is not covered by our - strict backwards-compatibility guarantees. - - Args: - cls: The *attrs*-decorated class to inspect. - - Returns: - The effective build parameters of the class. - - Raises: - NotAnAttrsClassError: If the class is not an *attrs*-decorated class. - - .. versionadded:: 25.4.0 - """ - try: - return cls.__dict__["__attrs_props__"] - except KeyError: - msg = f"{cls!r} is not an attrs-decorated class." - raise NotAnAttrsClassError(msg) from None diff --git a/venv/Lib/site-packages/attr/_typing_compat.pyi b/venv/Lib/site-packages/attr/_typing_compat.pyi deleted file mode 100644 index ca7b71e..0000000 --- a/venv/Lib/site-packages/attr/_typing_compat.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any, ClassVar, Protocol - -# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. -MYPY = False - -if MYPY: - # A protocol to be able to statically accept an attrs class. - class AttrsInstance_(Protocol): - __attrs_attrs__: ClassVar[Any] - -else: - # For type checkers without plug-in support use an empty protocol that - # will (hopefully) be combined into a union. 
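Putting the pieces above together, a minimal sketch of `define`, `field`, and the collection-retaining `asdict`/`astuple` helpers; the `Member` class is illustrative only:

```python
from attrs import asdict, astuple, define, field, validators

@define
class Member:
    name: str = field(validator=validators.instance_of(str))
    points: int = field(default=0)

m = Member("alice", points=10)
print(asdict(m))   # {'name': 'alice', 'points': 10}
print(astuple(m))  # ('alice', 10)
```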
- class AttrsInstance_(Protocol): - pass diff --git a/venv/Lib/site-packages/attr/_version_info.py b/venv/Lib/site-packages/attr/_version_info.py deleted file mode 100644 index 27f1888..0000000 --- a/venv/Lib/site-packages/attr/_version_info.py +++ /dev/null @@ -1,89 +0,0 @@ -# SPDX-License-Identifier: MIT - - -from functools import total_ordering - -from ._funcs import astuple -from ._make import attrib, attrs - - -@total_ordering -@attrs(eq=False, order=False, slots=True, frozen=True) -class VersionInfo: - """ - A version object that can be compared to tuple of length 1--4: - - >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) - True - >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) - True - >>> vi = attr.VersionInfo(19, 2, 0, "final") - >>> vi < (19, 1, 1) - False - >>> vi < (19,) - False - >>> vi == (19, 2,) - True - >>> vi == (19, 2, 1) - False - - .. versionadded:: 19.2 - """ - - year = attrib(type=int) - minor = attrib(type=int) - micro = attrib(type=int) - releaselevel = attrib(type=str) - - @classmethod - def _from_version_string(cls, s): - """ - Parse *s* and return a _VersionInfo. - """ - v = s.split(".") - if len(v) == 3: - v.append("final") - - return cls( - year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] - ) - - def _ensure_tuple(self, other): - """ - Ensure *other* is a tuple of a valid length. - - Returns a possibly transformed *other* and ourselves as a tuple of - the same length as *other*. - """ - - if self.__class__ is other.__class__: - other = astuple(other) - - if not isinstance(other, tuple): - raise NotImplementedError - - if not (1 <= len(other) <= 4): - raise NotImplementedError - - return astuple(self)[: len(other)], other - - def __eq__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - return us == them - - def __lt__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't - # have to do anything special with releaselevel for now. - return us < them - - def __hash__(self): - return hash((self.year, self.minor, self.micro, self.releaselevel)) diff --git a/venv/Lib/site-packages/attr/_version_info.pyi b/venv/Lib/site-packages/attr/_version_info.pyi deleted file mode 100644 index 45ced08..0000000 --- a/venv/Lib/site-packages/attr/_version_info.pyi +++ /dev/null @@ -1,9 +0,0 @@ -class VersionInfo: - @property - def year(self) -> int: ... - @property - def minor(self) -> int: ... - @property - def micro(self) -> int: ... - @property - def releaselevel(self) -> str: ... diff --git a/venv/Lib/site-packages/attr/converters.py b/venv/Lib/site-packages/attr/converters.py deleted file mode 100644 index 0a79dee..0000000 --- a/venv/Lib/site-packages/attr/converters.py +++ /dev/null @@ -1,162 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly useful converters. -""" - -import typing - -from ._compat import _AnnotationExtractor -from ._make import NOTHING, Converter, Factory, pipe - - -__all__ = [ - "default_if_none", - "optional", - "pipe", - "to_bool", -] - - -def optional(converter): - """ - A converter that allows an attribute to be optional. An optional attribute - is one which can be set to `None`. - - Type annotations will be inferred from the wrapped converter's, if it has - any. - - Args: - converter (typing.Callable): - the converter that is used for non-`None` values. - - .. 
versionadded:: 17.1.0 - """ - - if isinstance(converter, Converter): - - def optional_converter(val, inst, field): - if val is None: - return None - return converter(val, inst, field) - - else: - - def optional_converter(val): - if val is None: - return None - return converter(val) - - xtr = _AnnotationExtractor(converter) - - t = xtr.get_first_param_type() - if t: - optional_converter.__annotations__["val"] = typing.Optional[t] - - rt = xtr.get_return_type() - if rt: - optional_converter.__annotations__["return"] = typing.Optional[rt] - - if isinstance(converter, Converter): - return Converter(optional_converter, takes_self=True, takes_field=True) - - return optional_converter - - -def default_if_none(default=NOTHING, factory=None): - """ - A converter that allows to replace `None` values by *default* or the result - of *factory*. - - Args: - default: - Value to be used if `None` is passed. Passing an instance of - `attrs.Factory` is supported, however the ``takes_self`` option is - *not*. - - factory (typing.Callable): - A callable that takes no parameters whose result is used if `None` - is passed. - - Raises: - TypeError: If **neither** *default* or *factory* is passed. - - TypeError: If **both** *default* and *factory* are passed. - - ValueError: - If an instance of `attrs.Factory` is passed with - ``takes_self=True``. - - .. versionadded:: 18.2.0 - """ - if default is NOTHING and factory is None: - msg = "Must pass either `default` or `factory`." - raise TypeError(msg) - - if default is not NOTHING and factory is not None: - msg = "Must pass either `default` or `factory` but not both." - raise TypeError(msg) - - if factory is not None: - default = Factory(factory) - - if isinstance(default, Factory): - if default.takes_self: - msg = "`takes_self` is not supported by default_if_none." - raise ValueError(msg) - - def default_if_none_converter(val): - if val is not None: - return val - - return default.factory() - - else: - - def default_if_none_converter(val): - if val is not None: - return val - - return default - - return default_if_none_converter - - -def to_bool(val): - """ - Convert "boolean" strings (for example, from environment variables) to real - booleans. - - Values mapping to `True`: - - - ``True`` - - ``"true"`` / ``"t"`` - - ``"yes"`` / ``"y"`` - - ``"on"`` - - ``"1"`` - - ``1`` - - Values mapping to `False`: - - - ``False`` - - ``"false"`` / ``"f"`` - - ``"no"`` / ``"n"`` - - ``"off"`` - - ``"0"`` - - ``0`` - - Raises: - ValueError: For any other value. - - .. versionadded:: 21.3.0 - """ - if isinstance(val, str): - val = val.lower() - - if val in (True, "true", "t", "yes", "y", "on", "1", 1): - return True - if val in (False, "false", "f", "no", "n", "off", "0", 0): - return False - - msg = f"Cannot convert value to bool: {val!r}" - raise ValueError(msg) diff --git a/venv/Lib/site-packages/attr/converters.pyi b/venv/Lib/site-packages/attr/converters.pyi deleted file mode 100644 index 12bd0c4..0000000 --- a/venv/Lib/site-packages/attr/converters.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Callable, Any, overload - -from attrs import _ConverterType, _CallableConverterType - -@overload -def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ... -@overload -def pipe(*validators: _ConverterType) -> _ConverterType: ... -@overload -def optional(converter: _CallableConverterType) -> _CallableConverterType: ... -@overload -def optional(converter: _ConverterType) -> _ConverterType: ... 
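A sketch of the three converters defined above, assuming a recent Python (3.10+ union syntax); the `Settings` class and its fields are invented for the example:

```python
from attrs import converters, define, field

@define
class Settings:
    retries: int | None = field(converter=converters.optional(int), default=None)
    greeting: str = field(converter=converters.default_if_none("hello"), default=None)
    debug: bool = field(converter=converters.to_bool, default="false")

s = Settings(retries="3")
print(s)  # Settings(retries=3, greeting='hello', debug=False)
```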
-@overload -def default_if_none(default: Any) -> _CallableConverterType: ... -@overload -def default_if_none( - *, factory: Callable[[], Any] -) -> _CallableConverterType: ... -def to_bool(val: str | int | bool) -> bool: ... diff --git a/venv/Lib/site-packages/attr/exceptions.py b/venv/Lib/site-packages/attr/exceptions.py deleted file mode 100644 index 3b7abb8..0000000 --- a/venv/Lib/site-packages/attr/exceptions.py +++ /dev/null @@ -1,95 +0,0 @@ -# SPDX-License-Identifier: MIT - -from __future__ import annotations - -from typing import ClassVar - - -class FrozenError(AttributeError): - """ - A frozen/immutable instance or attribute have been attempted to be - modified. - - It mirrors the behavior of ``namedtuples`` by using the same error message - and subclassing `AttributeError`. - - .. versionadded:: 20.1.0 - """ - - msg = "can't set attribute" - args: ClassVar[tuple[str]] = [msg] - - -class FrozenInstanceError(FrozenError): - """ - A frozen instance has been attempted to be modified. - - .. versionadded:: 16.1.0 - """ - - -class FrozenAttributeError(FrozenError): - """ - A frozen attribute has been attempted to be modified. - - .. versionadded:: 20.1.0 - """ - - -class AttrsAttributeNotFoundError(ValueError): - """ - An *attrs* function couldn't find an attribute that the user asked for. - - .. versionadded:: 16.2.0 - """ - - -class NotAnAttrsClassError(ValueError): - """ - A non-*attrs* class has been passed into an *attrs* function. - - .. versionadded:: 16.2.0 - """ - - -class DefaultAlreadySetError(RuntimeError): - """ - A default has been set when defining the field and is attempted to be reset - using the decorator. - - .. versionadded:: 17.1.0 - """ - - -class UnannotatedAttributeError(RuntimeError): - """ - A class with ``auto_attribs=True`` has a field without a type annotation. - - .. versionadded:: 17.3.0 - """ - - -class PythonTooOldError(RuntimeError): - """ - It was attempted to use an *attrs* feature that requires a newer Python - version. - - .. versionadded:: 18.2.0 - """ - - -class NotCallableError(TypeError): - """ - A field requiring a callable has been set with a value that is not - callable. - - .. versionadded:: 19.2.0 - """ - - def __init__(self, msg, value): - super(TypeError, self).__init__(msg, value) - self.msg = msg - self.value = value - - def __str__(self): - return str(self.msg) diff --git a/venv/Lib/site-packages/attr/exceptions.pyi b/venv/Lib/site-packages/attr/exceptions.pyi deleted file mode 100644 index f268011..0000000 --- a/venv/Lib/site-packages/attr/exceptions.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any - -class FrozenError(AttributeError): - msg: str = ... - -class FrozenInstanceError(FrozenError): ... -class FrozenAttributeError(FrozenError): ... -class AttrsAttributeNotFoundError(ValueError): ... -class NotAnAttrsClassError(ValueError): ... -class DefaultAlreadySetError(RuntimeError): ... -class UnannotatedAttributeError(RuntimeError): ... -class PythonTooOldError(RuntimeError): ... - -class NotCallableError(TypeError): - msg: str = ... - value: Any = ... - def __init__(self, msg: str, value: Any) -> None: ... diff --git a/venv/Lib/site-packages/attr/filters.py b/venv/Lib/site-packages/attr/filters.py deleted file mode 100644 index 689b170..0000000 --- a/venv/Lib/site-packages/attr/filters.py +++ /dev/null @@ -1,72 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly useful filters for `attrs.asdict` and `attrs.astuple`. 
-""" - -from ._make import Attribute - - -def _split_what(what): - """ - Returns a tuple of `frozenset`s of classes and attributes. - """ - return ( - frozenset(cls for cls in what if isinstance(cls, type)), - frozenset(cls for cls in what if isinstance(cls, str)), - frozenset(cls for cls in what if isinstance(cls, Attribute)), - ) - - -def include(*what): - """ - Create a filter that only allows *what*. - - Args: - what (list[type, str, attrs.Attribute]): - What to include. Can be a type, a name, or an attribute. - - Returns: - Callable: - A callable that can be passed to `attrs.asdict`'s and - `attrs.astuple`'s *filter* argument. - - .. versionchanged:: 23.1.0 Accept strings with field names. - """ - cls, names, attrs = _split_what(what) - - def include_(attribute, value): - return ( - value.__class__ in cls - or attribute.name in names - or attribute in attrs - ) - - return include_ - - -def exclude(*what): - """ - Create a filter that does **not** allow *what*. - - Args: - what (list[type, str, attrs.Attribute]): - What to exclude. Can be a type, a name, or an attribute. - - Returns: - Callable: - A callable that can be passed to `attrs.asdict`'s and - `attrs.astuple`'s *filter* argument. - - .. versionchanged:: 23.3.0 Accept field name string as input argument - """ - cls, names, attrs = _split_what(what) - - def exclude_(attribute, value): - return not ( - value.__class__ in cls - or attribute.name in names - or attribute in attrs - ) - - return exclude_ diff --git a/venv/Lib/site-packages/attr/filters.pyi b/venv/Lib/site-packages/attr/filters.pyi deleted file mode 100644 index 974abdc..0000000 --- a/venv/Lib/site-packages/attr/filters.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Any - -from . import Attribute, _FilterType - -def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ... -def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ... diff --git a/venv/Lib/site-packages/attr/py.typed b/venv/Lib/site-packages/attr/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/attr/setters.py b/venv/Lib/site-packages/attr/setters.py deleted file mode 100644 index 78b0839..0000000 --- a/venv/Lib/site-packages/attr/setters.py +++ /dev/null @@ -1,79 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly used hooks for on_setattr. -""" - -from . import _config -from .exceptions import FrozenAttributeError - - -def pipe(*setters): - """ - Run all *setters* and return the return value of the last one. - - .. versionadded:: 20.1.0 - """ - - def wrapped_pipe(instance, attrib, new_value): - rv = new_value - - for setter in setters: - rv = setter(instance, attrib, rv) - - return rv - - return wrapped_pipe - - -def frozen(_, __, ___): - """ - Prevent an attribute to be modified. - - .. versionadded:: 20.1.0 - """ - raise FrozenAttributeError - - -def validate(instance, attrib, new_value): - """ - Run *attrib*'s validator on *new_value* if it has one. - - .. versionadded:: 20.1.0 - """ - if _config._run_validators is False: - return new_value - - v = attrib.validator - if not v: - return new_value - - v(instance, attrib, new_value) - - return new_value - - -def convert(instance, attrib, new_value): - """ - Run *attrib*'s converter -- if it has one -- on *new_value* and return the - result. - - .. versionadded:: 20.1.0 - """ - c = attrib.converter - if c: - # This can be removed once we drop 3.8 and use attrs.Converter instead. 
- from ._make import Converter - - if not isinstance(c, Converter): - return c(new_value) - - return c(new_value, instance, attrib) - - return new_value - - -# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. -# Sphinx's autodata stopped working, so the docstring is inlined in the API -# docs. -NO_OP = object() diff --git a/venv/Lib/site-packages/attr/setters.pyi b/venv/Lib/site-packages/attr/setters.pyi deleted file mode 100644 index 73abf36..0000000 --- a/venv/Lib/site-packages/attr/setters.pyi +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any, NewType, NoReturn, TypeVar - -from . import Attribute -from attrs import _OnSetAttrType - -_T = TypeVar("_T") - -def frozen( - instance: Any, attribute: Attribute[Any], new_value: Any -) -> NoReturn: ... -def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... -def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... - -# convert is allowed to return Any, because they can be chained using pipe. -def convert( - instance: Any, attribute: Attribute[Any], new_value: Any -) -> Any: ... - -_NoOpType = NewType("_NoOpType", object) -NO_OP: _NoOpType diff --git a/venv/Lib/site-packages/attr/validators.py b/venv/Lib/site-packages/attr/validators.py deleted file mode 100644 index 837e003..0000000 --- a/venv/Lib/site-packages/attr/validators.py +++ /dev/null @@ -1,748 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly useful validators. -""" - -import operator -import re - -from contextlib import contextmanager -from re import Pattern - -from ._config import get_run_validators, set_run_validators -from ._make import _AndValidator, and_, attrib, attrs -from .converters import default_if_none -from .exceptions import NotCallableError - - -__all__ = [ - "and_", - "deep_iterable", - "deep_mapping", - "disabled", - "ge", - "get_disabled", - "gt", - "in_", - "instance_of", - "is_callable", - "le", - "lt", - "matches_re", - "max_len", - "min_len", - "not_", - "optional", - "or_", - "set_disabled", -] - - -def set_disabled(disabled): - """ - Globally disable or enable running validators. - - By default, they are run. - - Args: - disabled (bool): If `True`, disable running all validators. - - .. warning:: - - This function is not thread-safe! - - .. versionadded:: 21.3.0 - """ - set_run_validators(not disabled) - - -def get_disabled(): - """ - Return a bool indicating whether validators are currently disabled or not. - - Returns: - bool:`True` if validators are currently disabled. - - .. versionadded:: 21.3.0 - """ - return not get_run_validators() - - -@contextmanager -def disabled(): - """ - Context manager that disables running validators within its context. - - .. warning:: - - This context manager is not thread-safe! - - .. versionadded:: 21.3.0 - """ - set_run_validators(False) - try: - yield - finally: - set_run_validators(True) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _InstanceOfValidator: - type = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not isinstance(value, self.type): - msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})." 
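A sketch of wiring the `on_setattr` hooks above into a class; `Invoice` and its fields are assumptions for illustration, and the explicit `on_setattr` merely spells out what `define` already does for mutable classes:

```python
from attrs import define, field, setters, validators

@define(on_setattr=setters.pipe(setters.convert, setters.validate))
class Invoice:
    amount: int = field(converter=int, validator=validators.ge(0))
    reference: str = field(on_setattr=setters.frozen)  # freeze just this field

inv = Invoice("10", "INV-001")
inv.amount = "25"      # converted and validated on assignment
print(inv.amount)      # 25
# inv.reference = "x"  # would raise attrs.exceptions.FrozenAttributeError
```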
- raise TypeError( - msg, - attr, - self.type, - value, - ) - - def __repr__(self): - return f"" - - -def instance_of(type): - """ - A validator that raises a `TypeError` if the initializer is called with a - wrong type for this particular attribute (checks are performed using - `isinstance` therefore it's also valid to pass a tuple of types). - - Args: - type (type | tuple[type]): The type to check for. - - Raises: - TypeError: - With a human readable error message, the attribute (of type - `attrs.Attribute`), the expected type, and the value it got. - """ - return _InstanceOfValidator(type) - - -@attrs(repr=False, frozen=True, slots=True) -class _MatchesReValidator: - pattern = attrib() - match_func = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not self.match_func(value): - msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)" - raise ValueError( - msg, - attr, - self.pattern, - value, - ) - - def __repr__(self): - return f"" - - -def matches_re(regex, flags=0, func=None): - r""" - A validator that raises `ValueError` if the initializer is called with a - string that doesn't match *regex*. - - Args: - regex (str, re.Pattern): - A regex string or precompiled pattern to match against - - flags (int): - Flags that will be passed to the underlying re function (default 0) - - func (typing.Callable): - Which underlying `re` function to call. Valid options are - `re.fullmatch`, `re.search`, and `re.match`; the default `None` - means `re.fullmatch`. For performance reasons, the pattern is - always precompiled using `re.compile`. - - .. versionadded:: 19.2.0 - .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. - """ - valid_funcs = (re.fullmatch, None, re.search, re.match) - if func not in valid_funcs: - msg = "'func' must be one of {}.".format( - ", ".join( - sorted((e and e.__name__) or "None" for e in set(valid_funcs)) - ) - ) - raise ValueError(msg) - - if isinstance(regex, Pattern): - if flags: - msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead" - raise TypeError(msg) - pattern = regex - else: - pattern = re.compile(regex, flags) - - if func is re.match: - match_func = pattern.match - elif func is re.search: - match_func = pattern.search - else: - match_func = pattern.fullmatch - - return _MatchesReValidator(pattern, match_func) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _OptionalValidator: - validator = attrib() - - def __call__(self, inst, attr, value): - if value is None: - return - - self.validator(inst, attr, value) - - def __repr__(self): - return f"" - - -def optional(validator): - """ - A validator that makes an attribute optional. An optional attribute is one - which can be set to `None` in addition to satisfying the requirements of - the sub-validator. - - Args: - validator - (typing.Callable | tuple[typing.Callable] | list[typing.Callable]): - A validator (or validators) that is used for non-`None` values. - - .. versionadded:: 15.1.0 - .. versionchanged:: 17.1.0 *validator* can be a list of validators. - .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators. 
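A compact sketch of `instance_of`, `matches_re`, and `optional` from the hunk above; the `Receipt` class and the regex are illustrative:

```python
from attrs import define, field, validators

@define
class Receipt:
    trade_no: str = field(validator=validators.matches_re(r"\d{16,32}"))
    amount: float = field(validator=validators.instance_of(float))
    memo: str | None = field(
        default=None,
        validator=validators.optional(validators.instance_of(str)),
    )

Receipt("2024010122001400000000000001", 9.99)   # passes
# Receipt("abc", 9.99)  # ValueError: 'trade_no' must match regex ...
```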
- """ - if isinstance(validator, (list, tuple)): - return _OptionalValidator(_AndValidator(validator)) - - return _OptionalValidator(validator) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _InValidator: - options = attrib() - _original_options = attrib(hash=False) - - def __call__(self, inst, attr, value): - try: - in_options = value in self.options - except TypeError: # e.g. `1 in "abc"` - in_options = False - - if not in_options: - msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})" - raise ValueError( - msg, - attr, - self._original_options, - value, - ) - - def __repr__(self): - return f"" - - -def in_(options): - """ - A validator that raises a `ValueError` if the initializer is called with a - value that does not belong in the *options* provided. - - The check is performed using ``value in options``, so *options* has to - support that operation. - - To keep the validator hashable, dicts, lists, and sets are transparently - transformed into a `tuple`. - - Args: - options: Allowed options. - - Raises: - ValueError: - With a human readable error message, the attribute (of type - `attrs.Attribute`), the expected options, and the value it got. - - .. versionadded:: 17.1.0 - .. versionchanged:: 22.1.0 - The ValueError was incomplete until now and only contained the human - readable error message. Now it contains all the information that has - been promised since 17.1.0. - .. versionchanged:: 24.1.0 - *options* that are a list, dict, or a set are now transformed into a - tuple to keep the validator hashable. - """ - repr_options = options - if isinstance(options, (list, dict, set)): - options = tuple(options) - - return _InValidator(options, repr_options) - - -@attrs(repr=False, slots=False, unsafe_hash=True) -class _IsCallableValidator: - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not callable(value): - message = ( - "'{name}' must be callable " - "(got {value!r} that is a {actual!r})." - ) - raise NotCallableError( - msg=message.format( - name=attr.name, value=value, actual=value.__class__ - ), - value=value, - ) - - def __repr__(self): - return "" - - -def is_callable(): - """ - A validator that raises a `attrs.exceptions.NotCallableError` if the - initializer is called with a value for this particular attribute that is - not callable. - - .. versionadded:: 19.1.0 - - Raises: - attrs.exceptions.NotCallableError: - With a human readable error message containing the attribute - (`attrs.Attribute`) name, and the value it got. - """ - return _IsCallableValidator() - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _DeepIterable: - member_validator = attrib(validator=is_callable()) - iterable_validator = attrib( - default=None, validator=optional(is_callable()) - ) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.iterable_validator is not None: - self.iterable_validator(inst, attr, value) - - for member in value: - self.member_validator(inst, attr, member) - - def __repr__(self): - iterable_identifier = ( - "" - if self.iterable_validator is None - else f" {self.iterable_validator!r}" - ) - return ( - f"" - ) - - -def deep_iterable(member_validator, iterable_validator=None): - """ - A validator that performs deep validation of an iterable. - - Args: - member_validator: Validator(s) to apply to iterable members. 
- - iterable_validator: - Validator(s) to apply to iterable itself (optional). - - Raises - TypeError: if any sub-validators fail - - .. versionadded:: 19.1.0 - - .. versionchanged:: 25.4.0 - *member_validator* and *iterable_validator* can now be a list or tuple - of validators. - """ - if isinstance(member_validator, (list, tuple)): - member_validator = and_(*member_validator) - if isinstance(iterable_validator, (list, tuple)): - iterable_validator = and_(*iterable_validator) - return _DeepIterable(member_validator, iterable_validator) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _DeepMapping: - key_validator = attrib(validator=optional(is_callable())) - value_validator = attrib(validator=optional(is_callable())) - mapping_validator = attrib(validator=optional(is_callable())) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.mapping_validator is not None: - self.mapping_validator(inst, attr, value) - - for key in value: - if self.key_validator is not None: - self.key_validator(inst, attr, key) - if self.value_validator is not None: - self.value_validator(inst, attr, value[key]) - - def __repr__(self): - return f"" - - -def deep_mapping( - key_validator=None, value_validator=None, mapping_validator=None -): - """ - A validator that performs deep validation of a dictionary. - - All validators are optional, but at least one of *key_validator* or - *value_validator* must be provided. - - Args: - key_validator: Validator(s) to apply to dictionary keys. - - value_validator: Validator(s) to apply to dictionary values. - - mapping_validator: - Validator(s) to apply to top-level mapping attribute. - - .. versionadded:: 19.1.0 - - .. versionchanged:: 25.4.0 - *key_validator* and *value_validator* are now optional, but at least one - of them must be provided. - - .. versionchanged:: 25.4.0 - *key_validator*, *value_validator*, and *mapping_validator* can now be a - list or tuple of validators. - - Raises: - TypeError: If any sub-validator fails on validation. - - ValueError: - If neither *key_validator* nor *value_validator* is provided on - instantiation. - """ - if key_validator is None and value_validator is None: - msg = ( - "At least one of key_validator or value_validator must be provided" - ) - raise ValueError(msg) - - if isinstance(key_validator, (list, tuple)): - key_validator = and_(*key_validator) - if isinstance(value_validator, (list, tuple)): - value_validator = and_(*value_validator) - if isinstance(mapping_validator, (list, tuple)): - mapping_validator = and_(*mapping_validator) - - return _DeepMapping(key_validator, value_validator, mapping_validator) - - -@attrs(repr=False, frozen=True, slots=True) -class _NumberValidator: - bound = attrib() - compare_op = attrib() - compare_func = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not self.compare_func(value, self.bound): - msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def lt(val): - """ - A validator that raises `ValueError` if the initializer is called with a - number larger or equal to *val*. - - The validator uses `operator.lt` to compare the values. - - Args: - val: Exclusive upper bound for values. - - .. 
versionadded:: 21.3.0 - """ - return _NumberValidator(val, "<", operator.lt) - - -def le(val): - """ - A validator that raises `ValueError` if the initializer is called with a - number greater than *val*. - - The validator uses `operator.le` to compare the values. - - Args: - val: Inclusive upper bound for values. - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, "<=", operator.le) - - -def ge(val): - """ - A validator that raises `ValueError` if the initializer is called with a - number smaller than *val*. - - The validator uses `operator.ge` to compare the values. - - Args: - val: Inclusive lower bound for values - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, ">=", operator.ge) - - -def gt(val): - """ - A validator that raises `ValueError` if the initializer is called with a - number smaller or equal to *val*. - - The validator uses `operator.gt` to compare the values. - - Args: - val: Exclusive lower bound for values - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, ">", operator.gt) - - -@attrs(repr=False, frozen=True, slots=True) -class _MaxLengthValidator: - max_length = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if len(value) > self.max_length: - msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def max_len(length): - """ - A validator that raises `ValueError` if the initializer is called - with a string or iterable that is longer than *length*. - - Args: - length (int): Maximum length of the string or iterable - - .. versionadded:: 21.3.0 - """ - return _MaxLengthValidator(length) - - -@attrs(repr=False, frozen=True, slots=True) -class _MinLengthValidator: - min_length = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if len(value) < self.min_length: - msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def min_len(length): - """ - A validator that raises `ValueError` if the initializer is called - with a string or iterable that is shorter than *length*. - - Args: - length (int): Minimum length of the string or iterable - - .. versionadded:: 22.1.0 - """ - return _MinLengthValidator(length) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _SubclassOfValidator: - type = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not issubclass(value, self.type): - msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})." - raise TypeError( - msg, - attr, - self.type, - value, - ) - - def __repr__(self): - return f"" - - -def _subclass_of(type): - """ - A validator that raises a `TypeError` if the initializer is called with a - wrong type for this particular attribute (checks are performed using - `issubclass` therefore it's also valid to pass a tuple of types). - - Args: - type (type | tuple[type, ...]): The type(s) to check for. - - Raises: - TypeError: - With a human readable error message, the attribute (of type - `attrs.Attribute`), the expected type, and the value it got. 
- """ - return _SubclassOfValidator(type) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _NotValidator: - validator = attrib() - msg = attrib( - converter=default_if_none( - "not_ validator child '{validator!r}' " - "did not raise a captured error" - ) - ) - exc_types = attrib( - validator=deep_iterable( - member_validator=_subclass_of(Exception), - iterable_validator=instance_of(tuple), - ), - ) - - def __call__(self, inst, attr, value): - try: - self.validator(inst, attr, value) - except self.exc_types: - pass # suppress error to invert validity - else: - raise ValueError( - self.msg.format( - validator=self.validator, - exc_types=self.exc_types, - ), - attr, - self.validator, - value, - self.exc_types, - ) - - def __repr__(self): - return f"" - - -def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): - """ - A validator that wraps and logically 'inverts' the validator passed to it. - It will raise a `ValueError` if the provided validator *doesn't* raise a - `ValueError` or `TypeError` (by default), and will suppress the exception - if the provided validator *does*. - - Intended to be used with existing validators to compose logic without - needing to create inverted variants, for example, ``not_(in_(...))``. - - Args: - validator: A validator to be logically inverted. - - msg (str): - Message to raise if validator fails. Formatted with keys - ``exc_types`` and ``validator``. - - exc_types (tuple[type, ...]): - Exception type(s) to capture. Other types raised by child - validators will not be intercepted and pass through. - - Raises: - ValueError: - With a human readable error message, the attribute (of type - `attrs.Attribute`), the validator that failed to raise an - exception, the value it got, and the expected exception types. - - .. versionadded:: 22.2.0 - """ - try: - exc_types = tuple(exc_types) - except TypeError: - exc_types = (exc_types,) - return _NotValidator(validator, msg, exc_types) - - -@attrs(repr=False, slots=True, unsafe_hash=True) -class _OrValidator: - validators = attrib() - - def __call__(self, inst, attr, value): - for v in self.validators: - try: - v(inst, attr, value) - except Exception: # noqa: BLE001, PERF203, S112 - continue - else: - return - - msg = f"None of {self.validators!r} satisfied for value {value!r}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def or_(*validators): - """ - A validator that composes multiple validators into one. - - When called on a value, it runs all wrapped validators until one of them is - satisfied. - - Args: - validators (~collections.abc.Iterable[typing.Callable]): - Arbitrary number of validators. - - Raises: - ValueError: - If no validator is satisfied. Raised with a human-readable error - message listing all the wrapped validators and the value that - failed all of them. - - .. 
versionadded:: 24.1.0 - """ - vals = [] - for v in validators: - vals.extend(v.validators if isinstance(v, _OrValidator) else [v]) - - return _OrValidator(tuple(vals)) diff --git a/venv/Lib/site-packages/attr/validators.pyi b/venv/Lib/site-packages/attr/validators.pyi deleted file mode 100644 index 36a7e80..0000000 --- a/venv/Lib/site-packages/attr/validators.pyi +++ /dev/null @@ -1,140 +0,0 @@ -from types import UnionType -from typing import ( - Any, - AnyStr, - Callable, - Container, - ContextManager, - Iterable, - Mapping, - Match, - Pattern, - TypeVar, - overload, -) - -from attrs import _ValidatorType -from attrs import _ValidatorArgType - -_T = TypeVar("_T") -_T1 = TypeVar("_T1") -_T2 = TypeVar("_T2") -_T3 = TypeVar("_T3") -_T4 = TypeVar("_T4") -_T5 = TypeVar("_T5") -_T6 = TypeVar("_T6") -_I = TypeVar("_I", bound=Iterable) -_K = TypeVar("_K") -_V = TypeVar("_V") -_M = TypeVar("_M", bound=Mapping) - -def set_disabled(run: bool) -> None: ... -def get_disabled() -> bool: ... -def disabled() -> ContextManager[None]: ... - -# To be more precise on instance_of use some overloads. -# If there are more than 3 items in the tuple then we fall back to Any -@overload -def instance_of(type: type[_T]) -> _ValidatorType[_T]: ... -@overload -def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ... -@overload -def instance_of( - type: tuple[type[_T1], type[_T2]], -) -> _ValidatorType[_T1 | _T2]: ... -@overload -def instance_of( - type: tuple[type[_T1], type[_T2], type[_T3]], -) -> _ValidatorType[_T1 | _T2 | _T3]: ... -@overload -def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ... -@overload -def instance_of(type: UnionType) -> _ValidatorType[Any]: ... -def optional( - validator: ( - _ValidatorType[_T] - | list[_ValidatorType[_T]] - | tuple[_ValidatorType[_T]] - ), -) -> _ValidatorType[_T | None]: ... -def in_(options: Container[_T]) -> _ValidatorType[_T]: ... -def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... -def matches_re( - regex: Pattern[AnyStr] | AnyStr, - flags: int = ..., - func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ..., -) -> _ValidatorType[AnyStr]: ... -def deep_iterable( - member_validator: _ValidatorArgType[_T], - iterable_validator: _ValidatorArgType[_I] | None = ..., -) -> _ValidatorType[_I]: ... -@overload -def deep_mapping( - key_validator: _ValidatorArgType[_K], - value_validator: _ValidatorArgType[_V] | None = ..., - mapping_validator: _ValidatorArgType[_M] | None = ..., -) -> _ValidatorType[_M]: ... -@overload -def deep_mapping( - key_validator: _ValidatorArgType[_K] | None = ..., - value_validator: _ValidatorArgType[_V] = ..., - mapping_validator: _ValidatorArgType[_M] | None = ..., -) -> _ValidatorType[_M]: ... -def is_callable() -> _ValidatorType[_T]: ... -def lt(val: _T) -> _ValidatorType[_T]: ... -def le(val: _T) -> _ValidatorType[_T]: ... -def ge(val: _T) -> _ValidatorType[_T]: ... -def gt(val: _T) -> _ValidatorType[_T]: ... -def max_len(length: int) -> _ValidatorType[_T]: ... -def min_len(length: int) -> _ValidatorType[_T]: ... -def not_( - validator: _ValidatorType[_T], - *, - msg: str | None = None, - exc_types: type[Exception] | Iterable[type[Exception]] = ..., -) -> _ValidatorType[_T]: ... -@overload -def or_( - __v1: _ValidatorType[_T1], - __v2: _ValidatorType[_T2], -) -> _ValidatorType[_T1 | _T2]: ... -@overload -def or_( - __v1: _ValidatorType[_T1], - __v2: _ValidatorType[_T2], - __v3: _ValidatorType[_T3], -) -> _ValidatorType[_T1 | _T2 | _T3]: ... 
-@overload -def or_( - __v1: _ValidatorType[_T1], - __v2: _ValidatorType[_T2], - __v3: _ValidatorType[_T3], - __v4: _ValidatorType[_T4], -) -> _ValidatorType[_T1 | _T2 | _T3 | _T4]: ... -@overload -def or_( - __v1: _ValidatorType[_T1], - __v2: _ValidatorType[_T2], - __v3: _ValidatorType[_T3], - __v4: _ValidatorType[_T4], - __v5: _ValidatorType[_T5], -) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5]: ... -@overload -def or_( - __v1: _ValidatorType[_T1], - __v2: _ValidatorType[_T2], - __v3: _ValidatorType[_T3], - __v4: _ValidatorType[_T4], - __v5: _ValidatorType[_T5], - __v6: _ValidatorType[_T6], -) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5 | _T6]: ... -@overload -def or_( - __v1: _ValidatorType[Any], - __v2: _ValidatorType[Any], - __v3: _ValidatorType[Any], - __v4: _ValidatorType[Any], - __v5: _ValidatorType[Any], - __v6: _ValidatorType[Any], - *validators: _ValidatorType[Any], -) -> _ValidatorType[Any]: ... diff --git a/venv/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER b/venv/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/attrs-25.4.0.dist-info/METADATA b/venv/Lib/site-packages/attrs-25.4.0.dist-info/METADATA deleted file mode 100644 index 51128bb..0000000 --- a/venv/Lib/site-packages/attrs-25.4.0.dist-info/METADATA +++ /dev/null @@ -1,235 +0,0 @@ -Metadata-Version: 2.4 -Name: attrs -Version: 25.4.0 -Summary: Classes Without Boilerplate -Project-URL: Documentation, https://www.attrs.org/ -Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html -Project-URL: GitHub, https://github.com/python-attrs/attrs -Project-URL: Funding, https://github.com/sponsors/hynek -Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi -Author-email: Hynek Schlawack -License-Expression: MIT -License-File: LICENSE -Keywords: attribute,boilerplate,class -Classifier: Development Status :: 5 - Production/Stable -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Typing :: Typed -Requires-Python: >=3.9 -Description-Content-Type: text/markdown - -

- [attrs logo]

    - - -*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). -Trusted by NASA for [Mars missions since 2020](https://github.com/readme/featured/nasa-ingenuity-helicopter)! - -Its main goal is to help you to write **concise** and **correct** software without slowing down your code. - - -## Sponsors - -*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). -Especially those generously supporting us at the *The Organization* tier and higher: - - - -



    - Please consider joining them to help make attrs’s maintenance more sustainable! -

    - - - -## Example - -*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: - - - -```pycon ->>> from attrs import asdict, define, make_class, Factory - ->>> @define -... class SomeClass: -... a_number: int = 42 -... list_of_numbers: list[int] = Factory(list) -... -... def hard_math(self, another_number): -... return self.a_number + sum(self.list_of_numbers) * another_number - - ->>> sc = SomeClass(1, [1, 2, 3]) ->>> sc -SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) - ->>> sc.hard_math(3) -19 ->>> sc == SomeClass(1, [1, 2, 3]) -True ->>> sc != SomeClass(2, [3, 2, 1]) -True - ->>> asdict(sc) -{'a_number': 1, 'list_of_numbers': [1, 2, 3]} - ->>> SomeClass() -SomeClass(a_number=42, list_of_numbers=[]) - ->>> C = make_class("C", ["a", "b"]) ->>> C("foo", "bar") -C(a='foo', b='bar') -``` - -After *declaring* your attributes, *attrs* gives you: - -- a concise and explicit overview of the class's attributes, -- a nice human-readable `__repr__`, -- equality-checking methods, -- an initializer, -- and much more, - -*without* writing dull boilerplate code again and again and *without* runtime performance penalties. - ---- - -This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. -The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. - -Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation! - - -### Hate Type Annotations!? - -No problem! -Types are entirely **optional** with *attrs*. -Simply assign `attrs.field()` to the attributes instead of annotating them with types: - -```python -from attrs import define, field - -@define -class SomeClass: - a_number = field(default=42) - list_of_numbers = field(factory=list) -``` - - -## Data Classes - -On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). -In practice it does a lot more and is more flexible. -For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger. - -For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice. 
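The validator helpers removed in `attr/validators.py` above (`instance_of`, `gt`, `max_len`, `in_`, `optional`, `deep_iterable`, …) are meant to be combined with this modern API. A minimal, hypothetical sketch (the `Product` class and its fields are invented for illustration and are not part of this repository):

```python
from attrs import define, field, validators as v


@define
class Product:
    # an int strictly greater than 0 (passing a list implies and_())
    quantity: int = field(validator=[v.instance_of(int), v.gt(0)])
    # a short display name, at most 40 characters
    name: str = field(validator=[v.instance_of(str), v.max_len(40)])
    # restricted to a fixed set of options
    currency: str = field(default="EUR", validator=v.in_({"EUR", "USD", "CNY"}))
    # optional list of non-empty string tags; member/iterable validators may be
    # lists since 25.4.0, which wraps them in and_()
    tags: list[str] | None = field(
        default=None,
        validator=v.optional(
            v.deep_iterable(
                member_validator=[v.instance_of(str), v.min_len(1)],
                iterable_validator=v.instance_of(list),
            )
        ),
    )


Product(quantity=3, name="widget", tags=["hardware"])  # passes
# Product(quantity=0, name="widget")  # raises ValueError from gt(0)
```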
- - -## Project Information - -- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) -- [**Documentation**](https://www.attrs.org/) -- [**PyPI**](https://pypi.org/project/attrs/) -- [**Source Code**](https://github.com/python-attrs/attrs) -- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) -- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) -- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs) - - -### *attrs* for Enterprise - -Available as part of the [Tidelift Subscription](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek). - -The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. -Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. - -## Release Information - -### Backwards-incompatible Changes - -- Class-level `kw_only=True` behavior is now consistent with `dataclasses`. - - Previously, a class that sets `kw_only=True` makes all attributes keyword-only, including those from base classes. - If an attribute sets `kw_only=False`, that setting is ignored, and it is still made keyword-only. - - Now, only the attributes defined in that class that doesn't explicitly set `kw_only=False` are made keyword-only. - - This shouldn't be a problem for most users, unless you have a pattern like this: - - ```python - @attrs.define(kw_only=True) - class Base: - a: int - b: int = attrs.field(default=1, kw_only=False) - - @attrs.define - class Subclass(Base): - c: int - ``` - - Here, we have a `kw_only=True` *attrs* class (`Base`) with an attribute that sets `kw_only=False` and has a default (`Base.b`), and then create a subclass (`Subclass`) with required arguments (`Subclass.c`). - Previously this would work, since it would make `Base.b` keyword-only, but now this fails since `Base.b` is positional, and we have a required positional argument (`Subclass.c`) following another argument with defaults. - [#1457](https://github.com/python-attrs/attrs/issues/1457) - - -### Changes - -- Values passed to the `__init__()` method of `attrs` classes are now correctly passed to `__attrs_pre_init__()` instead of their default values (in cases where *kw_only* was not specified). - [#1427](https://github.com/python-attrs/attrs/issues/1427) -- Added support for Python 3.14 and [PEP 749](https://peps.python.org/pep-0749/). - [#1446](https://github.com/python-attrs/attrs/issues/1446), - [#1451](https://github.com/python-attrs/attrs/issues/1451) -- `attrs.validators.deep_mapping()` now allows to leave out either *key_validator* xor *value_validator*. - [#1448](https://github.com/python-attrs/attrs/issues/1448) -- `attrs.validators.deep_iterator()` and `attrs.validators.deep_mapping()` now accept lists and tuples for all validators and wrap them into a `attrs.validators.and_()`. - [#1449](https://github.com/python-attrs/attrs/issues/1449) -- Added a new **experimental** way to inspect classes: - - `attrs.inspect(cls)` returns the _effective_ class-wide parameters that were used by *attrs* to construct the class. - - The returned class is the same data structure that *attrs* uses internally to decide how to construct the final class. 
- [#1454](https://github.com/python-attrs/attrs/issues/1454) -- Fixed annotations for `attrs.field(converter=...)`. - Previously, a `tuple` of converters was only accepted if it had exactly one element. - [#1461](https://github.com/python-attrs/attrs/issues/1461) -- The performance of `attrs.asdict()` has been improved by 45–260%. - [#1463](https://github.com/python-attrs/attrs/issues/1463) -- The performance of `attrs.astuple()` has been improved by 49–270%. - [#1469](https://github.com/python-attrs/attrs/issues/1469) -- The type annotation for `attrs.validators.or_()` now allows for different types of validators. - - This was only an issue on Pyright. - [#1474](https://github.com/python-attrs/attrs/issues/1474) - - - ---- - -[Full changelog →](https://www.attrs.org/en/stable/changelog.html) diff --git a/venv/Lib/site-packages/attrs-25.4.0.dist-info/RECORD b/venv/Lib/site-packages/attrs-25.4.0.dist-info/RECORD deleted file mode 100644 index e77f608..0000000 --- a/venv/Lib/site-packages/attrs-25.4.0.dist-info/RECORD +++ /dev/null @@ -1,56 +0,0 @@ -attr/__init__.py,sha256=fOYIvt1eGSqQre4uCS3sJWKZ0mwAuC8UD6qba5OS9_U,2057 -attr/__init__.pyi,sha256=IZkzIjvtbRqDWGkDBIF9dd12FgDa379JYq3GHnVOvFQ,11309 -attr/__pycache__/__init__.cpython-312.pyc,, -attr/__pycache__/_cmp.cpython-312.pyc,, -attr/__pycache__/_compat.cpython-312.pyc,, -attr/__pycache__/_config.cpython-312.pyc,, -attr/__pycache__/_funcs.cpython-312.pyc,, -attr/__pycache__/_make.cpython-312.pyc,, -attr/__pycache__/_next_gen.cpython-312.pyc,, -attr/__pycache__/_version_info.cpython-312.pyc,, -attr/__pycache__/converters.cpython-312.pyc,, -attr/__pycache__/exceptions.cpython-312.pyc,, -attr/__pycache__/filters.cpython-312.pyc,, -attr/__pycache__/setters.cpython-312.pyc,, -attr/__pycache__/validators.cpython-312.pyc,, -attr/_cmp.py,sha256=3Nn1TjxllUYiX_nJoVnEkXoDk0hM1DYKj5DE7GZe4i0,4117 -attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368 -attr/_compat.py,sha256=x0g7iEUOnBVJC72zyFCgb1eKqyxS-7f2LGnNyZ_r95s,2829 -attr/_config.py,sha256=dGq3xR6fgZEF6UBt_L0T-eUHIB4i43kRmH0P28sJVw8,843 -attr/_funcs.py,sha256=Ix5IETTfz5F01F-12MF_CSFomIn2h8b67EVVz2gCtBE,16479 -attr/_make.py,sha256=NRJDGS8syg2h3YNflVNoK2FwR3CpdSZxx8M6lacwljA,104141 -attr/_next_gen.py,sha256=BQtCUlzwg2gWHTYXBQvrEYBnzBUrDvO57u0Py6UCPhc,26274 -attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 -attr/_version_info.py,sha256=w4R-FYC3NK_kMkGUWJlYP4cVAlH9HRaC-um3fcjYkHM,2222 -attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 -attr/converters.py,sha256=GlDeOzPeTFgeBBLbj9G57Ez5lAk68uhSALRYJ_exe84,3861 -attr/converters.pyi,sha256=orU2bff-VjQa2kMDyvnMQV73oJT2WRyQuw4ZR1ym1bE,643 -attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977 -attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 -attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795 -attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208 -attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -attr/setters.py,sha256=5-dcT63GQK35ONEzSgfXCkbB7pPkaR-qv15mm4PVSzQ,1617 -attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584 -attr/validators.py,sha256=1BnYGTuYvSucGEI4ju-RPNJteVzG0ZlfWpJiWoSFHQ8,21458 -attr/validators.pyi,sha256=ftmW3m4KJ3pQcIXAj-BejT7BY4ZfqrC1G-5W7XvoPds,4082 -attrs-25.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -attrs-25.4.0.dist-info/METADATA,sha256=2Rerxj7agcMRxiwdkt6lC2guqHAmkGKCH13nWWK7ZoQ,10473 
-attrs-25.4.0.dist-info/RECORD,, -attrs-25.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -attrs-25.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 -attrs-25.4.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 -attrs/__init__.py,sha256=RxaAZNwYiEh-fcvHLZNpQ_DWKni73M_jxEPEftiq1Zc,1183 -attrs/__init__.pyi,sha256=2gV79g9UxJppGSM48hAZJ6h_MHb70dZoJL31ZNJeZYI,9416 -attrs/__pycache__/__init__.cpython-312.pyc,, -attrs/__pycache__/converters.cpython-312.pyc,, -attrs/__pycache__/exceptions.cpython-312.pyc,, -attrs/__pycache__/filters.cpython-312.pyc,, -attrs/__pycache__/setters.cpython-312.pyc,, -attrs/__pycache__/validators.cpython-312.pyc,, -attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76 -attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76 -attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73 -attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73 -attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76 diff --git a/venv/Lib/site-packages/attrs-25.4.0.dist-info/REQUESTED b/venv/Lib/site-packages/attrs-25.4.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL b/venv/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL deleted file mode 100644 index 12228d4..0000000 --- a/venv/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.27.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE b/venv/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE deleted file mode 100644 index 2bd6453..0000000 --- a/venv/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Hynek Schlawack and the attrs contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/venv/Lib/site-packages/attrs/__init__.py b/venv/Lib/site-packages/attrs/__init__.py deleted file mode 100644 index dc1ce4b..0000000 --- a/venv/Lib/site-packages/attrs/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr import ( - NOTHING, - Attribute, - AttrsInstance, - Converter, - Factory, - NothingType, - _make_getattr, - assoc, - cmp_using, - define, - evolve, - field, - fields, - fields_dict, - frozen, - has, - make_class, - mutable, - resolve_types, - validate, -) -from attr._make import ClassProps -from attr._next_gen import asdict, astuple, inspect - -from . import converters, exceptions, filters, setters, validators - - -__all__ = [ - "NOTHING", - "Attribute", - "AttrsInstance", - "ClassProps", - "Converter", - "Factory", - "NothingType", - "__author__", - "__copyright__", - "__description__", - "__doc__", - "__email__", - "__license__", - "__title__", - "__url__", - "__version__", - "__version_info__", - "asdict", - "assoc", - "astuple", - "cmp_using", - "converters", - "define", - "evolve", - "exceptions", - "field", - "fields", - "fields_dict", - "filters", - "frozen", - "has", - "inspect", - "make_class", - "mutable", - "resolve_types", - "setters", - "validate", - "validators", -] - -__getattr__ = _make_getattr(__name__) diff --git a/venv/Lib/site-packages/attrs/__init__.pyi b/venv/Lib/site-packages/attrs/__init__.pyi deleted file mode 100644 index 6364bac..0000000 --- a/venv/Lib/site-packages/attrs/__init__.pyi +++ /dev/null @@ -1,314 +0,0 @@ -import sys - -from typing import ( - Any, - Callable, - Mapping, - Sequence, - overload, - TypeVar, -) - -# Because we need to type our own stuff, we have to make everything from -# attr explicitly public too. -from attr import __author__ as __author__ -from attr import __copyright__ as __copyright__ -from attr import __description__ as __description__ -from attr import __email__ as __email__ -from attr import __license__ as __license__ -from attr import __title__ as __title__ -from attr import __url__ as __url__ -from attr import __version__ as __version__ -from attr import __version_info__ as __version_info__ -from attr import assoc as assoc -from attr import Attribute as Attribute -from attr import AttrsInstance as AttrsInstance -from attr import cmp_using as cmp_using -from attr import converters as converters -from attr import Converter as Converter -from attr import evolve as evolve -from attr import exceptions as exceptions -from attr import Factory as Factory -from attr import fields as fields -from attr import fields_dict as fields_dict -from attr import filters as filters -from attr import has as has -from attr import make_class as make_class -from attr import NOTHING as NOTHING -from attr import resolve_types as resolve_types -from attr import setters as setters -from attr import validate as validate -from attr import validators as validators -from attr import attrib, asdict as asdict, astuple as astuple -from attr import NothingType as NothingType - -if sys.version_info >= (3, 11): - from typing import dataclass_transform -else: - from typing_extensions import dataclass_transform - -_T = TypeVar("_T") -_C = TypeVar("_C", bound=type) - -_EqOrderType = bool | Callable[[Any], Any] -_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] -_CallableConverterType = Callable[[Any], Any] -_ConverterType = _CallableConverterType | Converter[Any, Any] -_ReprType = Callable[[Any], str] -_ReprArgType = bool | _ReprType -_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] 
-_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType -_FieldTransformer = Callable[ - [type, list["Attribute[Any]"]], list["Attribute[Any]"] -] -# FIXME: in reality, if multiple validators are passed they must be in a list -# or tuple, but those are invariant and so would prevent subtypes of -# _ValidatorType from working when passed in a list or tuple. -_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]] - -@overload -def field( - *, - default: None = ..., - validator: None = ..., - repr: _ReprArgType = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - converter: None = ..., - factory: None = ..., - kw_only: bool | None = ..., - eq: bool | None = ..., - order: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., - type: type | None = ..., -) -> Any: ... - -# This form catches an explicit None or no default and infers the type from the -# other arguments. -@overload -def field( - *, - default: None = ..., - validator: _ValidatorArgType[_T] | None = ..., - repr: _ReprArgType = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - converter: _ConverterType - | list[_ConverterType] - | tuple[_ConverterType, ...] - | None = ..., - factory: Callable[[], _T] | None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., - type: type | None = ..., -) -> _T: ... - -# This form catches an explicit default argument. -@overload -def field( - *, - default: _T, - validator: _ValidatorArgType[_T] | None = ..., - repr: _ReprArgType = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - converter: _ConverterType - | list[_ConverterType] - | tuple[_ConverterType, ...] - | None = ..., - factory: Callable[[], _T] | None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., - type: type | None = ..., -) -> _T: ... - -# This form covers type=non-Type: e.g. forward references (str), Any -@overload -def field( - *, - default: _T | None = ..., - validator: _ValidatorArgType[_T] | None = ..., - repr: _ReprArgType = ..., - hash: bool | None = ..., - init: bool = ..., - metadata: Mapping[Any, Any] | None = ..., - converter: _ConverterType - | list[_ConverterType] - | tuple[_ConverterType, ...] - | None = ..., - factory: Callable[[], _T] | None = ..., - kw_only: bool | None = ..., - eq: _EqOrderType | None = ..., - order: _EqOrderType | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - alias: str | None = ..., - type: type | None = ..., -) -> Any: ... -@overload -@dataclass_transform(field_specifiers=(attrib, field)) -def define( - maybe_cls: _C, - *, - these: dict[str, Any] | None = ..., - repr: bool = ..., - unsafe_hash: bool | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: bool | None = ..., - order: bool | None = ..., - auto_detect: bool = ..., - getstate_setstate: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., - match_args: bool = ..., -) -> _C: ... 
-@overload -@dataclass_transform(field_specifiers=(attrib, field)) -def define( - maybe_cls: None = ..., - *, - these: dict[str, Any] | None = ..., - repr: bool = ..., - unsafe_hash: bool | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: bool | None = ..., - order: bool | None = ..., - auto_detect: bool = ..., - getstate_setstate: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., - match_args: bool = ..., -) -> Callable[[_C], _C]: ... - -mutable = define - -@overload -@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) -def frozen( - maybe_cls: _C, - *, - these: dict[str, Any] | None = ..., - repr: bool = ..., - unsafe_hash: bool | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: bool | None = ..., - order: bool | None = ..., - auto_detect: bool = ..., - getstate_setstate: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., - match_args: bool = ..., -) -> _C: ... -@overload -@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) -def frozen( - maybe_cls: None = ..., - *, - these: dict[str, Any] | None = ..., - repr: bool = ..., - unsafe_hash: bool | None = ..., - hash: bool | None = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: bool | None = ..., - order: bool | None = ..., - auto_detect: bool = ..., - getstate_setstate: bool | None = ..., - on_setattr: _OnSetAttrArgType | None = ..., - field_transformer: _FieldTransformer | None = ..., - match_args: bool = ..., -) -> Callable[[_C], _C]: ... - -class ClassProps: - # XXX: somehow when defining/using enums Mypy starts looking at our own - # (untyped) code and causes tons of errors. - Hashability: Any - KeywordOnly: Any - - is_exception: bool - is_slotted: bool - has_weakref_slot: bool - is_frozen: bool - # kw_only: ClassProps.KeywordOnly - kw_only: Any - collected_fields_by_mro: bool - added_init: bool - added_repr: bool - added_eq: bool - added_ordering: bool - # hashability: ClassProps.Hashability - hashability: Any - added_match_args: bool - added_str: bool - added_pickling: bool - on_setattr_hook: _OnSetAttrType | None - field_transformer: Callable[[Attribute[Any]], Attribute[Any]] | None - - def __init__( - self, - is_exception: bool, - is_slotted: bool, - has_weakref_slot: bool, - is_frozen: bool, - # kw_only: ClassProps.KeywordOnly - kw_only: Any, - collected_fields_by_mro: bool, - added_init: bool, - added_repr: bool, - added_eq: bool, - added_ordering: bool, - # hashability: ClassProps.Hashability - hashability: Any, - added_match_args: bool, - added_str: bool, - added_pickling: bool, - on_setattr_hook: _OnSetAttrType, - field_transformer: Callable[[Attribute[Any]], Attribute[Any]], - ) -> None: ... - @property - def is_hashable(self) -> bool: ... - -def inspect(cls: type) -> ClassProps: ... 
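The stub above also types the composition helpers `or_` and `not_`, plus the experimental `inspect()` added in 25.4.0. A hypothetical sketch of how they fit together (class and field names are invented; the `ClassProps` attribute names follow the stub above):

```python
import attrs
from attrs import define, field, validators as v


@define(frozen=True)
class Handle:
    # accept either an int id or a non-empty string name
    ref: int | str = field(
        validator=v.or_(
            v.instance_of(int),
            v.and_(v.instance_of(str), v.min_len(1)),
        )
    )
    # anything except a few reserved words
    kind: str = field(validator=v.not_(v.in_({"none", "null"})))


Handle(ref="primary", kind="user")  # passes
# Handle(ref="", kind="user")  # ValueError: none of the or_() validators satisfied

# attrs.inspect() returns the effective class-wide parameters (experimental):
props = attrs.inspect(Handle)
print(props.is_frozen, props.is_slotted, props.is_hashable)
```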
diff --git a/venv/Lib/site-packages/attrs/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/attrs/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d535453..0000000 Binary files a/venv/Lib/site-packages/attrs/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attrs/__pycache__/converters.cpython-312.pyc b/venv/Lib/site-packages/attrs/__pycache__/converters.cpython-312.pyc deleted file mode 100644 index 7a73eec..0000000 Binary files a/venv/Lib/site-packages/attrs/__pycache__/converters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 5eebcad..0000000 Binary files a/venv/Lib/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attrs/__pycache__/filters.cpython-312.pyc b/venv/Lib/site-packages/attrs/__pycache__/filters.cpython-312.pyc deleted file mode 100644 index b82fda9..0000000 Binary files a/venv/Lib/site-packages/attrs/__pycache__/filters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attrs/__pycache__/setters.cpython-312.pyc b/venv/Lib/site-packages/attrs/__pycache__/setters.cpython-312.pyc deleted file mode 100644 index 66afe8e..0000000 Binary files a/venv/Lib/site-packages/attrs/__pycache__/setters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attrs/__pycache__/validators.cpython-312.pyc b/venv/Lib/site-packages/attrs/__pycache__/validators.cpython-312.pyc deleted file mode 100644 index cce6f1b..0000000 Binary files a/venv/Lib/site-packages/attrs/__pycache__/validators.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/attrs/converters.py b/venv/Lib/site-packages/attrs/converters.py deleted file mode 100644 index 7821f6c..0000000 --- a/venv/Lib/site-packages/attrs/converters.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.converters import * # noqa: F403 diff --git a/venv/Lib/site-packages/attrs/exceptions.py b/venv/Lib/site-packages/attrs/exceptions.py deleted file mode 100644 index 3323f9d..0000000 --- a/venv/Lib/site-packages/attrs/exceptions.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.exceptions import * # noqa: F403 diff --git a/venv/Lib/site-packages/attrs/filters.py b/venv/Lib/site-packages/attrs/filters.py deleted file mode 100644 index 3080f48..0000000 --- a/venv/Lib/site-packages/attrs/filters.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.filters import * # noqa: F403 diff --git a/venv/Lib/site-packages/attrs/py.typed b/venv/Lib/site-packages/attrs/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/attrs/setters.py b/venv/Lib/site-packages/attrs/setters.py deleted file mode 100644 index f3d73bb..0000000 --- a/venv/Lib/site-packages/attrs/setters.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.setters import * # noqa: F403 diff --git a/venv/Lib/site-packages/attrs/validators.py b/venv/Lib/site-packages/attrs/validators.py deleted file mode 100644 index 037e124..0000000 --- a/venv/Lib/site-packages/attrs/validators.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.validators import * # noqa: F403 diff --git a/venv/Lib/site-packages/blinker-1.9.0.dist-info/INSTALLER 
b/venv/Lib/site-packages/blinker-1.9.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/blinker-1.9.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/blinker-1.9.0.dist-info/LICENSE.txt b/venv/Lib/site-packages/blinker-1.9.0.dist-info/LICENSE.txt deleted file mode 100644 index 79c9825..0000000 --- a/venv/Lib/site-packages/blinker-1.9.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 2010 Jason Kirtland - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/Lib/site-packages/blinker-1.9.0.dist-info/METADATA b/venv/Lib/site-packages/blinker-1.9.0.dist-info/METADATA deleted file mode 100644 index 6d343f5..0000000 --- a/venv/Lib/site-packages/blinker-1.9.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.3 -Name: blinker -Version: 1.9.0 -Summary: Fast, simple object-to-object and broadcast signaling -Author: Jason Kirtland -Maintainer-email: Pallets Ecosystem -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://blinker.readthedocs.io -Project-URL: Source, https://github.com/pallets-eco/blinker/ - -# Blinker - -Blinker provides a fast dispatching system that allows any number of -interested parties to subscribe to events, or "signals". - - -## Pallets Community Ecosystem - -> [!IMPORTANT]\ -> This project is part of the Pallets Community Ecosystem. Pallets is the open -> source organization that maintains Flask; Pallets-Eco enables community -> maintenance of related projects. If you are interested in helping maintain -> this project, please reach out on [the Pallets Discord server][discord]. -> -> [discord]: https://discord.gg/pallets - - -## Example - -Signal receivers can subscribe to specific senders or receive signals -sent by any sender. - -```pycon ->>> from blinker import signal ->>> started = signal('round-started') ->>> def each(round): -... print(f"Round {round}") -... ->>> started.connect(each) - ->>> def round_two(round): -... print("This is round two.") -... ->>> started.connect(round_two, sender=2) - ->>> for round in range(1, 4): -... started.send(round) -... -Round 1! -Round 2! -This is round two. -Round 3! 
-``` - diff --git a/venv/Lib/site-packages/blinker-1.9.0.dist-info/RECORD b/venv/Lib/site-packages/blinker-1.9.0.dist-info/RECORD deleted file mode 100644 index 2eba51c..0000000 --- a/venv/Lib/site-packages/blinker-1.9.0.dist-info/RECORD +++ /dev/null @@ -1,13 +0,0 @@ -blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054 -blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633 -blinker-1.9.0.dist-info/RECORD,, -blinker-1.9.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82 -blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317 -blinker/__pycache__/__init__.cpython-312.pyc,, -blinker/__pycache__/_utilities.cpython-312.pyc,, -blinker/__pycache__/base.cpython-312.pyc,, -blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675 -blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132 -blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/Lib/site-packages/blinker-1.9.0.dist-info/REQUESTED b/venv/Lib/site-packages/blinker-1.9.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/blinker-1.9.0.dist-info/WHEEL b/venv/Lib/site-packages/blinker-1.9.0.dist-info/WHEEL deleted file mode 100644 index e3c6fee..0000000 --- a/venv/Lib/site-packages/blinker-1.9.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.10.1 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/blinker/__init__.py b/venv/Lib/site-packages/blinker/__init__.py deleted file mode 100644 index 1772fa4..0000000 --- a/venv/Lib/site-packages/blinker/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from .base import ANY -from .base import default_namespace -from .base import NamedSignal -from .base import Namespace -from .base import Signal -from .base import signal - -__all__ = [ - "ANY", - "default_namespace", - "NamedSignal", - "Namespace", - "Signal", - "signal", -] diff --git a/venv/Lib/site-packages/blinker/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/blinker/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 92ac1d3..0000000 Binary files a/venv/Lib/site-packages/blinker/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc b/venv/Lib/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc deleted file mode 100644 index 9bb6514..0000000 Binary files a/venv/Lib/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/blinker/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/blinker/__pycache__/base.cpython-312.pyc deleted file mode 100644 index c596458..0000000 Binary files a/venv/Lib/site-packages/blinker/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/blinker/_utilities.py b/venv/Lib/site-packages/blinker/_utilities.py deleted file mode 100644 index 000c902..0000000 --- a/venv/Lib/site-packages/blinker/_utilities.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import inspect -import typing as t -from weakref import ref -from weakref import WeakMethod - 
-T = t.TypeVar("T") - - -class Symbol: - """A constant symbol, nicer than ``object()``. Repeated calls return the - same instance. - - >>> Symbol('foo') is Symbol('foo') - True - >>> Symbol('foo') - foo - """ - - symbols: t.ClassVar[dict[str, Symbol]] = {} - - def __new__(cls, name: str) -> Symbol: - if name in cls.symbols: - return cls.symbols[name] - - obj = super().__new__(cls) - cls.symbols[name] = obj - return obj - - def __init__(self, name: str) -> None: - self.name = name - - def __repr__(self) -> str: - return self.name - - def __getnewargs__(self) -> tuple[t.Any, ...]: - return (self.name,) - - -def make_id(obj: object) -> c.Hashable: - """Get a stable identifier for a receiver or sender, to be used as a dict - key or in a set. - """ - if inspect.ismethod(obj): - # The id of a bound method is not stable, but the id of the unbound - # function and instance are. - return id(obj.__func__), id(obj.__self__) - - if isinstance(obj, (str, int)): - # Instances with the same value always compare equal and have the same - # hash, even if the id may change. - return obj - - # Assume other types are not hashable but will always be the same instance. - return id(obj) - - -def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]: - if inspect.ismethod(obj): - return WeakMethod(obj, callback) # type: ignore[arg-type, return-value] - - return ref(obj, callback) diff --git a/venv/Lib/site-packages/blinker/base.py b/venv/Lib/site-packages/blinker/base.py deleted file mode 100644 index d051b94..0000000 --- a/venv/Lib/site-packages/blinker/base.py +++ /dev/null @@ -1,512 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import sys -import typing as t -import weakref -from collections import defaultdict -from contextlib import contextmanager -from functools import cached_property -from inspect import iscoroutinefunction - -from ._utilities import make_id -from ._utilities import make_ref -from ._utilities import Symbol - -F = t.TypeVar("F", bound=c.Callable[..., t.Any]) - -ANY = Symbol("ANY") -"""Symbol for "any sender".""" - -ANY_ID = 0 - - -class Signal: - """A notification emitter. - - :param doc: The docstring for the signal. - """ - - ANY = ANY - """An alias for the :data:`~blinker.ANY` sender symbol.""" - - set_class: type[set[t.Any]] = set - """The set class to use for tracking connected receivers and senders. - Python's ``set`` is unordered. If receivers must be dispatched in the order - they were connected, an ordered set implementation can be used. - - .. versionadded:: 1.7 - """ - - @cached_property - def receiver_connected(self) -> Signal: - """Emitted at the end of each :meth:`connect` call. - - The signal sender is the signal instance, and the :meth:`connect` - arguments are passed through: ``receiver``, ``sender``, and ``weak``. - - .. versionadded:: 1.2 - """ - return Signal(doc="Emitted after a receiver connects.") - - @cached_property - def receiver_disconnected(self) -> Signal: - """Emitted at the end of each :meth:`disconnect` call. - - The sender is the signal instance, and the :meth:`disconnect` arguments - are passed through: ``receiver`` and ``sender``. - - This signal is emitted **only** when :meth:`disconnect` is called - explicitly. This signal cannot be emitted by an automatic disconnect - when a weakly referenced receiver or sender goes out of scope, as the - instance is no longer be available to be used as the sender for this - signal. 
"Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - "request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - 
"resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - "resources": { - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - "request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - 
"Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": "GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": 
"Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - "waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - "waiterName": "InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "Image": { - "resource": { - "type": "Image", - 
"identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - "operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": 
"identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": 
"Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } - } - }, - "RouteTable": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableId" - } - ], - "shape": "RouteTable", - "load": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "RouteTables[0]" - }, - "actions": { - "AssociateWithSubnet": { - "request": { - "operation": "AssociateRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "AssociationId" } - ] - } - }, - "CreateRoute": { - "request": { - "operation": "CreateRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Associations": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } - ], - "path": "RouteTables[0].Associations[]" - } - } - } - }, - "RouteTableAssociation": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableAssociationId" - } - ], - "shape": "RouteTableAssociation", - "actions": { - "Delete": { - "request": { - "operation": "DisassociateRouteTable", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceSubnet": { - "request": { 
- "operation": "ReplaceRouteTableAssociation", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NewAssociationId" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RouteTableId" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - } - } - }, - "SecurityGroup": { - "identifiers": [ - { - "name": "Id", - "memberName": "GroupId" - } - ], - "shape": "SecurityGroup", - "load": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "SecurityGroups[0]" - }, - "actions": { - "AuthorizeEgress": { - "request": { - "operation": "AuthorizeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "AuthorizeIngress": { - "request": { - "operation": "AuthorizeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSecurityGroup", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeEgress": { - "request": { - "operation": "RevokeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeIngress": { - "request": { - "operation": "RevokeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Snapshot": { - "identifiers": [ - { - "name": "Id", - "memberName": "SnapshotId" - } - ], - "shape": "Snapshot", - "load": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Snapshots[0]" - }, - "actions": { - "Copy": { - "request": { - "operation": "CopySnapshot", - "params": [ - { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSnapshot", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": 
"ModifySnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Completed": { - "waiterName": "SnapshotCompleted", - "params": [ - { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Snapshots[]" - } - }, - "has": { - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VolumeId" } - ] - } - } - } - }, - "Subnet": { - "identifiers": [ - { - "name": "Id", - "memberName": "SubnetId" - } - ], - "shape": "Subnet", - "load": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Subnets[0]" - }, - "actions": { - "CreateInstances": { - "request": { - "operation": "RunInstances", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateNetworkInterface": { - "request": { - "operation": "CreateNetworkInterface", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSubnet", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - } - } - }, - "Tag": { - "identifiers": [ - { - "name": "ResourceId", - "memberName": "ResourceId" - }, - { - "name": "Key", - "memberName": "Key" - }, - { - "name": "Value", - "memberName": "Value" - } - ], - "shape": 
"TagDescription", - "load": { - "request": { - "operation": "DescribeTags", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "key" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, - { "target": "Filters[1].Name", "source": "string", "value": "value" }, - { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } - ] - }, - "path": "Tags[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } - ] - } - } - } - }, - "Volume": { - "identifiers": [ - { - "name": "Id", - "memberName": "VolumeId" - } - ], - "shape": "Volume", - "load": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Volumes[0]" - }, - "actions": { - "AttachToInstance": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateSnapshot": { - "request": { - "operation": "CreateSnapshot", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeStatus": { - "request": { - "operation": "DescribeVolumeStatus", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromInstance": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableIo": { - "request": { - "operation": "EnableVolumeIO", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "hasMany": { - "Snapshots": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - 
"resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - } - } - }, - "Vpc": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcId" - } - ], - "shape": "Vpc", - "load": { - "request": { - "operation": "DescribeVpcs", - "params": [ - { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Vpcs[0]" - }, - "actions": { - "AssociateDhcpOptions": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachClassicLinkInstance": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachInternetGateway": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateNetworkAcl": { - "request": { - "operation": "CreateNetworkAcl", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateRouteTable": { - "request": { - "operation": "CreateRouteTable", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { - "operation": "CreateSecurityGroup", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSubnet": { - "request": { - "operation": "CreateSubnet", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkInstance": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachInternetGateway": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DisableClassicLink": { - "request": { - "operation": "DisableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - 
"EnableClassicLink": { - "request": { - "operation": "EnableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "RequestVpcPeeringConnection": { - "request": { - "operation": "CreateVpcPeeringConnection", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "data", "path": "DhcpOptionsId" } - ] - } - } - }, - "hasMany": { - "AcceptedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "NetworkAcls": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "RequestedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": 
"VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "RouteTables": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Subnets": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - } - } - }, - "VpcPeeringConnection": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcPeeringConnectionId" - } - ], - "shape": "VpcPeeringConnection", - "load": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "VpcPeeringConnections[0]" - }, - "actions": { - "Accept": { - "request": { - "operation": "AcceptVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reject": { - "request": { - "operation": "RejectVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "AccepterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } - ] - } - }, - "RequesterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/ec2/2015-03-01/resources-1.json b/venv/Lib/site-packages/boto3/data/ec2/2015-03-01/resources-1.json deleted file mode 100644 index 8ccf160..0000000 --- a/venv/Lib/site-packages/boto3/data/ec2/2015-03-01/resources-1.json +++ /dev/null @@ -1,2289 +0,0 @@ -{ - "service": { - "actions": { - "CreateDhcpOptions": { - "request": { "operation": "CreateDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } - ], - "path": "DhcpOptions" - } - }, - "CreateInstances": { - "request": { "operation": "RunInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": 
"Instances[]" - } - }, - "CreateInternetGateway": { - "request": { "operation": "CreateInternetGateway" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } - ], - "path": "InternetGateway" - } - }, - "CreateKeyPair": { - "request": { "operation": "CreateKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "CreateNetworkAcl": { - "request": { "operation": "CreateNetworkAcl" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateNetworkInterface": { - "request": { "operation": "CreateNetworkInterface" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreatePlacementGroup": { - "request": { "operation": "CreatePlacementGroup" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "GroupName" } - ] - } - }, - "CreateRouteTable": { - "request": { "operation": "CreateRouteTable" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { "operation": "CreateSecurityGroup" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSnapshot": { - "request": { "operation": "CreateSnapshot" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateSubnet": { - "request": { "operation": "CreateSubnet" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { "operation": "CreateTags" }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "requestParameter", "path": "Resources[]" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "CreateVolume": { - "request": { "operation": "CreateVolume" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VolumeId" } - ], - "path": "@" - } - }, - "CreateVpc": { - "request": { "operation": "CreateVpc" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpc.VpcId" } - ], - "path": "Vpc" - } - }, - "CreateVpcPeeringConnection": { - "request": { "operation": "CreateVpcPeeringConnection" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - }, - "DisassociateRouteTable": { - "request": { "operation": "DisassociateRouteTable" } - }, - "ImportKeyPair": { - "request": { "operation": "ImportKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, 
- "RegisterImage": { - "request": { "operation": "RegisterImage" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Instance": { - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "InternetGateway": { - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "NetworkAcl": { - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - "request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": 
"KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - "resources": { - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": 
"Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - "request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - 
"request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": "GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": 
"UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - "waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - "waiterName": "InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - 
"operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - 
"params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } - } - }, - "RouteTable": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableId" - } - ], - "shape": "RouteTable", - "load": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "RouteTables[0]" - }, - "actions": { - "AssociateWithSubnet": { - "request": { - "operation": "AssociateRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "AssociationId" } - ] - } - }, - "CreateRoute": { - "request": { - "operation": "CreateRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - 
} - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Associations": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } - ], - "path": "RouteTables[0].Associations[]" - } - } - } - }, - "RouteTableAssociation": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableAssociationId" - } - ], - "shape": "RouteTableAssociation", - "actions": { - "Delete": { - "request": { - "operation": "DisassociateRouteTable", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceSubnet": { - "request": { - "operation": "ReplaceRouteTableAssociation", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NewAssociationId" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RouteTableId" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - } - } - }, - "SecurityGroup": { - "identifiers": [ - { - "name": "Id", - "memberName": "GroupId" - } - ], - "shape": "SecurityGroup", - "load": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "SecurityGroups[0]" - }, - "actions": { - "AuthorizeEgress": { - "request": { - "operation": "AuthorizeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "AuthorizeIngress": { - "request": { - "operation": "AuthorizeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSecurityGroup", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeEgress": { - "request": { - "operation": "RevokeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeIngress": { - "request": { - "operation": "RevokeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Snapshot": { - "identifiers": [ - { - "name": "Id", - "memberName": "SnapshotId" - } - ], - "shape": "Snapshot", - "load": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Snapshots[0]" - }, - "actions": { - "Copy": { - "request": { - 
"operation": "CopySnapshot", - "params": [ - { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSnapshot", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifySnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Completed": { - "waiterName": "SnapshotCompleted", - "params": [ - { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Snapshots[]" - } - }, - "has": { - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VolumeId" } - ] - } - } - } - }, - "Subnet": { - "identifiers": [ - { - "name": "Id", - "memberName": "SubnetId" - } - ], - "shape": "Subnet", - "load": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Subnets[0]" - }, - "actions": { - "CreateInstances": { - "request": { - "operation": "RunInstances", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateNetworkInterface": { - "request": { - "operation": "CreateNetworkInterface", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSubnet", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": 
"Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - } - } - }, - "Tag": { - "identifiers": [ - { - "name": "ResourceId", - "memberName": "ResourceId" - }, - { - "name": "Key", - "memberName": "Key" - }, - { - "name": "Value", - "memberName": "Value" - } - ], - "shape": "TagDescription", - "load": { - "request": { - "operation": "DescribeTags", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "key" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, - { "target": "Filters[1].Name", "source": "string", "value": "value" }, - { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } - ] - }, - "path": "Tags[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } - ] - } - } - } - }, - "Volume": { - "identifiers": [ - { - "name": "Id", - "memberName": "VolumeId" - } - ], - "shape": "Volume", - "load": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Volumes[0]" - }, - "actions": { - "AttachToInstance": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateSnapshot": { - "request": { - "operation": "CreateSnapshot", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", 
"name": "Id" } - ] - } - }, - "DescribeStatus": { - "request": { - "operation": "DescribeVolumeStatus", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromInstance": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableIo": { - "request": { - "operation": "EnableVolumeIO", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "hasMany": { - "Snapshots": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - } - } - }, - "Vpc": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcId" - } - ], - "shape": "Vpc", - "load": { - "request": { - "operation": "DescribeVpcs", - "params": [ - { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Vpcs[0]" - }, - "actions": { - "AssociateDhcpOptions": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachClassicLinkInstance": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachInternetGateway": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateNetworkAcl": { - "request": { - "operation": "CreateNetworkAcl", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateRouteTable": { - "request": { - "operation": "CreateRouteTable", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { - "operation": "CreateSecurityGroup", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSubnet": { - "request": { - "operation": "CreateSubnet", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": 
"Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkInstance": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachInternetGateway": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DisableClassicLink": { - "request": { - "operation": "DisableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableClassicLink": { - "request": { - "operation": "EnableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "RequestVpcPeeringConnection": { - "request": { - "operation": "CreateVpcPeeringConnection", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "data", "path": "DhcpOptionsId" } - ] - } - } - }, - "hasMany": { - "AcceptedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "NetworkAcls": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": 
"NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "RequestedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "RouteTables": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Subnets": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - } - } - }, - "VpcPeeringConnection": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcPeeringConnectionId" - } - ], - "shape": "VpcPeeringConnection", - "load": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "VpcPeeringConnections[0]" - }, - "actions": { - "Accept": { - "request": { - "operation": "AcceptVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reject": { - "request": { - "operation": "RejectVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "AccepterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } - ] - } - }, - "RequesterVpc": { - "resource": { - "type": 
"Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/ec2/2015-04-15/resources-1.json b/venv/Lib/site-packages/boto3/data/ec2/2015-04-15/resources-1.json deleted file mode 100644 index 8ccf160..0000000 --- a/venv/Lib/site-packages/boto3/data/ec2/2015-04-15/resources-1.json +++ /dev/null @@ -1,2289 +0,0 @@ -{ - "service": { - "actions": { - "CreateDhcpOptions": { - "request": { "operation": "CreateDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } - ], - "path": "DhcpOptions" - } - }, - "CreateInstances": { - "request": { "operation": "RunInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateInternetGateway": { - "request": { "operation": "CreateInternetGateway" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } - ], - "path": "InternetGateway" - } - }, - "CreateKeyPair": { - "request": { "operation": "CreateKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "CreateNetworkAcl": { - "request": { "operation": "CreateNetworkAcl" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateNetworkInterface": { - "request": { "operation": "CreateNetworkInterface" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreatePlacementGroup": { - "request": { "operation": "CreatePlacementGroup" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "GroupName" } - ] - } - }, - "CreateRouteTable": { - "request": { "operation": "CreateRouteTable" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { "operation": "CreateSecurityGroup" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSnapshot": { - "request": { "operation": "CreateSnapshot" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateSubnet": { - "request": { "operation": "CreateSubnet" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { "operation": "CreateTags" }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "requestParameter", "path": "Resources[]" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "CreateVolume": { - "request": { "operation": "CreateVolume" }, - "resource": { - 
"type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VolumeId" } - ], - "path": "@" - } - }, - "CreateVpc": { - "request": { "operation": "CreateVpc" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpc.VpcId" } - ], - "path": "Vpc" - } - }, - "CreateVpcPeeringConnection": { - "request": { "operation": "CreateVpcPeeringConnection" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - }, - "DisassociateRouteTable": { - "request": { "operation": "DisassociateRouteTable" } - }, - "ImportKeyPair": { - "request": { "operation": "ImportKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "RegisterImage": { - "request": { "operation": "RegisterImage" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Instance": { - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "InternetGateway": { - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "NetworkAcl": { - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - 
"request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - 
"resources": { - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - "request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - 
"type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": "GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { 
"target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - "waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - "waiterName": "InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - 
"request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - "operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": 
"identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } - } - }, - "RouteTable": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableId" - } - ], - "shape": "RouteTable", - "load": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "RouteTables[0]" - }, - "actions": { - "AssociateWithSubnet": { - "request": { - "operation": "AssociateRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": 
"Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "AssociationId" } - ] - } - }, - "CreateRoute": { - "request": { - "operation": "CreateRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Associations": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } - ], - "path": "RouteTables[0].Associations[]" - } - } - } - }, - "RouteTableAssociation": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableAssociationId" - } - ], - "shape": "RouteTableAssociation", - "actions": { - "Delete": { - "request": { - "operation": "DisassociateRouteTable", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceSubnet": { - "request": { - "operation": "ReplaceRouteTableAssociation", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NewAssociationId" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RouteTableId" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - } - } - }, - "SecurityGroup": { - "identifiers": [ - { - "name": "Id", - "memberName": "GroupId" - } - ], - "shape": "SecurityGroup", - "load": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "SecurityGroups[0]" - }, - "actions": { - "AuthorizeEgress": { - "request": { - "operation": "AuthorizeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "AuthorizeIngress": { - "request": { - "operation": "AuthorizeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", 
"source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSecurityGroup", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeEgress": { - "request": { - "operation": "RevokeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeIngress": { - "request": { - "operation": "RevokeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Snapshot": { - "identifiers": [ - { - "name": "Id", - "memberName": "SnapshotId" - } - ], - "shape": "Snapshot", - "load": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Snapshots[0]" - }, - "actions": { - "Copy": { - "request": { - "operation": "CopySnapshot", - "params": [ - { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSnapshot", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifySnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Completed": { - "waiterName": "SnapshotCompleted", - "params": [ - { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Snapshots[]" - } - }, - "has": { - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VolumeId" } - ] - } - } - } - }, - "Subnet": { - "identifiers": [ - { - "name": "Id", - "memberName": "SubnetId" - } - ], - "shape": "Subnet", - "load": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Subnets[0]" - }, - "actions": { - "CreateInstances": { - "request": { - "operation": "RunInstances", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateNetworkInterface": { - "request": { - "operation": "CreateNetworkInterface", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreateTags": { - 
"request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSubnet", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - } - } - }, - "Tag": { - "identifiers": [ - { - "name": "ResourceId", - "memberName": "ResourceId" - }, - { - "name": "Key", - "memberName": "Key" - }, - { - "name": "Value", - "memberName": "Value" - } - ], - "shape": "TagDescription", - "load": { - "request": { - "operation": "DescribeTags", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "key" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, - { "target": "Filters[1].Name", "source": "string", "value": "value" }, - { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } - ] - }, - "path": "Tags[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } - ] - } - } - } - }, - "Volume": { - "identifiers": [ - { - "name": "Id", - "memberName": "VolumeId" - } - ], - "shape": "Volume", - "load": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Volumes[0]" - }, - "actions": { - "AttachToInstance": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateSnapshot": { - "request": { - "operation": "CreateSnapshot", - "params": [ - { "target": "VolumeId", "source": "identifier", 
"name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeStatus": { - "request": { - "operation": "DescribeVolumeStatus", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromInstance": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableIo": { - "request": { - "operation": "EnableVolumeIO", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "hasMany": { - "Snapshots": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - } - } - }, - "Vpc": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcId" - } - ], - "shape": "Vpc", - "load": { - "request": { - "operation": "DescribeVpcs", - "params": [ - { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Vpcs[0]" - }, - "actions": { - "AssociateDhcpOptions": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachClassicLinkInstance": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachInternetGateway": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateNetworkAcl": { - "request": { - "operation": "CreateNetworkAcl", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateRouteTable": { - "request": { - "operation": "CreateRouteTable", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - 
"request": { - "operation": "CreateSecurityGroup", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSubnet": { - "request": { - "operation": "CreateSubnet", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkInstance": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachInternetGateway": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DisableClassicLink": { - "request": { - "operation": "DisableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableClassicLink": { - "request": { - "operation": "EnableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "RequestVpcPeeringConnection": { - "request": { - "operation": "CreateVpcPeeringConnection", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "data", "path": "DhcpOptionsId" } - ] - } - } - }, - "hasMany": { - "AcceptedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": 
"Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "NetworkAcls": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "RequestedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "RouteTables": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Subnets": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - } - } - }, - "VpcPeeringConnection": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcPeeringConnectionId" - } - ], - "shape": "VpcPeeringConnection", - "load": { - "request": { - "operation": "DescribeVpcPeeringConnections", - 
"params": [ - { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "VpcPeeringConnections[0]" - }, - "actions": { - "Accept": { - "request": { - "operation": "AcceptVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reject": { - "request": { - "operation": "RejectVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "AccepterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } - ] - } - }, - "RequesterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/ec2/2015-10-01/resources-1.json b/venv/Lib/site-packages/boto3/data/ec2/2015-10-01/resources-1.json deleted file mode 100644 index 4831a36..0000000 --- a/venv/Lib/site-packages/boto3/data/ec2/2015-10-01/resources-1.json +++ /dev/null @@ -1,2567 +0,0 @@ -{ - "service": { - "actions": { - "CreateDhcpOptions": { - "request": { "operation": "CreateDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } - ], - "path": "DhcpOptions" - } - }, - "CreateInstances": { - "request": { "operation": "RunInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateInternetGateway": { - "request": { "operation": "CreateInternetGateway" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } - ], - "path": "InternetGateway" - } - }, - "CreateKeyPair": { - "request": { "operation": "CreateKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ], - "path": "@" - } - }, - "CreateNetworkAcl": { - "request": { "operation": "CreateNetworkAcl" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateNetworkInterface": { - "request": { "operation": "CreateNetworkInterface" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreatePlacementGroup": { - "request": { "operation": "CreatePlacementGroup" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "GroupName" } - ] - } - }, - "CreateRouteTable": { - "request": { "operation": "CreateRouteTable" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { "operation": "CreateSecurityGroup" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": 
"response", "path": "GroupId" } - ] - } - }, - "CreateSnapshot": { - "request": { "operation": "CreateSnapshot" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateSubnet": { - "request": { "operation": "CreateSubnet" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { "operation": "CreateTags" } - }, - "CreateVolume": { - "request": { "operation": "CreateVolume" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VolumeId" } - ], - "path": "@" - } - }, - "CreateVpc": { - "request": { "operation": "CreateVpc" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpc.VpcId" } - ], - "path": "Vpc" - } - }, - "CreateVpcPeeringConnection": { - "request": { "operation": "CreateVpcPeeringConnection" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - }, - "DisassociateRouteTable": { - "request": { "operation": "DisassociateRouteTable" } - }, - "ImportKeyPair": { - "request": { "operation": "ImportKeyPair" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "RegisterImage": { - "request": { "operation": "RegisterImage" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Instance": { - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "InternetGateway": { - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "NetworkAcl": { - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": 
"Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "ClassicAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "standard" } - ] - }, - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "response", "path": "Addresses[].PublicIp" } - ], - "path": "Addresses[]" - } - }, - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - "request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": 
"response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "vpc" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - "resources": { - "ClassicAddress": { - "identifiers": [ - { - "name": "PublicIp" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "PublicIps[]", "source": "identifier", "name": "PublicIp" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": "PublicIp", "source": "identifier", "name": "PublicIp" } - ] - } - }, - "Disassociate": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - } - } - }, - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - 
"request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - 
"request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": "GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - 
"Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - "waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - "waiterName": "InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "ClassicAddress": { - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "data", "path": "PublicIpAddress" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "NetworkInterfaces": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "data", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, 
- "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPair", - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "KeyPairInfo": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - "operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { 
"target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "Association.AssociationId" } - ], - "path": "Association" - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterfaceAssociation": { - "identifiers": [ - { - "name": "Id" - } - ], - "shape": "InstanceNetworkInterfaceAssociation", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "association.association-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0].Association" - }, - "actions": { - "Delete": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Address": { - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - 
"params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } - } - }, - "Route": { - "identifiers": [ - { "name": "RouteTableId" }, - { - "name": "DestinationCidrBlock", - "memberName": "DestinationCidrBlock" - } - ], - "shape": "Route", - "actions": { - "Delete": { - "request": { - "operation": "DeleteRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "RouteTableId" }, - { "target": "DestinationCidrBlock", "source": "identifier", "name": "DestinationCidrBlock" } - ] - } - }, - "Replace": { - "request": { - "operation": "ReplaceRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "RouteTableId" }, - { "target": "DestinationCidrBlock", "source": "identifier", "name": "DestinationCidrBlock" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "identifier", "name": "RouteTableId" } - ] - } - } - } - }, - "RouteTable": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableId" - } - ], - "shape": "RouteTable", - "load": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "RouteTables[0]" - }, - "actions": { - "AssociateWithSubnet": { - "request": { - "operation": "AssociateRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "AssociationId" } - ] - } - }, - "CreateRoute": { - "request": { - "operation": "CreateRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Route", - "identifiers": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" }, - { "target": "DestinationCidrBlock", "source": "requestParameter", "path": "DestinationCidrBlock" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Routes": { - "resource": { - "type": "Route", - "identifiers": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" }, - { "target": "DestinationCidrBlock", "source": "data", "path": "Routes[].DestinationCidrBlock" } - ], - "path": "Routes[]" - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Associations": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": 
{ - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } - ], - "path": "RouteTables[0].Associations[]" - } - } - } - }, - "RouteTableAssociation": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableAssociationId" - } - ], - "shape": "RouteTableAssociation", - "actions": { - "Delete": { - "request": { - "operation": "DisassociateRouteTable", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceSubnet": { - "request": { - "operation": "ReplaceRouteTableAssociation", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NewAssociationId" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RouteTableId" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - } - } - }, - "SecurityGroup": { - "identifiers": [ - { - "name": "Id", - "memberName": "GroupId" - } - ], - "shape": "SecurityGroup", - "load": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "SecurityGroups[0]" - }, - "actions": { - "AuthorizeEgress": { - "request": { - "operation": "AuthorizeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "AuthorizeIngress": { - "request": { - "operation": "AuthorizeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSecurityGroup", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeEgress": { - "request": { - "operation": "RevokeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeIngress": { - "request": { - "operation": "RevokeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Snapshot": { - "identifiers": [ - { - "name": "Id", - "memberName": "SnapshotId" - } - ], - "shape": "Snapshot", - "load": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Snapshots[0]" - }, - "actions": { - "Copy": { - "request": { - "operation": "CopySnapshot", - "params": [ - { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": 
"ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSnapshot", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifySnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Completed": { - "waiterName": "SnapshotCompleted", - "params": [ - { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Snapshots[]" - } - }, - "has": { - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VolumeId" } - ] - } - } - } - }, - "Subnet": { - "identifiers": [ - { - "name": "Id", - "memberName": "SubnetId" - } - ], - "shape": "Subnet", - "load": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Subnets[0]" - }, - "actions": { - "CreateInstances": { - "request": { - "operation": "RunInstances", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateNetworkInterface": { - "request": { - "operation": "CreateNetworkInterface", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSubnet", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - 
"params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - } - } - }, - "Tag": { - "identifiers": [ - { - "name": "ResourceId", - "memberName": "ResourceId" - }, - { - "name": "Key", - "memberName": "Key" - }, - { - "name": "Value", - "memberName": "Value" - } - ], - "shape": "TagDescription", - "load": { - "request": { - "operation": "DescribeTags", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "key" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, - { "target": "Filters[1].Name", "source": "string", "value": "value" }, - { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } - ] - }, - "path": "Tags[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } - ] - } - } - } - }, - "Volume": { - "identifiers": [ - { - "name": "Id", - "memberName": "VolumeId" - } - ], - "shape": "Volume", - "load": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Volumes[0]" - }, - "actions": { - "AttachToInstance": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateSnapshot": { - "request": { - "operation": "CreateSnapshot", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeStatus": { - "request": { - "operation": "DescribeVolumeStatus", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromInstance": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - 
}, - "EnableIo": { - "request": { - "operation": "EnableVolumeIO", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "hasMany": { - "Snapshots": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - } - } - }, - "Vpc": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcId" - } - ], - "shape": "Vpc", - "load": { - "request": { - "operation": "DescribeVpcs", - "params": [ - { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Vpcs[0]" - }, - "actions": { - "AssociateDhcpOptions": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachClassicLinkInstance": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachInternetGateway": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateNetworkAcl": { - "request": { - "operation": "CreateNetworkAcl", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateRouteTable": { - "request": { - "operation": "CreateRouteTable", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { - "operation": "CreateSecurityGroup", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSubnet": { - "request": { - "operation": "CreateSubnet", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVpcAttribute", - "params": [ - { "target": 
"VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkInstance": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachInternetGateway": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DisableClassicLink": { - "request": { - "operation": "DisableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableClassicLink": { - "request": { - "operation": "EnableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "RequestVpcPeeringConnection": { - "request": { - "operation": "CreateVpcPeeringConnection", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "data", "path": "DhcpOptionsId" } - ] - } - } - }, - "hasMany": { - "AcceptedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "NetworkAcls": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", 
"source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "RequestedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "RouteTables": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Subnets": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - } - } - }, - "VpcPeeringConnection": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcPeeringConnectionId" - } - ], - "shape": "VpcPeeringConnection", - "load": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "VpcPeeringConnections[0]" - }, - "actions": { - "Accept": { - "request": { - "operation": "AcceptVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reject": { - "request": { - "operation": "RejectVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "VpcPeeringConnectionExists", - "params": [ - { "target": "VpcPeeringConnectionIds[]", "source": "identifier", "name": "Id" } - ], - "path": "VpcPeeringConnections[0]" - } - }, - "has": { - "AccepterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } - ] - } - }, - "RequesterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } - ] - } - } - } - }, - "VpcAddress": { - 
"identifiers": [ - { - "name": "AllocationId" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "AllocationIds[0]", "source": "identifier", "name": "AllocationId" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": "AllocationId", "source": "identifier", "name": "AllocationId" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AssociationId" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/ec2/2016-04-01/resources-1.json b/venv/Lib/site-packages/boto3/data/ec2/2016-04-01/resources-1.json deleted file mode 100644 index 4831a36..0000000 --- a/venv/Lib/site-packages/boto3/data/ec2/2016-04-01/resources-1.json +++ /dev/null @@ -1,2567 +0,0 @@ -{ - "service": { - "actions": { - "CreateDhcpOptions": { - "request": { "operation": "CreateDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } - ], - "path": "DhcpOptions" - } - }, - "CreateInstances": { - "request": { "operation": "RunInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateInternetGateway": { - "request": { "operation": "CreateInternetGateway" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } - ], - "path": "InternetGateway" - } - }, - "CreateKeyPair": { - "request": { "operation": "CreateKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ], - "path": "@" - } - }, - "CreateNetworkAcl": { - "request": { "operation": "CreateNetworkAcl" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateNetworkInterface": { - "request": { "operation": "CreateNetworkInterface" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreatePlacementGroup": { - "request": { "operation": "CreatePlacementGroup" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "GroupName" } - ] - } - }, - "CreateRouteTable": { - "request": { "operation": "CreateRouteTable" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { "operation": "CreateSecurityGroup" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSnapshot": { - "request": { "operation": "CreateSnapshot" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", 
"path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateSubnet": { - "request": { "operation": "CreateSubnet" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { "operation": "CreateTags" } - }, - "CreateVolume": { - "request": { "operation": "CreateVolume" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VolumeId" } - ], - "path": "@" - } - }, - "CreateVpc": { - "request": { "operation": "CreateVpc" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpc.VpcId" } - ], - "path": "Vpc" - } - }, - "CreateVpcPeeringConnection": { - "request": { "operation": "CreateVpcPeeringConnection" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - }, - "DisassociateRouteTable": { - "request": { "operation": "DisassociateRouteTable" } - }, - "ImportKeyPair": { - "request": { "operation": "ImportKeyPair" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "RegisterImage": { - "request": { "operation": "RegisterImage" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Instance": { - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "InternetGateway": { - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "NetworkAcl": { - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - 
"VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "ClassicAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "standard" } - ] - }, - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "response", "path": "Addresses[].PublicIp" } - ], - "path": "Addresses[]" - } - }, - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - "request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ 
- { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "vpc" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - "resources": { - "ClassicAddress": { - "identifiers": [ - { - "name": "PublicIp" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "PublicIps[]", "source": "identifier", "name": "PublicIp" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": "PublicIp", "source": "identifier", "name": "PublicIp" } - ] - } - }, - "Disassociate": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - } - } - }, - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - "request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - 
"request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": 
"GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - 
"waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - "waiterName": "InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "ClassicAddress": { - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "data", "path": "PublicIpAddress" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "NetworkInterfaces": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "data", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - 
"request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPair", - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "KeyPairInfo": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - "operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", 
"source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "Association.AssociationId" } - ], - "path": "Association" - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterfaceAssociation": { - "identifiers": [ - { - "name": "Id" - } - ], - "shape": "InstanceNetworkInterfaceAssociation", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "association.association-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0].Association" - }, - "actions": { - "Delete": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Address": { - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - 
"resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } - } - }, - "Route": { - "identifiers": [ - { "name": "RouteTableId" }, - { - "name": "DestinationCidrBlock", - "memberName": "DestinationCidrBlock" - } - ], - "shape": "Route", - "actions": { - "Delete": { - "request": { - "operation": "DeleteRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "RouteTableId" }, - { "target": "DestinationCidrBlock", "source": "identifier", "name": "DestinationCidrBlock" } - ] - } - }, - "Replace": { - "request": { - "operation": "ReplaceRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "RouteTableId" }, - { "target": "DestinationCidrBlock", "source": "identifier", "name": "DestinationCidrBlock" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "identifier", "name": "RouteTableId" } - ] - } - } - } - }, - "RouteTable": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableId" - } - ], - "shape": "RouteTable", - "load": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "RouteTables[0]" - }, - "actions": { - "AssociateWithSubnet": { - "request": { - "operation": "AssociateRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "AssociationId" } - ] - } - }, - "CreateRoute": { - "request": { - "operation": "CreateRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Route", - "identifiers": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" }, - { "target": "DestinationCidrBlock", "source": "requestParameter", "path": "DestinationCidrBlock" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Routes": { - "resource": { - "type": "Route", - "identifiers": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" }, - { "target": "DestinationCidrBlock", "source": "data", "path": "Routes[].DestinationCidrBlock" } - ], - "path": "Routes[]" - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Associations": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } - ], - "path": 
"RouteTables[0].Associations[]" - } - } - } - }, - "RouteTableAssociation": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableAssociationId" - } - ], - "shape": "RouteTableAssociation", - "actions": { - "Delete": { - "request": { - "operation": "DisassociateRouteTable", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceSubnet": { - "request": { - "operation": "ReplaceRouteTableAssociation", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NewAssociationId" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RouteTableId" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - } - } - }, - "SecurityGroup": { - "identifiers": [ - { - "name": "Id", - "memberName": "GroupId" - } - ], - "shape": "SecurityGroup", - "load": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "SecurityGroups[0]" - }, - "actions": { - "AuthorizeEgress": { - "request": { - "operation": "AuthorizeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "AuthorizeIngress": { - "request": { - "operation": "AuthorizeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSecurityGroup", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeEgress": { - "request": { - "operation": "RevokeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeIngress": { - "request": { - "operation": "RevokeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Snapshot": { - "identifiers": [ - { - "name": "Id", - "memberName": "SnapshotId" - } - ], - "shape": "Snapshot", - "load": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Snapshots[0]" - }, - "actions": { - "Copy": { - "request": { - "operation": "CopySnapshot", - "params": [ - { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": 
"Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSnapshot", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifySnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Completed": { - "waiterName": "SnapshotCompleted", - "params": [ - { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Snapshots[]" - } - }, - "has": { - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VolumeId" } - ] - } - } - } - }, - "Subnet": { - "identifiers": [ - { - "name": "Id", - "memberName": "SubnetId" - } - ], - "shape": "Subnet", - "load": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Subnets[0]" - }, - "actions": { - "CreateInstances": { - "request": { - "operation": "RunInstances", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateNetworkInterface": { - "request": { - "operation": "CreateNetworkInterface", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSubnet", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { 
- "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - } - } - }, - "Tag": { - "identifiers": [ - { - "name": "ResourceId", - "memberName": "ResourceId" - }, - { - "name": "Key", - "memberName": "Key" - }, - { - "name": "Value", - "memberName": "Value" - } - ], - "shape": "TagDescription", - "load": { - "request": { - "operation": "DescribeTags", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "key" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, - { "target": "Filters[1].Name", "source": "string", "value": "value" }, - { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } - ] - }, - "path": "Tags[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } - ] - } - } - } - }, - "Volume": { - "identifiers": [ - { - "name": "Id", - "memberName": "VolumeId" - } - ], - "shape": "Volume", - "load": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Volumes[0]" - }, - "actions": { - "AttachToInstance": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateSnapshot": { - "request": { - "operation": "CreateSnapshot", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeStatus": { - "request": { - "operation": "DescribeVolumeStatus", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromInstance": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableIo": { - "request": { - "operation": "EnableVolumeIO", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - 
"request": { - "operation": "ModifyVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "hasMany": { - "Snapshots": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - } - } - }, - "Vpc": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcId" - } - ], - "shape": "Vpc", - "load": { - "request": { - "operation": "DescribeVpcs", - "params": [ - { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Vpcs[0]" - }, - "actions": { - "AssociateDhcpOptions": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachClassicLinkInstance": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachInternetGateway": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateNetworkAcl": { - "request": { - "operation": "CreateNetworkAcl", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateRouteTable": { - "request": { - "operation": "CreateRouteTable", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { - "operation": "CreateSecurityGroup", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSubnet": { - "request": { - "operation": "CreateSubnet", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkInstance": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "VpcId", 
"source": "identifier", "name": "Id" } - ] - } - }, - "DetachInternetGateway": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DisableClassicLink": { - "request": { - "operation": "DisableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableClassicLink": { - "request": { - "operation": "EnableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "RequestVpcPeeringConnection": { - "request": { - "operation": "CreateVpcPeeringConnection", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "data", "path": "DhcpOptionsId" } - ] - } - } - }, - "hasMany": { - "AcceptedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "NetworkAcls": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": 
"NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "RequestedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "RouteTables": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Subnets": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - } - } - }, - "VpcPeeringConnection": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcPeeringConnectionId" - } - ], - "shape": "VpcPeeringConnection", - "load": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "VpcPeeringConnections[0]" - }, - "actions": { - "Accept": { - "request": { - "operation": "AcceptVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reject": { - "request": { - "operation": "RejectVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "VpcPeeringConnectionExists", - "params": [ - { "target": "VpcPeeringConnectionIds[]", "source": "identifier", "name": "Id" } - ], - "path": "VpcPeeringConnections[0]" - } - }, - "has": { - "AccepterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } - ] - } - }, - "RequesterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } - ] - } - } - } - }, - "VpcAddress": { - "identifiers": [ - { - "name": "AllocationId" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": 
"AllocationIds[0]", "source": "identifier", "name": "AllocationId" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": "AllocationId", "source": "identifier", "name": "AllocationId" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AssociationId" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/ec2/2016-09-15/resources-1.json b/venv/Lib/site-packages/boto3/data/ec2/2016-09-15/resources-1.json deleted file mode 100644 index 4831a36..0000000 --- a/venv/Lib/site-packages/boto3/data/ec2/2016-09-15/resources-1.json +++ /dev/null @@ -1,2567 +0,0 @@ -{ - "service": { - "actions": { - "CreateDhcpOptions": { - "request": { "operation": "CreateDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } - ], - "path": "DhcpOptions" - } - }, - "CreateInstances": { - "request": { "operation": "RunInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateInternetGateway": { - "request": { "operation": "CreateInternetGateway" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } - ], - "path": "InternetGateway" - } - }, - "CreateKeyPair": { - "request": { "operation": "CreateKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ], - "path": "@" - } - }, - "CreateNetworkAcl": { - "request": { "operation": "CreateNetworkAcl" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateNetworkInterface": { - "request": { "operation": "CreateNetworkInterface" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreatePlacementGroup": { - "request": { "operation": "CreatePlacementGroup" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "GroupName" } - ] - } - }, - "CreateRouteTable": { - "request": { "operation": "CreateRouteTable" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { "operation": "CreateSecurityGroup" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSnapshot": { - "request": { "operation": "CreateSnapshot" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateSubnet": { - "request": { "operation": "CreateSubnet" }, - "resource": { - "type": "Subnet", - "identifiers": [ - 
{ "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { "operation": "CreateTags" } - }, - "CreateVolume": { - "request": { "operation": "CreateVolume" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VolumeId" } - ], - "path": "@" - } - }, - "CreateVpc": { - "request": { "operation": "CreateVpc" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpc.VpcId" } - ], - "path": "Vpc" - } - }, - "CreateVpcPeeringConnection": { - "request": { "operation": "CreateVpcPeeringConnection" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - }, - "DisassociateRouteTable": { - "request": { "operation": "DisassociateRouteTable" } - }, - "ImportKeyPair": { - "request": { "operation": "ImportKeyPair" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "RegisterImage": { - "request": { "operation": "RegisterImage" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Instance": { - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "InternetGateway": { - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "NetworkAcl": { - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "ClassicAddresses": 
{ - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "standard" } - ] - }, - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "response", "path": "Addresses[].PublicIp" } - ], - "path": "Addresses[]" - } - }, - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - "request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { 
- "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "vpc" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - "resources": { - "ClassicAddress": { - "identifiers": [ - { - "name": "PublicIp" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "PublicIps[]", "source": "identifier", "name": "PublicIp" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": "PublicIp", "source": "identifier", "name": "PublicIp" } - ] - } - }, - "Disassociate": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - } - } - }, - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - "request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { 
"target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": "GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { 
"target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - "waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - 
"waiterName": "InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "ClassicAddress": { - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "data", "path": "PublicIpAddress" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "NetworkInterfaces": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "data", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ 
- { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPair", - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "KeyPairInfo": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - "operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": 
"requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "Association.AssociationId" } - ], - "path": "Association" - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterfaceAssociation": { - "identifiers": [ - { - "name": "Id" - } - ], - "shape": "InstanceNetworkInterfaceAssociation", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "association.association-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0].Association" - }, - "actions": { - "Delete": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Address": { - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } 
- } - }, - "Route": { - "identifiers": [ - { "name": "RouteTableId" }, - { - "name": "DestinationCidrBlock", - "memberName": "DestinationCidrBlock" - } - ], - "shape": "Route", - "actions": { - "Delete": { - "request": { - "operation": "DeleteRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "RouteTableId" }, - { "target": "DestinationCidrBlock", "source": "identifier", "name": "DestinationCidrBlock" } - ] - } - }, - "Replace": { - "request": { - "operation": "ReplaceRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "RouteTableId" }, - { "target": "DestinationCidrBlock", "source": "identifier", "name": "DestinationCidrBlock" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "identifier", "name": "RouteTableId" } - ] - } - } - } - }, - "RouteTable": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableId" - } - ], - "shape": "RouteTable", - "load": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "RouteTables[0]" - }, - "actions": { - "AssociateWithSubnet": { - "request": { - "operation": "AssociateRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "AssociationId" } - ] - } - }, - "CreateRoute": { - "request": { - "operation": "CreateRoute", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Route", - "identifiers": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" }, - { "target": "DestinationCidrBlock", "source": "requestParameter", "path": "DestinationCidrBlock" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteRouteTable", - "params": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Routes": { - "resource": { - "type": "Route", - "identifiers": [ - { "target": "RouteTableId", "source": "identifier", "name": "Id" }, - { "target": "DestinationCidrBlock", "source": "data", "path": "Routes[].DestinationCidrBlock" } - ], - "path": "Routes[]" - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Associations": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } - ], - "path": "RouteTables[0].Associations[]" - } - } - } - }, - "RouteTableAssociation": { - "identifiers": [ - { - "name": "Id", - "memberName": "RouteTableAssociationId" - } - ], - "shape": "RouteTableAssociation", - 
"actions": { - "Delete": { - "request": { - "operation": "DisassociateRouteTable", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceSubnet": { - "request": { - "operation": "ReplaceRouteTableAssociation", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NewAssociationId" } - ] - } - } - }, - "has": { - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RouteTableId" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - } - } - }, - "SecurityGroup": { - "identifiers": [ - { - "name": "Id", - "memberName": "GroupId" - } - ], - "shape": "SecurityGroup", - "load": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "SecurityGroups[0]" - }, - "actions": { - "AuthorizeEgress": { - "request": { - "operation": "AuthorizeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "AuthorizeIngress": { - "request": { - "operation": "AuthorizeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSecurityGroup", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeEgress": { - "request": { - "operation": "RevokeSecurityGroupEgress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - }, - "RevokeIngress": { - "request": { - "operation": "RevokeSecurityGroupIngress", - "params": [ - { "target": "GroupId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Snapshot": { - "identifiers": [ - { - "name": "Id", - "memberName": "SnapshotId" - } - ], - "shape": "Snapshot", - "load": { - "request": { - "operation": "DescribeSnapshots", - "params": [ - { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Snapshots[0]" - }, - "actions": { - "Copy": { - "request": { - "operation": "CopySnapshot", - "params": [ - { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSnapshot", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - 
"DescribeAttribute": { - "request": { - "operation": "DescribeSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifySnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetSnapshotAttribute", - "params": [ - { "target": "SnapshotId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Completed": { - "waiterName": "SnapshotCompleted", - "params": [ - { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Snapshots[]" - } - }, - "has": { - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VolumeId" } - ] - } - } - } - }, - "Subnet": { - "identifiers": [ - { - "name": "Id", - "memberName": "SubnetId" - } - ], - "shape": "Subnet", - "load": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Subnets[0]" - }, - "actions": { - "CreateInstances": { - "request": { - "operation": "RunInstances", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateNetworkInterface": { - "request": { - "operation": "CreateNetworkInterface", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteSubnet", - "params": [ - { "target": "SubnetId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - } - 
} - }, - "Tag": { - "identifiers": [ - { - "name": "ResourceId", - "memberName": "ResourceId" - }, - { - "name": "Key", - "memberName": "Key" - }, - { - "name": "Value", - "memberName": "Value" - } - ], - "shape": "TagDescription", - "load": { - "request": { - "operation": "DescribeTags", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "key" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, - { "target": "Filters[1].Name", "source": "string", "value": "value" }, - { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } - ] - }, - "path": "Tags[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, - { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, - { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } - ] - } - } - } - }, - "Volume": { - "identifiers": [ - { - "name": "Id", - "memberName": "VolumeId" - } - ], - "shape": "Volume", - "load": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Volumes[0]" - }, - "actions": { - "AttachToInstance": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateSnapshot": { - "request": { - "operation": "CreateSnapshot", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeStatus": { - "request": { - "operation": "DescribeVolumeStatus", - "params": [ - { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromInstance": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableIo": { - "request": { - "operation": "EnableVolumeIO", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVolumeAttribute", - "params": [ - { "target": "VolumeId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "hasMany": { - "Snapshots": { - "request": { 
- "operation": "DescribeSnapshots", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - } - } - }, - "Vpc": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcId" - } - ], - "shape": "Vpc", - "load": { - "request": { - "operation": "DescribeVpcs", - "params": [ - { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Vpcs[0]" - }, - "actions": { - "AssociateDhcpOptions": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachClassicLinkInstance": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachInternetGateway": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateNetworkAcl": { - "request": { - "operation": "CreateNetworkAcl", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateRouteTable": { - "request": { - "operation": "CreateRouteTable", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { - "operation": "CreateSecurityGroup", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSubnet": { - "request": { - "operation": "CreateSubnet", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkInstance": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachInternetGateway": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "VpcId", "source": "identifier", 
"name": "Id" } - ] - } - }, - "DisableClassicLink": { - "request": { - "operation": "DisableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "EnableClassicLink": { - "request": { - "operation": "EnableVpcClassicLink", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyVpcAttribute", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - } - }, - "RequestVpcPeeringConnection": { - "request": { - "operation": "CreateVpcPeeringConnection", - "params": [ - { "target": "VpcId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "data", "path": "DhcpOptionsId" } - ] - } - } - }, - "hasMany": { - "AcceptedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "NetworkAcls": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "RequestedVpcPeeringConnections": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": 
"Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "RouteTables": { - "request": { - "operation": "DescribeRouteTables", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { - "operation": "DescribeSecurityGroups", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Subnets": { - "request": { - "operation": "DescribeSubnets", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - } - } - }, - "VpcPeeringConnection": { - "identifiers": [ - { - "name": "Id", - "memberName": "VpcPeeringConnectionId" - } - ], - "shape": "VpcPeeringConnection", - "load": { - "request": { - "operation": "DescribeVpcPeeringConnections", - "params": [ - { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "VpcPeeringConnections[0]" - }, - "actions": { - "Accept": { - "request": { - "operation": "AcceptVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reject": { - "request": { - "operation": "RejectVpcPeeringConnection", - "params": [ - { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "VpcPeeringConnectionExists", - "params": [ - { "target": "VpcPeeringConnectionIds[]", "source": "identifier", "name": "Id" } - ], - "path": "VpcPeeringConnections[0]" - } - }, - "has": { - "AccepterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } - ] - } - }, - "RequesterVpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } - ] - } - } - } - }, - "VpcAddress": { - "identifiers": [ - { - "name": "AllocationId" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "AllocationIds[0]", "source": "identifier", "name": "AllocationId" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": 
"AllocationId", "source": "identifier", "name": "AllocationId" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "AssociationId" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/ec2/2016-11-15/resources-1.json b/venv/Lib/site-packages/boto3/data/ec2/2016-11-15/resources-1.json deleted file mode 100644 index 9872201..0000000 --- a/venv/Lib/site-packages/boto3/data/ec2/2016-11-15/resources-1.json +++ /dev/null @@ -1,2582 +0,0 @@ -{ - "service": { - "actions": { - "CreateDhcpOptions": { - "request": { "operation": "CreateDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } - ], - "path": "DhcpOptions" - } - }, - "CreateInstances": { - "request": { "operation": "RunInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } - ], - "path": "Instances[]" - } - }, - "CreateInternetGateway": { - "request": { "operation": "CreateInternetGateway" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } - ], - "path": "InternetGateway" - } - }, - "CreateKeyPair": { - "request": { "operation": "CreateKeyPair" }, - "resource": { - "type": "KeyPair", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ], - "path": "@" - } - }, - "CreateNetworkAcl": { - "request": { "operation": "CreateNetworkAcl" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } - ], - "path": "NetworkAcl" - } - }, - "CreateNetworkInterface": { - "request": { "operation": "CreateNetworkInterface" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } - ], - "path": "NetworkInterface" - } - }, - "CreatePlacementGroup": { - "request": { "operation": "CreatePlacementGroup" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "GroupName" } - ] - } - }, - "CreateRouteTable": { - "request": { "operation": "CreateRouteTable" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } - ], - "path": "RouteTable" - } - }, - "CreateSecurityGroup": { - "request": { "operation": "CreateSecurityGroup" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "GroupId" } - ] - } - }, - "CreateSnapshot": { - "request": { "operation": "CreateSnapshot" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SnapshotId" } - ], - "path": "@" - } - }, - "CreateSubnet": { - "request": { "operation": "CreateSubnet" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } - ], - "path": "Subnet" - } - }, - "CreateTags": { - "request": { "operation": "CreateTags" } - }, - "CreateVolume": { - "request": { "operation": 
"CreateVolume" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VolumeId" } - ], - "path": "@" - } - }, - "CreateVpc": { - "request": { "operation": "CreateVpc" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpc.VpcId" } - ], - "path": "Vpc" - } - }, - "CreateVpcPeeringConnection": { - "request": { "operation": "CreateVpcPeeringConnection" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnection" - } - }, - "DisassociateRouteTable": { - "request": { "operation": "DisassociateRouteTable" } - }, - "ImportKeyPair": { - "request": { "operation": "ImportKeyPair" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyName" } - ] - } - }, - "RegisterImage": { - "request": { "operation": "RegisterImage" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - } - }, - "has": { - "DhcpOptions": { - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Instance": { - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "InternetGateway": { - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "NetworkAcl": { - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "NetworkInterface": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - }, - "RouteTable": { - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "RouteTableAssociation": { - "resource": { - "type": "RouteTableAssociation", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "SecurityGroup": { - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Snapshot": { - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Volume": { - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - }, - "VpcPeeringConnection": { - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "input" } - ] - } - } - }, - "hasMany": { - "ClassicAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": 
"standard" } - ] - }, - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "response", "path": "Addresses[].PublicIp" } - ], - "path": "Addresses[]" - } - }, - "DhcpOptionsSets": { - "request": { "operation": "DescribeDhcpOptions" }, - "resource": { - "type": "DhcpOptions", - "identifiers": [ - { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } - ], - "path": "DhcpOptions[]" - } - }, - "Images": { - "request": { "operation": "DescribeImages" }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Images[].ImageId" } - ], - "path": "Images[]" - } - }, - "Instances": { - "request": { "operation": "DescribeInstances" }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - }, - "InternetGateways": { - "request": { "operation": "DescribeInternetGateways" }, - "resource": { - "type": "InternetGateway", - "identifiers": [ - { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } - ], - "path": "InternetGateways[]" - } - }, - "KeyPairs": { - "request": { "operation": "DescribeKeyPairs" }, - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } - ], - "path": "KeyPairs[]" - } - }, - "NetworkAcls": { - "request": { "operation": "DescribeNetworkAcls" }, - "resource": { - "type": "NetworkAcl", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } - ], - "path": "NetworkAcls[]" - } - }, - "NetworkInterfaces": { - "request": { "operation": "DescribeNetworkInterfaces" }, - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroups": { - "request": { "operation": "DescribePlacementGroups" }, - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } - ], - "path": "PlacementGroups[]" - } - }, - "RouteTables": { - "request": { "operation": "DescribeRouteTables" }, - "resource": { - "type": "RouteTable", - "identifiers": [ - { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } - ], - "path": "RouteTables[]" - } - }, - "SecurityGroups": { - "request": { "operation": "DescribeSecurityGroups" }, - "resource": { - "type": "SecurityGroup", - "identifiers": [ - { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } - ], - "path": "SecurityGroups[]" - } - }, - "Snapshots": { - "request": { "operation": "DescribeSnapshots" }, - "resource": { - "type": "Snapshot", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } - ], - "path": "Snapshots[]" - } - }, - "Subnets": { - "request": { "operation": "DescribeSubnets" }, - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } - ], - "path": "Subnets[]" - } - }, - "Volumes": { - "request": { "operation": "DescribeVolumes" }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": 
"DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "domain" }, - { "target": "Filters[0].Values[0]", "source": "string", "value": "vpc" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - }, - "VpcPeeringConnections": { - "request": { "operation": "DescribeVpcPeeringConnections" }, - "resource": { - "type": "VpcPeeringConnection", - "identifiers": [ - { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } - ], - "path": "VpcPeeringConnections[]" - } - }, - "Vpcs": { - "request": { "operation": "DescribeVpcs" }, - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } - ], - "path": "Vpcs[]" - } - } - } - }, - "resources": { - "ClassicAddress": { - "identifiers": [ - { - "name": "PublicIp" - } - ], - "shape": "Address", - "load": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "PublicIps[]", "source": "identifier", "name": "PublicIp" } - ] - }, - "path": "Addresses[0]" - }, - "actions": { - "Associate": { - "request": { - "operation": "AssociateAddress", - "params": [ - { "target": "PublicIp", "source": "identifier", "name": "PublicIp" } - ] - } - }, - "Disassociate": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - }, - "Release": { - "request": { - "operation": "ReleaseAddress", - "params": [ - { "target": "PublicIp", "source": "data", "path": "PublicIp" } - ] - } - } - } - }, - "DhcpOptions": { - "identifiers": [ - { - "name": "Id", - "memberName": "DhcpOptionsId" - } - ], - "shape": "DhcpOptions", - "load": { - "request": { - "operation": "DescribeDhcpOptions", - "params": [ - { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "DhcpOptions[0]" - }, - "actions": { - "AssociateWithVpc": { - "request": { - "operation": "AssociateDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteDhcpOptions", - "params": [ - { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "Image": { - "identifiers": [ - { - "name": "Id", - "memberName": "ImageId" - } - ], - "shape": "Image", - "load": { - "request": { - "operation": "DescribeImages", - "params": [ - { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Images[0]" - }, - "actions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": 
"Tags[].Value" } - ] - } - }, - "Deregister": { - "request": { - "operation": "DeregisterImage", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetImageAttribute", - "params": [ - { "target": "ImageId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "ImageExists", - "params": [ - { "target": "ImageIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Images[0]" - } - } - }, - "Instance": { - "identifiers": [ - { - "name": "Id", - "memberName": "InstanceId" - } - ], - "shape": "Instance", - "load": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "Reservations[0].Instances[0]" - }, - "actions": { - "AttachClassicLinkVpc": { - "request": { - "operation": "AttachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "AttachVolume": { - "request": { - "operation": "AttachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ConsoleOutput": { - "request": { - "operation": "GetConsoleOutput", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateImage": { - "request": { - "operation": "CreateImage", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "response", "path": "ImageId" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachClassicLinkVpc": { - "request": { - "operation": "DetachClassicLinkVpc", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachVolume": { - "request": { - "operation": "DetachVolume", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "PasswordData": { - "request": { - "operation": "GetPasswordData", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": 
"InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ReportStatus": { - "request": { - "operation": "ReportInstanceStatus", - "params": [ - { "target": "Instances[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetKernel": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "kernel" } - ] - } - }, - "ResetRamdisk": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "ramdisk" } - ] - } - }, - "ResetSourceDestCheck": { - "request": { - "operation": "ResetInstanceAttribute", - "params": [ - { "target": "InstanceId", "source": "identifier", "name": "Id" }, - { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Monitor": { - "request": { - "operation": "MonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Reboot": { - "request": { - "operation": "RebootInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Start": { - "request": { - "operation": "StartInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Stop": { - "request": { - "operation": "StopInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Terminate": { - "request": { - "operation": "TerminateInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - }, - "Unmonitor": { - "request": { - "operation": "UnmonitorInstances", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "InstanceExists", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Running": { - "waiterName": "InstanceRunning", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Stopped": { - "waiterName": "InstanceStopped", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - }, - "Terminated": { - "waiterName": 
"InstanceTerminated", - "params": [ - { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } - ], - "path": "Reservations[0].Instances[0]" - } - }, - "has": { - "ClassicAddress": { - "resource": { - "type": "ClassicAddress", - "identifiers": [ - { "target": "PublicIp", "source": "data", "path": "PublicIpAddress" } - ] - } - }, - "Image": { - "resource": { - "type": "Image", - "identifiers": [ - { "target": "Id", "source": "data", "path": "ImageId" } - ] - } - }, - "KeyPair": { - "resource": { - "type": "KeyPairInfo", - "identifiers": [ - { "target": "Name", "source": "data", "path": "KeyName" } - ] - } - }, - "NetworkInterfaces": { - "resource": { - "type": "NetworkInterface", - "identifiers": [ - { "target": "Id", "source": "data", "path": "NetworkInterfaces[].NetworkInterfaceId" } - ], - "path": "NetworkInterfaces[]" - } - }, - "PlacementGroup": { - "resource": { - "type": "PlacementGroup", - "identifiers": [ - { "target": "Name", "source": "data", "path": "Placement.GroupName" } - ] - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - }, - "hasMany": { - "Volumes": { - "request": { - "operation": "DescribeVolumes", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Volume", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } - ], - "path": "Volumes[]" - } - }, - "VpcAddresses": { - "request": { - "operation": "DescribeAddresses", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "instance-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } - ], - "path": "Addresses[]" - } - } - } - }, - "InternetGateway": { - "identifiers": [ - { - "name": "Id", - "memberName": "InternetGatewayId" - } - ], - "shape": "InternetGateway", - "load": { - "request": { - "operation": "DescribeInternetGateways", - "params": [ - { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "InternetGateways[0]" - }, - "actions": { - "AttachToVpc": { - "request": { - "operation": "AttachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - }, - "DetachFromVpc": { - "request": { - "operation": "DetachInternetGateway", - "params": [ - { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } - ] - } - } - } - }, - "KeyPair": { - "identifiers": [ - { - "name": 
"Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPair", - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "KeyPairInfo": { - "identifiers": [ - { - "name": "Name", - "memberName": "KeyName" - } - ], - "shape": "KeyPairInfo", - "load": { - "request": { - "operation": "DescribeKeyPairs", - "params": [ - { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "KeyPairs[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteKeyPair", - "params": [ - { "target": "KeyName", "source": "identifier", "name": "Name" } - ] - } - } - } - }, - "NetworkAcl": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkAclId" - } - ], - "shape": "NetworkAcl", - "load": { - "request": { - "operation": "DescribeNetworkAcls", - "params": [ - { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkAcls[0]" - }, - "actions": { - "CreateEntry": { - "request": { - "operation": "CreateNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkAcl", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "DeleteEntry": { - "request": { - "operation": "DeleteNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceAssociation": { - "request": { - "operation": "ReplaceNetworkAclAssociation", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - }, - "ReplaceEntry": { - "request": { - "operation": "ReplaceNetworkAclEntry", - "params": [ - { "target": "NetworkAclId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterface": { - "identifiers": [ - { - "name": "Id", - "memberName": "NetworkInterfaceId" - } - ], - "shape": "NetworkInterface", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0]" - }, - "actions": { - "AssignPrivateIpAddresses": { - "request": { - "operation": "AssignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Attach": { - "request": { - "operation": "AttachNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "CreateTags": { - "request": { - "operation": "CreateTags", - "params": [ - { "target": "Resources[0]", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Tag", - "identifiers": [ - { "target": "ResourceId", "source": "identifier", "name": "Id" }, - { "target": "Key", "source": 
"requestParameter", "path": "Tags[].Key" }, - { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteNetworkInterface", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "DescribeAttribute": { - "request": { - "operation": "DescribeNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "Detach": { - "request": { - "operation": "DetachNetworkInterface", - "params": [ - { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } - ] - } - }, - "ModifyAttribute": { - "request": { - "operation": "ModifyNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "ResetAttribute": { - "request": { - "operation": "ResetNetworkInterfaceAttribute", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - }, - "UnassignPrivateIpAddresses": { - "request": { - "operation": "UnassignPrivateIpAddresses", - "params": [ - { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Association": { - "resource": { - "type": "NetworkInterfaceAssociation", - "identifiers": [ - { "target": "Id", "source": "data", "path": "Association.AssociationId" } - ], - "path": "Association" - } - }, - "Subnet": { - "resource": { - "type": "Subnet", - "identifiers": [ - { "target": "Id", "source": "data", "path": "SubnetId" } - ] - } - }, - "Vpc": { - "resource": { - "type": "Vpc", - "identifiers": [ - { "target": "Id", "source": "data", "path": "VpcId" } - ] - } - } - } - }, - "NetworkInterfaceAssociation": { - "identifiers": [ - { - "name": "Id" - } - ], - "shape": "InstanceNetworkInterfaceAssociation", - "load": { - "request": { - "operation": "DescribeNetworkInterfaces", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "association.association-id" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } - ] - }, - "path": "NetworkInterfaces[0].Association" - }, - "actions": { - "Delete": { - "request": { - "operation": "DisassociateAddress", - "params": [ - { "target": "AssociationId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Address": { - "resource": { - "type": "VpcAddress", - "identifiers": [ - { "target": "AllocationId", "source": "data", "path": "AllocationId" } - ] - } - } - } - }, - "PlacementGroup": { - "identifiers": [ - { - "name": "Name", - "memberName": "GroupName" - } - ], - "shape": "PlacementGroup", - "load": { - "request": { - "operation": "DescribePlacementGroups", - "params": [ - { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } - ] - }, - "path": "PlacementGroups[0]" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeletePlacementGroup", - "params": [ - { "target": "GroupName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "Instances": { - "request": { - "operation": "DescribeInstances", - "params": [ - { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, - { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Instance", - "identifiers": [ - { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } - ], - "path": "Reservations[].Instances[]" - } - } 
diff --git a/venv/Lib/site-packages/boto3/data/glacier/2012-06-01/resources-1.json b/venv/Lib/site-packages/boto3/data/glacier/2012-06-01/resources-1.json deleted file mode 100644 index d1ed48f..0000000 --- a/venv/Lib/site-packages/boto3/data/glacier/2012-06-01/resources-1.json +++ /dev/null @@ -1,581 +0,0 @@
diff --git a/venv/Lib/site-packages/boto3/data/iam/2010-05-08/resources-1.json b/venv/Lib/site-packages/boto3/data/iam/2010-05-08/resources-1.json deleted file mode 100644 index 59d1855..0000000 --- a/venv/Lib/site-packages/boto3/data/iam/2010-05-08/resources-1.json +++ /dev/null @@ -1,1721 +0,0 @@
"source": "response", "path": "AccessKeyMetadata[].AccessKeyId" } - ], - "path": "AccessKeyMetadata[]" - } - }, - "AttachedPolicies": { - "request": { - "operation": "ListAttachedUserPolicies", - "params": [ - { "target": "UserName", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Policy", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "AttachedPolicies[].PolicyArn" } - ] - } - }, - "Groups": { - "request": { - "operation": "ListGroupsForUser", - "params": [ - { "target": "UserName", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Group", - "identifiers": [ - { "target": "Name", "source": "response", "path": "Groups[].GroupName" } - ], - "path": "Groups[]" - } - }, - "MfaDevices": { - "request": { - "operation": "ListMFADevices", - "params": [ - { "target": "UserName", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "MfaDevice", - "identifiers": [ - { "target": "UserName", "source": "identifier", "name": "Name" }, - { "target": "SerialNumber", "source": "response", "path": "MFADevices[].SerialNumber" } - ], - "path": "MFADevices[]" - } - }, - "Policies": { - "request": { - "operation": "ListUserPolicies", - "params": [ - { "target": "UserName", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "UserPolicy", - "identifiers": [ - { "target": "UserName", "source": "identifier", "name": "Name" }, - { "target": "Name", "source": "response", "path": "PolicyNames[]" } - ] - } - }, - "SigningCertificates": { - "request": { - "operation": "ListSigningCertificates", - "params": [ - { "target": "UserName", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "SigningCertificate", - "identifiers": [ - { "target": "UserName", "source": "identifier", "name": "Name" }, - { "target": "Id", "source": "response", "path": "Certificates[].CertificateId" } - ], - "path": "Certificates[]" - } - } - } - }, - "UserPolicy": { - "identifiers": [ - { - "name": "UserName", - "memberName": "UserName" - }, - { - "name": "Name", - "memberName": "PolicyName" - } - ], - "shape": "GetUserPolicyResponse", - "load": { - "request": { - "operation": "GetUserPolicy", - "params": [ - { "target": "UserName", "source": "identifier", "name": "UserName" }, - { "target": "PolicyName", "source": "identifier", "name": "Name" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteUserPolicy", - "params": [ - { "target": "UserName", "source": "identifier", "name": "UserName" }, - { "target": "PolicyName", "source": "identifier", "name": "Name" } - ] - } - }, - "Put": { - "request": { - "operation": "PutUserPolicy", - "params": [ - { "target": "UserName", "source": "identifier", "name": "UserName" }, - { "target": "PolicyName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "has": { - "User": { - "resource": { - "type": "User", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "UserName" } - ] - } - } - } - }, - "VirtualMfaDevice": { - "identifiers": [ - { - "name": "SerialNumber", - "memberName": "SerialNumber" - } - ], - "shape": "VirtualMFADevice", - "actions": { - "Delete": { - "request": { - "operation": "DeleteVirtualMFADevice", - "params": [ - { "target": "SerialNumber", "source": "identifier", "name": "SerialNumber" } - ] - } - } - }, - "has": { - "User": { - "resource": { - "type": "User", - "identifiers": [ - { "target": "Name", "source": "data", "path": "User.UserName" } - ] - } - } - } - } - } -} 
diff --git a/venv/Lib/site-packages/boto3/data/s3/2006-03-01/resources-1.json b/venv/Lib/site-packages/boto3/data/s3/2006-03-01/resources-1.json deleted file mode 100644 index f1e88c6..0000000 --- a/venv/Lib/site-packages/boto3/data/s3/2006-03-01/resources-1.json +++ /dev/null @@ -1,1249 +0,0 @@ -{ - "service": { - "actions": { - "CreateBucket": { - "request": { "operation": "CreateBucket" }, - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "requestParameter", "path": "Bucket" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "input" } - ] - } - } - }, - "hasMany": { - "Buckets": { - "request": { "operation": "ListBuckets" }, - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "response", "path": "Buckets[].Name" } - ], - "path": "Buckets[]" - } - } - } - }, - "resources": { - "Bucket": { - "identifiers": [ - { "name": "Name" } - ], - "shape": "Bucket", - "actions": { - "Create": { - "request": { - "operation": "CreateBucket", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteBucket", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - } - }, - "DeleteObjects": { - "request": { - "operation": "DeleteObjects", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - } - }, - "PutObject": { - "request": { - "operation": "PutObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" }, - { "target": "Key", "source": "requestParameter", "path": "Key" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "BucketExists", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - }, - "NotExists": { - "waiterName": "BucketNotExists", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - } - }, - "has": { - "Acl": { - "resource": { - "type": "BucketAcl", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Cors": { - "resource": { - "type": "BucketCors", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Lifecycle": { - "resource": { - "type": "BucketLifecycle", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "LifecycleConfiguration": { - "resource": { - "type": "BucketLifecycleConfiguration", - "identifiers": [ - { - "target": "BucketName", - "source": "identifier", - "name": "Name" - } - ] - } - }, - "Logging": { - "resource": { - "type": "BucketLogging", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Notification": { - "resource": { - "type": "BucketNotification", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Object": { - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" }, - { "target": "Key", "source": "input" } - ] - } - }, - "Policy": { - "resource": { - "type": "BucketPolicy", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "RequestPayment": { - "resource": { - "type": 
"BucketRequestPayment", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Tagging": { - "resource": { - "type": "BucketTagging", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Versioning": { - "resource": { - "type": "BucketVersioning", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - }, - "Website": { - "resource": { - "type": "BucketWebsite", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" } - ] - } - } - }, - "hasMany": { - "MultipartUploads": { - "request": { - "operation": "ListMultipartUploads", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "MultipartUpload", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" }, - { "target": "ObjectKey", "source": "response", "path": "Uploads[].Key" }, - { "target": "Id", "source": "response", "path": "Uploads[].UploadId" } - ], - "path": "Uploads[]" - } - }, - "ObjectVersions": { - "request": { - "operation": "ListObjectVersions", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "ObjectVersion", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" }, - { "target": "ObjectKey", "source": "response", "path": "[Versions,DeleteMarkers]|[].Key" }, - { "target": "Id", "source": "response", "path": "[Versions,DeleteMarkers]|[].VersionId" } - ], - "path": "[Versions,DeleteMarkers]|[]" - } - }, - "Objects": { - "request": { - "operation": "ListObjects", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "Name" } - ] - }, - "resource": { - "type": "ObjectSummary", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "Name" }, - { "target": "Key", "source": "response", "path": "Contents[].Key" } - ], - "path": "Contents[]" - } - } - } - }, - "BucketAcl": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketAclOutput", - "load": { - "request": { - "operation": "GetBucketAcl", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Put": { - "request": { - "operation": "PutBucketAcl", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketCors": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketCorsOutput", - "load": { - "request": { - "operation": "GetBucketCors", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteBucketCors", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketCors", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketLifecycle": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketLifecycleOutput", - 
"load": { - "request": { - "operation": "GetBucketLifecycle", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteBucketLifecycle", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketLifecycle", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketLifecycleConfiguration": { - "identifiers": [ - { - "name": "BucketName" - } - ], - "shape": "GetBucketLifecycleConfigurationOutput", - "load": { - "request": { - "operation": "GetBucketLifecycleConfiguration", - "params": [ - { - "target": "Bucket", - "source": "identifier", - "name": "BucketName" - } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteBucketLifecycle", - "params": [ - { - "target": "Bucket", - "source": "identifier", - "name": "BucketName" - } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketLifecycleConfiguration", - "params": [ - { - "target": "Bucket", - "source": "identifier", - "name": "BucketName" - } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { - "target": "Name", - "source": "identifier", - "name": "BucketName" - } - ] - } - } - } - }, - "BucketLogging": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketLoggingOutput", - "load": { - "request": { - "operation": "GetBucketLogging", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Put": { - "request": { - "operation": "PutBucketLogging", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketNotification": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "NotificationConfiguration", - "load": { - "request": { - "operation": "GetBucketNotificationConfiguration", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Put": { - "request": { - "operation": "PutBucketNotificationConfiguration", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketPolicy": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketPolicyOutput", - "load": { - "request": { - "operation": "GetBucketPolicy", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteBucketPolicy", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketPolicy", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - 
"resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketRequestPayment": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketRequestPaymentOutput", - "load": { - "request": { - "operation": "GetBucketRequestPayment", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Put": { - "request": { - "operation": "PutBucketRequestPayment", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketTagging": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketTaggingOutput", - "load": { - "request": { - "operation": "GetBucketTagging", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteBucketTagging", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketTagging", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketVersioning": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketVersioningOutput", - "load": { - "request": { - "operation": "GetBucketVersioning", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Enable": { - "request": { - "operation": "PutBucketVersioning", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "VersioningConfiguration.Status", "source": "string", "value": "Enabled" } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketVersioning", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - }, - "Suspend": { - "request": { - "operation": "PutBucketVersioning", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "VersioningConfiguration.Status", "source": "string", "value": "Suspended" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, - "BucketWebsite": { - "identifiers": [ - { "name": "BucketName" } - ], - "shape": "GetBucketWebsiteOutput", - "load": { - "request": { - "operation": "GetBucketWebsite", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteBucketWebsite", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - }, - "Put": { - "request": { - "operation": "PutBucketWebsite", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" } - ] - } - } - }, - "has": { - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - } - } - }, 
- "MultipartUpload": { - "identifiers": [ - { "name": "BucketName" }, - { "name": "ObjectKey" }, - { "name": "Id" } - ], - "shape": "MultipartUpload", - "actions": { - "Abort": { - "request": { - "operation": "AbortMultipartUpload", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "UploadId", "source": "identifier", "name": "Id" } - ] - } - }, - "Complete": { - "request": { - "operation": "CompleteMultipartUpload", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "UploadId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" } - ] - } - } - }, - "has": { - "Object": { - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" } - ] - } - }, - "Part": { - "resource": { - "type": "MultipartUploadPart", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "ObjectKey" }, - { "target": "MultipartUploadId", "source": "identifier", "name": "Id" }, - { "target": "PartNumber", "source": "input" } - ] - } - } - }, - "hasMany": { - "Parts": { - "request": { - "operation": "ListParts", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "UploadId", "source": "identifier", "name": "Id" } - ] - }, - "resource": { - "type": "MultipartUploadPart", - "identifiers": [ - { "target": "BucketName", "source": "requestParameter", "path": "Bucket" }, - { "target": "ObjectKey", "source": "requestParameter", "path": "Key" }, - { "target": "MultipartUploadId", "source": "requestParameter", "path": "UploadId" }, - { "target": "PartNumber", "source": "response", "path": "Parts[].PartNumber" } - ], - "path": "Parts[]" - } - } - } - }, - "MultipartUploadPart": { - "identifiers": [ - { "name": "BucketName" }, - { "name": "ObjectKey" }, - { "name": "MultipartUploadId" }, - { - "name": "PartNumber", - "type": "integer", - "memberName": "PartNumber" - } - ], - "shape": "Part", - "actions": { - "CopyFrom": { - "request": { - "operation": "UploadPartCopy", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "UploadId", "source": "identifier", "name": "MultipartUploadId" }, - { "target": "PartNumber", "source": "identifier", "name": "PartNumber" } - ] - } - }, - "Upload": { - "request": { - "operation": "UploadPart", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "UploadId", "source": "identifier", "name": "MultipartUploadId" }, - { "target": "PartNumber", "source": "identifier", "name": "PartNumber" } - ] - } - } - }, - "has": { - "MultipartUpload": { - "resource": { - "type": "MultipartUpload", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", 
"source": "identifier", "name": "ObjectKey" }, - { "target": "Id", "source": "identifier", "name": "MultipartUploadId" } - ] - } - } - } - }, - "Object": { - "identifiers": [ - { "name": "BucketName" }, - { "name": "Key" } - ], - "shape": "HeadObjectOutput", - "load": { - "request": { - "operation": "HeadObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - }, - "path": "@" - }, - "actions": { - "CopyFrom": { - "request": { - "operation": "CopyObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "Get": { - "request": { - "operation": "GetObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "InitiateMultipartUpload": { - "request": { - "operation": "CreateMultipartUpload", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - }, - "resource": { - "type": "MultipartUpload", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" }, - { "target": "Id", "source": "response", "path": "UploadId" } - ] - } - }, - "Put": { - "request": { - "operation": "PutObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "RestoreObject": { - "request": { - "operation": "RestoreObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteObjects", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Delete.Objects[].Key", "source": "identifier", "name": "Key" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "ObjectExists", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - }, - "NotExists": { - "waiterName": "ObjectNotExists", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "has": { - "Acl": { - "resource": { - "type": "ObjectAcl", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" } - ] - } - }, - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - }, - "MultipartUpload": { - "resource": { - "type": "MultipartUpload", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" }, - { "target": "Id", "source": "input" } - ] - } - }, - "Version": { - "resource": { - "type": "ObjectVersion", - 
"identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" }, - { "target": "Id", "source": "input" } - ] - } - } - } - }, - "ObjectAcl": { - "identifiers": [ - { "name": "BucketName" }, - { "name": "ObjectKey" } - ], - "shape": "GetObjectAclOutput", - "load": { - "request": { - "operation": "GetObjectAcl", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" } - ] - }, - "path": "@" - }, - "actions": { - "Put": { - "request": { - "operation": "PutObjectAcl", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" } - ] - } - } - }, - "has": { - "Object": { - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" } - ] - } - } - } - }, - "ObjectSummary": { - "identifiers": [ - { "name": "BucketName" }, - { "name": "Key" } - ], - "shape": "Object", - "actions": { - "CopyFrom": { - "request": { - "operation": "CopyObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "Get": { - "request": { - "operation": "GetObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "InitiateMultipartUpload": { - "request": { - "operation": "CreateMultipartUpload", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - }, - "resource": { - "type": "MultipartUpload", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" }, - { "target": "Id", "source": "response", "path": "UploadId" } - ] - } - }, - "Put": { - "request": { - "operation": "PutObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "RestoreObject": { - "request": { - "operation": "RestoreObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteObjects", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Delete.Objects[].Key", "source": "identifier", "name": "Key" } - ] - } - } - }, - "waiters": { - "Exists": { - "waiterName": "ObjectExists", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - }, - "NotExists": { - "waiterName": "ObjectNotExists", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "has": { - "Acl": { - 
"resource": { - "type": "ObjectAcl", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" } - ] - } - }, - "Bucket": { - "resource": { - "type": "Bucket", - "identifiers": [ - { "target": "Name", "source": "identifier", "name": "BucketName" } - ] - } - }, - "MultipartUpload": { - "resource": { - "type": "MultipartUpload", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" }, - { "target": "Id", "source": "input" } - ] - } - }, - "Object": { - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "Key" } - ] - } - }, - "Version": { - "resource": { - "type": "ObjectVersion", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "ObjectKey", "source": "identifier", "name": "Key" }, - { "target": "Id", "source": "input" } - ] - } - } - } - }, - "ObjectVersion": { - "identifiers": [ - { "name": "BucketName" }, - { "name": "ObjectKey" }, - { "name": "Id" } - ], - "shape": "ObjectVersion", - "actions": { - "Delete": { - "request": { - "operation": "DeleteObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "VersionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Get": { - "request": { - "operation": "GetObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "VersionId", "source": "identifier", "name": "Id" } - ] - } - }, - "Head": { - "request": { - "operation": "HeadObject", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "VersionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteObjects", - "params": [ - { "target": "Bucket", "source": "identifier", "name": "BucketName" }, - { "target": "Delete.Objects[*].Key", "source": "identifier", "name": "ObjectKey" }, - { "target": "Delete.Objects[*].VersionId", "source": "identifier", "name": "Id" } - ] - } - } - }, - "has": { - "Object": { - "resource": { - "type": "Object", - "identifiers": [ - { "target": "BucketName", "source": "identifier", "name": "BucketName" }, - { "target": "Key", "source": "identifier", "name": "ObjectKey" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/sns/2010-03-31/resources-1.json b/venv/Lib/site-packages/boto3/data/sns/2010-03-31/resources-1.json deleted file mode 100644 index cee300a..0000000 --- a/venv/Lib/site-packages/boto3/data/sns/2010-03-31/resources-1.json +++ /dev/null @@ -1,327 +0,0 @@ -{ - "service": { - "actions": { - "CreatePlatformApplication": { - "request": { "operation": "CreatePlatformApplication" }, - "resource": { - "type": "PlatformApplication", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "PlatformApplicationArn" } - ] - } - }, - "CreateTopic": { - "request": { "operation": "CreateTopic" }, - "resource": { - "type": "Topic", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "TopicArn" } - 
] - } - } - }, - "has": { - "PlatformApplication": { - "resource": { - "type": "PlatformApplication", - "identifiers": [ - { "target": "Arn", "source": "input" } - ] - } - }, - "PlatformEndpoint": { - "resource": { - "type": "PlatformEndpoint", - "identifiers": [ - { "target": "Arn", "source": "input" } - ] - } - }, - "Subscription": { - "resource": { - "type": "Subscription", - "identifiers": [ - { "target": "Arn", "source": "input" } - ] - } - }, - "Topic": { - "resource": { - "type": "Topic", - "identifiers": [ - { "target": "Arn", "source": "input" } - ] - } - } - }, - "hasMany": { - "PlatformApplications": { - "request": { "operation": "ListPlatformApplications" }, - "resource": { - "type": "PlatformApplication", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "PlatformApplications[].PlatformApplicationArn" } - ] - } - }, - "Subscriptions": { - "request": { "operation": "ListSubscriptions" }, - "resource": { - "type": "Subscription", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "Subscriptions[].SubscriptionArn" } - ] - } - }, - "Topics": { - "request": { "operation": "ListTopics" }, - "resource": { - "type": "Topic", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "Topics[].TopicArn" } - ] - } - } - } - }, - "resources": { - "PlatformApplication": { - "identifiers": [ - { "name": "Arn" } - ], - "shape": "GetPlatformApplicationAttributesResponse", - "load": { - "request": { - "operation": "GetPlatformApplicationAttributes", - "params": [ - { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } - ] - }, - "path": "@" - }, - "actions": { - "CreatePlatformEndpoint": { - "request": { - "operation": "CreatePlatformEndpoint", - "params": [ - { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } - ] - }, - "resource": { - "type": "PlatformEndpoint", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "EndpointArn" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeletePlatformApplication", - "params": [ - { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "SetAttributes": { - "request": { - "operation": "SetPlatformApplicationAttributes", - "params": [ - { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } - ] - } - } - }, - "hasMany": { - "Endpoints": { - "request": { - "operation": "ListEndpointsByPlatformApplication", - "params": [ - { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } - ] - }, - "resource": { - "type": "PlatformEndpoint", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "Endpoints[].EndpointArn" } - ] - } - } - } - }, - "PlatformEndpoint": { - "identifiers": [ - { "name": "Arn" } - ], - "shape": "GetEndpointAttributesResponse", - "load": { - "request": { - "operation": "GetEndpointAttributes", - "params": [ - { "target": "EndpointArn", "source": "identifier", "name": "Arn" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "DeleteEndpoint", - "params": [ - { "target": "EndpointArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "Publish": { - "request": { - "operation": "Publish", - "params": [ - { "target": "TargetArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "SetAttributes": { - "request": { - "operation": "SetEndpointAttributes", - "params": [ - { "target": "EndpointArn", "source": "identifier", "name": "Arn" } - ] - } - } - } - }, - "Subscription": { - 
"identifiers": [ - { "name": "Arn" } - ], - "shape": "GetSubscriptionAttributesResponse", - "load": { - "request": { - "operation": "GetSubscriptionAttributes", - "params": [ - { "target": "SubscriptionArn", "source": "identifier", "name": "Arn" } - ] - }, - "path": "@" - }, - "actions": { - "Delete": { - "request": { - "operation": "Unsubscribe", - "params": [ - { "target": "SubscriptionArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "SetAttributes": { - "request": { - "operation": "SetSubscriptionAttributes", - "params": [ - { "target": "SubscriptionArn", "source": "identifier", "name": "Arn" } - ] - } - } - } - }, - "Topic": { - "identifiers": [ - { "name": "Arn" } - ], - "shape": "GetTopicAttributesResponse", - "load": { - "request": { - "operation": "GetTopicAttributes", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - }, - "path": "@" - }, - "actions": { - "AddPermission": { - "request": { - "operation": "AddPermission", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "ConfirmSubscription": { - "request": { - "operation": "ConfirmSubscription", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - }, - "resource": { - "type": "Subscription", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "SubscriptionArn" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteTopic", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "Publish": { - "request": { - "operation": "Publish", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "RemovePermission": { - "request": { - "operation": "RemovePermission", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "SetAttributes": { - "request": { - "operation": "SetTopicAttributes", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - } - }, - "Subscribe": { - "request": { - "operation": "Subscribe", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - }, - "resource": { - "type": "Subscription", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "SubscriptionArn" } - ] - } - } - }, - "hasMany": { - "Subscriptions": { - "request": { - "operation": "ListSubscriptionsByTopic", - "params": [ - { "target": "TopicArn", "source": "identifier", "name": "Arn" } - ] - }, - "resource": { - "type": "Subscription", - "identifiers": [ - { "target": "Arn", "source": "response", "path": "Subscriptions[].SubscriptionArn" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/data/sqs/2012-11-05/resources-1.json b/venv/Lib/site-packages/boto3/data/sqs/2012-11-05/resources-1.json deleted file mode 100644 index b1e74ab..0000000 --- a/venv/Lib/site-packages/boto3/data/sqs/2012-11-05/resources-1.json +++ /dev/null @@ -1,232 +0,0 @@ -{ - "service": { - "actions": { - "CreateQueue": { - "request": { "operation": "CreateQueue" }, - "resource": { - "type": "Queue", - "identifiers": [ - { "target": "Url", "source": "response", "path": "QueueUrl" } - ] - } - }, - "GetQueueByName": { - "request": { "operation": "GetQueueUrl" }, - "resource": { - "type": "Queue", - "identifiers": [ - { "target": "Url", "source": "response", "path": "QueueUrl" } - ] - } - } - }, - "has": { - "Queue": { - "resource": { - "type": "Queue", - "identifiers": [ - { "target": "Url", "source": "input" } - ] - } - } - }, - 
"hasMany": { - "Queues": { - "request": { "operation": "ListQueues" }, - "resource": { - "type": "Queue", - "identifiers": [ - { "target": "Url", "source": "response", "path": "QueueUrls[]" } - ] - } - } - } - }, - "resources": { - "Message": { - "identifiers": [ - { "name": "QueueUrl" }, - { - "name": "ReceiptHandle", - "memberName": "ReceiptHandle" - } - ], - "shape": "Message", - "actions": { - "ChangeVisibility": { - "request": { - "operation": "ChangeMessageVisibility", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" }, - { "target": "ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteMessage", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" }, - { "target": "ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" } - ] - } - } - }, - "batchActions": { - "Delete": { - "request": { - "operation": "DeleteMessageBatch", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" }, - { "target": "Entries[*].Id", "source": "data", "path": "MessageId" }, - { "target": "Entries[*].ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" } - ] - } - } - }, - "has": { - "Queue": { - "resource": { - "type": "Queue", - "identifiers": [ - { "target": "Url", "source": "identifier", "name": "QueueUrl" } - ] - } - } - } - }, - "Queue": { - "identifiers": [ - { "name": "Url" } - ], - "shape": "GetQueueAttributesResult", - "load": { - "request": { - "operation": "GetQueueAttributes", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" }, - { "target": "AttributeNames[]", "source": "string", "value": "All" } - ] - }, - "path": "@" - }, - "actions": { - "AddPermission": { - "request": { - "operation": "AddPermission", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "ChangeMessageVisibilityBatch": { - "request": { - "operation": "ChangeMessageVisibilityBatch", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "Delete": { - "request": { - "operation": "DeleteQueue", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "DeleteMessages": { - "request": { - "operation": "DeleteMessageBatch", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "Purge": { - "request": { - "operation": "PurgeQueue", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "ReceiveMessages": { - "request": { - "operation": "ReceiveMessage", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - }, - "resource": { - "type": "Message", - "identifiers": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" }, - { "target": "ReceiptHandle", "source": "response", "path": "Messages[].ReceiptHandle" } - ], - "path": "Messages[]" - } - }, - "RemovePermission": { - "request": { - "operation": "RemovePermission", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "SendMessage": { - "request": { - "operation": "SendMessage", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "SendMessages": { - "request": { - "operation": "SendMessageBatch", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - }, - "SetAttributes": { - "request": { - "operation": 
"SetQueueAttributes", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - } - } - }, - "has": { - "Message": { - "resource": { - "type": "Message", - "identifiers": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" }, - { "target": "ReceiptHandle", "source": "input" } - ] - } - } - }, - "hasMany": { - "DeadLetterSourceQueues": { - "request": { - "operation": "ListDeadLetterSourceQueues", - "params": [ - { "target": "QueueUrl", "source": "identifier", "name": "Url" } - ] - }, - "resource": { - "type": "Queue", - "identifiers": [ - { "target": "Url", "source": "response", "path": "queueUrls[]" } - ] - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/boto3/docs/__init__.py b/venv/Lib/site-packages/boto3/docs/__init__.py deleted file mode 100644 index 504d6c2..0000000 --- a/venv/Lib/site-packages/boto3/docs/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore.docs import DEPRECATED_SERVICE_NAMES - -from boto3.docs.service import ServiceDocumenter - - -def generate_docs(root_dir, session): - """Generates the reference documentation for botocore - - This will go through every available AWS service and output ReSTructured - text files documenting each service. - - :param root_dir: The directory to write the reference files to. Each - service's reference documentation is located at - root_dir/reference/services/service-name.rst - - :param session: The boto3 session - """ - services_doc_path = os.path.join(root_dir, 'reference', 'services') - if not os.path.exists(services_doc_path): - os.makedirs(services_doc_path) - - # Prevents deprecated service names from being generated in docs. 
- available_services = [ - service - for service in session.get_available_services() - if service not in DEPRECATED_SERVICE_NAMES - ] - - for service_name in available_services: - docs = ServiceDocumenter( - service_name, session, services_doc_path - ).document_service() - service_doc_path = os.path.join( - services_doc_path, f"{service_name}.rst" - ) - with open(service_doc_path, 'wb') as f: - f.write(docs) diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 7d2b321..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/action.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/action.cpython-312.pyc deleted file mode 100644 index 1660527..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/action.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/attr.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/attr.cpython-312.pyc deleted file mode 100644 index e98e6e6..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/attr.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 2c0ff0d..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/client.cpython-312.pyc deleted file mode 100644 index 5cf4e55..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/collection.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/collection.cpython-312.pyc deleted file mode 100644 index 10e3a0d..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/collection.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/docstring.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/docstring.cpython-312.pyc deleted file mode 100644 index 9ba0306..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/docstring.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/method.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/method.cpython-312.pyc deleted file mode 100644 index 309f781..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/method.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/resource.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/resource.cpython-312.pyc deleted file mode 100644 index 7668212..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/resource.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/service.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/service.cpython-312.pyc deleted file mode 100644 index 2b05d26..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/service.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/boto3/docs/__pycache__/subresource.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/subresource.cpython-312.pyc deleted file mode 100644 index 6840bce..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/subresource.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index f07119e..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/__pycache__/waiter.cpython-312.pyc b/venv/Lib/site-packages/boto3/docs/__pycache__/waiter.cpython-312.pyc deleted file mode 100644 index 3c5a110..0000000 Binary files a/venv/Lib/site-packages/boto3/docs/__pycache__/waiter.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/docs/action.py b/venv/Lib/site-packages/boto3/docs/action.py deleted file mode 100644 index 44a9908..0000000 --- a/venv/Lib/site-packages/boto3/docs/action.py +++ /dev/null @@ -1,214 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore import xform_name -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.method import ( - document_custom_method, - document_model_driven_method, -) -from botocore.model import OperationModel -from botocore.utils import get_service_module_name - -from boto3.docs.base import NestedDocumenter -from boto3.docs.method import document_model_driven_resource_method -from boto3.docs.utils import ( - add_resource_type_overview, - get_resource_ignore_params, - get_resource_public_actions, -) - -PUT_DATA_WARNING_MESSAGE = """ -.. warning:: - It is recommended to use the :py:meth:`put_metric_data` - :doc:`client method <../../cloudwatch/client/put_metric_data>` - instead. If you would still like to use this resource method, - please make sure that ``MetricData[].MetricName`` is equal to - the metric resource's ``name`` attribute. -""" - -WARNING_MESSAGES = { - "Metric": {"put_data": PUT_DATA_WARNING_MESSAGE}, -} - -IGNORE_PARAMS = {"Metric": {"put_data": ["Namespace"]}} - - -class ActionDocumenter(NestedDocumenter): - def document_actions(self, section): - modeled_actions_list = self._resource_model.actions - modeled_actions = {} - for modeled_action in modeled_actions_list: - modeled_actions[modeled_action.name] = modeled_action - resource_actions = get_resource_public_actions( - self._resource.__class__ - ) - self.member_map['actions'] = sorted(resource_actions) - add_resource_type_overview( - section=section, - resource_type='Actions', - description=( - 'Actions call operations on resources. They may ' - 'automatically handle the passing in of arguments set ' - 'from identifiers and some attributes.' 
- ), - intro_link='actions_intro', - ) - resource_warnings = WARNING_MESSAGES.get(self._resource_name, {}) - for action_name in sorted(resource_actions): - # Create a new DocumentStructure for each action and add contents. - action_doc = DocumentStructure(action_name, target='html') - breadcrumb_section = action_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Action / {action_name}') - action_doc.add_title_section(action_name) - warning_message = resource_warnings.get(action_name) - if warning_message is not None: - action_doc.add_new_section("warning").write(warning_message) - action_section = action_doc.add_new_section( - action_name, - context={'qualifier': f'{self.class_name}.'}, - ) - if action_name in ['load', 'reload'] and self._resource_model.load: - document_load_reload_action( - section=action_section, - action_name=action_name, - resource_name=self._resource_name, - event_emitter=self._resource.meta.client.meta.events, - load_model=self._resource_model.load, - service_model=self._service_model, - ) - elif action_name in modeled_actions: - document_action( - section=action_section, - resource_name=self._resource_name, - event_emitter=self._resource.meta.client.meta.events, - action_model=modeled_actions[action_name], - service_model=self._service_model, - ) - else: - document_custom_method( - action_section, action_name, resource_actions[action_name] - ) - # Write actions in individual/nested files. - # Path: /reference/services///.rst - actions_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - action_doc.write_to_file(actions_dir_path, action_name) - - -def document_action( - section, - resource_name, - event_emitter, - action_model, - service_model, - include_signature=True, -): - """Documents a resource action - - :param section: The section to write to - - :param resource_name: The name of the resource - - :param event_emitter: The event emitter to use to emit events - - :param action_model: The model of the action - - :param service_model: The model of the service - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. 
- """ - operation_model = service_model.operation_model( - action_model.request.operation - ) - ignore_params = IGNORE_PARAMS.get(resource_name, {}).get( - action_model.name, - get_resource_ignore_params(action_model.request.params), - ) - example_return_value = 'response' - if action_model.resource: - example_return_value = xform_name(action_model.resource.type) - example_resource_name = xform_name(resource_name) - if service_model.service_name == resource_name: - example_resource_name = resource_name - example_prefix = ( - f'{example_return_value} = {example_resource_name}.{action_model.name}' - ) - full_action_name = ( - f"{section.context.get('qualifier', '')}{action_model.name}" - ) - document_model_driven_resource_method( - section=section, - method_name=full_action_name, - operation_model=operation_model, - event_emitter=event_emitter, - method_description=operation_model.documentation, - example_prefix=example_prefix, - exclude_input=ignore_params, - resource_action_model=action_model, - include_signature=include_signature, - ) - - -def document_load_reload_action( - section, - action_name, - resource_name, - event_emitter, - load_model, - service_model, - include_signature=True, -): - """Documents the resource load action - - :param section: The section to write to - - :param action_name: The name of the loading action should be load or reload - - :param resource_name: The name of the resource - - :param event_emitter: The event emitter to use to emit events - - :param load_model: The model of the load action - - :param service_model: The model of the service - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - description = ( - f'Calls :py:meth:`{get_service_module_name(service_model)}.Client.' - f'{xform_name(load_model.request.operation)}` to update the attributes of the ' - f'{resource_name} resource. Note that the load and reload methods are ' - 'the same method and can be used interchangeably.' - ) - example_resource_name = xform_name(resource_name) - if service_model.service_name == resource_name: - example_resource_name = resource_name - example_prefix = f'{example_resource_name}.{action_name}' - full_action_name = f"{section.context.get('qualifier', '')}{action_name}" - document_model_driven_method( - section=section, - method_name=full_action_name, - operation_model=OperationModel({}, service_model), - event_emitter=event_emitter, - method_description=description, - example_prefix=example_prefix, - include_signature=include_signature, - ) diff --git a/venv/Lib/site-packages/boto3/docs/attr.py b/venv/Lib/site-packages/boto3/docs/attr.py deleted file mode 100644 index a968da2..0000000 --- a/venv/Lib/site-packages/boto3/docs/attr.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-from botocore.docs.params import ResponseParamsDocumenter - -from boto3.docs.utils import get_identifier_description - - -class ResourceShapeDocumenter(ResponseParamsDocumenter): - EVENT_NAME = 'resource-shape' - - -def document_attribute( - section, - service_name, - resource_name, - attr_name, - event_emitter, - attr_model, - include_signature=True, -): - if include_signature: - full_attr_name = f"{section.context.get('qualifier', '')}{attr_name}" - section.style.start_sphinx_py_attr(full_attr_name) - # Note that an attribute may have one, may have many, or may have no - # operations that back the resource's shape. So we just set the - # operation_name to the resource name if we ever to hook in and modify - # a particular attribute. - ResourceShapeDocumenter( - service_name=service_name, - operation_name=resource_name, - event_emitter=event_emitter, - ).document_params(section=section, shape=attr_model) - - -def document_identifier( - section, - resource_name, - identifier_model, - include_signature=True, -): - if include_signature: - full_identifier_name = ( - f"{section.context.get('qualifier', '')}{identifier_model.name}" - ) - section.style.start_sphinx_py_attr(full_identifier_name) - description = get_identifier_description( - resource_name, identifier_model.name - ) - section.write(f'*(string)* {description}') - - -def document_reference(section, reference_model, include_signature=True): - if include_signature: - full_reference_name = ( - f"{section.context.get('qualifier', '')}{reference_model.name}" - ) - section.style.start_sphinx_py_attr(full_reference_name) - reference_type = f'(:py:class:`{reference_model.resource.type}`) ' - section.write(reference_type) - section.include_doc_string( - f'The related {reference_model.name} if set, otherwise ``None``.' - ) diff --git a/venv/Lib/site-packages/boto3/docs/base.py b/venv/Lib/site-packages/boto3/docs/base.py deleted file mode 100644 index ee49646..0000000 --- a/venv/Lib/site-packages/boto3/docs/base.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-from botocore.compat import OrderedDict - - -class BaseDocumenter: - def __init__(self, resource): - self._resource = resource - self._client = self._resource.meta.client - self._resource_model = self._resource.meta.resource_model - self._service_model = self._client.meta.service_model - self._resource_name = self._resource.meta.resource_model.name - self._service_name = self._service_model.service_name - self._service_docs_name = self._client.__class__.__name__ - self.member_map = OrderedDict() - self.represents_service_resource = ( - self._service_name == self._resource_name - ) - self._resource_class_name = self._resource_name - if self._resource_name == self._service_name: - self._resource_class_name = 'ServiceResource' - - @property - def class_name(self): - return f'{self._service_docs_name}.{self._resource_name}' - - -class NestedDocumenter(BaseDocumenter): - def __init__(self, resource, root_docs_path): - super().__init__(resource) - self._root_docs_path = root_docs_path - self._resource_sub_path = self._resource_name.lower() - if self._resource_name == self._service_name: - self._resource_sub_path = 'service-resource' - - @property - def class_name(self): - resource_class_name = self._resource_name - if self._resource_name == self._service_name: - resource_class_name = 'ServiceResource' - return f'{self._service_docs_name}.{resource_class_name}' diff --git a/venv/Lib/site-packages/boto3/docs/client.py b/venv/Lib/site-packages/boto3/docs/client.py deleted file mode 100644 index b199085..0000000 --- a/venv/Lib/site-packages/boto3/docs/client.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from botocore.docs.client import ClientDocumenter - - -class Boto3ClientDocumenter(ClientDocumenter): - def _add_client_creation_example(self, section): - section.style.start_codeblock() - section.style.new_line() - section.write('import boto3') - section.style.new_line() - section.style.new_line() - section.write(f'client = boto3.client(\'{self._service_name}\')') - section.style.end_codeblock() diff --git a/venv/Lib/site-packages/boto3/docs/collection.py b/venv/Lib/site-packages/boto3/docs/collection.py deleted file mode 100644 index 9f6c23f..0000000 --- a/venv/Lib/site-packages/boto3/docs/collection.py +++ /dev/null @@ -1,290 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-import os - -from botocore import xform_name -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.method import get_instance_public_methods -from botocore.docs.utils import DocumentedShape - -from boto3.docs.base import NestedDocumenter -from boto3.docs.method import document_model_driven_resource_method -from boto3.docs.utils import ( - add_resource_type_overview, - get_resource_ignore_params, -) - - -class CollectionDocumenter(NestedDocumenter): - def document_collections(self, section): - collections = self._resource.meta.resource_model.collections - collections_list = [] - add_resource_type_overview( - section=section, - resource_type='Collections', - description=( - 'Collections provide an interface to iterate over and ' - 'manipulate groups of resources. ' - ), - intro_link='guide_collections', - ) - self.member_map['collections'] = collections_list - for collection in collections: - collections_list.append(collection.name) - # Create a new DocumentStructure for each collection and add contents. - collection_doc = DocumentStructure(collection.name, target='html') - breadcrumb_section = collection_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Collection / {collection.name}') - collection_doc.add_title_section(collection.name) - collection_section = collection_doc.add_new_section( - collection.name, - context={'qualifier': f'{self.class_name}.'}, - ) - self._document_collection(collection_section, collection) - - # Write collections in individual/nested files. - # Path: /reference/services///.rst - collections_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - collection_doc.write_to_file(collections_dir_path, collection.name) - - def _document_collection(self, section, collection): - methods = get_instance_public_methods( - getattr(self._resource, collection.name) - ) - document_collection_object(section, collection) - batch_actions = {} - for batch_action in collection.batch_actions: - batch_actions[batch_action.name] = batch_action - - for method in sorted(methods): - method_section = section.add_new_section(method) - if method in batch_actions: - document_batch_action( - section=method_section, - resource_name=self._resource_name, - event_emitter=self._resource.meta.client.meta.events, - batch_action_model=batch_actions[method], - collection_model=collection, - service_model=self._resource.meta.client.meta.service_model, - ) - else: - document_collection_method( - section=method_section, - resource_name=self._resource_name, - action_name=method, - event_emitter=self._resource.meta.client.meta.events, - collection_model=collection, - service_model=self._resource.meta.client.meta.service_model, - ) - - -def document_collection_object( - section, - collection_model, - include_signature=True, -): - """Documents a collection resource object - - :param section: The section to write to - - :param collection_model: The model of the collection - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - if include_signature: - full_collection_name = ( - f"{section.context.get('qualifier', '')}{collection_model.name}" - ) - section.style.start_sphinx_py_attr(full_collection_name) - section.include_doc_string( - f'A collection of {collection_model.resource.type} resources.' 
- ) - section.include_doc_string( - f'A {collection_model.resource.type} Collection will include all ' - f'resources by default, and extreme caution should be taken when ' - f'performing actions on all resources.' - ) - - -def document_batch_action( - section, - resource_name, - event_emitter, - batch_action_model, - service_model, - collection_model, - include_signature=True, -): - """Documents a collection's batch action - - :param section: The section to write to - - :param resource_name: The name of the resource - - :param action_name: The name of collection action. Currently only - can be all, filter, limit, or page_size - - :param event_emitter: The event emitter to use to emit events - - :param batch_action_model: The model of the batch action - - :param collection_model: The model of the collection - - :param service_model: The model of the service - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - operation_model = service_model.operation_model( - batch_action_model.request.operation - ) - ignore_params = get_resource_ignore_params( - batch_action_model.request.params - ) - - example_return_value = 'response' - if batch_action_model.resource: - example_return_value = xform_name(batch_action_model.resource.type) - - example_resource_name = xform_name(resource_name) - if service_model.service_name == resource_name: - example_resource_name = resource_name - example_prefix = f'{example_return_value} = {example_resource_name}.{collection_model.name}.{batch_action_model.name}' - document_model_driven_resource_method( - section=section, - method_name=batch_action_model.name, - operation_model=operation_model, - event_emitter=event_emitter, - method_description=operation_model.documentation, - example_prefix=example_prefix, - exclude_input=ignore_params, - resource_action_model=batch_action_model, - include_signature=include_signature, - ) - - -def document_collection_method( - section, - resource_name, - action_name, - event_emitter, - collection_model, - service_model, - include_signature=True, -): - """Documents a collection method - - :param section: The section to write to - - :param resource_name: The name of the resource - - :param action_name: The name of collection action. Currently only - can be all, filter, limit, or page_size - - :param event_emitter: The event emitter to use to emit events - - :param collection_model: The model of the collection - - :param service_model: The model of the service - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - operation_model = service_model.operation_model( - collection_model.request.operation - ) - - underlying_operation_members = [] - if operation_model.input_shape: - underlying_operation_members = operation_model.input_shape.members - - example_resource_name = xform_name(resource_name) - if service_model.service_name == resource_name: - example_resource_name = resource_name - - custom_action_info_dict = { - 'all': { - 'method_description': ( - f'Creates an iterable of all {collection_model.resource.type} ' - f'resources in the collection.' 
- ), - 'example_prefix': f'{xform_name(collection_model.resource.type)}_iterator = {example_resource_name}.{collection_model.name}.all', - 'exclude_input': underlying_operation_members, - }, - 'filter': { - 'method_description': ( - f'Creates an iterable of all {collection_model.resource.type} ' - f'resources in the collection filtered by kwargs passed to ' - f'method. A {collection_model.resource.type} collection will ' - f'include all resources by default if no filters are provided, ' - f'and extreme caution should be taken when performing actions ' - f'on all resources.' - ), - 'example_prefix': f'{xform_name(collection_model.resource.type)}_iterator = {example_resource_name}.{collection_model.name}.filter', - 'exclude_input': get_resource_ignore_params( - collection_model.request.params - ), - }, - 'limit': { - 'method_description': ( - f'Creates an iterable up to a specified amount of ' - f'{collection_model.resource.type} resources in the collection.' - ), - 'example_prefix': f'{xform_name(collection_model.resource.type)}_iterator = {example_resource_name}.{collection_model.name}.limit', - 'include_input': [ - DocumentedShape( - name='count', - type_name='integer', - documentation=( - 'The limit to the number of resources in the iterable.' - ), - ) - ], - 'exclude_input': underlying_operation_members, - }, - 'page_size': { - 'method_description': ( - f'Creates an iterable of all {collection_model.resource.type} ' - f'resources in the collection, but limits the number of ' - f'items returned by each service call by the specified amount.' - ), - 'example_prefix': f'{xform_name(collection_model.resource.type)}_iterator = {example_resource_name}.{collection_model.name}.page_size', - 'include_input': [ - DocumentedShape( - name='count', - type_name='integer', - documentation=( - 'The number of items returned by each service call' - ), - ) - ], - 'exclude_input': underlying_operation_members, - }, - } - if action_name in custom_action_info_dict: - action_info = custom_action_info_dict[action_name] - document_model_driven_resource_method( - section=section, - method_name=action_name, - operation_model=operation_model, - event_emitter=event_emitter, - resource_action_model=collection_model, - include_signature=include_signature, - **action_info, - ) diff --git a/venv/Lib/site-packages/boto3/docs/docstring.py b/venv/Lib/site-packages/boto3/docs/docstring.py deleted file mode 100644 index daf6787..0000000 --- a/venv/Lib/site-packages/boto3/docs/docstring.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-from botocore.docs.docstring import LazyLoadedDocstring - -from boto3.docs.action import document_action, document_load_reload_action -from boto3.docs.attr import ( - document_attribute, - document_identifier, - document_reference, -) -from boto3.docs.collection import ( - document_batch_action, - document_collection_method, - document_collection_object, -) -from boto3.docs.subresource import document_sub_resource -from boto3.docs.waiter import document_resource_waiter - - -class ActionDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_action(*args, **kwargs) - - -class LoadReloadDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_load_reload_action(*args, **kwargs) - - -class SubResourceDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_sub_resource(*args, **kwargs) - - -class AttributeDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_attribute(*args, **kwargs) - - -class IdentifierDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_identifier(*args, **kwargs) - - -class ReferenceDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_reference(*args, **kwargs) - - -class CollectionDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_collection_object(*args, **kwargs) - - -class CollectionMethodDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_collection_method(*args, **kwargs) - - -class BatchActionDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_batch_action(*args, **kwargs) - - -class ResourceWaiterDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_resource_waiter(*args, **kwargs) diff --git a/venv/Lib/site-packages/boto3/docs/method.py b/venv/Lib/site-packages/boto3/docs/method.py deleted file mode 100644 index a5b3b25..0000000 --- a/venv/Lib/site-packages/boto3/docs/method.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-from botocore.docs.method import document_model_driven_method - - -def document_model_driven_resource_method( - section, - method_name, - operation_model, - event_emitter, - method_description=None, - example_prefix=None, - include_input=None, - include_output=None, - exclude_input=None, - exclude_output=None, - document_output=True, - resource_action_model=None, - include_signature=True, -): - document_model_driven_method( - section=section, - method_name=method_name, - operation_model=operation_model, - event_emitter=event_emitter, - method_description=method_description, - example_prefix=example_prefix, - include_input=include_input, - include_output=include_output, - exclude_input=exclude_input, - exclude_output=exclude_output, - document_output=document_output, - include_signature=include_signature, - ) - - # If this action returns a resource modify the return example to - # appropriately reflect that. - if resource_action_model.resource: - if 'return' in section.available_sections: - section.delete_section('return') - resource_type = resource_action_model.resource.type - - new_return_section = section.add_new_section('return') - return_resource_type = ( - f'{operation_model.service_model.service_name}.{resource_type}' - ) - - return_type = f':py:class:`{return_resource_type}`' - return_description = f'{resource_type} resource' - - if _method_returns_resource_list(resource_action_model.resource): - return_type = f'list({return_type})' - return_description = f'A list of {resource_type} resources' - - new_return_section.style.new_line() - new_return_section.write(f':rtype: {return_type}') - new_return_section.style.new_line() - new_return_section.write(f':returns: {return_description}') - new_return_section.style.new_line() - - -def _method_returns_resource_list(resource): - for identifier in resource.identifiers: - if identifier.path and '[]' in identifier.path: - return True - - return False diff --git a/venv/Lib/site-packages/boto3/docs/resource.py b/venv/Lib/site-packages/boto3/docs/resource.py deleted file mode 100644 index 2e3464e..0000000 --- a/venv/Lib/site-packages/boto3/docs/resource.py +++ /dev/null @@ -1,354 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-import os - -from botocore import xform_name -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.utils import get_official_service_name - -from boto3.docs.action import ActionDocumenter -from boto3.docs.attr import ( - document_attribute, - document_identifier, - document_reference, -) -from boto3.docs.base import BaseDocumenter -from boto3.docs.collection import CollectionDocumenter -from boto3.docs.subresource import SubResourceDocumenter -from boto3.docs.utils import ( - add_resource_type_overview, - get_identifier_args_for_signature, - get_identifier_description, - get_identifier_values_for_example, -) -from boto3.docs.waiter import WaiterResourceDocumenter - - -class ResourceDocumenter(BaseDocumenter): - def __init__(self, resource, botocore_session, root_docs_path): - super().__init__(resource) - self._botocore_session = botocore_session - self._root_docs_path = root_docs_path - self._resource_sub_path = self._resource_name.lower() - if self._resource_name == self._service_name: - self._resource_sub_path = 'service-resource' - - def document_resource(self, section): - self._add_title(section) - self._add_resource_note(section) - self._add_intro(section) - self._add_identifiers(section) - self._add_attributes(section) - self._add_references(section) - self._add_actions(section) - self._add_sub_resources(section) - self._add_collections(section) - self._add_waiters(section) - - def _add_title(self, section): - title_section = section.add_new_section('title') - title_section.style.h2(self._resource_name) - - def _add_intro(self, section): - identifier_names = [] - if self._resource_model.identifiers: - for identifier in self._resource_model.identifiers: - identifier_names.append(identifier.name) - - # Write out the class signature. - class_args = get_identifier_args_for_signature(identifier_names) - start_class = section.add_new_section('start_class') - start_class.style.start_sphinx_py_class( - class_name=f'{self.class_name}({class_args})' - ) - - # Add as short description about the resource - description_section = start_class.add_new_section('description') - self._add_description(description_section) - - # Add an example of how to instantiate the resource - example_section = start_class.add_new_section('example') - self._add_example(example_section, identifier_names) - - # Add the description for the parameters to instantiate the - # resource. 
- param_section = start_class.add_new_section('params') - self._add_params_description(param_section, identifier_names) - - end_class = section.add_new_section('end_class') - end_class.style.end_sphinx_py_class() - - def _add_description(self, section): - official_service_name = get_official_service_name(self._service_model) - section.write( - f'A resource representing an {official_service_name} {self._resource_name}' - ) - - def _add_example(self, section, identifier_names): - section.style.start_codeblock() - section.style.new_line() - section.write('import boto3') - section.style.new_line() - section.style.new_line() - section.write( - f'{self._service_name} = boto3.resource(\'{self._service_name}\')' - ) - section.style.new_line() - example_values = get_identifier_values_for_example(identifier_names) - section.write( - f'{xform_name(self._resource_name)} = {self._service_name}.{self._resource_name}({example_values})' - ) - section.style.end_codeblock() - - def _add_params_description(self, section, identifier_names): - for identifier_name in identifier_names: - description = get_identifier_description( - self._resource_name, identifier_name - ) - section.write(f':type {identifier_name}: string') - section.style.new_line() - section.write(f':param {identifier_name}: {description}') - section.style.new_line() - - def _add_overview_of_member_type(self, section, resource_member_type): - section.style.new_line() - section.write( - f'These are the resource\'s available {resource_member_type}:' - ) - section.style.new_line() - section.style.toctree() - for member in self.member_map[resource_member_type]: - section.style.tocitem(f'{member}') - - def _add_identifiers(self, section): - identifiers = self._resource.meta.resource_model.identifiers - section = section.add_new_section('identifiers') - member_list = [] - if identifiers: - self.member_map['identifiers'] = member_list - add_resource_type_overview( - section=section, - resource_type='Identifiers', - description=( - 'Identifiers are properties of a resource that are ' - 'set upon instantiation of the resource.' - ), - intro_link='identifiers_attributes_intro', - ) - for identifier in identifiers: - member_list.append(identifier.name) - # Create a new DocumentStructure for each identifier and add contents. - identifier_doc = DocumentStructure(identifier.name, target='html') - breadcrumb_section = identifier_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Identifier / {identifier.name}') - identifier_doc.add_title_section(identifier.name) - identifier_section = identifier_doc.add_new_section( - identifier.name, - context={'qualifier': f'{self.class_name}.'}, - ) - document_identifier( - section=identifier_section, - resource_name=self._resource_name, - identifier_model=identifier, - ) - # Write identifiers in individual/nested files. 
- # Path: /reference/services///.rst - identifiers_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - identifier_doc.write_to_file(identifiers_dir_path, identifier.name) - - if identifiers: - self._add_overview_of_member_type(section, 'identifiers') - - def _add_attributes(self, section): - service_model = self._resource.meta.client.meta.service_model - attributes = {} - if self._resource.meta.resource_model.shape: - shape = service_model.shape_for( - self._resource.meta.resource_model.shape - ) - attributes = self._resource.meta.resource_model.get_attributes( - shape - ) - section = section.add_new_section('attributes') - attribute_list = [] - if attributes: - add_resource_type_overview( - section=section, - resource_type='Attributes', - description=( - 'Attributes provide access' - ' to the properties of a resource. Attributes are lazy-' - 'loaded the first time one is accessed via the' - ' :py:meth:`load` method.' - ), - intro_link='identifiers_attributes_intro', - ) - self.member_map['attributes'] = attribute_list - for attr_name in sorted(attributes): - _, attr_shape = attributes[attr_name] - attribute_list.append(attr_name) - # Create a new DocumentStructure for each attribute and add contents. - attribute_doc = DocumentStructure(attr_name, target='html') - breadcrumb_section = attribute_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Attribute / {attr_name}') - attribute_doc.add_title_section(attr_name) - attribute_section = attribute_doc.add_new_section( - attr_name, - context={'qualifier': f'{self.class_name}.'}, - ) - document_attribute( - section=attribute_section, - service_name=self._service_name, - resource_name=self._resource_name, - attr_name=attr_name, - event_emitter=self._resource.meta.client.meta.events, - attr_model=attr_shape, - ) - # Write attributes in individual/nested files. - # Path: /reference/services///.rst - attributes_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - attribute_doc.write_to_file(attributes_dir_path, attr_name) - if attributes: - self._add_overview_of_member_type(section, 'attributes') - - def _add_references(self, section): - section = section.add_new_section('references') - references = self._resource.meta.resource_model.references - reference_list = [] - if references: - add_resource_type_overview( - section=section, - resource_type='References', - description=( - 'References are related resource instances that have ' - 'a belongs-to relationship.' - ), - intro_link='references_intro', - ) - self.member_map['references'] = reference_list - for reference in references: - reference_list.append(reference.name) - # Create a new DocumentStructure for each reference and add contents. - reference_doc = DocumentStructure(reference.name, target='html') - breadcrumb_section = reference_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Reference / {reference.name}') - reference_doc.add_title_section(reference.name) - reference_section = reference_doc.add_new_section( - reference.name, - context={'qualifier': f'{self.class_name}.'}, - ) - document_reference( - section=reference_section, - reference_model=reference, - ) - # Write references in individual/nested files. 
- # Path: /reference/services///.rst - references_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - reference_doc.write_to_file(references_dir_path, reference.name) - if references: - self._add_overview_of_member_type(section, 'references') - - def _add_actions(self, section): - section = section.add_new_section('actions') - actions = self._resource.meta.resource_model.actions - if actions: - documenter = ActionDocumenter(self._resource, self._root_docs_path) - documenter.member_map = self.member_map - documenter.document_actions(section) - self._add_overview_of_member_type(section, 'actions') - - def _add_sub_resources(self, section): - section = section.add_new_section('sub-resources') - sub_resources = self._resource.meta.resource_model.subresources - if sub_resources: - documenter = SubResourceDocumenter( - self._resource, self._root_docs_path - ) - documenter.member_map = self.member_map - documenter.document_sub_resources(section) - self._add_overview_of_member_type(section, 'sub-resources') - - def _add_collections(self, section): - section = section.add_new_section('collections') - collections = self._resource.meta.resource_model.collections - if collections: - documenter = CollectionDocumenter( - self._resource, self._root_docs_path - ) - documenter.member_map = self.member_map - documenter.document_collections(section) - self._add_overview_of_member_type(section, 'collections') - - def _add_waiters(self, section): - section = section.add_new_section('waiters') - waiters = self._resource.meta.resource_model.waiters - if waiters: - service_waiter_model = self._botocore_session.get_waiter_model( - self._service_name - ) - documenter = WaiterResourceDocumenter( - self._resource, service_waiter_model, self._root_docs_path - ) - documenter.member_map = self.member_map - documenter.document_resource_waiters(section) - self._add_overview_of_member_type(section, 'waiters') - - def _add_resource_note(self, section): - section = section.add_new_section('feature-freeze') - section.style.start_note() - section.write( - "Before using anything on this page, please refer to the resources " - ":doc:`user guide <../../../../guide/resources>` for the most recent " - "guidance on using resources." - ) - section.style.end_note() - - -class ServiceResourceDocumenter(ResourceDocumenter): - @property - def class_name(self): - return f'{self._service_docs_name}.ServiceResource' - - def _add_title(self, section): - title_section = section.add_new_section('title') - title_section.style.h2('Service Resource') - - def _add_description(self, section): - official_service_name = get_official_service_name(self._service_model) - section.write(f'A resource representing {official_service_name}') - - def _add_example(self, section, identifier_names): - section.style.start_codeblock() - section.style.new_line() - section.write('import boto3') - section.style.new_line() - section.style.new_line() - section.write( - f'{self._service_name} = boto3.resource(\'{self._service_name}\')' - ) - section.style.end_codeblock() diff --git a/venv/Lib/site-packages/boto3/docs/service.py b/venv/Lib/site-packages/boto3/docs/service.py deleted file mode 100644 index af1a2f6..0000000 --- a/venv/Lib/site-packages/boto3/docs/service.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). 
You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.service import ServiceDocumenter as BaseServiceDocumenter -from botocore.exceptions import DataNotFoundError - -import boto3 -from boto3.docs.client import Boto3ClientDocumenter -from boto3.docs.resource import ResourceDocumenter, ServiceResourceDocumenter -from boto3.utils import ServiceContext - - -class ServiceDocumenter(BaseServiceDocumenter): - # The path used to find examples - EXAMPLE_PATH = os.path.join(os.path.dirname(boto3.__file__), 'examples') - - def __init__(self, service_name, session, root_docs_path): - super().__init__( - service_name=service_name, - # I know that this is an internal attribute, but the botocore session - # is needed to load the paginator and waiter models. - session=session._session, - root_docs_path=root_docs_path, - ) - self._boto3_session = session - self._client = self._boto3_session.client(service_name) - self._service_resource = None - if self._service_name in self._boto3_session.get_available_resources(): - self._service_resource = self._boto3_session.resource(service_name) - self.sections = [ - 'title', - 'client', - 'paginators', - 'waiters', - 'resources', - 'examples', - 'context-params', - ] - self._root_docs_path = root_docs_path - self._USER_GUIDE_LINK = ( - 'https://boto3.amazonaws.com/' - 'v1/documentation/api/latest/guide/resources.html' - ) - - def document_service(self): - """Documents an entire service. - - :returns: The reStructured text of the documented service. - """ - doc_structure = DocumentStructure( - self._service_name, section_names=self.sections, target='html' - ) - self.title(doc_structure.get_section('title')) - - self.client_api(doc_structure.get_section('client')) - self.paginator_api(doc_structure.get_section('paginators')) - self.waiter_api(doc_structure.get_section('waiters')) - if self._service_resource: - self.resource_section(doc_structure.get_section('resources')) - self._document_examples(doc_structure.get_section('examples')) - context_params_section = doc_structure.get_section('context-params') - self.client_context_params(context_params_section) - return doc_structure.flush_structure() - - def client_api(self, section): - examples = None - try: - examples = self.get_examples(self._service_name) - except DataNotFoundError: - pass - - Boto3ClientDocumenter( - self._client, self._root_docs_path, examples - ).document_client(section) - - def resource_section(self, section): - section.style.h2('Resources') - section.style.new_line() - section.write( - 'Resources are available in boto3 via the ' - '``resource`` method. 
For more detailed instructions ' - 'and examples on the usage of resources, see the ' - 'resources ' - ) - section.style.external_link( - title='user guide', - link=self._USER_GUIDE_LINK, - ) - section.write('.') - section.style.new_line() - section.style.new_line() - section.write('The available resources are:') - section.style.new_line() - section.style.toctree() - self._document_service_resource(section) - self._document_resources(section) - - def _document_service_resource(self, section): - # Create a new DocumentStructure for each Service Resource and add contents. - service_resource_doc = DocumentStructure( - 'service-resource', target='html' - ) - breadcrumb_section = service_resource_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref( - self._client.__class__.__name__, f'../../{self._service_name}' - ) - breadcrumb_section.write(' / Resource / ServiceResource') - ServiceResourceDocumenter( - self._service_resource, self._session, self._root_docs_path - ).document_resource(service_resource_doc) - # Write collections in individual/nested files. - # Path: /reference/services///.rst - resource_name = self._service_resource.meta.resource_model.name - if resource_name == self._service_name: - resource_name = 'service-resource' - service_resource_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{resource_name.lower()}', - ) - service_resource_doc.write_to_file(service_resource_dir_path, 'index') - section.style.tocitem(f'{self._service_name}/{resource_name}/index') - - def _document_resources(self, section): - temp_identifier_value = 'foo' - loader = self._session.get_component('data_loader') - json_resource_model = loader.load_service_model( - self._service_name, 'resources-1' - ) - service_model = self._service_resource.meta.client.meta.service_model - for resource_name in json_resource_model['resources']: - resource_model = json_resource_model['resources'][resource_name] - resource_cls = ( - self._boto3_session.resource_factory.load_from_definition( - resource_name=resource_name, - single_resource_json_definition=resource_model, - service_context=ServiceContext( - service_name=self._service_name, - resource_json_definitions=json_resource_model[ - 'resources' - ], - service_model=service_model, - service_waiter_model=None, - ), - ) - ) - identifiers = resource_cls.meta.resource_model.identifiers - args = [] - for _ in identifiers: - args.append(temp_identifier_value) - resource = resource_cls(*args, client=self._client) - # Create a new DocumentStructure for each Resource and add contents. - resource_name = resource.meta.resource_model.name.lower() - resource_doc = DocumentStructure(resource_name, target='html') - breadcrumb_section = resource_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref( - self._client.__class__.__name__, f'../../{self._service_name}' - ) - breadcrumb_section.write( - f' / Resource / {resource.meta.resource_model.name}' - ) - ResourceDocumenter( - resource, self._session, self._root_docs_path - ).document_resource( - resource_doc.add_new_section(resource.meta.resource_model.name) - ) - # Write collections in individual/nested files. 
- # Path: /reference/services///.rst - service_resource_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{resource_name}', - ) - resource_doc.write_to_file(service_resource_dir_path, 'index') - section.style.tocitem( - f'{self._service_name}/{resource_name}/index' - ) - - def _get_example_file(self): - return os.path.realpath( - os.path.join(self.EXAMPLE_PATH, f"{self._service_name}.rst") - ) - - def _document_examples(self, section): - examples_file = self._get_example_file() - if os.path.isfile(examples_file): - section.style.h2('Examples') - section.style.new_line() - with open(examples_file) as f: - section.write(f.read()) diff --git a/venv/Lib/site-packages/boto3/docs/subresource.py b/venv/Lib/site-packages/boto3/docs/subresource.py deleted file mode 100644 index a28203d..0000000 --- a/venv/Lib/site-packages/boto3/docs/subresource.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore import xform_name -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.utils import get_service_module_name - -from boto3.docs.base import NestedDocumenter -from boto3.docs.utils import ( - add_resource_type_overview, - get_identifier_args_for_signature, - get_identifier_description, - get_identifier_values_for_example, -) - - -class SubResourceDocumenter(NestedDocumenter): - def document_sub_resources(self, section): - add_resource_type_overview( - section=section, - resource_type='Sub-resources', - description=( - 'Sub-resources are methods that create a new instance of a' - ' child resource. This resource\'s identifiers get passed' - ' along to the child.' - ), - intro_link='subresources_intro', - ) - sub_resources = sorted( - self._resource.meta.resource_model.subresources, - key=lambda sub_resource: sub_resource.name, - ) - sub_resources_list = [] - self.member_map['sub-resources'] = sub_resources_list - for sub_resource in sub_resources: - sub_resources_list.append(sub_resource.name) - # Create a new DocumentStructure for each sub_resource and add contents. - sub_resource_doc = DocumentStructure( - sub_resource.name, target='html' - ) - breadcrumb_section = sub_resource_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Sub-Resource / {sub_resource.name}') - sub_resource_doc.add_title_section(sub_resource.name) - sub_resource_section = sub_resource_doc.add_new_section( - sub_resource.name, - context={'qualifier': f'{self.class_name}.'}, - ) - document_sub_resource( - section=sub_resource_section, - resource_name=self._resource_name, - sub_resource_model=sub_resource, - service_model=self._service_model, - ) - - # Write sub_resources in individual/nested files. 
- # Path: /reference/services///.rst - sub_resources_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - sub_resource_doc.write_to_file( - sub_resources_dir_path, sub_resource.name - ) - - -def document_sub_resource( - section, - resource_name, - sub_resource_model, - service_model, - include_signature=True, -): - """Documents a resource action - - :param section: The section to write to - - :param resource_name: The name of the resource - - :param sub_resource_model: The model of the subresource - - :param service_model: The model of the service - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - identifiers_needed = [] - for identifier in sub_resource_model.resource.identifiers: - if identifier.source == 'input': - identifiers_needed.append(xform_name(identifier.target)) - - if include_signature: - signature_args = get_identifier_args_for_signature(identifiers_needed) - full_sub_resource_name = ( - f"{section.context.get('qualifier', '')}{sub_resource_model.name}" - ) - section.style.start_sphinx_py_method( - full_sub_resource_name, signature_args - ) - - method_intro_section = section.add_new_section('method-intro') - description = f'Creates a {sub_resource_model.resource.type} resource.' - method_intro_section.include_doc_string(description) - example_section = section.add_new_section('example') - example_values = get_identifier_values_for_example(identifiers_needed) - example_resource_name = xform_name(resource_name) - if service_model.service_name == resource_name: - example_resource_name = resource_name - example = f'{xform_name(sub_resource_model.resource.type)} = {example_resource_name}.{sub_resource_model.name}({example_values})' - example_section.style.start_codeblock() - example_section.write(example) - example_section.style.end_codeblock() - - param_section = section.add_new_section('params') - for identifier in identifiers_needed: - description = get_identifier_description( - sub_resource_model.name, identifier - ) - param_section.write(f':type {identifier}: string') - param_section.style.new_line() - param_section.write(f':param {identifier}: {description}') - param_section.style.new_line() - - return_section = section.add_new_section('return') - return_section.style.new_line() - return_section.write( - f':rtype: :py:class:`{get_service_module_name(service_model)}.{sub_resource_model.resource.type}`' - ) - return_section.style.new_line() - return_section.write( - f':returns: A {sub_resource_model.resource.type} resource' - ) - return_section.style.new_line() diff --git a/venv/Lib/site-packages/boto3/docs/utils.py b/venv/Lib/site-packages/boto3/docs/utils.py deleted file mode 100644 index 0830af5..0000000 --- a/venv/Lib/site-packages/boto3/docs/utils.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-import inspect - -import jmespath - - -def get_resource_ignore_params(params): - """Helper method to determine which parameters to ignore for actions - - :returns: A list of the parameter names that does not need to be - included in a resource's method call for documentation purposes. - """ - ignore_params = [] - for param in params: - result = jmespath.compile(param.target) - current = result.parsed - # Use JMESPath to find the left most element in the target expression - # which will be the parameter to ignore in the action call. - while current['children']: - current = current['children'][0] - # Make sure the parameter we are about to ignore is a field. - # If it is not, we should ignore the result to avoid false positives. - if current['type'] == 'field': - ignore_params.append(current['value']) - return ignore_params - - -def is_resource_action(action_handle): - return inspect.isfunction(action_handle) - - -def get_resource_public_actions(resource_class): - resource_class_members = inspect.getmembers(resource_class) - resource_methods = {} - for name, member in resource_class_members: - if not name.startswith('_'): - if not name[0].isupper(): - if not name.startswith('wait_until'): - if is_resource_action(member): - resource_methods[name] = member - return resource_methods - - -def get_identifier_values_for_example(identifier_names): - return ','.join([f'\'{identifier}\'' for identifier in identifier_names]) - - -def get_identifier_args_for_signature(identifier_names): - return ','.join(identifier_names) - - -def get_identifier_description(resource_name, identifier_name): - return ( - f"The {resource_name}'s {identifier_name} identifier. " - f"This **must** be set." - ) - - -def add_resource_type_overview( - section, resource_type, description, intro_link=None -): - section.style.new_line() - section.style.h3(resource_type) - section.style.new_line() - section.style.new_line() - section.write(description) - section.style.new_line() - if intro_link is not None: - section.write( - f'For more information about {resource_type.lower()} refer to the ' - f':ref:`Resources Introduction Guide<{intro_link}>`.' - ) - section.style.new_line() - - -class DocumentModifiedShape: - def __init__( - self, shape_name, new_type, new_description, new_example_value - ): - self._shape_name = shape_name - self._new_type = new_type - self._new_description = new_description - self._new_example_value = new_example_value - - def replace_documentation_for_matching_shape( - self, event_name, section, **kwargs - ): - if self._shape_name == section.context.get('shape'): - self._replace_documentation(event_name, section) - for section_name in section.available_sections: - sub_section = section.get_section(section_name) - if self._shape_name == sub_section.context.get('shape'): - self._replace_documentation(event_name, sub_section) - else: - self.replace_documentation_for_matching_shape( - event_name, sub_section - ) - - def _replace_documentation(self, event_name, section): - if event_name.startswith( - 'docs.request-example' - ) or event_name.startswith('docs.response-example'): - section.remove_all_sections() - section.clear_text() - section.write(self._new_example_value) - - if event_name.startswith( - 'docs.request-params' - ) or event_name.startswith('docs.response-params'): - allowed_sections = ( - 'param-name', - 'param-documentation', - 'end-structure', - 'param-type', - 'end-param', - ) - for section_name in section.available_sections: - # Delete any extra members as a new shape is being - # used. 
- if section_name not in allowed_sections: - section.delete_section(section_name) - - # Update the documentation - description_section = section.get_section('param-documentation') - description_section.clear_text() - description_section.write(self._new_description) - - # Update the param type - type_section = section.get_section('param-type') - if type_section.getvalue().decode('utf-8').startswith(':type'): - type_section.clear_text() - type_section.write(f':type {section.name}: {self._new_type}') - else: - type_section.clear_text() - type_section.style.italics(f'({self._new_type}) -- ') diff --git a/venv/Lib/site-packages/boto3/docs/waiter.py b/venv/Lib/site-packages/boto3/docs/waiter.py deleted file mode 100644 index 4713ce7..0000000 --- a/venv/Lib/site-packages/boto3/docs/waiter.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore import xform_name -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.method import document_model_driven_method -from botocore.utils import get_service_module_name - -from boto3.docs.base import NestedDocumenter -from boto3.docs.utils import ( - add_resource_type_overview, - get_resource_ignore_params, -) - - -class WaiterResourceDocumenter(NestedDocumenter): - def __init__(self, resource, service_waiter_model, root_docs_path): - super().__init__(resource, root_docs_path) - self._service_waiter_model = service_waiter_model - - def document_resource_waiters(self, section): - waiters = self._resource.meta.resource_model.waiters - add_resource_type_overview( - section=section, - resource_type='Waiters', - description=( - 'Waiters provide an interface to wait for a resource' - ' to reach a specific state.' - ), - intro_link='waiters_intro', - ) - waiter_list = [] - self.member_map['waiters'] = waiter_list - for waiter in waiters: - waiter_list.append(waiter.name) - # Create a new DocumentStructure for each waiter and add contents. - waiter_doc = DocumentStructure(waiter.name, target='html') - breadcrumb_section = waiter_doc.add_new_section('breadcrumb') - breadcrumb_section.style.ref(self._resource_class_name, 'index') - breadcrumb_section.write(f' / Waiter / {waiter.name}') - waiter_doc.add_title_section(waiter.name) - waiter_section = waiter_doc.add_new_section( - waiter.name, - context={'qualifier': f'{self.class_name}.'}, - ) - document_resource_waiter( - section=waiter_section, - resource_name=self._resource_name, - event_emitter=self._resource.meta.client.meta.events, - service_model=self._service_model, - resource_waiter_model=waiter, - service_waiter_model=self._service_waiter_model, - ) - # Write waiters in individual/nested files. 
- # Path: /reference/services///.rst - waiters_dir_path = os.path.join( - self._root_docs_path, - f'{self._service_name}', - f'{self._resource_sub_path}', - ) - waiter_doc.write_to_file(waiters_dir_path, waiter.name) - - -def document_resource_waiter( - section, - resource_name, - event_emitter, - service_model, - resource_waiter_model, - service_waiter_model, - include_signature=True, -): - waiter_model = service_waiter_model.get_waiter( - resource_waiter_model.waiter_name - ) - operation_model = service_model.operation_model(waiter_model.operation) - - ignore_params = get_resource_ignore_params(resource_waiter_model.params) - service_module_name = get_service_module_name(service_model) - description = ( - 'Waits until this {} is {}. This method calls ' - ':py:meth:`{}.Waiter.{}.wait` which polls ' - ':py:meth:`{}.Client.{}` every {} seconds until ' - 'a successful state is reached. An error is raised ' - 'after {} failed checks.'.format( - resource_name, - ' '.join(resource_waiter_model.name.split('_')[2:]), - service_module_name, - xform_name(resource_waiter_model.waiter_name), - service_module_name, - xform_name(waiter_model.operation), - waiter_model.delay, - waiter_model.max_attempts, - ) - ) - example_prefix = ( - f'{xform_name(resource_name)}.{resource_waiter_model.name}' - ) - full_waiter_name = ( - f"{section.context.get('qualifier', '')}{resource_waiter_model.name}" - ) - document_model_driven_method( - section=section, - method_name=full_waiter_name, - operation_model=operation_model, - event_emitter=event_emitter, - example_prefix=example_prefix, - method_description=description, - exclude_input=ignore_params, - include_signature=include_signature, - ) - if 'return' in section.available_sections: - # Waiters do not return anything so we should remove - # any sections that may document the underlying return - # value of the client method. - return_section = section.get_section('return') - return_section.clear_text() - return_section.remove_all_sections() - return_section.write(':returns: None') diff --git a/venv/Lib/site-packages/boto3/dynamodb/__init__.py b/venv/Lib/site-packages/boto3/dynamodb/__init__.py deleted file mode 100644 index 6001b27..0000000 --- a/venv/Lib/site-packages/boto3/dynamodb/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
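The ``document_resource_waiter`` function removed above generates descriptions of the form "Waits until this resource is X; polls the client operation every N seconds until a successful state is reached." For context, this is the call pattern those generated docs describe; a minimal sketch against the EC2 resource (the instance id is illustrative)::

    import boto3

    ec2 = boto3.resource('ec2')
    instance = ec2.Instance('i-0123456789abcdef0')  # hypothetical instance id

    # Polls the underlying describe operation on an interval until the
    # instance reaches 'running', raising after the configured number of
    # failed checks.
    instance.wait_until_running()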
diff --git a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/boto3/dynamodb/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index f67059a..0000000 Binary files a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/conditions.cpython-312.pyc b/venv/Lib/site-packages/boto3/dynamodb/__pycache__/conditions.cpython-312.pyc deleted file mode 100644 index 967d480..0000000 Binary files a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/conditions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/table.cpython-312.pyc b/venv/Lib/site-packages/boto3/dynamodb/__pycache__/table.cpython-312.pyc deleted file mode 100644 index 106a0a6..0000000 Binary files a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/table.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/transform.cpython-312.pyc b/venv/Lib/site-packages/boto3/dynamodb/__pycache__/transform.cpython-312.pyc deleted file mode 100644 index d05b230..0000000 Binary files a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/transform.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/types.cpython-312.pyc b/venv/Lib/site-packages/boto3/dynamodb/__pycache__/types.cpython-312.pyc deleted file mode 100644 index 182f27d..0000000 Binary files a/venv/Lib/site-packages/boto3/dynamodb/__pycache__/types.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/dynamodb/conditions.py b/venv/Lib/site-packages/boto3/dynamodb/conditions.py deleted file mode 100644 index 8307489..0000000 --- a/venv/Lib/site-packages/boto3/dynamodb/conditions.py +++ /dev/null @@ -1,461 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
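The ``conditions`` module deleted below is the public surface behind resource-level query and scan filters. A minimal usage sketch, assuming a hypothetical table keyed on ``pk``/``sk``::

    import boto3
    from boto3.dynamodb.conditions import Attr, Key

    table = boto3.resource('dynamodb').Table('my-table')  # illustrative table name

    # KeyConditionExpression accepts Key() conditions; FilterExpression accepts Attr().
    response = table.query(
        KeyConditionExpression=Key('pk').eq('USER#42') & Key('sk').begins_with('ORDER#'),
        FilterExpression=Attr('status').eq('PAID'),
    )
    items = response['Items']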
-import re -from collections import namedtuple - -from boto3.exceptions import ( - DynamoDBNeedsConditionError, - DynamoDBNeedsKeyConditionError, - DynamoDBOperationNotSupportedError, -) - -ATTR_NAME_REGEX = re.compile(r'[^.\[\]]+(?![^\[]*\])') - - -class ConditionBase: - expression_format = '' - expression_operator = '' - has_grouped_values = False - - def __init__(self, *values): - self._values = values - - def __and__(self, other): - if not isinstance(other, ConditionBase): - raise DynamoDBOperationNotSupportedError('AND', other) - return And(self, other) - - def __or__(self, other): - if not isinstance(other, ConditionBase): - raise DynamoDBOperationNotSupportedError('OR', other) - return Or(self, other) - - def __invert__(self): - return Not(self) - - def get_expression(self): - return { - 'format': self.expression_format, - 'operator': self.expression_operator, - 'values': self._values, - } - - def __eq__(self, other): - if isinstance(other, type(self)): - if self._values == other._values: - return True - return False - - def __ne__(self, other): - return not self.__eq__(other) - - -class AttributeBase: - def __init__(self, name): - self.name = name - - def __and__(self, value): - raise DynamoDBOperationNotSupportedError('AND', self) - - def __or__(self, value): - raise DynamoDBOperationNotSupportedError('OR', self) - - def __invert__(self): - raise DynamoDBOperationNotSupportedError('NOT', self) - - def eq(self, value): - """Creates a condition where the attribute is equal to the value. - - :param value: The value that the attribute is equal to. - """ - return Equals(self, value) - - def lt(self, value): - """Creates a condition where the attribute is less than the value. - - :param value: The value that the attribute is less than. - """ - return LessThan(self, value) - - def lte(self, value): - """Creates a condition where the attribute is less than or equal to the - value. - - :param value: The value that the attribute is less than or equal to. - """ - return LessThanEquals(self, value) - - def gt(self, value): - """Creates a condition where the attribute is greater than the value. - - :param value: The value that the attribute is greater than. - """ - return GreaterThan(self, value) - - def gte(self, value): - """Creates a condition where the attribute is greater than or equal to - the value. - - :param value: The value that the attribute is greater than or equal to. - """ - return GreaterThanEquals(self, value) - - def begins_with(self, value): - """Creates a condition where the attribute begins with the value. - - :param value: The value that the attribute begins with. - """ - return BeginsWith(self, value) - - def between(self, low_value, high_value): - """Creates a condition where the attribute is greater than or equal - to the low value and less than or equal to the high value. - - :param low_value: The value that the attribute is greater than or equal to. - :param high_value: The value that the attribute is less than or equal to. - """ - return Between(self, low_value, high_value) - - def __eq__(self, other): - return isinstance(other, type(self)) and self.name == other.name - - def __ne__(self, other): - return not self.__eq__(other) - - -class ConditionAttributeBase(ConditionBase, AttributeBase): - """This base class is for conditions that can have attribute methods. - - One example is the Size condition. To complete a condition, you need - to apply another AttributeBase method like eq(). 
- """ - - def __init__(self, *values): - ConditionBase.__init__(self, *values) - # This is assuming the first value to the condition is the attribute - # in which can be used to generate its attribute base. - AttributeBase.__init__(self, values[0].name) - - def __eq__(self, other): - return ConditionBase.__eq__(self, other) and AttributeBase.__eq__( - self, other - ) - - def __ne__(self, other): - return not self.__eq__(other) - - -class ComparisonCondition(ConditionBase): - expression_format = '{0} {operator} {1}' - - -class Equals(ComparisonCondition): - expression_operator = '=' - - -class NotEquals(ComparisonCondition): - expression_operator = '<>' - - -class LessThan(ComparisonCondition): - expression_operator = '<' - - -class LessThanEquals(ComparisonCondition): - expression_operator = '<=' - - -class GreaterThan(ComparisonCondition): - expression_operator = '>' - - -class GreaterThanEquals(ComparisonCondition): - expression_operator = '>=' - - -class In(ComparisonCondition): - expression_operator = 'IN' - has_grouped_values = True - - -class Between(ConditionBase): - expression_operator = 'BETWEEN' - expression_format = '{0} {operator} {1} AND {2}' - - -class BeginsWith(ConditionBase): - expression_operator = 'begins_with' - expression_format = '{operator}({0}, {1})' - - -class Contains(ConditionBase): - expression_operator = 'contains' - expression_format = '{operator}({0}, {1})' - - -class Size(ConditionAttributeBase): - expression_operator = 'size' - expression_format = '{operator}({0})' - - -class AttributeType(ConditionBase): - expression_operator = 'attribute_type' - expression_format = '{operator}({0}, {1})' - - -class AttributeExists(ConditionBase): - expression_operator = 'attribute_exists' - expression_format = '{operator}({0})' - - -class AttributeNotExists(ConditionBase): - expression_operator = 'attribute_not_exists' - expression_format = '{operator}({0})' - - -class And(ConditionBase): - expression_operator = 'AND' - expression_format = '({0} {operator} {1})' - - -class Or(ConditionBase): - expression_operator = 'OR' - expression_format = '({0} {operator} {1})' - - -class Not(ConditionBase): - expression_operator = 'NOT' - expression_format = '({operator} {0})' - - -class Key(AttributeBase): - pass - - -class Attr(AttributeBase): - """Represents an DynamoDB item's attribute.""" - - def ne(self, value): - """Creates a condition where the attribute is not equal to the value - - :param value: The value that the attribute is not equal to. - """ - return NotEquals(self, value) - - def is_in(self, value): - """Creates a condition where the attribute is in the value, - - :type value: list - :param value: The value that the attribute is in. - """ - return In(self, value) - - def exists(self): - """Creates a condition where the attribute exists.""" - return AttributeExists(self) - - def not_exists(self): - """Creates a condition where the attribute does not exist.""" - return AttributeNotExists(self) - - def contains(self, value): - """Creates a condition where the attribute contains the value. - - :param value: The value the attribute contains. - """ - return Contains(self, value) - - def size(self): - """Creates a condition for the attribute size. - - Note another AttributeBase method must be called on the returned - size condition to be a valid DynamoDB condition. - """ - return Size(self) - - def attribute_type(self, value): - """Creates a condition for the attribute type. - - :param value: The type of the attribute. 
- """ - return AttributeType(self, value) - - -BuiltConditionExpression = namedtuple( - 'BuiltConditionExpression', - [ - 'condition_expression', - 'attribute_name_placeholders', - 'attribute_value_placeholders', - ], -) - - -class ConditionExpressionBuilder: - """This class is used to build condition expressions with placeholders""" - - def __init__(self): - self._name_count = 0 - self._value_count = 0 - self._name_placeholder = 'n' - self._value_placeholder = 'v' - - def _get_name_placeholder(self): - return f"#{self._name_placeholder}{self._name_count}" - - def _get_value_placeholder(self): - return f":{self._value_placeholder}{self._value_count}" - - def reset(self): - """Resets the placeholder name and values""" - self._name_count = 0 - self._value_count = 0 - - def build_expression(self, condition, is_key_condition=False): - """Builds the condition expression and the dictionary of placeholders. - - :type condition: ConditionBase - :param condition: A condition to be built into a condition expression - string with any necessary placeholders. - - :type is_key_condition: Boolean - :param is_key_condition: True if the expression is for a - KeyConditionExpression. False otherwise. - - :rtype: (string, dict, dict) - :returns: Will return a string representing the condition with - placeholders inserted where necessary, a dictionary of - placeholders for attribute names, and a dictionary of - placeholders for attribute values. Here is a sample return value: - - ('#n0 = :v0', {'#n0': 'myattribute'}, {':v1': 'myvalue'}) - """ - if not isinstance(condition, ConditionBase): - raise DynamoDBNeedsConditionError(condition) - attribute_name_placeholders = {} - attribute_value_placeholders = {} - condition_expression = self._build_expression( - condition, - attribute_name_placeholders, - attribute_value_placeholders, - is_key_condition=is_key_condition, - ) - return BuiltConditionExpression( - condition_expression=condition_expression, - attribute_name_placeholders=attribute_name_placeholders, - attribute_value_placeholders=attribute_value_placeholders, - ) - - def _build_expression( - self, - condition, - attribute_name_placeholders, - attribute_value_placeholders, - is_key_condition, - ): - expression_dict = condition.get_expression() - replaced_values = [] - for value in expression_dict['values']: - # Build the necessary placeholders for that value. - # Placeholders are built for both attribute names and values. - replaced_value = self._build_expression_component( - value, - attribute_name_placeholders, - attribute_value_placeholders, - condition.has_grouped_values, - is_key_condition, - ) - replaced_values.append(replaced_value) - # Fill out the expression using the operator and the - # values that have been replaced with placeholders. - return expression_dict['format'].format( - *replaced_values, operator=expression_dict['operator'] - ) - - def _build_expression_component( - self, - value, - attribute_name_placeholders, - attribute_value_placeholders, - has_grouped_values, - is_key_condition, - ): - # Continue to recurse if the value is a ConditionBase in order - # to extract out all parts of the expression. - if isinstance(value, ConditionBase): - return self._build_expression( - value, - attribute_name_placeholders, - attribute_value_placeholders, - is_key_condition, - ) - # If it is not a ConditionBase, we can recurse no further. 
- # So we check if it is an attribute and add placeholders for - # its name - elif isinstance(value, AttributeBase): - if is_key_condition and not isinstance(value, Key): - raise DynamoDBNeedsKeyConditionError( - f'Attribute object {value.name} is of type {type(value)}. ' - f'KeyConditionExpression only supports Attribute objects ' - f'of type Key' - ) - return self._build_name_placeholder( - value, attribute_name_placeholders - ) - # If it is anything else, we treat it as a value and thus placeholders - # are needed for the value. - else: - return self._build_value_placeholder( - value, attribute_value_placeholders, has_grouped_values - ) - - def _build_name_placeholder(self, value, attribute_name_placeholders): - attribute_name = value.name - # Figure out which parts of the attribute name that needs replacement. - attribute_name_parts = ATTR_NAME_REGEX.findall(attribute_name) - - # Add a temporary placeholder for each of these parts. - placeholder_format = ATTR_NAME_REGEX.sub('%s', attribute_name) - str_format_args = [] - for part in attribute_name_parts: - name_placeholder = self._get_name_placeholder() - self._name_count += 1 - str_format_args.append(name_placeholder) - # Add the placeholder and value to dictionary of name placeholders. - attribute_name_placeholders[name_placeholder] = part - # Replace the temporary placeholders with the designated placeholders. - return placeholder_format % tuple(str_format_args) - - def _build_value_placeholder( - self, value, attribute_value_placeholders, has_grouped_values=False - ): - # If the values are grouped, we need to add a placeholder for - # each element inside of the actual value. - if has_grouped_values: - placeholder_list = [] - for v in value: - value_placeholder = self._get_value_placeholder() - self._value_count += 1 - placeholder_list.append(value_placeholder) - attribute_value_placeholders[value_placeholder] = v - # Assuming the values are grouped by parenthesis. - # IN is the currently the only one that uses this so it maybe - # needed to be changed in future. - return f"({', '.join(placeholder_list)})" - # Otherwise, treat the value as a single value that needs only - # one placeholder. - else: - value_placeholder = self._get_value_placeholder() - self._value_count += 1 - attribute_value_placeholders[value_placeholder] = value - return value_placeholder diff --git a/venv/Lib/site-packages/boto3/dynamodb/table.py b/venv/Lib/site-packages/boto3/dynamodb/table.py deleted file mode 100644 index d937b4d..0000000 --- a/venv/Lib/site-packages/boto3/dynamodb/table.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import logging - -logger = logging.getLogger(__name__) - - -def register_table_methods(base_classes, **kwargs): - base_classes.insert(0, TableResource) - - -# This class can be used to add any additional methods we want -# onto a table resource. Ideally to avoid creating a new -# base class for every method we can just update this -# class instead. 
Just be sure to move the bulk of the -# actual method implementation to another class. -class TableResource: - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def batch_writer(self, overwrite_by_pkeys=None): - """Create a batch writer object. - - This method creates a context manager for writing - objects to Amazon DynamoDB in batch. - - The batch writer will automatically handle buffering and sending items - in batches. In addition, the batch writer will also automatically - handle any unprocessed items and resend them as needed. All you need - to do is call ``put_item`` for any items you want to add, and - ``delete_item`` for any items you want to delete. - - Example usage:: - - with table.batch_writer() as batch: - for _ in range(1000000): - batch.put_item(Item={'HashKey': '...', - 'Otherstuff': '...'}) - # You can also delete_items in a batch. - batch.delete_item(Key={'HashKey': 'SomeHashKey'}) - - :type overwrite_by_pkeys: list(string) - :param overwrite_by_pkeys: De-duplicate request items in buffer - if match new request item on specified primary keys. i.e - ``["partition_key1", "sort_key2", "sort_key3"]`` - - """ - return BatchWriter( - self.name, self.meta.client, overwrite_by_pkeys=overwrite_by_pkeys - ) - - -class BatchWriter: - """Automatically handle batch writes to DynamoDB for a single table.""" - - def __init__( - self, table_name, client, flush_amount=25, overwrite_by_pkeys=None - ): - """ - - :type table_name: str - :param table_name: The name of the table. The class handles - batch writes to a single table. - - :type client: ``botocore.client.Client`` - :param client: A botocore client. Note this client - **must** have the dynamodb customizations applied - to it for transforming AttributeValues into the - wire protocol. What this means in practice is that - you need to use a client that comes from a DynamoDB - resource if you're going to instantiate this class - directly, i.e - ``boto3.resource('dynamodb').Table('foo').meta.client``. - - :type flush_amount: int - :param flush_amount: The number of items to keep in - a local buffer before sending a batch_write_item - request to DynamoDB. - - :type overwrite_by_pkeys: list(string) - :param overwrite_by_pkeys: De-duplicate request items in buffer - if match new request item on specified primary keys. 
i.e - ``["partition_key1", "sort_key2", "sort_key3"]`` - - """ - self._table_name = table_name - self._client = client - self._items_buffer = [] - self._flush_amount = flush_amount - self._overwrite_by_pkeys = overwrite_by_pkeys - - def put_item(self, Item): - self._add_request_and_process({'PutRequest': {'Item': Item}}) - - def delete_item(self, Key): - self._add_request_and_process({'DeleteRequest': {'Key': Key}}) - - def _add_request_and_process(self, request): - if self._overwrite_by_pkeys: - self._remove_dup_pkeys_request_if_any(request) - self._items_buffer.append(request) - self._flush_if_needed() - - def _remove_dup_pkeys_request_if_any(self, request): - pkey_values_new = self._extract_pkey_values(request) - for item in self._items_buffer: - if self._extract_pkey_values(item) == pkey_values_new: - self._items_buffer.remove(item) - logger.debug( - "With overwrite_by_pkeys enabled, skipping request:%s", - item, - ) - - def _extract_pkey_values(self, request): - if request.get('PutRequest'): - return [ - request['PutRequest']['Item'][key] - for key in self._overwrite_by_pkeys - ] - elif request.get('DeleteRequest'): - return [ - request['DeleteRequest']['Key'][key] - for key in self._overwrite_by_pkeys - ] - return None - - def _flush_if_needed(self): - if len(self._items_buffer) >= self._flush_amount: - self._flush() - - def _flush(self): - items_to_send = self._items_buffer[: self._flush_amount] - self._items_buffer = self._items_buffer[self._flush_amount :] - response = self._client.batch_write_item( - RequestItems={self._table_name: items_to_send} - ) - unprocessed_items = response['UnprocessedItems'] - if not unprocessed_items: - unprocessed_items = {} - item_list = unprocessed_items.get(self._table_name, []) - # Any unprocessed_items are immediately added to the - # next batch we send. - self._items_buffer.extend(item_list) - logger.debug( - "Batch write sent %s, unprocessed: %s", - len(items_to_send), - len(self._items_buffer), - ) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, tb): - # When we exit, we need to keep flushing whatever's left - # until there's nothing left in our items buffer. - while self._items_buffer: - self._flush() diff --git a/venv/Lib/site-packages/boto3/dynamodb/transform.py b/venv/Lib/site-packages/boto3/dynamodb/transform.py deleted file mode 100644 index 3944f31..0000000 --- a/venv/Lib/site-packages/boto3/dynamodb/transform.py +++ /dev/null @@ -1,343 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import copy - -from boto3.compat import collections_abc -from boto3.docs.utils import DocumentModifiedShape -from boto3.dynamodb.conditions import ConditionBase, ConditionExpressionBuilder -from boto3.dynamodb.types import TypeDeserializer, TypeSerializer - - -def register_high_level_interface(base_classes, **kwargs): - base_classes.insert(0, DynamoDBHighLevelResource) - - -class _ForgetfulDict(dict): - """A dictionary that discards any items set on it. 
For use as `memo` in - `copy.deepcopy()` when every instance of a repeated object in the deepcopied - data structure should result in a separate copy. - """ - - def __setitem__(self, key, value): - pass - - -def copy_dynamodb_params(params, **kwargs): - return copy.deepcopy(params, memo=_ForgetfulDict()) - - -class DynamoDBHighLevelResource: - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - # Apply handler that creates a copy of the user provided dynamodb - # item such that it can be modified. - self.meta.client.meta.events.register( - 'provide-client-params.dynamodb', - copy_dynamodb_params, - unique_id='dynamodb-create-params-copy', - ) - - self._injector = TransformationInjector() - # Apply the handler that generates condition expressions including - # placeholders. - self.meta.client.meta.events.register( - 'before-parameter-build.dynamodb', - self._injector.inject_condition_expressions, - unique_id='dynamodb-condition-expression', - ) - - # Apply the handler that serializes the request from python - # types to dynamodb types. - self.meta.client.meta.events.register( - 'before-parameter-build.dynamodb', - self._injector.inject_attribute_value_input, - unique_id='dynamodb-attr-value-input', - ) - - # Apply the handler that deserializes the response from dynamodb - # types to python types. - self.meta.client.meta.events.register( - 'after-call.dynamodb', - self._injector.inject_attribute_value_output, - unique_id='dynamodb-attr-value-output', - ) - - # Apply the documentation customizations to account for - # the transformations. - attr_value_shape_docs = DocumentModifiedShape( - 'AttributeValue', - new_type='valid DynamoDB type', - new_description=( - '- The value of the attribute. The valid value types are ' - 'listed in the ' - ':ref:`DynamoDB Reference Guide`.' - ), - new_example_value=( - '\'string\'|123|Binary(b\'bytes\')|True|None|set([\'string\'])' - '|set([123])|set([Binary(b\'bytes\')])|[]|{}' - ), - ) - - key_expression_shape_docs = DocumentModifiedShape( - 'KeyExpression', - new_type=( - 'condition from :py:class:`boto3.dynamodb.conditions.Key` ' - 'method' - ), - new_description=( - 'The condition(s) a key(s) must meet. Valid conditions are ' - 'listed in the ' - ':ref:`DynamoDB Reference Guide`.' - ), - new_example_value='Key(\'mykey\').eq(\'myvalue\')', - ) - - con_expression_shape_docs = DocumentModifiedShape( - 'ConditionExpression', - new_type=( - 'condition from :py:class:`boto3.dynamodb.conditions.Attr` ' - 'method' - ), - new_description=( - 'The condition(s) an attribute(s) must meet. Valid conditions ' - 'are listed in the ' - ':ref:`DynamoDB Reference Guide`.' 
- ), - new_example_value='Attr(\'myattribute\').eq(\'myvalue\')', - ) - - self.meta.client.meta.events.register( - 'docs.*.dynamodb.*.complete-section', - attr_value_shape_docs.replace_documentation_for_matching_shape, - unique_id='dynamodb-attr-value-docs', - ) - - self.meta.client.meta.events.register( - 'docs.*.dynamodb.*.complete-section', - key_expression_shape_docs.replace_documentation_for_matching_shape, - unique_id='dynamodb-key-expression-docs', - ) - - self.meta.client.meta.events.register( - 'docs.*.dynamodb.*.complete-section', - con_expression_shape_docs.replace_documentation_for_matching_shape, - unique_id='dynamodb-cond-expression-docs', - ) - - -class TransformationInjector: - """Injects the transformations into the user provided parameters.""" - - def __init__( - self, - transformer=None, - condition_builder=None, - serializer=None, - deserializer=None, - ): - self._transformer = transformer - if transformer is None: - self._transformer = ParameterTransformer() - - self._condition_builder = condition_builder - if condition_builder is None: - self._condition_builder = ConditionExpressionBuilder() - - self._serializer = serializer - if serializer is None: - self._serializer = TypeSerializer() - - self._deserializer = deserializer - if deserializer is None: - self._deserializer = TypeDeserializer() - - def inject_condition_expressions(self, params, model, **kwargs): - """Injects the condition expression transformation into the parameters - - This injection includes transformations for ConditionExpression shapes - and KeyExpression shapes. It also handles any placeholder names and - values that are generated when transforming the condition expressions. - """ - self._condition_builder.reset() - generated_names = {} - generated_values = {} - - # Create and apply the Condition Expression transformation. - transformation = ConditionExpressionTransformation( - self._condition_builder, - placeholder_names=generated_names, - placeholder_values=generated_values, - is_key_condition=False, - ) - self._transformer.transform( - params, model.input_shape, transformation, 'ConditionExpression' - ) - - # Create and apply the Key Condition Expression transformation. - transformation = ConditionExpressionTransformation( - self._condition_builder, - placeholder_names=generated_names, - placeholder_values=generated_values, - is_key_condition=True, - ) - self._transformer.transform( - params, model.input_shape, transformation, 'KeyExpression' - ) - - expr_attr_names_input = 'ExpressionAttributeNames' - expr_attr_values_input = 'ExpressionAttributeValues' - - # Now that all of the condition expression transformation are done, - # update the placeholder dictionaries in the request. 
- if expr_attr_names_input in params: - params[expr_attr_names_input].update(generated_names) - else: - if generated_names: - params[expr_attr_names_input] = generated_names - - if expr_attr_values_input in params: - params[expr_attr_values_input].update(generated_values) - else: - if generated_values: - params[expr_attr_values_input] = generated_values - - def inject_attribute_value_input(self, params, model, **kwargs): - """Injects DynamoDB serialization into parameter input""" - self._transformer.transform( - params, - model.input_shape, - self._serializer.serialize, - 'AttributeValue', - ) - - def inject_attribute_value_output(self, parsed, model, **kwargs): - """Injects DynamoDB deserialization into responses""" - if model.output_shape is not None: - self._transformer.transform( - parsed, - model.output_shape, - self._deserializer.deserialize, - 'AttributeValue', - ) - - -class ConditionExpressionTransformation: - """Provides a transformation for condition expressions - - The ``ParameterTransformer`` class can call this class directly - to transform the condition expressions in the parameters provided. - """ - - def __init__( - self, - condition_builder, - placeholder_names, - placeholder_values, - is_key_condition=False, - ): - self._condition_builder = condition_builder - self._placeholder_names = placeholder_names - self._placeholder_values = placeholder_values - self._is_key_condition = is_key_condition - - def __call__(self, value): - if isinstance(value, ConditionBase): - # Create a conditional expression string with placeholders - # for the provided condition. - built_expression = self._condition_builder.build_expression( - value, is_key_condition=self._is_key_condition - ) - - self._placeholder_names.update( - built_expression.attribute_name_placeholders - ) - self._placeholder_values.update( - built_expression.attribute_value_placeholders - ) - - return built_expression.condition_expression - # Use the user provided value if it is not a ConditonBase object. - return value - - -class ParameterTransformer: - """Transforms the input to and output from botocore based on shape""" - - def transform(self, params, model, transformation, target_shape): - """Transforms the dynamodb input to or output from botocore - - It applies a specified transformation whenever a specific shape name - is encountered while traversing the parameters in the dictionary. - - :param params: The parameters structure to transform. - :param model: The operation model. 
- :param transformation: The function to apply the parameter - :param target_shape: The name of the shape to apply the - transformation to - """ - self._transform_parameters(model, params, transformation, target_shape) - - def _transform_parameters( - self, model, params, transformation, target_shape - ): - type_name = model.type_name - if type_name in ('structure', 'map', 'list'): - getattr(self, f'_transform_{type_name}')( - model, params, transformation, target_shape - ) - - def _transform_structure( - self, model, params, transformation, target_shape - ): - if not isinstance(params, collections_abc.Mapping): - return - for param in params: - if param in model.members: - member_model = model.members[param] - member_shape = member_model.name - if member_shape == target_shape: - params[param] = transformation(params[param]) - else: - self._transform_parameters( - member_model, - params[param], - transformation, - target_shape, - ) - - def _transform_map(self, model, params, transformation, target_shape): - if not isinstance(params, collections_abc.Mapping): - return - value_model = model.value - value_shape = value_model.name - for key, value in params.items(): - if value_shape == target_shape: - params[key] = transformation(value) - else: - self._transform_parameters( - value_model, params[key], transformation, target_shape - ) - - def _transform_list(self, model, params, transformation, target_shape): - if not isinstance(params, collections_abc.MutableSequence): - return - member_model = model.member - member_shape = member_model.name - for i, item in enumerate(params): - if member_shape == target_shape: - params[i] = transformation(item) - else: - self._transform_parameters( - member_model, params[i], transformation, target_shape - ) diff --git a/venv/Lib/site-packages/boto3/dynamodb/types.py b/venv/Lib/site-packages/boto3/dynamodb/types.py deleted file mode 100644 index f358b12..0000000 --- a/venv/Lib/site-packages/boto3/dynamodb/types.py +++ /dev/null @@ -1,310 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from decimal import ( - Clamped, - Context, - Decimal, - Inexact, - Overflow, - Rounded, - Underflow, -) - -from boto3.compat import collections_abc - -STRING = 'S' -NUMBER = 'N' -BINARY = 'B' -STRING_SET = 'SS' -NUMBER_SET = 'NS' -BINARY_SET = 'BS' -NULL = 'NULL' -BOOLEAN = 'BOOL' -MAP = 'M' -LIST = 'L' - - -DYNAMODB_CONTEXT = Context( - Emin=-128, - Emax=126, - prec=38, - traps=[Clamped, Overflow, Inexact, Rounded, Underflow], -) - - -BINARY_TYPES = (bytearray, bytes) - - -class Binary: - """A class for representing Binary in dynamodb - - Especially for Python 2, use this class to explicitly specify - binary data for item in DynamoDB. It is essentially a wrapper around - binary. Unicode and Python 3 string types are not allowed. 
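The ``Binary`` wrapper above and the ``TypeSerializer``/``TypeDeserializer`` classes later in this file translate between Python values and the DynamoDB wire types. A minimal round-trip sketch::

    from decimal import Decimal

    from boto3.dynamodb.types import Binary, TypeDeserializer, TypeSerializer

    serializer = TypeSerializer()
    deserializer = TypeDeserializer()

    # Python values -> DynamoDB AttributeValue structure
    wire = serializer.serialize(
        {'id': 'abc', 'count': Decimal('3'), 'blob': Binary(b'\x00')}
    )
    # {'M': {'id': {'S': 'abc'}, 'count': {'N': '3'}, 'blob': {'B': b'\x00'}}}

    # ...and back again; numbers come back as Decimal, bytes as Binary
    roundtrip = deserializer.deserialize(wire)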
- """ - - def __init__(self, value): - if not isinstance(value, BINARY_TYPES): - types = ', '.join([str(t) for t in BINARY_TYPES]) - raise TypeError(f'Value must be of the following types: {types}') - self.value = value - - def __eq__(self, other): - if isinstance(other, Binary): - return self.value == other.value - return self.value == other - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return f'Binary({self.value!r})' - - def __str__(self): - return self.value - - def __bytes__(self): - return self.value - - def __hash__(self): - return hash(self.value) - - -class TypeSerializer: - """This class serializes Python data types to DynamoDB types.""" - - def serialize(self, value): - """The method to serialize the Python data types. - - :param value: A python value to be serialized to DynamoDB. Here are - the various conversions: - - Python DynamoDB - ------ -------- - None {'NULL': True} - True/False {'BOOL': True/False} - int/Decimal {'N': str(value)} - string {'S': string} - Binary/bytearray/bytes (py3 only) {'B': bytes} - set([int/Decimal]) {'NS': [str(value)]} - set([string]) {'SS': [string]) - set([Binary/bytearray/bytes]) {'BS': [bytes]} - list {'L': list} - dict {'M': dict} - - For types that involve numbers, it is recommended that ``Decimal`` - objects are used to be able to round-trip the Python type. - For types that involve binary, it is recommended that ``Binary`` - objects are used to be able to round-trip the Python type. - - :rtype: dict - :returns: A dictionary that represents a dynamoDB data type. These - dictionaries can be directly passed to botocore methods. - """ - dynamodb_type = self._get_dynamodb_type(value) - serializer = getattr(self, f'_serialize_{dynamodb_type}'.lower()) - return {dynamodb_type: serializer(value)} - - def _get_dynamodb_type(self, value): - dynamodb_type = None - - if self._is_null(value): - dynamodb_type = NULL - - elif self._is_boolean(value): - dynamodb_type = BOOLEAN - - elif self._is_number(value): - dynamodb_type = NUMBER - - elif self._is_string(value): - dynamodb_type = STRING - - elif self._is_binary(value): - dynamodb_type = BINARY - - elif self._is_type_set(value, self._is_number): - dynamodb_type = NUMBER_SET - - elif self._is_type_set(value, self._is_string): - dynamodb_type = STRING_SET - - elif self._is_type_set(value, self._is_binary): - dynamodb_type = BINARY_SET - - elif self._is_map(value): - dynamodb_type = MAP - - elif self._is_listlike(value): - dynamodb_type = LIST - - else: - msg = f'Unsupported type "{type(value)}" for value "{value}"' - raise TypeError(msg) - - return dynamodb_type - - def _is_null(self, value): - if value is None: - return True - return False - - def _is_boolean(self, value): - if isinstance(value, bool): - return True - return False - - def _is_number(self, value): - if isinstance(value, (int, Decimal)): - return True - elif isinstance(value, float): - raise TypeError( - 'Float types are not supported. Use Decimal types instead.' 
- ) - return False - - def _is_string(self, value): - if isinstance(value, str): - return True - return False - - def _is_binary(self, value): - if isinstance(value, (Binary, bytearray, bytes)): - return True - return False - - def _is_set(self, value): - if isinstance(value, collections_abc.Set): - return True - return False - - def _is_type_set(self, value, type_validator): - if self._is_set(value): - if False not in map(type_validator, value): - return True - return False - - def _is_map(self, value): - if isinstance(value, collections_abc.Mapping): - return True - return False - - def _is_listlike(self, value): - if isinstance(value, (list, tuple)): - return True - return False - - def _serialize_null(self, value): - return True - - def _serialize_bool(self, value): - return value - - def _serialize_n(self, value): - number = str(DYNAMODB_CONTEXT.create_decimal(value)) - if number in ['Infinity', 'NaN']: - raise TypeError('Infinity and NaN not supported') - return number - - def _serialize_s(self, value): - return value - - def _serialize_b(self, value): - if isinstance(value, Binary): - value = value.value - return value - - def _serialize_ss(self, value): - return [self._serialize_s(s) for s in value] - - def _serialize_ns(self, value): - return [self._serialize_n(n) for n in value] - - def _serialize_bs(self, value): - return [self._serialize_b(b) for b in value] - - def _serialize_l(self, value): - return [self.serialize(v) for v in value] - - def _serialize_m(self, value): - return {k: self.serialize(v) for k, v in value.items()} - - -class TypeDeserializer: - """This class deserializes DynamoDB types to Python types.""" - - def deserialize(self, value): - """The method to deserialize the DynamoDB data types. - - :param value: A DynamoDB value to be deserialized to a pythonic value. - Here are the various conversions: - - DynamoDB Python - -------- ------ - {'NULL': True} None - {'BOOL': True/False} True/False - {'N': str(value)} Decimal(str(value)) - {'S': string} string - {'B': bytes} Binary(bytes) - {'NS': [str(value)]} set([Decimal(str(value))]) - {'SS': [string]} set([string]) - {'BS': [bytes]} set([bytes]) - {'L': list} list - {'M': dict} dict - - :returns: The pythonic value of the DynamoDB type. - """ - - if not value: - raise TypeError( - 'Value must be a nonempty dictionary whose key ' - 'is a valid dynamodb type.' 
- ) - dynamodb_type = list(value.keys())[0] - try: - deserializer = getattr( - self, f'_deserialize_{dynamodb_type}'.lower() - ) - except AttributeError: - raise TypeError(f'Dynamodb type {dynamodb_type} is not supported') - return deserializer(value[dynamodb_type]) - - def _deserialize_null(self, value): - return None - - def _deserialize_bool(self, value): - return value - - def _deserialize_n(self, value): - return DYNAMODB_CONTEXT.create_decimal(value) - - def _deserialize_s(self, value): - return value - - def _deserialize_b(self, value): - return Binary(value) - - def _deserialize_ns(self, value): - return set(map(self._deserialize_n, value)) - - def _deserialize_ss(self, value): - return set(map(self._deserialize_s, value)) - - def _deserialize_bs(self, value): - return set(map(self._deserialize_b, value)) - - def _deserialize_l(self, value): - return [self.deserialize(v) for v in value] - - def _deserialize_m(self, value): - return {k: self.deserialize(v) for k, v in value.items()} diff --git a/venv/Lib/site-packages/boto3/ec2/__init__.py b/venv/Lib/site-packages/boto3/ec2/__init__.py deleted file mode 100644 index 6001b27..0000000 --- a/venv/Lib/site-packages/boto3/ec2/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. diff --git a/venv/Lib/site-packages/boto3/ec2/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/boto3/ec2/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ce79355..0000000 Binary files a/venv/Lib/site-packages/boto3/ec2/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/ec2/__pycache__/createtags.cpython-312.pyc b/venv/Lib/site-packages/boto3/ec2/__pycache__/createtags.cpython-312.pyc deleted file mode 100644 index c4f83a3..0000000 Binary files a/venv/Lib/site-packages/boto3/ec2/__pycache__/createtags.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/ec2/__pycache__/deletetags.cpython-312.pyc b/venv/Lib/site-packages/boto3/ec2/__pycache__/deletetags.cpython-312.pyc deleted file mode 100644 index 8e07aa0..0000000 Binary files a/venv/Lib/site-packages/boto3/ec2/__pycache__/deletetags.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/ec2/createtags.py b/venv/Lib/site-packages/boto3/ec2/createtags.py deleted file mode 100644 index ec0ff1a..0000000 --- a/venv/Lib/site-packages/boto3/ec2/createtags.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. 
See the License for the specific -# language governing permissions and limitations under the License. - - -def inject_create_tags(event_name, class_attributes, **kwargs): - """This injects a custom create_tags method onto the ec2 service resource - - This is needed because the resource model is not able to express - creating multiple tag resources based on the fact you can apply a set - of tags to multiple ec2 resources. - """ - class_attributes['create_tags'] = create_tags - - -def create_tags(self, **kwargs): - # Call the client method - self.meta.client.create_tags(**kwargs) - resources = kwargs.get('Resources', []) - tags = kwargs.get('Tags', []) - tag_resources = [] - - # Generate all of the tag resources that just were created with the - # preceding client call. - for resource in resources: - for tag in tags: - # Add each tag from the tag set for each resource to the list - # that is returned by the method. - tag_resource = self.Tag(resource, tag['Key'], tag['Value']) - tag_resources.append(tag_resource) - return tag_resources diff --git a/venv/Lib/site-packages/boto3/ec2/deletetags.py b/venv/Lib/site-packages/boto3/ec2/deletetags.py deleted file mode 100644 index 19876d0..0000000 --- a/venv/Lib/site-packages/boto3/ec2/deletetags.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from boto3.resources.action import CustomModeledAction - - -def inject_delete_tags(event_emitter, **kwargs): - action_model = { - 'request': { - 'operation': 'DeleteTags', - 'params': [ - { - 'target': 'Resources[0]', - 'source': 'identifier', - 'name': 'Id', - } - ], - } - } - action = CustomModeledAction( - 'delete_tags', action_model, delete_tags, event_emitter - ) - action.inject(**kwargs) - - -def delete_tags(self, **kwargs): - kwargs['Resources'] = [self.id] - return self.meta.client.delete_tags(**kwargs) diff --git a/venv/Lib/site-packages/boto3/examples/cloudfront.rst b/venv/Lib/site-packages/boto3/examples/cloudfront.rst deleted file mode 100644 index ddec198..0000000 --- a/venv/Lib/site-packages/boto3/examples/cloudfront.rst +++ /dev/null @@ -1,35 +0,0 @@ -Generate a signed URL for Amazon CloudFront -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The following example shows how to generate a signed URL for Amazon CloudFront. 
-Note that you will need the ``cryptography`` `library `__ to follow this example:: - - import datetime - - from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives import serialization - from cryptography.hazmat.primitives.asymmetric import padding - from botocore.signers import CloudFrontSigner - - - def rsa_signer(message): - with open('path/to/key.pem', 'rb') as key_file: - private_key = serialization.load_pem_private_key( - key_file.read(), - password=None, - backend=default_backend() - ) - return private_key.sign(message, padding.PKCS1v15(), hashes.SHA1()) - - key_id = 'AKIAIOSFODNN7EXAMPLE' - url = 'http://d2949o5mkkp72v.cloudfront.net/hello.txt' - expire_date = datetime.datetime(2017, 1, 1) - - cloudfront_signer = CloudFrontSigner(key_id, rsa_signer) - - # Create a signed url that will be valid until the specific expiry date - # provided using a canned policy. - signed_url = cloudfront_signer.generate_presigned_url( - url, date_less_than=expire_date) - print(signed_url) diff --git a/venv/Lib/site-packages/boto3/examples/s3.rst b/venv/Lib/site-packages/boto3/examples/s3.rst deleted file mode 100644 index ec56238..0000000 --- a/venv/Lib/site-packages/boto3/examples/s3.rst +++ /dev/null @@ -1,185 +0,0 @@ -List objects in an Amazon S3 bucket -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The following example shows how to use an Amazon S3 bucket resource to list -the objects in the bucket. - -.. code-block:: python - - import boto3 - - s3 = boto3.resource('s3') - bucket = s3.Bucket('amzn-s3-demo-bucket') - for obj in bucket.objects.all(): - print(obj.key) - - -List top-level common prefixes in Amazon S3 bucket -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example shows how to list all of the top-level common prefixes in an -Amazon S3 bucket: - -.. code-block:: python - - import boto3 - - client = boto3.client('s3') - paginator = client.get_paginator('list_objects') - result = paginator.paginate(Bucket='amzn-s3-demo-bucket', Delimiter='/') - for prefix in result.search('CommonPrefixes'): - print(prefix.get('Prefix')) - - -Restore Glacier objects in an Amazon S3 bucket -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The following example shows how to initiate restoration of glacier objects in -an Amazon S3 bucket, determine if a restoration is on-going, and determine if a -restoration is finished. - -.. code-block:: python - - import boto3 - - s3 = boto3.resource('s3') - bucket = s3.Bucket('amzn-s3-demo-bucket') - for obj_sum in bucket.objects.all(): - obj = s3.Object(obj_sum.bucket_name, obj_sum.key) - if obj.storage_class == 'GLACIER': - # Try to restore the object if the storage class is glacier and - # the object does not have a completed or ongoing restoration - # request. - if obj.restore is None: - print('Submitting restoration request: %s' % obj.key) - obj.restore_object(RestoreRequest={'Days': 1}) - # Print out objects whose restoration is on-going - elif 'ongoing-request="true"' in obj.restore: - print('Restoration in-progress: %s' % obj.key) - # Print out objects whose restoration is complete - elif 'ongoing-request="false"' in obj.restore: - print('Restoration complete: %s' % obj.key) - - -Uploading/downloading files using SSE KMS -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example shows how to use SSE-KMS to upload objects using -server side encryption with a key managed by KMS. 
- -We can either use the default KMS master key, or create a -custom key in AWS and use it to encrypt the object by passing in its -key id. - -With KMS, nothing else needs to be provided for getting the -object; S3 already knows how to decrypt the object. - - -.. code-block:: python - - import boto3 - import os - - BUCKET = 'amzn-s3-demo-bucket' - s3 = boto3.client('s3') - keyid = '' - - print("Uploading S3 object with SSE-KMS") - s3.put_object(Bucket=BUCKET, - Key='encrypt-key', - Body=b'foobar', - ServerSideEncryption='aws:kms', - # Optional: SSEKMSKeyId - SSEKMSKeyId=keyid) - print("Done") - - # Getting the object: - print("Getting S3 object...") - response = s3.get_object(Bucket=BUCKET, - Key='encrypt-key') - print("Done, response body:") - print(response['Body'].read()) - - -Uploading/downloading files using SSE Customer Keys -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example shows how to use SSE-C to upload objects using -server side encryption with a customer provided key. - -First, we'll need a 32 byte key. For this example, we'll -randomly generate a key but you can use any 32 byte key -you want. Remember, you must the same key to download -the object. If you lose the encryption key, you lose -the object. - -Also note how we don't have to provide the SSECustomerKeyMD5. -Boto3 will automatically compute this value for us. - - -.. code-block:: python - - import boto3 - import os - - BUCKET = 'amzn-s3-demo-bucket' - KEY = os.urandom(32) - s3 = boto3.client('s3') - - print("Uploading S3 object with SSE-C") - s3.put_object(Bucket=BUCKET, - Key='encrypt-key', - Body=b'foobar', - SSECustomerKey=KEY, - SSECustomerAlgorithm='AES256') - print("Done") - - # Getting the object: - print("Getting S3 object...") - # Note how we're using the same ``KEY`` we - # created earlier. - response = s3.get_object(Bucket=BUCKET, - Key='encrypt-key', - SSECustomerKey=KEY, - SSECustomerAlgorithm='AES256') - print("Done, response body:") - print(response['Body'].read()) - - -Downloading a specific version of an S3 object -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example shows how to download a specific version of an -S3 object. - -.. code-block:: python - - import boto3 - s3 = boto3.client('s3') - - s3.download_file( - "amzn-s3-demo-bucket", "key-name", "tmp.txt", - ExtraArgs={"VersionId": "my-version-id"} - ) - - -Filter objects by last modified time using JMESPath -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example shows how to filter objects by last modified time -using JMESPath. - -.. code-block:: python - - import boto3 - s3 = boto3.client("s3") - - s3_paginator = s3.get_paginator('list_objects_v2') - s3_iterator = s3_paginator.paginate(Bucket='amzn-s3-demo-bucket') - - filtered_iterator = s3_iterator.search( - "Contents[?to_string(LastModified)>='\"2022-01-05 08:05:37+00:00\"'].Key" - ) - - for key_data in filtered_iterator: - print(key_data) diff --git a/venv/Lib/site-packages/boto3/exceptions.py b/venv/Lib/site-packages/boto3/exceptions.py deleted file mode 100644 index 1dbe37d..0000000 --- a/venv/Lib/site-packages/boto3/exceptions.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. 
This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -# All exceptions in this class should subclass from Boto3Error. -import botocore.exceptions - - -# All exceptions should subclass from Boto3Error in this module. -class Boto3Error(Exception): - """Base class for all Boto3 errors.""" - - -class ResourceLoadException(Boto3Error): - pass - - -# NOTE: This doesn't appear to be used anywhere. -# It's probably safe to remove this. -class NoVersionFound(Boto3Error): - pass - - -# We're subclassing from botocore.exceptions.DataNotFoundError -# to keep backwards compatibility with anyone that was catching -# this low level Botocore error before this exception was -# introduced in boto3. -# Same thing for ResourceNotExistsError below. -class UnknownAPIVersionError( - Boto3Error, botocore.exceptions.DataNotFoundError -): - def __init__(self, service_name, bad_api_version, available_api_versions): - msg = ( - f"The '{service_name}' resource does not support an API version of: {bad_api_version}\n" - f"Valid API versions are: {available_api_versions}" - ) - # Not using super because we don't want the DataNotFoundError - # to be called, it has a different __init__ signature. - Boto3Error.__init__(self, msg) - - -class ResourceNotExistsError( - Boto3Error, botocore.exceptions.DataNotFoundError -): - """Raised when you attempt to create a resource that does not exist.""" - - def __init__(self, service_name, available_services, has_low_level_client): - msg = ( - "The '{}' resource does not exist.\n" - "The available resources are:\n" - " - {}\n".format( - service_name, '\n - '.join(available_services) - ) - ) - if has_low_level_client: - msg = ( - f"{msg}\nConsider using a boto3.client('{service_name}') " - f"instead of a resource for '{service_name}'" - ) - # Not using super because we don't want the DataNotFoundError - # to be called, it has a different __init__ signature. - Boto3Error.__init__(self, msg) - - -class RetriesExceededError(Boto3Error): - def __init__(self, last_exception, msg='Max Retries Exceeded'): - super().__init__(msg) - self.last_exception = last_exception - - -class S3TransferFailedError(Boto3Error): - pass - - -class S3UploadFailedError(Boto3Error): - pass - - -class DynamoDBOperationNotSupportedError(Boto3Error): - """Raised for operations that are not supported for an operand.""" - - def __init__(self, operation, value): - msg = ( - f'{operation} operation cannot be applied to value {value} of type ' - f'{type(value)} directly. Must use AttributeBase object methods ' - f'(i.e. Attr().eq()). to generate ConditionBase instances first.' - ) - Exception.__init__(self, msg) - - -# FIXME: Backward compatibility -DynanmoDBOperationNotSupportedError = DynamoDBOperationNotSupportedError - - -class DynamoDBNeedsConditionError(Boto3Error): - """Raised when input is not a condition""" - - def __init__(self, value): - msg = ( - f'Expecting a ConditionBase object. Got {value} of type {type(value)}. ' - f'Use AttributeBase object methods (i.e. Attr().eq()). to ' - f'generate ConditionBase instances.' - ) - Exception.__init__(self, msg) - - -class DynamoDBNeedsKeyConditionError(Boto3Error): - pass - - -class PythonDeprecationWarning(Warning): - """ - Python version being used is scheduled to become unsupported - in an future release. See warning for specifics. 
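The condition-related errors above guard against combining bare ``Attr``/``Key`` objects before a comparison method has turned them into conditions. A hedged sketch of the misuse they report::

    from boto3.dynamodb.conditions import Attr
    from boto3.exceptions import DynamoDBOperationNotSupportedError

    # Fine: eq()/gte() return ConditionBase objects, which support & / | / ~
    combined = Attr('status').eq('PAID') & Attr('amount').gte(10)

    try:
        Attr('status') & Attr('amount')  # bare attributes have no boolean semantics
    except DynamoDBOperationNotSupportedError as exc:
        print(exc)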
- """ - - pass - - -class InvalidCrtTransferConfigError(Boto3Error): - pass diff --git a/venv/Lib/site-packages/boto3/resources/__init__.py b/venv/Lib/site-packages/boto3/resources/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ab80d62..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/action.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/action.cpython-312.pyc deleted file mode 100644 index 6c7b7bb..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/action.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/base.cpython-312.pyc deleted file mode 100644 index db8b0c6..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/collection.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/collection.cpython-312.pyc deleted file mode 100644 index badb454..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/collection.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/factory.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/factory.cpython-312.pyc deleted file mode 100644 index 50b02b8..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/factory.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/model.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/model.cpython-312.pyc deleted file mode 100644 index cc3eb03..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/model.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/params.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/params.cpython-312.pyc deleted file mode 100644 index ac47a4d..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/params.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/__pycache__/response.cpython-312.pyc b/venv/Lib/site-packages/boto3/resources/__pycache__/response.cpython-312.pyc deleted file mode 100644 index 5222964..0000000 Binary files a/venv/Lib/site-packages/boto3/resources/__pycache__/response.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/resources/action.py b/venv/Lib/site-packages/boto3/resources/action.py deleted file mode 100644 index 7c7d839..0000000 --- a/venv/Lib/site-packages/boto3/resources/action.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. 
See the License for the specific -# language governing permissions and limitations under the License. - -import logging - -from botocore import xform_name - -from boto3.docs.docstring import ActionDocstring -from boto3.utils import inject_attribute - -from .model import Action -from .params import create_request_parameters -from .response import RawHandler, ResourceHandler - -logger = logging.getLogger(__name__) - - -class ServiceAction: - """ - A class representing a callable action on a resource, for example - ``sqs.get_queue_by_name(...)`` or ``s3.Bucket('foo').delete()``. - The action may construct parameters from existing resource identifiers - and may return either a raw response or a new resource instance. - - :type action_model: :py:class`~boto3.resources.model.Action` - :param action_model: The action model. - - :type factory: ResourceFactory - :param factory: The factory that created the resource class to which - this action is attached. - - :type service_context: :py:class:`~boto3.utils.ServiceContext` - :param service_context: Context about the AWS service - """ - - def __init__(self, action_model, factory=None, service_context=None): - self._action_model = action_model - - # In the simplest case we just return the response, but if a - # resource is defined, then we must create these before returning. - resource_response_model = action_model.resource - if resource_response_model: - self._response_handler = ResourceHandler( - search_path=resource_response_model.path, - factory=factory, - resource_model=resource_response_model, - service_context=service_context, - operation_name=action_model.request.operation, - ) - else: - self._response_handler = RawHandler(action_model.path) - - def __call__(self, parent, *args, **kwargs): - """ - Perform the action's request operation after building operation - parameters and build any defined resources from the response. - - :type parent: :py:class:`~boto3.resources.base.ServiceResource` - :param parent: The resource instance to which this action is attached. - :rtype: dict or ServiceResource or list(ServiceResource) - :return: The response, either as a raw dict or resource instance(s). - """ - operation_name = xform_name(self._action_model.request.operation) - - # First, build predefined params and then update with the - # user-supplied kwargs, which allows overriding the pre-built - # params if needed. - params = create_request_parameters(parent, self._action_model.request) - params.update(kwargs) - - logger.debug( - 'Calling %s:%s with %r', - parent.meta.service_name, - operation_name, - params, - ) - - response = getattr(parent.meta.client, operation_name)(*args, **params) - - logger.debug('Response: %r', response) - - return self._response_handler(parent, params, response) - - -class BatchAction(ServiceAction): - """ - An action which operates on a batch of items in a collection, typically - a single page of results from the collection's underlying service - operation call. For example, this allows you to delete up to 999 - S3 objects in a single operation rather than calling ``.delete()`` on - each one individually. - - :type action_model: :py:class`~boto3.resources.model.Action` - :param action_model: The action model. - - :type factory: ResourceFactory - :param factory: The factory that created the resource class to which - this action is attached. 
- - :type service_context: :py:class:`~boto3.utils.ServiceContext` - :param service_context: Context about the AWS service - """ - - def __call__(self, parent, *args, **kwargs): - """ - Perform the batch action's operation on every page of results - from the collection. - - :type parent: - :py:class:`~boto3.resources.collection.ResourceCollection` - :param parent: The collection iterator to which this action - is attached. - :rtype: list(dict) - :return: A list of low-level response dicts from each call. - """ - service_name = None - client = None - responses = [] - operation_name = xform_name(self._action_model.request.operation) - - # Unlike the simple action above, a batch action must operate - # on batches (or pages) of items. So we get each page, construct - # the necessary parameters and call the batch operation. - for page in parent.pages(): - params = {} - for index, resource in enumerate(page): - # There is no public interface to get a service name - # or low-level client from a collection, so we get - # these from the first resource in the collection. - if service_name is None: - service_name = resource.meta.service_name - if client is None: - client = resource.meta.client - - create_request_parameters( - resource, - self._action_model.request, - params=params, - index=index, - ) - - if not params: - # There are no items, no need to make a call. - break - - params.update(kwargs) - - logger.debug( - 'Calling %s:%s with %r', service_name, operation_name, params - ) - - response = getattr(client, operation_name)(*args, **params) - - logger.debug('Response: %r', response) - - responses.append(self._response_handler(parent, params, response)) - - return responses - - -class WaiterAction: - """ - A class representing a callable waiter action on a resource, for example - ``s3.Bucket('foo').wait_until_bucket_exists()``. - The waiter action may construct parameters from existing resource - identifiers. - - :type waiter_model: :py:class`~boto3.resources.model.Waiter` - :param waiter_model: The action waiter. - :type waiter_resource_name: string - :param waiter_resource_name: The name of the waiter action for the - resource. It usually begins with a - ``wait_until_`` - """ - - def __init__(self, waiter_model, waiter_resource_name): - self._waiter_model = waiter_model - self._waiter_resource_name = waiter_resource_name - - def __call__(self, parent, *args, **kwargs): - """ - Perform the wait operation after building operation - parameters. - - :type parent: :py:class:`~boto3.resources.base.ServiceResource` - :param parent: The resource instance to which this action is attached. - """ - client_waiter_name = xform_name(self._waiter_model.waiter_name) - - # First, build predefined params and then update with the - # user-supplied kwargs, which allows overriding the pre-built - # params if needed. - params = create_request_parameters(parent, self._waiter_model) - params.update(kwargs) - - logger.debug( - 'Calling %s:%s with %r', - parent.meta.service_name, - self._waiter_resource_name, - params, - ) - - client = parent.meta.client - waiter = client.get_waiter(client_waiter_name) - response = waiter.wait(**params) - - logger.debug('Response: %r', response) - - -class CustomModeledAction: - """A custom, modeled action to inject into a resource.""" - - def __init__(self, action_name, action_model, function, event_emitter): - """ - :type action_name: str - :param action_name: The name of the action to inject, e.g. 
- 'delete_tags' - - :type action_model: dict - :param action_model: A JSON definition of the action, as if it were - part of the resource model. - - :type function: function - :param function: The function to perform when the action is called. - The first argument should be 'self', which will be the resource - the function is to be called on. - - :type event_emitter: :py:class:`botocore.hooks.BaseEventHooks` - :param event_emitter: The session event emitter. - """ - self.name = action_name - self.model = action_model - self.function = function - self.emitter = event_emitter - - def inject(self, class_attributes, service_context, event_name, **kwargs): - resource_name = event_name.rsplit(".")[-1] - action = Action(self.name, self.model, {}) - self.function.__name__ = self.name - self.function.__doc__ = ActionDocstring( - resource_name=resource_name, - event_emitter=self.emitter, - action_model=action, - service_model=service_context.service_model, - include_signature=False, - ) - inject_attribute(class_attributes, self.name, self.function) diff --git a/venv/Lib/site-packages/boto3/resources/base.py b/venv/Lib/site-packages/boto3/resources/base.py deleted file mode 100644 index 81ee220..0000000 --- a/venv/Lib/site-packages/boto3/resources/base.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import logging - -import boto3 - -logger = logging.getLogger(__name__) - - -class ResourceMeta: - """ - An object containing metadata about a resource. - """ - - def __init__( - self, - service_name, - identifiers=None, - client=None, - data=None, - resource_model=None, - ): - #: (``string``) The service name, e.g. 's3' - self.service_name = service_name - - if identifiers is None: - identifiers = [] - #: (``list``) List of identifier names - self.identifiers = identifiers - - #: (:py:class:`~botocore.client.BaseClient`) Low-level Botocore client - self.client = client - #: (``dict``) Loaded resource data attributes - self.data = data - - # The resource model for that resource - self.resource_model = resource_model - - def __repr__(self): - return f'ResourceMeta(\'{self.service_name}\', identifiers={self.identifiers})' - - def __eq__(self, other): - # Two metas are equal if their components are all equal - if other.__class__.__name__ != self.__class__.__name__: - return False - - return self.__dict__ == other.__dict__ - - def copy(self): - """ - Create a copy of this metadata object. - """ - params = self.__dict__.copy() - service_name = params.pop('service_name') - return ResourceMeta(service_name, **params) - - -class ServiceResource: - """ - A base class for resources. - - :type client: botocore.client - :param client: A low-level Botocore client instance - """ - - meta = None - """ - Stores metadata about this resource instance, such as the - ``service_name``, the low-level ``client`` and any cached ``data`` - from when the instance was hydrated. 
For example:: - - # Get a low-level client from a resource instance - client = resource.meta.client - response = client.operation(Param='foo') - - # Print the resource instance's service short name - print(resource.meta.service_name) - - See :py:class:`ResourceMeta` for more information. - """ - - def __init__(self, *args, **kwargs): - # Always work on a copy of meta, otherwise we would affect other - # instances of the same subclass. - self.meta = self.meta.copy() - - # Create a default client if none was passed - if kwargs.get('client') is not None: - self.meta.client = kwargs.get('client') - else: - self.meta.client = boto3.client(self.meta.service_name) - - # Allow setting identifiers as positional arguments in the order - # in which they were defined in the ResourceJSON. - for i, value in enumerate(args): - setattr(self, f"_{self.meta.identifiers[i]}", value) - - # Allow setting identifiers via keyword arguments. Here we need - # extra logic to ignore other keyword arguments like ``client``. - for name, value in kwargs.items(): - if name == 'client': - continue - - if name not in self.meta.identifiers: - raise ValueError(f'Unknown keyword argument: {name}') - - setattr(self, f"_{name}", value) - - # Validate that all identifiers have been set. - for identifier in self.meta.identifiers: - if getattr(self, identifier) is None: - raise ValueError(f'Required parameter {identifier} not set') - - def __repr__(self): - identifiers = [ - f'{identifier}={repr(getattr(self, identifier))}' - for identifier in self.meta.identifiers - ] - return f"{self.__class__.__name__}({', '.join(identifiers)})" - - def __eq__(self, other): - # Should be instances of the same resource class - if other.__class__.__name__ != self.__class__.__name__: - return False - - # Each of the identifiers should have the same value in both - # instances, e.g. two buckets need the same name to be equal. - for identifier in self.meta.identifiers: - if getattr(self, identifier) != getattr(other, identifier): - return False - - return True - - def __hash__(self): - identifiers = [] - for identifier in self.meta.identifiers: - identifiers.append(getattr(self, identifier)) - return hash((self.__class__.__name__, tuple(identifiers))) diff --git a/venv/Lib/site-packages/boto3/resources/collection.py b/venv/Lib/site-packages/boto3/resources/collection.py deleted file mode 100644 index 5d4c9e9..0000000 --- a/venv/Lib/site-packages/boto3/resources/collection.py +++ /dev/null @@ -1,566 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import copy -import logging - -from botocore import xform_name -from botocore.utils import merge_dicts - -from ..docs import docstring -from .action import BatchAction -from .params import create_request_parameters -from .response import ResourceHandler - -logger = logging.getLogger(__name__) - - -class ResourceCollection: - """ - Represents a collection of resources, which can be iterated through, - optionally with filtering. Collections automatically handle pagination - for you. 
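For illustration, a minimal sketch of the iteration behaviour described above, assuming an existing bucket (the name and prefix are placeholders):

.. code-block:: python

    import boto3

    s3 = boto3.resource('s3')
    bucket = s3.Bucket('amzn-s3-demo-bucket')

    # Iteration transparently pages through ListObjectsV2 results.
    for obj in bucket.objects.all():
        print(obj.key)

    # filter()/limit() return cloned collections; no request is made
    # until the collection is actually iterated.
    for obj in bucket.objects.filter(Prefix='logs/').limit(10):
        print(obj.key, obj.size)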
- - See :ref:`guide_collections` for a high-level overview of collections, - including when remote service requests are performed. - - :type model: :py:class:`~boto3.resources.model.Collection` - :param model: Collection model - :type parent: :py:class:`~boto3.resources.base.ServiceResource` - :param parent: The collection's parent resource - :type handler: :py:class:`~boto3.resources.response.ResourceHandler` - :param handler: The resource response handler used to create resource - instances - """ - - def __init__(self, model, parent, handler, **kwargs): - self._model = model - self._parent = parent - self._py_operation_name = xform_name(model.request.operation) - self._handler = handler - self._params = copy.deepcopy(kwargs) - - def __repr__(self): - return '{}({}, {})'.format( - self.__class__.__name__, - self._parent, - f'{self._parent.meta.service_name}.{self._model.resource.type}', - ) - - def __iter__(self): - """ - A generator which yields resource instances after doing the - appropriate service operation calls and handling any pagination - on your behalf. - - Page size, item limit, and filter parameters are applied - if they have previously been set. - - >>> bucket = s3.Bucket('boto3') - >>> for obj in bucket.objects.all(): - ... print(obj.key) - 'key1' - 'key2' - - """ - limit = self._params.get('limit', None) - - count = 0 - for page in self.pages(): - for item in page: - yield item - - # If the limit is set and has been reached, then - # we stop processing items here. - count += 1 - if limit is not None and count >= limit: - return - - def _clone(self, **kwargs): - """ - Create a clone of this collection. This is used by the methods - below to provide a chainable interface that returns copies - rather than the original. This allows things like: - - >>> base = collection.filter(Param1=1) - >>> query1 = base.filter(Param2=2) - >>> query2 = base.filter(Param3=3) - >>> query1.params - {'Param1': 1, 'Param2': 2} - >>> query2.params - {'Param1': 1, 'Param3': 3} - - :rtype: :py:class:`ResourceCollection` - :return: A clone of this resource collection - """ - params = copy.deepcopy(self._params) - merge_dicts(params, kwargs, append_lists=True) - clone = self.__class__( - self._model, self._parent, self._handler, **params - ) - return clone - - def pages(self): - """ - A generator which yields pages of resource instances after - doing the appropriate service operation calls and handling - any pagination on your behalf. Non-paginated calls will - return a single page of items. - - Page size, item limit, and filter parameters are applied - if they have previously been set. - - >>> bucket = s3.Bucket('boto3') - >>> for page in bucket.objects.pages(): - ... for obj in page: - ... print(obj.key) - 'key1' - 'key2' - - :rtype: list(:py:class:`~boto3.resources.base.ServiceResource`) - :return: List of resource instances - """ - client = self._parent.meta.client - cleaned_params = self._params.copy() - limit = cleaned_params.pop('limit', None) - page_size = cleaned_params.pop('page_size', None) - params = create_request_parameters(self._parent, self._model.request) - merge_dicts(params, cleaned_params, append_lists=True) - - # Is this a paginated operation? If so, we need to get an - # iterator for the various pages. If not, then we simply - # call the operation and return the result as a single - # page in a list. For non-paginated results, we just ignore - # the page size parameter. 
- if client.can_paginate(self._py_operation_name): - logger.debug( - 'Calling paginated %s:%s with %r', - self._parent.meta.service_name, - self._py_operation_name, - params, - ) - paginator = client.get_paginator(self._py_operation_name) - pages = paginator.paginate( - PaginationConfig={'MaxItems': limit, 'PageSize': page_size}, - **params, - ) - else: - logger.debug( - 'Calling %s:%s with %r', - self._parent.meta.service_name, - self._py_operation_name, - params, - ) - pages = [getattr(client, self._py_operation_name)(**params)] - - # Now that we have a page iterator or single page of results - # we start processing and yielding individual items. - count = 0 - for page in pages: - page_items = [] - for item in self._handler(self._parent, params, page): - page_items.append(item) - - # If the limit is set and has been reached, then - # we stop processing items here. - count += 1 - if limit is not None and count >= limit: - break - - yield page_items - - # Stop reading pages if we've reached out limit - if limit is not None and count >= limit: - break - - def all(self): - """ - Get all items from the collection, optionally with a custom - page size and item count limit. - - This method returns an iterable generator which yields - individual resource instances. Example use:: - - # Iterate through items - >>> for queue in sqs.queues.all(): - ... print(queue.url) - 'https://url1' - 'https://url2' - - # Convert to list - >>> queues = list(sqs.queues.all()) - >>> len(queues) - 2 - """ - return self._clone() - - def filter(self, **kwargs): - """ - Get items from the collection, passing keyword arguments along - as parameters to the underlying service operation, which are - typically used to filter the results. - - This method returns an iterable generator which yields - individual resource instances. Example use:: - - # Iterate through items - >>> for queue in sqs.queues.filter(Param='foo'): - ... print(queue.url) - 'https://url1' - 'https://url2' - - # Convert to list - >>> queues = list(sqs.queues.filter(Param='foo')) - >>> len(queues) - 2 - - :rtype: :py:class:`ResourceCollection` - """ - return self._clone(**kwargs) - - def limit(self, count): - """ - Return at most this many resources. - - >>> for bucket in s3.buckets.limit(5): - ... print(bucket.name) - 'bucket1' - 'bucket2' - 'bucket3' - 'bucket4' - 'bucket5' - - :type count: int - :param count: Return no more than this many items - :rtype: :py:class:`ResourceCollection` - """ - return self._clone(limit=count) - - def page_size(self, count): - """ - Fetch at most this many resources per service request. - - >>> for obj in s3.Bucket('boto3').objects.page_size(100): - ... print(obj.key) - - :type count: int - :param count: Fetch this many items per request - :rtype: :py:class:`ResourceCollection` - """ - return self._clone(page_size=count) - - -class CollectionManager: - """ - A collection manager provides access to resource collection instances, - which can be iterated and filtered. The manager exposes some - convenience functions that are also found on resource collections, - such as :py:meth:`~ResourceCollection.all` and - :py:meth:`~ResourceCollection.filter`. - - Get all items:: - - >>> for bucket in s3.buckets.all(): - ... print(bucket.name) - - Get only some items via filtering:: - - >>> for queue in sqs.queues.filter(QueueNamePrefix='AWS'): - ... print(queue.url) - - Get whole pages of items: - - >>> for page in s3.Bucket('boto3').objects.pages(): - ... for obj in page: - ... print(obj.key) - - A collection manager is not iterable. 
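A small sketch of that restriction from user code (queue name prefix is a placeholder):

.. code-block:: python

    import boto3

    sqs = boto3.resource('sqs')

    # sqs.queues is a CollectionManager and is not iterable itself;
    # call all()/filter()/limit()/page_size() to obtain an iterable
    # ResourceCollection first.
    for queue in sqs.queues.all():
        print(queue.url)

    for queue in sqs.queues.filter(QueueNamePrefix='prod-'):
        print(queue.url)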
You **must** call one of the - methods that return a :py:class:`ResourceCollection` before trying - to iterate, slice, or convert to a list. - - See the :ref:`guide_collections` guide for a high-level overview - of collections, including when remote service requests are performed. - - :type collection_model: :py:class:`~boto3.resources.model.Collection` - :param model: Collection model - - :type parent: :py:class:`~boto3.resources.base.ServiceResource` - :param parent: The collection's parent resource - - :type factory: :py:class:`~boto3.resources.factory.ResourceFactory` - :param factory: The resource factory to create new resources - - :type service_context: :py:class:`~boto3.utils.ServiceContext` - :param service_context: Context about the AWS service - """ - - # The class to use when creating an iterator - _collection_cls = ResourceCollection - - def __init__(self, collection_model, parent, factory, service_context): - self._model = collection_model - operation_name = self._model.request.operation - self._parent = parent - - search_path = collection_model.resource.path - self._handler = ResourceHandler( - search_path=search_path, - factory=factory, - resource_model=collection_model.resource, - service_context=service_context, - operation_name=operation_name, - ) - - def __repr__(self): - return '{}({}, {})'.format( - self.__class__.__name__, - self._parent, - f'{self._parent.meta.service_name}.{self._model.resource.type}', - ) - - def iterator(self, **kwargs): - """ - Get a resource collection iterator from this manager. - - :rtype: :py:class:`ResourceCollection` - :return: An iterable representing the collection of resources - """ - return self._collection_cls( - self._model, self._parent, self._handler, **kwargs - ) - - # Set up some methods to proxy ResourceCollection methods - def all(self): - return self.iterator() - - all.__doc__ = ResourceCollection.all.__doc__ - - def filter(self, **kwargs): - return self.iterator(**kwargs) - - filter.__doc__ = ResourceCollection.filter.__doc__ - - def limit(self, count): - return self.iterator(limit=count) - - limit.__doc__ = ResourceCollection.limit.__doc__ - - def page_size(self, count): - return self.iterator(page_size=count) - - page_size.__doc__ = ResourceCollection.page_size.__doc__ - - def pages(self): - return self.iterator().pages() - - pages.__doc__ = ResourceCollection.pages.__doc__ - - -class CollectionFactory: - """ - A factory to create new - :py:class:`CollectionManager` and :py:class:`ResourceCollection` - subclasses from a :py:class:`~boto3.resources.model.Collection` - model. These subclasses include methods to perform batch operations. - """ - - def load_from_definition( - self, resource_name, collection_model, service_context, event_emitter - ): - """ - Loads a collection from a model, creating a new - :py:class:`CollectionManager` subclass - with the correct properties and methods, named based on the service - and resource name, e.g. ec2.InstanceCollectionManager. It also - creates a new :py:class:`ResourceCollection` subclass which is used - by the new manager class. - - :type resource_name: string - :param resource_name: Name of the resource to look up. For services, - this should match the ``service_name``. 
- - :type service_context: :py:class:`~boto3.utils.ServiceContext` - :param service_context: Context about the AWS service - - :type event_emitter: :py:class:`~botocore.hooks.HierarchialEmitter` - :param event_emitter: An event emitter - - :rtype: Subclass of :py:class:`CollectionManager` - :return: The collection class. - """ - attrs = {} - collection_name = collection_model.name - - # Create the batch actions for a collection - self._load_batch_actions( - attrs, - resource_name, - collection_model, - service_context.service_model, - event_emitter, - ) - # Add the documentation to the collection class's methods - self._load_documented_collection_methods( - attrs=attrs, - resource_name=resource_name, - collection_model=collection_model, - service_model=service_context.service_model, - event_emitter=event_emitter, - base_class=ResourceCollection, - ) - - if service_context.service_name == resource_name: - cls_name = ( - f'{service_context.service_name}.{collection_name}Collection' - ) - else: - cls_name = f'{service_context.service_name}.{resource_name}.{collection_name}Collection' - - collection_cls = type(str(cls_name), (ResourceCollection,), attrs) - - # Add the documentation to the collection manager's methods - self._load_documented_collection_methods( - attrs=attrs, - resource_name=resource_name, - collection_model=collection_model, - service_model=service_context.service_model, - event_emitter=event_emitter, - base_class=CollectionManager, - ) - attrs['_collection_cls'] = collection_cls - cls_name += 'Manager' - - return type(str(cls_name), (CollectionManager,), attrs) - - def _load_batch_actions( - self, - attrs, - resource_name, - collection_model, - service_model, - event_emitter, - ): - """ - Batch actions on the collection become methods on both - the collection manager and iterators. - """ - for action_model in collection_model.batch_actions: - snake_cased = xform_name(action_model.name) - attrs[snake_cased] = self._create_batch_action( - resource_name, - snake_cased, - action_model, - collection_model, - service_model, - event_emitter, - ) - - def _load_documented_collection_methods( - factory_self, - attrs, - resource_name, - collection_model, - service_model, - event_emitter, - base_class, - ): - # The base class already has these methods defined. However - # the docstrings are generic and not based for a particular service - # or resource. So we override these methods by proxying to the - # base class's builtin method and adding a docstring - # that pertains to the resource. - - # A collection's all() method. - def all(self): - return base_class.all(self) - - all.__doc__ = docstring.CollectionMethodDocstring( - resource_name=resource_name, - action_name='all', - event_emitter=event_emitter, - collection_model=collection_model, - service_model=service_model, - include_signature=False, - ) - attrs['all'] = all - - # The collection's filter() method. - def filter(self, **kwargs): - return base_class.filter(self, **kwargs) - - filter.__doc__ = docstring.CollectionMethodDocstring( - resource_name=resource_name, - action_name='filter', - event_emitter=event_emitter, - collection_model=collection_model, - service_model=service_model, - include_signature=False, - ) - attrs['filter'] = filter - - # The collection's limit method. 
- def limit(self, count): - return base_class.limit(self, count) - - limit.__doc__ = docstring.CollectionMethodDocstring( - resource_name=resource_name, - action_name='limit', - event_emitter=event_emitter, - collection_model=collection_model, - service_model=service_model, - include_signature=False, - ) - attrs['limit'] = limit - - # The collection's page_size method. - def page_size(self, count): - return base_class.page_size(self, count) - - page_size.__doc__ = docstring.CollectionMethodDocstring( - resource_name=resource_name, - action_name='page_size', - event_emitter=event_emitter, - collection_model=collection_model, - service_model=service_model, - include_signature=False, - ) - attrs['page_size'] = page_size - - def _create_batch_action( - factory_self, - resource_name, - snake_cased, - action_model, - collection_model, - service_model, - event_emitter, - ): - """ - Creates a new method which makes a batch operation request - to the underlying service API. - """ - action = BatchAction(action_model) - - def batch_action(self, *args, **kwargs): - return action(self, *args, **kwargs) - - batch_action.__name__ = str(snake_cased) - batch_action.__doc__ = docstring.BatchActionDocstring( - resource_name=resource_name, - event_emitter=event_emitter, - batch_action_model=action_model, - service_model=service_model, - collection_model=collection_model, - include_signature=False, - ) - return batch_action diff --git a/venv/Lib/site-packages/boto3/resources/factory.py b/venv/Lib/site-packages/boto3/resources/factory.py deleted file mode 100644 index 5dab336..0000000 --- a/venv/Lib/site-packages/boto3/resources/factory.py +++ /dev/null @@ -1,601 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import logging -from functools import partial - -from ..docs import docstring -from ..exceptions import ResourceLoadException -from .action import ServiceAction, WaiterAction -from .base import ResourceMeta, ServiceResource -from .collection import CollectionFactory -from .model import ResourceModel -from .response import ResourceHandler, build_identifiers - -logger = logging.getLogger(__name__) - - -class ResourceFactory: - """ - A factory to create new :py:class:`~boto3.resources.base.ServiceResource` - classes from a :py:class:`~boto3.resources.model.ResourceModel`. There are - two types of lookups that can be done: one on the service itself (e.g. an - SQS resource) and another on models contained within the service (e.g. an - SQS Queue resource). - """ - - def __init__(self, emitter): - self._collection_factory = CollectionFactory() - self._emitter = emitter - - def load_from_definition( - self, resource_name, single_resource_json_definition, service_context - ): - """ - Loads a resource from a model, creating a new - :py:class:`~boto3.resources.base.ServiceResource` subclass - with the correct properties and methods, named based on the service - and resource name, e.g. EC2.Instance. - - :type resource_name: string - :param resource_name: Name of the resource to look up. 
For services, - this should match the ``service_name``. - - :type single_resource_json_definition: dict - :param single_resource_json_definition: - The loaded json of a single service resource or resource - definition. - - :type service_context: :py:class:`~boto3.utils.ServiceContext` - :param service_context: Context about the AWS service - - :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource` - :return: The service or resource class. - """ - logger.debug( - 'Loading %s:%s', service_context.service_name, resource_name - ) - - # Using the loaded JSON create a ResourceModel object. - resource_model = ResourceModel( - resource_name, - single_resource_json_definition, - service_context.resource_json_definitions, - ) - - # Do some renaming of the shape if there was a naming collision - # that needed to be accounted for. - shape = None - if resource_model.shape: - shape = service_context.service_model.shape_for( - resource_model.shape - ) - resource_model.load_rename_map(shape) - - # Set some basic info - meta = ResourceMeta( - service_context.service_name, resource_model=resource_model - ) - attrs = { - 'meta': meta, - } - - # Create and load all of attributes of the resource class based - # on the models. - - # Identifiers - self._load_identifiers( - attrs=attrs, - meta=meta, - resource_name=resource_name, - resource_model=resource_model, - ) - - # Load/Reload actions - self._load_actions( - attrs=attrs, - resource_name=resource_name, - resource_model=resource_model, - service_context=service_context, - ) - - # Attributes that get auto-loaded - self._load_attributes( - attrs=attrs, - meta=meta, - resource_name=resource_name, - resource_model=resource_model, - service_context=service_context, - ) - - # Collections and their corresponding methods - self._load_collections( - attrs=attrs, - resource_model=resource_model, - service_context=service_context, - ) - - # References and Subresources - self._load_has_relations( - attrs=attrs, - resource_name=resource_name, - resource_model=resource_model, - service_context=service_context, - ) - - # Waiter resource actions - self._load_waiters( - attrs=attrs, - resource_name=resource_name, - resource_model=resource_model, - service_context=service_context, - ) - - # Create the name based on the requested service and resource - cls_name = resource_name - if service_context.service_name == resource_name: - cls_name = 'ServiceResource' - cls_name = f"{service_context.service_name}.{cls_name}" - - base_classes = [ServiceResource] - if self._emitter is not None: - self._emitter.emit( - f'creating-resource-class.{cls_name}', - class_attributes=attrs, - base_classes=base_classes, - service_context=service_context, - ) - return type(str(cls_name), tuple(base_classes), attrs) - - def _load_identifiers(self, attrs, meta, resource_model, resource_name): - """ - Populate required identifiers. These are arguments without which - the resource cannot be used. Identifiers become arguments for - operations on the resource. - """ - for identifier in resource_model.identifiers: - meta.identifiers.append(identifier.name) - attrs[identifier.name] = self._create_identifier( - identifier, resource_name - ) - - def _load_actions( - self, attrs, resource_name, resource_model, service_context - ): - """ - Actions on the resource become methods, with the ``load`` method - being a special case which sets internal data for attributes, and - ``reload`` is an alias for ``load``. 
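As a rough user-level sketch of the ``load``/``reload`` special case (bucket and key are placeholders):

.. code-block:: python

    import boto3

    s3 = boto3.resource('s3')
    obj = s3.Object('amzn-s3-demo-bucket', 'key-name')

    obj.load()                 # HeadObject call; response cached in obj.meta.data
    print(obj.content_length)  # read from the cached data, no extra request
    obj.reload()               # alias for load(); refreshes the cached data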
- """ - if resource_model.load: - attrs['load'] = self._create_action( - action_model=resource_model.load, - resource_name=resource_name, - service_context=service_context, - is_load=True, - ) - attrs['reload'] = attrs['load'] - - for action in resource_model.actions: - attrs[action.name] = self._create_action( - action_model=action, - resource_name=resource_name, - service_context=service_context, - ) - - def _load_attributes( - self, attrs, meta, resource_name, resource_model, service_context - ): - """ - Load resource attributes based on the resource shape. The shape - name is referenced in the resource JSON, but the shape itself - is defined in the Botocore service JSON, hence the need for - access to the ``service_model``. - """ - if not resource_model.shape: - return - - shape = service_context.service_model.shape_for(resource_model.shape) - - identifiers = { - i.member_name: i - for i in resource_model.identifiers - if i.member_name - } - attributes = resource_model.get_attributes(shape) - for name, (orig_name, member) in attributes.items(): - if name in identifiers: - prop = self._create_identifier_alias( - resource_name=resource_name, - identifier=identifiers[name], - member_model=member, - service_context=service_context, - ) - else: - prop = self._create_autoload_property( - resource_name=resource_name, - name=orig_name, - snake_cased=name, - member_model=member, - service_context=service_context, - ) - attrs[name] = prop - - def _load_collections(self, attrs, resource_model, service_context): - """ - Load resource collections from the model. Each collection becomes - a :py:class:`~boto3.resources.collection.CollectionManager` instance - on the resource instance, which allows you to iterate and filter - through the collection's items. - """ - for collection_model in resource_model.collections: - attrs[collection_model.name] = self._create_collection( - resource_name=resource_model.name, - collection_model=collection_model, - service_context=service_context, - ) - - def _load_has_relations( - self, attrs, resource_name, resource_model, service_context - ): - """ - Load related resources, which are defined via a ``has`` - relationship but conceptually come in two forms: - - 1. A reference, which is a related resource instance and can be - ``None``, such as an EC2 instance's ``vpc``. - 2. A subresource, which is a resource constructor that will always - return a resource instance which shares identifiers/data with - this resource, such as ``s3.Bucket('name').Object('key')``. - """ - for reference in resource_model.references: - # This is a dangling reference, i.e. we have all - # the data we need to create the resource, so - # this instance becomes an attribute on the class. - attrs[reference.name] = self._create_reference( - reference_model=reference, - resource_name=resource_name, - service_context=service_context, - ) - - for subresource in resource_model.subresources: - # This is a sub-resource class you can create - # by passing in an identifier, e.g. s3.Bucket(name). 
- attrs[subresource.name] = self._create_class_partial( - subresource_model=subresource, - resource_name=resource_name, - service_context=service_context, - ) - - self._create_available_subresources_command( - attrs, resource_model.subresources - ) - - def _create_available_subresources_command(self, attrs, subresources): - _subresources = [subresource.name for subresource in subresources] - _subresources = sorted(_subresources) - - def get_available_subresources(factory_self): - """ - Returns a list of all the available sub-resources for this - Resource. - - :returns: A list containing the name of each sub-resource for this - resource - :rtype: list of str - """ - return _subresources - - attrs['get_available_subresources'] = get_available_subresources - - def _load_waiters( - self, attrs, resource_name, resource_model, service_context - ): - """ - Load resource waiters from the model. Each waiter allows you to - wait until a resource reaches a specific state by polling the state - of the resource. - """ - for waiter in resource_model.waiters: - attrs[waiter.name] = self._create_waiter( - resource_waiter_model=waiter, - resource_name=resource_name, - service_context=service_context, - ) - - def _create_identifier(factory_self, identifier, resource_name): - """ - Creates a read-only property for identifier attributes. - """ - - def get_identifier(self): - # The default value is set to ``None`` instead of - # raising an AttributeError because when resources are - # instantiated a check is made such that none of the - # identifiers have a value ``None``. If any are ``None``, - # a more informative user error than a generic AttributeError - # is raised. - return getattr(self, f"_{identifier.name}", None) - - get_identifier.__name__ = str(identifier.name) - get_identifier.__doc__ = docstring.IdentifierDocstring( - resource_name=resource_name, - identifier_model=identifier, - include_signature=False, - ) - - return property(get_identifier) - - def _create_identifier_alias( - factory_self, resource_name, identifier, member_model, service_context - ): - """ - Creates a read-only property that aliases an identifier. - """ - - def get_identifier(self): - return getattr(self, f"_{identifier.name}", None) - - get_identifier.__name__ = str(identifier.member_name) - get_identifier.__doc__ = docstring.AttributeDocstring( - service_name=service_context.service_name, - resource_name=resource_name, - attr_name=identifier.member_name, - event_emitter=factory_self._emitter, - attr_model=member_model, - include_signature=False, - ) - - return property(get_identifier) - - def _create_autoload_property( - factory_self, - resource_name, - name, - snake_cased, - member_model, - service_context, - ): - """ - Creates a new property on the resource to lazy-load its value - via the resource's ``load`` method (if it exists). - """ - - # The property loader will check to see if this resource has already - # been loaded and return the cached value if possible. If not, then - # it first checks to see if it CAN be loaded (raise if not), then - # calls the load before returning the value. 
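From the user's point of view, this lazy loading looks roughly like the following sketch (bucket and key are placeholders):

.. code-block:: python

    import boto3

    s3 = boto3.resource('s3')
    obj = s3.Object('amzn-s3-demo-bucket', 'key-name')

    print(obj.meta.data)           # None -- nothing has been fetched yet
    print(obj.e_tag)               # first attribute access triggers obj.load()
    print(obj.meta.data is None)   # False -- the HeadObject response is now cached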
- def property_loader(self): - if self.meta.data is None: - if hasattr(self, 'load'): - self.load() - else: - raise ResourceLoadException( - f'{self.__class__.__name__} has no load method' - ) - - return self.meta.data.get(name) - - property_loader.__name__ = str(snake_cased) - property_loader.__doc__ = docstring.AttributeDocstring( - service_name=service_context.service_name, - resource_name=resource_name, - attr_name=snake_cased, - event_emitter=factory_self._emitter, - attr_model=member_model, - include_signature=False, - ) - - return property(property_loader) - - def _create_waiter( - factory_self, resource_waiter_model, resource_name, service_context - ): - """ - Creates a new wait method for each resource where both a waiter and - resource model is defined. - """ - waiter = WaiterAction( - resource_waiter_model, - waiter_resource_name=resource_waiter_model.name, - ) - - def do_waiter(self, *args, **kwargs): - waiter(self, *args, **kwargs) - - do_waiter.__name__ = str(resource_waiter_model.name) - do_waiter.__doc__ = docstring.ResourceWaiterDocstring( - resource_name=resource_name, - event_emitter=factory_self._emitter, - service_model=service_context.service_model, - resource_waiter_model=resource_waiter_model, - service_waiter_model=service_context.service_waiter_model, - include_signature=False, - ) - return do_waiter - - def _create_collection( - factory_self, resource_name, collection_model, service_context - ): - """ - Creates a new property on the resource to lazy-load a collection. - """ - cls = factory_self._collection_factory.load_from_definition( - resource_name=resource_name, - collection_model=collection_model, - service_context=service_context, - event_emitter=factory_self._emitter, - ) - - def get_collection(self): - return cls( - collection_model=collection_model, - parent=self, - factory=factory_self, - service_context=service_context, - ) - - get_collection.__name__ = str(collection_model.name) - get_collection.__doc__ = docstring.CollectionDocstring( - collection_model=collection_model, include_signature=False - ) - return property(get_collection) - - def _create_reference( - factory_self, reference_model, resource_name, service_context - ): - """ - Creates a new property on the resource to lazy-load a reference. - """ - # References are essentially an action with no request - # or response, so we can re-use the response handlers to - # build up resources from identifiers and data members. - handler = ResourceHandler( - search_path=reference_model.resource.path, - factory=factory_self, - resource_model=reference_model.resource, - service_context=service_context, - ) - - # Are there any identifiers that need access to data members? - # This is important when building the resource below since - # it requires the data to be loaded. - needs_data = any( - i.source == 'data' for i in reference_model.resource.identifiers - ) - - def get_reference(self): - # We need to lazy-evaluate the reference to handle circular - # references between resources. We do this by loading the class - # when first accessed. - # This is using a *response handler* so we need to make sure - # our data is loaded (if possible) and pass that data into - # the handler as if it were a response. This allows references - # to have their data loaded properly. 
- if needs_data and self.meta.data is None and hasattr(self, 'load'): - self.load() - return handler(self, {}, self.meta.data) - - get_reference.__name__ = str(reference_model.name) - get_reference.__doc__ = docstring.ReferenceDocstring( - reference_model=reference_model, include_signature=False - ) - return property(get_reference) - - def _create_class_partial( - factory_self, subresource_model, resource_name, service_context - ): - """ - Creates a new method which acts as a functools.partial, passing - along the instance's low-level `client` to the new resource - class' constructor. - """ - name = subresource_model.resource.type - - def create_resource(self, *args, **kwargs): - # We need a new method here because we want access to the - # instance's client. - positional_args = [] - - # We lazy-load the class to handle circular references. - json_def = service_context.resource_json_definitions.get(name, {}) - resource_cls = factory_self.load_from_definition( - resource_name=name, - single_resource_json_definition=json_def, - service_context=service_context, - ) - - # Assumes that identifiers are in order, which lets you do - # e.g. ``sqs.Queue('foo').Message('bar')`` to create a new message - # linked with the ``foo`` queue and which has a ``bar`` receipt - # handle. If we did kwargs here then future positional arguments - # would lead to failure. - identifiers = subresource_model.resource.identifiers - if identifiers is not None: - for identifier, value in build_identifiers(identifiers, self): - positional_args.append(value) - - return partial( - resource_cls, *positional_args, client=self.meta.client - )(*args, **kwargs) - - create_resource.__name__ = str(name) - create_resource.__doc__ = docstring.SubResourceDocstring( - resource_name=resource_name, - sub_resource_model=subresource_model, - service_model=service_context.service_model, - include_signature=False, - ) - return create_resource - - def _create_action( - factory_self, - action_model, - resource_name, - service_context, - is_load=False, - ): - """ - Creates a new method which makes a request to the underlying - AWS service. - """ - # Create the action in in this closure but before the ``do_action`` - # method below is invoked, which allows instances of the resource - # to share the ServiceAction instance. - action = ServiceAction( - action_model, factory=factory_self, service_context=service_context - ) - - # A resource's ``load`` method is special because it sets - # values on the resource instead of returning the response. - if is_load: - # We need a new method here because we want access to the - # instance via ``self``. - def do_action(self, *args, **kwargs): - response = action(self, *args, **kwargs) - self.meta.data = response - - # Create the docstring for the load/reload methods. - lazy_docstring = docstring.LoadReloadDocstring( - action_name=action_model.name, - resource_name=resource_name, - event_emitter=factory_self._emitter, - load_model=action_model, - service_model=service_context.service_model, - include_signature=False, - ) - else: - # We need a new method here because we want access to the - # instance via ``self``. - def do_action(self, *args, **kwargs): - response = action(self, *args, **kwargs) - - if hasattr(self, 'load'): - # Clear cached data. It will be reloaded the next - # time that an attribute is accessed. - # TODO: Make this configurable in the future? 
- self.meta.data = None - - return response - - lazy_docstring = docstring.ActionDocstring( - resource_name=resource_name, - event_emitter=factory_self._emitter, - action_model=action_model, - service_model=service_context.service_model, - include_signature=False, - ) - - do_action.__name__ = str(action_model.name) - do_action.__doc__ = lazy_docstring - return do_action diff --git a/venv/Lib/site-packages/boto3/resources/model.py b/venv/Lib/site-packages/boto3/resources/model.py deleted file mode 100644 index 6b64996..0000000 --- a/venv/Lib/site-packages/boto3/resources/model.py +++ /dev/null @@ -1,630 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -""" -The models defined in this file represent the resource JSON description -format and provide a layer of abstraction from the raw JSON. The advantages -of this are: - -* Pythonic interface (e.g. ``action.request.operation``) -* Consumers need not change for minor JSON changes (e.g. renamed field) - -These models are used both by the resource factory to generate resource -classes as well as by the documentation generator. -""" - -import logging - -from botocore import xform_name - -logger = logging.getLogger(__name__) - - -class Identifier: - """ - A resource identifier, given by its name. - - :type name: string - :param name: The name of the identifier - """ - - def __init__(self, name, member_name=None): - #: (``string``) The name of the identifier - self.name = name - self.member_name = member_name - - -class Action: - """ - A service operation action. - - :type name: string - :param name: The name of the action - :type definition: dict - :param definition: The JSON definition - :type resource_defs: dict - :param resource_defs: All resources defined in the service - """ - - def __init__(self, name, definition, resource_defs): - self._definition = definition - - #: (``string``) The name of the action - self.name = name - #: (:py:class:`Request`) This action's request or ``None`` - self.request = None - if 'request' in definition: - self.request = Request(definition.get('request', {})) - #: (:py:class:`ResponseResource`) This action's resource or ``None`` - self.resource = None - if 'resource' in definition: - self.resource = ResponseResource( - definition.get('resource', {}), resource_defs - ) - #: (``string``) The JMESPath search path or ``None`` - self.path = definition.get('path') - - -class DefinitionWithParams: - """ - An item which has parameters exposed via the ``params`` property. - A request has an operation and parameters, while a waiter has - a name, a low-level waiter name and parameters. - - :type definition: dict - :param definition: The JSON definition - """ - - def __init__(self, definition): - self._definition = definition - - @property - def params(self): - """ - Get a list of auto-filled parameters for this request. 
- - :type: list(:py:class:`Parameter`) - """ - params = [] - - for item in self._definition.get('params', []): - params.append(Parameter(**item)) - - return params - - -class Parameter: - """ - An auto-filled parameter which has a source and target. For example, - the ``QueueUrl`` may be auto-filled from a resource's ``url`` identifier - when making calls to ``queue.receive_messages``. - - :type target: string - :param target: The destination parameter name, e.g. ``QueueUrl`` - :type source_type: string - :param source_type: Where the source is defined. - :type source: string - :param source: The source name, e.g. ``Url`` - """ - - def __init__( - self, target, source, name=None, path=None, value=None, **kwargs - ): - #: (``string``) The destination parameter name - self.target = target - #: (``string``) Where the source is defined - self.source = source - #: (``string``) The name of the source, if given - self.name = name - #: (``string``) The JMESPath query of the source - self.path = path - #: (``string|int|float|bool``) The source constant value - self.value = value - - # Complain if we encounter any unknown values. - if kwargs: - logger.warning('Unknown parameter options found: %s', kwargs) - - -class Request(DefinitionWithParams): - """ - A service operation action request. - - :type definition: dict - :param definition: The JSON definition - """ - - def __init__(self, definition): - super().__init__(definition) - - #: (``string``) The name of the low-level service operation - self.operation = definition.get('operation') - - -class Waiter(DefinitionWithParams): - """ - An event waiter specification. - - :type name: string - :param name: Name of the waiter - :type definition: dict - :param definition: The JSON definition - """ - - PREFIX = 'WaitUntil' - - def __init__(self, name, definition): - super().__init__(definition) - - #: (``string``) The name of this waiter - self.name = name - - #: (``string``) The name of the underlying event waiter - self.waiter_name = definition.get('waiterName') - - -class ResponseResource: - """ - A resource response to create after performing an action. - - :type definition: dict - :param definition: The JSON definition - :type resource_defs: dict - :param resource_defs: All resources defined in the service - """ - - def __init__(self, definition, resource_defs): - self._definition = definition - self._resource_defs = resource_defs - - #: (``string``) The name of the response resource type - self.type = definition.get('type') - - #: (``string``) The JMESPath search query or ``None`` - self.path = definition.get('path') - - @property - def identifiers(self): - """ - A list of resource identifiers. - - :type: list(:py:class:`Identifier`) - """ - identifiers = [] - - for item in self._definition.get('identifiers', []): - identifiers.append(Parameter(**item)) - - return identifiers - - @property - def model(self): - """ - Get the resource model for the response resource. - - :type: :py:class:`ResourceModel` - """ - return ResourceModel( - self.type, self._resource_defs[self.type], self._resource_defs - ) - - -class Collection(Action): - """ - A group of resources. See :py:class:`Action`. - - :type name: string - :param name: The name of the collection - :type definition: dict - :param definition: The JSON definition - :type resource_defs: dict - :param resource_defs: All resources defined in the service - """ - - @property - def batch_actions(self): - """ - Get a list of batch actions supported by the resource type - contained in this action. 
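For example, ``delete`` is modeled as a batch action on the S3 object collection, so a filtered collection can be removed one page per request rather than one request per object (an illustrative sketch; bucket and prefix are placeholders):

.. code-block:: python

    import boto3

    s3 = boto3.resource('s3')
    bucket = s3.Bucket('amzn-s3-demo-bucket')

    # One DeleteObjects request per listed page of results.
    bucket.objects.filter(Prefix='tmp/').delete()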
This is a shortcut for accessing - the same information through the resource model. - - :rtype: list(:py:class:`Action`) - """ - return self.resource.model.batch_actions - - -class ResourceModel: - """ - A model representing a resource, defined via a JSON description - format. A resource has identifiers, attributes, actions, - sub-resources, references and collections. For more information - on resources, see :ref:`guide_resources`. - - :type name: string - :param name: The name of this resource, e.g. ``sqs`` or ``Queue`` - :type definition: dict - :param definition: The JSON definition - :type resource_defs: dict - :param resource_defs: All resources defined in the service - """ - - def __init__(self, name, definition, resource_defs): - self._definition = definition - self._resource_defs = resource_defs - self._renamed = {} - - #: (``string``) The name of this resource - self.name = name - #: (``string``) The service shape name for this resource or ``None`` - self.shape = definition.get('shape') - - def load_rename_map(self, shape=None): - """ - Load a name translation map given a shape. This will set - up renamed values for any collisions, e.g. if the shape, - an action, and a subresource all are all named ``foo`` - then the resource will have an action ``foo``, a subresource - named ``Foo`` and a property named ``foo_attribute``. - This is the order of precedence, from most important to - least important: - - * Load action (resource.load) - * Identifiers - * Actions - * Subresources - * References - * Collections - * Waiters - * Attributes (shape members) - - Batch actions are only exposed on collections, so do not - get modified here. Subresources use upper camel casing, so - are unlikely to collide with anything but other subresources. - - Creates a structure like this:: - - renames = { - ('action', 'id'): 'id_action', - ('collection', 'id'): 'id_collection', - ('attribute', 'id'): 'id_attribute' - } - - # Get the final name for an action named 'id' - name = renames.get(('action', 'id'), 'id') - - :type shape: botocore.model.Shape - :param shape: The underlying shape for this resource. - """ - # Meta is a reserved name for resources - names = {'meta'} - self._renamed = {} - - if self._definition.get('load'): - names.add('load') - - for item in self._definition.get('identifiers', []): - self._load_name_with_category(names, item['name'], 'identifier') - - for name in self._definition.get('actions', {}): - self._load_name_with_category(names, name, 'action') - - for name, ref in self._get_has_definition().items(): - # Subresources require no data members, just typically - # identifiers and user input. - data_required = False - for identifier in ref['resource']['identifiers']: - if identifier['source'] == 'data': - data_required = True - break - - if not data_required: - self._load_name_with_category( - names, name, 'subresource', snake_case=False - ) - else: - self._load_name_with_category(names, name, 'reference') - - for name in self._definition.get('hasMany', {}): - self._load_name_with_category(names, name, 'collection') - - for name in self._definition.get('waiters', {}): - self._load_name_with_category( - names, Waiter.PREFIX + name, 'waiter' - ) - - if shape is not None: - for name in shape.members.keys(): - self._load_name_with_category(names, name, 'attribute') - - def _load_name_with_category(self, names, name, category, snake_case=True): - """ - Load a name with a given category, possibly renaming it - if that name is already in use. 
The name will be stored - in ``names`` and possibly be set up in ``self._renamed``. - - :type names: set - :param names: Existing names (Python attributes, properties, or - methods) on the resource. - :type name: string - :param name: The original name of the value. - :type category: string - :param category: The value type, such as 'identifier' or 'action' - :type snake_case: bool - :param snake_case: True (default) if the name should be snake cased. - """ - if snake_case: - name = xform_name(name) - - if name in names: - logger.debug('Renaming %s %s %s', self.name, category, name) - self._renamed[(category, name)] = f"{name}_{category}" - name += f"_{category}" - - if name in names: - # This isn't good, let's raise instead of trying to keep - # renaming this value. - raise ValueError( - f'Problem renaming {self.name} {category} to {name}!' - ) - - names.add(name) - - def _get_name(self, category, name, snake_case=True): - """ - Get a possibly renamed value given a category and name. This - uses the rename map set up in ``load_rename_map``, so that - method must be called once first. - - :type category: string - :param category: The value type, such as 'identifier' or 'action' - :type name: string - :param name: The original name of the value - :type snake_case: bool - :param snake_case: True (default) if the name should be snake cased. - :rtype: string - :return: Either the renamed value if it is set, otherwise the - original name. - """ - if snake_case: - name = xform_name(name) - - return self._renamed.get((category, name), name) - - def get_attributes(self, shape): - """ - Get a dictionary of attribute names to original name and shape - models that represent the attributes of this resource. Looks - like the following: - - { - 'some_name': ('SomeName', ) - } - - :type shape: botocore.model.Shape - :param shape: The underlying shape for this resource. - :rtype: dict - :return: Mapping of resource attributes. - """ - attributes = {} - identifier_names = [i.name for i in self.identifiers] - - for name, member in shape.members.items(): - snake_cased = xform_name(name) - if snake_cased in identifier_names: - # Skip identifiers, these are set through other means - continue - snake_cased = self._get_name( - 'attribute', snake_cased, snake_case=False - ) - attributes[snake_cased] = (name, member) - - return attributes - - @property - def identifiers(self): - """ - Get a list of resource identifiers. - - :type: list(:py:class:`Identifier`) - """ - identifiers = [] - - for item in self._definition.get('identifiers', []): - name = self._get_name('identifier', item['name']) - member_name = item.get('memberName', None) - if member_name: - member_name = self._get_name('attribute', member_name) - identifiers.append(Identifier(name, member_name)) - - return identifiers - - @property - def load(self): - """ - Get the load action for this resource, if it is defined. - - :type: :py:class:`Action` or ``None`` - """ - action = self._definition.get('load') - - if action is not None: - action = Action('load', action, self._resource_defs) - - return action - - @property - def actions(self): - """ - Get a list of actions for this resource. - - :type: list(:py:class:`Action`) - """ - actions = [] - - for name, item in self._definition.get('actions', {}).items(): - name = self._get_name('action', name) - actions.append(Action(name, item, self._resource_defs)) - - return actions - - @property - def batch_actions(self): - """ - Get a list of batch actions for this resource. 
- - :type: list(:py:class:`Action`) - """ - actions = [] - - for name, item in self._definition.get('batchActions', {}).items(): - name = self._get_name('batch_action', name) - actions.append(Action(name, item, self._resource_defs)) - - return actions - - def _get_has_definition(self): - """ - Get a ``has`` relationship definition from a model, where the - service resource model is treated special in that it contains - a relationship to every resource defined for the service. This - allows things like ``s3.Object('bucket-name', 'key')`` to - work even though the JSON doesn't define it explicitly. - - :rtype: dict - :return: Mapping of names to subresource and reference - definitions. - """ - if self.name not in self._resource_defs: - # This is the service resource, so let us expose all of - # the defined resources as subresources. - definition = {} - - for name, resource_def in self._resource_defs.items(): - # It's possible for the service to have renamed a - # resource or to have defined multiple names that - # point to the same resource type, so we need to - # take that into account. - found = False - has_items = self._definition.get('has', {}).items() - for has_name, has_def in has_items: - if has_def.get('resource', {}).get('type') == name: - definition[has_name] = has_def - found = True - - if not found: - # Create a relationship definition and attach it - # to the model, such that all identifiers must be - # supplied by the user. It will look something like: - # - # { - # 'resource': { - # 'type': 'ResourceName', - # 'identifiers': [ - # {'target': 'Name1', 'source': 'input'}, - # {'target': 'Name2', 'source': 'input'}, - # ... - # ] - # } - # } - # - fake_has = {'resource': {'type': name, 'identifiers': []}} - - for identifier in resource_def.get('identifiers', []): - fake_has['resource']['identifiers'].append( - {'target': identifier['name'], 'source': 'input'} - ) - - definition[name] = fake_has - else: - definition = self._definition.get('has', {}) - - return definition - - def _get_related_resources(self, subresources): - """ - Get a list of sub-resources or references. - - :type subresources: bool - :param subresources: ``True`` to get sub-resources, ``False`` to - get references. - :rtype: list(:py:class:`Action`) - """ - resources = [] - - for name, definition in self._get_has_definition().items(): - if subresources: - name = self._get_name('subresource', name, snake_case=False) - else: - name = self._get_name('reference', name) - action = Action(name, definition, self._resource_defs) - - data_required = False - for identifier in action.resource.identifiers: - if identifier.source == 'data': - data_required = True - break - - if subresources and not data_required: - resources.append(action) - elif not subresources and data_required: - resources.append(action) - - return resources - - @property - def subresources(self): - """ - Get a list of sub-resources. - - :type: list(:py:class:`Action`) - """ - return self._get_related_resources(True) - - @property - def references(self): - """ - Get a list of reference resources. - - :type: list(:py:class:`Action`) - """ - return self._get_related_resources(False) - - @property - def collections(self): - """ - Get a list of collections for this resource. 
- - :type: list(:py:class:`Collection`) - """ - collections = [] - - for name, item in self._definition.get('hasMany', {}).items(): - name = self._get_name('collection', name) - collections.append(Collection(name, item, self._resource_defs)) - - return collections - - @property - def waiters(self): - """ - Get a list of waiters for this resource. - - :type: list(:py:class:`Waiter`) - """ - waiters = [] - - for name, item in self._definition.get('waiters', {}).items(): - name = self._get_name('waiter', Waiter.PREFIX + name) - waiters.append(Waiter(name, item)) - - return waiters diff --git a/venv/Lib/site-packages/boto3/resources/params.py b/venv/Lib/site-packages/boto3/resources/params.py deleted file mode 100644 index 725256f..0000000 --- a/venv/Lib/site-packages/boto3/resources/params.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import re - -import jmespath -from botocore import xform_name - -from ..exceptions import ResourceLoadException - -INDEX_RE = re.compile(r'\[(.*)\]$') - - -def get_data_member(parent, path): - """ - Get a data member from a parent using a JMESPath search query, - loading the parent if required. If the parent cannot be loaded - and no data is present then an exception is raised. - - :type parent: ServiceResource - :param parent: The resource instance to which contains data we - are interested in. - :type path: string - :param path: The JMESPath expression to query - :raises ResourceLoadException: When no data is present and the - resource cannot be loaded. - :returns: The queried data or ``None``. - """ - # Ensure the parent has its data loaded, if possible. - if parent.meta.data is None: - if hasattr(parent, 'load'): - parent.load() - else: - raise ResourceLoadException( - f'{parent.__class__.__name__} has no load method!' - ) - - return jmespath.search(path, parent.meta.data) - - -def create_request_parameters(parent, request_model, params=None, index=None): - """ - Handle request parameters that can be filled in from identifiers, - resource data members or constants. - - By passing ``params``, you can invoke this method multiple times and - build up a parameter dict over time, which is particularly useful - for reverse JMESPath expressions that append to lists. - - :type parent: ServiceResource - :param parent: The resource instance to which this action is attached. - :type request_model: :py:class:`~boto3.resources.model.Request` - :param request_model: The action request model. - :type params: dict - :param params: If set, then add to this existing dict. It is both - edited in-place and returned. - :type index: int - :param index: The position of an item within a list - :rtype: dict - :return: Pre-filled parameters to be sent to the request operation. - """ - if params is None: - params = {} - - for param in request_model.params: - source = param.source - target = param.target - - if source == 'identifier': - # Resource identifier, e.g. 
queue.url - value = getattr(parent, xform_name(param.name)) - elif source == 'data': - # If this is a data member then it may incur a load - # action before returning the value. - value = get_data_member(parent, param.path) - elif source in ['string', 'integer', 'boolean']: - # These are hard-coded values in the definition - value = param.value - elif source == 'input': - # This is provided by the user, so ignore it here - continue - else: - raise NotImplementedError(f'Unsupported source type: {source}') - - build_param_structure(params, target, value, index) - - return params - - -def build_param_structure(params, target, value, index=None): - """ - This method provides a basic reverse JMESPath implementation that - lets you go from a JMESPath-like string to a possibly deeply nested - object. The ``params`` are mutated in-place, so subsequent calls - can modify the same element by its index. - - >>> build_param_structure(params, 'test[0]', 1) - >>> print(params) - {'test': [1]} - - >>> build_param_structure(params, 'foo.bar[0].baz', 'hello world') - >>> print(params) - {'test': [1], 'foo': {'bar': [{'baz': 'hello, world'}]}} - - """ - pos = params - parts = target.split('.') - - # First, split into parts like 'foo', 'bar[0]', 'baz' and process - # each piece. It can either be a list or a dict, depending on if - # an index like `[0]` is present. We detect this via a regular - # expression, and keep track of where we are in params via the - # pos variable, walking down to the last item. Once there, we - # set the value. - for i, part in enumerate(parts): - # Is it indexing an array? - result = INDEX_RE.search(part) - if result: - if result.group(1): - if result.group(1) == '*': - part = part[:-3] - else: - # We have an explicit index - index = int(result.group(1)) - part = part[: -len(f"{index}[]")] - else: - # Index will be set after we know the proper part - # name and that it's a list instance. - index = None - part = part[:-2] - - if part not in pos or not isinstance(pos[part], list): - pos[part] = [] - - # This means we should append, e.g. 'foo[]' - if index is None: - index = len(pos[part]) - - while len(pos[part]) <= index: - # Assume it's a dict until we set the final value below - pos[part].append({}) - - # Last item? Set the value, otherwise set the new position - if i == len(parts) - 1: - pos[part][index] = value - else: - # The new pos is the *item* in the array, not the array! - pos = pos[part][index] - else: - if part not in pos: - pos[part] = {} - - # Last item? Set the value, otherwise set the new position - if i == len(parts) - 1: - pos[part] = value - else: - pos = pos[part] diff --git a/venv/Lib/site-packages/boto3/resources/response.py b/venv/Lib/site-packages/boto3/resources/response.py deleted file mode 100644 index a27190a..0000000 --- a/venv/Lib/site-packages/boto3/resources/response.py +++ /dev/null @@ -1,316 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
- -import jmespath -from botocore import xform_name - -from .params import get_data_member - - -def all_not_none(iterable): - """ - Return True if all elements of the iterable are not None (or if the - iterable is empty). This is like the built-in ``all``, except checks - against None, so 0 and False are allowable values. - """ - for element in iterable: - if element is None: - return False - return True - - -def build_identifiers(identifiers, parent, params=None, raw_response=None): - """ - Builds a mapping of identifier names to values based on the - identifier source location, type, and target. Identifier - values may be scalars or lists depending on the source type - and location. - - :type identifiers: list - :param identifiers: List of :py:class:`~boto3.resources.model.Parameter` - definitions - :type parent: ServiceResource - :param parent: The resource instance to which this action is attached. - :type params: dict - :param params: Request parameters sent to the service. - :type raw_response: dict - :param raw_response: Low-level operation response. - :rtype: list - :return: An ordered list of ``(name, value)`` identifier tuples. - """ - results = [] - - for identifier in identifiers: - source = identifier.source - target = identifier.target - - if source == 'response': - value = jmespath.search(identifier.path, raw_response) - elif source == 'requestParameter': - value = jmespath.search(identifier.path, params) - elif source == 'identifier': - value = getattr(parent, xform_name(identifier.name)) - elif source == 'data': - # If this is a data member then it may incur a load - # action before returning the value. - value = get_data_member(parent, identifier.path) - elif source == 'input': - # This value is set by the user, so ignore it here - continue - else: - raise NotImplementedError(f'Unsupported source type: {source}') - - results.append((xform_name(target), value)) - - return results - - -def build_empty_response(search_path, operation_name, service_model): - """ - Creates an appropriate empty response for the type that is expected, - based on the service model's shape type. For example, a value that - is normally a list would then return an empty list. A structure would - return an empty dict, and a number would return None. - - :type search_path: string - :param search_path: JMESPath expression to search in the response - :type operation_name: string - :param operation_name: Name of the underlying service operation. - :type service_model: :ref:`botocore.model.ServiceModel` - :param service_model: The Botocore service model - :rtype: dict, list, or None - :return: An appropriate empty value - """ - response = None - - operation_model = service_model.operation_model(operation_name) - shape = operation_model.output_shape - - if search_path: - # Walk the search path and find the final shape. For example, given - # a path of ``foo.bar[0].baz``, we first find the shape for ``foo``, - # then the shape for ``bar`` (ignoring the indexing), and finally - # the shape for ``baz``. 
- for item in search_path.split('.'): - item = item.strip('[0123456789]$') - - if shape.type_name == 'structure': - shape = shape.members[item] - elif shape.type_name == 'list': - shape = shape.member - else: - raise NotImplementedError( - f'Search path hits shape type {shape.type_name} from {item}' - ) - - # Anything not handled here is set to None - if shape.type_name == 'structure': - response = {} - elif shape.type_name == 'list': - response = [] - elif shape.type_name == 'map': - response = {} - - return response - - -class RawHandler: - """ - A raw action response handler. This passed through the response - dictionary, optionally after performing a JMESPath search if one - has been defined for the action. - - :type search_path: string - :param search_path: JMESPath expression to search in the response - :rtype: dict - :return: Service response - """ - - def __init__(self, search_path): - self.search_path = search_path - - def __call__(self, parent, params, response): - """ - :type parent: ServiceResource - :param parent: The resource instance to which this action is attached. - :type params: dict - :param params: Request parameters sent to the service. - :type response: dict - :param response: Low-level operation response. - """ - # TODO: Remove the '$' check after JMESPath supports it - if self.search_path and self.search_path != '$': - response = jmespath.search(self.search_path, response) - - return response - - -class ResourceHandler: - """ - Creates a new resource or list of new resources from the low-level - response based on the given response resource definition. - - :type search_path: string - :param search_path: JMESPath expression to search in the response - - :type factory: ResourceFactory - :param factory: The factory that created the resource class to which - this action is attached. - - :type resource_model: :py:class:`~boto3.resources.model.ResponseResource` - :param resource_model: Response resource model. - - :type service_context: :py:class:`~boto3.utils.ServiceContext` - :param service_context: Context about the AWS service - - :type operation_name: string - :param operation_name: Name of the underlying service operation, if it - exists. - - :rtype: ServiceResource or list - :return: New resource instance(s). - """ - - def __init__( - self, - search_path, - factory, - resource_model, - service_context, - operation_name=None, - ): - self.search_path = search_path - self.factory = factory - self.resource_model = resource_model - self.operation_name = operation_name - self.service_context = service_context - - def __call__(self, parent, params, response): - """ - :type parent: ServiceResource - :param parent: The resource instance to which this action is attached. - :type params: dict - :param params: Request parameters sent to the service. - :type response: dict - :param response: Low-level operation response. - """ - resource_name = self.resource_model.type - json_definition = self.service_context.resource_json_definitions.get( - resource_name - ) - - # Load the new resource class that will result from this action. - resource_cls = self.factory.load_from_definition( - resource_name=resource_name, - single_resource_json_definition=json_definition, - service_context=self.service_context, - ) - raw_response = response - search_response = None - - # Anytime a path is defined, it means the response contains the - # resource's attributes, so resource_data gets set here. 
It - # eventually ends up in resource.meta.data, which is where - # the attribute properties look for data. - if self.search_path: - search_response = jmespath.search(self.search_path, raw_response) - - # First, we parse all the identifiers, then create the individual - # response resources using them. Any identifiers that are lists - # will have one item consumed from the front of the list for each - # resource that is instantiated. Items which are not a list will - # be set as the same value on each new resource instance. - identifiers = dict( - build_identifiers( - self.resource_model.identifiers, parent, params, raw_response - ) - ) - - # If any of the identifiers is a list, then the response is plural - plural = [v for v in identifiers.values() if isinstance(v, list)] - - if plural: - response = [] - - # The number of items in an identifier that is a list will - # determine how many resource instances to create. - for i in range(len(plural[0])): - # Response item data is *only* available if a search path - # was given. This prevents accidentally loading unrelated - # data that may be in the response. - response_item = None - if search_response: - response_item = search_response[i] - response.append( - self.handle_response_item( - resource_cls, parent, identifiers, response_item - ) - ) - elif all_not_none(identifiers.values()): - # All identifiers must always exist, otherwise the resource - # cannot be instantiated. - response = self.handle_response_item( - resource_cls, parent, identifiers, search_response - ) - else: - # The response should be empty, but that may mean an - # empty dict, list, or None based on whether we make - # a remote service call and what shape it is expected - # to return. - response = None - if self.operation_name is not None: - # A remote service call was made, so try and determine - # its shape. - response = build_empty_response( - self.search_path, - self.operation_name, - self.service_context.service_model, - ) - - return response - - def handle_response_item( - self, resource_cls, parent, identifiers, resource_data - ): - """ - Handles the creation of a single response item by setting - parameters and creating the appropriate resource instance. - - :type resource_cls: ServiceResource subclass - :param resource_cls: The resource class to instantiate. - :type parent: ServiceResource - :param parent: The resource instance to which this action is attached. - :type identifiers: dict - :param identifiers: Map of identifier names to value or values. - :type resource_data: dict or None - :param resource_data: Data for resource attributes. - :rtype: ServiceResource - :return: New resource instance. - """ - kwargs = { - 'client': parent.meta.client, - } - - for name, value in identifiers.items(): - # If value is a list, then consume the next item - if isinstance(value, list): - value = value.pop(0) - - kwargs[name] = value - - resource = resource_cls(**kwargs) - - if resource_data is not None: - resource.meta.data = resource_data - - return resource diff --git a/venv/Lib/site-packages/boto3/s3/__init__.py b/venv/Lib/site-packages/boto3/s3/__init__.py deleted file mode 100644 index 6001b27..0000000 --- a/venv/Lib/site-packages/boto3/s3/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. 
A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. diff --git a/venv/Lib/site-packages/boto3/s3/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/boto3/s3/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b973924..0000000 Binary files a/venv/Lib/site-packages/boto3/s3/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/s3/__pycache__/constants.cpython-312.pyc b/venv/Lib/site-packages/boto3/s3/__pycache__/constants.cpython-312.pyc deleted file mode 100644 index 4c7381b..0000000 Binary files a/venv/Lib/site-packages/boto3/s3/__pycache__/constants.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/s3/__pycache__/inject.cpython-312.pyc b/venv/Lib/site-packages/boto3/s3/__pycache__/inject.cpython-312.pyc deleted file mode 100644 index 4390387..0000000 Binary files a/venv/Lib/site-packages/boto3/s3/__pycache__/inject.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/s3/__pycache__/transfer.cpython-312.pyc b/venv/Lib/site-packages/boto3/s3/__pycache__/transfer.cpython-312.pyc deleted file mode 100644 index ca4d3a9..0000000 Binary files a/venv/Lib/site-packages/boto3/s3/__pycache__/transfer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/boto3/s3/constants.py b/venv/Lib/site-packages/boto3/s3/constants.py deleted file mode 100644 index bd38778..0000000 --- a/venv/Lib/site-packages/boto3/s3/constants.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - - -# TransferConfig preferred_transfer_client settings -CLASSIC_TRANSFER_CLIENT = "classic" -CRT_TRANSFER_CLIENT = "crt" -AUTO_RESOLVE_TRANSFER_CLIENT = "auto" diff --git a/venv/Lib/site-packages/boto3/s3/inject.py b/venv/Lib/site-packages/boto3/s3/inject.py deleted file mode 100644 index 74ef267..0000000 --- a/venv/Lib/site-packages/boto3/s3/inject.py +++ /dev/null @@ -1,955 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-import copy as python_copy -import logging -from functools import partial - -from botocore.exceptions import ClientError - -from boto3 import utils -from boto3.compat import is_append_mode -from boto3.s3.transfer import ( - ProgressCallbackInvoker, - S3Transfer, - TransferConfig, - create_transfer_manager, -) - -try: - from botocore.context import with_current_context -except ImportError: - from functools import wraps - - def with_current_context(hook=None): - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - return func(*args, **kwargs) - - return wrapper - - return decorator - - -try: - from botocore.useragent import register_feature_id -except ImportError: - - def register_feature_id(feature_id): - pass - - -logger = logging.getLogger(__name__) - - -def inject_s3_transfer_methods(class_attributes, **kwargs): - utils.inject_attribute(class_attributes, 'upload_file', upload_file) - utils.inject_attribute(class_attributes, 'download_file', download_file) - utils.inject_attribute(class_attributes, 'copy', copy) - utils.inject_attribute(class_attributes, 'upload_fileobj', upload_fileobj) - utils.inject_attribute( - class_attributes, 'download_fileobj', download_fileobj - ) - - -def inject_bucket_methods(class_attributes, **kwargs): - utils.inject_attribute(class_attributes, 'load', bucket_load) - utils.inject_attribute(class_attributes, 'upload_file', bucket_upload_file) - utils.inject_attribute( - class_attributes, 'download_file', bucket_download_file - ) - utils.inject_attribute(class_attributes, 'copy', bucket_copy) - utils.inject_attribute( - class_attributes, 'upload_fileobj', bucket_upload_fileobj - ) - utils.inject_attribute( - class_attributes, 'download_fileobj', bucket_download_fileobj - ) - - -def inject_object_methods(class_attributes, **kwargs): - utils.inject_attribute(class_attributes, 'upload_file', object_upload_file) - utils.inject_attribute( - class_attributes, 'download_file', object_download_file - ) - utils.inject_attribute(class_attributes, 'copy', object_copy) - utils.inject_attribute( - class_attributes, 'upload_fileobj', object_upload_fileobj - ) - utils.inject_attribute( - class_attributes, 'download_fileobj', object_download_fileobj - ) - - -def inject_object_summary_methods(class_attributes, **kwargs): - utils.inject_attribute(class_attributes, 'load', object_summary_load) - - -def bucket_load(self, *args, **kwargs): - """ - Calls s3.Client.list_buckets() to update the attributes of the Bucket - resource. - """ - # The docstring above is phrased this way to match what the autogenerated - # docs produce. - - # We can't actually get the bucket's attributes from a HeadBucket, - # so we need to use a ListBuckets and search for our bucket. - # However, we may fail if we lack permissions to ListBuckets - # or the bucket is in another account. In which case, creation_date - # will be None. - self.meta.data = {} - try: - response = self.meta.client.list_buckets() - for bucket_data in response['Buckets']: - if bucket_data['Name'] == self.name: - self.meta.data = bucket_data - break - except ClientError as e: - if not e.response.get('Error', {}).get('Code') == 'AccessDenied': - raise - - -def object_summary_load(self, *args, **kwargs): - """ - Calls s3.Client.head_object to update the attributes of the ObjectSummary - resource. 
- """ - response = self.meta.client.head_object( - Bucket=self.bucket_name, Key=self.key - ) - if 'ContentLength' in response: - response['Size'] = response.pop('ContentLength') - self.meta.data = response - - -@with_current_context(partial(register_feature_id, 'S3_TRANSFER')) -def upload_file( - self, Filename, Bucket, Key, ExtraArgs=None, Callback=None, Config=None -): - """Upload a file to an S3 object. - - Usage:: - - import boto3 - s3 = boto3.client('s3') - s3.upload_file('/tmp/hello.txt', 'amzn-s3-demo-bucket', 'hello.txt') - - Similar behavior as S3Transfer's upload_file() method, except that - argument names are capitalized. Detailed examples can be found at - :ref:`S3Transfer's Usage `. - - :type Filename: str - :param Filename: The path to the file to upload. - - :type Bucket: str - :param Bucket: The name of the bucket to upload to. - - :type Key: str - :param Key: The name of the key to upload to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed upload arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the upload. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - transfer. - """ - with S3Transfer(self, Config) as transfer: - return transfer.upload_file( - filename=Filename, - bucket=Bucket, - key=Key, - extra_args=ExtraArgs, - callback=Callback, - ) - - -@with_current_context(partial(register_feature_id, 'S3_TRANSFER')) -def download_file( - self, Bucket, Key, Filename, ExtraArgs=None, Callback=None, Config=None -): - """Download an S3 object to a file. - - Usage:: - - import boto3 - s3 = boto3.client('s3') - s3.download_file('amzn-s3-demo-bucket', 'hello.txt', '/tmp/hello.txt') - - Similar behavior as S3Transfer's download_file() method, - except that parameters are capitalized. Detailed examples can be found at - :ref:`S3Transfer's Usage `. - - :type Bucket: str - :param Bucket: The name of the bucket to download from. - - :type Key: str - :param Key: The name of the key to download from. - - :type Filename: str - :param Filename: The path to the file to download to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed download arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the download. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - transfer. - """ - with S3Transfer(self, Config) as transfer: - return transfer.download_file( - bucket=Bucket, - key=Key, - filename=Filename, - extra_args=ExtraArgs, - callback=Callback, - ) - - -def bucket_upload_file( - self, Filename, Key, ExtraArgs=None, Callback=None, Config=None -): - """Upload a file to an S3 object. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - s3.Bucket('amzn-s3-demo-bucket').upload_file('/tmp/hello.txt', 'hello.txt') - - Similar behavior as S3Transfer's upload_file() method, - except that parameters are capitalized. Detailed examples can be found at - :ref:`S3Transfer's Usage `. - - :type Filename: str - :param Filename: The path to the file to upload. 
- - :type Key: str - :param Key: The name of the key to upload to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed upload arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the upload. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - transfer. - """ - return self.meta.client.upload_file( - Filename=Filename, - Bucket=self.name, - Key=Key, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -def bucket_download_file( - self, Key, Filename, ExtraArgs=None, Callback=None, Config=None -): - """Download an S3 object to a file. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - s3.Bucket('amzn-s3-demo-bucket').download_file('hello.txt', '/tmp/hello.txt') - - Similar behavior as S3Transfer's download_file() method, - except that parameters are capitalized. Detailed examples can be found at - :ref:`S3Transfer's Usage `. - - :type Key: str - :param Key: The name of the key to download from. - - :type Filename: str - :param Filename: The path to the file to download to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed download arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the download. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - transfer. - """ - return self.meta.client.download_file( - Bucket=self.name, - Key=Key, - Filename=Filename, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -def object_upload_file( - self, Filename, ExtraArgs=None, Callback=None, Config=None -): - """Upload a file to an S3 object. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - s3.Object('amzn-s3-demo-bucket', 'hello.txt').upload_file('/tmp/hello.txt') - - Similar behavior as S3Transfer's upload_file() method, - except that parameters are capitalized. Detailed examples can be found at - :ref:`S3Transfer's Usage `. - - :type Filename: str - :param Filename: The path to the file to upload. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed upload arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the upload. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - transfer. - """ - return self.meta.client.upload_file( - Filename=Filename, - Bucket=self.bucket_name, - Key=self.key, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -def object_download_file( - self, Filename, ExtraArgs=None, Callback=None, Config=None -): - """Download an S3 object to a file. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - s3.Object('amzn-s3-demo-bucket', 'hello.txt').download_file('/tmp/hello.txt') - - Similar behavior as S3Transfer's download_file() method, - except that parameters are capitalized. 
Detailed examples can be found at - :ref:`S3Transfer's Usage `. - - :type Filename: str - :param Filename: The path to the file to download to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed download arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the download. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - transfer. - """ - return self.meta.client.download_file( - Bucket=self.bucket_name, - Key=self.key, - Filename=Filename, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -@with_current_context(partial(register_feature_id, 'S3_TRANSFER')) -def copy( - self, - CopySource, - Bucket, - Key, - ExtraArgs=None, - Callback=None, - SourceClient=None, - Config=None, -): - """Copy an object from one S3 location to another. - - This is a managed transfer which will perform a multipart copy in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - copy_source = { - 'Bucket': 'amzn-s3-demo-bucket1', - 'Key': 'mykey' - } - s3.meta.client.copy(copy_source, 'amzn-s3-demo-bucket2', 'otherkey') - - :type CopySource: dict - :param CopySource: The name of the source bucket, key name of the - source object, and optional version ID of the source object. The - dictionary format is: - ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note - that the ``VersionId`` key is optional and may be omitted. - - :type Bucket: str - :param Bucket: The name of the bucket to copy to - - :type Key: str - :param Key: The name of the key to copy to - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed copy arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_COPY_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the copy. - - :type SourceClient: botocore or boto3 Client - :param SourceClient: The client to be used for operation that - may happen at the source object. For example, this client is - used for the head_object that determines the size of the copy. - If no client is provided, the current client is used as the client - for the source object. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - copy. - """ - subscribers = None - if Callback is not None: - subscribers = [ProgressCallbackInvoker(Callback)] - - config = Config - if config is None: - config = TransferConfig() - - # copy is not supported in the CRT - new_config = python_copy.copy(config) - new_config.preferred_transfer_client = "classic" - - with create_transfer_manager(self, new_config) as manager: - future = manager.copy( - copy_source=CopySource, - bucket=Bucket, - key=Key, - extra_args=ExtraArgs, - subscribers=subscribers, - source_client=SourceClient, - ) - return future.result() - - -def bucket_copy( - self, - CopySource, - Key, - ExtraArgs=None, - Callback=None, - SourceClient=None, - Config=None, -): - """Copy an object from one S3 location to an object in this bucket. - - This is a managed transfer which will perform a multipart copy in - multiple threads if necessary. 
- - Usage:: - - import boto3 - s3 = boto3.resource('s3') - copy_source = { - 'Bucket': 'amzn-s3-demo-bucket1', - 'Key': 'mykey' - } - bucket = s3.Bucket('amzn-s3-demo-bucket2') - bucket.copy(copy_source, 'otherkey') - - :type CopySource: dict - :param CopySource: The name of the source bucket, key name of the - source object, and optional version ID of the source object. The - dictionary format is: - ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note - that the ``VersionId`` key is optional and may be omitted. - - :type Key: str - :param Key: The name of the key to copy to - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed copy arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_COPY_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the copy. - - :type SourceClient: botocore or boto3 Client - :param SourceClient: The client to be used for operation that - may happen at the source object. For example, this client is - used for the head_object that determines the size of the copy. - If no client is provided, the current client is used as the client - for the source object. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - copy. - """ - return self.meta.client.copy( - CopySource=CopySource, - Bucket=self.name, - Key=Key, - ExtraArgs=ExtraArgs, - Callback=Callback, - SourceClient=SourceClient, - Config=Config, - ) - - -def object_copy( - self, - CopySource, - ExtraArgs=None, - Callback=None, - SourceClient=None, - Config=None, -): - """Copy an object from one S3 location to this object. - - This is a managed transfer which will perform a multipart copy in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - copy_source = { - 'Bucket': 'amzn-s3-demo-bucket1', - 'Key': 'mykey' - } - bucket = s3.Bucket('amzn-s3-demo-bucket2') - obj = bucket.Object('otherkey') - obj.copy(copy_source) - - :type CopySource: dict - :param CopySource: The name of the source bucket, key name of the - source object, and optional version ID of the source object. The - dictionary format is: - ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note - that the ``VersionId`` key is optional and may be omitted. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed copy arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_COPY_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the copy. - - :type SourceClient: botocore or boto3 Client - :param SourceClient: The client to be used for operation that - may happen at the source object. For example, this client is - used for the head_object that determines the size of the copy. - If no client is provided, the current client is used as the client - for the source object. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - copy. 
- """ - return self.meta.client.copy( - CopySource=CopySource, - Bucket=self.bucket_name, - Key=self.key, - ExtraArgs=ExtraArgs, - Callback=Callback, - SourceClient=SourceClient, - Config=Config, - ) - - -@with_current_context(partial(register_feature_id, 'S3_TRANSFER')) -def upload_fileobj( - self, Fileobj, Bucket, Key, ExtraArgs=None, Callback=None, Config=None -): - """Upload a file-like object to S3. - - The file-like object must be in binary mode. - - This is a managed transfer which will perform a multipart upload in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.client('s3') - - with open('filename', 'rb') as data: - s3.upload_fileobj(data, 'amzn-s3-demo-bucket', 'mykey') - - :type Fileobj: a file-like object - :param Fileobj: A file-like object to upload. At a minimum, it must - implement the `read` method, and must return bytes. - - :type Bucket: str - :param Bucket: The name of the bucket to upload to. - - :type Key: str - :param Key: The name of the key to upload to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed upload arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the upload. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - upload. - """ - if not hasattr(Fileobj, 'read'): - raise ValueError('Fileobj must implement read') - - subscribers = None - if Callback is not None: - subscribers = [ProgressCallbackInvoker(Callback)] - - config = Config - if config is None: - config = TransferConfig() - - with create_transfer_manager(self, config) as manager: - future = manager.upload( - fileobj=Fileobj, - bucket=Bucket, - key=Key, - extra_args=ExtraArgs, - subscribers=subscribers, - ) - return future.result() - - -def bucket_upload_fileobj( - self, Fileobj, Key, ExtraArgs=None, Callback=None, Config=None -): - """Upload a file-like object to this bucket. - - The file-like object must be in binary mode. - - This is a managed transfer which will perform a multipart upload in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - bucket = s3.Bucket('amzn-s3-demo-bucket') - - with open('filename', 'rb') as data: - bucket.upload_fileobj(data, 'mykey') - - :type Fileobj: a file-like object - :param Fileobj: A file-like object to upload. At a minimum, it must - implement the `read` method, and must return bytes. - - :type Key: str - :param Key: The name of the key to upload to. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed upload arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the upload. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - upload. - """ - return self.meta.client.upload_fileobj( - Fileobj=Fileobj, - Bucket=self.name, - Key=Key, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -def object_upload_fileobj( - self, Fileobj, ExtraArgs=None, Callback=None, Config=None -): - """Upload a file-like object to this object. - - The file-like object must be in binary mode. 
- - This is a managed transfer which will perform a multipart upload in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - bucket = s3.Bucket('amzn-s3-demo-bucket') - obj = bucket.Object('mykey') - - with open('filename', 'rb') as data: - obj.upload_fileobj(data) - - :type Fileobj: a file-like object - :param Fileobj: A file-like object to upload. At a minimum, it must - implement the `read` method, and must return bytes. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed upload arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the upload. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - upload. - """ - return self.meta.client.upload_fileobj( - Fileobj=Fileobj, - Bucket=self.bucket_name, - Key=self.key, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -def disable_threading_if_append_mode(config, fileobj): - """Set `TransferConfig.use_threads` to `False` if file-like - object is in append mode. - - :type config: boto3.s3.transfer.TransferConfig - :param config: The transfer configuration to be used when performing the - download. - - :type fileobj: A file-like object - :param fileobj: A file-like object to inspect for append mode. - """ - if is_append_mode(fileobj): - config.use_threads = False - logger.warning( - 'A single thread will be used because the provided file object ' - 'is in append mode. Writes may always be appended to the end of ' - 'the file regardless of seek position, so a single thread must be ' - 'used to ensure sequential writes.' - ) - - -@with_current_context(partial(register_feature_id, 'S3_TRANSFER')) -def download_fileobj( - self, Bucket, Key, Fileobj, ExtraArgs=None, Callback=None, Config=None -): - """Download an object from S3 to a file-like object. - - The file-like object must be in binary mode. - - This is a managed transfer which will perform a multipart download in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.client('s3') - - with open('filename', 'wb') as data: - s3.download_fileobj('amzn-s3-demo-bucket', 'mykey', data) - - :type Bucket: str - :param Bucket: The name of the bucket to download from. - - :type Key: str - :param Key: The name of the key to download from. - - :type Fileobj: a file-like object - :param Fileobj: A file-like object to download into. At a minimum, it must - implement the `write` method and must accept bytes. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed download arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the download. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - download. 
- """ - if not hasattr(Fileobj, 'write'): - raise ValueError('Fileobj must implement write') - - subscribers = None - if Callback is not None: - subscribers = [ProgressCallbackInvoker(Callback)] - - config = Config - if config is None: - config = TransferConfig() - - new_config = python_copy.copy(config) - disable_threading_if_append_mode(new_config, Fileobj) - - with create_transfer_manager(self, new_config) as manager: - future = manager.download( - bucket=Bucket, - key=Key, - fileobj=Fileobj, - extra_args=ExtraArgs, - subscribers=subscribers, - ) - return future.result() - - -def bucket_download_fileobj( - self, Key, Fileobj, ExtraArgs=None, Callback=None, Config=None -): - """Download an object from this bucket to a file-like-object. - - The file-like object must be in binary mode. - - This is a managed transfer which will perform a multipart download in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - bucket = s3.Bucket('amzn-s3-demo-bucket') - - with open('filename', 'wb') as data: - bucket.download_fileobj('mykey', data) - - :type Fileobj: a file-like object - :param Fileobj: A file-like object to download into. At a minimum, it must - implement the `write` method and must accept bytes. - - :type Key: str - :param Key: The name of the key to download from. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed download arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the download. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - download. - """ - return self.meta.client.download_fileobj( - Bucket=self.name, - Key=Key, - Fileobj=Fileobj, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) - - -def object_download_fileobj( - self, Fileobj, ExtraArgs=None, Callback=None, Config=None -): - """Download this object from S3 to a file-like object. - - The file-like object must be in binary mode. - - This is a managed transfer which will perform a multipart download in - multiple threads if necessary. - - Usage:: - - import boto3 - s3 = boto3.resource('s3') - bucket = s3.Bucket('amzn-s3-demo-bucket') - obj = bucket.Object('mykey') - - with open('filename', 'wb') as data: - obj.download_fileobj(data) - - :type Fileobj: a file-like object - :param Fileobj: A file-like object to download into. At a minimum, it must - implement the `write` method and must accept bytes. - - :type ExtraArgs: dict - :param ExtraArgs: Extra arguments that may be passed to the - client operation. For allowed download arguments see - :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`. - - :type Callback: function - :param Callback: A method which takes a number of bytes transferred to - be periodically called during the download. - - :type Config: boto3.s3.transfer.TransferConfig - :param Config: The transfer configuration to be used when performing the - download. 
- """ - return self.meta.client.download_fileobj( - Bucket=self.bucket_name, - Key=self.key, - Fileobj=Fileobj, - ExtraArgs=ExtraArgs, - Callback=Callback, - Config=Config, - ) diff --git a/venv/Lib/site-packages/boto3/s3/transfer.py b/venv/Lib/site-packages/boto3/s3/transfer.py deleted file mode 100644 index 9880ec3..0000000 --- a/venv/Lib/site-packages/boto3/s3/transfer.py +++ /dev/null @@ -1,516 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Abstractions over S3's upload/download operations. - -This module provides high level abstractions for efficient -uploads/downloads. It handles several things for the user: - -* Automatically switching to multipart transfers when - a file is over a specific size threshold -* Uploading/downloading a file in parallel -* Progress callbacks to monitor transfers -* Retries. While botocore handles retries for streaming uploads, - it is not possible for it to handle retries for streaming - downloads. This module handles retries for both cases so - you don't need to implement any retry logic yourself. - -This module has a reasonable set of defaults. It also allows you -to configure many aspects of the transfer process including: - -* Multipart threshold size -* Max parallel downloads -* Socket timeouts -* Retry amounts - -There is no support for s3->s3 multipart copies at this -time. - - -.. _ref_s3transfer_usage: - -Usage -===== - -The simplest way to use this module is: - -.. code-block:: python - - client = boto3.client('s3', 'us-west-2') - transfer = S3Transfer(client) - # Upload /tmp/myfile to s3://bucket/key - transfer.upload_file('/tmp/myfile', 'bucket', 'key') - - # Download s3://bucket/key to /tmp/myfile - transfer.download_file('bucket', 'key', '/tmp/myfile') - -The ``upload_file`` and ``download_file`` methods also accept -``**kwargs``, which will be forwarded through to the corresponding -client operation. Here are a few examples using ``upload_file``:: - - # Making the object public - transfer.upload_file('/tmp/myfile', 'bucket', 'key', - extra_args={'ACL': 'public-read'}) - - # Setting metadata - transfer.upload_file('/tmp/myfile', 'bucket', 'key', - extra_args={'Metadata': {'a': 'b', 'c': 'd'}}) - - # Setting content type - transfer.upload_file('/tmp/myfile.json', 'bucket', 'key', - extra_args={'ContentType': "application/json"}) - - -The ``S3Transfer`` class also supports progress callbacks so you can -provide transfer progress to users. Both the ``upload_file`` and -``download_file`` methods take an optional ``callback`` parameter. -Here's an example of how to print a simple progress percentage -to the user: - -.. code-block:: python - - class ProgressPercentage(object): - def __init__(self, filename): - self._filename = filename - self._size = float(os.path.getsize(filename)) - self._seen_so_far = 0 - self._lock = threading.Lock() - - def __call__(self, bytes_amount): - # To simplify we'll assume this is hooked up - # to a single filename. 
- with self._lock: - self._seen_so_far += bytes_amount - percentage = (self._seen_so_far / self._size) * 100 - sys.stdout.write( - "\r%s %s / %s (%.2f%%)" % ( - self._filename, self._seen_so_far, self._size, - percentage)) - sys.stdout.flush() - - - transfer = S3Transfer(boto3.client('s3', 'us-west-2')) - # Upload /tmp/myfile to s3://bucket/key and print upload progress. - transfer.upload_file('/tmp/myfile', 'bucket', 'key', - callback=ProgressPercentage('/tmp/myfile')) - - - -You can also provide a TransferConfig object to the S3Transfer -object that gives you more fine grained control over the -transfer. For example: - -.. code-block:: python - - client = boto3.client('s3', 'us-west-2') - config = TransferConfig( - multipart_threshold=8 * 1024 * 1024, - max_concurrency=10, - num_download_attempts=10, - ) - transfer = S3Transfer(client, config) - transfer.upload_file('/tmp/foo', 'bucket', 'key') - - -""" - -import logging -import threading -from os import PathLike, fspath, getpid - -from botocore.compat import HAS_CRT -from botocore.exceptions import ClientError, MissingDependencyException -from s3transfer.exceptions import ( - RetriesExceededError as S3TransferRetriesExceededError, -) -from s3transfer.futures import NonThreadedExecutor -from s3transfer.manager import TransferConfig as S3TransferConfig -from s3transfer.manager import TransferManager -from s3transfer.subscribers import BaseSubscriber -from s3transfer.utils import OSUtils - -import boto3.s3.constants as constants -from boto3.compat import TRANSFER_CONFIG_SUPPORTS_CRT -from boto3.exceptions import ( - RetriesExceededError, - S3UploadFailedError, -) - -if HAS_CRT: - import awscrt.s3 - - from boto3.crt import create_crt_transfer_manager - -KB = 1024 -MB = KB * KB - -logger = logging.getLogger(__name__) - - -def create_transfer_manager(client, config, osutil=None): - """Creates a transfer manager based on configuration - - :type client: boto3.client - :param client: The S3 client to use - - :type config: boto3.s3.transfer.TransferConfig - :param config: The transfer config to use - - :type osutil: s3transfer.utils.OSUtils - :param osutil: The os utility to use - - :rtype: s3transfer.manager.TransferManager - :returns: A transfer manager based on parameters provided - """ - if _should_use_crt(config): - crt_transfer_manager = create_crt_transfer_manager(client, config) - if crt_transfer_manager is not None: - logger.debug( - "Using CRT client. pid: %s, thread: %s", - getpid(), - threading.get_ident(), - ) - return crt_transfer_manager - - # If we don't resolve something above, fallback to the default. - logger.debug( - "Using default client. pid: %s, thread: %s", - getpid(), - threading.get_ident(), - ) - return _create_default_transfer_manager(client, config, osutil) - - -def _should_use_crt(config): - # This feature requires awscrt>=0.19.18 - has_min_crt = HAS_CRT and has_minimum_crt_version((0, 19, 18)) - is_optimized_instance = has_min_crt and awscrt.s3.is_optimized_for_system() - pref_transfer_client = config.preferred_transfer_client.lower() - - if ( - pref_transfer_client == constants.CRT_TRANSFER_CLIENT - and not has_min_crt - ): - msg = ( - "CRT transfer client is configured but is missing minimum CRT " - f"version. 
CRT installed: {HAS_CRT}" - ) - if HAS_CRT: - msg += f", with version: {awscrt.__version__}" - raise MissingDependencyException(msg=msg) - - if ( - is_optimized_instance - and pref_transfer_client == constants.AUTO_RESOLVE_TRANSFER_CLIENT - ) or pref_transfer_client == constants.CRT_TRANSFER_CLIENT: - logger.debug( - "Attempting to use CRTTransferManager. Config settings may be ignored." - ) - return True - - logger.debug( - "Opting out of CRT Transfer Manager. " - "Preferred client: %s, CRT available: %s, Instance Optimized: %s", - pref_transfer_client, - HAS_CRT, - is_optimized_instance, - ) - return False - - -def has_minimum_crt_version(minimum_version): - """Not intended for use outside boto3.""" - if not HAS_CRT: - return False - - crt_version_str = awscrt.__version__ - try: - crt_version_ints = map(int, crt_version_str.split(".")) - crt_version_tuple = tuple(crt_version_ints) - except (TypeError, ValueError): - return False - - return crt_version_tuple >= minimum_version - - -def _create_default_transfer_manager(client, config, osutil): - """Create the default TransferManager implementation for s3transfer.""" - executor_cls = None - if not config.use_threads: - executor_cls = NonThreadedExecutor - return TransferManager(client, config, osutil, executor_cls) - - -class TransferConfig(S3TransferConfig): - ALIAS = { - 'max_concurrency': 'max_request_concurrency', - 'max_io_queue': 'max_io_queue_size', - } - DEFAULTS = { - 'multipart_threshold': 8 * MB, - 'max_concurrency': 10, - 'max_request_concurrency': 10, - 'multipart_chunksize': 8 * MB, - 'num_download_attempts': 5, - 'max_io_queue': 100, - 'max_io_queue_size': 100, - 'io_chunksize': 256 * KB, - 'use_threads': True, - 'max_bandwidth': None, - 'preferred_transfer_client': constants.AUTO_RESOLVE_TRANSFER_CLIENT, - } - - def __init__( - self, - multipart_threshold=None, - max_concurrency=None, - multipart_chunksize=None, - num_download_attempts=None, - max_io_queue=None, - io_chunksize=None, - use_threads=None, - max_bandwidth=None, - preferred_transfer_client=None, - ): - """Configuration object for managed S3 transfers - - :param multipart_threshold: The transfer size threshold for which - multipart uploads, downloads, and copies will automatically be - triggered. - - :param max_concurrency: The maximum number of threads that will be - making requests to perform a transfer. If ``use_threads`` is - set to ``False``, the value provided is ignored as the transfer - will only ever use the current thread. - - :param multipart_chunksize: The partition size of each part for a - multipart transfer. - - :param num_download_attempts: The number of download attempts that - will be retried upon errors with downloading an object in S3. - Note that these retries account for errors that occur when - streaming down the data from s3 (i.e. socket errors and read - timeouts that occur after receiving an OK response from s3). - Other retryable exceptions such as throttling errors and 5xx - errors are already retried by botocore (this default is 5). This - does not take into account the number of exceptions retried by - botocore. Note: This value is ignored when resolved transfer - manager type is CRTTransferManager. - - :param max_io_queue: The maximum amount of read parts that can be - queued in memory to be written for a download. The size of each - of these read parts is at most the size of ``io_chunksize``. - Note: This value is ignored when resolved transfer manager type - is CRTTransferManager. 
- - :param io_chunksize: The max size of each chunk in the io queue. - Currently, this is size used when ``read`` is called on the - downloaded stream as well. Note: This value is ignored when - resolved transfer manager type is CRTTransferManager. - - :param use_threads: If True, threads will be used when performing - S3 transfers. If False, no threads will be used in - performing transfers; all logic will be run in the current thread. - Note: This value is ignored when resolved transfer manager type is - CRTTransferManager. - - :param max_bandwidth: The maximum bandwidth that will be consumed - in uploading and downloading file content. The value is an integer - in terms of bytes per second. Note: This value is ignored when - resolved transfer manager type is CRTTransferManager. - - :param preferred_transfer_client: String specifying preferred transfer - client for transfer operations. - - Current supported settings are: - * auto (default) - Use the CRTTransferManager when calls - are made with supported environment and settings. - * classic - Only use the origin S3TransferManager with - requests. Disables possible CRT upgrade on requests. - * crt - Only use the CRTTransferManager with requests. - """ - init_args = { - 'multipart_threshold': multipart_threshold, - 'max_concurrency': max_concurrency, - 'multipart_chunksize': multipart_chunksize, - 'num_download_attempts': num_download_attempts, - 'max_io_queue': max_io_queue, - 'io_chunksize': io_chunksize, - 'use_threads': use_threads, - 'max_bandwidth': max_bandwidth, - 'preferred_transfer_client': preferred_transfer_client, - } - resolved = self._resolve_init_args(init_args) - super().__init__( - multipart_threshold=resolved['multipart_threshold'], - max_request_concurrency=resolved['max_concurrency'], - multipart_chunksize=resolved['multipart_chunksize'], - num_download_attempts=resolved['num_download_attempts'], - max_io_queue_size=resolved['max_io_queue'], - io_chunksize=resolved['io_chunksize'], - max_bandwidth=resolved['max_bandwidth'], - ) - # Some of the argument names are not the same as the inherited - # S3TransferConfig so we add aliases so you can still access the - # old version of the names. - for alias in self.ALIAS: - setattr( - self, - alias, - object.__getattribute__(self, self.ALIAS[alias]), - ) - self.use_threads = resolved['use_threads'] - self.preferred_transfer_client = resolved['preferred_transfer_client'] - - def __setattr__(self, name, value): - # If the alias name is used, make sure we set the name that it points - # to as that is what actually is used in governing the TransferManager. - if name in self.ALIAS: - super().__setattr__(self.ALIAS[name], value) - # Always set the value of the actual name provided. 
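The TransferConfig constructor documented above exposes the tuning knobs for managed transfers, and its ALIAS table maps the boto3-level names (max_concurrency, max_io_queue) onto the underlying s3transfer names. A small sketch of passing an explicit configuration to a transfer call; the values are illustrative, not recommendations:

.. code-block:: python

    import boto3
    from boto3.s3.transfer import TransferConfig

    config = TransferConfig(
        multipart_threshold=16 * 1024 * 1024,  # switch to multipart above 16 MB
        multipart_chunksize=16 * 1024 * 1024,  # 16 MB parts
        max_concurrency=4,                     # at most 4 worker threads
        use_threads=True,
    )

    # The boto3 alias and the underlying s3transfer name track the same value.
    assert config.max_request_concurrency == config.max_concurrency == 4

    s3 = boto3.client('s3')
    # Placeholder file, bucket, and key names.
    s3.upload_file('/tmp/bigfile.bin', 'amzn-s3-demo-bucket', 'bigfile.bin',
                   Config=config)

Per the _should_use_crt logic above, preferred_transfer_client='classic' opts out of the CRT-based manager entirely, while 'crt' insists on it and raises MissingDependencyException when a sufficiently recent awscrt is not installed.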
- super().__setattr__(name, value) - - def __getattribute__(self, item): - value = object.__getattribute__(self, item) - if not TRANSFER_CONFIG_SUPPORTS_CRT: - return value - defaults = object.__getattribute__(self, 'DEFAULTS') - if item not in defaults: - return value - if value is self.UNSET_DEFAULT: - return defaults[item] - return value - - def _resolve_init_args(self, init_args): - resolved = {} - for init_arg, val in init_args.items(): - if val is not None: - resolved[init_arg] = val - elif TRANSFER_CONFIG_SUPPORTS_CRT: - resolved[init_arg] = self.UNSET_DEFAULT - else: - resolved[init_arg] = self.DEFAULTS[init_arg] - return resolved - - -class S3Transfer: - ALLOWED_DOWNLOAD_ARGS = TransferManager.ALLOWED_DOWNLOAD_ARGS - ALLOWED_UPLOAD_ARGS = TransferManager.ALLOWED_UPLOAD_ARGS - ALLOWED_COPY_ARGS = TransferManager.ALLOWED_COPY_ARGS - - def __init__(self, client=None, config=None, osutil=None, manager=None): - if not client and not manager: - raise ValueError( - 'Either a boto3.Client or s3transfer.manager.TransferManager ' - 'must be provided' - ) - if manager and any([client, config, osutil]): - raise ValueError( - 'Manager cannot be provided with client, config, ' - 'nor osutil. These parameters are mutually exclusive.' - ) - if config is None: - config = TransferConfig() - if osutil is None: - osutil = OSUtils() - if manager: - self._manager = manager - else: - self._manager = create_transfer_manager(client, config, osutil) - - def upload_file( - self, filename, bucket, key, callback=None, extra_args=None - ): - """Upload a file to an S3 object. - - Variants have also been injected into S3 client, Bucket and Object. - You don't have to use S3Transfer.upload_file() directly. - - .. seealso:: - :py:meth:`S3.Client.upload_file` - :py:meth:`S3.Client.upload_fileobj` - """ - if isinstance(filename, PathLike): - filename = fspath(filename) - if not isinstance(filename, str): - raise ValueError('Filename must be a string or a path-like object') - - subscribers = self._get_subscribers(callback) - future = self._manager.upload( - filename, bucket, key, extra_args, subscribers - ) - try: - future.result() - # If a client error was raised, add the backwards compatibility layer - # that raises a S3UploadFailedError. These specific errors were only - # ever thrown for upload_parts but now can be thrown for any related - # client error. - except ClientError as e: - raise S3UploadFailedError( - f"Failed to upload {filename} to {bucket}/{key}: {e}" - ) - - def download_file( - self, bucket, key, filename, extra_args=None, callback=None - ): - """Download an S3 object to a file. - - Variants have also been injected into S3 client, Bucket and Object. - You don't have to use S3Transfer.download_file() directly. - - .. seealso:: - :py:meth:`S3.Client.download_file` - :py:meth:`S3.Client.download_fileobj` - """ - if isinstance(filename, PathLike): - filename = fspath(filename) - if not isinstance(filename, str): - raise ValueError('Filename must be a string or a path-like object') - - subscribers = self._get_subscribers(callback) - future = self._manager.download( - bucket, key, filename, extra_args, subscribers - ) - try: - future.result() - # This is for backwards compatibility where when retries are - # exceeded we need to throw the same error from boto3 instead of - # s3transfer's built in RetriesExceededError as current users are - # catching the boto3 one instead of the s3transfer exception to do - # their own retries. 
- except S3TransferRetriesExceededError as e: - raise RetriesExceededError(e.last_exception) - - def _get_subscribers(self, callback): - if not callback: - return None - return [ProgressCallbackInvoker(callback)] - - def __enter__(self): - return self - - def __exit__(self, *args): - self._manager.__exit__(*args) - - -class ProgressCallbackInvoker(BaseSubscriber): - """A back-compat wrapper to invoke a provided callback via a subscriber - - :param callback: A callable that takes a single positional argument for - how many bytes were transferred. - """ - - def __init__(self, callback): - self._callback = callback - - def on_progress(self, bytes_transferred, **kwargs): - self._callback(bytes_transferred) diff --git a/venv/Lib/site-packages/boto3/session.py b/venv/Lib/site-packages/boto3/session.py deleted file mode 100644 index a990f46..0000000 --- a/venv/Lib/site-packages/boto3/session.py +++ /dev/null @@ -1,574 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import copy -import os - -import botocore.session -from botocore.client import Config -from botocore.exceptions import ( - DataNotFoundError, - NoCredentialsError, - UnknownServiceError, -) - -import boto3 -import boto3.utils -from boto3.exceptions import ResourceNotExistsError, UnknownAPIVersionError - -from .resources.factory import ResourceFactory - - -class Session: - """ - A session stores configuration state and allows you to create service - clients and resources. - - :type aws_access_key_id: string - :param aws_access_key_id: AWS access key ID - :type aws_secret_access_key: string - :param aws_secret_access_key: AWS secret access key - :type aws_session_token: string - :param aws_session_token: AWS temporary session token - :type region_name: string - :param region_name: Default region when creating new connections - :type botocore_session: botocore.session.Session - :param botocore_session: Use this Botocore session instead of creating - a new default one. - :type profile_name: string - :param profile_name: The name of a profile to use. If not given, then - the default profile is used. 
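The S3Transfer.upload_file and download_file methods deleted above translate transfer failures into boto3-level exceptions: a ClientError raised during an upload is re-raised as S3UploadFailedError, and exhausted streaming-download retries surface as boto3's RetriesExceededError. A hedged sketch of handling both around the equivalent injected client methods (placeholder paths, bucket, and key):

.. code-block:: python

    import boto3
    from boto3.exceptions import RetriesExceededError, S3UploadFailedError

    s3 = boto3.client('s3')

    try:
        s3.upload_file('/tmp/report.csv', 'amzn-s3-demo-bucket', 'report.csv')
    except S3UploadFailedError as exc:
        print(f'upload failed: {exc}')

    try:
        s3.download_file('amzn-s3-demo-bucket', 'report.csv', '/tmp/report.csv')
    except RetriesExceededError as exc:
        # Carries the last underlying s3transfer exception, as constructed above.
        print(f'download retries exhausted: {exc.last_exception}')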
- :type aws_account_id: string - :param aws_account_id: AWS account ID - """ - - def __init__( - self, - aws_access_key_id=None, - aws_secret_access_key=None, - aws_session_token=None, - region_name=None, - botocore_session=None, - profile_name=None, - aws_account_id=None, - ): - if botocore_session is not None: - self._session = botocore_session - else: - # Create a new default session - self._session = botocore.session.get_session() - - # Setup custom user-agent string if it isn't already customized - if self._session.user_agent_name == 'Botocore': - botocore_info = f'Botocore/{self._session.user_agent_version}' - if self._session.user_agent_extra: - self._session.user_agent_extra += f" {botocore_info}" - else: - self._session.user_agent_extra = botocore_info - self._session.user_agent_name = 'Boto3' - self._session.user_agent_version = boto3.__version__ - - if profile_name is not None: - self._session.set_config_variable('profile', profile_name) - - credentials_kwargs = { - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_account_id": aws_account_id, - } - - if any(credentials_kwargs.values()): - if self._account_id_set_without_credentials(**credentials_kwargs): - raise NoCredentialsError() - - if aws_account_id is None: - del credentials_kwargs["aws_account_id"] - - self._session.set_credentials(*credentials_kwargs.values()) - - if region_name is not None: - self._session.set_config_variable('region', region_name) - - self.resource_factory = ResourceFactory( - self._session.get_component('event_emitter') - ) - self._setup_loader() - self._register_default_handlers() - - def __repr__(self): - return '{}(region_name={})'.format( - self.__class__.__name__, - repr(self._session.get_config_variable('region')), - ) - - @property - def profile_name(self): - """ - The **read-only** profile name. - """ - return self._session.profile or 'default' - - @property - def region_name(self): - """ - The **read-only** region name. - """ - return self._session.get_config_variable('region') - - @property - def events(self): - """ - The event emitter for a session - """ - return self._session.get_component('event_emitter') - - @property - def available_profiles(self): - """ - The profiles available to the session credentials - """ - return self._session.available_profiles - - def _setup_loader(self): - """ - Setup loader paths so that we can load resources. - """ - self._loader = self._session.get_component('data_loader') - self._loader.search_paths.append( - os.path.join(os.path.dirname(__file__), 'data') - ) - - def get_available_services(self): - """ - Get a list of available services that can be loaded as low-level - clients via :py:meth:`Session.client`. - - :rtype: list - :return: List of service names - """ - return self._session.get_available_services() - - def get_available_resources(self): - """ - Get a list of available services that can be loaded as resource - clients via :py:meth:`Session.resource`. 
- - :rtype: list - :return: List of service names - """ - return self._loader.list_available_services(type_name='resources-1') - - def get_available_partitions(self): - """Lists the available partitions - - :rtype: list - :return: Returns a list of partition names (e.g., ["aws", "aws-cn"]) - """ - return self._session.get_available_partitions() - - def get_available_regions( - self, service_name, partition_name='aws', allow_non_regional=False - ): - """Lists the region and endpoint names of a particular partition. - - The list of regions returned by this method are regions that are - explicitly known by the client to exist and is not comprehensive. A - region not returned in this list may still be available for the - provided service. - - :type service_name: string - :param service_name: Name of a service to list endpoint for (e.g., s3). - - :type partition_name: string - :param partition_name: Name of the partition to limit endpoints to. - (e.g., aws for the public AWS endpoints, aws-cn for AWS China - endpoints, aws-us-gov for AWS GovCloud (US) Endpoints, etc.) - - :type allow_non_regional: bool - :param allow_non_regional: Set to True to include endpoints that are - not regional endpoints (e.g., s3-external-1, - fips-us-gov-west-1, etc). - - :return: Returns a list of endpoint names (e.g., ["us-east-1"]). - """ - return self._session.get_available_regions( - service_name=service_name, - partition_name=partition_name, - allow_non_regional=allow_non_regional, - ) - - def get_credentials(self): - """ - Return the :class:`botocore.credentials.Credentials` object - associated with this session. If the credentials have not - yet been loaded, this will attempt to load them. If they - have already been loaded, this will return the cached - credentials. - """ - return self._session.get_credentials() - - def get_partition_for_region(self, region_name): - """Lists the partition name of a particular region. - - :type region_name: string - :param region_name: Name of the region to list partition for (e.g., - us-east-1). - - :rtype: string - :return: Returns the respective partition name (e.g., aws). - """ - return self._session.get_partition_for_region(region_name) - - def client( - self, - service_name, - region_name=None, - api_version=None, - use_ssl=True, - verify=None, - endpoint_url=None, - aws_access_key_id=None, - aws_secret_access_key=None, - aws_session_token=None, - config=None, - aws_account_id=None, - ): - """ - Create a low-level service client by name. - - :type service_name: string - :param service_name: The name of a service, e.g. 's3' or 'ec2'. You - can get a list of available services via - :py:meth:`get_available_services`. - - :type region_name: string - :param region_name: The name of the region associated with the client. - A client is associated with a single region. - - :type api_version: string - :param api_version: The API version to use. By default, botocore will - use the latest API version when creating a client. You only need - to specify this parameter if you want to use a previous API version - of the client. - - :type use_ssl: boolean - :param use_ssl: Whether or not to use SSL. By default, SSL is used. - Note that not all services support non-ssl connections. - - :type verify: boolean/string - :param verify: Whether or not to verify SSL certificates. By default - SSL certificates are verified. You can provide the following - values: - - * False - do not validate SSL certificates. 
SSL will still be - used (unless use_ssl is False), but SSL certificates - will not be verified. - * path/to/cert/bundle.pem - A filename of the CA cert bundle to - uses. You can specify this argument if you want to use a - different CA cert bundle than the one used by botocore. - - :type endpoint_url: string - :param endpoint_url: The complete URL to use for the constructed - client. Normally, botocore will automatically construct the - appropriate URL to use when communicating with a service. You - can specify a complete URL (including the "http/https" scheme) - to override this behavior. If this value is provided, - then ``use_ssl`` is ignored. - - :type aws_access_key_id: string - :param aws_access_key_id: The access key to use when creating - the client. This is entirely optional, and if not provided, - the credentials configured for the session will automatically - be used. You only need to provide this argument if you want - to override the credentials used for this specific client. - - :type aws_secret_access_key: string - :param aws_secret_access_key: The secret key to use when creating - the client. Same semantics as aws_access_key_id above. - - :type aws_session_token: string - :param aws_session_token: The session token to use when creating - the client. Same semantics as aws_access_key_id above. - - :type config: botocore.client.Config - :param config: Advanced client configuration options. If region_name - is specified in the client config, its value will take precedence - over environment variables and configuration values, but not over - a region_name value passed explicitly to the method. See - `botocore config documentation - `_ - for more details. - - :type aws_account_id: string - :param aws_account_id: The account id to use when creating - the client. Same semantics as aws_access_key_id above. - - :return: Service client instance - - """ - create_client_kwargs = { - 'region_name': region_name, - 'api_version': api_version, - 'use_ssl': use_ssl, - 'verify': verify, - 'endpoint_url': endpoint_url, - 'aws_access_key_id': aws_access_key_id, - 'aws_secret_access_key': aws_secret_access_key, - 'aws_session_token': aws_session_token, - 'config': config, - 'aws_account_id': aws_account_id, - } - if aws_account_id is None: - # Remove aws_account_id for arbitrary - # botocore version mismatches in AWS Lambda. - del create_client_kwargs['aws_account_id'] - - return self._session.create_client( - service_name, **create_client_kwargs - ) - - def resource( - self, - service_name, - region_name=None, - api_version=None, - use_ssl=True, - verify=None, - endpoint_url=None, - aws_access_key_id=None, - aws_secret_access_key=None, - aws_session_token=None, - config=None, - ): - """ - Create a resource service client by name. - - :type service_name: string - :param service_name: The name of a service, e.g. 's3' or 'ec2'. You - can get a list of available services via - :py:meth:`get_available_resources`. - - :type region_name: string - :param region_name: The name of the region associated with the client. - A client is associated with a single region. - - :type api_version: string - :param api_version: The API version to use. By default, botocore will - use the latest API version when creating a client. You only need - to specify this parameter if you want to use a previous API version - of the client. - - :type use_ssl: boolean - :param use_ssl: Whether or not to use SSL. By default, SSL is used. - Note that not all services support non-ssl connections. 
- - :type verify: boolean/string - :param verify: Whether or not to verify SSL certificates. By default - SSL certificates are verified. You can provide the following - values: - - * False - do not validate SSL certificates. SSL will still be - used (unless use_ssl is False), but SSL certificates - will not be verified. - * path/to/cert/bundle.pem - A filename of the CA cert bundle to - uses. You can specify this argument if you want to use a - different CA cert bundle than the one used by botocore. - - :type endpoint_url: string - :param endpoint_url: The complete URL to use for the constructed - client. Normally, botocore will automatically construct the - appropriate URL to use when communicating with a service. You - can specify a complete URL (including the "http/https" scheme) - to override this behavior. If this value is provided, - then ``use_ssl`` is ignored. - - :type aws_access_key_id: string - :param aws_access_key_id: The access key to use when creating - the client. This is entirely optional, and if not provided, - the credentials configured for the session will automatically - be used. You only need to provide this argument if you want - to override the credentials used for this specific client. - - :type aws_secret_access_key: string - :param aws_secret_access_key: The secret key to use when creating - the client. Same semantics as aws_access_key_id above. - - :type aws_session_token: string - :param aws_session_token: The session token to use when creating - the client. Same semantics as aws_access_key_id above. - - :type config: botocore.client.Config - :param config: Advanced client configuration options. If region_name - is specified in the client config, its value will take precedence - over environment variables and configuration values, but not over - a region_name value passed explicitly to the method. If - user_agent_extra is specified in the client config, it overrides - the default user_agent_extra provided by the resource API. See - `botocore config documentation - `_ - for more details. - - :return: Subclass of :py:class:`~boto3.resources.base.ServiceResource` - """ - try: - resource_model = self._loader.load_service_model( - service_name, 'resources-1', api_version - ) - except UnknownServiceError: - available = self.get_available_resources() - has_low_level_client = ( - service_name in self.get_available_services() - ) - raise ResourceNotExistsError( - service_name, available, has_low_level_client - ) - except DataNotFoundError: - # This is because we've provided an invalid API version. - available_api_versions = self._loader.list_api_versions( - service_name, 'resources-1' - ) - raise UnknownAPIVersionError( - service_name, api_version, ', '.join(available_api_versions) - ) - - if api_version is None: - # Even though botocore's load_service_model() can handle - # using the latest api_version if not provided, we need - # to track this api_version in boto3 in order to ensure - # we're pairing a resource model with a client model - # of the same API version. It's possible for the latest - # API version of a resource model in boto3 to not be - # the same API version as a service model in botocore. - # So we need to look up the api_version if one is not - # provided to ensure we load the same API version of the - # client. - # - # Note: This is relying on the fact that - # loader.load_service_model(..., api_version=None) - # and loader.determine_latest_version(..., 'resources-1') - # both load the same api version of the file. 
- api_version = self._loader.determine_latest_version( - service_name, 'resources-1' - ) - - # Creating a new resource instance requires the low-level client - # and service model, the resource version and resource JSON data. - # We pass these to the factory and get back a class, which is - # instantiated on top of the low-level client. - if config is not None: - if config.user_agent_extra is None: - config = copy.deepcopy(config) - config.user_agent_extra = 'Resource' - else: - config = Config(user_agent_extra='Resource') - client = self.client( - service_name, - region_name=region_name, - api_version=api_version, - use_ssl=use_ssl, - verify=verify, - endpoint_url=endpoint_url, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - config=config, - ) - service_model = client.meta.service_model - - # Create a ServiceContext object to serve as a reference to - # important read-only information about the general service. - service_context = boto3.utils.ServiceContext( - service_name=service_name, - service_model=service_model, - resource_json_definitions=resource_model['resources'], - service_waiter_model=boto3.utils.LazyLoadedWaiterModel( - self._session, service_name, api_version - ), - ) - - # Create the service resource class. - cls = self.resource_factory.load_from_definition( - resource_name=service_name, - single_resource_json_definition=resource_model['service'], - service_context=service_context, - ) - - return cls(client=client) - - def _register_default_handlers(self): - # S3 customizations - self._session.register( - 'creating-client-class.s3', - boto3.utils.lazy_call( - 'boto3.s3.inject.inject_s3_transfer_methods' - ), - ) - self._session.register( - 'creating-resource-class.s3.Bucket', - boto3.utils.lazy_call('boto3.s3.inject.inject_bucket_methods'), - ) - self._session.register( - 'creating-resource-class.s3.Object', - boto3.utils.lazy_call('boto3.s3.inject.inject_object_methods'), - ) - self._session.register( - 'creating-resource-class.s3.ObjectSummary', - boto3.utils.lazy_call( - 'boto3.s3.inject.inject_object_summary_methods' - ), - ) - - # DynamoDb customizations - self._session.register( - 'creating-resource-class.dynamodb', - boto3.utils.lazy_call( - 'boto3.dynamodb.transform.register_high_level_interface' - ), - unique_id='high-level-dynamodb', - ) - self._session.register( - 'creating-resource-class.dynamodb.Table', - boto3.utils.lazy_call( - 'boto3.dynamodb.table.register_table_methods' - ), - unique_id='high-level-dynamodb-table', - ) - - # EC2 Customizations - self._session.register( - 'creating-resource-class.ec2.ServiceResource', - boto3.utils.lazy_call('boto3.ec2.createtags.inject_create_tags'), - ) - - self._session.register( - 'creating-resource-class.ec2.Instance', - boto3.utils.lazy_call( - 'boto3.ec2.deletetags.inject_delete_tags', - event_emitter=self.events, - ), - ) - - def _account_id_set_without_credentials( - self, - *, - aws_account_id, - aws_access_key_id, - aws_secret_access_key, - **kwargs, - ): - if aws_account_id is None: - return False - elif aws_access_key_id is None or aws_secret_access_key is None: - return True - return False diff --git a/venv/Lib/site-packages/boto3/utils.py b/venv/Lib/site-packages/boto3/utils.py deleted file mode 100644 index 289bbdf..0000000 --- a/venv/Lib/site-packages/boto3/utils.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
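The boto3/session.py hunk above describes how a Session carries credentials, a region, and a profile, and how client() and resource() reuse that state. A brief sketch of those documented entry points; the profile and region names are placeholders:

.. code-block:: python

    import boto3

    # Both arguments are optional; omitting them falls back to the normal
    # credential and region resolution described in the docstrings above.
    session = boto3.session.Session(profile_name='default',
                                    region_name='us-east-1')

    print(session.region_name)                      # read-only property
    print(session.get_available_regions('s3')[:3])  # a few known region names

    s3_client = session.client('s3')      # low-level client
    s3_resource = session.resource('s3')  # resource layer built on a client
    bucket = s3_resource.Bucket('amzn-s3-demo-bucket')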
-# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# https://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from collections import namedtuple -from importlib import import_module - -_ServiceContext = namedtuple( - 'ServiceContext', - [ - 'service_name', - 'service_model', - 'service_waiter_model', - 'resource_json_definitions', - ], -) - - -class ServiceContext(_ServiceContext): - """Provides important service-wide, read-only information about a service - - :type service_name: str - :param service_name: The name of the service - - :type service_model: :py:class:`botocore.model.ServiceModel` - :param service_model: The model of the service. - - :type service_waiter_model: :py:class:`botocore.waiter.WaiterModel` or - a waiter model-like object such as - :py:class:`boto3.utils.LazyLoadedWaiterModel` - :param service_waiter_model: The waiter model of the service. - - :type resource_json_definitions: dict - :param resource_json_definitions: The loaded json models of all resource - shapes for a service. It is equivalient of loading a - ``resource-1.json`` and retrieving the value at the key "resources". - """ - - pass - - -def lazy_call(full_name, **kwargs): - parent_kwargs = kwargs - - def _handler(**kwargs): - module, function_name = full_name.rsplit('.', 1) - module = import_module(module) - kwargs.update(parent_kwargs) - return getattr(module, function_name)(**kwargs) - - return _handler - - -def inject_attribute(class_attributes, name, value): - if name in class_attributes: - raise RuntimeError( - f'Cannot inject class attribute "{name}", attribute ' - f'already exists in class dict.' - ) - else: - class_attributes[name] = value - - -class LazyLoadedWaiterModel: - """A lazily loaded waiter model - - This does not load the service waiter model until an attempt is made - to retrieve the waiter model for a specific waiter. This is helpful - in docstring generation where we do not need to actually need to grab - the waiter-2.json until it is accessed through a ``get_waiter`` call - when the docstring is generated/accessed. - """ - - def __init__(self, bc_session, service_name, api_version): - self._session = bc_session - self._service_name = service_name - self._api_version = api_version - - def get_waiter(self, waiter_name): - return self._session.get_waiter_model( - self._service_name, self._api_version - ).get_waiter(waiter_name) diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/INSTALLER b/venv/Lib/site-packages/botocore-1.42.25.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/LICENSE.txt b/venv/Lib/site-packages/botocore-1.42.25.dist-info/LICENSE.txt deleted file mode 100644 index f433b1a..0000000 --- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/LICENSE.txt +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
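The boto3/utils.py hunk above closes out the vendored boto3 sources: lazy_call defers a module import until an event handler actually fires, and LazyLoadedWaiterModel similarly postpones loading waiter JSON. A minimal sketch of the lazy_call pattern, reusing the handler path that _register_default_handlers registers above (the by-hand invocation is only illustrative, since botocore's event system normally supplies the keyword arguments):

.. code-block:: python

    from boto3.utils import lazy_call

    # Building the handler is cheap: boto3.s3.inject is not imported yet.
    handler = lazy_call('boto3.s3.inject.inject_s3_transfer_methods')

    # The deferred import happens on the first call.  Calling the handler by
    # hand with an empty attribute dict shows the methods it would inject
    # during botocore's 'creating-client-class.s3' event.
    attrs = {}
    handler(class_attributes=attrs)
    print(sorted(attrs))  # the injected transfer methods (upload_file, ...)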
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/METADATA b/venv/Lib/site-packages/botocore-1.42.25.dist-info/METADATA deleted file mode 100644 index 88302da..0000000 --- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/METADATA +++ /dev/null @@ -1,151 +0,0 @@ -Metadata-Version: 2.1 -Name: botocore -Version: 1.42.25 -Summary: Low-level, data-driven core of boto 3. 
-Home-page: https://github.com/boto/botocore -Author: Amazon Web Services -License: Apache-2.0 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Requires-Python: >= 3.9 -License-File: LICENSE.txt -License-File: NOTICE -Requires-Dist: jmespath (<2.0.0,>=0.7.1) -Requires-Dist: python-dateutil (<3.0.0,>=2.1) -Requires-Dist: urllib3 (<1.27,>=1.25.4) ; python_version < "3.10" -Requires-Dist: urllib3 (!=2.2.0,<3,>=1.25.4) ; python_version >= "3.10" -Provides-Extra: crt -Requires-Dist: awscrt (==0.29.2) ; extra == 'crt' - -botocore -======== - -|Version| |Python| |License| - -A low-level interface to a growing number of Amazon Web Services. The -botocore package is the foundation for the -`AWS CLI `__ as well as -`boto3 `__. - -Botocore is maintained and published by `Amazon Web Services`_. - -Notices -------- - -On 2026-04-29, support for Python 3.9 will end for Botocore. This follows the -Python Software Foundation `end of support `__ -for the runtime which occurred on 2025-10-31. - -On 2025-04-22, support for Python 3.8 ended for Botocore. This follows the -Python Software Foundation `end of support `__ -for the runtime which occurred on 2024-10-07. - -For more information, see this `blog post `__. - -.. _`Amazon Web Services`: https://aws.amazon.com/what-is-aws/ -.. |Python| image:: https://img.shields.io/pypi/pyversions/botocore.svg?style=flat - :target: https://pypi.python.org/pypi/botocore/ - :alt: Python Versions -.. |Version| image:: http://img.shields.io/pypi/v/botocore.svg?style=flat - :target: https://pypi.python.org/pypi/botocore/ - :alt: Package Version -.. |License| image:: http://img.shields.io/pypi/l/botocore.svg?style=flat - :target: https://github.com/boto/botocore/blob/develop/LICENSE.txt - :alt: License - -Getting Started ---------------- -Assuming that you have Python and ``virtualenv`` installed, set up your environment and install the required dependencies like this or you can install the library using ``pip``: - -.. code-block:: sh - - $ git clone https://github.com/boto/botocore.git - $ cd botocore - $ python -m venv .venv - ... - $ source .venv/bin/activate - $ python -m pip install -r requirements.txt - $ python -m pip install -e . - -.. code-block:: sh - - $ pip install botocore - -Using Botocore -~~~~~~~~~~~~~~ -After installing botocore - -Next, set up credentials (in e.g. ``~/.aws/credentials``): - -.. code-block:: ini - - [default] - aws_access_key_id = YOUR_KEY - aws_secret_access_key = YOUR_SECRET - -Then, set up a default region (in e.g. ``~/.aws/config``): - -.. code-block:: ini - - [default] - region=us-east-1 - -Other credentials configuration method can be found `here `__ - -Then, from a Python interpreter: - -.. 
code-block:: python - - >>> import botocore.session - >>> session = botocore.session.get_session() - >>> client = session.create_client('ec2') - >>> print(client.describe_instances()) - - -Getting Help ------------- - -We use GitHub issues for tracking bugs and feature requests and have limited -bandwidth to address them. Please use these community resources for getting -help. Please note many of the same resources available for ``boto3`` are -applicable for ``botocore``: - -* Ask a question on `Stack Overflow `__ and tag it with `boto3 `__ -* Open a support ticket with `AWS Support `__ -* If it turns out that you may have found a bug, please `open an issue `__ - - -Contributing ------------- - -We value feedback and contributions from our community. Whether it's a bug report, new feature, correction, or additional documentation, we welcome your issues and pull requests. Please read through this `CONTRIBUTING `__ document before submitting any issues or pull requests to ensure we have all the necessary information to effectively respond to your contribution. - - -Maintenance and Support for SDK Major Versions ----------------------------------------------- - -Botocore was made generally available on 06/22/2015 and is currently in the full support phase of the availability life cycle. - -For information about maintenance and support for SDK major versions and their underlying dependencies, see the following in the AWS SDKs and Tools Reference Guide: - -* `AWS SDKs and Tools Maintenance Policy `__ -* `AWS SDKs and Tools Version Support Matrix `__ - - -More Resources --------------- - -* `NOTICE `__ -* `Changelog `__ -* `License `__ diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/NOTICE b/venv/Lib/site-packages/botocore-1.42.25.dist-info/NOTICE deleted file mode 100644 index edcc3cd..0000000 --- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/NOTICE +++ /dev/null @@ -1,60 +0,0 @@ -Botocore -Copyright 2012-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. - ----- - -Botocore includes vendorized parts of the requests python library for backwards compatibility. - -Requests License -================ - -Copyright 2013 Kenneth Reitz - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -Botocore includes vendorized parts of the urllib3 library for backwards compatibility. - -Urllib3 License -=============== - -This is the MIT license: http://www.opensource.org/licenses/mit-license.php - -Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt), -Modifications copyright 2012 Kenneth Reitz. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of this -software and associated documentation files (the "Software"), to deal in the Software -without restriction, including without limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons -to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or -substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR -PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE -FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. - -Bundle of CA Root Certificates -============================== - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the -Mozilla Public License, v. 2.0. If a copy of the MPL -was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. - -***** END LICENSE BLOCK ***** diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/RECORD b/venv/Lib/site-packages/botocore-1.42.25.dist-info/RECORD deleted file mode 100644 index 08ca47b..0000000 --- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/RECORD +++ /dev/null @@ -1,2003 +0,0 @@ -botocore-1.42.25.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -botocore-1.42.25.dist-info/LICENSE.txt,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 -botocore-1.42.25.dist-info/METADATA,sha256=5hIj29mEvijqqTL0qAddyWYEshDsxDglUP_w6N6VLmI,5869 -botocore-1.42.25.dist-info/NOTICE,sha256=HRxabz1oyxH0-tGvqGp0UNAobxXBdu8OoEjyVbRtlbA,2467 -botocore-1.42.25.dist-info/RECORD,, -botocore-1.42.25.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -botocore-1.42.25.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91 -botocore-1.42.25.dist-info/top_level.txt,sha256=IdlNr9dnwi3lQt66dKnShE5HBUhIqBFqJmVhm11aijk,9 -botocore/__init__.py,sha256=HeEf7LTXDjdbeGBDRQv3E9fbnG-jhil54SmUFle7z3s,8019 -botocore/__pycache__/__init__.cpython-312.pyc,, -botocore/__pycache__/args.cpython-312.pyc,, -botocore/__pycache__/auth.cpython-312.pyc,, -botocore/__pycache__/awsrequest.cpython-312.pyc,, -botocore/__pycache__/client.cpython-312.pyc,, -botocore/__pycache__/compat.cpython-312.pyc,, -botocore/__pycache__/compress.cpython-312.pyc,, -botocore/__pycache__/config.cpython-312.pyc,, -botocore/__pycache__/configloader.cpython-312.pyc,, -botocore/__pycache__/configprovider.cpython-312.pyc,, -botocore/__pycache__/context.cpython-312.pyc,, -botocore/__pycache__/credentials.cpython-312.pyc,, -botocore/__pycache__/discovery.cpython-312.pyc,, -botocore/__pycache__/endpoint.cpython-312.pyc,, -botocore/__pycache__/endpoint_provider.cpython-312.pyc,, -botocore/__pycache__/errorfactory.cpython-312.pyc,, -botocore/__pycache__/eventstream.cpython-312.pyc,, -botocore/__pycache__/exceptions.cpython-312.pyc,, -botocore/__pycache__/handlers.cpython-312.pyc,, -botocore/__pycache__/history.cpython-312.pyc,, -botocore/__pycache__/hooks.cpython-312.pyc,, -botocore/__pycache__/httpchecksum.cpython-312.pyc,, -botocore/__pycache__/httpsession.cpython-312.pyc,, 
-botocore/__pycache__/loaders.cpython-312.pyc,, -botocore/__pycache__/model.cpython-312.pyc,, -botocore/__pycache__/monitoring.cpython-312.pyc,, -botocore/__pycache__/paginate.cpython-312.pyc,, -botocore/__pycache__/parsers.cpython-312.pyc,, -botocore/__pycache__/plugin.cpython-312.pyc,, -botocore/__pycache__/regions.cpython-312.pyc,, -botocore/__pycache__/response.cpython-312.pyc,, -botocore/__pycache__/retryhandler.cpython-312.pyc,, -botocore/__pycache__/serialize.cpython-312.pyc,, -botocore/__pycache__/session.cpython-312.pyc,, -botocore/__pycache__/signers.cpython-312.pyc,, -botocore/__pycache__/stub.cpython-312.pyc,, -botocore/__pycache__/tokens.cpython-312.pyc,, -botocore/__pycache__/translate.cpython-312.pyc,, -botocore/__pycache__/useragent.cpython-312.pyc,, -botocore/__pycache__/utils.cpython-312.pyc,, -botocore/__pycache__/validate.cpython-312.pyc,, -botocore/__pycache__/waiter.cpython-312.pyc,, -botocore/args.py,sha256=EHFfRFzpg9CE2S8Hgx3sDpj48RBZwlgh24FAfqwsEcI,39011 -botocore/auth.py,sha256=T0m-kVIqo28grLAKRqY49GLed0lfF2rI2ZjRdGAvUgA,46062 -botocore/awsrequest.py,sha256=PvhLGpW6ziN2hGUnT_jDdhR_LGc1jh2NVPrdGfaFlOw,23178 -botocore/cacert.pem,sha256=nW1QIfzIoiMvzo60s_mC3EhCUtVVSTrFwqPL8ssZQ4o,266617 -botocore/client.py,sha256=kwQ0qbTN67ouIi2lHZVhlgl-cvQEWguunKWHrgWJLgg,54154 -botocore/compat.py,sha256=5PGk-Hh7OVi6_7U8BmEifsaTn-scP-ahWp6hdUAKJ5s,11769 -botocore/compress.py,sha256=Qxl9IGVCpZ2gQG5KE_iqWQgbXvj_I70jA5yNtgZL1iY,4533 -botocore/config.py,sha256=hByTAk2HKH-yj1nehNR3fsIl0h3Er22b1lulmhxXyNA,20255 -botocore/configloader.py,sha256=NTejI7b9UGUXBv2uKiPaXH19Lgl30LY5ujZkXRcFpHs,10039 -botocore/configprovider.py,sha256=RxILFRpW14PxPOaI_mlBpbgzjkeHysqgwdHRnWBFzRM,38092 -botocore/context.py,sha256=XQRDA0YPajIjb6zQres-QQNOtV0L7B6UDfBHrza4W9E,3782 -botocore/credentials.py,sha256=OPlVYcVTx-dtnabSPTCnedz7BAOiVMuYVi6S4SzAsOg,101908 -botocore/crt/__init__.py,sha256=kCXQL93gdg5yBQJOTp7YFLl9wYNy4tV_5TAyJq0asD0,1006 -botocore/crt/__pycache__/__init__.cpython-312.pyc,, -botocore/crt/__pycache__/auth.cpython-312.pyc,, -botocore/crt/auth.py,sha256=ReT3jB_J6H-0wD3fogG4uuyrwwka14d7UJZ5weujftU,25076 -botocore/data/_retry.json,sha256=0dIGY-kUA1xaYn9-2YHgunpCeettHM3hKEYoTOirc6o,7270 -botocore/data/accessanalyzer/2019-11-01/endpoint-rule-set-1.json.gz,sha256=WYZMzIFU_reVnd_WKsuEKWOpJ9whrf3u2u-5palQyUQ,1237 -botocore/data/accessanalyzer/2019-11-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/accessanalyzer/2019-11-01/paginators-1.json,sha256=8cCsaqYmzBJj1naqrZSRYos_QGyOVec_G_9xuLEApG8,1908 -botocore/data/accessanalyzer/2019-11-01/paginators-1.sdk-extras.json,sha256=nwsOcoMZ1GDzrFfMc6_Gx0tNRKwt5b8XddGiZmkAS2s,600 -botocore/data/accessanalyzer/2019-11-01/service-2.json.gz,sha256=6n6i0M9YGz3eM4JLkWBuKORmz1V7z8VR3yZqvPmQxSc,27631 -botocore/data/account/2021-02-01/endpoint-rule-set-1.json.gz,sha256=9OKRgAsO8DACKal-4ifgr5FciBYP9XcMhvhNqwF1DNw,1379 -botocore/data/account/2021-02-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/account/2021-02-01/paginators-1.json,sha256=TCku1Qs1la1Ggv8u8dKSYP2E5i5sWpmhRmL4zSR87RQ,185 -botocore/data/account/2021-02-01/service-2.json.gz,sha256=CR2RXu51alnjYk1sWVE0dTGqGOQ764rXBf53Yp6TSC0,6344 -botocore/data/acm-pca/2017-08-22/endpoint-rule-set-1.json.gz,sha256=Jm-lRuzsaGxeJKYQS38kUmua3kjupUMyEeQHp0Sg0Co,1234 -botocore/data/acm-pca/2017-08-22/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/acm-pca/2017-08-22/paginators-1.json,sha256=q2wFRetchlBt43qtOCTJ_Qw49u-LnRgmPdEn1j_j50A,537 -botocore/data/acm-pca/2017-08-22/service-2.json.gz,sha256=FZomnFa0FdluF3A5UcpetXKkdv1QPnipspMTzcuysKc,24950 -botocore/data/acm-pca/2017-08-22/waiters-2.json,sha256=PH8MS91fUjXWbhd08bgVhUC-_SQPmLrJOGkGo6j6jaU,1759 -botocore/data/acm/2015-12-08/endpoint-rule-set-1.json.gz,sha256=cGgLRhxpamT-pIl833H4w-7Nd_g9azR8v1TEVdGJBN8,1230 -botocore/data/acm/2015-12-08/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/acm/2015-12-08/paginators-1.json,sha256=oB2exj3JKzcsLCvfBeMqYawlxz6YghtvUQlwOfdTY4g,203 -botocore/data/acm/2015-12-08/service-2.json.gz,sha256=IrqnaqImsnmd1RH6nePC2QnEPiuwMWU_bz7AQ0HUWRU,14816 -botocore/data/acm/2015-12-08/waiters-2.json,sha256=yHGQEXzDfVDuG0r6SRAMf4LYJdVcSwxxN4w6Op3t_wE,818 -botocore/data/aiops/2018-05-10/endpoint-rule-set-1.json.gz,sha256=PDpuFZHDn6Xhe4XdhofaWcl4zU93YLnE-xsTp1fgwg0,1296 -botocore/data/aiops/2018-05-10/paginators-1.json,sha256=f0tLATJ2XSci2en4kooSFQ-htod7hyA7nf54-4ycaIg,209 -botocore/data/aiops/2018-05-10/service-2.json.gz,sha256=_Ctq1qkZcdVJ8FiI8prJA6nBlYOItr4HXHJXg7QGQgs,6128 -botocore/data/aiops/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/amp/2020-08-01/endpoint-rule-set-1.json.gz,sha256=cwCo-B99s8RqYHiPiklepjrSEpJec25l_8T8wvaVoRA,1145 -botocore/data/amp/2020-08-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/amp/2020-08-01/paginators-1.json,sha256=gOCl7CM-ELiCZZ9BzIfEbHwzBmxizZraTadvic-_kI8,717 -botocore/data/amp/2020-08-01/service-2.json.gz,sha256=L5upJD1o1Vr1LkBeCRCWB0Tpq5B48MNU5IfuOpDvxE8,18753 -botocore/data/amp/2020-08-01/waiters-2.json,sha256=eeRO0PVcJh6MPtsVwtDu4vW0caXV1XwqJAdBHvUnfzY,3387 -botocore/data/amplify/2017-07-25/endpoint-rule-set-1.json.gz,sha256=u-ccSakJB4o0SDxSdr9izFjzBkQWbedxQkA2f-ujGlI,1149 -botocore/data/amplify/2017-07-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/amplify/2017-07-25/paginators-1.json,sha256=XJ4xwNrUExhAxy-8K8JJAPnBhdRZO7FB6NGTrgr_qZQ,685 -botocore/data/amplify/2017-07-25/service-2.json.gz,sha256=4kXfXn6MsUsCuaqUDsvXxhCLuTdZ2P13-AkkOj8I5-c,17604 -botocore/data/amplifybackend/2020-08-11/endpoint-rule-set-1.json.gz,sha256=HmHEDJJ8JksObUnJq0Hdcu13c9fAbnMkx22TCDfSvkA,1154 -botocore/data/amplifybackend/2020-08-11/paginators-1.json,sha256=0JG13-2KlCwca-Pwz7d5Mp3WIttu4BpwDusqxMXF9XY,186 -botocore/data/amplifybackend/2020-08-11/service-2.json.gz,sha256=FuhTLqSlLbOqOWf3A0zhaQoqVDPxC5D61Pun1cAmCwE,10990 -botocore/data/amplifyuibuilder/2021-08-11/endpoint-rule-set-1.json.gz,sha256=JnW8sMoqbEC0uM8KcfRwfYuAxLH6UE3kUnmZ8N69qPg,1156 -botocore/data/amplifyuibuilder/2021-08-11/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/amplifyuibuilder/2021-08-11/paginators-1.json,sha256=idtki67MCJcfs_brVKsvknxJbZtDfS-IK3cakM1IFCI,1063 -botocore/data/amplifyuibuilder/2021-08-11/service-2.json.gz,sha256=kIELrrSZJ1wNixqRTz2pdzWtCShv-HX7-ayXEEyeNR8,15634 -botocore/data/amplifyuibuilder/2021-08-11/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/apigateway/2015-07-09/endpoint-rule-set-1.json.gz,sha256=FZKoHWIqIWdkwrm3XHjdxR_z4yYOYlwDIkE-T78-a9U,1151 -botocore/data/apigateway/2015-07-09/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/apigateway/2015-07-09/paginators-1.json,sha256=gwAb1K7CkHdC49pAfwZMgaT18Hm1r5qDK1m_6m-Ki9w,2913 
-botocore/data/apigateway/2015-07-09/service-2.json.gz,sha256=EPpsCRtop_bjgNaT_Duh1KkzyPsBiFOHSx1v724Oams,39734 -botocore/data/apigatewaymanagementapi/2018-11-29/endpoint-rule-set-1.json.gz,sha256=-bQKVepO7MEN1HS1o3-GgPdeQPM1yjzatKRi9BDoiUM,1151 -botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/apigatewaymanagementapi/2018-11-29/service-2.json.gz,sha256=hhxaDBhUCnGeqZFTwlSyzQ3tb0hgPYZ2Gy2-DuCYIA0,1444 -botocore/data/apigatewayv2/2018-11-29/endpoint-rule-set-1.json.gz,sha256=FZKoHWIqIWdkwrm3XHjdxR_z4yYOYlwDIkE-T78-a9U,1151 -botocore/data/apigatewayv2/2018-11-29/paginators-1.json,sha256=uQijp2s8aofeR6MXY0KDrgYJXF8Dk2P34Lbw6H9utSk,2457 -botocore/data/apigatewayv2/2018-11-29/service-2.json.gz,sha256=sXMi0dYLjRiSDrrhMJ3rLtfQVpjcDy0DNlIMEHDwO0M,53301 -botocore/data/appconfig/2019-10-09/endpoint-rule-set-1.json.gz,sha256=VXowmRWPJ7jGO6vDHMRnML678j7zSUIjKgPH88wRKAk,1230 -botocore/data/appconfig/2019-10-09/examples-1.json,sha256=lm2meYHY2djHXZ_3lYZa2PxELHhVDtZdMkVw4IWCI8Y,25502 -botocore/data/appconfig/2019-10-09/paginators-1.json,sha256=DlvXrqKcTiVi3Yv2rStPwl5O1kqSQaiyRGD_fQugFEQ,1367 -botocore/data/appconfig/2019-10-09/service-2.json.gz,sha256=N3WkAGvwun8kf2uKgU5RG1pDKeJnlvFbNUBJ6RTrxZM,19510 -botocore/data/appconfig/2019-10-09/waiters-2.json,sha256=1_6Y2OdolOE882ZkaU3E11-WALKr3g4cj_KZwxW_jmc,1217 -botocore/data/appconfigdata/2021-11-11/endpoint-rule-set-1.json.gz,sha256=KRPfgV0E7yLqc-MLoePF4JGHshpOmYWWBtpYG3kv7HM,1235 -botocore/data/appconfigdata/2021-11-11/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/appconfigdata/2021-11-11/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/appconfigdata/2021-11-11/service-2.json.gz,sha256=VCh6Waby1_WZyDkfe6fAknKnDr7W8ePaSeF7sCM7unU,3114 -botocore/data/appfabric/2023-05-19/endpoint-rule-set-1.json.gz,sha256=JtfGuYV65_2ocR4V9grr7JWsay-dhO0KotM64OD0T-Q,1296 -botocore/data/appfabric/2023-05-19/paginators-1.json,sha256=AceDN9kDs832sLebyXTQMYza-dMZ8m2hsVyzbqxUXnQ,745 -botocore/data/appfabric/2023-05-19/service-2.json.gz,sha256=CjSDan4TX3RONdVS85dtMaU8FPHf8NbtJQQ-e6OEtOc,8601 -botocore/data/appfabric/2023-05-19/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/appflow/2020-08-23/endpoint-rule-set-1.json.gz,sha256=0ie036VraQdTzjkUmq3dSM1Sm5fh5QAILz_4SXiIhng,1149 -botocore/data/appflow/2020-08-23/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/appflow/2020-08-23/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/appflow/2020-08-23/service-2.json.gz,sha256=IApa1aSItcH54xuM9zfCPYCq3AimPSN9pU2Q141Z-Ro,32811 -botocore/data/appintegrations/2020-07-29/endpoint-rule-set-1.json.gz,sha256=j1piNS7W8-G3uQvMgVKl5Q-WqiuyTAMUbD6ztfuLCgU,1153 -botocore/data/appintegrations/2020-07-29/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/appintegrations/2020-07-29/paginators-1.json,sha256=BYTiBgFZxnU-sQgufFZqZnJtClnQxZqlwuhNGC6J1vw,1147 -botocore/data/appintegrations/2020-07-29/service-2.json.gz,sha256=nXX1UtSQwoCOfpylbu1A3LGuf3dwNzy0rcaANA2QeKo,6981 -botocore/data/application-autoscaling/2016-02-06/endpoint-rule-set-1.json.gz,sha256=sUK7asxgll3kjAPV9ZDfdM0Y0fwpp3q6cLncW80MAyo,1244 -botocore/data/application-autoscaling/2016-02-06/examples-1.json,sha256=_IICzVD2rqZHmWHwRCsR313_WXRitdmWhlhDtSzomVE,8473 
-botocore/data/application-autoscaling/2016-02-06/paginators-1.json,sha256=Yg5NHu8W50qc_r8JCtkNGMbKd861R4w8wQFdrbV0rR0,751 -botocore/data/application-autoscaling/2016-02-06/service-2.json.gz,sha256=1BUST19_y5aGNqZ2zTD0V3jUAw3qkjLot446S2vD-VA,24469 -botocore/data/application-insights/2018-11-25/endpoint-rule-set-1.json.gz,sha256=zqL0zNqRUXnbkJHdS0ypDyMTs4HQ-UD7edrvfwS1FKM,1158 -botocore/data/application-insights/2018-11-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/application-insights/2018-11-25/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/application-insights/2018-11-25/service-2.json.gz,sha256=9SbIWj79OSGfyIdG1V5tBM3s2Fbk-hLIjPL93qdeDHw,12426 -botocore/data/application-signals/2024-04-15/endpoint-rule-set-1.json.gz,sha256=reG_2_ScN6anU7zaMDs-aYOfAPO8ApjBW2H1C9wWEIM,840 -botocore/data/application-signals/2024-04-15/paginators-1.json,sha256=qxyHVSFC0Jy0F-_lae2jvxP4YeESvlp2me7mBB6INVY,1460 -botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json,sha256=q7il5SmbvqgIkSGNSz9N91-QUBoPCGwaVGqozML7COM,952 -botocore/data/application-signals/2024-04-15/service-2.json.gz,sha256=kB0dm6hwvAPezcyy4rnOiL8EroNfvHQWeULbXfKbm1s,24454 -botocore/data/applicationcostprofiler/2020-09-10/endpoint-rule-set-1.json.gz,sha256=CWveMjTx4ZIH3ukBc0Kq-xtCgc20cVRh4NPPY42FaSo,1164 -botocore/data/applicationcostprofiler/2020-09-10/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/applicationcostprofiler/2020-09-10/paginators-1.json,sha256=2by8SKjvkqf2tkVd1NxlMiNsOoEUr6V3LekGj4k4yWg,205 -botocore/data/applicationcostprofiler/2020-09-10/service-2.json.gz,sha256=O9yxWur0p_wcRf3d6nkQZny4MhceeoGOlngsa2dsUKo,2850 -botocore/data/appmesh/2018-10-01/endpoint-rule-set-1.json.gz,sha256=Q7uCVWBKeg-37_LDbeHqgdildRNcIM-qUT264qqlWWk,1289 -botocore/data/appmesh/2018-10-01/examples-1.json,sha256=IKnIAQr_hsb-b42MXo7jKoBKd1lTzVS0bsbWMSTIwg8,41 -botocore/data/appmesh/2018-10-01/paginators-1.json,sha256=-TPoHMW78DG37BJz5SNi67CsUIs4PTTccyUhlXtMBm4,665 -botocore/data/appmesh/2018-10-01/service-2.json.gz,sha256=TpxsJlyjZWPhGSYJvQetnszgyN-fP19FR7BfD2uEyJ8,7902 -botocore/data/appmesh/2019-01-25/endpoint-rule-set-1.json.gz,sha256=16VzHCa6pezWBUYBZfRgdBL__bNQjZyA27OFifo-rlk,1149 -botocore/data/appmesh/2019-01-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/appmesh/2019-01-25/paginators-1.json,sha256=z6PCEVS0COSk5Nf9KXgXsZ3I9gcq9whv7yonh8s1YMM,1334 -botocore/data/appmesh/2019-01-25/service-2.json.gz,sha256=lPYs1z2c2hB0INCTTfHSy0GQO5moosH7AAd9ANPuZlE,23271 -botocore/data/apprunner/2020-05-15/endpoint-rule-set-1.json.gz,sha256=VxrcAbsqKvrrpWmTEiKH9XnhmSDE8hxDZceoR5lzCqU,1149 -botocore/data/apprunner/2020-05-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/apprunner/2020-05-15/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/apprunner/2020-05-15/service-2.json.gz,sha256=HgZZ7uGkbJmkBIl6trDD_SHrXVnCSTj_AbSK1ss9KkA,19689 -botocore/data/appstream/2016-12-01/endpoint-rule-set-1.json.gz,sha256=55Zm6cFBInyMvH3Bz2EgQFxjALiQxeSASC1uZ_EGJPE,1242 -botocore/data/appstream/2016-12-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/appstream/2016-12-01/paginators-1.json,sha256=agUpesJpo5f8dC0kH0m0asMYWn4N4MUHae5LK8W0Hwo,1584 -botocore/data/appstream/2016-12-01/service-2.json.gz,sha256=s26KT1aQV6cxXVdZGRuwu6F5O3BbFC0_6pxQC98Paic,38678 
-botocore/data/appstream/2016-12-01/waiters-2.json,sha256=XZ1LQBLoJ56YEhaTqi2Bs5XKhax6pr9LRsQVIo7kHck,1245 -botocore/data/appsync/2017-07-25/endpoint-rule-set-1.json.gz,sha256=AgrxWnkH5Nhz2hq_I_BCmgD8ndjqf7_H6I1aI6iYk_Q,1149 -botocore/data/appsync/2017-07-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/appsync/2017-07-25/paginators-1.json,sha256=dFnt5T5D3rtFs2xAFiCK0lj-5A8p_ZNysOMx2vA-vFo,2052 -botocore/data/appsync/2017-07-25/service-2.json.gz,sha256=evBhI3DfKmBYoLSRyQEzbSfXQBH1SEFZoa-QkVpAl-o,31441 -botocore/data/arc-region-switch/2022-07-26/endpoint-rule-set-1.json.gz,sha256=Lf2Szj7Y7sBPwLt1xE4q9HOzJAYFlPkuzMmXg-n4QBo,1320 -botocore/data/arc-region-switch/2022-07-26/paginators-1.json,sha256=0d1_BtM6AvPZ5SwAkVXkRgdmQxLicfTo7EQFZHyWCUM,1383 -botocore/data/arc-region-switch/2022-07-26/paginators-1.sdk-extras.json,sha256=oAPYMezaJ5qMLpj-HQ8hn5DR_KWiDk0Yd7upZBcFeQE,725 -botocore/data/arc-region-switch/2022-07-26/service-2.json.gz,sha256=2JxVNK7b70dT9_2hs28EE_niNESlQ-m4-ntKxVEKIMc,12595 -botocore/data/arc-region-switch/2022-07-26/waiters-2.json,sha256=LCN4G74d_dY3Oed3NEBHvbMUvXZLg5Km0ioiJte6nW4,1499 -botocore/data/arc-zonal-shift/2022-10-30/endpoint-rule-set-1.json.gz,sha256=DF6-ijCaoEQ_hTZ_HBKceeMhiPP0OyyoU_6rkvS9Hlg,1305 -botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json,sha256=wx99_DrI6RWKkZuUiP1HQ1xacRiIoUsgPuxVHGpvZGU,515 -botocore/data/arc-zonal-shift/2022-10-30/service-2.json.gz,sha256=iaiHMEF9NfOvdlkhxk9CuYsRebyskhYP7hipgPRIIfo,10676 -botocore/data/arc-zonal-shift/2022-10-30/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/artifact/2018-05-10/endpoint-rule-set-1.json.gz,sha256=FqCD2waCLr8t6CE1K-c8X8SC6qTNWu32h1A-dkcbEoI,1378 -botocore/data/artifact/2018-05-10/paginators-1.json,sha256=iEHcZhMQQtp5NM20fJVZJDialoOTzbsz03OSePilRxc,534 -botocore/data/artifact/2018-05-10/service-2.json.gz,sha256=JdNlvtX0dEcQUcPCpAlrIS3sTYiEiEMvi7JjHx4S610,3431 -botocore/data/artifact/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/athena/2017-05-18/endpoint-rule-set-1.json.gz,sha256=v87HzsMoZaZOtTi5kROtAzMW22LQDpOUXEqIyM7Qha0,1147 -botocore/data/athena/2017-05-18/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/athena/2017-05-18/paginators-1.json,sha256=lLXYrCWDDFVhjAdFEhKyoc0-zEe2YYUM4nR9vXRBDgE,1330 -botocore/data/athena/2017-05-18/service-2.json.gz,sha256=VPlCtF5kQUbnlnoxY-plXLXcSI-IJD3Oi9SmWtY_65w,33502 -botocore/data/auditmanager/2017-07-25/endpoint-rule-set-1.json.gz,sha256=MaYgoP3t3YU7DX1mbjXsBlWksh9g7VwgQjoBiVj8eic,1152 -botocore/data/auditmanager/2017-07-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/auditmanager/2017-07-25/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/auditmanager/2017-07-25/service-2.json.gz,sha256=OQECB_9TrNlulBKP8UNljTHl7x-wR8xLJsif_yxK2Gg,27932 -botocore/data/autoscaling-plans/2018-01-06/endpoint-rule-set-1.json.gz,sha256=7dp4MhJC4yB1JJBeI02t3J_S0BaKoluh3vpTjGZG7e8,1239 -botocore/data/autoscaling-plans/2018-01-06/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/autoscaling-plans/2018-01-06/paginators-1.json,sha256=Au_RY0jJAvQZ-sAmZQk8FXYyrw1rDVD4YILlb6sDxh8,389 -botocore/data/autoscaling-plans/2018-01-06/service-2.json.gz,sha256=6_6zDG0ia3IEBo2iLpZ5e9tppL23-eWyCsS1ei2VawI,9106 -botocore/data/autoscaling/2011-01-01/endpoint-rule-set-1.json.gz,sha256=LKfDis-gqwLPrK8c61L-1cqHmaGwX_lopLrEPGd88Ms,1236 
-botocore/data/autoscaling/2011-01-01/examples-1.json,sha256=-VLit9j2MnCph5AkDejxys_Iqt3JaUweEkC1B0_37j4,54289 -botocore/data/autoscaling/2011-01-01/paginators-1.json,sha256=hM_o0QSb61rvEQvua3IVpSLBUVCEy2BcwdQv1D_wSXk,2033 -botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json,sha256=FWBD5vKeS-MHcMzdipl2xKN3ddQu81Dk19sMd_82lKs,177 -botocore/data/autoscaling/2011-01-01/service-2.json.gz,sha256=rN0juEND7wSmEMziFUmp9kgFV5oVam15ITv2tjp4Jxs,63462 -botocore/data/b2bi/2022-06-23/endpoint-rule-set-1.json.gz,sha256=FhVUAJPU_WN2RpxI3ZTDTvsC39WtqA9RJiHm5Yhw_vU,1296 -botocore/data/b2bi/2022-06-23/paginators-1.json,sha256=7ttS6Z0bHTlax4HX4atDWB9qbLUxoE9OTzdYeT62jiE,697 -botocore/data/b2bi/2022-06-23/service-2.json.gz,sha256=MUBlgsjcY8fijr_AjTklqNWOMLo6zNjjUVrXcSNPn_w,19765 -botocore/data/b2bi/2022-06-23/waiters-2.json,sha256=QMq6U9zbKkK3L-Tn-wU5690g011Rd4U2AUe93h__Arg,451 -botocore/data/backup-gateway/2021-01-01/endpoint-rule-set-1.json.gz,sha256=fHl9A64u9t3QFKSPRKYv_P-Lqm_qwlKwb35sF5ZbyKc,1154 -botocore/data/backup-gateway/2021-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/backup-gateway/2021-01-01/paginators-1.json,sha256=SBncJ16jo9My_HRd-t9A7KPTxlId0ZP7A9JGuJ8tsiA,531 -botocore/data/backup-gateway/2021-01-01/service-2.json.gz,sha256=8ocBvi-rKTOPzmWisbs4uWXRXuppohMn9JXHVvvFo-c,7373 -botocore/data/backup/2018-11-15/endpoint-rule-set-1.json.gz,sha256=piJpoI5RCKzPbXVC41-EIwqn40Usp_ySSLrTN1RFZu4,1147 -botocore/data/backup/2018-11-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/backup/2018-11-15/paginators-1.json,sha256=Xhdz-lLlNpgVP7ul4OdioPS7yLLUZxH1kENkr3Xblys,3976 -botocore/data/backup/2018-11-15/paginators-1.sdk-extras.json,sha256=5L_0CmNaoKhsX9pvnJgxZ31pDmYLjQF2BoJhpyhUuws,208 -botocore/data/backup/2018-11-15/service-2.json.gz,sha256=Q5V5lkzL8OqvDxO5WmubtsfkCKmsOcF2KRKWZWyMwZw,63845 -botocore/data/backupsearch/2018-05-10/endpoint-rule-set-1.json.gz,sha256=BRS8oGz1zVDFxWp1MIcvXZFCZXEm2P8N2SSYgGbYtJc,837 -botocore/data/backupsearch/2018-05-10/paginators-1.json,sha256=yHP69vTZn8UYtEwENmKyNQAb4VJ4_dvo2jF_qze7r4Y,707 -botocore/data/backupsearch/2018-05-10/service-2.json.gz,sha256=0MTdeBy1k4s9OY8iGiRISaNmBd_ycNa_rThSkt4uhcE,7506 -botocore/data/backupsearch/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/batch/2016-08-10/endpoint-rule-set-1.json.gz,sha256=ff2xQ1uydgJ-xrvO4j20bsoDAA6E6TjEJxaVTr6G90I,1266 -botocore/data/batch/2016-08-10/examples-1.json,sha256=OVGvwREzgw_LYc8FpiMwLMNKVBoPq2uadWkT4icK_aM,20292 -botocore/data/batch/2016-08-10/paginators-1.json,sha256=ZyYVhJ5W3ovu-L9f7gBaRB4t47W8jwftypzcghD1FCs,1622 -botocore/data/batch/2016-08-10/service-2.json.gz,sha256=eF4KhDuUuqs0ThP3xMfc6JP9OVHFNUs07lr9Q9DxdDc,61870 -botocore/data/bcm-dashboards/2025-08-18/endpoint-rule-set-1.json.gz,sha256=P8Fd1WaDCW2WuqjCZ8lEQlApM7N4f3hMjqkpHet7CTM,903 -botocore/data/bcm-dashboards/2025-08-18/paginators-1.json,sha256=psuO6Y_5J65x5ln4VJUFBbkyJ6QpacclUKZ0vCybvOY,191 -botocore/data/bcm-dashboards/2025-08-18/service-2.json.gz,sha256=gChRc6cVwXi-0_8CsjyzHpeA9sgHsupHxin_kUaAdlA,6191 -botocore/data/bcm-dashboards/2025-08-18/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/bcm-data-exports/2023-11-26/endpoint-rule-set-1.json.gz,sha256=_t88gfYrOaTFntSGBvkHvz-UVkq6xC6sbnLhIswFPR0,1286 -botocore/data/bcm-data-exports/2023-11-26/paginators-1.json,sha256=O6FqSUDC5izLwZBKGsqYvMoy2ROOd85-Hb7II57VJoY,509 
-botocore/data/bcm-data-exports/2023-11-26/service-2.json.gz,sha256=PQ65hBA6H2H6pYNBgwcHEAMtqDHbX6v0UV_nInTJvfw,5122 -botocore/data/bcm-pricing-calculator/2024-06-19/endpoint-rule-set-1.json.gz,sha256=l1xbxrOOXcegV3UBw598lUT6rMOm5PztKn4n54W_qvc,908 -botocore/data/bcm-pricing-calculator/2024-06-19/paginators-1.json,sha256=2_8le9lK1cG14GXeXUw1W0WMTbfb0tRtdFstFj36z9U,1783 -botocore/data/bcm-pricing-calculator/2024-06-19/service-2.json.gz,sha256=9LE_IHJoCTuE1oVag6W1fR28lqcbXS6B1uhHwwzTz0E,13412 -botocore/data/bcm-pricing-calculator/2024-06-19/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/bcm-recommended-actions/2024-11-14/endpoint-rule-set-1.json.gz,sha256=hwYuMmNu6z4G-IsBv937iw1NIB3RVMwnfOfc05a-sy8,909 -botocore/data/bcm-recommended-actions/2024-11-14/paginators-1.json,sha256=IVWKoMWh9816owS8FS9WRNbQSsxNHHf2zdJs9WlFSZc,207 -botocore/data/bcm-recommended-actions/2024-11-14/service-2.json.gz,sha256=Xnlv2dMvzKMGgbb0Iio3c25C-m21WwrsnsVBgX7fH08,2575 -botocore/data/bcm-recommended-actions/2024-11-14/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/bedrock-agent-runtime/2023-07-26/endpoint-rule-set-1.json.gz,sha256=qnWG_kCfWXik23ba2hCVeKcfKA2alg6E77ZFayTjDPs,1309 -botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json,sha256=C9qRMLkjskdop4L8GtJ_lTOeTAbdbUZxrB96tbMXE54,1344 -botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.sdk-extras.json,sha256=pTkYapptXTqJqkdaW-BtMU5clcvuIMpBrZsT72LGBsg,163 -botocore/data/bedrock-agent-runtime/2023-07-26/service-2.json.gz,sha256=voqW6a4ZYdb2HJtV6KHNXZeEqHwDCBCbhJlY5l2OhCo,50680 -botocore/data/bedrock-agent-runtime/2023-07-26/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/bedrock-agent/2023-06-05/endpoint-rule-set-1.json.gz,sha256=ci-kJUnzTsYES5bmK2tGzJpbP-GyTGobrIgoMqREkfE,1303 -botocore/data/bedrock-agent/2023-06-05/paginators-1.json,sha256=S_V0LwGpy2m_rxV0kBxgSTQCJCkQMHQImSKu2g2FSBU,2519 -botocore/data/bedrock-agent/2023-06-05/service-2.json.gz,sha256=YUcUmx_fTx4EUzOjV5yOBZNoxCRSOJ3xY7iskgzpuEw,58823 -botocore/data/bedrock-agent/2023-06-05/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/bedrock-agentcore-control/2023-06-05/endpoint-rule-set-1.json.gz,sha256=KeBgQ1MQVoiEsnYEGEkUnGpcsoNoOdxC9lKaBPB2Sp8,1312 -botocore/data/bedrock-agentcore-control/2023-06-05/paginators-1.json,sha256=mSXMmHW6VJPKJXI1hiV_wcd41sCOT7BVvwnR0tvH0Y4,3025 -botocore/data/bedrock-agentcore-control/2023-06-05/service-2.json.gz,sha256=-5muD-GYsZm4mnzB53XvIJeH_pPqZ_5IO0zC4GsBYVE,51693 -botocore/data/bedrock-agentcore-control/2023-06-05/waiters-2.json,sha256=ltaYPqrbtAE5fUL4n-cMigpOpoK6A5YGKpaJ5X4lFlc,3961 -botocore/data/bedrock-agentcore/2024-02-28/endpoint-rule-set-1.json.gz,sha256=Ncp5UJxaovMhof0AzmFthSjURE_-hBhPZjnNp-sj9Rc,1306 -botocore/data/bedrock-agentcore/2024-02-28/paginators-1.json,sha256=wu2SqNrrkziC33euLpNROY2iauMnsx8mRK6s3whmAjY,1053 -botocore/data/bedrock-agentcore/2024-02-28/service-2.json.gz,sha256=RC3gRwanSg6mqh3EULf8UV2Z1cCvlR_eN1KpeYl8rW0,22140 -botocore/data/bedrock-agentcore/2024-02-28/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/bedrock-data-automation-runtime/2024-06-13/endpoint-rule-set-1.json.gz,sha256=aid3K_BYgaH6gnsKViqKkzLova6GP2wFIjBEN8oD-Ok,1317 -botocore/data/bedrock-data-automation-runtime/2024-06-13/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 
-botocore/data/bedrock-data-automation-runtime/2024-06-13/service-2.json.gz,sha256=PXtNXGHb3D2ZbuQzP0B0rY_qtchpgaDxsYmaOaaWcBw,3509 -botocore/data/bedrock-data-automation/2023-07-26/endpoint-rule-set-1.json.gz,sha256=3NQkygBf6RRPnKDIWPFHP0uKFf0xVmYj1zICl3NZ9rc,1310 -botocore/data/bedrock-data-automation/2023-07-26/paginators-1.json,sha256=ws4Quiv4FeVskwu7oEHreNPkY5Qyvc_6E7p2lzFrzZM,367 -botocore/data/bedrock-data-automation/2023-07-26/service-2.json.gz,sha256=ETIrmrn3w7u8DO34YdiwEHb23J00RrSKgWiv64ClmJk,7262 -botocore/data/bedrock-runtime/2023-09-30/endpoint-rule-set-1.json.gz,sha256=jrrWEzw4MSKXBZjze8hxzcWyeRFtkJVKpUd9A-0SGq0,1305 -botocore/data/bedrock-runtime/2023-09-30/paginators-1.json,sha256=f2V5o6U1eaWP23rP4Qybme3wfj71oUfX79uvhvsR-T0,203 -botocore/data/bedrock-runtime/2023-09-30/service-2.json.gz,sha256=HfWShzSiA4Bo3wbJAmMhOhjJrt55G3jLHxVLtkH04eo,27119 -botocore/data/bedrock-runtime/2023-09-30/waiters-2.json,sha256=tj1ZnaqhwmJkUEQlwH7wm1SqY3lg1BvZDfzfPaIgNrY,38 -botocore/data/bedrock/2023-04-20/endpoint-rule-set-1.json.gz,sha256=a5sJtAZB_8BHWM-LMqH3dKhdkCPIjJPa6nS62UTPzSA,1298 -botocore/data/bedrock/2023-04-20/paginators-1.json,sha256=ghGmg8k5S2wPf-tl9s2WxbYILaciNaLilnPpBh541xE,3387 -botocore/data/bedrock/2023-04-20/service-2.json.gz,sha256=KWK2SE_riVm3_cwS89lixpQO5kF0Fn03_x_-aigoJ0E,76283 -botocore/data/bedrock/2023-04-20/waiters-2.json,sha256=tj1ZnaqhwmJkUEQlwH7wm1SqY3lg1BvZDfzfPaIgNrY,38 -botocore/data/billing/2023-09-07/endpoint-rule-set-1.json.gz,sha256=UiUgXi1FRro8sBG320VIAFKEOFnZ4Ia8dXQrGba2A5c,1538 -botocore/data/billing/2023-09-07/paginators-1.json,sha256=VVwgGWADfSrHSZlpYtjAy0BSdOZkNLuemV3vr2HZP2M,377 -botocore/data/billing/2023-09-07/service-2.json.gz,sha256=KDTk6X3GEpls2YihL-sxEYmUNCUrY6lNgGvDDN9wE2A,5431 -botocore/data/billing/2023-09-07/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/billingconductor/2021-07-30/endpoint-rule-set-1.json.gz,sha256=buqiUvW26DAahrG90zAu6moWv6LtH2NV5UdVsu77Ke8,1311 -botocore/data/billingconductor/2021-07-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/billingconductor/2021-07-30/paginators-1.json,sha256=C1lDM7aIG0KK8L7HotZs6eXvTQLuxzETH2wAHQdDzqI,2192 -botocore/data/billingconductor/2021-07-30/service-2.json.gz,sha256=7P4v81fSx2u85UoF7XGsz7P7T9ZNay2_J124JbSxKuI,16405 -botocore/data/billingconductor/2021-07-30/waiters-2.json,sha256=sAGuGxokCpXh7GUF-AzqqNR6DLDE-wgRMhjNJb41AHc,36 -botocore/data/braket/2019-09-01/endpoint-rule-set-1.json.gz,sha256=0j1hIIXlkqYWeT1CNVRhBJ99ABpD894feYGIbasDDZg,1147 -botocore/data/braket/2019-09-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/braket/2019-09-01/paginators-1.json,sha256=7oQATkyhA3sCLqboUPW6D7XGmWs8prCwyfxvjbvFu8c,691 -botocore/data/braket/2019-09-01/service-2.json.gz,sha256=jMTggtB_sPdnkCEpvkvJUrKN7RDck1roikcZk8-qIas,11429 -botocore/data/budgets/2016-10-20/endpoint-rule-set-1.json.gz,sha256=QydQqslmguUxDZ1w8DNWysrZtYVLgtPDB9FXUqTWg4M,1791 -botocore/data/budgets/2016-10-20/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/budgets/2016-10-20/paginators-1.json,sha256=4lIRhlnV70H90OPO79aAX2cps42vBAkZcxWDSS40zis,1512 -botocore/data/budgets/2016-10-20/service-2.json.gz,sha256=p2oVxZGB77VCeDHUEzi4OsNQaexMtL_qjbd7x7-JKWc,14621 -botocore/data/ce/2017-10-25/endpoint-rule-set-1.json.gz,sha256=5jJQZyuiAwn3n_pKKfb_DoNZYCykO9DAhOAAHchbChA,1937 -botocore/data/ce/2017-10-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/ce/2017-10-25/paginators-1.json,sha256=Ky9BBLdDrEphFKJFV-wK5EN9nalaUevMp1k_V3f_1Xw,2558 -botocore/data/ce/2017-10-25/paginators-1.sdk-extras.json,sha256=PtomDOLRX3ei-nf22GLErTLGutc5yPaMxM3vHtgo0m0,462 -botocore/data/ce/2017-10-25/service-2.json.gz,sha256=yajhnnp8UGnibx77NOHi9YOMoJK4phnN3GsINpz-vxw,43836 -botocore/data/chatbot/2017-10-11/endpoint-rule-set-1.json.gz,sha256=Z6WP6SfKdMVluMft7r11zThaHu2YG6fFabG8Ya9CE-k,1295 -botocore/data/chatbot/2017-10-11/paginators-1.json,sha256=dyTPHZL8UEdw0bi3HPFXTYPfk9gRppvCy5ZRh2Vmysw,1723 -botocore/data/chatbot/2017-10-11/service-2.json.gz,sha256=UkE6kEYNo5CqVea8S1VnOY9Dh7PbItMM6X7TjrYlhhk,10732 -botocore/data/chime-sdk-identity/2021-04-20/endpoint-rule-set-1.json.gz,sha256=h4hL7Ybq9Asnhs6l8UNuXORB-JtebMOiKHvoA6cWCJM,1154 -botocore/data/chime-sdk-identity/2021-04-20/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/chime-sdk-identity/2021-04-20/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/chime-sdk-identity/2021-04-20/service-2.json.gz,sha256=GKfSo4bv2gnJ-bHFwFx_eed4UgMdVJNcdaAMyWHe7-g,8137 -botocore/data/chime-sdk-media-pipelines/2021-07-15/endpoint-rule-set-1.json.gz,sha256=UvOJX1oEkhKN0a9I2hhiI7oAZzS2ntmFlqlvt9uFKqs,1158 -botocore/data/chime-sdk-media-pipelines/2021-07-15/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/chime-sdk-media-pipelines/2021-07-15/service-2.json.gz,sha256=mLjng4DG4Rf9O6zcTAzcTwD_gfp7kmHJHjdnVjzWN1A,18799 -botocore/data/chime-sdk-meetings/2021-07-15/endpoint-rule-set-1.json.gz,sha256=zbcXcCldLls5or_t8gFNFLrtPkiaB6FjHI7OIvIwyAs,1154 -botocore/data/chime-sdk-meetings/2021-07-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/chime-sdk-meetings/2021-07-15/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/chime-sdk-meetings/2021-07-15/service-2.json.gz,sha256=PyExAQbLjIgXe-flhgPI3GP-wbIZtIpGGXVTK6rnWb0,11608 -botocore/data/chime-sdk-messaging/2021-05-15/endpoint-rule-set-1.json.gz,sha256=bMNnVWE3A_nK2UMNIK8lJPgeyCs_M-CKEUlXriTjzro,1154 -botocore/data/chime-sdk-messaging/2021-05-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/chime-sdk-messaging/2021-05-15/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/chime-sdk-messaging/2021-05-15/service-2.json.gz,sha256=5c0iYp6jH6FDJ0jM3JxZ6CZVlkp41QOPfEKPeKaePNc,16422 -botocore/data/chime-sdk-voice/2022-08-03/endpoint-rule-set-1.json.gz,sha256=SuEy_t62WwaapbWE_7NB5Q1CO-VU_AIMjsLuX3WN8e8,1301 -botocore/data/chime-sdk-voice/2022-08-03/paginators-1.json,sha256=28096cSFWwRSuJQMmk9A3HNyMAH8wFdjz3F_5pukB8Q,373 -botocore/data/chime-sdk-voice/2022-08-03/service-2.json.gz,sha256=9BG3t3SW5k0WXTlTntzB7Exo0eG-C3qMS2pGCksn2CQ,23793 -botocore/data/chime/2018-05-01/endpoint-rule-set-1.json.gz,sha256=I6eE78wKbyAkIYYMXbfSZgbRZcFH5KGcDQfDLUlt0pk,1303 -botocore/data/chime/2018-05-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/chime/2018-05-01/paginators-1.json,sha256=eU07vcRnjLd-9RmN_aGGPffN0ZXkpMRFYD_XbcyDy3A,343 -botocore/data/chime/2018-05-01/service-2.json.gz,sha256=RQ8fXf7JqGwIDh1QCNFWie1NSPu_4aITr6VMYgzzv6Q,15342 -botocore/data/cleanrooms/2022-02-17/endpoint-rule-set-1.json.gz,sha256=ehx0sttDXj9x4Re4UJBSjbqe4szVukKL4wAJCMuqo3w,1300 -botocore/data/cleanrooms/2022-02-17/paginators-1.json,sha256=iPjb26ef_F3feCu4c4D3ES2inGWlFEp3pEqOkg9sp3E,3964 
-botocore/data/cleanrooms/2022-02-17/service-2.json.gz,sha256=nofm60e5rhas64SrE3ddMH8pUxhrRkd-NpkZfLKEdyo,46104 -botocore/data/cleanrooms/2022-02-17/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/cleanroomsml/2023-09-06/endpoint-rule-set-1.json.gz,sha256=GzlkhmpZ5leuAjNfTBPWVRSvSLh34c0PUPfMXyIeRiI,1302 -botocore/data/cleanroomsml/2023-09-06/paginators-1.json,sha256=-tYvoBkilgzoays9JIwkGktzi_n9C3PGlVvsTol8n7o,3170 -botocore/data/cleanroomsml/2023-09-06/service-2.json.gz,sha256=jsYCqzFMKkL1T1HJzgtMn02cI7pooxbSDqk-NU5hRF0,30400 -botocore/data/cleanroomsml/2023-09-06/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/cloud9/2017-09-23/endpoint-rule-set-1.json.gz,sha256=cn9aynzufoLaFyZBB5E3JWf7BIhP7ueIUrznmExdtzE,1149 -botocore/data/cloud9/2017-09-23/examples-1.json,sha256=Jbbei88MR8S4MFnfmPKNTEk_b1NdqqM5R6P781A23JY,9183 -botocore/data/cloud9/2017-09-23/paginators-1.json,sha256=lET7E3FWErLA8In260otKfr3_9oVSr5OTO1zcrBi28w,380 -botocore/data/cloud9/2017-09-23/service-2.json.gz,sha256=Xyjv5hF05OvDvOi2VfgDuXnic1aYWDUpl2Pr5m3vQZE,6083 -botocore/data/cloudcontrol/2021-09-30/endpoint-rule-set-1.json.gz,sha256=2wN5mt1Bm-W9mJ7DqWClNDjDovxXAXKKbpH4ocuJsAs,1154 -botocore/data/cloudcontrol/2021-09-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudcontrol/2021-09-30/paginators-1.json,sha256=Xh6wJghPx6VpGNTTEdpRQIsrJuVeyY5FQNpNLpUdkhc,392 -botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json,sha256=9NbQ8xHg5ztdpvYFDl15_74F30ZNPFnSFDxismgvSMg,143 -botocore/data/cloudcontrol/2021-09-30/service-2.json.gz,sha256=5X4CZQs4G_69lpr-xHtSTFYVTRVVVP9EvG-rdpYtbE8,6492 -botocore/data/cloudcontrol/2021-09-30/waiters-2.json,sha256=US_tyuvbMcXS6IrVB8D817Gg3pGKdCuooDJKz4Ta56U,738 -botocore/data/clouddirectory/2016-05-10/endpoint-rule-set-1.json.gz,sha256=iFaNe1e5o8CX9urrl_4jHZJx29yYvisg_1WS-xRFTew,1398 -botocore/data/clouddirectory/2016-05-10/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/clouddirectory/2016-05-10/paginators-1.json,sha256=y8GPuHURJmdagJ3QAI5mxkAzKvdCZwcnfYt3Z-qwgAU,2808 -botocore/data/clouddirectory/2016-05-10/service-2.json.gz,sha256=wo0c956PDETBGUSWr4S554Ii0viOSBJvAtAixQmueN0,22958 -botocore/data/clouddirectory/2017-01-11/endpoint-rule-set-1.json.gz,sha256=jkTMKHcM04J0o6m_11CAevtu9t18ocL1laFHBntFRkY,1239 -botocore/data/clouddirectory/2017-01-11/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/clouddirectory/2017-01-11/paginators-1.json,sha256=gIqmeqo-8lsyEDEVDFvc1RJfd0T7c9xN6SdMnxGvSpw,3342 -botocore/data/clouddirectory/2017-01-11/service-2.json.gz,sha256=h54nQJ5jCSWJebdsgcnPXCI8cuVNQlGFwi4WXjIGoGw,23910 -botocore/data/cloudformation/2010-05-15/endpoint-rule-set-1.json.gz,sha256=0zYdP4yGmsF5ATNqSSJSySkAAjv5ZKErVIrLWZxKq-M,1237 -botocore/data/cloudformation/2010-05-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudformation/2010-05-15/paginators-1.json,sha256=wGVn2mkDjwzpNruHooXuaGh29r875VHHC6yO_KAK8ys,3878 -botocore/data/cloudformation/2010-05-15/service-2.json.gz,sha256=znmYpdC-bkgqMmFj0Wwehtgq7xWJlOZGn6yClGhOKKs,85186 -botocore/data/cloudformation/2010-05-15/waiters-2.json,sha256=0fp18QpYZmIii-mio63vhFFVyREj4UBFHZbi-F2o4cE,11014 -botocore/data/cloudfront-keyvaluestore/2022-07-26/endpoint-rule-set-1.json.gz,sha256=_AdtO7GfcnyvvgoorWyywpC68pPXZaq7k6gxSmPx0U0,2209 
-botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json,sha256=2wyrpgvniacM8xlFDnHQiCR0KVEAVJxBEpWFBcrB4Z0,180 -botocore/data/cloudfront-keyvaluestore/2022-07-26/service-2.json.gz,sha256=g81KK2cpQUP4aLobdPiDfWl7067crpw8fgd9Qbhls5Q,2220 -botocore/data/cloudfront/2014-05-31/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2014-05-31/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2014-05-31/service-2.json.gz,sha256=4t-VJYYqEEUvKCoksa6OiBpgOW5mScVtQoaTHg7gw40,15298 -botocore/data/cloudfront/2014-05-31/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2014-10-21/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2014-10-21/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2014-10-21/service-2.json.gz,sha256=1T3Sje0jTajwqbanul6gGyB_dCODfA7YecdIeMh8bbA,15887 -botocore/data/cloudfront/2014-10-21/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2014-11-06/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2014-11-06/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2014-11-06/service-2.json.gz,sha256=KllpxSvPJxf36aufGRn9--mrpIhUD-TksiUN1xqqFSg,15959 -botocore/data/cloudfront/2014-11-06/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2015-04-17/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2015-04-17/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2015-04-17/service-2.json.gz,sha256=X5nrhQKPWIqQcKqfYz7q-Uhhu4_L89_6t2KrjPFxU2g,16213 -botocore/data/cloudfront/2015-04-17/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2015-07-27/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2015-07-27/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2015-07-27/service-2.json.gz,sha256=3JBXFMorjeRMyGlRHN5z5uhCXjfuNV9mmRVG-6aWhlo,16702 -botocore/data/cloudfront/2015-07-27/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2015-09-17/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2015-09-17/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2015-09-17/service-2.json.gz,sha256=WHmCIqa_yyqdKzb-pqEcxxRDuQvVI1kg_6AuuEaK5B4,15890 -botocore/data/cloudfront/2015-09-17/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-01-13/endpoint-rule-set-1.json.gz,sha256=o03t5or8kC8HDE9EXrmzKz393HXBy7rG3aZm7-7AP_g,1839 -botocore/data/cloudfront/2016-01-13/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-01-13/service-2.json.gz,sha256=GQq3NPDK6pZ0dTeMDXFO6eAMlqU1KaSXlP-Lqy_4xGY,16358 -botocore/data/cloudfront/2016-01-13/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-01-28/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 
-botocore/data/cloudfront/2016-01-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2016-01-28/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-01-28/service-2.json.gz,sha256=dMSViNEluvgsu6NhXuvj8EKJH_tweOEN2RZym57sxng,16279 -botocore/data/cloudfront/2016-01-28/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-08-01/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2016-08-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2016-08-01/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-08-01/service-2.json.gz,sha256=wzJK5ZnEGQtYK3dtffcIEqENTusljD0z3ZdLbhp3kzo,17725 -botocore/data/cloudfront/2016-08-01/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-08-20/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2016-08-20/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-08-20/service-2.json.gz,sha256=5TI0wQk21IAMotAMwotb03NWbtjKXQGPubULkFQQb38,18123 -botocore/data/cloudfront/2016-08-20/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-09-07/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2016-09-07/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2016-09-07/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-09-07/service-2.json.gz,sha256=IOztJ-DDDpbG8N02RMhEtTQLPdYivfUn1_yLNSzE0QA,18444 -botocore/data/cloudfront/2016-09-07/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-09-29/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2016-09-29/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2016-09-29/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-09-29/service-2.json.gz,sha256=dFOPWGBlZykUFC5QoYOOWwy2YzixjI517GdKHfq4jVA,27522 -botocore/data/cloudfront/2016-09-29/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2016-11-25/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2016-11-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2016-11-25/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2016-11-25/service-2.json.gz,sha256=MS951uqn8x9eQ4CBpmkyCZTgzsU66ODDCaAGNsGep5g,27955 -botocore/data/cloudfront/2016-11-25/waiters-2.json,sha256=jzREqDxfIg2KbmPYOmDoYgDvy8mWAEK0w_NmEoCqhHI,1184 -botocore/data/cloudfront/2017-03-25/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2017-03-25/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2017-03-25/service-2.json.gz,sha256=6iC5RzVrj-3kUcuL_2tVw3dUmkrvF5hHSQiTuU0QrV4,29088 
-botocore/data/cloudfront/2017-03-25/waiters-2.json,sha256=JboqzXjlni8p-wiVKBz1jRj-mFpkryqueCgI1hD7WPA,1184 -botocore/data/cloudfront/2017-10-30/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2017-10-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2017-10-30/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2017-10-30/service-2.json.gz,sha256=RM8XzLTHnKX6NOhivVpARhskZ7zleQuV51dfV12_AiQ,34767 -botocore/data/cloudfront/2017-10-30/waiters-2.json,sha256=JboqzXjlni8p-wiVKBz1jRj-mFpkryqueCgI1hD7WPA,1184 -botocore/data/cloudfront/2018-06-18/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2018-06-18/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2018-06-18/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2018-06-18/service-2.json.gz,sha256=jZPWfLXBEh-UF9yGbLL_zIfv2deFEL38RsyEdZHHjck,35482 -botocore/data/cloudfront/2018-06-18/waiters-2.json,sha256=JboqzXjlni8p-wiVKBz1jRj-mFpkryqueCgI1hD7WPA,1184 -botocore/data/cloudfront/2018-11-05/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2018-11-05/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2018-11-05/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2018-11-05/service-2.json.gz,sha256=05XqAegDBGg_xSLZbF0FmHECfhHLva1g2xkbJnqi-mY,36144 -botocore/data/cloudfront/2018-11-05/waiters-2.json,sha256=JboqzXjlni8p-wiVKBz1jRj-mFpkryqueCgI1hD7WPA,1184 -botocore/data/cloudfront/2019-03-26/endpoint-rule-set-1.json.gz,sha256=7A0wBLjhtULh4FpoylVQgkM1KvD0YWef6WNk1E-ddr0,1574 -botocore/data/cloudfront/2019-03-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2019-03-26/paginators-1.json,sha256=I7u4h1MFflBvFJemcrLHSn7uOrEeDFc7ecWGqwDxGF8,1126 -botocore/data/cloudfront/2019-03-26/service-2.json.gz,sha256=iG0tK_4K8H4U6FDsLU5E0UEwAJzQy669ATkMg5PEBzs,37652 -botocore/data/cloudfront/2019-03-26/waiters-2.json,sha256=qt7oBhQ-B52-397Q88q0EJoFpDWuOZM7CZpaFhX1xgM,1184 -botocore/data/cloudfront/2020-05-31/endpoint-rule-set-1.json.gz,sha256=gu927pMl8XD5I3GZWUmDaJvYZx4c0rxC5qlALXYSR74,1657 -botocore/data/cloudfront/2020-05-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudfront/2020-05-31/paginators-1.json,sha256=ASaoIzYBXx4dR4prrVpgqLWxB-zUZvsQjdqkdGbsstY,3642 -botocore/data/cloudfront/2020-05-31/service-2.json.gz,sha256=0PPG67_lole5AK6knXFuLUs51loFSodDXr_7UiSxw-0,89991 -botocore/data/cloudfront/2020-05-31/waiters-2.json,sha256=EBQKgBvTcueQ7pUpl3XfCyr-KY39mO_EedGvn21OpZg,1568 -botocore/data/cloudhsm/2014-05-30/endpoint-rule-set-1.json.gz,sha256=pJS1hOJa4xMfDgYsMaw3vIFPv1tJeaYs3MPIawq5AWk,1150 -botocore/data/cloudhsm/2014-05-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudhsm/2014-05-30/paginators-1.json,sha256=pe-X06JkfqlENEk-25nE_w_q3QQXkdMnQ5cOG1NPi6E,409 -botocore/data/cloudhsm/2014-05-30/service-2.json.gz,sha256=x4vAon8Agnbt5qiqVP-AmSgqU6zYyuMIsuGdZNZzOI0,5581 -botocore/data/cloudhsmv2/2017-04-28/endpoint-rule-set-1.json.gz,sha256=bZyC-KPrypHSlhwG5k1kdJyysL26gurD5hvv3F7yCGA,1242 
-botocore/data/cloudhsmv2/2017-04-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudhsmv2/2017-04-28/paginators-1.json,sha256=VvCnjrdoGz3Lb-gi5YSOAhhAHzB50i0vIks0GaA2nS0,512 -botocore/data/cloudhsmv2/2017-04-28/service-2.json.gz,sha256=_FLSzcDzHcetFib5xfAB18BBdGr5L1zA8C2b0Utapdk,8056 -botocore/data/cloudsearch/2011-02-01/endpoint-rule-set-1.json.gz,sha256=Rhs5UV_Z8sIEX1HNar3O-8Q66NBnhjVYDWKOpzbEhCM,1149 -botocore/data/cloudsearch/2011-02-01/service-2.json.gz,sha256=k9uhqvcw8TmO6mZvhRpnU0jmcC1aNDdhv8frSCKIsMw,9599 -botocore/data/cloudsearch/2013-01-01/endpoint-rule-set-1.json.gz,sha256=Yv8LrD4lgjWPYP_xwVdRNejNLY0bnvrGWsszQmJzN9Y,1151 -botocore/data/cloudsearch/2013-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudsearch/2013-01-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/cloudsearch/2013-01-01/service-2.json.gz,sha256=RDoDNTU9hxV1YAEECwEx913or6Er9DwNx3DO1orCXGI,12107 -botocore/data/cloudsearchdomain/2013-01-01/endpoint-rule-set-1.json.gz,sha256=Lcs2Sn8GlcC9642kbosoHSN4TzFonOLOR7WpDAu3Jto,1155 -botocore/data/cloudsearchdomain/2013-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudsearchdomain/2013-01-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/cloudsearchdomain/2013-01-01/service-2.json.gz,sha256=GzybIV2_M3G2srxBLATr0pg7hrsb-MM7irCN4YneFAY,9123 -botocore/data/cloudtrail-data/2021-08-11/endpoint-rule-set-1.json.gz,sha256=4ijdA4uQep1LW8IAzt1GD7Q1v_pjRQvjZneXcslb6B8,1304 -botocore/data/cloudtrail-data/2021-08-11/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/cloudtrail-data/2021-08-11/service-2.json.gz,sha256=Ul58n35k2RkiuIlaCIc5rfrtZ_BnLxtdGPNd-DYpXdo,2165 -botocore/data/cloudtrail/2013-11-01/endpoint-rule-set-1.json.gz,sha256=9mCLEkh-4oN4eIw0_RHvdb5VOI9BolQV5AcfY1s-vbo,1232 -botocore/data/cloudtrail/2013-11-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudtrail/2013-11-01/paginators-1.json,sha256=o6vvum0HuZOUnwcmSutAt1JcJBwzSpHooZx75aRRQc0,1070 -botocore/data/cloudtrail/2013-11-01/service-2.json.gz,sha256=O17Dh1fVhQ-5r6XNOJ8BgQoflHY0yaahtE8EiGICR1c,45143 -botocore/data/cloudwatch/2010-08-01/endpoint-rule-set-1.json.gz,sha256=VVmWjI0Rnu_aKyuoWilWNZdhwN5oT9NPiA_zOWfms_Q,1415 -botocore/data/cloudwatch/2010-08-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cloudwatch/2010-08-01/paginators-1.json,sha256=OfAocfP12RM8pfP6Fh2EUikcL00nN2vRMCW3O4wsjHo,1122 -botocore/data/cloudwatch/2010-08-01/service-2.json.gz,sha256=rwrubQHGiEwGF-UqbPVYu48e1_QVp-MLNLBkwUBoJsw,41274 -botocore/data/cloudwatch/2010-08-01/waiters-2.json,sha256=MloXSzqs1ZkzyWAP2NrkVyNkIE63Hbk24II7PCuUxl0,644 -botocore/data/codeartifact/2018-09-22/endpoint-rule-set-1.json.gz,sha256=onS45l4gcMvcs4ARfdCdMIR2Tm1UltByz_EYIVWm9y8,1151 -botocore/data/codeartifact/2018-09-22/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codeartifact/2018-09-22/paginators-1.json,sha256=I3MlPdEGK-hCFxJnNpPrpWkJSBaM9dhFiZ4uo0AoR8o,1747 -botocore/data/codeartifact/2018-09-22/paginators-1.sdk-extras.json,sha256=kNVDIOe3C5yL0xTWSrW2xDchpno4Xozz60DY53uxNEA,444 -botocore/data/codeartifact/2018-09-22/service-2.json.gz,sha256=KvclCcFNDSnUOjiHNpVbl9kqQNXXCPNC9JN3YexXrGw,23321 
-botocore/data/codebuild/2016-10-06/endpoint-rule-set-1.json.gz,sha256=eD3sllOUXimoNcee2s2rJSayeD1WAmeUxL8Z1QXZLuc,1149 -botocore/data/codebuild/2016-10-06/examples-1.json,sha256=_-tVq2XM1YDuzv78VwIj_WjyXHu-yrIPyxzTtTbdFJ8,9778 -botocore/data/codebuild/2016-10-06/paginators-1.json,sha256=jNb-seZLb_i52B9YQgWA8t6xpsFoHQEiAkYGnScwEz4,2448 -botocore/data/codebuild/2016-10-06/service-2.json.gz,sha256=0DtU0kVbr--IIGIWPbIH0wtzrMzAQn8NCguN2IxXeT0,48723 -botocore/data/codecatalyst/2022-09-28/endpoint-rule-set-1.json.gz,sha256=ZY4hlj6gNPXfqgz-Zc88BJ89oTuXeNN5fWRfUdVh_tg,849 -botocore/data/codecatalyst/2022-09-28/paginators-1.json,sha256=TuEQ6NVw_F_LgmG-TurtBCvFTRFRP8DWHseWdO8DNRk,1637 -botocore/data/codecatalyst/2022-09-28/service-2.json.gz,sha256=_l2Q9O-FNJN5IDiILI_IEN6TB3YIv-jNTg-KKYb62RA,13989 -botocore/data/codecatalyst/2022-09-28/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/codecommit/2015-04-13/endpoint-rule-set-1.json.gz,sha256=wajft7oTCC1ku05oJv3PryuyQpt1TtFSvUSHRqa-UBA,1151 -botocore/data/codecommit/2015-04-13/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codecommit/2015-04-13/paginators-1.json,sha256=2w92BpzUce0gSVEaZH0la2r8ZT_MDtxoLc6RG-dpln4,1206 -botocore/data/codecommit/2015-04-13/service-2.json.gz,sha256=q4So5CgcOPK07EEmWcAYYAwWZ68RqYjIQlPmIcg1g7M,40935 -botocore/data/codeconnections/2023-12-01/endpoint-rule-set-1.json.gz,sha256=yTojUmJswBThF63qFGRBEoMgr1z-8S_H4nUywUDMHYk,1302 -botocore/data/codeconnections/2023-12-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/codeconnections/2023-12-01/service-2.json.gz,sha256=bgNiOuF0C0aIDGbFxYM0qbRcCv_L_9SAQiFADFRmsQc,10017 -botocore/data/codedeploy/2014-10-06/endpoint-rule-set-1.json.gz,sha256=5T7sq8g-e_8NvYsXY1AgQL-9IMLbFaVYNwrHdlKXkSg,1150 -botocore/data/codedeploy/2014-10-06/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codedeploy/2014-10-06/paginators-1.json,sha256=riyMuhePXvzjx3lAoHiIaOi0U6v2lCVd65qX4UWPoxo,1313 -botocore/data/codedeploy/2014-10-06/service-2.json.gz,sha256=rTN6DbqPPpQAx8BF5RA8ZaP9IblKn17oTbYtxAGKbsI,31886 -botocore/data/codedeploy/2014-10-06/waiters-2.json,sha256=OARBxBeZTRUui1WztkVtUn7Q2lAh3-Bemczgk455MGQ,662 -botocore/data/codeguru-reviewer/2019-09-19/endpoint-rule-set-1.json.gz,sha256=80oU7POWen713fTLnpLB6tfhh8XWnjVWNcAxyQjduzg,1157 -botocore/data/codeguru-reviewer/2019-09-19/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codeguru-reviewer/2019-09-19/paginators-1.json,sha256=0bkbq9IDAtNTQOShBQuJVNtb8xgFFUYNdzOcl3ri_DM,223 -botocore/data/codeguru-reviewer/2019-09-19/service-2.json.gz,sha256=DlzvVDBTz6HXlwbzF3EoIQNUnSo9qXHY55c0KCZIdTI,11785 -botocore/data/codeguru-reviewer/2019-09-19/waiters-2.json,sha256=0jf0N7KHQV4qYAOPKBKNdiExhxEvojmGQ2Jzrc9lYR4,1733 -botocore/data/codeguru-security/2018-05-10/endpoint-rule-set-1.json.gz,sha256=iYPYBZxzwji1mQ_YWDBm180qmijgvE8eoO4MSxdKZ64,1306 -botocore/data/codeguru-security/2018-05-10/paginators-1.json,sha256=nwCp854x7Q4pjInZgk9mpYoj9BiFf09ekRTXObmU4GQ,522 -botocore/data/codeguru-security/2018-05-10/service-2.json.gz,sha256=ItdutsoDWJMiycCZ06C5jD5MTKa1gNEptYL7FVafjeU,7989 -botocore/data/codeguru-security/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/codeguruprofiler/2019-07-18/endpoint-rule-set-1.json.gz,sha256=OLx8W7ASJ65qIt85OP113XuwRqzT8K_L9o73rRs9p6g,1157 
-botocore/data/codeguruprofiler/2019-07-18/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codeguruprofiler/2019-07-18/paginators-1.json,sha256=d7DXbQ-GmZLDQRjjpAO-vzvm7OEA-pNKfPUyA9rgaag,195 -botocore/data/codeguruprofiler/2019-07-18/service-2.json.gz,sha256=beDXPwzwv76Y4K3xVq3v646m8lbdd7-leFPnQRhFHu4,14592 -botocore/data/codepipeline/2015-07-09/endpoint-rule-set-1.json.gz,sha256=Mj5bGXJArCSXkfyS_hFiTyDtVm2WADVr0xA8WIyp4Ws,1151 -botocore/data/codepipeline/2015-07-09/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codepipeline/2015-07-09/paginators-1.json,sha256=qvL1MMRjFuod_E5JuAfBuSUwPrMBCxO6zsfl4mEDYH8,1386 -botocore/data/codepipeline/2015-07-09/service-2.json.gz,sha256=7dybNrz65m2rX47EysQMQfCxRIoK-8m2Iz75C3DDvKo,36738 -botocore/data/codestar-connections/2019-12-01/endpoint-rule-set-1.json.gz,sha256=Ok9n_fJzQlv3pPtjsjr16WHroABvZ-idpgYkFJChjzY,1154 -botocore/data/codestar-connections/2019-12-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codestar-connections/2019-12-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/codestar-connections/2019-12-01/service-2.json.gz,sha256=bMShJnqxiUcdrg89CH17SyiywXR5zJwqL9Exdwgoi0E,9897 -botocore/data/codestar-notifications/2019-10-15/endpoint-rule-set-1.json.gz,sha256=fVXtcmdxhE2tk2VmiKIkRSYGZhGSyEOvJa_2oDWwtGk,1157 -botocore/data/codestar-notifications/2019-10-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/codestar-notifications/2019-10-15/paginators-1.json,sha256=bD6rBB54kEd5ns5mM8KWWE2Gfs6rNkRWTLyvKHai9OA,531 -botocore/data/codestar-notifications/2019-10-15/service-2.json.gz,sha256=bkRrmlIP29qxu5JmW_O3CZBGvEn2RasYnqrIHcV7MtU,5570 -botocore/data/cognito-identity/2014-06-30/endpoint-rule-set-1.json.gz,sha256=kW8NBSx4DPtkeULV9FcwUyvS2aAdxbF80UL-CAIe7gc,1348 -botocore/data/cognito-identity/2014-06-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cognito-identity/2014-06-30/paginators-1.json,sha256=iRnVNYNjXj4riBW6sjwmAF2p9fSX2MkfoM5W_Y9_tkE,197 -botocore/data/cognito-identity/2014-06-30/service-2.json.gz,sha256=V-i0bXO8dVRxYuzXzJ9uMuuYXsQZB8FzC4srK7_jNRU,10267 -botocore/data/cognito-idp/2016-04-18/endpoint-rule-set-1.json.gz,sha256=p1yAP2n0HdJVPpxt8fH41MZkXxCbmeIWOZieyMQwDAI,1344 -botocore/data/cognito-idp/2016-04-18/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cognito-idp/2016-04-18/paginators-1.json,sha256=RdlZ6K9kobwkGd7v3X15eeU_1MTpNNWwHD35RwB4keA,1527 -botocore/data/cognito-idp/2016-04-18/service-2.json.gz,sha256=c4Zco5YtNKpzMT74f7J_0MBE6ftdupNMR-HDyvoQasA,114861 -botocore/data/cognito-sync/2014-06-30/endpoint-rule-set-1.json.gz,sha256=quUBz5ANIw1ZTH5bX0VflMwho1ufRZ4NBIy0m66XQnY,1153 -botocore/data/cognito-sync/2014-06-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/cognito-sync/2014-06-30/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/cognito-sync/2014-06-30/service-2.json.gz,sha256=LLBeMxDnViv7vtF-dnykYwZa2_rDxR6J4yO6XCGTSGQ,7342 -botocore/data/comprehend/2017-11-27/endpoint-rule-set-1.json.gz,sha256=eU_XylRL6g1OgDj5OmQXpdhYETpJ7_Wmu9NLbW76WDs,1150 -botocore/data/comprehend/2017-11-27/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/comprehend/2017-11-27/paginators-1.json,sha256=aCozRajzUb4wblnxzb_bTJlztnDFC3PnwItAMek2WtY,2033 
-botocore/data/comprehend/2017-11-27/service-2.json.gz,sha256=n3ZY2QY9mmTQmYlYQU86ePrz8FPcb3UbDgGff2zyhFQ,43058 -botocore/data/comprehendmedical/2018-10-30/endpoint-rule-set-1.json.gz,sha256=hqqt2SGP95qdXN7hkW-Jc6U8qyQP4pzM1hm5T2gleJw,1155 -botocore/data/comprehendmedical/2018-10-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/comprehendmedical/2018-10-30/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/comprehendmedical/2018-10-30/service-2.json.gz,sha256=FJ0YOkvLylyIkyq3tn4-xEsGIwZiSYZCp-k9L1T7W2A,10293 -botocore/data/compute-optimizer-automation/2025-09-22/endpoint-rule-set-1.json.gz,sha256=hY0H1buk35cr5Ekwp1u9O4EKhAIyTkvHw8ENnH6I2KU,1302 -botocore/data/compute-optimizer-automation/2025-09-22/paginators-1.json,sha256=25BBuPwHP0Glc_fbh2x7tYdAxRQLlh5OqzXISl_Du1g,1680 -botocore/data/compute-optimizer-automation/2025-09-22/service-2.json.gz,sha256=gH1H6HUkyYWGZww36tIH0ExvTDaKzOOX4nRjaT8doFw,11777 -botocore/data/compute-optimizer-automation/2025-09-22/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/compute-optimizer/2019-11-01/endpoint-rule-set-1.json.gz,sha256=RgtaSy1-9CLVJfq19qDRodh-Vp2JWbmPd4_u1AvIP0s,1156 -botocore/data/compute-optimizer/2019-11-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/compute-optimizer/2019-11-01/paginators-1.json,sha256=FBFfvnKfuzo8mWExsEqu3Gy9-nKsBwTxjoRuqT-_oU0,1022 -botocore/data/compute-optimizer/2019-11-01/service-2.json.gz,sha256=_A2b6YXa4fM6rmXbiUSdhZcPHEir1wTmmGR0rEgJJ7c,40948 -botocore/data/config/2014-11-12/endpoint-rule-set-1.json.gz,sha256=W6n3707twyoeP8oIaDLV8R4NVetZyduKoRXuI3N2QvE,1231 -botocore/data/config/2014-11-12/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/config/2014-11-12/paginators-1.json,sha256=PrX21DZ-zHv2NKxXpMSY70B5v-TZmr7GeydAsMA7vw8,6209 -botocore/data/config/2014-11-12/service-2.json.gz,sha256=1kTdhwri0SP6vf_LKddrGTGGjf1h-lJvO2QI7cz_VQo,66638 -botocore/data/connect-contact-lens/2020-08-21/endpoint-rule-set-1.json.gz,sha256=jSymrSMnXeyj-5-55tzatkRa_scOm81uywO0yB0DjIo,1152 -botocore/data/connect-contact-lens/2020-08-21/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/connect-contact-lens/2020-08-21/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/connect-contact-lens/2020-08-21/service-2.json.gz,sha256=pHb6gV82G5I6sQ8e3C_BHEmt6ienfSDL89l7t4ppnP8,3162 -botocore/data/connect/2017-08-08/endpoint-rule-set-1.json.gz,sha256=yFrpB_dtJ6_bQ2Q4ZLCRjLm1ybc2By62ari5q_oGNAQ,1230 -botocore/data/connect/2017-08-08/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/connect/2017-08-08/paginators-1.json,sha256=WlDtEkow4KbvqUydSuSxlBWsNyYMJqkrM7j0pw4KqeA,15974 -botocore/data/connect/2017-08-08/service-2.json.gz,sha256=7tNQAgl0EMfhXsrDSr9RpYfHAiHOsKvGn5qPpOmvaoE,183687 -botocore/data/connectcampaigns/2021-01-30/endpoint-rule-set-1.json.gz,sha256=4JvsEChjSkV90asqhlS5Ps8ergy1bSaOlG7ffmwl2fs,1156 -botocore/data/connectcampaigns/2021-01-30/paginators-1.json,sha256=0u4LcBZFpshvXnakuryTCgfVdLeSI-dpWmlZds4eVWs,199 -botocore/data/connectcampaigns/2021-01-30/service-2.json.gz,sha256=c1KL1PHGU9X5POl_XNZ1t7Iq6HGA5LX77WYx0qviAaQ,5276 -botocore/data/connectcampaignsv2/2024-04-23/endpoint-rule-set-1.json.gz,sha256=KoCcXsFtEOU_UXfSDzChfZ4AX8KEVlxxdkPsbNGsyis,1306 
-botocore/data/connectcampaignsv2/2024-04-23/paginators-1.json,sha256=LMkaF55JCeS3JzOF7c4eW3ZqvVCDHNHTmJCpGQ8wpi0,394 -botocore/data/connectcampaignsv2/2024-04-23/service-2.json.gz,sha256=ts0g4lJqYrrW2Rbgvwd6jXTNuA2WkjWxWOYG29iERv8,9361 -botocore/data/connectcases/2022-10-03/endpoint-rule-set-1.json.gz,sha256=1rkpQpKiyBDnP6dhPCELAeTWrZLR7m0KF4OiJYS3hYU,1293 -botocore/data/connectcases/2022-10-03/paginators-1.json,sha256=CBQn85TwOMroUn8rqB689UtbJkuBCAX371eSdVbZRAc,694 -botocore/data/connectcases/2022-10-03/service-2.json.gz,sha256=CI0lq71N3JD2KI7TPmo9YveQdDUiNwPDL3UH-FhheWE,18219 -botocore/data/connectcases/2022-10-03/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/connectparticipant/2018-09-07/endpoint-rule-set-1.json.gz,sha256=QzlzWsKTr1cHUT_WMn-ZD4fdm_LJU_AJD0L1LpmvL3s,1238 -botocore/data/connectparticipant/2018-09-07/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/connectparticipant/2018-09-07/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/connectparticipant/2018-09-07/service-2.json.gz,sha256=S_T2AGbtEoIEBUO-H-32m_0lOGYTDmAxuXS-yWcEro0,8850 -botocore/data/controlcatalog/2018-05-10/endpoint-rule-set-1.json.gz,sha256=OxZZc1Rvypz8maA6XSTOrF-fw3jrBYQJToYv8HYp3bo,1302 -botocore/data/controlcatalog/2018-05-10/paginators-1.json,sha256=4NXGNZRHFcTeivBOzG6lJUAPu3RKCzcLAhvgtDKjG4g,863 -botocore/data/controlcatalog/2018-05-10/service-2.json.gz,sha256=SW1mjKcV7-C6ap-Hs6Ao-7uRnm6cKSy76Pp340uvZpA,7454 -botocore/data/controlcatalog/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/controltower/2018-05-10/endpoint-rule-set-1.json.gz,sha256=nLuiw_ZLvRLUs68MEe3JSI9J7WM5cZcvx9bUHJLAN9o,1151 -botocore/data/controltower/2018-05-10/paginators-1.json,sha256=fnUyaumVMU4LxD6VRfutlQ549Lr8SBKeWce0mqgA0uM,1081 -botocore/data/controltower/2018-05-10/service-2.json.gz,sha256=5Szkqaxlyexx4ejjRrULCOJ8i-zltx9vkxUSeRdvytw,13991 -botocore/data/controltower/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/cost-optimization-hub/2022-07-26/endpoint-rule-set-1.json.gz,sha256=qwoD2hy2lvVz_rGe3ToJxrhhqvG4xqZCrtkGOObV6W8,1309 -botocore/data/cost-optimization-hub/2022-07-26/paginators-1.json,sha256=nc9cjDY9Czbo6fxvbnoL0-JpxcIuCbeBgscEFYlPAuE,721 -botocore/data/cost-optimization-hub/2022-07-26/paginators-1.sdk-extras.json,sha256=O4h78RRWkbXvEDne3NyxM_npO-yI2KU8QL1jnJnfiwE,382 -botocore/data/cost-optimization-hub/2022-07-26/service-2.json.gz,sha256=xVWuX9tVs9ZxgzkkndmmboHQwW1jJfCSksQlUio26fA,10918 -botocore/data/cost-optimization-hub/2022-07-26/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/cur/2017-01-06/endpoint-rule-set-1.json.gz,sha256=-OgCsjW-3OgyjMnaXoI0gLfvNprWxTFlAjgPPRLgUxI,1145 -botocore/data/cur/2017-01-06/examples-1.json,sha256=NyOJJuDWe_rnuUTIp9cdvnw0GfJCK2aaDMW8Qkyf2Mg,2874 -botocore/data/cur/2017-01-06/paginators-1.json,sha256=svrnnDA-WDB_TSjNDhx_3bXmieM10GBn4TRFNlZNPHg,209 -botocore/data/cur/2017-01-06/service-2.json.gz,sha256=HkCb_MyTh5HYp_HVBAbDo9CHYsQcLUiQXBl62C9vRUU,3835 -botocore/data/customer-profiles/2020-08-15/endpoint-rule-set-1.json.gz,sha256=v6ka3bV9EfsFS8kbxUXj602zbrqSmM_Vg9VfLhm_KYk,1148 -botocore/data/customer-profiles/2020-08-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/customer-profiles/2020-08-15/paginators-1.json,sha256=qXQvRpsrVX3Oz-LKJCsZqw5tpHDKwz3UJdjJ5DP8SDo,1877 
-botocore/data/customer-profiles/2020-08-15/paginators-1.sdk-extras.json,sha256=1f1Dqw3cc1YvdqBJdmPMcEzzDehsm62v9MCIxPzxXlU,240 -botocore/data/customer-profiles/2020-08-15/service-2.json.gz,sha256=6QZhYC3bfmAXVfv9kWNpWyDAGYC9GyaLF2YJrCIzRRM,53660 -botocore/data/databrew/2017-07-25/endpoint-rule-set-1.json.gz,sha256=E8Zs2BjFWUZ64MvD5eVmu6jLXsatR4LDQHqaDFIDL_Q,1210 -botocore/data/databrew/2017-07-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/databrew/2017-07-25/paginators-1.json,sha256=i_5ZTxjwAyOvq_e_Etz8L97TB_O2FHjLsAkKFbGJf8U,1316 -botocore/data/databrew/2017-07-25/service-2.json.gz,sha256=Yet89peDO1YvDV-Zcrq0B9ANHti1p42encNoZ0NtiPI,20293 -botocore/data/dataexchange/2017-07-25/endpoint-rule-set-1.json.gz,sha256=J5MnrtZEnMMVZ_n-cKwpvRzf6n5f_-r9wn0Eph2cuFo,1152 -botocore/data/dataexchange/2017-07-25/paginators-1.json,sha256=bmlL4xVyx7gej5sWbNSYqvUOmgM9e3H9YZXsJC4Qh-I,1204 -botocore/data/dataexchange/2017-07-25/service-2.json.gz,sha256=uA8iyST_vlSNIpxYX-cqA_X9_f_UJeONt7XFn8q6YJw,17276 -botocore/data/dataexchange/2017-07-25/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/datapipeline/2012-10-29/endpoint-rule-set-1.json.gz,sha256=OyM3zjZBdOb4JO5kUU-cawK1GhmBUyrsdEjt-w-JH7k,1151 -botocore/data/datapipeline/2012-10-29/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/datapipeline/2012-10-29/paginators-1.json,sha256=JdrA68aI3fnPWh2_ecOxC5DtcFz4OkiO8GvsBkzOgUw,554 -botocore/data/datapipeline/2012-10-29/service-2.json.gz,sha256=b9eNrSyCjKlGHyAlkDVrqziT6ZspWdktxnc4dL0OMa4,9609 -botocore/data/datasync/2018-11-09/endpoint-rule-set-1.json.gz,sha256=BNKxPzRgsl8J5lt4kOgWER3UXPh8GpfCQ3IjmNyURrQ,1150 -botocore/data/datasync/2018-11-09/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/datasync/2018-11-09/paginators-1.json,sha256=OTxZXjoijv0y3dCqSgmbXidkpsOxdvViwEvduFFVp7g,842 -botocore/data/datasync/2018-11-09/service-2.json.gz,sha256=GIzH1dwfCnGfxmLlwKVPZ8zRuguxUlrNzc5tfZqc4BA,40386 -botocore/data/datazone/2018-05-10/endpoint-rule-set-1.json.gz,sha256=fUMysf6qtmsJSiqCSEapp1qHTgNtFMjivg7DPkvdqAY,1126 -botocore/data/datazone/2018-05-10/paginators-1.json,sha256=ExdBh1hfdpz6Bz3zem4hJVUJVWNQcw5qzameugQw2XU,6177 -botocore/data/datazone/2018-05-10/paginators-1.sdk-extras.json,sha256=QFlwjsMb1_P--nZveHlXUkKQWkav_M60iEpGYEXQ5Sk,392 -botocore/data/datazone/2018-05-10/service-2.json.gz,sha256=taHAhIwk1hJcr_U0XylstqXEnXnqZGOeoa7qbIQwM2A,84819 -botocore/data/dax/2017-04-19/endpoint-rule-set-1.json.gz,sha256=RDZOGcuKirAlJNWulHM6IhTD2R7P7U79spcQgKbRHFs,1145 -botocore/data/dax/2017-04-19/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/dax/2017-04-19/paginators-1.json,sha256=OOhBXs1nXQbwQO2dybisWoE6M5Z7WrPyQUCAyGgfEiA,1175 -botocore/data/dax/2017-04-19/service-2.json.gz,sha256=Cab1n2sJbT54EPCr8pVqFhE_cI5vTfl3h_wxyyqRSvM,10264 -botocore/data/deadline/2023-10-12/endpoint-rule-set-1.json.gz,sha256=qJpyNNOuYWUhZ67aV1qlLy3vtUkOmo2IYEVmvYlcN_8,1295 -botocore/data/deadline/2023-10-12/paginators-1.json,sha256=ehIpiR7Z0B8dWu4DvwXv7jImxr8VeYxDPwAxbn7j2D0,4939 -botocore/data/deadline/2023-10-12/paginators-1.sdk-extras.json,sha256=hFPg_wsYLg1pXtVqU63jGPKL7Fc9mW9vLLloldy_b04,218 -botocore/data/deadline/2023-10-12/service-2.json.gz,sha256=o6z1BRHh4i8Ye6clsolkNw-Hzyivxdze0D-r6IayVGw,46876 -botocore/data/deadline/2023-10-12/waiters-2.json,sha256=wkHGWjUdYS6_6x4_XhRzQBCkbZjkedtHL_YQmWlP9Gg,4422 
-botocore/data/detective/2018-10-26/endpoint-rule-set-1.json.gz,sha256=j9sov5nqhZNEiJ3KeCwDMXxpYZG016-pnPcS93Ov-K4,1496 -botocore/data/detective/2018-10-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/detective/2018-10-26/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/detective/2018-10-26/service-2.json.gz,sha256=G_o6ZaduVmtcQfRh7l8PUnMnETWsGFYb24S2KHAr4qE,13190 -botocore/data/devicefarm/2015-06-23/endpoint-rule-set-1.json.gz,sha256=jNTNOTv3MmPgSNaB-Vg5uSOyXNWGUp_iAoTx4p-KSDk,1150 -botocore/data/devicefarm/2015-06-23/examples-1.json,sha256=ph2IehoxWkjr60w1Itx_H2XRMVKQ9J1WHbDDdS2-i6Q,42721 -botocore/data/devicefarm/2015-06-23/paginators-1.json,sha256=dsBpWrsUYvlphjtWSswDS3BYoWFzpq3sqwpOK4ER5vA,2870 -botocore/data/devicefarm/2015-06-23/service-2.json.gz,sha256=Ma1IZii8v05-9H_4Xb96Des350KC_skl4vPQwqMjSOY,31336 -botocore/data/devops-guru/2020-12-01/endpoint-rule-set-1.json.gz,sha256=0pwjFV-6eBErOENeNlIeBU1DfmqqT5ZRDmkCc2_wSSQ,1153 -botocore/data/devops-guru/2020-12-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/devops-guru/2020-12-01/paginators-1.json,sha256=L8a_Vi9F4QUZiw34P5LPuf6ELhTE3_rKfSJTiF-Jsrw,3043 -botocore/data/devops-guru/2020-12-01/service-2.json.gz,sha256=FEzGyOuQenKTeYo1FHWty4fu9Nxe5sNEyUOHmjF4a9s,25068 -botocore/data/directconnect/2012-10-25/endpoint-rule-set-1.json.gz,sha256=-Y4fu64Xpw4tHIqWFY9xd5p2YefrpVvDmzF70kYUYUY,1153 -botocore/data/directconnect/2012-10-25/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/directconnect/2012-10-25/paginators-1.json,sha256=xeMiI713ZrL0L4eTYXOT8iXsmsiguus1SZdRE7OWYCo,643 -botocore/data/directconnect/2012-10-25/service-2.json.gz,sha256=7jHBWPqoasIGh8S8_c1waIqIbl5mFFClBJOt2t6K0cc,22029 -botocore/data/discovery/2015-11-01/endpoint-rule-set-1.json.gz,sha256=S6mCuc6H0TvqjbvmyOC3lcysJsZfqmQgO2ioglYeO9w,1150 -botocore/data/discovery/2015-11-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/discovery/2015-11-01/paginators-1.json,sha256=9TAcWsEEH768Rt1ArlrAzFDXYkp82xhdZ5Kh5LVrkmw,1221 -botocore/data/discovery/2015-11-01/service-2.json.gz,sha256=ZLtHBbQ4ZLnq2NVFN_NeWLBBxZSUsxSLtz74LSq_QzA,19020 -botocore/data/dlm/2018-01-12/endpoint-rule-set-1.json.gz,sha256=d-yrYgBpSaQDOrfDwtg2gkp1EyApl1jVvIoC52xX3qA,1230 -botocore/data/dlm/2018-01-12/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/dlm/2018-01-12/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/dlm/2018-01-12/service-2.json.gz,sha256=-0BAv02k8Lqm3gkBqui6v_3MoDkIYsWf_Y1UpIG7oSA,12223 -botocore/data/dms/2016-01-01/endpoint-rule-set-1.json.gz,sha256=LBMPq7SrynOhAznnMpbV9zuokgEfgYO6wSzTdKZ78zE,1300 -botocore/data/dms/2016-01-01/examples-1.json,sha256=vV_0L6caRIbPqk4IOCZVqNc0xcbN77GsWwY3KaK0SA0,35747 -botocore/data/dms/2016-01-01/paginators-1.json,sha256=8X1HzmVYDUKuiYfiTM_NSKghY4YY9CshhiEc8DWd0Gc,2864 -botocore/data/dms/2016-01-01/service-2.json.gz,sha256=hNgAYJhnCuf8OP56yiiNvmk612rbtLovNprO-693hKM,86839 -botocore/data/dms/2016-01-01/waiters-2.json,sha256=q_cVn5QLry8e5ZZquSwUs7tJo5LQnnQfswzEpsF45F8,11781 -botocore/data/docdb-elastic/2022-11-28/endpoint-rule-set-1.json.gz,sha256=PSErpHCmtoTiSs4k4MNKTHKUmR_CCpX0yYyhRTJAZ2g,1302 -botocore/data/docdb-elastic/2022-11-28/paginators-1.json,sha256=G7BrLkcnoebH5opU6J0VCq3MfoxNBz1fR1-o5H0zFno,562 
-botocore/data/docdb-elastic/2022-11-28/service-2.json.gz,sha256=cHESOZ6saRxi5BHV2dXe-MoR7jg5qRuDKj_QhR-HegI,7623 -botocore/data/docdb/2014-10-31/endpoint-rule-set-1.json.gz,sha256=QuoGtRqWIqiXweuqnNhSgo96SJIvQRpVulwOpfgTwxs,1230 -botocore/data/docdb/2014-10-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/docdb/2014-10-31/paginators-1.json,sha256=Lc8FwQvudtu-XOnFfOh-qM6pOrsnlRajew2PKY6ZtZk,2318 -botocore/data/docdb/2014-10-31/service-2.json.gz,sha256=oX7LlFUAUkhx6SvxwH-ZQ04O-o9wK8unMBGzjBxGz6k,35568 -botocore/data/docdb/2014-10-31/service-2.sdk-extras.json,sha256=U_PgxwtPhWl8ZwLlxYiXD4ZQ4iy605x4miYT38nMvnM,561 -botocore/data/docdb/2014-10-31/waiters-2.json,sha256=8bYoMOMz2Tb0aGdtlPhvlMel075q1n7BRnCpQ-Bcc1c,2398 -botocore/data/drs/2020-02-26/endpoint-rule-set-1.json.gz,sha256=grznwGXkNyhg0Rr7Z7j7Vsa1Nzi2ProuohztoKYid_I,1145 -botocore/data/drs/2020-02-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/drs/2020-02-26/paginators-1.json,sha256=j1Nq2iBDgHjtNTzLW5JGDB5BfwGLcqOX3kewE_mNNIM,1909 -botocore/data/drs/2020-02-26/service-2.json.gz,sha256=NZh7YC_uhsIuxBE9s3fg68bQxbgLhrX5bFmz8amhMjg,21053 -botocore/data/ds-data/2023-05-31/endpoint-rule-set-1.json.gz,sha256=bYGI15ZMVa-K94UIDC3xVm37FtNEuyTKVUiE1z4uGT0,1295 -botocore/data/ds-data/2023-05-31/paginators-1.json,sha256=584legW_1CS0O-xK6_nd_ykAX1uQpbsHJyqh7qMGBGs,989 -botocore/data/ds-data/2023-05-31/paginators-1.sdk-extras.json,sha256=hMmJO2oml7dz_QZQiHrgSlhB_pocyqPxnhOMS3EU6Zg,959 -botocore/data/ds-data/2023-05-31/service-2.json.gz,sha256=M8r6r3gbg0gJSQdlCNZWwu07ns8OFnP-yg-DCW0dsVc,7833 -botocore/data/ds/2015-04-16/endpoint-rule-set-1.json.gz,sha256=sBGgBoiR0SaqY3eK9IX8PqlwbayODqpor9EKUa0UCQQ,1144 -botocore/data/ds/2015-04-16/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ds/2015-04-16/paginators-1.json,sha256=yxY5Cm7p39vcMgdVs5pLlw3VkElaF6EQcTG7L5lY2cU,2555 -botocore/data/ds/2015-04-16/service-2.json.gz,sha256=pmZ4e8nriD3WfdQQiC0juttARlM-tshtaoS8cEmOzlM,33084 -botocore/data/ds/2015-04-16/waiters-2.json,sha256=0Wmt13KRi2wlleWgvUUHMnHbKBgJ0dKVvzJPbV1_qDQ,566 -botocore/data/dsql/2018-05-10/endpoint-rule-set-1.json.gz,sha256=ht63Ff5iQ9ESt1-TrLN7M_zXdO6MDBzVPcMSgZxioVg,831 -botocore/data/dsql/2018-05-10/paginators-1.json,sha256=TqVyPmU4ENrjA8pbfdQW-NgaOzWxliEb90BZ7NxjZ5I,187 -botocore/data/dsql/2018-05-10/service-2.json.gz,sha256=qfMxNhSXGP-KJhpP-Iw1fwq-Dmv0nMIqpMtDGZOaE1o,6436 -botocore/data/dsql/2018-05-10/waiters-2.json,sha256=lav4JgEiv9RzUWsPjBqpmv2bkfKIaKv92ErLhVzJk8M,665 -botocore/data/dynamodb/2011-12-05/endpoint-rule-set-1.json.gz,sha256=GvuoHY7cYBl-ueT8FvLORaWqQyKLav3zSi7xvqcKpiI,1343 -botocore/data/dynamodb/2011-12-05/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/dynamodb/2012-08-10/endpoint-rule-set-1.json.gz,sha256=_MGW5fRt8COrJSjMUgBOTVRPsdBTaHSph-ULHAO2UKo,3447 -botocore/data/dynamodb/2012-08-10/examples-1.json,sha256=cZ5PBzQtSA9b1ZN39RffvUM54Tqf_h5-AQA7zSBVK4Q,16947 -botocore/data/dynamodb/2012-08-10/paginators-1.json,sha256=U84oi-heJVXxjHM1enODt6qI5J117zh0YoM4BHwZZ18,1103 -botocore/data/dynamodb/2012-08-10/service-2.json.gz,sha256=iURDe-rxijQuYpsKb_AGMuTZc2wUSJkZg-mXMBS8KUI,83205 -botocore/data/dynamodb/2012-08-10/waiters-2.json,sha256=G_iaXR3xZP3M8lpMR1olm2p-EvK6InTidNZnUUqPL70,727 -botocore/data/dynamodbstreams/2012-08-10/endpoint-rule-set-1.json.gz,sha256=cRPLIPjhlaiWQVUeKiiugBwD5EHpY9vR5Kg6Fw9acfM,1602 
-botocore/data/dynamodbstreams/2012-08-10/examples-1.json,sha256=LF2m4pmyTs0G8NR6AhmybL0E2F9WHfnbxz5q31DtjAg,7693 -botocore/data/dynamodbstreams/2012-08-10/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/dynamodbstreams/2012-08-10/service-2.json.gz,sha256=leCwq62n0tFPWdBDQRTp8m3_HbqG6aNeH3GfrF4H9d8,7008 -botocore/data/ebs/2019-11-02/endpoint-rule-set-1.json.gz,sha256=JgrcFSthlkXisasmuEivn2-R5E_llWoT2V1D-zNpZVg,1145 -botocore/data/ebs/2019-11-02/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ebs/2019-11-02/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/ebs/2019-11-02/service-2.json.gz,sha256=1sc4uE68eUWxFG4Ex0NUm9TUEpeLxD4V-ru4OljNgT8,6402 -botocore/data/ec2-instance-connect/2018-04-02/endpoint-rule-set-1.json.gz,sha256=Wf81iAYkXZUKIOu7dQlhixq9xQx6QdfFwOWH5jBcTI0,1161 -botocore/data/ec2-instance-connect/2018-04-02/examples-1.json,sha256=Qnm4-ldcu-2O38JTe_w17UJWdblMaRBfIc8HyJ62DYU,1712 -botocore/data/ec2-instance-connect/2018-04-02/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/ec2-instance-connect/2018-04-02/service-2.json.gz,sha256=2sbkwrMqEpM4DZ--vH-yC0loGCtYmEIiFusdmyOuy2w,2303 -botocore/data/ec2/2014-09-01/endpoint-rule-set-1.json.gz,sha256=glO6GiQb_rO_nE4SlXoi7PTHmnT2YjgQLO9fH88Mx6Q,1237 -botocore/data/ec2/2014-09-01/paginators-1.json,sha256=XpA8TZvmBGGraKlRGE-U-YeLIBN1ZvbcyE8Wh8uuIDM,1271 -botocore/data/ec2/2014-09-01/service-2.json.gz,sha256=n7g1z1PAFJLDQ5vafBXoaIwYtWSyhUZ486nav2uisR4,71841 -botocore/data/ec2/2014-09-01/waiters-2.json,sha256=HG1xDu-8ICfvY1n_YV9i0ylufepFUYmDd0dLkQxwKuY,8548 -botocore/data/ec2/2014-10-01/endpoint-rule-set-1.json.gz,sha256=glO6GiQb_rO_nE4SlXoi7PTHmnT2YjgQLO9fH88Mx6Q,1237 -botocore/data/ec2/2014-10-01/paginators-1.json,sha256=Uns0O6V6ZIXI09iZdCY77w-CBHbes_siW5vFU-bpE1w,1439 -botocore/data/ec2/2014-10-01/service-2.json.gz,sha256=7gKJQoOTzRxMbbNnnJD7A1lDbX3hDJ01jOkpsFRpR3E,75362 -botocore/data/ec2/2014-10-01/waiters-2.json,sha256=UDhKYGIrItEq2e56vKMh6yLdn_YfsfTYsmankCjsR3k,11040 -botocore/data/ec2/2015-03-01/endpoint-rule-set-1.json.gz,sha256=glO6GiQb_rO_nE4SlXoi7PTHmnT2YjgQLO9fH88Mx6Q,1237 -botocore/data/ec2/2015-03-01/paginators-1.json,sha256=Uns0O6V6ZIXI09iZdCY77w-CBHbes_siW5vFU-bpE1w,1439 -botocore/data/ec2/2015-03-01/service-2.json.gz,sha256=QFrCJVISjtvc0JSxPDYbxpP1zW6e-2eSc84MzbEL_lA,77885 -botocore/data/ec2/2015-03-01/waiters-2.json,sha256=UDhKYGIrItEq2e56vKMh6yLdn_YfsfTYsmankCjsR3k,11040 -botocore/data/ec2/2015-04-15/endpoint-rule-set-1.json.gz,sha256=glO6GiQb_rO_nE4SlXoi7PTHmnT2YjgQLO9fH88Mx6Q,1237 -botocore/data/ec2/2015-04-15/paginators-1.json,sha256=Uns0O6V6ZIXI09iZdCY77w-CBHbes_siW5vFU-bpE1w,1439 -botocore/data/ec2/2015-04-15/service-2.json.gz,sha256=We_aetspUrUH4UgbcjautxmM6WIq_IaMXUio-HvlSxg,90171 -botocore/data/ec2/2015-04-15/waiters-2.json,sha256=1iUHJTDrTvb5_HbDMbVVzC4Ex1S97GZl-tnP70MaDEY,11546 -botocore/data/ec2/2015-10-01/endpoint-rule-set-1.json.gz,sha256=FV2-lrGmMUvFAPk47n1Q1HXjfQqEgu0dokKUWxLkTD4,1391 -botocore/data/ec2/2015-10-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ec2/2015-10-01/paginators-1.json,sha256=Vom5HeCc0UgDyEyYKw3piztolJ3IIxz_tIhEX61TvM8,1793 -botocore/data/ec2/2015-10-01/service-2.json.gz,sha256=buGcWTup_BiB9qX2dT58SZ0ODUOO7C92DzaUr702bYg,107913 -botocore/data/ec2/2015-10-01/waiters-2.json,sha256=8sXo9xWtm1IZMKcm9Ne42ha-9XDTVP_fZUejgA1tw3E,14823 
-botocore/data/ec2/2016-04-01/endpoint-rule-set-1.json.gz,sha256=FV2-lrGmMUvFAPk47n1Q1HXjfQqEgu0dokKUWxLkTD4,1391 -botocore/data/ec2/2016-04-01/examples-1.json,sha256=0xdUoNVzXNn5ZMmA_aiPwiQC68adrXjBJPhw3AzQC8M,109914 -botocore/data/ec2/2016-04-01/paginators-1.json,sha256=Vom5HeCc0UgDyEyYKw3piztolJ3IIxz_tIhEX61TvM8,1793 -botocore/data/ec2/2016-04-01/service-2.json.gz,sha256=qmmyGKjggVaA5Omf_wevkLh3_wcBljsoABv6FF0BLU4,112481 -botocore/data/ec2/2016-04-01/waiters-2.json,sha256=ZjSjdDS-pisO_MoRjsulXMshrcU5qNJd4m1bOBQ9mKQ,15259 -botocore/data/ec2/2016-09-15/endpoint-rule-set-1.json.gz,sha256=FV2-lrGmMUvFAPk47n1Q1HXjfQqEgu0dokKUWxLkTD4,1391 -botocore/data/ec2/2016-09-15/examples-1.json,sha256=Dv18Ql8faOeBMQlenC7HBzlgrNQXNeokvLsyFf6Q_yY,110174 -botocore/data/ec2/2016-09-15/paginators-1.json,sha256=Vom5HeCc0UgDyEyYKw3piztolJ3IIxz_tIhEX61TvM8,1793 -botocore/data/ec2/2016-09-15/service-2.json.gz,sha256=olZl6G0_IUcGBsnTgCfz7kpk0i-kkebpq6ErNQzzd3A,114400 -botocore/data/ec2/2016-09-15/waiters-2.json,sha256=1ZtptOEInU4p-4ZQFXbC5lxZ8XNsseki72qxLO2dX4M,14875 -botocore/data/ec2/2016-11-15/endpoint-rule-set-1.json.gz,sha256=MLvt9egMssclFFZUiz8t5eUFfQjzqwISWrttG_14YOE,1231 -botocore/data/ec2/2016-11-15/examples-1.json,sha256=gB8-MuMSl9N4ic1oBYCv02B_YplxOdnKsfS7g5pY7hk,147949 -botocore/data/ec2/2016-11-15/paginators-1.json,sha256=U7CXec4GP15ASq731ieA5tYrUc6A5gamOJ8mC5HqMN8,31259 -botocore/data/ec2/2016-11-15/paginators-1.sdk-extras.json,sha256=s-xAN9v51q2N4UE-PQ_I-wK9PDbrSnwQlKx0yA_rmSk,249 -botocore/data/ec2/2016-11-15/service-2.json.gz,sha256=XKq5V8NquKMM6wcI2xqKzRUf6qNJMulyA0aHS6TEkl4,480767 -botocore/data/ec2/2016-11-15/waiters-2.json,sha256=vtdUBpnIDWN6HFhRORCKX2AhcB4607o4_3EGE7JarSQ,20590 -botocore/data/ecr-public/2020-10-30/endpoint-rule-set-1.json.gz,sha256=FdM2eRo2pdU2-VfACffYEnQBaSRnbVEYONIPduQMch0,1245 -botocore/data/ecr-public/2020-10-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ecr-public/2020-10-30/paginators-1.json,sha256=EEmON1DSCdAARd-o_S_RiZ6rXcWO8AZbYlx4UMyZEGE,711 -botocore/data/ecr-public/2020-10-30/service-2.json.gz,sha256=9BN4Q7ChM_xnMgskWCgoILKvTaFW-YhozCVsz0B6eJc,10719 -botocore/data/ecr/2015-09-21/endpoint-rule-set-1.json.gz,sha256=5Hnat1efG_OUbCrTRkCCcTdhvlVP6hUsn-qvaZa-Fq8,2387 -botocore/data/ecr/2015-09-21/examples-1.json,sha256=cFx-qAY3SfNXEHCMe7I9RTWxV-Jtlo8moRHGDZ5UCAM,6603 -botocore/data/ecr/2015-09-21/paginators-1.json,sha256=jTAyTM5a36H94lthRLaf_MOoFKa0_9YW0wqzaMoyiNM,1736 -botocore/data/ecr/2015-09-21/service-2.json.gz,sha256=J268gUBF4YFRD1YnTJ9JtmqPdYNpfQcIE2_wigEew9c,29968 -botocore/data/ecr/2015-09-21/waiters-2.json,sha256=j4QQUhn_PYN87gWoaY1j1RR-lv7KjzPItwwn1WMYkB8,1482 -botocore/data/ecs/2014-11-13/endpoint-rule-set-1.json.gz,sha256=OLN3sFl2OGypYLPkBxewJbJvmXHeL_8AQVlTz_HH-PU,1145 -botocore/data/ecs/2014-11-13/examples-1.json,sha256=Qp-rrnSHaDiVv4ESeJkTGfC1-guCjRc9B9LfiwjrMjg,36519 -botocore/data/ecs/2014-11-13/paginators-1.json,sha256=Y_nqEkKUMY3UhZ5D6DJ2QqxBHfnLkqM6FsOxPp5JUVE,1565 -botocore/data/ecs/2014-11-13/service-2.json.gz,sha256=7oaPPecN54oWrBZi2ZAwLfCrWBQuxpJFxCv2hjWRfQo,126535 -botocore/data/ecs/2014-11-13/waiters-2.json,sha256=F4d_a7_xVQIib5MpmSitTQBxupfL0Z9NqxOibIA6Igs,2246 -botocore/data/efs/2015-02-01/endpoint-rule-set-1.json.gz,sha256=g_TtilHVH2kVNiZf5sQ24gFag1TF9P126STt4SuDbDM,1571 -botocore/data/efs/2015-02-01/examples-1.json,sha256=0EFBCHNGLNS0ftGQqjngkhfTFYpw6E-7lnuAh-d6YKU,8825 -botocore/data/efs/2015-02-01/paginators-1.json,sha256=SKRuOWm1E5Nvvzppzjn-IeS1Lj0I3qSqvc9t9XtKpA4,878 
-botocore/data/efs/2015-02-01/service-2.json.gz,sha256=m3KgP_3uuqSmewB48BDQ22XlWmjOCdQrIJd4nF_HMOs,23873 -botocore/data/eks-auth/2023-11-26/endpoint-rule-set-1.json.gz,sha256=njfS_Q5Hs7ko4CMsEXhpOOLseMtMhXf2SFs4WjkaCtw,1126 -botocore/data/eks-auth/2023-11-26/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/eks-auth/2023-11-26/service-2.json.gz,sha256=fIb6vzFNcwM9pVcLGZhz1wx5VaAd6e6XNM0K-vbwaO8,2343 -botocore/data/eks-auth/2023-11-26/waiters-2.json,sha256=tj1ZnaqhwmJkUEQlwH7wm1SqY3lg1BvZDfzfPaIgNrY,38 -botocore/data/eks/2017-11-01/endpoint-rule-set-1.json.gz,sha256=of7JOvqqmL2bRckbtmLJk647xaOtQCQ-pxnwgSI5MVc,1265 -botocore/data/eks/2017-11-01/examples-1.json,sha256=vCT3MFB7D3tNzqaIdxd8nyDbt7hevsAvDE4RQTQcEKg,5021 -botocore/data/eks/2017-11-01/paginators-1.json,sha256=-dO7ymV_bnWAiWB8cAB4Me20IHUuL0M_wyxdaHa2NDg,2715 -botocore/data/eks/2017-11-01/service-2.json.gz,sha256=WRQ58CLerc8Tqvoxafky1VBE8xSjlt9QYp2Y2zLv_bU,56062 -botocore/data/eks/2017-11-01/service-2.sdk-extras.json,sha256=pmn0V8Su5NiqW8Y3X-IBtzD1Bz_JANtKgU4fsr-i_bM,107 -botocore/data/eks/2017-11-01/waiters-2.json,sha256=j-ZLRcYn34oHDZY9xth7Vrz7q1eCNn_fzC1bK1WVVwo,4198 -botocore/data/elasticache/2014-09-30/endpoint-rule-set-1.json.gz,sha256=oiN48uw-eOfLnRSto0P41S2G7W_IHbRQES5Us6o9oM4,1241 -botocore/data/elasticache/2014-09-30/paginators-1.json,sha256=YkZxwpICpidoDrIimyr0yFGYg_T0emkSfhlNfPOfVMA,2171 -botocore/data/elasticache/2014-09-30/service-2.json.gz,sha256=4X79tEvUU1rjfO-6203pbLYf_OxmYkW4MIz1d39DzWE,22920 -botocore/data/elasticache/2014-09-30/waiters-2.json,sha256=mIVMN9SNrvDJ2iW_uXAA-N5ptxGmDw964Sv89zKAs-g,3719 -botocore/data/elasticache/2015-02-02/endpoint-rule-set-1.json.gz,sha256=ttPAZOReOMz7x1MwEGALZaIXSyUAVW2rSsi-HHmqxoU,1236 -botocore/data/elasticache/2015-02-02/examples-1.json,sha256=iWpOlje8s2EFHlnYNgjHX2DpC7teIKmeA7f6e51u00I,111590 -botocore/data/elasticache/2015-02-02/paginators-1.json,sha256=XrsOWe2fflZLszEuZYsZjeXPNAAj5IjpOdfsse_Peg8,3401 -botocore/data/elasticache/2015-02-02/service-2.json.gz,sha256=7EdobcW3J9I84zFSJYk8hRWxlQZWikOmkeRgQnXHLXA,56727 -botocore/data/elasticache/2015-02-02/waiters-2.json,sha256=N6NTYHqUoktWaIjapl3RDepPknxNlIbb8a0wnS0HB_E,5118 -botocore/data/elasticbeanstalk/2010-12-01/endpoint-rule-set-1.json.gz,sha256=1tnXaW-vU5__TB0Limg5PP-CnaXS61i-I7yvu7ZmMf8,1240 -botocore/data/elasticbeanstalk/2010-12-01/examples-1.json,sha256=EuEpZEobhGxWPfRosGTFNWYs8zRFVtkQtLXD8M_5fm0,37449 -botocore/data/elasticbeanstalk/2010-12-01/paginators-1.json,sha256=qM8N07fmdTtnZBXFiyFeW31EjqjmDWb-viwc19UyF5o,934 -botocore/data/elasticbeanstalk/2010-12-01/service-2.json.gz,sha256=7UAm45nDI3IqdlaFSqJ6mcU7hGNrXxLYRAR-fJArVWQ,27773 -botocore/data/elasticbeanstalk/2010-12-01/waiters-2.json,sha256=nS1qW0cVQpjnVhpONryvuFWWW4JwJYSW82ooLigmCu0,1463 -botocore/data/elb/2012-06-01/endpoint-rule-set-1.json.gz,sha256=gUQlBNik9x1aVKZJqPexgUYSEL_mRQYykfdj4M5T8Bg,1242 -botocore/data/elb/2012-06-01/examples-1.json,sha256=NE6HcGypE87pOfvGkxKi_QD-UJ_qWHG2_Q9ynk6V9xA,30446 -botocore/data/elb/2012-06-01/paginators-1.json,sha256=udADJnjh3b-REUTKNlC9yYaRI6aOiXfx3demJA1Msxg,373 -botocore/data/elb/2012-06-01/service-2.json.gz,sha256=9ViOaDMqc163jY03DHAI_U9c9wH45V5tLiXKdYbv4Kk,13212 -botocore/data/elb/2012-06-01/waiters-2.json,sha256=9NjB-6qbZ5pHxElH90T-4YPEBdXHCA9QHdcF96gTbP0,1527 -botocore/data/elbv2/2015-12-01/endpoint-rule-set-1.json.gz,sha256=gUQlBNik9x1aVKZJqPexgUYSEL_mRQYykfdj4M5T8Bg,1242 -botocore/data/elbv2/2015-12-01/examples-1.json,sha256=4Qxoz28hEDW8u1O7iGLKnH9NNb7Po5qybLFQtvtR7ss,44281 
-botocore/data/elbv2/2015-12-01/paginators-1.json,sha256=HdpSUaB3jTHWaSt0O3wUi_qAjuGs7sz-vbUPV64kgWc,1744 -botocore/data/elbv2/2015-12-01/service-2.json.gz,sha256=8pLOQP2qba0Vx8L6JRawwDa263ofBeh-SPY3UZVM4JY,32908 -botocore/data/elbv2/2015-12-01/waiters-2.json,sha256=k-g2ypXqfbW4ktwuK1iVKpApIncFhOPemhbs7pf7cW8,2371 -botocore/data/emr-containers/2020-10-01/endpoint-rule-set-1.json.gz,sha256=0lGN8074I4CTsu5Kr_sKsnxu25NYqaG8SwoOrkmUuXc,1235 -botocore/data/emr-containers/2020-10-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/emr-containers/2020-10-01/paginators-1.json,sha256=H-qB-RVfZ-v6uivpkok6jdL9RsL9yHZmDhdG4hdiPtU,889 -botocore/data/emr-containers/2020-10-01/service-2.json.gz,sha256=ompf2Y7-iduJsbtFP3t5Fa3e1ZpRVn0GbTeM9K3tGAY,11282 -botocore/data/emr-serverless/2021-07-13/endpoint-rule-set-1.json.gz,sha256=jtK9zN_ww823GhgbhezAaITrMxnDrGydHLt820Bk6LE,1152 -botocore/data/emr-serverless/2021-07-13/paginators-1.json,sha256=X_bd8HxYUcjMp19q-YBGDOq_AGDlUYT1vP3yTfn6l8E,529 -botocore/data/emr-serverless/2021-07-13/service-2.json.gz,sha256=JPhhSnpmLKo07QvfOh3zcbUMfM6WcIhPlhTGvFkXMmE,12704 -botocore/data/emr/2009-03-31/endpoint-rule-set-1.json.gz,sha256=X5WiXq1tv89N0InX9lfrniuo5-hwH6D3MpJEsDG3oeE,1239 -botocore/data/emr/2009-03-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/emr/2009-03-31/paginators-1.json,sha256=4EzVWE6TiQt5Mklp197KH8t17OiwaOVgVnBGK4y_HiQ,1357 -botocore/data/emr/2009-03-31/service-2.json.gz,sha256=kjkbInauNxmsBL88ZtbHcyL_haWsjGJpuDHgqkivELU,47420 -botocore/data/emr/2009-03-31/waiters-2.json,sha256=pMh5RSVHgFU-DlrH0dSf4IibHo9Hddmg9DvaR4a0Z90,2073 -botocore/data/endpoints.json,sha256=X94TgkSwEcfGT35DAIbHeoOnOTF4eA5x1n9yOuTX6PA,1253438 -botocore/data/entityresolution/2018-05-10/endpoint-rule-set-1.json.gz,sha256=SX72zrmnqPqK_Vez7KAnWOoqS9HxoDzCJp3ohRzW1cA,1304 -botocore/data/entityresolution/2018-05-10/paginators-1.json,sha256=SRcdwInaqBXq7gpYBftOPb7OMmwgOTUQUTrGaY4594g,1245 -botocore/data/entityresolution/2018-05-10/service-2.json.gz,sha256=uKDKcmcjvBt5mRt8QV6YbWH8ODOkEo59dNWxiOLfqeo,19472 -botocore/data/es/2015-01-01/endpoint-rule-set-1.json.gz,sha256=sZmoX-jyq_4WXYOpzXsjONus8GNdq_KExmnUY0oTVwE,1311 -botocore/data/es/2015-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/es/2015-01-01/paginators-1.json,sha256=sbfve7QYejJgHClHTY4PgdwH4A-PJlY2y0XZ0qRCq9Q,1022 -botocore/data/es/2015-01-01/service-2.json.gz,sha256=E_XX15Z8sKEpPfcNI1JCIAd08X85e36_3zybG2ARA30,29503 -botocore/data/events/2014-02-03/endpoint-rule-set-1.json.gz,sha256=UQL0lig0DdNBwzjzBtQnX40G3w6nr9jEqUjio8vih2g,1856 -botocore/data/events/2014-02-03/service-2.json.gz,sha256=KReL2WGM5coU0H_eVn-blnTda2s8ZoT4s29zOqZrx-U,5254 -botocore/data/events/2015-10-07/endpoint-rule-set-1.json.gz,sha256=ikecNNj4vRZbV-jTzqZHf1LN69Bb3jU37V7rbjHnT3o,1843 -botocore/data/events/2015-10-07/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/events/2015-10-07/paginators-1.json,sha256=A4gA5VY4LAnP_3iCOI-P0-c5nVH5ntM9hOh3gytyGco,504 -botocore/data/events/2015-10-07/service-2.json.gz,sha256=7dHQ-RoDdkV--KAWYv86261SKMgYKuMb9BIezG-P9mc,36776 -botocore/data/evidently/2021-02-01/endpoint-rule-set-1.json.gz,sha256=z_w2KPggzrcOIDDly3BRXu0NCu9Hi62FrnqyIotFGhc,1150 -botocore/data/evidently/2021-02-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/evidently/2021-02-01/paginators-1.json,sha256=dzsz3rOFQc5MqVrha2K97L1ooI2e1kt8Om55efyV-tI,1016 
-botocore/data/evidently/2021-02-01/service-2.json.gz,sha256=YJ5Wq21LJWfD3u_QkAcOp3c4KIpnQpvMelcn8eIYJTs,20415 -botocore/data/evs/2023-07-27/endpoint-rule-set-1.json.gz,sha256=zRES9GG6eeWVqJTAY4nv_d0rV9l0FWNfv7_FtwTzmUQ,1295 -botocore/data/evs/2023-07-27/paginators-1.json,sha256=7n46zlvhz-yf-V1sx4iKtmndDOZNMHXF08ziDtI8sk4,559 -botocore/data/evs/2023-07-27/service-2.json.gz,sha256=ty18UcUChQUl1tQwHRqPEB9TmdGQRZFhBIyB9pyFKHs,10841 -botocore/data/evs/2023-07-27/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/finspace-data/2020-07-13/endpoint-rule-set-1.json.gz,sha256=3Hb7TIKrGjYfAZV3_8hTopSi7_G3_XcY7u-mhItUJ2w,1152 -botocore/data/finspace-data/2020-07-13/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/finspace-data/2020-07-13/paginators-1.json,sha256=2RzSHda8vNoQX1L1pkYSwHfCF6Us0IKOrXdsXe-ZHkU,851 -botocore/data/finspace-data/2020-07-13/service-2.json.gz,sha256=0p3ciqOCbmc0rWoJHBpZJIhKteSUyNX4MyeytF340qw,14501 -botocore/data/finspace/2021-03-12/endpoint-rule-set-1.json.gz,sha256=WX3nSfqGzlLeGhCW5LJKoQ-oJyVP9IpNMF7_f53rUpQ,1149 -botocore/data/finspace/2021-03-12/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/finspace/2021-03-12/paginators-1.json,sha256=S_FGEtC07GgFCRSKmv_l4RhRBCFmOEmIsQl7QfDI678,197 -botocore/data/finspace/2021-03-12/service-2.json.gz,sha256=G475x3i5fF9J4mLRFCnHP68sBM2OLzdmlue6xlrMuls,30464 -botocore/data/firehose/2015-08-04/endpoint-rule-set-1.json.gz,sha256=vntFf3yUNggCsodRza1jMKzHZJbcDhcfGRhMd6oexhE,1148 -botocore/data/firehose/2015-08-04/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/firehose/2015-08-04/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/firehose/2015-08-04/service-2.json.gz,sha256=wTuB7yqLbWQLFsHzONHMnzj9BavktZfqAlDSPkSK43o,36343 -botocore/data/fis/2020-12-01/endpoint-rule-set-1.json.gz,sha256=bA2NR_RwFl7dn29ly2tIXP24UYQpZuLIgy7DwGNaNL4,1230 -botocore/data/fis/2020-12-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/fis/2020-12-01/paginators-1.json,sha256=FZ37WL0c12pVwCrWPZghttxIuVBtwDYe0h208m44zeQ,1107 -botocore/data/fis/2020-12-01/service-2.json.gz,sha256=q3-DO_Hpjnddkl0StWKfksaR2d9ki0xtWJ320q-hLi0,10124 -botocore/data/fms/2018-01-01/endpoint-rule-set-1.json.gz,sha256=bx5p5vwO1Z6_bIbApbXkelcHk2KZyYZjq4PrgSay7DE,1145 -botocore/data/fms/2018-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/fms/2018-01-01/paginators-1.json,sha256=Nv9OHpCiWQyuj5sj_Pz-0TjbnmtiMCR0tuySMApzYjM,1470 -botocore/data/fms/2018-01-01/service-2.json.gz,sha256=STTpr3Cfi2UOIelvVkDl6Rk2rSarUIwQeKTCyqOPvcg,34811 -botocore/data/forecast/2018-06-26/endpoint-rule-set-1.json.gz,sha256=7kurepV542NfF3i0iY3XCRVFnJcTcCcWN4KZXAssthc,1148 -botocore/data/forecast/2018-06-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/forecast/2018-06-26/paginators-1.json,sha256=uwjfu4LU_nDuv9woqU_mcL_58oVcFi8QfUSAtQycpA8,2508 -botocore/data/forecast/2018-06-26/service-2.json.gz,sha256=VsiqHj4yU8CsnfS9KrVf23hcVcgQM0R0w8_Qw84G10s,40082 -botocore/data/forecastquery/2018-06-26/endpoint-rule-set-1.json.gz,sha256=hTTScnvIF7Wo0v6j8CRVoTm5SsNKIsczyGPVcM2nF0U,1152 -botocore/data/forecastquery/2018-06-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/forecastquery/2018-06-26/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 
-botocore/data/forecastquery/2018-06-26/service-2.json.gz,sha256=T0wavFy4pCW9hwaqGRvrWWFuLRt1EhuqUudn1IVqE6Y,2184 -botocore/data/frauddetector/2019-11-15/endpoint-rule-set-1.json.gz,sha256=XiQFoak8f9dZ4JXcsZMKiGU-hVza1UgqptkIkGdcErc,1152 -botocore/data/frauddetector/2019-11-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/frauddetector/2019-11-15/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/frauddetector/2019-11-15/service-2.json.gz,sha256=BCafUX5WF-wjWzNXP3WmZQIO4UwJa8_FY_Tri6ogv1c,24352 -botocore/data/freetier/2023-09-07/endpoint-rule-set-1.json.gz,sha256=F2qMUpLUYBtFDFvcQJyxKbMnWeciMm2qlfbmi8C0KlU,1420 -botocore/data/freetier/2023-09-07/paginators-1.json,sha256=7D1EodEvT5WrTkrtPkS0gfa4nMHocl8ljxcUoMJ1_8k,370 -botocore/data/freetier/2023-09-07/service-2.json.gz,sha256=I7bu8yzysdEl2CskpiF_cB2rBhG1PrA5kuyt6a2rKHs,4640 -botocore/data/freetier/2023-09-07/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/fsx/2018-03-01/endpoint-rule-set-1.json.gz,sha256=UJKCv-ZCx3gF0XAnbCm-ELmohAJDQVwtmborlCiA0xM,1145 -botocore/data/fsx/2018-03-01/examples-1.json,sha256=Ys4PS4GcrfV3F5Lg4hkaZgyemGgNKNLYSm-uepLDkR4,14242 -botocore/data/fsx/2018-03-01/paginators-1.json,sha256=s2sVh0nBC-oAhWvml5z1jHydcZmhTD8r5N-d5V12VjU,1250 -botocore/data/fsx/2018-03-01/service-2.json.gz,sha256=IyymgYhA9HTzjyapg9ZCDQUlyimEImoplkUiu6p8VQ8,80261 -botocore/data/gamelift/2015-10-01/endpoint-rule-set-1.json.gz,sha256=gLWvFVro36RgbgvrjGLWW_vWB6S2ZSXZlSvB8C3PbeQ,1149 -botocore/data/gamelift/2015-10-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/gamelift/2015-10-01/paginators-1.json,sha256=hPYwxPqikAsx9c8Py0ZJ_Yie8F5B8RnfEVJkend1l-M,4461 -botocore/data/gamelift/2015-10-01/service-2.json.gz,sha256=DdtWpk8YCnEwh45Bg3wG5KkjQZLKjxkvWBJuBjqKImw,116835 -botocore/data/gameliftstreams/2018-05-10/endpoint-rule-set-1.json.gz,sha256=cG5JBz-_xmvbNN1g3G7d6yCHYwBly2KssX4x93k89qg,839 -botocore/data/gameliftstreams/2018-05-10/paginators-1.json,sha256=j131UfxmFfUe6Mtq2v4AS_kj5v36e56Y_LEKcltg0lM,690 -botocore/data/gameliftstreams/2018-05-10/service-2.json.gz,sha256=bil04QfvHu8SFZM3psvN9herm4gpc597w96rQ_MaH7w,26747 -botocore/data/gameliftstreams/2018-05-10/waiters-2.json,sha256=1bi9fg1J2pQ1930lM_N2eBvnZBKDCjdy7s5sFugPVA4,2343 -botocore/data/geo-maps/2020-11-19/endpoint-rule-set-1.json.gz,sha256=c5YIFUxu_ldTB--u1fXdgl69_M69uE_F3cGEDIDIVT4,1639 -botocore/data/geo-maps/2020-11-19/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/geo-maps/2020-11-19/service-2.json.gz,sha256=DZL4YDqBVEr4C54IljyxD-3bdj1mqFxo1oMxQJzl7gs,7612 -botocore/data/geo-places/2020-11-19/endpoint-rule-set-1.json.gz,sha256=ye2hye_DGLHmf_t8Cyyt-jVtD5v3F0l31-drA8uZsUY,1640 -botocore/data/geo-places/2020-11-19/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/geo-places/2020-11-19/service-2.json.gz,sha256=vmFPXzyHt7uerEhtnR-UDzvTaebbK3hYgkiixSIDjLU,16133 -botocore/data/geo-routes/2020-11-19/endpoint-rule-set-1.json.gz,sha256=KNpaJXoSZy27Cg1QhyBgVZKdZizrCBNMGfvl7EU2UjQ,1645 -botocore/data/geo-routes/2020-11-19/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/geo-routes/2020-11-19/service-2.json.gz,sha256=4KlhZx2Ocq4v_DrfJHeW-RJAVHDc6T931qrAM4ziEo4,34018 -botocore/data/glacier/2012-06-01/endpoint-rule-set-1.json.gz,sha256=0gt1LhpSmRMu6OYwZrrC-1-3JU7fNmOg6d92c0G2DOI,1232 
-botocore/data/glacier/2012-06-01/examples-1.json,sha256=hR-1NmWo9lL0Cdqnr6x95Ywu_VfJucv0T4OveUp-S4o,27536 -botocore/data/glacier/2012-06-01/paginators-1.json,sha256=RAeqGFOs4GRiC-DuphMOBHWljwDfqBQINYf1qA2LbNA,628 -botocore/data/glacier/2012-06-01/service-2.json.gz,sha256=CCfpXfvjKhSwfN6Ww011istUdgsOQ7zUaZ82MLCAbik,21004 -botocore/data/glacier/2012-06-01/waiters-2.json,sha256=hzoyJJT1wJh9lq1_z4MK2ZBj98TGRhroii0kbeFXnJw,785 -botocore/data/globalaccelerator/2018-08-08/endpoint-rule-set-1.json.gz,sha256=jLFB1M8Gx7TGldFWcAIeJc7z6GFslpVD6oTeg-KCZqE,1155 -botocore/data/globalaccelerator/2018-08-08/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/globalaccelerator/2018-08-08/paginators-1.json,sha256=Exal9Oqocr6pKQ_T5yEFYLXwm0BSxPYcuZTjZL2_8x8,2016 -botocore/data/globalaccelerator/2018-08-08/service-2.json.gz,sha256=2xxv9Ka8JsGvzfldchU6LhCh2uIyVXcMMn2mMPbjKrE,21919 -botocore/data/glue/2017-03-31/endpoint-rule-set-1.json.gz,sha256=Z3T_jbzErv6xEe78pkVuUyRkrJICXouaRuprYgTuJKs,1146 -botocore/data/glue/2017-03-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/glue/2017-03-31/paginators-1.json,sha256=tzX-0At6NqflYPWszHsH-AAfuAsYqw1LwJnu4nKh8Lg,5025 -botocore/data/glue/2017-03-31/paginators-1.sdk-extras.json,sha256=05DstPbLjNqM3IFboIp0gB9jwSQ1X_PuyxkLA1ecfL4,218 -botocore/data/glue/2017-03-31/service-2.json.gz,sha256=ngPWmNqv08unbIXB2NITpGQGRQ_bP4jkvWxb6jkpqEk,170328 -botocore/data/grafana/2020-08-18/endpoint-rule-set-1.json.gz,sha256=0bdy64L6AafIn_7tvhX91-9WWfRZNEjBrkT4XrEjtI0,1148 -botocore/data/grafana/2020-08-18/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/grafana/2020-08-18/paginators-1.json,sha256=1w34xYXn5nUies34W7BC_lzVPnbnhggKcWKUug4ckRc,908 -botocore/data/grafana/2020-08-18/paginators-1.sdk-extras.json,sha256=_g8panv1mpml0x69Y013wHvb22Sy63dKgVhK5oRqbwE,329 -botocore/data/grafana/2020-08-18/service-2.json.gz,sha256=ErJLGzFyTvgQ4arLOWuaw-oC7r5cdk7RfqnIzedBUqY,15023 -botocore/data/greengrass/2017-06-07/endpoint-rule-set-1.json.gz,sha256=Q6X0N0DuADuttv_HGEJnPV6J2GbldTRBqiDajqQvjB4,1361 -botocore/data/greengrass/2017-06-07/paginators-1.json,sha256=LphzapxioJkdlNs-zU4IVmg_pjswwy8RuDPq79sbW64,3366 -botocore/data/greengrass/2017-06-07/service-2.json.gz,sha256=JHzAZTIl7IwO9xqbizNIXIrZoyMVikVMtT2t5fUojaY,17132 -botocore/data/greengrassv2/2020-11-30/endpoint-rule-set-1.json.gz,sha256=Q6X0N0DuADuttv_HGEJnPV6J2GbldTRBqiDajqQvjB4,1361 -botocore/data/greengrassv2/2020-11-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/greengrassv2/2020-11-30/paginators-1.json,sha256=geNY9pksg1eDuJ9mpqk1iee_t8zQuFBrBG_O6eaZ7GU,1283 -botocore/data/greengrassv2/2020-11-30/service-2.json.gz,sha256=cJHHeyHMmbzNA0gPfieD8COh1hXrRcOJ0-Mz61hp0UE,20272 -botocore/data/groundstation/2019-05-23/endpoint-rule-set-1.json.gz,sha256=uD5Wq6-OigLnTOP0dESQ4hPnTzH3Fq4FK4up1xk0mAg,1151 -botocore/data/groundstation/2019-05-23/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/groundstation/2019-05-23/paginators-1.json,sha256=4_ogVwU_XXx--s-8FB9fXMd5kIjdEXBdN6iBd04Kmlk,1236 -botocore/data/groundstation/2019-05-23/service-2.json.gz,sha256=GmhWKXrKT4OrvIxOsh5nLXRKMY-fAkkfub4TGt0fapQ,18957 -botocore/data/groundstation/2019-05-23/waiters-2.json,sha256=fuayBSt0gQV3HjjFxrqZgUCLSo6DxBG5qb-ASxS3oKE,534 -botocore/data/guardduty/2017-11-28/endpoint-rule-set-1.json.gz,sha256=vdP64Rk2dYw_cEUMhgKUCUpYoO7_N7ru1DWq5IPlQOU,1235 
-botocore/data/guardduty/2017-11-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/guardduty/2017-11-28/paginators-1.json,sha256=xlZuLLHS4zEJ5ldwSdcZic3kEOn-CbV_N9NCCDnX0_M,2224 -botocore/data/guardduty/2017-11-28/service-2.json.gz,sha256=dZQirrA33rUZc2m7f_O3-5LA3hAp42ebnUvRZaLpwus,62575 -botocore/data/health/2016-08-04/endpoint-rule-set-1.json.gz,sha256=npVbcRpc55Azotrq920hc8ppbk1sVNAHNKroNp_YnN8,1524 -botocore/data/health/2016-08-04/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/health/2016-08-04/paginators-1.json,sha256=yiHNcdPNOcqngUnAvp1BUD8e9oWSgqGS-T0Esl6r8vI,1397 -botocore/data/health/2016-08-04/service-2.json.gz,sha256=GfwgFil3FoEigZS7LsCJw5wj-nmU5FtoBKXaHVequ8c,10836 -botocore/data/healthlake/2017-07-01/endpoint-rule-set-1.json.gz,sha256=Kn3JSwu5MglJJIEyS7rUYY9T7Mc-2stAmENV9tUU-c0,1151 -botocore/data/healthlake/2017-07-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/healthlake/2017-07-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/healthlake/2017-07-01/service-2.json.gz,sha256=U9aTxnssbMU41_UcAf7SnYLXLITtXluKhoTrMTEl8qw,6387 -botocore/data/healthlake/2017-07-01/waiters-2.json,sha256=4Sv75r3pHvxlRrR22FlUPVRujs263gAEJcberiB4dkU,2737 -botocore/data/iam/2010-05-08/endpoint-rule-set-1.json.gz,sha256=RSXRGWewYyii5VDUGW8lVpWFtOE2YnSWZ8cwd6reJzc,2238 -botocore/data/iam/2010-05-08/examples-1.json,sha256=T5EqrFFZBiVlL9dsN-T5DnigU1UnMSXfVVwBK00AWrU,48537 -botocore/data/iam/2010-05-08/paginators-1.json,sha256=Mrjh9WIhO3YlPK04LELNlBGOWlr4EOWDPV22S4XlPM4,7036 -botocore/data/iam/2010-05-08/service-2.json.gz,sha256=lBW-SGqU99rI9R-qjAhPiOCj9MkLYvF0tWBXLKzPAjk,81064 -botocore/data/iam/2010-05-08/waiters-2.json,sha256=sC6nS5oxMDEinb4z8GAMfZvFfPVWBzL_j1chnAT_z4k,1462 -botocore/data/identitystore/2020-06-15/endpoint-rule-set-1.json.gz,sha256=OAaCammJOqSgkSZr-eBVpYPMm2UZBU600GoPIuunIQs,1235 -botocore/data/identitystore/2020-06-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/identitystore/2020-06-15/paginators-1.json,sha256=lpGJQxUC8FqJ_JuWaDSHw3cMW677pwZDQpoWRcBvA0M,704 -botocore/data/identitystore/2020-06-15/service-2.json.gz,sha256=kNSCi8Y2pfenzT3BQCa1jrHUMcA4l6rzMxN9SE3JhV8,9460 -botocore/data/imagebuilder/2019-12-02/endpoint-rule-set-1.json.gz,sha256=5fPq2eulSG53GcOEoTEnxWKSLzooQ0Oxgjtt3lsiYFQ,1237 -botocore/data/imagebuilder/2019-12-02/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/imagebuilder/2019-12-02/paginators-1.json,sha256=I6-12lIoWuh4TkN0X8GiCzc-8ilu4xD69Zuj_ckMoUQ,3820 -botocore/data/imagebuilder/2019-12-02/paginators-1.sdk-extras.json,sha256=bGrcj8XUhY0YmfiAvQv2t8JVDaOCrqss44DaLHra1HA,2040 -botocore/data/imagebuilder/2019-12-02/service-2.json.gz,sha256=B3RWUH-741aRc8DHuH21NKY6wJh0mQNLYhHHfWH_cBA,44248 -botocore/data/importexport/2010-06-01/endpoint-rule-set-1.json.gz,sha256=t2rCcqU40dRlMdWyKqR2_FXWhz4xyUtR5apEXrVFE5E,1599 -botocore/data/importexport/2010-06-01/paginators-1.json,sha256=Etmobek-KI_4Gx8vLRBQsy6nYiRvog88hJCCXuRESZQ,215 -botocore/data/importexport/2010-06-01/service-2.json.gz,sha256=Mxu3Sb146joUaB00ITK99BKSsPXW4ewxj0BfBbp9iGs,4733 -botocore/data/inspector-scan/2023-08-08/endpoint-rule-set-1.json.gz,sha256=-SOWS2I7IgOTBogE2jm9mtSkgAq1yPf1uUnEQ-Bg7_s,1303 -botocore/data/inspector-scan/2023-08-08/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 
-botocore/data/inspector-scan/2023-08-08/service-2.json.gz,sha256=7L97KzVORgdi51oswGcDZLZHn8_hNTVj3UVybeblzng,1715 -botocore/data/inspector/2015-08-18/endpoint-rule-set-1.json.gz,sha256=vQHKAkHIGGegKv80DC9R36-xRiK2kd4i_nhgZvbRaLk,1147 -botocore/data/inspector/2015-08-18/service-2.json.gz,sha256=aQq2Zfx6iBNDC7CdnJbJtG27mZj4ves1DsOOvk0IO30,8021 -botocore/data/inspector/2016-02-16/endpoint-rule-set-1.json.gz,sha256=x9jMg4FC6Lhbjng84QkNTC0xInEValNMry4VhA_sENo,1149 -botocore/data/inspector/2016-02-16/examples-1.json,sha256=EoIoRt_vSBIFaQ8UnXLRGL2W5H50CW9rscWvZ012w-g,36903 -botocore/data/inspector/2016-02-16/paginators-1.json,sha256=weo6-A-gbXJmE6B8bFERy0jQdJHvIDANiZLITbP_9ZQ,1610 -botocore/data/inspector/2016-02-16/service-2.json.gz,sha256=WDeeYduSKMSgk2KNM6HbTet1Oqniaax53xo3DXl9GKw,14162 -botocore/data/inspector2/2020-06-08/endpoint-rule-set-1.json.gz,sha256=IBa42CuhLBByfjq2nOWOJXWDJAyL1Q6TpgRt_61-aV0,1151 -botocore/data/inspector2/2020-06-08/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/inspector2/2020-06-08/paginators-1.json,sha256=Ib7iaLM9kEhPdRdVZSCwdlFg9qv4NEE6G8Hmobhq6jQ,2769 -botocore/data/inspector2/2020-06-08/paginators-1.sdk-extras.json,sha256=WXkFBTPQczZBVGrBAb2IoUJRliU1uNg-m8znDFawOOA,287 -botocore/data/inspector2/2020-06-08/service-2.json.gz,sha256=N1K-F37MNiNuWdMLCNy5gzNDig41IeOLTXDbX-nQ0Dg,45600 -botocore/data/internetmonitor/2021-06-03/endpoint-rule-set-1.json.gz,sha256=02K89xYstu9KgiCgWUEBdjvPblxI2tNP_6nQJH2TO4Q,1155 -botocore/data/internetmonitor/2021-06-03/paginators-1.json,sha256=5eozwrH81SIJWEZD5zsaAs9rr8CvRMMoXvYnOj1IxTs,531 -botocore/data/internetmonitor/2021-06-03/service-2.json.gz,sha256=wNKoXUBU7znWjD8VnVsO1bVhhlMigmAiVqjqk9nTDrU,13894 -botocore/data/internetmonitor/2021-06-03/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/invoicing/2024-12-01/endpoint-rule-set-1.json.gz,sha256=Of1RQ_CTRAH0wNlXgchAWNr8_Z_NGDgmnLxN193vXxY,899 -botocore/data/invoicing/2024-12-01/paginators-1.json,sha256=45c40-ubliTfW2ldc8DsZpWeou2oYKnh_TjoinLvgYg,575 -botocore/data/invoicing/2024-12-01/service-2.json.gz,sha256=bMmp1tTR1OCi0Pifk07Fhckx1tHVqwpcl0-VfjE88VA,11387 -botocore/data/invoicing/2024-12-01/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/iot-data/2015-05-28/endpoint-rule-set-1.json.gz,sha256=YPkKYp9nhrdIntsAAdHJcqEClrEwIC4ssvrAmVAo4HY,1482 -botocore/data/iot-data/2015-05-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iot-data/2015-05-28/paginators-1.json,sha256=FCM_y5QY56bw4TOgH3_OTBsnKj2PjI3ObCOOnKtsq80,201 -botocore/data/iot-data/2015-05-28/service-2.json.gz,sha256=fayxXT_fBDkrZH2JbYrqiGdB_a4Kp4BdYYTfo65oO4U,4775 -botocore/data/iot-jobs-data/2017-09-29/endpoint-rule-set-1.json.gz,sha256=2pCl4WuBNdGREQ5l5E9hKvW-xC67KxbEWyQDhEOYtC8,1153 -botocore/data/iot-jobs-data/2017-09-29/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iot-jobs-data/2017-09-29/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iot-jobs-data/2017-09-29/service-2.json.gz,sha256=kQygAOZ5ViGWYC5iGkHjvcdCRilFZ3NTM_soWfr4_SM,5041 -botocore/data/iot-managed-integrations/2025-03-03/endpoint-rule-set-1.json.gz,sha256=xlOiLL5qwnUEnHzMC_FBmDWx_mEQ6MpEyYyqkjv5Xcc,843 -botocore/data/iot-managed-integrations/2025-03-03/paginators-1.json,sha256=leQj5NI9WDhpU0Nx_QY00lDJKI4MdDEV3xRl-1CP-0c,2987 
-botocore/data/iot-managed-integrations/2025-03-03/service-2.json.gz,sha256=wIXgrviRsEJl7uw12j50fKoHn0dbofYQ97F_PTOBARQ,28812 -botocore/data/iot/2015-05-28/endpoint-rule-set-1.json.gz,sha256=Gp7uTSB4Hw_qI3BRliOSkpwSWIaCTUFSvutGeMZFuj0,1266 -botocore/data/iot/2015-05-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iot/2015-05-28/paginators-1.json,sha256=cwRCZcmss_KOdZo4DoDmqXspGgJAJMjsUeZd9h_QzMk,11232 -botocore/data/iot/2015-05-28/service-2.json.gz,sha256=zFcZxDlmg4227QsxiyZnIVjxXO-Y5OVNk_d7az7Ltl0,125599 -botocore/data/iotanalytics/2017-11-27/endpoint-rule-set-1.json.gz,sha256=YllmlqSSJO2my4h2sbNfe4wQzb4OE48JgKIc0Q-y1k8,1152 -botocore/data/iotanalytics/2017-11-27/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotanalytics/2017-11-27/paginators-1.json,sha256=X_pDdHN034STvHt8ULopV8fu0e5gyFt8Z1dj17AfZQY,895 -botocore/data/iotanalytics/2017-11-27/service-2.json.gz,sha256=XKlXU34C2KXLeHP_HcM-nBsAe8wNCy8qUOBodrb84NI,18182 -botocore/data/iotdeviceadvisor/2020-09-18/endpoint-rule-set-1.json.gz,sha256=TCWnmLuX9bg3OK5ivhklSKWHb6BJK3f4nPVa-glOS2M,1158 -botocore/data/iotdeviceadvisor/2020-09-18/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotdeviceadvisor/2020-09-18/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iotdeviceadvisor/2020-09-18/service-2.json.gz,sha256=HNor3WSKM_kmM2dSPaQnXY7cK3UIW7gsYmxuXFCOSBo,5326 -botocore/data/iotevents-data/2018-10-23/endpoint-rule-set-1.json.gz,sha256=im639fsCizwu2RfnkwBGJJQQXS3_ogfVjHCf3k38jRY,1153 -botocore/data/iotevents-data/2018-10-23/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotevents-data/2018-10-23/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iotevents-data/2018-10-23/service-2.json.gz,sha256=vly5frNJr8zv5uWlSLCeh7xxyeg55wRP96Tud6RTE34,6416 -botocore/data/iotevents/2018-07-27/endpoint-rule-set-1.json.gz,sha256=dEQxfXqVPGv-A2fNrZXpCXiYUZoCmcXs7cQ5kxwOhjk,1149 -botocore/data/iotevents/2018-07-27/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotevents/2018-07-27/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iotevents/2018-07-27/service-2.json.gz,sha256=GpJteOnQJMXtNBHBIOcZiDcQiKQjDJ8zSkam9zJ2OmY,16112 -botocore/data/iotfleetwise/2021-06-17/endpoint-rule-set-1.json.gz,sha256=FM-XUqmGLzxdqhvGA2moaEV68AOKTlrfJOzLDk2z9KU,1151 -botocore/data/iotfleetwise/2021-06-17/paginators-1.json,sha256=Lpq4Xh8dmuXKeTACXakpt6YLxU7zzWOvkBqWbj9TuNI,2430 -botocore/data/iotfleetwise/2021-06-17/service-2.json.gz,sha256=ep6048iYePOnfAvxzl9Veriiwijtn_u4JsunHvV89RM,30396 -botocore/data/iotfleetwise/2021-06-17/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/iotsecuretunneling/2018-10-05/endpoint-rule-set-1.json.gz,sha256=zJarrf8XNffHl_8sNiIxnzSH8-0msRMUiRIQQo7OvT8,1401 -botocore/data/iotsecuretunneling/2018-10-05/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotsecuretunneling/2018-10-05/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iotsecuretunneling/2018-10-05/service-2.json.gz,sha256=v-XkKBtAz4dMsu-zjbHyRwMrtBwKTaL_NvaKR6NWBAc,3432 -botocore/data/iotsitewise/2019-12-02/endpoint-rule-set-1.json.gz,sha256=qxjL0Hh9Xc10RG7yitJB6JlR2NcJzc6e6hv8CqKgiSo,1150 
-botocore/data/iotsitewise/2019-12-02/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotsitewise/2019-12-02/paginators-1.json,sha256=zx8VdG9G3tMBgNLSeJAnaVAeuqsq6Fdmdo1D77G2k-4,4960 -botocore/data/iotsitewise/2019-12-02/paginators-1.sdk-extras.json,sha256=YRdxHylWCPUlQDFxU2BHajclulJZBfY-NpWldEBwzEU,159 -botocore/data/iotsitewise/2019-12-02/service-2.json.gz,sha256=5MMuA_cAAshTOdh23wAZGXiOzqZKddtNvbynaJkbFd4,60542 -botocore/data/iotsitewise/2019-12-02/waiters-2.json,sha256=qVN5Ie90YeUrNZqZKgckPkyTBYdKjgEbbrlsx-3RXUw,2237 -botocore/data/iotthingsgraph/2018-09-06/endpoint-rule-set-1.json.gz,sha256=4sFA4TUbc0ybUlZPNvgOCyCCuu6J4AGOULOztoWzotQ,1217 -botocore/data/iotthingsgraph/2018-09-06/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotthingsgraph/2018-09-06/paginators-1.json,sha256=3329WY0CXoFVg2osoDFw4kPWYxWK559asARwgffXvbw,1730 -botocore/data/iotthingsgraph/2018-09-06/service-2.json.gz,sha256=RO4tWFopRkkn0c2aE1h4olIcwtvDoiev44z0zcJ7FUA,10367 -botocore/data/iottwinmaker/2021-11-29/endpoint-rule-set-1.json.gz,sha256=F6dgtv43zxSZCeZ_KkUhQry4cumS_IRxB4jVLrmST_g,1152 -botocore/data/iottwinmaker/2021-11-29/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iottwinmaker/2021-11-29/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iottwinmaker/2021-11-29/service-2.json.gz,sha256=hVLIDErA8wbFANTgBnNE7KtKZtrBn2aDdcfleR1ymMU,16782 -botocore/data/iottwinmaker/2021-11-29/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/iotwireless/2020-11-22/endpoint-rule-set-1.json.gz,sha256=P4bZSrq6Dutj3bDqDhRmBSlYWVjHXDHHnedlUSgcxkE,1153 -botocore/data/iotwireless/2020-11-22/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/iotwireless/2020-11-22/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/iotwireless/2020-11-22/service-2.json.gz,sha256=Qz8tJx_x7iQ_SrZ1RuR3E7Nl477woLM8GGE8eJbk7nY,37304 -botocore/data/ivs-realtime/2020-07-14/endpoint-rule-set-1.json.gz,sha256=fQBdznu81NEYLl9Rs1EQqmKqZjcC-YAo08B-InytCko,1300 -botocore/data/ivs-realtime/2020-07-14/paginators-1.json,sha256=HgDr8EFYuc0XAzussWpMVIYF65h571wHeqsQEV4DNTI,550 -botocore/data/ivs-realtime/2020-07-14/service-2.json.gz,sha256=OENGpif0-4jNdrCjWxwbfc1imMUoUf2Vtb0Q4mQenOo,19023 -botocore/data/ivs-realtime/2020-07-14/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/ivs/2020-07-14/endpoint-rule-set-1.json.gz,sha256=-d_Te3B0PLvcOWudEI5u4JG0Cmg3JM54x_bJFN03n2I,1145 -botocore/data/ivs/2020-07-14/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ivs/2020-07-14/paginators-1.json,sha256=QibJ2axvh2Gp9C80kOHE6Ac5RxI-El9k6jxWbVtHyqw,875 -botocore/data/ivs/2020-07-14/service-2.json.gz,sha256=BXHhOICsiwx3MdcZgDNDbDZEBvo8NgDgJ3v1QBuU480,17154 -botocore/data/ivschat/2020-07-14/endpoint-rule-set-1.json.gz,sha256=t-17mIW9rOb_IdbRX5PTzqKXL-TmnlwLuVo5nRTA9oE,1149 -botocore/data/ivschat/2020-07-14/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ivschat/2020-07-14/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/ivschat/2020-07-14/service-2.json.gz,sha256=r4sM07zxh4Sev3caxKIGYpgdUpN6HyUQridERC2Oq3w,8309 -botocore/data/ivschat/2020-07-14/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 
-botocore/data/kafka/2018-11-14/endpoint-rule-set-1.json.gz,sha256=kOGWp308-tRcO2ab4vjGT2h6zw6i9-4XvyF-cm0dPAU,1232 -botocore/data/kafka/2018-11-14/paginators-1.json,sha256=Lmq3WylSqus0mXFPis3ZiX_DYrpa08vSkyiIpYcpqjs,2459 -botocore/data/kafka/2018-11-14/service-2.json.gz,sha256=3VnYb5_kvKtmRJteLU3Su7f0pwN2qxt6IS9ZSji9zVE,22702 -botocore/data/kafkaconnect/2021-09-14/endpoint-rule-set-1.json.gz,sha256=iJziKtzhyFBC433oGhZWp3AKIxXC7UxXJqPZA2y2KmY,1417 -botocore/data/kafkaconnect/2021-09-14/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kafkaconnect/2021-09-14/paginators-1.json,sha256=Eq3TTanPhDwtKpF3EUFUazS6C8Dkwb7TENJPyonaUkc,733 -botocore/data/kafkaconnect/2021-09-14/service-2.json.gz,sha256=861hTCqc4ECAoehTB8bX_CkqUyytopZXwJkDyJqmRvA,8051 -botocore/data/kafkaconnect/2021-09-14/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/kendra-ranking/2022-10-19/endpoint-rule-set-1.json.gz,sha256=DP6d7W47BUdP1EEHtSz_xd9Q-5fRdOjyBtH1wGCDyHw,1129 -botocore/data/kendra-ranking/2022-10-19/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/kendra-ranking/2022-10-19/service-2.json.gz,sha256=mC_vPLPbIsHWP8vpaLoAeX4WWHq77EShQQS6WShkzNk,4384 -botocore/data/kendra/2019-02-03/endpoint-rule-set-1.json.gz,sha256=COayzMKkJiKSl7Hv-MTiFF5_j6hY1LVRqlDfXaCbfZU,1147 -botocore/data/kendra/2019-02-03/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kendra/2019-02-03/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/kendra/2019-02-03/service-2.json.gz,sha256=5eZRA4EQ_tVfC2j3RXKJPitaVqmyCzocyg40rU2AMZ8,71284 -botocore/data/keyspaces/2022-02-10/endpoint-rule-set-1.json.gz,sha256=2ahclOA-EaY47_HN1XRaC-i0dVR5ro0Lf1TGLVGO6B4,1235 -botocore/data/keyspaces/2022-02-10/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/keyspaces/2022-02-10/paginators-1.json,sha256=Wa_EIMfB_oAzXrO5w1GuutNoupwn1mvgPSdNbD3gySk,668 -botocore/data/keyspaces/2022-02-10/service-2.json.gz,sha256=2c2oISUnMxxzCBCa7gI9uC1qXZiK6B8RhqI170W9kas,14542 -botocore/data/keyspaces/2022-02-10/waiters-2.json,sha256=tj1ZnaqhwmJkUEQlwH7wm1SqY3lg1BvZDfzfPaIgNrY,38 -botocore/data/keyspacesstreams/2024-09-09/endpoint-rule-set-1.json.gz,sha256=iHZgKx4PPea3wRSXo04OxdkdJYgwcm-wUmwmUFw_ux0,840 -botocore/data/keyspacesstreams/2024-09-09/paginators-1.json,sha256=ec8rJotxhYa6UUQ81C-pRcMIA-QlKfB_AiaFoHv7Fbc,342 -botocore/data/keyspacesstreams/2024-09-09/paginators-1.sdk-extras.json,sha256=feP3TZjM_YFuizYme5vJUtGlqVgcpqFZXswcFM3kUwg,378 -botocore/data/keyspacesstreams/2024-09-09/service-2.json.gz,sha256=AW4XnDqzA-ZGtu3_OdFPkZP3Gyfdspf_OPLKdQWZtZU,6953 -botocore/data/kinesis-video-archived-media/2017-09-30/endpoint-rule-set-1.json.gz,sha256=Tdn_7DViI4OQm64DPoqnta2eKNRfsqsjhyud38GQfOE,1151 -botocore/data/kinesis-video-archived-media/2017-09-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kinesis-video-archived-media/2017-09-30/paginators-1.json,sha256=2QyELet6SZ2S2nDPmoKrNlJ9kQyJyMlMTkrUh1FHeh0,346 -botocore/data/kinesis-video-archived-media/2017-09-30/service-2.json.gz,sha256=G6pSYgy_kN6Y-y-YUfrvuscuoKIPszQQwUNh7IHKh48,13543 -botocore/data/kinesis-video-media/2017-09-30/endpoint-rule-set-1.json.gz,sha256=Tdn_7DViI4OQm64DPoqnta2eKNRfsqsjhyud38GQfOE,1151 -botocore/data/kinesis-video-media/2017-09-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/kinesis-video-media/2017-09-30/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/kinesis-video-media/2017-09-30/service-2.json.gz,sha256=LnatY34lMXgj9VEpC2sq2ZNpEwJR7oYryG66AW2fnQg,3456 -botocore/data/kinesis-video-signaling/2019-12-04/endpoint-rule-set-1.json.gz,sha256=Tdn_7DViI4OQm64DPoqnta2eKNRfsqsjhyud38GQfOE,1151 -botocore/data/kinesis-video-signaling/2019-12-04/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kinesis-video-signaling/2019-12-04/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/kinesis-video-signaling/2019-12-04/service-2.json.gz,sha256=OGftlDbDeqS4cT_ztbNVqyrhguh031Cmh2-ZgLbHX1s,2462 -botocore/data/kinesis-video-webrtc-storage/2018-05-10/endpoint-rule-set-1.json.gz,sha256=uBy6LCCHErXlgVS-E-GTgBjPCWI8UQthl632SOg54pM,1302 -botocore/data/kinesis-video-webrtc-storage/2018-05-10/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/kinesis-video-webrtc-storage/2018-05-10/service-2.json.gz,sha256=c14m7neUF85cnnBq4rKCQwVLj6YDrIZ1hi2gsdBHapY,2094 -botocore/data/kinesis/2013-12-02/endpoint-rule-set-1.json.gz,sha256=4akcviTp6KVTIk8xGz5y-6dtU8jfHRRw39bmxuc1XQc,5443 -botocore/data/kinesis/2013-12-02/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kinesis/2013-12-02/paginators-1.json,sha256=qSFJYsvx9QiXPFHa-xy00L9bJWbtmRbGUfaVCF9VzNE,1257 -botocore/data/kinesis/2013-12-02/service-2.json.gz,sha256=eEFEaW2aBySCMXIJTN-oN_HxghOXM_FMprCkqWtz7Es,27027 -botocore/data/kinesis/2013-12-02/waiters-2.json,sha256=O09l7u4uKnojQ0nCnGvABSm0pUXaLj8vvi2Y7sfH_9w,615 -botocore/data/kinesisanalytics/2015-08-14/endpoint-rule-set-1.json.gz,sha256=M3tZvnw_BvWwWtVkPadDjawFnEMUDD7UtV1dYkwPVJ0,1156 -botocore/data/kinesisanalytics/2015-08-14/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kinesisanalytics/2015-08-14/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/kinesisanalytics/2015-08-14/service-2.json.gz,sha256=3sPCNQKgucpg_gvj4H0U2ZonqyGQKKEnNz_o-ZIF33k,14058 -botocore/data/kinesisanalyticsv2/2018-05-23/endpoint-rule-set-1.json.gz,sha256=M3tZvnw_BvWwWtVkPadDjawFnEMUDD7UtV1dYkwPVJ0,1156 -botocore/data/kinesisanalyticsv2/2018-05-23/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kinesisanalyticsv2/2018-05-23/paginators-1.json,sha256=4ttV2ZxNQIdY8Gfsw2atQYWigRj6V6b8bLI70CA4vKs,753 -botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json.gz,sha256=0WZFfbDKShsV2-Jig-wSLI-5Qo6435S1IBT-85gzUB4,25510 -botocore/data/kinesisvideo/2017-09-30/endpoint-rule-set-1.json.gz,sha256=Tdn_7DViI4OQm64DPoqnta2eKNRfsqsjhyud38GQfOE,1151 -botocore/data/kinesisvideo/2017-09-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/kinesisvideo/2017-09-30/paginators-1.json,sha256=u4Avq0nOOLDcxZR6MF_lKdBBqtPAxO96JsSaq9zIdqQ,758 -botocore/data/kinesisvideo/2017-09-30/service-2.json.gz,sha256=gAquI87lg1AaLwJY6Uc26H3sTtCmYkO4FW3yLKg0qak,15837 -botocore/data/kms/2014-11-01/endpoint-rule-set-1.json.gz,sha256=qvB_-M5BJRFNHhASTAZKhaY7QbJG0KEjCGpmOeqSdCk,1145 -botocore/data/kms/2014-11-01/examples-1.json,sha256=TgahTl1uBYiHIxv63mxyaCc-5c9xQKobWHPhDio4x3c,77655 -botocore/data/kms/2014-11-01/paginators-1.json,sha256=pUo1LF_2xGXf1-sM8xJiafSISwL8m8ygUKravuO-Iv0,1565 -botocore/data/kms/2014-11-01/service-2.json.gz,sha256=jvJLY9nsZjb6Sa-hBScqASCyCf_plutrrN4EtggdKjY,73287 
-botocore/data/lakeformation/2017-03-31/endpoint-rule-set-1.json.gz,sha256=2SQSoj925OaYq6wzJDaGlVnjJ2kRsRdP4LNtnILv8oY,1152 -botocore/data/lakeformation/2017-03-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/lakeformation/2017-03-31/paginators-1.json,sha256=Igy5JjRrtDgCMOgHT__Et_A6258hmdm2ohrVs2ZasJc,1052 -botocore/data/lakeformation/2017-03-31/paginators-1.sdk-extras.json,sha256=C6kS_EfPl5yTSl_zGXUU30Tp5Z82gPH2KKPi-u1IbOo,159 -botocore/data/lakeformation/2017-03-31/service-2.json.gz,sha256=HXKtxUtAQle-RPZZPbNQaM2lAGm03EMsvLMDmwVRptk,24563 -botocore/data/lambda/2014-11-11/endpoint-rule-set-1.json.gz,sha256=LGmWF0vweqgzVZ42OWh2Y77lwWV1b0aKm_EvQ-TAraI,1288 -botocore/data/lambda/2014-11-11/service-2.json.gz,sha256=sGmlbVDuHQoeZupcOKsbNTeBXWSB5h74kZJ5hQ9wlzU,5528 -botocore/data/lambda/2015-03-31/endpoint-rule-set-1.json.gz,sha256=wWZJdsqEq_ceWe8AQlSmax8prJAVOv3ZNj2iArEG8UI,1148 -botocore/data/lambda/2015-03-31/examples-1.json,sha256=_TOXptTVZUFkSxrkaq_JpIKLxUYjRcK_TpC_0itGHLg,52811 -botocore/data/lambda/2015-03-31/paginators-1.json,sha256=i67K4A0RFZT91hXah41P-AlfAvxXy1TrUAcmy7foZ4o,2839 -botocore/data/lambda/2015-03-31/paginators-1.sdk-extras.json,sha256=evspsJGQ9hFD7SsREZ6pj-ooY7RdvPPHneM5PA4AKaM,196 -botocore/data/lambda/2015-03-31/service-2.json.gz,sha256=2yiKN2DF8dJB1mYPezPpO4H5csD5mSoW8e3eD006pJM,65929 -botocore/data/lambda/2015-03-31/waiters-2.json,sha256=imWEXGOjCilT015RuJNYSaoSXEuafoWZQgL_4KttBqU,4032 -botocore/data/launch-wizard/2018-05-10/endpoint-rule-set-1.json.gz,sha256=hRNg0SOnDDIOlJmg02E4MXmm_f2CVE9c5t2WEdM3zJo,1302 -botocore/data/launch-wizard/2018-05-10/paginators-1.json,sha256=_qhTYa40h1ckIfS0xEC6DCUnO-0OPlclJSK9zAxC8D4,733 -botocore/data/launch-wizard/2018-05-10/service-2.json.gz,sha256=q6wy1D97Us0EAId9GVyEo7slFo87khBht1nSSfwkjwk,4452 -botocore/data/lex-models/2017-04-19/endpoint-rule-set-1.json.gz,sha256=v5i06ZjtzGIUR_Gy9kgKIiBAUkAGSZAYEb9omEGOtEc,1331 -botocore/data/lex-models/2017-04-19/examples-1.json,sha256=bOPm5nP9H4YSzKIpuI2sCPe4agTMgdenNLtxDAWIat4,23898 -botocore/data/lex-models/2017-04-19/paginators-1.json,sha256=NmghgFUthvQgC3SqXuZBn-6vnUJ5ey3MZYBpRF7YMqI,1686 -botocore/data/lex-models/2017-04-19/service-2.json.gz,sha256=pOEUd9AHdCow-IUq7MjxJyvT0Fm3424X_ihFyMKW3Dc,29522 -botocore/data/lex-runtime/2016-11-28/endpoint-rule-set-1.json.gz,sha256=gTaD-roWr7YAVhoAtgXzZzq-6spjwJf2wGC2eGqNUCE,1331 -botocore/data/lex-runtime/2016-11-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/lex-runtime/2016-11-28/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/lex-runtime/2016-11-28/service-2.json.gz,sha256=wpHdOgtRLb3rDbbvDWD0tzwRMxZ8EUXbzaDqmfFx6Bg,11824 -botocore/data/lexv2-models/2020-08-07/endpoint-rule-set-1.json.gz,sha256=LjBsSBTGRNL3QDbKPbroHjykT2HPHQNHgfq_h0DRFds,1154 -botocore/data/lexv2-models/2020-08-07/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/lexv2-models/2020-08-07/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/lexv2-models/2020-08-07/service-2.json.gz,sha256=WnNARJJApw-hBaaJFfbkhtP2yqrpwMmBAtD-Fc5aDRg,78964 -botocore/data/lexv2-models/2020-08-07/waiters-2.json,sha256=Kj-OzJdHpbEuK2Og-0ok3E17irFQKjDwk2KfOj_xKcQ,7231 -botocore/data/lexv2-runtime/2020-08-07/endpoint-rule-set-1.json.gz,sha256=P_BIDol_mjY00faWmZsTRvP0202tjn9EnXJA9KiOFlA,1154 -botocore/data/lexv2-runtime/2020-08-07/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/lexv2-runtime/2020-08-07/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/lexv2-runtime/2020-08-07/service-2.json.gz,sha256=kYnc_722xsgWHLaCLGca_BOUV0tFw-XO8YHvR6V4vZc,12913 -botocore/data/license-manager-linux-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz,sha256=aYXbT22qz-6gi4rD9AQYtyj--X55v497F9jtqvFtTXM,1318 -botocore/data/license-manager-linux-subscriptions/2018-05-10/paginators-1.json,sha256=9hH87MXwn0OiJQlRwCyyof-Pe9Esid1WmRA32IvfLKU,591 -botocore/data/license-manager-linux-subscriptions/2018-05-10/service-2.json.gz,sha256=wljW0P9H8v8mi2UCjwVD2ZDAoCuGuljxORVJaYcNswM,4790 -botocore/data/license-manager-user-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz,sha256=ho92NActesLn8LJWhZZ7q9WMUjd-xkDNlyl31jL97kg,1171 -botocore/data/license-manager-user-subscriptions/2018-05-10/paginators-1.json,sha256=LNXlRPzi78dh0YocO4Tld8ErItNbVafz-InxaJIVd-s,944 -botocore/data/license-manager-user-subscriptions/2018-05-10/service-2.json.gz,sha256=pdq9HY-dhCsg4U8W7mBZXTRDRhSp4Q5KoQ8cnKgjY4k,6693 -botocore/data/license-manager-user-subscriptions/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/license-manager/2018-08-01/endpoint-rule-set-1.json.gz,sha256=g5Yhgm8HROCrW081Xzw60FOsSCwlkNfk30Ldw5m6PxY,1156 -botocore/data/license-manager/2018-08-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/license-manager/2018-08-01/paginators-1.json,sha256=u83kulrKizQ1RsV1wfSx_UjFbm72dCbztJd3m2qKZwc,1012 -botocore/data/license-manager/2018-08-01/service-2.json.gz,sha256=t55wxti3nfnfcxfYtfWLG8oMb-iOZuFDgxHRDE0rMXk,20541 -botocore/data/lightsail/2016-11-28/endpoint-rule-set-1.json.gz,sha256=tLTv8iCU8g6ui9TBc-t3QkrpSHQHqQwE8fSJ1sTwkcQ,1150 -botocore/data/lightsail/2016-11-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/lightsail/2016-11-28/paginators-1.json,sha256=9EaLlqeMLm1cO4A5z-uPznc4OgcKMLV3tbvMLdSjZF4,2925 -botocore/data/lightsail/2016-11-28/service-2.json.gz,sha256=B_PTLx0hTfJTP29KyjOORPleTdDEaoArkOSRMTZdyZQ,88085 -botocore/data/location/2020-11-19/endpoint-rule-set-1.json.gz,sha256=DP1r-_eoMIOEC7H_YcFKif9mt8Fopo_b6i5fXfDQzoY,1145 -botocore/data/location/2020-11-19/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/location/2020-11-19/paginators-1.json,sha256=voiPL9-aOzRI3yqf4kvw2pNzmkLpxD5QoFZZaOYaQoQ,1871 -botocore/data/location/2020-11-19/paginators-1.sdk-extras.json,sha256=RkjEzGF7VMsfK3VpqDSV5a3Ol5XSjUAn_udGxYz4uyA,197 -botocore/data/location/2020-11-19/service-2.json.gz,sha256=1qJI_vQKXwcFqFKUDfEgwpZ5FhzpErxXMMmX6tU-ZRc,44276 -botocore/data/logs/2014-03-28/endpoint-rule-set-1.json.gz,sha256=1-Cqv_9T-XGwcDHUeIlnDcbpbOhdvpUhio6nrSzVcas,1230 -botocore/data/logs/2014-03-28/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/logs/2014-03-28/paginators-1.json,sha256=W91hHLGNPpWw3v8TNgDZcoNQYH16WKc2tDaMivPQgQI,3553 -botocore/data/logs/2014-03-28/paginators-1.sdk-extras.json,sha256=NEtgDwhuhzOQCycfuhXi4tSe1_y5ekrExDmxJdQ9M1Q,198 -botocore/data/logs/2014-03-28/service-2.json.gz,sha256=8NHfMxzNsfIB7pjq_4MZICBQriYiJWjWEuyFEnlRtKg,79359 -botocore/data/lookoutequipment/2020-12-15/endpoint-rule-set-1.json.gz,sha256=3gZ1FV1hyJUT9MuoIg5IgfS0nhkWGOSreeR9UaaIHx4,1156 -botocore/data/lookoutequipment/2020-12-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/lookoutequipment/2020-12-15/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/lookoutequipment/2020-12-15/service-2.json.gz,sha256=Lw96WSW3dgrIzsOOwXrYxBEcCUw4kcntmVVLS-Rq4Wk,23238 -botocore/data/m2/2021-04-28/endpoint-rule-set-1.json.gz,sha256=iyFRkw3EULmlnrchHiixCe892auUKhnDiaQuxMhtbtw,1145 -botocore/data/m2/2021-04-28/paginators-1.json,sha256=-WFr8vEIZ868a3Kwj0mLLgJuYR_MI2osQq2gIZMWL4g,1787 -botocore/data/m2/2021-04-28/service-2.json.gz,sha256=L4rgTEofEjsDgcwE51l-cjZ2Huqng3RqqACx-ccVzF4,16743 -botocore/data/machinelearning/2014-12-12/endpoint-rule-set-1.json.gz,sha256=VWDHr-M9jSQ2HdX8U2FRdu4566Zj149L-zWehUOU7f8,1156 -botocore/data/machinelearning/2014-12-12/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/machinelearning/2014-12-12/paginators-1.json,sha256=80ddAOlwUPt-mXpDtk3eJqnm7lB95-DjTW6-G6eqmJc,679 -botocore/data/machinelearning/2014-12-12/service-2.json.gz,sha256=IwnIVmnshD12d3c6Qg7ZRJYpOe7cEtiUAK9wcJ8g3X4,21306 -botocore/data/machinelearning/2014-12-12/waiters-2.json,sha256=_tyML4Sw4VQBk8fUWh1bUQjlcooL1hgRpvkqxKxEeCY,1902 -botocore/data/macie2/2020-01-01/endpoint-rule-set-1.json.gz,sha256=_526u5sCZOnpXnQD0hZ9J0n1vTvV8c0mGztH0egtGDg,1148 -botocore/data/macie2/2020-01-01/paginators-1.json,sha256=QNpyggmzK1vrlEP4LHAy0qKzLTJNNoV9x3y8nqJkj3o,2959 -botocore/data/macie2/2020-01-01/service-2.json.gz,sha256=V4Ignp4yaxSrEJ7y0evHAutjESX4h7saIUkDVQTCl5Q,59305 -botocore/data/macie2/2020-01-01/waiters-2.json,sha256=YjTydOnsawe754SLZZxzxMgFaq0M88fq5jOu-UQvAWE,553 -botocore/data/mailmanager/2023-10-17/endpoint-rule-set-1.json.gz,sha256=5ZCtv0_Oj3oB4s1XZgd4h7BqoLd0pVA83THR5kJLRao,1302 -botocore/data/mailmanager/2023-10-17/paginators-1.json,sha256=6E7z80hMmR8EXzJv7fi20eCjKOVott4o8agYmmVvlU4,2045 -botocore/data/mailmanager/2023-10-17/service-2.json.gz,sha256=RrXONSbUZywFzGBL73EYsKRP1NSXDTh5IrEbCDPcE1Y,22050 -botocore/data/mailmanager/2023-10-17/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/managedblockchain-query/2023-05-04/endpoint-rule-set-1.json.gz,sha256=4vvD94QpbwkbDNjZu0eeEeJH_VtxQx7m7hjly_mJ_jU,1312 -botocore/data/managedblockchain-query/2023-05-04/paginators-1.json,sha256=aLhFDqzj7KQVTC4MVIRoQAo6tDKInqtlsOkFmvln-7o,882 -botocore/data/managedblockchain-query/2023-05-04/service-2.json.gz,sha256=z0cKFdw6oQgerV1g1bidbRI1HD0Pi_NryRZj0B8i52c,7186 -botocore/data/managedblockchain-query/2023-05-04/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/managedblockchain/2018-09-24/endpoint-rule-set-1.json.gz,sha256=DJtXVormTBLezX-BIzi9XH5FU-YaL0kTVDtCOjnRpgo,1158 -botocore/data/managedblockchain/2018-09-24/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/managedblockchain/2018-09-24/paginators-1.json,sha256=zAjmRcrAx6dDwoJVM-7ceZ1U04fGfxMgQsREvvVcIeI,189 -botocore/data/managedblockchain/2018-09-24/service-2.json.gz,sha256=yEg_IZh2Cv97t-mnVWrv3wS-0QCcPgy59_9tPkUQ9Zk,13879 -botocore/data/marketplace-agreement/2020-03-01/endpoint-rule-set-1.json.gz,sha256=zCezeGC0sVdarigGWlSUHEFdIRnjpA9JGIFOT2EMeC8,1309 -botocore/data/marketplace-agreement/2020-03-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/marketplace-agreement/2020-03-01/service-2.json.gz,sha256=QVXJGLMeiYWdCg2BQEKf6inxFOPf_D0afwZZZIXtFZU,8903 -botocore/data/marketplace-agreement/2020-03-01/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 
-botocore/data/marketplace-catalog/2018-09-17/endpoint-rule-set-1.json.gz,sha256=jmoZYkYo0OsQ23Px91-uYRYUHkeMAg_RIzYLcY-rXx0,1157 -botocore/data/marketplace-catalog/2018-09-17/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/marketplace-catalog/2018-09-17/paginators-1.json,sha256=JbO7iSHFp-U7kJIRHTRxPClYMlBkenux5Ow534JGcyQ,372 -botocore/data/marketplace-catalog/2018-09-17/service-2.json.gz,sha256=IV5oVnqMGkFRu6BtowDA4SFzLctNBYAOXVgV55Tb9mA,13942 -botocore/data/marketplace-deployment/2023-01-25/endpoint-rule-set-1.json.gz,sha256=y34pB81uNhv4x7PNj3OkDpEAvdNAfCaW9oy5bV-sEHI,1310 -botocore/data/marketplace-deployment/2023-01-25/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/marketplace-deployment/2023-01-25/service-2.json.gz,sha256=Og1padx7IVMkDR_iQEtdzwRuNM5uJrotTaCyNVf17eo,2721 -botocore/data/marketplace-entitlement/2017-01-11/endpoint-rule-set-1.json.gz,sha256=pefgluoneysLIvmGBn9S1-wU68gIkQsjkxwvsotwHG8,1530 -botocore/data/marketplace-entitlement/2017-01-11/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/marketplace-entitlement/2017-01-11/paginators-1.json,sha256=xFY_-BU5Ho7OPWDGn_aX-WwguHOeDyE1N4F-7nlw2KA,194 -botocore/data/marketplace-entitlement/2017-01-11/service-2.json.gz,sha256=a03c4aWEVZuGw-rqGDomG3xt3tLmsCKczpVyr4Jy-dY,2313 -botocore/data/marketplace-reporting/2018-05-10/endpoint-rule-set-1.json.gz,sha256=VQFxr7Cq-VdHa3DMmA9RvC39w2a0V6br9qPO5nkNFcQ,1308 -botocore/data/marketplace-reporting/2018-05-10/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/marketplace-reporting/2018-05-10/service-2.json.gz,sha256=wSAXm6APiTLZmkwflgyHfNhuYxN6TDU46vbdccfSCuk,2451 -botocore/data/marketplace-reporting/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json.gz,sha256=QwqVwQ_RrBg_Jt8j4GM3xs6KHhDgZlEEc3_XgP1HIOU,1166 -botocore/data/marketplacecommerceanalytics/2015-07-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/marketplacecommerceanalytics/2015-07-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/marketplacecommerceanalytics/2015-07-01/service-2.json.gz,sha256=plmuG1H41QkQV1g8hCLeIZZZsESH1vwbXlrmb2vZ8_8,3272 -botocore/data/mediaconnect/2018-11-14/endpoint-rule-set-1.json.gz,sha256=i2vpIGnrTRHVYR7o1FO30Z5DLzDYGzbL4MEGoxuWgMo,1152 -botocore/data/mediaconnect/2018-11-14/paginators-1.json,sha256=KOhLwBbi4k8mumx_ac5Y-x7OkCFL705ZJOer8NxET9g,1712 -botocore/data/mediaconnect/2018-11-14/service-2.json.gz,sha256=pAJcbOxwJRnXn3DgsVa7zVuAGkTExTSSoWYHE30Jtfg,46399 -botocore/data/mediaconnect/2018-11-14/waiters-2.json,sha256=QkoMT-f_yIvagwfOQZbjK4UnteV2fIVD4BZnXv3m1h8,8820 -botocore/data/mediaconvert/2017-08-29/endpoint-rule-set-1.json.gz,sha256=_IzSDDrV8LUK-fW662UOsxvmwCV6nanEpt_lE0hLamA,1299 -botocore/data/mediaconvert/2017-08-29/paginators-1.json,sha256=oX8chsZnZYHV50i1ILrfgyIYwOI82aSlb46knSQk0qw,1153 -botocore/data/mediaconvert/2017-08-29/paginators-1.sdk-extras.json,sha256=tG933F4yMTEHzj_2Y6YhkDuomhFhDjRDmF0k1I7n8II,208 -botocore/data/mediaconvert/2017-08-29/service-2.json.gz,sha256=hQ3VOdyTGVq58UpzkfYRAKYxADz7oUicQp9Ww86QwjA,172695 -botocore/data/medialive/2017-10-14/endpoint-rule-set-1.json.gz,sha256=QyP4y4JgpfphqmpVnwl3A8KWQGj2HfVei30UK2ux6-0,1149 -botocore/data/medialive/2017-10-14/paginators-1.json,sha256=WjRxjvuXG4nsTz1IQogEjLBHw0HlX28zgkSS-J5nc5U,4028 
-botocore/data/medialive/2017-10-14/service-2.json.gz,sha256=y2LD1hHf_uHvffhAv337Xqr0q975GK1WkrrPVfbi6IA,128920 -botocore/data/medialive/2017-10-14/waiters-2.json,sha256=v1qapfFgcUvllk-vANZFuTJLDN_edc3DsDFq4_nTViQ,15342 -botocore/data/mediapackage-vod/2018-11-07/endpoint-rule-set-1.json.gz,sha256=Dpe4AU0Pr5_U19FoNAUR1u8yi4EV0cd8IfQPHADW-Ek,1155 -botocore/data/mediapackage-vod/2018-11-07/paginators-1.json,sha256=uyOY7MfVXvY7qil_RhqS9KThRg9A3_8LB6C8en49Z3k,551 -botocore/data/mediapackage-vod/2018-11-07/service-2.json.gz,sha256=qSzB-VYNfxSzSeAUH1rV6QSkjWZDaYxn5q7-XDn0qPI,7204 -botocore/data/mediapackage/2017-10-12/endpoint-rule-set-1.json.gz,sha256=bYNn0wbBVIYo5NCr9wCNfvhy9LwXY0n2ajbY1xoUt6g,1152 -botocore/data/mediapackage/2017-10-12/paginators-1.json,sha256=Hkze_cyn0q7t1o4PHpf079W6jE_g7l8tGQf7x-t3ocs,531 -botocore/data/mediapackage/2017-10-12/service-2.json.gz,sha256=ogiL-Mk4Ikx3sfOhDgk2UPZzQWyIjYWCEyuxCVVUAKM,9876 -botocore/data/mediapackagev2/2022-12-25/endpoint-rule-set-1.json.gz,sha256=4ALPZqgy-6MspEfAaHX-sO965GVHc380E-ug3v68s0U,1304 -botocore/data/mediapackagev2/2022-12-25/paginators-1.json,sha256=TvuW6sRcN2gAFECRS2bfyHrPTUcTAZR4uEjHacwMgyU,676 -botocore/data/mediapackagev2/2022-12-25/service-2.json.gz,sha256=johE3UdDxgVoizDRz83gFQuU_5yRmwJuQUQM6YhDjlQ,24864 -botocore/data/mediapackagev2/2022-12-25/waiters-2.json,sha256=lXTTb_E9woEww_3b3x_f7fITdrFO96-eKWcC7F6VXGU,832 -botocore/data/mediastore-data/2017-09-01/endpoint-rule-set-1.json.gz,sha256=I_dq_CH6lnmhLbSRzADpgO1EhKz2Fzh2mahR5SmABZ8,1153 -botocore/data/mediastore-data/2017-09-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/mediastore-data/2017-09-01/paginators-1.json,sha256=iGhEIo_9ydhnm5jAD4K6mIgNGZ51FKUA4AlfMlG0sao,181 -botocore/data/mediastore-data/2017-09-01/service-2.json.gz,sha256=Gmo16uo5xsdIvQbvwn9czJLf6yaglqcrf6CkHqGs5I8,3785 -botocore/data/mediastore/2017-09-01/endpoint-rule-set-1.json.gz,sha256=eWDRLo2ZDeSLf-GonXPeM6FR1KvNcrFvvM0kMsk3QTQ,1150 -botocore/data/mediastore/2017-09-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/mediastore/2017-09-01/paginators-1.json,sha256=0XO8tEPJl9J7qprTHPQQt6dC7GrjIoqoCn4AcAbjiyM,191 -botocore/data/mediastore/2017-09-01/service-2.json.gz,sha256=bDiy7KPdHYhhkp7d1H5-DSh4fv9rmcs8J7x05G2zeEg,7064 -botocore/data/mediatailor/2018-04-23/endpoint-rule-set-1.json.gz,sha256=h4il7wfknN-DGWEV7w4uHdgP8r4S_jDT8TbkJ9jyHds,1153 -botocore/data/mediatailor/2018-04-23/paginators-1.json,sha256=AxqBHJot9wpawiVdBaiwALEkmIwfz6mhJsXIo7qDvlw,1336 -botocore/data/mediatailor/2018-04-23/service-2.json.gz,sha256=hyCAc8fIofvOzVslCpkhb_O9PS2LqRv_m42hF4js80s,26755 -botocore/data/medical-imaging/2023-07-19/endpoint-rule-set-1.json.gz,sha256=EQJapPzcopL9AEaATSL_ei5O81C0k6NGQdDZe_6br1Y,1304 -botocore/data/medical-imaging/2023-07-19/paginators-1.json,sha256=Zdv-t-Mpi7RENFkReFlaQ40h5arjqt4t0EDliR_8VOs,739 -botocore/data/medical-imaging/2023-07-19/paginators-1.sdk-extras.json,sha256=SH5DkCGoc9NqpV_7FKFpREPdZP8dt8uz6TDVCQZmeCI,159 -botocore/data/medical-imaging/2023-07-19/service-2.json.gz,sha256=liiDkVbC6CoVejmZOTUIApdqUlDJnvZuO8vJG5Dj7Zo,9442 -botocore/data/medical-imaging/2023-07-19/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/memorydb/2021-01-01/endpoint-rule-set-1.json.gz,sha256=Tqn4PueXprmT0LflOWGqiC5qG6cDZRzUNJbTAXIi58c,1264 -botocore/data/memorydb/2021-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/memorydb/2021-01-01/paginators-1.json,sha256=rPx4219WMZpwPaLBF1L70DN_b4x5cChfNl3u_g65bj0,2277 -botocore/data/memorydb/2021-01-01/service-2.json.gz,sha256=Mr6aZdH1tfSHVvBkSnXBwqPJGxqs09Mb5GvQhLajiHw,19249 -botocore/data/meteringmarketplace/2016-01-14/endpoint-rule-set-1.json.gz,sha256=kfB5iUXmAt5Oerv3jSpY_2QcNfCTftPSoX1rIY-YXdc,1518 -botocore/data/meteringmarketplace/2016-01-14/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/meteringmarketplace/2016-01-14/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/meteringmarketplace/2016-01-14/service-2.json.gz,sha256=35dZ9ELEyakgsBf7gspdGyl-9JeySjDk5eMuBQJxWao,7834 -botocore/data/mgh/2017-05-31/endpoint-rule-set-1.json.gz,sha256=VQI0l_JsREgfWdd6uuD6yTol-vJxq-aIp6KrNNhInHE,1145 -botocore/data/mgh/2017-05-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/mgh/2017-05-31/paginators-1.json,sha256=E2Ik6-I1lm3WF_e7avtK8OpnpzPT0CQg3im2ILInNK0,1326 -botocore/data/mgh/2017-05-31/service-2.json.gz,sha256=AyRRit48oyQVcuWgfHvFmR_WtyM2871F0jkDgEG5DIo,8366 -botocore/data/mgn/2020-02-26/endpoint-rule-set-1.json.gz,sha256=s_VqnF1B8F9jX47sx-ey_Fk-tDroCvpvRWwDTbsmEzo,1145 -botocore/data/mgn/2020-02-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/mgn/2020-02-26/paginators-1.json,sha256=zJ9gqjYlOC0wR5m9M1J-VB79ZFXJcrL78WvqPplRE8M,2682 -botocore/data/mgn/2020-02-26/service-2.json.gz,sha256=pumLeDNQRW20hFxiLuZK_JNXOOIA98a_m7LpBHJLKYs,20311 -botocore/data/mgn/2020-02-26/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/migration-hub-refactor-spaces/2021-10-26/endpoint-rule-set-1.json.gz,sha256=9Cy96En4t-I_7TYbRgjC7aNGcm8QDAq14NgvVnWHTDU,1154 -botocore/data/migration-hub-refactor-spaces/2021-10-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/migration-hub-refactor-spaces/2021-10-26/paginators-1.json,sha256=OZ6GIc0aC4ikn9W96M2UbxWeBdIy3QA7ksZ2Ec7t1e8,904 -botocore/data/migration-hub-refactor-spaces/2021-10-26/service-2.json.gz,sha256=TExL2b4_T46FgDUkGyIEjkmOYu5FnCO3WbAJ7I0is70,12478 -botocore/data/migrationhub-config/2019-06-30/endpoint-rule-set-1.json.gz,sha256=XOXriSHsVL3FjKSLM3okT-Eby1ZNQUsCdORP-EmxuEw,1156 -botocore/data/migrationhub-config/2019-06-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/migrationhub-config/2019-06-30/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/migrationhub-config/2019-06-30/service-2.json.gz,sha256=cEBbrmRdu_B2bJntR-ffkW5glt6tXhMANtVFQF21-QU,2737 -botocore/data/migrationhuborchestrator/2021-08-28/endpoint-rule-set-1.json.gz,sha256=JtlQO4uEr_M4RbRojX6Ac-7rCbM4iFlLljOrRjD-B_Q,1308 -botocore/data/migrationhuborchestrator/2021-08-28/paginators-1.json,sha256=K3BSaAaX302rt-fuD-8ewfuAaO1cXLwfwPxQmgs4gLw,1272 -botocore/data/migrationhuborchestrator/2021-08-28/service-2.json.gz,sha256=TFUPWpJOG8S0rnOjH3VD5smY353rLtlhxMkPIaNZFUk,8432 -botocore/data/migrationhuborchestrator/2021-08-28/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/migrationhubstrategy/2020-02-19/endpoint-rule-set-1.json.gz,sha256=BcfHH_UIhg4K3y7AmuXatzoEj8Nftsr7MwsLCVDmXSQ,1157 -botocore/data/migrationhubstrategy/2020-02-19/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/migrationhubstrategy/2020-02-19/paginators-1.json,sha256=1kU7uoqpjQDozh9dBNVWf7QyZDxK2PBkajg_gfz7dxY,1076 
-botocore/data/migrationhubstrategy/2020-02-19/paginators-1.sdk-extras.json,sha256=x686VmA6fsdUSIKSMZbp5ZF280pREQ7HpnPkgQTZ730,220 -botocore/data/migrationhubstrategy/2020-02-19/service-2.json.gz,sha256=fe8nAdmWAfYYHAdvnLu4nzILLaaKQqrOYutNA1FF8ZI,13363 -botocore/data/mpa/2022-07-26/endpoint-rule-set-1.json.gz,sha256=KmUX4YnV-nDbCoHC-S6IF6K6PfIAvsE0WSuPvWcREpE,830 -botocore/data/mpa/2022-07-26/paginators-1.json,sha256=KEAQbo0paz_QEgpKVo8h7BdV99138Ck3ni2bl7msPKs,1049 -botocore/data/mpa/2022-07-26/service-2.json.gz,sha256=JiA5urmnhrSojibrgtDnU8qAmnVnT6wVkIg3xD7YaPg,9296 -botocore/data/mpa/2022-07-26/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/mq/2017-11-27/endpoint-rule-set-1.json.gz,sha256=HXEp0jNAHRtZw9EU_3xengNXLvWr7A1A5vRxj6O_v-s,1145 -botocore/data/mq/2017-11-27/paginators-1.json,sha256=JZRhf6w_8oFT1nPyeTQNU09bR1-xrJn09KOtiOPO2Rg,193 -botocore/data/mq/2017-11-27/service-2.json.gz,sha256=OSpBjxNgA2XhiOAO0IHN_W_1WmM5fgtdOoQY-UqVM7Y,14581 -botocore/data/mturk/2017-01-17/endpoint-rule-set-1.json.gz,sha256=TVVF1sEQQoBGpm-aF-7eqMghslCSqvgPi9xx1BnNBdo,1217 -botocore/data/mturk/2017-01-17/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/mturk/2017-01-17/paginators-1.json,sha256=NFfGwUHHAX0lwKOB92RJHnfVkFP5IvDCtM1FnTJ-A0g,1591 -botocore/data/mturk/2017-01-17/service-2.json.gz,sha256=eF8HBkyVInrm2lBBUJpcW1YeIzcCxEg2tD7F7A_o33I,19770 -botocore/data/mwaa-serverless/2024-07-26/endpoint-rule-set-1.json.gz,sha256=1hIMt-4pr9dnQd76p-HXmHH96aHGAcfBF-zm239wCmw,841 -botocore/data/mwaa-serverless/2024-07-26/paginators-1.json,sha256=2DDXXuSHlzvaAEATWiKeZEofpzHNoZ6pZrqYA6mxWxw,709 -botocore/data/mwaa-serverless/2024-07-26/service-2.json.gz,sha256=cjZxi5QgwMCWSFv4vLlSzO5Xt5zMHm0uWod6jNPzx4E,10217 -botocore/data/mwaa-serverless/2024-07-26/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/mwaa/2020-07-01/endpoint-rule-set-1.json.gz,sha256=LtISo-Al6zey3E4oSENhe7I9eg1j8TRxU6kV7wzahPE,1149 -botocore/data/mwaa/2020-07-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/mwaa/2020-07-01/paginators-1.json,sha256=ggep_PmvO9S8tClL3v6oAmOMYV4qZcZt06URX5M9658,195 -botocore/data/mwaa/2020-07-01/service-2.json.gz,sha256=pWBlsnoFlD3j4QQab_23kgJK7bXnxKh3jtXL7oKj8GU,11437 -botocore/data/neptune-graph/2023-11-29/endpoint-rule-set-1.json.gz,sha256=RM43rcd9jEW9pqOtCL3zUZuTlAJ1HeFNj0raC4hV78c,1408 -botocore/data/neptune-graph/2023-11-29/paginators-1.json,sha256=BahW2a3tKEIHiNWsH6L6euKU2GNi128H27Z9QERjxwE,869 -botocore/data/neptune-graph/2023-11-29/service-2.json.gz,sha256=AWvgJ2DaUyN3wWe6ewHvsX1kvqM5rf4wsyImGXrLbLc,15138 -botocore/data/neptune-graph/2023-11-29/waiters-2.json,sha256=KT4CSJIIJS_NGt8Lr21pPZdE6vPj-STKCnh9Z8opMQs,6302 -botocore/data/neptune/2014-10-31/endpoint-rule-set-1.json.gz,sha256=QuoGtRqWIqiXweuqnNhSgo96SJIvQRpVulwOpfgTwxs,1230 -botocore/data/neptune/2014-10-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/neptune/2014-10-31/paginators-1.json,sha256=66ojR4_WsS4k5APEI8fjU0mCJTn7B15KuG8mPLcqFk8,2881 -botocore/data/neptune/2014-10-31/service-2.json.gz,sha256=I0yTRwtx6aN9J2Dh8THh4QFlR2pd6YGuq3744gg2Ng4,46014 -botocore/data/neptune/2014-10-31/service-2.sdk-extras.json,sha256=U_PgxwtPhWl8ZwLlxYiXD4ZQ4iy605x4miYT38nMvnM,561 -botocore/data/neptune/2014-10-31/waiters-2.json,sha256=8bYoMOMz2Tb0aGdtlPhvlMel075q1n7BRnCpQ-Bcc1c,2398 
-botocore/data/neptunedata/2023-08-01/endpoint-rule-set-1.json.gz,sha256=YFrSdaGbXW9eLS9UnGniHHq-vIyRCTgQQGW7hIqHVYg,1297 -botocore/data/neptunedata/2023-08-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/neptunedata/2023-08-01/service-2.json.gz,sha256=_wur_bhfaVxsNLCYt2FFCC2ytKuOmRcQK0sv2422NDo,23576 -botocore/data/network-firewall/2020-11-12/endpoint-rule-set-1.json.gz,sha256=2tp_dl3IWpLTjfiIgj5th24-5JRdQgRKwXWJlZHOAk0,1155 -botocore/data/network-firewall/2020-11-12/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/network-firewall/2020-11-12/paginators-1.json,sha256=ZuWICIjteVuD8Zh_STIm6dvL7AVdPg6cfs2C07QIM5E,2318 -botocore/data/network-firewall/2020-11-12/paginators-1.sdk-extras.json,sha256=IXOL2iFcreaZkcp6DCHiCPgS0wjUqDn0ZJCbvuOIIS4,594 -botocore/data/network-firewall/2020-11-12/service-2.json.gz,sha256=eL6VLsBpOao4sbXxVyPiU03EumPEQySx4dm3YVXpvq4,55508 -botocore/data/networkflowmonitor/2023-04-19/endpoint-rule-set-1.json.gz,sha256=bOTzXq9ye-sok3kFizDKV_eFL_en3nb6SRluyHZ-etY,840 -botocore/data/networkflowmonitor/2023-04-19/paginators-1.json,sha256=wOkVGLLik9ddJ4lYIRfRNlO9nDh6X68BQwpo38vvMM4,944 -botocore/data/networkflowmonitor/2023-04-19/paginators-1.sdk-extras.json,sha256=-Yf09BFjNn75UmPPQdc2fhWCP04zh0pZHgwGD41NTeE,411 -botocore/data/networkflowmonitor/2023-04-19/service-2.json.gz,sha256=dV-apxcRoV5Q_NSrvEsX-A43tePWKWfg7EDxSRfkqXk,10353 -botocore/data/networkflowmonitor/2023-04-19/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/networkmanager/2019-07-05/endpoint-rule-set-1.json.gz,sha256=c6Ue4sz7AugBdz52_70QyHWBj6zQgKy0vbJveQ0eEqM,1732 -botocore/data/networkmanager/2019-07-05/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/networkmanager/2019-07-05/paginators-1.json,sha256=754b8htCAy6FPKWQ_iWwTXNPcCZ262kCjNyOe3pwHeA,4412 -botocore/data/networkmanager/2019-07-05/service-2.json.gz,sha256=kUxRU1EMLhqGOB_M3MMNEsKBKeYoDeiFVjpQzXjK7hQ,26467 -botocore/data/networkmonitor/2023-08-01/endpoint-rule-set-1.json.gz,sha256=_EjySaaVBSEu6UHVoFWPBS_6ZwE5g0laOdWjB07RJoE,1303 -botocore/data/networkmonitor/2023-08-01/paginators-1.json,sha256=nHQ47DVYXQU7zjhe4CUO3-J0OdqR2OjaTaQ4c8vcMW0,187 -botocore/data/networkmonitor/2023-08-01/service-2.json.gz,sha256=562DeXsVYqYZTgHse53Mm6uG5dninPg3NJnt04RKGxo,5147 -botocore/data/networkmonitor/2023-08-01/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/notifications/2018-05-10/endpoint-rule-set-1.json.gz,sha256=Sj53Y3zm88m5TST8FPQC0e-JVnr-yE5vcAZRSDggoOI,836 -botocore/data/notifications/2018-05-10/paginators-1.json,sha256=_5tXGmluTxccm4q-eU4QTXfA4R5Hxz9gDkSXXrlvX0U,2086 -botocore/data/notifications/2018-05-10/service-2.json.gz,sha256=2L3xy40Tr_IN8PMvuOvPFuXvCtnBom8BnKxQwXHgmig,16071 -botocore/data/notifications/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/notificationscontacts/2018-05-10/endpoint-rule-set-1.json.gz,sha256=PHXbSmXs8w7RWuPkqw5-HtCEvBC2CiHIBctNKIdjvy4,907 -botocore/data/notificationscontacts/2018-05-10/paginators-1.json,sha256=iGpOqu4PGgEba54bj8oQAK9ZfNQBKCi6VlSw5JqLScQ,197 -botocore/data/notificationscontacts/2018-05-10/service-2.json.gz,sha256=nYP5lVFJ7XA6DyPNAseLfgmKyB9N97uecW6IvSe0Eh8,3777 -botocore/data/notificationscontacts/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 
-botocore/data/nova-act/2025-08-22/endpoint-rule-set-1.json.gz,sha256=px41vLVWEI9cvfXe5eGVxB_4mQLKnkcN6KTTmMWJDyU,1278 -botocore/data/nova-act/2025-08-22/paginators-1.json,sha256=2GRK4t9Cd6oYnF3H6gUz_6Lbv0c18GOjS43yorxtv40,727 -botocore/data/nova-act/2025-08-22/service-2.json.gz,sha256=SzoqnikyGNyL7-TGbU_79H3ZpDiDJ5jgF4k6utBbpVg,7110 -botocore/data/nova-act/2025-08-22/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/oam/2022-06-10/endpoint-rule-set-1.json.gz,sha256=gjaFtLnugEtVA__c6i2I1qfwN-tyUcE1NRw_Ca72EUA,1295 -botocore/data/oam/2022-06-10/paginators-1.json,sha256=O-yiC1jmUubOdoY_nq_BvS2UBfskjOM7cgJ547VWO3U,501 -botocore/data/oam/2022-06-10/service-2.json.gz,sha256=7MBtYsn4A9opq8jj463Me0Eua8Ynht1hWYh-yLE-yRY,7039 -botocore/data/observabilityadmin/2018-05-10/endpoint-rule-set-1.json.gz,sha256=YUiB30bSXZJsp5fOiy2TTMW5TTGCUF_z0kU88GLM9nw,1307 -botocore/data/observabilityadmin/2018-05-10/paginators-1.json,sha256=z0q6D6Y3CIK8BNUSd6nDPb0Bc-BRDJnnb0Y1NOmsf0A,1364 -botocore/data/observabilityadmin/2018-05-10/service-2.json.gz,sha256=PpVMg40hzKzQzo_rZDdo9ME3nAndDSRtbfzB8Q7WftQ,15582 -botocore/data/observabilityadmin/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/odb/2024-08-20/endpoint-rule-set-1.json.gz,sha256=tiekxXMlA3Wq_mSQzZFZjOHscVWFS47oFivZXLscnLQ,1295 -botocore/data/odb/2024-08-20/paginators-1.json,sha256=74QkGEVfoLcV2XLgheCVohdLRbXpRAWDw3edDGwS0NQ,1987 -botocore/data/odb/2024-08-20/service-2.json.gz,sha256=vMFpNjPDLwkZAAFv1hY06fClxiB9MyZPw6u5NWi5Qu4,21340 -botocore/data/odb/2024-08-20/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/omics/2022-11-28/endpoint-rule-set-1.json.gz,sha256=hKITm-4DzVtXSo3BJdwy0CYeK0QMwWDO7qXSjv6SE9s,1297 -botocore/data/omics/2022-11-28/paginators-1.json,sha256=jv36OpUcqzGkCvGA1USyceaqApWVuD6APgPpMVzsP9Y,3801 -botocore/data/omics/2022-11-28/service-2.json.gz,sha256=0zAD6WEugXmjHE4us-ylfn4_awG8IGQolJzz0Y0sGIU,42683 -botocore/data/omics/2022-11-28/waiters-2.json,sha256=ilyIBGDpQrZwAA4HzC3dsLnCTYa65vhX32YyypUzGwg,15423 -botocore/data/opensearch/2021-01-01/endpoint-rule-set-1.json.gz,sha256=sZmoX-jyq_4WXYOpzXsjONus8GNdq_KExmnUY0oTVwE,1311 -botocore/data/opensearch/2021-01-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/opensearch/2021-01-01/paginators-1.json,sha256=8SvuZgZ0Y0xqPmOvNUfP4ApehWFu0vVQzqEDf2RJcc4,203 -botocore/data/opensearch/2021-01-01/service-2.json.gz,sha256=2FovMkcSVjVhJ-9c2t32BQgs4VtlJrbxrzjLJiFMX9s,49228 -botocore/data/opensearchserverless/2021-11-01/endpoint-rule-set-1.json.gz,sha256=clLUDaExPX0vwnHHuBbENOx40yW7HAF9K61tNaSbrIs,1296 -botocore/data/opensearchserverless/2021-11-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/opensearchserverless/2021-11-01/service-2.json.gz,sha256=Lyl7fOhridB_mYEKWAVLht1rGsYDdlr7GSXMxsMPZjs,12356 -botocore/data/opensearchserverless/2021-11-01/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/organizations/2016-11-28/endpoint-rule-set-1.json.gz,sha256=Fua1_O22PTpWf4X5PTDGGgV0sGjPNC3HE9byow3RU7Y,1651 -botocore/data/organizations/2016-11-28/examples-1.json,sha256=H-s8eMAzogFkvDj193d_NweczAUFsyrDfjFEE_77BFQ,50009 -botocore/data/organizations/2016-11-28/paginators-1.json,sha256=gN3_2FHJt6Xyap3z3IqVjvbbPcC4jGIddUvlLZOsqA8,3185 -botocore/data/organizations/2016-11-28/paginators-1.sdk-extras.json,sha256=2OOgdafaSQgkls_L9T5FWZ0oZuzRK3NY3Dw4ogTQjDI,382 
-botocore/data/organizations/2016-11-28/service-2.json.gz,sha256=IO0vVtDQk_lwVKpPN3jXHfzsi5rlWVU5Gm7pQjBXM3o,39484 -botocore/data/osis/2022-01-01/endpoint-rule-set-1.json.gz,sha256=JaGEJRopxsttvSxCXHb7g34mw_E-QPtcGi3m1BrNG8Y,1296 -botocore/data/osis/2022-01-01/paginators-1.json,sha256=VWwFT-KdB44DIlPwqvTWail3glgRum4sRxVlzVEE5ek,405 -botocore/data/osis/2022-01-01/service-2.json.gz,sha256=frqNPcO4OQjEZWI9R_qMapX-ZXUpGEvH9sWDWeTOJd0,7792 -botocore/data/outposts/2019-12-03/endpoint-rule-set-1.json.gz,sha256=R-O3yASKsl6Da2MBUVf-IWwCwreAgSn9rqChtv8m74s,1233 -botocore/data/outposts/2019-12-03/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/outposts/2019-12-03/paginators-1.json,sha256=ARUFaPXTMY1Sjtpwxkud166C_MT7IPGN0I01Lo_U8h4,1918 -botocore/data/outposts/2019-12-03/paginators-1.sdk-extras.json,sha256=IjBeb9H050H25aYAZb_jx0VRuUvzcIuHI_OxcsADLrM,315 -botocore/data/outposts/2019-12-03/service-2.json.gz,sha256=-YBuMsUKx0jF3zSD1mkoSbvi5TEKTp8qmB7PnZDHYhE,14999 -botocore/data/panorama/2019-07-24/endpoint-rule-set-1.json.gz,sha256=IZfp4tv7eLvB76H_xAjJwF2BqdN9sAZGphnU2F-jj-Q,1149 -botocore/data/panorama/2019-07-24/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/panorama/2019-07-24/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/panorama/2019-07-24/service-2.json.gz,sha256=FvjPwN-AsYsHYReGGwYyAPUfBTHHuDU3gVKEV4DsWWk,11986 -botocore/data/partitions.json,sha256=JAaRfCPvyMN1ESBXj-UGdQ1BkMWj1463fqTljs7vEvg,7089 -botocore/data/partnercentral-account/2025-04-04/endpoint-rule-set-1.json.gz,sha256=GUR2QyROWopJ-1yfi7xwDrxLkzE9U6T7wHdyQa16_Zo,843 -botocore/data/partnercentral-account/2025-04-04/paginators-1.json,sha256=rKgRX7jPlJ6Pexb0EVPaPAZJh-b3O0NbkhQQd6EM0Is,536 -botocore/data/partnercentral-account/2025-04-04/service-2.json.gz,sha256=uEHChpMxrvXZdoLvRIc9rhlLpqurJ31dYdxDBYMkVXA,13188 -botocore/data/partnercentral-account/2025-04-04/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/partnercentral-benefits/2018-05-10/endpoint-rule-set-1.json.gz,sha256=njHWa9s5-BifispsazuDcB_U_bLU6ez1yYonqo-6P3M,844 -botocore/data/partnercentral-benefits/2018-05-10/paginators-1.json,sha256=Spea3UM7gtCjad2_yxNdgtZ5FGKHphff2--T_uUEOpg,577 -botocore/data/partnercentral-benefits/2018-05-10/service-2.json.gz,sha256=40_uIceJ3P9vvW7XlTOEiB4C-A8W13AtZmngSrH0qXI,9064 -botocore/data/partnercentral-benefits/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/partnercentral-channel/2024-03-18/endpoint-rule-set-1.json.gz,sha256=Hl5DFO9nBuZFZ-zgDSeBLblzUNbtHY45RLTMOhLspH8,1121 -botocore/data/partnercentral-channel/2024-03-18/paginators-1.json,sha256=J7M-NM9oSS1RXWYRJ9dLiXrIs9T2TZZWeMNvOEqIU80,533 -botocore/data/partnercentral-channel/2024-03-18/service-2.json.gz,sha256=cquutWDzYIMn0SyviwEtxpjHc3aoOmNvmYGC2TXYNTo,6726 -botocore/data/partnercentral-channel/2024-03-18/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/partnercentral-selling/2022-07-26/endpoint-rule-set-1.json.gz,sha256=eGewO4nbdFo7-8J2mAE2KGfDVx5YK_j8obnwOBN9fGg,843 -botocore/data/partnercentral-selling/2022-07-26/paginators-1.json,sha256=EGbxWmVZij_vYX7N5XUnfJqb9RSjrrDwQrlXwQPIdbk,2102 -botocore/data/partnercentral-selling/2022-07-26/service-2.json.gz,sha256=FGFOeeczfi2ocw_myjvwZjB9AVaj8z7G7j_bfXsN3IE,54744 -botocore/data/partnercentral-selling/2022-07-26/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 
-botocore/data/payment-cryptography-data/2022-02-03/endpoint-rule-set-1.json.gz,sha256=OYc3fW7vo4OLML2KnyhCa5SHAdC7SSGDRrw0LABZTdE,1318 -botocore/data/payment-cryptography-data/2022-02-03/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/payment-cryptography-data/2022-02-03/service-2.json.gz,sha256=KuJ6sn6QyVDsUuZdAOUDAcP8zt2qRReWt18RSNSQ1kg,18775 -botocore/data/payment-cryptography-data/2022-02-03/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/payment-cryptography/2021-09-14/endpoint-rule-set-1.json.gz,sha256=x9D0BN-NUAoPJ61ZKySLDMy4VtimDShhnN-JELQlJCc,1318 -botocore/data/payment-cryptography/2021-09-14/paginators-1.json,sha256=Q3nZHuUZ53pNZpShnEVxB2Z6ec8thvlIx-hPXFVBNM8,504 -botocore/data/payment-cryptography/2021-09-14/service-2.json.gz,sha256=llWarwYPrBIeW6xx5o6376JoolmxSxYyzfVrgTtJtWo,22419 -botocore/data/payment-cryptography/2021-09-14/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/pca-connector-ad/2018-05-10/endpoint-rule-set-1.json.gz,sha256=29i_Z8cEEzG9WAYFVMVxziCrgSDPZgUa_5D0nfDer40,1305 -botocore/data/pca-connector-ad/2018-05-10/paginators-1.json,sha256=AS3R0cOqXrf6ALY1Ar4Z_HdXbvrA4SwPve_YSeqtIFc,932 -botocore/data/pca-connector-ad/2018-05-10/service-2.json.gz,sha256=rP0AaltVi5uLDNbcg_Sy465wNFF-RdsxxAtAzNKXrCQ,13292 -botocore/data/pca-connector-scep/2018-05-10/endpoint-rule-set-1.json.gz,sha256=fEOfC2EJIlvtGKEDK278ZcuZPoO7Fhe5cv-WxHgOtm4,1307 -botocore/data/pca-connector-scep/2018-05-10/paginators-1.json,sha256=-TAE2EG4hET8i1kSBmb5SkQbT8NEQ_peQNskuUSs0Ug,364 -botocore/data/pca-connector-scep/2018-05-10/service-2.json.gz,sha256=gvZwE_22hjooNR7hj0BAJVOYoBmg4jo1PBrS4LalPjU,5804 -botocore/data/pca-connector-scep/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/pcs/2023-02-10/endpoint-rule-set-1.json.gz,sha256=TqMJN2hcHU60Id9lMRQig_CtSmexQr4G-hXKAWtk2xM,1295 -botocore/data/pcs/2023-02-10/paginators-1.json,sha256=rm1F2IEEf8TDUFnjJYcvrgQaCiVryXOmbjqdg-aescw,525 -botocore/data/pcs/2023-02-10/service-2.json.gz,sha256=4J1Sb42rJTm6QknSdzGodSIXCoWsX7qTEPkcwVdw71c,12228 -botocore/data/pcs/2023-02-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/personalize-events/2018-03-22/endpoint-rule-set-1.json.gz,sha256=LthLnzOhzVpKpqFqiWCii35ZI9zRwuesn-_v4FmbDZM,1158 -botocore/data/personalize-events/2018-03-22/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/personalize-events/2018-03-22/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/personalize-events/2018-03-22/service-2.json.gz,sha256=caE_a9Up9LmDyfLABa2wfUFANsWCJaYxne6tfoNuzdc,3891 -botocore/data/personalize-runtime/2018-05-22/endpoint-rule-set-1.json.gz,sha256=1TApNWY0o_AP8ZJ2Y5u24U0YCDJ43IG34IAl6v4dupA,1159 -botocore/data/personalize-runtime/2018-05-22/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/personalize-runtime/2018-05-22/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/personalize-runtime/2018-05-22/service-2.json.gz,sha256=6tXpmLTrMOLnKq58hW-5qxp5M5fBVaUITdQkFBcXndw,3843 -botocore/data/personalize/2018-05-22/endpoint-rule-set-1.json.gz,sha256=j-BwG3yTJbxqZ0p7L4Vb-eMkw0KcVBLX77uqcA3QOEk,1153 -botocore/data/personalize/2018-05-22/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/personalize/2018-05-22/paginators-1.json,sha256=PfTPE03jTLANh2F51b68_GALtAUqFWJp2R0o2Xl5u0A,2766 -botocore/data/personalize/2018-05-22/service-2.json.gz,sha256=Z8FvaIENNgrv-gPhzYZ6_qYxA7ETo4410NnSmMiv6hE,31275 -botocore/data/pi/2018-02-27/endpoint-rule-set-1.json.gz,sha256=AfQsok5Z5X1jqWpxUOLHsnOydKhktzsEPRfEAwDTQRs,1144 -botocore/data/pi/2018-02-27/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/pi/2018-02-27/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/pi/2018-02-27/service-2.json.gz,sha256=dRjloIAtQXs4Z3B27jWLEJ-r4DeSPs1LRxJJzQiVXYg,12024 -botocore/data/pinpoint-email/2018-07-26/endpoint-rule-set-1.json.gz,sha256=qddTI3LJpC7CaEbAWqeCMTM9oU_OVWaIC0s0ZLfHhGg,1146 -botocore/data/pinpoint-email/2018-07-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/pinpoint-email/2018-07-26/paginators-1.json,sha256=G74a7tI3gD77zuNQfj6bfDHtriSA2qhAWh6Su9Tw6Bc,914 -botocore/data/pinpoint-email/2018-07-26/service-2.json.gz,sha256=MduB7TtJotkFuoBYt-rdbpAlBOtexCXsmIoz1qZF-mI,23622 -botocore/data/pinpoint-sms-voice-v2/2022-03-31/endpoint-rule-set-1.json.gz,sha256=pt2Y79gzIb9fF3tuwe6G8R3rVq3Bfyje6oZTV3fRlVU,1150 -botocore/data/pinpoint-sms-voice-v2/2022-03-31/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.json,sha256=T0sS1LbRMIty2JJWn7b12bAUhGxaf5hA1ZAsI9Vsj-g,4129 -botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.sdk-extras.json,sha256=CP3Bd5lERGMsJNkUdcHm8HDd9lidYVI8uQYg78EloQI,1431 -botocore/data/pinpoint-sms-voice-v2/2022-03-31/service-2.json.gz,sha256=tsWFwthlBxntDEMTiKMOAjcG_QhKnevWGxrYqiKNw3k,38886 -botocore/data/pinpoint-sms-voice-v2/2022-03-31/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/pinpoint-sms-voice/2018-09-05/endpoint-rule-set-1.json.gz,sha256=c00Jm_oJO9qiQRvnbyk7jSpZWrJ8Ph9X-T6sIIhdE0A,1305 -botocore/data/pinpoint-sms-voice/2018-09-05/service-2.json.gz,sha256=f59uS3aYaE4h8fqZ8oHY6fUreb5zkQfUuWs5zDHi6Q0,3344 -botocore/data/pinpoint/2016-12-01/endpoint-rule-set-1.json.gz,sha256=LOhoIMm6FpREac7uyDJbblC0Py4nIIbmQ026v-7oJAY,1313 -botocore/data/pinpoint/2016-12-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/pinpoint/2016-12-01/service-2.json.gz,sha256=qNaN-h-yr6zLwqkRf7dKTqhwsOCygBCiz19QjuBn6W0,70366 -botocore/data/pipes/2015-10-07/endpoint-rule-set-1.json.gz,sha256=gylLbktY-KZcA8DV-eLWMOj0zqV7EfngjsEHUT6ZkFI,1293 -botocore/data/pipes/2015-10-07/paginators-1.json,sha256=a_b-W2Fj-9dt3XIXqHzXHKGRz8elOX8p9h2pI3wg5ls,176 -botocore/data/pipes/2015-10-07/service-2.json.gz,sha256=V_tbPTekw9leSMSPbCFqJYTkwhO_AtWhmGF6jYJH_ak,23040 -botocore/data/pipes/2015-10-07/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/polly/2016-06-10/endpoint-rule-set-1.json.gz,sha256=0x8WydGe7eB7R4Ms6uFmM_gAVY2RYypXrpEUgUqipPY,1147 -botocore/data/polly/2016-06-10/examples-1.json,sha256=4KBzptmKd-ySr0PXR8a5UOE6w8nw-mm0Iq-LRhrtcNM,5101 -botocore/data/polly/2016-06-10/paginators-1.json,sha256=IJnO61fPCtuJPYshmxGjm9ZzkXfOxEvsL0acyUPG55E,463 -botocore/data/polly/2016-06-10/service-2.json.gz,sha256=bSZ8lrHyVOCJIss28DAiGxQapOGEZ4tn9pjwN_CVnbc,8149 -botocore/data/pricing/2017-10-15/endpoint-rule-set-1.json.gz,sha256=otm_gRGqvdgBAbgrslNQiZG8P8Vmqq-Jj6BqUz6StT0,1217 -botocore/data/pricing/2017-10-15/examples-1.json,sha256=LX0A-kHCd3N64FsP7EdT6IV-Sej2qNX9ygW6n6jBucs,4263 
-botocore/data/pricing/2017-10-15/paginators-1.json,sha256=rizUQ-J932MNyVUTMjrRSVOm-tmzWnvnYhWoIMGxuuM,820 -botocore/data/pricing/2017-10-15/service-2.json.gz,sha256=EoHS7TSj7Mmj5z-6t0inSUuO8MuI_v2A0lMXzG_77Xw,4542 -botocore/data/pricing/2017-10-15/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/proton/2020-07-20/endpoint-rule-set-1.json.gz,sha256=DeUixZ9rPtd5JGbMCc0G-vgWx2NMRYtECzDKMMPW4Mk,1148 -botocore/data/proton/2020-07-20/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/proton/2020-07-20/paginators-1.json,sha256=oioU0xuoNT12gWLZPvkd8rPQIM9gk8AOnNiZxDJybLs,3501 -botocore/data/proton/2020-07-20/service-2.json.gz,sha256=2p32bSyCdQwcDrtYfqjKSZbOqBSrFX8xu78PPHkW2xQ,28216 -botocore/data/proton/2020-07-20/waiters-2.json,sha256=sGpaiRnx46CfHQh_T__IIByVlrchRRjseWa3NCdIqdI,6872 -botocore/data/qapps/2023-11-27/endpoint-rule-set-1.json.gz,sha256=z2wrAc3Vv5Gv-m3xDzww_XCeknqHN7-LLJsk7Lbjd1g,1300 -botocore/data/qapps/2023-11-27/paginators-1.json,sha256=McjYxb7TrO3PLEXrYryPjrdMEbmbSYts2zAu2s0zbnY,340 -botocore/data/qapps/2023-11-27/service-2.json.gz,sha256=rCqLoXew5Ijvb1XnU0AcMKrdIn2ly0f4Il2KXB18lnE,13950 -botocore/data/qapps/2023-11-27/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/qbusiness/2023-11-27/endpoint-rule-set-1.json.gz,sha256=VQFkGRtcYUZWSvRtCIVQMktqC7yd8FWd60MyxmFXtGk,1126 -botocore/data/qbusiness/2023-11-27/paginators-1.json,sha256=gZen1QNNvkJOLGMSxX-PievGiDr5-MEjDOr73fyIbvY,3436 -botocore/data/qbusiness/2023-11-27/paginators-1.sdk-extras.json,sha256=q8kHxHtnclzAwbKItnYG8gmYQx4NgxA2wfYQVA3yew8,428 -botocore/data/qbusiness/2023-11-27/service-2.json.gz,sha256=9FpIx2bRW1eseRpdLqiSOrEVDV5jvjKW1LnLYw01Vyc,55359 -botocore/data/qbusiness/2023-11-27/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/qconnect/2020-10-19/endpoint-rule-set-1.json.gz,sha256=wGG-mYSdROPXe0kxYRdN_-kftFXuDK15sojHiOhPSN0,1298 -botocore/data/qconnect/2020-10-19/paginators-1.json,sha256=7QW4D2QMFD1FAsyTfPOtoK82I6R2HJeZLva7-ZmnKSM,3927 -botocore/data/qconnect/2020-10-19/service-2.json.gz,sha256=XgVQT5R4-pBikaH1802F1i1-kP_atAWHvvcM_k9wnIg,57899 -botocore/data/qconnect/2020-10-19/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/quicksight/2018-04-01/endpoint-rule-set-1.json.gz,sha256=KOeSnCbychFIM6VMnWI1AGMzY_0dkOB8TubeQXuHZhU,1152 -botocore/data/quicksight/2018-04-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/quicksight/2018-04-01/paginators-1.json,sha256=fSxmq31yhxOoSK5iC_8DsHBF-_7VedmZffrgV-gRyr0,6868 -botocore/data/quicksight/2018-04-01/paginators-1.sdk-extras.json,sha256=G2fJdAlG2TfBIPG8xqxUa132R8bQFRhCO7mvj-PC4r4,4779 -botocore/data/quicksight/2018-04-01/service-2.json.gz,sha256=9UKGGE0iQmLlpvkTwSUAZWag5AqbP3XHI9-x-P27_zA,210313 -botocore/data/ram/2018-01-04/endpoint-rule-set-1.json.gz,sha256=960h10pu65i0595s0CY09EEyG6enPikb0TF8oQGWLjE,1230 -botocore/data/ram/2018-01-04/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/ram/2018-01-04/paginators-1.json,sha256=68WO6NwCy0OQL3rko-MRoZ0l1F2vhih8z8F3sse3R3g,1085 -botocore/data/ram/2018-01-04/service-2.json.gz,sha256=sdL_Thm-4Kw0xukXSOAc8sZH7bonvqx6ij3KRIDvUJk,18021 -botocore/data/rbin/2021-06-15/endpoint-rule-set-1.json.gz,sha256=q_9tscCKcUhHbr4PJZvAaq-kxPBnxogaZAq3fkfmwqo,1146 -botocore/data/rbin/2021-06-15/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 
-botocore/data/rbin/2021-06-15/paginators-1.json,sha256=LB-X6UiLpJdFPrOCSc0raKGabdXiY9PhtS7nzQJbMts,181 -botocore/data/rbin/2021-06-15/service-2.json.gz,sha256=hausNmpenEpi8igLBRDJJFSzmBd3xQnAmk2IgCAA_10,4962 -botocore/data/rds-data/2018-08-01/endpoint-rule-set-1.json.gz,sha256=qYxcd5oWDiE9vyNmx4AkWYV6A3ws4CLHDF3zy3sy2OM,1149 -botocore/data/rds-data/2018-08-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/rds-data/2018-08-01/paginators-1.json,sha256=hIQ7AbLBsY4fPSNLVMg0dS45m6cjZKFTjbp3ZLh4zj8,23 -botocore/data/rds-data/2018-08-01/service-2.json.gz,sha256=vAvc4S87qbeSwBW3Pr0liGRbMxRNx4tOwknL4qv0cwY,6569 -botocore/data/rds/2014-09-01/endpoint-rule-set-1.json.gz,sha256=XDXLI9vS-BzGD8nDKghQtVKVsNuSUUqoglurusOykbM,1234 -botocore/data/rds/2014-09-01/paginators-1.json,sha256=CKMhQjYqNQB1hiHNi4vCNIVtQvu29SM_ySRhqxTKfOQ,3095 -botocore/data/rds/2014-09-01/service-2.json.gz,sha256=5sc-So6MmmwEYXcfTsECxZqg1LJAM6ejT_c7aphufVs,37839 -botocore/data/rds/2014-09-01/waiters-2.json,sha256=9BpCCotIHKKeyJHD5Bo1fdRi6EnHK6jyJJx_9wswzCQ,2645 -botocore/data/rds/2014-10-31/endpoint-rule-set-1.json.gz,sha256=QuoGtRqWIqiXweuqnNhSgo96SJIvQRpVulwOpfgTwxs,1230 -botocore/data/rds/2014-10-31/examples-1.json,sha256=Pa_Dpbo8pg0O9rZRPEuFXsgnzT6XUqIfwHpXauQnc0M,57903 -botocore/data/rds/2014-10-31/paginators-1.json,sha256=sVBn30vV3XWtW5PxTgtpHzvoQcB0J900ZdRg9cPJnm0,7402 -botocore/data/rds/2014-10-31/paginators-1.sdk-extras.json,sha256=S21buVoyp0LlykSD0lYWlVIRbOqJB4qpVw7mt2GFprQ,192 -botocore/data/rds/2014-10-31/service-2.json.gz,sha256=jtLDIDRAspI6RO1HYipLF-Fq6TQmRKrLN3GYCGfqYm0,168716 -botocore/data/rds/2014-10-31/service-2.sdk-extras.json,sha256=NWqAyPauBSLTPFOO_wMu4XZ7VTkw7nY8QjCorphUpTM,1345 -botocore/data/rds/2014-10-31/waiters-2.json,sha256=DaJxFaWQOJpx0aNV4rEHX8yDWHBfTWHNkA3u6NgDAOk,10970 -botocore/data/redshift-data/2019-12-20/endpoint-rule-set-1.json.gz,sha256=cGU_LsoInAk5jO0T0KTwJ4FbCAx0B2G02Vj8GFhn1V8,1152 -botocore/data/redshift-data/2019-12-20/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/redshift-data/2019-12-20/paginators-1.json,sha256=o__jaQIlvpn0QI1nUo42lhQikOWDAJKjNGc75Q982y4,1108 -botocore/data/redshift-data/2019-12-20/paginators-1.sdk-extras.json,sha256=W5XyQjgYJkSJeO772ifvKMUzs8b5j4AvLb8YQwWnm_w,458 -botocore/data/redshift-data/2019-12-20/service-2.json.gz,sha256=c58l6OgCVuBGN-bK0fpVgbYbeoN4Fq-Tmo0UvTnBaU4,8044 -botocore/data/redshift-serverless/2021-04-21/endpoint-rule-set-1.json.gz,sha256=p_PJ9fcqBSq-uEoJhC8Y4sn32ugw6LROJFiwPUfe4mA,1156 -botocore/data/redshift-serverless/2021-04-21/paginators-1.json,sha256=1vYwDzBLSRf-kJMkph5FppY0Ud0HNqTgr3GRJoiOKh4,2476 -botocore/data/redshift-serverless/2021-04-21/service-2.json.gz,sha256=JQcvkbmdudhQ5yOCwJJYKnSDBSbnckIMZ5Now_UlQCA,22461 -botocore/data/redshift/2012-12-01/endpoint-rule-set-1.json.gz,sha256=F1-KLgP2oF91JdfOJZav5BG8wtdsVT4NUu9FfFOCxDo,1234 -botocore/data/redshift/2012-12-01/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/redshift/2012-12-01/paginators-1.json,sha256=U9XZCiP9Zd_FwnH0gENTm1olzWqz-FWxLwLeiuiZMxI,6701 -botocore/data/redshift/2012-12-01/service-2.json.gz,sha256=-Xo2LQbY3M0LRp5z7DNl2ZKaLu5SihT8aZZxo1buegs,78219 -botocore/data/redshift/2012-12-01/waiters-2.json,sha256=mvax_COD6X10xa7Toxa2DsrarFdKFg9kOWbIKRLahS4,2344 -botocore/data/rekognition/2016-06-27/endpoint-rule-set-1.json.gz,sha256=1ZlnDPxNTPpHKWVX3U2KYLnWFV2iv0KNgSaYjRRvKQY,1150 
-botocore/data/rekognition/2016-06-27/examples-1.json,sha256=pEUj6cF9yKB10eaE3lAAObBMc4nV3Jak105Ro2A3ZMc,20327 -botocore/data/rekognition/2016-06-27/paginators-1.json,sha256=mDoU6wXUCCgHeOrcvnEqTpQ18yV5otpEqZt5TsFarFA,1699 -botocore/data/rekognition/2016-06-27/service-2.json.gz,sha256=F4O1jPP442iuDq53VwJlJ5pH4L0TO-8JFugBK2NhSPA,71458 -botocore/data/rekognition/2016-06-27/waiters-2.json,sha256=KRKVzu37WzZwVdazhDURGYo_qTbgIDDIhBTPyvTt1lg,1542 -botocore/data/repostspace/2022-05-13/endpoint-rule-set-1.json.gz,sha256=bj-MB7jpW6Dy4Lrevn5h1JY2Nzu7XxRhROlvX9MWhvc,1300 -botocore/data/repostspace/2022-05-13/paginators-1.json,sha256=YMwMMFg603UCo-j7eNe9slLy3jUAAYa_ODX-Fh8ffd8,345 -botocore/data/repostspace/2022-05-13/service-2.json.gz,sha256=S-3a_E2Vdr0XySo1-A1I6A67KSrCSjnrX3xsMqTA32o,5946 -botocore/data/repostspace/2022-05-13/waiters-2.json,sha256=IFWB48e2MaBFCt9EUH0lvhocQtIiyM5RKbLNpjKLwOY,2429 -botocore/data/resiliencehub/2020-04-30/endpoint-rule-set-1.json.gz,sha256=VpDOI-CA801BvgddujS1TnHbqlNxqGBVINBYVeCM-KM,1152 -botocore/data/resiliencehub/2020-04-30/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/resiliencehub/2020-04-30/paginators-1.json,sha256=i9EQooI4rve2dtahGRtoAc2pv-KEBS8cGy_L4ND_khM,569 -botocore/data/resiliencehub/2020-04-30/service-2.json.gz,sha256=6snjPHOehQ5_PecIzZDhd3Sm1bgjG7aBiXl7rjl16s4,29858 -botocore/data/resource-explorer-2/2022-07-28/endpoint-rule-set-1.json.gz,sha256=J-wCNHiNF3KQZsX8ZqPj67VtYeA1ITRTkuYbN57yy5Y,1158 -botocore/data/resource-explorer-2/2022-07-28/paginators-1.json,sha256=PuLP47m2fS4G52Ue_2zXbnv1mbBD9Nd9Rjb9ZH4H6pE,1891 -botocore/data/resource-explorer-2/2022-07-28/paginators-1.sdk-extras.json,sha256=1BdFcjO9uS6r4XTazfeCLVaX7KPO_oELRwSUy9VBt7M,268 -botocore/data/resource-explorer-2/2022-07-28/service-2.json.gz,sha256=N6NeEEVvtStxfPnbmDKj_jpHkM7t9gnusP2oyj5v3dY,15681 -botocore/data/resource-explorer-2/2022-07-28/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 -botocore/data/resource-groups/2017-11-27/endpoint-rule-set-1.json.gz,sha256=SgXGgTdONMnC-1Km8YMNlnwp8JN4sA8OnryteYc5CvM,1239 -botocore/data/resource-groups/2017-11-27/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/resource-groups/2017-11-27/paginators-1.json,sha256=UDYlyYXlpEjfEq1H2dATIOO_M33BElQGPX3C7qMybI8,971 -botocore/data/resource-groups/2017-11-27/paginators-1.sdk-extras.json,sha256=sLaKgsyulktCelU4GGH6YRaRLWwmRiSmoKar3VfbunY,165 -botocore/data/resource-groups/2017-11-27/service-2.json.gz,sha256=BOURu_wPdwmPtNhac7xYwQv4iXKPraukUyTD556N6As,14326 -botocore/data/resourcegroupstaggingapi/2017-01-26/endpoint-rule-set-1.json.gz,sha256=Wv0eVb8tXz7Quar-JnJ3V4yVY61ggUQi9JH5m0lD53Q,1149 -botocore/data/resourcegroupstaggingapi/2017-01-26/examples-1.json,sha256=K3b6mgYkitvcecSlJT-iV_EQATmvOySs66iKJI5qx0g,44 -botocore/data/resourcegroupstaggingapi/2017-01-26/paginators-1.json,sha256=foWHoPRSV6VjAwni6ujDQPi6y99hZYvvnaMzbzSAqFY,854 -botocore/data/resourcegroupstaggingapi/2017-01-26/service-2.json.gz,sha256=LuIngyK8cZm0pKcSjKD329TkV_30SjBHOQzk7Zxb-DI,8919 -botocore/data/rolesanywhere/2018-05-10/endpoint-rule-set-1.json.gz,sha256=9toYryK1FCToWhkvMpmLRUKr1smHAeRD6s3iBMxJFnI,1153 -botocore/data/rolesanywhere/2018-05-10/paginators-1.json,sha256=IaF8k8b_3R6qbXcxbFkIQqN0DTaCim4eQhIiEanVZkc,541 -botocore/data/rolesanywhere/2018-05-10/service-2.json.gz,sha256=C64igE2gjoL2Q-UApkyprsQy5j6dqHATPxGZiHsCOm4,7228 -botocore/data/rolesanywhere/2018-05-10/waiters-2.json,sha256=fsA0_mwCl57UFPiqxJUWLb9AE7gd9kpBT4x0_6Q7dww,39 
[... remainder of the deleted venv/Lib/site-packages/botocore-1.42.25.dist-info/RECORD listing omitted: several hundred more `-botocore/...` entries (service data files, docs, retries, vendored requests, and module sources), each recorded with its sha256 hash and size ...]
diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/REQUESTED b/venv/Lib/site-packages/botocore-1.42.25.dist-info/REQUESTED
deleted file mode 100644
index e69de29..0000000
diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/WHEEL b/venv/Lib/site-packages/botocore-1.42.25.dist-info/WHEEL
deleted file mode 100644
index dcfdc6e..0000000
--- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: setuptools (75.1.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/venv/Lib/site-packages/botocore-1.42.25.dist-info/top_level.txt b/venv/Lib/site-packages/botocore-1.42.25.dist-info/top_level.txt
deleted file mode 100644 index c5b9e12..0000000 --- a/venv/Lib/site-packages/botocore-1.42.25.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -botocore diff --git a/venv/Lib/site-packages/botocore/__init__.py b/venv/Lib/site-packages/botocore/__init__.py deleted file mode 100644 index 327de16..0000000 --- a/venv/Lib/site-packages/botocore/__init__.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import logging -import os -import re -from logging import NullHandler - -__version__ = '1.42.25' - - -# Configure default logger to do nothing -log = logging.getLogger('botocore') -log.addHandler(NullHandler()) - -_INITIALIZERS = [] - -_first_cap_regex = re.compile('(.)([A-Z][a-z]+)') -_end_cap_regex = re.compile('([a-z0-9])([A-Z])') -# The regex below handles the special case where some acronym -# name is pluralized, e.g GatewayARNs, ListWebACLs, SomeCNAMEs. -_special_case_transform = re.compile('[A-Z]{2,}s$') -# Prepopulate the cache with special cases that don't match -# our regular transformation. -_xform_cache = { - ('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume', - ('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume', - ('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes', - ('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes', - ('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes', - ('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes', - ('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume', - ('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume', - ('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type', - ('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type', - ('ExecutePartiQLStatement', '_'): 'execute_partiql_statement', - ('ExecutePartiQLStatement', '-'): 'execute-partiql-statement', - ('ExecutePartiQLTransaction', '_'): 'execute_partiql_transaction', - ('ExecutePartiQLTransaction', '-'): 'execute-partiql-transaction', - ('ExecutePartiQLBatch', '_'): 'execute_partiql_batch', - ('ExecutePartiQLBatch', '-'): 'execute-partiql-batch', - ( - 'AssociateWhatsAppBusinessAccount', - '_', - ): 'associate_whatsapp_business_account', - ( - 'AssociateWhatsAppBusinessAccount', - '-', - ): 'associate-whatsapp-business-account', - ('CreateWhatsAppMessageTemplate', '_'): 'create_whatsapp_message_template', - ('CreateWhatsAppMessageTemplate', '-'): 'create-whatsapp-message-template', - ( - 'CreateWhatsAppMessageTemplateFromLibrary', - '_', - ): 'create_whatsapp_message_template_from_library', - ( - 'CreateWhatsAppMessageTemplateFromLibrary', - '-', - ): 'create-whatsapp-message-template-from-library', - ( - 'CreateWhatsAppMessageTemplateMedia', - '_', - ): 'create_whatsapp_message_template_media', - ( - 'CreateWhatsAppMessageTemplateMedia', - '-', - ): 'create-whatsapp-message-template-media', - ('DeleteWhatsAppMessageMedia', '_'): 
'delete_whatsapp_message_media', - ('DeleteWhatsAppMessageMedia', '-'): 'delete-whatsapp-message-media', - ('DeleteWhatsAppMessageTemplate', '_'): 'delete_whatsapp_message_template', - ('DeleteWhatsAppMessageTemplate', '-'): 'delete-whatsapp-message-template', - ( - 'DisassociateWhatsAppBusinessAccount', - '_', - ): 'disassociate_whatsapp_business_account', - ( - 'DisassociateWhatsAppBusinessAccount', - '-', - ): 'disassociate-whatsapp-business-account', - ( - 'GetLinkedWhatsAppBusinessAccount', - '_', - ): 'get_linked_whatsapp_business_account', - ( - 'GetLinkedWhatsAppBusinessAccount', - '-', - ): 'get-linked-whatsapp-business-account', - ( - 'GetLinkedWhatsAppBusinessAccountPhoneNumber', - '_', - ): 'get_linked_whatsapp_business_account_phone_number', - ( - 'GetLinkedWhatsAppBusinessAccountPhoneNumber', - '-', - ): 'get-linked-whatsapp-business-account-phone-number', - ('GetWhatsAppMessageMedia', '_'): 'get_whatsapp_message_media', - ('GetWhatsAppMessageMedia', '-'): 'get-whatsapp-message-media', - ('GetWhatsAppMessageTemplate', '_'): 'get_whatsapp_message_template', - ('GetWhatsAppMessageTemplate', '-'): 'get-whatsapp-message-template', - ( - 'ListLinkedWhatsAppBusinessAccounts', - '_', - ): 'list_linked_whatsapp_business_accounts', - ( - 'ListLinkedWhatsAppBusinessAccounts', - '-', - ): 'list-linked-whatsapp-business-accounts', - ('ListWhatsAppMessageTemplates', '_'): 'list_whatsapp_message_templates', - ('ListWhatsAppMessageTemplates', '-'): 'list-whatsapp-message-templates', - ('ListWhatsAppTemplateLibrary', '_'): 'list_whatsapp_template_library', - ('ListWhatsAppTemplateLibrary', '-'): 'list-whatsapp-template-library', - ('PostWhatsAppMessageMedia', '_'): 'post_whatsapp_message_media', - ('PostWhatsAppMessageMedia', '-'): 'post-whatsapp-message-media', - ( - 'PutWhatsAppBusinessAccountEventDestinations', - '_', - ): 'put_whatsapp_business_account_event_destinations', - ( - 'PutWhatsAppBusinessAccountEventDestinations', - '-', - ): 'put-whatsapp-business-account-event-destinations', - ('SendWhatsAppMessage', '_'): 'send_whatsapp_message', - ('SendWhatsAppMessage', '-'): 'send-whatsapp-message', - ('UpdateWhatsAppMessageTemplate', '_'): 'update_whatsapp_message_template', - ('UpdateWhatsAppMessageTemplate', '-'): 'update-whatsapp-message-template', -} -ScalarTypes = ('string', 'integer', 'boolean', 'timestamp', 'float', 'double') - -BOTOCORE_ROOT = os.path.dirname(os.path.abspath(__file__)) - - -# Used to specify anonymous (unsigned) request signature -class UNSIGNED: - def __copy__(self): - return self - - def __deepcopy__(self, memodict): - return self - - -UNSIGNED = UNSIGNED() - - -def xform_name(name, sep='_', _xform_cache=_xform_cache): - """Convert camel case to a "pythonic" name. - - If the name contains the ``sep`` character, then it is - returned unchanged. - - """ - if sep in name: - # If the sep is in the name, assume that it's already - # transformed and return the string unchanged. - return name - key = (name, sep) - if key not in _xform_cache: - if _special_case_transform.search(name) is not None: - is_special = _special_case_transform.search(name) - matched = is_special.group() - # Replace something like ARNs, ACLs with _arns, _acls. 
- name = f"{name[: -len(matched)]}{sep}{matched.lower()}" - s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name) - transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s1).lower() - _xform_cache[key] = transformed - return _xform_cache[key] - - -def register_initializer(callback): - """Register an initializer function for session creation. - - This initializer function will be invoked whenever a new - `botocore.session.Session` is instantiated. - - :type callback: callable - :param callback: A callable that accepts a single argument - of type `botocore.session.Session`. - - """ - _INITIALIZERS.append(callback) - - -def unregister_initializer(callback): - """Unregister an initializer function. - - :type callback: callable - :param callback: A callable that was previously registered - with `botocore.register_initializer`. - - :raises ValueError: If a callback is provided that is not currently - registered as an initializer. - - """ - _INITIALIZERS.remove(callback) - - -def invoke_initializers(session): - """Invoke all initializers for a session. - - :type session: botocore.session.Session - :param session: The session to initialize. - - """ - for initializer in _INITIALIZERS: - initializer(session) diff --git a/venv/Lib/site-packages/botocore/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c050745..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/args.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/args.cpython-312.pyc deleted file mode 100644 index afab661..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/args.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/auth.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/auth.cpython-312.pyc deleted file mode 100644 index f03f709..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/auth.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/awsrequest.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/awsrequest.cpython-312.pyc deleted file mode 100644 index 8634ff3..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/awsrequest.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/client.cpython-312.pyc deleted file mode 100644 index d0993ff..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/compat.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/compat.cpython-312.pyc deleted file mode 100644 index fe34031..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/compat.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/compress.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/compress.cpython-312.pyc deleted file mode 100644 index f1c4a48..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/compress.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/config.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/config.cpython-312.pyc deleted file mode 100644 index aa48cb8..0000000 Binary files 
a/venv/Lib/site-packages/botocore/__pycache__/config.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/configloader.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/configloader.cpython-312.pyc deleted file mode 100644 index 680d3d8..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/configloader.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/configprovider.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/configprovider.cpython-312.pyc deleted file mode 100644 index 15e5674..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/configprovider.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/context.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/context.cpython-312.pyc deleted file mode 100644 index fc34b7f..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/context.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/credentials.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/credentials.cpython-312.pyc deleted file mode 100644 index a8a5369..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/credentials.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/discovery.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/discovery.cpython-312.pyc deleted file mode 100644 index 4597af2..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/discovery.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/endpoint.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/endpoint.cpython-312.pyc deleted file mode 100644 index 8667f29..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/endpoint.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/endpoint_provider.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/endpoint_provider.cpython-312.pyc deleted file mode 100644 index f59b644..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/endpoint_provider.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/errorfactory.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/errorfactory.cpython-312.pyc deleted file mode 100644 index 6baf3b1..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/errorfactory.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/eventstream.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/eventstream.cpython-312.pyc deleted file mode 100644 index 8022885..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/eventstream.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index b8a768f..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/handlers.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/handlers.cpython-312.pyc deleted file mode 100644 index 8cccb87..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/handlers.cpython-312.pyc and /dev/null differ diff 
--git a/venv/Lib/site-packages/botocore/__pycache__/history.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/history.cpython-312.pyc deleted file mode 100644 index a47b20c..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/history.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/hooks.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/hooks.cpython-312.pyc deleted file mode 100644 index a7cdfd3..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/hooks.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/httpchecksum.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/httpchecksum.cpython-312.pyc deleted file mode 100644 index 104ede4..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/httpchecksum.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/httpsession.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/httpsession.cpython-312.pyc deleted file mode 100644 index 50071d1..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/httpsession.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/loaders.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/loaders.cpython-312.pyc deleted file mode 100644 index 393c855..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/loaders.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/model.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/model.cpython-312.pyc deleted file mode 100644 index d8d7856..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/model.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/monitoring.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/monitoring.cpython-312.pyc deleted file mode 100644 index e069317..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/monitoring.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/paginate.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/paginate.cpython-312.pyc deleted file mode 100644 index 73e930d..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/paginate.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/parsers.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/parsers.cpython-312.pyc deleted file mode 100644 index 53bb032..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/parsers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/plugin.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/plugin.cpython-312.pyc deleted file mode 100644 index 1905113..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/plugin.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/regions.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/regions.cpython-312.pyc deleted file mode 100644 index c97661d..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/regions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/response.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/response.cpython-312.pyc deleted file mode 100644 index 8ad64ae..0000000 
Binary files a/venv/Lib/site-packages/botocore/__pycache__/response.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/retryhandler.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/retryhandler.cpython-312.pyc deleted file mode 100644 index 3db63e6..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/retryhandler.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/serialize.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/serialize.cpython-312.pyc deleted file mode 100644 index 3be0de5..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/serialize.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/session.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/session.cpython-312.pyc deleted file mode 100644 index e6e18dc..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/session.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/signers.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/signers.cpython-312.pyc deleted file mode 100644 index 64eafd7..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/signers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/stub.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/stub.cpython-312.pyc deleted file mode 100644 index f63e91c..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/stub.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/tokens.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/tokens.cpython-312.pyc deleted file mode 100644 index 1ad4cae..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/tokens.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/translate.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/translate.cpython-312.pyc deleted file mode 100644 index 164f861..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/translate.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/useragent.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/useragent.cpython-312.pyc deleted file mode 100644 index d3cf767..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/useragent.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index adeaeb9..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/validate.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/validate.cpython-312.pyc deleted file mode 100644 index d5d48ff..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/validate.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/__pycache__/waiter.cpython-312.pyc b/venv/Lib/site-packages/botocore/__pycache__/waiter.cpython-312.pyc deleted file mode 100644 index 0c7099f..0000000 Binary files a/venv/Lib/site-packages/botocore/__pycache__/waiter.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/args.py b/venv/Lib/site-packages/botocore/args.py 
deleted file mode 100644 index 2a76e87..0000000 --- a/venv/Lib/site-packages/botocore/args.py +++ /dev/null @@ -1,998 +0,0 @@ -# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Internal module to help with normalizing botocore client args. - -This module (and all function/classes within this module) should be -considered internal, and *not* a public API. - -""" - -import copy -import logging -import socket - -import botocore.exceptions -import botocore.parsers -import botocore.serialize -from botocore.config import Config -from botocore.endpoint import EndpointCreator -from botocore.regions import EndpointResolverBuiltins as EPRBuiltins -from botocore.regions import EndpointRulesetResolver -from botocore.signers import RequestSigner -from botocore.useragent import UserAgentString, register_feature_id -from botocore.utils import ( - PRIORITY_ORDERED_SUPPORTED_PROTOCOLS, # noqa: F401 - ensure_boolean, - is_s3_accelerate_url, -) - -logger = logging.getLogger(__name__) - - -VALID_REGIONAL_ENDPOINTS_CONFIG = [ - 'legacy', - 'regional', -] -LEGACY_GLOBAL_STS_REGIONS = [ - 'ap-northeast-1', - 'ap-south-1', - 'ap-southeast-1', - 'ap-southeast-2', - 'aws-global', - 'ca-central-1', - 'eu-central-1', - 'eu-north-1', - 'eu-west-1', - 'eu-west-2', - 'eu-west-3', - 'sa-east-1', - 'us-east-1', - 'us-east-2', - 'us-west-1', - 'us-west-2', -] -# Maximum allowed length of the ``user_agent_appid`` config field. Longer -# values result in a warning-level log message. 
-USERAGENT_APPID_MAXLEN = 50 - -VALID_REQUEST_CHECKSUM_CALCULATION_CONFIG = ( - "when_supported", - "when_required", -) -VALID_RESPONSE_CHECKSUM_VALIDATION_CONFIG = ( - "when_supported", - "when_required", -) - - -VALID_ACCOUNT_ID_ENDPOINT_MODE_CONFIG = ( - 'preferred', - 'disabled', - 'required', -) - - -class ClientArgsCreator: - def __init__( - self, - event_emitter, - user_agent, - response_parser_factory, - loader, - exceptions_factory, - config_store, - user_agent_creator=None, - ): - self._event_emitter = event_emitter - self._response_parser_factory = response_parser_factory - self._loader = loader - self._exceptions_factory = exceptions_factory - self._config_store = config_store - if user_agent_creator is None: - self._session_ua_creator = UserAgentString.from_environment() - else: - self._session_ua_creator = user_agent_creator - - def get_client_args( - self, - service_model, - region_name, - is_secure, - endpoint_url, - verify, - credentials, - scoped_config, - client_config, - endpoint_bridge, - auth_token=None, - endpoints_ruleset_data=None, - partition_data=None, - ): - final_args = self.compute_client_args( - service_model, - client_config, - endpoint_bridge, - region_name, - endpoint_url, - is_secure, - scoped_config, - ) - - service_name = final_args['service_name'] # noqa - parameter_validation = final_args['parameter_validation'] - endpoint_config = final_args['endpoint_config'] - protocol = final_args['protocol'] - config_kwargs = final_args['config_kwargs'] - s3_config = final_args['s3_config'] - partition = endpoint_config['metadata'].get('partition', None) - socket_options = final_args['socket_options'] - configured_endpoint_url = final_args['configured_endpoint_url'] - signing_region = endpoint_config['signing_region'] - endpoint_region_name = endpoint_config['region_name'] - account_id_endpoint_mode = config_kwargs['account_id_endpoint_mode'] - - event_emitter = copy.copy(self._event_emitter) - signer = RequestSigner( - service_model.service_id, - signing_region, - endpoint_config['signing_name'], - endpoint_config['signature_version'], - credentials, - event_emitter, - auth_token, - ) - - config_kwargs['s3'] = s3_config - new_config = Config(**config_kwargs) - endpoint_creator = EndpointCreator(event_emitter) - - endpoint = endpoint_creator.create_endpoint( - service_model, - region_name=endpoint_region_name, - endpoint_url=endpoint_config['endpoint_url'], - verify=verify, - response_parser_factory=self._response_parser_factory, - max_pool_connections=new_config.max_pool_connections, - proxies=new_config.proxies, - timeout=(new_config.connect_timeout, new_config.read_timeout), - socket_options=socket_options, - client_cert=new_config.client_cert, - proxies_config=new_config.proxies_config, - ) - - # Emit event to allow service-specific or customer customization of serializer kwargs - event_name = f'creating-serializer.{service_name}' - serializer_kwargs = { - 'timestamp_precision': botocore.serialize.TIMESTAMP_PRECISION_DEFAULT - } - event_emitter.emit( - event_name, - protocol_name=protocol, - service_model=service_model, - serializer_kwargs=serializer_kwargs, - ) - - serializer = botocore.serialize.create_serializer( - protocol, - parameter_validation, - timestamp_precision=serializer_kwargs['timestamp_precision'], - ) - response_parser = botocore.parsers.create_parser(protocol) - - ruleset_resolver = self._build_endpoint_resolver( - endpoints_ruleset_data, - partition_data, - client_config, - service_model, - endpoint_region_name, - region_name, - 
configured_endpoint_url, - endpoint, - is_secure, - endpoint_bridge, - event_emitter, - credentials, - account_id_endpoint_mode, - ) - - # Copy the session's user agent factory and adds client configuration. - client_ua_creator = self._session_ua_creator.with_client_config( - new_config - ) - supplied_ua = client_config.user_agent if client_config else None - new_config._supplied_user_agent = supplied_ua - - return { - 'serializer': serializer, - 'endpoint': endpoint, - 'response_parser': response_parser, - 'event_emitter': event_emitter, - 'request_signer': signer, - 'service_model': service_model, - 'loader': self._loader, - 'client_config': new_config, - 'partition': partition, - 'exceptions_factory': self._exceptions_factory, - 'endpoint_ruleset_resolver': ruleset_resolver, - 'user_agent_creator': client_ua_creator, - } - - def compute_client_args( - self, - service_model, - client_config, - endpoint_bridge, - region_name, - endpoint_url, - is_secure, - scoped_config, - ): - service_name = service_model.endpoint_prefix - protocol = service_model.resolved_protocol - parameter_validation = True - if client_config and not client_config.parameter_validation: - parameter_validation = False - elif scoped_config: - raw_value = scoped_config.get('parameter_validation') - if raw_value is not None: - parameter_validation = ensure_boolean(raw_value) - - s3_config = self.compute_s3_config(client_config) - - configured_endpoint_url = self._compute_configured_endpoint_url( - client_config=client_config, - endpoint_url=endpoint_url, - ) - if configured_endpoint_url is not None: - register_feature_id('ENDPOINT_OVERRIDE') - - endpoint_config = self._compute_endpoint_config( - service_name=service_name, - region_name=region_name, - endpoint_url=configured_endpoint_url, - is_secure=is_secure, - endpoint_bridge=endpoint_bridge, - s3_config=s3_config, - ) - endpoint_variant_tags = endpoint_config['metadata'].get('tags', []) - - # Some third-party libraries expect the final user-agent string in - # ``client.meta.config.user_agent``. To maintain backwards - # compatibility, the preliminary user-agent string (before any Config - # object modifications and without request-specific user-agent - # components) is stored in the new Config object's ``user_agent`` - # property but not used by Botocore itself. - preliminary_ua_string = self._session_ua_creator.with_client_config( - client_config - ).to_string() - # Create a new client config to be passed to the client based - # on the final values. We do not want the user to be able - # to try to modify an existing client with a client config. 
- config_kwargs = dict( - region_name=endpoint_config['region_name'], - signature_version=endpoint_config['signature_version'], - user_agent=preliminary_ua_string, - ) - if 'dualstack' in endpoint_variant_tags: - config_kwargs.update(use_dualstack_endpoint=True) - if 'fips' in endpoint_variant_tags: - config_kwargs.update(use_fips_endpoint=True) - if client_config is not None: - config_kwargs.update( - connect_timeout=client_config.connect_timeout, - read_timeout=client_config.read_timeout, - max_pool_connections=client_config.max_pool_connections, - proxies=client_config.proxies, - proxies_config=client_config.proxies_config, - retries=client_config.retries, - client_cert=client_config.client_cert, - inject_host_prefix=client_config.inject_host_prefix, - tcp_keepalive=client_config.tcp_keepalive, - user_agent_extra=client_config.user_agent_extra, - user_agent_appid=client_config.user_agent_appid, - request_min_compression_size_bytes=( - client_config.request_min_compression_size_bytes - ), - disable_request_compression=( - client_config.disable_request_compression - ), - client_context_params=client_config.client_context_params, - sigv4a_signing_region_set=( - client_config.sigv4a_signing_region_set - ), - request_checksum_calculation=( - client_config.request_checksum_calculation - ), - response_checksum_validation=( - client_config.response_checksum_validation - ), - account_id_endpoint_mode=client_config.account_id_endpoint_mode, - auth_scheme_preference=client_config.auth_scheme_preference, - ) - self._compute_retry_config(config_kwargs) - self._compute_connect_timeout(config_kwargs) - self._compute_user_agent_appid_config(config_kwargs) - self._compute_request_compression_config(config_kwargs) - self._compute_sigv4a_signing_region_set_config(config_kwargs) - self._compute_checksum_config(config_kwargs) - self._compute_account_id_endpoint_mode_config(config_kwargs) - self._compute_inject_host_prefix(client_config, config_kwargs) - self._compute_auth_scheme_preference_config( - client_config, config_kwargs - ) - self._compute_signature_version_config(client_config, config_kwargs) - s3_config = self.compute_s3_config(client_config) - - is_s3_service = self._is_s3_service(service_name) - - if is_s3_service and 'dualstack' in endpoint_variant_tags: - if s3_config is None: - s3_config = {} - s3_config['use_dualstack_endpoint'] = True - - return { - 'service_name': service_name, - 'parameter_validation': parameter_validation, - 'configured_endpoint_url': configured_endpoint_url, - 'endpoint_config': endpoint_config, - 'protocol': protocol, - 'config_kwargs': config_kwargs, - 's3_config': s3_config, - 'socket_options': self._compute_socket_options( - scoped_config, client_config - ), - } - - def _compute_inject_host_prefix(self, client_config, config_kwargs): - # In the cases that a Config object was not provided, or the private value - # remained UNSET, we should resolve the value from the config store. 
- if ( - client_config is None - or client_config._inject_host_prefix == 'UNSET' - ): - configured_disable_host_prefix_injection = ( - self._config_store.get_config_variable( - 'disable_host_prefix_injection' - ) - ) - if configured_disable_host_prefix_injection is not None: - config_kwargs[ - 'inject_host_prefix' - ] = not configured_disable_host_prefix_injection - else: - config_kwargs['inject_host_prefix'] = True - - def _compute_configured_endpoint_url(self, client_config, endpoint_url): - if endpoint_url is not None: - return endpoint_url - - if self._ignore_configured_endpoint_urls(client_config): - logger.debug("Ignoring configured endpoint URLs.") - return endpoint_url - - return self._config_store.get_config_variable('endpoint_url') - - def _ignore_configured_endpoint_urls(self, client_config): - if ( - client_config - and client_config.ignore_configured_endpoint_urls is not None - ): - return client_config.ignore_configured_endpoint_urls - - return self._config_store.get_config_variable( - 'ignore_configured_endpoint_urls' - ) - - def compute_s3_config(self, client_config): - s3_configuration = self._config_store.get_config_variable('s3') - - # Next specific client config values takes precedence over - # specific values in the scoped config. - if client_config is not None: - if client_config.s3 is not None: - if s3_configuration is None: - s3_configuration = client_config.s3 - else: - # The current s3_configuration dictionary may be - # from a source that only should be read from so - # we want to be safe and just make a copy of it to modify - # before it actually gets updated. - s3_configuration = s3_configuration.copy() - s3_configuration.update(client_config.s3) - - return s3_configuration - - def _is_s3_service(self, service_name): - """Whether the service is S3 or S3 Control. - - Note that throughout this class, service_name refers to the endpoint - prefix, not the folder name of the service in botocore/data. For - S3 Control, the folder name is 's3control' but the endpoint prefix is - 's3-control'. - """ - return service_name in ['s3', 's3-control'] - - def _compute_endpoint_config( - self, - service_name, - region_name, - endpoint_url, - is_secure, - endpoint_bridge, - s3_config, - ): - resolve_endpoint_kwargs = { - 'service_name': service_name, - 'region_name': region_name, - 'endpoint_url': endpoint_url, - 'is_secure': is_secure, - 'endpoint_bridge': endpoint_bridge, - } - if service_name == 's3': - return self._compute_s3_endpoint_config( - s3_config=s3_config, **resolve_endpoint_kwargs - ) - if service_name == 'sts': - return self._compute_sts_endpoint_config(**resolve_endpoint_kwargs) - return self._resolve_endpoint(**resolve_endpoint_kwargs) - - def _compute_s3_endpoint_config( - self, s3_config, **resolve_endpoint_kwargs - ): - force_s3_global = self._should_force_s3_global( - resolve_endpoint_kwargs['region_name'], s3_config - ) - if force_s3_global: - resolve_endpoint_kwargs['region_name'] = None - endpoint_config = self._resolve_endpoint(**resolve_endpoint_kwargs) - self._set_region_if_custom_s3_endpoint( - endpoint_config, resolve_endpoint_kwargs['endpoint_bridge'] - ) - # For backwards compatibility reasons, we want to make sure the - # client.meta.region_name will remain us-east-1 if we forced the - # endpoint to be the global region. Specifically, if this value - # changes to aws-global, it breaks logic where a user is checking - # for us-east-1 as the global endpoint such as in creating buckets. 
- if force_s3_global and endpoint_config['region_name'] == 'aws-global': - endpoint_config['region_name'] = 'us-east-1' - return endpoint_config - - def _should_force_s3_global(self, region_name, s3_config): - s3_regional_config = 'legacy' - if s3_config and 'us_east_1_regional_endpoint' in s3_config: - s3_regional_config = s3_config['us_east_1_regional_endpoint'] - self._validate_s3_regional_config(s3_regional_config) - - is_global_region = region_name in ('us-east-1', None) - return s3_regional_config == 'legacy' and is_global_region - - def _validate_s3_regional_config(self, config_val): - if config_val not in VALID_REGIONAL_ENDPOINTS_CONFIG: - raise botocore.exceptions.InvalidS3UsEast1RegionalEndpointConfigError( - s3_us_east_1_regional_endpoint_config=config_val - ) - - def _set_region_if_custom_s3_endpoint( - self, endpoint_config, endpoint_bridge - ): - # If a user is providing a custom URL, the endpoint resolver will - # refuse to infer a signing region. If we want to default to s3v4, - # we have to account for this. - if ( - endpoint_config['signing_region'] is None - and endpoint_config['region_name'] is None - ): - endpoint = endpoint_bridge.resolve('s3') - endpoint_config['signing_region'] = endpoint['signing_region'] - endpoint_config['region_name'] = endpoint['region_name'] - - def _compute_sts_endpoint_config(self, **resolve_endpoint_kwargs): - endpoint_config = self._resolve_endpoint(**resolve_endpoint_kwargs) - if self._should_set_global_sts_endpoint( - resolve_endpoint_kwargs['region_name'], - resolve_endpoint_kwargs['endpoint_url'], - endpoint_config, - ): - self._set_global_sts_endpoint( - endpoint_config, resolve_endpoint_kwargs['is_secure'] - ) - return endpoint_config - - def _should_set_global_sts_endpoint( - self, region_name, endpoint_url, endpoint_config - ): - has_variant_tags = endpoint_config and endpoint_config.get( - 'metadata', {} - ).get('tags') - if endpoint_url or has_variant_tags: - return False - return ( - self._get_sts_regional_endpoints_config() == 'legacy' - and region_name in LEGACY_GLOBAL_STS_REGIONS - ) - - def _get_sts_regional_endpoints_config(self): - sts_regional_endpoints_config = self._config_store.get_config_variable( - 'sts_regional_endpoints' - ) - if not sts_regional_endpoints_config: - sts_regional_endpoints_config = 'regional' - if ( - sts_regional_endpoints_config - not in VALID_REGIONAL_ENDPOINTS_CONFIG - ): - raise botocore.exceptions.InvalidSTSRegionalEndpointsConfigError( - sts_regional_endpoints_config=sts_regional_endpoints_config - ) - return sts_regional_endpoints_config - - def _set_global_sts_endpoint(self, endpoint_config, is_secure): - scheme = 'https' if is_secure else 'http' - endpoint_config['endpoint_url'] = f'{scheme}://sts.amazonaws.com' - endpoint_config['signing_region'] = 'us-east-1' - - def _resolve_endpoint( - self, - service_name, - region_name, - endpoint_url, - is_secure, - endpoint_bridge, - ): - return endpoint_bridge.resolve( - service_name, region_name, endpoint_url, is_secure - ) - - def _compute_socket_options(self, scoped_config, client_config=None): - # This disables Nagle's algorithm and is the default socket options - # in urllib3. - socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] - client_keepalive = client_config and client_config.tcp_keepalive - scoped_keepalive = scoped_config and self._ensure_boolean( - scoped_config.get("tcp_keepalive", False) - ) - # Enables TCP Keepalive if specified in client config object or shared config file. 
- if client_keepalive or scoped_keepalive: - socket_options.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)) - return socket_options - - def _compute_retry_config(self, config_kwargs): - self._compute_retry_max_attempts(config_kwargs) - self._compute_retry_mode(config_kwargs) - - def _compute_retry_max_attempts(self, config_kwargs): - # There's a pre-existing max_attempts client config value that actually - # means max *retry* attempts. There's also a `max_attempts` we pull - # from the config store that means *total attempts*, which includes the - # intitial request. We can't change what `max_attempts` means in - # client config so we try to normalize everything to a new - # "total_max_attempts" variable. We ensure that after this, the only - # configuration for "max attempts" is the 'total_max_attempts' key. - # An explicitly provided max_attempts in the client config - # overrides everything. - retries = config_kwargs.get('retries') - if retries is not None: - if 'total_max_attempts' in retries: - retries.pop('max_attempts', None) - return - if 'max_attempts' in retries: - value = retries.pop('max_attempts') - # client config max_attempts means total retries so we - # have to add one for 'total_max_attempts' to account - # for the initial request. - retries['total_max_attempts'] = value + 1 - return - # Otherwise we'll check the config store which checks env vars, - # config files, etc. There is no default value for max_attempts - # so if this returns None and we don't set a default value here. - max_attempts = self._config_store.get_config_variable('max_attempts') - if max_attempts is not None: - if retries is None: - retries = {} - config_kwargs['retries'] = retries - retries['total_max_attempts'] = max_attempts - - def _compute_retry_mode(self, config_kwargs): - retries = config_kwargs.get('retries') - if retries is None: - retries = {} - config_kwargs['retries'] = retries - elif 'mode' in retries: - # If there's a retry mode explicitly set in the client config - # that overrides everything. - return - retry_mode = self._config_store.get_config_variable('retry_mode') - if retry_mode is None: - retry_mode = 'legacy' - retries['mode'] = retry_mode - - def _compute_connect_timeout(self, config_kwargs): - # Checking if connect_timeout is set on the client config. - # If it is not, we check the config_store in case a - # non legacy default mode has been configured. 
- connect_timeout = config_kwargs.get('connect_timeout') - if connect_timeout is not None: - return - connect_timeout = self._config_store.get_config_variable( - 'connect_timeout' - ) - if connect_timeout: - config_kwargs['connect_timeout'] = connect_timeout - - def _compute_request_compression_config(self, config_kwargs): - min_size = config_kwargs.get('request_min_compression_size_bytes') - disabled = config_kwargs.get('disable_request_compression') - if min_size is None: - min_size = self._config_store.get_config_variable( - 'request_min_compression_size_bytes' - ) - # conversion func is skipped so input validation must be done here - # regardless if the value is coming from the config store or the - # config object - min_size = self._validate_min_compression_size(min_size) - config_kwargs['request_min_compression_size_bytes'] = min_size - - if disabled is None: - disabled = self._config_store.get_config_variable( - 'disable_request_compression' - ) - else: - # if the user provided a value we must check if it's a boolean - disabled = ensure_boolean(disabled) - config_kwargs['disable_request_compression'] = disabled - - def _validate_min_compression_size(self, min_size): - min_allowed_min_size = 1 - max_allowed_min_size = 1048576 - error_msg_base = ( - f'Invalid value "{min_size}" for ' - 'request_min_compression_size_bytes.' - ) - try: - min_size = int(min_size) - except (ValueError, TypeError): - msg = ( - f'{error_msg_base} Value must be an integer. ' - f'Received {type(min_size)} instead.' - ) - raise botocore.exceptions.InvalidConfigError(error_msg=msg) - if not min_allowed_min_size <= min_size <= max_allowed_min_size: - msg = ( - f'{error_msg_base} Value must be between ' - f'{min_allowed_min_size} and {max_allowed_min_size}.' - ) - raise botocore.exceptions.InvalidConfigError(error_msg=msg) - - return min_size - - def _ensure_boolean(self, val): - if isinstance(val, bool): - return val - else: - return val.lower() == 'true' - - def _build_endpoint_resolver( - self, - endpoints_ruleset_data, - partition_data, - client_config, - service_model, - endpoint_region_name, - region_name, - endpoint_url, - endpoint, - is_secure, - endpoint_bridge, - event_emitter, - credentials, - account_id_endpoint_mode, - ): - if endpoints_ruleset_data is None: - return None - - # The legacy EndpointResolver is global to the session, but - # EndpointRulesetResolver is service-specific. Builtins for - # EndpointRulesetResolver must not be derived from the legacy - # endpoint resolver's output, including final_args, s3_config, - # etc. - s3_config_raw = self.compute_s3_config(client_config) or {} - service_name_raw = service_model.endpoint_prefix - # Maintain complex logic for s3 and sts endpoints for backwards - # compatibility. - if service_name_raw in ['s3', 'sts'] or region_name is None: - eprv2_region_name = endpoint_region_name - else: - eprv2_region_name = region_name - resolver_builtins = self.compute_endpoint_resolver_builtin_defaults( - region_name=eprv2_region_name, - service_name=service_name_raw, - s3_config=s3_config_raw, - endpoint_bridge=endpoint_bridge, - client_endpoint_url=endpoint_url, - legacy_endpoint_url=endpoint.host, - credentials=credentials, - account_id_endpoint_mode=account_id_endpoint_mode, - ) - # Client context params for s3 conflict with the available settings - # in the `s3` parameter on the `Config` object. If the same parameter - # is set in both places, the value in the `s3` parameter takes priority. 
- if client_config is not None: - client_context = client_config.client_context_params or {} - else: - client_context = {} - if self._is_s3_service(service_name_raw): - client_context.update(s3_config_raw) - - sig_version = ( - client_config.signature_version - if client_config is not None - else None - ) - return EndpointRulesetResolver( - endpoint_ruleset_data=endpoints_ruleset_data, - partition_data=partition_data, - service_model=service_model, - builtins=resolver_builtins, - client_context=client_context, - event_emitter=event_emitter, - use_ssl=is_secure, - requested_auth_scheme=sig_version, - ) - - def compute_endpoint_resolver_builtin_defaults( - self, - region_name, - service_name, - s3_config, - endpoint_bridge, - client_endpoint_url, - legacy_endpoint_url, - credentials, - account_id_endpoint_mode, - ): - # EndpointRulesetResolver rulesets may accept an "SDK::Endpoint" as - # input. If the endpoint_url argument of create_client() is set, it - # always takes priority. - if client_endpoint_url: - given_endpoint = client_endpoint_url - # If an endpoints.json data file other than the one bundled within - # the botocore/data directory is used, the output of legacy - # endpoint resolution is provided to EndpointRulesetResolver. - elif not endpoint_bridge.resolver_uses_builtin_data(): - given_endpoint = legacy_endpoint_url - else: - given_endpoint = None - - # The endpoint rulesets differ from legacy botocore behavior in whether - # forcing path style addressing in incompatible situations raises an - # exception or silently ignores the config setting. The - # AWS_S3_FORCE_PATH_STYLE parameter is adjusted both here and for each - # operation so that the ruleset behavior is backwards compatible. - if s3_config.get('use_accelerate_endpoint', False): - force_path_style = False - elif client_endpoint_url is not None and not is_s3_accelerate_url( - client_endpoint_url - ): - force_path_style = s3_config.get('addressing_style') != 'virtual' - else: - force_path_style = s3_config.get('addressing_style') == 'path' - - return { - EPRBuiltins.AWS_REGION: region_name, - EPRBuiltins.AWS_USE_FIPS: ( - # SDK_ENDPOINT cannot be combined with AWS_USE_FIPS - given_endpoint is None - # use legacy resolver's _resolve_endpoint_variant_config_var() - # or default to False if it returns None - and endpoint_bridge._resolve_endpoint_variant_config_var( - 'use_fips_endpoint' - ) - or False - ), - EPRBuiltins.AWS_USE_DUALSTACK: ( - # SDK_ENDPOINT cannot be combined with AWS_USE_DUALSTACK - given_endpoint is None - # use legacy resolver's _resolve_use_dualstack_endpoint() and - # or default to False if it returns None - and endpoint_bridge._resolve_use_dualstack_endpoint( - service_name - ) - or False - ), - EPRBuiltins.AWS_STS_USE_GLOBAL_ENDPOINT: ( - self._should_set_global_sts_endpoint( - region_name=region_name, - endpoint_url=None, - endpoint_config=None, - ) - ), - EPRBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT: ( - self._should_force_s3_global(region_name, s3_config) - ), - EPRBuiltins.AWS_S3_ACCELERATE: s3_config.get( - 'use_accelerate_endpoint', False - ), - EPRBuiltins.AWS_S3_FORCE_PATH_STYLE: force_path_style, - EPRBuiltins.AWS_S3_USE_ARN_REGION: s3_config.get( - 'use_arn_region', True - ), - EPRBuiltins.AWS_S3CONTROL_USE_ARN_REGION: s3_config.get( - 'use_arn_region', False - ), - EPRBuiltins.AWS_S3_DISABLE_MRAP: s3_config.get( - 's3_disable_multiregion_access_points', False - ), - EPRBuiltins.SDK_ENDPOINT: given_endpoint, - EPRBuiltins.ACCOUNT_ID: credentials.get_deferred_property( - 'account_id' - ) - if 
credentials - else None, - EPRBuiltins.ACCOUNT_ID_ENDPOINT_MODE: account_id_endpoint_mode, - } - - def _compute_user_agent_appid_config(self, config_kwargs): - user_agent_appid = config_kwargs.get('user_agent_appid') - if user_agent_appid is None: - user_agent_appid = self._config_store.get_config_variable( - 'user_agent_appid' - ) - if ( - user_agent_appid is not None - and len(user_agent_appid) > USERAGENT_APPID_MAXLEN - ): - logger.warning( - 'The configured value for user_agent_appid exceeds the ' - 'maximum length of %d characters.', - USERAGENT_APPID_MAXLEN, - ) - config_kwargs['user_agent_appid'] = user_agent_appid - - def _compute_sigv4a_signing_region_set_config(self, config_kwargs): - sigv4a_signing_region_set = config_kwargs.get( - 'sigv4a_signing_region_set' - ) - if sigv4a_signing_region_set is None: - sigv4a_signing_region_set = self._config_store.get_config_variable( - 'sigv4a_signing_region_set' - ) - config_kwargs['sigv4a_signing_region_set'] = sigv4a_signing_region_set - - def _compute_checksum_config(self, config_kwargs): - self._handle_checksum_config( - config_kwargs, - config_key="request_checksum_calculation", - valid_options=VALID_REQUEST_CHECKSUM_CALCULATION_CONFIG, - ) - self._handle_checksum_config( - config_kwargs, - config_key="response_checksum_validation", - valid_options=VALID_RESPONSE_CHECKSUM_VALIDATION_CONFIG, - ) - - def _handle_checksum_config( - self, - config_kwargs, - config_key, - valid_options, - ): - value = config_kwargs.get(config_key) - if value is None: - value = self._config_store.get_config_variable(config_key) - - if isinstance(value, str): - value = value.lower() - - if value not in valid_options: - raise botocore.exceptions.InvalidChecksumConfigError( - config_key=config_key, - config_value=value, - valid_options=valid_options, - ) - self._register_checksum_config_feature_ids(value, config_key) - config_kwargs[config_key] = value - - def _register_checksum_config_feature_ids(self, value, config_key): - checksum_config_feature_id = None - if config_key == "request_checksum_calculation": - checksum_config_feature_id = ( - f"FLEXIBLE_CHECKSUMS_REQ_{value.upper()}" - ) - elif config_key == "response_checksum_validation": - checksum_config_feature_id = ( - f"FLEXIBLE_CHECKSUMS_RES_{value.upper()}" - ) - if checksum_config_feature_id is not None: - register_feature_id(checksum_config_feature_id) - - def _compute_account_id_endpoint_mode_config(self, config_kwargs): - config_key = 'account_id_endpoint_mode' - - # Disable account id based endpoint routing for unsigned requests - # since there are no credentials to resolve. - signature_version = config_kwargs.get('signature_version') - if signature_version is botocore.UNSIGNED: - config_kwargs[config_key] = 'disabled' - return - - account_id_endpoint_mode = config_kwargs.get(config_key) - if account_id_endpoint_mode is None: - account_id_endpoint_mode = self._config_store.get_config_variable( - config_key - ) - - if isinstance(account_id_endpoint_mode, str): - account_id_endpoint_mode = account_id_endpoint_mode.lower() - - if ( - account_id_endpoint_mode - not in VALID_ACCOUNT_ID_ENDPOINT_MODE_CONFIG - ): - raise botocore.exceptions.InvalidConfigError( - error_msg=f"The configured value '{account_id_endpoint_mode}' for '{config_key}' is " - f"invalid. Valid values are: {VALID_ACCOUNT_ID_ENDPOINT_MODE_CONFIG}." 
- ) - - config_kwargs[config_key] = account_id_endpoint_mode - - def _compute_auth_scheme_preference_config( - self, client_config, config_kwargs - ): - config_key = 'auth_scheme_preference' - set_in_config_object = False - - if client_config and client_config.auth_scheme_preference: - value = client_config.auth_scheme_preference - set_in_config_object = True - else: - value = self._config_store.get_config_variable(config_key) - - if value is None: - config_kwargs[config_key] = None - return - - if not isinstance(value, str): - raise botocore.exceptions.InvalidConfigError( - error_msg=( - f"{config_key} must be a comma-delimited string. " - f"Received {type(value)} instead: {value}." - ) - ) - - value = ','.join( - item.replace(' ', '').replace('\t', '') - for item in value.split(',') - if item.strip() - ) - - if set_in_config_object: - value = ClientConfigString(value) - - config_kwargs[config_key] = value - - def _compute_signature_version_config(self, client_config, config_kwargs): - if client_config and client_config.signature_version: - value = client_config.signature_version - if isinstance(value, str): - config_kwargs['signature_version'] = ClientConfigString(value) - - -class ConfigObjectWrapper: - """Base class to mark values set via in-code Config object.""" - - pass - - -class ClientConfigString(str, ConfigObjectWrapper): - def __new__(cls, value=None): - return super().__new__(cls, value) diff --git a/venv/Lib/site-packages/botocore/auth.py b/venv/Lib/site-packages/botocore/auth.py deleted file mode 100644 index 7754f21..0000000 --- a/venv/Lib/site-packages/botocore/auth.py +++ /dev/null @@ -1,1227 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import base64 -import calendar -import datetime -import functools -import hmac -import json -import logging -import time -from collections.abc import Mapping -from email.utils import formatdate -from hashlib import sha1, sha256 -from operator import itemgetter - -from botocore.compat import ( - HAS_CRT, - MD5_AVAILABLE, # noqa: F401 - HTTPHeaders, - encodebytes, - ensure_unicode, - get_current_datetime, - parse_qs, - quote, - unquote, - urlsplit, - urlunsplit, -) -from botocore.exceptions import ( - NoAuthTokenError, - NoCredentialsError, - UnknownSignatureVersionError, - UnsupportedSignatureVersionError, -) -from botocore.utils import ( - is_valid_ipv6_endpoint_url, - normalize_url_path, - percent_encode_sequence, -) - -logger = logging.getLogger(__name__) - - -EMPTY_SHA256_HASH = ( - 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' -) -# This is the buffer size used when calculating sha256 checksums. -# Experimenting with various buffer sizes showed that this value generally -# gave the best result (in terms of performance). 
-PAYLOAD_BUFFER = 1024 * 1024 -ISO8601 = '%Y-%m-%dT%H:%M:%SZ' -SIGV4_TIMESTAMP = '%Y%m%dT%H%M%SZ' -SIGNED_HEADERS_BLACKLIST = [ - 'expect', - 'transfer-encoding', - 'user-agent', - 'x-amzn-trace-id', -] -UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD' -STREAMING_UNSIGNED_PAYLOAD_TRAILER = 'STREAMING-UNSIGNED-PAYLOAD-TRAILER' - - -def _host_from_url(url): - # Given URL, derive value for host header. Ensure that value: - # 1) is lowercase - # 2) excludes port, if it was the default port - # 3) excludes userinfo - url_parts = urlsplit(url) - host = url_parts.hostname # urlsplit's hostname is always lowercase - if is_valid_ipv6_endpoint_url(url): - host = f'[{host}]' - default_ports = { - 'http': 80, - 'https': 443, - } - if url_parts.port is not None: - if url_parts.port != default_ports.get(url_parts.scheme): - host = f'{host}:{url_parts.port}' - return host - - -def _get_body_as_dict(request): - # For query services, request.data is form-encoded and is already a - # dict, but for other services such as rest-json it could be a json - # string or bytes. In those cases we attempt to load the data as a - # dict. - data = request.data - if isinstance(data, bytes): - data = json.loads(data.decode('utf-8')) - elif isinstance(data, str): - data = json.loads(data) - return data - - -class BaseSigner: - REQUIRES_REGION = False - REQUIRES_TOKEN = False - - def add_auth(self, request): - raise NotImplementedError("add_auth") - - -class TokenSigner(BaseSigner): - REQUIRES_TOKEN = True - """ - Signers that expect an authorization token to perform the authorization - """ - - def __init__(self, auth_token): - self.auth_token = auth_token - - -class SigV2Auth(BaseSigner): - """ - Sign a request with Signature V2. - """ - - def __init__(self, credentials): - self.credentials = credentials - - def calc_signature(self, request, params): - logger.debug("Calculating signature using v2 auth.") - split = urlsplit(request.url) - path = split.path - if len(path) == 0: - path = '/' - string_to_sign = f"{request.method}\n{split.netloc}\n{path}\n" - lhmac = hmac.new( - self.credentials.secret_key.encode("utf-8"), digestmod=sha256 - ) - pairs = [] - for key in sorted(params): - # Any previous signature should not be a part of this - # one, so we skip that particular key. This prevents - # issues during retries. - if key == 'Signature': - continue - value = str(params[key]) - quoted_key = quote(key.encode('utf-8'), safe='') - quoted_value = quote(value.encode('utf-8'), safe='-_~') - pairs.append(f'{quoted_key}={quoted_value}') - qs = '&'.join(pairs) - string_to_sign += qs - logger.debug('String to sign: %s', string_to_sign) - lhmac.update(string_to_sign.encode('utf-8')) - b64 = base64.b64encode(lhmac.digest()).strip().decode('utf-8') - return (qs, b64) - - def add_auth(self, request): - # The auth handler is the last thing called in the - # preparation phase of a prepared request. - # Because of this we have to parse the query params - # from the request body so we can update them with - # the sigv2 auth params. 
- if self.credentials is None: - raise NoCredentialsError() - if request.data: - # POST - params = request.data - else: - # GET - params = request.params - params['AWSAccessKeyId'] = self.credentials.access_key - params['SignatureVersion'] = '2' - params['SignatureMethod'] = 'HmacSHA256' - params['Timestamp'] = time.strftime(ISO8601, time.gmtime()) - if self.credentials.token: - params['SecurityToken'] = self.credentials.token - qs, signature = self.calc_signature(request, params) - params['Signature'] = signature - return request - - -class SigV3Auth(BaseSigner): - def __init__(self, credentials): - self.credentials = credentials - - def add_auth(self, request): - if self.credentials is None: - raise NoCredentialsError() - if 'Date' in request.headers: - del request.headers['Date'] - request.headers['Date'] = formatdate(usegmt=True) - if self.credentials.token: - if 'X-Amz-Security-Token' in request.headers: - del request.headers['X-Amz-Security-Token'] - request.headers['X-Amz-Security-Token'] = self.credentials.token - new_hmac = hmac.new( - self.credentials.secret_key.encode('utf-8'), digestmod=sha256 - ) - new_hmac.update(request.headers['Date'].encode('utf-8')) - encoded_signature = encodebytes(new_hmac.digest()).strip() - signature = ( - f"AWS3-HTTPS AWSAccessKeyId={self.credentials.access_key}," - f"Algorithm=HmacSHA256,Signature={encoded_signature.decode('utf-8')}" - ) - if 'X-Amzn-Authorization' in request.headers: - del request.headers['X-Amzn-Authorization'] - request.headers['X-Amzn-Authorization'] = signature - - -class SigV4Auth(BaseSigner): - """ - Sign a request with Signature V4. - """ - - REQUIRES_REGION = True - - def __init__(self, credentials, service_name, region_name): - self.credentials = credentials - # We initialize these value here so the unit tests can have - # valid values. But these will get overriden in ``add_auth`` - # later for real requests. - self._region_name = region_name - self._service_name = service_name - - def _sign(self, key, msg, hex=False): - if hex: - sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest() - else: - sig = hmac.new(key, msg.encode('utf-8'), sha256).digest() - return sig - - def headers_to_sign(self, request): - """ - Select the headers from the request that need to be included - in the StringToSign. - """ - header_map = HTTPHeaders() - for name, value in request.headers.items(): - lname = name.lower() - if lname not in SIGNED_HEADERS_BLACKLIST: - header_map[lname] = value - if 'host' not in header_map: - # TODO: We should set the host ourselves, instead of relying on our - # HTTP client to set it for us. - header_map['host'] = _host_from_url(request.url) - return header_map - - def canonical_query_string(self, request): - # The query string can come from two parts. One is the - # params attribute of the request. The other is from the request - # url (in which case we have to re-split the url into its components - # and parse out the query string component). - if request.params: - return self._canonical_query_string_params(request.params) - else: - return self._canonical_query_string_url(urlsplit(request.url)) - - def _canonical_query_string_params(self, params): - # [(key, value), (key2, value2)] - key_val_pairs = [] - if isinstance(params, Mapping): - params = params.items() - for key, value in params: - key_val_pairs.append( - (quote(key, safe='-_.~'), quote(str(value), safe='-_.~')) - ) - sorted_key_vals = [] - # Sort by the URI-encoded key names, and in the case of - # repeated keys, then sort by the value. 
- for key, value in sorted(key_val_pairs): - sorted_key_vals.append(f'{key}={value}') - canonical_query_string = '&'.join(sorted_key_vals) - return canonical_query_string - - def _canonical_query_string_url(self, parts): - canonical_query_string = '' - if parts.query: - # [(key, value), (key2, value2)] - key_val_pairs = [] - for pair in parts.query.split('&'): - key, _, value = pair.partition('=') - key_val_pairs.append((key, value)) - sorted_key_vals = [] - # Sort by the URI-encoded key names, and in the case of - # repeated keys, then sort by the value. - for key, value in sorted(key_val_pairs): - sorted_key_vals.append(f'{key}={value}') - canonical_query_string = '&'.join(sorted_key_vals) - return canonical_query_string - - def canonical_headers(self, headers_to_sign): - """ - Return the headers that need to be included in the StringToSign - in their canonical form by converting all header keys to lower - case, sorting them in alphabetical order and then joining - them into a string, separated by newlines. - """ - headers = [] - sorted_header_names = sorted(set(headers_to_sign)) - for key in sorted_header_names: - value = ','.join( - self._header_value(v) for v in headers_to_sign.get_all(key) - ) - headers.append(f'{key}:{ensure_unicode(value)}') - return '\n'.join(headers) - - def _header_value(self, value): - # From the sigv4 docs: - # Lowercase(HeaderName) + ':' + Trimall(HeaderValue) - # - # The Trimall function removes excess white space before and after - # values, and converts sequential spaces to a single space. - return ' '.join(value.split()) - - def signed_headers(self, headers_to_sign): - headers = sorted(n.lower().strip() for n in set(headers_to_sign)) - return ';'.join(headers) - - def _is_streaming_checksum_payload(self, request): - checksum_context = request.context.get('checksum', {}) - algorithm = checksum_context.get('request_algorithm') - return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer' - - def payload(self, request): - if self._is_streaming_checksum_payload(request): - return STREAMING_UNSIGNED_PAYLOAD_TRAILER - elif not self._should_sha256_sign_payload(request): - # When payload signing is disabled, we use this static string in - # place of the payload checksum. - return UNSIGNED_PAYLOAD - request_body = request.body - if request_body and hasattr(request_body, 'seek'): - position = request_body.tell() - read_chunksize = functools.partial( - request_body.read, PAYLOAD_BUFFER - ) - checksum = sha256() - for chunk in iter(read_chunksize, b''): - checksum.update(chunk) - hex_checksum = checksum.hexdigest() - request_body.seek(position) - return hex_checksum - elif request_body: - # The request serialization has ensured that - # request.body is a bytes() type. - return sha256(request_body).hexdigest() - else: - return EMPTY_SHA256_HASH - - def _should_sha256_sign_payload(self, request): - # Payloads will always be signed over insecure connections. - if not request.url.startswith('https'): - return True - - # Certain operations may have payload signing disabled by default. - # Since we don't have access to the operation model, we pass in this - # bit of metadata through the request context. 
- return request.context.get('payload_signing_enabled', True) - - def canonical_request(self, request): - cr = [request.method.upper()] - path = self._normalize_url_path(urlsplit(request.url).path) - cr.append(path) - cr.append(self.canonical_query_string(request)) - headers_to_sign = self.headers_to_sign(request) - cr.append(self.canonical_headers(headers_to_sign) + '\n') - cr.append(self.signed_headers(headers_to_sign)) - if 'X-Amz-Content-SHA256' in request.headers: - body_checksum = request.headers['X-Amz-Content-SHA256'] - else: - body_checksum = self.payload(request) - cr.append(body_checksum) - return '\n'.join(cr) - - def _normalize_url_path(self, path): - normalized_path = quote(normalize_url_path(path), safe='/~') - return normalized_path - - def scope(self, request): - scope = [self.credentials.access_key] - scope.append(request.context['timestamp'][0:8]) - scope.append(self._region_name) - scope.append(self._service_name) - scope.append('aws4_request') - return '/'.join(scope) - - def credential_scope(self, request): - scope = [] - scope.append(request.context['timestamp'][0:8]) - scope.append(self._region_name) - scope.append(self._service_name) - scope.append('aws4_request') - return '/'.join(scope) - - def string_to_sign(self, request, canonical_request): - """ - Return the canonical StringToSign as well as a dict - containing the original version of all headers that - were included in the StringToSign. - """ - sts = ['AWS4-HMAC-SHA256'] - sts.append(request.context['timestamp']) - sts.append(self.credential_scope(request)) - sts.append(sha256(canonical_request.encode('utf-8')).hexdigest()) - return '\n'.join(sts) - - def signature(self, string_to_sign, request): - key = self.credentials.secret_key - k_date = self._sign( - (f"AWS4{key}").encode(), request.context["timestamp"][0:8] - ) - k_region = self._sign(k_date, self._region_name) - k_service = self._sign(k_region, self._service_name) - k_signing = self._sign(k_service, 'aws4_request') - return self._sign(k_signing, string_to_sign, hex=True) - - def add_auth(self, request): - if self.credentials is None: - raise NoCredentialsError() - datetime_now = get_current_datetime() - request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP) - # This could be a retry. Make sure the previous - # authorization header is removed first. 
- self._modify_request_before_signing(request) - canonical_request = self.canonical_request(request) - logger.debug("Calculating signature using v4 auth.") - logger.debug('CanonicalRequest:\n%s', canonical_request) - string_to_sign = self.string_to_sign(request, canonical_request) - logger.debug('StringToSign:\n%s', string_to_sign) - signature = self.signature(string_to_sign, request) - logger.debug('Signature:\n%s', signature) - - self._inject_signature_to_request(request, signature) - - def _inject_signature_to_request(self, request, signature): - auth_str = [f'AWS4-HMAC-SHA256 Credential={self.scope(request)}'] - headers_to_sign = self.headers_to_sign(request) - auth_str.append( - f"SignedHeaders={self.signed_headers(headers_to_sign)}" - ) - auth_str.append(f'Signature={signature}') - request.headers['Authorization'] = ', '.join(auth_str) - return request - - def _modify_request_before_signing(self, request): - if 'Authorization' in request.headers: - del request.headers['Authorization'] - self._set_necessary_date_headers(request) - if self.credentials.token: - if 'X-Amz-Security-Token' in request.headers: - del request.headers['X-Amz-Security-Token'] - request.headers['X-Amz-Security-Token'] = self.credentials.token - - if not request.context.get('payload_signing_enabled', True): - if 'X-Amz-Content-SHA256' in request.headers: - del request.headers['X-Amz-Content-SHA256'] - request.headers['X-Amz-Content-SHA256'] = UNSIGNED_PAYLOAD - - def _set_necessary_date_headers(self, request): - # The spec allows for either the Date _or_ the X-Amz-Date value to be - # used so we check both. If there's a Date header, we use the date - # header. Otherwise we use the X-Amz-Date header. - if 'Date' in request.headers: - del request.headers['Date'] - datetime_timestamp = datetime.datetime.strptime( - request.context['timestamp'], SIGV4_TIMESTAMP - ) - request.headers['Date'] = formatdate( - int(calendar.timegm(datetime_timestamp.timetuple())) - ) - if 'X-Amz-Date' in request.headers: - del request.headers['X-Amz-Date'] - else: - if 'X-Amz-Date' in request.headers: - del request.headers['X-Amz-Date'] - request.headers['X-Amz-Date'] = request.context['timestamp'] - - -class S3SigV4Auth(SigV4Auth): - def _modify_request_before_signing(self, request): - super()._modify_request_before_signing(request) - if 'X-Amz-Content-SHA256' in request.headers: - del request.headers['X-Amz-Content-SHA256'] - - request.headers['X-Amz-Content-SHA256'] = self.payload(request) - - def _should_sha256_sign_payload(self, request): - # S3 allows optional body signing, so to minimize the performance - # impact, we opt to not SHA256 sign the body on streaming uploads, - # provided that we're on https. - client_config = request.context.get('client_config') - s3_config = getattr(client_config, 's3', None) - - # The config could be None if it isn't set, or if the customer sets it - # to None. - if s3_config is None: - s3_config = {} - - # The explicit configuration takes precedence over any implicit - # configuration. - sign_payload = s3_config.get('payload_signing_enabled', None) - if sign_payload is not None: - return sign_payload - - # We require that both a checksum be present and https be enabled - # to implicitly disable body signing. The combination of TLS and - # a checksum is sufficiently secure and durable for us to be - # confident in the request without body signing. 
- checksum_header = 'Content-MD5' - checksum_context = request.context.get('checksum', {}) - algorithm = checksum_context.get('request_algorithm') - if isinstance(algorithm, dict) and algorithm.get('in') == 'header': - checksum_header = algorithm['name'] - if ( - not request.url.startswith("https") - or checksum_header not in request.headers - ): - return True - - # If the input is streaming we disable body signing by default. - if request.context.get('has_streaming_input', False): - return False - - # If the S3-specific checks had no results, delegate to the generic - # checks. - return super()._should_sha256_sign_payload(request) - - def _normalize_url_path(self, path): - # For S3, we do not normalize the path. - return path - - -class S3ExpressAuth(S3SigV4Auth): - REQUIRES_IDENTITY_CACHE = True - - def __init__( - self, credentials, service_name, region_name, *, identity_cache - ): - super().__init__(credentials, service_name, region_name) - self._identity_cache = identity_cache - - def add_auth(self, request): - super().add_auth(request) - - def _modify_request_before_signing(self, request): - super()._modify_request_before_signing(request) - if 'x-amz-s3session-token' not in request.headers: - request.headers['x-amz-s3session-token'] = self.credentials.token - # S3Express does not support STS' X-Amz-Security-Token - if 'X-Amz-Security-Token' in request.headers: - del request.headers['X-Amz-Security-Token'] - - -class S3ExpressPostAuth(S3ExpressAuth): - REQUIRES_IDENTITY_CACHE = True - - def add_auth(self, request): - datetime_now = get_current_datetime() - request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP) - - fields = {} - if request.context.get('s3-presign-post-fields', None) is not None: - fields = request.context['s3-presign-post-fields'] - - policy = {} - conditions = [] - if request.context.get('s3-presign-post-policy', None) is not None: - policy = request.context['s3-presign-post-policy'] - if policy.get('conditions', None) is not None: - conditions = policy['conditions'] - - policy['conditions'] = conditions - - fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' - fields['x-amz-credential'] = self.scope(request) - fields['x-amz-date'] = request.context['timestamp'] - - conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'}) - conditions.append({'x-amz-credential': self.scope(request)}) - conditions.append({'x-amz-date': request.context['timestamp']}) - - if self.credentials.token is not None: - fields['X-Amz-S3session-Token'] = self.credentials.token - conditions.append( - {'X-Amz-S3session-Token': self.credentials.token} - ) - - # Dump the base64 encoded policy into the fields dictionary. - fields['policy'] = base64.b64encode( - json.dumps(policy).encode('utf-8') - ).decode('utf-8') - - fields['x-amz-signature'] = self.signature(fields['policy'], request) - - request.context['s3-presign-post-fields'] = fields - request.context['s3-presign-post-policy'] = policy - - -class S3ExpressQueryAuth(S3ExpressAuth): - DEFAULT_EXPIRES = 300 - REQUIRES_IDENTITY_CACHE = True - - def __init__( - self, - credentials, - service_name, - region_name, - *, - identity_cache, - expires=DEFAULT_EXPIRES, - ): - super().__init__( - credentials, - service_name, - region_name, - identity_cache=identity_cache, - ) - self._expires = expires - - def _modify_request_before_signing(self, request): - # We automatically set this header, so if it's the auto-set value we - # want to get rid of it since it doesn't make sense for presigned urls. 
- content_type = request.headers.get('content-type') - blocklisted_content_type = ( - 'application/x-www-form-urlencoded; charset=utf-8' - ) - if content_type == blocklisted_content_type: - del request.headers['content-type'] - - # Note that we're not including X-Amz-Signature. - # From the docs: "The Canonical Query String must include all the query - # parameters from the preceding table except for X-Amz-Signature. - signed_headers = self.signed_headers(self.headers_to_sign(request)) - - auth_params = { - 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256', - 'X-Amz-Credential': self.scope(request), - 'X-Amz-Date': request.context['timestamp'], - 'X-Amz-Expires': self._expires, - 'X-Amz-SignedHeaders': signed_headers, - } - if self.credentials.token is not None: - auth_params['X-Amz-S3session-Token'] = self.credentials.token - # Now parse the original query string to a dict, inject our new query - # params, and serialize back to a query string. - url_parts = urlsplit(request.url) - # parse_qs makes each value a list, but in our case we know we won't - # have repeated keys so we know we have single element lists which we - # can convert back to scalar values. - query_string_parts = parse_qs(url_parts.query, keep_blank_values=True) - query_dict = {k: v[0] for k, v in query_string_parts.items()} - - if request.params: - query_dict.update(request.params) - request.params = {} - # The spec is particular about this. It *has* to be: - # https://?& - # You can't mix the two types of params together, i.e just keep doing - # new_query_params.update(op_params) - # new_query_params.update(auth_params) - # percent_encode_sequence(new_query_params) - operation_params = '' - if request.data: - # We also need to move the body params into the query string. To - # do this, we first have to convert it to a dict. - query_dict.update(_get_body_as_dict(request)) - request.data = '' - if query_dict: - operation_params = percent_encode_sequence(query_dict) + '&' - new_query_string = ( - f"{operation_params}{percent_encode_sequence(auth_params)}" - ) - # url_parts is a tuple (and therefore immutable) so we need to create - # a new url_parts with the new query string. - # - - # scheme - 0 - # netloc - 1 - # path - 2 - # query - 3 <-- we're replacing this. - # fragment - 4 - p = url_parts - new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) - request.url = urlunsplit(new_url_parts) - - def _inject_signature_to_request(self, request, signature): - # Rather than calculating an "Authorization" header, for the query - # param quth, we just append an 'X-Amz-Signature' param to the end - # of the query string. - request.url += f'&X-Amz-Signature={signature}' - - def _normalize_url_path(self, path): - # For S3, we do not normalize the path. - return path - - def payload(self, request): - # From the doc link above: - # "You don't include a payload hash in the Canonical Request, because - # when you create a presigned URL, you don't know anything about the - # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD". - return UNSIGNED_PAYLOAD - - -class SigV4QueryAuth(SigV4Auth): - DEFAULT_EXPIRES = 3600 - - def __init__( - self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES - ): - super().__init__(credentials, service_name, region_name) - self._expires = expires - - def _modify_request_before_signing(self, request): - # We automatically set this header, so if it's the auto-set value we - # want to get rid of it since it doesn't make sense for presigned urls. 
- content_type = request.headers.get('content-type') - blacklisted_content_type = ( - 'application/x-www-form-urlencoded; charset=utf-8' - ) - if content_type == blacklisted_content_type: - del request.headers['content-type'] - - # Note that we're not including X-Amz-Signature. - # From the docs: "The Canonical Query String must include all the query - # parameters from the preceding table except for X-Amz-Signature. - signed_headers = self.signed_headers(self.headers_to_sign(request)) - - auth_params = { - 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256', - 'X-Amz-Credential': self.scope(request), - 'X-Amz-Date': request.context['timestamp'], - 'X-Amz-Expires': self._expires, - 'X-Amz-SignedHeaders': signed_headers, - } - if self.credentials.token is not None: - auth_params['X-Amz-Security-Token'] = self.credentials.token - # Now parse the original query string to a dict, inject our new query - # params, and serialize back to a query string. - url_parts = urlsplit(request.url) - # parse_qs makes each value a list, but in our case we know we won't - # have repeated keys so we know we have single element lists which we - # can convert back to scalar values. - query_string_parts = parse_qs(url_parts.query, keep_blank_values=True) - query_dict = {k: v[0] for k, v in query_string_parts.items()} - - if request.params: - query_dict.update(request.params) - request.params = {} - # The spec is particular about this. It *has* to be: - # https://?& - # You can't mix the two types of params together, i.e just keep doing - # new_query_params.update(op_params) - # new_query_params.update(auth_params) - # percent_encode_sequence(new_query_params) - operation_params = '' - if request.data: - # We also need to move the body params into the query string. To - # do this, we first have to convert it to a dict. - query_dict.update(_get_body_as_dict(request)) - request.data = '' - if query_dict: - operation_params = percent_encode_sequence(query_dict) + '&' - new_query_string = ( - f"{operation_params}{percent_encode_sequence(auth_params)}" - ) - # url_parts is a tuple (and therefore immutable) so we need to create - # a new url_parts with the new query string. - # - - # scheme - 0 - # netloc - 1 - # path - 2 - # query - 3 <-- we're replacing this. - # fragment - 4 - p = url_parts - new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) - request.url = urlunsplit(new_url_parts) - - def _inject_signature_to_request(self, request, signature): - # Rather than calculating an "Authorization" header, for the query - # param quth, we just append an 'X-Amz-Signature' param to the end - # of the query string. - request.url += f'&X-Amz-Signature={signature}' - - -class S3SigV4QueryAuth(SigV4QueryAuth): - """S3 SigV4 auth using query parameters. - - This signer will sign a request using query parameters and signature - version 4, i.e a "presigned url" signer. - - Based off of: - - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html - - """ - - def _normalize_url_path(self, path): - # For S3, we do not normalize the path. - return path - - def payload(self, request): - # From the doc link above: - # "You don't include a payload hash in the Canonical Request, because - # when you create a presigned URL, you don't know anything about the - # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD". 
- return UNSIGNED_PAYLOAD - - -class S3SigV4PostAuth(SigV4Auth): - """ - Presigns a s3 post - - Implementation doc here: - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-UsingHTTPPOST.html - """ - - def add_auth(self, request): - datetime_now = get_current_datetime() - request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP) - - fields = {} - if request.context.get('s3-presign-post-fields', None) is not None: - fields = request.context['s3-presign-post-fields'] - - policy = {} - conditions = [] - if request.context.get('s3-presign-post-policy', None) is not None: - policy = request.context['s3-presign-post-policy'] - if policy.get('conditions', None) is not None: - conditions = policy['conditions'] - - policy['conditions'] = conditions - - fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' - fields['x-amz-credential'] = self.scope(request) - fields['x-amz-date'] = request.context['timestamp'] - - conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'}) - conditions.append({'x-amz-credential': self.scope(request)}) - conditions.append({'x-amz-date': request.context['timestamp']}) - - if self.credentials.token is not None: - fields['x-amz-security-token'] = self.credentials.token - conditions.append({'x-amz-security-token': self.credentials.token}) - - # Dump the base64 encoded policy into the fields dictionary. - fields['policy'] = base64.b64encode( - json.dumps(policy).encode('utf-8') - ).decode('utf-8') - - fields['x-amz-signature'] = self.signature(fields['policy'], request) - - request.context['s3-presign-post-fields'] = fields - request.context['s3-presign-post-policy'] = policy - - -class HmacV1Auth(BaseSigner): - # List of Query String Arguments of Interest - QSAOfInterest = [ - 'accelerate', - 'acl', - 'cors', - 'defaultObjectAcl', - 'location', - 'logging', - 'partNumber', - 'policy', - 'requestPayment', - 'torrent', - 'versioning', - 'versionId', - 'versions', - 'website', - 'uploads', - 'uploadId', - 'response-content-type', - 'response-content-language', - 'response-expires', - 'response-cache-control', - 'response-content-disposition', - 'response-content-encoding', - 'delete', - 'lifecycle', - 'tagging', - 'restore', - 'storageClass', - 'notification', - 'replication', - 'requestPayment', - 'analytics', - 'metrics', - 'inventory', - 'select', - 'select-type', - 'object-lock', - ] - - def __init__(self, credentials, service_name=None, region_name=None): - self.credentials = credentials - - def sign_string(self, string_to_sign): - new_hmac = hmac.new( - self.credentials.secret_key.encode('utf-8'), digestmod=sha1 - ) - new_hmac.update(string_to_sign.encode('utf-8')) - return encodebytes(new_hmac.digest()).strip().decode('utf-8') - - def canonical_standard_headers(self, headers): - interesting_headers = ['content-md5', 'content-type', 'date'] - hoi = [] - if 'Date' in headers: - del headers['Date'] - headers['Date'] = self._get_date() - for ih in interesting_headers: - found = False - for key in headers: - lk = key.lower() - if headers[key] is not None and lk == ih: - hoi.append(headers[key].strip()) - found = True - if not found: - hoi.append('') - return '\n'.join(hoi) - - def canonical_custom_headers(self, headers): - hoi = [] - custom_headers = {} - for key in headers: - lk = key.lower() - if headers[key] is not None: - if lk.startswith('x-amz-'): - custom_headers[lk] = ','.join( - v.strip() for v in headers.get_all(key) - ) - sorted_header_keys = sorted(custom_headers.keys()) - for key in sorted_header_keys: - hoi.append(f"{key}:{custom_headers[key]}") - 
return '\n'.join(hoi) - - def unquote_v(self, nv): - """ - TODO: Do we need this? - """ - if len(nv) == 1: - return nv - else: - return (nv[0], unquote(nv[1])) - - def canonical_resource(self, split, auth_path=None): - # don't include anything after the first ? in the resource... - # unless it is one of the QSA of interest, defined above - # NOTE: - # The path in the canonical resource should always be the - # full path including the bucket name, even for virtual-hosting - # style addressing. The ``auth_path`` keeps track of the full - # path for the canonical resource and would be passed in if - # the client was using virtual-hosting style. - if auth_path is not None: - buf = auth_path - else: - buf = split.path - if split.query: - qsa = split.query.split('&') - qsa = [a.split('=', 1) for a in qsa] - qsa = [ - self.unquote_v(a) for a in qsa if a[0] in self.QSAOfInterest - ] - if len(qsa) > 0: - qsa.sort(key=itemgetter(0)) - qsa = ['='.join(a) for a in qsa] - buf += '?' - buf += '&'.join(qsa) - return buf - - def canonical_string( - self, method, split, headers, expires=None, auth_path=None - ): - cs = method.upper() + '\n' - cs += self.canonical_standard_headers(headers) + '\n' - custom_headers = self.canonical_custom_headers(headers) - if custom_headers: - cs += custom_headers + '\n' - cs += self.canonical_resource(split, auth_path=auth_path) - return cs - - def get_signature( - self, method, split, headers, expires=None, auth_path=None - ): - if self.credentials.token: - del headers['x-amz-security-token'] - headers['x-amz-security-token'] = self.credentials.token - string_to_sign = self.canonical_string( - method, split, headers, auth_path=auth_path - ) - logger.debug('StringToSign:\n%s', string_to_sign) - return self.sign_string(string_to_sign) - - def add_auth(self, request): - if self.credentials is None: - raise NoCredentialsError - logger.debug("Calculating signature using hmacv1 auth.") - split = urlsplit(request.url) - logger.debug("HTTP request method: %s", request.method) - signature = self.get_signature( - request.method, split, request.headers, auth_path=request.auth_path - ) - self._inject_signature(request, signature) - - def _get_date(self): - return formatdate(usegmt=True) - - def _inject_signature(self, request, signature): - if 'Authorization' in request.headers: - # We have to do this because request.headers is not - # normal dictionary. It has the (unintuitive) behavior - # of aggregating repeated setattr calls for the same - # key value. For example: - # headers['foo'] = 'a'; headers['foo'] = 'b' - # list(headers) will print ['foo', 'foo']. - del request.headers['Authorization'] - - auth_header = f"AWS {self.credentials.access_key}:{signature}" - request.headers['Authorization'] = auth_header - - -class HmacV1QueryAuth(HmacV1Auth): - """ - Generates a presigned request for s3. - - Spec from this document: - - http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html - #RESTAuthenticationQueryStringAuth - - """ - - DEFAULT_EXPIRES = 3600 - - def __init__(self, credentials, expires=DEFAULT_EXPIRES): - self.credentials = credentials - self._expires = expires - - def _get_date(self): - return str(int(time.time() + int(self._expires))) - - def _inject_signature(self, request, signature): - query_dict = {} - query_dict['AWSAccessKeyId'] = self.credentials.access_key - query_dict['Signature'] = signature - - for header_key in request.headers: - lk = header_key.lower() - # For query string requests, Expires is used instead of the - # Date header. 
- if header_key == 'Date': - query_dict['Expires'] = request.headers['Date'] - # We only want to include relevant headers in the query string. - # These can be anything that starts with x-amz, is Content-MD5, - # or is Content-Type. - elif lk.startswith('x-amz-') or lk in ( - 'content-md5', - 'content-type', - ): - query_dict[lk] = request.headers[lk] - # Combine all of the identified headers into an encoded - # query string - new_query_string = percent_encode_sequence(query_dict) - - # Create a new url with the presigned url. - p = urlsplit(request.url) - if p[3]: - # If there was a pre-existing query string, we should - # add that back before injecting the new query string. - new_query_string = f'{p[3]}&{new_query_string}' - new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) - request.url = urlunsplit(new_url_parts) - - -class HmacV1PostAuth(HmacV1Auth): - """ - Generates a presigned post for s3. - - Spec from this document: - - http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingHTTPPOST.html - """ - - def add_auth(self, request): - fields = {} - if request.context.get('s3-presign-post-fields', None) is not None: - fields = request.context['s3-presign-post-fields'] - - policy = {} - conditions = [] - if request.context.get('s3-presign-post-policy', None) is not None: - policy = request.context['s3-presign-post-policy'] - if policy.get('conditions', None) is not None: - conditions = policy['conditions'] - - policy['conditions'] = conditions - - fields['AWSAccessKeyId'] = self.credentials.access_key - - if self.credentials.token is not None: - fields['x-amz-security-token'] = self.credentials.token - conditions.append({'x-amz-security-token': self.credentials.token}) - - # Dump the base64 encoded policy into the fields dictionary. - fields['policy'] = base64.b64encode( - json.dumps(policy).encode('utf-8') - ).decode('utf-8') - - fields['signature'] = self.sign_string(fields['policy']) - - request.context['s3-presign-post-fields'] = fields - request.context['s3-presign-post-policy'] = policy - - -class BearerAuth(TokenSigner): - """ - Performs bearer token authorization by placing the bearer token in the - Authorization header as specified by Section 2.1 of RFC 6750. 
- - https://datatracker.ietf.org/doc/html/rfc6750#section-2.1 - """ - - def add_auth(self, request): - if self.auth_token is None: - raise NoAuthTokenError() - - auth_header = f'Bearer {self.auth_token.token}' - if 'Authorization' in request.headers: - del request.headers['Authorization'] - request.headers['Authorization'] = auth_header - - -def resolve_auth_type(auth_trait): - for auth_type in auth_trait: - if auth_type == 'smithy.api#noAuth': - return AUTH_TYPE_TO_SIGNATURE_VERSION[auth_type] - elif auth_type in AUTH_TYPE_TO_SIGNATURE_VERSION: - signature_version = AUTH_TYPE_TO_SIGNATURE_VERSION[auth_type] - if signature_version in AUTH_TYPE_MAPS: - return signature_version - else: - raise UnknownSignatureVersionError(signature_version=auth_type) - raise UnsupportedSignatureVersionError(signature_version=auth_trait) - - -def resolve_auth_scheme_preference(preference_list, auth_options): - service_supported = [scheme.split('#')[-1] for scheme in auth_options] - - unsupported = [ - scheme - for scheme in preference_list - if scheme not in AUTH_PREF_TO_SIGNATURE_VERSION - ] - if unsupported: - logger.debug( - "Unsupported auth schemes in preference list: %r", unsupported - ) - - combined = preference_list + service_supported - prioritized_schemes = [ - scheme - for scheme in dict.fromkeys(combined) - if scheme in service_supported - ] - - for scheme in prioritized_schemes: - if scheme == 'noAuth': - return AUTH_PREF_TO_SIGNATURE_VERSION[scheme] - sig_version = AUTH_PREF_TO_SIGNATURE_VERSION.get(scheme) - if sig_version in AUTH_TYPE_MAPS: - return sig_version - - raise UnsupportedSignatureVersionError( - signature_version=', '.join(sorted(service_supported)) - ) - - -AUTH_TYPE_MAPS = { - 'v2': SigV2Auth, - 'v3': SigV3Auth, - 'v3https': SigV3Auth, - 's3': HmacV1Auth, - 's3-query': HmacV1QueryAuth, - 's3-presign-post': HmacV1PostAuth, - 's3v4-presign-post': S3SigV4PostAuth, - 'v4-s3express': S3ExpressAuth, - 'v4-s3express-query': S3ExpressQueryAuth, - 'v4-s3express-presign-post': S3ExpressPostAuth, - 'bearer': BearerAuth, -} - -# Define v4 signers depending on if CRT is present -if HAS_CRT: - from botocore.crt.auth import CRT_AUTH_TYPE_MAPS - - AUTH_TYPE_MAPS.update(CRT_AUTH_TYPE_MAPS) -else: - AUTH_TYPE_MAPS.update( - { - 'v4': SigV4Auth, - 'v4-query': SigV4QueryAuth, - 's3v4': S3SigV4Auth, - 's3v4-query': S3SigV4QueryAuth, - } - ) - -AUTH_TYPE_TO_SIGNATURE_VERSION = { - 'aws.auth#sigv4': 'v4', - 'aws.auth#sigv4a': 'v4a', - 'smithy.api#httpBearerAuth': 'bearer', - 'smithy.api#noAuth': 'none', -} - -# Mapping used specifically for resolving user-configured auth scheme preferences. -# This is similar to AUTH_TYPE_TO_SIGNATURE_VERSION, but uses simplified keys by -# stripping the auth trait prefixes ('smithy.api#httpBearerAuth' → 'httpBearerAuth'). -# These simplified keys match what customers are expected to provide in configuration. -AUTH_PREF_TO_SIGNATURE_VERSION = { - auth_scheme.split('#')[-1]: sig_version - for auth_scheme, sig_version in AUTH_TYPE_TO_SIGNATURE_VERSION.items() -} diff --git a/venv/Lib/site-packages/botocore/awsrequest.py b/venv/Lib/site-packages/botocore/awsrequest.py deleted file mode 100644 index 0668139..0000000 --- a/venv/Lib/site-packages/botocore/awsrequest.py +++ /dev/null @@ -1,635 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. 
A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import functools -import logging -from collections.abc import Mapping - -import urllib3.util -from urllib3.connection import HTTPConnection, VerifiedHTTPSConnection -from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool - -import botocore.utils -from botocore.compat import ( - HTTPHeaders, - HTTPResponse, - MutableMapping, - urlencode, - urlparse, - urlsplit, - urlunsplit, -) -from botocore.exceptions import UnseekableStreamError - -logger = logging.getLogger(__name__) - - -class AWSHTTPResponse(HTTPResponse): - # The *args, **kwargs is used because the args are slightly - # different in py2.6 than in py2.7/py3. - def __init__(self, *args, **kwargs): - self._status_tuple = kwargs.pop('status_tuple') - HTTPResponse.__init__(self, *args, **kwargs) - - def _read_status(self): - if self._status_tuple is not None: - status_tuple = self._status_tuple - self._status_tuple = None - return status_tuple - else: - return HTTPResponse._read_status(self) - - -class AWSConnection: - """Mixin for HTTPConnection that supports Expect 100-continue. - - This when mixed with a subclass of httplib.HTTPConnection (though - technically we subclass from urllib3, which subclasses - httplib.HTTPConnection) and we only override this class to support Expect - 100-continue, which we need for S3. As far as I can tell, this is - general purpose enough to not be specific to S3, but I'm being - tentative and keeping it in botocore because I've only tested - this against AWS services. - - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._original_response_cls = self.response_class - # This variable is set when we receive an early response from the - # server. If this value is set to True, any calls to send() are noops. - # This value is reset to false every time _send_request is called. - # This is to workaround changes in urllib3 2.0 which uses separate - # send() calls in request() instead of delegating to endheaders(), - # which is where the body is sent in CPython's HTTPConnection. - self._response_received = False - self._expect_header_set = False - self._send_called = False - - def close(self): - super().close() - # Reset all of our instance state we were tracking. - self._response_received = False - self._expect_header_set = False - self._send_called = False - self.response_class = self._original_response_cls - - def request(self, method, url, body=None, headers=None, *args, **kwargs): - if headers is None: - headers = {} - self._response_received = False - if headers.get('Expect', b'') == b'100-continue': - self._expect_header_set = True - else: - self._expect_header_set = False - self.response_class = self._original_response_cls - rval = super().request(method, url, body, headers, *args, **kwargs) - self._expect_header_set = False - return rval - - def _convert_to_bytes(self, mixed_buffer): - # Take a list of mixed str/bytes and convert it - # all into a single bytestring. - # Any str will be encoded as utf-8. 
- bytes_buffer = [] - for chunk in mixed_buffer: - if isinstance(chunk, str): - bytes_buffer.append(chunk.encode('utf-8')) - else: - bytes_buffer.append(chunk) - msg = b"\r\n".join(bytes_buffer) - return msg - - def _send_output(self, message_body=None, *args, **kwargs): - self._buffer.extend((b"", b"")) - msg = self._convert_to_bytes(self._buffer) - del self._buffer[:] - # If msg and message_body are sent in a single send() call, - # it will avoid performance problems caused by the interaction - # between delayed ack and the Nagle algorithm. - if isinstance(message_body, bytes): - msg += message_body - message_body = None - self.send(msg) - if self._expect_header_set: - # This is our custom behavior. If the Expect header was - # set, it will trigger this custom behavior. - logger.debug("Waiting for 100 Continue response.") - # Wait for 1 second for the server to send a response. - if urllib3.util.wait_for_read(self.sock, 1): - self._handle_expect_response(message_body) - return - else: - # From the RFC: - # Because of the presence of older implementations, the - # protocol allows ambiguous situations in which a client may - # send "Expect: 100-continue" without receiving either a 417 - # (Expectation Failed) status or a 100 (Continue) status. - # Therefore, when a client sends this header field to an origin - # server (possibly via a proxy) from which it has never seen a - # 100 (Continue) status, the client SHOULD NOT wait for an - # indefinite period before sending the request body. - logger.debug( - "No response seen from server, continuing to " - "send the response body." - ) - if message_body is not None: - # message_body was not a string (i.e. it is a file), and - # we must run the risk of Nagle. - self.send(message_body) - - def _consume_headers(self, fp): - # Most servers (including S3) will just return - # the CLRF after the 100 continue response. However, - # some servers (I've specifically seen this for squid when - # used as a straight HTTP proxy) will also inject a - # Connection: keep-alive header. To account for this - # we'll read until we read '\r\n', and ignore any headers - # that come immediately after the 100 continue response. - current = None - while current != b'\r\n': - current = fp.readline() - - def _handle_expect_response(self, message_body): - # This is called when we sent the request headers containing - # an Expect: 100-continue header and received a response. - # We now need to figure out what to do. - fp = self.sock.makefile('rb', 0) - try: - maybe_status_line = fp.readline() - parts = maybe_status_line.split(None, 2) - if self._is_100_continue_status(maybe_status_line): - self._consume_headers(fp) - logger.debug( - "100 Continue response seen, now sending request body." - ) - self._send_message_body(message_body) - elif len(parts) == 3 and parts[0].startswith(b'HTTP/'): - # From the RFC: - # Requirements for HTTP/1.1 origin servers: - # - # - Upon receiving a request which includes an Expect - # request-header field with the "100-continue" - # expectation, an origin server MUST either respond with - # 100 (Continue) status and continue to read from the - # input stream, or respond with a final status code. - # - # So if we don't get a 100 Continue response, then - # whatever the server has sent back is the final response - # and don't send the message_body. - logger.debug( - "Received a non 100 Continue response " - "from the server, NOT sending request body." 
- ) - status_tuple = ( - parts[0].decode('ascii'), - int(parts[1]), - parts[2].decode('ascii'), - ) - response_class = functools.partial( - AWSHTTPResponse, status_tuple=status_tuple - ) - self.response_class = response_class - self._response_received = True - finally: - fp.close() - - def _send_message_body(self, message_body): - if message_body is not None: - self.send(message_body) - - def send(self, str): - if self._response_received: - if not self._send_called: - # urllib3 2.0 chunks and calls send potentially - # thousands of times inside `request` unlike the - # standard library. Only log this once for sanity. - logger.debug( - "send() called, but response already received. " - "Not sending data." - ) - self._send_called = True - return - return super().send(str) - - def _is_100_continue_status(self, maybe_status_line): - parts = maybe_status_line.split(None, 2) - # Check for HTTP/ 100 Continue\r\n or HTTP/ 100\r\n - return ( - len(parts) >= 2 - and parts[0].startswith(b'HTTP/') - and parts[1] == b'100' - ) - - -class AWSHTTPConnection(AWSConnection, HTTPConnection): - """An HTTPConnection that supports 100 Continue behavior.""" - - -class AWSHTTPSConnection(AWSConnection, VerifiedHTTPSConnection): - """An HTTPSConnection that supports 100 Continue behavior.""" - - -class AWSHTTPConnectionPool(HTTPConnectionPool): - ConnectionCls = AWSHTTPConnection - - -class AWSHTTPSConnectionPool(HTTPSConnectionPool): - ConnectionCls = AWSHTTPSConnection - - -def prepare_request_dict( - request_dict, endpoint_url, context=None, user_agent=None -): - """ - This method prepares a request dict to be created into an - AWSRequestObject. This prepares the request dict by adding the - url and the user agent to the request dict. - - :type request_dict: dict - :param request_dict: The request dict (created from the - ``serialize`` module). - - :type user_agent: string - :param user_agent: The user agent to use for this request. - - :type endpoint_url: string - :param endpoint_url: The full endpoint url, which contains at least - the scheme, the hostname, and optionally any path components. - """ - r = request_dict - if user_agent is not None: - headers = r['headers'] - headers['User-Agent'] = user_agent - host_prefix = r.get('host_prefix') - url = _urljoin(endpoint_url, r['url_path'], host_prefix) - if r['query_string']: - # NOTE: This is to avoid circular import with utils. This is being - # done to avoid moving classes to different modules as to not cause - # breaking chainges. - percent_encode_sequence = botocore.utils.percent_encode_sequence - encoded_query_string = percent_encode_sequence(r['query_string']) - if '?' not in url: - url += f'?{encoded_query_string}' - else: - url += f'&{encoded_query_string}' - r['url'] = url - r['context'] = context - if context is None: - r['context'] = {} - - -def create_request_object(request_dict): - """ - This method takes a request dict and creates an AWSRequest object - from it. - - :type request_dict: dict - :param request_dict: The request dict (created from the - ``prepare_request_dict`` method). - - :rtype: ``botocore.awsrequest.AWSRequest`` - :return: An AWSRequest object based on the request_dict. 
- - """ - r = request_dict - request_object = AWSRequest( - method=r['method'], - url=r['url'], - data=r['body'], - headers=r['headers'], - auth_path=r.get('auth_path'), - ) - request_object.context = r['context'] - return request_object - - -def _urljoin(endpoint_url, url_path, host_prefix): - p = urlsplit(endpoint_url) - # - - # scheme - p[0] - # netloc - p[1] - # path - p[2] - # query - p[3] - # fragment - p[4] - if not url_path or url_path == '/': - # If there's no path component, ensure the URL ends with - # a '/' for backwards compatibility. - if not p[2]: - new_path = '/' - else: - new_path = p[2] - elif p[2].endswith('/') and url_path.startswith('/'): - new_path = p[2][:-1] + url_path - else: - new_path = p[2] + url_path - - new_netloc = p[1] - if host_prefix is not None: - new_netloc = host_prefix + new_netloc - - reconstructed = urlunsplit((p[0], new_netloc, new_path, p[3], p[4])) - return reconstructed - - -class AWSRequestPreparer: - """ - This class performs preparation on AWSRequest objects similar to that of - the PreparedRequest class does in the requests library. However, the logic - has been boiled down to meet the specific use cases in botocore. Of note - there are the following differences: - This class does not heavily prepare the URL. Requests performed many - validations and corrections to ensure the URL is properly formatted. - Botocore either performs these validations elsewhere or otherwise - consistently provides well formatted URLs. - - This class does not heavily prepare the body. Body preperation is - simple and supports only the cases that we document: bytes and - file-like objects to determine the content-length. This will also - additionally prepare a body that is a dict to be url encoded params - string as some signers rely on this. Finally, this class does not - support multipart file uploads. - - This class does not prepare the method, auth or cookies. - """ - - def prepare(self, original): - method = original.method - url = self._prepare_url(original) - body = self._prepare_body(original) - headers = self._prepare_headers(original, body) - stream_output = original.stream_output - - return AWSPreparedRequest(method, url, headers, body, stream_output) - - def _prepare_url(self, original): - url = original.url - if original.params: - url_parts = urlparse(url) - delim = '&' if url_parts.query else '?' 
- if isinstance(original.params, Mapping): - params_to_encode = list(original.params.items()) - else: - params_to_encode = original.params - params = urlencode(params_to_encode, doseq=True) - url = delim.join((url, params)) - return url - - def _prepare_headers(self, original, prepared_body=None): - headers = HeadersDict(original.headers.items()) - - # If the transfer encoding or content length is already set, use that - if 'Transfer-Encoding' in headers or 'Content-Length' in headers: - return headers - - # Ensure we set the content length when it is expected - if original.method not in ('GET', 'HEAD', 'OPTIONS'): - length = self._determine_content_length(prepared_body) - if length is not None: - headers['Content-Length'] = str(length) - else: - # Failed to determine content length, using chunked - # NOTE: This shouldn't ever happen in practice - body_type = type(prepared_body) - logger.debug('Failed to determine length of %s', body_type) - headers['Transfer-Encoding'] = 'chunked' - - return headers - - def _to_utf8(self, item): - key, value = item - if isinstance(key, str): - key = key.encode('utf-8') - if isinstance(value, str): - value = value.encode('utf-8') - return key, value - - def _prepare_body(self, original): - """Prepares the given HTTP body data.""" - body = original.data - if body == b'': - body = None - - if isinstance(body, dict): - params = [self._to_utf8(item) for item in body.items()] - body = urlencode(params, doseq=True) - - return body - - def _determine_content_length(self, body): - return botocore.utils.determine_content_length(body) - - -class AWSRequest: - """Represents the elements of an HTTP request. - - This class was originally inspired by requests.models.Request, but has been - boiled down to meet the specific use cases in botocore. That being said this - class (even in requests) is effectively a named-tuple. - """ - - _REQUEST_PREPARER_CLS = AWSRequestPreparer - - def __init__( - self, - method=None, - url=None, - headers=None, - data=None, - params=None, - auth_path=None, - stream_output=False, - ): - self._request_preparer = self._REQUEST_PREPARER_CLS() - - # Default empty dicts for dict params. - params = {} if params is None else params - - self.method = method - self.url = url - self.headers = HTTPHeaders() - self.data = data - self.params = params - self.auth_path = auth_path - self.stream_output = stream_output - - if headers is not None: - for key, value in headers.items(): - self.headers[key] = value - - # This is a dictionary to hold information that is used when - # processing the request. What is inside of ``context`` is open-ended. - # For example, it may have a timestamp key that is used for holding - # what the timestamp is when signing the request. Note that none - # of the information that is inside of ``context`` is directly - # sent over the wire; the information is only used to assist in - # creating what is sent over the wire. - self.context = {} - - def prepare(self): - """Constructs a :class:`AWSPreparedRequest `.""" - return self._request_preparer.prepare(self) - - @property - def body(self): - body = self.prepare().body - if isinstance(body, str): - body = body.encode('utf-8') - return body - - -class AWSPreparedRequest: - """A data class representing a finalized request to be sent over the wire. - - Requests at this stage should be treated as final, and the properties of - the request should not be modified. - - :ivar method: The HTTP Method - :ivar url: The full url - :ivar headers: The HTTP headers to send. 
- :ivar body: The HTTP body. - :ivar stream_output: If the response for this request should be streamed. - """ - - def __init__(self, method, url, headers, body, stream_output): - self.method = method - self.url = url - self.headers = headers - self.body = body - self.stream_output = stream_output - - def __repr__(self): - fmt = ( - '' - ) - return fmt % (self.stream_output, self.method, self.url, self.headers) - - def reset_stream(self): - """Resets the streaming body to it's initial position. - - If the request contains a streaming body (a streamable file-like object) - seek to the object's initial position to ensure the entire contents of - the object is sent. This is a no-op for static bytes-like body types. - """ - # Trying to reset a stream when there is a no stream will - # just immediately return. It's not an error, it will produce - # the same result as if we had actually reset the stream (we'll send - # the entire body contents again if we need to). - # Same case if the body is a string/bytes/bytearray type. - - non_seekable_types = (bytes, str, bytearray) - if self.body is None or isinstance(self.body, non_seekable_types): - return - try: - logger.debug("Rewinding stream: %s", self.body) - self.body.seek(0) - except Exception as e: - logger.debug("Unable to rewind stream: %s", e) - raise UnseekableStreamError(stream_object=self.body) - - -class AWSResponse: - """A data class representing an HTTP response. - - This class was originally inspired by requests.models.Response, but has - been boiled down to meet the specific use cases in botocore. This has - effectively been reduced to a named tuple. - - :ivar url: The full url. - :ivar status_code: The status code of the HTTP response. - :ivar headers: The HTTP headers received. - :ivar body: The HTTP response body. - """ - - def __init__(self, url, status_code, headers, raw): - self.url = url - self.status_code = status_code - self.headers = HeadersDict(headers) - self.raw = raw - - self._content = None - - @property - def content(self): - """Content of the response as bytes.""" - - if self._content is None: - # Read the contents. - # NOTE: requests would attempt to call stream and fall back - # to a custom generator that would call read in a loop, but - # we don't rely on this behavior - self._content = b''.join(self.raw.stream()) or b'' - - return self._content - - @property - def text(self): - """Content of the response as a proper text type. - - Uses the encoding type provided in the reponse headers to decode the - response content into a proper text type. If the encoding is not - present in the headers, UTF-8 is used as a default. 
- """ - encoding = botocore.utils.get_encoding_from_headers(self.headers) - if encoding: - return self.content.decode(encoding) - else: - return self.content.decode('utf-8') - - -class _HeaderKey: - def __init__(self, key): - self._key = key - self._lower = key.lower() - - def __hash__(self): - return hash(self._lower) - - def __eq__(self, other): - return isinstance(other, _HeaderKey) and self._lower == other._lower - - def __str__(self): - return self._key - - def __repr__(self): - return repr(self._key) - - -class HeadersDict(MutableMapping): - """A case-insenseitive dictionary to represent HTTP headers.""" - - def __init__(self, *args, **kwargs): - self._dict = {} - self.update(*args, **kwargs) - - def __setitem__(self, key, value): - self._dict[_HeaderKey(key)] = value - - def __getitem__(self, key): - return self._dict[_HeaderKey(key)] - - def __delitem__(self, key): - del self._dict[_HeaderKey(key)] - - def __iter__(self): - return (str(key) for key in self._dict) - - def __len__(self): - return len(self._dict) - - def __repr__(self): - return repr(self._dict) - - def copy(self): - return HeadersDict(self.items()) diff --git a/venv/Lib/site-packages/botocore/cacert.pem b/venv/Lib/site-packages/botocore/cacert.pem deleted file mode 100644 index 919478e..0000000 --- a/venv/Lib/site-packages/botocore/cacert.pem +++ /dev/null @@ -1,4361 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: 
ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Label: "Visa eCommerce Root" -# Serial: 25952180776285836048024890241505565794 -# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 -# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 -# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 ------BEGIN CERTIFICATE----- -MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr -MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl 
-cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv -bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw -CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h -dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l -cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h -2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E -lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV -ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq -299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t -vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL -dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF -AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR -zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 -LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd -7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw -++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt -398znM/jra6O1I7mT1GvFpLgXPYHDw== ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: 
de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU -xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk -fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 -7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz 
-PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B 
-4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu -8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ 
-3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
-cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
-MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz -4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
-# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - 
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. 
-# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB -OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG 
-EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# 
Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c 
-ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: 
f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu 
-ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: 
af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY 
[Deleted: remainder of a vendored CA root certificate bundle (Mozilla trust-store / cacert.pem format, as bundled with packages such as certifi). This hunk removes the PEM blocks and their label/serial/fingerprint comments for, among others: USERTrust ECC Certification Authority; GlobalSign ECC Root CA R4 and R5; Staat der Nederlanden Root CA G3 and EV Root CA; IdenTrust Commercial and Public Sector Root CA 1; Entrust Root Certification Authority G2 and EC1; CFCA EV ROOT; TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5; Certinomis Root CA; OISTE WISeKey Global Root GB CA; SZAFIR ROOT CA2; Certum Trusted Network CA 2; Hellenic Academic and Research Institutions RootCA 2015 and ECC RootCA 2015; Certplus Root CA G1 and G2; OpenTrust Root CA G1, G2, and G3; ISRG Root X1; AC RAIZ FNMT-RCM; Amazon Root CA 1 through 4; LuxTrust Global Root 2; TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1; GDCA TrustAUTH R5 ROOT; TrustCor RootCert CA-1, CA-2, and ECA-1.]
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
-+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud -EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
-M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- diff --git a/venv/Lib/site-packages/botocore/client.py b/venv/Lib/site-packages/botocore/client.py deleted file mode 100644 index 87eb7f1..0000000 --- a/venv/Lib/site-packages/botocore/client.py +++ /dev/null @@ -1,1440 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. 
A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import logging - -from botocore import ( - UNSIGNED, # noqa: F401 - waiter, - xform_name, -) -from botocore.args import ClientArgsCreator -from botocore.auth import AUTH_TYPE_MAPS, resolve_auth_type -from botocore.awsrequest import prepare_request_dict -from botocore.compress import maybe_compress_request -from botocore.config import Config -from botocore.context import with_current_context -from botocore.credentials import RefreshableCredentials -from botocore.discovery import ( - EndpointDiscoveryHandler, - EndpointDiscoveryManager, - block_endpoint_discovery_required_operations, -) -from botocore.docs.docstring import ClientMethodDocstring, PaginatorDocstring -from botocore.exceptions import ( - ClientError, # noqa: F401 - DataNotFoundError, - InvalidEndpointDiscoveryConfigurationError, - OperationNotPageableError, - UnknownServiceError, - UnknownSignatureVersionError, -) -from botocore.history import get_global_history_recorder -from botocore.hooks import first_non_none_response -from botocore.httpchecksum import ( - apply_request_checksum, - resolve_checksum_context, -) -from botocore.model import ServiceModel -from botocore.paginate import Paginator -from botocore.retries import adaptive, standard -from botocore.useragent import UserAgentString, register_feature_id -from botocore.utils import ( - CachedProperty, - EventbridgeSignerSetter, - S3ArnParamHandler, # noqa: F401 - S3ControlArnParamHandler, # noqa: F401 - S3ControlArnParamHandlerv2, - S3ControlEndpointSetter, # noqa: F401 - S3EndpointSetter, # noqa: F401 - S3ExpressIdentityResolver, - S3RegionRedirector, # noqa: F401 - S3RegionRedirectorv2, - ensure_boolean, - get_service_module_name, -) - -logger = logging.getLogger(__name__) -history_recorder = get_global_history_recorder() - - -class ClientCreator: - """Creates client objects for a service.""" - - def __init__( - self, - loader, - endpoint_resolver, - user_agent, - event_emitter, - retry_handler_factory, - retry_config_translator, - response_parser_factory=None, - exceptions_factory=None, - config_store=None, - user_agent_creator=None, - auth_token_resolver=None, - ): - self._loader = loader - self._endpoint_resolver = endpoint_resolver - self._user_agent = user_agent - self._event_emitter = event_emitter - self._retry_handler_factory = retry_handler_factory - self._retry_config_translator = retry_config_translator - self._response_parser_factory = response_parser_factory - self._exceptions_factory = exceptions_factory - # TODO: Migrate things away from scoped_config in favor of the - # config_store. The config store can pull things from both the scoped - # config and environment variables (and potentially more in the - # future). 
- self._config_store = config_store - self._user_agent_creator = user_agent_creator - self._auth_token_resolver = auth_token_resolver - - def create_client( - self, - service_name, - region_name, - is_secure=True, - endpoint_url=None, - verify=None, - credentials=None, - scoped_config=None, - api_version=None, - client_config=None, - auth_token=None, - ): - responses = self._event_emitter.emit( - 'choose-service-name', service_name=service_name - ) - service_name = first_non_none_response(responses, default=service_name) - service_model = self._load_service_model(service_name, api_version) - try: - endpoints_ruleset_data = self._load_service_endpoints_ruleset( - service_name, api_version - ) - partition_data = self._loader.load_data('partitions') - except UnknownServiceError: - endpoints_ruleset_data = None - partition_data = None - logger.info( - 'No endpoints ruleset found for service %s, falling back to ' - 'legacy endpoint routing.', - service_name, - ) - - cls = self._create_client_class(service_name, service_model) - region_name, client_config = self._normalize_fips_region( - region_name, client_config - ) - if auth := service_model.metadata.get('auth'): - service_signature_version = resolve_auth_type(auth) - else: - service_signature_version = service_model.metadata.get( - 'signatureVersion' - ) - endpoint_bridge = ClientEndpointBridge( - self._endpoint_resolver, - scoped_config, - client_config, - service_signing_name=service_model.metadata.get('signingName'), - config_store=self._config_store, - service_signature_version=service_signature_version, - ) - if token := self._evaluate_client_specific_token( - service_model.signing_name - ): - auth_token = token - client_args = self._get_client_args( - service_model, - region_name, - is_secure, - endpoint_url, - verify, - credentials, - scoped_config, - client_config, - endpoint_bridge, - auth_token, - endpoints_ruleset_data, - partition_data, - ) - service_client = cls(**client_args) - self._register_retries(service_client) - self._register_s3_events( - client=service_client, - endpoint_bridge=None, - endpoint_url=None, - client_config=client_config, - scoped_config=scoped_config, - ) - self._register_s3express_events(client=service_client) - self._register_s3_control_events(client=service_client) - self._register_importexport_events(client=service_client) - self._register_endpoint_discovery( - service_client, endpoint_url, client_config - ) - return service_client - - def create_client_class(self, service_name, api_version=None): - service_model = self._load_service_model(service_name, api_version) - return self._create_client_class(service_name, service_model) - - def _create_client_class(self, service_name, service_model): - class_attributes = self._create_methods(service_model) - py_name_to_operation_name = self._create_name_mapping(service_model) - class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name - bases = [BaseClient] - service_id = service_model.service_id.hyphenize() - self._event_emitter.emit( - f'creating-client-class.{service_id}', - class_attributes=class_attributes, - base_classes=bases, - ) - class_name = get_service_module_name(service_model) - cls = type(str(class_name), tuple(bases), class_attributes) - return cls - - def _normalize_fips_region(self, region_name, client_config): - if region_name is not None: - normalized_region_name = region_name.replace('fips-', '').replace( - '-fips', '' - ) - # If region has been transformed then set flag - if normalized_region_name != region_name: - 
config_use_fips_endpoint = Config(use_fips_endpoint=True) - if client_config: - # Keeping endpoint setting client specific - client_config = client_config.merge( - config_use_fips_endpoint - ) - else: - client_config = config_use_fips_endpoint - logger.warning( - 'transforming region from %s to %s and setting ' - 'use_fips_endpoint to true. client should not ' - 'be configured with a fips psuedo region.', - region_name, - normalized_region_name, - ) - region_name = normalized_region_name - return region_name, client_config - - def _load_service_model(self, service_name, api_version=None): - json_model = self._loader.load_service_model( - service_name, 'service-2', api_version=api_version - ) - service_model = ServiceModel(json_model, service_name=service_name) - return service_model - - def _load_service_endpoints_ruleset(self, service_name, api_version=None): - return self._loader.load_service_model( - service_name, 'endpoint-rule-set-1', api_version=api_version - ) - - def _register_retries(self, client): - retry_mode = client.meta.config.retries['mode'] - if retry_mode == 'standard': - self._register_v2_standard_retries(client) - elif retry_mode == 'adaptive': - self._register_v2_standard_retries(client) - self._register_v2_adaptive_retries(client) - elif retry_mode == 'legacy': - self._register_legacy_retries(client) - else: - return - register_feature_id(f'RETRY_MODE_{retry_mode.upper()}') - - def _register_v2_standard_retries(self, client): - max_attempts = client.meta.config.retries.get('total_max_attempts') - kwargs = {'client': client} - if max_attempts is not None: - kwargs['max_attempts'] = max_attempts - standard.register_retry_handler(**kwargs) - - def _register_v2_adaptive_retries(self, client): - adaptive.register_retry_handler(client) - - def _register_legacy_retries(self, client): - endpoint_prefix = client.meta.service_model.endpoint_prefix - service_id = client.meta.service_model.service_id - service_event_name = service_id.hyphenize() - - # First, we load the entire retry config for all services, - # then pull out just the information we need. 
- original_config = self._loader.load_data('_retry') - if not original_config: - return - - retries = self._transform_legacy_retries(client.meta.config.retries) - retry_config = self._retry_config_translator.build_retry_config( - endpoint_prefix, - original_config.get('retry', {}), - original_config.get('definitions', {}), - retries, - ) - - logger.debug( - "Registering retry handlers for service: %s", - client.meta.service_model.service_name, - ) - handler = self._retry_handler_factory.create_retry_handler( - retry_config, endpoint_prefix - ) - unique_id = f'retry-config-{service_event_name}' - client.meta.events.register( - f"needs-retry.{service_event_name}", handler, unique_id=unique_id - ) - - def _transform_legacy_retries(self, retries): - if retries is None: - return - copied_args = retries.copy() - if 'total_max_attempts' in retries: - copied_args = retries.copy() - copied_args['max_attempts'] = ( - copied_args.pop('total_max_attempts') - 1 - ) - return copied_args - - def _get_retry_mode(self, client, config_store): - client_retries = client.meta.config.retries - if ( - client_retries is not None - and client_retries.get('mode') is not None - ): - return client_retries['mode'] - return config_store.get_config_variable('retry_mode') or 'legacy' - - def _register_endpoint_discovery(self, client, endpoint_url, config): - if endpoint_url is not None: - # Don't register any handlers in the case of a custom endpoint url - return - # Only attach handlers if the service supports discovery - if client.meta.service_model.endpoint_discovery_operation is None: - return - events = client.meta.events - service_id = client.meta.service_model.service_id.hyphenize() - enabled = False - if config and config.endpoint_discovery_enabled is not None: - enabled = config.endpoint_discovery_enabled - elif self._config_store: - enabled = self._config_store.get_config_variable( - 'endpoint_discovery_enabled' - ) - - enabled = self._normalize_endpoint_discovery_config(enabled) - if enabled and self._requires_endpoint_discovery(client, enabled): - discover = enabled is True - manager = EndpointDiscoveryManager( - client, always_discover=discover - ) - handler = EndpointDiscoveryHandler(manager) - handler.register(events, service_id) - else: - events.register( - 'before-parameter-build', - block_endpoint_discovery_required_operations, - ) - - def _normalize_endpoint_discovery_config(self, enabled): - """Config must either be a boolean-string or string-literal 'auto'""" - if isinstance(enabled, str): - enabled = enabled.lower().strip() - if enabled == 'auto': - return enabled - elif enabled in ('true', 'false'): - return ensure_boolean(enabled) - elif isinstance(enabled, bool): - return enabled - - raise InvalidEndpointDiscoveryConfigurationError(config_value=enabled) - - def _requires_endpoint_discovery(self, client, enabled): - if enabled == "auto": - return client.meta.service_model.endpoint_discovery_required - return enabled - - def _register_eventbridge_events( - self, client, endpoint_bridge, endpoint_url - ): - if client.meta.service_model.service_name != 'events': - return - EventbridgeSignerSetter( - endpoint_resolver=self._endpoint_resolver, - region=client.meta.region_name, - endpoint_url=endpoint_url, - ).register(client.meta.events) - - def _register_s3express_events( - self, - client, - endpoint_bridge=None, - endpoint_url=None, - client_config=None, - scoped_config=None, - ): - if client.meta.service_model.service_name != 's3': - return - S3ExpressIdentityResolver(client, 
RefreshableCredentials).register() - - def _register_s3_events( - self, - client, - endpoint_bridge, - endpoint_url, - client_config, - scoped_config, - ): - if client.meta.service_model.service_name != 's3': - return - S3RegionRedirectorv2(None, client).register() - self._set_s3_presign_signature_version( - client.meta, client_config, scoped_config - ) - client.meta.events.register( - 'before-parameter-build.s3', self._inject_s3_input_parameters - ) - - def _register_s3_control_events( - self, - client, - endpoint_bridge=None, - endpoint_url=None, - client_config=None, - scoped_config=None, - ): - if client.meta.service_model.service_name != 's3control': - return - S3ControlArnParamHandlerv2().register(client.meta.events) - - def _set_s3_presign_signature_version( - self, client_meta, client_config, scoped_config - ): - # This will return the manually configured signature version, or None - # if none was manually set. If a customer manually sets the signature - # version, we always want to use what they set. - provided_signature_version = _get_configured_signature_version( - 's3', client_config, scoped_config - ) - if provided_signature_version is not None: - return - - # Check to see if the region is a region that we know about. If we - # don't know about a region, then we can safely assume it's a new - # region that is sigv4 only, since all new S3 regions only allow sigv4. - # The only exception is aws-global. This is a pseudo-region for the - # global endpoint, we should respect the signature versions it - # supports, which includes v2. - regions = self._endpoint_resolver.get_available_endpoints( - 's3', client_meta.partition - ) - if ( - client_meta.region_name != 'aws-global' - and client_meta.region_name not in regions - ): - return - - # If it is a region we know about, we want to default to sigv2, so here - # we check to see if it is available. - endpoint = self._endpoint_resolver.construct_endpoint( - 's3', client_meta.region_name - ) - signature_versions = endpoint['signatureVersions'] - if 's3' not in signature_versions: - return - - # We now know that we're in a known region that supports sigv2 and - # the customer hasn't set a signature version so we default the - # signature version to sigv2. - client_meta.events.register( - 'choose-signer.s3', self._default_s3_presign_to_sigv2 - ) - - def _inject_s3_input_parameters(self, params, context, **kwargs): - context['input_params'] = {} - inject_parameters = ('Bucket', 'Delete', 'Key', 'Prefix') - for inject_parameter in inject_parameters: - if inject_parameter in params: - context['input_params'][inject_parameter] = params[ - inject_parameter - ] - - def _default_s3_presign_to_sigv2(self, signature_version, **kwargs): - """ - Returns the 's3' (sigv2) signer if presigning an s3 request. This is - intended to be used to set the default signature version for the signer - to sigv2. Situations where an asymmetric signature is required are the - exception, for example MRAP needs v4a. - - :type signature_version: str - :param signature_version: The current client signature version. - - :type signing_name: str - :param signing_name: The signing name of the service. 
- - :return: 's3' if the request is an s3 presign request, None otherwise - """ - if signature_version.startswith('v4a'): - return - - if signature_version.startswith('v4-s3express'): - return signature_version - - for suffix in ['-query', '-presign-post']: - if signature_version.endswith(suffix): - return f's3{suffix}' - - def _register_importexport_events( - self, - client, - endpoint_bridge=None, - endpoint_url=None, - client_config=None, - scoped_config=None, - ): - if client.meta.service_model.service_name != 'importexport': - return - self._set_importexport_signature_version( - client.meta, client_config, scoped_config - ) - - def _set_importexport_signature_version( - self, client_meta, client_config, scoped_config - ): - # This will return the manually configured signature version, or None - # if none was manually set. If a customer manually sets the signature - # version, we always want to use what they set. - configured_signature_version = _get_configured_signature_version( - 'importexport', client_config, scoped_config - ) - if configured_signature_version is not None: - return - - # importexport has a modeled signatureVersion of v2, but we - # previously switched to v4 via endpoint.json before endpoint rulesets. - # Override the model's signatureVersion for backwards compatability. - client_meta.events.register( - 'choose-signer.importexport', self._default_signer_to_sigv4 - ) - - def _default_signer_to_sigv4(self, signature_version, **kwargs): - return 'v4' - - def _get_client_args( - self, - service_model, - region_name, - is_secure, - endpoint_url, - verify, - credentials, - scoped_config, - client_config, - endpoint_bridge, - auth_token, - endpoints_ruleset_data, - partition_data, - ): - args_creator = ClientArgsCreator( - self._event_emitter, - self._user_agent, - self._response_parser_factory, - self._loader, - self._exceptions_factory, - config_store=self._config_store, - user_agent_creator=self._user_agent_creator, - ) - return args_creator.get_client_args( - service_model, - region_name, - is_secure, - endpoint_url, - verify, - credentials, - scoped_config, - client_config, - endpoint_bridge, - auth_token, - endpoints_ruleset_data, - partition_data, - ) - - def _create_methods(self, service_model): - op_dict = {} - for operation_name in service_model.operation_names: - py_operation_name = xform_name(operation_name) - op_dict[py_operation_name] = self._create_api_method( - py_operation_name, operation_name, service_model - ) - return op_dict - - def _create_name_mapping(self, service_model): - # py_name -> OperationName, for every operation available - # for a service. - mapping = {} - for operation_name in service_model.operation_names: - py_operation_name = xform_name(operation_name) - mapping[py_operation_name] = operation_name - return mapping - - def _create_api_method( - self, py_operation_name, operation_name, service_model - ): - def _api_call(self, *args, **kwargs): - # We're accepting *args so that we can give a more helpful - # error message than TypeError: _api_call takes exactly - # 1 argument. - if args: - raise TypeError( - f"{py_operation_name}() only accepts keyword arguments." - ) - # The "self" in this scope is referring to the BaseClient. 
- return self._make_api_call(operation_name, kwargs) - - _api_call.__name__ = str(py_operation_name) - - # Add the docstring to the client method - operation_model = service_model.operation_model(operation_name) - docstring = ClientMethodDocstring( - operation_model=operation_model, - method_name=operation_name, - event_emitter=self._event_emitter, - method_description=operation_model.documentation, - example_prefix=f'response = client.{py_operation_name}', - include_signature=False, - ) - _api_call.__doc__ = docstring - return _api_call - - def _evaluate_client_specific_token(self, signing_name): - # Resolves an auth_token for the given signing_name. - # Returns None if no resolver is set or if resolution fails. - resolver = self._auth_token_resolver - if not resolver or not signing_name: - return None - - return resolver(signing_name=signing_name) - - -class ClientEndpointBridge: - """Bridges endpoint data and client creation - - This class handles taking out the relevant arguments from the endpoint - resolver and determining which values to use, taking into account any - client configuration options and scope configuration options. - - This class also handles determining what, if any, region to use if no - explicit region setting is provided. For example, Amazon S3 client will - utilize "us-east-1" by default if no region can be resolved.""" - - DEFAULT_ENDPOINT = '{service}.{region}.amazonaws.com' - _DUALSTACK_CUSTOMIZED_SERVICES = ['s3', 's3-control'] - - def __init__( - self, - endpoint_resolver, - scoped_config=None, - client_config=None, - default_endpoint=None, - service_signing_name=None, - config_store=None, - service_signature_version=None, - ): - self.service_signing_name = service_signing_name - self.endpoint_resolver = endpoint_resolver - self.scoped_config = scoped_config - self.client_config = client_config - self.default_endpoint = default_endpoint or self.DEFAULT_ENDPOINT - self.config_store = config_store - self.service_signature_version = service_signature_version - - def resolve( - self, service_name, region_name=None, endpoint_url=None, is_secure=True - ): - region_name = self._check_default_region(service_name, region_name) - use_dualstack_endpoint = self._resolve_use_dualstack_endpoint( - service_name - ) - use_fips_endpoint = self._resolve_endpoint_variant_config_var( - 'use_fips_endpoint' - ) - resolved = self.endpoint_resolver.construct_endpoint( - service_name, - region_name, - use_dualstack_endpoint=use_dualstack_endpoint, - use_fips_endpoint=use_fips_endpoint, - ) - - # If we can't resolve the region, we'll attempt to get a global - # endpoint for non-regionalized services (iam, route53, etc) - if not resolved: - # TODO: fallback partition_name should be configurable in the - # future for users to define as needed. - resolved = self.endpoint_resolver.construct_endpoint( - service_name, - region_name, - partition_name='aws', - use_dualstack_endpoint=use_dualstack_endpoint, - use_fips_endpoint=use_fips_endpoint, - ) - - if resolved: - return self._create_endpoint( - resolved, service_name, region_name, endpoint_url, is_secure - ) - else: - return self._assume_endpoint( - service_name, region_name, endpoint_url, is_secure - ) - - def resolver_uses_builtin_data(self): - return self.endpoint_resolver.uses_builtin_data - - def _check_default_region(self, service_name, region_name): - if region_name is not None: - return region_name - # Use the client_config region if no explicit region was provided. 
- if self.client_config and self.client_config.region_name is not None: - return self.client_config.region_name - - def _create_endpoint( - self, resolved, service_name, region_name, endpoint_url, is_secure - ): - region_name, signing_region = self._pick_region_values( - resolved, region_name, endpoint_url - ) - if endpoint_url is None: - endpoint_url = self._make_url( - resolved.get('hostname'), - is_secure, - resolved.get('protocols', []), - ) - signature_version = self._resolve_signature_version( - service_name, resolved - ) - signing_name = self._resolve_signing_name(service_name, resolved) - return self._create_result( - service_name=service_name, - region_name=region_name, - signing_region=signing_region, - signing_name=signing_name, - endpoint_url=endpoint_url, - metadata=resolved, - signature_version=signature_version, - ) - - def _resolve_endpoint_variant_config_var(self, config_var): - client_config = self.client_config - config_val = False - - # Client configuration arg has precedence - if client_config and getattr(client_config, config_var) is not None: - return getattr(client_config, config_var) - elif self.config_store is not None: - # Check config store - config_val = self.config_store.get_config_variable(config_var) - return config_val - - def _resolve_use_dualstack_endpoint(self, service_name): - s3_dualstack_mode = self._is_s3_dualstack_mode(service_name) - if s3_dualstack_mode is not None: - return s3_dualstack_mode - return self._resolve_endpoint_variant_config_var( - 'use_dualstack_endpoint' - ) - - def _is_s3_dualstack_mode(self, service_name): - if service_name not in self._DUALSTACK_CUSTOMIZED_SERVICES: - return None - # TODO: This normalization logic is duplicated from the - # ClientArgsCreator class. Consolidate everything to - # ClientArgsCreator. _resolve_signature_version also has similarly - # duplicated logic. - client_config = self.client_config - if ( - client_config is not None - and client_config.s3 is not None - and 'use_dualstack_endpoint' in client_config.s3 - ): - # Client config trumps scoped config. - return client_config.s3['use_dualstack_endpoint'] - if self.scoped_config is not None: - enabled = self.scoped_config.get('s3', {}).get( - 'use_dualstack_endpoint' - ) - if enabled in [True, 'True', 'true']: - return True - - def _assume_endpoint( - self, service_name, region_name, endpoint_url, is_secure - ): - if endpoint_url is None: - # Expand the default hostname URI template. - hostname = self.default_endpoint.format( - service=service_name, region=region_name - ) - endpoint_url = self._make_url( - hostname, is_secure, ['http', 'https'] - ) - logger.debug( - 'Assuming an endpoint for %s, %s: %s', - service_name, - region_name, - endpoint_url, - ) - # We still want to allow the user to provide an explicit version. 
- signature_version = self._resolve_signature_version( - service_name, {'signatureVersions': ['v4']} - ) - signing_name = self._resolve_signing_name(service_name, resolved={}) - return self._create_result( - service_name=service_name, - region_name=region_name, - signing_region=region_name, - signing_name=signing_name, - signature_version=signature_version, - endpoint_url=endpoint_url, - metadata={}, - ) - - def _create_result( - self, - service_name, - region_name, - signing_region, - signing_name, - endpoint_url, - signature_version, - metadata, - ): - return { - 'service_name': service_name, - 'region_name': region_name, - 'signing_region': signing_region, - 'signing_name': signing_name, - 'endpoint_url': endpoint_url, - 'signature_version': signature_version, - 'metadata': metadata, - } - - def _make_url(self, hostname, is_secure, supported_protocols): - if is_secure and 'https' in supported_protocols: - scheme = 'https' - else: - scheme = 'http' - return f'{scheme}://{hostname}' - - def _resolve_signing_name(self, service_name, resolved): - # CredentialScope overrides everything else. - if ( - 'credentialScope' in resolved - and 'service' in resolved['credentialScope'] - ): - return resolved['credentialScope']['service'] - # Use the signingName from the model if present. - if self.service_signing_name: - return self.service_signing_name - # Just assume is the same as the service name. - return service_name - - def _pick_region_values(self, resolved, region_name, endpoint_url): - signing_region = region_name - if endpoint_url is None: - # Do not use the region name or signing name from the resolved - # endpoint if the user explicitly provides an endpoint_url. This - # would happen if we resolve to an endpoint where the service has - # a "defaults" section that overrides all endpoint with a single - # hostname and credentialScope. This has been the case historically - # for how STS has worked. The only way to resolve an STS endpoint - # was to provide a region_name and an endpoint_url. In that case, - # we would still resolve an endpoint, but we would not use the - # resolved endpointName or signingRegion because we want to allow - # custom endpoints. - region_name = resolved['endpointName'] - signing_region = region_name - if ( - 'credentialScope' in resolved - and 'region' in resolved['credentialScope'] - ): - signing_region = resolved['credentialScope']['region'] - return region_name, signing_region - - def _resolve_signature_version(self, service_name, resolved): - configured_version = _get_configured_signature_version( - service_name, self.client_config, self.scoped_config - ) - if configured_version is not None: - return configured_version - - # These have since added the "auth" key to the service model - # with "aws.auth#sigv4", but preserve existing behavior from - # when we preferred endpoints.json over the service models - if service_name in ('s3', 's3-control'): - return 's3v4' - - if self.service_signature_version is not None: - # Prefer the service model - potential_versions = [self.service_signature_version] - else: - # Fall back to endpoints.json to preserve existing behavior, which - # may be useful for users who have custom service models - potential_versions = resolved.get('signatureVersions', []) - # This was added for the V2 -> V4 transition, - # for services that added V4 after V2 in endpoints.json - if 'v4' in potential_versions: - return 'v4' - # Now just iterate over the signature versions in order until we - # find the first one that is known to Botocore. 
- for known in potential_versions: - if known in AUTH_TYPE_MAPS: - return known - - raise UnknownSignatureVersionError( - signature_version=potential_versions - ) - - -class BaseClient: - # This is actually reassigned with the py->op_name mapping - # when the client creator creates the subclass. This value is used - # because calls such as client.get_paginator('list_objects') use the - # snake_case name, but we need to know the ListObjects form. - # xform_name() does the ListObjects->list_objects conversion, but - # we need the reverse mapping here. - _PY_TO_OP_NAME = {} - - def __init__( - self, - serializer, - endpoint, - response_parser, - event_emitter, - request_signer, - service_model, - loader, - client_config, - partition, - exceptions_factory, - endpoint_ruleset_resolver=None, - user_agent_creator=None, - ): - self._serializer = serializer - self._endpoint = endpoint - self._ruleset_resolver = endpoint_ruleset_resolver - self._response_parser = response_parser - self._request_signer = request_signer - self._cache = {} - self._loader = loader - self._client_config = client_config - self.meta = ClientMeta( - event_emitter, - self._client_config, - endpoint.host, - service_model, - self._PY_TO_OP_NAME, - partition, - ) - self._exceptions_factory = exceptions_factory - self._exceptions = None - self._user_agent_creator = user_agent_creator - if self._user_agent_creator is None: - self._user_agent_creator = ( - UserAgentString.from_environment().with_client_config( - self._client_config - ) - ) - self._register_handlers() - - def __getattr__(self, item): - service_id = self._service_model.service_id.hyphenize() - event_name = f'getattr.{service_id}.{item}' - - handler, event_response = self.meta.events.emit_until_response( - event_name, client=self - ) - - if event_response is not None: - return event_response - - raise AttributeError( - f"'{self.__class__.__name__}' object has no attribute '{item}'" - ) - - def close(self): - """Closes underlying endpoint connections.""" - self._endpoint.close() - - def _register_handlers(self): - # Register the handler required to sign requests. - service_id = self.meta.service_model.service_id.hyphenize() - self.meta.events.register( - f"request-created.{service_id}", self._request_signer.handler - ) - # Rebuild user agent string right before request is sent - # to ensure all registered features are included. 
- self.meta.events.register_last( - f"request-created.{service_id}", - self._user_agent_creator.rebuild_and_replace_user_agent_handler, - ) - - @property - def _service_model(self): - return self.meta.service_model - - @with_current_context() - def _make_api_call(self, operation_name, api_params): - operation_model = self._service_model.operation_model(operation_name) - service_name = self._service_model.service_name - history_recorder.record( - 'API_CALL', - { - 'service': service_name, - 'operation': operation_name, - 'params': api_params, - }, - ) - if operation_model.deprecated: - logger.debug( - 'Warning: %s.%s() is deprecated', service_name, operation_name - ) - request_context = { - 'client_region': self.meta.region_name, - 'client_config': self.meta.config, - 'has_streaming_input': operation_model.has_streaming_input, - 'auth_type': operation_model.resolved_auth_type, - 'unsigned_payload': operation_model.unsigned_payload, - 'auth_options': self._service_model.metadata.get('auth'), - } - - api_params = self._emit_api_params( - api_params=api_params, - operation_model=operation_model, - context=request_context, - ) - ( - endpoint_url, - additional_headers, - properties, - ) = self._resolve_endpoint_ruleset( - operation_model, api_params, request_context - ) - if properties: - # Pass arbitrary endpoint info with the Request - # for use during construction. - request_context['endpoint_properties'] = properties - request_dict = self._convert_to_request_dict( - api_params=api_params, - operation_model=operation_model, - endpoint_url=endpoint_url, - context=request_context, - headers=additional_headers, - ) - resolve_checksum_context(request_dict, operation_model, api_params) - - service_id = self._service_model.service_id.hyphenize() - handler, event_response = self.meta.events.emit_until_response( - f'before-call.{service_id}.{operation_name}', - model=operation_model, - params=request_dict, - request_signer=self._request_signer, - context=request_context, - ) - - if event_response is not None: - http, parsed_response = event_response - else: - maybe_compress_request( - self.meta.config, request_dict, operation_model - ) - apply_request_checksum(request_dict) - http, parsed_response = self._make_request( - operation_model, request_dict, request_context - ) - - self.meta.events.emit( - f'after-call.{service_id}.{operation_name}', - http_response=http, - parsed=parsed_response, - model=operation_model, - context=request_context, - ) - - if http.status_code >= 300: - error_info = parsed_response.get("Error", {}) - error_code = request_context.get( - 'error_code_override' - ) or error_info.get("Code") - error_class = self.exceptions.from_code(error_code) - raise error_class(parsed_response, operation_name) - else: - return parsed_response - - def _make_request(self, operation_model, request_dict, request_context): - try: - return self._endpoint.make_request(operation_model, request_dict) - except Exception as e: - self.meta.events.emit( - f'after-call-error.{self._service_model.service_id.hyphenize()}.{operation_model.name}', - exception=e, - context=request_context, - ) - raise - - def _convert_to_request_dict( - self, - api_params, - operation_model, - endpoint_url, - context=None, - headers=None, - set_user_agent_header=True, - ): - request_dict = self._serializer.serialize_to_request( - api_params, operation_model - ) - if not self._client_config.inject_host_prefix: - request_dict.pop('host_prefix', None) - if headers is not None: - request_dict['headers'].update(headers) - if 
set_user_agent_header: - user_agent = self._user_agent_creator.to_string() - else: - user_agent = None - prepare_request_dict( - request_dict, - endpoint_url=endpoint_url, - user_agent=user_agent, - context=context, - ) - return request_dict - - def _emit_api_params(self, api_params, operation_model, context): - # Given the API params provided by the user and the operation_model - # we can serialize the request to a request_dict. - operation_name = operation_model.name - - # Emit an event that allows users to modify the parameters at the - # beginning of the method. It allows handlers to modify existing - # parameters or return a new set of parameters to use. - service_id = self._service_model.service_id.hyphenize() - responses = self.meta.events.emit( - f'provide-client-params.{service_id}.{operation_name}', - params=api_params, - model=operation_model, - context=context, - ) - api_params = first_non_none_response(responses, default=api_params) - - self.meta.events.emit( - f'before-parameter-build.{service_id}.{operation_name}', - params=api_params, - model=operation_model, - context=context, - ) - return api_params - - def _resolve_endpoint_ruleset( - self, - operation_model, - params, - request_context, - ignore_signing_region=False, - ): - """Returns endpoint URL and list of additional headers returned from - EndpointRulesetResolver for the given operation and params. If the - ruleset resolver is not available, for example because the service has - no endpoints ruleset file, the legacy endpoint resolver's value is - returned. - - Use ignore_signing_region for generating presigned URLs or any other - situation where the signing region information from the ruleset - resolver should be ignored. - - Returns tuple of URL and headers dictionary. Additionally, the - request_context dict is modified in place with any signing information - returned from the ruleset resolver. - """ - if self._ruleset_resolver is None: - endpoint_url = self.meta.endpoint_url - additional_headers = {} - endpoint_properties = {} - else: - endpoint_info = self._ruleset_resolver.construct_endpoint( - operation_model=operation_model, - call_args=params, - request_context=request_context, - ) - endpoint_url = endpoint_info.url - additional_headers = endpoint_info.headers - endpoint_properties = endpoint_info.properties - # If authSchemes is present, overwrite default auth type and - # signing context derived from service model. - auth_schemes = endpoint_info.properties.get('authSchemes') - if auth_schemes is not None: - auth_info = self._ruleset_resolver.auth_schemes_to_signing_ctx( - auth_schemes - ) - auth_type, signing_context = auth_info - request_context['auth_type'] = auth_type - if 'region' in signing_context and ignore_signing_region: - del signing_context['region'] - if 'signing' in request_context: - request_context['signing'].update(signing_context) - else: - request_context['signing'] = signing_context - - return endpoint_url, additional_headers, endpoint_properties - - def get_paginator(self, operation_name): - """Create a paginator for an operation. - - :type operation_name: string - :param operation_name: The operation name. This is the same name - as the method name on the client. For example, if the - method name is ``create_foo``, and you'd normally invoke the - operation as ``client.create_foo(**kwargs)``, if the - ``create_foo`` operation can be paginated, you can use the - call ``client.get_paginator("create_foo")``. - - :raise OperationNotPageableError: Raised if the operation is not - pageable. 
You can use the ``client.can_paginate`` method to - check if an operation is pageable. - - :rtype: ``botocore.paginate.Paginator`` - :return: A paginator object. - - """ - if not self.can_paginate(operation_name): - raise OperationNotPageableError(operation_name=operation_name) - else: - actual_operation_name = self._PY_TO_OP_NAME[operation_name] - - # Create a new paginate method that will serve as a proxy to - # the underlying Paginator.paginate method. This is needed to - # attach a docstring to the method. - def paginate(self, **kwargs): - return Paginator.paginate(self, **kwargs) - - paginator_config = self._cache['page_config'][ - actual_operation_name - ] - # Add the docstring for the paginate method. - paginate.__doc__ = PaginatorDocstring( - paginator_name=actual_operation_name, - event_emitter=self.meta.events, - service_model=self.meta.service_model, - paginator_config=paginator_config, - include_signature=False, - ) - - # Rename the paginator class based on the type of paginator. - service_module_name = get_service_module_name( - self.meta.service_model - ) - paginator_class_name = ( - f"{service_module_name}.Paginator.{actual_operation_name}" - ) - - # Create the new paginator class - documented_paginator_cls = type( - paginator_class_name, (Paginator,), {'paginate': paginate} - ) - - operation_model = self._service_model.operation_model( - actual_operation_name - ) - paginator = documented_paginator_cls( - getattr(self, operation_name), - paginator_config, - operation_model, - ) - return paginator - - def can_paginate(self, operation_name): - """Check if an operation can be paginated. - - :type operation_name: string - :param operation_name: The operation name. This is the same name - as the method name on the client. For example, if the - method name is ``create_foo``, and you'd normally invoke the - operation as ``client.create_foo(**kwargs)``, if the - ``create_foo`` operation can be paginated, you can use the - call ``client.get_paginator("create_foo")``. - - :return: ``True`` if the operation can be paginated, - ``False`` otherwise. - - """ - if 'page_config' not in self._cache: - try: - page_config = self._loader.load_service_model( - self._service_model.service_name, - 'paginators-1', - self._service_model.api_version, - )['pagination'] - self._cache['page_config'] = page_config - except DataNotFoundError: - self._cache['page_config'] = {} - actual_operation_name = self._PY_TO_OP_NAME[operation_name] - return actual_operation_name in self._cache['page_config'] - - def _get_waiter_config(self): - if 'waiter_config' not in self._cache: - try: - waiter_config = self._loader.load_service_model( - self._service_model.service_name, - 'waiters-2', - self._service_model.api_version, - ) - self._cache['waiter_config'] = waiter_config - except DataNotFoundError: - self._cache['waiter_config'] = {} - return self._cache['waiter_config'] - - def get_waiter(self, waiter_name): - """Returns an object that can wait for some condition. - - :type waiter_name: str - :param waiter_name: The name of the waiter to get. See the waiters - section of the service docs for a list of available waiters. - - :returns: The specified waiter object. 
- :rtype: ``botocore.waiter.Waiter`` - """ - config = self._get_waiter_config() - if not config: - raise ValueError(f"Waiter does not exist: {waiter_name}") - model = waiter.WaiterModel(config) - mapping = {} - for name in model.waiter_names: - mapping[xform_name(name)] = name - if waiter_name not in mapping: - raise ValueError(f"Waiter does not exist: {waiter_name}") - - return waiter.create_waiter_with_client( - mapping[waiter_name], model, self - ) - - @CachedProperty - def waiter_names(self): - """Returns a list of all available waiters.""" - config = self._get_waiter_config() - if not config: - return [] - model = waiter.WaiterModel(config) - # Waiter configs is a dict, we just want the waiter names - # which are the keys in the dict. - return [xform_name(name) for name in model.waiter_names] - - @property - def exceptions(self): - if self._exceptions is None: - self._exceptions = self._load_exceptions() - return self._exceptions - - def _load_exceptions(self): - return self._exceptions_factory.create_client_exceptions( - self._service_model - ) - - def _get_credentials(self): - """ - This private interface is subject to abrupt breaking changes, including - removal, in any botocore release. - """ - return self._request_signer._credentials - - -class ClientMeta: - """Holds additional client methods. - - This class holds additional information for clients. It exists for - two reasons: - - * To give advanced functionality to clients - * To namespace additional client attributes from the operation - names which are mapped to methods at runtime. This avoids - ever running into collisions with operation names. - - """ - - def __init__( - self, - events, - client_config, - endpoint_url, - service_model, - method_to_api_mapping, - partition, - ): - self.events = events - self._client_config = client_config - self._endpoint_url = endpoint_url - self._service_model = service_model - self._method_to_api_mapping = method_to_api_mapping - self._partition = partition - - @property - def service_model(self): - return self._service_model - - @property - def region_name(self): - return self._client_config.region_name - - @property - def endpoint_url(self): - return self._endpoint_url - - @property - def config(self): - return self._client_config - - @property - def method_to_api_mapping(self): - return self._method_to_api_mapping - - @property - def partition(self): - return self._partition - - -def _get_configured_signature_version( - service_name, client_config, scoped_config -): - """ - Gets the manually configured signature version. - - :returns: the customer configured signature version, or None if no - signature version was configured. - """ - # Client config overrides everything. - if client_config and client_config.signature_version is not None: - return client_config.signature_version - - # Scoped config overrides picking from the endpoint metadata. - if scoped_config is not None: - # A given service may have service specific configuration in the - # config file, so we need to check there as well. 
- service_config = scoped_config.get(service_name) - if service_config is not None and isinstance(service_config, dict): - version = service_config.get('signature_version') - if version: - logger.debug( - "Switching signature version for service %s " - "to version %s based on config file override.", - service_name, - version, - ) - return version - return None diff --git a/venv/Lib/site-packages/botocore/compat.py b/venv/Lib/site-packages/botocore/compat.py deleted file mode 100644 index 62a265c..0000000 --- a/venv/Lib/site-packages/botocore/compat.py +++ /dev/null @@ -1,371 +0,0 @@ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import copy -import datetime -import sys -import inspect -import warnings -import hashlib -from http.client import HTTPMessage -import logging -import shlex -import re -import os -from collections import OrderedDict -from collections.abc import MutableMapping -from math import floor - -from botocore.vendored import six -from botocore.exceptions import MD5UnavailableError -from dateutil.tz import tzlocal -from urllib3 import exceptions - -logger = logging.getLogger(__name__) - - -class HTTPHeaders(HTTPMessage): - pass - -from urllib.parse import ( - quote, - urlencode, - unquote, - unquote_plus, - urlparse, - urlsplit, - urlunsplit, - urljoin, - parse_qsl, - parse_qs, -) -from http.client import HTTPResponse -from io import IOBase as _IOBase -from base64 import encodebytes -from email.utils import formatdate -from itertools import zip_longest -file_type = _IOBase -zip = zip - -# In python3, unquote takes a str() object, url decodes it, -# then takes the bytestring and decodes it to utf-8. -unquote_str = unquote_plus - -def set_socket_timeout(http_response, timeout): - """Set the timeout of the socket from an HTTPResponse. - - :param http_response: An instance of ``httplib.HTTPResponse`` - - """ - http_response._fp.fp.raw._sock.settimeout(timeout) - -def accepts_kwargs(func): - return inspect.getfullargspec(func)[2] - -def ensure_unicode(s, encoding=None, errors=None): - # NOOP in Python 3, because every string is already unicode - return s - -def ensure_bytes(s, encoding='utf-8', errors='strict'): - if isinstance(s, str): - return s.encode(encoding, errors) - if isinstance(s, bytes): - return s - raise ValueError(f"Expected str or bytes, received {type(s)}.") - - -import xml.etree.ElementTree as ETree -XMLParseError = ETree.ParseError - -import json - - -def filter_ssl_warnings(): - # Ignore warnings related to SNI as it is not being used in validations. 
- warnings.filterwarnings( - 'ignore', - message="A true SSLContext object is not available.*", - category=exceptions.InsecurePlatformWarning, - module=r".*urllib3\.util\.ssl_", - ) - - -@classmethod -def from_dict(cls, d): - new_instance = cls() - for key, value in d.items(): - new_instance[key] = value - return new_instance - - -@classmethod -def from_pairs(cls, pairs): - new_instance = cls() - for key, value in pairs: - new_instance[key] = value - return new_instance - - -HTTPHeaders.from_dict = from_dict -HTTPHeaders.from_pairs = from_pairs - - -def copy_kwargs(kwargs): - """ - This used to be a compat shim for 2.6 but is now just an alias. - """ - copy_kwargs = copy.copy(kwargs) - return copy_kwargs - - -def total_seconds(delta): - """ - Returns the total seconds in a ``datetime.timedelta``. - - This used to be a compat shim for 2.6 but is now just an alias. - - :param delta: The timedelta object - :type delta: ``datetime.timedelta`` - """ - return delta.total_seconds() - - -# Checks to see if md5 is available on this system. A given system might not -# have access to it for various reasons, such as FIPS mode being enabled. -try: - hashlib.md5(usedforsecurity=False) - MD5_AVAILABLE = True -except (AttributeError, ValueError): - MD5_AVAILABLE = False - - -def get_md5(*args, **kwargs): - """ - Attempts to get an md5 hashing object. - - :param args: Args to pass to the MD5 constructor - :param kwargs: Key word arguments to pass to the MD5 constructor - :return: An MD5 hashing object if available. If it is unavailable, None - is returned if raise_error_if_unavailable is set to False. - """ - if MD5_AVAILABLE: - return hashlib.md5(*args, **kwargs) - else: - raise MD5UnavailableError() - - -def compat_shell_split(s, platform=None): - if platform is None: - platform = sys.platform - - if platform == "win32": - return _windows_shell_split(s) - else: - return shlex.split(s) - - -def _windows_shell_split(s): - """Splits up a windows command as the built-in command parser would. - - Windows has potentially bizarre rules depending on where you look. When - spawning a process via the Windows C runtime (which is what python does - when you call popen) the rules are as follows: - - https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments - - To summarize: - - * Only space and tab are valid delimiters - * Double quotes are the only valid quotes - * Backslash is interpreted literally unless it is part of a chain that - leads up to a double quote. Then the backslashes escape the backslashes, - and if there is an odd number the final backslash escapes the quote. - - :param s: The command string to split up into parts. - :return: A list of command components. - """ - if not s: - return [] - - components = [] - buff = [] - is_quoted = False - num_backslashes = 0 - for character in s: - if character == '\\': - # We can't simply append backslashes because we don't know if - # they are being used as escape characters or not. Instead we - # keep track of how many we've encountered and handle them when - # we encounter a different character. - num_backslashes += 1 - elif character == '"': - if num_backslashes > 0: - # The backslashes are in a chain leading up to a double - # quote, so they are escaping each other. - buff.append('\\' * int(floor(num_backslashes / 2))) - remainder = num_backslashes % 2 - num_backslashes = 0 - if remainder == 1: - # The number of backslashes is uneven, so they are also - # escaping the double quote, so it needs to be added to - # the current component buffer. 
- buff.append('"') - continue - - # We've encountered a double quote that is not escaped, - # so we toggle is_quoted. - is_quoted = not is_quoted - - # If there are quotes, then we may want an empty string. To be - # safe, we add an empty string to the buffer so that we make - # sure it sticks around if there's nothing else between quotes. - # If there is other stuff between quotes, the empty string will - # disappear during the joining process. - buff.append('') - elif character in [' ', '\t'] and not is_quoted: - # Since the backslashes aren't leading up to a quote, we put in - # the exact number of backslashes. - if num_backslashes > 0: - buff.append('\\' * num_backslashes) - num_backslashes = 0 - - # Excess whitespace is ignored, so only add the components list - # if there is anything in the buffer. - if buff: - components.append(''.join(buff)) - buff = [] - else: - # Since the backslashes aren't leading up to a quote, we put in - # the exact number of backslashes. - if num_backslashes > 0: - buff.append('\\' * num_backslashes) - num_backslashes = 0 - buff.append(character) - - # Quotes must be terminated. - if is_quoted: - raise ValueError(f"No closing quotation in string: {s}") - - # There may be some leftover backslashes, so we need to add them in. - # There's no quote so we add the exact number. - if num_backslashes > 0: - buff.append('\\' * num_backslashes) - - # Add the final component in if there is anything in the buffer. - if buff: - components.append(''.join(buff)) - - return components - - -def get_tzinfo_options(): - # Due to dateutil/dateutil#197, Windows may fail to parse times in the past - # with the system clock. We can alternatively fallback to tzwininfo when - # this happens, which will get time info from the Windows registry. 
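[Editor's note, not part of this diff: a small sketch that makes the Windows splitting rules above concrete by forcing the platform explicitly; the command string is an arbitrary example.]

from botocore.compat import compat_shell_split

cmd = r'runme.exe "C:\Program Files\tool" \\server\share'
print(compat_shell_split(cmd, platform="win32"))
# -> ['runme.exe', 'C:\\Program Files\\tool', '\\\\server\\share']
print(compat_shell_split(cmd, platform="linux"))
# Non-Windows platforms fall back to shlex.split().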
- if sys.platform == 'win32': - from dateutil.tz import tzwinlocal - - return (tzlocal, tzwinlocal) - else: - return (tzlocal,) - - -# Detect if CRT is available for use -try: - import awscrt.auth - - # Allow user opt-out if needed - disabled = os.environ.get('BOTO_DISABLE_CRT', "false") - HAS_CRT = not disabled.lower() == 'true' -except ImportError: - HAS_CRT = False - - -def has_minimum_crt_version(minimum_version): - """Not intended for use outside botocore.""" - if not HAS_CRT: - return False - - crt_version_str = awscrt.__version__ - try: - crt_version_ints = map(int, crt_version_str.split(".")) - crt_version_tuple = tuple(crt_version_ints) - except (TypeError, ValueError): - return False - - return crt_version_tuple >= minimum_version - - -def get_current_datetime(remove_tzinfo=True): - """Retrieve the current timezone in UTC, with or without an explicit timezone.""" - datetime_now = datetime.datetime.now(datetime.timezone.utc) - if remove_tzinfo: - datetime_now = datetime_now.replace(tzinfo=None) - return datetime_now - - -######################################################## -# urllib3 compat backports # -######################################################## - -# Vendoring IPv6 validation regex patterns from urllib3 -# https://github.com/urllib3/urllib3/blob/7e856c0/src/urllib3/util/url.py -IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" -IPV4_RE = re.compile("^" + IPV4_PAT + "$") -HEX_PAT = "[0-9A-Fa-f]{1,4}" -LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) -_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} -_variations = [ - # 6( h16 ":" ) ls32 - "(?:%(hex)s:){6}%(ls32)s", - # "::" 5( h16 ":" ) ls32 - "::(?:%(hex)s:){5}%(ls32)s", - # [ h16 ] "::" 4( h16 ":" ) ls32 - "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", - # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 - "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", - # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 - "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", - # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 - "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", - # [ *4( h16 ":" ) h16 ] "::" ls32 - "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", - # [ *5( h16 ":" ) h16 ] "::" h16 - "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", - # [ *6( h16 ":" ) h16 ] "::" - "(?:(?:%(hex)s:){0,6}%(hex)s)?::", -] - -UNRESERVED_PAT = ( - r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" -) -IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" -ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" -IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" -IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") - -# These are the characters that are stripped by post-bpo-43882 urlparse(). -UNSAFE_URL_CHARS = frozenset('\t\r\n') - -# Detect if gzip is available for use -try: - import gzip - HAS_GZIP = True -except ImportError: - HAS_GZIP = False - -# Conditional import for awscrt EC crypto functionality -if HAS_CRT and has_minimum_crt_version((0, 28, 4)): - from awscrt.crypto import EC -else: - EC = None diff --git a/venv/Lib/site-packages/botocore/compress.py b/venv/Lib/site-packages/botocore/compress.py deleted file mode 100644 index d3dac6f..0000000 --- a/venv/Lib/site-packages/botocore/compress.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. 
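[Editor's note, not part of this diff: a brief sketch of the CRT opt-out path shown in the compat module above. The environment variable has to be set before botocore is first imported, and the version tuple is an arbitrary example.]

import os
os.environ["BOTO_DISABLE_CRT"] = "true"   # opt out before botocore is imported

import botocore.compat
print(botocore.compat.HAS_CRT)                              # False when opted out or awscrt is absent
print(botocore.compat.has_minimum_crt_version((0, 20, 0)))  # always False when HAS_CRT is False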
A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -""" -NOTE: All functions in this module are considered private and are -subject to abrupt breaking changes. Please do not use them directly. - -""" - -import io -import logging -from gzip import GzipFile -from gzip import compress as gzip_compress - -from botocore.compat import urlencode -from botocore.useragent import register_feature_id -from botocore.utils import determine_content_length - -logger = logging.getLogger(__name__) - - -def maybe_compress_request(config, request_dict, operation_model): - """Attempt to compress the request body using the modeled encodings.""" - if _should_compress_request(config, request_dict, operation_model): - for encoding in operation_model.request_compression['encodings']: - encoder = COMPRESSION_MAPPING.get(encoding) - if encoder is not None: - logger.debug('Compressing request with %s encoding.', encoding) - request_dict['body'] = encoder(request_dict['body']) - _set_compression_header(request_dict['headers'], encoding) - return - else: - logger.debug('Unsupported compression encoding: %s', encoding) - - -def _should_compress_request(config, request_dict, operation_model): - if ( - config.disable_request_compression is not True - and config.signature_version != 'v2' - and operation_model.request_compression is not None - ): - if not _is_compressible_type(request_dict): - body_type = type(request_dict['body']) - log_msg = 'Body type %s does not support compression.' - logger.debug(log_msg, body_type) - return False - - if operation_model.has_streaming_input: - streaming_input = operation_model.get_streaming_input() - streaming_metadata = streaming_input.metadata - return 'requiresLength' not in streaming_metadata - - body_size = _get_body_size(request_dict['body']) - min_size = config.request_min_compression_size_bytes - return min_size <= body_size - - return False - - -def _is_compressible_type(request_dict): - body = request_dict['body'] - # Coerce dict to a format compatible with compression. - if isinstance(body, dict): - body = urlencode(body, doseq=True, encoding='utf-8').encode('utf-8') - request_dict['body'] = body - is_supported_type = isinstance(body, (str, bytes, bytearray)) - return is_supported_type or hasattr(body, 'read') - - -def _get_body_size(body): - size = determine_content_length(body) - if size is None: - logger.debug( - 'Unable to get length of the request body: %s. 
' - 'Skipping compression.', - body, - ) - size = 0 - return size - - -def _gzip_compress_body(body): - register_feature_id('GZIP_REQUEST_COMPRESSION') - if isinstance(body, str): - return gzip_compress(body.encode('utf-8')) - elif isinstance(body, (bytes, bytearray)): - return gzip_compress(body) - elif hasattr(body, 'read'): - if hasattr(body, 'seek') and hasattr(body, 'tell'): - current_position = body.tell() - compressed_obj = _gzip_compress_fileobj(body) - body.seek(current_position) - return compressed_obj - return _gzip_compress_fileobj(body) - - -def _gzip_compress_fileobj(body): - compressed_obj = io.BytesIO() - with GzipFile(fileobj=compressed_obj, mode='wb') as gz: - while True: - chunk = body.read(8192) - if not chunk: - break - if isinstance(chunk, str): - chunk = chunk.encode('utf-8') - gz.write(chunk) - compressed_obj.seek(0) - return compressed_obj - - -def _set_compression_header(headers, encoding): - ce_header = headers.get('Content-Encoding') - if ce_header is None: - headers['Content-Encoding'] = encoding - else: - headers['Content-Encoding'] = f'{ce_header},{encoding}' - - -COMPRESSION_MAPPING = {'gzip': _gzip_compress_body} diff --git a/venv/Lib/site-packages/botocore/config.py b/venv/Lib/site-packages/botocore/config.py deleted file mode 100644 index e0ca5a7..0000000 --- a/venv/Lib/site-packages/botocore/config.py +++ /dev/null @@ -1,477 +0,0 @@ -# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import copy - -from botocore.compat import OrderedDict -from botocore.endpoint import DEFAULT_TIMEOUT, MAX_POOL_CONNECTIONS -from botocore.exceptions import ( - InvalidMaxRetryAttemptsError, - InvalidRetryConfigurationError, - InvalidRetryModeError, - InvalidS3AddressingStyleError, -) - - -class Config: - """Advanced configuration for Botocore clients. - - :type region_name: str - :param region_name: The region to use in instantiating the client - - :type signature_version: str - :param signature_version: The signature version when signing requests. - - :type user_agent: str - :param user_agent: The value to use in the User-Agent header. - - :type user_agent_extra: str - :param user_agent_extra: The value to append to the current User-Agent - header value. - - :type user_agent_appid: str - :param user_agent_appid: A value that gets included in the User-Agent - string in the format "app/". Allowed characters are - ASCII alphanumerics and ``!#$%&'*+-.^_`|~``. All other characters will - be replaced by a ``-``. - - :type connect_timeout: float or int - :param connect_timeout: The time in seconds till a timeout exception is - thrown when attempting to make a connection. The default is 60 - seconds. - - :type read_timeout: float or int - :param read_timeout: The time in seconds till a timeout exception is - thrown when attempting to read from a connection. The default is - 60 seconds. - - :type parameter_validation: bool - :param parameter_validation: Whether parameter validation should occur - when serializing requests. The default is True. 
You can disable - parameter validation for performance reasons. Otherwise, it's - recommended to leave parameter validation enabled. - - :type max_pool_connections: int - :param max_pool_connections: The maximum number of connections to - keep in a connection pool. If this value is not set, the default - value of 10 is used. - - :type proxies: dict - :param proxies: A dictionary of proxy servers to use by protocol or - endpoint, e.g.: - ``{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}``. - The proxies are used on each request. - - :type proxies_config: dict - :param proxies_config: A dictionary of additional proxy configurations. - Valid keys are: - - * ``proxy_ca_bundle`` -- The path to a custom certificate bundle to use - when establishing SSL/TLS connections with proxy. - - * ``proxy_client_cert`` -- The path to a certificate for proxy - TLS client authentication. - - When a string is provided it is treated as a path to a proxy client - certificate. When a two element tuple is provided, it will be - interpreted as the path to the client certificate, and the path - to the certificate key. - - * ``proxy_use_forwarding_for_https`` -- For HTTPS proxies, - forward your requests to HTTPS destinations with an absolute - URI. We strongly recommend you only use this option with - trusted or corporate proxies. Value must be boolean. - - :type s3: dict - :param s3: A dictionary of S3 specific configurations. - Valid keys are: - - * ``use_accelerate_endpoint`` -- Refers to whether to use the S3 - Accelerate endpoint. The value must be a boolean. If True, the - client will use the S3 Accelerate endpoint. If the S3 Accelerate - endpoint is being used then the addressing style will always - be virtual. - - * ``payload_signing_enabled`` -- Refers to whether or not to SHA256 - sign SigV4 payloads. For operations that support request checksums, - this only applies when ``request_checksum_calculation`` is set to - ``when_required``. Otherwise, this is disabled for - streaming uploads (UploadPart and PutObject) by default. - - * ``addressing_style`` -- Refers to the style in which to address - s3 endpoints. Values must be a string that equals one of: - - * ``auto`` -- Addressing style is chosen for user. Depending - on the configuration of client, the endpoint may be addressed in - the virtual or the path style. Note that this is the default - behavior if no style is specified. - - * ``virtual`` -- Addressing style is always virtual. The name of the - bucket must be DNS compatible or an exception will be thrown. - Endpoints will be addressed as such: ``amzn-s3-demo-bucket.s3.amazonaws.com`` - - * ``path`` -- Addressing style is always by path. Endpoints will be - addressed as such: ``s3.amazonaws.com/amzn-s3-demo-bucket`` - - * ``us_east_1_regional_endpoint`` -- Refers to what S3 endpoint to use - when the region is configured to be us-east-1. Values must be a - string that equals: - - * ``regional`` -- Use the us-east-1.amazonaws.com endpoint if the - client is configured to use the us-east-1 region. - - * ``legacy`` -- Use the s3.amazonaws.com endpoint if the client is - configured to use the us-east-1 region. This is the default if - the configuration option is not specified. - - - :type retries: dict - :param retries: A dictionary for configuration related to retry behavior. - Valid keys are: - - * ``total_max_attempts`` -- An integer representing the maximum number of - total attempts that will be made on a single request. 
This includes - the initial request, so a value of 1 indicates that no requests - will be retried. If ``total_max_attempts`` and ``max_attempts`` - are both provided, ``total_max_attempts`` takes precedence. - ``total_max_attempts`` is preferred over ``max_attempts`` because - it maps to the ``AWS_MAX_ATTEMPTS`` environment variable and - the ``max_attempts`` config file value. - * ``max_attempts`` -- An integer representing the maximum number of - retry attempts that will be made on a single request. For - example, setting this value to 2 will result in the request - being retried at most two times after the initial request. Setting - this value to 0 will result in no retries ever being attempted after - the initial request. If not provided, the number of retries will - default to the value specified in the service model, which is - typically four retries. - * ``mode`` -- A string representing the type of retry mode botocore - should use. Valid values are: - - * ``legacy`` - The pre-existing retry behavior. - - * ``standard`` - The standardized set of retry rules. This will also - default to 3 max attempts unless overridden. - - * ``adaptive`` - Retries with additional client side throttling. - - :type client_cert: str, (str, str) - :param client_cert: The path to a certificate for TLS client authentication. - - When a string is provided it is treated as a path to a client - certificate to be used when creating a TLS connection. - - If a client key is to be provided alongside the client certificate the - client_cert should be set to a tuple of length two where the first - element is the path to the client certificate and the second element is - the path to the certificate key. - - :type inject_host_prefix: bool - :param inject_host_prefix: Whether host prefix injection should occur. - - Defaults to None. - - The default of None is equivalent to setting to True, which enables - the injection of operation parameters into the prefix of the hostname. - Setting this to False disables the injection of operation parameters - into the prefix of the hostname. Setting this to False is useful for - clients providing custom endpoints that should not have their host - prefix modified. - - :type use_dualstack_endpoint: bool - :param use_dualstack_endpoint: Setting to True enables dualstack - endpoint resolution. - - Defaults to None. - - :type use_fips_endpoint: bool - :param use_fips_endpoint: Setting to True enables fips - endpoint resolution. - - Defaults to None. - - :type ignore_configured_endpoint_urls: bool - :param ignore_configured_endpoint_urls: Setting to True disables use - of endpoint URLs provided via environment variables and - the shared configuration file. - - Defaults to None. - - :type tcp_keepalive: bool - :param tcp_keepalive: Enables the TCP Keep-Alive socket option used when - creating new connections if set to True. - - Defaults to False. - - :type request_min_compression_size_bytes: int - :param request_min_compression_size_bytes: The minimum size in bytes that a - request body should be to trigger compression. All requests with - streaming input that don't contain the ``requiresLength`` trait will be - compressed regardless of this setting. - - Defaults to None. - - :type disable_request_compression: bool - :param disable_request_compression: Disables request body compression if - set to True. - - Defaults to None. - - :type sigv4a_signing_region_set: string - :param sigv4a_signing_region_set: A set of AWS regions to apply the signature for - when using SigV4a for signing. 
Set to ``*`` to represent all regions. - - Defaults to None. - - :type client_context_params: dict - :param client_context_params: A dictionary of parameters specific to - individual services. If available, valid parameters can be found in - the ``Client Context Parameters`` section of the service client's - documentation. Invalid parameters or ones that are not used by the - specified service will be ignored. - - Defaults to None. - - :type request_checksum_calculation: str - :param request_checksum_calculation: Determines when a checksum will be - calculated for request payloads. Valid values are: - - * ``when_supported`` -- When set, a checksum will be calculated for - all request payloads of operations modeled with the ``httpChecksum`` - trait where ``requestChecksumRequired`` is ``true`` or a - ``requestAlgorithmMember`` is modeled. - - * ``when_required`` -- When set, a checksum will only be calculated - for request payloads of operations modeled with the ``httpChecksum`` - trait where ``requestChecksumRequired`` is ``true`` or where a - ``requestAlgorithmMember`` is modeled and supplied. - - Defaults to None. - - :type response_checksum_validation: str - :param response_checksum_validation: Determines when checksum validation - will be performed on response payloads. Valid values are: - - * ``when_supported`` -- When set, checksum validation is performed on - all response payloads of operations modeled with the ``httpChecksum`` - trait where ``responseAlgorithms`` is modeled, except when no modeled - checksum algorithms are supported. - - * ``when_required`` -- When set, checksum validation is not performed - on response payloads of operations unless the checksum algorithm is - supported and the ``requestValidationModeMember`` member is set to ``ENABLED``. - - Defaults to None. - - :type account_id_endpoint_mode: str - :param account_id_endpoint_mode: The value used to determine the client's - behavior for account ID based endpoint routing. Valid values are: - - * ``preferred`` - The endpoint should include account ID if available. - * ``disabled`` - A resolved endpoint does not include account ID. - * ``required`` - The endpoint must include account ID. If the account ID - isn't available, an exception will be raised. - - If a value is not provided, the client will default to ``preferred``. - - Defaults to None. - - :type auth_scheme_preference: str - :param auth_scheme_preference: A comma-delimited string of case-sensitive - auth scheme names used to determine the client's auth scheme preference. - - Defaults to None. 
- """ - - OPTION_DEFAULTS = OrderedDict( - [ - ('region_name', None), - ('signature_version', None), - ('user_agent', None), - ('user_agent_extra', None), - ('user_agent_appid', None), - ('connect_timeout', DEFAULT_TIMEOUT), - ('read_timeout', DEFAULT_TIMEOUT), - ('parameter_validation', True), - ('max_pool_connections', MAX_POOL_CONNECTIONS), - ('proxies', None), - ('proxies_config', None), - ('s3', None), - ('retries', None), - ('client_cert', None), - ('inject_host_prefix', None), - ('endpoint_discovery_enabled', None), - ('use_dualstack_endpoint', None), - ('use_fips_endpoint', None), - ('ignore_configured_endpoint_urls', None), - ('defaults_mode', None), - ('tcp_keepalive', None), - ('request_min_compression_size_bytes', None), - ('disable_request_compression', None), - ('client_context_params', None), - ('sigv4a_signing_region_set', None), - ('request_checksum_calculation', None), - ('response_checksum_validation', None), - ('account_id_endpoint_mode', None), - ('auth_scheme_preference', None), - ] - ) - - NON_LEGACY_OPTION_DEFAULTS = { - 'connect_timeout': None, - } - - # The original default value of the inject_host_prefix parameter was True. - # This prevented the ability to override the value from other locations in - # the parameter provider chain, like env vars or the shared configuration - # file. TO accomplish this, we need to disambiguate when the value was set - # by the user or not. This overrides the parameter with a property so the - # default value of inject_host_prefix is still True if it is not set by the - # user. - @property - def inject_host_prefix(self): - if self._inject_host_prefix == "UNSET": - return True - - return self._inject_host_prefix - - # Override the setter for the case where the user does supply a value; - # _inject_host_prefix will no longer be "UNSET". - @inject_host_prefix.setter - def inject_host_prefix(self, value): - self._inject_host_prefix = value - - def __init__(self, *args, **kwargs): - self._user_provided_options = self._record_user_provided_options( - args, kwargs - ) - - # By default, we use a value that indicates the user did not - # set it. This value MUST persist on the Config object to be used - # elsewhere. - self._inject_host_prefix = 'UNSET' - - # Merge the user_provided options onto the default options - config_vars = copy.copy(self.OPTION_DEFAULTS) - defaults_mode = self._user_provided_options.get( - 'defaults_mode', 'legacy' - ) - if defaults_mode != 'legacy': - config_vars.update(self.NON_LEGACY_OPTION_DEFAULTS) - - config_vars.update(self._user_provided_options) - - # Set the attributes based on the config_vars - for key, value in config_vars.items(): - # Default values for the Config object are set here. We don't want - # to use `setattr` in the case where the user already supplied a - # value. 
- if ( - key == 'inject_host_prefix' - and 'inject_host_prefix' - not in self._user_provided_options.keys() - ): - continue - setattr(self, key, value) - - # Validate the s3 options - self._validate_s3_configuration(self.s3) - - self._validate_retry_configuration(self.retries) - - def _record_user_provided_options(self, args, kwargs): - option_order = list(self.OPTION_DEFAULTS) - user_provided_options = {} - - # Iterate through the kwargs passed through to the constructor and - # map valid keys to the dictionary - for key, value in kwargs.items(): - if key in self.OPTION_DEFAULTS: - user_provided_options[key] = value - # The key must exist in the available options - else: - raise TypeError(f"Got unexpected keyword argument '{key}'") - - # The number of args should not be longer than the allowed - # options - if len(args) > len(option_order): - raise TypeError( - f"Takes at most {len(option_order)} arguments ({len(args)} given)" - ) - - # Iterate through the args passed through to the constructor and map - # them to appropriate keys. - for i, arg in enumerate(args): - # If a kwarg was specified for the arg, then error out - if option_order[i] in user_provided_options: - raise TypeError( - f"Got multiple values for keyword argument '{option_order[i]}'" - ) - user_provided_options[option_order[i]] = arg - - return user_provided_options - - def _validate_s3_configuration(self, s3): - if s3 is not None: - addressing_style = s3.get('addressing_style') - if addressing_style not in ['virtual', 'auto', 'path', None]: - raise InvalidS3AddressingStyleError( - s3_addressing_style=addressing_style - ) - - def _validate_retry_configuration(self, retries): - valid_options = ('max_attempts', 'mode', 'total_max_attempts') - valid_modes = ('legacy', 'standard', 'adaptive') - if retries is not None: - for key, value in retries.items(): - if key not in valid_options: - raise InvalidRetryConfigurationError( - retry_config_option=key, - valid_options=valid_options, - ) - if key == 'max_attempts' and value < 0: - raise InvalidMaxRetryAttemptsError( - provided_max_attempts=value, - min_value=0, - ) - if key == 'total_max_attempts' and value < 1: - raise InvalidMaxRetryAttemptsError( - provided_max_attempts=value, - min_value=1, - ) - if key == 'mode' and value not in valid_modes: - raise InvalidRetryModeError( - provided_retry_mode=value, - valid_modes=valid_modes, - ) - - def merge(self, other_config): - """Merges the config object with another config object - - This will merge in all non-default values from the provided config - and return a new config object - - :type other_config: botocore.config.Config - :param other config: Another config object to merge with. The values - in the provided config object will take precedence in the merging - - :returns: A config object built from the merged values of both - config objects. - """ - # Make a copy of the current attributes in the config object. - config_options = copy.copy(self._user_provided_options) - - # Merge in the user provided options from the other config - config_options.update(other_config._user_provided_options) - - # Return a new config object with the merged properties. - return Config(**config_options) diff --git a/venv/Lib/site-packages/botocore/configloader.py b/venv/Lib/site-packages/botocore/configloader.py deleted file mode 100644 index 0b6c82b..0000000 --- a/venv/Lib/site-packages/botocore/configloader.py +++ /dev/null @@ -1,287 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2016 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import configparser -import copy -import os -import shlex -import sys - -import botocore.exceptions - - -def multi_file_load_config(*filenames): - """Load and combine multiple INI configs with profiles. - - This function will take a list of filesnames and return - a single dictionary that represents the merging of the loaded - config files. - - If any of the provided filenames does not exist, then that file - is ignored. It is therefore ok to provide a list of filenames, - some of which may not exist. - - Configuration files are **not** deep merged, only the top level - keys are merged. The filenames should be passed in order of - precedence. The first config file has precedence over the - second config file, which has precedence over the third config file, - etc. The only exception to this is that the "profiles" key is - merged to combine profiles from multiple config files into a - single profiles mapping. However, if a profile is defined in - multiple config files, then the config file with the highest - precedence is used. Profile values themselves are not merged. - For example:: - - FileA FileB FileC - [foo] [foo] [bar] - a=1 a=2 a=3 - b=2 - - [bar] [baz] [profile a] - a=2 a=3 region=e - - [profile a] [profile b] [profile c] - region=c region=d region=f - - The final result of ``multi_file_load_config(FileA, FileB, FileC)`` - would be:: - - {"foo": {"a": 1}, "bar": {"a": 2}, "baz": {"a": 3}, - "profiles": {"a": {"region": "c"}}, {"b": {"region": d"}}, - {"c": {"region": "f"}}} - - Note that the "foo" key comes from A, even though it's defined in both - FileA and FileB. Because "foo" was defined in FileA first, then the values - for "foo" from FileA are used and the values for "foo" from FileB are - ignored. Also note where the profiles originate from. Profile "a" - comes FileA, profile "b" comes from FileB, and profile "c" comes - from FileC. - - """ - configs = [] - profiles = [] - for filename in filenames: - try: - loaded = load_config(filename) - except botocore.exceptions.ConfigNotFound: - continue - profiles.append(loaded.pop('profiles')) - configs.append(loaded) - merged_config = _merge_list_of_dicts(configs) - merged_profiles = _merge_list_of_dicts(profiles) - merged_config['profiles'] = merged_profiles - return merged_config - - -def _merge_list_of_dicts(list_of_dicts): - merged_dicts = {} - for single_dict in list_of_dicts: - for key, value in single_dict.items(): - if key not in merged_dicts: - merged_dicts[key] = value - return merged_dicts - - -def load_config(config_filename): - """Parse a INI config with profiles. - - This will parse an INI config file and map top level profiles - into a top level "profile" key. - - If you want to parse an INI file and map all section names to - top level keys, use ``raw_config_parse`` instead. - - """ - parsed = raw_config_parse(config_filename) - return build_profile_map(parsed) - - -def raw_config_parse(config_filename, parse_subsections=True): - """Returns the parsed INI config contents. 
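[Editor's note, not part of this diff: a sketch of what load_config() returns for a typical config file, following the profile mapping described above. The path and profile contents are examples.]

from botocore.configloader import load_config

# Assume ~/.aws/config contains:
#   [default]
#   region = us-east-1
#   [profile dev]
#   region = eu-west-1
config = load_config("~/.aws/config")
print(config["profiles"])
# {'default': {'region': 'us-east-1'}, 'dev': {'region': 'eu-west-1'}}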
- - Each section name is a top level key. - - :param config_filename: The name of the INI file to parse - - :param parse_subsections: If True, parse indented blocks as - subsections that represent their own configuration dictionary. - For example, if the config file had the contents:: - - s3 = - signature_version = s3v4 - addressing_style = path - - The resulting ``raw_config_parse`` would be:: - - {'s3': {'signature_version': 's3v4', 'addressing_style': 'path'}} - - If False, do not try to parse subsections and return the indented - block as its literal value:: - - {'s3': '\nsignature_version = s3v4\naddressing_style = path'} - - :returns: A dict with keys for each profile found in the config - file and the value of each key being a dict containing name - value pairs found in that profile. - - :raises: ConfigNotFound, ConfigParseError - """ - config = {} - path = config_filename - if path is not None: - path = os.path.expandvars(path) - path = os.path.expanduser(path) - if not os.path.isfile(path): - raise botocore.exceptions.ConfigNotFound(path=_unicode_path(path)) - cp = configparser.RawConfigParser() - try: - cp.read([path]) - except (configparser.Error, UnicodeDecodeError) as e: - raise botocore.exceptions.ConfigParseError( - path=_unicode_path(path), error=e - ) from None - else: - for section in cp.sections(): - config[section] = {} - for option in cp.options(section): - config_value = cp.get(section, option) - if parse_subsections and config_value.startswith('\n'): - # Then we need to parse the inner contents as - # hierarchical. We support a single level - # of nesting for now. - try: - config_value = _parse_nested(config_value) - except ValueError as e: - raise botocore.exceptions.ConfigParseError( - path=_unicode_path(path), error=e - ) from None - config[section][option] = config_value - return config - - -def _unicode_path(path): - if isinstance(path, str): - return path - # According to the documentation getfilesystemencoding can return None - # on unix in which case the default encoding is used instead. - filesystem_encoding = sys.getfilesystemencoding() - if filesystem_encoding is None: - filesystem_encoding = sys.getdefaultencoding() - return path.decode(filesystem_encoding, 'replace') - - -def _parse_nested(config_value): - # Given a value like this: - # \n - # foo = bar - # bar = baz - # We need to parse this into - # {'foo': 'bar', 'bar': 'baz} - parsed = {} - for line in config_value.splitlines(): - line = line.strip() - if not line: - continue - # The caller will catch ValueError - # and raise an appropriate error - # if this fails. - key, value = line.split('=', 1) - parsed[key.strip()] = value.strip() - return parsed - - -def _parse_section(key, values): - result = {} - try: - parts = shlex.split(key) - except ValueError: - return result - if len(parts) == 2: - result[parts[1]] = values - return result - - -def build_profile_map(parsed_ini_config): - """Convert the parsed INI config into a profile map. - - The config file format requires that every profile except the - default to be prepended with "profile", e.g.:: - - [profile test] - aws_... = foo - aws_... = bar - - [profile bar] - aws_... = foo - aws_... 
= bar - - # This is *not* a profile - [preview] - otherstuff = 1 - - # Neither is this - [foobar] - morestuff = 2 - - The build_profile_map will take a parsed INI config file where each top - level key represents a section name, and convert into a format where all - the profiles are under a single top level "profiles" key, and each key in - the sub dictionary is a profile name. For example, the above config file - would be converted from:: - - {"profile test": {"aws_...": "foo", "aws...": "bar"}, - "profile bar": {"aws...": "foo", "aws...": "bar"}, - "preview": {"otherstuff": ...}, - "foobar": {"morestuff": ...}, - } - - into:: - - {"profiles": {"test": {"aws_...": "foo", "aws...": "bar"}, - "bar": {"aws...": "foo", "aws...": "bar"}, - "preview": {"otherstuff": ...}, - "foobar": {"morestuff": ...}, - } - - If there are no profiles in the provided parsed INI contents, then - an empty dict will be the value associated with the ``profiles`` key. - - .. note:: - - This will not mutate the passed in parsed_ini_config. Instead it will - make a deepcopy and return that value. - - """ - parsed_config = copy.deepcopy(parsed_ini_config) - profiles = {} - sso_sessions = {} - services = {} - final_config = {} - for key, values in parsed_config.items(): - if key.startswith("profile"): - profiles.update(_parse_section(key, values)) - elif key.startswith("sso-session"): - sso_sessions.update(_parse_section(key, values)) - elif key.startswith("services"): - services.update(_parse_section(key, values)) - elif key == 'default': - # default section is special and is considered a profile - # name but we don't require you use 'profile "default"' - # as a section. - profiles[key] = values - else: - final_config[key] = values - final_config['profiles'] = profiles - final_config['sso_sessions'] = sso_sessions - final_config['services'] = services - return final_config diff --git a/venv/Lib/site-packages/botocore/configprovider.py b/venv/Lib/site-packages/botocore/configprovider.py deleted file mode 100644 index b4a9c5b..0000000 --- a/venv/Lib/site-packages/botocore/configprovider.py +++ /dev/null @@ -1,1051 +0,0 @@ -# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""This module contains the interface for controlling how configuration -is loaded. -""" - -import copy -import logging -import os - -from botocore import utils -from botocore.exceptions import InvalidConfigError - -logger = logging.getLogger(__name__) - - -#: A default dictionary that maps the logical names for session variables -#: to the specific environment variables and configuration file names -#: that contain the values for these variables. -#: When creating a new Session object, you can pass in your own dictionary -#: to remap the logical names or to add new logical names. You can then -#: get the current value for these variables by using the -#: ``get_config_variable`` method of the :class:`botocore.session.Session` -#: class. -#: These form the keys of the dictionary. The values in the dictionary -#: are tuples of (, , , -#: ). 
-#: The conversion func is a function that takes the configuration value -#: as an argument and returns the converted value. If this value is -#: None, then the configuration value is returned unmodified. This -#: conversion function can be used to type convert config values to -#: values other than the default values of strings. -#: The ``profile`` and ``config_file`` variables should always have a -#: None value for the first entry in the tuple because it doesn't make -#: sense to look inside the config file for the location of the config -#: file or for the default profile to use. -#: The ``config_name`` is the name to look for in the configuration file, -#: the ``env var`` is the OS environment variable (``os.environ``) to -#: use, and ``default_value`` is the value to use if no value is otherwise -#: found. -#: NOTE: Fixing the spelling of this variable would be a breaking change. -#: Please leave as is. -BOTOCORE_DEFAUT_SESSION_VARIABLES = { - # logical: config_file, env_var, default_value, conversion_func - 'profile': (None, ['AWS_DEFAULT_PROFILE', 'AWS_PROFILE'], None, None), - 'region': ('region', 'AWS_DEFAULT_REGION', None, None), - 'data_path': ('data_path', 'AWS_DATA_PATH', None, None), - 'config_file': (None, 'AWS_CONFIG_FILE', '~/.aws/config', None), - 'ca_bundle': ('ca_bundle', 'AWS_CA_BUNDLE', None, None), - 'api_versions': ('api_versions', None, {}, None), - # This is the shared credentials file amongst sdks. - 'credentials_file': ( - None, - 'AWS_SHARED_CREDENTIALS_FILE', - '~/.aws/credentials', - None, - ), - # These variables only exist in the config file. - # This is the number of seconds until we time out a request to - # the instance metadata service. - 'metadata_service_timeout': ( - 'metadata_service_timeout', - 'AWS_METADATA_SERVICE_TIMEOUT', - 1, - int, - ), - # This is the number of request attempts we make until we give - # up trying to retrieve data from the instance metadata service. - 'metadata_service_num_attempts': ( - 'metadata_service_num_attempts', - 'AWS_METADATA_SERVICE_NUM_ATTEMPTS', - 1, - int, - ), - 'ec2_metadata_service_endpoint': ( - 'ec2_metadata_service_endpoint', - 'AWS_EC2_METADATA_SERVICE_ENDPOINT', - None, - None, - ), - 'ec2_metadata_service_endpoint_mode': ( - 'ec2_metadata_service_endpoint_mode', - 'AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE', - None, - None, - ), - 'ec2_metadata_v1_disabled': ( - 'ec2_metadata_v1_disabled', - 'AWS_EC2_METADATA_V1_DISABLED', - False, - utils.ensure_boolean, - ), - 'imds_use_ipv6': ( - 'imds_use_ipv6', - 'AWS_IMDS_USE_IPV6', - False, - utils.ensure_boolean, - ), - 'use_dualstack_endpoint': ( - 'use_dualstack_endpoint', - 'AWS_USE_DUALSTACK_ENDPOINT', - None, - utils.ensure_boolean, - ), - 'use_fips_endpoint': ( - 'use_fips_endpoint', - 'AWS_USE_FIPS_ENDPOINT', - None, - utils.ensure_boolean, - ), - 'ignore_configured_endpoint_urls': ( - 'ignore_configured_endpoint_urls', - 'AWS_IGNORE_CONFIGURED_ENDPOINT_URLS', - None, - utils.ensure_boolean, - ), - 'parameter_validation': ('parameter_validation', None, True, None), - # Client side monitoring configurations. - # Note: These configurations are considered internal to botocore. - # Do not use them until publicly documented. 
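[Editor's note, not part of this diff: a sketch of how one logical name from the mapping above resolves in practice. A session override wins, then the environment variable, then the config file, then the default; the values here are examples.]

import os
import botocore.session

os.environ["AWS_DEFAULT_REGION"] = "us-west-2"
session = botocore.session.Session()
print(session.get_config_variable("region"))       # 'us-west-2', picked up from the env var
session.set_config_variable("region", "eu-central-1")
print(session.get_config_variable("region"))       # 'eu-central-1', the explicit override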
- 'csm_enabled': ( - 'csm_enabled', - 'AWS_CSM_ENABLED', - False, - utils.ensure_boolean, - ), - 'csm_host': ('csm_host', 'AWS_CSM_HOST', '127.0.0.1', None), - 'csm_port': ('csm_port', 'AWS_CSM_PORT', 31000, int), - 'csm_client_id': ('csm_client_id', 'AWS_CSM_CLIENT_ID', '', None), - # Endpoint discovery configuration - 'endpoint_discovery_enabled': ( - 'endpoint_discovery_enabled', - 'AWS_ENDPOINT_DISCOVERY_ENABLED', - 'auto', - None, - ), - 'sts_regional_endpoints': ( - 'sts_regional_endpoints', - 'AWS_STS_REGIONAL_ENDPOINTS', - 'regional', - None, - ), - 'retry_mode': ('retry_mode', 'AWS_RETRY_MODE', 'legacy', None), - 'defaults_mode': ('defaults_mode', 'AWS_DEFAULTS_MODE', 'legacy', None), - # We can't have a default here for v1 because we need to defer to - # whatever the defaults are in _retry.json. - 'max_attempts': ('max_attempts', 'AWS_MAX_ATTEMPTS', None, int), - 'user_agent_appid': ('sdk_ua_app_id', 'AWS_SDK_UA_APP_ID', None, None), - 'request_min_compression_size_bytes': ( - 'request_min_compression_size_bytes', - 'AWS_REQUEST_MIN_COMPRESSION_SIZE_BYTES', - 10240, - None, - ), - 'disable_request_compression': ( - 'disable_request_compression', - 'AWS_DISABLE_REQUEST_COMPRESSION', - False, - utils.ensure_boolean, - ), - 'sigv4a_signing_region_set': ( - 'sigv4a_signing_region_set', - 'AWS_SIGV4A_SIGNING_REGION_SET', - None, - None, - ), - 'request_checksum_calculation': ( - 'request_checksum_calculation', - 'AWS_REQUEST_CHECKSUM_CALCULATION', - "when_supported", - None, - ), - 'response_checksum_validation': ( - 'response_checksum_validation', - 'AWS_RESPONSE_CHECKSUM_VALIDATION', - "when_supported", - None, - ), - 'account_id_endpoint_mode': ( - 'account_id_endpoint_mode', - 'AWS_ACCOUNT_ID_ENDPOINT_MODE', - 'preferred', - None, - ), - 'disable_host_prefix_injection': ( - 'disable_host_prefix_injection', - 'AWS_DISABLE_HOST_PREFIX_INJECTION', - None, - utils.ensure_boolean, - ), - 'auth_scheme_preference': ( - 'auth_scheme_preference', - 'AWS_AUTH_SCHEME_PREFERENCE', - None, - None, - ), -} -# A mapping for the s3 specific configuration vars. These are the configuration -# vars that typically go in the s3 section of the config file. This mapping -# follows the same schema as the previous session variable mapping. -DEFAULT_S3_CONFIG_VARS = { - 'addressing_style': (('s3', 'addressing_style'), None, None, None), - 'use_accelerate_endpoint': ( - ('s3', 'use_accelerate_endpoint'), - None, - None, - utils.ensure_boolean, - ), - 'use_dualstack_endpoint': ( - ('s3', 'use_dualstack_endpoint'), - None, - None, - utils.ensure_boolean, - ), - 'payload_signing_enabled': ( - ('s3', 'payload_signing_enabled'), - None, - None, - utils.ensure_boolean, - ), - 'use_arn_region': ( - ['s3_use_arn_region', ('s3', 'use_arn_region')], - 'AWS_S3_USE_ARN_REGION', - None, - utils.ensure_boolean, - ), - 'us_east_1_regional_endpoint': ( - [ - 's3_us_east_1_regional_endpoint', - ('s3', 'us_east_1_regional_endpoint'), - ], - 'AWS_S3_US_EAST_1_REGIONAL_ENDPOINT', - None, - None, - ), - 's3_disable_multiregion_access_points': ( - ('s3', 's3_disable_multiregion_access_points'), - 'AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS', - None, - utils.ensure_boolean, - ), -} -# A mapping for the proxy specific configuration vars. These are -# used to configure how botocore interacts with proxy setups while -# sending requests. 
-DEFAULT_PROXIES_CONFIG_VARS = { - 'proxy_ca_bundle': ('proxy_ca_bundle', None, None, None), - 'proxy_client_cert': ('proxy_client_cert', None, None, None), - 'proxy_use_forwarding_for_https': ( - 'proxy_use_forwarding_for_https', - None, - None, - utils.normalize_boolean, - ), -} - - -def create_botocore_default_config_mapping(session): - chain_builder = ConfigChainFactory(session=session) - config_mapping = _create_config_chain_mapping( - chain_builder, BOTOCORE_DEFAUT_SESSION_VARIABLES - ) - config_mapping['s3'] = SectionConfigProvider( - 's3', - session, - _create_config_chain_mapping(chain_builder, DEFAULT_S3_CONFIG_VARS), - ) - config_mapping['proxies_config'] = SectionConfigProvider( - 'proxies_config', - session, - _create_config_chain_mapping( - chain_builder, DEFAULT_PROXIES_CONFIG_VARS - ), - ) - return config_mapping - - -def _create_config_chain_mapping(chain_builder, config_variables): - mapping = {} - for logical_name, config in config_variables.items(): - mapping[logical_name] = chain_builder.create_config_chain( - instance_name=logical_name, - env_var_names=config[1], - config_property_names=config[0], - default=config[2], - conversion_func=config[3], - ) - return mapping - - -class DefaultConfigResolver: - def __init__(self, default_config_data): - self._base_default_config = default_config_data['base'] - self._modes = default_config_data['modes'] - self._resolved_default_configurations = {} - - def _resolve_default_values_by_mode(self, mode): - default_config = self._base_default_config.copy() - modifications = self._modes.get(mode) - - for config_var in modifications: - default_value = default_config[config_var] - modification_dict = modifications[config_var] - modification = list(modification_dict.keys())[0] - modification_value = modification_dict[modification] - if modification == 'multiply': - default_value *= modification_value - elif modification == 'add': - default_value += modification_value - elif modification == 'override': - default_value = modification_value - default_config[config_var] = default_value - return default_config - - def get_default_modes(self): - default_modes = ['legacy', 'auto'] - default_modes.extend(self._modes.keys()) - return default_modes - - def get_default_config_values(self, mode): - if mode not in self._resolved_default_configurations: - defaults = self._resolve_default_values_by_mode(mode) - self._resolved_default_configurations[mode] = defaults - return self._resolved_default_configurations[mode] - - -class ConfigChainFactory: - """Factory class to create our most common configuration chain case. - - This is a convenience class to construct configuration chains that follow - our most common pattern. This is to prevent ordering them incorrectly, - and to make the config chain construction more readable. - """ - - def __init__(self, session, environ=None): - """Initialize a ConfigChainFactory. - - :type session: :class:`botocore.session.Session` - :param session: This is the session that should be used to look up - values from the config file. - - :type environ: dict - :param environ: A mapping to use for environment variables. If this - is not provided it will default to use os.environ. - """ - self._session = session - if environ is None: - environ = os.environ - self._environ = environ - - def create_config_chain( - self, - instance_name=None, - env_var_names=None, - config_property_names=None, - default=None, - conversion_func=None, - ): - """Build a config chain following the standard botocore pattern. 
- - In botocore most of our config chains follow the the precendence: - session_instance_variables, environment, config_file, default_value. - - This is a convenience function for creating a chain that follow - that precendence. - - :type instance_name: str - :param instance_name: This indicates what session instance variable - corresponds to this config value. If it is None it will not be - added to the chain. - - :type env_var_names: str or list of str or None - :param env_var_names: One or more environment variable names to - search for this value. They are searched in order. If it is None - it will not be added to the chain. - - :type config_property_names: str/tuple or list of str/tuple or None - :param config_property_names: One of more strings or tuples - representing the name of the key in the config file for this - config option. They are searched in order. If it is None it will - not be added to the chain. - - :type default: Any - :param default: Any constant value to be returned. - - :type conversion_func: None or callable - :param conversion_func: If this value is None then it has no effect on - the return type. Otherwise, it is treated as a function that will - conversion_func our provided type. - - :rvalue: ConfigChain - :returns: A ConfigChain that resolves in the order env_var_names -> - config_property_name -> default. Any values that were none are - omitted form the chain. - """ - providers = [] - if instance_name is not None: - providers.append( - InstanceVarProvider( - instance_var=instance_name, session=self._session - ) - ) - if env_var_names is not None: - providers.extend(self._get_env_providers(env_var_names)) - if config_property_names is not None: - providers.extend( - self._get_scoped_config_providers(config_property_names) - ) - if default is not None: - providers.append(ConstantProvider(value=default)) - - return ChainProvider( - providers=providers, - conversion_func=conversion_func, - ) - - def _get_env_providers(self, env_var_names): - env_var_providers = [] - if not isinstance(env_var_names, list): - env_var_names = [env_var_names] - for env_var_name in env_var_names: - env_var_providers.append( - EnvironmentProvider(name=env_var_name, env=self._environ) - ) - return env_var_providers - - def _get_scoped_config_providers(self, config_property_names): - scoped_config_providers = [] - if not isinstance(config_property_names, list): - config_property_names = [config_property_names] - for config_property_name in config_property_names: - scoped_config_providers.append( - ScopedConfigProvider( - config_var_name=config_property_name, - session=self._session, - ) - ) - return scoped_config_providers - - -class ConfigValueStore: - """The ConfigValueStore object stores configuration values.""" - - def __init__(self, mapping=None): - """Initialize a ConfigValueStore. - - :type mapping: dict - :param mapping: The mapping parameter is a map of string to a subclass - of BaseProvider. When a config variable is asked for via the - get_config_variable method, the corresponding provider will be - invoked to load the value. 
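[Editor's note, not part of this diff: a sketch that wires a chain built by the factory described above into a ConfigValueStore. The logical name, environment variable, and default are made-up examples.]

import botocore.session
from botocore.configprovider import ConfigChainFactory, ConfigValueStore

session = botocore.session.Session()
factory = ConfigChainFactory(session=session)
chain = factory.create_config_chain(
    instance_name="my_setting",
    env_var_names="MY_SETTING",
    config_property_names="my_setting",
    default="fallback",
)
store = ConfigValueStore(mapping={"my_setting": chain})
print(store.get_config_variable("my_setting"))   # env var or config file value, else 'fallback'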
- """ - self._overrides = {} - self._mapping = {} - if mapping is not None: - for logical_name, provider in mapping.items(): - self.set_config_provider(logical_name, provider) - - def __deepcopy__(self, memo): - config_store = ConfigValueStore(copy.deepcopy(self._mapping, memo)) - for logical_name, override_value in self._overrides.items(): - config_store.set_config_variable(logical_name, override_value) - - return config_store - - def __copy__(self): - config_store = ConfigValueStore(copy.copy(self._mapping)) - for logical_name, override_value in self._overrides.items(): - config_store.set_config_variable(logical_name, override_value) - - return config_store - - def get_config_variable(self, logical_name): - """ - Retrieve the value associated with the specified logical_name - from the corresponding provider. If no value is found None will - be returned. - - :type logical_name: str - :param logical_name: The logical name of the session variable - you want to retrieve. This name will be mapped to the - appropriate environment variable name for this session as - well as the appropriate config file entry. - - :returns: value of variable or None if not defined. - """ - if logical_name in self._overrides: - return self._overrides[logical_name] - if logical_name not in self._mapping: - return None - provider = self._mapping[logical_name] - return provider.provide() - - def get_config_provider(self, logical_name): - """ - Retrieve the provider associated with the specified logical_name. - If no provider is found None will be returned. - - :type logical_name: str - :param logical_name: The logical name of the session variable - you want to retrieve. This name will be mapped to the - appropriate environment variable name for this session as - well as the appropriate config file entry. - - :returns: configuration provider or None if not defined. - """ - if ( - logical_name in self._overrides - or logical_name not in self._mapping - ): - return None - provider = self._mapping[logical_name] - return provider - - def set_config_variable(self, logical_name, value): - """Set a configuration variable to a specific value. - - By using this method, you can override the normal lookup - process used in ``get_config_variable`` by explicitly setting - a value. Subsequent calls to ``get_config_variable`` will - use the ``value``. This gives you per-session specific - configuration values. - - :: - >>> # Assume logical name 'foo' maps to env var 'FOO' - >>> os.environ['FOO'] = 'myvalue' - >>> s.get_config_variable('foo') - 'myvalue' - >>> s.set_config_variable('foo', 'othervalue') - >>> s.get_config_variable('foo') - 'othervalue' - - :type logical_name: str - :param logical_name: The logical name of the session variable - you want to set. These are the keys in ``SESSION_VARIABLES``. - - :param value: The value to associate with the config variable. - """ - self._overrides[logical_name] = value - - def clear_config_variable(self, logical_name): - """Remove an override config variable from the session. - - :type logical_name: str - :param logical_name: The name of the parameter to clear the override - value from. - """ - self._overrides.pop(logical_name, None) - - def set_config_provider(self, logical_name, provider): - """Set the provider for a config value. - - This provides control over how a particular configuration value is - loaded. This replaces the provider for ``logical_name`` with the new - ``provider``. 
- - :type logical_name: str - :param logical_name: The name of the config value to change the config - provider for. - - :type provider: :class:`botocore.configprovider.BaseProvider` - :param provider: The new provider that should be responsible for - providing a value for the config named ``logical_name``. - """ - self._mapping[logical_name] = provider - - -class SmartDefaultsConfigStoreFactory: - def __init__(self, default_config_resolver, imds_region_provider): - self._default_config_resolver = default_config_resolver - self._imds_region_provider = imds_region_provider - # Initializing _instance_metadata_region as None so we - # can fetch region in a lazy fashion only when needed. - self._instance_metadata_region = None - - def merge_smart_defaults(self, config_store, mode, region_name): - if mode == 'auto': - mode = self.resolve_auto_mode(region_name) - default_configs = ( - self._default_config_resolver.get_default_config_values(mode) - ) - for config_var in default_configs: - config_value = default_configs[config_var] - method = getattr(self, f'_set_{config_var}', None) - if method: - method(config_store, config_value) - - def resolve_auto_mode(self, region_name): - current_region = None - if os.environ.get('AWS_EXECUTION_ENV'): - default_region = os.environ.get('AWS_DEFAULT_REGION') - current_region = os.environ.get('AWS_REGION', default_region) - if not current_region: - if self._instance_metadata_region: - current_region = self._instance_metadata_region - else: - try: - current_region = self._imds_region_provider.provide() - self._instance_metadata_region = current_region - except Exception: - pass - - if current_region: - if region_name == current_region: - return 'in-region' - else: - return 'cross-region' - return 'standard' - - def _update_provider(self, config_store, variable, value): - original_provider = config_store.get_config_provider(variable) - default_provider = ConstantProvider(value) - if isinstance(original_provider, ChainProvider): - chain_provider_copy = copy.deepcopy(original_provider) - chain_provider_copy.set_default_provider(default_provider) - default_provider = chain_provider_copy - elif isinstance(original_provider, BaseProvider): - default_provider = ChainProvider( - providers=[original_provider, default_provider] - ) - config_store.set_config_provider(variable, default_provider) - - def _update_section_provider( - self, config_store, section_name, variable, value - ): - section_provider_copy = copy.deepcopy( - config_store.get_config_provider(section_name) - ) - section_provider_copy.set_default_provider( - variable, ConstantProvider(value) - ) - config_store.set_config_provider(section_name, section_provider_copy) - - def _set_retryMode(self, config_store, value): - self._update_provider(config_store, 'retry_mode', value) - - def _set_stsRegionalEndpoints(self, config_store, value): - self._update_provider(config_store, 'sts_regional_endpoints', value) - - def _set_s3UsEast1RegionalEndpoints(self, config_store, value): - self._update_section_provider( - config_store, 's3', 'us_east_1_regional_endpoint', value - ) - - def _set_connectTimeoutInMillis(self, config_store, value): - self._update_provider(config_store, 'connect_timeout', value / 1000) - - -class BaseProvider: - """Base class for configuration value providers. - - A configuration provider has some method of providing a configuration - value. 
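# A standalone sketch of the 'auto' defaults-mode decision above: compare the
# client's region with the region the code appears to run in (environment
# hints first, instance metadata as a fallback), else fall back to 'standard'.
# imds_lookup is a hypothetical callable standing in for the IMDS region provider.
def resolve_auto_mode(client_region, environ, imds_lookup=None):
    current = None
    if environ.get('AWS_EXECUTION_ENV'):
        current = environ.get('AWS_REGION', environ.get('AWS_DEFAULT_REGION'))
    if not current and imds_lookup is not None:
        try:
            current = imds_lookup()
        except Exception:
            current = None
    if current:
        return 'in-region' if client_region == current else 'cross-region'
    return 'standard'

env = {'AWS_EXECUTION_ENV': 'AWS_Lambda', 'AWS_REGION': 'us-west-2'}
assert resolve_auto_mode('us-west-2', env) == 'in-region'
assert resolve_auto_mode('eu-west-1', env) == 'cross-region'
assert resolve_auto_mode('eu-west-1', {}) == 'standard'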
- """ - - def provide(self): - """Provide a config value.""" - raise NotImplementedError('provide') - - -class ChainProvider(BaseProvider): - """This provider wraps one or more other providers. - - Each provider in the chain is called, the first one returning a non-None - value is then returned. - """ - - def __init__(self, providers=None, conversion_func=None): - """Initalize a ChainProvider. - - :type providers: list - :param providers: The initial list of providers to check for values - when invoked. - - :type conversion_func: None or callable - :param conversion_func: If this value is None then it has no affect on - the return type. Otherwise, it is treated as a function that will - transform provided value. - """ - if providers is None: - providers = [] - self._providers = providers - self._conversion_func = conversion_func - - def __deepcopy__(self, memo): - return ChainProvider( - copy.deepcopy(self._providers, memo), self._conversion_func - ) - - def provide(self): - """Provide the value from the first provider to return non-None. - - Each provider in the chain has its provide method called. The first - one in the chain to return a non-None value is the returned from the - ChainProvider. When no non-None value is found, None is returned. - """ - for provider in self._providers: - value = provider.provide() - if value is not None: - return self._convert_type(value) - return None - - def set_default_provider(self, default_provider): - if self._providers and isinstance( - self._providers[-1], ConstantProvider - ): - self._providers[-1] = default_provider - else: - self._providers.append(default_provider) - - num_of_constants = sum( - isinstance(provider, ConstantProvider) - for provider in self._providers - ) - if num_of_constants > 1: - logger.info( - 'ChainProvider object contains multiple ' - 'instances of ConstantProvider objects' - ) - - def _convert_type(self, value): - if self._conversion_func is not None: - return self._conversion_func(value) - return value - - def __repr__(self): - return '[{}]'.format(', '.join([str(p) for p in self._providers])) - - -class InstanceVarProvider(BaseProvider): - """This class loads config values from the session instance vars.""" - - def __init__(self, instance_var, session): - """Initialize InstanceVarProvider. - - :type instance_var: str - :param instance_var: The instance variable to load from the session. - - :type session: :class:`botocore.session.Session` - :param session: The botocore session to get the loaded configuration - file variables from. - """ - self._instance_var = instance_var - self._session = session - - def __deepcopy__(self, memo): - return InstanceVarProvider( - copy.deepcopy(self._instance_var, memo), self._session - ) - - def provide(self): - """Provide a config value from the session instance vars.""" - instance_vars = self._session.instance_variables() - value = instance_vars.get(self._instance_var) - return value - - def __repr__(self): - return f'InstanceVarProvider(instance_var={self._instance_var}, session={self._session})' - - -class ScopedConfigProvider(BaseProvider): - def __init__(self, config_var_name, session): - """Initialize ScopedConfigProvider. - - :type config_var_name: str or tuple - :param config_var_name: The name of the config variable to load from - the configuration file. If the value is a tuple, it must only - consist of two items, where the first item represents the section - and the second item represents the config var name in the section. 
- - :type session: :class:`botocore.session.Session` - :param session: The botocore session to get the loaded configuration - file variables from. - """ - self._config_var_name = config_var_name - self._session = session - - def __deepcopy__(self, memo): - return ScopedConfigProvider( - copy.deepcopy(self._config_var_name, memo), self._session - ) - - def provide(self): - """Provide a value from a config file property.""" - scoped_config = self._session.get_scoped_config() - if isinstance(self._config_var_name, tuple): - section_config = scoped_config.get(self._config_var_name[0]) - if not isinstance(section_config, dict): - return None - return section_config.get(self._config_var_name[1]) - return scoped_config.get(self._config_var_name) - - def __repr__(self): - return f'ScopedConfigProvider(config_var_name={self._config_var_name}, session={self._session})' - - -class EnvironmentProvider(BaseProvider): - """This class loads config values from environment variables.""" - - def __init__(self, name, env): - """Initialize with the keys in the dictionary to check. - - :type name: str - :param name: The key with that name will be loaded and returned. - - :type env: dict - :param env: Environment variables dictionary to get variables from. - """ - self._name = name - self._env = env - - def __deepcopy__(self, memo): - return EnvironmentProvider( - copy.deepcopy(self._name, memo), copy.deepcopy(self._env, memo) - ) - - def provide(self): - """Provide a config value from a source dictionary.""" - if self._name in self._env: - return self._env[self._name] - return None - - def __repr__(self): - return f'EnvironmentProvider(name={self._name}, env={self._env})' - - -class SectionConfigProvider(BaseProvider): - """Provides a dictionary from a section in the scoped config - - This is useful for retrieving scoped config variables (i.e. s3) that have - their own set of config variables and resolving logic. 
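# A standalone sketch of the two lookup shapes a scoped-config lookup supports:
# a plain key in the profile's scoped config, or a (section, key) tuple that
# digs into a nested section such as the profile's "s3" block.
def scoped_lookup(scoped_config, name):
    if isinstance(name, tuple):
        section = scoped_config.get(name[0])
        if not isinstance(section, dict):
            return None
        return section.get(name[1])
    return scoped_config.get(name)

profile = {'region': 'us-east-1', 's3': {'addressing_style': 'path'}}
assert scoped_lookup(profile, 'region') == 'us-east-1'
assert scoped_lookup(profile, ('s3', 'addressing_style')) == 'path'
assert scoped_lookup(profile, ('s3', 'missing')) is None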
- """ - - def __init__(self, section_name, session, override_providers=None): - self._section_name = section_name - self._session = session - self._scoped_config_provider = ScopedConfigProvider( - self._section_name, self._session - ) - self._override_providers = override_providers - if self._override_providers is None: - self._override_providers = {} - - def __deepcopy__(self, memo): - return SectionConfigProvider( - copy.deepcopy(self._section_name, memo), - self._session, - copy.deepcopy(self._override_providers, memo), - ) - - def provide(self): - section_config = self._scoped_config_provider.provide() - if section_config and not isinstance(section_config, dict): - logger.debug( - "The %s config key is not a dictionary type, " - "ignoring its value of: %s", - self._section_name, - section_config, - ) - return None - for section_config_var, provider in self._override_providers.items(): - provider_val = provider.provide() - if provider_val is not None: - if section_config is None: - section_config = {} - section_config[section_config_var] = provider_val - return section_config - - def set_default_provider(self, key, default_provider): - provider = self._override_providers.get(key) - if isinstance(provider, ChainProvider): - provider.set_default_provider(default_provider) - return - elif isinstance(provider, BaseProvider): - default_provider = ChainProvider( - providers=[provider, default_provider] - ) - self._override_providers[key] = default_provider - - def __repr__(self): - return ( - f'SectionConfigProvider(section_name={self._section_name}, ' - f'session={self._session}, ' - f'override_providers={self._override_providers})' - ) - - -class ConstantProvider(BaseProvider): - """This provider provides a constant value.""" - - def __init__(self, value): - self._value = value - - def __deepcopy__(self, memo): - return ConstantProvider(copy.deepcopy(self._value, memo)) - - def provide(self): - """Provide the constant value given during initialization.""" - return self._value - - def __repr__(self): - return f'ConstantProvider(value={self._value})' - - -class ConfiguredEndpointProvider(BaseProvider): - """Lookup an endpoint URL from environment variable or shared config file. - - NOTE: This class is considered private and is subject to abrupt breaking - changes or removal without prior announcement. Please do not use it - directly. - """ - - _ENDPOINT_URL_LOOKUP_ORDER = [ - 'environment_service', - 'environment_global', - 'config_service', - 'config_global', - ] - - def __init__( - self, - full_config, - scoped_config, - client_name, - environ=None, - ): - """Initialize a ConfiguredEndpointProviderChain. - - :type full_config: dict - :param full_config: This is the dict representing the full - configuration file. - - :type scoped_config: dict - :param scoped_config: This is the dict representing the configuration - for the current profile for the session. - - :type client_name: str - :param client_name: The name used to instantiate a client using - botocore.session.Session.create_client. - - :type environ: dict - :param environ: A mapping to use for environment variables. If this - is not provided it will default to use os.environ. - """ - self._full_config = full_config - self._scoped_config = scoped_config - self._client_name = client_name - self._transformed_service_id = self._get_snake_case_service_id( - self._client_name - ) - if environ is None: - environ = os.environ - self._environ = environ - - def provide(self): - """Lookup the configured endpoint URL. - - The order is: - - 1. 
The value provided by a service-specific environment variable. - 2. The value provided by the global endpoint environment variable - (AWS_ENDPOINT_URL). - 3. The value provided by a service-specific parameter from a services - definition section in the shared configuration file. - 4. The value provided by the global parameter from a services - definition section in the shared configuration file. - """ - for location in self._ENDPOINT_URL_LOOKUP_ORDER: - logger.debug( - 'Looking for endpoint for %s via: %s', - self._client_name, - location, - ) - - endpoint_url = getattr(self, f'_get_endpoint_url_{location}')() - - if endpoint_url: - logger.info( - 'Found endpoint for %s via: %s.', - self._client_name, - location, - ) - return endpoint_url - - logger.debug('No configured endpoint found.') - return None - - def _get_snake_case_service_id(self, client_name): - # Get the service ID without loading the service data file, accounting - # for any aliases and standardizing the names with hyphens. - client_name = utils.SERVICE_NAME_ALIASES.get(client_name, client_name) - hyphenized_service_id = ( - utils.CLIENT_NAME_TO_HYPHENIZED_SERVICE_ID_OVERRIDES.get( - client_name, client_name - ) - ) - return hyphenized_service_id.replace('-', '_') - - def _get_service_env_var_name(self): - transformed_service_id_env = self._transformed_service_id.upper() - return f'AWS_ENDPOINT_URL_{transformed_service_id_env}' - - def _get_services_config(self): - if 'services' not in self._scoped_config: - return {} - - section_name = self._scoped_config['services'] - services_section = self._full_config.get('services', {}).get( - section_name - ) - - if not services_section: - error_msg = ( - f'The profile is configured to use the services ' - f'section but the "{section_name}" services ' - f'configuration does not exist.' - ) - raise InvalidConfigError(error_msg=error_msg) - - return services_section - - def _get_endpoint_url_config_service(self): - snakecase_service_id = self._transformed_service_id.lower() - return ( - self._get_services_config() - .get(snakecase_service_id, {}) - .get('endpoint_url') - ) - - def _get_endpoint_url_config_global(self): - return self._scoped_config.get('endpoint_url') - - def _get_endpoint_url_environment_service(self): - return EnvironmentProvider( - name=self._get_service_env_var_name(), env=self._environ - ).provide() - - def _get_endpoint_url_environment_global(self): - return EnvironmentProvider( - name='AWS_ENDPOINT_URL', env=self._environ - ).provide() diff --git a/venv/Lib/site-packages/botocore/context.py b/venv/Lib/site-packages/botocore/context.py deleted file mode 100644 index 8034747..0000000 --- a/venv/Lib/site-packages/botocore/context.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -""" -NOTE: All classes and functions in this module are considered private and are -subject to abrupt breaking changes. Please do not use them directly. 
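# A standalone sketch of the configured-endpoint lookup order listed above:
# service-specific env var, then the global AWS_ENDPOINT_URL, then the
# service's entry in a 'services' config section, then a global endpoint_url.
# Service-id normalization is simplified here; the data is illustrative only.
def configured_endpoint(service_id, environ, services_config, scoped_config):
    return (environ.get(f'AWS_ENDPOINT_URL_{service_id.upper()}')
            or environ.get('AWS_ENDPOINT_URL')
            or services_config.get(service_id.lower(), {}).get('endpoint_url')
            or scoped_config.get('endpoint_url'))

env = {'AWS_ENDPOINT_URL': 'http://localhost:4566'}
services = {'dynamodb': {'endpoint_url': 'http://localhost:8000'}}
assert configured_endpoint('dynamodb', env, services, {}) == 'http://localhost:4566'
assert configured_endpoint('dynamodb', {}, services, {}) == 'http://localhost:8000'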
-""" - -from contextlib import contextmanager -from contextvars import ContextVar -from copy import deepcopy -from dataclasses import dataclass, field -from functools import wraps - - -@dataclass -class ClientContext: - """ - Encapsulation of objects tracked within the ``_context`` context variable. - - ``features`` is a set responsible for storing features used during - preparation of an AWS request. ``botocore.useragent.register_feature_id`` - is used to add to this set. - """ - - features: set[str] = field(default_factory=set) - - -_context = ContextVar("_context") - - -def get_context(): - """Get the current ``_context`` context variable if set, else None.""" - return _context.get(None) - - -def set_context(ctx): - """Set the current ``_context`` context variable. - - :type ctx: ClientContext - :param ctx: Client context object to set as the current context variable. - - :rtype: contextvars.Token - :returns: Token object used to revert the context variable to what it was - before the corresponding set. - """ - token = _context.set(ctx) - return token - - -def reset_context(token): - """Reset the current ``_context`` context variable. - - :type token: contextvars.Token - :param token: Token object to reset the context variable. - """ - _context.reset(token) - - -@contextmanager -def start_as_current_context(ctx=None): - """ - Context manager that copies the passed or current context object and sets - it as the current context variable. If no context is found, a new - ``ClientContext`` object is created. It mainly ensures the context variable - is reset to the previous value once the executed code returns. - - Example usage: - - def my_feature(): - with start_as_current_context(): - register_feature_id('MY_FEATURE') - pass - - :type ctx: ClientContext - :param ctx: The client context object to set as the new context variable. - If not provided, the current or a new context variable is used. - """ - current = ctx or get_context() - if current is None: - new = ClientContext() - else: - new = deepcopy(current) - token = set_context(new) - try: - yield - finally: - reset_context(token) - - -def with_current_context(hook=None): - """ - Decorator that wraps ``start_as_current_context`` and optionally invokes a - hook within the newly-set context. This is just syntactic sugar to avoid - indenting existing code under the context manager. - - Example usage: - - @with_current_context(partial(register_feature_id, 'MY_FEATURE')) - def my_feature(): - pass - - :type hook: callable - :param hook: A callable that will be invoked within the scope of the - ``start_as_current_context`` context manager. - """ - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - with start_as_current_context(): - if hook: - hook() - return func(*args, **kwargs) - - return wrapper - - return decorator diff --git a/venv/Lib/site-packages/botocore/credentials.py b/venv/Lib/site-packages/botocore/credentials.py deleted file mode 100644 index 571dfea..0000000 --- a/venv/Lib/site-packages/botocore/credentials.py +++ /dev/null @@ -1,2781 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. 
This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import base64 -import datetime -import getpass -import json -import logging -import os -import subprocess -import threading -import time -import uuid -from collections import namedtuple -from copy import deepcopy -from hashlib import sha1, sha256 - -import dateutil.parser -from dateutil.parser import parse -from dateutil.tz import tzlocal, tzutc - -import botocore.compat -import botocore.configloader -from botocore import UNSIGNED -from botocore.compat import ( - EC, - compat_shell_split, - total_seconds, -) -from botocore.config import Config -from botocore.exceptions import ( - ConfigNotFound, - CredentialRetrievalError, - InfiniteLoopConfigError, - InvalidConfigError, - LoginError, - LoginInsufficientPermissions, - LoginRefreshRequired, - LoginTokenLoadError, - MetadataRetrievalError, - MissingDependencyException, - PartialCredentialsError, - RefreshWithMFAUnsupportedError, - UnauthorizedSSOTokenError, - UnknownCredentialError, -) -from botocore.tokens import SSOTokenProvider -from botocore.useragent import register_feature_id, register_feature_ids -from botocore.utils import ( - ArnParser, - ContainerMetadataFetcher, - FileWebIdentityTokenLoader, - InstanceMetadataFetcher, - JSONFileCache, - LoginTokenLoader, - SSOTokenLoader, - create_nested_client, - get_login_token_cache_directory, - parse_key_val_file, - resolve_imds_endpoint_mode, -) - -logger = logging.getLogger(__name__) -ReadOnlyCredentials = namedtuple( - 'ReadOnlyCredentials', - ['access_key', 'secret_key', 'token', 'account_id'], - defaults=(None,), -) - -_DEFAULT_MANDATORY_REFRESH_TIMEOUT = 10 * 60 # 10 min -_DEFAULT_ADVISORY_REFRESH_TIMEOUT = 15 * 60 # 15 min - - -def create_credential_resolver(session, cache=None, region_name=None): - """Create a default credential resolver. - - This creates a pre-configured credential resolver - that includes the default lookup chain for - credentials. 
- - """ - profile_name = session.get_config_variable('profile') or 'default' - metadata_timeout = session.get_config_variable('metadata_service_timeout') - num_attempts = session.get_config_variable('metadata_service_num_attempts') - disable_env_vars = session.instance_variables().get('profile') is not None - - imds_config = { - 'ec2_metadata_service_endpoint': session.get_config_variable( - 'ec2_metadata_service_endpoint' - ), - 'ec2_metadata_service_endpoint_mode': resolve_imds_endpoint_mode( - session - ), - 'ec2_credential_refresh_window': _DEFAULT_ADVISORY_REFRESH_TIMEOUT, - 'ec2_metadata_v1_disabled': session.get_config_variable( - 'ec2_metadata_v1_disabled' - ), - } - - if cache is None: - cache = {} - - env_provider = EnvProvider() - container_provider = ContainerProvider() - instance_metadata_provider = InstanceMetadataProvider( - iam_role_fetcher=InstanceMetadataFetcher( - timeout=metadata_timeout, - num_attempts=num_attempts, - user_agent=session.user_agent(), - config=imds_config, - ) - ) - - profile_provider_builder = ProfileProviderBuilder( - session, cache=cache, region_name=region_name - ) - assume_role_provider = AssumeRoleProvider( - load_config=lambda: session.full_config, - client_creator=_get_client_creator(session, region_name), - cache=cache, - profile_name=profile_name, - credential_sourcer=CanonicalNameCredentialSourcer( - [env_provider, container_provider, instance_metadata_provider] - ), - profile_provider_builder=profile_provider_builder, - ) - - pre_profile = [ - env_provider, - assume_role_provider, - ] - profile_providers = profile_provider_builder.providers( - profile_name=profile_name, - disable_env_vars=disable_env_vars, - ) - post_profile = [ - OriginalEC2Provider(), - BotoProvider(), - container_provider, - instance_metadata_provider, - ] - providers = pre_profile + profile_providers + post_profile - - if disable_env_vars: - # An explicitly provided profile will negate an EnvProvider. - # We will defer to providers that understand the "profile" - # concept to retrieve credentials. - # The one edge case if is all three values are provided via - # env vars: - # export AWS_ACCESS_KEY_ID=foo - # export AWS_SECRET_ACCESS_KEY=bar - # export AWS_PROFILE=baz - # Then, just like our client() calls, the explicit credentials - # will take precedence. - # - # This precedence is enforced by leaving the EnvProvider in the chain. - # This means that the only way a "profile" would win is if the - # EnvProvider does not return credentials, which is what we want - # in this scenario. - providers.remove(env_provider) - logger.debug( - 'Skipping environment variable credential check' - ' because profile name was explicitly set.' - ) - - resolver = CredentialResolver(providers=providers) - return resolver - - -class ProfileProviderBuilder: - """This class handles the creation of profile based providers. - - NOTE: This class is only intended for internal use. - - This class handles the creation and ordering of the various credential - providers that primarly source their configuration from the shared config. - This is needed to enable sharing between the default credential chain and - the source profile chain created by the assume role provider. 
- """ - - def __init__( - self, - session, - cache=None, - region_name=None, - sso_token_cache=None, - login_token_cache=None, - ): - self._session = session - self._cache = cache - self._region_name = region_name - self._sso_token_cache = sso_token_cache - self._login_token_cache = login_token_cache - - def providers(self, profile_name, disable_env_vars=False): - return [ - self._create_web_identity_provider( - profile_name, - disable_env_vars, - ), - self._create_sso_provider(profile_name), - self._create_shared_credential_provider(profile_name), - self._create_login_provider(profile_name), - self._create_process_provider(profile_name), - self._create_config_provider(profile_name), - ] - - def _create_process_provider(self, profile_name): - return ProcessProvider( - profile_name=profile_name, - load_config=lambda: self._session.full_config, - ) - - def _create_shared_credential_provider(self, profile_name): - credential_file = self._session.get_config_variable('credentials_file') - return SharedCredentialProvider( - profile_name=profile_name, - creds_filename=credential_file, - ) - - def _create_config_provider(self, profile_name): - config_file = self._session.get_config_variable('config_file') - return ConfigProvider( - profile_name=profile_name, - config_filename=config_file, - ) - - def _create_web_identity_provider(self, profile_name, disable_env_vars): - return AssumeRoleWithWebIdentityProvider( - load_config=lambda: self._session.full_config, - client_creator=_get_client_creator( - self._session, self._region_name - ), - cache=self._cache, - profile_name=profile_name, - disable_env_vars=disable_env_vars, - ) - - def _create_sso_provider(self, profile_name): - return SSOProvider( - load_config=lambda: self._session.full_config, - client_creator=self._session.create_client, - profile_name=profile_name, - cache=self._cache, - token_cache=self._sso_token_cache, - token_provider=SSOTokenProvider( - self._session, - cache=self._sso_token_cache, - profile_name=profile_name, - ), - ) - - def _create_login_provider(self, profile_name): - return LoginProvider( - load_config=lambda: self._session.full_config, - client_creator=self._session.create_client, - profile_name=profile_name, - token_cache=self._login_token_cache, - ) - - -def get_credentials(session): - resolver = create_credential_resolver(session) - return resolver.load_credentials() - - -def _local_now(): - return datetime.datetime.now(tzlocal()) - - -def _parse_if_needed(value): - if isinstance(value, datetime.datetime): - return value - return parse(value) - - -def _serialize_if_needed(value, iso=False): - if isinstance(value, datetime.datetime): - if iso: - return value.isoformat() - return value.strftime('%Y-%m-%dT%H:%M:%S%Z') - return value - - -def _get_client_creator(session, region_name): - def client_creator(service_name, **kwargs): - create_client_kwargs = {'region_name': region_name} - create_client_kwargs.update(**kwargs) - return create_nested_client( - session, service_name, **create_client_kwargs - ) - - return client_creator - - -def create_assume_role_refresher(client, params): - def refresh(): - response = client.assume_role(**params) - credentials = response['Credentials'] - # We need to normalize the credential names to - # the values expected by the refresh creds. 
- return { - 'access_key': credentials['AccessKeyId'], - 'secret_key': credentials['SecretAccessKey'], - 'token': credentials['SessionToken'], - 'expiry_time': _serialize_if_needed(credentials['Expiration']), - } - - return refresh - - -def create_mfa_serial_refresher(actual_refresh): - class _Refresher: - def __init__(self, refresh): - self._refresh = refresh - self._has_been_called = False - - def __call__(self): - if self._has_been_called: - # We can explore an option in the future to support - # reprompting for MFA, but for now we just error out - # when the temp creds expire. - raise RefreshWithMFAUnsupportedError() - self._has_been_called = True - return self._refresh() - - return _Refresher(actual_refresh) - - -class Credentials: - """ - Holds the credentials needed to authenticate requests. - - :param str access_key: The access key part of the credentials. - :param str secret_key: The secret key part of the credentials. - :param str token: The security token, valid only for session credentials. - :param str method: A string which identifies where the credentials - were found. - :param str account_id: (optional) An account ID associated with the credentials. - """ - - def __init__( - self, access_key, secret_key, token=None, method=None, account_id=None - ): - self.access_key = access_key - self.secret_key = secret_key - self.token = token - - if method is None: - method = 'explicit' - self.method = method - self.account_id = account_id - - self._normalize() - - def _normalize(self): - # Keys would sometimes (accidentally) contain non-ascii characters. - # It would cause a confusing UnicodeDecodeError in Python 2. - # We explicitly convert them into unicode to avoid such error. - # - # Eventually the service will decide whether to accept the credential. - # This also complies with the behavior in Python 3. - self.access_key = botocore.compat.ensure_unicode(self.access_key) - self.secret_key = botocore.compat.ensure_unicode(self.secret_key) - - def get_frozen_credentials(self): - return ReadOnlyCredentials( - self.access_key, self.secret_key, self.token, self.account_id - ) - - def get_deferred_property(self, property_name): - def get_property(): - return getattr(self, property_name, None) - - return get_property - - -class RefreshableCredentials(Credentials): - """ - Holds the credentials needed to authenticate requests. In addition, it - knows how to refresh itself. - - :param str access_key: The access key part of the credentials. - :param str secret_key: The secret key part of the credentials. - :param str token: The security token, valid only for session credentials. - :param datetime expiry_time: The expiration time of the credentials. - :param function refresh_using: Callback function to refresh the credentials. - :param str method: A string which identifies where the credentials - were found. - :param function time_fetcher: Callback function to retrieve current time. - """ - - # The time at which we'll attempt to refresh, but not - # block if someone else is refreshing. - _advisory_refresh_timeout = _DEFAULT_ADVISORY_REFRESH_TIMEOUT - # The time at which all threads will block waiting for - # refreshed credentials. 
- _mandatory_refresh_timeout = _DEFAULT_MANDATORY_REFRESH_TIMEOUT - - def __init__( - self, - access_key, - secret_key, - token, - expiry_time, - refresh_using, - method, - time_fetcher=_local_now, - advisory_timeout=None, - mandatory_timeout=None, - account_id=None, - ): - self._refresh_using = refresh_using - self._access_key = access_key - self._secret_key = secret_key - self._token = token - self._account_id = account_id - self._expiry_time = expiry_time - self._time_fetcher = time_fetcher - self._refresh_lock = threading.Lock() - self.method = method - self._frozen_credentials = ReadOnlyCredentials( - access_key, secret_key, token, account_id - ) - self._normalize() - if advisory_timeout is not None: - self._advisory_refresh_timeout = advisory_timeout - if mandatory_timeout is not None: - self._mandatory_refresh_timeout = mandatory_timeout - - def _normalize(self): - self._access_key = botocore.compat.ensure_unicode(self._access_key) - self._secret_key = botocore.compat.ensure_unicode(self._secret_key) - - @classmethod - def create_from_metadata( - cls, - metadata, - refresh_using, - method, - advisory_timeout=None, - mandatory_timeout=None, - ): - kwargs = {} - if advisory_timeout is not None: - kwargs['advisory_timeout'] = advisory_timeout - if mandatory_timeout is not None: - kwargs['mandatory_timeout'] = mandatory_timeout - - instance = cls( - access_key=metadata['access_key'], - secret_key=metadata['secret_key'], - token=metadata['token'], - expiry_time=cls._expiry_datetime(metadata['expiry_time']), - method=method, - refresh_using=refresh_using, - account_id=metadata.get('account_id'), - **kwargs, - ) - return instance - - @property - def access_key(self): - """Warning: Using this property can lead to race conditions if you - access another property subsequently along the refresh boundary. - Please use get_frozen_credentials instead. - """ - self._refresh() - return self._access_key - - @access_key.setter - def access_key(self, value): - self._access_key = value - - @property - def secret_key(self): - """Warning: Using this property can lead to race conditions if you - access another property subsequently along the refresh boundary. - Please use get_frozen_credentials instead. - """ - self._refresh() - return self._secret_key - - @secret_key.setter - def secret_key(self, value): - self._secret_key = value - - @property - def token(self): - """Warning: Using this property can lead to race conditions if you - access another property subsequently along the refresh boundary. - Please use get_frozen_credentials instead. - """ - self._refresh() - return self._token - - @token.setter - def token(self, value): - self._token = value - - @property - def account_id(self): - """Warning: Using this property can lead to race conditions if you - access another property subsequently along the refresh boundary. - Please use get_frozen_credentials instead. - """ - self._refresh() - return self._account_id - - @account_id.setter - def account_id(self, value): - self._account_id = value - - def _seconds_remaining(self): - delta = self._expiry_time - self._time_fetcher() - return total_seconds(delta) - - def refresh_needed(self, refresh_in=None): - """Check if a refresh is needed. - - A refresh is needed if the expiry time associated - with the temporary credentials is less than the - provided ``refresh_in``. If ``time_delta`` is not - provided, ``self.advisory_refresh_needed`` will be used. 
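# A standalone sketch of the refresh test described above: credentials are due
# for refresh once the time remaining drops below the window (advisory ~15 min,
# mandatory ~10 min by default); credentials without an expiry never refresh.
from datetime import datetime, timedelta, timezone

def needs_refresh(expiry_time, refresh_in_seconds, now=None):
    if expiry_time is None:
        return False
    now = now or datetime.now(timezone.utc)
    return (expiry_time - now).total_seconds() < refresh_in_seconds

now = datetime.now(timezone.utc)
assert needs_refresh(now + timedelta(minutes=10), 15 * 60, now=now) is True
assert needs_refresh(now + timedelta(minutes=20), 15 * 60, now=now) is False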
- - For example, if your temporary credentials expire - in 10 minutes and the provided ``refresh_in`` is - ``15 * 60``, then this function will return ``True``. - - :type refresh_in: int - :param refresh_in: The number of seconds before the - credentials expire in which refresh attempts should - be made. - - :return: True if refresh needed, False otherwise. - - """ - if self._expiry_time is None: - # No expiration, so assume we don't need to refresh. - return False - - if refresh_in is None: - refresh_in = self._advisory_refresh_timeout - # The credentials should be refreshed if they're going to expire - # in less than 5 minutes. - if self._seconds_remaining() >= refresh_in: - # There's enough time left. Don't refresh. - return False - logger.debug("Credentials need to be refreshed.") - return True - - def _is_expired(self): - # Checks if the current credentials are expired. - return self.refresh_needed(refresh_in=0) - - def _refresh(self): - # In the common case where we don't need a refresh, we - # can immediately exit and not require acquiring the - # refresh lock. - if not self.refresh_needed(self._advisory_refresh_timeout): - return - - # acquire() doesn't accept kwargs, but False is indicating - # that we should not block if we can't acquire the lock. - # If we aren't able to acquire the lock, we'll trigger - # the else clause. - if self._refresh_lock.acquire(False): - try: - if not self.refresh_needed(self._advisory_refresh_timeout): - return - is_mandatory_refresh = self.refresh_needed( - self._mandatory_refresh_timeout - ) - self._protected_refresh(is_mandatory=is_mandatory_refresh) - return - finally: - self._refresh_lock.release() - elif self.refresh_needed(self._mandatory_refresh_timeout): - # If we're within the mandatory refresh window, - # we must block until we get refreshed credentials. - with self._refresh_lock: - if not self.refresh_needed(self._mandatory_refresh_timeout): - return - self._protected_refresh(is_mandatory=True) - - def _protected_refresh(self, is_mandatory): - # precondition: this method should only be called if you've acquired - # the self._refresh_lock. - try: - metadata = self._refresh_using() - except Exception: - period_name = 'mandatory' if is_mandatory else 'advisory' - logger.warning( - "Refreshing temporary credentials failed " - "during %s refresh period.", - period_name, - exc_info=True, - ) - if is_mandatory: - # If this is a mandatory refresh, then - # all errors that occur when we attempt to refresh - # credentials are propagated back to the user. - raise - # Otherwise we'll just return. - # The end result will be that we'll use the current - # set of temporary credentials we have. - return - self._set_from_data(metadata) - self._frozen_credentials = ReadOnlyCredentials( - self._access_key, self._secret_key, self._token, self._account_id - ) - if self._is_expired(): - # We successfully refreshed credentials but for whatever - # reason, our refreshing function returned credentials - # that are still expired. In this scenario, the only - # thing we can do is let the user know and raise - # an exception. - msg = ( - "Credentials were refreshed, but the " - "refreshed credentials are still expired." 
- ) - logger.warning(msg) - raise RuntimeError(msg) - - @staticmethod - def _expiry_datetime(time_str): - return parse(time_str) - - def _set_from_data(self, data): - expected_keys = ['access_key', 'secret_key', 'token', 'expiry_time'] - if not data: - missing_keys = expected_keys - else: - missing_keys = [k for k in expected_keys if k not in data] - - if missing_keys: - message = "Credential refresh failed, response did not contain: %s" - raise CredentialRetrievalError( - provider=self.method, - error_msg=message % ', '.join(missing_keys), - ) - - self.access_key = data['access_key'] - self.secret_key = data['secret_key'] - self.token = data['token'] - self._expiry_time = parse(data['expiry_time']) - self.account_id = data.get('account_id') - logger.debug( - "Retrieved credentials will expire at: %s", self._expiry_time - ) - self._normalize() - - def get_frozen_credentials(self): - """Return immutable credentials. - - The ``access_key``, ``secret_key``, and ``token`` properties - on this class will always check and refresh credentials if - needed before returning the particular credentials. - - This has an edge case where you can get inconsistent - credentials. Imagine this: - - # Current creds are "t1" - tmp.access_key ---> expired? no, so return t1.access_key - # ---- time is now expired, creds need refreshing to "t2" ---- - tmp.secret_key ---> expired? yes, refresh and return t2.secret_key - - This means we're using the access key from t1 with the secret key - from t2. To fix this issue, you can request a frozen credential object - which is guaranteed not to change. - - The frozen credentials returned from this method should be used - immediately and then discarded. The typical usage pattern would - be:: - - creds = RefreshableCredentials(...) - some_code = SomeSignerObject() - # I'm about to sign the request. - # The frozen credentials are only used for the - # duration of generate_presigned_url and will be - # immediately thrown away. - request = some_code.sign_some_request( - with_credentials=creds.get_frozen_credentials()) - print("Signed request:", request) - - """ - self._refresh() - return self._frozen_credentials - - -class DeferredRefreshableCredentials(RefreshableCredentials): - """Refreshable credentials that don't require initial credentials. - - refresh_using will be called upon first access. - """ - - def __init__(self, refresh_using, method, time_fetcher=_local_now): - self._refresh_using = refresh_using - self._access_key = None - self._secret_key = None - self._token = None - self._account_id = None - self._expiry_time = None - self._time_fetcher = time_fetcher - self._refresh_lock = threading.Lock() - self.method = method - self._frozen_credentials = None - - def refresh_needed(self, refresh_in=None): - if self._frozen_credentials is None: - return True - return super().refresh_needed(refresh_in) - - -class CachedCredentialFetcher: - DEFAULT_EXPIRY_WINDOW_SECONDS = 60 * 15 - - def __init__(self, cache=None, expiry_window_seconds=None): - if cache is None: - cache = {} - self._cache = cache - self._cache_key = self._create_cache_key() - if expiry_window_seconds is None: - expiry_window_seconds = self.DEFAULT_EXPIRY_WINDOW_SECONDS - self._expiry_window_seconds = expiry_window_seconds - self.feature_ids = set() - - def _create_cache_key(self): - raise NotImplementedError('_create_cache_key()') - - def _make_file_safe(self, filename): - # Replace :, path sep, and / to make it the string filename safe. 
- filename = filename.replace(':', '_').replace(os.sep, '_') - return filename.replace('/', '_') - - def _get_credentials(self): - raise NotImplementedError('_get_credentials()') - - def fetch_credentials(self): - return self._get_cached_credentials() - - def _get_cached_credentials(self): - """Get up-to-date credentials. - - This will check the cache for up-to-date credentials, calling assume - role if none are available. - """ - response = self._load_from_cache() - if response is None: - response = self._get_credentials() - self._write_to_cache(response) - else: - logger.debug("Credentials for role retrieved from cache.") - - creds = response['Credentials'] - expiration = _serialize_if_needed(creds['Expiration'], iso=True) - credentials = { - 'access_key': creds['AccessKeyId'], - 'secret_key': creds['SecretAccessKey'], - 'token': creds['SessionToken'], - 'expiry_time': expiration, - 'account_id': creds.get('AccountId'), - } - - return credentials - - def _load_from_cache(self): - if self._cache_key in self._cache: - creds = deepcopy(self._cache[self._cache_key]) - if not self._is_expired(creds): - return creds - else: - logger.debug( - "Credentials were found in cache, but they are expired." - ) - return None - - def _write_to_cache(self, response): - self._cache[self._cache_key] = deepcopy(response) - - def _is_expired(self, credentials): - """Check if credentials are expired.""" - end_time = _parse_if_needed(credentials['Credentials']['Expiration']) - seconds = total_seconds(end_time - _local_now()) - return seconds < self._expiry_window_seconds - - -class BaseAssumeRoleCredentialFetcher(CachedCredentialFetcher): - def __init__( - self, - client_creator, - role_arn, - extra_args=None, - cache=None, - expiry_window_seconds=None, - ): - self._client_creator = client_creator - self._role_arn = role_arn - - if extra_args is None: - self._assume_kwargs = {} - else: - self._assume_kwargs = deepcopy(extra_args) - self._assume_kwargs['RoleArn'] = self._role_arn - - self._role_session_name = self._assume_kwargs.get('RoleSessionName') - self._using_default_session_name = False - if not self._role_session_name: - self._generate_assume_role_name() - - super().__init__(cache, expiry_window_seconds) - - def _generate_assume_role_name(self): - self._role_session_name = f'botocore-session-{int(time.time())}' - self._assume_kwargs['RoleSessionName'] = self._role_session_name - self._using_default_session_name = True - - def _create_cache_key(self): - """Create a predictable cache key for the current configuration. - - The cache key is intended to be compatible with file names. - """ - args = deepcopy(self._assume_kwargs) - - # The role session name gets randomly generated, so we don't want it - # in the hash. - if self._using_default_session_name: - del args['RoleSessionName'] - - if 'Policy' in args: - # To have a predictable hash, the keys of the policy must be - # sorted, so we have to load it here to make sure it gets sorted - # later on. 
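# A standalone sketch of deriving a predictable, file-name-safe cache key from
# the assume-role arguments: dump them as sorted JSON, hash the result, and
# strip characters that are awkward in file names. The ARN below is a placeholder.
import hashlib
import json
import os

def cache_key(assume_role_kwargs):
    canonical = json.dumps(assume_role_kwargs, sort_keys=True)
    digest = hashlib.sha1(canonical.encode('utf-8')).hexdigest()
    return digest.replace(':', '_').replace(os.sep, '_').replace('/', '_')

key_a = cache_key({'RoleArn': 'arn:aws:iam::123456789012:role/demo', 'DurationSeconds': 3600})
key_b = cache_key({'DurationSeconds': 3600, 'RoleArn': 'arn:aws:iam::123456789012:role/demo'})
assert key_a == key_b  # argument order does not change the key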
- args['Policy'] = json.loads(args['Policy']) - - args = json.dumps(args, sort_keys=True) - argument_hash = sha1(args.encode('utf-8')).hexdigest() - return self._make_file_safe(argument_hash) - - def _add_account_id_to_response(self, response): - role_arn = response.get('AssumedRoleUser', {}).get('Arn') - if ArnParser.is_arn(role_arn): - arn_parser = ArnParser() - account_id = arn_parser.parse_arn(role_arn)['account'] - response['Credentials']['AccountId'] = account_id - else: - logger.debug("Unable to extract account ID from Arn: %s", role_arn) - - -class AssumeRoleCredentialFetcher(BaseAssumeRoleCredentialFetcher): - def __init__( - self, - client_creator, - source_credentials, - role_arn, - extra_args=None, - mfa_prompter=None, - cache=None, - expiry_window_seconds=None, - ): - """ - :type client_creator: callable - :param client_creator: A callable that creates a client taking - arguments like ``Session.create_client``. - - :type source_credentials: Credentials - :param source_credentials: The credentials to use to create the - client for the call to AssumeRole. - - :type role_arn: str - :param role_arn: The ARN of the role to be assumed. - - :type extra_args: dict - :param extra_args: Any additional arguments to add to the assume - role request using the format of the botocore operation. - Possible keys include, but may not be limited to, - DurationSeconds, Policy, SerialNumber, ExternalId and - RoleSessionName. - - :type mfa_prompter: callable - :param mfa_prompter: A callable that returns input provided by the - user (i.e raw_input, getpass.getpass, etc.). - - :type cache: dict - :param cache: An object that supports ``__getitem__``, - ``__setitem__``, and ``__contains__``. An example of this is - the ``JSONFileCache`` class in aws-cli. - - :type expiry_window_seconds: int - :param expiry_window_seconds: The amount of time, in seconds, - """ - self._source_credentials = source_credentials - self._mfa_prompter = mfa_prompter - if self._mfa_prompter is None: - self._mfa_prompter = getpass.getpass - - super().__init__( - client_creator, - role_arn, - extra_args=extra_args, - cache=cache, - expiry_window_seconds=expiry_window_seconds, - ) - - def _get_credentials(self): - """Get credentials by calling assume role.""" - register_feature_ids(self.feature_ids) - kwargs = self._assume_role_kwargs() - client = self._create_client() - response = client.assume_role(**kwargs) - self._add_account_id_to_response(response) - return response - - def _assume_role_kwargs(self): - """Get the arguments for assume role based on current configuration.""" - assume_role_kwargs = deepcopy(self._assume_kwargs) - - mfa_serial = assume_role_kwargs.get('SerialNumber') - - if mfa_serial is not None: - prompt = f'Enter MFA code for {mfa_serial}: ' - token_code = self._mfa_prompter(prompt) - assume_role_kwargs['TokenCode'] = token_code - - duration_seconds = assume_role_kwargs.get('DurationSeconds') - - if duration_seconds is not None: - assume_role_kwargs['DurationSeconds'] = duration_seconds - - return assume_role_kwargs - - def _create_client(self): - """Create an STS client using the source credentials.""" - frozen_credentials = self._source_credentials.get_frozen_credentials() - return self._client_creator( - 'sts', - aws_access_key_id=frozen_credentials.access_key, - aws_secret_access_key=frozen_credentials.secret_key, - aws_session_token=frozen_credentials.token, - ) - - -class AssumeRoleWithWebIdentityCredentialFetcher( - BaseAssumeRoleCredentialFetcher -): - def __init__( - self, - client_creator, - 
web_identity_token_loader, - role_arn, - extra_args=None, - cache=None, - expiry_window_seconds=None, - ): - """ - :type client_creator: callable - :param client_creator: A callable that creates a client taking - arguments like ``Session.create_client``. - - :type web_identity_token_loader: callable - :param web_identity_token_loader: A callable that takes no arguments - and returns a web identity token str. - - :type role_arn: str - :param role_arn: The ARN of the role to be assumed. - - :type extra_args: dict - :param extra_args: Any additional arguments to add to the assume - role request using the format of the botocore operation. - Possible keys include, but may not be limited to, - DurationSeconds, Policy, SerialNumber, ExternalId and - RoleSessionName. - - :type cache: dict - :param cache: An object that supports ``__getitem__``, - ``__setitem__``, and ``__contains__``. An example of this is - the ``JSONFileCache`` class in aws-cli. - - :type expiry_window_seconds: int - :param expiry_window_seconds: The amount of time, in seconds, - """ - self._web_identity_token_loader = web_identity_token_loader - - super().__init__( - client_creator, - role_arn, - extra_args=extra_args, - cache=cache, - expiry_window_seconds=expiry_window_seconds, - ) - - def _get_credentials(self): - """Get credentials by calling assume role.""" - register_feature_ids(self.feature_ids) - kwargs = self._assume_role_kwargs() - # Assume role with web identity does not require credentials other than - # the token, explicitly configure the client to not sign requests. - config = Config(signature_version=UNSIGNED) - client = self._client_creator('sts', config=config) - response = client.assume_role_with_web_identity(**kwargs) - self._add_account_id_to_response(response) - return response - - def _assume_role_kwargs(self): - """Get the arguments for assume role based on current configuration.""" - assume_role_kwargs = deepcopy(self._assume_kwargs) - identity_token = self._web_identity_token_loader() - assume_role_kwargs['WebIdentityToken'] = identity_token - - return assume_role_kwargs - - -class CredentialProvider: - # A short name to identify the provider within botocore. - METHOD = None - - # A name to identify the provider for use in cross-sdk features like - # assume role's `credential_source` configuration option. These names - # are to be treated in a case-insensitive way. NOTE: any providers not - # implemented in botocore MUST prefix their canonical names with - # 'custom' or we DO NOT guarantee that it will work with any features - # that this provides. - CANONICAL_NAME = None - - def __init__(self, session=None): - self.session = session - - def load(self): - """ - Loads the credentials from their source & sets them on the object. - - Subclasses should implement this method (by reading from disk, the - environment, the network or wherever), returning ``True`` if they were - found & loaded. - - If not found, this method should return ``False``, indicating that the - ``CredentialResolver`` should fall back to the next available method. - - The default implementation does nothing, assuming the user has set the - ``access_key/secret_key/token`` themselves. 
- - :returns: Whether credentials were found & set - :rtype: Credentials - """ - return True - - def _extract_creds_from_mapping(self, mapping, *key_names): - found = [] - for key_name in key_names: - try: - found.append(mapping[key_name]) - except KeyError: - raise PartialCredentialsError( - provider=self.METHOD, cred_var=key_name - ) - return found - - -class ProcessProvider(CredentialProvider): - METHOD = 'custom-process' - - def __init__(self, profile_name, load_config, popen=subprocess.Popen): - self._profile_name = profile_name - self._load_config = load_config - self._loaded_config = None - self._popen = popen - - def load(self): - credential_process = self._credential_process - if credential_process is None: - return - - register_feature_id('CREDENTIALS_PROFILE_PROCESS') - creds_dict = self._retrieve_credentials_using(credential_process) - register_feature_id('CREDENTIALS_PROCESS') - if creds_dict.get('expiry_time') is not None: - return RefreshableCredentials.create_from_metadata( - creds_dict, - lambda: self._retrieve_credentials_using(credential_process), - self.METHOD, - ) - - return Credentials( - access_key=creds_dict['access_key'], - secret_key=creds_dict['secret_key'], - token=creds_dict.get('token'), - method=self.METHOD, - account_id=creds_dict.get('account_id'), - ) - - def _retrieve_credentials_using(self, credential_process): - # We're not using shell=True, so we need to pass the - # command and all arguments as a list. - process_list = compat_shell_split(credential_process) - p = self._popen( - process_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - stdout, stderr = p.communicate() - if p.returncode != 0: - raise CredentialRetrievalError( - provider=self.METHOD, error_msg=stderr.decode('utf-8') - ) - parsed = botocore.compat.json.loads(stdout.decode('utf-8')) - version = parsed.get('Version', '') - if version != 1: - raise CredentialRetrievalError( - provider=self.METHOD, - error_msg=( - f"Unsupported version '{version}' for credential process " - f"provider, supported versions: 1" - ), - ) - try: - return { - 'access_key': parsed['AccessKeyId'], - 'secret_key': parsed['SecretAccessKey'], - 'token': parsed.get('SessionToken'), - 'expiry_time': parsed.get('Expiration'), - 'account_id': self._get_account_id(parsed), - } - except KeyError as e: - raise CredentialRetrievalError( - provider=self.METHOD, - error_msg=f"Missing required key in response: {e}", - ) - - @property - def _credential_process(self): - return self.profile_config.get('credential_process') - - @property - def profile_config(self): - if self._loaded_config is None: - self._loaded_config = self._load_config() - profile_config = self._loaded_config.get('profiles', {}).get( - self._profile_name, {} - ) - return profile_config - - def _get_account_id(self, parsed): - account_id = parsed.get('AccountId') - return account_id or self.profile_config.get('aws_account_id') - - -class InstanceMetadataProvider(CredentialProvider): - METHOD = 'iam-role' - CANONICAL_NAME = 'Ec2InstanceMetadata' - - def __init__(self, iam_role_fetcher): - self._role_fetcher = iam_role_fetcher - - def load(self): - fetcher = self._role_fetcher - # We do the first request, to see if we get useful data back. - # If not, we'll pass & move on to whatever's next in the credential - # chain. 
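# A standalone sketch of the partial-credentials guard used by the mapping-based
# providers: every expected key must be present, otherwise the provider fails
# loudly instead of returning half a credential set. The real code raises
# PartialCredentialsError; a ValueError stands in for it here.
def extract_creds(mapping, *key_names):
    missing = [k for k in key_names if k not in mapping]
    if missing:
        raise ValueError(f'partial credentials, missing: {", ".join(missing)}')
    return [mapping[k] for k in key_names]

try:
    extract_creds({'aws_access_key_id': 'AKIDEXAMPLE'},
                  'aws_access_key_id', 'aws_secret_access_key')
except ValueError as exc:
    assert 'aws_secret_access_key' in str(exc)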
- metadata = fetcher.retrieve_iam_role_credentials() - if not metadata: - return None - register_feature_id('CREDENTIALS_IMDS') - logger.info( - 'Found credentials from IAM Role: %s', metadata['role_name'] - ) - # We manually set the data here, since we already made the request & - # have it. When the expiry is hit, the credentials will auto-refresh - # themselves. - creds = RefreshableCredentials.create_from_metadata( - metadata, - method=self.METHOD, - refresh_using=fetcher.retrieve_iam_role_credentials, - ) - return creds - - -class EnvProvider(CredentialProvider): - METHOD = 'env' - CANONICAL_NAME = 'Environment' - ACCESS_KEY = 'AWS_ACCESS_KEY_ID' - SECRET_KEY = 'AWS_SECRET_ACCESS_KEY' - # The token can come from either of these env var. - # AWS_SESSION_TOKEN is what other AWS SDKs have standardized on. - TOKENS = ['AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN'] - EXPIRY_TIME = 'AWS_CREDENTIAL_EXPIRATION' - ACCOUNT_ID = 'AWS_ACCOUNT_ID' - - def __init__(self, environ=None, mapping=None): - """ - - :param environ: The environment variables (defaults to - ``os.environ`` if no value is provided). - :param mapping: An optional mapping of variable names to - environment variable names. Use this if you want to - change the mapping of access_key->AWS_ACCESS_KEY_ID, etc. - The dict can have up to 5 keys: - * ``access_key`` - * ``secret_key`` - * ``token`` - * ``expiry_time`` - * ``account_id`` - """ - if environ is None: - environ = os.environ - self.environ = environ - self._mapping = self._build_mapping(mapping) - - def _build_mapping(self, mapping): - # Mapping of variable name to env var name. - var_mapping = {} - if mapping is None: - # Use the class var default. - var_mapping['access_key'] = self.ACCESS_KEY - var_mapping['secret_key'] = self.SECRET_KEY - var_mapping['token'] = self.TOKENS - var_mapping['expiry_time'] = self.EXPIRY_TIME - var_mapping['account_id'] = self.ACCOUNT_ID - else: - var_mapping['access_key'] = mapping.get( - 'access_key', self.ACCESS_KEY - ) - var_mapping['secret_key'] = mapping.get( - 'secret_key', self.SECRET_KEY - ) - var_mapping['token'] = mapping.get('token', self.TOKENS) - if not isinstance(var_mapping['token'], list): - var_mapping['token'] = [var_mapping['token']] - var_mapping['expiry_time'] = mapping.get( - 'expiry_time', self.EXPIRY_TIME - ) - var_mapping['account_id'] = mapping.get( - 'account_id', self.ACCOUNT_ID - ) - return var_mapping - - def load(self): - """ - Search for credentials in explicit environment variables. 
- """ - - access_key = self.environ.get(self._mapping['access_key'], '') - - if access_key: - logger.info('Found credentials in environment variables.') - fetcher = self._create_credentials_fetcher() - credentials = fetcher(require_expiry=False) - register_feature_id('CREDENTIALS_ENV_VARS') - - expiry_time = credentials['expiry_time'] - if expiry_time is not None: - expiry_time = parse(expiry_time) - return RefreshableCredentials( - credentials['access_key'], - credentials['secret_key'], - credentials['token'], - expiry_time, - refresh_using=fetcher, - method=self.METHOD, - account_id=credentials['account_id'], - ) - - return Credentials( - credentials['access_key'], - credentials['secret_key'], - credentials['token'], - method=self.METHOD, - account_id=credentials['account_id'], - ) - else: - return None - - def _create_credentials_fetcher(self): - mapping = self._mapping - method = self.METHOD - environ = self.environ - - def fetch_credentials(require_expiry=True): - credentials = {} - - access_key = environ.get(mapping['access_key'], '') - if not access_key: - raise PartialCredentialsError( - provider=method, cred_var=mapping['access_key'] - ) - credentials['access_key'] = access_key - - secret_key = environ.get(mapping['secret_key'], '') - if not secret_key: - raise PartialCredentialsError( - provider=method, cred_var=mapping['secret_key'] - ) - credentials['secret_key'] = secret_key - - credentials['token'] = None - for token_env_var in mapping['token']: - token = environ.get(token_env_var, '') - if token: - credentials['token'] = token - break - - credentials['expiry_time'] = None - expiry_time = environ.get(mapping['expiry_time'], '') - if expiry_time: - credentials['expiry_time'] = expiry_time - if require_expiry and not expiry_time: - raise PartialCredentialsError( - provider=method, cred_var=mapping['expiry_time'] - ) - - credentials['account_id'] = None - account_id = environ.get(mapping['account_id'], '') - if account_id: - credentials['account_id'] = account_id - - return credentials - - return fetch_credentials - - -class OriginalEC2Provider(CredentialProvider): - METHOD = 'ec2-credentials-file' - CANONICAL_NAME = 'Ec2Config' - - CRED_FILE_ENV = 'AWS_CREDENTIAL_FILE' - ACCESS_KEY = 'AWSAccessKeyId' - SECRET_KEY = 'AWSSecretKey' - - def __init__(self, environ=None, parser=None): - if environ is None: - environ = os.environ - if parser is None: - parser = parse_key_val_file - self._environ = environ - self._parser = parser - - def load(self): - """ - Search for a credential file used by original EC2 CLI tools. - """ - if 'AWS_CREDENTIAL_FILE' in self._environ: - full_path = os.path.expanduser( - self._environ['AWS_CREDENTIAL_FILE'] - ) - creds = self._parser(full_path) - if self.ACCESS_KEY in creds: - logger.info('Found credentials in AWS_CREDENTIAL_FILE.') - access_key = creds[self.ACCESS_KEY] - secret_key = creds[self.SECRET_KEY] - # EC2 creds file doesn't support session tokens. - return Credentials(access_key, secret_key, method=self.METHOD) - else: - return None - - -class SharedCredentialProvider(CredentialProvider): - METHOD = 'shared-credentials-file' - CANONICAL_NAME = 'SharedCredentials' - - ACCESS_KEY = 'aws_access_key_id' - SECRET_KEY = 'aws_secret_access_key' - # Same deal as the EnvProvider above. Botocore originally supported - # aws_security_token, but the SDKs are standardizing on aws_session_token - # so we support both. 
- TOKENS = ['aws_security_token', 'aws_session_token'] - ACCOUNT_ID = 'aws_account_id' - - def __init__(self, creds_filename, profile_name=None, ini_parser=None): - self._creds_filename = creds_filename - if profile_name is None: - profile_name = 'default' - self._profile_name = profile_name - if ini_parser is None: - ini_parser = botocore.configloader.raw_config_parse - self._ini_parser = ini_parser - - def load(self): - try: - available_creds = self._ini_parser(self._creds_filename) - except ConfigNotFound: - return None - if self._profile_name in available_creds: - config = available_creds[self._profile_name] - if self.ACCESS_KEY in config: - logger.info( - "Found credentials in shared credentials file: %s", - self._creds_filename, - ) - access_key, secret_key = self._extract_creds_from_mapping( - config, self.ACCESS_KEY, self.SECRET_KEY - ) - token = self._get_session_token(config) - account_id = self._get_account_id(config) - register_feature_id('CREDENTIALS_PROFILE') - return Credentials( - access_key, - secret_key, - token, - method=self.METHOD, - account_id=account_id, - ) - - def _get_session_token(self, config): - for token_envvar in self.TOKENS: - if token_envvar in config: - return config[token_envvar] - - def _get_account_id(self, config): - return config.get(self.ACCOUNT_ID) - - -class ConfigProvider(CredentialProvider): - """INI based config provider with profile sections.""" - - METHOD = 'config-file' - CANONICAL_NAME = 'SharedConfig' - - ACCESS_KEY = 'aws_access_key_id' - SECRET_KEY = 'aws_secret_access_key' - # Same deal as the EnvProvider above. Botocore originally supported - # aws_security_token, but the SDKs are standardizing on aws_session_token - # so we support both. - TOKENS = ['aws_security_token', 'aws_session_token'] - ACCOUNT_ID = 'aws_account_id' - - def __init__(self, config_filename, profile_name, config_parser=None): - """ - - :param config_filename: The session configuration scoped to the current - profile. This is available via ``session.config``. - :param profile_name: The name of the current profile. - :param config_parser: A config parser callable. - - """ - self._config_filename = config_filename - self._profile_name = profile_name - if config_parser is None: - config_parser = botocore.configloader.load_config - self._config_parser = config_parser - - def load(self): - """ - If there is are credentials in the configuration associated with - the session, use those. 
- """ - try: - full_config = self._config_parser(self._config_filename) - except ConfigNotFound: - return None - if self._profile_name in full_config['profiles']: - profile_config = full_config['profiles'][self._profile_name] - if self.ACCESS_KEY in profile_config: - logger.info( - "Credentials found in config file: %s", - self._config_filename, - ) - access_key, secret_key = self._extract_creds_from_mapping( - profile_config, self.ACCESS_KEY, self.SECRET_KEY - ) - token = self._get_session_token(profile_config) - account_id = self._get_account_id(profile_config) - register_feature_id('CREDENTIALS_PROFILE') - return Credentials( - access_key, - secret_key, - token, - method=self.METHOD, - account_id=account_id, - ) - else: - return None - - def _get_session_token(self, profile_config): - for token_name in self.TOKENS: - if token_name in profile_config: - return profile_config[token_name] - - def _get_account_id(self, config): - return config.get(self.ACCOUNT_ID) - - -class BotoProvider(CredentialProvider): - METHOD = 'boto-config' - CANONICAL_NAME = 'Boto2Config' - - BOTO_CONFIG_ENV = 'BOTO_CONFIG' - DEFAULT_CONFIG_FILENAMES = ['/etc/boto.cfg', '~/.boto'] - ACCESS_KEY = 'aws_access_key_id' - SECRET_KEY = 'aws_secret_access_key' - - def __init__(self, environ=None, ini_parser=None): - if environ is None: - environ = os.environ - if ini_parser is None: - ini_parser = botocore.configloader.raw_config_parse - self._environ = environ - self._ini_parser = ini_parser - - def load(self): - """ - Look for credentials in boto config file. - """ - if self.BOTO_CONFIG_ENV in self._environ: - potential_locations = [self._environ[self.BOTO_CONFIG_ENV]] - else: - potential_locations = self.DEFAULT_CONFIG_FILENAMES - for filename in potential_locations: - try: - config = self._ini_parser(filename) - except ConfigNotFound: - # Move on to the next potential config file name. - continue - if 'Credentials' in config: - credentials = config['Credentials'] - if self.ACCESS_KEY in credentials: - logger.info( - "Found credentials in boto config file: %s", filename - ) - access_key, secret_key = self._extract_creds_from_mapping( - credentials, self.ACCESS_KEY, self.SECRET_KEY - ) - register_feature_id('CREDENTIALS_BOTO2_CONFIG_FILE') - return Credentials( - access_key, secret_key, method=self.METHOD - ) - - -class AssumeRoleProvider(CredentialProvider): - METHOD = 'assume-role' - # The AssumeRole provider is logically part of the SharedConfig and - # SharedCredentials providers. Since the purpose of the canonical name - # is to provide cross-sdk compatibility, calling code will need to be - # aware that either of those providers should be tied to the AssumeRole - # provider as much as possible. - CANONICAL_NAME = None - ROLE_CONFIG_VAR = 'role_arn' - WEB_IDENTITY_TOKE_FILE_VAR = 'web_identity_token_file' - # Credentials are considered expired (and will be refreshed) once the total - # remaining time left until the credentials expires is less than the - # EXPIRY_WINDOW. 
- EXPIRY_WINDOW_SECONDS = 60 * 15 - NAMED_PROVIDER_FEATURE_MAP = { - 'Ec2InstanceMetadata': 'CREDENTIALS_IMDS', - 'Environment': 'CREDENTIALS_ENV_VARS', - 'EcsContainer': 'CREDENTIALS_HTTP', - } - - def __init__( - self, - load_config, - client_creator, - cache, - profile_name, - prompter=getpass.getpass, - credential_sourcer=None, - profile_provider_builder=None, - ): - """ - :type load_config: callable - :param load_config: A function that accepts no arguments, and - when called, will return the full configuration dictionary - for the session (``session.full_config``). - - :type client_creator: callable - :param client_creator: A factory function that will create - a client when called. Has the same interface as - ``botocore.session.Session.create_client``. - - :type cache: dict - :param cache: An object that supports ``__getitem__``, - ``__setitem__``, and ``__contains__``. An example - of this is the ``JSONFileCache`` class in the CLI. - - :type profile_name: str - :param profile_name: The name of the profile. - - :type prompter: callable - :param prompter: A callable that returns input provided - by the user (i.e raw_input, getpass.getpass, etc.). - - :type credential_sourcer: CanonicalNameCredentialSourcer - :param credential_sourcer: A credential provider that takes a - configuration, which is used to provide the source credentials - for the STS call. - """ - #: The cache used to first check for assumed credentials. - #: This is checked before making the AssumeRole API - #: calls and can be useful if you have short lived - #: scripts and you'd like to avoid calling AssumeRole - #: until the credentials are expired. - self.cache = cache - self._load_config = load_config - # client_creator is a callable that creates function. - # It's basically session.create_client - self._client_creator = client_creator - self._profile_name = profile_name - self._prompter = prompter - # The _loaded_config attribute will be populated from the - # load_config() function once the configuration is actually - # loaded. The reason we go through all this instead of just - # requiring that the loaded_config be passed to us is to that - # we can defer configuration loaded until we actually try - # to load credentials (as opposed to when the object is - # instantiated). - self._loaded_config = {} - self._credential_sourcer = credential_sourcer - self._profile_provider_builder = profile_provider_builder - self._visited_profiles = [self._profile_name] - self._feature_ids = set() - - def load(self): - self._loaded_config = self._load_config() - profiles = self._loaded_config.get('profiles', {}) - profile = profiles.get(self._profile_name, {}) - if self._has_assume_role_config_vars(profile): - return self._load_creds_via_assume_role(self._profile_name) - - def _has_assume_role_config_vars(self, profile): - return ( - self.ROLE_CONFIG_VAR in profile - and - # We need to ensure this provider doesn't look at a profile when - # the profile has configuration for web identity. Simply relying on - # the order in the credential chain is insufficient as it doesn't - # prevent the case when we're doing an assume role chain. 
- self.WEB_IDENTITY_TOKE_FILE_VAR not in profile - ) - - def _load_creds_via_assume_role(self, profile_name): - role_config = self._get_role_config(profile_name) - source_credentials = self._resolve_source_credentials( - role_config, profile_name - ) - - extra_args = {} - role_session_name = role_config.get('role_session_name') - if role_session_name is not None: - extra_args['RoleSessionName'] = role_session_name - - external_id = role_config.get('external_id') - if external_id is not None: - extra_args['ExternalId'] = external_id - - mfa_serial = role_config.get('mfa_serial') - if mfa_serial is not None: - extra_args['SerialNumber'] = mfa_serial - - duration_seconds = role_config.get('duration_seconds') - if duration_seconds is not None: - extra_args['DurationSeconds'] = duration_seconds - - fetcher = AssumeRoleCredentialFetcher( - client_creator=self._client_creator, - source_credentials=source_credentials, - role_arn=role_config['role_arn'], - extra_args=extra_args, - mfa_prompter=self._prompter, - cache=self.cache, - ) - fetcher.feature_ids = self._feature_ids.copy() - refresher = fetcher.fetch_credentials - if mfa_serial is not None: - refresher = create_mfa_serial_refresher(refresher) - - self._feature_ids.add('CREDENTIALS_STS_ASSUME_ROLE') - register_feature_ids(self._feature_ids) - # The initial credentials are empty and the expiration time is set - # to now so that we can delay the call to assume role until it is - # strictly needed. - return DeferredRefreshableCredentials( - method=self.METHOD, - refresh_using=refresher, - time_fetcher=_local_now, - ) - - def _get_role_config(self, profile_name): - """Retrieves and validates the role configuration for the profile.""" - profiles = self._loaded_config.get('profiles', {}) - - profile = profiles[profile_name] - source_profile = profile.get('source_profile') - role_arn = profile['role_arn'] - credential_source = profile.get('credential_source') - mfa_serial = profile.get('mfa_serial') - external_id = profile.get('external_id') - role_session_name = profile.get('role_session_name') - duration_seconds = profile.get('duration_seconds') - - role_config = { - 'role_arn': role_arn, - 'external_id': external_id, - 'mfa_serial': mfa_serial, - 'role_session_name': role_session_name, - 'source_profile': source_profile, - 'credential_source': credential_source, - } - - if duration_seconds is not None: - try: - role_config['duration_seconds'] = int(duration_seconds) - except ValueError: - pass - - # Either the credential source or the source profile must be - # specified, but not both. - if credential_source is not None and source_profile is not None: - raise InvalidConfigError( - error_msg=( - f'The profile "{profile_name}" contains both ' - 'source_profile and credential_source.' - ) - ) - elif credential_source is None and source_profile is None: - raise PartialCredentialsError( - provider=self.METHOD, - cred_var='source_profile or credential_source', - ) - elif credential_source is not None: - self._validate_credential_source(profile_name, credential_source) - else: - self._validate_source_profile(profile_name, source_profile) - - return role_config - - def _validate_credential_source(self, parent_profile, credential_source): - if self._credential_sourcer is None: - raise InvalidConfigError( - error_msg=( - f"The credential_source \"{credential_source}\" is specified " - f"in profile \"{parent_profile}\", " - f"but no source provider was configured." 
- ) - ) - if not self._credential_sourcer.is_supported(credential_source): - raise InvalidConfigError( - error_msg=( - f"The credential source \"{credential_source}\" referenced " - f"in profile \"{parent_profile}\" is not valid." - ) - ) - - def _source_profile_has_credentials(self, profile): - return any( - [ - self._has_static_credentials(profile), - self._has_assume_role_config_vars(profile), - ] - ) - - def _validate_source_profile( - self, parent_profile_name, source_profile_name - ): - profiles = self._loaded_config.get('profiles', {}) - if source_profile_name not in profiles: - raise InvalidConfigError( - error_msg=( - f"The source_profile \"{source_profile_name}\" referenced in " - f"the profile \"{parent_profile_name}\" does not exist." - ) - ) - - source_profile = profiles[source_profile_name] - - # Make sure we aren't going into an infinite loop. If we haven't - # visited the profile yet, we're good. - if source_profile_name not in self._visited_profiles: - return - - # If we have visited the profile and the profile isn't simply - # referencing itself, that's an infinite loop. - if source_profile_name != parent_profile_name: - raise InfiniteLoopConfigError( - source_profile=source_profile_name, - visited_profiles=self._visited_profiles, - ) - - # A profile is allowed to reference itself so that it can source - # static credentials and have configuration all in the same - # profile. This will only ever work for the top level assume - # role because the static credentials will otherwise take - # precedence. - if not self._has_static_credentials(source_profile): - raise InfiniteLoopConfigError( - source_profile=source_profile_name, - visited_profiles=self._visited_profiles, - ) - - def _has_static_credentials(self, profile): - static_keys = ['aws_secret_access_key', 'aws_access_key_id'] - return any(static_key in profile for static_key in static_keys) - - def _resolve_source_credentials(self, role_config, profile_name): - credential_source = role_config.get('credential_source') - if credential_source is not None: - self._feature_ids.add('CREDENTIALS_PROFILE_NAMED_PROVIDER') - return self._resolve_credentials_from_source( - credential_source, profile_name - ) - - source_profile = role_config['source_profile'] - self._visited_profiles.append(source_profile) - self._feature_ids.add('CREDENTIALS_PROFILE_SOURCE_PROFILE') - return self._resolve_credentials_from_profile(source_profile) - - def _resolve_credentials_from_profile(self, profile_name): - profiles = self._loaded_config.get('profiles', {}) - profile = profiles[profile_name] - self._feature_ids.add('CREDENTIALS_PROFILE') - if ( - self._has_static_credentials(profile) - and not self._profile_provider_builder - ): - # This is only here for backwards compatibility. If this provider - # isn't given a profile provider builder we still want to be able - # to handle the basic static credential case as we would before the - # profile provider builder parameter was added. - return self._resolve_static_credentials_from_profile(profile) - elif self._has_static_credentials( - profile - ) or not self._has_assume_role_config_vars(profile): - profile_providers = self._profile_provider_builder.providers( - profile_name=profile_name, - disable_env_vars=True, - ) - profile_chain = CredentialResolver(profile_providers) - credentials = profile_chain.load_credentials() - if credentials is None: - error_message = ( - 'The source profile "%s" must have credentials.' 
- ) - raise InvalidConfigError( - error_msg=error_message % profile_name, - ) - return credentials - - return self._load_creds_via_assume_role(profile_name) - - def _resolve_static_credentials_from_profile(self, profile): - try: - return Credentials( - access_key=profile['aws_access_key_id'], - secret_key=profile['aws_secret_access_key'], - token=profile.get('aws_session_token'), - ) - except KeyError as e: - raise PartialCredentialsError( - provider=self.METHOD, cred_var=str(e) - ) - - def _resolve_credentials_from_source( - self, credential_source, profile_name - ): - credentials = self._credential_sourcer.source_credentials( - credential_source - ) - if credentials is None: - raise CredentialRetrievalError( - provider=credential_source, - error_msg=( - 'No credentials found in credential_source referenced ' - f'in profile {profile_name}' - ), - ) - named_provider_feature_id = self.NAMED_PROVIDER_FEATURE_MAP.get( - credential_source - ) - if named_provider_feature_id: - self._feature_ids.add(named_provider_feature_id) - return credentials - - -class AssumeRoleWithWebIdentityProvider(CredentialProvider): - METHOD = 'assume-role-with-web-identity' - CANONICAL_NAME = None - _CONFIG_TO_ENV_VAR = { - 'web_identity_token_file': 'AWS_WEB_IDENTITY_TOKEN_FILE', - 'role_session_name': 'AWS_ROLE_SESSION_NAME', - 'role_arn': 'AWS_ROLE_ARN', - } - - def __init__( - self, - load_config, - client_creator, - profile_name, - cache=None, - disable_env_vars=False, - token_loader_cls=None, - ): - self.cache = cache - self._load_config = load_config - self._client_creator = client_creator - self._profile_name = profile_name - self._profile_config = None - self._disable_env_vars = disable_env_vars - if token_loader_cls is None: - token_loader_cls = FileWebIdentityTokenLoader - self._token_loader_cls = token_loader_cls - self._feature_ids = set() - - def load(self): - return self._assume_role_with_web_identity() - - def _get_profile_config(self, key): - if self._profile_config is None: - loaded_config = self._load_config() - profiles = loaded_config.get('profiles', {}) - self._profile_config = profiles.get(self._profile_name, {}) - return self._profile_config.get(key) - - def _get_env_config(self, key): - if self._disable_env_vars: - return None - env_key = self._CONFIG_TO_ENV_VAR.get(key) - if env_key and env_key in os.environ: - return os.environ[env_key] - return None - - def _get_config(self, key): - env_value = self._get_env_config(key) - if env_value is not None: - self._feature_ids.add('CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN') - return env_value - - config_value = self._get_profile_config(key) - if config_value is not None: - self._feature_ids.add('CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN') - return config_value - - return None - - def _assume_role_with_web_identity(self): - token_path = self._get_config('web_identity_token_file') - if not token_path: - return None - token_loader = self._token_loader_cls(token_path) - - role_arn = self._get_config('role_arn') - if not role_arn: - error_msg = ( - 'The provided profile or the current environment is ' - 'configured to assume role with web identity but has no ' - 'role ARN configured. Ensure that the profile has the role_arn' - 'configuration set or the AWS_ROLE_ARN env var is set.' 
- ) - raise InvalidConfigError(error_msg=error_msg) - - extra_args = {} - role_session_name = self._get_config('role_session_name') - if role_session_name is not None: - extra_args['RoleSessionName'] = role_session_name - - fetcher = AssumeRoleWithWebIdentityCredentialFetcher( - client_creator=self._client_creator, - web_identity_token_loader=token_loader, - role_arn=role_arn, - extra_args=extra_args, - cache=self.cache, - ) - fetcher.feature_ids = self._feature_ids.copy() - - self._feature_ids.add('CREDENTIALS_STS_ASSUME_ROLE_WEB_ID') - register_feature_ids(self._feature_ids) - # The initial credentials are empty and the expiration time is set - # to now so that we can delay the call to assume role until it is - # strictly needed. - return DeferredRefreshableCredentials( - method=self.METHOD, - refresh_using=fetcher.fetch_credentials, - ) - - -class CanonicalNameCredentialSourcer: - def __init__(self, providers): - self._providers = providers - - def is_supported(self, source_name): - """Validates a given source name. - - :type source_name: str - :param source_name: The value of credential_source in the config - file. This is the canonical name of the credential provider. - - :rtype: bool - :returns: True if the credential provider is supported, - False otherwise. - """ - return source_name in [p.CANONICAL_NAME for p in self._providers] - - def source_credentials(self, source_name): - """Loads source credentials based on the provided configuration. - - :type source_name: str - :param source_name: The value of credential_source in the config - file. This is the canonical name of the credential provider. - - :rtype: Credentials - """ - source = self._get_provider(source_name) - if isinstance(source, CredentialResolver): - return source.load_credentials() - return source.load() - - def _get_provider(self, canonical_name): - """Return a credential provider by its canonical name. - - :type canonical_name: str - :param canonical_name: The canonical name of the provider. - - :raises UnknownCredentialError: Raised if no - credential provider by the provided name - is found. - """ - provider = self._get_provider_by_canonical_name(canonical_name) - - # The AssumeRole provider should really be part of the SharedConfig - # provider rather than being its own thing, but it is not. It is - # effectively part of both the SharedConfig provider and the - # SharedCredentials provider now due to the way it behaves. - # Therefore if we want either of those providers we should return - # the AssumeRole provider with it. - if canonical_name.lower() in ['sharedconfig', 'sharedcredentials']: - assume_role_provider = self._get_provider_by_method('assume-role') - if assume_role_provider is not None: - # The SharedConfig or SharedCredentials provider may not be - # present if it was removed for some reason, but the - # AssumeRole provider could still be present. In that case, - # return the assume role provider by itself. - if provider is None: - return assume_role_provider - - # If both are present, return them both as a - # CredentialResolver so that calling code can treat them as - # a single entity. - return CredentialResolver([assume_role_provider, provider]) - - if provider is None: - raise UnknownCredentialError(name=canonical_name) - - return provider - - def _get_provider_by_canonical_name(self, canonical_name): - """Return a credential provider by its canonical name. - - This function is strict, it does not attempt to address - compatibility issues. 
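A minimal sketch of the environment-variable route into the AssumeRoleWithWebIdentityProvider defined earlier in this hunk; the token path and role ARN below are hypothetical. When these variables are absent (or env vars are disabled), the provider falls back to the equivalent profile keys (web_identity_token_file, role_arn, role_session_name).

import os

# Hypothetical values; these map to the provider's _CONFIG_TO_ENV_VAR table.
os.environ['AWS_WEB_IDENTITY_TOKEN_FILE'] = '/var/run/secrets/eks/token'
os.environ['AWS_ROLE_ARN'] = 'arn:aws:iam::111122223333:role/example-role'
os.environ['AWS_ROLE_SESSION_NAME'] = 'example-session'  # optional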
- """ - for provider in self._providers: - name = provider.CANONICAL_NAME - # Canonical names are case-insensitive - if name and name.lower() == canonical_name.lower(): - return provider - - def _get_provider_by_method(self, method): - """Return a credential provider by its METHOD name.""" - for provider in self._providers: - if provider.METHOD == method: - return provider - - -class ContainerProvider(CredentialProvider): - METHOD = 'container-role' - CANONICAL_NAME = 'EcsContainer' - ENV_VAR = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI' - ENV_VAR_FULL = 'AWS_CONTAINER_CREDENTIALS_FULL_URI' - ENV_VAR_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN' - ENV_VAR_AUTH_TOKEN_FILE = 'AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE' - - def __init__(self, environ=None, fetcher=None): - if environ is None: - environ = os.environ - if fetcher is None: - fetcher = ContainerMetadataFetcher() - self._environ = environ - self._fetcher = fetcher - - def load(self): - # This cred provider is only triggered if the self.ENV_VAR is set, - # which only happens if you opt into this feature. - if self.ENV_VAR in self._environ or self.ENV_VAR_FULL in self._environ: - return self._retrieve_or_fail() - - def _retrieve_or_fail(self): - if self._provided_relative_uri(): - full_uri = self._fetcher.full_url(self._environ[self.ENV_VAR]) - else: - full_uri = self._environ[self.ENV_VAR_FULL] - fetcher = self._create_fetcher(full_uri) - creds = fetcher() - return RefreshableCredentials( - access_key=creds['access_key'], - secret_key=creds['secret_key'], - token=creds['token'], - method=self.METHOD, - expiry_time=_parse_if_needed(creds['expiry_time']), - refresh_using=fetcher, - account_id=creds.get('account_id'), - ) - - def _build_headers(self): - auth_token = None - if self.ENV_VAR_AUTH_TOKEN_FILE in self._environ: - auth_token_file_path = self._environ[self.ENV_VAR_AUTH_TOKEN_FILE] - with open(auth_token_file_path) as token_file: - auth_token = token_file.read() - elif self.ENV_VAR_AUTH_TOKEN in self._environ: - auth_token = self._environ[self.ENV_VAR_AUTH_TOKEN] - if auth_token is not None: - self._validate_auth_token(auth_token) - return {'Authorization': auth_token} - - def _validate_auth_token(self, auth_token): - if "\r" in auth_token or "\n" in auth_token: - raise ValueError("Auth token value is not a legal header value") - - def _create_fetcher(self, full_uri, *args, **kwargs): - def fetch_creds(): - try: - headers = self._build_headers() - response = self._fetcher.retrieve_full_uri( - full_uri, headers=headers - ) - register_feature_id('CREDENTIALS_HTTP') - except MetadataRetrievalError as e: - logger.debug( - "Error retrieving container metadata: %s", e, exc_info=True - ) - raise CredentialRetrievalError( - provider=self.METHOD, error_msg=str(e) - ) - return { - 'access_key': response['AccessKeyId'], - 'secret_key': response['SecretAccessKey'], - 'token': response['Token'], - 'expiry_time': response['Expiration'], - 'account_id': response.get('AccountId'), - } - - return fetch_creds - - def _provided_relative_uri(self): - return self.ENV_VAR in self._environ - - -class CredentialResolver: - def __init__(self, providers): - """ - - :param providers: A list of ``CredentialProvider`` instances. - - """ - self.providers = providers - - def insert_before(self, name, credential_provider): - """ - Inserts a new instance of ``CredentialProvider`` into the chain that - will be tried before an existing one. - - :param name: The short name of the credentials you'd like to insert the - new credentials before. (ex. 
``env`` or ``config``). Existing names - & ordering can be discovered via ``self.available_methods``. - :type name: string - - :param cred_instance: An instance of the new ``Credentials`` object - you'd like to add to the chain. - :type cred_instance: A subclass of ``Credentials`` - """ - try: - offset = [p.METHOD for p in self.providers].index(name) - except ValueError: - raise UnknownCredentialError(name=name) - self.providers.insert(offset, credential_provider) - - def insert_after(self, name, credential_provider): - """ - Inserts a new type of ``Credentials`` instance into the chain that will - be tried after an existing one. - - :param name: The short name of the credentials you'd like to insert the - new credentials after. (ex. ``env`` or ``config``). Existing names - & ordering can be discovered via ``self.available_methods``. - :type name: string - - :param cred_instance: An instance of the new ``Credentials`` object - you'd like to add to the chain. - :type cred_instance: A subclass of ``Credentials`` - """ - offset = self._get_provider_offset(name) - self.providers.insert(offset + 1, credential_provider) - - def remove(self, name): - """ - Removes a given ``Credentials`` instance from the chain. - - :param name: The short name of the credentials instance to remove. - :type name: string - """ - available_methods = [p.METHOD for p in self.providers] - if name not in available_methods: - # It's not present. Fail silently. - return - - offset = available_methods.index(name) - self.providers.pop(offset) - - def get_provider(self, name): - """Return a credential provider by name. - - :type name: str - :param name: The name of the provider. - - :raises UnknownCredentialError: Raised if no - credential provider by the provided name - is found. - """ - return self.providers[self._get_provider_offset(name)] - - def _get_provider_offset(self, name): - try: - return [p.METHOD for p in self.providers].index(name) - except ValueError: - raise UnknownCredentialError(name=name) - - def load_credentials(self): - """ - Goes through the credentials chain, returning the first ``Credentials`` - that could be loaded. - """ - # First provider to return a non-None response wins. - for provider in self.providers: - logger.debug("Looking for credentials via: %s", provider.METHOD) - creds = provider.load() - if creds is not None: - return creds - - # If we got here, no credentials could be found. - # This feels like it should be an exception, but historically, ``None`` - # is returned. - # - # +1 - # -js - return None - - -class SSOCredentialFetcher(CachedCredentialFetcher): - _UTC_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' - - def __init__( - self, - start_url, - sso_region, - role_name, - account_id, - client_creator, - token_loader=None, - cache=None, - expiry_window_seconds=None, - token_provider=None, - sso_session_name=None, - time_fetcher=_local_now, - ): - self._client_creator = client_creator - self._sso_region = sso_region - self._role_name = role_name - self._account_id = account_id - self._start_url = start_url - self._token_loader = token_loader - self._token_provider = token_provider - self._sso_session_name = sso_session_name - self._time_fetcher = time_fetcher - super().__init__(cache, expiry_window_seconds) - - def _create_cache_key(self): - """Create a predictable cache key for the current configuration. - - The cache key is intended to be compatible with file names. 
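A short usage sketch of the CredentialResolver insertion API documented above, assuming botocore is importable; the provider class, method name, and key values are made up for illustration.

from botocore.credentials import (
    CredentialProvider,
    CredentialResolver,
    Credentials,
    EnvProvider,
)

class StaticProvider(CredentialProvider):
    # Hypothetical custom provider for illustration only.
    METHOD = 'custom-static'

    def load(self):
        return Credentials('AKIAEXAMPLE', 'examplesecret', method=self.METHOD)

resolver = CredentialResolver(providers=[EnvProvider()])
resolver.insert_before('env', StaticProvider())  # tried before the env provider
creds = resolver.load_credentials()              # first non-None result wins
print(creds.method)                              # 'custom-static'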
- """ - args = { - 'roleName': self._role_name, - 'accountId': self._account_id, - } - if self._sso_session_name: - args['sessionName'] = self._sso_session_name - else: - args['startUrl'] = self._start_url - # NOTE: It would be good to hoist this cache key construction logic - # into the CachedCredentialFetcher class as we should be consistent. - # Unfortunately, the current assume role fetchers that sub class don't - # pass separators resulting in non-minified JSON. In the long term, - # all fetchers should use the below caching scheme. - args = json.dumps(args, sort_keys=True, separators=(',', ':')) - argument_hash = sha1(args.encode('utf-8')).hexdigest() - return self._make_file_safe(argument_hash) - - def _parse_timestamp(self, timestamp_ms): - # fromtimestamp expects seconds so: milliseconds / 1000 = seconds - timestamp_seconds = timestamp_ms / 1000.0 - timestamp = datetime.datetime.fromtimestamp(timestamp_seconds, tzutc()) - return timestamp.strftime(self._UTC_DATE_FORMAT) - - def _get_credentials(self): - """Get credentials by calling SSO get role credentials.""" - config = Config( - signature_version=UNSIGNED, - region_name=self._sso_region, - ) - client = self._client_creator('sso', config=config) - if self._token_provider: - initial_token_data = self._token_provider.load_token() - token = initial_token_data.get_frozen_token().token - else: - token_dict = self._token_loader(self._start_url) - token = token_dict['accessToken'] - - # raise an UnauthorizedSSOTokenError if the loaded legacy token - # is expired to save a call to GetRoleCredentials with an - # expired token. - expiration = dateutil.parser.parse(token_dict['expiresAt']) - remaining = total_seconds(expiration - self._time_fetcher()) - if remaining <= 0: - raise UnauthorizedSSOTokenError() - - kwargs = { - 'roleName': self._role_name, - 'accountId': self._account_id, - 'accessToken': token, - } - try: - register_feature_ids(self.feature_ids) - response = client.get_role_credentials(**kwargs) - except client.exceptions.UnauthorizedException: - raise UnauthorizedSSOTokenError() - credentials = response['roleCredentials'] - - credentials = { - 'ProviderType': 'sso', - 'Credentials': { - 'AccessKeyId': credentials['accessKeyId'], - 'SecretAccessKey': credentials['secretAccessKey'], - 'SessionToken': credentials['sessionToken'], - 'Expiration': self._parse_timestamp(credentials['expiration']), - 'AccountId': self._account_id, - }, - } - return credentials - - -class SSOProvider(CredentialProvider): - METHOD = 'sso' - - _SSO_TOKEN_CACHE_DIR = os.path.expanduser( - os.path.join('~', '.aws', 'sso', 'cache') - ) - _PROFILE_REQUIRED_CONFIG_VARS = ( - 'sso_role_name', - 'sso_account_id', - ) - _SSO_REQUIRED_CONFIG_VARS = ( - 'sso_start_url', - 'sso_region', - ) - _ALL_REQUIRED_CONFIG_VARS = ( - _PROFILE_REQUIRED_CONFIG_VARS + _SSO_REQUIRED_CONFIG_VARS - ) - - def __init__( - self, - load_config, - client_creator, - profile_name, - cache=None, - token_cache=None, - token_provider=None, - ): - if token_cache is None: - token_cache = JSONFileCache(self._SSO_TOKEN_CACHE_DIR) - self._token_cache = token_cache - self._token_provider = token_provider - if cache is None: - cache = {} - self.cache = cache - self._load_config = load_config - self._client_creator = client_creator - self._profile_name = profile_name - self._feature_ids = set() - - def _load_sso_config(self): - loaded_config = self._load_config() - profiles = loaded_config.get('profiles', {}) - profile_name = self._profile_name - profile_config = profiles.get(self._profile_name, 
{}) - sso_sessions = loaded_config.get('sso_sessions', {}) - - # Role name & Account ID indicate the cred provider should be used - if all( - c not in profile_config for c in self._PROFILE_REQUIRED_CONFIG_VARS - ): - return None - - resolved_config, extra_reqs = self._resolve_sso_session_reference( - profile_config, sso_sessions - ) - - config = {} - missing_config_vars = [] - all_required_configs = self._ALL_REQUIRED_CONFIG_VARS + extra_reqs - for config_var in all_required_configs: - if config_var in resolved_config: - config[config_var] = resolved_config[config_var] - else: - missing_config_vars.append(config_var) - - if missing_config_vars: - missing = ', '.join(missing_config_vars) - raise InvalidConfigError( - error_msg=( - f'The profile "{profile_name}" is configured to use SSO ' - f'but is missing required configuration: {missing}' - ) - ) - return config - - def _resolve_sso_session_reference(self, profile_config, sso_sessions): - sso_session_name = profile_config.get('sso_session') - if sso_session_name is None: - # No reference to resolve, proceed with legacy flow - return profile_config, () - - if sso_session_name not in sso_sessions: - error_msg = f'The specified sso-session does not exist: "{sso_session_name}"' - raise InvalidConfigError(error_msg=error_msg) - - config = profile_config.copy() - session = sso_sessions[sso_session_name] - for config_var, val in session.items(): - # Validate any keys referenced in both profile and sso_session match - if config.get(config_var, val) != val: - error_msg = ( - f"The value for {config_var} is inconsistent between " - f"profile ({config[config_var]}) and sso-session ({val})." - ) - raise InvalidConfigError(error_msg=error_msg) - config[config_var] = val - return config, ('sso_session',) - - def load(self): - sso_config = self._load_sso_config() - if not sso_config: - return None - - fetcher_kwargs = { - 'start_url': sso_config['sso_start_url'], - 'sso_region': sso_config['sso_region'], - 'role_name': sso_config['sso_role_name'], - 'account_id': sso_config['sso_account_id'], - 'client_creator': self._client_creator, - 'token_loader': SSOTokenLoader(cache=self._token_cache), - 'cache': self.cache, - } - sso_session_in_config = 'sso_session' in sso_config - if sso_session_in_config: - fetcher_kwargs['sso_session_name'] = sso_config['sso_session'] - fetcher_kwargs['token_provider'] = self._token_provider - self._feature_ids.add('CREDENTIALS_PROFILE_SSO') - else: - self._feature_ids.add('CREDENTIALS_PROFILE_SSO_LEGACY') - - sso_fetcher = SSOCredentialFetcher(**fetcher_kwargs) - sso_fetcher.feature_ids = self._feature_ids.copy() - - if sso_session_in_config: - self._feature_ids.add('CREDENTIALS_SSO') - else: - self._feature_ids.add('CREDENTIALS_SSO_LEGACY') - - register_feature_ids(self._feature_ids) - return DeferredRefreshableCredentials( - method=self.METHOD, - refresh_using=sso_fetcher.fetch_credentials, - ) - - -def _base64_url_encode_no_padding(data): - return base64.urlsafe_b64encode(data).rstrip(b'=').decode('ascii') - - -def _build_dpop_header(private_key, uri, uid=None, ts=None): - if EC is None: - raise MissingDependencyException( - msg=( - "This operation requires an additional dependency. You" - " will need to pip install \"botocore[crt]\" before proceeding." 
- ) - ) - x, y = private_key.get_public_coords() - jwk = { - "kty": "EC", - "x": _base64_url_encode_no_padding(x), - "y": _base64_url_encode_no_padding(y), - "crv": "P-256", - } - - header = { - "typ": "dpop+jwt", - "alg": "ES256", - "jwk": jwk, - } - - payload = { - "htm": "POST", - "htu": uri, - "iat": ts or int(time.time()), - "jti": uid or str(uuid.uuid4()), - } - header_b64 = _base64_url_encode_no_padding( - json.dumps(header, separators=(',', ':')).encode() - ) - payload_b64 = _base64_url_encode_no_padding( - json.dumps(payload, separators=(',', ':')).encode() - ) - signing_input = f"{header_b64}.{payload_b64}".encode() - signature = private_key.sign(sha256(signing_input).digest()) - signature_bytes = EC.decode_der_signature_to_padded_pair( - signature, pad_to=32 - ) - signature_b64 = _base64_url_encode_no_padding(signature_bytes) - - return f"{header_b64}.{payload_b64}.{signature_b64}" - - -def _build_add_dpop_header_handler(private_key): - """Builds a before-call handler for calculating and setting the DPoP header""" - - def _add_dpop_header_handler(**kwargs): - kwargs['params']['headers']['DPoP'] = _build_dpop_header( - private_key, kwargs['params']['url'] - ) - - return _add_dpop_header_handler - - -class LoginCredentialFetcher: - """ - Converts login access tokens from the cached token to - credentials, and supports refreshing them. - """ - - _REFRESH_THRESHOLD = 5 * 60 - _REQUIRED_TOKEN_FIELDS = ( - 'accessToken', - 'refreshToken', - 'dpopKey', - 'clientId', - ) - - def __init__( - self, - session_name, - token_loader, - client_creator, - time_fetcher=_local_now, - feature_ids=None, - ): - self._session_name = session_name - self._token_loader = token_loader - self._client_creator = client_creator - self._time_fetcher = time_fetcher - if feature_ids is None: - feature_ids = set() - self.feature_ids = feature_ids - - def load_cached_credentials(self): - """Loads cached credentials without checking their expiry.""" - token = self._token_loader.load_token(self._session_name) - - if token is None: - raise LoginTokenLoadError( - error_msg='Unable to load a existing login session for session ' - f'{self._session_name}. Please reauthenticate with ' - "'aws login'.", - ) - - missing_fields = [ - key for key in self._REQUIRED_TOKEN_FIELDS if key not in token - ] - if missing_fields: - raise LoginTokenLoadError( - error_msg=f'Failed to load access token from token cache, missing required fields: {", ".join(missing_fields)}.' 
- ) - - return self._token_to_credentials(token) - - def refresh_credentials(self): - """Refreshes login credentials, including saving them to the cache.""" - if self.feature_ids: - register_feature_ids(self.feature_ids) - # Reload the token from disk, we need the refresh info - token = self._token_loader.load_token(self._session_name) - private_key = self._load_private_key(token) - - # Check if token has already been refreshed and is still valid - if ( - token - and 'accessToken' in token - and 'expiresAt' in token['accessToken'] - ): - expiry_time = _parse_if_needed(token['accessToken']['expiresAt']) - remaining_time = total_seconds(expiry_time - self._time_fetcher()) - if remaining_time > self._REFRESH_THRESHOLD: - return self._token_to_credentials(token) - - config = botocore.config.Config( - signature_version=botocore.UNSIGNED, - ) - client = self._client_creator( - 'signin', - config=config, - ) - - client.meta.events.register( - 'before-call.signin.CreateOAuth2Token', - _build_add_dpop_header_handler(private_key), - ) - - try: - response = client.create_o_auth2_token( - tokenInput={ - 'clientId': token['clientId'], - 'refreshToken': token['refreshToken'], - 'grantType': 'refresh_token', - }, - ) - except client.exceptions.AccessDeniedException as e: - error_type = e.response.get('error', '') - if error_type in ('TOKEN_EXPIRED', 'USER_CREDENTIALS_CHANGED'): - raise LoginRefreshRequired() from e - elif error_type == 'INSUFFICIENT_PERMISSIONS': - raise LoginInsufficientPermissions() from e - raise LoginError() from e - - if response is None or 'tokenOutput' not in response: - raise LoginTokenLoadError( - error_msg=( - "Unable to refresh access token due to an invalid service response. " - "Please try running 'aws login' again. If the issue persists, there " - "may be a temporary signin service problem." - ) - ) - - output = response.get('tokenOutput') - - expires_timestamp = self._time_fetcher().astimezone( - tzutc() - ) + datetime.timedelta(seconds=output['expiresIn']) - - # Overwrite token with refreshed fields - token.update( - { - 'accessToken': { - 'accessKeyId': output['accessToken']['accessKeyId'], - 'secretAccessKey': output['accessToken'][ - 'secretAccessKey' - ], - 'sessionToken': output['accessToken']['sessionToken'], - 'accountId': token['accessToken']['accountId'], - 'expiresAt': expires_timestamp.strftime( - '%Y-%m-%dT%H:%M:%SZ' - ), - }, - 'refreshToken': output['refreshToken'], - } - ) - self._token_loader.save_token(self._session_name, token) - - return self._token_to_credentials(token) - - @staticmethod - def _token_to_credentials(token): - return { - 'access_key': token['accessToken']['accessKeyId'], - 'secret_key': token['accessToken']['secretAccessKey'], - 'token': token['accessToken']['sessionToken'], - 'expiry_time': token['accessToken']['expiresAt'], - 'account_id': token['accessToken']['accountId'], - } - - @staticmethod - def _load_private_key(token): - if 'dpopKey' not in token: - raise LoginTokenLoadError( - error_msg='Private key not found in cached token.' 
- ) - - # Remove the PEM header and footer lines - lines = token['dpopKey'].splitlines() - content_lines = [ - line - for line in lines - if not line.startswith('-----BEGIN') - and not line.startswith('-----END') - ] - - # strip should handle the optional newline at the end as well - contents = ''.join(content_lines).strip() - - try: - return EC.new_key_from_der_data(base64.b64decode(contents)) - except ValueError as e: - raise LoginTokenLoadError( - error_msg='Unable to load private key from cached token.' - ) from e - - -class LoginProvider(CredentialProvider): - METHOD = 'login' - - def __init__( - self, - load_config, - client_creator, - profile_name, - token_cache=None, - ): - super().__init__() - if token_cache is None: - token_cache = JSONFileCache(get_login_token_cache_directory()) - self._token_cache = token_cache - - self._load_config = load_config - self._client_creator = client_creator - self._profile_name = profile_name - self._feature_ids = {'CREDENTIALS_PROFILE_LOGIN', 'CREDENTIALS_LOGIN'} - - def load(self): - loaded_config = self._load_config() - profiles = loaded_config.get('profiles', {}) - profile_config = profiles.get(self._profile_name, {}) - - if 'login_session' not in profile_config: - return None - - if EC is None: - raise MissingDependencyException( - msg=( - "Using the login credential provider requires an " - "additional dependency. You will need to pip install " - "\"botocore[crt]\" before proceeding." - ) - ) - - fetcher = LoginCredentialFetcher( - session_name=profile_config['login_session'], - token_loader=LoginTokenLoader(self._token_cache), - client_creator=self._client_creator, - time_fetcher=_local_now, - feature_ids=self._feature_ids, - ) - - register_feature_ids(self._feature_ids) - - # Return the current cached credentials initially, - # regardless if they're expired - cached_credentials = fetcher.load_cached_credentials() - - return RefreshableCredentials( - access_key=cached_credentials['access_key'], - secret_key=cached_credentials['secret_key'], - token=cached_credentials['token'], - expiry_time=_parse_if_needed(cached_credentials['expiry_time']), - account_id=cached_credentials['account_id'], - method=self.METHOD, - refresh_using=fetcher.refresh_credentials, - time_fetcher=_local_now, - ) diff --git a/venv/Lib/site-packages/botocore/crt/__init__.py b/venv/Lib/site-packages/botocore/crt/__init__.py deleted file mode 100644 index 952ebf3..0000000 --- a/venv/Lib/site-packages/botocore/crt/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -# A list of auth types supported by the signers in botocore/crt/auth.py. This -# should always match the keys of botocore.crt.auth.CRT_AUTH_TYPE_MAPS. The -# information is duplicated here so that it can be accessed in environments -# where `awscrt` is not present and any import from botocore.crt.auth would -# fail. 
-CRT_SUPPORTED_AUTH_TYPES = ( - 'v4', - 'v4-query', - 'v4a', - 's3v4', - 's3v4-query', - 's3v4a', - 's3v4a-query', -) diff --git a/venv/Lib/site-packages/botocore/crt/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/crt/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 51711ac..0000000 Binary files a/venv/Lib/site-packages/botocore/crt/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/crt/__pycache__/auth.cpython-312.pyc b/venv/Lib/site-packages/botocore/crt/__pycache__/auth.cpython-312.pyc deleted file mode 100644 index 9ef4039..0000000 Binary files a/venv/Lib/site-packages/botocore/crt/__pycache__/auth.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/crt/auth.py b/venv/Lib/site-packages/botocore/crt/auth.py deleted file mode 100644 index e36730e..0000000 --- a/venv/Lib/site-packages/botocore/crt/auth.py +++ /dev/null @@ -1,629 +0,0 @@ -# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -from io import BytesIO - -from botocore.auth import ( - SIGNED_HEADERS_BLACKLIST, - STREAMING_UNSIGNED_PAYLOAD_TRAILER, - UNSIGNED_PAYLOAD, - BaseSigner, - _get_body_as_dict, - _host_from_url, -) -from botocore.compat import ( - HTTPHeaders, - awscrt, - get_current_datetime, - parse_qs, - urlsplit, - urlunsplit, -) -from botocore.exceptions import NoCredentialsError -from botocore.useragent import register_feature_id -from botocore.utils import percent_encode_sequence - - -class CrtSigV4Auth(BaseSigner): - REQUIRES_REGION = True - _PRESIGNED_HEADERS_BLOCKLIST = [ - 'Authorization', - 'X-Amz-Date', - 'X-Amz-Content-SHA256', - 'X-Amz-Security-Token', - ] - _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_HEADERS - _USE_DOUBLE_URI_ENCODE = True - _SHOULD_NORMALIZE_URI_PATH = True - - def __init__(self, credentials, service_name, region_name): - self.credentials = credentials - self._service_name = service_name - self._region_name = region_name - self._expiration_in_seconds = None - - def _is_streaming_checksum_payload(self, request): - checksum_context = request.context.get('checksum', {}) - algorithm = checksum_context.get('request_algorithm') - return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer' - - def add_auth(self, request): - if self.credentials is None: - raise NoCredentialsError() - - datetime_now = get_current_datetime(remove_tzinfo=False) - - # Use existing 'X-Amz-Content-SHA256' header if able - existing_sha256 = self._get_existing_sha256(request) - - self._modify_request_before_signing(request) - - credentials_provider = awscrt.auth.AwsCredentialsProvider.new_static( - access_key_id=self.credentials.access_key, - secret_access_key=self.credentials.secret_key, - session_token=self.credentials.token, - ) - - if self._is_streaming_checksum_payload(request): - explicit_payload = STREAMING_UNSIGNED_PAYLOAD_TRAILER - elif self._should_sha256_sign_payload(request): - if existing_sha256: - explicit_payload = existing_sha256 - else: - 
explicit_payload = None # to be calculated during signing - else: - explicit_payload = UNSIGNED_PAYLOAD - - if self._should_add_content_sha256_header(explicit_payload): - body_header = ( - awscrt.auth.AwsSignedBodyHeaderType.X_AMZ_CONTENT_SHA_256 - ) - else: - body_header = awscrt.auth.AwsSignedBodyHeaderType.NONE - - signing_config = awscrt.auth.AwsSigningConfig( - algorithm=awscrt.auth.AwsSigningAlgorithm.V4, - signature_type=self._SIGNATURE_TYPE, - credentials_provider=credentials_provider, - region=self._region_name, - service=self._service_name, - date=datetime_now, - should_sign_header=self._should_sign_header, - use_double_uri_encode=self._USE_DOUBLE_URI_ENCODE, - should_normalize_uri_path=self._SHOULD_NORMALIZE_URI_PATH, - signed_body_value=explicit_payload, - signed_body_header_type=body_header, - expiration_in_seconds=self._expiration_in_seconds, - ) - crt_request = self._crt_request_from_aws_request(request) - future = awscrt.auth.aws_sign_request(crt_request, signing_config) - future.result() - self._apply_signing_changes(request, crt_request) - - def _crt_request_from_aws_request(self, aws_request): - url_parts = urlsplit(aws_request.url) - crt_path = url_parts.path if url_parts.path else '/' - if aws_request.params: - array = [] - for param, value in aws_request.params.items(): - value = str(value) - array.append(f'{param}={value}') - crt_path = crt_path + '?' + '&'.join(array) - elif url_parts.query: - crt_path = f'{crt_path}?{url_parts.query}' - - crt_headers = awscrt.http.HttpHeaders(aws_request.headers.items()) - - # CRT requires body (if it exists) to be an I/O stream. - crt_body_stream = None - if aws_request.body: - if hasattr(aws_request.body, 'seek'): - crt_body_stream = aws_request.body - else: - crt_body_stream = BytesIO(aws_request.body) - - crt_request = awscrt.http.HttpRequest( - method=aws_request.method, - path=crt_path, - headers=crt_headers, - body_stream=crt_body_stream, - ) - return crt_request - - def _apply_signing_changes(self, aws_request, signed_crt_request): - # Apply changes from signed CRT request to the AWSRequest - aws_request.headers = HTTPHeaders.from_pairs( - list(signed_crt_request.headers) - ) - - def _should_sign_header(self, name, **kwargs): - return name.lower() not in SIGNED_HEADERS_BLACKLIST - - def _modify_request_before_signing(self, request): - # This could be a retry. Make sure the previous - # authorization headers are removed first. - for h in self._PRESIGNED_HEADERS_BLOCKLIST: - if h in request.headers: - del request.headers[h] - # If necessary, add the host header - if 'host' not in request.headers: - request.headers['host'] = _host_from_url(request.url) - - def _get_existing_sha256(self, request): - return request.headers.get('X-Amz-Content-SHA256') - - def _should_sha256_sign_payload(self, request): - # Payloads will always be signed over insecure connections. - if not request.url.startswith('https'): - return True - - # Certain operations may have payload signing disabled by default. - # Since we don't have access to the operation model, we pass in this - # bit of metadata through the request context. - return request.context.get('payload_signing_enabled', True) - - def _should_add_content_sha256_header(self, explicit_payload): - # only add X-Amz-Content-SHA256 header if payload is explicitly set - return explicit_payload is not None - - -class CrtS3SigV4Auth(CrtSigV4Auth): - # For S3, we do not normalize the path. 
- _USE_DOUBLE_URI_ENCODE = False - _SHOULD_NORMALIZE_URI_PATH = False - - def _get_existing_sha256(self, request): - # always recalculate - return None - - def _should_sha256_sign_payload(self, request): - # S3 allows optional body signing, so to minimize the performance - # impact, we opt to not SHA256 sign the body on streaming uploads, - # provided that we're on https. - client_config = request.context.get('client_config') - s3_config = getattr(client_config, 's3', None) - - # The config could be None if it isn't set, or if the customer sets it - # to None. - if s3_config is None: - s3_config = {} - - # The explicit configuration takes precedence over any implicit - # configuration. - sign_payload = s3_config.get('payload_signing_enabled', None) - if sign_payload is not None: - return sign_payload - - # We require that both a checksum be present and https be enabled - # to implicitly disable body signing. The combination of TLS and - # a checksum is sufficiently secure and durable for us to be - # confident in the request without body signing. - checksum_header = 'Content-MD5' - checksum_context = request.context.get('checksum', {}) - algorithm = checksum_context.get('request_algorithm') - if isinstance(algorithm, dict) and algorithm.get('in') == 'header': - checksum_header = algorithm['name'] - if ( - not request.url.startswith('https') - or checksum_header not in request.headers - ): - return True - - # If the input is streaming we disable body signing by default. - if request.context.get('has_streaming_input', False): - return False - - # If the S3-specific checks had no results, delegate to the generic - # checks. - return super()._should_sha256_sign_payload(request) - - def _should_add_content_sha256_header(self, explicit_payload): - # Always add X-Amz-Content-SHA256 header - return True - - -class CrtSigV4AsymAuth(BaseSigner): - REQUIRES_REGION = True - _PRESIGNED_HEADERS_BLOCKLIST = [ - 'Authorization', - 'X-Amz-Date', - 'X-Amz-Content-SHA256', - 'X-Amz-Security-Token', - ] - _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_HEADERS - _USE_DOUBLE_URI_ENCODE = True - _SHOULD_NORMALIZE_URI_PATH = True - - def __init__(self, credentials, service_name, region_name): - self.credentials = credentials - self._service_name = service_name - self._region_name = region_name - self._expiration_in_seconds = None - - def add_auth(self, request): - register_feature_id("SIGV4A_SIGNING") - if self.credentials is None: - raise NoCredentialsError() - - datetime_now = get_current_datetime(remove_tzinfo=False) - - # Use existing 'X-Amz-Content-SHA256' header if able - existing_sha256 = self._get_existing_sha256(request) - - self._modify_request_before_signing(request) - - credentials_provider = awscrt.auth.AwsCredentialsProvider.new_static( - access_key_id=self.credentials.access_key, - secret_access_key=self.credentials.secret_key, - session_token=self.credentials.token, - ) - - if self._is_streaming_checksum_payload(request): - explicit_payload = STREAMING_UNSIGNED_PAYLOAD_TRAILER - elif self._should_sha256_sign_payload(request): - if existing_sha256: - explicit_payload = existing_sha256 - else: - explicit_payload = None # to be calculated during signing - else: - explicit_payload = UNSIGNED_PAYLOAD - - if self._should_add_content_sha256_header(explicit_payload): - body_header = ( - awscrt.auth.AwsSignedBodyHeaderType.X_AMZ_CONTENT_SHA_256 - ) - else: - body_header = awscrt.auth.AwsSignedBodyHeaderType.NONE - - signing_config = awscrt.auth.AwsSigningConfig( - 
algorithm=awscrt.auth.AwsSigningAlgorithm.V4_ASYMMETRIC, - signature_type=self._SIGNATURE_TYPE, - credentials_provider=credentials_provider, - region=self._region_name, - service=self._service_name, - date=datetime_now, - should_sign_header=self._should_sign_header, - use_double_uri_encode=self._USE_DOUBLE_URI_ENCODE, - should_normalize_uri_path=self._SHOULD_NORMALIZE_URI_PATH, - signed_body_value=explicit_payload, - signed_body_header_type=body_header, - expiration_in_seconds=self._expiration_in_seconds, - ) - crt_request = self._crt_request_from_aws_request(request) - future = awscrt.auth.aws_sign_request(crt_request, signing_config) - future.result() - self._apply_signing_changes(request, crt_request) - - def _crt_request_from_aws_request(self, aws_request): - url_parts = urlsplit(aws_request.url) - crt_path = url_parts.path if url_parts.path else '/' - if aws_request.params: - array = [] - for param, value in aws_request.params.items(): - value = str(value) - array.append(f'{param}={value}') - crt_path = crt_path + '?' + '&'.join(array) - elif url_parts.query: - crt_path = f'{crt_path}?{url_parts.query}' - - crt_headers = awscrt.http.HttpHeaders(aws_request.headers.items()) - - # CRT requires body (if it exists) to be an I/O stream. - crt_body_stream = None - if aws_request.body: - if hasattr(aws_request.body, 'seek'): - crt_body_stream = aws_request.body - else: - crt_body_stream = BytesIO(aws_request.body) - - crt_request = awscrt.http.HttpRequest( - method=aws_request.method, - path=crt_path, - headers=crt_headers, - body_stream=crt_body_stream, - ) - return crt_request - - def _apply_signing_changes(self, aws_request, signed_crt_request): - # Apply changes from signed CRT request to the AWSRequest - aws_request.headers = HTTPHeaders.from_pairs( - list(signed_crt_request.headers) - ) - - def _should_sign_header(self, name, **kwargs): - return name.lower() not in SIGNED_HEADERS_BLACKLIST - - def _modify_request_before_signing(self, request): - # This could be a retry. Make sure the previous - # authorization headers are removed first. - for h in self._PRESIGNED_HEADERS_BLOCKLIST: - if h in request.headers: - del request.headers[h] - # If necessary, add the host header - if 'host' not in request.headers: - request.headers['host'] = _host_from_url(request.url) - - def _get_existing_sha256(self, request): - return request.headers.get('X-Amz-Content-SHA256') - - def _is_streaming_checksum_payload(self, request): - checksum_context = request.context.get('checksum', {}) - algorithm = checksum_context.get('request_algorithm') - return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer' - - def _should_sha256_sign_payload(self, request): - # Payloads will always be signed over insecure connections. - if not request.url.startswith('https'): - return True - - # Certain operations may have payload signing disabled by default. - # Since we don't have access to the operation model, we pass in this - # bit of metadata through the request context. - return request.context.get('payload_signing_enabled', True) - - def _should_add_content_sha256_header(self, explicit_payload): - # only add X-Amz-Content-SHA256 header if payload is explicitly set - return explicit_payload is not None - - -class CrtS3SigV4AsymAuth(CrtSigV4AsymAuth): - # For S3, we do not normalize the path. 
- _USE_DOUBLE_URI_ENCODE = False - _SHOULD_NORMALIZE_URI_PATH = False - - def _get_existing_sha256(self, request): - # always recalculate - return None - - def _should_sha256_sign_payload(self, request): - # S3 allows optional body signing, so to minimize the performance - # impact, we opt to not SHA256 sign the body on streaming uploads, - # provided that we're on https. - client_config = request.context.get('client_config') - s3_config = getattr(client_config, 's3', None) - - # The config could be None if it isn't set, or if the customer sets it - # to None. - if s3_config is None: - s3_config = {} - - # The explicit configuration takes precedence over any implicit - # configuration. - sign_payload = s3_config.get('payload_signing_enabled', None) - if sign_payload is not None: - return sign_payload - - # We require that both content-md5 be present and https be enabled - # to implicitly disable body signing. The combination of TLS and - # content-md5 is sufficiently secure and durable for us to be - # confident in the request without body signing. - if ( - not request.url.startswith('https') - or 'Content-MD5' not in request.headers - ): - return True - - # If the input is streaming we disable body signing by default. - if request.context.get('has_streaming_input', False): - return False - - # If the S3-specific checks had no results, delegate to the generic - # checks. - return super()._should_sha256_sign_payload(request) - - def _should_add_content_sha256_header(self, explicit_payload): - # Always add X-Amz-Content-SHA256 header - return True - - -class CrtSigV4AsymQueryAuth(CrtSigV4AsymAuth): - DEFAULT_EXPIRES = 3600 - _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS - - def __init__( - self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES - ): - super().__init__(credentials, service_name, region_name) - self._expiration_in_seconds = expires - - def _modify_request_before_signing(self, request): - super()._modify_request_before_signing(request) - - # We automatically set this header, so if it's the auto-set value we - # want to get rid of it since it doesn't make sense for presigned urls. - content_type = request.headers.get('content-type') - if content_type == 'application/x-www-form-urlencoded; charset=utf-8': - del request.headers['content-type'] - - # Now parse the original query string to a dict, inject our new query - # params, and serialize back to a query string. - url_parts = urlsplit(request.url) - # parse_qs makes each value a list, but in our case we know we won't - # have repeated keys so we know we have single element lists which we - # can convert back to scalar values. - query_string_parts = parse_qs(url_parts.query, keep_blank_values=True) - query_dict = {k: v[0] for k, v in query_string_parts.items()} - - # The spec is particular about this. It *has* to be: - # https://<endpoint>?<operation params>&<auth params> - # You can't mix the two types of params together, i.e just keep doing - # new_query_params.update(op_params) - # new_query_params.update(auth_params) - # percent_encode_sequence(new_query_params) - if request.data: - # We also need to move the body params into the query string. To - # do this, we first have to convert it to a dict. - query_dict.update(_get_body_as_dict(request)) - request.data = '' - new_query_string = percent_encode_sequence(query_dict) - # url_parts is a tuple (and therefore immutable) so we need to create - # a new url_parts with the new query string. - # <part> - <index> - # scheme - 0 - # netloc - 1 - # path - 2 - # query - 3 <-- we're replacing this.
- # fragment - 4 - p = url_parts - new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) - request.url = urlunsplit(new_url_parts) - - def _apply_signing_changes(self, aws_request, signed_crt_request): - # Apply changes from signed CRT request to the AWSRequest - super()._apply_signing_changes(aws_request, signed_crt_request) - - signed_query = urlsplit(signed_crt_request.path).query - p = urlsplit(aws_request.url) - # urlsplit() returns a tuple (and therefore immutable) so we - # need to create new url with the new query string. - # <part> - <index> - # scheme - 0 - # netloc - 1 - # path - 2 - # query - 3 <-- we're replacing this. - # fragment - 4 - aws_request.url = urlunsplit((p[0], p[1], p[2], signed_query, p[4])) - - -class CrtS3SigV4AsymQueryAuth(CrtSigV4AsymQueryAuth): - """S3 SigV4A auth using query parameters. - This signer will sign a request using query parameters and signature - version 4A, i.e a "presigned url" signer. - """ - - # For S3, we do not normalize the path. - _USE_DOUBLE_URI_ENCODE = False - _SHOULD_NORMALIZE_URI_PATH = False - - def _should_sha256_sign_payload(self, request): - # From the doc link above: - # "You don't include a payload hash in the Canonical Request, because - # when you create a presigned URL, you don't know anything about the - # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD". - return False - - def _should_add_content_sha256_header(self, explicit_payload): - # Never add X-Amz-Content-SHA256 header - return False - - -class CrtSigV4QueryAuth(CrtSigV4Auth): - DEFAULT_EXPIRES = 3600 - _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS - - def __init__( - self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES - ): - super().__init__(credentials, service_name, region_name) - self._expiration_in_seconds = expires - - def _modify_request_before_signing(self, request): - super()._modify_request_before_signing(request) - - # We automatically set this header, so if it's the auto-set value we - # want to get rid of it since it doesn't make sense for presigned urls. - content_type = request.headers.get('content-type') - if content_type == 'application/x-www-form-urlencoded; charset=utf-8': - del request.headers['content-type'] - - # Now parse the original query string to a dict, inject our new query - # params, and serialize back to a query string. - url_parts = urlsplit(request.url) - # parse_qs makes each value a list, but in our case we know we won't - # have repeated keys so we know we have single element lists which we - # can convert back to scalar values. - query_dict = { - k: v[0] - for k, v in parse_qs( - url_parts.query, keep_blank_values=True - ).items() - } - if request.params: - query_dict.update(request.params) - request.params = {} - # The spec is particular about this. It *has* to be: - # https://<endpoint>?<operation params>&<auth params> - # You can't mix the two types of params together, i.e just keep doing - # new_query_params.update(op_params) - # new_query_params.update(auth_params) - # percent_encode_sequence(new_query_params) - if request.data: - # We also need to move the body params into the query string. To - # do this, we first have to convert it to a dict. - query_dict.update(_get_body_as_dict(request)) - request.data = '' - new_query_string = percent_encode_sequence(query_dict) - # url_parts is a tuple (and therefore immutable) so we need to create - # a new url_parts with the new query string. - # <part> - <index> - # scheme - 0 - # netloc - 1 - # path - 2 - # query - 3 <-- we're replacing this.
- # fragment - 4 - p = url_parts - new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) - request.url = urlunsplit(new_url_parts) - - def _apply_signing_changes(self, aws_request, signed_crt_request): - # Apply changes from signed CRT request to the AWSRequest - super()._apply_signing_changes(aws_request, signed_crt_request) - - signed_query = urlsplit(signed_crt_request.path).query - p = urlsplit(aws_request.url) - # urlsplit() returns a tuple (and therefore immutable) so we - # need to create new url with the new query string. - # <part> - <index> - # scheme - 0 - # netloc - 1 - # path - 2 - # query - 3 <-- we're replacing this. - # fragment - 4 - aws_request.url = urlunsplit((p[0], p[1], p[2], signed_query, p[4])) - - -class CrtS3SigV4QueryAuth(CrtSigV4QueryAuth): - """S3 SigV4 auth using query parameters. - This signer will sign a request using query parameters and signature - version 4, i.e a "presigned url" signer. - Based off of: - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html - """ - - # For S3, we do not normalize the path. - _USE_DOUBLE_URI_ENCODE = False - _SHOULD_NORMALIZE_URI_PATH = False - - def _should_sha256_sign_payload(self, request): - # From the doc link above: - # "You don't include a payload hash in the Canonical Request, because - # when you create a presigned URL, you don't know anything about the - # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD". - return False - - def _should_add_content_sha256_header(self, explicit_payload): - # Never add X-Amz-Content-SHA256 header - return False - - -# Defined at the bottom of module to ensure all Auth -# classes are defined. -CRT_AUTH_TYPE_MAPS = { - 'v4': CrtSigV4Auth, - 'v4-query': CrtSigV4QueryAuth, - 'v4a': CrtSigV4AsymAuth, - 's3v4': CrtS3SigV4Auth, - 's3v4-query': CrtS3SigV4QueryAuth, - 's3v4a': CrtS3SigV4AsymAuth, - 's3v4a-query': CrtS3SigV4AsymQueryAuth, -} diff --git a/venv/Lib/site-packages/botocore/data/_retry.json b/venv/Lib/site-packages/botocore/data/_retry.json deleted file mode 100644 index 31e486e..0000000 --- a/venv/Lib/site-packages/botocore/data/_retry.json +++ /dev/null @@ -1,300 +0,0 @@ -{ - "definitions": { - "throttling": { - "applies_when": { - "response": { - "service_error_code": "Throttling", - "http_status_code": 400 - } - } - }, - "throttling_exception": { - "applies_when": { - "response": { - "service_error_code": "ThrottlingException", - "http_status_code": 400 - } - } - }, - "throttled_exception": { - "applies_when": { - "response": { - "service_error_code": "ThrottledException", - "http_status_code": 400 - } - } - }, - "request_throttled_exception": { - "applies_when": { - "response": { - "service_error_code": "RequestThrottledException", - "http_status_code": 400 - } - } - }, - "too_many_requests": { - "applies_when": { - "response": { - "http_status_code": 429 - } - } - }, - "general_socket_errors": { - "applies_when": { - "socket_errors": ["GENERAL_CONNECTION_ERROR"] - } - }, - "general_server_error": { - "applies_when": { - "response": { - "http_status_code": 500 - } - } - }, - "bad_gateway": { - "applies_when": { - "response": { - "http_status_code": 502 - } - } - }, - "service_unavailable": { - "applies_when": { - "response": { - "http_status_code": 503 - } - } - }, - "gateway_timeout": { - "applies_when": { - "response": { - "http_status_code": 504 - } - } - }, - "limit_exceeded": { - "applies_when": { - "response": { - "http_status_code": 509 - } - } - }, - "throughput_exceeded": { - "applies_when": { - "response": { - "service_error_code":
"ProvisionedThroughputExceededException", - "http_status_code": 400 - } - } - } - }, - "retry": { - "__default__": { - "max_attempts": 5, - "delay": { - "type": "exponential", - "base": "rand", - "growth_factor": 2 - }, - "policies": { - "general_socket_errors": {"$ref": "general_socket_errors"}, - "general_server_error": {"$ref": "general_server_error"}, - "bad_gateway": {"$ref": "bad_gateway"}, - "service_unavailable": {"$ref": "service_unavailable"}, - "gateway_timeout": {"$ref": "gateway_timeout"}, - "limit_exceeded": {"$ref": "limit_exceeded"}, - "throttling_exception": {"$ref": "throttling_exception"}, - "throttled_exception": {"$ref": "throttled_exception"}, - "request_throttled_exception": {"$ref": "request_throttled_exception"}, - "throttling": {"$ref": "throttling"}, - "too_many_requests": {"$ref": "too_many_requests"}, - "throughput_exceeded": {"$ref": "throughput_exceeded"} - } - }, - "organizations": { - "__default__": { - "policies": { - "too_many_requests": { - "applies_when": { - "response": { - "service_error_code": "TooManyRequestsException", - "http_status_code": 400 - } - } - } - } - } - }, - "dynamodb": { - "__default__": { - "max_attempts": 10, - "delay": { - "type": "exponential", - "base": 0.05, - "growth_factor": 2 - }, - "policies": { - "write_conflict": { - "applies_when": { - "response": { - "service_error_code": "ReplicatedWriteConflictException", - "http_status_code": 409 - } - } - }, - "still_processing": { - "applies_when": { - "response": { - "service_error_code": "TransactionInProgressException", - "http_status_code": 400 - } - } - }, - "crc32": { - "applies_when": { - "response": { - "crc32body": "x-amz-crc32" - } - } - } - } - } - }, - "ec2": { - "__default__": { - "policies": { - "request_limit_exceeded": { - "applies_when": { - "response": { - "service_error_code": "RequestLimitExceeded", - "http_status_code": 503 - } - } - }, - "ec2_throttled_exception": { - "applies_when": { - "response": { - "service_error_code": "EC2ThrottledException", - "http_status_code": 503 - } - } - } - } - } - }, - "cloudsearch": { - "__default__": { - "policies": { - "request_limit_exceeded": { - "applies_when": { - "response": { - "service_error_code": "BandwidthLimitExceeded", - "http_status_code": 509 - } - } - } - } - } - }, - "kinesis": { - "__default__": { - "policies": { - "request_limit_exceeded": { - "applies_when": { - "response": { - "service_error_code": "LimitExceededException", - "http_status_code": 400 - } - } - } - } - } - }, - "sqs": { - "__default__": { - "policies": { - "request_limit_exceeded": { - "applies_when": { - "response": { - "service_error_code": "RequestThrottled", - "http_status_code": 403 - } - } - } - } - } - }, - "s3": { - "__default__": { - "policies": { - "timeouts": { - "applies_when": { - "response": { - "http_status_code": 400, - "service_error_code": "RequestTimeout" - } - } - }, - "contentmd5": { - "applies_when": { - "response": { - "http_status_code": 400, - "service_error_code": "BadDigest" - } - } - } - } - } - }, - "glacier": { - "__default__": { - "policies": { - "timeouts": { - "applies_when": { - "response": { - "http_status_code": 408, - "service_error_code": "RequestTimeoutException" - } - } - } - } - } - }, - "route53": { - "__default__": { - "policies": { - "request_limit_exceeded": { - "applies_when": { - "response": { - "service_error_code": "Throttling", - "http_status_code": 400 - } - } - }, - "still_processing": { - "applies_when": { - "response": { - "service_error_code": "PriorRequestNotComplete", - 
"http_status_code": 400 - } - } - } - } - } - }, - "sts": { - "__default__": { - "policies": { - "idp_unreachable_error": { - "applies_when": { - "response": { - "service_error_code": "IDPCommunicationError", - "http_status_code": 400 - } - } - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 58885c6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/paginators-1.json deleted file mode 100644 index de88b6d..0000000 --- a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "ListAnalyzedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "analyzedResources" - }, - "ListAnalyzers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "analyzers" - }, - "ListArchiveRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "archiveRules" - }, - "ListFindings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - }, - "ListAccessPreviewFindings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - }, - "ListAccessPreviews": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accessPreviews" - }, - "ValidatePolicy": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - }, - "ListPolicyGenerations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policyGenerations" - }, - "GetFindingV2": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findingDetails" - }, - "ListFindingsV2": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - }, - "GetFindingRecommendation": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendedSteps" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/paginators-1.sdk-extras.json deleted file mode 100644 index 2fe19c0..0000000 --- a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/paginators-1.sdk-extras.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetFindingV2": { - "non_aggregate_keys": [ - "resource", - "status", - 
"error", - "createdAt", - "resourceType", - "findingType", - "resourceOwnerAccount", - "analyzedAt", - "id", - "updatedAt" - ] - }, - "GetFindingRecommendation": { - "non_aggregate_keys": [ - "status", - "error", - "completedAt", - "recommendationType", - "resourceArn", - "startedAt" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/service-2.json.gz deleted file mode 100644 index 744b4ec..0000000 Binary files a/venv/Lib/site-packages/botocore/data/accessanalyzer/2019-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/account/2021-02-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/account/2021-02-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1312e8f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/account/2021-02-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/account/2021-02-01/examples-1.json b/venv/Lib/site-packages/botocore/data/account/2021-02-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/account/2021-02-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/account/2021-02-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/account/2021-02-01/paginators-1.json deleted file mode 100644 index 5e75ec8..0000000 --- a/venv/Lib/site-packages/botocore/data/account/2021-02-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListRegions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Regions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/account/2021-02-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/account/2021-02-01/service-2.json.gz deleted file mode 100644 index 08b3654..0000000 Binary files a/venv/Lib/site-packages/botocore/data/account/2021-02-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index 64468fa..0000000 Binary files a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/examples-1.json b/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/paginators-1.json deleted file mode 100644 index c1f4e23..0000000 --- a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListCertificateAuthorities": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CertificateAuthorities" - }, - "ListTags": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListPermissions": { - 
"input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Permissions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/service-2.json.gz deleted file mode 100644 index b0c0499..0000000 Binary files a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/waiters-2.json b/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/waiters-2.json deleted file mode 100644 index 1d48140..0000000 --- a/venv/Lib/site-packages/botocore/data/acm-pca/2017-08-22/waiters-2.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "AuditReportCreated" : { - "description" : "Wait until a Audit Report is created", - "delay" : 3, - "maxAttempts" : 60, - "operation" : "DescribeCertificateAuthorityAuditReport", - "acceptors" : [ { - "matcher" : "path", - "argument" : "AuditReportStatus", - "state" : "success", - "expected" : "SUCCESS" - }, { - "matcher" : "path", - "argument" : "AuditReportStatus", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "AccessDeniedException" - } ] - }, - "CertificateAuthorityCSRCreated" : { - "description" : "Wait until a Certificate Authority CSR is created", - "delay" : 3, - "maxAttempts" : 60, - "operation" : "GetCertificateAuthorityCsr", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : false - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "RequestInProgressException" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "AccessDeniedException" - } ] - }, - "CertificateIssued" : { - "description" : "Wait until a certificate is issued", - "delay" : 1, - "maxAttempts" : 60, - "operation" : "GetCertificate", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : false - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "RequestInProgressException" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "AccessDeniedException" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/acm/2015-12-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 371c499..0000000 Binary files a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/examples-1.json b/venv/Lib/site-packages/botocore/data/acm/2015-12-08/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/acm/2015-12-08/paginators-1.json deleted file mode 100644 index 2e2e4f9..0000000 --- a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListCertificates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxItems", - "result_key": "CertificateSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/acm/2015-12-08/service-2.json.gz deleted file mode 100644 index 5900f16..0000000 Binary files a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/waiters-2.json b/venv/Lib/site-packages/botocore/data/acm/2015-12-08/waiters-2.json deleted file mode 100644 index 2d3c4ef..0000000 --- a/venv/Lib/site-packages/botocore/data/acm/2015-12-08/waiters-2.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "CertificateValidated" : { - "delay" : 60, - "maxAttempts" : 5, - "operation" : "DescribeCertificate", - "acceptors" : [ { - "matcher" : "pathAll", - "argument" : "Certificate.DomainValidationOptions[].ValidationStatus", - "state" : "success", - "expected" : "SUCCESS" - }, { - "matcher" : "pathAny", - "argument" : "Certificate.DomainValidationOptions[].ValidationStatus", - "state" : "retry", - "expected" : "PENDING_VALIDATION" - }, { - "matcher" : "path", - "argument" : "Certificate.Status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ResourceNotFoundException" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 26b850a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/paginators-1.json deleted file mode 100644 index 7377349..0000000 --- a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListInvestigationGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "investigationGroups" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/service-2.json.gz deleted file mode 100644 index 62433ae..0000000 Binary files a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/aiops/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/amp/2020-08-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7cda198..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/examples-1.json b/venv/Lib/site-packages/botocore/data/amp/2020-08-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/amp/2020-08-01/paginators-1.json deleted file mode 100644 index 656dd3a..0000000 --- a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListWorkspaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workspaces" - }, - "ListRuleGroupsNamespaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "ruleGroupsNamespaces" - }, - "ListScrapers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "scrapers" - }, - "ListAnomalyDetectors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "anomalyDetectors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/amp/2020-08-01/service-2.json.gz deleted file mode 100644 index 5264d1d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/amp/2020-08-01/waiters-2.json deleted file mode 100644 index 0222806..0000000 --- a/venv/Lib/site-packages/botocore/data/amp/2020-08-01/waiters-2.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "AnomalyDetectorActive" : { - "description" : "Wait until the anomaly detector reaches ACTIVE status", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "DescribeAnomalyDetector", - "acceptors" : [ { - "matcher" : "path", - "argument" : "anomalyDetector.status.statusCode", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "anomalyDetector.status.statusCode", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "anomalyDetector.status.statusCode", - "state" : "retry", - "expected" : "UPDATING" - } ] - }, - "AnomalyDetectorDeleted" : { - "description" : "Wait until the anomaly detector reaches DELETED status", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "DescribeAnomalyDetector", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "anomalyDetector.status.statusCode", - "state" : "retry", - "expected" : "DELETING" - } ] - }, - "ScraperActive" : { - "description" : "Wait until a scraper reaches ACTIVE status", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "DescribeScraper", - "acceptors" : [ { - "matcher" : "path", - "argument" : "scraper.status.statusCode", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "scraper.status.statusCode", - "state" : "failure", - "expected" : "CREATION_FAILED" - } ] - }, - "ScraperDeleted" : { - "description" : "Wait until a scraper reaches DELETED status", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "DescribeScraper", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "scraper.status.statusCode", - "state" : "failure", - "expected" : "DELETION_FAILED" - } ] - }, - "WorkspaceActive" : { - "description" : "Wait until a workspace reaches ACTIVE status", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "DescribeWorkspace", - 
"acceptors" : [ { - "matcher" : "path", - "argument" : "workspace.status.statusCode", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "workspace.status.statusCode", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "workspace.status.statusCode", - "state" : "retry", - "expected" : "CREATING" - } ] - }, - "WorkspaceDeleted" : { - "description" : "Wait until a workspace reaches DELETED status", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "DescribeWorkspace", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "workspace.status.statusCode", - "state" : "retry", - "expected" : "DELETING" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0442e46..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/examples-1.json b/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/paginators-1.json deleted file mode 100644 index f84208e..0000000 --- a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListApps": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "apps" - }, - "ListBranches": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "branches" - }, - "ListDomainAssociations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "domainAssociations" - }, - "ListJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/service-2.json.gz deleted file mode 100644 index dc3b82e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amplify/2017-07-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index e315d46..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/paginators-1.json b/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/paginators-1.json deleted file mode 100644 index 40304c7..0000000 --- a/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListBackendJobs": { - 
"input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Jobs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/service-2.json.gz deleted file mode 100644 index b25c06f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amplifybackend/2020-08-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3c2f05f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/examples-1.json b/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/paginators-1.json b/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/paginators-1.json deleted file mode 100644 index d0a0242..0000000 --- a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/paginators-1.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "pagination": { - "ListComponents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "entities" - }, - "ListThemes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "entities" - }, - "ExportComponents": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "entities" - }, - "ExportThemes": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "entities" - }, - "ExportForms": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "entities" - }, - "ListForms": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "entities" - }, - "ListCodegenJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "entities" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/service-2.json.gz deleted file mode 100644 index c7ddcee..0000000 Binary files a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/waiters-2.json b/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/amplifyuibuilder/2021-08-11/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/endpoint-rule-set-1.json.gz deleted file mode 100644 index d94cf41..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/examples-1.json b/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/paginators-1.json b/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/paginators-1.json deleted file mode 100644 index 2a875c5..0000000 --- a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/paginators-1.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "pagination": { - "GetApiKeys": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetBasePathMappings": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetClientCertificates": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetDeployments": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetDomainNames": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetModels": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetResources": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetRestApis": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetUsage": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items", - "non_aggregate_keys": [ - "usagePlanId", - "startDate", - "endDate" - ] - }, - "GetUsagePlans": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetUsagePlanKeys": { - "input_token": "position", - "output_token": "position", - "limit_key": "limit", - "result_key": "items" - }, - "GetVpcLinks": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - }, - "GetAuthorizers": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - }, - "GetDocumentationParts": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - }, - "GetDocumentationVersions": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - }, - "GetGatewayResponses": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - }, - "GetRequestValidators": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - }, - "GetSdkTypes": { - "input_token": "position", - "limit_key": "limit", - "output_token": "position", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/service-2.json.gz b/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/service-2.json.gz deleted file mode 100644 index 
7031ea8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/apigateway/2015-07-09/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index c39a128..0000000 Binary files a/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/service-2.json.gz deleted file mode 100644 index 6c0d87d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index d94cf41..0000000 Binary files a/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/paginators-1.json deleted file mode 100644 index 0c9022f..0000000 --- a/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/paginators-1.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "pagination": { - "GetApis": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetAuthorizers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetDeployments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetDomainNames": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetIntegrationResponses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetIntegrations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetModels": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetRouteResponses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetRoutes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetStages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListRoutingRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": 
"RoutingRules" - }, - "ListPortalProducts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListPortals": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListProductPages": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListProductRestEndpointPages": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/service-2.json.gz deleted file mode 100644 index 1fed3b0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/apigatewayv2/2018-11-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9ec17a5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/examples-1.json b/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/examples-1.json deleted file mode 100644 index 664e05e..0000000 --- a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/examples-1.json +++ /dev/null @@ -1,720 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateApplication": [ - { - "input": { - "Description": "An application used for creating an example.", - "Name": "example-application" - }, - "output": { - "Description": "An application used for creating an example.", - "Id": "339ohji", - "Name": "example-application" - }, - "comments": { - }, - "description": "The following create-application example creates an application in AWS AppConfig.", - "id": "to-create-an-application-1632264511615", - "title": "To create an application" - } - ], - "CreateConfigurationProfile": [ - { - "input": { - "ApplicationId": "339ohji", - "LocationUri": "ssm-parameter://Example-Parameter", - "Name": "Example-Configuration-Profile", - "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" - }, - "output": { - "ApplicationId": "339ohji", - "Id": "ur8hx2f", - "LocationUri": "ssm-parameter://Example-Parameter", - "Name": "Example-Configuration-Profile", - "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" - }, - "comments": { - }, - "description": "The following create-configuration-profile example creates a configuration profile using a configuration stored in Parameter Store, a capability of Systems Manager.", - "id": "to-create-a-configuration-profile-1632264580336", - "title": "To create a configuration profile" - } - ], - "CreateDeploymentStrategy": [ - { - "input": { - "DeploymentDurationInMinutes": 15, - "GrowthFactor": 25, - "Name": "Example-Deployment", - "ReplicateTo": "SSM_DOCUMENT" - }, - "output": { - "DeploymentDurationInMinutes": 15, - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "Id": "1225qzk", - "Name": "Example-Deployment", - "ReplicateTo": "SSM_DOCUMENT" - }, - "comments": { - }, - "description": "The following create-deployment-strategy example creates a deployment strategy called Example-Deployment that takes 15 minutes and 
deploys the configuration to 25% of the application at a time. The strategy is also copied to an SSM Document.", - "id": "to-create-a-deployment-strategy-1632264783812", - "title": "To create a deployment strategy" - } - ], - "CreateEnvironment": [ - { - "input": { - "ApplicationId": "339ohji", - "Name": "Example-Environment" - }, - "output": { - "ApplicationId": "339ohji", - "Id": "54j1r29", - "Name": "Example-Environment", - "State": "READY_FOR_DEPLOYMENT" - }, - "comments": { - }, - "description": "The following create-environment example creates an AWS AppConfig environment named Example-Environment using the application you created using create-application", - "id": "to-create-an-environment-1632265124975", - "title": "To create an environment" - } - ], - "CreateHostedConfigurationVersion": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "Content": "eyAiTmFtZSI6ICJFeGFtcGxlQXBwbGljYXRpb24iLCAiSWQiOiBFeGFtcGxlSUQsICJSYW5rIjogNyB9", - "ContentType": "text", - "LatestVersionNumber": 1 - }, - "output": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "ContentType": "text", - "VersionNumber": 1 - }, - "comments": { - }, - "description": "The following create-hosted-configuration-version example creates a new configuration in the AWS AppConfig configuration store.", - "id": "to-create-a-hosted-configuration-version-1632265196980", - "title": "To create a hosted configuration version" - } - ], - "DeleteApplication": [ - { - "input": { - "ApplicationId": "339ohji" - }, - "comments": { - }, - "description": "The following delete-application example deletes the specified application. \n", - "id": "to-delete-an-application-1632265343951", - "title": "To delete an application" - } - ], - "DeleteConfigurationProfile": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f" - }, - "comments": { - }, - "description": "The following delete-configuration-profile example deletes the specified configuration profile.", - "id": "to-delete-a-configuration-profile-1632265401308", - "title": "To delete a configuration profile" - } - ], - "DeleteDeploymentStrategy": [ - { - "input": { - "DeploymentStrategyId": "1225qzk" - }, - "comments": { - }, - "description": "The following delete-deployment-strategy example deletes the specified deployment strategy.", - "id": "to-delete-a-deployment-strategy-1632265473708", - "title": "To delete a deployment strategy" - } - ], - "DeleteEnvironment": [ - { - "input": { - "ApplicationId": "339ohji", - "EnvironmentId": "54j1r29" - }, - "comments": { - }, - "description": "The following delete-environment example deletes the specified application environment.", - "id": "to-delete-an-environment-1632265641044", - "title": "To delete an environment" - } - ], - "DeleteHostedConfigurationVersion": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "VersionNumber": 1 - }, - "comments": { - }, - "description": "The following delete-hosted-configuration-version example deletes a configuration version hosted in the AWS AppConfig configuration store.", - "id": "to-delete-a-hosted-configuration-version-1632265720740", - "title": "To delete a hosted configuration version" - } - ], - "GetApplication": [ - { - "input": { - "ApplicationId": "339ohji" - }, - "output": { - "Id": "339ohji", - "Name": "example-application" - }, - "comments": { - }, - "description": "The following get-application example lists the details of the specified application.", - "id": 
"to-list-details-of-an-application-1632265864702", - "title": "To list details of an application" - } - ], - "GetConfiguration": [ - { - "input": { - "Application": "example-application", - "ClientId": "example-id", - "Configuration": "Example-Configuration-Profile", - "Environment": "Example-Environment" - }, - "output": { - "ConfigurationVersion": "1", - "ContentType": "application/octet-stream" - }, - "comments": { - }, - "description": "The following get-configuration example returns the configuration details of the example application. On subsequent calls to get-configuration, use the client-configuration-version parameter to only update the configuration of your application if the version has changed. Only updating the configuration when the version has changed avoids excess charges incurred by calling get-configuration.", - "id": "to-retrieve-configuration-details-1632265954314", - "title": "To retrieve configuration details" - } - ], - "GetConfigurationProfile": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f" - }, - "output": { - "ApplicationId": "339ohji", - "Id": "ur8hx2f", - "LocationUri": "ssm-parameter://Example-Parameter", - "Name": "Example-Configuration-Profile", - "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" - }, - "comments": { - }, - "description": "The following get-configuration-profile example returns the details of the specified configuration profile.", - "id": "to-retrieve-configuration-profile-details-1632266081013", - "title": "To retrieve configuration profile details" - } - ], - "GetDeployment": [ - { - "input": { - "ApplicationId": "339ohji", - "DeploymentNumber": 1, - "EnvironmentId": "54j1r29" - }, - "output": { - "ApplicationId": "339ohji", - "CompletedAt": "2021-09-17T21:59:03.888000+00:00", - "ConfigurationLocationUri": "ssm-parameter://Example-Parameter", - "ConfigurationName": "Example-Configuration-Profile", - "ConfigurationProfileId": "ur8hx2f", - "ConfigurationVersion": "1", - "DeploymentDurationInMinutes": 15, - "DeploymentNumber": 1, - "DeploymentStrategyId": "1225qzk", - "EnvironmentId": "54j1r29", - "EventLog": [ - { - "Description": "Deployment completed", - "EventType": "DEPLOYMENT_COMPLETED", - "OccurredAt": "2021-09-17T21:59:03.888000+00:00", - "TriggeredBy": "APPCONFIG" - }, - { - "Description": "Deployment bake time started", - "EventType": "BAKE_TIME_STARTED", - "OccurredAt": "2021-09-17T21:58:57.722000+00:00", - "TriggeredBy": "APPCONFIG" - }, - { - "Description": "Configuration available to 100.00% of clients", - "EventType": "PERCENTAGE_UPDATED", - "OccurredAt": "2021-09-17T21:55:56.816000+00:00", - "TriggeredBy": "APPCONFIG" - }, - { - "Description": "Configuration available to 75.00% of clients", - "EventType": "PERCENTAGE_UPDATED", - "OccurredAt": "2021-09-17T21:52:56.567000+00:00", - "TriggeredBy": "APPCONFIG" - }, - { - "Description": "Configuration available to 50.00% of clients", - "EventType": "PERCENTAGE_UPDATED", - "OccurredAt": "2021-09-17T21:49:55.737000+00:00", - "TriggeredBy": "APPCONFIG" - }, - { - "Description": "Configuration available to 25.00% of clients", - "EventType": "PERCENTAGE_UPDATED", - "OccurredAt": "2021-09-17T21:46:55.187000+00:00", - "TriggeredBy": "APPCONFIG" - }, - { - "Description": "Deployment started", - "EventType": "DEPLOYMENT_STARTED", - "OccurredAt": "2021-09-17T21:43:54.205000+00:00", - "TriggeredBy": "USER" - } - ], - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "PercentageComplete": 100, - 
"StartedAt": "2021-09-17T21:43:54.205000+00:00", - "State": "COMPLETE" - }, - "comments": { - }, - "description": "The following get-deployment example lists details of the deployment to the application in the specified environment and deployment.", - "id": "to-retrieve-deployment-details-1633976766883", - "title": "To retrieve deployment details" - } - ], - "GetDeploymentStrategy": [ - { - "input": { - "DeploymentStrategyId": "1225qzk" - }, - "output": { - "DeploymentDurationInMinutes": 15, - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "Id": "1225qzk", - "Name": "Example-Deployment", - "ReplicateTo": "SSM_DOCUMENT" - }, - "comments": { - }, - "description": "The following get-deployment-strategy example lists the details of the specified deployment strategy.", - "id": "to-retrieve-details-of-a-deployment-strategy-1632266385805", - "title": "To retrieve details of a deployment strategy" - } - ], - "GetEnvironment": [ - { - "input": { - "ApplicationId": "339ohji", - "EnvironmentId": "54j1r29" - }, - "output": { - "ApplicationId": "339ohji", - "Id": "54j1r29", - "Name": "Example-Environment", - "State": "READY_FOR_DEPLOYMENT" - }, - "comments": { - }, - "description": "The following get-environment example returns the details and state of the specified environment.", - "id": "to-retrieve-environment-details-1632266924806", - "title": "To retrieve environment details" - } - ], - "GetHostedConfigurationVersion": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "VersionNumber": 1 - }, - "output": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "ContentType": "application/json", - "VersionNumber": 1 - }, - "comments": { - }, - "description": "The following get-hosted-configuration-version example retrieves the configuration details of the AWS AppConfig hosted configuration.", - "id": "to-retrieve-hosted-configuration-details-1632267003527", - "title": "To retrieve hosted configuration details" - } - ], - "ListApplications": [ - { - "input": { - }, - "output": { - "Items": [ - { - "Description": "An application used for creating an example.", - "Id": "339ohji", - "Name": "test-application" - }, - { - "Id": "rwalwu7", - "Name": "Test-Application" - } - ] - }, - "comments": { - }, - "description": "The following list-applications example lists the available applications in your AWS account.", - "id": "to-list-the-available-applications-1632267111131", - "title": "To list the available applications" - } - ], - "ListConfigurationProfiles": [ - { - "input": { - "ApplicationId": "339ohji" - }, - "output": { - "Items": [ - { - "ApplicationId": "339ohji", - "Id": "ur8hx2f", - "LocationUri": "ssm-parameter://Example-Parameter", - "Name": "Example-Configuration-Profile" - } - ] - }, - "comments": { - }, - "description": "The following list-configuration-profiles example lists the available configuration profiles for the specified application.", - "id": "to-list-the-available-configuration-profiles-1632267193265", - "title": "To list the available configuration profiles" - } - ], - "ListDeploymentStrategies": [ - { - "input": { - }, - "output": { - "Items": [ - { - "DeploymentDurationInMinutes": 15, - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "Id": "1225qzk", - "Name": "Example-Deployment", - "ReplicateTo": "SSM_DOCUMENT" - } - ] - }, - "comments": { - }, - "description": "The following list-deployment-strategies example lists the available deployment strategies in your 
AWS account.", - "id": "to-list-the-available-deployment-strategies-1632267364180", - "title": "To list the available deployment strategies" - } - ], - "ListDeployments": [ - { - "input": { - "ApplicationId": "339ohji", - "EnvironmentId": "54j1r29" - }, - "output": { - "Items": [ - { - "CompletedAt": "2021-09-17T21:59:03.888000+00:00", - "ConfigurationName": "Example-Configuration-Profile", - "ConfigurationVersion": "1", - "DeploymentDurationInMinutes": 15, - "DeploymentNumber": 1, - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "PercentageComplete": 100, - "StartedAt": "2021-09-17T21:43:54.205000+00:00", - "State": "COMPLETE" - } - ] - }, - "comments": { - }, - "description": "The following list-deployments example lists the available deployments in your AWS account for the specified application and environment.", - "id": "to-list-the-available-deployments-1632267282025", - "title": "To list the available deployments" - } - ], - "ListEnvironments": [ - { - "input": { - "ApplicationId": "339ohji" - }, - "output": { - "Items": [ - { - "ApplicationId": "339ohji", - "Id": "54j1r29", - "Name": "Example-Environment", - "State": "READY_FOR_DEPLOYMENT" - } - ] - }, - "comments": { - }, - "description": "The following list-environments example lists the available environments in your AWS account for the specified application.", - "id": "to-list-the-available-environments-1632267474389", - "title": "To list the available environments" - } - ], - "ListHostedConfigurationVersions": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f" - }, - "output": { - "Items": [ - { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "ContentType": "application/json", - "VersionNumber": 1 - } - ] - }, - "comments": { - }, - "description": "The following list-hosted-configuration-versions example lists the configurations versions hosted in the AWS AppConfig hosted configuration store for the specified application and configuration profile.", - "id": "to-list-the-available-hosted-configuration-versions-1632267647667", - "title": "To list the available hosted configuration versions" - } - ], - "ListTagsForResource": [ - { - "input": { - "ResourceArn": "arn:aws:appconfig:us-east-1:111122223333:application/339ohji" - }, - "output": { - "Tags": { - "group1": "1" - } - }, - "comments": { - }, - "description": "The following list-tags-for-resource example lists the tags of a specified application.", - "id": "to-list-the-tags-of-an-application-1632328796560", - "title": "To list the tags of an application" - } - ], - "StartDeployment": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "ConfigurationVersion": "1", - "DeploymentStrategyId": "1225qzk", - "Description": "", - "EnvironmentId": "54j1r29", - "Tags": { - } - }, - "output": { - "ApplicationId": "339ohji", - "ConfigurationLocationUri": "ssm-parameter://Example-Parameter", - "ConfigurationName": "Example-Configuration-Profile", - "ConfigurationProfileId": "ur8hx2f", - "ConfigurationVersion": "1", - "DeploymentDurationInMinutes": 15, - "DeploymentNumber": 1, - "DeploymentStrategyId": "1225qzk", - "EnvironmentId": "54j1r29", - "EventLog": [ - { - "Description": "Deployment started", - "EventType": "DEPLOYMENT_STARTED", - "OccurredAt": "2021-09-17T21:43:54.205000+00:00", - "TriggeredBy": "USER" - } - ], - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "PercentageComplete": 1.0, - "StartedAt": 
"2021-09-17T21:43:54.205000+00:00", - "State": "DEPLOYING" - }, - "comments": { - }, - "description": "The following start-deployment example starts a deployment to the application using the specified environment, deployment strategy, and configuration profile.", - "id": "to-start-a-configuration-deployment-1632328956790", - "title": "To start a configuration deployment" - } - ], - "StopDeployment": [ - { - "input": { - "ApplicationId": "339ohji", - "DeploymentNumber": 2, - "EnvironmentId": "54j1r29" - }, - "output": { - "DeploymentDurationInMinutes": 15, - "DeploymentNumber": 2, - "FinalBakeTimeInMinutes": 0, - "GrowthFactor": 25.0, - "PercentageComplete": 1.0 - }, - "comments": { - }, - "description": "The following stop-deployment example stops the deployment of an application configuration to the specified environment.", - "id": "to-stop-configuration-deployment-1632329139126", - "title": "To stop configuration deployment" - } - ], - "TagResource": [ - { - "input": { - "ResourceArn": "arn:aws:appconfig:us-east-1:111122223333:application/339ohji", - "Tags": { - "group1": "1" - } - }, - "comments": { - }, - "description": "The following tag-resource example tags an application resource.", - "id": "to-tag-an-application-1632330350645", - "title": "To tag an application" - } - ], - "UntagResource": [ - { - "input": { - "ResourceArn": "arn:aws:appconfig:us-east-1:111122223333:application/339ohji", - "TagKeys": [ - "group1" - ] - }, - "comments": { - }, - "description": "The following untag-resource example removes the group1 tag from the specified application.", - "id": "to-remove-a-tag-from-an-application-1632330429881", - "title": "To remove a tag from an application" - } - ], - "UpdateApplication": [ - { - "input": { - "ApplicationId": "339ohji", - "Description": "", - "Name": "Example-Application" - }, - "output": { - "Description": "An application used for creating an example.", - "Id": "339ohji", - "Name": "Example-Application" - }, - "comments": { - }, - "description": "The following update-application example updates the name of the specified application.", - "id": "to-update-an-application-1632330585893", - "title": "To update an application" - } - ], - "UpdateConfigurationProfile": [ - { - "input": { - "ApplicationId": "339ohji", - "ConfigurationProfileId": "ur8hx2f", - "Description": "Configuration profile used for examples." - }, - "output": { - "ApplicationId": "339ohji", - "Description": "Configuration profile used for examples.", - "Id": "ur8hx2f", - "LocationUri": "ssm-parameter://Example-Parameter", - "Name": "Example-Configuration-Profile", - "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" - }, - "comments": { - }, - "description": "The following update-configuration-profile example updates the description of the specified configuration profile.", - "id": "to-update-a-configuration-profile-1632330721974", - "title": "To update a configuration profile" - } - ], - "UpdateDeploymentStrategy": [ - { - "input": { - "DeploymentStrategyId": "1225qzk", - "FinalBakeTimeInMinutes": 20 - }, - "output": { - "DeploymentDurationInMinutes": 15, - "FinalBakeTimeInMinutes": 20, - "GrowthFactor": 25, - "GrowthType": "LINEAR", - "Id": "1225qzk", - "Name": "Example-Deployment", - "ReplicateTo": "SSM_DOCUMENT" - }, - "comments": { - }, - "description": "The following update-deployment-strategy example updates final bake time to 20 minutes in the specified deployment strategy. 
::\n", - "id": "to-update-a-deployment-strategy-1632330896602", - "title": "To update a deployment strategy" - } - ], - "UpdateEnvironment": [ - { - "input": { - "ApplicationId": "339ohji", - "Description": "An environment for examples.", - "EnvironmentId": "54j1r29" - }, - "output": { - "ApplicationId": "339ohji", - "Description": "An environment for examples.", - "Id": "54j1r29", - "Name": "Example-Environment", - "State": "ROLLED_BACK" - }, - "comments": { - }, - "description": "The following update-environment example updates an environment's description.", - "id": "to-update-an-environment-1632331382428", - "title": "To update an environment" - } - ], - "ValidateConfiguration": [ - { - "input": { - "ApplicationId": "abc1234", - "ConfigurationProfileId": "ur8hx2f", - "ConfigurationVersion": "1" - }, - "comments": { - }, - "description": "The following validate-configuration example uses the validators in a configuration profile to validate a configuration.", - "id": "to-validate-a-configuration-1632331491365", - "title": "To validate a configuration" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/paginators-1.json b/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/paginators-1.json deleted file mode 100644 index f176bab..0000000 --- a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "ListApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListConfigurationProfiles": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListDeploymentStrategies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListDeployments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListEnvironments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListExtensionAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListExtensions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListHostedConfigurationVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/service-2.json.gz deleted file mode 100644 index a7c62e8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/waiters-2.json b/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/waiters-2.json deleted file mode 100644 index faaf59d..0000000 --- a/venv/Lib/site-packages/botocore/data/appconfig/2019-10-09/waiters-2.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "version": 2, - "waiters": { - "EnvironmentReadyForDeployment": { - "operation": "GetEnvironment", - "delay": 30, - "maxAttempts": 999, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "ReadyForDeployment" - }, - { - "state": 
"failure", - "matcher": "path", - "argument": "State", - "expected": "RolledBack" - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "Reverted" - } - ] - }, - "DeploymentComplete": { - "operation": "GetDeployment", - "delay": 30, - "maxAttempts": 999, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "COMPLETE" - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "ROLLED_BACK" - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "REVERTED" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index a34e673..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/examples-1.json b/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/paginators-1.json b/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/service-2.json.gz deleted file mode 100644 index 77552cd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appconfigdata/2021-11-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 319c5c7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/paginators-1.json deleted file mode 100644 index 8138e8a..0000000 --- a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListAppAuthorizations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "appAuthorizationSummaryList" - }, - "ListAppBundles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "appBundleSummaryList" - }, - "ListIngestionDestinations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "ingestionDestinations" - }, - "ListIngestions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "ingestions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/service-2.json.gz deleted file mode 100644 index f736886..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/waiters-2.json b/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/appfabric/2023-05-19/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0032c5b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/examples-1.json b/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/service-2.json.gz deleted file mode 100644 index aa451ef..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appflow/2020-08-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index 564fad9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/examples-1.json b/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/paginators-1.json deleted file mode 100644 index 64b4b5c..0000000 --- a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListApplications": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Applications" - }, - "ListDataIntegrationAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DataIntegrationAssociations" - }, - "ListDataIntegrations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - 
"result_key": "DataIntegrations" - }, - "ListEventIntegrationAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EventIntegrationAssociations" - }, - "ListEventIntegrations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EventIntegrations" - }, - "ListApplicationAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ApplicationAssociations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/service-2.json.gz deleted file mode 100644 index af39528..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appintegrations/2020-07-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index bec2f09..0000000 Binary files a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/examples-1.json b/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/examples-1.json deleted file mode 100644 index 5abcd55..0000000 --- a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/examples-1.json +++ /dev/null @@ -1,221 +0,0 @@ -{ - "version": "1.0", - "examples": { - "DeleteScalingPolicy": [ - { - "input": { - "PolicyName": "web-app-cpu-lt-25", - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a scaling policy for the Amazon ECS service called web-app, which is running in the default cluster.", - "id": "to-delete-a-scaling-policy-1470863892689", - "title": "To delete a scaling policy" - } - ], - "DeregisterScalableTarget": [ - { - "input": { - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deregisters a scalable target for an Amazon ECS service called web-app that is running in the default cluster.", - "id": "to-deregister-a-scalable-target-1470864164895", - "title": "To deregister a scalable target" - } - ], - "DescribeScalableTargets": [ - { - "input": { - "ServiceNamespace": "ecs" - }, - "output": { - "ScalableTargets": [ - { - "CreationTime": "2019-05-06T11:21:46.199Z", - "MaxCapacity": 10, - "MinCapacity": 1, - "ResourceId": "service/default/web-app", - "RoleARN": "arn:aws:iam::012345678910:role/aws-service-role/ecs.application-autoscaling.amazonaws.com/AWSServiceRoleForApplicationAutoScaling_ECSService", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs", - "SuspendedState": { - "DynamicScalingInSuspended": false, - "DynamicScalingOutSuspended": false, - "ScheduledScalingSuspended": false - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the scalable targets for the ECS service namespace.", - "id": 
"to-describe-scalable-targets-1470864286961", - "title": "To describe scalable targets" - } - ], - "DescribeScalingActivities": [ - { - "input": { - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs" - }, - "output": { - "ScalingActivities": [ - { - "ActivityId": "e6c5f7d1-dbbb-4a3f-89b2-51f33e766399", - "Cause": "monitor alarm web-app-cpu-lt-25 in state ALARM triggered policy web-app-cpu-lt-25", - "Description": "Setting desired count to 1.", - "EndTime": "2019-05-06T16:04:32.111Z", - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs", - "StartTime": "2019-05-06T16:03:58.171Z", - "StatusCode": "Successful", - "StatusMessage": "Successfully set desired count to 1. Change successfully fulfilled by ecs." - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the scaling activities for an Amazon ECS service called web-app that is running in the default cluster.", - "id": "to-describe-scaling-activities-for-a-scalable-target-1470864398629", - "title": "To describe scaling activities for a scalable target" - } - ], - "DescribeScalingPolicies": [ - { - "input": { - "ServiceNamespace": "ecs" - }, - "output": { - "NextToken": "", - "ScalingPolicies": [ - { - "Alarms": [ - { - "AlarmARN": "arn:aws:cloudwatch:us-west-2:012345678910:alarm:web-app-cpu-gt-75", - "AlarmName": "web-app-cpu-gt-75" - } - ], - "CreationTime": "2019-05-06T12:11:39.230Z", - "PolicyARN": "arn:aws:autoscaling:us-west-2:012345678910:scalingPolicy:6d8972f3-efc8-437c-92d1-6270f29a66e7:resource/ecs/service/default/web-app:policyName/web-app-cpu-gt-75", - "PolicyName": "web-app-cpu-gt-75", - "PolicyType": "StepScaling", - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs", - "StepScalingPolicyConfiguration": { - "AdjustmentType": "PercentChangeInCapacity", - "Cooldown": 60, - "StepAdjustments": [ - { - "MetricIntervalLowerBound": 0, - "ScalingAdjustment": 200 - } - ] - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the scaling policies for the ECS service namespace.", - "id": "to-describe-scaling-policies-1470864609734", - "title": "To describe scaling policies" - } - ], - "PutScalingPolicy": [ - { - "input": { - "PolicyName": "cpu75-target-tracking-scaling-policy", - "PolicyType": "TargetTrackingScaling", - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs", - "TargetTrackingScalingPolicyConfiguration": { - "PredefinedMetricSpecification": { - "PredefinedMetricType": "ECSServiceAverageCPUUtilization" - }, - "ScaleInCooldown": 60, - "ScaleOutCooldown": 60, - "TargetValue": 75 - } - }, - "output": { - "Alarms": [ - { - "AlarmARN": "arn:aws:cloudwatch:us-west-2:012345678910:alarm:TargetTracking-service/default/web-app-AlarmHigh-d4f0770c-b46e-434a-a60f-3b36d653feca", - "AlarmName": "TargetTracking-service/default/web-app-AlarmHigh-d4f0770c-b46e-434a-a60f-3b36d653feca" - }, - { - "AlarmARN": "arn:aws:cloudwatch:us-west-2:012345678910:alarm:TargetTracking-service/default/web-app-AlarmLow-1b437334-d19b-4a63-a812-6c67aaf2910d", - "AlarmName": "TargetTracking-service/default/web-app-AlarmLow-1b437334-d19b-4a63-a812-6c67aaf2910d" - } - ], - "PolicyARN": 
"arn:aws:autoscaling:us-west-2:012345678910:scalingPolicy:6d8972f3-efc8-437c-92d1-6270f29a66e7:resource/ecs/service/default/web-app:policyName/cpu75-target-tracking-scaling-policy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example applies a target tracking scaling policy with a predefined metric specification to an Amazon ECS service called web-app in the default cluster. The policy keeps the average CPU utilization of the service at 75 percent, with scale-out and scale-in cooldown periods of 60 seconds.", - "id": "to-apply-a-target-tracking-scaling-policy-with-a-predefined-metric-specification-1569364247984", - "title": "To apply a target tracking scaling policy with a predefined metric specification" - } - ], - "RegisterScalableTarget": [ - { - "input": { - "MaxCapacity": 10, - "MinCapacity": 1, - "ResourceId": "service/default/web-app", - "ScalableDimension": "ecs:service:DesiredCount", - "ServiceNamespace": "ecs" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example registers a scalable target from an Amazon ECS service called web-app that is running on the default cluster, with a minimum desired count of 1 task and a maximum desired count of 10 tasks.", - "id": "to-register-a-new-scalable-target-1470864910380", - "title": "To register an ECS service as a scalable target" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/paginators-1.json deleted file mode 100644 index 7ec8f3a..0000000 --- a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "DescribeScalableTargets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScalableTargets" - }, - "DescribeScalingActivities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScalingActivities" - }, - "DescribeScalingPolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScalingPolicies" - }, - "DescribeScheduledActions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScheduledActions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/service-2.json.gz deleted file mode 100644 index 53da326..0000000 Binary files a/venv/Lib/site-packages/botocore/data/application-autoscaling/2016-02-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index c913a0a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/examples-1.json b/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/examples-1.json +++ /dev/null @@ -1,5 
+0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/service-2.json.gz deleted file mode 100644 index 99d2645..0000000 Binary files a/venv/Lib/site-packages/botocore/data/application-insights/2018-11-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 58cec8a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.json deleted file mode 100644 index 6eef2f1..0000000 --- a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "ListServiceDependencies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceDependencies" - }, - "ListServiceDependents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceDependents" - }, - "ListServiceLevelObjectives": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SloSummaries" - }, - "ListServiceOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceOperations" - }, - "ListServices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceSummaries" - }, - "ListServiceLevelObjectiveExclusionWindows": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ExclusionWindows" - }, - "ListServiceStates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceStates" - }, - "ListEntityEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ChangeEvents" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json deleted file mode 100644 index 4813a89..0000000 --- a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListServiceDependencies": { - "non_aggregate_keys": [ - "StartTime", - "EndTime" - ] - }, - "ListServiceDependents": { - "non_aggregate_keys": [ - "StartTime", - "EndTime" - ] - }, - "ListServiceOperations": { - "non_aggregate_keys": [ - "StartTime", - "EndTime" 
- ] - }, - "ListServices": { - "non_aggregate_keys": [ - "StartTime", - "EndTime" - ] - }, - "ListServiceStates": { - "non_aggregate_keys": [ - "StartTime", - "EndTime" - ] - }, - "ListEntityEvents": { - "non_aggregate_keys": [ - "StartTime", - "EndTime" - ] - } - } - } - } diff --git a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/service-2.json.gz deleted file mode 100644 index 96fdac9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/application-signals/2024-04-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index ca95346..0000000 Binary files a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/examples-1.json b/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/paginators-1.json deleted file mode 100644 index adffd06..0000000 --- a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListReportDefinitions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "reportDefinitions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/service-2.json.gz deleted file mode 100644 index 7fbe014..0000000 Binary files a/venv/Lib/site-packages/botocore/data/applicationcostprofiler/2020-09-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index c2e979b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/examples-1.json b/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/examples-1.json deleted file mode 100644 index 752e89e..0000000 --- a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/examples-1.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "version": "1.0", - "examples": { } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/paginators-1.json deleted file mode 100644 index 162b8b9..0000000 --- a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListMeshes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - 
"result_key": "meshes" - }, - "ListRoutes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "routes" - }, - "ListVirtualNodes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "virtualNodes" - }, - "ListVirtualRouters": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "virtualRouters" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/service-2.json.gz deleted file mode 100644 index d058ce3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appmesh/2018-10-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index ff5e1a6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/examples-1.json b/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/paginators-1.json deleted file mode 100644 index 5a79b5b..0000000 --- a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "ListMeshes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "meshes" - }, - "ListRoutes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "routes" - }, - "ListVirtualNodes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "virtualNodes" - }, - "ListVirtualRouters": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "virtualRouters" - }, - "ListVirtualServices": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "virtualServices" - }, - "ListTagsForResource": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "tags" - }, - "ListGatewayRoutes": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "gatewayRoutes" - }, - "ListVirtualGateways": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "virtualGateways" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/service-2.json.gz deleted file mode 100644 index d2f2e35..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appmesh/2019-01-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index d213d49..0000000 Binary 
files a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/examples-1.json b/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/service-2.json.gz deleted file mode 100644 index e1e3887..0000000 Binary files a/venv/Lib/site-packages/botocore/data/apprunner/2020-05-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 16fd2dc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/examples-1.json b/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/paginators-1.json deleted file mode 100644 index 40cbf4b..0000000 --- a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/paginators-1.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "pagination": { - "DescribeDirectoryConfigs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DirectoryConfigs" - }, - "DescribeFleets": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Fleets" - }, - "DescribeImageBuilders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ImageBuilders" - }, - "DescribeImages": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Images" - }, - "DescribeSessions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Sessions" - }, - "DescribeStacks": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Stacks" - }, - "DescribeUserStackAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "UserStackAssociations" - }, - "DescribeUsers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Users" - }, - "ListAssociatedFleets": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Names" - }, - "ListAssociatedStacks": { - "input_token": "NextToken", - 
"output_token": "NextToken", - "result_key": "Names" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/service-2.json.gz deleted file mode 100644 index 7caf5c1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/waiters-2.json deleted file mode 100644 index 1c8dea0..0000000 --- a/venv/Lib/site-packages/botocore/data/appstream/2016-12-01/waiters-2.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "version": 2, - "waiters": { - "FleetStarted": { - "delay": 30, - "maxAttempts": 40, - "operation": "DescribeFleets", - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Fleets[].State", - "expected": "RUNNING" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Fleets[].State", - "expected": "STOPPING" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Fleets[].State", - "expected": "STOPPED" - } - ] - }, - "FleetStopped": { - "delay": 30, - "maxAttempts": 40, - "operation": "DescribeFleets", - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Fleets[].State", - "expected": "STOPPED" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Fleets[].State", - "expected": "STARTING" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Fleets[].State", - "expected": "RUNNING" - } - ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1eb7252..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/examples-1.json b/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/paginators-1.json deleted file mode 100644 index fbb7de0..0000000 --- a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/paginators-1.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "pagination": { - "ListApiKeys": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "apiKeys" - }, - "ListDataSources": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "dataSources" - }, - "ListFunctions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "functions" - }, - "ListGraphqlApis": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "graphqlApis" - }, - "ListResolvers": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "resolvers" - }, - "ListResolversByFunction": { - "input_token": "nextToken", - "limit_key": "maxResults", - 
"output_token": "nextToken", - "result_key": "resolvers" - }, - "ListTypes": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "types" - }, - "ListDomainNames": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "domainNameConfigs" - }, - "ListSourceApiAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sourceApiAssociationSummaries" - }, - "ListTypesByAssociation": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "types" - }, - "ListApis": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "apis" - }, - "ListChannelNamespaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "channelNamespaces" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/service-2.json.gz deleted file mode 100644 index 11e4f6c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/appsync/2017-07-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 56a8964..0000000 Binary files a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/paginators-1.json deleted file mode 100644 index 2e521c9..0000000 --- a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "GetPlanEvaluationStatus": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "warnings" - }, - "GetPlanExecution": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "stepStates" - }, - "ListPlanExecutionEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListPlanExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListPlans": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "plans" - }, - "ListPlansInRegion": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "plans" - }, - "ListRoute53HealthChecks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "healthChecks" - }, - "ListRoute53HealthChecksInRegion": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "healthChecks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/paginators-1.sdk-extras.json deleted file mode 100644 index f8c1737..0000000 --- 
a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/paginators-1.sdk-extras.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetPlanEvaluationStatus": { - "non_aggregate_keys": [ - "lastEvaluatedVersion", - "planArn", - "lastEvaluationTime", - "evaluationState", - "region" - ] - }, - "GetPlanExecution": { - "non_aggregate_keys": [ - "comment", - "executionRegion", - "endTime", - "startTime", - "executionState", - "executionAction", - "plan", - "executionId", - "updatedAt", - "planArn", - "actualRecoveryTime", - "version", - "mode", - "generatedReportDetails" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/service-2.json.gz deleted file mode 100644 index ea6a214..0000000 Binary files a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/waiters-2.json deleted file mode 100644 index c9624e4..0000000 --- a/venv/Lib/site-packages/botocore/data/arc-region-switch/2022-07-26/waiters-2.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "PlanEvaluationStatusPassed" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetPlanEvaluationStatus", - "acceptors" : [ { - "matcher" : "path", - "argument" : "evaluationState", - "state" : "success", - "expected" : "passed" - }, { - "matcher" : "path", - "argument" : "evaluationState", - "state" : "failure", - "expected" : "actionRequired" - }, { - "matcher" : "path", - "argument" : "evaluationState", - "state" : "retry", - "expected" : "pendingEvaluation" - } ] - }, - "PlanExecutionCompleted" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetPlanExecution", - "acceptors" : [ { - "matcher" : "path", - "argument" : "executionState", - "state" : "success", - "expected" : "completed" - }, { - "matcher" : "path", - "argument" : "executionState", - "state" : "success", - "expected" : "completedWithExceptions" - }, { - "matcher" : "path", - "argument" : "executionState", - "state" : "failure", - "expected" : "failed" - }, { - "matcher" : "path", - "argument" : "executionState", - "state" : "failure", - "expected" : "canceled" - }, { - "matcher" : "path", - "argument" : "executionState", - "state" : "failure", - "expected" : "planExecutionTimedOut" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6654e4c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json deleted file mode 100644 index 9fcfc56..0000000 --- a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListManagedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListZonalShifts": { - "input_token": "nextToken", - 
"output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListAutoshifts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/service-2.json.gz deleted file mode 100644 index 34ae445..0000000 Binary files a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/waiters-2.json b/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/arc-zonal-shift/2022-10-30/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index b70b230..0000000 Binary files a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/paginators-1.json deleted file mode 100644 index ae2de6d..0000000 --- a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListReports": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "reports" - }, - "ListCustomerAgreements": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "customerAgreements" - }, - "ListReportVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "reports" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/service-2.json.gz deleted file mode 100644 index e09f855..0000000 Binary files a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/artifact/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/athena/2017-05-18/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7490f38..0000000 Binary files a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/examples-1.json b/venv/Lib/site-packages/botocore/data/athena/2017-05-18/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/paginators-1.json b/venv/Lib/site-packages/botocore/data/athena/2017-05-18/paginators-1.json deleted file mode 100644 index 3b126ba..0000000 --- a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/paginators-1.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "pagination": { - "ListNamedQueries": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "NamedQueryIds" - }, - "ListQueryExecutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "QueryExecutionIds" - }, - "GetQueryResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResultSet.Rows", - "non_aggregate_keys": [ - "ResultSet.ResultSetMetadata", - "UpdateCount" - ] - }, - "ListDataCatalogs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DataCatalogsSummary" - }, - "ListDatabases": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DatabaseList" - }, - "ListTableMetadata": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TableMetadataList" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/athena/2017-05-18/service-2.json.gz deleted file mode 100644 index c420a88..0000000 Binary files a/venv/Lib/site-packages/botocore/data/athena/2017-05-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8beb4e9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/examples-1.json b/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/service-2.json.gz deleted file mode 100644 index 3e5ad9b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/auditmanager/2017-07-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index 251f228..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/examples-1.json b/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/paginators-1.json deleted file mode 100644 index e3f812a..0000000 --- a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "DescribeScalingPlanResources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScalingPlanResources" - }, - "DescribeScalingPlans": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScalingPlans" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/service-2.json.gz deleted file mode 100644 index 9e1f0c0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/autoscaling-plans/2018-01-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2c5c3c5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/examples-1.json b/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/examples-1.json deleted file mode 100644 index af6929b..0000000 --- a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/examples-1.json +++ /dev/null @@ -1,1696 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AttachInstances": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceIds": [ - "i-93633f9b" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified instance to the specified Auto Scaling group.", - "id": "autoscaling-attach-instances-1", - "title": "To attach an instance to an Auto Scaling group" - } - ], - "AttachLoadBalancerTargetGroups": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "TargetGroupARNs": [ - "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified target group to the specified Auto Scaling group.", - "id": "autoscaling-attach-load-balancer-target-groups-1", - "title": "To attach a target group to an Auto Scaling group" - } - ], - "AttachLoadBalancers": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "LoadBalancerNames": [ - "my-load-balancer" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified load balancer to the specified Auto Scaling group.", - "id": 
"autoscaling-attach-load-balancers-1", - "title": "To attach a load balancer to an Auto Scaling group" - } - ], - "AttachTrafficSources": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "TrafficSources": [ - { - "Identifier": "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified target group to the specified Auto Scaling group.", - "id": "to-attach-a-target-group-to-an-auto-scaling-group-1680036570089", - "title": "To attach a target group to an Auto Scaling group" - } - ], - "CancelInstanceRefresh": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "InstanceRefreshId": "08b91cf7-8fa6-48af-b6a6-d227f40f1b9b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels an instance refresh operation in progress.", - "id": "to-cancel-an-instance-refresh-1592960979817", - "title": "To cancel an instance refresh" - } - ], - "CompleteLifecycleAction": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "LifecycleActionResult": "CONTINUE", - "LifecycleActionToken": "bcd2f1b8-9a78-44d3-8a7a-4dd07d7cf635", - "LifecycleHookName": "my-lifecycle-hook" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example notifies Auto Scaling that the specified lifecycle action is complete so that it can finish launching or terminating the instance.", - "id": "autoscaling-complete-lifecycle-action-1", - "title": "To complete the lifecycle action" - } - ], - "CreateAutoScalingGroup": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "DefaultInstanceWarmup": 120, - "LaunchTemplate": { - "LaunchTemplateName": "my-template-for-auto-scaling", - "Version": "$Default" - }, - "MaxInstanceLifetime": 2592000, - "MaxSize": 3, - "MinSize": 1, - "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Auto Scaling group.", - "id": "autoscaling-create-auto-scaling-group-1", - "title": "To create an Auto Scaling group" - }, - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "HealthCheckGracePeriod": 300, - "HealthCheckType": "ELB", - "LaunchTemplate": { - "LaunchTemplateName": "my-template-for-auto-scaling", - "Version": "$Default" - }, - "MaxSize": 3, - "MinSize": 1, - "TargetGroupARNs": [ - "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" - ], - "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE, subnet-610acd08EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Auto Scaling group and attaches the specified target group.", - "id": "autoscaling-create-auto-scaling-group-2", - "title": "To create an Auto Scaling group with an attached target group" - }, - { - "input": { - "AutoScalingGroupName": "my-asg", - "DesiredCapacity": 3, - "MaxSize": 5, - "MinSize": 1, - "MixedInstancesPolicy": { - "InstancesDistribution": { - "OnDemandBaseCapacity": 1, - "OnDemandPercentageAboveBaseCapacity": 50, - "SpotAllocationStrategy": "price-capacity-optimized" - }, - "LaunchTemplate": { - "LaunchTemplateSpecification": { - "LaunchTemplateName": "my-launch-template-for-x86", - "Version": "$Default" - }, - "Overrides": [ - { - "InstanceType": "c6g.large", - 
"LaunchTemplateSpecification": { - "LaunchTemplateName": "my-launch-template-for-arm", - "Version": "$Default" - } - }, - { - "InstanceType": "c5.large" - }, - { - "InstanceType": "c5a.large" - } - ] - } - }, - "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE, subnet-610acd08EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Auto Scaling group with a mixed instances policy. It specifies the c5.large, c5a.large, and c6g.large instance types and defines a different launch template for the c6g.large instance type.", - "id": "autoscaling-create-auto-scaling-group-3", - "title": "To create an Auto Scaling group with a mixed instances policy" - }, - { - "input": { - "AutoScalingGroupName": "my-asg", - "DesiredCapacity": 4, - "DesiredCapacityType": "units", - "MaxSize": 100, - "MinSize": 0, - "MixedInstancesPolicy": { - "InstancesDistribution": { - "OnDemandPercentageAboveBaseCapacity": 50, - "SpotAllocationStrategy": "price-capacity-optimized" - }, - "LaunchTemplate": { - "LaunchTemplateSpecification": { - "LaunchTemplateName": "my-template-for-auto-scaling", - "Version": "$Default" - }, - "Overrides": [ - { - "InstanceRequirements": { - "CpuManufacturers": [ - "intel" - ], - "MemoryMiB": { - "Min": 16384 - }, - "VCpuCount": { - "Max": 8, - "Min": 4 - } - } - } - ] - } - }, - "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE, subnet-610acd08EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Auto Scaling group using attribute-based instance type selection. It requires the instance types to have a minimum of four vCPUs and a maximum of eight vCPUs, a minimum of 16,384 MiB of memory, and an Intel manufactured CPU.", - "id": "autoscaling-create-auto-scaling-group-4", - "title": "To create an Auto Scaling group using attribute-based instance type selection" - } - ], - "CreateLaunchConfiguration": [ - { - "input": { - "IamInstanceProfile": "my-iam-role", - "ImageId": "ami-12345678", - "InstanceType": "m3.medium", - "LaunchConfigurationName": "my-launch-config", - "SecurityGroups": [ - "sg-eb2af88e" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a launch configuration.", - "id": "autoscaling-create-launch-configuration-1", - "title": "To create a launch configuration" - } - ], - "CreateOrUpdateTags": [ - { - "input": { - "Tags": [ - { - "Key": "Role", - "PropagateAtLaunch": true, - "ResourceId": "my-auto-scaling-group", - "ResourceType": "auto-scaling-group", - "Value": "WebServer" - }, - { - "Key": "Dept", - "PropagateAtLaunch": true, - "ResourceId": "my-auto-scaling-group", - "ResourceType": "auto-scaling-group", - "Value": "Research" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds two tags to the specified Auto Scaling group.", - "id": "autoscaling-create-or-update-tags-1", - "title": "To create or update tags for an Auto Scaling group" - } - ], - "DeleteAutoScalingGroup": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified Auto Scaling group.", - "id": "autoscaling-delete-auto-scaling-group-1", - "title": "To delete an Auto Scaling group" - }, - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "ForceDelete": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the 
specified Auto Scaling group and all its instances.", - "id": "autoscaling-delete-auto-scaling-group-2", - "title": "To delete an Auto Scaling group and all its instances" - } - ], - "DeleteLaunchConfiguration": [ - { - "input": { - "LaunchConfigurationName": "my-launch-config" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified launch configuration.", - "id": "autoscaling-delete-launch-configuration-1", - "title": "To delete a launch configuration" - } - ], - "DeleteLifecycleHook": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "LifecycleHookName": "my-lifecycle-hook" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified lifecycle hook.", - "id": "autoscaling-delete-lifecycle-hook-1", - "title": "To delete a lifecycle hook" - } - ], - "DeleteNotificationConfiguration": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified notification from the specified Auto Scaling group.", - "id": "autoscaling-delete-notification-configuration-1", - "title": "To delete an Auto Scaling notification" - } - ], - "DeletePolicy": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "PolicyName": "my-step-scale-out-policy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified Auto Scaling policy.", - "id": "autoscaling-delete-policy-1", - "title": "To delete an Auto Scaling policy" - } - ], - "DeleteScheduledAction": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "ScheduledActionName": "my-scheduled-action" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified scheduled action from the specified Auto Scaling group.", - "id": "autoscaling-delete-scheduled-action-1", - "title": "To delete a scheduled action from an Auto Scaling group" - } - ], - "DeleteTags": [ - { - "input": { - "Tags": [ - { - "Key": "Dept", - "ResourceId": "my-auto-scaling-group", - "ResourceType": "auto-scaling-group", - "Value": "Research" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified tag from the specified Auto Scaling group.", - "id": "autoscaling-delete-tags-1", - "title": "To delete a tag from an Auto Scaling group" - } - ], - "DescribeAccountLimits": [ - { - "output": { - "MaxNumberOfAutoScalingGroups": 20, - "MaxNumberOfLaunchConfigurations": 100, - "NumberOfAutoScalingGroups": 3, - "NumberOfLaunchConfigurations": 5 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Amazon EC2 Auto Scaling service quotas for your account.", - "id": "autoscaling-describe-account-limits-1", - "title": "To describe your Auto Scaling account limits" - } - ], - "DescribeAdjustmentTypes": [ - { - "output": { - "AdjustmentTypes": [ - { - "AdjustmentType": "ChangeInCapacity" - }, - { - "AdjustmentType": "ExactCapcity" - }, - { - "AdjustmentType": "PercentChangeInCapacity" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the available adjustment types.", - "id": "autoscaling-describe-adjustment-types-1", - "title": "To describe the Amazon EC2 Auto Scaling adjustment 
types" - } - ], - "DescribeAutoScalingGroups": [ - { - "input": { - "AutoScalingGroupNames": [ - "my-auto-scaling-group" - ] - }, - "output": { - "AutoScalingGroups": [ - { - "AutoScalingGroupARN": "arn:aws:autoscaling:us-west-1:123456789012:autoScalingGroup:12345678-1234-1234-1234-123456789012:autoScalingGroupName/my-auto-scaling-group", - "AutoScalingGroupName": "my-auto-scaling-group", - "AvailabilityZones": [ - "us-west-2a", - "us-west-2b", - "us-west-2c" - ], - "CreatedTime": "2023-03-09T22:15:11.611Z", - "DefaultCooldown": 300, - "DesiredCapacity": 2, - "EnabledMetrics": [ - - ], - "HealthCheckGracePeriod": 300, - "HealthCheckType": "EC2", - "Instances": [ - { - "AvailabilityZone": "us-west-2c", - "HealthStatus": "Healthy", - "InstanceId": "i-05b4f7d5be44822a6", - "InstanceType": "t3.micro", - "LaunchConfigurationName": "my-launch-config", - "LifecycleState": "InService", - "ProtectedFromScaleIn": false - }, - { - "AvailabilityZone": "us-west-2b", - "HealthStatus": "Healthy", - "InstanceId": "i-0c20ac468fa3049e8", - "InstanceType": "t3.micro", - "LaunchConfigurationName": "my-launch-config", - "LifecycleState": "InService", - "ProtectedFromScaleIn": false - } - ], - "LaunchConfigurationName": "my-launch-config", - "LoadBalancerNames": [ - - ], - "MaxSize": 5, - "MinSize": 1, - "NewInstancesProtectedFromScaleIn": false, - "ServiceLinkedRoleARN": "arn:aws:iam::123456789012:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling", - "SuspendedProcesses": [ - - ], - "Tags": [ - - ], - "TargetGroupARNs": [ - - ], - "TerminationPolicies": [ - "Default" - ], - "TrafficSources": [ - - ], - "VPCZoneIdentifier": "subnet-5ea0c127,subnet-6194ea3b,subnet-c934b782" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Auto Scaling group.", - "id": "autoscaling-describe-auto-scaling-groups-1", - "title": "To describe an Auto Scaling group" - } - ], - "DescribeAutoScalingInstances": [ - { - "input": { - "InstanceIds": [ - "i-05b4f7d5be44822a6" - ] - }, - "output": { - "AutoScalingInstances": [ - { - "AutoScalingGroupName": "my-auto-scaling-group", - "AvailabilityZone": "us-west-2c", - "HealthStatus": "HEALTHY", - "InstanceId": "i-05b4f7d5be44822a6", - "InstanceType": "t3.micro", - "LaunchConfigurationName": "my-launch-config", - "LifecycleState": "InService", - "ProtectedFromScaleIn": false - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Auto Scaling instance.", - "id": "autoscaling-describe-auto-scaling-instances-1", - "title": "To describe one or more Auto Scaling instances" - } - ], - "DescribeAutoScalingNotificationTypes": [ - { - "output": { - "AutoScalingNotificationTypes": [ - "autoscaling:EC2_INSTANCE_LAUNCH", - "autoscaling:EC2_INSTANCE_LAUNCH_ERROR", - "autoscaling:EC2_INSTANCE_TERMINATE", - "autoscaling:EC2_INSTANCE_TERMINATE_ERROR", - "autoscaling:TEST_NOTIFICATION" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the available notification types.", - "id": "autoscaling-describe-auto-scaling-notification-types-1", - "title": "To describe the Auto Scaling notification types" - } - ], - "DescribeInstanceRefreshes": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "InstanceRefreshes": [ - { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceRefreshId": "08b91cf7-8fa6-48af-b6a6-d227f40f1b9b", - "InstancesToUpdate": 0, 
- "PercentageComplete": 50, - "Preferences": { - "AlarmSpecification": { - "Alarms": [ - "my-alarm" - ] - }, - "AutoRollback": true, - "InstanceWarmup": 200, - "MinHealthyPercentage": 90, - "ScaleInProtectedInstances": "Ignore", - "SkipMatching": false, - "StandbyInstances": "Ignore" - }, - "StartTime": "2023-06-13T16:46:52+00:00", - "Status": "InProgress", - "StatusReason": "Waiting for instances to warm up before continuing. For example: i-0645704820a8e83ff is warming up." - }, - { - "AutoScalingGroupName": "my-auto-scaling-group", - "EndTime": "2023-06-02T13:59:45+00:00", - "InstanceRefreshId": "0e151305-1e57-4a32-a256-1fd14157c5ec", - "InstancesToUpdate": 0, - "PercentageComplete": 100, - "Preferences": { - "AlarmSpecification": { - "Alarms": [ - "my-alarm" - ] - }, - "AutoRollback": true, - "InstanceWarmup": 200, - "MinHealthyPercentage": 90, - "ScaleInProtectedInstances": "Ignore", - "SkipMatching": false, - "StandbyInstances": "Ignore" - }, - "StartTime": "2023-06-02T13:53:37+00:00", - "Status": "Successful" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the instance refreshes for the specified Auto Scaling group.", - "id": "to-list-instance-refreshes-1592959593746", - "title": "To list instance refreshes" - } - ], - "DescribeLaunchConfigurations": [ - { - "input": { - "LaunchConfigurationNames": [ - "my-launch-config" - ] - }, - "output": { - "LaunchConfigurations": [ - { - "AssociatePublicIpAddress": true, - "BlockDeviceMappings": [ - - ], - "CreatedTime": "2014-05-07T17:39:28.599Z", - "EbsOptimized": false, - "ImageId": "ami-043a5034", - "InstanceMonitoring": { - "Enabled": true - }, - "InstanceType": "t1.micro", - "LaunchConfigurationARN": "arn:aws:autoscaling:us-west-2:123456789012:launchConfiguration:98d3b196-4cf9-4e88-8ca1-8547c24ced8b:launchConfigurationName/my-launch-config", - "LaunchConfigurationName": "my-launch-config", - "SecurityGroups": [ - "sg-67ef0308" - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified launch configuration.", - "id": "autoscaling-describe-launch-configurations-1", - "title": "To describe Auto Scaling launch configurations" - } - ], - "DescribeLifecycleHookTypes": [ - { - "output": { - "LifecycleHookTypes": [ - "autoscaling:EC2_INSTANCE_LAUNCHING", - "autoscaling:EC2_INSTANCE_TERMINATING" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the available lifecycle hook types.", - "id": "autoscaling-describe-lifecycle-hook-types-1", - "title": "To describe the available types of lifecycle hooks" - } - ], - "DescribeLifecycleHooks": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "LifecycleHooks": [ - { - "AutoScalingGroupName": "my-auto-scaling-group", - "DefaultResult": "ABANDON", - "GlobalTimeout": 172800, - "HeartbeatTimeout": 3600, - "LifecycleHookName": "my-lifecycle-hook", - "LifecycleTransition": "autoscaling:EC2_INSTANCE_LAUNCHING", - "NotificationTargetARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic", - "RoleARN": "arn:aws:iam::123456789012:role/my-auto-scaling-role" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the lifecycle hooks for the specified Auto Scaling group.", - "id": "autoscaling-describe-lifecycle-hooks-1", - "title": "To describe your lifecycle hooks" - } - ], - "DescribeLoadBalancerTargetGroups": [ - { - "input": { - 
"AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "LoadBalancerTargetGroups": [ - { - "LoadBalancerTargetGroupARN": "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067", - "State": "Added" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the target groups attached to the specified Auto Scaling group.", - "id": "autoscaling-describe-load-balancer-target-groups-1", - "title": "To describe the target groups for an Auto Scaling group" - } - ], - "DescribeLoadBalancers": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "LoadBalancers": [ - { - "LoadBalancerName": "my-load-balancer", - "State": "Added" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the load balancers attached to the specified Auto Scaling group.", - "id": "autoscaling-describe-load-balancers-1", - "title": "To describe the load balancers for an Auto Scaling group" - } - ], - "DescribeMetricCollectionTypes": [ - { - "output": { - "Granularities": [ - { - "Granularity": "1Minute" - } - ], - "Metrics": [ - { - "Metric": "GroupMinSize" - }, - { - "Metric": "GroupMaxSize" - }, - { - "Metric": "GroupDesiredCapacity" - }, - { - "Metric": "GroupInServiceInstances" - }, - { - "Metric": "GroupPendingInstances" - }, - { - "Metric": "GroupTerminatingInstances" - }, - { - "Metric": "GroupStandbyInstances" - }, - { - "Metric": "GroupTotalInstances" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the available metric collection types.", - "id": "autoscaling-describe-metric-collection-types-1", - "title": "To describe the Auto Scaling metric collection types" - } - ], - "DescribeNotificationConfigurations": [ - { - "input": { - "AutoScalingGroupNames": [ - "my-auto-scaling-group" - ] - }, - "output": { - "NotificationConfigurations": [ - { - "AutoScalingGroupName": "my-auto-scaling-group", - "NotificationType": "autoscaling:TEST_NOTIFICATION", - "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic-2" - }, - { - "AutoScalingGroupName": "my-auto-scaling-group", - "NotificationType": "autoscaling:TEST_NOTIFICATION", - "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the notification configurations for the specified Auto Scaling group.", - "id": "autoscaling-describe-notification-configurations-1", - "title": "To describe Auto Scaling notification configurations" - } - ], - "DescribePolicies": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "ScalingPolicies": [ - { - "AdjustmentType": "ChangeInCapacity", - "Alarms": [ - - ], - "AutoScalingGroupName": "my-auto-scaling-group", - "PolicyARN": "arn:aws:autoscaling:us-west-2:123456789012:scalingPolicy:2233f3d7-6290-403b-b632-93c553560106:autoScalingGroupName/my-auto-scaling-group:policyName/ScaleIn", - "PolicyName": "ScaleIn", - "ScalingAdjustment": -1 - }, - { - "AdjustmentType": "PercentChangeInCapacity", - "Alarms": [ - - ], - "AutoScalingGroupName": "my-auto-scaling-group", - "Cooldown": 60, - "MinAdjustmentStep": 2, - "PolicyARN": "arn:aws:autoscaling:us-west-2:123456789012:scalingPolicy:2b435159-cf77-4e89-8c0e-d63b497baad7:autoScalingGroupName/my-auto-scaling-group:policyName/ScalePercentChange", - "PolicyName": "ScalePercentChange", - 
"ScalingAdjustment": 25 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the policies for the specified Auto Scaling group.", - "id": "autoscaling-describe-policies-1", - "title": "To describe scaling policies" - } - ], - "DescribeScalingActivities": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "Activities": [ - { - "ActivityId": "f9f2d65b-f1f2-43e7-b46d-d86756459699", - "AutoScalingGroupARN": "arn:aws:autoscaling:us-east-1:123456789012:autoScalingGroup:12345678-1234-1234-1234-123456789012:autoScalingGroupName/my-auto-scaling-group", - "AutoScalingGroupName": "my-auto-scaling-group", - "Cause": "At 2013-08-19T20:53:25Z a user request created an AutoScalingGroup changing the desired capacity from 0 to 1. At 2013-08-19T20:53:29Z an instance was started in response to a difference between desired and actual capacity, increasing the capacity from 0 to 1.", - "Description": "Launching a new EC2 instance: i-4ba0837f", - "Details": "details", - "EndTime": "2013-08-19T20:54:02Z", - "Progress": 100, - "StartTime": "2013-08-19T20:53:29.930Z", - "StatusCode": "Successful" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the scaling activities for the specified Auto Scaling group.", - "id": "autoscaling-describe-scaling-activities-1", - "title": "To describe the scaling activities for an Auto Scaling group" - } - ], - "DescribeScalingProcessTypes": [ - { - "output": { - "Processes": [ - { - "ProcessName": "AZRebalance" - }, - { - "ProcessName": "AddToLoadBalancer" - }, - { - "ProcessName": "AlarmNotification" - }, - { - "ProcessName": "HealthCheck" - }, - { - "ProcessName": "Launch" - }, - { - "ProcessName": "ReplaceUnhealthy" - }, - { - "ProcessName": "ScheduledActions" - }, - { - "ProcessName": "Terminate" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Auto Scaling process types.", - "id": "autoscaling-describe-scaling-process-types-1", - "title": "To describe the Auto Scaling process types" - } - ], - "DescribeScheduledActions": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "ScheduledUpdateGroupActions": [ - { - "AutoScalingGroupName": "my-auto-scaling-group", - "DesiredCapacity": 4, - "MaxSize": 6, - "MinSize": 2, - "Recurrence": "30 0 1 12 0", - "ScheduledActionARN": "arn:aws:autoscaling:us-west-2:123456789012:scheduledUpdateGroupAction:8e86b655-b2e6-4410-8f29-b4f094d6871c:autoScalingGroupName/my-auto-scaling-group:scheduledActionName/my-scheduled-action", - "ScheduledActionName": "my-scheduled-action", - "StartTime": "2016-12-01T00:30:00Z", - "Time": "2016-12-01T00:30:00Z" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the scheduled actions for the specified Auto Scaling group.", - "id": "autoscaling-describe-scheduled-actions-1", - "title": "To describe scheduled actions" - } - ], - "DescribeTags": [ - { - "input": { - "Filters": [ - { - "Name": "auto-scaling-group", - "Values": [ - "my-auto-scaling-group" - ] - } - ] - }, - "output": { - "Tags": [ - { - "Key": "Dept", - "PropagateAtLaunch": true, - "ResourceId": "my-auto-scaling-group", - "ResourceType": "auto-scaling-group", - "Value": "Research" - }, - { - "Key": "Role", - "PropagateAtLaunch": true, - "ResourceId": "my-auto-scaling-group", - "ResourceType": "auto-scaling-group", - "Value": "WebServer" - 
} - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the tags for the specified Auto Scaling group.", - "id": "autoscaling-describe-tags-1", - "title": "To describe tags" - } - ], - "DescribeTerminationPolicyTypes": [ - { - "output": { - "TerminationPolicyTypes": [ - "ClosestToNextInstanceHour", - "Default", - "NewestInstance", - "OldestInstance", - "OldestLaunchConfiguration" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the available termination policy types.", - "id": "autoscaling-describe-termination-policy-types-1", - "title": "To describe termination policy types" - } - ], - "DescribeTrafficSources": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group" - }, - "output": { - "NextToken": "", - "TrafficSources": [ - { - "Identifier": "arn:aws:vpc-lattice:us-west-2:123456789012:targetgroup/tg-0e2f2665eEXAMPLE", - "State": "InService", - "Type": "vpc-lattice" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the target groups attached to the specified Auto Scaling group.", - "id": "to-describe-the-target-groups-for-an-auto-scaling-group-1680040714521", - "title": "To describe the target groups for an Auto Scaling group" - } - ], - "DetachInstances": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceIds": [ - "i-93633f9b" - ], - "ShouldDecrementDesiredCapacity": true - }, - "output": { - "Activities": [ - { - "ActivityId": "5091cb52-547a-47ce-a236-c9ccbc2cb2c9", - "AutoScalingGroupName": "my-auto-scaling-group", - "Cause": "At 2015-04-12T15:02:16Z instance i-93633f9b was detached in response to a user request, shrinking the capacity from 2 to 1.", - "Description": "Detaching EC2 instance: i-93633f9b", - "Details": "details", - "Progress": 50, - "StartTime": "2015-04-12T15:02:16.179Z", - "StatusCode": "InProgress" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified instance from the specified Auto Scaling group.", - "id": "autoscaling-detach-instances-1", - "title": "To detach an instance from an Auto Scaling group" - } - ], - "DetachLoadBalancerTargetGroups": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "TargetGroupARNs": [ - "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified target group from the specified Auto Scaling group", - "id": "autoscaling-detach-load-balancer-target-groups-1", - "title": "To detach a target group from an Auto Scaling group" - } - ], - "DetachLoadBalancers": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "LoadBalancerNames": [ - "my-load-balancer" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified load balancer from the specified Auto Scaling group.", - "id": "autoscaling-detach-load-balancers-1", - "title": "To detach a load balancer from an Auto Scaling group" - } - ], - "DetachTrafficSources": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "TrafficSources": [ - { - "Identifier": "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - 
"description": "This example detaches the specified target group from the specified Auto Scaling group.", - "id": "to-detach-a-target-group-from-an-auto-scaling-group-1680040404169", - "title": "To detach a target group from an Auto Scaling group" - } - ], - "DisableMetricsCollection": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "Metrics": [ - "GroupDesiredCapacity" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables collecting data for the GroupDesiredCapacity metric for the specified Auto Scaling group.", - "id": "autoscaling-disable-metrics-collection-1", - "title": "To disable metrics collection for an Auto Scaling group" - } - ], - "EnableMetricsCollection": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "Granularity": "1Minute" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables data collection for the specified Auto Scaling group.", - "id": "autoscaling-enable-metrics-collection-1", - "title": "To enable metrics collection for an Auto Scaling group" - } - ], - "EnterStandby": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceIds": [ - "i-93633f9b" - ], - "ShouldDecrementDesiredCapacity": true - }, - "output": { - "Activities": [ - { - "ActivityId": "ffa056b4-6ed3-41ba-ae7c-249dfae6eba1", - "AutoScalingGroupName": "my-auto-scaling-group", - "Cause": "At 2015-04-12T15:10:23Z instance i-93633f9b was moved to standby in response to a user request, shrinking the capacity from 2 to 1.", - "Description": "Moving EC2 instance to Standby: i-93633f9b", - "Details": "details", - "Progress": 50, - "StartTime": "2015-04-12T15:10:23.640Z", - "StatusCode": "InProgress" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example puts the specified instance into standby mode.", - "id": "autoscaling-enter-standby-1", - "title": "To move instances into standby mode" - } - ], - "ExecutePolicy": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "BreachThreshold": 50.0, - "MetricValue": 59.0, - "PolicyName": "my-step-scale-out-policy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example executes the specified policy.", - "id": "autoscaling-execute-policy-1", - "title": "To execute a scaling policy" - } - ], - "ExitStandby": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceIds": [ - "i-93633f9b" - ] - }, - "output": { - "Activities": [ - { - "ActivityId": "142928e1-a2dc-453a-9b24-b85ad6735928", - "AutoScalingGroupName": "my-auto-scaling-group", - "Cause": "At 2015-04-12T15:14:29Z instance i-93633f9b was moved out of standby in response to a user request, increasing the capacity from 1 to 2.", - "Description": "Moving EC2 instance out of Standby: i-93633f9b", - "Details": "details", - "Progress": 30, - "StartTime": "2015-04-12T15:14:29.886Z", - "StatusCode": "PreInService" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example moves the specified instance out of standby mode.", - "id": "autoscaling-exit-standby-1", - "title": "To move instances out of standby mode" - } - ], - "PutLifecycleHook": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "DefaultResult": "CONTINUE", - "HeartbeatTimeout": 300, - "LifecycleHookName": "my-launch-lifecycle-hook", - "LifecycleTransition": "autoscaling:EC2_INSTANCE_LAUNCHING" - }, - 
"comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a lifecycle hook for instance launch.", - "id": "autoscaling-put-lifecycle-hook-1", - "title": "To create a launch lifecycle hook" - } - ], - "PutNotificationConfiguration": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "NotificationTypes": [ - "autoscaling:TEST_NOTIFICATION" - ], - "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the specified notification to the specified Auto Scaling group.", - "id": "autoscaling-put-notification-configuration-1", - "title": "To add an Auto Scaling notification" - } - ], - "PutScalingPolicy": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "PolicyName": "alb1000-target-tracking-scaling-policy", - "PolicyType": "TargetTrackingScaling", - "TargetTrackingConfiguration": { - "PredefinedMetricSpecification": { - "PredefinedMetricType": "ALBRequestCountPerTarget", - "ResourceLabel": "app/my-alb/778d41231b141a0f/targetgroup/my-alb-target-group/943f017f100becff" - }, - "TargetValue": 1000.0 - } - }, - "output": { - "Alarms": [ - { - "AlarmARN": "arn:aws:cloudwatch:us-west-2:123456789012:alarm:TargetTracking-my-asg-AlarmHigh-fc0e4183-23ac-497e-9992-691c9980c38e", - "AlarmName": "TargetTracking-my-asg-AlarmHigh-fc0e4183-23ac-497e-9992-691c9980c38e" - }, - { - "AlarmARN": "arn:aws:cloudwatch:us-west-2:123456789012:alarm:TargetTracking-my-asg-AlarmLow-61a39305-ed0c-47af-bd9e-471a352ee1a2", - "AlarmName": "TargetTracking-my-asg-AlarmLow-61a39305-ed0c-47af-bd9e-471a352ee1a2" - } - ], - "PolicyARN": "arn:aws:autoscaling:us-west-2:123456789012:scalingPolicy:228f02c2-c665-4bfd-aaac-8b04080bea3c:autoScalingGroupName/my-auto-scaling-group:policyName/alb1000-target-tracking-scaling-policy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the specified policy to the specified Auto Scaling group.", - "id": "autoscaling-put-scaling-policy-1", - "title": "To add a scaling policy to an Auto Scaling group" - } - ], - "PutScheduledUpdateGroupAction": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "DesiredCapacity": 4, - "EndTime": "2014-05-12T08:00:00Z", - "MaxSize": 6, - "MinSize": 2, - "ScheduledActionName": "my-scheduled-action", - "StartTime": "2014-05-12T08:00:00Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the specified scheduled action to the specified Auto Scaling group.", - "id": "autoscaling-put-scheduled-update-group-action-1", - "title": "To add a scheduled action to an Auto Scaling group" - } - ], - "PutWarmPool": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceReusePolicy": { - "ReuseOnScaleIn": true - }, - "MinSize": 30, - "PoolState": "Hibernated" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a warm pool for the specified Auto Scaling group.", - "id": "to-add-a-warm-pool-to-an-auto-scaling-group-1617818810383", - "title": "To create a warm pool for an Auto Scaling group" - } - ], - "RecordLifecycleActionHeartbeat": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "LifecycleActionToken": "bcd2f1b8-9a78-44d3-8a7a-4dd07d7cf635", - "LifecycleHookName": "my-lifecycle-hook" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example records a lifecycle 
action heartbeat to keep the instance in a pending state.", - "id": "autoscaling-record-lifecycle-action-heartbeat-1", - "title": "To record a lifecycle action heartbeat" - } - ], - "ResumeProcesses": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "ScalingProcesses": [ - "AlarmNotification" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resumes the specified suspended scaling process for the specified Auto Scaling group.", - "id": "autoscaling-resume-processes-1", - "title": "To resume Auto Scaling processes" - } - ], - "SetDesiredCapacity": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "DesiredCapacity": 2, - "HonorCooldown": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example sets the desired capacity for the specified Auto Scaling group.", - "id": "autoscaling-set-desired-capacity-1", - "title": "To set the desired capacity for an Auto Scaling group" - } - ], - "SetInstanceHealth": [ - { - "input": { - "HealthStatus": "Unhealthy", - "InstanceId": "i-93633f9b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example sets the health status of the specified instance to Unhealthy.", - "id": "autoscaling-set-instance-health-1", - "title": "To set the health status of an instance" - } - ], - "SetInstanceProtection": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceIds": [ - "i-93633f9b" - ], - "ProtectedFromScaleIn": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables instance protection for the specified instance.", - "id": "autoscaling-set-instance-protection-1", - "title": "To enable instance protection for an instance" - }, - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "InstanceIds": [ - "i-93633f9b" - ], - "ProtectedFromScaleIn": false - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables instance protection for the specified instance.", - "id": "autoscaling-set-instance-protection-2", - "title": "To disable instance protection for an instance" - } - ], - "StartInstanceRefresh": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "DesiredConfiguration": { - "LaunchTemplate": { - "LaunchTemplateName": "my-template-for-auto-scaling", - "Version": "$Latest" - } - }, - "Preferences": { - "AlarmSpecification": { - "Alarms": [ - "my-alarm" - ] - }, - "AutoRollback": true, - "InstanceWarmup": 200, - "MinHealthyPercentage": 90 - } - }, - "output": { - "InstanceRefreshId": "08b91cf7-8fa6-48af-b6a6-d227f40f1b9b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example starts an instance refresh for the specified Auto Scaling group.", - "id": "to-start-an-instance-refresh-1592957271522", - "title": "To start an instance refresh" - } - ], - "SuspendProcesses": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "ScalingProcesses": [ - "AlarmNotification" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example suspends the specified scaling process for the specified Auto Scaling group.", - "id": "autoscaling-suspend-processes-1", - "title": "To suspend Auto Scaling processes" - } - ], - "TerminateInstanceInAutoScalingGroup": [ - { - "input": { - "InstanceId": "i-93633f9b", - "ShouldDecrementDesiredCapacity": false - }, - "comments": { - "input": { - 
}, - "output": { - } - }, - "description": "This example terminates the specified instance from the specified Auto Scaling group without updating the size of the group. Auto Scaling launches a replacement instance after the specified instance terminates.", - "id": "autoscaling-terminate-instance-in-auto-scaling-group-1", - "title": "To terminate an instance in an Auto Scaling group" - } - ], - "UpdateAutoScalingGroup": [ - { - "input": { - "AutoScalingGroupName": "my-auto-scaling-group", - "LaunchTemplate": { - "LaunchTemplateName": "my-template-for-auto-scaling", - "Version": "2" - }, - "MaxSize": 5, - "MinSize": 1, - "NewInstancesProtectedFromScaleIn": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example updates multiple properties at the same time.", - "id": "autoscaling-update-auto-scaling-group-1", - "title": "To update an Auto Scaling group" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.json deleted file mode 100644 index ac5939d..0000000 --- a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "DescribeAutoScalingGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "AutoScalingGroups" - }, - "DescribeAutoScalingInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "AutoScalingInstances" - }, - "DescribeLaunchConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "LaunchConfigurations" - }, - "DescribeNotificationConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "NotificationConfigurations" - }, - "DescribePolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "ScalingPolicies" - }, - "DescribeScalingActivities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "Activities" - }, - "DescribeScheduledActions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "ScheduledUpdateGroupActions" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "Tags" - }, - "DescribeLoadBalancerTargetGroups": { - "input_token": "NextToken", - "limit_key": "MaxRecords", - "output_token": "NextToken", - "result_key": "LoadBalancerTargetGroups" - }, - "DescribeLoadBalancers": { - "input_token": "NextToken", - "limit_key": "MaxRecords", - "output_token": "NextToken", - "result_key": "LoadBalancers" - }, - "DescribeWarmPool": { - "input_token": "NextToken", - "limit_key": "MaxRecords", - "output_token": "NextToken", - "result_key": "Instances" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json deleted file mode 100644 index 1c63499..0000000 --- a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "DescribeWarmPool": { - "non_aggregate_keys": [ - 
"WarmPoolConfiguration" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/service-2.json.gz deleted file mode 100644 index 831fb61..0000000 Binary files a/venv/Lib/site-packages/botocore/data/autoscaling/2011-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index 42d97f1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/paginators-1.json deleted file mode 100644 index 5f3b0d2..0000000 --- a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListCapabilities": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "capabilities" - }, - "ListPartnerships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "partnerships" - }, - "ListProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "profiles" - }, - "ListTransformers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "transformers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/service-2.json.gz deleted file mode 100644 index bd61791..0000000 Binary files a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/waiters-2.json b/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/waiters-2.json deleted file mode 100644 index 35d8698..0000000 --- a/venv/Lib/site-packages/botocore/data/b2bi/2022-06-23/waiters-2.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "TransformerJobSucceeded" : { - "delay" : 10, - "maxAttempts" : 12, - "operation" : "GetTransformerJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "succeeded" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "failed" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 70c3a34..0000000 Binary files a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/examples-1.json b/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/paginators-1.json deleted file mode 100644 index 462aacd..0000000 --- a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListGateways": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Gateways" - }, - "ListHypervisors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Hypervisors" - }, - "ListVirtualMachines": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VirtualMachines" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/service-2.json.gz deleted file mode 100644 index 51c7c75..0000000 Binary files a/venv/Lib/site-packages/botocore/data/backup-gateway/2021-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/backup/2018-11-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 036129c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/examples-1.json b/venv/Lib/site-packages/botocore/data/backup/2018-11-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/backup/2018-11-15/paginators-1.json deleted file mode 100644 index 47f5874..0000000 --- a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/paginators-1.json +++ /dev/null @@ -1,136 +0,0 @@ -{ - "pagination": { - "ListBackupJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackupJobs" - }, - "ListBackupPlanTemplates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackupPlanTemplatesList" - }, - "ListBackupPlanVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackupPlanVersionsList" - }, - "ListBackupPlans": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackupPlansList" - }, - "ListBackupSelections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackupSelectionsList" - }, - "ListBackupVaults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackupVaultList" - }, - "ListCopyJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CopyJobs" - }, - "ListProtectedResources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Results" - }, - "ListRecoveryPointsByBackupVault": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RecoveryPoints" - }, - "ListRecoveryPointsByResource": { 
- "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RecoveryPoints" - }, - "ListRestoreJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RestoreJobs" - }, - "ListLegalHolds": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "LegalHolds" - }, - "ListRecoveryPointsByLegalHold": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RecoveryPoints" - }, - "ListProtectedResourcesByBackupVault": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Results" - }, - "ListRestoreJobsByProtectedResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RestoreJobs" - }, - "ListRestoreTestingPlans": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RestoreTestingPlans" - }, - "ListRestoreTestingSelections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RestoreTestingSelections" - }, - "ListIndexedRecoveryPoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IndexedRecoveryPoints" - }, - "ListRestoreAccessBackupVaults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RestoreAccessBackupVaults" - }, - "ListTieringConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TieringConfigurations" - }, - "ListScanJobSummaries": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScanJobSummaries" - }, - "ListScanJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScanJobs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/backup/2018-11-15/paginators-1.sdk-extras.json deleted file mode 100644 index 9a5d767..0000000 --- a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListScanJobSummaries": { - "non_aggregate_keys": [ - "AggregationPeriod" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/backup/2018-11-15/service-2.json.gz deleted file mode 100644 index 59f535b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/backup/2018-11-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 67431dd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/paginators-1.json deleted file mode 100644 index bc482fd..0000000 --- 
a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListSearchJobBackups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Results" - }, - "ListSearchJobResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Results" - }, - "ListSearchJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SearchJobs" - }, - "ListSearchResultExportJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ExportJobs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/service-2.json.gz deleted file mode 100644 index ef303eb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/backupsearch/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/batch/2016-08-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 127c096..0000000 Binary files a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/examples-1.json b/venv/Lib/site-packages/botocore/data/batch/2016-08-10/examples-1.json deleted file mode 100644 index 18203dc..0000000 --- a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/examples-1.json +++ /dev/null @@ -1,711 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CancelJob": [ - { - "input": { - "jobId": "1d828f65-7a4d-42e8-996d-3b900ed59dc4", - "reason": "Cancelling job." - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels a job with the specified job ID.", - "id": "to-cancel-a-job-1481152314733", - "title": "To cancel a job" - } - ], - "CreateComputeEnvironment": [ - { - "input": { - "type": "MANAGED", - "computeEnvironmentName": "C4OnDemand", - "computeResources": { - "type": "EC2", - "desiredvCpus": 48, - "ec2KeyPair": "id_rsa", - "instanceRole": "ecsInstanceRole", - "instanceTypes": [ - "c4.large", - "c4.xlarge", - "c4.2xlarge", - "c4.4xlarge", - "c4.8xlarge" - ], - "maxvCpus": 128, - "minvCpus": 0, - "securityGroupIds": [ - "sg-cf5093b2" - ], - "subnets": [ - "subnet-220c0e0a", - "subnet-1a95556d", - "subnet-978f6dce" - ], - "tags": { - "Name": "Batch Instance - C4OnDemand" - } - }, - "serviceRole": "arn:aws:iam::012345678910:role/AWSBatchServiceRole", - "state": "ENABLED" - }, - "output": { - "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/C4OnDemand", - "computeEnvironmentName": "C4OnDemand" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a managed compute environment with specific C4 instance types that are launched on demand. 
The compute environment is called C4OnDemand.", - "id": "to-create-a-managed-ec2-compute-environment-1481152600017", - "title": "To create a managed EC2 compute environment" - }, - { - "input": { - "type": "MANAGED", - "computeEnvironmentName": "M4Spot", - "computeResources": { - "type": "SPOT", - "bidPercentage": 20, - "desiredvCpus": 4, - "ec2KeyPair": "id_rsa", - "instanceRole": "ecsInstanceRole", - "instanceTypes": [ - "m4" - ], - "maxvCpus": 128, - "minvCpus": 0, - "securityGroupIds": [ - "sg-cf5093b2" - ], - "spotIamFleetRole": "arn:aws:iam::012345678910:role/aws-ec2-spot-fleet-role", - "subnets": [ - "subnet-220c0e0a", - "subnet-1a95556d", - "subnet-978f6dce" - ], - "tags": { - "Name": "Batch Instance - M4Spot" - } - }, - "serviceRole": "arn:aws:iam::012345678910:role/AWSBatchServiceRole", - "state": "ENABLED" - }, - "output": { - "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/M4Spot", - "computeEnvironmentName": "M4Spot" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a managed compute environment with the M4 instance type that is launched when the Spot bid price is at or below 20% of the On-Demand price for the instance type. The compute environment is called M4Spot.", - "id": "to-create-a-managed-ec2-spot-compute-environment-1481152844190", - "title": "To create a managed EC2 Spot compute environment" - } - ], - "CreateJobQueue": [ - { - "input": { - "computeEnvironmentOrder": [ - { - "computeEnvironment": "M4Spot", - "order": 1 - } - ], - "jobQueueName": "LowPriority", - "priority": 1, - "state": "ENABLED" - }, - "output": { - "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/LowPriority", - "jobQueueName": "LowPriority" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a job queue called LowPriority that uses the M4Spot compute environment.", - "id": "to-create-a-job-queue-with-a-single-compute-environment-1481152967946", - "title": "To create a job queue with a single compute environment" - }, - { - "input": { - "computeEnvironmentOrder": [ - { - "computeEnvironment": "C4OnDemand", - "order": 1 - }, - { - "computeEnvironment": "M4Spot", - "order": 2 - } - ], - "jobQueueName": "HighPriority", - "priority": 10, - "state": "ENABLED" - }, - "output": { - "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/HighPriority", - "jobQueueName": "HighPriority" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a job queue called HighPriority that uses the C4OnDemand compute environment with an order of 1 and the M4Spot compute environment with an order of 2.", - "id": "to-create-a-job-queue-with-multiple-compute-environments-1481153027051", - "title": "To create a job queue with multiple compute environments" - } - ], - "DeleteComputeEnvironment": [ - { - "input": { - "computeEnvironment": "P2OnDemand" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the P2OnDemand compute environment.", - "id": "to-delete-a-compute-environment-1481153105644", - "title": "To delete a compute environment" - } - ], - "DeleteJobQueue": [ - { - "input": { - "jobQueue": "GPGPU" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the GPGPU job queue.", - "id": "to-delete-a-job-queue-1481153508134", - "title": "To delete a job queue" - } - ], - 
"DeregisterJobDefinition": [ - { - "input": { - "jobDefinition": "sleep10" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deregisters a job definition called sleep10.", - "id": "to-deregister-a-job-definition-1481153579565", - "title": "To deregister a job definition" - } - ], - "DescribeComputeEnvironments": [ - { - "input": { - "computeEnvironments": [ - "P2OnDemand" - ] - }, - "output": { - "computeEnvironments": [ - { - "type": "MANAGED", - "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/P2OnDemand", - "computeEnvironmentName": "P2OnDemand", - "computeResources": { - "type": "EC2", - "desiredvCpus": 48, - "ec2KeyPair": "id_rsa", - "instanceRole": "ecsInstanceRole", - "instanceTypes": [ - "p2" - ], - "maxvCpus": 128, - "minvCpus": 0, - "securityGroupIds": [ - "sg-cf5093b2" - ], - "subnets": [ - "subnet-220c0e0a", - "subnet-1a95556d", - "subnet-978f6dce" - ], - "tags": { - "Name": "Batch Instance - P2OnDemand" - } - }, - "ecsClusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/P2OnDemand_Batch_2c06f29d-d1fe-3a49-879d-42394c86effc", - "serviceRole": "arn:aws:iam::012345678910:role/AWSBatchServiceRole", - "state": "ENABLED", - "status": "VALID", - "statusReason": "ComputeEnvironment Healthy" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the P2OnDemand compute environment.", - "id": "to-describe-a-compute-environment-1481153713334", - "title": "To describe a compute environment" - } - ], - "DescribeJobDefinitions": [ - { - "input": { - "status": "ACTIVE" - }, - "output": { - "jobDefinitions": [ - { - "type": "container", - "containerProperties": { - "command": [ - "sleep", - "60" - ], - "environment": [ - - ], - "image": "busybox", - "mountPoints": [ - - ], - "resourceRequirements": [ - { - "type": "MEMORY", - "value": "128" - }, - { - "type": "VCPU", - "value": "1" - } - ], - "ulimits": [ - - ], - "volumes": [ - - ] - }, - "jobDefinitionArn": "arn:aws:batch:us-east-1:012345678910:job-definition/sleep60:1", - "jobDefinitionName": "sleep60", - "revision": 1, - "status": "ACTIVE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your active job definitions.", - "id": "to-describe-active-job-definitions-1481153895831", - "title": "To describe active job definitions" - } - ], - "DescribeJobQueues": [ - { - "input": { - "jobQueues": [ - "HighPriority" - ] - }, - "output": { - "jobQueues": [ - { - "computeEnvironmentOrder": [ - { - "computeEnvironment": "arn:aws:batch:us-east-1:012345678910:compute-environment/C4OnDemand", - "order": 1 - } - ], - "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/HighPriority", - "jobQueueName": "HighPriority", - "priority": 1, - "state": "ENABLED", - "status": "VALID", - "statusReason": "JobQueue Healthy" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the HighPriority job queue.", - "id": "to-describe-a-job-queue-1481153995804", - "title": "To describe a job queue" - } - ], - "DescribeJobs": [ - { - "input": { - "jobs": [ - "24fa2d7a-64c4-49d2-8b47-f8da4fbde8e9" - ] - }, - "output": { - "jobs": [ - { - "container": { - "command": [ - "sleep", - "60" - ], - "containerInstanceArn": "arn:aws:ecs:us-east-1:012345678910:container-instance/5406d7cd-58bd-4b8f-9936-48d7c6b1526c", - "environment": [ - - ], - "exitCode": 0, - "image": "busybox", - "memory": 128, - 
"mountPoints": [ - - ], - "ulimits": [ - - ], - "vcpus": 1, - "volumes": [ - - ] - }, - "createdAt": 1480460782010, - "dependsOn": [ - - ], - "jobDefinition": "sleep60", - "jobId": "24fa2d7a-64c4-49d2-8b47-f8da4fbde8e9", - "jobName": "example", - "jobQueue": "arn:aws:batch:us-east-1:012345678910:job-queue/HighPriority", - "parameters": { - }, - "startedAt": 1480460816500, - "status": "SUCCEEDED", - "stoppedAt": 1480460880699 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes a job with the specified job ID.", - "id": "to-describe-a-specific-job-1481154090490", - "title": "To describe a specific job" - } - ], - "ListJobs": [ - { - "input": { - "jobQueue": "HighPriority" - }, - "output": { - "jobSummaryList": [ - { - "jobId": "e66ff5fd-a1ff-4640-b1a2-0b0a142f49bb", - "jobName": "example" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the running jobs in the HighPriority job queue.", - "id": "to-list-running-jobs-1481154202164", - "title": "To list running jobs" - }, - { - "input": { - "jobQueue": "HighPriority", - "jobStatus": "SUBMITTED" - }, - "output": { - "jobSummaryList": [ - { - "jobId": "68f0c163-fbd4-44e6-9fd1-25b14a434786", - "jobName": "example" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists jobs in the HighPriority job queue that are in the SUBMITTED job status.", - "id": "to-list-submitted-jobs-1481154251623", - "title": "To list submitted jobs" - } - ], - "ListTagsForResource": [ - { - "input": { - "resourceArn": "arn:aws:batch:us-east-1:123456789012:job-definition/sleep30:1" - }, - "output": { - "tags": { - "Department": "Engineering", - "Stage": "Alpha", - "User": "JaneDoe" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This demonstrates calling the ListTagsForResource action.", - "id": "listtagsforresource-example-1591293003710", - "title": "ListTagsForResource Example" - } - ], - "RegisterJobDefinition": [ - { - "input": { - "type": "container", - "containerProperties": { - "command": [ - "sleep", - "10" - ], - "image": "busybox", - "resourceRequirements": [ - { - "type": "MEMORY", - "value": "128" - }, - { - "type": "VCPU", - "value": "1" - } - ] - }, - "jobDefinitionName": "sleep10" - }, - "output": { - "jobDefinitionArn": "arn:aws:batch:us-east-1:012345678910:job-definition/sleep10:1", - "jobDefinitionName": "sleep10", - "revision": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example registers a job definition for a simple container job.", - "id": "to-register-a-job-definition-1481154325325", - "title": "To register a job definition" - }, - { - "input": { - "type": "container", - "containerProperties": { - "command": [ - "sleep", - "30" - ], - "image": "busybox", - "resourceRequirements": [ - { - "type": "MEMORY", - "value": "128" - }, - { - "type": "VCPU", - "value": "1" - } - ] - }, - "jobDefinitionName": "sleep30", - "tags": { - "Department": "Engineering", - "User": "JaneDoe" - } - }, - "output": { - "jobDefinitionArn": "arn:aws:batch:us-east-1:012345678910:job-definition/sleep30:1", - "jobDefinitionName": "sleep30", - "revision": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This demonstrates calling the RegisterJobDefinition action, including tags.", - "id": "registerjobdefinition-with-tags-1591290509028", - "title": "RegisterJobDefinition with tags" - } - ], - "SubmitJob": [ 
- { - "input": { - "jobDefinition": "sleep60", - "jobName": "example", - "jobQueue": "HighPriority" - }, - "output": { - "jobId": "876da822-4198-45f2-a252-6cea32512ea8", - "jobName": "example" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example submits a simple container job called example to the HighPriority job queue.", - "id": "to-submit-a-job-to-a-queue-1481154481673", - "title": "To submit a job to a queue" - } - ], - "TagResource": [ - { - "input": { - "resourceArn": "arn:aws:batch:us-east-1:123456789012:job-definition/sleep30:1", - "tags": { - "Stage": "Alpha" - } - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This demonstrates calling the TagResource action.", - "id": "tagresource-example-1591291959952", - "title": "TagResource Example" - } - ], - "TerminateJob": [ - { - "input": { - "jobId": "61e743ed-35e4-48da-b2de-5c8333821c84", - "reason": "Terminating job." - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example terminates a job with the specified job ID.", - "id": "to-terminate-a-job-1481154558276", - "title": "To terminate a job" - } - ], - "UntagResource": [ - { - "input": { - "resourceArn": "arn:aws:batch:us-east-1:123456789012:job-definition/sleep30:1", - "tagKeys": [ - "Stage" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This demonstrates calling the UntagResource action.", - "id": "untagresource-example-1591292811042", - "title": "UntagResource Example" - } - ], - "UpdateComputeEnvironment": [ - { - "input": { - "computeEnvironment": "P2OnDemand", - "state": "DISABLED" - }, - "output": { - "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/P2OnDemand", - "computeEnvironmentName": "P2OnDemand" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables the P2OnDemand compute environment so it can be deleted.", - "id": "to-update-a-compute-environment-1481154702731", - "title": "To update a compute environment" - } - ], - "UpdateJobQueue": [ - { - "input": { - "jobQueue": "GPGPU", - "state": "DISABLED" - }, - "output": { - "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/GPGPU", - "jobQueueName": "GPGPU" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables a job queue so that it can be deleted.", - "id": "to-update-a-job-queue-1481154806981", - "title": "To update a job queue" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/batch/2016-08-10/paginators-1.json deleted file mode 100644 index b844b08..0000000 --- a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "DescribeComputeEnvironments": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "computeEnvironments" - }, - "DescribeJobDefinitions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobDefinitions" - }, - "DescribeJobQueues": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobQueues" - }, - "ListJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobSummaryList" - }, - 
"ListSchedulingPolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "schedulingPolicies" - }, - "ListConsumableResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "consumableResources" - }, - "ListJobsByConsumableResource": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobs" - }, - "DescribeServiceEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "serviceEnvironments" - }, - "ListServiceJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/batch/2016-08-10/service-2.json.gz deleted file mode 100644 index 5daed4e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/batch/2016-08-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/endpoint-rule-set-1.json.gz deleted file mode 100644 index a655944..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/paginators-1.json b/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/paginators-1.json deleted file mode 100644 index 22ea369..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListDashboards": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dashboards" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/service-2.json.gz deleted file mode 100644 index 6b1d37c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/waiters-2.json b/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-dashboards/2025-08-18/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index e4a9e78..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/paginators-1.json deleted file mode 100644 index 3d03805..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListExecutions": { - "input_token": "NextToken", - "output_token": "NextToken", - 
"limit_key": "MaxResults", - "result_key": "Executions" - }, - "ListExports": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Exports" - }, - "ListTables": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tables" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/service-2.json.gz deleted file mode 100644 index 5f0a14e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-data-exports/2023-11-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8990488..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/paginators-1.json deleted file mode 100644 index 678b04a..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListBillEstimateCommitments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillEstimateInputCommitmentModifications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillEstimateInputUsageModifications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillEstimateLineItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillEstimates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillScenarioCommitmentModifications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillScenarioUsageModifications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListBillScenarios": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListWorkloadEstimateUsage": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListWorkloadEstimates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/service-2.json.gz deleted file mode 100644 index 51684a5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/waiters-2.json 
b/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-pricing-calculator/2024-06-19/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index ae6a603..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/paginators-1.json deleted file mode 100644 index 11c6a2d..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListRecommendedActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendedActions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/service-2.json.gz deleted file mode 100644 index e7b2226..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/bcm-recommended-actions/2024-11-14/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index a0e506f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json deleted file mode 100644 index d3b7246..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "pagination": { - "Retrieve": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "retrievalResults" - }, - "GetAgentMemory": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxItems", - "result_key": "memoryContents" - }, - "Rerank": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "results" - }, - "ListInvocationSteps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "invocationStepSummaries" - }, - "ListInvocations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "invocationSummaries" - }, - "ListSessions": { - "input_token": "nextToken", - 
"output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessionSummaries" - }, - "ListFlowExecutionEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flowExecutionEvents" - }, - "ListFlowExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flowExecutionSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.sdk-extras.json deleted file mode 100644 index 30da00d..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "Retrieve": { - "non_aggregate_keys": [ - "guardrailAction" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/service-2.json.gz deleted file mode 100644 index 1e169df..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0f1eb6e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/paginators-1.json deleted file mode 100644 index 0daea1d..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/paginators-1.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "pagination": { - "ListAgentActionGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "actionGroupSummaries" - }, - "ListAgentAliases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentAliasSummaries" - }, - "ListAgentKnowledgeBases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentKnowledgeBaseSummaries" - }, - "ListAgentVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentVersionSummaries" - }, - "ListAgents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentSummaries" - }, - "ListDataSources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataSourceSummaries" - }, - "ListIngestionJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": 
"ingestionJobSummaries" - }, - "ListKnowledgeBases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "knowledgeBaseSummaries" - }, - "ListFlowAliases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flowAliasSummaries" - }, - "ListFlowVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flowVersionSummaries" - }, - "ListFlows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flowSummaries" - }, - "ListPrompts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "promptSummaries" - }, - "ListKnowledgeBaseDocuments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "documentDetails" - }, - "ListAgentCollaborators": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentCollaboratorSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/service-2.json.gz deleted file mode 100644 index 030fcd7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/waiters-2.json b/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agent/2023-06-05/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index 902b50a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/paginators-1.json deleted file mode 100644 index 745be82..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/paginators-1.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "pagination": { - "ListAgentRuntimeEndpoints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "runtimeEndpoints" - }, - "ListAgentRuntimeVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentRuntimes" - }, - "ListAgentRuntimes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "agentRuntimes" - }, - "ListApiKeyCredentialProviders": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "credentialProviders" - }, - "ListBrowsers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "browserSummaries" - }, - "ListCodeInterpreters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": 
"maxResults", - "result_key": "codeInterpreterSummaries" - }, - "ListGatewayTargets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListGateways": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListMemories": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "memories" - }, - "ListOauth2CredentialProviders": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "credentialProviders" - }, - "ListWorkloadIdentities": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workloadIdentities" - }, - "ListEvaluators": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "evaluators" - }, - "ListOnlineEvaluationConfigs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "onlineEvaluationConfigs" - }, - "ListPolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policies" - }, - "ListPolicyEngines": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policyEngines" - }, - "ListPolicyGenerationAssets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policyGenerationAssets" - }, - "ListPolicyGenerations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policyGenerations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/service-2.json.gz deleted file mode 100644 index 066b37c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/waiters-2.json b/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/waiters-2.json deleted file mode 100644 index f6b2821..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agentcore-control/2023-06-05/waiters-2.json +++ /dev/null @@ -1,149 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "MemoryCreated" : { - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetMemory", - "acceptors" : [ { - "matcher" : "path", - "argument" : "memory.status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "memory.status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "memory.status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "PolicyActive" : { - "description" : "Wait until a Policy is active", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetPolicy", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "UPDATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : 
"DELETE_FAILED" - } ] - }, - "PolicyDeleted" : { - "description" : "Wait until a Policy is deleted", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetPolicy", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "DELETING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETE_FAILED" - } ] - }, - "PolicyEngineActive" : { - "description" : "Wait until a PolicyEngine is active", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetPolicyEngine", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "UPDATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETE_FAILED" - } ] - }, - "PolicyEngineDeleted" : { - "description" : "Wait until a PolicyEngine is deleted", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetPolicyEngine", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "DELETING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETE_FAILED" - } ] - }, - "PolicyGenerationCompleted" : { - "description" : "Wait until policy generation is completed", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetPolicyGeneration", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "GENERATED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "GENERATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "GENERATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETE_FAILED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 399e7c8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/paginators-1.json deleted file mode 100644 index 129086a..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListActors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "actorSummaries" - }, - "ListEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "events" - }, - "ListMemoryRecords": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "memoryRecordSummaries" - }, - "ListSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - 
"result_key": "sessionSummaries" - }, - "RetrieveMemoryRecords": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "memoryRecordSummaries" - }, - "ListMemoryExtractionJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/service-2.json.gz deleted file mode 100644 index 6c03524..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/waiters-2.json b/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-agentcore/2024-02-28/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/endpoint-rule-set-1.json.gz deleted file mode 100644 index aebe256..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/service-2.json.gz deleted file mode 100644 index f2304c7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-data-automation-runtime/2024-06-13/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index b1eb180..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/paginators-1.json deleted file mode 100644 index 741799c..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListBlueprints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "blueprints" - }, - "ListDataAutomationProjects": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "projects" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/service-2.json.gz deleted file mode 100644 index 5d1c60d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-data-automation/2023-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 449c936..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/paginators-1.json deleted file mode 100644 index b6c3d8b..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListAsyncInvokes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "asyncInvokeSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/service-2.json.gz deleted file mode 100644 index 73afe8b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/waiters-2.json b/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/waiters-2.json deleted file mode 100644 index 4b20636..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock-runtime/2023-09-30/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index e2cb6ce..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/paginators-1.json deleted file mode 100644 index 0cbf092..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/paginators-1.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "pagination": { - "ListCustomModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "modelSummaries" - }, - "ListModelCustomizationJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "modelCustomizationJobSummaries" - }, - "ListProvisionedModelThroughputs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "provisionedModelSummaries" - }, - "ListEvaluationJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobSummaries" - }, - "ListGuardrails": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "guardrails" - }, - "ListModelCopyJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": 
"modelCopyJobSummaries" - }, - "ListModelInvocationJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "invocationJobSummaries" - }, - "ListImportedModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "modelSummaries" - }, - "ListModelImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "modelImportJobSummaries" - }, - "ListInferenceProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "inferenceProfileSummaries" - }, - "ListMarketplaceModelEndpoints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "marketplaceModelEndpoints" - }, - "ListPromptRouters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "promptRouterSummaries" - }, - "ListCustomModelDeployments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "modelDeploymentSummaries" - }, - "ListAutomatedReasoningPolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "automatedReasoningPolicySummaries" - }, - "ListAutomatedReasoningPolicyBuildWorkflows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "automatedReasoningPolicyBuildWorkflowSummaries" - }, - "ListAutomatedReasoningPolicyTestCases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "testCases" - }, - "ListAutomatedReasoningPolicyTestResults": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "testResults" - }, - "ListEnforcedGuardrailsConfiguration": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "guardrailsConfig" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/service-2.json.gz deleted file mode 100644 index 28b87f5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/waiters-2.json b/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/waiters-2.json deleted file mode 100644 index 4b20636..0000000 --- a/venv/Lib/site-packages/botocore/data/bedrock/2023-04-20/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/billing/2023-09-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 71d2aef..0000000 Binary files a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/billing/2023-09-07/paginators-1.json deleted file mode 100644 index bc77891..0000000 --- a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListBillingViews": { - "input_token": "nextToken", - "output_token": 
"nextToken", - "limit_key": "maxResults", - "result_key": "billingViews" - }, - "ListSourceViewsForBillingView": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sourceViews" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/billing/2023-09-07/service-2.json.gz deleted file mode 100644 index b4d11ee..0000000 Binary files a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/waiters-2.json b/venv/Lib/site-packages/botocore/data/billing/2023-09-07/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/billing/2023-09-07/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index a3a3ff7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/examples-1.json b/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/paginators-1.json deleted file mode 100644 index 2ca4d75..0000000 --- a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/paginators-1.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "pagination": { - "ListAccountAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "LinkedAccounts" - }, - "ListBillingGroupCostReports": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BillingGroupCostReports" - }, - "ListBillingGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BillingGroups" - }, - "ListCustomLineItems": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CustomLineItems" - }, - "ListPricingPlans": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "BillingPeriod" - ], - "output_token": "NextToken", - "result_key": "PricingPlans" - }, - "ListPricingPlansAssociatedWithPricingRule": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "BillingPeriod", - "PricingRuleArn" - ], - "output_token": "NextToken", - "result_key": "PricingPlanArns" - }, - "ListPricingRules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "BillingPeriod" - ], - "output_token": "NextToken", - "result_key": "PricingRules" - }, - "ListPricingRulesAssociatedToPricingPlan": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "BillingPeriod", - "PricingPlanArn" - ], - "output_token": "NextToken", - "result_key": "PricingRuleArns" - 
}, - "ListResourcesAssociatedToCustomLineItem": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "Arn" - ], - "output_token": "NextToken", - "result_key": "AssociatedResources" - }, - "ListCustomLineItemVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CustomLineItemVersions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/service-2.json.gz deleted file mode 100644 index b05b061..0000000 Binary files a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/waiters-2.json b/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/waiters-2.json deleted file mode 100644 index ee5023d..0000000 --- a/venv/Lib/site-packages/botocore/data/billingconductor/2021-07-30/waiters-2.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "version": 2, - "waiters": {} -} diff --git a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/braket/2019-09-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index d8e485b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/examples-1.json b/venv/Lib/site-packages/botocore/data/braket/2019-09-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/braket/2019-09-01/paginators-1.json deleted file mode 100644 index 2f0f69e..0000000 --- a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "SearchDevices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "devices" - }, - "SearchQuantumTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "quantumTasks" - }, - "SearchJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobs" - }, - "SearchSpendingLimits": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "spendingLimits" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/braket/2019-09-01/service-2.json.gz deleted file mode 100644 index 4079bc7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/braket/2019-09-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 66e9eb0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/examples-1.json 
b/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/paginators-1.json deleted file mode 100644 index 15f7a63..0000000 --- a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "DescribeBudgets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Budgets" - }, - "DescribeNotificationsForBudget": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Notifications" - }, - "DescribeSubscribersForNotification": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Subscribers" - }, - "DescribeBudgetPerformanceHistory": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BudgetPerformanceHistory" - }, - "DescribeBudgetActionHistories": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ActionHistories" - }, - "DescribeBudgetActionsForAccount": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Actions" - }, - "DescribeBudgetActionsForBudget": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Actions" - }, - "DescribeBudgetNotificationsForAccount": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BudgetNotificationsForAccount" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/service-2.json.gz deleted file mode 100644 index a3b9f02..0000000 Binary files a/venv/Lib/site-packages/botocore/data/budgets/2016-10-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ce/2017-10-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 302c69c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/examples-1.json b/venv/Lib/site-packages/botocore/data/ce/2017-10-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/ce/2017-10-25/paginators-1.json deleted file mode 100644 index 764af9f..0000000 --- a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/paginators-1.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "pagination": { - "GetAnomalies": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "MaxResults", - "result_key": "Anomalies" - }, - "GetAnomalyMonitors": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": 
"MaxResults", - "result_key": "AnomalyMonitors" - }, - "GetAnomalySubscriptions": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "MaxResults", - "result_key": "AnomalySubscriptions" - }, - "GetCostAndUsageComparisons": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "MaxResults", - "result_key": "CostAndUsageComparisons" - }, - "GetCostComparisonDrivers": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "MaxResults", - "result_key": "CostComparisonDrivers" - }, - "ListCostAllocationTagBackfillHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BackfillRequests" - }, - "ListCostAllocationTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CostAllocationTags" - }, - "ListCostCategoryDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CostCategoryReferences" - }, - "ListCostCategoryResourceAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CostCategoryResourceAssociations" - }, - "GetReservationPurchaseRecommendation": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "PageSize", - "result_key": "Recommendations" - }, - "GetRightsizingRecommendation": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "PageSize", - "result_key": "RightsizingRecommendations" - }, - "ListCommitmentPurchaseAnalyses": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "PageSize", - "result_key": "AnalysisSummaryList" - }, - "ListSavingsPlansPurchaseRecommendationGeneration": { - "input_token": "NextPageToken", - "output_token": "NextPageToken", - "limit_key": "PageSize", - "result_key": "GenerationSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/ce/2017-10-25/paginators-1.sdk-extras.json deleted file mode 100644 index 83b187a..0000000 --- a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/paginators-1.sdk-extras.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetCostAndUsageComparisons": { - "non_aggregate_keys": [ - "TotalCostAndUsage" - ] - }, - "GetReservationPurchaseRecommendation": { - "non_aggregate_keys": [ - "Metadata" - ] - }, - "GetRightsizingRecommendation": { - "non_aggregate_keys": [ - "Configuration", - "Metadata", - "Summary" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ce/2017-10-25/service-2.json.gz deleted file mode 100644 index 8816783..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ce/2017-10-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index 923a9bd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/paginators-1.json deleted file mode 100644 index 5e4b764..0000000 --- a/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "DescribeChimeWebhookConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "WebhookConfigurations" - }, - "DescribeSlackChannelConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SlackChannelConfigurations" - }, - "DescribeSlackUserIdentities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SlackUserIdentities" - }, - "DescribeSlackWorkspaces": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SlackWorkspaces" - }, - "ListMicrosoftTeamsChannelConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TeamChannelConfigurations" - }, - "ListMicrosoftTeamsConfiguredTeams": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConfiguredTeams" - }, - "ListMicrosoftTeamsUserIdentities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TeamsUserIdentities" - }, - "ListAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Associations" - }, - "ListCustomActions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CustomActions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/service-2.json.gz deleted file mode 100644 index ebb2a21..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chatbot/2017-10-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1d952bd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/examples-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/service-2.json.gz deleted file mode 100644 index 06aac58..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/chime-sdk-identity/2021-04-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3fde97a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/service-2.json.gz deleted file mode 100644 index 3e5d55b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-media-pipelines/2021-07-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index f83312f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/examples-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/service-2.json.gz deleted file mode 100644 index 2a5fecf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-meetings/2021-07-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0f849b4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/examples-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 
"1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/service-2.json.gz deleted file mode 100644 index becb3dd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-messaging/2021-05-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index ea08637..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/paginators-1.json deleted file mode 100644 index 648f71e..0000000 --- a/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListSipMediaApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SipMediaApplications" - }, - "ListSipRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SipRules" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/service-2.json.gz deleted file mode 100644 index 1a7016a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime-sdk-voice/2022-08-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/chime/2018-05-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index d8a388f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/examples-1.json b/venv/Lib/site-packages/botocore/data/chime/2018-05-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/chime/2018-05-01/paginators-1.json deleted file mode 100644 index 617b114..0000000 --- a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListAccounts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Accounts" - }, - "ListUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Users" - } - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/chime/2018-05-01/service-2.json.gz deleted file mode 100644 index 586182b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/chime/2018-05-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4d07833..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/paginators-1.json deleted file mode 100644 index f605ad2..0000000 --- a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/paginators-1.json +++ /dev/null @@ -1,124 +0,0 @@ -{ - "pagination": { - "ListCollaborations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationList" - }, - "ListConfiguredTableAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "configuredTableAssociationSummaries" - }, - "ListConfiguredTables": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "configuredTableSummaries" - }, - "ListMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "memberSummaries" - }, - "ListMemberships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "membershipSummaries" - }, - "ListProtectedQueries": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "protectedQueries" - }, - "ListSchemas": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "schemaSummaries" - }, - "ListAnalysisTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "analysisTemplateSummaries" - }, - "ListCollaborationAnalysisTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationAnalysisTemplateSummaries" - }, - "ListCollaborationConfiguredAudienceModelAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationConfiguredAudienceModelAssociationSummaries" - }, - "ListCollaborationPrivacyBudgetTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationPrivacyBudgetTemplateSummaries" - }, - "ListCollaborationPrivacyBudgets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationPrivacyBudgetSummaries" - }, - "ListConfiguredAudienceModelAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "configuredAudienceModelAssociationSummaries" - }, - "ListPrivacyBudgetTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "privacyBudgetTemplateSummaries" - }, - "ListPrivacyBudgets": { - "input_token": 
"nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "privacyBudgetSummaries" - }, - "ListCollaborationIdNamespaceAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationIdNamespaceAssociationSummaries" - }, - "ListIdMappingTables": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "idMappingTableSummaries" - }, - "ListIdNamespaceAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "idNamespaceAssociationSummaries" - }, - "ListProtectedJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "protectedJobs" - }, - "ListCollaborationChangeRequests": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationChangeRequestSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/service-2.json.gz deleted file mode 100644 index 2937840..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/waiters-2.json b/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/cleanrooms/2022-02-17/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4d51c73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/paginators-1.json deleted file mode 100644 index 570b41c..0000000 --- a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "ListAudienceExportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "audienceExportJobs" - }, - "ListAudienceGenerationJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "audienceGenerationJobs" - }, - "ListAudienceModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "audienceModels" - }, - "ListConfiguredAudienceModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "configuredAudienceModels" - }, - "ListTrainingDatasets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "trainingDatasets" - }, - "ListCollaborationConfiguredModelAlgorithmAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationConfiguredModelAlgorithmAssociations" - }, - "ListCollaborationMLInputChannels": { - "input_token": "nextToken", - 
"output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationMLInputChannelsList" - }, - "ListCollaborationTrainedModelExportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationTrainedModelExportJobs" - }, - "ListCollaborationTrainedModelInferenceJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationTrainedModelInferenceJobs" - }, - "ListCollaborationTrainedModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "collaborationTrainedModels" - }, - "ListConfiguredModelAlgorithmAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "configuredModelAlgorithmAssociations" - }, - "ListConfiguredModelAlgorithms": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "configuredModelAlgorithms" - }, - "ListMLInputChannels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "mlInputChannelsList" - }, - "ListTrainedModelInferenceJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "trainedModelInferenceJobs" - }, - "ListTrainedModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "trainedModels" - }, - "ListTrainedModelVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "trainedModels" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/service-2.json.gz deleted file mode 100644 index 74bb37b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/waiters-2.json b/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/cleanroomsml/2023-09-06/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index ebe1fdd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/examples-1.json b/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/examples-1.json deleted file mode 100644 index fdef270..0000000 --- a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/examples-1.json +++ /dev/null @@ -1,315 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateEnvironmentEC2": [ - { - "input": { - "name": "my-demo-environment", - "automaticStopTimeMinutes": 60, - "description": "This is my demonstration environment.", - "instanceType": "t2.micro", - "ownerArn": "arn:aws:iam::123456789012:user/MyDemoUser", - "subnetId": "subnet-6300cd1b" - }, - "output": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" - }, - "comments": { - "input": { - }, - "output": { - } - }, - 
"description": "", - "id": "createenvironmentec2-1516821730547", - "title": "CreateEnvironmentEC2" - } - ], - "CreateEnvironmentMembership": [ - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": "read-write", - "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser" - }, - "output": { - "membership": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": "read-write", - "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser", - "userId": "AIDAJ3BA6O2FMJWCWXHEX" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "createenvironmentmembership-1516822583452", - "title": "CreateEnvironmentMembership" - } - ], - "DeleteEnvironment": [ - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "deleteenvironment-1516822903149", - "title": "DeleteEnvironment" - } - ], - "DeleteEnvironmentMembership": [ - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "deleteenvironmentmembership-1516822975655", - "title": "DeleteEnvironmentMembership" - } - ], - "DescribeEnvironmentMemberships": [ - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" - }, - "output": { - "memberships": [ - { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": "read-write", - "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser", - "userId": "AIDAJ3BA6O2FMJWCWXHEX" - }, - { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": "owner", - "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", - "userId": "AIDAJNUEDQAQWFELJDLEX" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example gets information about all of the environment members for the specified development environment.", - "id": "describeenvironmentmemberships1-1516823070453", - "title": "DescribeEnvironmentMemberships1" - }, - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": [ - "owner" - ] - }, - "output": { - "memberships": [ - { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": "owner", - "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", - "userId": "AIDAJNUEDQAQWFELJDLEX" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example gets information about the owner of the specified development environment.", - "id": "describeenvironmentmemberships2-1516823191355", - "title": "DescribeEnvironmentMemberships2" - }, - { - "input": { - "userArn": "arn:aws:iam::123456789012:user/MyDemoUser" - }, - "output": { - "memberships": [ - { - "environmentId": "10a75714bd494714929e7f5ec4125aEX", - "lastAccess": "2018-01-19T11:06:13Z", - "permissions": "owner", - "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", - "userId": "AIDAJNUEDQAQWFELJDLEX" - }, - { - "environmentId": "12bfc3cd537f41cb9776f8af5525c9EX", - "lastAccess": "2018-01-19T11:39:19Z", - "permissions": "owner", - "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", - "userId": "AIDAJNUEDQAQWFELJDLEX" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example gets development environment membership information for the 
specified user.", - "id": "describeenvironmentmemberships3-1516823268793", - "title": "DescribeEnvironmentMemberships3" - } - ], - "DescribeEnvironmentStatus": [ - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" - }, - "output": { - "message": "Environment is ready to use", - "status": "ready" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "describeenvironmentstatus-1516823462133", - "title": "DescribeEnvironmentStatus" - } - ], - "DescribeEnvironments": [ - { - "input": { - "environmentIds": [ - "8d9967e2f0624182b74e7690ad69ebEX", - "349c86d4579e4e7298d500ff57a6b2EX" - ] - }, - "output": { - "environments": [ - { - "name": "my-demo-environment", - "type": "ec2", - "arn": "arn:aws:cloud9:us-east-2:123456789012:environment:8d9967e2f0624182b74e7690ad69ebEX", - "description": "This is my demonstration environment.", - "id": "8d9967e2f0624182b74e7690ad69ebEX", - "lifecycle": { - "status": "CREATED" - }, - "ownerArn": "arn:aws:iam::123456789012:user/MyDemoUser" - }, - { - "name": "another-demo-environment", - "type": "ssh", - "arn": "arn:aws:cloud9:us-east-2:123456789012:environment:349c86d4579e4e7298d500ff57a6b2EX", - "description": "", - "id": "349c86d4579e4e7298d500ff57a6b2EX", - "lifecycle": { - "status": "CREATED" - }, - "ownerArn": "arn:aws:sts::123456789012:assumed-role/AnotherDemoUser/AnotherDemoUser" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "describeenvironments-1516823568291", - "title": "DescribeEnvironments" - } - ], - "ListEnvironments": [ - { - "input": { - }, - "output": { - "environmentIds": [ - "349c86d4579e4e7298d500ff57a6b2EX", - "45a3da47af0840f2b0c0824f5ee232EX" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "listenvironments-1516823687205", - "title": "ListEnvironments" - } - ], - "UpdateEnvironment": [ - { - "input": { - "name": "my-changed-demo-environment", - "description": "This is my changed demonstration environment.", - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "updateenvironment-1516823781910", - "title": "UpdateEnvironment" - } - ], - "UpdateEnvironmentMembership": [ - { - "input": { - "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", - "permissions": "read-only", - "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser" - }, - "output": { - "membership": { - "environmentId": "8d9967e2f0624182b74e7690ad69eb31", - "permissions": "read-only", - "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser", - "userId": "AIDAJ3BA6O2FMJWCWXHEX" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "updateenvironmentmembership-1516823876645", - "title": "UpdateEnvironmentMembership" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/paginators-1.json deleted file mode 100644 index 1c4c2ff..0000000 --- a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "DescribeEnvironmentMemberships": { - "result_key": "memberships", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListEnvironments": { - "result_key": "environmentIds", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - } 
- } -} diff --git a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/service-2.json.gz deleted file mode 100644 index 05a9af4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloud9/2017-09-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index b3203a1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.json deleted file mode 100644 index 14380b0..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListResourceRequests": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceRequestStatusSummaries" - }, - "ListResources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceDescriptions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json deleted file mode 100644 index d0d47fb..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListResources": { - "non_aggregate_keys": [ - "TypeName" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/service-2.json.gz deleted file mode 100644 index 304dc66..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/waiters-2.json deleted file mode 100644 index e5f82ac..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudcontrol/2021-09-30/waiters-2.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ResourceRequestSuccess" : { - "description" : "Wait until resource operation request is successful", - "delay" : 5, - "maxAttempts" : 24, - "operation" : "GetResourceRequestStatus", - "acceptors" : [ { - "matcher" : "path", - "argument" : "ProgressEvent.OperationStatus", - "state" : "success", - "expected" : "SUCCESS" - }, { - "matcher" : "path", - "argument" : "ProgressEvent.OperationStatus", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "ProgressEvent.OperationStatus", - "state" : "failure", - "expected" : "CANCEL_COMPLETE" - } ] - } - } 
-} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index b34ed8f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/examples-1.json b/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/paginators-1.json deleted file mode 100644 index 22cc439..0000000 --- a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "ListObjectParentPaths": { - "result_key": "PathToObjectIdentifiersList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListFacetNames": { - "result_key": "FacetNames", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListPublishedSchemaArns": { - "result_key": "SchemaArns", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListDirectories": { - "result_key": "Directories", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListDevelopmentSchemaArns": { - "result_key": "SchemaArns", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTypedLinkFacetNames": { - "result_key": "FacetNames", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListIndex": { - "result_key": "IndexAttachments", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListFacetAttributes": { - "result_key": "Attributes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListObjectPolicies": { - "result_key": "AttachedPolicyIds", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTagsForResource": { - "result_key": "Tags", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListAttachedIndices": { - "result_key": "IndexAttachments", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "LookupPolicy": { - "result_key": "PolicyToPathList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListPolicyAttachments": { - "result_key": "ObjectIdentifiers", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListObjectAttributes": { - "result_key": "Attributes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListAppliedSchemaArns": { - "result_key": "SchemaArns", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTypedLinkFacetAttributes": { - "result_key": "Attributes", - "output_token": "NextToken", - 
"input_token": "NextToken", - "limit_key": "MaxResults" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/service-2.json.gz deleted file mode 100644 index 10b2d12..0000000 Binary files a/venv/Lib/site-packages/botocore/data/clouddirectory/2016-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index ad3087d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/examples-1.json b/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/paginators-1.json b/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/paginators-1.json deleted file mode 100644 index 5a06fb0..0000000 --- a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/paginators-1.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "pagination": { - "ListObjectParentPaths": { - "result_key": "PathToObjectIdentifiersList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListFacetNames": { - "result_key": "FacetNames", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListPublishedSchemaArns": { - "result_key": "SchemaArns", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListDirectories": { - "result_key": "Directories", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListDevelopmentSchemaArns": { - "result_key": "SchemaArns", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTypedLinkFacetNames": { - "result_key": "FacetNames", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListIndex": { - "result_key": "IndexAttachments", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListFacetAttributes": { - "result_key": "Attributes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListObjectPolicies": { - "result_key": "AttachedPolicyIds", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTagsForResource": { - "result_key": "Tags", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListAttachedIndices": { - "result_key": "IndexAttachments", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "LookupPolicy": { - "result_key": "PolicyToPathList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListPolicyAttachments": { - "result_key": "ObjectIdentifiers", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListObjectAttributes": { 
- "result_key": "Attributes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListAppliedSchemaArns": { - "result_key": "SchemaArns", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTypedLinkFacetAttributes": { - "result_key": "Attributes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListIncomingTypedLinks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LinkSpecifiers" - }, - "ListManagedSchemaArns": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SchemaArns" - }, - "ListOutgoingTypedLinks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TypedLinkSpecifiers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/service-2.json.gz deleted file mode 100644 index 6459109..0000000 Binary files a/venv/Lib/site-packages/botocore/data/clouddirectory/2017-01-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1945cf7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/paginators-1.json deleted file mode 100644 index 55e601f..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/paginators-1.json +++ /dev/null @@ -1,143 +0,0 @@ -{ - "pagination": { - "DescribeAccountLimits": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "AccountLimits" - }, - "DescribeChangeSet": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Changes", - "non_aggregate_keys": [ - "ChangeSetName", - "ChangeSetId", - "StackId", - "StackName", - "Description", - "Parameters", - "CreationTime", - "ExecutionStatus", - "Status", - "StatusReason", - "NotificationARNs", - "RollbackConfiguration", - "Capabilities", - "Tags", - "ParentChangeSetId", - "IncludeNestedStacks", - "RootChangeSetId", - "OnStackFailure", - "ImportExistingResources", - "StackDriftStatus", - "DeploymentMode" - ] - }, - "DescribeStackEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "StackEvents" - }, - "DescribeStacks": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Stacks" - }, - "ListChangeSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Summaries" - }, - "ListStackInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Summaries" - 
}, - "ListStackResources": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "StackResourceSummaries" - }, - "ListStacks": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "StackSummaries" - }, - "ListStackSetOperationResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Summaries" - }, - "ListStackSetOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Summaries" - }, - "ListStackSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Summaries" - }, - "ListExports": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Exports" - }, - "ListImports": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Imports" - }, - "ListTypes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TypeSummaries" - }, - "ListGeneratedTemplates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Summaries" - }, - "ListResourceScanRelatedResources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RelatedResources" - }, - "ListResourceScanResources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Resources" - }, - "ListResourceScans": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ResourceScanSummaries" - }, - "ListStackRefactorActions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StackRefactorActions" - }, - "ListStackRefactors": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StackRefactorSummaries" - }, - "DescribeEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "OperationEvents" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/service-2.json.gz deleted file mode 100644 index 665d355..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/waiters-2.json deleted file mode 100644 index 0bfa9b0..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudformation/2010-05-15/waiters-2.json +++ /dev/null @@ -1,404 +0,0 @@ -{ - "version": 2, - "waiters": { - "StackExists": { - "delay": 5, - "operation": "DescribeStacks", - "maxAttempts": 20, - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "ValidationError", - "state": "retry" - } - ] - }, - "StackCreateComplete": { - "delay": 30, - "operation": "DescribeStacks", - "maxAttempts": 120, - "description": "Wait until stack status is CREATE_COMPLETE.", - "acceptors": [ - { - "argument": "Stacks[].StackStatus", - "expected": "CREATE_COMPLETE", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_COMPLETE", - "matcher": "pathAll", - 
"state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_IN_PROGRESS", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_FAILED", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_IN_PROGRESS", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_FAILED", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_COMPLETE", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "CREATE_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "DELETE_COMPLETE", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "DELETE_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "ROLLBACK_COMPLETE", - "matcher": "pathAny", - "state": "failure" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "StackDeleteComplete": { - "delay": 30, - "operation": "DescribeStacks", - "maxAttempts": 120, - "description": "Wait until stack status is DELETE_COMPLETE.", - "acceptors": [ - { - "argument": "Stacks[].StackStatus", - "expected": "DELETE_COMPLETE", - "matcher": "pathAll", - "state": "success" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "DELETE_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "CREATE_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_IN_PROGRESS", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_COMPLETE", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_COMPLETE", - "matcher": "pathAny", - "state": "failure" - } - ] - }, - "StackUpdateComplete": { - "delay": 30, - "maxAttempts": 120, - "operation": "DescribeStacks", - "description": "Wait until stack status is UPDATE_COMPLETE.", - "acceptors": [ - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_COMPLETE", - "matcher": "pathAll", - "state": "success" - }, - { - "expected": "UPDATE_FAILED", - "matcher": "pathAny", - "state": "failure", - "argument": "Stacks[].StackStatus" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - 
"expected": "UPDATE_ROLLBACK_COMPLETE", - "matcher": "pathAny", - "state": "failure", - "argument": "Stacks[].StackStatus" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "StackImportComplete": { - "delay": 30, - "maxAttempts": 120, - "operation": "DescribeStacks", - "description": "Wait until stack status is IMPORT_COMPLETE.", - "acceptors": [ - { - "argument": "Stacks[].StackStatus", - "expected": "IMPORT_COMPLETE", - "matcher": "pathAll", - "state": "success" - }, - { - "expected": "ROLLBACK_COMPLETE", - "matcher": "pathAny", - "state": "failure", - "argument": "Stacks[].StackStatus" - }, - { - "expected": "ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure", - "argument": "Stacks[].StackStatus" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "IMPORT_ROLLBACK_IN_PROGRESS", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "IMPORT_ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "expected": "IMPORT_ROLLBACK_COMPLETE", - "matcher": "pathAny", - "state": "failure", - "argument": "Stacks[].StackStatus" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "StackRollbackComplete": { - "delay": 30, - "operation": "DescribeStacks", - "maxAttempts": 120, - "description": "Wait until stack status is UPDATE_ROLLBACK_COMPLETE.", - "acceptors": [ - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_COMPLETE", - "matcher": "pathAll", - "state": "success" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "UPDATE_ROLLBACK_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "argument": "Stacks[].StackStatus", - "expected": "DELETE_FAILED", - "matcher": "pathAny", - "state": "failure" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "ChangeSetCreateComplete": { - "delay": 30, - "operation": "DescribeChangeSet", - "maxAttempts": 120, - "description": "Wait until change set status is CREATE_COMPLETE.", - "acceptors": [ - { - "argument": "Status", - "expected": "CREATE_COMPLETE", - "matcher": "path", - "state": "success" - }, - { - "argument": "Status", - "expected": "FAILED", - "matcher": "path", - "state": "failure" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "StackRefactorCreateComplete": { - "delay": 5, - "operation": "DescribeStackRefactor", - "maxAttempts": 120, - "description": "Wait until the stack refactor status is CREATE_COMPLETE.", - "acceptors": [ - { - "argument": "Status", - "expected": "CREATE_COMPLETE", - "matcher": "path", - "state": "success" - }, - { - "argument": "Status", - "expected": "CREATE_FAILED", - "matcher": "path", - "state": "failure" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "StackRefactorExecuteComplete": { - "delay": 15, - "operation": "DescribeStackRefactor", - "maxAttempts": 120, - "description": "Wait until the stack refactor status is EXECUTE_COMPLETE.", - "acceptors": [ - { - "argument": "ExecutionStatus", - "expected": "EXECUTE_COMPLETE", - "matcher": "path", - "state": "success" - }, - { - "argument": "ExecutionStatus", - "expected": "EXECUTE_FAILED", - "matcher": "path", - "state": "failure" - }, - { - "argument": "ExecutionStatus", - 
"expected": "ROLLBACK_COMPLETE", - "matcher": "path", - "state": "failure" - }, - { - "expected": "ValidationError", - "matcher": "error", - "state": "failure" - } - ] - }, - "TypeRegistrationComplete": { - "delay": 30, - "operation": "DescribeTypeRegistration", - "maxAttempts": 120, - "description": "Wait until type registration is COMPLETE.", - "acceptors": [ - { - "argument": "ProgressStatus", - "expected": "COMPLETE", - "matcher": "path", - "state": "success" - }, - { - "argument": "ProgressStatus", - "expected": "FAILED", - "matcher": "path", - "state": "failure" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2da91fb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json deleted file mode 100644 index 8fda57a..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListKeys": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/service-2.json.gz deleted file mode 100644 index 53b4844..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": 
"StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/service-2.json.gz deleted file mode 100644 index 6010352..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2014-05-31/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/service-2.json.gz deleted file mode 100644 index 66260e7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2014-10-21/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/service-2.json.gz deleted file mode 100644 index 7c1e77c..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2014-11-06/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/service-2.json.gz deleted file mode 100644 index 1fa0526..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/waiters-2.json 
b/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2015-04-17/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/service-2.json.gz deleted file mode 100644 index ae080f3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2015-07-27/waiters-2.json +++ 
/dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/service-2.json.gz deleted file mode 100644 index a0499db..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2015-09-17/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is 
deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97ab0d6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/service-2.json.gz deleted file mode 100644 index 093a7cc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-13/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": 
"GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/service-2.json.gz deleted file mode 100644 index c18e54d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-01-28/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": 
"Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/service-2.json.gz deleted file mode 100644 index 1ffc381..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-01/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - 
"delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/service-2.json.gz deleted file mode 100644 index 47e1d7f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-08-20/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": 
"success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/service-2.json.gz deleted file mode 100644 index 3b70027..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-07/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": 
"GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/service-2.json.gz deleted file mode 100644 index 991587d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- 
a/venv/Lib/site-packages/botocore/data/cloudfront/2016-09-29/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/examples-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/service-2.json.gz deleted file mode 100644 index 8942264..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/waiters-2.json deleted file mode 100644 index 6e044bc..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2016-11-25/waiters-2.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": 2, - "waiters": { - "DistributionDeployed": { - "delay": 60, - "operation": "GetDistribution", - "maxAttempts": 25, - "description": "Wait until a distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "Distribution.Status" - } - ] - }, - "InvalidationCompleted": { - "delay": 20, - "operation": "GetInvalidation", - "maxAttempts": 60, - "description": "Wait until an invalidation has completed.", - "acceptors": [ - { - "expected": "Completed", - "matcher": "path", - "state": "success", - "argument": "Invalidation.Status" - } - ] - }, - "StreamingDistributionDeployed": { - "delay": 60, - "operation": "GetStreamingDistribution", - "maxAttempts": 25, - "description": "Wait until a streaming distribution is deployed.", - "acceptors": [ - { - "expected": "Deployed", - "matcher": "path", - "state": "success", - "argument": "StreamingDistribution.Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 666d92e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/paginators-1.json deleted file mode 100644 index 51fbb90..0000000 --- a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "ListCloudFrontOriginAccessIdentities": { - "input_token": "Marker", - "output_token": "CloudFrontOriginAccessIdentityList.NextMarker", - "limit_key": "MaxItems", - "more_results": "CloudFrontOriginAccessIdentityList.IsTruncated", - "result_key": "CloudFrontOriginAccessIdentityList.Items" - }, - "ListDistributions": { - "input_token": "Marker", - "output_token": "DistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "DistributionList.IsTruncated", - "result_key": "DistributionList.Items" - }, - "ListInvalidations": { - "input_token": "Marker", - "output_token": "InvalidationList.NextMarker", - "limit_key": "MaxItems", - "more_results": "InvalidationList.IsTruncated", - "result_key": "InvalidationList.Items" - }, - "ListStreamingDistributions": { - "input_token": "Marker", - "output_token": "StreamingDistributionList.NextMarker", - "limit_key": "MaxItems", - "more_results": "StreamingDistributionList.IsTruncated", - "result_key": "StreamingDistributionList.Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/service-2.json.gz deleted file mode 100644 index 48d93de..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/waiters-2.json b/venv/Lib/site-packages/botocore/data/cloudfront/2017-03-25/waiters-2.json deleted file mode 100644 index edd74b2..0000000 --- 
a/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/examples-1.json b/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/paginators-1.json b/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/paginators-1.json deleted file mode 100644 index 51b7c94..0000000 --- a/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "AdminListGroupsForUser": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Groups" - }, - "AdminListUserAuthEvents": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AuthEvents" - }, - "ListGroups": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Groups" - }, - "ListIdentityProviders": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Providers" - }, - "ListResourceServers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ResourceServers" - }, - "ListUserPoolClients": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "UserPoolClients" - }, - "ListUserPools": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "UserPools" - }, - "ListUsersInGroup": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Users" - }, - "ListUsers": { - "input_token": "PaginationToken", - "limit_key": "Limit", - "output_token": "PaginationToken", - "result_key": "Users" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/service-2.json.gz deleted file mode 100644 index 0c87656..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cognito-idp/2016-04-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index c53b0cf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/examples-1.json b/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git 
a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/service-2.json.gz deleted file mode 100644 index bea33a7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cognito-sync/2014-06-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2b10fef..0000000 Binary files a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/examples-1.json b/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/paginators-1.json deleted file mode 100644 index 3a45818..0000000 --- a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListTopicsDetectionJobs": { - "result_key": "TopicsDetectionJobPropertiesList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListDocumentClassificationJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DocumentClassificationJobPropertiesList" - }, - "ListDocumentClassifiers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DocumentClassifierPropertiesList" - }, - "ListDominantLanguageDetectionJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DominantLanguageDetectionJobPropertiesList" - }, - "ListEntitiesDetectionJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EntitiesDetectionJobPropertiesList" - }, - "ListEntityRecognizers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EntityRecognizerPropertiesList" - }, - "ListKeyPhrasesDetectionJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "KeyPhrasesDetectionJobPropertiesList" - }, - "ListSentimentDetectionJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SentimentDetectionJobPropertiesList" - }, - "ListEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EndpointPropertiesList" - }, - "ListPiiEntitiesDetectionJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PiiEntitiesDetectionJobPropertiesList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/service-2.json.gz deleted file mode 100644 index a6dbc32..0000000 Binary files a/venv/Lib/site-packages/botocore/data/comprehend/2017-11-27/service-2.json.gz and /dev/null differ 
diff --git a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 75db6d3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/examples-1.json b/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/service-2.json.gz deleted file mode 100644 index 219e3b8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/comprehendmedical/2018-10-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index bc45dbe..0000000 Binary files a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/paginators-1.json deleted file mode 100644 index 93d1188..0000000 --- a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "ListAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accounts" - }, - "ListAutomationEventSteps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "automationEventSteps" - }, - "ListAutomationEventSummaries": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "automationEventSummaries" - }, - "ListAutomationEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "automationEvents" - }, - "ListAutomationRulePreview": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "previewResults" - }, - "ListAutomationRulePreviewSummaries": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "previewResultSummaries" - }, - "ListAutomationRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "automationRules" - }, - "ListRecommendedActionSummaries": { - "input_token": "nextToken", - "output_token": "nextToken", - 
"limit_key": "maxResults", - "result_key": "recommendedActionSummaries" - }, - "ListRecommendedActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendedActions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/service-2.json.gz deleted file mode 100644 index d638f28..0000000 Binary files a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/waiters-2.json b/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/compute-optimizer-automation/2025-09-22/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 49a6b54..0000000 Binary files a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/paginators-1.json deleted file mode 100644 index 1d115fc..0000000 --- a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "DescribeRecommendationExportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendationExportJobs" - }, - "GetEnrollmentStatusesForOrganization": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accountEnrollmentStatuses" - }, - "GetLambdaFunctionRecommendations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "lambdaFunctionRecommendations" - }, - "GetRecommendationPreferences": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendationPreferencesDetails" - }, - "GetRecommendationSummaries": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendationSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/service-2.json.gz deleted file mode 100644 index 025bffc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/compute-optimizer/2019-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/config/2014-11-12/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/config/2014-11-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index 95b3804..0000000 Binary files a/venv/Lib/site-packages/botocore/data/config/2014-11-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/config/2014-11-12/examples-1.json b/venv/Lib/site-packages/botocore/data/config/2014-11-12/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/config/2014-11-12/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/config/2014-11-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/config/2014-11-12/paginators-1.json deleted file mode 100644 index 04fd5c5..0000000 --- a/venv/Lib/site-packages/botocore/data/config/2014-11-12/paginators-1.json +++ /dev/null @@ -1,198 +0,0 @@ -{ - "pagination": { - "DescribeComplianceByConfigRule": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ComplianceByConfigRules" - }, - "DescribeComplianceByResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ComplianceByResources", - "limit_key": "Limit" - }, - "DescribeConfigRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ConfigRules" - }, - "GetComplianceDetailsByConfigRule": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "EvaluationResults", - "limit_key": "Limit" - }, - "GetComplianceDetailsByResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "EvaluationResults" - }, - "GetResourceConfigHistory": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "configurationItems", - "limit_key": "limit" - }, - "ListDiscoveredResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "resourceIdentifiers", - "limit_key": "limit" - }, - "DescribeAggregateComplianceByConfigRules": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "AggregateComplianceByConfigRules" - }, - "DescribeAggregationAuthorizations": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "AggregationAuthorizations" - }, - "DescribeConfigRuleEvaluationStatus": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ConfigRulesEvaluationStatus" - }, - "DescribeConfigurationAggregatorSourcesStatus": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "AggregatedSourceStatusList" - }, - "DescribeConfigurationAggregators": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ConfigurationAggregators" - }, - "DescribePendingAggregationRequests": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "PendingAggregationRequests" - }, - "DescribeRetentionConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "RetentionConfigurations" - }, - "GetAggregateComplianceDetailsByConfigRule": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "AggregateEvaluationResults" - }, - "ListAggregateDiscoveredResources": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - 
"result_key": "ResourceIdentifiers" - }, - "DescribeRemediationExecutionStatus": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "RemediationExecutionStatuses" - }, - "DescribeAggregateComplianceByConformancePacks": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "AggregateComplianceByConformancePacks" - }, - "DescribeConformancePackStatus": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ConformancePackStatusDetails" - }, - "DescribeConformancePacks": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ConformancePackDetails" - }, - "DescribeOrganizationConfigRuleStatuses": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "OrganizationConfigRuleStatuses" - }, - "DescribeOrganizationConfigRules": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "OrganizationConfigRules" - }, - "DescribeOrganizationConformancePackStatuses": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "OrganizationConformancePackStatuses" - }, - "DescribeOrganizationConformancePacks": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "OrganizationConformancePacks" - }, - "GetConformancePackComplianceSummary": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ConformancePackComplianceSummaryList" - }, - "GetOrganizationConfigRuleDetailedStatus": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "OrganizationConfigRuleDetailedStatus" - }, - "GetOrganizationConformancePackDetailedStatus": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "OrganizationConformancePackDetailedStatuses" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Tags" - }, - "SelectAggregateResourceConfig": { - "input_token": "NextToken", - "limit_key": "Limit", - "non_aggregate_keys": [ - "QueryInfo" - ], - "output_token": "NextToken", - "result_key": "Results" - }, - "SelectResourceConfig": { - "input_token": "NextToken", - "limit_key": "Limit", - "non_aggregate_keys": [ - "QueryInfo" - ], - "output_token": "NextToken", - "result_key": "Results" - }, - "ListResourceEvaluations": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ResourceEvaluations" - }, - "ListConfigurationRecorders": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ConfigurationRecorderSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/config/2014-11-12/service-2.json.gz b/venv/Lib/site-packages/botocore/data/config/2014-11-12/service-2.json.gz deleted file mode 100644 index cc473c0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/config/2014-11-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/endpoint-rule-set-1.json.gz deleted file mode 100644 index 29b60b1..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/examples-1.json b/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/paginators-1.json b/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/service-2.json.gz b/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/service-2.json.gz deleted file mode 100644 index 4ad9f7d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connect-contact-lens/2020-08-21/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/connect/2017-08-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 92d8752..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/examples-1.json b/venv/Lib/site-packages/botocore/data/connect/2017-08-08/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/connect/2017-08-08/paginators-1.json deleted file mode 100644 index 64bb258..0000000 --- a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/paginators-1.json +++ /dev/null @@ -1,550 +0,0 @@ -{ - "pagination": { - "GetMetricData": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MetricResults" - }, - "ListRoutingProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RoutingProfileSummaryList" - }, - "ListSecurityProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityProfileSummaryList" - }, - "ListUserHierarchyGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "UserHierarchyGroupSummaryList" - }, - "ListUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "UserSummaryList" - }, - "ListContactFlows": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ContactFlowSummaryList" - }, - "ListHoursOfOperations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "HoursOfOperationSummaryList" - }, - "ListPhoneNumbers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": 
"NextToken", - "result_key": "PhoneNumberSummaryList" - }, - "ListQueues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "QueueSummaryList" - }, - "ListPrompts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PromptSummaryList" - }, - "ListRoutingProfileQueues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RoutingProfileQueueConfigSummaryList", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ] - }, - "ListApprovedOrigins": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Origins" - }, - "ListInstanceAttributes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Attributes" - }, - "ListInstanceStorageConfigs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StorageConfigs" - }, - "ListInstances": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceSummaryList" - }, - "ListLambdaFunctions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LambdaFunctions" - }, - "ListLexBots": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LexBots" - }, - "ListSecurityKeys": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityKeys" - }, - "ListIntegrationAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IntegrationAssociationSummaryList" - }, - "ListUseCases": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "UseCaseSummaryList" - }, - "ListQuickConnects": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "QuickConnectSummaryList" - }, - "ListQueueQuickConnects": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "QuickConnectSummaryList", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ] - }, - "ListBots": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LexBots" - }, - "ListAgentStatuses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AgentStatusSummaryList" - }, - "ListSecurityProfilePermissions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Permissions", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ] - }, - "ListContactReferences": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReferenceSummaryList" - }, - "ListContactFlowModules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ContactFlowModulesSummaryList" - }, - "ListDefaultVocabularies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DefaultVocabularyList" - }, - "SearchVocabularies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - 
"result_key": "VocabularySummaryList" - }, - "ListPhoneNumbersV2": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ListPhoneNumbersSummaryList" - }, - "SearchAvailablePhoneNumbers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AvailableNumbersList" - }, - "SearchUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "Users" - }, - "ListTaskTemplates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TaskTemplates" - }, - "SearchSecurityProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "SecurityProfiles" - }, - "SearchQueues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "Queues" - }, - "SearchRoutingProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "RoutingProfiles" - }, - "ListTrafficDistributionGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TrafficDistributionGroupSummaryList" - }, - "ListRules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RuleSummaryList" - }, - "ListContactEvaluations": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "EvaluationSummaryList" - }, - "ListEvaluationFormVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EvaluationFormVersionSummaryList" - }, - "ListEvaluationForms": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EvaluationFormSummaryList" - }, - "SearchHoursOfOperations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "HoursOfOperations" - }, - "SearchPrompts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "Prompts" - }, - "SearchQuickConnects": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "QuickConnects" - }, - "SearchResourceTags": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListTrafficDistributionGroupUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TrafficDistributionGroupUserSummaryList" - }, - "ListViewVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ViewVersionSummaryList" - }, - "ListViews": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ViewsSummaryList" - }, - "ListSecurityProfileApplications": { - "input_token": "NextToken", - "limit_key": "MaxResults", - 
"output_token": "NextToken", - "result_key": "Applications", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ] - }, - "ListFlowAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FlowAssociationSummaryList" - }, - "ListPredefinedAttributes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PredefinedAttributeSummaryList" - }, - "ListUserProficiencies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "LastModifiedTime", - "LastModifiedRegion" - ], - "output_token": "NextToken", - "result_key": "UserProficiencyList" - }, - "SearchContacts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "TotalCount" - ], - "output_token": "NextToken", - "result_key": "Contacts" - }, - "SearchPredefinedAttributes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "PredefinedAttributes" - }, - "SearchContactFlowModules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "ContactFlowModules" - }, - "SearchContactFlows": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "ContactFlows" - }, - "ListAuthenticationProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AuthenticationProfileSummaryList" - }, - "SearchAgentStatuses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "AgentStatuses" - }, - "SearchUserHierarchyGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "UserHierarchyGroups" - }, - "ListContactFlowVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ContactFlowVersionSummaryList" - }, - "ListHoursOfOperationOverrides": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ], - "output_token": "NextToken", - "result_key": "HoursOfOperationOverrideList" - }, - "SearchHoursOfOperationOverrides": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "HoursOfOperationOverrides" - }, - "ListRoutingProfileManualAssignmentQueues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ], - "output_token": "NextToken", - "result_key": "RoutingProfileManualAssignmentQueueConfigSummaryList" - }, - "ListContactFlowModuleAliases": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ContactFlowModuleAliasSummaryList" - }, - "ListContactFlowModuleVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ContactFlowModuleVersionSummaryList" - }, - "ListDataTableAttributes": { - "input_token": 
"NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Attributes" - }, - "ListDataTablePrimaryValues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PrimaryValuesList" - }, - "ListDataTableValues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Values" - }, - "ListDataTables": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DataTableSummaryList" - }, - "ListWorkspacePages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WorkspacePageList" - }, - "ListWorkspaces": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WorkspaceSummaryList" - }, - "SearchDataTables": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "DataTables" - }, - "SearchViews": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "Views" - }, - "SearchWorkspaceAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "WorkspaceAssociations" - }, - "SearchWorkspaces": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ApproximateTotalCount" - ], - "output_token": "NextToken", - "result_key": "Workspaces" - }, - "ListEntitySecurityProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityProfiles" - }, - "ListSecurityProfileFlowModules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "LastModifiedRegion", - "LastModifiedTime" - ], - "output_token": "NextToken", - "result_key": "AllowedFlowModules" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/connect/2017-08-08/service-2.json.gz deleted file mode 100644 index e6e8764..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connect/2017-08-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index a632525..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/paginators-1.json deleted file mode 100644 index 6ab0451..0000000 --- a/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListCampaigns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "campaignSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/service-2.json.gz deleted 
file mode 100644 index 9da6b54..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectcampaigns/2021-01-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index d9bbdcd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/paginators-1.json deleted file mode 100644 index c839607..0000000 --- a/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListCampaigns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "campaignSummaryList" - }, - "ListConnectInstanceIntegrations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "integrationSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/service-2.json.gz deleted file mode 100644 index f906bc2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectcampaignsv2/2024-04-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index c2028ee..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/paginators-1.json deleted file mode 100644 index 502a550..0000000 --- a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "SearchCases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "cases" - }, - "SearchRelatedItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "relatedItems" - }, - "ListCaseRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "caseRules" - }, - "SearchAllRelatedItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "relatedItems" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/service-2.json.gz deleted file mode 100644 index 6e94a0d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/waiters-2.json b/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- 
a/venv/Lib/site-packages/botocore/data/connectcases/2022-10-03/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 290637b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/examples-1.json b/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/service-2.json.gz deleted file mode 100644 index 532e171..0000000 Binary files a/venv/Lib/site-packages/botocore/data/connectparticipant/2018-09-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2da03d3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/paginators-1.json deleted file mode 100644 index 2e3358d..0000000 --- a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListCommonControls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CommonControls" - }, - "ListDomains": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Domains" - }, - "ListObjectives": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Objectives" - }, - "ListControls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Controls" - }, - "ListControlMappings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ControlMappings" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/service-2.json.gz deleted file mode 100644 index 914e634..0000000 Binary files a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/controlcatalog/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 45f3310..0000000 Binary files a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/paginators-1.json deleted file mode 100644 index e34843b..0000000 --- a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListEnabledControls": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "enabledControls" - }, - "ListLandingZones": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "landingZones" - }, - "ListBaselines": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "baselines" - }, - "ListEnabledBaselines": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "enabledBaselines" - }, - "ListControlOperations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "controlOperations" - }, - "ListLandingZoneOperations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "landingZoneOperations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/service-2.json.gz deleted file mode 100644 index 3facbf4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/controltower/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 379432a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/paginators-1.json deleted file mode 100644 index 79511ab..0000000 --- a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListEnrollmentStatuses": { - 
"input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListRecommendationSummaries": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListRecommendations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEfficiencyMetrics": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "efficiencyMetricsByGroup" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/paginators-1.sdk-extras.json deleted file mode 100644 index f9c62d5..0000000 --- a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/paginators-1.sdk-extras.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListEnrollmentStatuses": { - "non_aggregate_keys": [ - "includeMemberAccounts" - ] - }, - "ListRecommendationSummaries": { - "non_aggregate_keys": [ - "groupBy", - "currencyCode", - "estimatedTotalDedupedSavings", - "metrics" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/service-2.json.gz deleted file mode 100644 index d277388..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/cost-optimization-hub/2022-07-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/cur/2017-01-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index 893069f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/examples-1.json b/venv/Lib/site-packages/botocore/data/cur/2017-01-06/examples-1.json deleted file mode 100644 index d647e38..0000000 --- a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/examples-1.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "version": "1.0", - "examples": { - "DeleteReportDefinition": [ - { - "input": { - "ReportName": "ExampleReport" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes the AWS Cost and Usage report named ExampleReport.", - "id": "to-delete-a-report", - "title": "To delete the AWS Cost and Usage report named ExampleReport." 
- } - ], - "DescribeReportDefinitions": [ - { - "input": { - "MaxResults": 5 - }, - "output": { - "ReportDefinitions": [ - { - "AdditionalArtifacts": [ - "QUICKSIGHT" - ], - "AdditionalSchemaElements": [ - "RESOURCES" - ], - "Compression": "GZIP", - "Format": "textORcsv", - "ReportName": "ExampleReport", - "S3Bucket": "example-s3-bucket", - "S3Prefix": "exampleprefix", - "S3Region": "us-east-1", - "TimeUnit": "HOURLY" - }, - { - "AdditionalArtifacts": [ - "QUICKSIGHT" - ], - "AdditionalSchemaElements": [ - "RESOURCES" - ], - "Compression": "GZIP", - "Format": "textORcsv", - "ReportName": "ExampleReport2", - "S3Bucket": "example-s3-bucket", - "S3Prefix": "exampleprefix", - "S3Region": "us-east-1", - "TimeUnit": "HOURLY" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists the AWS Cost and Usage reports for the account.", - "id": "to-retrieve-report-definitions", - "title": "To list the AWS Cost and Usage reports for the account." - } - ], - "PutReportDefinition": [ - { - "input": { - "ReportDefinition": { - "AdditionalArtifacts": [ - "REDSHIFT", - "QUICKSIGHT" - ], - "AdditionalSchemaElements": [ - "RESOURCES" - ], - "Compression": "ZIP", - "Format": "textORcsv", - "ReportName": "ExampleReport", - "S3Bucket": "example-s3-bucket", - "S3Prefix": "exampleprefix", - "S3Region": "us-east-1", - "TimeUnit": "DAILY" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a AWS Cost and Usage report named ExampleReport.", - "id": "to-create-a-report-definitions", - "title": "To create a report named ExampleReport." - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/cur/2017-01-06/paginators-1.json deleted file mode 100644 index 7db4dfe..0000000 --- a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "DescribeReportDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReportDefinitions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/cur/2017-01-06/service-2.json.gz deleted file mode 100644 index 33125fd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/cur/2017-01-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1bd3427..0000000 Binary files a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/examples-1.json b/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/paginators-1.json deleted file mode 100644 index 38a591c..0000000 --- 
a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "ListEventStreams": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "GetSimilarProfiles": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProfileIds" - }, - "ListObjectTypeAttributes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListRuleBasedMatches": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MatchIds" - }, - "ListSegmentDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListEventTriggers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListDomainLayouts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListUploadJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListDomainObjectTypes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListRecommenderRecipes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RecommenderRecipes" - }, - "ListRecommenders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Recommenders" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/paginators-1.sdk-extras.json deleted file mode 100644 index 15e5e31..0000000 --- a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/paginators-1.sdk-extras.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetSimilarProfiles": { - "non_aggregate_keys": [ - "MatchType", - "MatchId", - "RuleLevel", - "ConfidenceScore" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/service-2.json.gz deleted file mode 100644 index 7db20d3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/customer-profiles/2020-08-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6b4c7fb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/examples-1.json b/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/paginators-1.json deleted file mode 100644 index d18a749..0000000 --- a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "ListDatasets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Datasets" - }, - "ListJobRuns": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "JobRuns" - }, - "ListJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Jobs" - }, - "ListProjects": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Projects" - }, - "ListRecipeVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Recipes" - }, - "ListRecipes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Recipes" - }, - "ListSchedules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Schedules" - }, - "ListRulesets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Rulesets" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/service-2.json.gz deleted file mode 100644 index c070cf1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/databrew/2017-07-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index f1475f4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/paginators-1.json deleted file mode 100644 index cf704b0..0000000 --- a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListDataSetRevisions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Revisions" - }, - "ListDataSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataSets" - }, - "ListJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Jobs" - }, - "ListRevisionAssets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Assets" - }, - "ListEventActions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EventActions" - }, - "ListDataGrants": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataGrantSummaries" - }, - "ListReceivedDataGrants": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataGrantSummaries" - } - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/service-2.json.gz deleted file mode 100644 index 951da55..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/waiters-2.json b/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/dataexchange/2017-07-25/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index 150128f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/examples-1.json b/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/paginators-1.json deleted file mode 100644 index c859c9f..0000000 --- a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/paginators-1.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "pagination": { - "ListPipelines": { - "input_token": "marker", - "output_token": "marker", - "more_results": "hasMoreResults", - "result_key": "pipelineIdList" - }, - "DescribeObjects": { - "input_token": "marker", - "output_token": "marker", - "more_results": "hasMoreResults", - "result_key": "pipelineObjects" - }, - "QueryObjects": { - "input_token": "marker", - "output_token": "marker", - "more_results": "hasMoreResults", - "limit_key": "limit", - "result_key": "ids" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/service-2.json.gz deleted file mode 100644 index 4d29c33..0000000 Binary files a/venv/Lib/site-packages/botocore/data/datapipeline/2012-10-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/endpoint-rule-set-1.json.gz deleted file mode 100644 index a9db480..0000000 Binary files a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/examples-1.json b/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/paginators-1.json b/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/paginators-1.json deleted file mode 100644 index c6892f7..0000000 --- 
a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListAgents": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Agents" - }, - "ListLocations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Locations" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListTaskExecutions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TaskExecutions" - }, - "ListTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/service-2.json.gz b/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/service-2.json.gz deleted file mode 100644 index 7479085..0000000 Binary files a/venv/Lib/site-packages/botocore/data/datasync/2018-11-09/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index de197d4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/paginators-1.json deleted file mode 100644 index 3ab5240..0000000 --- a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/paginators-1.json +++ /dev/null @@ -1,226 +0,0 @@ -{ - "pagination": { - "ListAssetRevisions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDataSourceRunActivities": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDataSourceRuns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDataSources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDomains": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEnvironmentBlueprintConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEnvironmentBlueprints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEnvironmentProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListNotifications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "notifications" - }, - "ListProjectMemberships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListProjects": { - "input_token": 
"nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListSubscriptionGrants": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListSubscriptionRequests": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListSubscriptionTargets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListSubscriptions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "Search": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "SearchGroupProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "SearchListings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "SearchTypes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "SearchUserProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListMetadataGenerationRuns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListTimeSeriesDataPoints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEnvironmentActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListLineageNodeHistory": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "nodes" - }, - "ListAssetFilters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDataProductRevisions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDomainUnitsForParent": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListEntityOwners": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "owners" - }, - "ListPolicyGrants": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "grantList" - }, - "ListRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListConnections": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListJobRuns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListLineageEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListProjectProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListAccountPools": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": 
"maxResults", - "result_key": "items" - }, - "ListAccountsInAccountPool": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/paginators-1.sdk-extras.json deleted file mode 100644 index 10bdbf1..0000000 --- a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/paginators-1.sdk-extras.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "Search": { - "non_aggregate_keys": [ - "totalMatchCount" - ] - }, - "SearchListings": { - "non_aggregate_keys": [ - "totalMatchCount", - "aggregates" - ] - }, - "SearchTypes": { - "non_aggregate_keys": [ - "totalMatchCount" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/service-2.json.gz deleted file mode 100644 index 8260875..0000000 Binary files a/venv/Lib/site-packages/botocore/data/datazone/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dax/2017-04-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 656257c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/examples-1.json b/venv/Lib/site-packages/botocore/data/dax/2017-04-19/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/dax/2017-04-19/paginators-1.json deleted file mode 100644 index c13b2df..0000000 --- a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/paginators-1.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "pagination": { - "DescribeClusters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Clusters" - }, - "DescribeDefaultParameters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Parameters" - }, - "DescribeEvents": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Events" - }, - "DescribeParameterGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ParameterGroups" - }, - "DescribeParameters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Parameters" - }, - "DescribeSubnetGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SubnetGroups" - }, - "ListTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dax/2017-04-19/service-2.json.gz deleted file mode 100644 index 61bf771..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dax/2017-04-19/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index 05879e1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/paginators-1.json deleted file mode 100644 index 0a87c68..0000000 --- a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/paginators-1.json +++ /dev/null @@ -1,178 +0,0 @@ -{ - "pagination": { - "GetSessionsStatisticsAggregation": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "statistics" - }, - "ListAvailableMeteredProducts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "meteredProducts" - }, - "ListBudgets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "budgets" - }, - "ListFarmMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListFarms": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "farms" - }, - "ListFleetMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListFleets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "fleets" - }, - "ListJobMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobs" - }, - "ListLicenseEndpoints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "licenseEndpoints" - }, - "ListMeteredProducts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "meteredProducts" - }, - "ListMonitors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "monitors" - }, - "ListQueueEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environments" - }, - "ListQueueFleetAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "queueFleetAssociations" - }, - "ListQueueMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListQueues": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "queues" - }, - "ListSessionActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessionActions" - }, - "ListSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessions" - }, - "ListSessionsForWorker": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessions" - }, - "ListStepConsumers": { - "input_token": 
"nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "consumers" - }, - "ListStepDependencies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dependencies" - }, - "ListSteps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "steps" - }, - "ListStorageProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "storageProfiles" - }, - "ListStorageProfilesForQueue": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "storageProfiles" - }, - "ListTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tasks" - }, - "ListWorkers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workers" - }, - "ListJobParameterDefinitions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobParameterDefinitions" - }, - "ListLimits": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "limits" - }, - "ListQueueLimitAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "queueLimitAssociations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/paginators-1.sdk-extras.json deleted file mode 100644 index 27c22e7..0000000 --- a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/paginators-1.sdk-extras.json +++ /dev/null @@ -1,13 +0,0 @@ - { - "version": 1.0, - "merge": { - "pagination": { - "GetSessionsStatisticsAggregation": { - "non_aggregate_keys": [ - "status", - "statusMessage" - ] - } - } - } - } diff --git a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/service-2.json.gz b/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/service-2.json.gz deleted file mode 100644 index 8a2b6f3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/waiters-2.json b/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/waiters-2.json deleted file mode 100644 index eb6bcc6..0000000 --- a/venv/Lib/site-packages/botocore/data/deadline/2023-10-12/waiters-2.json +++ /dev/null @@ -1,143 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "FleetActive" : { - "description" : "Wait until a Fleet is activated. Use this after invoking CreateFleet or UpdateFleet.", - "delay" : 5, - "maxAttempts" : 180, - "operation" : "GetFleet", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "UPDATE_FAILED" - } ] - }, - "JobCreateComplete" : { - "description" : "Wait until a Job is created. 
Use this after invoking CreateJob.", - "delay" : 1, - "maxAttempts" : 120, - "operation" : "GetJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "lifecycleStatus", - "state" : "success", - "expected" : "CREATE_COMPLETE" - }, { - "matcher" : "path", - "argument" : "lifecycleStatus", - "state" : "success", - "expected" : "UPDATE_IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "lifecycleStatus", - "state" : "success", - "expected" : "UPDATE_FAILED" - }, { - "matcher" : "path", - "argument" : "lifecycleStatus", - "state" : "success", - "expected" : "UPDATE_SUCCEEDED" - }, { - "matcher" : "path", - "argument" : "lifecycleStatus", - "state" : "failure", - "expected" : "UPLOAD_FAILED" - }, { - "matcher" : "path", - "argument" : "lifecycleStatus", - "state" : "failure", - "expected" : "CREATE_FAILED" - } ] - }, - "LicenseEndpointDeleted" : { - "description" : "Wait until a LicenseEndpoint is Deleted. Use this after invoking DeleteLicenseEndpoint.", - "delay" : 10, - "maxAttempts" : 234, - "operation" : "GetLicenseEndpoint", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "NOT_READY" - } ] - }, - "LicenseEndpointValid" : { - "description" : "Wait until a LicenseEndpoint is Ready. Use this after invoking CreateLicenseEndpoint.", - "delay" : 10, - "maxAttempts" : 114, - "operation" : "GetLicenseEndpoint", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "READY" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "NOT_READY" - } ] - }, - "QueueFleetAssociationStopped" : { - "description" : "Wait until a QueueFleetAssociation is stopped. Use this after setting the status to STOP_SCHEDULING_AND_COMPLETE_TASKS or STOP_SCHEDULING_AND_CANCEL_TASKS to wait for a QueueFleetAssociation to reach STOPPED", - "delay" : 10, - "maxAttempts" : 60, - "operation" : "GetQueueFleetAssociation", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "STOPPED" - } ] - }, - "QueueLimitAssociationStopped" : { - "description" : "Wait until a QueueLimitAssociation is stopped. 
Use this after setting the status to STOP_LIMIT_USAGE_AND_COMPLETE_TASKS or STOP_LIMIT_USAGE_AND_CANCEL_TASKS to wait for a QueueLimitAssociation to reach STOPPED", - "delay" : 10, - "maxAttempts" : 60, - "operation" : "GetQueueLimitAssociation", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "STOPPED" - } ] - }, - "QueueScheduling" : { - "delay" : 10, - "maxAttempts" : 70, - "operation" : "GetQueue", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "SCHEDULING" - } ] - }, - "QueueSchedulingBlocked" : { - "delay" : 10, - "maxAttempts" : 30, - "operation" : "GetQueue", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "SCHEDULING_BLOCKED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/detective/2018-10-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index f0a4074..0000000 Binary files a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/examples-1.json b/venv/Lib/site-packages/botocore/data/detective/2018-10-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/detective/2018-10-26/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/detective/2018-10-26/service-2.json.gz deleted file mode 100644 index 8b2acd9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/detective/2018-10-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index d048994..0000000 Binary files a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/examples-1.json b/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/examples-1.json deleted file mode 100644 index 9db4e46..0000000 --- a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/examples-1.json +++ /dev/null @@ -1,1242 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateDevicePool": [ - { - "input": { - "name": "MyDevicePool", - "description": "My Android devices", - "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", - "rules": [ - - ] - }, - "output": { - "devicePool": { - } - }, - "comments": { - "input": { - "name": "A device pool contains related devices, such as devices that run only on Android or that run only on iOS.", - "projectArn": "You can get the project ARN by using the list-projects CLI command." 
- }, - "output": { - } - }, - "description": "The following example creates a new device pool named MyDevicePool inside an existing project.", - "id": "createdevicepool-example-1470862210860", - "title": "To create a new device pool" - } - ], - "CreateProject": [ - { - "input": { - "name": "MyProject" - }, - "output": { - "project": { - "name": "MyProject", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE", - "created": "1472660939.152" - } - }, - "comments": { - "input": { - "name": "A project in Device Farm is a workspace that contains test runs. A run is a test of a single app against one or more devices." - }, - "output": { - } - }, - "description": "The following example creates a new project named MyProject.", - "id": "createproject-example-1470862210860", - "title": "To create a new project" - } - ], - "CreateRemoteAccessSession": [ - { - "input": { - "name": "MySession", - "configuration": { - "billingMethod": "METERED" - }, - "deviceArn": "arn:aws:devicefarm:us-west-2::device:123EXAMPLE", - "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "output": { - "remoteAccessSession": { - } - }, - "comments": { - "input": { - "deviceArn": "You can get the device ARN by using the list-devices CLI command.", - "projectArn": "You can get the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example creates a remote access session named MySession.", - "id": "to-create-a-remote-access-session-1470970668274", - "title": "To create a remote access session" - } - ], - "CreateUpload": [ - { - "input": { - "name": "MyAppiumPythonUpload", - "type": "APPIUM_PYTHON_TEST_PACKAGE", - "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "output": { - "upload": { - "name": "MyAppiumPythonUpload", - "type": "APPIUM_PYTHON_TEST_PACKAGE", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:upload:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/b5340a65-3da7-4da6-a26e-12345EXAMPLE", - "created": "1472661404.186", - "status": "INITIALIZED", - "url": "https://prod-us-west-2-uploads.s3-us-west-2.amazonaws.com/arn%3Aaws%3Adevicefarm%3Aus-west-2%3A123456789101%3Aproject%3A5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE/uploads/arn%3Aaws%3Adevicefarm%3Aus-west-2%3A123456789101%3Aupload%3A5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/b5340a65-3da7-4da6-a26e-12345EXAMPLE/MyAppiumPythonUpload?AWSAccessKeyId=1234567891011EXAMPLE&Expires=1472747804&Signature=1234567891011EXAMPLE" - } - }, - "comments": { - "input": { - "projectArn": "You can get the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example creates a new Appium Python test package upload inside an existing project.", - "id": "createupload-example-1470864711775", - "title": "To create a new test package upload" - } - ], - "DeleteDevicePool": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2::devicepool:123-456-EXAMPLE-GUID" - }, - "output": { - }, - "comments": { - "input": { - "arn": "You can get the device pool ARN by using the list-device-pools CLI command." 
- }, - "output": { - } - }, - "description": "The following example deletes a specific device pool.", - "id": "deletedevicepool-example-1470866975494", - "title": "To delete a device pool" - } - ], - "DeleteProject": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "output": { - }, - "comments": { - "input": { - "arn": "You can get the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example deletes a specific project.", - "id": "deleteproject-example-1470867374212", - "title": "To delete a project" - } - ], - "DeleteRemoteAccessSession": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456" - }, - "output": { - }, - "comments": { - "input": { - "arn": "You can get the remote access session ARN by using the list-remote-access-sessions CLI command." - }, - "output": { - } - }, - "description": "The following example deletes a specific remote access session.", - "id": "to-delete-a-specific-remote-access-session-1470971431677", - "title": "To delete a specific remote access session" - } - ], - "DeleteRun": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:EXAMPLE-GUID-123-456" - }, - "output": { - }, - "comments": { - "input": { - "arn": "You can get the run ARN by using the list-runs CLI command." - }, - "output": { - } - }, - "description": "The following example deletes a specific test run.", - "id": "deleterun-example-1470867905129", - "title": "To delete a run" - } - ], - "DeleteUpload": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:upload:EXAMPLE-GUID-123-456" - }, - "output": { - }, - "comments": { - "input": { - "arn": "You can get the upload ARN by using the list-uploads CLI command." 
- }, - "output": { - } - }, - "description": "The following example deletes a specific upload.", - "id": "deleteupload-example-1470868363942", - "title": "To delete a specific upload" - } - ], - "GetAccountSettings": [ - { - "input": { - }, - "output": { - "accountSettings": { - "awsAccountNumber": "123456789101", - "unmeteredDevices": { - "ANDROID": 1, - "IOS": 2 - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns information about your Device Farm account settings.", - "id": "to-get-information-about-account-settings-1472567568189", - "title": "To get information about account settings" - } - ], - "GetDevice": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2::device:123EXAMPLE" - }, - "output": { - "device": { - "name": "LG G2 (Sprint)", - "arn": "arn:aws:devicefarm:us-west-2::device:A0E6E6E1059E45918208DF75B2B7EF6C", - "cpu": { - "architecture": "armeabi-v7a", - "clock": 2265.6, - "frequency": "MHz" - }, - "formFactor": "PHONE", - "heapSize": 256000000, - "image": "75B2B7EF6C12345EXAMPLE", - "manufacturer": "LG", - "memory": 16000000000, - "model": "G2 (Sprint)", - "os": "4.2.2", - "platform": "ANDROID", - "resolution": { - "height": 1920, - "width": 1080 - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns information about a specific device.", - "id": "getdevice-example-1470870602173", - "title": "To get information about a device" - } - ], - "GetDevicePool": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "output": { - "devicePool": { - } - }, - "comments": { - "input": { - "arn": "You can obtain the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example returns information about a specific device pool, given a project ARN.", - "id": "getdevicepool-example-1470870873136", - "title": "To get information about a device pool" - } - ], - "GetDevicePoolCompatibility": [ - { - "input": { - "appArn": "arn:aws:devicefarm:us-west-2::app:123-456-EXAMPLE-GUID", - "devicePoolArn": "arn:aws:devicefarm:us-west-2::devicepool:123-456-EXAMPLE-GUID", - "testType": "APPIUM_PYTHON" - }, - "output": { - "compatibleDevices": [ - - ], - "incompatibleDevices": [ - - ] - }, - "comments": { - "input": { - "devicePoolArn": "You can get the device pool ARN by using the list-device-pools CLI command." - }, - "output": { - } - }, - "description": "The following example returns information about the compatibility of a specific device pool, given its ARN.", - "id": "getdevicepoolcompatibility-example-1470925003466", - "title": "To get information about the compatibility of a device pool" - } - ], - "GetJob": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2::job:123-456-EXAMPLE-GUID" - }, - "output": { - "job": { - } - }, - "comments": { - "input": { - "arn": "You can get the job ARN by using the list-jobs CLI command." 
- }, - "output": { - } - }, - "description": "The following example returns information about a specific job.", - "id": "getjob-example-1470928294268", - "title": "To get information about a job" - } - ], - "GetOfferingStatus": [ - { - "input": { - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" - }, - "output": { - "current": { - "D68B3C05-1BA6-4360-BC69-12345EXAMPLE": { - "offering": { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 1 - } - }, - "nextPeriod": { - "D68B3C05-1BA6-4360-BC69-12345EXAMPLE": { - "effectiveOn": "1472688000", - "offering": { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 1 - } - } - }, - "comments": { - "input": { - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about Device Farm offerings available to your account.", - "id": "to-get-status-information-about-device-offerings-1472568124402", - "title": "To get status information about device offerings" - } - ], - "GetProject": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE" - }, - "output": { - "project": { - "name": "My Project", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE", - "created": "1472660939.152" - } - }, - "comments": { - "input": { - "arn": "You can get the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example gets information about a specific project.", - "id": "to-get-a-project-1470975038449", - "title": "To get information about a project" - } - ], - "GetRemoteAccessSession": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456" - }, - "output": { - "remoteAccessSession": { - } - }, - "comments": { - "input": { - "arn": "You can get the remote access session ARN by using the list-remote-access-sessions CLI command." - }, - "output": { - } - }, - "description": "The following example gets a specific remote access session.", - "id": "to-get-a-remote-access-session-1471014119414", - "title": "To get a remote access session" - } - ], - "GetRun": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE" - }, - "output": { - "run": { - "name": "My Test Run", - "type": "BUILTIN_EXPLORER", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE", - "billingMethod": "METERED", - "completedJobs": 0, - "counters": { - "errored": 0, - "failed": 0, - "passed": 0, - "skipped": 0, - "stopped": 0, - "total": 0, - "warned": 0 - }, - "created": "1472667509.852", - "deviceMinutes": { - "metered": 0.0, - "total": 0.0, - "unmetered": 0.0 - }, - "platform": "ANDROID", - "result": "PENDING", - "status": "RUNNING", - "totalJobs": 3 - } - }, - "comments": { - "input": { - "arn": "You can get the run ARN by using the list-runs CLI command." 
- }, - "output": { - } - }, - "description": "The following example gets information about a specific test run.", - "id": "to-get-a-test-run-1471015895657", - "title": "To get information about a test run" - } - ], - "GetSuite": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:suite:EXAMPLE-GUID-123-456" - }, - "output": { - "suite": { - } - }, - "comments": { - "input": { - "arn": "You can get the suite ARN by using the list-suites CLI command." - }, - "output": { - } - }, - "description": "The following example gets information about a specific test suite.", - "id": "to-get-information-about-a-test-suite-1471016525008", - "title": "To get information about a test suite" - } - ], - "GetTest": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:test:EXAMPLE-GUID-123-456" - }, - "output": { - "test": { - } - }, - "comments": { - "input": { - "arn": "You can get the test ARN by using the list-tests CLI command." - }, - "output": { - } - }, - "description": "The following example gets information about a specific test.", - "id": "to-get-information-about-a-specific-test-1471025744238", - "title": "To get information about a specific test" - } - ], - "GetUpload": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:upload:EXAMPLE-GUID-123-456" - }, - "output": { - "upload": { - } - }, - "comments": { - "input": { - "arn": "You can get the test ARN by using the list-uploads CLI command." - }, - "output": { - } - }, - "description": "The following example gets information about a specific upload.", - "id": "to-get-information-about-a-specific-upload-1471025996221", - "title": "To get information about a specific upload" - } - ], - "InstallToRemoteAccessSession": [ - { - "input": { - "appArn": "arn:aws:devicefarm:us-west-2:123456789101:app:EXAMPLE-GUID-123-456", - "remoteAccessSessionArn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456" - }, - "output": { - "appUpload": { - } - }, - "comments": { - "input": { - "remoteAccessSessionArn": "You can get the remote access session ARN by using the list-remote-access-sessions CLI command." - }, - "output": { - } - }, - "description": "The following example installs a specific app to a device in a specific remote access session.", - "id": "to-install-to-a-remote-access-session-1471634453818", - "title": "To install to a remote access session" - } - ], - "ListArtifacts": [ - { - "input": { - "type": "SCREENSHOT", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:EXAMPLE-GUID-123-456" - }, - "comments": { - "input": { - "arn": "Can also be used to list artifacts for a Job, Suite, or Test ARN." 
- }, - "output": { - } - }, - "description": "The following example lists screenshot artifacts for a specific run.", - "id": "to-list-artifacts-for-a-resource-1471635409527", - "title": "To list artifacts for a resource" - } - ], - "ListDevicePools": [ - { - "input": { - "type": "PRIVATE", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "output": { - "devicePools": [ - { - "name": "Top Devices", - "arn": "arn:aws:devicefarm:us-west-2::devicepool:082d10e5-d7d7-48a5-ba5c-12345EXAMPLE", - "description": "Top devices", - "rules": [ - { - "value": "[\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\"]", - "attribute": "ARN", - "operator": "IN" - } - ] - }, - { - "name": "My Android Device Pool", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:devicepool:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/bf96e75a-28f6-4e61-b6a7-12345EXAMPLE", - "description": "Samsung Galaxy Android devices", - "rules": [ - { - "value": "[\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\"]", - "attribute": "ARN", - "operator": "IN" - } - ] - } - ] - }, - "comments": { - "input": { - "arn": "You can get the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example returns information about the private device pools in a specific project.", - "id": "to-get-information-about-device-pools-1471635745170", - "title": "To get information about device pools" - } - ], - "ListDevices": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "output": { - }, - "comments": { - "input": { - "arn": "You can get the project ARN by using the list-projects CLI command." - }, - "output": { - } - }, - "description": "The following example returns information about the available devices in a specific project.", - "id": "to-get-information-about-devices-1471641699344", - "title": "To get information about devices" - } - ], - "ListJobs": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" - }, - "comments": { - "input": { - "arn": "You can get the project ARN by using the list-jobs CLI command." 
- }, - "output": { - } - }, - "description": "The following example returns information about jobs in a specific project.", - "id": "to-get-information-about-jobs-1471642228071", - "title": "To get information about jobs" - } - ], - "ListOfferingTransactions": [ - { - "input": { - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" - }, - "output": { - "offeringTransactions": [ - { - "cost": { - "amount": 0, - "currencyCode": "USD" - }, - "createdOn": "1470021420", - "offeringStatus": { - "type": "RENEW", - "effectiveOn": "1472688000", - "offering": { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 0 - }, - "transactionId": "03728003-d1ea-4851-abd6-12345EXAMPLE" - }, - { - "cost": { - "amount": 250, - "currencyCode": "USD" - }, - "createdOn": "1470021420", - "offeringStatus": { - "type": "PURCHASE", - "effectiveOn": "1470021420", - "offering": { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 1 - }, - "transactionId": "56820b6e-06bd-473a-8ff8-12345EXAMPLE" - }, - { - "cost": { - "amount": 175, - "currencyCode": "USD" - }, - "createdOn": "1465538520", - "offeringStatus": { - "type": "PURCHASE", - "effectiveOn": "1465538520", - "offering": { - "type": "RECURRING", - "description": "Android Unmetered Device Slot", - "id": "8980F81C-00D7-469D-8EC6-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 1 - }, - "transactionId": "953ae2c6-d760-4a04-9597-12345EXAMPLE" - }, - { - "cost": { - "amount": 8.07, - "currencyCode": "USD" - }, - "createdOn": "1459344300", - "offeringStatus": { - "type": "PURCHASE", - "effectiveOn": "1459344300", - "offering": { - "type": "RECURRING", - "description": "iOS Unmetered Device Slot", - "id": "A53D4D73-A6F6-4B82-A0B0-12345EXAMPLE", - "platform": "IOS" - }, - "quantity": 1 - }, - "transactionId": "2baf9021-ae3e-47f5-ab52-12345EXAMPLE" - } - ] - }, - "comments": { - "input": { - "nextToken": "A dynamically generated value, used for paginating results." 
- }, - "output": { - } - }, - "description": "The following example returns information about Device Farm offering transactions.", - "id": "to-get-information-about-device-offering-transactions-1472561712315", - "title": "To get information about device offering transactions" - } - ], - "ListOfferings": [ - { - "input": { - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" - }, - "output": { - "offerings": [ - { - "type": "RECURRING", - "description": "iOS Unmetered Device Slot", - "id": "A53D4D73-A6F6-4B82-A0B0-12345EXAMPLE", - "platform": "IOS", - "recurringCharges": [ - { - "cost": { - "amount": 250, - "currencyCode": "USD" - }, - "frequency": "MONTHLY" - } - ] - }, - { - "type": "RECURRING", - "description": "Android Unmetered Device Slot", - "id": "8980F81C-00D7-469D-8EC6-12345EXAMPLE", - "platform": "ANDROID", - "recurringCharges": [ - { - "cost": { - "amount": 250, - "currencyCode": "USD" - }, - "frequency": "MONTHLY" - } - ] - }, - { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID", - "recurringCharges": [ - { - "cost": { - "amount": 250, - "currencyCode": "USD" - }, - "frequency": "MONTHLY" - } - ] - }, - { - "type": "RECURRING", - "description": "iOS Remote Access Unmetered Device Slot", - "id": "552B4DAD-A6C9-45C4-94FB-12345EXAMPLE", - "platform": "IOS", - "recurringCharges": [ - { - "cost": { - "amount": 250, - "currencyCode": "USD" - }, - "frequency": "MONTHLY" - } - ] - } - ] - }, - "comments": { - "input": { - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about available device offerings.", - "id": "to-get-information-about-device-offerings-1472562810999", - "title": "To get information about device offerings" - } - ], - "ListProjects": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:7ad300ed-8183-41a7-bf94-12345EXAMPLE", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "projects": [ - { - "name": "My Test Project", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:7ad300ed-8183-41a7-bf94-12345EXAMPLE", - "created": "1453163262.105" - }, - { - "name": "Hello World", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:d6b087d9-56db-4e44-b9ec-12345EXAMPLE", - "created": "1470350112.439" - } - ] - }, - "comments": { - "input": { - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about the specified project in Device Farm.", - "id": "to-get-information-about-a-device-farm-project-1472564014388", - "title": "To get information about a Device Farm project" - } - ], - "ListRemoteAccessSessions": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" - }, - "output": { - "remoteAccessSessions": [ - - ] - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the session by using the list-sessions CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." 
- }, - "output": { - } - }, - "description": "The following example returns information about a specific Device Farm remote access session.", - "id": "to-get-information-about-a-remote-access-session-1472581144803", - "title": "To get information about a remote access session" - } - ], - "ListRuns": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "runs": [ - { - "name": "My Test Run", - "type": "BUILTIN_EXPLORER", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE", - "billingMethod": "METERED", - "completedJobs": 0, - "counters": { - "errored": 0, - "failed": 0, - "passed": 0, - "skipped": 0, - "stopped": 0, - "total": 0, - "warned": 0 - }, - "created": "1472667509.852", - "deviceMinutes": { - "metered": 0.0, - "total": 0.0, - "unmetered": 0.0 - }, - "platform": "ANDROID", - "result": "PENDING", - "status": "RUNNING", - "totalJobs": 3 - } - ] - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the run by using the list-runs CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about a specific test run.", - "id": "to-get-information-about-test-runs-1472582711069", - "title": "To get information about a test run" - } - ], - "ListSamples": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "samples": [ - - ] - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about samples, given a specific Device Farm project.", - "id": "to-get-information-about-samples-1472582847534", - "title": "To get information about samples" - } - ], - "ListSuites": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:job:EXAMPLE-GUID-123-456", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "suites": [ - - ] - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the job by using the list-jobs CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about suites, given a specific Device Farm job.", - "id": "to-get-information-about-suites-1472583038218", - "title": "To get information about suites" - } - ], - "ListTests": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "tests": [ - - ] - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." 
- }, - "output": { - } - }, - "description": "The following example returns information about tests, given a specific Device Farm project.", - "id": "to-get-information-about-tests-1472617372212", - "title": "To get information about tests" - } - ], - "ListUniqueProblems": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "uniqueProblems": { - } - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about unique problems, given a specific Device Farm project.", - "id": "to-get-information-about-unique-problems-1472617781008", - "title": "To get information about unique problems" - } - ], - "ListUploads": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", - "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" - }, - "output": { - "uploads": [ - - ] - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", - "nextToken": "A dynamically generated value, used for paginating results." - }, - "output": { - } - }, - "description": "The following example returns information about uploads, given a specific Device Farm project.", - "id": "to-get-information-about-uploads-1472617943090", - "title": "To get information about uploads" - } - ], - "PurchaseOffering": [ - { - "input": { - "offeringId": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "quantity": 1 - }, - "output": { - "offeringTransaction": { - "cost": { - "amount": 8.07, - "currencyCode": "USD" - }, - "createdOn": "1472648340", - "offeringStatus": { - "type": "PURCHASE", - "effectiveOn": "1472648340", - "offering": { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 1 - }, - "transactionId": "d30614ed-1b03-404c-9893-12345EXAMPLE" - } - }, - "comments": { - "input": { - "offeringId": "You can get the offering ID by using the list-offerings CLI command." - }, - "output": { - } - }, - "description": "The following example purchases a specific device slot offering.", - "id": "to-purchase-a-device-slot-offering-1472648146343", - "title": "To purchase a device slot offering" - } - ], - "RenewOffering": [ - { - "input": { - "offeringId": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "quantity": 1 - }, - "output": { - "offeringTransaction": { - "cost": { - "amount": 250, - "currencyCode": "USD" - }, - "createdOn": "1472648880", - "offeringStatus": { - "type": "RENEW", - "effectiveOn": "1472688000", - "offering": { - "type": "RECURRING", - "description": "Android Remote Access Unmetered Device Slot", - "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", - "platform": "ANDROID" - }, - "quantity": 1 - }, - "transactionId": "e90f1405-8c35-4561-be43-12345EXAMPLE" - } - }, - "comments": { - "input": { - "offeringId": "You can get the offering ID by using the list-offerings CLI command." 
- }, - "output": { - } - }, - "description": "The following example renews a specific device slot offering.", - "id": "to-renew-a-device-slot-offering-1472648899785", - "title": "To renew a device slot offering" - } - ], - "ScheduleRun": [ - { - "input": { - "name": "MyRun", - "devicePoolArn": "arn:aws:devicefarm:us-west-2:123456789101:pool:EXAMPLE-GUID-123-456", - "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", - "test": { - "type": "APPIUM_JAVA_JUNIT", - "testPackageArn": "arn:aws:devicefarm:us-west-2:123456789101:test:EXAMPLE-GUID-123-456" - } - }, - "output": { - "run": { - } - }, - "comments": { - "input": { - "devicePoolArn": "You can get the Amazon Resource Name (ARN) of the device pool by using the list-pools CLI command.", - "projectArn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", - "testPackageArn": "You can get the Amazon Resource Name (ARN) of the test package by using the list-tests CLI command." - }, - "output": { - } - }, - "description": "The following example schedules a test run named MyRun.", - "id": "to-schedule-a-test-run-1472652429636", - "title": "To schedule a test run" - } - ], - "StopRun": [ - { - "input": { - "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:EXAMPLE-GUID-123-456" - }, - "output": { - "run": { - } - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the test run by using the list-runs CLI command." - }, - "output": { - } - }, - "description": "The following example stops a specific test run.", - "id": "to-stop-a-test-run-1472653770340", - "title": "To stop a test run" - } - ], - "UpdateDevicePool": [ - { - "input": { - "name": "NewName", - "arn": "arn:aws:devicefarm:us-west-2::devicepool:082d10e5-d7d7-48a5-ba5c-12345EXAMPLE", - "description": "NewDescription", - "rules": [ - { - "value": "True", - "attribute": "REMOTE_ACCESS_ENABLED", - "operator": "EQUALS" - } - ] - }, - "output": { - "devicePool": { - } - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the device pool by using the list-pools CLI command." - }, - "output": { - "devicePool": "Note: you cannot update curated device pools." - } - }, - "description": "The following example updates the specified device pool with a new name and description. It also enables remote access of devices in the device pool.", - "id": "to-update-a-device-pool-1472653887677", - "title": "To update a device pool" - } - ], - "UpdateProject": [ - { - "input": { - "name": "NewName", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:8f75187d-101e-4625-accc-12345EXAMPLE" - }, - "output": { - "project": { - "name": "NewName", - "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:8f75187d-101e-4625-accc-12345EXAMPLE", - "created": "1448400709.927" - } - }, - "comments": { - "input": { - "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command." 
- }, - "output": { - } - }, - "description": "The following example updates the specified project with a new name.", - "id": "to-update-a-device-pool-1472653887677", - "title": "To update a device pool" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/paginators-1.json deleted file mode 100644 index 982e07f..0000000 --- a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/paginators-1.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "pagination": { - "ListArtifacts": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "artifacts" - }, - "ListDevicePools": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "devicePools" - }, - "ListDevices": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "devices" - }, - "ListJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "jobs" - }, - "ListProjects": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "projects" - }, - "ListRuns": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "runs" - }, - "ListSamples": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "samples" - }, - "ListSuites": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "suites" - }, - "ListTests": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "tests" - }, - "ListUniqueProblems": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "uniqueProblems" - }, - "ListUploads": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "uploads" - }, - "GetOfferingStatus": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": [ - "current", - "nextPeriod" - ] - }, - "ListOfferingTransactions": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "offeringTransactions" - }, - "ListOfferings": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "offerings" - }, - "ListDeviceInstances": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "deviceInstances" - }, - "ListInstanceProfiles": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "instanceProfiles" - }, - "ListNetworkProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "networkProfiles" - }, - "ListOfferingPromotions": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "offeringPromotions" - }, - "ListRemoteAccessSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "remoteAccessSessions" - }, - "ListVPCEConfigurations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "vpceConfigurations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/service-2.json.gz deleted file mode 100644 index c14a095..0000000 Binary files a/venv/Lib/site-packages/botocore/data/devicefarm/2015-06-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6fc92dc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/examples-1.json b/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/paginators-1.json deleted file mode 100644 index d0d5871..0000000 --- a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/paginators-1.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "pagination": { - "DescribeResourceCollectionHealth": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": [ - "CloudFormation", - "Service", - "Tags" - ] - }, - "GetResourceCollection": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": [ - "ResourceCollection.CloudFormation.StackNames", - "ResourceCollection.Tags" - ], - "non_aggregate_keys": [ - "ResourceCollection" - ] - }, - "ListAnomaliesForInsight": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": [ - "ReactiveAnomalies", - "ProactiveAnomalies" - ] - }, - "ListEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Events" - }, - "ListInsights": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": [ - "ProactiveInsights", - "ReactiveInsights" - ] - }, - "ListNotificationChannels": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Channels" - }, - "ListRecommendations": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Recommendations" - }, - "SearchInsights": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": [ - "ProactiveInsights", - "ReactiveInsights" - ] - }, - "GetCostEstimation": { - "input_token": "NextToken", - "non_aggregate_keys": [ - "Status", - "TotalCost", - "TimeRange", - "ResourceCollection" - ], - "output_token": "NextToken", - "result_key": [ - "Costs" - ] - }, - "DescribeOrganizationResourceCollectionHealth": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": [ - "CloudFormation", - "Account", - "Service", - "Tags" - ] - }, - "ListOrganizationInsights": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": [ - "ProactiveInsights", - "ReactiveInsights" - ] - }, - "SearchOrganizationInsights": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": [ - "ProactiveInsights", - "ReactiveInsights" - ] - }, - "ListAnomalousLogGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": [ - "InsightId", - "AnomalousLogGroups" - ] - }, - "ListMonitoredResources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": [ - "MonitoredResourceIdentifiers" - ] - } - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/service-2.json.gz deleted file mode 100644 index c40c0d7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/devops-guru/2020-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index b8dabf2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/examples-1.json b/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/paginators-1.json deleted file mode 100644 index dbca668..0000000 --- a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "DescribeDirectConnectGatewayAssociations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "directConnectGatewayAssociations" - }, - "DescribeDirectConnectGatewayAttachments": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "directConnectGatewayAttachments" - }, - "DescribeDirectConnectGateways": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "directConnectGateways" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/service-2.json.gz deleted file mode 100644 index bb1df10..0000000 Binary files a/venv/Lib/site-packages/botocore/data/directconnect/2012-10-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2046732..0000000 Binary files a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/paginators-1.json deleted file mode 100644 index 5567a77..0000000 --- a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "DescribeAgents": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "agentsInfo" - }, - 
"DescribeContinuousExports": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "descriptions" - }, - "DescribeExportConfigurations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "exportsInfo" - }, - "DescribeExportTasks": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "exportsInfo" - }, - "DescribeTags": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "tags" - }, - "ListConfigurations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "configurations" - }, - "DescribeImportTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/service-2.json.gz deleted file mode 100644 index e74cf3a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/discovery/2015-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index bda1df7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/examples-1.json b/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/service-2.json.gz deleted file mode 100644 index d71c958..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dlm/2018-01-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dms/2016-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3619094..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/examples-1.json b/venv/Lib/site-packages/botocore/data/dms/2016-01-01/examples-1.json deleted file mode 100644 index f9e8c4e..0000000 --- a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/examples-1.json +++ /dev/null @@ -1,1074 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddTagsToResource": [ - { - "input": { - "ResourceArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E", - "Tags": [ - { - "Key": "Acount", - "Value": "1633456" - } - ] - }, - "output": { - }, - "comments": { - "input": { - "ResourceArn": "Required. 
Use the ARN of the resource you want to tag.", - "Tags": "Required. Use the Key/Value pair format." - }, - "output": { - } - }, - "description": "Adds metadata tags to an AWS DMS resource, including replication instance, endpoint, security group, and migration task. These tags can also be used with cost allocation reporting to track cost associated with AWS DMS resources, or used in a Condition statement in an IAM policy for AWS DMS.", - "id": "add-tags-to-resource-1481744141435", - "title": "Add tags to resource" - } - ], - "CreateEndpoint": [ - { - "input": { - "CertificateArn": "", - "DatabaseName": "testdb", - "EndpointIdentifier": "test-endpoint-1", - "EndpointType": "source", - "EngineName": "mysql", - "ExtraConnectionAttributes": "", - "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/4c1731d6-5435-ed4d-be13-d53411a7cfbd", - "Password": "pasword", - "Port": 3306, - "ServerName": "mydb.cx1llnox7iyx.us-west-2.rds.amazonaws.com", - "SslMode": "require", - "Tags": [ - { - "Key": "Acount", - "Value": "143327655" - } - ], - "Username": "username" - }, - "output": { - "Endpoint": { - "EndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:RAAR3R22XSH46S3PWLC3NJAWKM", - "EndpointIdentifier": "test-endpoint-1", - "EndpointType": "source", - "EngineName": "mysql", - "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/4c1731d6-5435-ed4d-be13-d53411a7cfbd", - "Port": 3306, - "ServerName": "mydb.cx1llnox7iyx.us-west-2.rds.amazonaws.com", - "Status": "active", - "Username": "username" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates an endpoint using the provided settings.", - "id": "create-endpoint-1481746254348", - "title": "Create endpoint" - } - ], - "CreateReplicationInstance": [ - { - "input": { - "AllocatedStorage": 123, - "AutoMinorVersionUpgrade": true, - "AvailabilityZone": "", - "EngineVersion": "", - "KmsKeyId": "", - "MultiAZ": true, - "PreferredMaintenanceWindow": "", - "PubliclyAccessible": true, - "ReplicationInstanceClass": "", - "ReplicationInstanceIdentifier": "", - "ReplicationSubnetGroupIdentifier": "", - "Tags": [ - { - "Key": "string", - "Value": "string" - } - ], - "VpcSecurityGroupIds": [ - - ] - }, - "output": { - "ReplicationInstance": { - "AllocatedStorage": 5, - "AutoMinorVersionUpgrade": true, - "EngineVersion": "1.5.0", - "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/4c1731d6-5435-ed4d-be13-d53411a7cfbd", - "PendingModifiedValues": { - }, - "PreferredMaintenanceWindow": "sun:06:00-sun:14:00", - "PubliclyAccessible": true, - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationInstanceClass": "dms.t2.micro", - "ReplicationInstanceIdentifier": "test-rep-1", - "ReplicationInstanceStatus": "creating", - "ReplicationSubnetGroup": { - "ReplicationSubnetGroupDescription": "default", - "ReplicationSubnetGroupIdentifier": "default", - "SubnetGroupStatus": "Complete", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-east-1d" - }, - "SubnetIdentifier": "subnet-f6dd91af", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1b" - }, - "SubnetIdentifier": "subnet-3605751d", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-c2daefb5", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1e" - }, - "SubnetIdentifier": "subnet-85e90cb8", - "SubnetStatus": "Active" - } - ], - "VpcId": "vpc-6741a603" - } - } - }, - 
"comments": { - "output": { - } - }, - "description": "Creates the replication instance using the specified parameters.", - "id": "create-replication-instance-1481746705295", - "title": "Create replication instance" - } - ], - "CreateReplicationSubnetGroup": [ - { - "input": { - "ReplicationSubnetGroupDescription": "US West subnet group", - "ReplicationSubnetGroupIdentifier": "us-west-2ab-vpc-215ds366", - "SubnetIds": [ - "subnet-e145356n", - "subnet-58f79200" - ], - "Tags": [ - { - "Key": "Acount", - "Value": "145235" - } - ] - }, - "output": { - "ReplicationSubnetGroup": { - } - }, - "comments": { - "output": { - } - }, - "description": "Creates a replication subnet group given a list of the subnet IDs in a VPC.", - "id": "create-replication-subnet-group-1481747297930", - "title": "Create replication subnet group" - } - ], - "CreateReplicationTask": [ - { - "input": { - "CdcStartTime": "2016-12-14T18:25:43Z", - "MigrationType": "full-load", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationTaskIdentifier": "task1", - "ReplicationTaskSettings": "", - "SourceEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ZW5UAN6P4E77EC7YWHK4RZZ3BE", - "TableMappings": "file://mappingfile.json", - "Tags": [ - { - "Key": "Acount", - "Value": "24352226" - } - ], - "TargetEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E" - }, - "output": { - "ReplicationTask": { - "MigrationType": "full-load", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:task:OEAMB3NXSTZ6LFYZFEPPBBXPYM", - "ReplicationTaskCreationDate": "2016-12-14T18:25:43Z", - "ReplicationTaskIdentifier": "task1", - "ReplicationTaskSettings": "{\"TargetMetadata\":{\"TargetSchema\":\"\",\"SupportLobs\":true,\"FullLobMode\":true,\"LobChunkSize\":64,\"LimitedSizeLobMode\":false,\"LobMaxSize\":0},\"FullLoadSettings\":{\"FullLoadEnabled\":true,\"ApplyChangesEnabled\":false,\"TargetTablePrepMode\":\"DROP_AND_CREATE\",\"CreatePkAfterFullLoad\":false,\"StopTaskCachedChangesApplied\":false,\"StopTaskCachedChangesNotApplied\":false,\"ResumeEnabled\":false,\"ResumeMinTableSize\":100000,\"ResumeOnlyClusteredPKTables\":true,\"MaxFullLoadSubTasks\":8,\"TransactionConsistencyTimeout\":600,\"CommitRate\":10000},\"Logging\":{\"EnableLogging\":false}}", - "SourceEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ZW5UAN6P4E77EC7YWHK4RZZ3BE", - "Status": "creating", - "TableMappings": "file://mappingfile.json", - "TargetEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a replication task using the specified parameters.", - "id": "create-replication-task-1481747646288", - "title": "Create replication task" - } - ], - "DeleteCertificate": [ - { - "input": { - "CertificateArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUSM457DE6XFJCJQ" - }, - "output": { - "Certificate": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the specified certificate.", - "id": "delete-certificate-1481751957981", - "title": "Delete Certificate" - } - ], - "DeleteConnection": [ - { - "input": { - "EndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:RAAR3R22XSH46S3PWLC3NJAWKM", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ" - }, - "output": { - 
"Connection": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the connection between the replication instance and the endpoint.", - "id": "delete-connection-1481751957981", - "title": "Delete Connection" - } - ], - "DeleteEndpoint": [ - { - "input": { - "EndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:RAAR3R22XSH46S3PWLC3NJAWKM" - }, - "output": { - "Endpoint": { - "EndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:RAAR3R22XSH46S3PWLC3NJAWKM", - "EndpointIdentifier": "test-endpoint-1", - "EndpointType": "source", - "EngineName": "mysql", - "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/4c1731d6-5435-ed4d-be13-d53411a7cfbd", - "Port": 3306, - "ServerName": "mydb.cx1llnox7iyx.us-west-2.rds.amazonaws.com", - "Status": "active", - "Username": "username" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the specified endpoint. All tasks associated with the endpoint must be deleted before you can delete the endpoint.\n", - "id": "delete-endpoint-1481752425530", - "title": "Delete Endpoint" - } - ], - "DeleteReplicationInstance": [ - { - "input": { - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ" - }, - "output": { - "ReplicationInstance": { - "AllocatedStorage": 5, - "AutoMinorVersionUpgrade": true, - "EngineVersion": "1.5.0", - "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/4c1731d6-5435-ed4d-be13-d53411a7cfbd", - "PendingModifiedValues": { - }, - "PreferredMaintenanceWindow": "sun:06:00-sun:14:00", - "PubliclyAccessible": true, - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationInstanceClass": "dms.t2.micro", - "ReplicationInstanceIdentifier": "test-rep-1", - "ReplicationInstanceStatus": "creating", - "ReplicationSubnetGroup": { - "ReplicationSubnetGroupDescription": "default", - "ReplicationSubnetGroupIdentifier": "default", - "SubnetGroupStatus": "Complete", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-east-1d" - }, - "SubnetIdentifier": "subnet-f6dd91af", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1b" - }, - "SubnetIdentifier": "subnet-3605751d", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-c2daefb5", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1e" - }, - "SubnetIdentifier": "subnet-85e90cb8", - "SubnetStatus": "Active" - } - ], - "VpcId": "vpc-6741a603" - } - } - }, - "comments": { - "output": { - } - }, - "description": "Deletes the specified replication instance. 
You must delete any migration tasks that are associated with the replication instance before you can delete it.\n\n", - "id": "delete-replication-instance-1481752552839", - "title": "Delete Replication Instance" - } - ], - "DeleteReplicationSubnetGroup": [ - { - "input": { - "ReplicationSubnetGroupIdentifier": "us-west-2ab-vpc-215ds366" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes a replication subnet group.", - "id": "delete-replication-subnet-group-1481752728597", - "title": "Delete Replication Subnet Group" - } - ], - "DeleteReplicationTask": [ - { - "input": { - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ" - }, - "output": { - "ReplicationTask": { - "MigrationType": "full-load", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:task:OEAMB3NXSTZ6LFYZFEPPBBXPYM", - "ReplicationTaskCreationDate": "2016-12-14T18:25:43Z", - "ReplicationTaskIdentifier": "task1", - "ReplicationTaskSettings": "{\"TargetMetadata\":{\"TargetSchema\":\"\",\"SupportLobs\":true,\"FullLobMode\":true,\"LobChunkSize\":64,\"LimitedSizeLobMode\":false,\"LobMaxSize\":0},\"FullLoadSettings\":{\"FullLoadEnabled\":true,\"ApplyChangesEnabled\":false,\"TargetTablePrepMode\":\"DROP_AND_CREATE\",\"CreatePkAfterFullLoad\":false,\"StopTaskCachedChangesApplied\":false,\"StopTaskCachedChangesNotApplied\":false,\"ResumeEnabled\":false,\"ResumeMinTableSize\":100000,\"ResumeOnlyClusteredPKTables\":true,\"MaxFullLoadSubTasks\":8,\"TransactionConsistencyTimeout\":600,\"CommitRate\":10000},\"Logging\":{\"EnableLogging\":false}}", - "SourceEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ZW5UAN6P4E77EC7YWHK4RZZ3BE", - "Status": "creating", - "TableMappings": "file://mappingfile.json", - "TargetEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the specified replication task.", - "id": "delete-replication-task-1481752903506", - "title": "Delete Replication Task" - } - ], - "DescribeAccountAttributes": [ - { - "input": { - }, - "output": { - "AccountQuotas": [ - { - "AccountQuotaName": "ReplicationInstances", - "Max": 20, - "Used": 0 - }, - { - "AccountQuotaName": "AllocatedStorage", - "Max": 20, - "Used": 0 - }, - { - "AccountQuotaName": "Endpoints", - "Max": 20, - "Used": 0 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all of the AWS DMS attributes for a customer account. The attributes include AWS DMS quotas for the account, such as the number of replication instances allowed. The description for a quota includes the quota name, current usage toward that quota, and the quota's maximum value. 
This operation does not take any parameters.", - "id": "describe-acount-attributes-1481753085663", - "title": "Describe acount attributes" - } - ], - "DescribeCertificates": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Certificates": [ - - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Provides a description of the certificate.", - "id": "describe-certificates-1481753186244", - "title": "Describe certificates" - } - ], - "DescribeConnections": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Connections": [ - { - "EndpointArn": "arn:aws:dms:us-east-arn:aws:dms:us-east-1:123456789012:endpoint:ZW5UAN6P4E77EC7YWHK4RZZ3BE", - "EndpointIdentifier": "testsrc1", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationInstanceIdentifier": "test", - "Status": "successful" - } - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the status of the connections that have been made between the replication instance and an endpoint. Connections are created when you test an endpoint.", - "id": "describe-connections-1481754477953", - "title": "Describe connections" - } - ], - "DescribeEndpointTypes": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Marker": "", - "SupportedEndpointTypes": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the type of endpoints available.", - "id": "describe-endpoint-types-1481754742591", - "title": "Describe endpoint types" - } - ], - "DescribeEndpoints": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Endpoints": [ - - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the endpoints for your account in the current region.", - "id": "describe-endpoints-1481754926060", - "title": "Describe endpoints" - } - ], - "DescribeOrderableReplicationInstances": [ - { - "input": { - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Marker": "", - "OrderableReplicationInstances": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the replication instance types that can be created in the specified region.", - "id": "describe-orderable-replication-instances-1481755123669", - "title": "Describe orderable replication instances" - } - ], - "DescribeRefreshSchemasStatus": [ - { - "input": { - "EndpointArn": "" - }, - "output": { - "RefreshSchemasStatus": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns the status of the refresh-schemas operation.", - "id": "describe-refresh-schema-status-1481755303497", - "title": "Describe refresh schema status" - } - ], - "DescribeReplicationInstances": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Marker": "", - "ReplicationInstances": [ - - ] - }, - "comments": { - 
"input": { - }, - "output": { - } - }, - "description": "Returns the status of the refresh-schemas operation.", - "id": "describe-replication-instances-1481755443952", - "title": "Describe replication instances" - } - ], - "DescribeReplicationSubnetGroups": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Marker": "", - "ReplicationSubnetGroups": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the replication subnet groups.", - "id": "describe-replication-subnet-groups-1481755621284", - "title": "Describe replication subnet groups" - } - ], - "DescribeReplicationTasks": [ - { - "input": { - "Filters": [ - { - "Name": "string", - "Values": [ - "string", - "string" - ] - } - ], - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Marker": "", - "ReplicationTasks": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about replication tasks for your account in the current region.", - "id": "describe-replication-tasks-1481755777563", - "title": "Describe replication tasks" - } - ], - "DescribeSchemas": [ - { - "input": { - "EndpointArn": "", - "Marker": "", - "MaxRecords": 123 - }, - "output": { - "Marker": "", - "Schemas": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the schema for the specified endpoint.", - "id": "describe-schemas-1481755933924", - "title": "Describe schemas" - } - ], - "DescribeTableStatistics": [ - { - "input": { - "Marker": "", - "MaxRecords": 123, - "ReplicationTaskArn": "" - }, - "output": { - "Marker": "", - "ReplicationTaskArn": "", - "TableStatistics": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns table statistics on the database migration task, including table name, rows inserted, rows updated, and rows deleted.", - "id": "describe-table-statistics-1481756071890", - "title": "Describe table statistics" - } - ], - "ImportCertificate": [ - { - "input": { - "CertificateIdentifier": "", - "CertificatePem": "" - }, - "output": { - "Certificate": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Uploads the specified certificate.", - "id": "import-certificate-1481756197206", - "title": "Import certificate" - } - ], - "ListTagsForResource": [ - { - "input": { - "ResourceArn": "" - }, - "output": { - "TagList": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all tags for an AWS DMS resource.", - "id": "list-tags-for-resource-1481761095501", - "title": "List tags for resource" - } - ], - "ModifyEndpoint": [ - { - "input": { - "CertificateArn": "", - "DatabaseName": "", - "EndpointArn": "", - "EndpointIdentifier": "", - "EndpointType": "source", - "EngineName": "", - "ExtraConnectionAttributes": "", - "Password": "", - "Port": 123, - "ServerName": "", - "SslMode": "require", - "Username": "" - }, - "output": { - "Endpoint": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Modifies the specified endpoint.", - "id": "modify-endpoint-1481761649937", - "title": "Modify endpoint" - } - ], - "ModifyReplicationInstance": [ - { - "input": { - "AllocatedStorage": 123, - "AllowMajorVersionUpgrade": true, - "ApplyImmediately": true, - "AutoMinorVersionUpgrade": true, - "EngineVersion": "1.5.0", - "MultiAZ": true, 
- "PreferredMaintenanceWindow": "sun:06:00-sun:14:00", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationInstanceClass": "dms.t2.micro", - "ReplicationInstanceIdentifier": "test-rep-1", - "VpcSecurityGroupIds": [ - - ] - }, - "output": { - "ReplicationInstance": { - "AllocatedStorage": 5, - "AutoMinorVersionUpgrade": true, - "EngineVersion": "1.5.0", - "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/4c1731d6-5435-ed4d-be13-d53411a7cfbd", - "PendingModifiedValues": { - }, - "PreferredMaintenanceWindow": "sun:06:00-sun:14:00", - "PubliclyAccessible": true, - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationInstanceClass": "dms.t2.micro", - "ReplicationInstanceIdentifier": "test-rep-1", - "ReplicationInstanceStatus": "available", - "ReplicationSubnetGroup": { - "ReplicationSubnetGroupDescription": "default", - "ReplicationSubnetGroupIdentifier": "default", - "SubnetGroupStatus": "Complete", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-east-1d" - }, - "SubnetIdentifier": "subnet-f6dd91af", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1b" - }, - "SubnetIdentifier": "subnet-3605751d", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-c2daefb5", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1e" - }, - "SubnetIdentifier": "subnet-85e90cb8", - "SubnetStatus": "Active" - } - ], - "VpcId": "vpc-6741a603" - } - } - }, - "comments": { - "output": { - } - }, - "description": "Modifies the replication instance to apply new settings. You can change one or more parameters by specifying these parameters and the new values in the request. Some settings are applied during the maintenance window.", - "id": "modify-replication-instance-1481761784746", - "title": "Modify replication instance" - } - ], - "ModifyReplicationSubnetGroup": [ - { - "input": { - "ReplicationSubnetGroupDescription": "", - "ReplicationSubnetGroupIdentifier": "", - "SubnetIds": [ - - ] - }, - "output": { - "ReplicationSubnetGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Modifies the settings for the specified replication subnet group.", - "id": "modify-replication-subnet-group-1481762275392", - "title": "Modify replication subnet group" - } - ], - "RefreshSchemas": [ - { - "input": { - "EndpointArn": "", - "ReplicationInstanceArn": "" - }, - "output": { - "RefreshSchemasStatus": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Populates the schema for the specified endpoint. This is an asynchronous operation and can take several minutes. 
You can check the status of this operation by calling the describe-refresh-schemas-status operation.", - "id": "refresh-schema-1481762399111", - "title": "Refresh schema" - } - ], - "RemoveTagsFromResource": [ - { - "input": { - "ResourceArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E", - "TagKeys": [ - - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Removes metadata tags from an AWS DMS resource.", - "id": "remove-tags-from-resource-1481762571330", - "title": "Remove tags from resource" - } - ], - "StartReplicationTask": [ - { - "input": { - "CdcStartTime": "2016-12-14T13:33:20Z", - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "StartReplicationTaskType": "start-replication" - }, - "output": { - "ReplicationTask": { - "MigrationType": "full-load", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:task:OEAMB3NXSTZ6LFYZFEPPBBXPYM", - "ReplicationTaskCreationDate": "2016-12-14T18:25:43Z", - "ReplicationTaskIdentifier": "task1", - "ReplicationTaskSettings": "{\"TargetMetadata\":{\"TargetSchema\":\"\",\"SupportLobs\":true,\"FullLobMode\":true,\"LobChunkSize\":64,\"LimitedSizeLobMode\":false,\"LobMaxSize\":0},\"FullLoadSettings\":{\"FullLoadEnabled\":true,\"ApplyChangesEnabled\":false,\"TargetTablePrepMode\":\"DROP_AND_CREATE\",\"CreatePkAfterFullLoad\":false,\"StopTaskCachedChangesApplied\":false,\"StopTaskCachedChangesNotApplied\":false,\"ResumeEnabled\":false,\"ResumeMinTableSize\":100000,\"ResumeOnlyClusteredPKTables\":true,\"MaxFullLoadSubTasks\":8,\"TransactionConsistencyTimeout\":600,\"CommitRate\":10000},\"Logging\":{\"EnableLogging\":false}}", - "SourceEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ZW5UAN6P4E77EC7YWHK4RZZ3BE", - "Status": "creating", - "TableMappings": "file://mappingfile.json", - "TargetEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Starts the replication task.", - "id": "start-replication-task-1481762706778", - "title": "Start replication task" - } - ], - "StopReplicationTask": [ - { - "input": { - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E" - }, - "output": { - "ReplicationTask": { - "MigrationType": "full-load", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ", - "ReplicationTaskArn": "arn:aws:dms:us-east-1:123456789012:task:OEAMB3NXSTZ6LFYZFEPPBBXPYM", - "ReplicationTaskCreationDate": "2016-12-14T18:25:43Z", - "ReplicationTaskIdentifier": "task1", - "ReplicationTaskSettings": "{\"TargetMetadata\":{\"TargetSchema\":\"\",\"SupportLobs\":true,\"FullLobMode\":true,\"LobChunkSize\":64,\"LimitedSizeLobMode\":false,\"LobMaxSize\":0},\"FullLoadSettings\":{\"FullLoadEnabled\":true,\"ApplyChangesEnabled\":false,\"TargetTablePrepMode\":\"DROP_AND_CREATE\",\"CreatePkAfterFullLoad\":false,\"StopTaskCachedChangesApplied\":false,\"StopTaskCachedChangesNotApplied\":false,\"ResumeEnabled\":false,\"ResumeMinTableSize\":100000,\"ResumeOnlyClusteredPKTables\":true,\"MaxFullLoadSubTasks\":8,\"TransactionConsistencyTimeout\":600,\"CommitRate\":10000},\"Logging\":{\"EnableLogging\":false}}", - "SourceEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ZW5UAN6P4E77EC7YWHK4RZZ3BE", - "Status": "creating", - 
"TableMappings": "file://mappingfile.json", - "TargetEndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:ASXWXJZLNWNT5HTWCGV2BUJQ7E" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Stops the replication task.", - "id": "stop-replication-task-1481762924947", - "title": "Stop replication task" - } - ], - "TestConnection": [ - { - "input": { - "EndpointArn": "arn:aws:dms:us-east-1:123456789012:endpoint:RAAR3R22XSH46S3PWLC3NJAWKM", - "ReplicationInstanceArn": "arn:aws:dms:us-east-1:123456789012:rep:6UTDJGBOUS3VI3SUWA66XFJCJQ" - }, - "output": { - "Connection": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Tests the connection between the replication instance and the endpoint.", - "id": "test-conection-1481763017636", - "title": "Test conection" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/dms/2016-01-01/paginators-1.json deleted file mode 100644 index ae92c82..0000000 --- a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "DescribeSchemas": { - "result_key": "Schemas", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeCertificates": { - "result_key": "Certificates", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeEndpoints": { - "result_key": "Endpoints", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeEventSubscriptions": { - "result_key": "EventSubscriptionsList", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeEndpointTypes": { - "result_key": "SupportedEndpointTypes", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeReplicationInstances": { - "result_key": "ReplicationInstances", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeTableStatistics": { - "result_key": "TableStatistics", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeConnections": { - "result_key": "Connections", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeReplicationTaskAssessmentResults": { - "result_key": "ReplicationTaskAssessmentResults", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeEvents": { - "result_key": "Events", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeOrderableReplicationInstances": { - "result_key": "OrderableReplicationInstances", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeReplicationSubnetGroups": { - "result_key": "ReplicationSubnetGroups", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeReplicationTasks": { - "result_key": "ReplicationTasks", - "output_token": "Marker", - "input_token": "Marker", - "limit_key": "MaxRecords" - }, - "DescribeDataMigrations": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DataMigrations" - }, - "DescribeMetadataModelChildren": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "MetadataModelChildren" - }, - 
"DescribeMetadataModelCreations": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Requests" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dms/2016-01-01/service-2.json.gz deleted file mode 100644 index f268f81..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/dms/2016-01-01/waiters-2.json deleted file mode 100644 index 73fba51..0000000 --- a/venv/Lib/site-packages/botocore/data/dms/2016-01-01/waiters-2.json +++ /dev/null @@ -1,330 +0,0 @@ -{ - "version":2, - "waiters":{ - "TestConnectionSucceeds":{ - "acceptors":[ - { - "argument":"Connections[].Status", - "expected":"successful", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"Connections[].Status", - "expected":"failed", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":5, - "description":"Wait until testing connection succeeds.", - "maxAttempts":60, - "operation":"DescribeConnections" - }, - "EndpointDeleted":{ - "acceptors":[ - { - "expected":"ResourceNotFoundFault", - "matcher":"error", - "state":"success" - }, - { - "argument":"Endpoints[].Status", - "expected":"active", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"Endpoints[].Status", - "expected":"creating", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":5, - "description":"Wait until testing endpoint is deleted.", - "maxAttempts":60, - "operation":"DescribeEndpoints" - }, - "ReplicationInstanceAvailable":{ - "acceptors":[ - { - "argument":"ReplicationInstances[].ReplicationInstanceStatus", - "expected":"available", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"ReplicationInstances[].ReplicationInstanceStatus", - "expected":"deleting", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationInstances[].ReplicationInstanceStatus", - "expected":"incompatible-credentials", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationInstances[].ReplicationInstanceStatus", - "expected":"incompatible-network", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationInstances[].ReplicationInstanceStatus", - "expected":"inaccessible-encryption-credentials", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":60, - "description":"Wait until DMS replication instance is available.", - "maxAttempts":60, - "operation":"DescribeReplicationInstances" - }, - "ReplicationInstanceDeleted":{ - "acceptors":[ - { - "argument":"ReplicationInstances[].ReplicationInstanceStatus", - "expected":"available", - "matcher":"pathAny", - "state":"failure" - }, - { - "expected":"ResourceNotFoundFault", - "matcher":"error", - "state":"success" - } - ], - "delay":15, - "description":"Wait until DMS replication instance is deleted.", - "maxAttempts":60, - "operation":"DescribeReplicationInstances" - }, - "ReplicationTaskReady":{ - "acceptors":[ - { - "argument":"ReplicationTasks[].Status", - "expected":"ready", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"starting", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"running", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - 
"expected":"stopping", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"stopped", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"failed", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"modifying", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"testing", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"deleting", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":15, - "description":"Wait until DMS replication task is ready.", - "maxAttempts":60, - "operation":"DescribeReplicationTasks" - }, - "ReplicationTaskStopped":{ - "acceptors":[ - { - "argument":"ReplicationTasks[].Status", - "expected":"stopped", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"ready", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"creating", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"starting", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"failed", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"modifying", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"testing", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"deleting", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":15, - "description":"Wait until DMS replication task is stopped.", - "maxAttempts":60, - "operation":"DescribeReplicationTasks" - }, - "ReplicationTaskRunning":{ - "acceptors":[ - { - "argument":"ReplicationTasks[].Status", - "expected":"running", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"ready", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"creating", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"stopping", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"stopped", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"failed", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"modifying", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"testing", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"deleting", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":15, - "description":"Wait until DMS replication task is running.", - "maxAttempts":60, - "operation":"DescribeReplicationTasks" - }, - "ReplicationTaskDeleted":{ - "acceptors":[ - { - "argument":"ReplicationTasks[].Status", - "expected":"ready", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"creating", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"stopped", - 
"matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"running", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"ReplicationTasks[].Status", - "expected":"failed", - "matcher":"pathAny", - "state":"failure" - }, - { - "expected":"ResourceNotFoundFault", - "matcher":"error", - "state":"success" - } - ], - "delay":15, - "description":"Wait until DMS replication task is deleted.", - "maxAttempts":60, - "operation":"DescribeReplicationTasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2170905..0000000 Binary files a/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/paginators-1.json deleted file mode 100644 index bed516d..0000000 --- a/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListClusterSnapshots": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "snapshots" - }, - "ListClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "clusters" - }, - "ListPendingMaintenanceActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "resourcePendingMaintenanceActions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/service-2.json.gz deleted file mode 100644 index 18f74ad..0000000 Binary files a/venv/Lib/site-packages/botocore/data/docdb-elastic/2022-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index cfb467b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/examples-1.json b/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/paginators-1.json deleted file mode 100644 index cc1a2f1..0000000 --- a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/paginators-1.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "pagination": { - "DescribeCertificates": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Certificates" - }, - "DescribeDBClusterParameterGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterParameterGroups" - }, - "DescribeDBClusterParameters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - 
"result_key": "Parameters" - }, - "DescribeDBClusterSnapshots": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterSnapshots" - }, - "DescribeDBClusters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusters" - }, - "DescribeDBEngineVersions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBEngineVersions" - }, - "DescribeDBInstances": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBInstances" - }, - "DescribeDBSubnetGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBSubnetGroups" - }, - "DescribeEvents": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Events" - }, - "DescribeOrderableDBInstanceOptions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "OrderableDBInstanceOptions" - }, - "DescribePendingMaintenanceActions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "PendingMaintenanceActions" - }, - "DescribeEventSubscriptions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "EventSubscriptionsList" - }, - "DescribeGlobalClusters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "GlobalClusters" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/service-2.json.gz deleted file mode 100644 index 08f6fbe..0000000 Binary files a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/service-2.sdk-extras.json b/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/service-2.sdk-extras.json deleted file mode 100644 index 85e8a10..0000000 --- a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/service-2.sdk-extras.json +++ /dev/null @@ -1,23 +0,0 @@ - { - "version": 1.0, - "merge": { - "shapes": { - "CopyDBClusterSnapshotMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "

The ID of the region that contains the snapshot to be copied." - } - } - }, - "CreateDBClusterMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "The ID of the region that contains the source for the db cluster.
    " - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/waiters-2.json b/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/waiters-2.json deleted file mode 100644 index e75f03b..0000000 --- a/venv/Lib/site-packages/botocore/data/docdb/2014-10-31/waiters-2.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "version": 2, - "waiters": { - "DBInstanceAvailable": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - }, - "DBInstanceDeleted": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "DBInstanceNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/drs/2020-02-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0593040..0000000 Binary files a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/examples-1.json b/venv/Lib/site-packages/botocore/data/drs/2020-02-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/drs/2020-02-26/paginators-1.json deleted file mode 100644 index cfe134c..0000000 --- a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "DescribeJobLogItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeRecoveryInstances": { - "input_token": "nextToken", - "output_token": 
"nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeRecoverySnapshots": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeReplicationConfigurationTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeSourceServers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListExtensibleSourceServers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListStagingAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accounts" - }, - "DescribeLaunchConfigurationTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeSourceNetworks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListLaunchActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/drs/2020-02-26/service-2.json.gz deleted file mode 100644 index a65bc5e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/drs/2020-02-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index a856437..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/paginators-1.json deleted file mode 100644 index 5038135..0000000 --- a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListGroupMembers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Members" - }, - "ListGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Groups" - }, - "ListGroupsForMember": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Groups" - }, - "ListUsers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Users" - }, - "SearchGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Groups" - }, - "SearchUsers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Users" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/paginators-1.sdk-extras.json deleted file mode 100644 index 35f9e23..0000000 --- a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/paginators-1.sdk-extras.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "version": 1.0, - "merge": { - 
"pagination": { - "ListGroupMembers": { - "non_aggregate_keys": [ - "DirectoryId", - "MemberRealm", - "Realm" - ] - }, - "ListGroups": { - "non_aggregate_keys": [ - "DirectoryId", - "Realm" - ] - }, - "ListGroupsForMember": { - "non_aggregate_keys": [ - "DirectoryId", - "MemberRealm", - "Realm" - ] - }, - "ListUsers": { - "non_aggregate_keys": [ - "DirectoryId", - "Realm" - ] - }, - "SearchGroups": { - "non_aggregate_keys": [ - "DirectoryId", - "Realm" - ] - }, - "SearchUsers": { - "non_aggregate_keys": [ - "DirectoryId", - "Realm" - ] - } - } - } - } - \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/service-2.json.gz deleted file mode 100644 index fdad954..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ds-data/2023-05-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ds/2015-04-16/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6cda421..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/examples-1.json b/venv/Lib/site-packages/botocore/data/ds/2015-04-16/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/paginators-1.json b/venv/Lib/site-packages/botocore/data/ds/2015-04-16/paginators-1.json deleted file mode 100644 index 71bc41d..0000000 --- a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/paginators-1.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "pagination": { - "DescribeDomainControllers": { - "result_key": "DomainControllers", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "Limit" - }, - "DescribeDirectories": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "DirectoryDescriptions" - }, - "DescribeSharedDirectories": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "SharedDirectories" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Snapshots" - }, - "DescribeTrusts": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Trusts" - }, - "ListIpRoutes": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "IpRoutesInfo" - }, - "ListLogSubscriptions": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "LogSubscriptions" - }, - "ListSchemaExtensions": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "SchemaExtensionsInfo" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Tags" - }, - "DescribeClientAuthenticationSettings": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ClientAuthenticationSettingsInfo" - }, - "DescribeLDAPSSettings": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": 
"NextToken", - "result_key": "LDAPSSettingsInfo" - }, - "DescribeRegions": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "RegionsDescription" - }, - "DescribeUpdateDirectory": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "UpdateActivities" - }, - "ListCertificates": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "CertificatesInfo" - }, - "ListADAssessments": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "Assessments" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ds/2015-04-16/service-2.json.gz deleted file mode 100644 index a335fe0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/waiters-2.json b/venv/Lib/site-packages/botocore/data/ds/2015-04-16/waiters-2.json deleted file mode 100644 index 962c7b7..0000000 --- a/venv/Lib/site-packages/botocore/data/ds/2015-04-16/waiters-2.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "version": 2, - "waiters": { - "HybridADUpdated": { - "operation": "DescribeHybridADUpdate", - "delay": 120, - "maxAttempts": 60, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "UpdateActivities.SelfManagedInstances[].Status", - "expected": "Updated" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "UpdateActivities.SelfManagedInstances[].Status", - "expected": "UpdateFailed" - } - ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 50bb58d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/paginators-1.json deleted file mode 100644 index c9cfb66..0000000 --- a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "clusters" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/service-2.json.gz deleted file mode 100644 index 266f60a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/waiters-2.json deleted file mode 100644 index 69cd9ba..0000000 --- a/venv/Lib/site-packages/botocore/data/dsql/2018-05-10/waiters-2.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ClusterActive" : { - "description" : "Wait until a Cluster is ACTIVE", - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetCluster", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - } ] - }, - "ClusterNotExists" : { - "description" : "Wait until a Cluster is gone", - "delay" : 2, - "maxAttempts" : 60, 
- "operation" : "GetCluster", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2011-12-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dynamodb/2011-12-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index 25a814f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dynamodb/2011-12-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2011-12-05/examples-1.json b/venv/Lib/site-packages/botocore/data/dynamodb/2011-12-05/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/dynamodb/2011-12-05/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1215791..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/examples-1.json b/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/examples-1.json deleted file mode 100644 index bbc763c..0000000 --- a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/examples-1.json +++ /dev/null @@ -1,631 +0,0 @@ -{ - "version": "1.0", - "examples": { - "BatchGetItem": [ - { - "input": { - "RequestItems": { - "Music": { - "Keys": [ - { - "Artist": { - "S": "No One You Know" - }, - "SongTitle": { - "S": "Call Me Today" - } - }, - { - "Artist": { - "S": "Acme Band" - }, - "SongTitle": { - "S": "Happy Day" - } - }, - { - "Artist": { - "S": "No One You Know" - }, - "SongTitle": { - "S": "Scared of My Shadow" - } - } - ], - "ProjectionExpression": "AlbumTitle" - } - } - }, - "output": { - "Responses": { - "Music": [ - { - "AlbumTitle": { - "S": "Somewhat Famous" - } - }, - { - "AlbumTitle": { - "S": "Blue Sky Blues" - } - }, - { - "AlbumTitle": { - "S": "Louder Than Ever" - } - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example reads multiple items from the Music table using a batch of three GetItem requests. 
Only the AlbumTitle attribute is returned.", - "id": "to-retrieve-multiple-items-from-a-table-1476118438992", - "title": "To retrieve multiple items from a table" - } - ], - "BatchWriteItem": [ - { - "input": { - "RequestItems": { - "Music": [ - { - "PutRequest": { - "Item": { - "AlbumTitle": { - "S": "Somewhat Famous" - }, - "Artist": { - "S": "No One You Know" - }, - "SongTitle": { - "S": "Call Me Today" - } - } - } - }, - { - "PutRequest": { - "Item": { - "AlbumTitle": { - "S": "Songs About Life" - }, - "Artist": { - "S": "Acme Band" - }, - "SongTitle": { - "S": "Happy Day" - } - } - } - }, - { - "PutRequest": { - "Item": { - "AlbumTitle": { - "S": "Blue Sky Blues" - }, - "Artist": { - "S": "No One You Know" - }, - "SongTitle": { - "S": "Scared of My Shadow" - } - } - } - } - ] - } - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds three new items to the Music table using a batch of three PutItem requests.", - "id": "to-add-multiple-items-to-a-table-1476118519747", - "title": "To add multiple items to a table" - } - ], - "CreateTable": [ - { - "input": { - "AttributeDefinitions": [ - { - "AttributeName": "Artist", - "AttributeType": "S" - }, - { - "AttributeName": "SongTitle", - "AttributeType": "S" - } - ], - "KeySchema": [ - { - "AttributeName": "Artist", - "KeyType": "HASH" - }, - { - "AttributeName": "SongTitle", - "KeyType": "RANGE" - } - ], - "ProvisionedThroughput": { - "ReadCapacityUnits": 5, - "WriteCapacityUnits": 5 - }, - "TableName": "Music" - }, - "output": { - "TableDescription": { - "AttributeDefinitions": [ - { - "AttributeName": "Artist", - "AttributeType": "S" - }, - { - "AttributeName": "SongTitle", - "AttributeType": "S" - } - ], - "CreationDateTime": "1421866952.062", - "ItemCount": 0, - "KeySchema": [ - { - "AttributeName": "Artist", - "KeyType": "HASH" - }, - { - "AttributeName": "SongTitle", - "KeyType": "RANGE" - } - ], - "ProvisionedThroughput": { - "ReadCapacityUnits": 5, - "WriteCapacityUnits": 5 - }, - "TableName": "Music", - "TableSizeBytes": 0, - "TableStatus": "CREATING" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a table named Music.", - "id": "to-create-a-table-1476116291743", - "title": "To create a table" - } - ], - "DeleteItem": [ - { - "input": { - "Key": { - "Artist": { - "S": "No One You Know" - }, - "SongTitle": { - "S": "Scared of My Shadow" - } - }, - "TableName": "Music" - }, - "output": { - "ConsumedCapacity": { - "CapacityUnits": 1, - "TableName": "Music" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes an item from the Music table.", - "id": "to-delete-an-item-1475884573758", - "title": "To delete an item" - } - ], - "DeleteTable": [ - { - "input": { - "TableName": "Music" - }, - "output": { - "TableDescription": { - "ItemCount": 0, - "ProvisionedThroughput": { - "NumberOfDecreasesToday": 1, - "ReadCapacityUnits": 5, - "WriteCapacityUnits": 5 - }, - "TableName": "Music", - "TableSizeBytes": 0, - "TableStatus": "DELETING" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the Music table.", - "id": "to-delete-a-table-1475884368755", - "title": "To delete a table" - } - ], - "DescribeLimits": [ - { - "input": { - }, - "output": { - "AccountMaxReadCapacityUnits": 20000, - "AccountMaxWriteCapacityUnits": 20000, - "TableMaxReadCapacityUnits": 10000, - "TableMaxWriteCapacityUnits": 10000 - }, - "comments": { - 
"input": { - }, - "output": { - } - }, - "description": "The following example returns the maximum read and write capacity units per table, and for the AWS account, in the current AWS region.", - "id": "to-determine-capacity-limits-per-table-and-account-in-the-current-aws-region-1475884162064", - "title": "To determine capacity limits per table and account, in the current AWS region" - } - ], - "DescribeTable": [ - { - "input": { - "TableName": "Music" - }, - "output": { - "Table": { - "AttributeDefinitions": [ - { - "AttributeName": "Artist", - "AttributeType": "S" - }, - { - "AttributeName": "SongTitle", - "AttributeType": "S" - } - ], - "CreationDateTime": "1421866952.062", - "ItemCount": 0, - "KeySchema": [ - { - "AttributeName": "Artist", - "KeyType": "HASH" - }, - { - "AttributeName": "SongTitle", - "KeyType": "RANGE" - } - ], - "ProvisionedThroughput": { - "NumberOfDecreasesToday": 1, - "ReadCapacityUnits": 5, - "WriteCapacityUnits": 5 - }, - "TableName": "Music", - "TableSizeBytes": 0, - "TableStatus": "ACTIVE" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Music table.", - "id": "to-describe-a-table-1475884440502", - "title": "To describe a table" - } - ], - "GetItem": [ - { - "input": { - "Key": { - "Artist": { - "S": "Acme Band" - }, - "SongTitle": { - "S": "Happy Day" - } - }, - "TableName": "Music" - }, - "output": { - "Item": { - "AlbumTitle": { - "S": "Songs About Life" - }, - "Artist": { - "S": "Acme Band" - }, - "SongTitle": { - "S": "Happy Day" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example retrieves an item from the Music table. The table has a partition key and a sort key (Artist and SongTitle), so you must specify both of these attributes.", - "id": "to-read-an-item-from-a-table-1475884258350", - "title": "To read an item from a table" - } - ], - "ListTables": [ - { - "input": { - }, - "output": { - "TableNames": [ - "Forum", - "ProductCatalog", - "Reply", - "Thread" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of the tables associated with the current AWS account and endpoint.", - "id": "to-list-tables-1475884741238", - "title": "To list tables" - } - ], - "PutItem": [ - { - "input": { - "Item": { - "AlbumTitle": { - "S": "Somewhat Famous" - }, - "Artist": { - "S": "No One You Know" - }, - "SongTitle": { - "S": "Call Me Today" - } - }, - "ReturnConsumedCapacity": "TOTAL", - "TableName": "Music" - }, - "output": { - "ConsumedCapacity": { - "CapacityUnits": 1, - "TableName": "Music" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds a new item to the Music table.", - "id": "to-add-an-item-to-a-table-1476116191110", - "title": "To add an item to a table" - } - ], - "Query": [ - { - "input": { - "ExpressionAttributeValues": { - ":v1": { - "S": "No One You Know" - } - }, - "KeyConditionExpression": "Artist = :v1", - "ProjectionExpression": "SongTitle", - "TableName": "Music" - }, - "output": { - "ConsumedCapacity": { - }, - "Count": 2, - "Items": [ - { - "SongTitle": { - "S": "Call Me Today" - } - } - ], - "ScannedCount": 2 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example queries items in the Music table. The table has a partition key and sort key (Artist and SongTitle), but this query only specifies the partition key value. 
It returns song titles by the artist named \"No One You Know\".", - "id": "to-query-an-item-1475883874631", - "title": "To query an item" - } - ], - "Scan": [ - { - "input": { - "ExpressionAttributeNames": { - "#AT": "AlbumTitle", - "#ST": "SongTitle" - }, - "ExpressionAttributeValues": { - ":a": { - "S": "No One You Know" - } - }, - "FilterExpression": "Artist = :a", - "ProjectionExpression": "#ST, #AT", - "TableName": "Music" - }, - "output": { - "ConsumedCapacity": { - }, - "Count": 2, - "Items": [ - { - "AlbumTitle": { - "S": "Somewhat Famous" - }, - "SongTitle": { - "S": "Call Me Today" - } - }, - { - "AlbumTitle": { - "S": "Blue Sky Blues" - }, - "SongTitle": { - "S": "Scared of My Shadow" - } - } - ], - "ScannedCount": 3 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example scans the entire Music table, and then narrows the results to songs by the artist \"No One You Know\". For each item, only the album title and song title are returned.", - "id": "to-scan-a-table-1475883652470", - "title": "To scan a table" - } - ], - "UpdateItem": [ - { - "input": { - "ExpressionAttributeNames": { - "#AT": "AlbumTitle", - "#Y": "Year" - }, - "ExpressionAttributeValues": { - ":t": { - "S": "Louder Than Ever" - }, - ":y": { - "N": "2015" - } - }, - "Key": { - "Artist": { - "S": "Acme Band" - }, - "SongTitle": { - "S": "Happy Day" - } - }, - "ReturnValues": "ALL_NEW", - "TableName": "Music", - "UpdateExpression": "SET #Y = :y, #AT = :t" - }, - "output": { - "Attributes": { - "AlbumTitle": { - "S": "Louder Than Ever" - }, - "Artist": { - "S": "Acme Band" - }, - "SongTitle": { - "S": "Happy Day" - }, - "Year": { - "N": "2015" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example updates an item in the Music table. It adds a new attribute (Year) and modifies the AlbumTitle attribute. 
All of the attributes in the item, as they appear after the update, are returned in the response.", - "id": "to-update-an-item-in-a-table-1476118250055", - "title": "To update an item in a table" - } - ], - "UpdateTable": [ - { - "input": { - "ProvisionedThroughput": { - "ReadCapacityUnits": 10, - "WriteCapacityUnits": 10 - }, - "TableName": "MusicCollection" - }, - "output": { - "TableDescription": { - "AttributeDefinitions": [ - { - "AttributeName": "Artist", - "AttributeType": "S" - }, - { - "AttributeName": "SongTitle", - "AttributeType": "S" - } - ], - "CreationDateTime": "1421866952.062", - "ItemCount": 0, - "KeySchema": [ - { - "AttributeName": "Artist", - "KeyType": "HASH" - }, - { - "AttributeName": "SongTitle", - "KeyType": "RANGE" - } - ], - "ProvisionedThroughput": { - "LastIncreaseDateTime": "1421874759.194", - "NumberOfDecreasesToday": 1, - "ReadCapacityUnits": 1, - "WriteCapacityUnits": 1 - }, - "TableName": "MusicCollection", - "TableSizeBytes": 0, - "TableStatus": "UPDATING" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example increases the provisioned read and write capacity on the Music table.", - "id": "to-modify-a-tables-provisioned-throughput-1476118076147", - "title": "To modify a table's provisioned throughput" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/paginators-1.json deleted file mode 100644 index 8e10a0c..0000000 --- a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/paginators-1.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "pagination": { - "ListBackups": { - "input_token": "ExclusiveStartBackupArn", - "output_token": "LastEvaluatedBackupArn", - "limit_key": "Limit", - "result_key": "BackupSummaries" - }, - "ListTables": { - "input_token": "ExclusiveStartTableName", - "output_token": "LastEvaluatedTableName", - "limit_key": "Limit", - "result_key": "TableNames" - }, - "Query": { - "input_token": "ExclusiveStartKey", - "output_token": "LastEvaluatedKey", - "limit_key": "Limit", - "result_key": [ - "Items", - "Count", - "ScannedCount" - ], - "non_aggregate_keys": [ - "ConsumedCapacity" - ] - }, - "Scan": { - "input_token": "ExclusiveStartKey", - "output_token": "LastEvaluatedKey", - "limit_key": "Limit", - "result_key": [ - "Items", - "Count", - "ScannedCount" - ], - "non_aggregate_keys": [ - "ConsumedCapacity" - ] - }, - "ListTagsOfResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/service-2.json.gz deleted file mode 100644 index 19a7025..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/waiters-2.json deleted file mode 100644 index 43a55ca..0000000 --- a/venv/Lib/site-packages/botocore/data/dynamodb/2012-08-10/waiters-2.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "version": 2, - "waiters": { - "TableExists": { - "delay": 20, - "operation": "DescribeTable", - "maxAttempts": 25, - "acceptors": [ - { - "expected": "ACTIVE", - "matcher": "path", - "state": "success", - "argument": "Table.TableStatus" - }, - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "retry" - 
} - ] - }, - "TableNotExists": { - "delay": 20, - "operation": "DescribeTable", - "maxAttempts": 25, - "acceptors": [ - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "success" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3fb2875..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/examples-1.json b/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/examples-1.json deleted file mode 100644 index 8287e2c..0000000 --- a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/examples-1.json +++ /dev/null @@ -1,212 +0,0 @@ -{ - "version": "1.0", - "examples": { - "DescribeStream": [ - { - "input": { - "StreamArn": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252" - }, - "output": { - "StreamDescription": { - "CreationRequestDateTime": "Wed May 20 13:51:10 PDT 2015", - "KeySchema": [ - { - "AttributeName": "ForumName", - "KeyType": "HASH" - }, - { - "AttributeName": "Subject", - "KeyType": "RANGE" - } - ], - "Shards": [ - { - "SequenceNumberRange": { - "EndingSequenceNumber": "20500000000000000910398", - "StartingSequenceNumber": "20500000000000000910398" - }, - "ShardId": "shardId-00000001414562045508-2bac9cd2" - }, - { - "ParentShardId": "shardId-00000001414562045508-2bac9cd2", - "SequenceNumberRange": { - "EndingSequenceNumber": "820400000000000001192334", - "StartingSequenceNumber": "820400000000000001192334" - }, - "ShardId": "shardId-00000001414576573621-f55eea83" - }, - { - "ParentShardId": "shardId-00000001414576573621-f55eea83", - "SequenceNumberRange": { - "EndingSequenceNumber": "1683700000000000001135967", - "StartingSequenceNumber": "1683700000000000001135967" - }, - "ShardId": "shardId-00000001414592258131-674fd923" - }, - { - "ParentShardId": "shardId-00000001414592258131-674fd923", - "SequenceNumberRange": { - "StartingSequenceNumber": "2574600000000000000935255" - }, - "ShardId": "shardId-00000001414608446368-3a1afbaf" - } - ], - "StreamArn": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252", - "StreamLabel": "2015-05-20T20:51:10.252", - "StreamStatus": "ENABLED", - "StreamViewType": "NEW_AND_OLD_IMAGES", - "TableName": "Forum" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example describes a stream with a given stream ARN.", - "id": "to-describe-a-stream-with-a-given-stream-arn-1473457835200", - "title": "To describe a stream with a given stream ARN" - } - ], - "GetRecords": [ - { - "input": { - "ShardIterator": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252|1|AAAAAAAAAAEvJp6D+zaQ... ..." - }, - "output": { - "NextShardIterator": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252|1|AAAAAAAAAAGQBYshYDEe ... 
...", - "Records": [ - { - "awsRegion": "us-west-2", - "dynamodb": { - "ApproximateCreationDateTime": "1.46480646E9", - "Keys": { - "ForumName": { - "S": "DynamoDB" - }, - "Subject": { - "S": "DynamoDB Thread 3" - } - }, - "SequenceNumber": "300000000000000499659", - "SizeBytes": 41, - "StreamViewType": "KEYS_ONLY" - }, - "eventID": "e2fd9c34eff2d779b297b26f5fef4206", - "eventName": "INSERT", - "eventSource": "aws:dynamodb", - "eventVersion": "1.0" - }, - { - "awsRegion": "us-west-2", - "dynamodb": { - "ApproximateCreationDateTime": "1.46480527E9", - "Keys": { - "ForumName": { - "S": "DynamoDB" - }, - "Subject": { - "S": "DynamoDB Thread 1" - } - }, - "SequenceNumber": "400000000000000499660", - "SizeBytes": 41, - "StreamViewType": "KEYS_ONLY" - }, - "eventID": "4b25bd0da9a181a155114127e4837252", - "eventName": "MODIFY", - "eventSource": "aws:dynamodb", - "eventVersion": "1.0" - }, - { - "awsRegion": "us-west-2", - "dynamodb": { - "ApproximateCreationDateTime": "1.46480646E9", - "Keys": { - "ForumName": { - "S": "DynamoDB" - }, - "Subject": { - "S": "DynamoDB Thread 2" - } - }, - "SequenceNumber": "500000000000000499661", - "SizeBytes": 41, - "StreamViewType": "KEYS_ONLY" - }, - "eventID": "740280c73a3df7842edab3548a1b08ad", - "eventName": "REMOVE", - "eventSource": "aws:dynamodb", - "eventVersion": "1.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves all the stream records from a shard.", - "id": "to-retrieve-all-the-stream-records-from-a-shard-1473707781419", - "title": "To retrieve all the stream records from a shard" - } - ], - "GetShardIterator": [ - { - "input": { - "ShardId": "00000001414576573621-f55eea83", - "ShardIteratorType": "TRIM_HORIZON", - "StreamArn": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252" - }, - "output": { - "ShardIterator": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252|1|AAAAAAAAAAEvJp6D+zaQ... ..." 
- }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a shard iterator for the provided stream ARN and shard ID.", - "id": "to-obtain-a-shard-iterator-for-the-provided-stream-arn-and-shard-id-1473459941476", - "title": "To obtain a shard iterator for the provided stream ARN and shard ID" - } - ], - "ListStreams": [ - { - "input": { - }, - "output": { - "Streams": [ - { - "StreamArn": "arn:aws:dynamodb:us-wesst-2:111122223333:table/Forum/stream/2015-05-20T20:51:10.252", - "StreamLabel": "2015-05-20T20:51:10.252", - "TableName": "Forum" - }, - { - "StreamArn": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-20T20:50:02.714", - "StreamLabel": "2015-05-20T20:50:02.714", - "TableName": "Forum" - }, - { - "StreamArn": "arn:aws:dynamodb:us-west-2:111122223333:table/Forum/stream/2015-05-19T23:03:50.641", - "StreamLabel": "2015-05-19T23:03:50.641", - "TableName": "Forum" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists all of the stream ARNs.", - "id": "to-list-all-of-the-stream-arns--1473459534285", - "title": "To list all of the stream ARNs " - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/service-2.json.gz deleted file mode 100644 index f572ce4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/dynamodbstreams/2012-08-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 15a5b35..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/examples-1.json b/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/service-2.json.gz deleted file mode 100644 index b313a70..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ebs/2019-11-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 249475e..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/examples-1.json b/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/examples-1.json deleted file mode 100644 index c5c6001..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/examples-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "version": "1.0", - "examples": { - "SendSSHPublicKey": [ - { - "input": { - "AvailabilityZone": "us-west-2a", - "InstanceId": "i-abcd1234", - "InstanceOSUser": "ec2-user", - "SSHPublicKey": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC3FlHqj2eqCdrGHuA6dRjfZXQ4HX5lXEIRHaNbxEwE5Te7xNF7StwhrDtiV7IdT5fDqbRyGw/szPj3xGkNTVoElCZ2dDFb2qYZ1WLIpZwj/UhO9l2mgfjR56UojjQut5Jvn2KZ1OcyrNO0J83kCaJCV7JoVbXY79FBMUccYNY45zmv9+1FMCfY6i2jdIhwR6+yLk8oubL8lIPyq7X+6b9S0yKCkB7Peml1DvghlybpAIUrC9vofHt6XP4V1i0bImw1IlljQS+DUmULRFSccATDscCX9ajnj7Crhm0HAZC0tBPXpFdHkPwL3yzYo546SCS9LKEwz62ymxxbL9k7h09t" - }, - "output": { - "RequestId": "abcd1234-abcd-1234-abcd-1234abcd1234", - "Success": true - }, - "comments": { - "input": { - "AvailabilityZone": "The zone where the instance was launched", - "InstanceId": "The instance ID to publish the key to.", - "InstanceOSUser": "This should be the user you wish to be when ssh-ing to the instance (eg, ec2-user@[instance IP])", - "SSHPublicKey": "This should be in standard OpenSSH format (ssh-rsa [key body])" - }, - "output": { - "RequestId": "This request ID should be provided when contacting AWS Support.", - "Success": "Should be true if the service does not return an error response." - } - }, - "description": "The following example pushes a sample SSH public key to the EC2 instance i-abcd1234 in AZ us-west-2b for use by the instance OS user ec2-user.", - "id": "send-ssh-key-to-an-ec2-instance-1518124883100", - "title": "To push an SSH key to an EC2 instance" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/service-2.json.gz deleted file mode 100644 index 7f9cc31..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2-instance-connect/2018-04-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index cd7ba73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/paginators-1.json deleted file mode 100644 index b643e69..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/paginators-1.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - 
}, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/service-2.json.gz deleted file mode 100644 index 888bd70..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/waiters-2.json deleted file mode 100644 index fb8c16b..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2014-09-01/waiters-2.json +++ /dev/null @@ -1,341 +0,0 @@ -{ - "version": 2, - "waiters": { - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - 
"maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": 
"DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index cd7ba73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/paginators-1.json deleted file mode 100644 index ca7a876..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/paginators-1.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/service-2.json.gz deleted file mode 100644 index cf8e7ba..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/waiters-2.json deleted file mode 100644 index 17f0870..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2014-10-01/waiters-2.json 
+++ /dev/null @@ -1,436 +0,0 @@ -{ - "version": 2, - "waiters": { - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageAvailable": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - 
"state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "system-error" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - 
} - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index cd7ba73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/paginators-1.json deleted file mode 100644 index ca7a876..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/paginators-1.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/service-2.json.gz deleted file mode 100644 
index 035e8ea..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/waiters-2.json deleted file mode 100644 index 17f0870..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2015-03-01/waiters-2.json +++ /dev/null @@ -1,436 +0,0 @@ -{ - "version": 2, - "waiters": { - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageAvailable": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - 
"expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "system-error" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": 
"pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index cd7ba73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/paginators-1.json deleted file mode 100644 index ca7a876..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/paginators-1.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": 
"NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/service-2.json.gz deleted file mode 100644 index 6adc284..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/waiters-2.json deleted file mode 100644 index 5a6dbbc..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2015-04-15/waiters-2.json +++ /dev/null @@ -1,458 +0,0 @@ -{ - "version": 2, - "waiters": { - "InstanceExists": { - "delay": 5, - "maxAttempts": 40, - "operation": "DescribeInstances", - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidInstanceIDNotFound", - "state": "retry" - } - ] - }, - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageAvailable": { - "operation": 
"DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": 
"SpotInstanceRequests[].Status.Code", - "expected": "system-error" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "matcher": "error", - "expected": "InvalidVolumeNotFound", - "state": "success" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 44010fb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/examples-1.json b/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/paginators-1.json deleted file mode 100644 index 2bd01ad..0000000 --- 
a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/paginators-1.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - }, - "DescribeSpotFleetRequests": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotFleetRequestConfigs" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeVolumes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Volumes" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/service-2.json.gz deleted file mode 100644 index 55728da..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/waiters-2.json deleted file mode 100644 index 652a8ca..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2015-10-01/waiters-2.json +++ /dev/null @@ -1,589 +0,0 @@ -{ - "version": 2, - "waiters": { - "InstanceExists": { - "delay": 5, - "maxAttempts": 40, - "operation": "DescribeInstances", - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Reservations[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - "ConsoleOutputAvailable": { - "operation": "GetConsoleOutput", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(Output || '') > `0`", - "expected": true - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - 
"ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageExists": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Images[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidAMIID.NotFound", - "state": "retry" - } - ] - }, - "ImageAvailable": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStopped": { - "delay": 15, - 
"operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "KeyPairExists": { - "operation": "DescribeKeyPairs", - "delay": 5, - "maxAttempts": 6, - "acceptors": [ - { - "expected": true, - "matcher": "pathAll", - "state": "success", - "argument": "length(KeyPairs[].KeyName) > `0`" - }, - { - "expected": "InvalidKeyPair.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "NatGatewayAvailable": { - "operation": "DescribeNatGateways", - "delay": 15, - "maxAttempts": 40, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "NatGateways[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "failed" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleting" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleted" - }, - { - "state": "retry", - "matcher": "error", - "expected": "NatGatewayNotFound" - } - ] - }, - "NetworkInterfaceAvailable": { - "operation": "DescribeNetworkInterfaces", - "delay": 20, - "maxAttempts": 10, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "NetworkInterfaces[].Status" - }, - { - "expected": "InvalidNetworkInterfaceID.NotFound", - "matcher": "error", - "state": "failure" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": 
"bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "system-error" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "matcher": "error", - "expected": "InvalidVolume.NotFound", - "state": "success" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 60, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpcPeeringConnectionExists": { - "delay": 15, - "operation": "DescribeVpcPeeringConnections", - "maxAttempts": 40, - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidVpcPeeringConnectionID.NotFound", - "state": "retry" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 44010fb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/examples-1.json b/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/examples-1.json 
deleted file mode 100644 index 3f584e9..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/examples-1.json +++ /dev/null @@ -1,3729 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AllocateAddress": [ - { - "input": { - "Domain": "vpc" - }, - "output": { - "AllocationId": "eipalloc-64d5890a", - "Domain": "vpc", - "PublicIp": "203.0.113.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example allocates an Elastic IP address to use with an instance in a VPC.", - "id": "ec2-allocate-address-1", - "title": "To allocate an Elastic IP address for EC2-VPC" - }, - { - "output": { - "Domain": "standard", - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example allocates an Elastic IP address to use with an instance in EC2-Classic.", - "id": "ec2-allocate-address-2", - "title": "To allocate an Elastic IP address for EC2-Classic" - } - ], - "AssignPrivateIpAddresses": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "PrivateIpAddresses": [ - "10.0.0.82" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns the specified secondary private IP address to the specified network interface.", - "id": "ec2-assign-private-ip-addresses-1", - "title": "To assign a specific secondary private IP address to an interface" - }, - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "SecondaryPrivateIpAddressCount": 2 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns two secondary private IP addresses to the specified network interface. Amazon EC2 automatically assigns these IP addresses from the available IP addresses in the CIDR block range of the subnet the network interface is associated with.", - "id": "ec2-assign-private-ip-addresses-2", - "title": "To assign secondary private IP addresses that Amazon EC2 selects to an interface" - } - ], - "AssociateAddress": [ - { - "input": { - "AllocationId": "eipalloc-64d5890a", - "InstanceId": "i-0b263919b6498b123" - }, - "output": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified Elastic IP address with the specified instance in a VPC.", - "id": "ec2-associate-address-1", - "title": "To associate an Elastic IP address in EC2-VPC" - }, - { - "input": { - "AllocationId": "eipalloc-64d5890a", - "NetworkInterfaceId": "eni-1a2b3c4d" - }, - "output": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified Elastic IP address with the specified network interface.", - "id": "ec2-associate-address-2", - "title": "To associate an Elastic IP address with a network interface" - }, - { - "input": { - "InstanceId": "i-07ffe74c7330ebf53", - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates an Elastic IP address with an instance in EC2-Classic.", - "id": "ec2-associate-address-3", - "title": "To associate an Elastic IP address in EC2-Classic" - } - ], - "AssociateDhcpOptions": [ - { - "input": { - "DhcpOptionsId": "dopt-d9070ebb", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified DHCP options set with the specified VPC.", - "id": "ec2-associate-dhcp-options-1", - "title": 
"To associate a DHCP options set with a VPC" - }, - { - "input": { - "DhcpOptionsId": "default", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the default DHCP options set with the specified VPC.", - "id": "ec2-associate-dhcp-options-2", - "title": "To associate the default DHCP options set with a VPC" - } - ], - "AssociateRouteTable": [ - { - "input": { - "RouteTableId": "rtb-22574640", - "SubnetId": "subnet-9d4a7b6" - }, - "output": { - "AssociationId": "rtbassoc-781d0d1a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified route table with the specified subnet.", - "id": "ec2-associate-route-table-1", - "title": "To associate a route table with a subnet" - } - ], - "AttachInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified Internet gateway to the specified VPC.", - "id": "ec2-attach-internet-gateway-1", - "title": "To attach an Internet gateway to a VPC" - } - ], - "AttachNetworkInterface": [ - { - "input": { - "DeviceIndex": 1, - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-e5aa89a3" - }, - "output": { - "AttachmentId": "eni-attach-66c4350a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified network interface to the specified instance.", - "id": "ec2-attach-network-interface-1", - "title": "To attach a network interface to an instance" - } - ], - "AttachVolume": [ - { - "input": { - "Device": "/dev/sdf", - "InstanceId": "i-01474ef662b89480", - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "AttachTime": "2016-08-29T18:52:32.724Z", - "Device": "/dev/sdf", - "InstanceId": "i-01474ef662b89480", - "State": "attaching", - "VolumeId": "vol-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches a volume (``vol-1234567890abcdef0``) to an instance (``i-01474ef662b89480``) as ``/dev/sdf``.", - "id": "to-attach-a-volume-to-an-instance-1472499213109", - "title": "To attach a volume to an instance" - } - ], - "CancelSpotFleetRequests": [ - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ], - "TerminateInstances": true - }, - "output": { - "SuccessfulFleetRequests": [ - { - "CurrentSpotFleetRequestState": "cancelled_running", - "PreviousSpotFleetRequestState": "active", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels the specified Spot fleet request and terminates its associated Spot Instances.", - "id": "ec2-cancel-spot-fleet-requests-1", - "title": "To cancel a Spot fleet request" - }, - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ], - "TerminateInstances": false - }, - "output": { - "SuccessfulFleetRequests": [ - { - "CurrentSpotFleetRequestState": "cancelled_terminating", - "PreviousSpotFleetRequestState": "active", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels the specified Spot fleet request without terminating its associated Spot Instances.", - "id": "ec2-cancel-spot-fleet-requests-2", 
- "title": "To cancel a Spot fleet request without terminating its Spot Instances" - } - ], - "CancelSpotInstanceRequests": [ - { - "input": { - "SpotInstanceRequestIds": [ - "sir-08b93456" - ] - }, - "output": { - "CancelledSpotInstanceRequests": [ - { - "SpotInstanceRequestId": "sir-08b93456", - "State": "cancelled" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels a Spot Instance request.", - "id": "ec2-cancel-spot-instance-requests-1", - "title": "To cancel Spot Instance requests" - } - ], - "ConfirmProductInstance": [ - { - "input": { - "InstanceId": "i-1234567890abcdef0", - "ProductCode": "774F4FF8" - }, - "output": { - "OwnerId": "123456789012" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example determines whether the specified product code is associated with the specified instance.", - "id": "to-confirm-the-product-instance-1472712108494", - "title": "To confirm the product instance" - } - ], - "CopySnapshot": [ - { - "input": { - "Description": "This is my copied snapshot.", - "DestinationRegion": "us-east-1", - "SourceRegion": "us-west-2", - "SourceSnapshotId": "snap-066877671789bd71b" - }, - "output": { - "SnapshotId": "snap-066877671789bd71b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies a snapshot with the snapshot ID of ``snap-066877671789bd71b`` from the ``us-west-2`` region to the ``us-east-1`` region and adds a short description to identify the snapshot.", - "id": "to-copy-a-snapshot-1472502259774", - "title": "To copy a snapshot" - } - ], - "CreateCustomerGateway": [ - { - "input": { - "BgpAsn": 65534, - "PublicIp": "12.1.2.3", - "Type": "ipsec.1" - }, - "output": { - "CustomerGateway": { - "BgpAsn": "65534", - "CustomerGatewayId": "cgw-0e11f167", - "IpAddress": "12.1.2.3", - "State": "available", - "Type": "ipsec.1" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a customer gateway with the specified IP address for its outside interface.", - "id": "ec2-create-customer-gateway-1", - "title": "To create a customer gateway" - } - ], - "CreateDhcpOptions": [ - { - "input": { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - "10.2.5.1", - "10.2.5.2" - ] - } - ] - }, - "output": { - "DhcpOptions": { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - "10.2.5.2", - "10.2.5.1" - ] - } - ], - "DhcpOptionsId": "dopt-d9070ebb" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DHCP options set.", - "id": "ec2-create-dhcp-options-1", - "title": "To create a DHCP options set" - } - ], - "CreateInternetGateway": [ - { - "output": { - "InternetGateway": { - "Attachments": [ - - ], - "InternetGatewayId": "igw-c0a643a9", - "Tags": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Internet gateway.", - "id": "ec2-create-internet-gateway-1", - "title": "To create an Internet gateway" - } - ], - "CreateKeyPair": [ - { - "input": { - "KeyName": "my-key-pair" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a key pair named my-key-pair.", - "id": "ec2-create-key-pair-1", - "title": "To create a key pair" - } - ], - "CreateNatGateway": [ - { - "input": { - "AllocationId": "eipalloc-37fc1a52", - "SubnetId": "subnet-1a2b3c4d" - }, - "output": { - "NatGateway": { - 
"CreateTime": "2015-12-17T12:45:26.732Z", - "NatGatewayAddresses": [ - { - "AllocationId": "eipalloc-37fc1a52" - } - ], - "NatGatewayId": "nat-08d48af2a8e83edfd", - "State": "pending", - "SubnetId": "subnet-1a2b3c4d", - "VpcId": "vpc-1122aabb" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a NAT gateway in subnet subnet-1a2b3c4d and associates an Elastic IP address with the allocation ID eipalloc-37fc1a52 with the NAT gateway.", - "id": "ec2-create-nat-gateway-1", - "title": "To create a NAT gateway" - } - ], - "CreateNetworkAcl": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "output": { - "NetworkAcl": { - "Associations": [ - - ], - "Entries": [ - { - "CidrBlock": "0.0.0.0/0", - "Egress": true, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - }, - { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - } - ], - "IsDefault": false, - "NetworkAclId": "acl-5fb85d36", - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a network ACL for the specified VPC.", - "id": "ec2-create-network-acl-1", - "title": "To create a network ACL" - } - ], - "CreateNetworkAclEntry": [ - { - "input": { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "NetworkAclId": "acl-5fb85d36", - "PortRange": { - "From": 53, - "To": 53 - }, - "Protocol": "udp", - "RuleAction": "allow", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an entry for the specified network ACL. The rule allows ingress traffic from anywhere (0.0.0.0/0) on UDP port 53 (DNS) into any associated subnet.", - "id": "ec2-create-network-acl-entry-1", - "title": "To create a network ACL entry" - } - ], - "CreateNetworkInterface": [ - { - "input": { - "Description": "my network interface", - "Groups": [ - "sg-903004f8" - ], - "PrivateIpAddress": "10.0.2.17", - "SubnetId": "subnet-9d4a7b6c" - }, - "output": { - "NetworkInterface": { - "AvailabilityZone": "us-east-1d", - "Description": "my network interface", - "Groups": [ - { - "GroupId": "sg-903004f8", - "GroupName": "default" - } - ], - "MacAddress": "02:1a:80:41:52:9c", - "NetworkInterfaceId": "eni-e5aa89a3", - "OwnerId": "123456789012", - "PrivateIpAddress": "10.0.2.17", - "PrivateIpAddresses": [ - { - "Primary": true, - "PrivateIpAddress": "10.0.2.17" - } - ], - "RequesterManaged": false, - "SourceDestCheck": true, - "Status": "pending", - "SubnetId": "subnet-9d4a7b6c", - "TagSet": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a network interface for the specified subnet.", - "id": "ec2-create-network-interface-1", - "title": "To create a network interface" - } - ], - "CreatePlacementGroup": [ - { - "input": { - "GroupName": "my-cluster", - "Strategy": "cluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a placement group with the specified name.", - "id": "to-create-a-placement-group-1472712245768", - "title": "To create a placement group" - } - ], - "CreateRoute": [ - { - "input": { - "DestinationCidrBlock": "0.0.0.0/0", - "GatewayId": "igw-c0a643a9", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a route for the specified route table. 
The route matches all traffic (0.0.0.0/0) and routes it to the specified Internet gateway.", - "id": "ec2-create-route-1", - "title": "To create a route" - } - ], - "CreateRouteTable": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "output": { - "RouteTable": { - "Associations": [ - - ], - "PropagatingVgws": [ - - ], - "RouteTableId": "rtb-22574640", - "Routes": [ - { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "local", - "State": "active" - } - ], - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a route table for the specified VPC.", - "id": "ec2-create-route-table-1", - "title": "To create a route table" - } - ], - "CreateSnapshot": [ - { - "input": { - "Description": "This is my root volume snapshot.", - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "Description": "This is my root volume snapshot.", - "OwnerId": "012345678910", - "SnapshotId": "snap-066877671789bd71b", - "StartTime": "2014-02-28T21:06:01.000Z", - "State": "pending", - "Tags": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeSize": 8 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a snapshot of the volume with a volume ID of ``vol-1234567890abcdef0`` and a short description to identify the snapshot.", - "id": "to-create-a-snapshot-1472502529790", - "title": "To create a snapshot" - } - ], - "CreateSpotDatafeedSubscription": [ - { - "input": { - "Bucket": "my-s3-bucket", - "Prefix": "spotdata" - }, - "output": { - "SpotDatafeedSubscription": { - "Bucket": "my-s3-bucket", - "OwnerId": "123456789012", - "Prefix": "spotdata", - "State": "Active" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot Instance data feed for your AWS account.", - "id": "ec2-create-spot-datafeed-subscription-1", - "title": "To create a Spot Instance datafeed" - } - ], - "CreateSubnet": [ - { - "input": { - "CidrBlock": "10.0.1.0/24", - "VpcId": "vpc-a01106c2" - }, - "output": { - "Subnet": { - "AvailabilityZone": "us-west-2c", - "AvailableIpAddressCount": 251, - "CidrBlock": "10.0.1.0/24", - "State": "pending", - "SubnetId": "subnet-9d4a7b6c", - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a subnet in the specified VPC with the specified CIDR block. 
We recommend that you let us select an Availability Zone for you.", - "id": "ec2-create-subnet-1", - "title": "To create a subnet" - } - ], - "CreateTags": [ - { - "input": { - "Resources": [ - "ami-78a54011" - ], - "Tags": [ - { - "Key": "Stack", - "Value": "production" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the tag Stack=production to the specified image, or overwrites an existing tag for the AMI where the tag key is Stack.", - "id": "ec2-create-tags-1", - "title": "To add a tag to a resource" - } - ], - "CreateVolume": [ - { - "input": { - "AvailabilityZone": "us-east-1a", - "Size": 80, - "VolumeType": "gp2" - }, - "output": { - "AvailabilityZone": "us-east-1a", - "CreateTime": "2016-08-29T18:52:32.724Z", - "Encrypted": false, - "Iops": 240, - "Size": 80, - "SnapshotId": "", - "State": "creating", - "VolumeId": "vol-6b60b7c7", - "VolumeType": "gp2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an 80 GiB General Purpose (SSD) volume in the Availability Zone ``us-east-1a``.", - "id": "to-create-a-new-volume-1472496724296", - "title": "To create a new volume" - }, - { - "input": { - "AvailabilityZone": "us-east-1a", - "Iops": 1000, - "SnapshotId": "snap-066877671789bd71b", - "VolumeType": "io1" - }, - "output": { - "Attachments": [ - - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2016-08-29T18:52:32.724Z", - "Iops": 1000, - "Size": 500, - "SnapshotId": "snap-066877671789bd71b", - "State": "creating", - "Tags": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeType": "io1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a new Provisioned IOPS (SSD) volume with 1000 provisioned IOPS from a snapshot in the Availability Zone ``us-east-1a``.", - "id": "to-create-a-new-provisioned-iops-ssd-volume-from-a-snapshot-1472498975176", - "title": "To create a new Provisioned IOPS (SSD) volume from a snapshot" - } - ], - "CreateVpc": [ - { - "input": { - "CidrBlock": "10.0.0.0/16" - }, - "output": { - "Vpc": { - "CidrBlock": "10.0.0.0/16", - "DhcpOptionsId": "dopt-7a8b9c2d", - "InstanceTenancy": "default", - "State": "pending", - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a VPC with the specified CIDR block.", - "id": "ec2-create-vpc-1", - "title": "To create a VPC" - } - ], - "DeleteCustomerGateway": [ - { - "input": { - "CustomerGatewayId": "cgw-0e11f167" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified customer gateway.", - "id": "ec2-delete-customer-gateway-1", - "title": "To delete a customer gateway" - } - ], - "DeleteDhcpOptions": [ - { - "input": { - "DhcpOptionsId": "dopt-d9070ebb" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DHCP options set.", - "id": "ec2-delete-dhcp-options-1", - "title": "To delete a DHCP options set" - } - ], - "DeleteInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified Internet gateway.", - "id": "ec2-delete-internet-gateway-1", - "title": "To delete an Internet gateway" - } - ], - "DeleteKeyPair": [ - { - "input": { - "KeyName": "my-key-pair" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This 
example deletes the specified key pair.", - "id": "ec2-delete-key-pair-1", - "title": "To delete a key pair" - } - ], - "DeleteNatGateway": [ - { - "input": { - "NatGatewayId": "nat-04ae55e711cec5680" - }, - "output": { - "NatGatewayId": "nat-04ae55e711cec5680" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified NAT gateway.", - "id": "ec2-delete-nat-gateway-1", - "title": "To delete a NAT gateway" - } - ], - "DeleteNetworkAcl": [ - { - "input": { - "NetworkAclId": "acl-5fb85d36" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified network ACL.", - "id": "ec2-delete-network-acl-1", - "title": "To delete a network ACL" - } - ], - "DeleteNetworkAclEntry": [ - { - "input": { - "Egress": true, - "NetworkAclId": "acl-5fb85d36", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes ingress rule number 100 from the specified network ACL.", - "id": "ec2-delete-network-acl-entry-1", - "title": "To delete a network ACL entry" - } - ], - "DeleteNetworkInterface": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified network interface.", - "id": "ec2-delete-network-interface-1", - "title": "To delete a network interface" - } - ], - "DeletePlacementGroup": [ - { - "input": { - "GroupName": "my-cluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified placement group.\n", - "id": "to-delete-a-placement-group-1472712349959", - "title": "To delete a placement group" - } - ], - "DeleteRoute": [ - { - "input": { - "DestinationCidrBlock": "0.0.0.0/0", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified route from the specified route table.", - "id": "ec2-delete-route-1", - "title": "To delete a route" - } - ], - "DeleteRouteTable": [ - { - "input": { - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified route table.", - "id": "ec2-delete-route-table-1", - "title": "To delete a route table" - } - ], - "DeleteSnapshot": [ - { - "input": { - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a snapshot with the snapshot ID of ``snap-1234567890abcdef0``. 
If the command succeeds, no output is returned.", - "id": "to-delete-a-snapshot-1472503042567", - "title": "To delete a snapshot" - } - ], - "DeleteSpotDatafeedSubscription": [ - { - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a Spot data feed subscription for the account.", - "id": "ec2-delete-spot-datafeed-subscription-1", - "title": "To cancel a Spot Instance data feed subscription" - } - ], - "DeleteSubnet": [ - { - "input": { - "SubnetId": "subnet-9d4a7b6c" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified subnet.", - "id": "ec2-delete-subnet-1", - "title": "To delete a subnet" - } - ], - "DeleteTags": [ - { - "input": { - "Resources": [ - "ami-78a54011" - ], - "Tags": [ - { - "Key": "Stack", - "Value": "test" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the tag Stack=test from the specified image.", - "id": "ec2-delete-tags-1", - "title": "To delete a tag from a resource" - } - ], - "DeleteVolume": [ - { - "input": { - "VolumeId": "vol-049df61146c4d7901" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes an available volume with the volume ID of ``vol-049df61146c4d7901``. If the command succeeds, no output is returned.", - "id": "to-delete-a-volume-1472503111160", - "title": "To delete a volume" - } - ], - "DeleteVpc": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified VPC.", - "id": "ec2-delete-vpc-1", - "title": "To delete a VPC" - } - ], - "DescribeAccountAttributes": [ - { - "input": { - "AttributeNames": [ - "supported-platforms" - ] - }, - "output": { - "AccountAttributes": [ - { - "AttributeName": "supported-platforms", - "AttributeValues": [ - { - "AttributeValue": "EC2" - }, - { - "AttributeValue": "VPC" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the supported-platforms attribute for your AWS account.", - "id": "ec2-describe-account-attributes-1", - "title": "To describe a single attribute for your AWS account" - }, - { - "output": { - "AccountAttributes": [ - { - "AttributeName": "supported-platforms", - "AttributeValues": [ - { - "AttributeValue": "EC2" - }, - { - "AttributeValue": "VPC" - } - ] - }, - { - "AttributeName": "vpc-max-security-groups-per-interface", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "max-elastic-ips", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "max-instances", - "AttributeValues": [ - { - "AttributeValue": "20" - } - ] - }, - { - "AttributeName": "vpc-max-elastic-ips", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "default-vpc", - "AttributeValues": [ - { - "AttributeValue": "none" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the attributes for your AWS account.", - "id": "ec2-describe-account-attributes-2", - "title": "To describe all attributes for your AWS account" - } - ], - "DescribeAddresses": [ - { - "output": { - "Addresses": [ - { - "Domain": "standard", - "InstanceId": "i-1234567890abcdef0", - "PublicIp": "198.51.100.0" - }, - { - "AllocationId": "eipalloc-12345678", - "AssociationId": "eipassoc-12345678", - 
"Domain": "vpc", - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-12345678", - "NetworkInterfaceOwnerId": "123456789012", - "PrivateIpAddress": "10.0.1.241", - "PublicIp": "203.0.113.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses.", - "id": "ec2-describe-addresses-1", - "title": "To describe your Elastic IP addresses" - }, - { - "input": { - "Filters": [ - { - "Name": "domain", - "Values": [ - "vpc" - ] - } - ] - }, - "output": { - "Addresses": [ - { - "AllocationId": "eipalloc-12345678", - "AssociationId": "eipassoc-12345678", - "Domain": "vpc", - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-12345678", - "NetworkInterfaceOwnerId": "123456789012", - "PrivateIpAddress": "10.0.1.241", - "PublicIp": "203.0.113.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses for use with instances in a VPC.", - "id": "ec2-describe-addresses-2", - "title": "To describe your Elastic IP addresses for EC2-VPC" - }, - { - "input": { - "Filters": [ - { - "Name": "domain", - "Values": [ - "standard" - ] - } - ] - }, - "output": { - "Addresses": [ - { - "Domain": "standard", - "InstanceId": "i-1234567890abcdef0", - "PublicIp": "198.51.100.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses for use with instances in EC2-Classic.", - "id": "ec2-describe-addresses-3", - "title": "To describe your Elastic IP addresses for EC2-Classic" - } - ], - "DescribeAvailabilityZones": [ - { - "output": { - "AvailabilityZones": [ - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1b" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1c" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1d" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1e" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Availability Zones that are available to you. 
The response includes Availability Zones only for the current region.", - "id": "ec2-describe-availability-zones-1", - "title": "To describe your Availability Zones" - } - ], - "DescribeCustomerGateways": [ - { - "input": { - "CustomerGatewayIds": [ - "cgw-0e11f167" - ] - }, - "output": { - "CustomerGateways": [ - { - "BgpAsn": "65534", - "CustomerGatewayId": "cgw-0e11f167", - "IpAddress": "12.1.2.3", - "State": "available", - "Type": "ipsec.1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified customer gateway.", - "id": "ec2-describe-customer-gateways-1", - "title": "To describe a customer gateway" - } - ], - "DescribeDhcpOptions": [ - { - "input": { - "DhcpOptionsIds": [ - "dopt-d9070ebb" - ] - }, - "output": { - "DhcpOptions": [ - { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - "10.2.5.2", - "10.2.5.1" - ] - } - ], - "DhcpOptionsId": "dopt-d9070ebb" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified DHCP options set.", - "id": "ec2-describe-dhcp-options-1", - "title": "To describe a DHCP options set" - } - ], - "DescribeInstanceAttribute": [ - { - "input": { - "Attribute": "instanceType", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": { - "Value": "t1.micro" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the instance type of the specified instance.\n", - "id": "to-describe-the-instance-type-1472712432132", - "title": "To describe the instance type" - }, - { - "input": { - "Attribute": "disableApiTermination", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "DisableApiTermination": { - "Value": "false" - }, - "InstanceId": "i-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``disableApiTermination`` attribute of the specified instance.\n", - "id": "to-describe-the-disableapitermination-attribute-1472712533466", - "title": "To describe the disableApiTermination attribute" - }, - { - "input": { - "Attribute": "blockDeviceMapping", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "AttachTime": "2013-05-17T22:42:34.000Z", - "DeleteOnTermination": true, - "Status": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - }, - { - "DeviceName": "/dev/sdf", - "Ebs": { - "AttachTime": "2013-09-10T23:07:00.000Z", - "DeleteOnTermination": false, - "Status": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - } - ], - "InstanceId": "i-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``blockDeviceMapping`` attribute of the specified instance.\n", - "id": "to-describe-the-block-device-mapping-for-an-instance-1472712645423", - "title": "To describe the block device mapping for an instance" - } - ], - "DescribeInternetGateways": [ - { - "input": { - "Filters": [ - { - "Name": "attachment.vpc-id", - "Values": [ - "vpc-a01106c2" - ] - } - ] - }, - "output": { - "InternetGateways": [ - { - "Attachments": [ - { - "State": "available", - "VpcId": "vpc-a01106c2" - } - ], - "InternetGatewayId": "igw-c0a643a9", - "Tags": [ - - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Internet gateway for 
the specified VPC.", - "id": "ec2-describe-internet-gateways-1", - "title": "To describe the Internet gateway for a VPC" - } - ], - "DescribeKeyPairs": [ - { - "input": { - "KeyNames": [ - "my-key-pair" - ] - }, - "output": { - "KeyPairs": [ - { - "KeyFingerprint": "1f:51:ae:28:bf:89:e9:d8:1f:25:5d:37:2d:7d:b8:ca:9f:f5:f1:6f", - "KeyName": "my-key-pair" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example displays the fingerprint for the specified key.", - "id": "ec2-describe-key-pairs-1", - "title": "To display a key pair" - } - ], - "DescribeMovingAddresses": [ - { - "output": { - "MovingAddressStatuses": [ - { - "MoveStatus": "MovingToVpc", - "PublicIp": "198.51.100.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your moving Elastic IP addresses.", - "id": "ec2-describe-moving-addresses-1", - "title": "To describe your moving addresses" - } - ], - "DescribeNatGateways": [ - { - "input": { - "Filters": [ - { - "Name": "vpc-id", - "Values": [ - "vpc-1a2b3c4d" - ] - } - ] - }, - "output": { - "NatGateways": [ - { - "CreateTime": "2015-12-01T12:26:55.983Z", - "NatGatewayAddresses": [ - { - "AllocationId": "eipalloc-89c620ec", - "NetworkInterfaceId": "eni-9dec76cd", - "PrivateIp": "10.0.0.149", - "PublicIp": "198.11.222.333" - } - ], - "NatGatewayId": "nat-05dba92075d71c408", - "State": "available", - "SubnetId": "subnet-847e4dc2", - "VpcId": "vpc-1a2b3c4d" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the NAT gateway for the specified VPC.", - "id": "ec2-describe-nat-gateways-1", - "title": "To describe a NAT gateway" - } - ], - "DescribeNetworkAcls": [ - { - "input": { - "NetworkAclIds": [ - "acl-5fb85d36" - ] - }, - "output": { - "NetworkAcls": [ - { - "Associations": [ - { - "NetworkAclAssociationId": "aclassoc-66ea5f0b", - "NetworkAclId": "acl-9aeb5ef7", - "SubnetId": "subnet-65ea5f08" - } - ], - "Entries": [ - { - "CidrBlock": "0.0.0.0/0", - "Egress": true, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - }, - { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - } - ], - "IsDefault": false, - "NetworkAclId": "acl-5fb85d36", - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified network ACL.", - "id": "ec2-", - "title": "To describe a network ACL" - } - ], - "DescribeNetworkInterfaceAttribute": [ - { - "input": { - "Attribute": "attachment", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Attachment": { - "AttachTime": "2015-05-21T20:02:20.000Z", - "AttachmentId": "eni-attach-43348162", - "DeleteOnTermination": true, - "DeviceIndex": 0, - "InstanceId": "i-1234567890abcdef0", - "InstanceOwnerId": "123456789012", - "Status": "attached" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the attachment attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-1", - "title": "To describe the attachment attribute of a network interface" - }, - { - "input": { - "Attribute": "description", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Description": { - "Value": "My description" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - 
}, - "output": { - } - }, - "description": "This example describes the description attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-2", - "title": "To describe the description attribute of a network interface" - }, - { - "input": { - "Attribute": "groupSet", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Groups": [ - { - "GroupId": "sg-903004f8", - "GroupName": "my-security-group" - } - ], - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the groupSet attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-3", - "title": "To describe the groupSet attribute of a network interface" - }, - { - "input": { - "Attribute": "sourceDestCheck", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "NetworkInterfaceId": "eni-686ea200", - "SourceDestCheck": { - "Value": true - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the sourceDestCheck attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-4", - "title": "To describe the sourceDestCheck attribute of a network interface" - } - ], - "DescribeNetworkInterfaces": [ - { - "input": { - "NetworkInterfaceIds": [ - "eni-e5aa89a3" - ] - }, - "output": { - "NetworkInterfaces": [ - { - "Association": { - "AssociationId": "eipassoc-0fbb766a", - "IpOwnerId": "123456789012", - "PublicDnsName": "ec2-203-0-113-12.compute-1.amazonaws.com", - "PublicIp": "203.0.113.12" - }, - "Attachment": { - "AttachTime": "2013-11-30T23:36:42.000Z", - "AttachmentId": "eni-attach-66c4350a", - "DeleteOnTermination": false, - "DeviceIndex": 1, - "InstanceId": "i-1234567890abcdef0", - "InstanceOwnerId": "123456789012", - "Status": "attached" - }, - "AvailabilityZone": "us-east-1d", - "Description": "my network interface", - "Groups": [ - { - "GroupId": "sg-8637d3e3", - "GroupName": "default" - } - ], - "MacAddress": "02:2f:8f:b0:cf:75", - "NetworkInterfaceId": "eni-e5aa89a3", - "OwnerId": "123456789012", - "PrivateDnsName": "ip-10-0-1-17.ec2.internal", - "PrivateIpAddress": "10.0.1.17", - "PrivateIpAddresses": [ - { - "Association": { - "AssociationId": "eipassoc-0fbb766a", - "IpOwnerId": "123456789012", - "PublicDnsName": "ec2-203-0-113-12.compute-1.amazonaws.com", - "PublicIp": "203.0.113.12" - }, - "Primary": true, - "PrivateDnsName": "ip-10-0-1-17.ec2.internal", - "PrivateIpAddress": "10.0.1.17" - } - ], - "RequesterManaged": false, - "SourceDestCheck": true, - "Status": "in-use", - "SubnetId": "subnet-b61f49f0", - "TagSet": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "ec2-describe-network-interfaces-1", - "title": "To describe a network interface" - } - ], - "DescribeRegions": [ - { - "output": { - "Regions": [ - { - "Endpoint": "ec2.ap-south-1.amazonaws.com", - "RegionName": "ap-south-1" - }, - { - "Endpoint": "ec2.eu-west-1.amazonaws.com", - "RegionName": "eu-west-1" - }, - { - "Endpoint": "ec2.ap-southeast-1.amazonaws.com", - "RegionName": "ap-southeast-1" - }, - { - "Endpoint": "ec2.ap-southeast-2.amazonaws.com", - "RegionName": "ap-southeast-2" - }, - { - "Endpoint": "ec2.eu-central-1.amazonaws.com", - "RegionName": "eu-central-1" - }, - { - "Endpoint": "ec2.ap-northeast-2.amazonaws.com", - "RegionName": "ap-northeast-2" - }, - { - "Endpoint": "ec2.ap-northeast-1.amazonaws.com", - 
"RegionName": "ap-northeast-1" - }, - { - "Endpoint": "ec2.us-east-1.amazonaws.com", - "RegionName": "us-east-1" - }, - { - "Endpoint": "ec2.sa-east-1.amazonaws.com", - "RegionName": "sa-east-1" - }, - { - "Endpoint": "ec2.us-west-1.amazonaws.com", - "RegionName": "us-west-1" - }, - { - "Endpoint": "ec2.us-west-2.amazonaws.com", - "RegionName": "us-west-2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all the regions that are available to you.", - "id": "ec2-describe-regions-1", - "title": "To describe your regions" - } - ], - "DescribeRouteTables": [ - { - "input": { - "RouteTableIds": [ - "rtb-1f382e7d" - ] - }, - "output": { - "RouteTables": [ - { - "Associations": [ - { - "Main": true, - "RouteTableAssociationId": "rtbassoc-d8ccddba", - "RouteTableId": "rtb-1f382e7d" - } - ], - "PropagatingVgws": [ - - ], - "RouteTableId": "rtb-1f382e7d", - "Routes": [ - { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "local", - "State": "active" - } - ], - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified route table.", - "id": "ec2-describe-route-tables-1", - "title": "To describe a route table" - } - ], - "DescribeScheduledInstanceAvailability": [ - { - "input": { - "FirstSlotStartTimeRange": { - "EarliestTime": "2016-01-31T00:00:00Z", - "LatestTime": "2016-01-31T04:00:00Z" - }, - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDays": [ - 1 - ] - } - }, - "output": { - "ScheduledInstanceAvailabilitySet": [ - { - "AvailabilityZone": "us-west-2b", - "AvailableInstanceCount": 20, - "FirstSlotStartTime": "2016-01-31T00:00:00Z", - "HourlyPrice": "0.095", - "InstanceType": "c4.large", - "MaxTermDurationInDays": 366, - "MinTermDurationInDays": 366, - "NetworkPlatform": "EC2-VPC", - "Platform": "Linux/UNIX", - "PurchaseToken": "eyJ2IjoiMSIsInMiOjEsImMiOi...", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false - }, - "SlotDurationInHours": 23, - "TotalScheduledInstanceHours": 1219 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes a schedule that occurs every week on Sunday, starting on the specified date. 
Note that the output contains a single schedule as an example.", - "id": "ec2-describe-scheduled-instance-availability-1", - "title": "To describe an available schedule" - } - ], - "DescribeScheduledInstances": [ - { - "input": { - "ScheduledInstanceIds": [ - "sci-1234-1234-1234-1234-123456789012" - ] - }, - "output": { - "ScheduledInstanceSet": [ - { - "AvailabilityZone": "us-west-2b", - "CreateDate": "2016-01-25T21:43:38.612Z", - "HourlyPrice": "0.095", - "InstanceCount": 1, - "InstanceType": "c4.large", - "NetworkPlatform": "EC2-VPC", - "NextSlotStartTime": "2016-01-31T09:00:00Z", - "Platform": "Linux/UNIX", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false, - "OccurrenceUnit": "" - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012", - "SlotDurationInHours": 32, - "TermEndDate": "2017-01-31T09:00:00Z", - "TermStartDate": "2016-01-31T09:00:00Z", - "TotalScheduledInstanceHours": 1696 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Scheduled Instance.", - "id": "ec2-describe-scheduled-instances-1", - "title": "To describe your Scheduled Instances" - } - ], - "DescribeSnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "SnapshotId": "snap-066877671789bd71b" - }, - "output": { - "CreateVolumePermissions": [ - - ], - "SnapshotId": "snap-066877671789bd71b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``createVolumePermission`` attribute on a snapshot with the snapshot ID of ``snap-066877671789bd71b``.", - "id": "to-describe-snapshot-attributes-1472503199736", - "title": "To describe snapshot attributes" - } - ], - "DescribeSnapshots": [ - { - "input": { - "SnapshotIds": [ - "snap-1234567890abcdef0" - ] - }, - "output": { - "NextToken": "", - "Snapshots": [ - { - "Description": "This is my snapshot.", - "OwnerId": "012345678910", - "Progress": "100%", - "SnapshotId": "snap-1234567890abcdef0", - "StartTime": "2014-02-28T21:28:32.000Z", - "State": "completed", - "VolumeId": "vol-049df61146c4d7901", - "VolumeSize": 8 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes a snapshot with the snapshot ID of ``snap-1234567890abcdef0``.", - "id": "to-describe-a-snapshot-1472503807850", - "title": "To describe a snapshot" - }, - { - "input": { - "Filters": [ - { - "Name": "status", - "Values": [ - "pending" - ] - } - ], - "OwnerIds": [ - "012345678910" - ] - }, - "output": { - "NextToken": "", - "Snapshots": [ - { - "Description": "This is my copied snapshot.", - "OwnerId": "012345678910", - "Progress": "87%", - "SnapshotId": "snap-066877671789bd71b", - "StartTime": "2014-02-28T21:37:27.000Z", - "State": "pending", - "VolumeId": "vol-1234567890abcdef0", - "VolumeSize": 8 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all snapshots owned by the ID 012345678910 that are in the ``pending`` status.", - "id": "to-describe-snapshots-using-filters-1472503929793", - "title": "To describe snapshots using filters" - } - ], - "DescribeSpotDatafeedSubscription": [ - { - "output": { - "SpotDatafeedSubscription": { - "Bucket": "my-s3-bucket", - "OwnerId": "123456789012", - "Prefix": "spotdata", - "State": "Active" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Spot Instance 
datafeed subscription for your AWS account.", - "id": "ec2-describe-spot-datafeed-subscription-1", - "title": "To describe the datafeed for your AWS account" - } - ], - "DescribeSpotFleetInstances": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "output": { - "ActiveInstances": [ - { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": "m3.medium", - "SpotInstanceRequestId": "sir-08b93456" - } - ], - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the Spot Instances associated with the specified Spot fleet.", - "id": "ec2-describe-spot-fleet-instances-1", - "title": "To describe the Spot Instances associated with a Spot fleet" - } - ], - "DescribeSpotFleetRequestHistory": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "StartTime": "2015-05-26T00:00:00Z" - }, - "output": { - "HistoryRecords": [ - { - "EventInformation": { - "EventSubType": "submitted" - }, - "EventType": "fleetRequestChange", - "Timestamp": "2015-05-26T23:17:20.697Z" - }, - { - "EventInformation": { - "EventSubType": "active" - }, - "EventType": "fleetRequestChange", - "Timestamp": "2015-05-26T23:17:20.873Z" - }, - { - "EventInformation": { - "EventSubType": "launched", - "InstanceId": "i-1234567890abcdef0" - }, - "EventType": "instanceChange", - "Timestamp": "2015-05-26T23:21:21.712Z" - }, - { - "EventInformation": { - "EventSubType": "launched", - "InstanceId": "i-1234567890abcdef1" - }, - "EventType": "instanceChange", - "Timestamp": "2015-05-26T23:21:21.816Z" - } - ], - "NextToken": "CpHNsscimcV5oH7bSbub03CI2Qms5+ypNpNm+53MNlR0YcXAkp0xFlfKf91yVxSExmbtma3awYxMFzNA663ZskT0AHtJ6TCb2Z8bQC2EnZgyELbymtWPfpZ1ZbauVg+P+TfGlWxWWB/Vr5dk5d4LfdgA/DRAHUrYgxzrEXAMPLE=", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "StartTime": "2015-05-26T00:00:00Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example returns the history for the specified Spot fleet starting at the specified time.", - "id": "ec2-describe-spot-fleet-request-history-1", - "title": "To describe Spot fleet history" - } - ], - "DescribeSpotFleetRequests": [ - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ] - }, - "output": { - "SpotFleetRequestConfigs": [ - { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "EbsOptimized": false, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "cc2.8xlarge", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "SecondaryPrivateIpAddressCount": 0, - "SubnetId": "subnet-a61dafcf" - } - ] - }, - { - "EbsOptimized": false, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "r3.8xlarge", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "SecondaryPrivateIpAddressCount": 0, - "SubnetId": "subnet-a61dafcf" - } - ] - } - ], - "SpotPrice": "0.05", - "TargetCapacity": 20 - }, - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "SpotFleetRequestState": "active" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Spot fleet request.", - "id": "ec2-describe-spot-fleet-requests-1", - "title": "To describe a Spot fleet request" - } - ], - 
"DescribeSpotInstanceRequests": [ - { - "input": { - "SpotInstanceRequestIds": [ - "sir-08b93456" - ] - }, - "output": { - "SpotInstanceRequests": [ - { - "CreateTime": "2014-04-30T18:14:55.000Z", - "InstanceId": "i-1234567890abcdef0", - "LaunchSpecification": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "DeleteOnTermination": true, - "VolumeSize": 8, - "VolumeType": "standard" - } - } - ], - "EbsOptimized": false, - "ImageId": "ami-7aba833f", - "InstanceType": "m1.small", - "KeyName": "my-key-pair", - "SecurityGroups": [ - { - "GroupId": "sg-e38f24a7", - "GroupName": "my-security-group" - } - ] - }, - "LaunchedAvailabilityZone": "us-west-1b", - "ProductDescription": "Linux/UNIX", - "SpotInstanceRequestId": "sir-08b93456", - "SpotPrice": "0.010000", - "State": "active", - "Status": { - "Code": "fulfilled", - "Message": "Your Spot request is fulfilled.", - "UpdateTime": "2014-04-30T18:16:21.000Z" - }, - "Type": "one-time" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Spot Instance request.", - "id": "ec2-describe-spot-instance-requests-1", - "title": "To describe a Spot Instance request" - } - ], - "DescribeSpotPriceHistory": [ - { - "input": { - "EndTime": "2014-01-06T08:09:10", - "InstanceTypes": [ - "m1.xlarge" - ], - "ProductDescriptions": [ - "Linux/UNIX (Amazon VPC)" - ], - "StartTime": "2014-01-06T07:08:09" - }, - "output": { - "SpotPriceHistory": [ - { - "AvailabilityZone": "us-west-1a", - "InstanceType": "m1.xlarge", - "ProductDescription": "Linux/UNIX (Amazon VPC)", - "SpotPrice": "0.080000", - "Timestamp": "2014-01-06T04:32:53.000Z" - }, - { - "AvailabilityZone": "us-west-1c", - "InstanceType": "m1.xlarge", - "ProductDescription": "Linux/UNIX (Amazon VPC)", - "SpotPrice": "0.080000", - "Timestamp": "2014-01-05T11:28:26.000Z" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example returns the Spot Price history for m1.xlarge, Linux/UNIX (Amazon VPC) instances for a particular day in January.", - "id": "ec2-describe-spot-price-history-1", - "title": "To describe Spot price history for Linux/UNIX (Amazon VPC)" - } - ], - "DescribeSubnets": [ - { - "input": { - "Filters": [ - { - "Name": "vpc-id", - "Values": [ - "vpc-a01106c2" - ] - } - ] - }, - "output": { - "Subnets": [ - { - "AvailabilityZone": "us-east-1c", - "AvailableIpAddressCount": 251, - "CidrBlock": "10.0.1.0/24", - "DefaultForAz": false, - "MapPublicIpOnLaunch": false, - "State": "available", - "SubnetId": "subnet-9d4a7b6c", - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the subnets for the specified VPC.", - "id": "ec2-describe-subnets-1", - "title": "To describe the subnets for a VPC" - } - ], - "DescribeTags": [ - { - "input": { - "Filters": [ - { - "Name": "resource-id", - "Values": [ - "i-1234567890abcdef8" - ] - } - ] - }, - "output": { - "Tags": [ - { - "Key": "Stack", - "ResourceId": "i-1234567890abcdef8", - "ResourceType": "instance", - "Value": "test" - }, - { - "Key": "Name", - "ResourceId": "i-1234567890abcdef8", - "ResourceType": "instance", - "Value": "Beta Server" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the tags for the specified instance.", - "id": "ec2-describe-tags-1", - "title": "To describe the tags for a single resource" - } - ], - "DescribeVolumeAttribute": [ - { - "input": { - "Attribute": 
"autoEnableIO", - "VolumeId": "vol-049df61146c4d7901" - }, - "output": { - "AutoEnableIO": { - "Value": false - }, - "VolumeId": "vol-049df61146c4d7901" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``autoEnableIo`` attribute of the volume with the ID ``vol-049df61146c4d7901``.", - "id": "to-describe-a-volume-attribute-1472505773492", - "title": "To describe a volume attribute" - } - ], - "DescribeVolumeStatus": [ - { - "input": { - "VolumeIds": [ - "vol-1234567890abcdef0" - ] - }, - "output": { - "VolumeStatuses": [ - { - "Actions": [ - - ], - "AvailabilityZone": "us-east-1a", - "Events": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeStatus": { - "Details": [ - { - "Name": "io-enabled", - "Status": "passed" - }, - { - "Name": "io-performance", - "Status": "not-applicable" - } - ], - "Status": "ok" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the status for the volume ``vol-1234567890abcdef0``.", - "id": "to-describe-the-status-of-a-single-volume-1472507016193", - "title": "To describe the status of a single volume" - }, - { - "input": { - "Filters": [ - { - "Name": "volume-status.status", - "Values": [ - "impaired" - ] - } - ] - }, - "output": { - "VolumeStatuses": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the status for all volumes that are impaired. In this example output, there are no impaired volumes.", - "id": "to-describe-the-status-of-impaired-volumes-1472507239821", - "title": "To describe the status of impaired volumes" - } - ], - "DescribeVolumes": [ - { - "input": { - }, - "output": { - "NextToken": "", - "Volumes": [ - { - "Attachments": [ - { - "AttachTime": "2013-12-18T22:35:00.000Z", - "DeleteOnTermination": true, - "Device": "/dev/sda1", - "InstanceId": "i-1234567890abcdef0", - "State": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2013-12-18T22:35:00.084Z", - "Size": 8, - "SnapshotId": "snap-1234567890abcdef0", - "State": "in-use", - "VolumeId": "vol-049df61146c4d7901", - "VolumeType": "standard" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your volumes in the default region.", - "id": "to-describe-all-volumes-1472506358883", - "title": "To describe all volumes" - }, - { - "input": { - "Filters": [ - { - "Name": "attachment.instance-id", - "Values": [ - "i-1234567890abcdef0" - ] - }, - { - "Name": "attachment.delete-on-termination", - "Values": [ - "true" - ] - } - ] - }, - "output": { - "Volumes": [ - { - "Attachments": [ - { - "AttachTime": "2013-12-18T22:35:00.000Z", - "DeleteOnTermination": true, - "Device": "/dev/sda1", - "InstanceId": "i-1234567890abcdef0", - "State": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2013-12-18T22:35:00.084Z", - "Size": 8, - "SnapshotId": "snap-1234567890abcdef0", - "State": "in-use", - "VolumeId": "vol-049df61146c4d7901", - "VolumeType": "standard" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all volumes that are both attached to the instance with the ID i-1234567890abcdef0 and set to delete when the instance terminates.", - "id": "to-describe-volumes-that-are-attached-to-a-specific-instance-1472506613578", - "title": "To describe volumes that are attached to a 
specific instance" - } - ], - "DescribeVpcAttribute": [ - { - "input": { - "Attribute": "enableDnsSupport", - "VpcId": "vpc-a01106c2" - }, - "output": { - "EnableDnsSupport": { - "Value": true - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the enableDnsSupport attribute. This attribute indicates whether DNS resolution is enabled for the VPC. If this attribute is true, the Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses; otherwise, it does not.", - "id": "ec2-describe-vpc-attribute-1", - "title": "To describe the enableDnsSupport attribute" - }, - { - "input": { - "Attribute": "enableDnsHostnames", - "VpcId": "vpc-a01106c2" - }, - "output": { - "EnableDnsHostnames": { - "Value": true - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the enableDnsHostnames attribute. This attribute indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is true, instances in the VPC get DNS hostnames; otherwise, they do not.", - "id": "ec2-describe-vpc-attribute-2", - "title": "To describe the enableDnsHostnames attribute" - } - ], - "DescribeVpcs": [ - { - "input": { - "VpcIds": [ - "vpc-a01106c2" - ] - }, - "output": { - "Vpcs": [ - { - "CidrBlock": "10.0.0.0/16", - "DhcpOptionsId": "dopt-7a8b9c2d", - "InstanceTenancy": "default", - "IsDefault": false, - "State": "available", - "Tags": [ - { - "Key": "Name", - "Value": "MyVPC" - } - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified VPC.", - "id": "ec2-describe-vpcs-1", - "title": "To describe a VPC" - } - ], - "DetachInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified Internet gateway from the specified VPC.", - "id": "ec2-detach-internet-gateway-1", - "title": "To detach an Internet gateway from a VPC" - } - ], - "DetachNetworkInterface": [ - { - "input": { - "AttachmentId": "eni-attach-66c4350a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified network interface from its attached instance.", - "id": "ec2-detach-network-interface-1", - "title": "To detach a network interface from an instance" - } - ], - "DetachVolume": [ - { - "input": { - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "AttachTime": "2014-02-27T19:23:06.000Z", - "Device": "/dev/sdb", - "InstanceId": "i-1234567890abcdef0", - "State": "detaching", - "VolumeId": "vol-049df61146c4d7901" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the volume (``vol-049df61146c4d7901``) from the instance it is attached to.", - "id": "to-detach-a-volume-from-an-instance-1472507977694", - "title": "To detach a volume from an instance" - } - ], - "DisableVgwRoutePropagation": [ - { - "input": { - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables the specified virtual private gateway from propagating static routes to the specified route table.", - "id": "ec2-disable-vgw-route-propagation-1", - "title": "To disable route propagation" - } - ], - 
"DisassociateAddress": [ - { - "input": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates an Elastic IP address from an instance in a VPC.", - "id": "ec2-disassociate-address-1", - "title": "To disassociate an Elastic IP address in EC2-VPC" - }, - { - "input": { - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates an Elastic IP address from an instance in EC2-Classic.", - "id": "ec2-disassociate-address-2", - "title": "To disassociate an Elastic IP addresses in EC2-Classic" - } - ], - "DisassociateRouteTable": [ - { - "input": { - "AssociationId": "rtbassoc-781d0d1a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates the specified route table from its associated subnet.", - "id": "ec2-disassociate-route-table-1", - "title": "To disassociate a route table" - } - ], - "EnableVgwRoutePropagation": [ - { - "input": { - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables the specified virtual private gateway to propagate static routes to the specified route table.", - "id": "ec2-enable-vgw-route-propagation-1", - "title": "To enable route propagation" - } - ], - "EnableVolumeIO": [ - { - "input": { - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables I/O on volume ``vol-1234567890abcdef0``.", - "id": "to-enable-io-for-a-volume-1472508114867", - "title": "To enable I/O for a volume" - } - ], - "ModifyNetworkInterfaceAttribute": [ - { - "input": { - "Attachment": { - "AttachmentId": "eni-attach-43348162", - "DeleteOnTermination": false - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the attachment attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-1", - "title": "To modify the attachment attribute of a network interface" - }, - { - "input": { - "Description": "My description", - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the description attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-2", - "title": "To modify the description attribute of a network interface" - }, - { - "input": { - "Groups": [ - "sg-903004f8", - "sg-1a2b3c4d" - ], - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command modifies the groupSet attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-3", - "title": "To modify the groupSet attribute of a network interface" - }, - { - "input": { - "NetworkInterfaceId": "eni-686ea200", - "SourceDestCheck": false - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command modifies the sourceDestCheck attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-4", - "title": "To modify the sourceDestCheck attribute of a network interface" - } - ], - "ModifySnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - 
"OperationType": "remove", - "SnapshotId": "snap-1234567890abcdef0", - "UserIds": [ - "123456789012" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies snapshot ``snap-1234567890abcdef0`` to remove the create volume permission for a user with the account ID ``123456789012``. If the command succeeds, no output is returned.", - "id": "to-modify-a-snapshot-attribute-1472508385907", - "title": "To modify a snapshot attribute" - }, - { - "input": { - "Attribute": "createVolumePermission", - "GroupNames": [ - "all" - ], - "OperationType": "add", - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example makes the snapshot ``snap-1234567890abcdef0`` public.", - "id": "to-make-a-snapshot-public-1472508470529", - "title": "To make a snapshot public" - } - ], - "ModifySpotFleetRequest": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "TargetCapacity": 20 - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example increases the target capacity of the specified Spot fleet request.", - "id": "ec2-modify-spot-fleet-request-1", - "title": "To increase the target capacity of a Spot fleet request" - }, - { - "input": { - "ExcessCapacityTerminationPolicy": "NoTermination ", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "TargetCapacity": 10 - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example decreases the target capacity of the specified Spot fleet request without terminating any Spot Instances as a result.", - "id": "ec2-modify-spot-fleet-request-2", - "title": "To decrease the target capacity of a Spot fleet request" - } - ], - "ModifySubnetAttribute": [ - { - "input": { - "MapPublicIpOnLaunch": true, - "SubnetId": "subnet-1a2b3c4d" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the specified subnet so that all instances launched into this subnet are assigned a public IP address.", - "id": "ec2-modify-subnet-attribute-1", - "title": "To change a subnet's public IP addressing behavior" - } - ], - "ModifyVolumeAttribute": [ - { - "input": { - "AutoEnableIO": { - "Value": true - }, - "DryRun": true, - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example sets the ``autoEnableIo`` attribute of the volume with the ID ``vol-1234567890abcdef0`` to ``true``. If the command succeeds, no output is returned.", - "id": "to-modify-a-volume-attribute-1472508596749", - "title": "To modify a volume attribute" - } - ], - "ModifyVpcAttribute": [ - { - "input": { - "EnableDnsSupport": { - "Value": false - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the enableDnsSupport attribute. This attribute indicates whether DNS resolution is enabled for the VPC. 
If this attribute is true, the Amazon DNS server resolves DNS hostnames for instances in the VPC to their corresponding IP addresses; otherwise, it does not.", - "id": "ec2-modify-vpc-attribute-1", - "title": "To modify the enableDnsSupport attribute" - }, - { - "input": { - "EnableDnsHostnames": { - "Value": false - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the enableDnsHostnames attribute. This attribute indicates whether instances launched in the VPC get DNS hostnames. If this attribute is true, instances in the VPC get DNS hostnames; otherwise, they do not.", - "id": "ec2-modify-vpc-attribute-2", - "title": "To modify the enableDnsHostnames attribute" - } - ], - "MoveAddressToVpc": [ - { - "input": { - "PublicIp": "54.123.4.56" - }, - "output": { - "Status": "MoveInProgress" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example moves the specified Elastic IP address to the EC2-VPC platform.", - "id": "ec2-move-address-to-vpc-1", - "title": "To move an address to EC2-VPC" - } - ], - "PurchaseScheduledInstances": [ - { - "input": { - "PurchaseRequests": [ - { - "InstanceCount": 1, - "PurchaseToken": "eyJ2IjoiMSIsInMiOjEsImMiOi..." - } - ] - }, - "output": { - "ScheduledInstanceSet": [ - { - "AvailabilityZone": "us-west-2b", - "CreateDate": "2016-01-25T21:43:38.612Z", - "HourlyPrice": "0.095", - "InstanceCount": 1, - "InstanceType": "c4.large", - "NetworkPlatform": "EC2-VPC", - "NextSlotStartTime": "2016-01-31T09:00:00Z", - "Platform": "Linux/UNIX", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false, - "OccurrenceUnit": "" - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012", - "SlotDurationInHours": 32, - "TermEndDate": "2017-01-31T09:00:00Z", - "TermStartDate": "2016-01-31T09:00:00Z", - "TotalScheduledInstanceHours": 1696 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example purchases a Scheduled Instance.", - "id": "ec2-purchase-scheduled-instances-1", - "title": "To purchase a Scheduled Instance" - } - ], - "ReleaseAddress": [ - { - "input": { - "AllocationId": "eipalloc-64d5890a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example releases an Elastic IP address for use with instances in a VPC.", - "id": "ec2-release-address-1", - "title": "To release an Elastic IP address for EC2-VPC" - }, - { - "input": { - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example releases an Elastic IP address for use with instances in EC2-Classic.", - "id": "ec2-release-address-2", - "title": "To release an Elastic IP addresses for EC2-Classic" - } - ], - "ReplaceNetworkAclAssociation": [ - { - "input": { - "AssociationId": "aclassoc-e5b95c8c", - "NetworkAclId": "acl-5fb85d36" - }, - "output": { - "NewAssociationId": "aclassoc-3999875b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified network ACL with the subnet for the specified network ACL association.", - "id": "ec2-replace-network-acl-association-1", - "title": "To replace the network ACL associated with a subnet" - } - ], - "ReplaceNetworkAclEntry": [ - { - "input": { - "CidrBlock": "203.0.113.12/24", - "Egress": false, - "NetworkAclId": "acl-5fb85d36", - "PortRange": { - "From": 53, - "To": 53 - }, - 
"Protocol": "udp", - "RuleAction": "allow", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example replaces an entry for the specified network ACL. The new rule 100 allows ingress traffic from 203.0.113.12/24 on UDP port 53 (DNS) into any associated subnet.", - "id": "ec2-replace-network-acl-entry-1", - "title": "To replace a network ACL entry" - } - ], - "ReplaceRoute": [ - { - "input": { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example replaces the specified route in the specified table table. The new route matches the specified CIDR and sends the traffic to the specified virtual private gateway.", - "id": "ec2-replace-route-1", - "title": "To replace a route" - } - ], - "ReplaceRouteTableAssociation": [ - { - "input": { - "AssociationId": "rtbassoc-781d0d1a", - "RouteTableId": "rtb-22574640" - }, - "output": { - "NewAssociationId": "rtbassoc-3a1f0f58" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified route table with the subnet for the specified route table association.", - "id": "ec2-replace-route-table-association-1", - "title": "To replace the route table associated with a subnet" - } - ], - "RequestSpotFleet": [ - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "SecurityGroups": [ - { - "GroupId": "sg-1a2b3c4d" - } - ], - "SubnetId": "subnet-1a2b3c4d, subnet-3c4d5e6f" - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request with two launch specifications that differ only by subnet. The Spot fleet launches the instances in the specified subnet with the lowest price. If the instances are launched in a default VPC, they receive a public IP address by default. If the instances are launched in a nondefault VPC, they do not receive a public IP address by default. Note that you can't specify different subnets from the same Availability Zone in a Spot fleet request.", - "id": "ec2-request-spot-fleet-1", - "title": "To request a Spot fleet in the subnet with the lowest price" - }, - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2a, us-west-2b" - }, - "SecurityGroups": [ - { - "GroupId": "sg-1a2b3c4d" - } - ] - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request with two launch specifications that differ only by Availability Zone. 
The Spot fleet launches the instances in the specified Availability Zone with the lowest price. If your account supports EC2-VPC only, Amazon EC2 launches the Spot instances in the default subnet of the Availability Zone. If your account supports EC2-Classic, Amazon EC2 launches the instances in EC2-Classic in the Availability Zone.", - "id": "ec2-request-spot-fleet-2", - "title": "To request a Spot fleet in the Availability Zone with the lowest price" - }, - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::880185128111:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Groups": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-1a2b3c4d" - } - ] - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns public addresses to instances launched in a nondefault VPC. Note that when you specify a network interface, you must include the subnet ID and security group ID using the network interface.", - "id": "ec2-request-spot-fleet-3", - "title": "To launch Spot instances in a subnet and assign them public IP addresses" - }, - { - "input": { - "SpotFleetRequestConfig": { - "AllocationStrategy": "diversified", - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "c4.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - }, - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - }, - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "r3.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - } - ], - "SpotPrice": "0.70", - "TargetCapacity": 30 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request that launches 30 instances using the diversified allocation strategy. The launch specifications differ by instance type. The Spot fleet distributes the instances across the launch specifications such that there are 10 instances of each type.", - "id": "ec2-request-spot-fleet-4", - "title": "To request a Spot fleet using the diversified allocation strategy" - } - ], - "RequestSpotInstances": [ - { - "input": { - "InstanceCount": 5, - "LaunchSpecification": { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2a" - }, - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ] - }, - "SpotPrice": "0.03", - "Type": "one-time" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a one-time Spot Instance request for five instances in the specified Availability Zone. If your account supports EC2-VPC only, Amazon EC2 launches the instances in the default subnet of the specified Availability Zone. 
If your account supports EC2-Classic, Amazon EC2 launches the instances in EC2-Classic in the specified Availability Zone.", - "id": "ec2-request-spot-instances-1", - "title": "To create a one-time Spot Instance request" - }, - { - "input": { - "InstanceCount": 5, - "LaunchSpecification": { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-1a2b3c4d" - }, - "SpotPrice": "0.050", - "Type": "one-time" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command creates a one-time Spot Instance request for five instances in the specified subnet. Amazon EC2 launches the instances in the specified subnet. If the VPC is a nondefault VPC, the instances do not receive a public IP address by default.", - "id": "ec2-request-spot-instances-2", - "title": "To create a one-time Spot Instance request" - } - ], - "ResetSnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets the create volume permissions for snapshot ``snap-1234567890abcdef0``. If the command succeeds, no output is returned.", - "id": "to-reset-a-snapshot-attribute-1472508825735", - "title": "To reset a snapshot attribute" - } - ], - "RestoreAddressToClassic": [ - { - "input": { - "PublicIp": "198.51.100.0" - }, - "output": { - "PublicIp": "198.51.100.0", - "Status": "MoveInProgress" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example restores the specified Elastic IP address to the EC2-Classic platform.", - "id": "ec2-restore-address-to-classic-1", - "title": "To restore an address to EC2-Classic" - } - ], - "RunScheduledInstances": [ - { - "input": { - "InstanceCount": 1, - "LaunchSpecification": { - "IamInstanceProfile": { - "Name": "my-iam-role" - }, - "ImageId": "ami-12345678", - "InstanceType": "c4.large", - "KeyName": "my-key-pair", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Groups": [ - "sg-12345678" - ], - "SubnetId": "subnet-12345678" - } - ] - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012" - }, - "output": { - "InstanceIdSet": [ - "i-1234567890abcdef0" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches the specified Scheduled Instance in a VPC.", - "id": "ec2-run-scheduled-instances-1", - "title": "To launch a Scheduled Instance in a VPC" - }, - { - "input": { - "InstanceCount": 1, - "LaunchSpecification": { - "IamInstanceProfile": { - "Name": "my-iam-role" - }, - "ImageId": "ami-12345678", - "InstanceType": "c4.large", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2b" - }, - "SecurityGroupIds": [ - "sg-12345678" - ] - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012" - }, - "output": { - "InstanceIdSet": [ - "i-1234567890abcdef0" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches the specified Scheduled Instance in EC2-Classic.", - "id": "ec2-run-scheduled-instances-2", - "title": "To launch a Scheduled Instance in EC2-Classic" - } - ], - "UnassignPrivateIpAddresses": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "PrivateIpAddresses": [ - "10.0.0.82" - ] - }, - 
"comments": { - "input": { - }, - "output": { - } - }, - "description": "This example unassigns the specified private IP address from the specified network interface.", - "id": "ec2-unassign-private-ip-addresses-1", - "title": "To unassign a secondary private IP address from a network interface" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/paginators-1.json deleted file mode 100644 index 2bd01ad..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/paginators-1.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - }, - "DescribeSpotFleetRequests": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotFleetRequestConfigs" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeVolumes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Volumes" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/service-2.json.gz deleted file mode 100644 index 2372e11..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/waiters-2.json deleted file mode 100644 index aa36a04..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-04-01/waiters-2.json +++ /dev/null @@ -1,607 +0,0 @@ -{ - "version": 2, - "waiters": { - "InstanceExists": { - "delay": 5, - "maxAttempts": 40, - "operation": "DescribeInstances", - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Reservations[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - 
"ConsoleOutputAvailable": { - "operation": "GetConsoleOutput", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(Output || '') > `0`", - "expected": true - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageExists": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Images[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidAMIID.NotFound", - "state": "retry" - } - ] - }, - "ImageAvailable": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - 
"matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "KeyPairExists": { - "operation": "DescribeKeyPairs", - "delay": 5, - "maxAttempts": 6, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(KeyPairs[].KeyName) > `0`" - }, - { - "expected": "InvalidKeyPair.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "NatGatewayAvailable": { - "operation": "DescribeNatGateways", - "delay": 15, - "maxAttempts": 40, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "NatGateways[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "failed" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleting" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleted" - }, - { - "state": "retry", - "matcher": "error", - "expected": "NatGatewayNotFound" - } - ] - }, - "NetworkAclExists": { - "operation": "DescribeNetworkAcls", - "delay": 20, - "maxAttempts": 10, - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(NetworkAcls[]) > `0`", - "state": "success" - }, - { - "expected": "InvalidNetworkAclID.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "NetworkInterfaceAvailable": { - "operation": "DescribeNetworkInterfaces", - "delay": 20, - "maxAttempts": 10, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "NetworkInterfaces[].Status" - }, - { - "expected": "InvalidNetworkInterfaceID.NotFound", - "matcher": "error", - "state": "failure" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - 
"acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "system-error" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "matcher": "error", - "expected": "InvalidVolume.NotFound", - "state": "success" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 60, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - 
"VpcPeeringConnectionExists": { - "delay": 15, - "operation": "DescribeVpcPeeringConnections", - "maxAttempts": 40, - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidVpcPeeringConnectionID.NotFound", - "state": "retry" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 44010fb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/examples-1.json b/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/examples-1.json deleted file mode 100644 index f6a8719..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/examples-1.json +++ /dev/null @@ -1,3740 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AllocateAddress": [ - { - "input": { - "Domain": "vpc" - }, - "output": { - "AllocationId": "eipalloc-64d5890a", - "Domain": "vpc", - "PublicIp": "203.0.113.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example allocates an Elastic IP address to use with an instance in a VPC.", - "id": "ec2-allocate-address-1", - "title": "To allocate an Elastic IP address for EC2-VPC" - }, - { - "output": { - "Domain": "standard", - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example allocates an Elastic IP address to use with an instance in EC2-Classic.", - "id": "ec2-allocate-address-2", - "title": "To allocate an Elastic IP address for EC2-Classic" - } - ], - "AssignPrivateIpAddresses": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "PrivateIpAddresses": [ - "10.0.0.82" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns the specified secondary private IP address to the specified network interface.", - "id": "ec2-assign-private-ip-addresses-1", - "title": "To assign a specific secondary private IP address to an interface" - }, - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "SecondaryPrivateIpAddressCount": 2 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns two secondary private IP addresses to the specified network interface. 
Amazon EC2 automatically assigns these IP addresses from the available IP addresses in the CIDR block range of the subnet the network interface is associated with.", - "id": "ec2-assign-private-ip-addresses-2", - "title": "To assign secondary private IP addresses that Amazon EC2 selects to an interface" - } - ], - "AssociateAddress": [ - { - "input": { - "AllocationId": "eipalloc-64d5890a", - "InstanceId": "i-0b263919b6498b123" - }, - "output": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified Elastic IP address with the specified instance in a VPC.", - "id": "ec2-associate-address-1", - "title": "To associate an Elastic IP address in EC2-VPC" - }, - { - "input": { - "AllocationId": "eipalloc-64d5890a", - "NetworkInterfaceId": "eni-1a2b3c4d" - }, - "output": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified Elastic IP address with the specified network interface.", - "id": "ec2-associate-address-2", - "title": "To associate an Elastic IP address with a network interface" - }, - { - "input": { - "InstanceId": "i-07ffe74c7330ebf53", - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates an Elastic IP address with an instance in EC2-Classic.", - "id": "ec2-associate-address-3", - "title": "To associate an Elastic IP address in EC2-Classic" - } - ], - "AssociateDhcpOptions": [ - { - "input": { - "DhcpOptionsId": "dopt-d9070ebb", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified DHCP options set with the specified VPC.", - "id": "ec2-associate-dhcp-options-1", - "title": "To associate a DHCP options set with a VPC" - }, - { - "input": { - "DhcpOptionsId": "default", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the default DHCP options set with the specified VPC.", - "id": "ec2-associate-dhcp-options-2", - "title": "To associate the default DHCP options set with a VPC" - } - ], - "AssociateRouteTable": [ - { - "input": { - "RouteTableId": "rtb-22574640", - "SubnetId": "subnet-9d4a7b6" - }, - "output": { - "AssociationId": "rtbassoc-781d0d1a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified route table with the specified subnet.", - "id": "ec2-associate-route-table-1", - "title": "To associate a route table with a subnet" - } - ], - "AttachInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified Internet gateway to the specified VPC.", - "id": "ec2-attach-internet-gateway-1", - "title": "To attach an Internet gateway to a VPC" - } - ], - "AttachNetworkInterface": [ - { - "input": { - "DeviceIndex": 1, - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-e5aa89a3" - }, - "output": { - "AttachmentId": "eni-attach-66c4350a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified network interface to the specified instance.", - "id": "ec2-attach-network-interface-1", - "title": "To attach a network interface to an instance" - } - ], - 
"AttachVolume": [ - { - "input": { - "Device": "/dev/sdf", - "InstanceId": "i-01474ef662b89480", - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "AttachTime": "2016-08-29T18:52:32.724Z", - "Device": "/dev/sdf", - "InstanceId": "i-01474ef662b89480", - "State": "attaching", - "VolumeId": "vol-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches a volume (``vol-1234567890abcdef0``) to an instance (``i-01474ef662b89480``) as ``/dev/sdf``.", - "id": "to-attach-a-volume-to-an-instance-1472499213109", - "title": "To attach a volume to an instance" - } - ], - "CancelSpotFleetRequests": [ - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ], - "TerminateInstances": true - }, - "output": { - "SuccessfulFleetRequests": [ - { - "CurrentSpotFleetRequestState": "cancelled_running", - "PreviousSpotFleetRequestState": "active", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels the specified Spot fleet request and terminates its associated Spot Instances.", - "id": "ec2-cancel-spot-fleet-requests-1", - "title": "To cancel a Spot fleet request" - }, - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ], - "TerminateInstances": false - }, - "output": { - "SuccessfulFleetRequests": [ - { - "CurrentSpotFleetRequestState": "cancelled_terminating", - "PreviousSpotFleetRequestState": "active", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels the specified Spot fleet request without terminating its associated Spot Instances.", - "id": "ec2-cancel-spot-fleet-requests-2", - "title": "To cancel a Spot fleet request without terminating its Spot Instances" - } - ], - "CancelSpotInstanceRequests": [ - { - "input": { - "SpotInstanceRequestIds": [ - "sir-08b93456" - ] - }, - "output": { - "CancelledSpotInstanceRequests": [ - { - "SpotInstanceRequestId": "sir-08b93456", - "State": "cancelled" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels a Spot Instance request.", - "id": "ec2-cancel-spot-instance-requests-1", - "title": "To cancel Spot Instance requests" - } - ], - "ConfirmProductInstance": [ - { - "input": { - "InstanceId": "i-1234567890abcdef0", - "ProductCode": "774F4FF8" - }, - "output": { - "OwnerId": "123456789012" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example determines whether the specified product code is associated with the specified instance.", - "id": "to-confirm-the-product-instance-1472712108494", - "title": "To confirm the product instance" - } - ], - "CopySnapshot": [ - { - "input": { - "Description": "This is my copied snapshot.", - "DestinationRegion": "us-east-1", - "SourceRegion": "us-west-2", - "SourceSnapshotId": "snap-066877671789bd71b" - }, - "output": { - "SnapshotId": "snap-066877671789bd71b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies a snapshot with the snapshot ID of ``snap-066877671789bd71b`` from the ``us-west-2`` region to the ``us-east-1`` region and adds a short description to identify the snapshot.", - "id": "to-copy-a-snapshot-1472502259774", - "title": "To copy a snapshot" - } - ], - "CreateCustomerGateway": [ - { - 
"input": { - "BgpAsn": 65534, - "PublicIp": "12.1.2.3", - "Type": "ipsec.1" - }, - "output": { - "CustomerGateway": { - "BgpAsn": "65534", - "CustomerGatewayId": "cgw-0e11f167", - "IpAddress": "12.1.2.3", - "State": "available", - "Type": "ipsec.1" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a customer gateway with the specified IP address for its outside interface.", - "id": "ec2-create-customer-gateway-1", - "title": "To create a customer gateway" - } - ], - "CreateDhcpOptions": [ - { - "input": { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - "10.2.5.1", - "10.2.5.2" - ] - } - ] - }, - "output": { - "DhcpOptions": { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - { - "Value": "10.2.5.2" - }, - { - "Value": "10.2.5.1" - } - ] - } - ], - "DhcpOptionsId": "dopt-d9070ebb" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DHCP options set.", - "id": "ec2-create-dhcp-options-1", - "title": "To create a DHCP options set" - } - ], - "CreateInternetGateway": [ - { - "output": { - "InternetGateway": { - "Attachments": [ - - ], - "InternetGatewayId": "igw-c0a643a9", - "Tags": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Internet gateway.", - "id": "ec2-create-internet-gateway-1", - "title": "To create an Internet gateway" - } - ], - "CreateKeyPair": [ - { - "input": { - "KeyName": "my-key-pair" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a key pair named my-key-pair.", - "id": "ec2-create-key-pair-1", - "title": "To create a key pair" - } - ], - "CreateNatGateway": [ - { - "input": { - "AllocationId": "eipalloc-37fc1a52", - "SubnetId": "subnet-1a2b3c4d" - }, - "output": { - "NatGateway": { - "CreateTime": "2015-12-17T12:45:26.732Z", - "NatGatewayAddresses": [ - { - "AllocationId": "eipalloc-37fc1a52" - } - ], - "NatGatewayId": "nat-08d48af2a8e83edfd", - "State": "pending", - "SubnetId": "subnet-1a2b3c4d", - "VpcId": "vpc-1122aabb" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a NAT gateway in subnet subnet-1a2b3c4d and associates an Elastic IP address with the allocation ID eipalloc-37fc1a52 with the NAT gateway.", - "id": "ec2-create-nat-gateway-1", - "title": "To create a NAT gateway" - } - ], - "CreateNetworkAcl": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "output": { - "NetworkAcl": { - "Associations": [ - - ], - "Entries": [ - { - "CidrBlock": "0.0.0.0/0", - "Egress": true, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - }, - { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - } - ], - "IsDefault": false, - "NetworkAclId": "acl-5fb85d36", - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a network ACL for the specified VPC.", - "id": "ec2-create-network-acl-1", - "title": "To create a network ACL" - } - ], - "CreateNetworkAclEntry": [ - { - "input": { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "NetworkAclId": "acl-5fb85d36", - "PortRange": { - "From": 53, - "To": 53 - }, - "Protocol": "udp", - "RuleAction": "allow", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an 
entry for the specified network ACL. The rule allows ingress traffic from anywhere (0.0.0.0/0) on UDP port 53 (DNS) into any associated subnet.", - "id": "ec2-create-network-acl-entry-1", - "title": "To create a network ACL entry" - } - ], - "CreateNetworkInterface": [ - { - "input": { - "Description": "my network interface", - "Groups": [ - "sg-903004f8" - ], - "PrivateIpAddress": "10.0.2.17", - "SubnetId": "subnet-9d4a7b6c" - }, - "output": { - "NetworkInterface": { - "AvailabilityZone": "us-east-1d", - "Description": "my network interface", - "Groups": [ - { - "GroupId": "sg-903004f8", - "GroupName": "default" - } - ], - "MacAddress": "02:1a:80:41:52:9c", - "NetworkInterfaceId": "eni-e5aa89a3", - "OwnerId": "123456789012", - "PrivateIpAddress": "10.0.2.17", - "PrivateIpAddresses": [ - { - "Primary": true, - "PrivateIpAddress": "10.0.2.17" - } - ], - "RequesterManaged": false, - "SourceDestCheck": true, - "Status": "pending", - "SubnetId": "subnet-9d4a7b6c", - "TagSet": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a network interface for the specified subnet.", - "id": "ec2-create-network-interface-1", - "title": "To create a network interface" - } - ], - "CreatePlacementGroup": [ - { - "input": { - "GroupName": "my-cluster", - "Strategy": "cluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a placement group with the specified name.", - "id": "to-create-a-placement-group-1472712245768", - "title": "To create a placement group" - } - ], - "CreateRoute": [ - { - "input": { - "DestinationCidrBlock": "0.0.0.0/0", - "GatewayId": "igw-c0a643a9", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a route for the specified route table. 
The route matches all traffic (0.0.0.0/0) and routes it to the specified Internet gateway.", - "id": "ec2-create-route-1", - "title": "To create a route" - } - ], - "CreateRouteTable": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "output": { - "RouteTable": { - "Associations": [ - - ], - "PropagatingVgws": [ - - ], - "RouteTableId": "rtb-22574640", - "Routes": [ - { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "local", - "State": "active" - } - ], - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a route table for the specified VPC.", - "id": "ec2-create-route-table-1", - "title": "To create a route table" - } - ], - "CreateSnapshot": [ - { - "input": { - "Description": "This is my root volume snapshot.", - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "Description": "This is my root volume snapshot.", - "OwnerId": "012345678910", - "SnapshotId": "snap-066877671789bd71b", - "StartTime": "2014-02-28T21:06:01.000Z", - "State": "pending", - "Tags": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeSize": 8 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a snapshot of the volume with a volume ID of ``vol-1234567890abcdef0`` and a short description to identify the snapshot.", - "id": "to-create-a-snapshot-1472502529790", - "title": "To create a snapshot" - } - ], - "CreateSpotDatafeedSubscription": [ - { - "input": { - "Bucket": "my-s3-bucket", - "Prefix": "spotdata" - }, - "output": { - "SpotDatafeedSubscription": { - "Bucket": "my-s3-bucket", - "OwnerId": "123456789012", - "Prefix": "spotdata", - "State": "Active" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot Instance data feed for your AWS account.", - "id": "ec2-create-spot-datafeed-subscription-1", - "title": "To create a Spot Instance datafeed" - } - ], - "CreateSubnet": [ - { - "input": { - "CidrBlock": "10.0.1.0/24", - "VpcId": "vpc-a01106c2" - }, - "output": { - "Subnet": { - "AvailabilityZone": "us-west-2c", - "AvailableIpAddressCount": 251, - "CidrBlock": "10.0.1.0/24", - "State": "pending", - "SubnetId": "subnet-9d4a7b6c", - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a subnet in the specified VPC with the specified CIDR block. 
We recommend that you let us select an Availability Zone for you.", - "id": "ec2-create-subnet-1", - "title": "To create a subnet" - } - ], - "CreateTags": [ - { - "input": { - "Resources": [ - "ami-78a54011" - ], - "Tags": [ - { - "Key": "Stack", - "Value": "production" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the tag Stack=production to the specified image, or overwrites an existing tag for the AMI where the tag key is Stack.", - "id": "ec2-create-tags-1", - "title": "To add a tag to a resource" - } - ], - "CreateVolume": [ - { - "input": { - "AvailabilityZone": "us-east-1a", - "Size": 80, - "VolumeType": "gp2" - }, - "output": { - "AvailabilityZone": "us-east-1a", - "CreateTime": "2016-08-29T18:52:32.724Z", - "Encrypted": false, - "Iops": 240, - "Size": 80, - "SnapshotId": "", - "State": "creating", - "VolumeId": "vol-6b60b7c7", - "VolumeType": "gp2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an 80 GiB General Purpose (SSD) volume in the Availability Zone ``us-east-1a``.", - "id": "to-create-a-new-volume-1472496724296", - "title": "To create a new volume" - }, - { - "input": { - "AvailabilityZone": "us-east-1a", - "Iops": 1000, - "SnapshotId": "snap-066877671789bd71b", - "VolumeType": "io1" - }, - "output": { - "Attachments": [ - - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2016-08-29T18:52:32.724Z", - "Iops": 1000, - "Size": 500, - "SnapshotId": "snap-066877671789bd71b", - "State": "creating", - "Tags": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeType": "io1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a new Provisioned IOPS (SSD) volume with 1000 provisioned IOPS from a snapshot in the Availability Zone ``us-east-1a``.", - "id": "to-create-a-new-provisioned-iops-ssd-volume-from-a-snapshot-1472498975176", - "title": "To create a new Provisioned IOPS (SSD) volume from a snapshot" - } - ], - "CreateVpc": [ - { - "input": { - "CidrBlock": "10.0.0.0/16" - }, - "output": { - "Vpc": { - "CidrBlock": "10.0.0.0/16", - "DhcpOptionsId": "dopt-7a8b9c2d", - "InstanceTenancy": "default", - "State": "pending", - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a VPC with the specified CIDR block.", - "id": "ec2-create-vpc-1", - "title": "To create a VPC" - } - ], - "DeleteCustomerGateway": [ - { - "input": { - "CustomerGatewayId": "cgw-0e11f167" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified customer gateway.", - "id": "ec2-delete-customer-gateway-1", - "title": "To delete a customer gateway" - } - ], - "DeleteDhcpOptions": [ - { - "input": { - "DhcpOptionsId": "dopt-d9070ebb" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DHCP options set.", - "id": "ec2-delete-dhcp-options-1", - "title": "To delete a DHCP options set" - } - ], - "DeleteInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified Internet gateway.", - "id": "ec2-delete-internet-gateway-1", - "title": "To delete an Internet gateway" - } - ], - "DeleteKeyPair": [ - { - "input": { - "KeyName": "my-key-pair" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This 
example deletes the specified key pair.", - "id": "ec2-delete-key-pair-1", - "title": "To delete a key pair" - } - ], - "DeleteNatGateway": [ - { - "input": { - "NatGatewayId": "nat-04ae55e711cec5680" - }, - "output": { - "NatGatewayId": "nat-04ae55e711cec5680" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified NAT gateway.", - "id": "ec2-delete-nat-gateway-1", - "title": "To delete a NAT gateway" - } - ], - "DeleteNetworkAcl": [ - { - "input": { - "NetworkAclId": "acl-5fb85d36" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified network ACL.", - "id": "ec2-delete-network-acl-1", - "title": "To delete a network ACL" - } - ], - "DeleteNetworkAclEntry": [ - { - "input": { - "Egress": true, - "NetworkAclId": "acl-5fb85d36", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes ingress rule number 100 from the specified network ACL.", - "id": "ec2-delete-network-acl-entry-1", - "title": "To delete a network ACL entry" - } - ], - "DeleteNetworkInterface": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified network interface.", - "id": "ec2-delete-network-interface-1", - "title": "To delete a network interface" - } - ], - "DeletePlacementGroup": [ - { - "input": { - "GroupName": "my-cluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified placement group.\n", - "id": "to-delete-a-placement-group-1472712349959", - "title": "To delete a placement group" - } - ], - "DeleteRoute": [ - { - "input": { - "DestinationCidrBlock": "0.0.0.0/0", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified route from the specified route table.", - "id": "ec2-delete-route-1", - "title": "To delete a route" - } - ], - "DeleteRouteTable": [ - { - "input": { - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified route table.", - "id": "ec2-delete-route-table-1", - "title": "To delete a route table" - } - ], - "DeleteSnapshot": [ - { - "input": { - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a snapshot with the snapshot ID of ``snap-1234567890abcdef0``. 
If the command succeeds, no output is returned.", - "id": "to-delete-a-snapshot-1472503042567", - "title": "To delete a snapshot" - } - ], - "DeleteSpotDatafeedSubscription": [ - { - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a Spot data feed subscription for the account.", - "id": "ec2-delete-spot-datafeed-subscription-1", - "title": "To cancel a Spot Instance data feed subscription" - } - ], - "DeleteSubnet": [ - { - "input": { - "SubnetId": "subnet-9d4a7b6c" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified subnet.", - "id": "ec2-delete-subnet-1", - "title": "To delete a subnet" - } - ], - "DeleteTags": [ - { - "input": { - "Resources": [ - "ami-78a54011" - ], - "Tags": [ - { - "Key": "Stack", - "Value": "test" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the tag Stack=test from the specified image.", - "id": "ec2-delete-tags-1", - "title": "To delete a tag from a resource" - } - ], - "DeleteVolume": [ - { - "input": { - "VolumeId": "vol-049df61146c4d7901" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes an available volume with the volume ID of ``vol-049df61146c4d7901``. If the command succeeds, no output is returned.", - "id": "to-delete-a-volume-1472503111160", - "title": "To delete a volume" - } - ], - "DeleteVpc": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified VPC.", - "id": "ec2-delete-vpc-1", - "title": "To delete a VPC" - } - ], - "DescribeAccountAttributes": [ - { - "input": { - "AttributeNames": [ - "supported-platforms" - ] - }, - "output": { - "AccountAttributes": [ - { - "AttributeName": "supported-platforms", - "AttributeValues": [ - { - "AttributeValue": "EC2" - }, - { - "AttributeValue": "VPC" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the supported-platforms attribute for your AWS account.", - "id": "ec2-describe-account-attributes-1", - "title": "To describe a single attribute for your AWS account" - }, - { - "output": { - "AccountAttributes": [ - { - "AttributeName": "supported-platforms", - "AttributeValues": [ - { - "AttributeValue": "EC2" - }, - { - "AttributeValue": "VPC" - } - ] - }, - { - "AttributeName": "vpc-max-security-groups-per-interface", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "max-elastic-ips", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "max-instances", - "AttributeValues": [ - { - "AttributeValue": "20" - } - ] - }, - { - "AttributeName": "vpc-max-elastic-ips", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "default-vpc", - "AttributeValues": [ - { - "AttributeValue": "none" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the attributes for your AWS account.", - "id": "ec2-describe-account-attributes-2", - "title": "To describe all attributes for your AWS account" - } - ], - "DescribeAddresses": [ - { - "output": { - "Addresses": [ - { - "Domain": "standard", - "InstanceId": "i-1234567890abcdef0", - "PublicIp": "198.51.100.0" - }, - { - "AllocationId": "eipalloc-12345678", - "AssociationId": "eipassoc-12345678", - 
"Domain": "vpc", - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-12345678", - "NetworkInterfaceOwnerId": "123456789012", - "PrivateIpAddress": "10.0.1.241", - "PublicIp": "203.0.113.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses.", - "id": "ec2-describe-addresses-1", - "title": "To describe your Elastic IP addresses" - }, - { - "input": { - "Filters": [ - { - "Name": "domain", - "Values": [ - "vpc" - ] - } - ] - }, - "output": { - "Addresses": [ - { - "AllocationId": "eipalloc-12345678", - "AssociationId": "eipassoc-12345678", - "Domain": "vpc", - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-12345678", - "NetworkInterfaceOwnerId": "123456789012", - "PrivateIpAddress": "10.0.1.241", - "PublicIp": "203.0.113.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses for use with instances in a VPC.", - "id": "ec2-describe-addresses-2", - "title": "To describe your Elastic IP addresses for EC2-VPC" - }, - { - "input": { - "Filters": [ - { - "Name": "domain", - "Values": [ - "standard" - ] - } - ] - }, - "output": { - "Addresses": [ - { - "Domain": "standard", - "InstanceId": "i-1234567890abcdef0", - "PublicIp": "198.51.100.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses for use with instances in EC2-Classic.", - "id": "ec2-describe-addresses-3", - "title": "To describe your Elastic IP addresses for EC2-Classic" - } - ], - "DescribeAvailabilityZones": [ - { - "output": { - "AvailabilityZones": [ - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1b" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1c" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1d" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1e" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Availability Zones that are available to you. 
The response includes Availability Zones only for the current region.", - "id": "ec2-describe-availability-zones-1", - "title": "To describe your Availability Zones" - } - ], - "DescribeCustomerGateways": [ - { - "input": { - "CustomerGatewayIds": [ - "cgw-0e11f167" - ] - }, - "output": { - "CustomerGateways": [ - { - "BgpAsn": "65534", - "CustomerGatewayId": "cgw-0e11f167", - "IpAddress": "12.1.2.3", - "State": "available", - "Type": "ipsec.1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified customer gateway.", - "id": "ec2-describe-customer-gateways-1", - "title": "To describe a customer gateway" - } - ], - "DescribeDhcpOptions": [ - { - "input": { - "DhcpOptionsIds": [ - "dopt-d9070ebb" - ] - }, - "output": { - "DhcpOptions": [ - { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - { - "Value": "10.2.5.2" - }, - { - "Value": "10.2.5.1" - } - ] - } - ], - "DhcpOptionsId": "dopt-d9070ebb" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified DHCP options set.", - "id": "ec2-describe-dhcp-options-1", - "title": "To describe a DHCP options set" - } - ], - "DescribeInstanceAttribute": [ - { - "input": { - "Attribute": "instanceType", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": { - "Value": "t1.micro" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the instance type of the specified instance.\n", - "id": "to-describe-the-instance-type-1472712432132", - "title": "To describe the instance type" - }, - { - "input": { - "Attribute": "disableApiTermination", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "DisableApiTermination": { - "Value": "false" - }, - "InstanceId": "i-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``disableApiTermination`` attribute of the specified instance.\n", - "id": "to-describe-the-disableapitermination-attribute-1472712533466", - "title": "To describe the disableApiTermination attribute" - }, - { - "input": { - "Attribute": "blockDeviceMapping", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "AttachTime": "2013-05-17T22:42:34.000Z", - "DeleteOnTermination": true, - "Status": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - }, - { - "DeviceName": "/dev/sdf", - "Ebs": { - "AttachTime": "2013-09-10T23:07:00.000Z", - "DeleteOnTermination": false, - "Status": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - } - ], - "InstanceId": "i-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``blockDeviceMapping`` attribute of the specified instance.\n", - "id": "to-describe-the-block-device-mapping-for-an-instance-1472712645423", - "title": "To describe the block device mapping for an instance" - } - ], - "DescribeInternetGateways": [ - { - "input": { - "Filters": [ - { - "Name": "attachment.vpc-id", - "Values": [ - "vpc-a01106c2" - ] - } - ] - }, - "output": { - "InternetGateways": [ - { - "Attachments": [ - { - "State": "available", - "VpcId": "vpc-a01106c2" - } - ], - "InternetGatewayId": "igw-c0a643a9", - "Tags": [ - - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example 
describes the Internet gateway for the specified VPC.", - "id": "ec2-describe-internet-gateways-1", - "title": "To describe the Internet gateway for a VPC" - } - ], - "DescribeKeyPairs": [ - { - "input": { - "KeyNames": [ - "my-key-pair" - ] - }, - "output": { - "KeyPairs": [ - { - "KeyFingerprint": "1f:51:ae:28:bf:89:e9:d8:1f:25:5d:37:2d:7d:b8:ca:9f:f5:f1:6f", - "KeyName": "my-key-pair" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example displays the fingerprint for the specified key.", - "id": "ec2-describe-key-pairs-1", - "title": "To display a key pair" - } - ], - "DescribeMovingAddresses": [ - { - "output": { - "MovingAddressStatuses": [ - { - "MoveStatus": "MovingToVpc", - "PublicIp": "198.51.100.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your moving Elastic IP addresses.", - "id": "ec2-describe-moving-addresses-1", - "title": "To describe your moving addresses" - } - ], - "DescribeNatGateways": [ - { - "input": { - "Filter": [ - { - "Name": "vpc-id", - "Values": [ - "vpc-1a2b3c4d" - ] - } - ] - }, - "output": { - "NatGateways": [ - { - "CreateTime": "2015-12-01T12:26:55.983Z", - "NatGatewayAddresses": [ - { - "AllocationId": "eipalloc-89c620ec", - "NetworkInterfaceId": "eni-9dec76cd", - "PrivateIp": "10.0.0.149", - "PublicIp": "198.11.222.333" - } - ], - "NatGatewayId": "nat-05dba92075d71c408", - "State": "available", - "SubnetId": "subnet-847e4dc2", - "VpcId": "vpc-1a2b3c4d" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the NAT gateway for the specified VPC.", - "id": "ec2-describe-nat-gateways-1", - "title": "To describe a NAT gateway" - } - ], - "DescribeNetworkAcls": [ - { - "input": { - "NetworkAclIds": [ - "acl-5fb85d36" - ] - }, - "output": { - "NetworkAcls": [ - { - "Associations": [ - { - "NetworkAclAssociationId": "aclassoc-66ea5f0b", - "NetworkAclId": "acl-9aeb5ef7", - "SubnetId": "subnet-65ea5f08" - } - ], - "Entries": [ - { - "CidrBlock": "0.0.0.0/0", - "Egress": true, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - }, - { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - } - ], - "IsDefault": false, - "NetworkAclId": "acl-5fb85d36", - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified network ACL.", - "id": "ec2-", - "title": "To describe a network ACL" - } - ], - "DescribeNetworkInterfaceAttribute": [ - { - "input": { - "Attribute": "attachment", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Attachment": { - "AttachTime": "2015-05-21T20:02:20.000Z", - "AttachmentId": "eni-attach-43348162", - "DeleteOnTermination": true, - "DeviceIndex": 0, - "InstanceId": "i-1234567890abcdef0", - "InstanceOwnerId": "123456789012", - "Status": "attached" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the attachment attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-1", - "title": "To describe the attachment attribute of a network interface" - }, - { - "input": { - "Attribute": "description", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Description": { - "Value": "My description" - }, - "NetworkInterfaceId": "eni-686ea200" - 
}, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the description attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-2", - "title": "To describe the description attribute of a network interface" - }, - { - "input": { - "Attribute": "groupSet", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Groups": [ - { - "GroupId": "sg-903004f8", - "GroupName": "my-security-group" - } - ], - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the groupSet attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-3", - "title": "To describe the groupSet attribute of a network interface" - }, - { - "input": { - "Attribute": "sourceDestCheck", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "NetworkInterfaceId": "eni-686ea200", - "SourceDestCheck": { - "Value": true - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the sourceDestCheck attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-4", - "title": "To describe the sourceDestCheck attribute of a network interface" - } - ], - "DescribeNetworkInterfaces": [ - { - "input": { - "NetworkInterfaceIds": [ - "eni-e5aa89a3" - ] - }, - "output": { - "NetworkInterfaces": [ - { - "Association": { - "AssociationId": "eipassoc-0fbb766a", - "IpOwnerId": "123456789012", - "PublicDnsName": "ec2-203-0-113-12.compute-1.amazonaws.com", - "PublicIp": "203.0.113.12" - }, - "Attachment": { - "AttachTime": "2013-11-30T23:36:42.000Z", - "AttachmentId": "eni-attach-66c4350a", - "DeleteOnTermination": false, - "DeviceIndex": 1, - "InstanceId": "i-1234567890abcdef0", - "InstanceOwnerId": "123456789012", - "Status": "attached" - }, - "AvailabilityZone": "us-east-1d", - "Description": "my network interface", - "Groups": [ - { - "GroupId": "sg-8637d3e3", - "GroupName": "default" - } - ], - "MacAddress": "02:2f:8f:b0:cf:75", - "NetworkInterfaceId": "eni-e5aa89a3", - "OwnerId": "123456789012", - "PrivateDnsName": "ip-10-0-1-17.ec2.internal", - "PrivateIpAddress": "10.0.1.17", - "PrivateIpAddresses": [ - { - "Association": { - "AssociationId": "eipassoc-0fbb766a", - "IpOwnerId": "123456789012", - "PublicDnsName": "ec2-203-0-113-12.compute-1.amazonaws.com", - "PublicIp": "203.0.113.12" - }, - "Primary": true, - "PrivateDnsName": "ip-10-0-1-17.ec2.internal", - "PrivateIpAddress": "10.0.1.17" - } - ], - "RequesterManaged": false, - "SourceDestCheck": true, - "Status": "in-use", - "SubnetId": "subnet-b61f49f0", - "TagSet": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "ec2-describe-network-interfaces-1", - "title": "To describe a network interface" - } - ], - "DescribeRegions": [ - { - "output": { - "Regions": [ - { - "Endpoint": "ec2.ap-south-1.amazonaws.com", - "RegionName": "ap-south-1" - }, - { - "Endpoint": "ec2.eu-west-1.amazonaws.com", - "RegionName": "eu-west-1" - }, - { - "Endpoint": "ec2.ap-southeast-1.amazonaws.com", - "RegionName": "ap-southeast-1" - }, - { - "Endpoint": "ec2.ap-southeast-2.amazonaws.com", - "RegionName": "ap-southeast-2" - }, - { - "Endpoint": "ec2.eu-central-1.amazonaws.com", - "RegionName": "eu-central-1" - }, - { - "Endpoint": "ec2.ap-northeast-2.amazonaws.com", - "RegionName": "ap-northeast-2" - }, - { - "Endpoint": 
"ec2.ap-northeast-1.amazonaws.com", - "RegionName": "ap-northeast-1" - }, - { - "Endpoint": "ec2.us-east-1.amazonaws.com", - "RegionName": "us-east-1" - }, - { - "Endpoint": "ec2.sa-east-1.amazonaws.com", - "RegionName": "sa-east-1" - }, - { - "Endpoint": "ec2.us-west-1.amazonaws.com", - "RegionName": "us-west-1" - }, - { - "Endpoint": "ec2.us-west-2.amazonaws.com", - "RegionName": "us-west-2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all the regions that are available to you.", - "id": "ec2-describe-regions-1", - "title": "To describe your regions" - } - ], - "DescribeRouteTables": [ - { - "input": { - "RouteTableIds": [ - "rtb-1f382e7d" - ] - }, - "output": { - "RouteTables": [ - { - "Associations": [ - { - "Main": true, - "RouteTableAssociationId": "rtbassoc-d8ccddba", - "RouteTableId": "rtb-1f382e7d" - } - ], - "PropagatingVgws": [ - - ], - "RouteTableId": "rtb-1f382e7d", - "Routes": [ - { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "local", - "State": "active" - } - ], - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified route table.", - "id": "ec2-describe-route-tables-1", - "title": "To describe a route table" - } - ], - "DescribeScheduledInstanceAvailability": [ - { - "input": { - "FirstSlotStartTimeRange": { - "EarliestTime": "2016-01-31T00:00:00Z", - "LatestTime": "2016-01-31T04:00:00Z" - }, - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDays": [ - 1 - ] - } - }, - "output": { - "ScheduledInstanceAvailabilitySet": [ - { - "AvailabilityZone": "us-west-2b", - "AvailableInstanceCount": 20, - "FirstSlotStartTime": "2016-01-31T00:00:00Z", - "HourlyPrice": "0.095", - "InstanceType": "c4.large", - "MaxTermDurationInDays": 366, - "MinTermDurationInDays": 366, - "NetworkPlatform": "EC2-VPC", - "Platform": "Linux/UNIX", - "PurchaseToken": "eyJ2IjoiMSIsInMiOjEsImMiOi...", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false - }, - "SlotDurationInHours": 23, - "TotalScheduledInstanceHours": 1219 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes a schedule that occurs every week on Sunday, starting on the specified date. 
Note that the output contains a single schedule as an example.", - "id": "ec2-describe-scheduled-instance-availability-1", - "title": "To describe an available schedule" - } - ], - "DescribeScheduledInstances": [ - { - "input": { - "ScheduledInstanceIds": [ - "sci-1234-1234-1234-1234-123456789012" - ] - }, - "output": { - "ScheduledInstanceSet": [ - { - "AvailabilityZone": "us-west-2b", - "CreateDate": "2016-01-25T21:43:38.612Z", - "HourlyPrice": "0.095", - "InstanceCount": 1, - "InstanceType": "c4.large", - "NetworkPlatform": "EC2-VPC", - "NextSlotStartTime": "2016-01-31T09:00:00Z", - "Platform": "Linux/UNIX", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false, - "OccurrenceUnit": "" - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012", - "SlotDurationInHours": 32, - "TermEndDate": "2017-01-31T09:00:00Z", - "TermStartDate": "2016-01-31T09:00:00Z", - "TotalScheduledInstanceHours": 1696 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Scheduled Instance.", - "id": "ec2-describe-scheduled-instances-1", - "title": "To describe your Scheduled Instances" - } - ], - "DescribeSnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "SnapshotId": "snap-066877671789bd71b" - }, - "output": { - "CreateVolumePermissions": [ - - ], - "SnapshotId": "snap-066877671789bd71b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``createVolumePermission`` attribute on a snapshot with the snapshot ID of ``snap-066877671789bd71b``.", - "id": "to-describe-snapshot-attributes-1472503199736", - "title": "To describe snapshot attributes" - } - ], - "DescribeSnapshots": [ - { - "input": { - "SnapshotIds": [ - "snap-1234567890abcdef0" - ] - }, - "output": { - "NextToken": "", - "Snapshots": [ - { - "Description": "This is my snapshot.", - "OwnerId": "012345678910", - "Progress": "100%", - "SnapshotId": "snap-1234567890abcdef0", - "StartTime": "2014-02-28T21:28:32.000Z", - "State": "completed", - "VolumeId": "vol-049df61146c4d7901", - "VolumeSize": 8 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes a snapshot with the snapshot ID of ``snap-1234567890abcdef0``.", - "id": "to-describe-a-snapshot-1472503807850", - "title": "To describe a snapshot" - }, - { - "input": { - "Filters": [ - { - "Name": "status", - "Values": [ - "pending" - ] - } - ], - "OwnerIds": [ - "012345678910" - ] - }, - "output": { - "NextToken": "", - "Snapshots": [ - { - "Description": "This is my copied snapshot.", - "OwnerId": "012345678910", - "Progress": "87%", - "SnapshotId": "snap-066877671789bd71b", - "StartTime": "2014-02-28T21:37:27.000Z", - "State": "pending", - "VolumeId": "vol-1234567890abcdef0", - "VolumeSize": 8 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all snapshots owned by the ID 012345678910 that are in the ``pending`` status.", - "id": "to-describe-snapshots-using-filters-1472503929793", - "title": "To describe snapshots using filters" - } - ], - "DescribeSpotDatafeedSubscription": [ - { - "output": { - "SpotDatafeedSubscription": { - "Bucket": "my-s3-bucket", - "OwnerId": "123456789012", - "Prefix": "spotdata", - "State": "Active" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Spot Instance 
datafeed subscription for your AWS account.", - "id": "ec2-describe-spot-datafeed-subscription-1", - "title": "To describe the datafeed for your AWS account" - } - ], - "DescribeSpotFleetInstances": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "output": { - "ActiveInstances": [ - { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": "m3.medium", - "SpotInstanceRequestId": "sir-08b93456" - } - ], - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the Spot Instances associated with the specified Spot fleet.", - "id": "ec2-describe-spot-fleet-instances-1", - "title": "To describe the Spot Instances associated with a Spot fleet" - } - ], - "DescribeSpotFleetRequestHistory": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "StartTime": "2015-05-26T00:00:00Z" - }, - "output": { - "HistoryRecords": [ - { - "EventInformation": { - "EventSubType": "submitted" - }, - "EventType": "fleetRequestChange", - "Timestamp": "2015-05-26T23:17:20.697Z" - }, - { - "EventInformation": { - "EventSubType": "active" - }, - "EventType": "fleetRequestChange", - "Timestamp": "2015-05-26T23:17:20.873Z" - }, - { - "EventInformation": { - "EventSubType": "launched", - "InstanceId": "i-1234567890abcdef0" - }, - "EventType": "instanceChange", - "Timestamp": "2015-05-26T23:21:21.712Z" - }, - { - "EventInformation": { - "EventSubType": "launched", - "InstanceId": "i-1234567890abcdef1" - }, - "EventType": "instanceChange", - "Timestamp": "2015-05-26T23:21:21.816Z" - } - ], - "NextToken": "CpHNsscimcV5oH7bSbub03CI2Qms5+ypNpNm+53MNlR0YcXAkp0xFlfKf91yVxSExmbtma3awYxMFzNA663ZskT0AHtJ6TCb2Z8bQC2EnZgyELbymtWPfpZ1ZbauVg+P+TfGlWxWWB/Vr5dk5d4LfdgA/DRAHUrYgxzrEXAMPLE=", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "StartTime": "2015-05-26T00:00:00Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example returns the history for the specified Spot fleet starting at the specified time.", - "id": "ec2-describe-spot-fleet-request-history-1", - "title": "To describe Spot fleet history" - } - ], - "DescribeSpotFleetRequests": [ - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ] - }, - "output": { - "SpotFleetRequestConfigs": [ - { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "EbsOptimized": false, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "cc2.8xlarge", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "SecondaryPrivateIpAddressCount": 0, - "SubnetId": "subnet-a61dafcf" - } - ] - }, - { - "EbsOptimized": false, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "r3.8xlarge", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "SecondaryPrivateIpAddressCount": 0, - "SubnetId": "subnet-a61dafcf" - } - ] - } - ], - "SpotPrice": "0.05", - "TargetCapacity": 20 - }, - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "SpotFleetRequestState": "active" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Spot fleet request.", - "id": "ec2-describe-spot-fleet-requests-1", - "title": "To describe a Spot fleet request" - } - ], - 
"DescribeSpotInstanceRequests": [ - { - "input": { - "SpotInstanceRequestIds": [ - "sir-08b93456" - ] - }, - "output": { - "SpotInstanceRequests": [ - { - "CreateTime": "2014-04-30T18:14:55.000Z", - "InstanceId": "i-1234567890abcdef0", - "LaunchSpecification": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "DeleteOnTermination": true, - "VolumeSize": 8, - "VolumeType": "standard" - } - } - ], - "EbsOptimized": false, - "ImageId": "ami-7aba833f", - "InstanceType": "m1.small", - "KeyName": "my-key-pair", - "SecurityGroups": [ - { - "GroupId": "sg-e38f24a7", - "GroupName": "my-security-group" - } - ] - }, - "LaunchedAvailabilityZone": "us-west-1b", - "ProductDescription": "Linux/UNIX", - "SpotInstanceRequestId": "sir-08b93456", - "SpotPrice": "0.010000", - "State": "active", - "Status": { - "Code": "fulfilled", - "Message": "Your Spot request is fulfilled.", - "UpdateTime": "2014-04-30T18:16:21.000Z" - }, - "Type": "one-time" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Spot Instance request.", - "id": "ec2-describe-spot-instance-requests-1", - "title": "To describe a Spot Instance request" - } - ], - "DescribeSpotPriceHistory": [ - { - "input": { - "EndTime": "2014-01-06T08:09:10", - "InstanceTypes": [ - "m1.xlarge" - ], - "ProductDescriptions": [ - "Linux/UNIX (Amazon VPC)" - ], - "StartTime": "2014-01-06T07:08:09" - }, - "output": { - "SpotPriceHistory": [ - { - "AvailabilityZone": "us-west-1a", - "InstanceType": "m1.xlarge", - "ProductDescription": "Linux/UNIX (Amazon VPC)", - "SpotPrice": "0.080000", - "Timestamp": "2014-01-06T04:32:53.000Z" - }, - { - "AvailabilityZone": "us-west-1c", - "InstanceType": "m1.xlarge", - "ProductDescription": "Linux/UNIX (Amazon VPC)", - "SpotPrice": "0.080000", - "Timestamp": "2014-01-05T11:28:26.000Z" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example returns the Spot Price history for m1.xlarge, Linux/UNIX (Amazon VPC) instances for a particular day in January.", - "id": "ec2-describe-spot-price-history-1", - "title": "To describe Spot price history for Linux/UNIX (Amazon VPC)" - } - ], - "DescribeSubnets": [ - { - "input": { - "Filters": [ - { - "Name": "vpc-id", - "Values": [ - "vpc-a01106c2" - ] - } - ] - }, - "output": { - "Subnets": [ - { - "AvailabilityZone": "us-east-1c", - "AvailableIpAddressCount": 251, - "CidrBlock": "10.0.1.0/24", - "DefaultForAz": false, - "MapPublicIpOnLaunch": false, - "State": "available", - "SubnetId": "subnet-9d4a7b6c", - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the subnets for the specified VPC.", - "id": "ec2-describe-subnets-1", - "title": "To describe the subnets for a VPC" - } - ], - "DescribeTags": [ - { - "input": { - "Filters": [ - { - "Name": "resource-id", - "Values": [ - "i-1234567890abcdef8" - ] - } - ] - }, - "output": { - "Tags": [ - { - "Key": "Stack", - "ResourceId": "i-1234567890abcdef8", - "ResourceType": "instance", - "Value": "test" - }, - { - "Key": "Name", - "ResourceId": "i-1234567890abcdef8", - "ResourceType": "instance", - "Value": "Beta Server" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the tags for the specified instance.", - "id": "ec2-describe-tags-1", - "title": "To describe the tags for a single resource" - } - ], - "DescribeVolumeAttribute": [ - { - "input": { - "Attribute": 
"autoEnableIO", - "VolumeId": "vol-049df61146c4d7901" - }, - "output": { - "AutoEnableIO": { - "Value": false - }, - "VolumeId": "vol-049df61146c4d7901" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``autoEnableIo`` attribute of the volume with the ID ``vol-049df61146c4d7901``.", - "id": "to-describe-a-volume-attribute-1472505773492", - "title": "To describe a volume attribute" - } - ], - "DescribeVolumeStatus": [ - { - "input": { - "VolumeIds": [ - "vol-1234567890abcdef0" - ] - }, - "output": { - "VolumeStatuses": [ - { - "Actions": [ - - ], - "AvailabilityZone": "us-east-1a", - "Events": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeStatus": { - "Details": [ - { - "Name": "io-enabled", - "Status": "passed" - }, - { - "Name": "io-performance", - "Status": "not-applicable" - } - ], - "Status": "ok" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the status for the volume ``vol-1234567890abcdef0``.", - "id": "to-describe-the-status-of-a-single-volume-1472507016193", - "title": "To describe the status of a single volume" - }, - { - "input": { - "Filters": [ - { - "Name": "volume-status.status", - "Values": [ - "impaired" - ] - } - ] - }, - "output": { - "VolumeStatuses": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the status for all volumes that are impaired. In this example output, there are no impaired volumes.", - "id": "to-describe-the-status-of-impaired-volumes-1472507239821", - "title": "To describe the status of impaired volumes" - } - ], - "DescribeVolumes": [ - { - "input": { - }, - "output": { - "NextToken": "", - "Volumes": [ - { - "Attachments": [ - { - "AttachTime": "2013-12-18T22:35:00.000Z", - "DeleteOnTermination": true, - "Device": "/dev/sda1", - "InstanceId": "i-1234567890abcdef0", - "State": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2013-12-18T22:35:00.084Z", - "Size": 8, - "SnapshotId": "snap-1234567890abcdef0", - "State": "in-use", - "VolumeId": "vol-049df61146c4d7901", - "VolumeType": "standard" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your volumes in the default region.", - "id": "to-describe-all-volumes-1472506358883", - "title": "To describe all volumes" - }, - { - "input": { - "Filters": [ - { - "Name": "attachment.instance-id", - "Values": [ - "i-1234567890abcdef0" - ] - }, - { - "Name": "attachment.delete-on-termination", - "Values": [ - "true" - ] - } - ] - }, - "output": { - "Volumes": [ - { - "Attachments": [ - { - "AttachTime": "2013-12-18T22:35:00.000Z", - "DeleteOnTermination": true, - "Device": "/dev/sda1", - "InstanceId": "i-1234567890abcdef0", - "State": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2013-12-18T22:35:00.084Z", - "Size": 8, - "SnapshotId": "snap-1234567890abcdef0", - "State": "in-use", - "VolumeId": "vol-049df61146c4d7901", - "VolumeType": "standard" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all volumes that are both attached to the instance with the ID i-1234567890abcdef0 and set to delete when the instance terminates.", - "id": "to-describe-volumes-that-are-attached-to-a-specific-instance-1472506613578", - "title": "To describe volumes that are attached to a 
specific instance" - } - ], - "DescribeVpcAttribute": [ - { - "input": { - "Attribute": "enableDnsSupport", - "VpcId": "vpc-a01106c2" - }, - "output": { - "EnableDnsSupport": { - "Value": true - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the enableDnsSupport attribute. This attribute indicates whether DNS resolution is enabled for the VPC. If this attribute is true, the Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses; otherwise, it does not.", - "id": "ec2-describe-vpc-attribute-1", - "title": "To describe the enableDnsSupport attribute" - }, - { - "input": { - "Attribute": "enableDnsHostnames", - "VpcId": "vpc-a01106c2" - }, - "output": { - "EnableDnsHostnames": { - "Value": true - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the enableDnsHostnames attribute. This attribute indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is true, instances in the VPC get DNS hostnames; otherwise, they do not.", - "id": "ec2-describe-vpc-attribute-2", - "title": "To describe the enableDnsHostnames attribute" - } - ], - "DescribeVpcs": [ - { - "input": { - "VpcIds": [ - "vpc-a01106c2" - ] - }, - "output": { - "Vpcs": [ - { - "CidrBlock": "10.0.0.0/16", - "DhcpOptionsId": "dopt-7a8b9c2d", - "InstanceTenancy": "default", - "IsDefault": false, - "State": "available", - "Tags": [ - { - "Key": "Name", - "Value": "MyVPC" - } - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified VPC.", - "id": "ec2-describe-vpcs-1", - "title": "To describe a VPC" - } - ], - "DetachInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified Internet gateway from the specified VPC.", - "id": "ec2-detach-internet-gateway-1", - "title": "To detach an Internet gateway from a VPC" - } - ], - "DetachNetworkInterface": [ - { - "input": { - "AttachmentId": "eni-attach-66c4350a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified network interface from its attached instance.", - "id": "ec2-detach-network-interface-1", - "title": "To detach a network interface from an instance" - } - ], - "DetachVolume": [ - { - "input": { - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "AttachTime": "2014-02-27T19:23:06.000Z", - "Device": "/dev/sdb", - "InstanceId": "i-1234567890abcdef0", - "State": "detaching", - "VolumeId": "vol-049df61146c4d7901" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the volume (``vol-049df61146c4d7901``) from the instance it is attached to.", - "id": "to-detach-a-volume-from-an-instance-1472507977694", - "title": "To detach a volume from an instance" - } - ], - "DisableVgwRoutePropagation": [ - { - "input": { - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables the specified virtual private gateway from propagating static routes to the specified route table.", - "id": "ec2-disable-vgw-route-propagation-1", - "title": "To disable route propagation" - } - ], - 
"DisassociateAddress": [ - { - "input": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates an Elastic IP address from an instance in a VPC.", - "id": "ec2-disassociate-address-1", - "title": "To disassociate an Elastic IP address in EC2-VPC" - }, - { - "input": { - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates an Elastic IP address from an instance in EC2-Classic.", - "id": "ec2-disassociate-address-2", - "title": "To disassociate an Elastic IP addresses in EC2-Classic" - } - ], - "DisassociateRouteTable": [ - { - "input": { - "AssociationId": "rtbassoc-781d0d1a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates the specified route table from its associated subnet.", - "id": "ec2-disassociate-route-table-1", - "title": "To disassociate a route table" - } - ], - "EnableVgwRoutePropagation": [ - { - "input": { - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables the specified virtual private gateway to propagate static routes to the specified route table.", - "id": "ec2-enable-vgw-route-propagation-1", - "title": "To enable route propagation" - } - ], - "EnableVolumeIO": [ - { - "input": { - "VolumeId": "vol-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables I/O on volume ``vol-1234567890abcdef0``.", - "id": "to-enable-io-for-a-volume-1472508114867", - "title": "To enable I/O for a volume" - } - ], - "ModifyNetworkInterfaceAttribute": [ - { - "input": { - "Attachment": { - "AttachmentId": "eni-attach-43348162", - "DeleteOnTermination": false - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the attachment attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-1", - "title": "To modify the attachment attribute of a network interface" - }, - { - "input": { - "Description": { - "Value": "My description" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the description attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-2", - "title": "To modify the description attribute of a network interface" - }, - { - "input": { - "Groups": [ - "sg-903004f8", - "sg-1a2b3c4d" - ], - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command modifies the groupSet attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-3", - "title": "To modify the groupSet attribute of a network interface" - }, - { - "input": { - "NetworkInterfaceId": "eni-686ea200", - "SourceDestCheck": { - "Value": false - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command modifies the sourceDestCheck attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-4", - "title": "To modify the sourceDestCheck attribute of a network interface" - } - ], - "ModifySnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - 
"OperationType": "remove", - "SnapshotId": "snap-1234567890abcdef0", - "UserIds": [ - "123456789012" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies snapshot ``snap-1234567890abcdef0`` to remove the create volume permission for a user with the account ID ``123456789012``. If the command succeeds, no output is returned.", - "id": "to-modify-a-snapshot-attribute-1472508385907", - "title": "To modify a snapshot attribute" - }, - { - "input": { - "Attribute": "createVolumePermission", - "GroupNames": [ - "all" - ], - "OperationType": "add", - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example makes the snapshot ``snap-1234567890abcdef0`` public.", - "id": "to-make-a-snapshot-public-1472508470529", - "title": "To make a snapshot public" - } - ], - "ModifySpotFleetRequest": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "TargetCapacity": 20 - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example increases the target capacity of the specified Spot fleet request.", - "id": "ec2-modify-spot-fleet-request-1", - "title": "To increase the target capacity of a Spot fleet request" - }, - { - "input": { - "ExcessCapacityTerminationPolicy": "NoTermination ", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "TargetCapacity": 10 - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example decreases the target capacity of the specified Spot fleet request without terminating any Spot Instances as a result.", - "id": "ec2-modify-spot-fleet-request-2", - "title": "To decrease the target capacity of a Spot fleet request" - } - ], - "ModifySubnetAttribute": [ - { - "input": { - "MapPublicIpOnLaunch": { - "Value": true - }, - "SubnetId": "subnet-1a2b3c4d" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the specified subnet so that all instances launched into this subnet are assigned a public IP address.", - "id": "ec2-modify-subnet-attribute-1", - "title": "To change a subnet's public IP addressing behavior" - } - ], - "ModifyVolumeAttribute": [ - { - "input": { - "AutoEnableIO": { - "Value": true - }, - "DryRun": true, - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example sets the ``autoEnableIo`` attribute of the volume with the ID ``vol-1234567890abcdef0`` to ``true``. If the command succeeds, no output is returned.", - "id": "to-modify-a-volume-attribute-1472508596749", - "title": "To modify a volume attribute" - } - ], - "ModifyVpcAttribute": [ - { - "input": { - "EnableDnsSupport": { - "Value": false - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the enableDnsSupport attribute. This attribute indicates whether DNS resolution is enabled for the VPC. 
If this attribute is true, the Amazon DNS server resolves DNS hostnames for instances in the VPC to their corresponding IP addresses; otherwise, it does not.", - "id": "ec2-modify-vpc-attribute-1", - "title": "To modify the enableDnsSupport attribute" - }, - { - "input": { - "EnableDnsHostnames": { - "Value": false - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the enableDnsHostnames attribute. This attribute indicates whether instances launched in the VPC get DNS hostnames. If this attribute is true, instances in the VPC get DNS hostnames; otherwise, they do not.", - "id": "ec2-modify-vpc-attribute-2", - "title": "To modify the enableDnsHostnames attribute" - } - ], - "MoveAddressToVpc": [ - { - "input": { - "PublicIp": "54.123.4.56" - }, - "output": { - "Status": "MoveInProgress" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example moves the specified Elastic IP address to the EC2-VPC platform.", - "id": "ec2-move-address-to-vpc-1", - "title": "To move an address to EC2-VPC" - } - ], - "PurchaseScheduledInstances": [ - { - "input": { - "PurchaseRequests": [ - { - "InstanceCount": 1, - "PurchaseToken": "eyJ2IjoiMSIsInMiOjEsImMiOi..." - } - ] - }, - "output": { - "ScheduledInstanceSet": [ - { - "AvailabilityZone": "us-west-2b", - "CreateDate": "2016-01-25T21:43:38.612Z", - "HourlyPrice": "0.095", - "InstanceCount": 1, - "InstanceType": "c4.large", - "NetworkPlatform": "EC2-VPC", - "NextSlotStartTime": "2016-01-31T09:00:00Z", - "Platform": "Linux/UNIX", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false, - "OccurrenceUnit": "" - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012", - "SlotDurationInHours": 32, - "TermEndDate": "2017-01-31T09:00:00Z", - "TermStartDate": "2016-01-31T09:00:00Z", - "TotalScheduledInstanceHours": 1696 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example purchases a Scheduled Instance.", - "id": "ec2-purchase-scheduled-instances-1", - "title": "To purchase a Scheduled Instance" - } - ], - "ReleaseAddress": [ - { - "input": { - "AllocationId": "eipalloc-64d5890a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example releases an Elastic IP address for use with instances in a VPC.", - "id": "ec2-release-address-1", - "title": "To release an Elastic IP address for EC2-VPC" - }, - { - "input": { - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example releases an Elastic IP address for use with instances in EC2-Classic.", - "id": "ec2-release-address-2", - "title": "To release an Elastic IP addresses for EC2-Classic" - } - ], - "ReplaceNetworkAclAssociation": [ - { - "input": { - "AssociationId": "aclassoc-e5b95c8c", - "NetworkAclId": "acl-5fb85d36" - }, - "output": { - "NewAssociationId": "aclassoc-3999875b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified network ACL with the subnet for the specified network ACL association.", - "id": "ec2-replace-network-acl-association-1", - "title": "To replace the network ACL associated with a subnet" - } - ], - "ReplaceNetworkAclEntry": [ - { - "input": { - "CidrBlock": "203.0.113.12/24", - "Egress": false, - "NetworkAclId": "acl-5fb85d36", - "PortRange": { - "From": 53, - "To": 53 - }, - 
"Protocol": "udp", - "RuleAction": "allow", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example replaces an entry for the specified network ACL. The new rule 100 allows ingress traffic from 203.0.113.12/24 on UDP port 53 (DNS) into any associated subnet.", - "id": "ec2-replace-network-acl-entry-1", - "title": "To replace a network ACL entry" - } - ], - "ReplaceRoute": [ - { - "input": { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example replaces the specified route in the specified table table. The new route matches the specified CIDR and sends the traffic to the specified virtual private gateway.", - "id": "ec2-replace-route-1", - "title": "To replace a route" - } - ], - "ReplaceRouteTableAssociation": [ - { - "input": { - "AssociationId": "rtbassoc-781d0d1a", - "RouteTableId": "rtb-22574640" - }, - "output": { - "NewAssociationId": "rtbassoc-3a1f0f58" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified route table with the subnet for the specified route table association.", - "id": "ec2-replace-route-table-association-1", - "title": "To replace the route table associated with a subnet" - } - ], - "RequestSpotFleet": [ - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "SecurityGroups": [ - { - "GroupId": "sg-1a2b3c4d" - } - ], - "SubnetId": "subnet-1a2b3c4d, subnet-3c4d5e6f" - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request with two launch specifications that differ only by subnet. The Spot fleet launches the instances in the specified subnet with the lowest price. If the instances are launched in a default VPC, they receive a public IP address by default. If the instances are launched in a nondefault VPC, they do not receive a public IP address by default. Note that you can't specify different subnets from the same Availability Zone in a Spot fleet request.", - "id": "ec2-request-spot-fleet-1", - "title": "To request a Spot fleet in the subnet with the lowest price" - }, - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2a, us-west-2b" - }, - "SecurityGroups": [ - { - "GroupId": "sg-1a2b3c4d" - } - ] - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request with two launch specifications that differ only by Availability Zone. 
The Spot fleet launches the instances in the specified Availability Zone with the lowest price. If your account supports EC2-VPC only, Amazon EC2 launches the Spot instances in the default subnet of the Availability Zone. If your account supports EC2-Classic, Amazon EC2 launches the instances in EC2-Classic in the Availability Zone.", - "id": "ec2-request-spot-fleet-2", - "title": "To request a Spot fleet in the Availability Zone with the lowest price" - }, - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::880185128111:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Groups": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-1a2b3c4d" - } - ] - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns public addresses to instances launched in a nondefault VPC. Note that when you specify a network interface, you must include the subnet ID and security group ID using the network interface.", - "id": "ec2-request-spot-fleet-3", - "title": "To launch Spot instances in a subnet and assign them public IP addresses" - }, - { - "input": { - "SpotFleetRequestConfig": { - "AllocationStrategy": "diversified", - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "c4.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - }, - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - }, - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "r3.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - } - ], - "SpotPrice": "0.70", - "TargetCapacity": 30 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request that launches 30 instances using the diversified allocation strategy. The launch specifications differ by instance type. The Spot fleet distributes the instances across the launch specifications such that there are 10 instances of each type.", - "id": "ec2-request-spot-fleet-4", - "title": "To request a Spot fleet using the diversified allocation strategy" - } - ], - "RequestSpotInstances": [ - { - "input": { - "InstanceCount": 5, - "LaunchSpecification": { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2a" - }, - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ] - }, - "SpotPrice": "0.03", - "Type": "one-time" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a one-time Spot Instance request for five instances in the specified Availability Zone. If your account supports EC2-VPC only, Amazon EC2 launches the instances in the default subnet of the specified Availability Zone. 
If your account supports EC2-Classic, Amazon EC2 launches the instances in EC2-Classic in the specified Availability Zone.", - "id": "ec2-request-spot-instances-1", - "title": "To create a one-time Spot Instance request" - }, - { - "input": { - "InstanceCount": 5, - "LaunchSpecification": { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-1a2b3c4d" - }, - "SpotPrice": "0.050", - "Type": "one-time" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command creates a one-time Spot Instance request for five instances in the specified subnet. Amazon EC2 launches the instances in the specified subnet. If the VPC is a nondefault VPC, the instances do not receive a public IP address by default.", - "id": "ec2-request-spot-instances-2", - "title": "To create a one-time Spot Instance request" - } - ], - "ResetSnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets the create volume permissions for snapshot ``snap-1234567890abcdef0``. If the command succeeds, no output is returned.", - "id": "to-reset-a-snapshot-attribute-1472508825735", - "title": "To reset a snapshot attribute" - } - ], - "RestoreAddressToClassic": [ - { - "input": { - "PublicIp": "198.51.100.0" - }, - "output": { - "PublicIp": "198.51.100.0", - "Status": "MoveInProgress" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example restores the specified Elastic IP address to the EC2-Classic platform.", - "id": "ec2-restore-address-to-classic-1", - "title": "To restore an address to EC2-Classic" - } - ], - "RunScheduledInstances": [ - { - "input": { - "InstanceCount": 1, - "LaunchSpecification": { - "IamInstanceProfile": { - "Name": "my-iam-role" - }, - "ImageId": "ami-12345678", - "InstanceType": "c4.large", - "KeyName": "my-key-pair", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Groups": [ - "sg-12345678" - ], - "SubnetId": "subnet-12345678" - } - ] - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012" - }, - "output": { - "InstanceIdSet": [ - "i-1234567890abcdef0" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches the specified Scheduled Instance in a VPC.", - "id": "ec2-run-scheduled-instances-1", - "title": "To launch a Scheduled Instance in a VPC" - }, - { - "input": { - "InstanceCount": 1, - "LaunchSpecification": { - "IamInstanceProfile": { - "Name": "my-iam-role" - }, - "ImageId": "ami-12345678", - "InstanceType": "c4.large", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2b" - }, - "SecurityGroupIds": [ - "sg-12345678" - ] - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012" - }, - "output": { - "InstanceIdSet": [ - "i-1234567890abcdef0" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches the specified Scheduled Instance in EC2-Classic.", - "id": "ec2-run-scheduled-instances-2", - "title": "To launch a Scheduled Instance in EC2-Classic" - } - ], - "UnassignPrivateIpAddresses": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "PrivateIpAddresses": [ - "10.0.0.82" - ] - }, - 
"comments": { - "input": { - }, - "output": { - } - }, - "description": "This example unassigns the specified private IP address from the specified network interface.", - "id": "ec2-unassign-private-ip-addresses-1", - "title": "To unassign a secondary private IP address from a network interface" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/paginators-1.json deleted file mode 100644 index 2bd01ad..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/paginators-1.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "pagination": { - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - }, - "DescribeSpotFleetRequests": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotFleetRequestConfigs" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeVolumes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Volumes" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/service-2.json.gz deleted file mode 100644 index 101b1f8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/waiters-2.json deleted file mode 100644 index 7105194..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-09-15/waiters-2.json +++ /dev/null @@ -1,593 +0,0 @@ -{ - "version": 2, - "waiters": { - "InstanceExists": { - "delay": 5, - "maxAttempts": 40, - "operation": "DescribeInstances", - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Reservations[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - 
"ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageExists": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Images[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidAMIID.NotFound", - "state": "retry" - } - ] - }, - "ImageAvailable": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - 
"matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "KeyPairExists": { - "operation": "DescribeKeyPairs", - "delay": 5, - "maxAttempts": 6, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(KeyPairs[].KeyName) > `0`" - }, - { - "expected": "InvalidKeyPair.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "NatGatewayAvailable": { - "operation": "DescribeNatGateways", - "delay": 15, - "maxAttempts": 40, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "NatGateways[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "failed" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleting" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleted" - }, - { - "state": "retry", - "matcher": "error", - "expected": "NatGatewayNotFound" - } - ] - }, - "NetworkInterfaceAvailable": { - "operation": "DescribeNetworkInterfaces", - "delay": 20, - "maxAttempts": 10, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "NetworkInterfaces[].Status" - }, - { - "expected": "InvalidNetworkInterfaceID.NotFound", - "matcher": "error", - "state": "failure" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - 
"matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "system-error" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "matcher": "error", - "expected": "InvalidVolume.NotFound", - "state": "success" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpcExists": { - "operation": "DescribeVpcs", - "delay": 1, - "maxAttempts": 5, - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidVpcID.NotFound", - "state": "retry" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpcPeeringConnectionExists": { - "delay": 15, - "operation": "DescribeVpcPeeringConnections", - "maxAttempts": 40, - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidVpcPeeringConnectionID.NotFound", - "state": "retry" - } 
- ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index fd24c9d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/examples-1.json b/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/examples-1.json deleted file mode 100644 index 93b4bf8..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/examples-1.json +++ /dev/null @@ -1,5048 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AllocateAddress": [ - { - "input": { - "Domain": "vpc" - }, - "output": { - "AllocationId": "eipalloc-64d5890a", - "Domain": "vpc", - "PublicIp": "203.0.113.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example allocates an Elastic IP address to use with an instance in a VPC.", - "id": "ec2-allocate-address-1", - "title": "To allocate an Elastic IP address for EC2-VPC" - }, - { - "output": { - "Domain": "standard", - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example allocates an Elastic IP address to use with an instance in EC2-Classic.", - "id": "ec2-allocate-address-2", - "title": "To allocate an Elastic IP address for EC2-Classic" - } - ], - "AssignPrivateIpAddresses": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "PrivateIpAddresses": [ - "10.0.0.82" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns the specified secondary private IP address to the specified network interface.", - "id": "ec2-assign-private-ip-addresses-1", - "title": "To assign a specific secondary private IP address to an interface" - }, - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "SecondaryPrivateIpAddressCount": 2 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns two secondary private IP addresses to the specified network interface. 
Amazon EC2 automatically assigns these IP addresses from the available IP addresses in the CIDR block range of the subnet the network interface is associated with.", - "id": "ec2-assign-private-ip-addresses-2", - "title": "To assign secondary private IP addresses that Amazon EC2 selects to an interface" - } - ], - "AssociateAddress": [ - { - "input": { - "AllocationId": "eipalloc-64d5890a", - "InstanceId": "i-0b263919b6498b123" - }, - "output": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified Elastic IP address with the specified instance in a VPC.", - "id": "ec2-associate-address-1", - "title": "To associate an Elastic IP address in EC2-VPC" - }, - { - "input": { - "AllocationId": "eipalloc-64d5890a", - "NetworkInterfaceId": "eni-1a2b3c4d" - }, - "output": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified Elastic IP address with the specified network interface.", - "id": "ec2-associate-address-2", - "title": "To associate an Elastic IP address with a network interface" - }, - { - "input": { - "InstanceId": "i-07ffe74c7330ebf53", - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates an Elastic IP address with an instance in EC2-Classic.", - "id": "ec2-associate-address-3", - "title": "To associate an Elastic IP address in EC2-Classic" - } - ], - "AssociateDhcpOptions": [ - { - "input": { - "DhcpOptionsId": "dopt-d9070ebb", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified DHCP options set with the specified VPC.", - "id": "ec2-associate-dhcp-options-1", - "title": "To associate a DHCP options set with a VPC" - }, - { - "input": { - "DhcpOptionsId": "default", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the default DHCP options set with the specified VPC.", - "id": "ec2-associate-dhcp-options-2", - "title": "To associate the default DHCP options set with a VPC" - } - ], - "AssociateIamInstanceProfile": [ - { - "input": { - "IamInstanceProfile": { - "Name": "admin-role" - }, - "InstanceId": "i-123456789abcde123" - }, - "output": { - "IamInstanceProfileAssociation": { - "AssociationId": "iip-assoc-0e7736511a163c209", - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/admin-role", - "Id": "AIPAJBLK7RKJKWDXVHIEC" - }, - "InstanceId": "i-123456789abcde123", - "State": "associating" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates an IAM instance profile named admin-role with the specified instance.", - "id": "to-associate-an-iam-instance-profile-with-an-instance-1528928429850", - "title": "To associate an IAM instance profile with an instance" - } - ], - "AssociateRouteTable": [ - { - "input": { - "RouteTableId": "rtb-22574640", - "SubnetId": "subnet-9d4a7b6" - }, - "output": { - "AssociationId": "rtbassoc-781d0d1a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified route table with the specified subnet.", - "id": "ec2-associate-route-table-1", - "title": "To associate a route table with a subnet" - } - ], - "AttachInternetGateway": [ - { - "input": { - "InternetGatewayId": 
"igw-c0a643a9", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified Internet gateway to the specified VPC.", - "id": "ec2-attach-internet-gateway-1", - "title": "To attach an Internet gateway to a VPC" - } - ], - "AttachNetworkInterface": [ - { - "input": { - "DeviceIndex": 1, - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-e5aa89a3" - }, - "output": { - "AttachmentId": "eni-attach-66c4350a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches the specified network interface to the specified instance.", - "id": "ec2-attach-network-interface-1", - "title": "To attach a network interface to an instance" - } - ], - "AttachVolume": [ - { - "input": { - "Device": "/dev/sdf", - "InstanceId": "i-01474ef662b89480", - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "AttachTime": "2016-08-29T18:52:32.724Z", - "Device": "/dev/sdf", - "InstanceId": "i-01474ef662b89480", - "State": "attaching", - "VolumeId": "vol-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example attaches a volume (``vol-1234567890abcdef0``) to an instance (``i-01474ef662b89480``) as ``/dev/sdf``.", - "id": "to-attach-a-volume-to-an-instance-1472499213109", - "title": "To attach a volume to an instance" - } - ], - "AuthorizeSecurityGroupEgress": [ - { - "input": { - "GroupId": "sg-1a2b3c4d", - "IpPermissions": [ - { - "FromPort": 80, - "IpProtocol": "tcp", - "IpRanges": [ - { - "CidrIp": "10.0.0.0/16" - } - ], - "ToPort": 80 - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds a rule that grants access to the specified address ranges on TCP port 80.", - "id": "to-add-a-rule-that-allows-outbound-traffic-to-a-specific-address-range-1528929309636", - "title": "To add a rule that allows outbound traffic to a specific address range" - }, - { - "input": { - "GroupId": "sg-1a2b3c4d", - "IpPermissions": [ - { - "FromPort": 80, - "IpProtocol": "tcp", - "ToPort": 80, - "UserIdGroupPairs": [ - { - "GroupId": "sg-4b51a32f" - } - ] - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds a rule that grants access to the specified security group on TCP port 80.", - "id": "to-add-a-rule-that-allows-outbound-traffic-to-a-specific-security-group-1528929760260", - "title": "To add a rule that allows outbound traffic to a specific security group" - } - ], - "AuthorizeSecurityGroupIngress": [ - { - "input": { - "GroupId": "sg-903004f8", - "IpPermissions": [ - { - "FromPort": 22, - "IpProtocol": "tcp", - "IpRanges": [ - { - "CidrIp": "203.0.113.0/24", - "Description": "SSH access from the LA office" - } - ], - "ToPort": 22 - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables inbound traffic on TCP port 22 (SSH). 
The rule includes a description to help you identify it later.", - "id": "to-add-a-rule-that-allows-inbound-ssh-traffic-1529011610328", - "title": "To add a rule that allows inbound SSH traffic from an IPv4 address range" - }, - { - "input": { - "GroupId": "sg-111aaa22", - "IpPermissions": [ - { - "FromPort": 80, - "IpProtocol": "tcp", - "ToPort": 80, - "UserIdGroupPairs": [ - { - "Description": "HTTP access from other instances", - "GroupId": "sg-1a2b3c4d" - } - ] - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables inbound traffic on TCP port 80 from the specified security group. The group must be in the same VPC or a peer VPC. Incoming traffic is allowed based on the private IP addresses of instances that are associated with the specified security group.", - "id": "to-add-a-rule-that-allows-inbound-http-traffic-from-another-security-group-1529012163168", - "title": "To add a rule that allows inbound HTTP traffic from another security group" - }, - { - "input": { - "GroupId": "sg-123abc12 ", - "IpPermissions": [ - { - "FromPort": 3389, - "IpProtocol": "tcp", - "Ipv6Ranges": [ - { - "CidrIpv6": "2001:db8:1234:1a00::/64", - "Description": "RDP access from the NY office" - } - ], - "ToPort": 3389 - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds an inbound rule that allows RDP traffic from the specified IPv6 address range. The rule includes a description to help you identify it later.", - "id": "to-add-a-rule-with-a-description-1529012418116", - "title": "To add a rule that allows inbound RDP traffic from an IPv6 address range" - } - ], - "CancelSpotFleetRequests": [ - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ], - "TerminateInstances": true - }, - "output": { - "SuccessfulFleetRequests": [ - { - "CurrentSpotFleetRequestState": "cancelled_running", - "PreviousSpotFleetRequestState": "active", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels the specified Spot fleet request and terminates its associated Spot Instances.", - "id": "ec2-cancel-spot-fleet-requests-1", - "title": "To cancel a Spot fleet request" - }, - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ], - "TerminateInstances": false - }, - "output": { - "SuccessfulFleetRequests": [ - { - "CurrentSpotFleetRequestState": "cancelled_terminating", - "PreviousSpotFleetRequestState": "active", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels the specified Spot fleet request without terminating its associated Spot Instances.", - "id": "ec2-cancel-spot-fleet-requests-2", - "title": "To cancel a Spot fleet request without terminating its Spot Instances" - } - ], - "CancelSpotInstanceRequests": [ - { - "input": { - "SpotInstanceRequestIds": [ - "sir-08b93456" - ] - }, - "output": { - "CancelledSpotInstanceRequests": [ - { - "SpotInstanceRequestId": "sir-08b93456", - "State": "cancelled" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example cancels a Spot Instance request.", - "id": "ec2-cancel-spot-instance-requests-1", - "title": "To cancel Spot Instance requests" - } - ], - "ConfirmProductInstance": [ - { - 
"input": { - "InstanceId": "i-1234567890abcdef0", - "ProductCode": "774F4FF8" - }, - "output": { - "OwnerId": "123456789012" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example determines whether the specified product code is associated with the specified instance.", - "id": "to-confirm-the-product-instance-1472712108494", - "title": "To confirm the product instance" - } - ], - "CopyImage": [ - { - "input": { - "Description": "", - "Name": "My server", - "SourceImageId": "ami-5731123e", - "SourceRegion": "us-east-1" - }, - "output": { - "ImageId": "ami-438bea42" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies the specified AMI from the us-east-1 region to the current region.", - "id": "to-copy-an-ami-to-another-region-1529022820832", - "title": "To copy an AMI to another region" - } - ], - "CopySnapshot": [ - { - "input": { - "Description": "This is my copied snapshot.", - "DestinationRegion": "us-east-1", - "SourceRegion": "us-west-2", - "SourceSnapshotId": "snap-066877671789bd71b" - }, - "output": { - "SnapshotId": "snap-066877671789bd71b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies a snapshot with the snapshot ID of ``snap-066877671789bd71b`` from the ``us-west-2`` region to the ``us-east-1`` region and adds a short description to identify the snapshot.", - "id": "to-copy-a-snapshot-1472502259774", - "title": "To copy a snapshot" - } - ], - "CreateCustomerGateway": [ - { - "input": { - "BgpAsn": 65534, - "PublicIp": "12.1.2.3", - "Type": "ipsec.1" - }, - "output": { - "CustomerGateway": { - "BgpAsn": "65534", - "CustomerGatewayId": "cgw-0e11f167", - "IpAddress": "12.1.2.3", - "State": "available", - "Type": "ipsec.1" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a customer gateway with the specified IP address for its outside interface.", - "id": "ec2-create-customer-gateway-1", - "title": "To create a customer gateway" - } - ], - "CreateDhcpOptions": [ - { - "input": { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - "10.2.5.1", - "10.2.5.2" - ] - } - ] - }, - "output": { - "DhcpOptions": { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - { - "Value": "10.2.5.2" - }, - { - "Value": "10.2.5.1" - } - ] - } - ], - "DhcpOptionsId": "dopt-d9070ebb" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DHCP options set.", - "id": "ec2-create-dhcp-options-1", - "title": "To create a DHCP options set" - } - ], - "CreateImage": [ - { - "input": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sdh", - "Ebs": { - "VolumeSize": "100" - } - }, - { - "DeviceName": "/dev/sdc", - "VirtualName": "ephemeral1" - } - ], - "Description": "An AMI for my server", - "InstanceId": "i-1234567890abcdef0", - "Name": "My server", - "NoReboot": true - }, - "output": { - "ImageId": "ami-1a2b3c4d" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an AMI from the specified instance and adds an EBS volume with the device name /dev/sdh and an instance store volume with the device name /dev/sdc.", - "id": "to-create-an-ami-from-an-amazon-ebs-backed-instance-1529023150636", - "title": "To create an AMI from an Amazon EBS-backed instance" - } - ], - "CreateInternetGateway": [ - { - "output": { - "InternetGateway": { - "Attachments": [ - - ], - 
"InternetGatewayId": "igw-c0a643a9", - "Tags": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an Internet gateway.", - "id": "ec2-create-internet-gateway-1", - "title": "To create an Internet gateway" - } - ], - "CreateKeyPair": [ - { - "input": { - "KeyName": "my-key-pair" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a key pair named my-key-pair.", - "id": "ec2-create-key-pair-1", - "title": "To create a key pair" - } - ], - "CreateLaunchTemplate": [ - { - "input": { - "LaunchTemplateData": { - "ImageId": "ami-8c1be5f6", - "InstanceType": "t2.small", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Ipv6AddressCount": 1, - "SubnetId": "subnet-7b16de0c" - } - ], - "TagSpecifications": [ - { - "ResourceType": "instance", - "Tags": [ - { - "Key": "Name", - "Value": "webserver" - } - ] - } - ] - }, - "LaunchTemplateName": "my-template", - "VersionDescription": "WebVersion1" - }, - "output": { - "LaunchTemplate": { - "CreateTime": "2017-11-27T09:13:24.000Z", - "CreatedBy": "arn:aws:iam::123456789012:root", - "DefaultVersionNumber": 1, - "LatestVersionNumber": 1, - "LaunchTemplateId": "lt-01238c059e3466abc", - "LaunchTemplateName": "my-template" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a launch template that specifies the subnet in which to launch the instance, assigns a public IP address and an IPv6 address to the instance, and creates a tag for the instance.", - "id": "to-create-a-launch-template-1529023655488", - "title": "To create a launch template" - } - ], - "CreateLaunchTemplateVersion": [ - { - "input": { - "LaunchTemplateData": { - "ImageId": "ami-c998b6b2" - }, - "LaunchTemplateId": "lt-0abcd290751193123", - "SourceVersion": "1", - "VersionDescription": "WebVersion2" - }, - "output": { - "LaunchTemplateVersion": { - "CreateTime": "2017-12-01T13:35:46.000Z", - "CreatedBy": "arn:aws:iam::123456789012:root", - "DefaultVersion": false, - "LaunchTemplateData": { - "ImageId": "ami-c998b6b2", - "InstanceType": "t2.micro", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Ipv6Addresses": [ - { - "Ipv6Address": "2001:db8:1234:1a00::123" - } - ], - "SubnetId": "subnet-7b16de0c" - } - ] - }, - "LaunchTemplateId": "lt-0abcd290751193123", - "LaunchTemplateName": "my-template", - "VersionDescription": "WebVersion2", - "VersionNumber": 2 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a new launch template version based on version 1 of the specified launch template and specifies a different AMI ID.", - "id": "to-create-a-launch-template-version-1529024195702", - "title": "To create a launch template version" - } - ], - "CreateNatGateway": [ - { - "input": { - "AllocationId": "eipalloc-37fc1a52", - "SubnetId": "subnet-1a2b3c4d" - }, - "output": { - "NatGateway": { - "CreateTime": "2015-12-17T12:45:26.732Z", - "NatGatewayAddresses": [ - { - "AllocationId": "eipalloc-37fc1a52" - } - ], - "NatGatewayId": "nat-08d48af2a8e83edfd", - "State": "pending", - "SubnetId": "subnet-1a2b3c4d", - "VpcId": "vpc-1122aabb" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a NAT gateway in subnet subnet-1a2b3c4d and associates an Elastic IP address with the allocation ID eipalloc-37fc1a52 with the NAT gateway.", - "id": "ec2-create-nat-gateway-1", - 
"title": "To create a NAT gateway" - } - ], - "CreateNetworkAcl": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "output": { - "NetworkAcl": { - "Associations": [ - - ], - "Entries": [ - { - "CidrBlock": "0.0.0.0/0", - "Egress": true, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - }, - { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - } - ], - "IsDefault": false, - "NetworkAclId": "acl-5fb85d36", - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a network ACL for the specified VPC.", - "id": "ec2-create-network-acl-1", - "title": "To create a network ACL" - } - ], - "CreateNetworkAclEntry": [ - { - "input": { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "NetworkAclId": "acl-5fb85d36", - "PortRange": { - "From": 53, - "To": 53 - }, - "Protocol": "17", - "RuleAction": "allow", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an entry for the specified network ACL. The rule allows ingress traffic from anywhere (0.0.0.0/0) on UDP port 53 (DNS) into any associated subnet.", - "id": "ec2-create-network-acl-entry-1", - "title": "To create a network ACL entry" - } - ], - "CreateNetworkInterface": [ - { - "input": { - "Description": "my network interface", - "Groups": [ - "sg-903004f8" - ], - "PrivateIpAddress": "10.0.2.17", - "SubnetId": "subnet-9d4a7b6c" - }, - "output": { - "NetworkInterface": { - "AvailabilityZone": "us-east-1d", - "Description": "my network interface", - "Groups": [ - { - "GroupId": "sg-903004f8", - "GroupName": "default" - } - ], - "MacAddress": "02:1a:80:41:52:9c", - "NetworkInterfaceId": "eni-e5aa89a3", - "OwnerId": "123456789012", - "PrivateIpAddress": "10.0.2.17", - "PrivateIpAddresses": [ - { - "Primary": true, - "PrivateIpAddress": "10.0.2.17" - } - ], - "RequesterManaged": false, - "SourceDestCheck": true, - "Status": "pending", - "SubnetId": "subnet-9d4a7b6c", - "TagSet": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a network interface for the specified subnet.", - "id": "ec2-create-network-interface-1", - "title": "To create a network interface" - } - ], - "CreatePlacementGroup": [ - { - "input": { - "GroupName": "my-cluster", - "Strategy": "cluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a placement group with the specified name.", - "id": "to-create-a-placement-group-1472712245768", - "title": "To create a placement group" - } - ], - "CreateRoute": [ - { - "input": { - "DestinationCidrBlock": "0.0.0.0/0", - "GatewayId": "igw-c0a643a9", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a route for the specified route table. 
The route matches all traffic (0.0.0.0/0) and routes it to the specified Internet gateway.", - "id": "ec2-create-route-1", - "title": "To create a route" - } - ], - "CreateRouteTable": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "output": { - "RouteTable": { - "Associations": [ - - ], - "PropagatingVgws": [ - - ], - "RouteTableId": "rtb-22574640", - "Routes": [ - { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "local", - "State": "active" - } - ], - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a route table for the specified VPC.", - "id": "ec2-create-route-table-1", - "title": "To create a route table" - } - ], - "CreateSecurityGroup": [ - { - "input": { - "Description": "My security group", - "GroupName": "my-security-group", - "VpcId": "vpc-1a2b3c4d" - }, - "output": { - "GroupId": "sg-903004f8" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a security group for the specified VPC.", - "id": "to-create-a-security-group-for-a-vpc-1529024532716", - "title": "To create a security group for a VPC" - } - ], - "CreateSnapshot": [ - { - "input": { - "Description": "This is my root volume snapshot.", - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "Description": "This is my root volume snapshot.", - "OwnerId": "012345678910", - "SnapshotId": "snap-066877671789bd71b", - "StartTime": "2014-02-28T21:06:01.000Z", - "State": "pending", - "Tags": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeSize": 8 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a snapshot of the volume with a volume ID of ``vol-1234567890abcdef0`` and a short description to identify the snapshot.", - "id": "to-create-a-snapshot-1472502529790", - "title": "To create a snapshot" - } - ], - "CreateSpotDatafeedSubscription": [ - { - "input": { - "Bucket": "my-s3-bucket", - "Prefix": "spotdata" - }, - "output": { - "SpotDatafeedSubscription": { - "Bucket": "my-s3-bucket", - "OwnerId": "123456789012", - "Prefix": "spotdata", - "State": "Active" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot Instance data feed for your AWS account.", - "id": "ec2-create-spot-datafeed-subscription-1", - "title": "To create a Spot Instance datafeed" - } - ], - "CreateSubnet": [ - { - "input": { - "CidrBlock": "10.0.1.0/24", - "VpcId": "vpc-a01106c2" - }, - "output": { - "Subnet": { - "AvailabilityZone": "us-west-2c", - "AvailableIpAddressCount": 251, - "CidrBlock": "10.0.1.0/24", - "State": "pending", - "SubnetId": "subnet-9d4a7b6c", - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a subnet in the specified VPC with the specified CIDR block. 
We recommend that you let us select an Availability Zone for you.", - "id": "ec2-create-subnet-1", - "title": "To create a subnet" - } - ], - "CreateTags": [ - { - "input": { - "Resources": [ - "ami-78a54011" - ], - "Tags": [ - { - "Key": "Stack", - "Value": "production" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the tag Stack=production to the specified image, or overwrites an existing tag for the AMI where the tag key is Stack.", - "id": "ec2-create-tags-1", - "title": "To add a tag to a resource" - } - ], - "CreateVolume": [ - { - "input": { - "AvailabilityZone": "us-east-1a", - "Size": 80, - "VolumeType": "gp2" - }, - "output": { - "AvailabilityZone": "us-east-1a", - "CreateTime": "2016-08-29T18:52:32.724Z", - "Encrypted": false, - "Iops": 240, - "Size": 80, - "SnapshotId": "", - "State": "creating", - "VolumeId": "vol-6b60b7c7", - "VolumeType": "gp2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an 80 GiB General Purpose (SSD) volume in the Availability Zone ``us-east-1a``.", - "id": "to-create-a-new-volume-1472496724296", - "title": "To create a new volume" - }, - { - "input": { - "AvailabilityZone": "us-east-1a", - "Iops": 1000, - "SnapshotId": "snap-066877671789bd71b", - "VolumeType": "io1" - }, - "output": { - "Attachments": [ - - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2016-08-29T18:52:32.724Z", - "Iops": 1000, - "Size": 500, - "SnapshotId": "snap-066877671789bd71b", - "State": "creating", - "Tags": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeType": "io1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a new Provisioned IOPS (SSD) volume with 1000 provisioned IOPS from a snapshot in the Availability Zone ``us-east-1a``.", - "id": "to-create-a-new-provisioned-iops-ssd-volume-from-a-snapshot-1472498975176", - "title": "To create a new Provisioned IOPS (SSD) volume from a snapshot" - } - ], - "CreateVpc": [ - { - "input": { - "CidrBlock": "10.0.0.0/16" - }, - "output": { - "Vpc": { - "CidrBlock": "10.0.0.0/16", - "DhcpOptionsId": "dopt-7a8b9c2d", - "InstanceTenancy": "default", - "State": "pending", - "VpcId": "vpc-a01106c2" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a VPC with the specified CIDR block.", - "id": "ec2-create-vpc-1", - "title": "To create a VPC" - } - ], - "DeleteCustomerGateway": [ - { - "input": { - "CustomerGatewayId": "cgw-0e11f167" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified customer gateway.", - "id": "ec2-delete-customer-gateway-1", - "title": "To delete a customer gateway" - } - ], - "DeleteDhcpOptions": [ - { - "input": { - "DhcpOptionsId": "dopt-d9070ebb" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DHCP options set.", - "id": "ec2-delete-dhcp-options-1", - "title": "To delete a DHCP options set" - } - ], - "DeleteInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified Internet gateway.", - "id": "ec2-delete-internet-gateway-1", - "title": "To delete an Internet gateway" - } - ], - "DeleteKeyPair": [ - { - "input": { - "KeyName": "my-key-pair" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This 
example deletes the specified key pair.", - "id": "ec2-delete-key-pair-1", - "title": "To delete a key pair" - } - ], - "DeleteLaunchTemplate": [ - { - "input": { - "LaunchTemplateId": "lt-0abcd290751193123" - }, - "output": { - "LaunchTemplate": { - "CreateTime": "2017-11-23T16:46:25.000Z", - "CreatedBy": "arn:aws:iam::123456789012:root", - "DefaultVersionNumber": 2, - "LatestVersionNumber": 2, - "LaunchTemplateId": "lt-0abcd290751193123", - "LaunchTemplateName": "my-template" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified launch template.", - "id": "to-delete-a-launch-template-1529024658216", - "title": "To delete a launch template" - } - ], - "DeleteLaunchTemplateVersions": [ - { - "input": { - "LaunchTemplateId": "lt-0abcd290751193123", - "Versions": [ - "1" - ] - }, - "output": { - "SuccessfullyDeletedLaunchTemplateVersions": [ - { - "LaunchTemplateId": "lt-0abcd290751193123", - "LaunchTemplateName": "my-template", - "VersionNumber": 1 - } - ], - "UnsuccessfullyDeletedLaunchTemplateVersions": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified launch template version.", - "id": "to-delete-a-launch-template-version-1529024790864", - "title": "To delete a launch template version" - } - ], - "DeleteNatGateway": [ - { - "input": { - "NatGatewayId": "nat-04ae55e711cec5680" - }, - "output": { - "NatGatewayId": "nat-04ae55e711cec5680" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified NAT gateway.", - "id": "ec2-delete-nat-gateway-1", - "title": "To delete a NAT gateway" - } - ], - "DeleteNetworkAcl": [ - { - "input": { - "NetworkAclId": "acl-5fb85d36" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified network ACL.", - "id": "ec2-delete-network-acl-1", - "title": "To delete a network ACL" - } - ], - "DeleteNetworkAclEntry": [ - { - "input": { - "Egress": true, - "NetworkAclId": "acl-5fb85d36", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes ingress rule number 100 from the specified network ACL.", - "id": "ec2-delete-network-acl-entry-1", - "title": "To delete a network ACL entry" - } - ], - "DeleteNetworkInterface": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified network interface.", - "id": "ec2-delete-network-interface-1", - "title": "To delete a network interface" - } - ], - "DeletePlacementGroup": [ - { - "input": { - "GroupName": "my-cluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified placement group.\n", - "id": "to-delete-a-placement-group-1472712349959", - "title": "To delete a placement group" - } - ], - "DeleteRoute": [ - { - "input": { - "DestinationCidrBlock": "0.0.0.0/0", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified route from the specified route table.", - "id": "ec2-delete-route-1", - "title": "To delete a route" - } - ], - "DeleteRouteTable": [ - { - "input": { - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified route table.", - 
"id": "ec2-delete-route-table-1", - "title": "To delete a route table" - } - ], - "DeleteSecurityGroup": [ - { - "input": { - "GroupId": "sg-903004f8" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified security group.", - "id": "to-delete-a-security-group-1529024952972", - "title": "To delete a security group" - } - ], - "DeleteSnapshot": [ - { - "input": { - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a snapshot with the snapshot ID of ``snap-1234567890abcdef0``. If the command succeeds, no output is returned.", - "id": "to-delete-a-snapshot-1472503042567", - "title": "To delete a snapshot" - } - ], - "DeleteSpotDatafeedSubscription": [ - { - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes a Spot data feed subscription for the account.", - "id": "ec2-delete-spot-datafeed-subscription-1", - "title": "To cancel a Spot Instance data feed subscription" - } - ], - "DeleteSubnet": [ - { - "input": { - "SubnetId": "subnet-9d4a7b6c" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified subnet.", - "id": "ec2-delete-subnet-1", - "title": "To delete a subnet" - } - ], - "DeleteTags": [ - { - "input": { - "Resources": [ - "ami-78a54011" - ], - "Tags": [ - { - "Key": "Stack", - "Value": "test" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the tag Stack=test from the specified image.", - "id": "ec2-delete-tags-1", - "title": "To delete a tag from a resource" - } - ], - "DeleteVolume": [ - { - "input": { - "VolumeId": "vol-049df61146c4d7901" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes an available volume with the volume ID of ``vol-049df61146c4d7901``. 
If the command succeeds, no output is returned.", - "id": "to-delete-a-volume-1472503111160", - "title": "To delete a volume" - } - ], - "DeleteVpc": [ - { - "input": { - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified VPC.", - "id": "ec2-delete-vpc-1", - "title": "To delete a VPC" - } - ], - "DescribeAccountAttributes": [ - { - "input": { - "AttributeNames": [ - "supported-platforms" - ] - }, - "output": { - "AccountAttributes": [ - { - "AttributeName": "supported-platforms", - "AttributeValues": [ - { - "AttributeValue": "EC2" - }, - { - "AttributeValue": "VPC" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the supported-platforms attribute for your AWS account.", - "id": "ec2-describe-account-attributes-1", - "title": "To describe a single attribute for your AWS account" - }, - { - "output": { - "AccountAttributes": [ - { - "AttributeName": "supported-platforms", - "AttributeValues": [ - { - "AttributeValue": "EC2" - }, - { - "AttributeValue": "VPC" - } - ] - }, - { - "AttributeName": "vpc-max-security-groups-per-interface", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "max-elastic-ips", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "max-instances", - "AttributeValues": [ - { - "AttributeValue": "20" - } - ] - }, - { - "AttributeName": "vpc-max-elastic-ips", - "AttributeValues": [ - { - "AttributeValue": "5" - } - ] - }, - { - "AttributeName": "default-vpc", - "AttributeValues": [ - { - "AttributeValue": "none" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the attributes for your AWS account.", - "id": "ec2-describe-account-attributes-2", - "title": "To describe all attributes for your AWS account" - } - ], - "DescribeAddresses": [ - { - "output": { - "Addresses": [ - { - "Domain": "standard", - "InstanceId": "i-1234567890abcdef0", - "PublicIp": "198.51.100.0" - }, - { - "AllocationId": "eipalloc-12345678", - "AssociationId": "eipassoc-12345678", - "Domain": "vpc", - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-12345678", - "NetworkInterfaceOwnerId": "123456789012", - "PrivateIpAddress": "10.0.1.241", - "PublicIp": "203.0.113.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses.", - "id": "ec2-describe-addresses-1", - "title": "To describe your Elastic IP addresses" - }, - { - "input": { - "Filters": [ - { - "Name": "domain", - "Values": [ - "vpc" - ] - } - ] - }, - "output": { - "Addresses": [ - { - "AllocationId": "eipalloc-12345678", - "AssociationId": "eipassoc-12345678", - "Domain": "vpc", - "InstanceId": "i-1234567890abcdef0", - "NetworkInterfaceId": "eni-12345678", - "NetworkInterfaceOwnerId": "123456789012", - "PrivateIpAddress": "10.0.1.241", - "PublicIp": "203.0.113.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses for use with instances in a VPC.", - "id": "ec2-describe-addresses-2", - "title": "To describe your Elastic IP addresses for EC2-VPC" - }, - { - "input": { - "Filters": [ - { - "Name": "domain", - "Values": [ - "standard" - ] - } - ] - }, - "output": { - "Addresses": [ - { - "Domain": "standard", - "InstanceId": "i-1234567890abcdef0", - "PublicIp": 
"198.51.100.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes your Elastic IP addresses for use with instances in EC2-Classic.", - "id": "ec2-describe-addresses-3", - "title": "To describe your Elastic IP addresses for EC2-Classic" - } - ], - "DescribeAvailabilityZones": [ - { - "output": { - "AvailabilityZones": [ - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1b" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1c" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1d" - }, - { - "Messages": [ - - ], - "RegionName": "us-east-1", - "State": "available", - "ZoneName": "us-east-1e" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Availability Zones that are available to you. The response includes Availability Zones only for the current region.", - "id": "ec2-describe-availability-zones-1", - "title": "To describe your Availability Zones" - } - ], - "DescribeCustomerGateways": [ - { - "input": { - "CustomerGatewayIds": [ - "cgw-0e11f167" - ] - }, - "output": { - "CustomerGateways": [ - { - "BgpAsn": "65534", - "CustomerGatewayId": "cgw-0e11f167", - "IpAddress": "12.1.2.3", - "State": "available", - "Type": "ipsec.1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified customer gateway.", - "id": "ec2-describe-customer-gateways-1", - "title": "To describe a customer gateway" - } - ], - "DescribeDhcpOptions": [ - { - "input": { - "DhcpOptionsIds": [ - "dopt-d9070ebb" - ] - }, - "output": { - "DhcpOptions": [ - { - "DhcpConfigurations": [ - { - "Key": "domain-name-servers", - "Values": [ - { - "Value": "10.2.5.2" - }, - { - "Value": "10.2.5.1" - } - ] - } - ], - "DhcpOptionsId": "dopt-d9070ebb" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified DHCP options set.", - "id": "ec2-describe-dhcp-options-1", - "title": "To describe a DHCP options set" - } - ], - "DescribeIamInstanceProfileAssociations": [ - { - "input": { - "AssociationIds": [ - "iip-assoc-0db249b1f25fa24b8" - ] - }, - "output": { - "IamInstanceProfileAssociations": [ - { - "AssociationId": "iip-assoc-0db249b1f25fa24b8", - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/admin-role", - "Id": "AIPAJVQN4F5WVLGCJDRGM" - }, - "InstanceId": "i-09eb09efa73ec1dee", - "State": "associated" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified IAM instance profile association.", - "id": "to-describe-an-iam-instance-profile-association-1529025123918", - "title": "To describe an IAM instance profile association" - } - ], - "DescribeImageAttribute": [ - { - "input": { - "Attribute": "launchPermission", - "ImageId": "ami-5731123e" - }, - "output": { - "ImageId": "ami-5731123e", - "LaunchPermissions": [ - { - "UserId": "123456789012" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the launch permissions for the specified AMI.", - "id": "to-describe-the-launch-permissions-for-an-ami-1529025296264", - "title": "To describe the launch permissions for an AMI" - } - ], - "DescribeImages": [ - { - "input": { - "ImageIds": [ - "ami-5731123e" - ] - }, - 
"output": { - "Images": [ - { - "Architecture": "x86_64", - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "DeleteOnTermination": true, - "SnapshotId": "snap-1234567890abcdef0", - "VolumeSize": 8, - "VolumeType": "standard" - } - } - ], - "Description": "An AMI for my server", - "Hypervisor": "xen", - "ImageId": "ami-5731123e", - "ImageLocation": "123456789012/My server", - "ImageType": "machine", - "KernelId": "aki-88aa75e1", - "Name": "My server", - "OwnerId": "123456789012", - "Public": false, - "RootDeviceName": "/dev/sda1", - "RootDeviceType": "ebs", - "State": "available", - "VirtualizationType": "paravirtual" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified AMI.", - "id": "to-describe-an-ami-1529025482866", - "title": "To describe an AMI" - } - ], - "DescribeInstanceAttribute": [ - { - "input": { - "Attribute": "instanceType", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": { - "Value": "t1.micro" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the instance type of the specified instance.\n", - "id": "to-describe-the-instance-type-1472712432132", - "title": "To describe the instance type" - }, - { - "input": { - "Attribute": "disableApiTermination", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "DisableApiTermination": { - "Value": "false" - }, - "InstanceId": "i-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``disableApiTermination`` attribute of the specified instance.\n", - "id": "to-describe-the-disableapitermination-attribute-1472712533466", - "title": "To describe the disableApiTermination attribute" - }, - { - "input": { - "Attribute": "blockDeviceMapping", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "AttachTime": "2013-05-17T22:42:34.000Z", - "DeleteOnTermination": true, - "Status": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - }, - { - "DeviceName": "/dev/sdf", - "Ebs": { - "AttachTime": "2013-09-10T23:07:00.000Z", - "DeleteOnTermination": false, - "Status": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - } - ], - "InstanceId": "i-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``blockDeviceMapping`` attribute of the specified instance.\n", - "id": "to-describe-the-block-device-mapping-for-an-instance-1472712645423", - "title": "To describe the block device mapping for an instance" - } - ], - "DescribeInstanceStatus": [ - { - "input": { - "InstanceIds": [ - "i-1234567890abcdef0" - ] - }, - "output": { - "InstanceStatuses": [ - { - "AvailabilityZone": "us-east-1d", - "InstanceId": "i-1234567890abcdef0", - "InstanceState": { - "Code": 16, - "Name": "running" - }, - "InstanceStatus": { - "Details": [ - { - "Name": "reachability", - "Status": "passed" - } - ], - "Status": "ok" - }, - "SystemStatus": { - "Details": [ - { - "Name": "reachability", - "Status": "passed" - } - ], - "Status": "ok" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the current status of the specified instance.", - "id": "to-describe-the-status-of-an-instance-1529025696830", - "title": "To describe the status of an instance" - } - ], - 
"DescribeInstances": [ - { - "input": { - "InstanceIds": [ - "i-1234567890abcdef0" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified instance.", - "id": "to-describe-an-amazon-ec2-instance-1529025982172", - "title": "To describe an Amazon EC2 instance" - }, - { - "input": { - "Filters": [ - { - "Name": "instance-type", - "Values": [ - "t2.micro" - ] - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the instances with the t2.micro instance type.", - "id": "to-describe-the-instances-with-the-instance-type-t2micro-1529026147602", - "title": "To describe the instances with a specific instance type" - }, - { - "input": { - "Filters": [ - { - "Name": "tag:Purpose", - "Values": [ - "test" - ] - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the instances with the Purpose=test tag.", - "id": "to-describe-the-instances-with-a-specific-tag-1529026251928", - "title": "To describe the instances with a specific tag" - } - ], - "DescribeInternetGateways": [ - { - "input": { - "Filters": [ - { - "Name": "attachment.vpc-id", - "Values": [ - "vpc-a01106c2" - ] - } - ] - }, - "output": { - "InternetGateways": [ - { - "Attachments": [ - { - "State": "available", - "VpcId": "vpc-a01106c2" - } - ], - "InternetGatewayId": "igw-c0a643a9", - "Tags": [ - - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Internet gateway for the specified VPC.", - "id": "ec2-describe-internet-gateways-1", - "title": "To describe the Internet gateway for a VPC" - } - ], - "DescribeKeyPairs": [ - { - "input": { - "KeyNames": [ - "my-key-pair" - ] - }, - "output": { - "KeyPairs": [ - { - "KeyFingerprint": "1f:51:ae:28:bf:89:e9:d8:1f:25:5d:37:2d:7d:b8:ca:9f:f5:f1:6f", - "KeyName": "my-key-pair" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example displays the fingerprint for the specified key.", - "id": "ec2-describe-key-pairs-1", - "title": "To display a key pair" - } - ], - "DescribeLaunchTemplateVersions": [ - { - "input": { - "LaunchTemplateId": "068f72b72934aff71" - }, - "output": { - "LaunchTemplateVersions": [ - { - "CreateTime": "2017-11-20T13:12:32.000Z", - "CreatedBy": "arn:aws:iam::123456789102:root", - "DefaultVersion": false, - "LaunchTemplateData": { - "ImageId": "ami-6057e21a", - "InstanceType": "t2.medium", - "KeyName": "kp-us-east", - "NetworkInterfaces": [ - { - "DeviceIndex": 0, - "Groups": [ - "sg-7c227019" - ], - "SubnetId": "subnet-1a2b3c4d" - } - ] - }, - "LaunchTemplateId": "lt-068f72b72934aff71", - "LaunchTemplateName": "Webservers", - "VersionNumber": 2 - }, - { - "CreateTime": "2017-11-20T12:52:33.000Z", - "CreatedBy": "arn:aws:iam::123456789102:root", - "DefaultVersion": true, - "LaunchTemplateData": { - "ImageId": "ami-aabbcc11", - "InstanceType": "t2.medium", - "KeyName": "kp-us-east", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "Groups": [ - "sg-7c227019" - ], - "SubnetId": "subnet-7b16de0c" - } - ], - "UserData": "" - }, - "LaunchTemplateId": "lt-068f72b72934aff71", - "LaunchTemplateName": "Webservers", - "VersionNumber": 1 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the versions for the specified launch 
template.", - "id": "to-describe-the-versions-for-a-launch-template-1529344425048", - "title": "To describe the versions for a launch template" - } - ], - "DescribeLaunchTemplates": [ - { - "input": { - "LaunchTemplateIds": [ - "lt-01238c059e3466abc" - ] - }, - "output": { - "LaunchTemplates": [ - { - "CreateTime": "2018-01-16T04:32:57.000Z", - "CreatedBy": "arn:aws:iam::123456789012:root", - "DefaultVersionNumber": 1, - "LatestVersionNumber": 1, - "LaunchTemplateId": "lt-01238c059e3466abc", - "LaunchTemplateName": "my-template" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified launch template.", - "id": "to-describe-a-launch-template-1529344182862", - "title": "To describe a launch template" - } - ], - "DescribeMovingAddresses": [ - { - "output": { - "MovingAddressStatuses": [ - { - "MoveStatus": "MovingToVpc", - "PublicIp": "198.51.100.0" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your moving Elastic IP addresses.", - "id": "ec2-describe-moving-addresses-1", - "title": "To describe your moving addresses" - } - ], - "DescribeNatGateways": [ - { - "input": { - "Filter": [ - { - "Name": "vpc-id", - "Values": [ - "vpc-1a2b3c4d" - ] - } - ] - }, - "output": { - "NatGateways": [ - { - "CreateTime": "2015-12-01T12:26:55.983Z", - "NatGatewayAddresses": [ - { - "AllocationId": "eipalloc-89c620ec", - "NetworkInterfaceId": "eni-9dec76cd", - "PrivateIp": "10.0.0.149", - "PublicIp": "198.11.222.333" - } - ], - "NatGatewayId": "nat-05dba92075d71c408", - "State": "available", - "SubnetId": "subnet-847e4dc2", - "VpcId": "vpc-1a2b3c4d" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the NAT gateway for the specified VPC.", - "id": "ec2-describe-nat-gateways-1", - "title": "To describe a NAT gateway" - } - ], - "DescribeNetworkAcls": [ - { - "input": { - "NetworkAclIds": [ - "acl-5fb85d36" - ] - }, - "output": { - "NetworkAcls": [ - { - "Associations": [ - { - "NetworkAclAssociationId": "aclassoc-66ea5f0b", - "NetworkAclId": "acl-9aeb5ef7", - "SubnetId": "subnet-65ea5f08" - } - ], - "Entries": [ - { - "CidrBlock": "0.0.0.0/0", - "Egress": true, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - }, - { - "CidrBlock": "0.0.0.0/0", - "Egress": false, - "Protocol": "-1", - "RuleAction": "deny", - "RuleNumber": 32767 - } - ], - "IsDefault": false, - "NetworkAclId": "acl-5fb85d36", - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified network ACL.", - "id": "ec2-", - "title": "To describe a network ACL" - } - ], - "DescribeNetworkInterfaceAttribute": [ - { - "input": { - "Attribute": "attachment", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Attachment": { - "AttachTime": "2015-05-21T20:02:20.000Z", - "AttachmentId": "eni-attach-43348162", - "DeleteOnTermination": true, - "DeviceIndex": 0, - "InstanceId": "i-1234567890abcdef0", - "InstanceOwnerId": "123456789012", - "Status": "attached" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the attachment attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-1", - "title": "To describe the attachment attribute of a network interface" - }, - { - "input": { - "Attribute": 
"description", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Description": { - "Value": "My description" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the description attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-2", - "title": "To describe the description attribute of a network interface" - }, - { - "input": { - "Attribute": "groupSet", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "Groups": [ - { - "GroupId": "sg-903004f8", - "GroupName": "my-security-group" - } - ], - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the groupSet attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-3", - "title": "To describe the groupSet attribute of a network interface" - }, - { - "input": { - "Attribute": "sourceDestCheck", - "NetworkInterfaceId": "eni-686ea200" - }, - "output": { - "NetworkInterfaceId": "eni-686ea200", - "SourceDestCheck": { - "Value": true - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the sourceDestCheck attribute of the specified network interface.", - "id": "ec2-describe-network-interface-attribute-4", - "title": "To describe the sourceDestCheck attribute of a network interface" - } - ], - "DescribeNetworkInterfaces": [ - { - "input": { - "NetworkInterfaceIds": [ - "eni-e5aa89a3" - ] - }, - "output": { - "NetworkInterfaces": [ - { - "Association": { - "AssociationId": "eipassoc-0fbb766a", - "IpOwnerId": "123456789012", - "PublicDnsName": "ec2-203-0-113-12.compute-1.amazonaws.com", - "PublicIp": "203.0.113.12" - }, - "Attachment": { - "AttachTime": "2013-11-30T23:36:42.000Z", - "AttachmentId": "eni-attach-66c4350a", - "DeleteOnTermination": false, - "DeviceIndex": 1, - "InstanceId": "i-1234567890abcdef0", - "InstanceOwnerId": "123456789012", - "Status": "attached" - }, - "AvailabilityZone": "us-east-1d", - "Description": "my network interface", - "Groups": [ - { - "GroupId": "sg-8637d3e3", - "GroupName": "default" - } - ], - "MacAddress": "02:2f:8f:b0:cf:75", - "NetworkInterfaceId": "eni-e5aa89a3", - "OwnerId": "123456789012", - "PrivateDnsName": "ip-10-0-1-17.ec2.internal", - "PrivateIpAddress": "10.0.1.17", - "PrivateIpAddresses": [ - { - "Association": { - "AssociationId": "eipassoc-0fbb766a", - "IpOwnerId": "123456789012", - "PublicDnsName": "ec2-203-0-113-12.compute-1.amazonaws.com", - "PublicIp": "203.0.113.12" - }, - "Primary": true, - "PrivateDnsName": "ip-10-0-1-17.ec2.internal", - "PrivateIpAddress": "10.0.1.17" - } - ], - "RequesterManaged": false, - "SourceDestCheck": true, - "Status": "in-use", - "SubnetId": "subnet-b61f49f0", - "TagSet": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "ec2-describe-network-interfaces-1", - "title": "To describe a network interface" - } - ], - "DescribeRegions": [ - { - "output": { - "Regions": [ - { - "Endpoint": "ec2.ap-south-1.amazonaws.com", - "RegionName": "ap-south-1" - }, - { - "Endpoint": "ec2.eu-west-1.amazonaws.com", - "RegionName": "eu-west-1" - }, - { - "Endpoint": "ec2.ap-southeast-1.amazonaws.com", - "RegionName": "ap-southeast-1" - }, - { - "Endpoint": "ec2.ap-southeast-2.amazonaws.com", - "RegionName": "ap-southeast-2" - }, - { - "Endpoint": 
"ec2.eu-central-1.amazonaws.com", - "RegionName": "eu-central-1" - }, - { - "Endpoint": "ec2.ap-northeast-2.amazonaws.com", - "RegionName": "ap-northeast-2" - }, - { - "Endpoint": "ec2.ap-northeast-1.amazonaws.com", - "RegionName": "ap-northeast-1" - }, - { - "Endpoint": "ec2.us-east-1.amazonaws.com", - "RegionName": "us-east-1" - }, - { - "Endpoint": "ec2.sa-east-1.amazonaws.com", - "RegionName": "sa-east-1" - }, - { - "Endpoint": "ec2.us-west-1.amazonaws.com", - "RegionName": "us-west-1" - }, - { - "Endpoint": "ec2.us-west-2.amazonaws.com", - "RegionName": "us-west-2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all the regions that are available to you.", - "id": "ec2-describe-regions-1", - "title": "To describe your regions" - } - ], - "DescribeRouteTables": [ - { - "input": { - "RouteTableIds": [ - "rtb-1f382e7d" - ] - }, - "output": { - "RouteTables": [ - { - "Associations": [ - { - "Main": true, - "RouteTableAssociationId": "rtbassoc-d8ccddba", - "RouteTableId": "rtb-1f382e7d" - } - ], - "PropagatingVgws": [ - - ], - "RouteTableId": "rtb-1f382e7d", - "Routes": [ - { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "local", - "State": "active" - } - ], - "Tags": [ - - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified route table.", - "id": "ec2-describe-route-tables-1", - "title": "To describe a route table" - } - ], - "DescribeScheduledInstanceAvailability": [ - { - "input": { - "FirstSlotStartTimeRange": { - "EarliestTime": "2016-01-31T00:00:00Z", - "LatestTime": "2016-01-31T04:00:00Z" - }, - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDays": [ - 1 - ] - } - }, - "output": { - "ScheduledInstanceAvailabilitySet": [ - { - "AvailabilityZone": "us-west-2b", - "AvailableInstanceCount": 20, - "FirstSlotStartTime": "2016-01-31T00:00:00Z", - "HourlyPrice": "0.095", - "InstanceType": "c4.large", - "MaxTermDurationInDays": 366, - "MinTermDurationInDays": 366, - "NetworkPlatform": "EC2-VPC", - "Platform": "Linux/UNIX", - "PurchaseToken": "eyJ2IjoiMSIsInMiOjEsImMiOi...", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false - }, - "SlotDurationInHours": 23, - "TotalScheduledInstanceHours": 1219 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes a schedule that occurs every week on Sunday, starting on the specified date. 
Note that the output contains a single schedule as an example.", - "id": "ec2-describe-scheduled-instance-availability-1", - "title": "To describe an available schedule" - } - ], - "DescribeScheduledInstances": [ - { - "input": { - "ScheduledInstanceIds": [ - "sci-1234-1234-1234-1234-123456789012" - ] - }, - "output": { - "ScheduledInstanceSet": [ - { - "AvailabilityZone": "us-west-2b", - "CreateDate": "2016-01-25T21:43:38.612Z", - "HourlyPrice": "0.095", - "InstanceCount": 1, - "InstanceType": "c4.large", - "NetworkPlatform": "EC2-VPC", - "NextSlotStartTime": "2016-01-31T09:00:00Z", - "Platform": "Linux/UNIX", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false, - "OccurrenceUnit": "" - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012", - "SlotDurationInHours": 32, - "TermEndDate": "2017-01-31T09:00:00Z", - "TermStartDate": "2016-01-31T09:00:00Z", - "TotalScheduledInstanceHours": 1696 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Scheduled Instance.", - "id": "ec2-describe-scheduled-instances-1", - "title": "To describe your Scheduled Instances" - } - ], - "DescribeSecurityGroupReferences": [ - { - "input": { - "GroupId": [ - "sg-903004f8" - ] - }, - "output": { - "SecurityGroupReferenceSet": [ - { - "GroupId": "sg-903004f8", - "ReferencingVpcId": "vpc-1a2b3c4d", - "VpcPeeringConnectionId": "pcx-b04deed9" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the security group references for the specified security group.", - "id": "to-describe-security-group-references-1529354312088", - "title": "To describe security group references" - } - ], - "DescribeSecurityGroups": [ - { - "input": { - "GroupIds": [ - "sg-903004f8" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified security group.", - "id": "to-describe-a-security-group-1529354426314", - "title": "To describe a security group" - }, - { - "input": { - "Filters": [ - { - "Name": "tag:Purpose", - "Values": [ - "test" - ] - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the security groups that include the specified tag (Purpose=test).", - "id": "to-describe-a-tagged-security-group-1529354553880", - "title": "To describe a tagged security group" - } - ], - "DescribeSnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "SnapshotId": "snap-066877671789bd71b" - }, - "output": { - "CreateVolumePermissions": [ - - ], - "SnapshotId": "snap-066877671789bd71b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``createVolumePermission`` attribute on a snapshot with the snapshot ID of ``snap-066877671789bd71b``.", - "id": "to-describe-snapshot-attributes-1472503199736", - "title": "To describe snapshot attributes" - } - ], - "DescribeSnapshots": [ - { - "input": { - "SnapshotIds": [ - "snap-1234567890abcdef0" - ] - }, - "output": { - "NextToken": "", - "Snapshots": [ - { - "Description": "This is my snapshot.", - "OwnerId": "012345678910", - "Progress": "100%", - "SnapshotId": "snap-1234567890abcdef0", - "StartTime": "2014-02-28T21:28:32.000Z", - "State": "completed", - "VolumeId": "vol-049df61146c4d7901", - "VolumeSize": 8 - } - ] - }, - "comments": { - "input": { - }, - 
"output": { - } - }, - "description": "This example describes a snapshot with the snapshot ID of ``snap-1234567890abcdef0``.", - "id": "to-describe-a-snapshot-1472503807850", - "title": "To describe a snapshot" - }, - { - "input": { - "Filters": [ - { - "Name": "status", - "Values": [ - "pending" - ] - } - ], - "OwnerIds": [ - "012345678910" - ] - }, - "output": { - "NextToken": "", - "Snapshots": [ - { - "Description": "This is my copied snapshot.", - "OwnerId": "012345678910", - "Progress": "87%", - "SnapshotId": "snap-066877671789bd71b", - "StartTime": "2014-02-28T21:37:27.000Z", - "State": "pending", - "VolumeId": "vol-1234567890abcdef0", - "VolumeSize": 8 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all snapshots owned by the ID 012345678910 that are in the ``pending`` status.", - "id": "to-describe-snapshots-using-filters-1472503929793", - "title": "To describe snapshots using filters" - } - ], - "DescribeSpotDatafeedSubscription": [ - { - "output": { - "SpotDatafeedSubscription": { - "Bucket": "my-s3-bucket", - "OwnerId": "123456789012", - "Prefix": "spotdata", - "State": "Active" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the Spot Instance datafeed subscription for your AWS account.", - "id": "ec2-describe-spot-datafeed-subscription-1", - "title": "To describe the datafeed for your AWS account" - } - ], - "DescribeSpotFleetInstances": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "output": { - "ActiveInstances": [ - { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": "m3.medium", - "SpotInstanceRequestId": "sir-08b93456" - } - ], - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the Spot Instances associated with the specified Spot fleet.", - "id": "ec2-describe-spot-fleet-instances-1", - "title": "To describe the Spot Instances associated with a Spot fleet" - } - ], - "DescribeSpotFleetRequestHistory": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "StartTime": "2015-05-26T00:00:00Z" - }, - "output": { - "HistoryRecords": [ - { - "EventInformation": { - "EventSubType": "submitted" - }, - "EventType": "fleetRequestChange", - "Timestamp": "2015-05-26T23:17:20.697Z" - }, - { - "EventInformation": { - "EventSubType": "active" - }, - "EventType": "fleetRequestChange", - "Timestamp": "2015-05-26T23:17:20.873Z" - }, - { - "EventInformation": { - "EventSubType": "launched", - "InstanceId": "i-1234567890abcdef0" - }, - "EventType": "instanceChange", - "Timestamp": "2015-05-26T23:21:21.712Z" - }, - { - "EventInformation": { - "EventSubType": "launched", - "InstanceId": "i-1234567890abcdef1" - }, - "EventType": "instanceChange", - "Timestamp": "2015-05-26T23:21:21.816Z" - } - ], - "NextToken": "CpHNsscimcV5oH7bSbub03CI2Qms5+ypNpNm+53MNlR0YcXAkp0xFlfKf91yVxSExmbtma3awYxMFzNA663ZskT0AHtJ6TCb2Z8bQC2EnZgyELbymtWPfpZ1ZbauVg+P+TfGlWxWWB/Vr5dk5d4LfdgA/DRAHUrYgxzrEXAMPLE=", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "StartTime": "2015-05-26T00:00:00Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example returns the history for the specified Spot fleet starting at the specified time.", - "id": "ec2-describe-spot-fleet-request-history-1", - "title": "To describe Spot fleet history" - } - ], - 
"DescribeSpotFleetRequests": [ - { - "input": { - "SpotFleetRequestIds": [ - "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - ] - }, - "output": { - "SpotFleetRequestConfigs": [ - { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "EbsOptimized": false, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "cc2.8xlarge", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "SecondaryPrivateIpAddressCount": 0, - "SubnetId": "subnet-a61dafcf" - } - ] - }, - { - "EbsOptimized": false, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "r3.8xlarge", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeleteOnTermination": false, - "DeviceIndex": 0, - "SecondaryPrivateIpAddressCount": 0, - "SubnetId": "subnet-a61dafcf" - } - ] - } - ], - "SpotPrice": "0.05", - "TargetCapacity": 20 - }, - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "SpotFleetRequestState": "active" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Spot fleet request.", - "id": "ec2-describe-spot-fleet-requests-1", - "title": "To describe a Spot fleet request" - } - ], - "DescribeSpotInstanceRequests": [ - { - "input": { - "SpotInstanceRequestIds": [ - "sir-08b93456" - ] - }, - "output": { - "SpotInstanceRequests": [ - { - "CreateTime": "2014-04-30T18:14:55.000Z", - "InstanceId": "i-1234567890abcdef0", - "LaunchSpecification": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sda1", - "Ebs": { - "DeleteOnTermination": true, - "VolumeSize": 8, - "VolumeType": "standard" - } - } - ], - "EbsOptimized": false, - "ImageId": "ami-7aba833f", - "InstanceType": "m1.small", - "KeyName": "my-key-pair", - "SecurityGroups": [ - { - "GroupId": "sg-e38f24a7", - "GroupName": "my-security-group" - } - ] - }, - "LaunchedAvailabilityZone": "us-west-1b", - "ProductDescription": "Linux/UNIX", - "SpotInstanceRequestId": "sir-08b93456", - "SpotPrice": "0.010000", - "State": "active", - "Status": { - "Code": "fulfilled", - "Message": "Your Spot request is fulfilled.", - "UpdateTime": "2014-04-30T18:16:21.000Z" - }, - "Type": "one-time" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified Spot Instance request.", - "id": "ec2-describe-spot-instance-requests-1", - "title": "To describe a Spot Instance request" - } - ], - "DescribeSpotPriceHistory": [ - { - "input": { - "EndTime": "2014-01-06T08:09:10", - "InstanceTypes": [ - "m1.xlarge" - ], - "ProductDescriptions": [ - "Linux/UNIX (Amazon VPC)" - ], - "StartTime": "2014-01-06T07:08:09" - }, - "output": { - "SpotPriceHistory": [ - { - "AvailabilityZone": "us-west-1a", - "InstanceType": "m1.xlarge", - "ProductDescription": "Linux/UNIX (Amazon VPC)", - "SpotPrice": "0.080000", - "Timestamp": "2014-01-06T04:32:53.000Z" - }, - { - "AvailabilityZone": "us-west-1c", - "InstanceType": "m1.xlarge", - "ProductDescription": "Linux/UNIX (Amazon VPC)", - "SpotPrice": "0.080000", - "Timestamp": "2014-01-05T11:28:26.000Z" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example returns the Spot Price history for m1.xlarge, Linux/UNIX (Amazon VPC) instances for a particular day in January.", - "id": "ec2-describe-spot-price-history-1", - "title": "To describe Spot price history for Linux/UNIX (Amazon VPC)" - } - ], - "DescribeSubnets": [ - { - "input": { 
- "Filters": [ - { - "Name": "vpc-id", - "Values": [ - "vpc-a01106c2" - ] - } - ] - }, - "output": { - "Subnets": [ - { - "AvailabilityZone": "us-east-1c", - "AvailableIpAddressCount": 251, - "CidrBlock": "10.0.1.0/24", - "DefaultForAz": false, - "MapPublicIpOnLaunch": false, - "State": "available", - "SubnetId": "subnet-9d4a7b6c", - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the subnets for the specified VPC.", - "id": "ec2-describe-subnets-1", - "title": "To describe the subnets for a VPC" - } - ], - "DescribeTags": [ - { - "input": { - "Filters": [ - { - "Name": "resource-id", - "Values": [ - "i-1234567890abcdef8" - ] - } - ] - }, - "output": { - "Tags": [ - { - "Key": "Stack", - "ResourceId": "i-1234567890abcdef8", - "ResourceType": "instance", - "Value": "test" - }, - { - "Key": "Name", - "ResourceId": "i-1234567890abcdef8", - "ResourceType": "instance", - "Value": "Beta Server" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the tags for the specified instance.", - "id": "ec2-describe-tags-1", - "title": "To describe the tags for a single resource" - } - ], - "DescribeVolumeAttribute": [ - { - "input": { - "Attribute": "autoEnableIO", - "VolumeId": "vol-049df61146c4d7901" - }, - "output": { - "AutoEnableIO": { - "Value": false - }, - "VolumeId": "vol-049df61146c4d7901" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the ``autoEnableIo`` attribute of the volume with the ID ``vol-049df61146c4d7901``.", - "id": "to-describe-a-volume-attribute-1472505773492", - "title": "To describe a volume attribute" - } - ], - "DescribeVolumeStatus": [ - { - "input": { - "VolumeIds": [ - "vol-1234567890abcdef0" - ] - }, - "output": { - "VolumeStatuses": [ - { - "Actions": [ - - ], - "AvailabilityZone": "us-east-1a", - "Events": [ - - ], - "VolumeId": "vol-1234567890abcdef0", - "VolumeStatus": { - "Details": [ - { - "Name": "io-enabled", - "Status": "passed" - }, - { - "Name": "io-performance", - "Status": "not-applicable" - } - ], - "Status": "ok" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the status for the volume ``vol-1234567890abcdef0``.", - "id": "to-describe-the-status-of-a-single-volume-1472507016193", - "title": "To describe the status of a single volume" - }, - { - "input": { - "Filters": [ - { - "Name": "volume-status.status", - "Values": [ - "impaired" - ] - } - ] - }, - "output": { - "VolumeStatuses": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the status for all volumes that are impaired. 
In this example output, there are no impaired volumes.", - "id": "to-describe-the-status-of-impaired-volumes-1472507239821", - "title": "To describe the status of impaired volumes" - } - ], - "DescribeVolumes": [ - { - "input": { - }, - "output": { - "NextToken": "", - "Volumes": [ - { - "Attachments": [ - { - "AttachTime": "2013-12-18T22:35:00.000Z", - "DeleteOnTermination": true, - "Device": "/dev/sda1", - "InstanceId": "i-1234567890abcdef0", - "State": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2013-12-18T22:35:00.084Z", - "Size": 8, - "SnapshotId": "snap-1234567890abcdef0", - "State": "in-use", - "VolumeId": "vol-049df61146c4d7901", - "VolumeType": "standard" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all of your volumes in the default region.", - "id": "to-describe-all-volumes-1472506358883", - "title": "To describe all volumes" - }, - { - "input": { - "Filters": [ - { - "Name": "attachment.instance-id", - "Values": [ - "i-1234567890abcdef0" - ] - }, - { - "Name": "attachment.delete-on-termination", - "Values": [ - "true" - ] - } - ] - }, - "output": { - "Volumes": [ - { - "Attachments": [ - { - "AttachTime": "2013-12-18T22:35:00.000Z", - "DeleteOnTermination": true, - "Device": "/dev/sda1", - "InstanceId": "i-1234567890abcdef0", - "State": "attached", - "VolumeId": "vol-049df61146c4d7901" - } - ], - "AvailabilityZone": "us-east-1a", - "CreateTime": "2013-12-18T22:35:00.084Z", - "Size": 8, - "SnapshotId": "snap-1234567890abcdef0", - "State": "in-use", - "VolumeId": "vol-049df61146c4d7901", - "VolumeType": "standard" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes all volumes that are both attached to the instance with the ID i-1234567890abcdef0 and set to delete when the instance terminates.", - "id": "to-describe-volumes-that-are-attached-to-a-specific-instance-1472506613578", - "title": "To describe volumes that are attached to a specific instance" - } - ], - "DescribeVpcAttribute": [ - { - "input": { - "Attribute": "enableDnsSupport", - "VpcId": "vpc-a01106c2" - }, - "output": { - "EnableDnsSupport": { - "Value": true - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the enableDnsSupport attribute. This attribute indicates whether DNS resolution is enabled for the VPC. If this attribute is true, the Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses; otherwise, it does not.", - "id": "ec2-describe-vpc-attribute-1", - "title": "To describe the enableDnsSupport attribute" - }, - { - "input": { - "Attribute": "enableDnsHostnames", - "VpcId": "vpc-a01106c2" - }, - "output": { - "EnableDnsHostnames": { - "Value": true - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the enableDnsHostnames attribute. This attribute indicates whether the instances launched in the VPC get DNS hostnames. 
If this attribute is true, instances in the VPC get DNS hostnames; otherwise, they do not.", - "id": "ec2-describe-vpc-attribute-2", - "title": "To describe the enableDnsHostnames attribute" - } - ], - "DescribeVpcs": [ - { - "input": { - "VpcIds": [ - "vpc-a01106c2" - ] - }, - "output": { - "Vpcs": [ - { - "CidrBlock": "10.0.0.0/16", - "DhcpOptionsId": "dopt-7a8b9c2d", - "InstanceTenancy": "default", - "IsDefault": false, - "State": "available", - "Tags": [ - { - "Key": "Name", - "Value": "MyVPC" - } - ], - "VpcId": "vpc-a01106c2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example describes the specified VPC.", - "id": "ec2-describe-vpcs-1", - "title": "To describe a VPC" - } - ], - "DetachInternetGateway": [ - { - "input": { - "InternetGatewayId": "igw-c0a643a9", - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified Internet gateway from the specified VPC.", - "id": "ec2-detach-internet-gateway-1", - "title": "To detach an Internet gateway from a VPC" - } - ], - "DetachNetworkInterface": [ - { - "input": { - "AttachmentId": "eni-attach-66c4350a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the specified network interface from its attached instance.", - "id": "ec2-detach-network-interface-1", - "title": "To detach a network interface from an instance" - } - ], - "DetachVolume": [ - { - "input": { - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - "AttachTime": "2014-02-27T19:23:06.000Z", - "Device": "/dev/sdb", - "InstanceId": "i-1234567890abcdef0", - "State": "detaching", - "VolumeId": "vol-049df61146c4d7901" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example detaches the volume (``vol-049df61146c4d7901``) from the instance it is attached to.", - "id": "to-detach-a-volume-from-an-instance-1472507977694", - "title": "To detach a volume from an instance" - } - ], - "DisableVgwRoutePropagation": [ - { - "input": { - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disables the specified virtual private gateway from propagating static routes to the specified route table.", - "id": "ec2-disable-vgw-route-propagation-1", - "title": "To disable route propagation" - } - ], - "DisassociateAddress": [ - { - "input": { - "AssociationId": "eipassoc-2bebb745" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates an Elastic IP address from an instance in a VPC.", - "id": "ec2-disassociate-address-1", - "title": "To disassociate an Elastic IP address in EC2-VPC" - }, - { - "input": { - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates an Elastic IP address from an instance in EC2-Classic.", - "id": "ec2-disassociate-address-2", - "title": "To disassociate an Elastic IP addresses in EC2-Classic" - } - ], - "DisassociateIamInstanceProfile": [ - { - "input": { - "AssociationId": "iip-assoc-05020b59952902f5f" - }, - "output": { - "IamInstanceProfileAssociation": { - "AssociationId": "iip-assoc-05020b59952902f5f", - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/admin-role", - "Id": "AIPAI5IVIHMFFYY2DKV5Y" - }, - "InstanceId": "i-123456789abcde123", - "State": "disassociating" - } - }, - "comments": 
{ - "input": { - }, - "output": { - } - }, - "description": "This example disassociates the specified IAM instance profile from an instance.", - "id": "to-disassociate-an-iam-instance-profile-1529355364478", - "title": "To disassociate an IAM instance profile" - } - ], - "DisassociateRouteTable": [ - { - "input": { - "AssociationId": "rtbassoc-781d0d1a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example disassociates the specified route table from its associated subnet.", - "id": "ec2-disassociate-route-table-1", - "title": "To disassociate a route table" - } - ], - "EnableVgwRoutePropagation": [ - { - "input": { - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables the specified virtual private gateway to propagate static routes to the specified route table.", - "id": "ec2-enable-vgw-route-propagation-1", - "title": "To enable route propagation" - } - ], - "EnableVolumeIO": [ - { - "input": { - "VolumeId": "vol-1234567890abcdef0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables I/O on volume ``vol-1234567890abcdef0``.", - "id": "to-enable-io-for-a-volume-1472508114867", - "title": "To enable I/O for a volume" - } - ], - "GetConsoleOutput": [ - { - "input": { - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - "InstanceId": "i-1234567890abcdef0", - "Output": "...", - "Timestamp": "2018-05-25T21:23:53.000Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets the console output for the specified instance.", - "id": "to-get-the-console-output-1529355683194", - "title": "To get the console output" - } - ], - "GetLaunchTemplateData": [ - { - "input": { - "InstanceId": "0123d646e8048babc" - }, - "output": { - "LaunchTemplateData": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/xvda", - "Ebs": { - "DeleteOnTermination": true, - "Encrypted": false, - "Iops": 100, - "SnapshotId": "snap-02594938353ef77d3", - "VolumeSize": 8, - "VolumeType": "gp2" - } - } - ], - "EbsOptimized": false, - "ImageId": "ami-32cf7b4a", - "InstanceType": "t2.medium", - "KeyName": "my-key-pair", - "Monitoring": { - "Enabled": false - }, - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": false, - "DeleteOnTermination": true, - "Description": "", - "DeviceIndex": 0, - "Groups": [ - "sg-d14e1bb4" - ], - "Ipv6Addresses": [ - - ], - "NetworkInterfaceId": "eni-4338b5a9", - "PrivateIpAddress": "10.0.3.233", - "PrivateIpAddresses": [ - { - "Primary": true, - "PrivateIpAddress": "10.0.3.233" - } - ], - "SubnetId": "subnet-5264e837" - } - ], - "Placement": { - "AvailabilityZone": "us-east-2b", - "GroupName": "", - "Tenancy": "default" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets the launch template data for the specified instance.", - "id": "to-get-the-launch-template-data-for-an-instance--1529356515702", - "title": "To get the launch template data for an instance " - } - ], - "ModifyImageAttribute": [ - { - "input": { - "ImageId": "ami-5731123e", - "LaunchPermission": { - "Add": [ - { - "Group": "all" - } - ] - } - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example makes the specified AMI public.", - "id": "to-make-an-ami-public-1529357395278", - "title": "To make an AMI public" - }, - { - "input": { - "ImageId": "ami-5731123e", - 
"LaunchPermission": { - "Add": [ - { - "UserId": "123456789012" - } - ] - } - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example grants launch permissions for the specified AMI to the specified AWS account.", - "id": "to-grant-launch-permissions-1529357727906", - "title": "To grant launch permissions" - } - ], - "ModifyInstanceAttribute": [ - { - "input": { - "InstanceId": "i-1234567890abcdef0", - "InstanceType": { - "Value": "m5.large" - } - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the instance type of the specified stopped instance.", - "id": "to-modify-the-instance-type-1529357844378", - "title": "To modify the instance type" - }, - { - "input": { - "EnaSupport": { - "Value": true - }, - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example enables enhanced networking for the specified stopped instance.", - "id": "to-enable-enhanced-networking-1529358279870", - "title": "To enable enhanced networking" - } - ], - "ModifyLaunchTemplate": [ - { - "input": { - "DefaultVersion": "2", - "LaunchTemplateId": "lt-0abcd290751193123" - }, - "output": { - "LaunchTemplate": { - "CreateTime": "2017-12-01T13:35:46.000Z", - "CreatedBy": "arn:aws:iam::123456789012:root", - "DefaultVersionNumber": 2, - "LatestVersionNumber": 2, - "LaunchTemplateId": "lt-0abcd290751193123", - "LaunchTemplateName": "WebServers" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example specifies version 2 as the default version of the specified launch template.", - "id": "to-change-the-default-version-of-a-launch-template-1529358440364", - "title": "To change the default version of a launch template" - } - ], - "ModifyNetworkInterfaceAttribute": [ - { - "input": { - "Attachment": { - "AttachmentId": "eni-attach-43348162", - "DeleteOnTermination": false - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the attachment attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-1", - "title": "To modify the attachment attribute of a network interface" - }, - { - "input": { - "Description": { - "Value": "My description" - }, - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the description attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-2", - "title": "To modify the description attribute of a network interface" - }, - { - "input": { - "Groups": [ - "sg-903004f8", - "sg-1a2b3c4d" - ], - "NetworkInterfaceId": "eni-686ea200" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command modifies the groupSet attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-3", - "title": "To modify the groupSet attribute of a network interface" - }, - { - "input": { - "NetworkInterfaceId": "eni-686ea200", - "SourceDestCheck": { - "Value": false - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command modifies the sourceDestCheck attribute of the specified network interface.", - "id": "ec2-modify-network-interface-attribute-4", - "title": "To modify the sourceDestCheck attribute of a 
network interface" - } - ], - "ModifySnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "OperationType": "remove", - "SnapshotId": "snap-1234567890abcdef0", - "UserIds": [ - "123456789012" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies snapshot ``snap-1234567890abcdef0`` to remove the create volume permission for a user with the account ID ``123456789012``. If the command succeeds, no output is returned.", - "id": "to-modify-a-snapshot-attribute-1472508385907", - "title": "To modify a snapshot attribute" - }, - { - "input": { - "Attribute": "createVolumePermission", - "GroupNames": [ - "all" - ], - "OperationType": "add", - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example makes the snapshot ``snap-1234567890abcdef0`` public.", - "id": "to-make-a-snapshot-public-1472508470529", - "title": "To make a snapshot public" - } - ], - "ModifySpotFleetRequest": [ - { - "input": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "TargetCapacity": 20 - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example increases the target capacity of the specified Spot fleet request.", - "id": "ec2-modify-spot-fleet-request-1", - "title": "To increase the target capacity of a Spot fleet request" - }, - { - "input": { - "ExcessCapacityTerminationPolicy": "NoTermination ", - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE", - "TargetCapacity": 10 - }, - "output": { - "Return": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example decreases the target capacity of the specified Spot fleet request without terminating any Spot Instances as a result.", - "id": "ec2-modify-spot-fleet-request-2", - "title": "To decrease the target capacity of a Spot fleet request" - } - ], - "ModifySubnetAttribute": [ - { - "input": { - "MapPublicIpOnLaunch": { - "Value": true - }, - "SubnetId": "subnet-1a2b3c4d" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the specified subnet so that all instances launched into this subnet are assigned a public IP address.", - "id": "ec2-modify-subnet-attribute-1", - "title": "To change a subnet's public IP addressing behavior" - } - ], - "ModifyVolumeAttribute": [ - { - "input": { - "AutoEnableIO": { - "Value": true - }, - "DryRun": true, - "VolumeId": "vol-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example sets the ``autoEnableIo`` attribute of the volume with the ID ``vol-1234567890abcdef0`` to ``true``. If the command succeeds, no output is returned.", - "id": "to-modify-a-volume-attribute-1472508596749", - "title": "To modify a volume attribute" - } - ], - "ModifyVpcAttribute": [ - { - "input": { - "EnableDnsSupport": { - "Value": false - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the enableDnsSupport attribute. This attribute indicates whether DNS resolution is enabled for the VPC. 
If this attribute is true, the Amazon DNS server resolves DNS hostnames for instances in the VPC to their corresponding IP addresses; otherwise, it does not.", - "id": "ec2-modify-vpc-attribute-1", - "title": "To modify the enableDnsSupport attribute" - }, - { - "input": { - "EnableDnsHostnames": { - "Value": false - }, - "VpcId": "vpc-a01106c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the enableDnsHostnames attribute. This attribute indicates whether instances launched in the VPC get DNS hostnames. If this attribute is true, instances in the VPC get DNS hostnames; otherwise, they do not.", - "id": "ec2-modify-vpc-attribute-2", - "title": "To modify the enableDnsHostnames attribute" - } - ], - "MoveAddressToVpc": [ - { - "input": { - "PublicIp": "54.123.4.56" - }, - "output": { - "Status": "MoveInProgress" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example moves the specified Elastic IP address to the EC2-VPC platform.", - "id": "ec2-move-address-to-vpc-1", - "title": "To move an address to EC2-VPC" - } - ], - "PurchaseScheduledInstances": [ - { - "input": { - "PurchaseRequests": [ - { - "InstanceCount": 1, - "PurchaseToken": "eyJ2IjoiMSIsInMiOjEsImMiOi..." - } - ] - }, - "output": { - "ScheduledInstanceSet": [ - { - "AvailabilityZone": "us-west-2b", - "CreateDate": "2016-01-25T21:43:38.612Z", - "HourlyPrice": "0.095", - "InstanceCount": 1, - "InstanceType": "c4.large", - "NetworkPlatform": "EC2-VPC", - "NextSlotStartTime": "2016-01-31T09:00:00Z", - "Platform": "Linux/UNIX", - "Recurrence": { - "Frequency": "Weekly", - "Interval": 1, - "OccurrenceDaySet": [ - 1 - ], - "OccurrenceRelativeToEnd": false, - "OccurrenceUnit": "" - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012", - "SlotDurationInHours": 32, - "TermEndDate": "2017-01-31T09:00:00Z", - "TermStartDate": "2016-01-31T09:00:00Z", - "TotalScheduledInstanceHours": 1696 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example purchases a Scheduled Instance.", - "id": "ec2-purchase-scheduled-instances-1", - "title": "To purchase a Scheduled Instance" - } - ], - "RebootInstances": [ - { - "input": { - "InstanceIds": [ - "i-1234567890abcdef5" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example reboots the specified EC2 instance.", - "id": "to-reboot-an-ec2-instance-1529358566382", - "title": "To reboot an EC2 instance" - } - ], - "ReleaseAddress": [ - { - "input": { - "AllocationId": "eipalloc-64d5890a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example releases an Elastic IP address for use with instances in a VPC.", - "id": "ec2-release-address-1", - "title": "To release an Elastic IP address for EC2-VPC" - }, - { - "input": { - "PublicIp": "198.51.100.0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example releases an Elastic IP address for use with instances in EC2-Classic.", - "id": "ec2-release-address-2", - "title": "To release an Elastic IP addresses for EC2-Classic" - } - ], - "ReplaceNetworkAclAssociation": [ - { - "input": { - "AssociationId": "aclassoc-e5b95c8c", - "NetworkAclId": "acl-5fb85d36" - }, - "output": { - "NewAssociationId": "aclassoc-3999875b" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified network ACL with the subnet for the specified 
network ACL association.", - "id": "ec2-replace-network-acl-association-1", - "title": "To replace the network ACL associated with a subnet" - } - ], - "ReplaceNetworkAclEntry": [ - { - "input": { - "CidrBlock": "203.0.113.12/24", - "Egress": false, - "NetworkAclId": "acl-5fb85d36", - "PortRange": { - "From": 53, - "To": 53 - }, - "Protocol": "17", - "RuleAction": "allow", - "RuleNumber": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example replaces an entry for the specified network ACL. The new rule 100 allows ingress traffic from 203.0.113.12/24 on UDP port 53 (DNS) into any associated subnet.", - "id": "ec2-replace-network-acl-entry-1", - "title": "To replace a network ACL entry" - } - ], - "ReplaceRoute": [ - { - "input": { - "DestinationCidrBlock": "10.0.0.0/16", - "GatewayId": "vgw-9a4cacf3", - "RouteTableId": "rtb-22574640" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example replaces the specified route in the specified table table. The new route matches the specified CIDR and sends the traffic to the specified virtual private gateway.", - "id": "ec2-replace-route-1", - "title": "To replace a route" - } - ], - "ReplaceRouteTableAssociation": [ - { - "input": { - "AssociationId": "rtbassoc-781d0d1a", - "RouteTableId": "rtb-22574640" - }, - "output": { - "NewAssociationId": "rtbassoc-3a1f0f58" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates the specified route table with the subnet for the specified route table association.", - "id": "ec2-replace-route-table-association-1", - "title": "To replace the route table associated with a subnet" - } - ], - "RequestSpotFleet": [ - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "SecurityGroups": [ - { - "GroupId": "sg-1a2b3c4d" - } - ], - "SubnetId": "subnet-1a2b3c4d, subnet-3c4d5e6f" - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request with two launch specifications that differ only by subnet. The Spot fleet launches the instances in the specified subnet with the lowest price. If the instances are launched in a default VPC, they receive a public IP address by default. If the instances are launched in a nondefault VPC, they do not receive a public IP address by default. 
Note that you can't specify different subnets from the same Availability Zone in a Spot fleet request.", - "id": "ec2-request-spot-fleet-1", - "title": "To request a Spot fleet in the subnet with the lowest price" - }, - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2a, us-west-2b" - }, - "SecurityGroups": [ - { - "GroupId": "sg-1a2b3c4d" - } - ] - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request with two launch specifications that differ only by Availability Zone. The Spot fleet launches the instances in the specified Availability Zone with the lowest price. If your account supports EC2-VPC only, Amazon EC2 launches the Spot instances in the default subnet of the Availability Zone. If your account supports EC2-Classic, Amazon EC2 launches the instances in EC2-Classic in the Availability Zone.", - "id": "ec2-request-spot-fleet-2", - "title": "To request a Spot fleet in the Availability Zone with the lowest price" - }, - { - "input": { - "SpotFleetRequestConfig": { - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::880185128111:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Groups": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-1a2b3c4d" - } - ] - } - ], - "SpotPrice": "0.04", - "TargetCapacity": 2 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example assigns public addresses to instances launched in a nondefault VPC. Note that when you specify a network interface, you must include the subnet ID and security group ID using the network interface.", - "id": "ec2-request-spot-fleet-3", - "title": "To launch Spot instances in a subnet and assign them public IP addresses" - }, - { - "input": { - "SpotFleetRequestConfig": { - "AllocationStrategy": "diversified", - "IamFleetRole": "arn:aws:iam::123456789012:role/my-spot-fleet-role", - "LaunchSpecifications": [ - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "c4.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - }, - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - }, - { - "ImageId": "ami-1a2b3c4d", - "InstanceType": "r3.2xlarge", - "SubnetId": "subnet-1a2b3c4d" - } - ], - "SpotPrice": "0.70", - "TargetCapacity": 30 - } - }, - "output": { - "SpotFleetRequestId": "sfr-73fbd2ce-aa30-494c-8788-1cee4EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a Spot fleet request that launches 30 instances using the diversified allocation strategy. The launch specifications differ by instance type. 
The Spot fleet distributes the instances across the launch specifications such that there are 10 instances of each type.", - "id": "ec2-request-spot-fleet-4", - "title": "To request a Spot fleet using the diversified allocation strategy" - } - ], - "RequestSpotInstances": [ - { - "input": { - "InstanceCount": 5, - "LaunchSpecification": { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2a" - }, - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ] - }, - "SpotPrice": "0.03", - "Type": "one-time" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a one-time Spot Instance request for five instances in the specified Availability Zone. If your account supports EC2-VPC only, Amazon EC2 launches the instances in the default subnet of the specified Availability Zone. If your account supports EC2-Classic, Amazon EC2 launches the instances in EC2-Classic in the specified Availability Zone.", - "id": "ec2-request-spot-instances-1", - "title": "To create a one-time Spot Instance request" - }, - { - "input": { - "InstanceCount": 5, - "LaunchSpecification": { - "IamInstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/my-iam-role" - }, - "ImageId": "ami-1a2b3c4d", - "InstanceType": "m3.medium", - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-1a2b3c4d" - }, - "SpotPrice": "0.050", - "Type": "one-time" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command creates a one-time Spot Instance request for five instances in the specified subnet. Amazon EC2 launches the instances in the specified subnet. If the VPC is a nondefault VPC, the instances do not receive a public IP address by default.", - "id": "ec2-request-spot-instances-2", - "title": "To create a one-time Spot Instance request" - } - ], - "ResetImageAttribute": [ - { - "input": { - "Attribute": "launchPermission", - "ImageId": "ami-5731123e" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets the launchPermission attribute for the specified AMI. By default, AMIs are private.", - "id": "to-reset-the-launchpermission-attribute-1529359519534", - "title": "To reset the launchPermission attribute" - } - ], - "ResetInstanceAttribute": [ - { - "input": { - "Attribute": "sourceDestCheck", - "InstanceId": "i-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets the sourceDestCheck attribute for the specified instance.", - "id": "to-reset-the-sourcedestcheck-attribute-1529359630708", - "title": "To reset the sourceDestCheck attribute" - } - ], - "ResetSnapshotAttribute": [ - { - "input": { - "Attribute": "createVolumePermission", - "SnapshotId": "snap-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets the create volume permissions for snapshot ``snap-1234567890abcdef0``. 
If the command succeeds, no output is returned.", - "id": "to-reset-a-snapshot-attribute-1472508825735", - "title": "To reset a snapshot attribute" - } - ], - "RestoreAddressToClassic": [ - { - "input": { - "PublicIp": "198.51.100.0" - }, - "output": { - "PublicIp": "198.51.100.0", - "Status": "MoveInProgress" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example restores the specified Elastic IP address to the EC2-Classic platform.", - "id": "ec2-restore-address-to-classic-1", - "title": "To restore an address to EC2-Classic" - } - ], - "RunInstances": [ - { - "input": { - "BlockDeviceMappings": [ - { - "DeviceName": "/dev/sdh", - "Ebs": { - "VolumeSize": 100 - } - } - ], - "ImageId": "ami-abc12345", - "InstanceType": "t2.micro", - "KeyName": "my-key-pair", - "MaxCount": 1, - "MinCount": 1, - "SecurityGroupIds": [ - "sg-1a2b3c4d" - ], - "SubnetId": "subnet-6e7f829e", - "TagSpecifications": [ - { - "ResourceType": "instance", - "Tags": [ - { - "Key": "Purpose", - "Value": "test" - } - ] - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches an instance using the specified AMI, instance type, security group, subnet, block device mapping, and tags.", - "id": "to-launch-an-instance-1529360150806", - "title": "To launch an instance" - } - ], - "RunScheduledInstances": [ - { - "input": { - "InstanceCount": 1, - "LaunchSpecification": { - "IamInstanceProfile": { - "Name": "my-iam-role" - }, - "ImageId": "ami-12345678", - "InstanceType": "c4.large", - "KeyName": "my-key-pair", - "NetworkInterfaces": [ - { - "AssociatePublicIpAddress": true, - "DeviceIndex": 0, - "Groups": [ - "sg-12345678" - ], - "SubnetId": "subnet-12345678" - } - ] - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012" - }, - "output": { - "InstanceIdSet": [ - "i-1234567890abcdef0" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches the specified Scheduled Instance in a VPC.", - "id": "ec2-run-scheduled-instances-1", - "title": "To launch a Scheduled Instance in a VPC" - }, - { - "input": { - "InstanceCount": 1, - "LaunchSpecification": { - "IamInstanceProfile": { - "Name": "my-iam-role" - }, - "ImageId": "ami-12345678", - "InstanceType": "c4.large", - "KeyName": "my-key-pair", - "Placement": { - "AvailabilityZone": "us-west-2b" - }, - "SecurityGroupIds": [ - "sg-12345678" - ] - }, - "ScheduledInstanceId": "sci-1234-1234-1234-1234-123456789012" - }, - "output": { - "InstanceIdSet": [ - "i-1234567890abcdef0" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example launches the specified Scheduled Instance in EC2-Classic.", - "id": "ec2-run-scheduled-instances-2", - "title": "To launch a Scheduled Instance in EC2-Classic" - } - ], - "StartInstances": [ - { - "input": { - "InstanceIds": [ - "i-1234567890abcdef0" - ] - }, - "output": { - "StartingInstances": [ - { - "CurrentState": { - "Code": 0, - "Name": "pending" - }, - "InstanceId": "i-1234567890abcdef0", - "PreviousState": { - "Code": 80, - "Name": "stopped" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example starts the specified EC2 instance.", - "id": "to-start-a-stopped-ec2-instance-1529358792730", - "title": "To start a stopped EC2 instance" - } - ], - "StopInstances": [ - { - "input": { - "InstanceIds": [ - "i-1234567890abcdef0" - ] - }, - "output": { - "StoppingInstances": [ - { - "CurrentState": { - 
"Code": 64, - "Name": "stopping" - }, - "InstanceId": "i-1234567890abcdef0", - "PreviousState": { - "Code": 16, - "Name": "running" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example stops the specified EC2 instance.", - "id": "to-stop-a-running-ec2-instance-1529358905540", - "title": "To stop a running EC2 instance" - } - ], - "TerminateInstances": [ - { - "input": { - "InstanceIds": [ - "i-1234567890abcdef0" - ] - }, - "output": { - "TerminatingInstances": [ - { - "CurrentState": { - "Code": 32, - "Name": "shutting-down" - }, - "InstanceId": "i-1234567890abcdef0", - "PreviousState": { - "Code": 16, - "Name": "running" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example terminates the specified EC2 instance.", - "id": "to-terminate-an-ec2-instance-1529359350660", - "title": "To terminate an EC2 instance" - } - ], - "UnassignPrivateIpAddresses": [ - { - "input": { - "NetworkInterfaceId": "eni-e5aa89a3", - "PrivateIpAddresses": [ - "10.0.0.82" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example unassigns the specified private IP address from the specified network interface.", - "id": "ec2-unassign-private-ip-addresses-1", - "title": "To unassign a secondary private IP address from a network interface" - } - ], - "UpdateSecurityGroupRuleDescriptionsEgress": [ - { - "input": { - "GroupId": "sg-123abc12", - "IpPermissions": [ - { - "FromPort": 80, - "IpProtocol": "tcp", - "IpRanges": [ - { - "CidrIp": "203.0.113.0/24", - "Description": "Outbound HTTP access to server 2" - } - ], - "ToPort": 80 - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example updates the description for the specified security group rule.", - "id": "to-update-an-outbound-security-group-rule-description-1529360481544", - "title": "To update an outbound security group rule description" - } - ], - "UpdateSecurityGroupRuleDescriptionsIngress": [ - { - "input": { - "GroupId": "sg-123abc12", - "IpPermissions": [ - { - "FromPort": 22, - "IpProtocol": "tcp", - "IpRanges": [ - { - "CidrIp": "203.0.113.0/16", - "Description": "SSH access from the LA office" - } - ], - "ToPort": 22 - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example updates the description for the specified security group rule.", - "id": "to-update-an-inbound-security-group-rule-description-1529360820372", - "title": "To update an inbound security group rule description" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/paginators-1.json deleted file mode 100644 index 04c0e06..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/paginators-1.json +++ /dev/null @@ -1,996 +0,0 @@ -{ - "pagination": { - "DescribeRouteTables": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RouteTables" - }, - "DescribeIamInstanceProfileAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IamInstanceProfileAssociations" - }, - "DescribeInstanceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceStatuses" - }, - "DescribeInstances": { - "input_token": "NextToken", - "output_token": "NextToken", 
- "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeReservedInstancesOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReservedInstancesOfferings" - }, - "DescribeReservedInstancesModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ReservedInstancesModifications" - }, - "DescribeSecurityGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SecurityGroups" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Snapshots" - }, - "DescribeSpotFleetInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActiveInstances" - }, - "DescribeSpotFleetRequests": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotFleetRequestConfigs" - }, - "DescribeSpotPriceHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpotPriceHistory" - }, - "DescribeTags": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "DescribeVolumeStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VolumeStatuses" - }, - "DescribeVolumes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Volumes" - }, - "DescribeNatGateways": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NatGateways" - }, - "DescribeNetworkInterfaces": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkInterfaces" - }, - "DescribeVpcEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VpcEndpoints" - }, - "DescribeVpcEndpointServices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": [ - "ServiceDetails", - "ServiceNames" - ] - }, - "DescribeVpcEndpointConnections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VpcEndpointConnections" - }, - "DescribeByoipCidrs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ByoipCidrs" - }, - "DescribeCapacityReservations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityReservations" - }, - "DescribeClassicLinkInstances": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Instances" - }, - "DescribeClientVpnAuthorizationRules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AuthorizationRules" - }, - "DescribeClientVpnConnections": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Connections" - }, - "DescribeClientVpnEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ClientVpnEndpoints" - }, - "DescribeClientVpnRoutes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - 
"output_token": "NextToken", - "result_key": "Routes" - }, - "DescribeClientVpnTargetNetworks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ClientVpnTargetNetworks" - }, - "DescribeEgressOnlyInternetGateways": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EgressOnlyInternetGateways" - }, - "DescribeFleets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Fleets" - }, - "DescribeFlowLogs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FlowLogs" - }, - "DescribeFpgaImages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FpgaImages" - }, - "DescribeHostReservationOfferings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "OfferingSet" - }, - "DescribeHostReservations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "HostReservationSet" - }, - "DescribeHosts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Hosts" - }, - "DescribeImportImageTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ImportImageTasks" - }, - "DescribeImportSnapshotTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ImportSnapshotTasks" - }, - "DescribeInstanceCreditSpecifications": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceCreditSpecifications" - }, - "DescribeLaunchTemplateVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LaunchTemplateVersions" - }, - "DescribeLaunchTemplates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LaunchTemplates" - }, - "DescribeMovingAddresses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MovingAddressStatuses" - }, - "DescribeNetworkInterfacePermissions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkInterfacePermissions" - }, - "DescribePrefixLists": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PrefixLists" - }, - "DescribePrincipalIdFormat": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Principals" - }, - "DescribePublicIpv4Pools": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PublicIpv4Pools" - }, - "DescribeScheduledInstanceAvailability": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScheduledInstanceAvailabilitySet" - }, - "DescribeScheduledInstances": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScheduledInstanceSet" - }, - "DescribeStaleSecurityGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StaleSecurityGroupSet" - }, - 
"DescribeTransitGatewayAttachments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayAttachments" - }, - "DescribeTransitGatewayRouteTables": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayRouteTables" - }, - "DescribeTransitGatewayVpcAttachments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayVpcAttachments" - }, - "DescribeTransitGateways": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGateways" - }, - "DescribeVolumesModifications": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VolumesModifications" - }, - "DescribeVpcClassicLinkDnsSupport": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Vpcs" - }, - "DescribeVpcEndpointConnectionNotifications": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ConnectionNotificationSet" - }, - "DescribeVpcEndpointServiceConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ServiceConfigurations" - }, - "DescribeVpcEndpointServicePermissions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AllowedPrincipals" - }, - "DescribeVpcPeeringConnections": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VpcPeeringConnections" - }, - "GetTransitGatewayAttachmentPropagations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayAttachmentPropagations" - }, - "GetTransitGatewayRouteTableAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Associations" - }, - "GetTransitGatewayRouteTablePropagations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayRouteTablePropagations" - }, - "DescribeInternetGateways": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InternetGateways" - }, - "DescribeNetworkAcls": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkAcls" - }, - "DescribeVpcs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Vpcs" - }, - "DescribeSpotInstanceRequests": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SpotInstanceRequests" - }, - "DescribeDhcpOptions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DhcpOptions" - }, - "DescribeSubnets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Subnets" - }, - "DescribeTrafficMirrorFilters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TrafficMirrorFilters" - }, - "DescribeTrafficMirrorSessions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": 
"TrafficMirrorSessions" - }, - "DescribeTrafficMirrorTargets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TrafficMirrorTargets" - }, - "DescribeExportImageTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ExportImageTasks" - }, - "DescribeFastSnapshotRestores": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FastSnapshotRestores" - }, - "DescribeIpv6Pools": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Ipv6Pools" - }, - "GetAssociatedIpv6PoolCidrs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Ipv6CidrAssociations" - }, - "DescribeCoipPools": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CoipPools" - }, - "DescribeInstanceTypeOfferings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceTypeOfferings" - }, - "DescribeInstanceTypes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceTypes" - }, - "DescribeLocalGatewayRouteTableVirtualInterfaceGroupAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LocalGatewayRouteTableVirtualInterfaceGroupAssociations" - }, - "DescribeLocalGatewayRouteTableVpcAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LocalGatewayRouteTableVpcAssociations" - }, - "DescribeLocalGatewayRouteTables": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LocalGatewayRouteTables" - }, - "DescribeLocalGatewayVirtualInterfaceGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LocalGatewayVirtualInterfaceGroups" - }, - "DescribeLocalGatewayVirtualInterfaces": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LocalGatewayVirtualInterfaces" - }, - "DescribeLocalGateways": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LocalGateways" - }, - "DescribeTransitGatewayMulticastDomains": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayMulticastDomains" - }, - "DescribeTransitGatewayPeeringAttachments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayPeeringAttachments" - }, - "GetTransitGatewayMulticastDomainAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MulticastDomainAssociations" - }, - "SearchLocalGatewayRoutes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Routes" - }, - "SearchTransitGatewayMulticastGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MulticastGroups" - }, - "DescribeManagedPrefixLists": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PrefixLists" - }, - 
"GetManagedPrefixListAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PrefixListAssociations" - }, - "GetManagedPrefixListEntries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Entries" - }, - "GetGroupsForCapacityReservation": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityReservationGroups" - }, - "DescribeCarrierGateways": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CarrierGateways" - }, - "GetTransitGatewayPrefixListReferences": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayPrefixListReferences" - }, - "DescribeNetworkInsightsAnalyses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkInsightsAnalyses" - }, - "DescribeNetworkInsightsPaths": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkInsightsPaths" - }, - "DescribeTransitGatewayConnectPeers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayConnectPeers" - }, - "DescribeTransitGatewayConnects": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayConnects" - }, - "DescribeAddressesAttribute": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Addresses" - }, - "DescribeReplaceRootVolumeTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ReplaceRootVolumeTasks" - }, - "DescribeStoreImageTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StoreImageTaskResults" - }, - "DescribeSecurityGroupRules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityGroupRules" - }, - "DescribeInstanceEventWindows": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceEventWindows" - }, - "DescribeTrunkInterfaceAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InterfaceAssociations" - }, - "GetVpnConnectionDeviceTypes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VpnConnectionDeviceTypes" - }, - "DescribeCapacityReservationFleets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityReservationFleets" - }, - "GetInstanceTypesFromInstanceRequirements": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceTypes" - }, - "GetSpotPlacementScores": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SpotPlacementScores" - }, - "DescribeSnapshotTierStatus": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SnapshotTierStatuses" - }, - "ListSnapshotsInRecycleBin": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", 
- "result_key": "Snapshots" - }, - "DescribeIpamPools": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamPools" - }, - "DescribeIpamScopes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamScopes" - }, - "DescribeIpams": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Ipams" - }, - "DescribeNetworkInsightsAccessScopeAnalyses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkInsightsAccessScopeAnalyses" - }, - "DescribeNetworkInsightsAccessScopes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NetworkInsightsAccessScopes" - }, - "GetIpamAddressHistory": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "HistoryRecords" - }, - "GetIpamPoolAllocations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamPoolAllocations" - }, - "GetIpamPoolCidrs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamPoolCidrs" - }, - "GetIpamResourceCidrs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamResourceCidrs" - }, - "DescribeFastLaunchImages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FastLaunchImages" - }, - "ListImagesInRecycleBin": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Images" - }, - "DescribeTransitGatewayPolicyTables": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayPolicyTables" - }, - "DescribeTransitGatewayRouteTableAnnouncements": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransitGatewayRouteTableAnnouncements" - }, - "GetTransitGatewayPolicyTableAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Associations" - }, - "DescribeAddressTransfers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AddressTransfers" - }, - "DescribeAwsNetworkPerformanceMetricSubscriptions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Subscriptions" - }, - "GetAwsNetworkPerformanceData": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DataResponses" - }, - "DescribeVerifiedAccessEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VerifiedAccessEndpoints" - }, - "DescribeVerifiedAccessGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VerifiedAccessGroups" - }, - "DescribeVerifiedAccessInstanceLoggingConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LoggingConfigurations" - }, - "DescribeVerifiedAccessInstances": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": 
"VerifiedAccessInstances" - }, - "DescribeVerifiedAccessTrustProviders": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VerifiedAccessTrustProviders" - }, - "DescribeImages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Images" - }, - "DescribeIpamResourceDiscoveries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamResourceDiscoveries" - }, - "DescribeIpamResourceDiscoveryAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamResourceDiscoveryAssociations" - }, - "GetIpamDiscoveredAccounts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamDiscoveredAccounts" - }, - "GetIpamDiscoveredResourceCidrs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamDiscoveredResourceCidrs" - }, - "GetNetworkInsightsAccessScopeAnalysisFindings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AnalysisFindings" - }, - "DescribeInstanceConnectEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceConnectEndpoints" - }, - "GetSecurityGroupsForVpc": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityGroupForVpcs" - }, - "DescribeCapacityBlockOfferings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityBlockOfferings" - }, - "DescribeInstanceTopology": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Instances" - }, - "DescribeMacHosts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MacHosts" - }, - "DescribeCapacityReservationBillingRequests": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityReservationBillingRequests" - }, - "DescribeInstanceImageMetadata": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceImageMetadata" - }, - "DescribeSecurityGroupVpcAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityGroupVpcAssociations" - }, - "DescribeCapacityBlockExtensionHistory": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityBlockExtensions" - }, - "DescribeCapacityBlockExtensionOfferings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityBlockExtensionOfferings" - }, - "DescribeRouteServerEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RouteServerEndpoints" - }, - "DescribeRouteServerPeers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RouteServerPeers" - }, - "DescribeRouteServers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RouteServers" - }, - "DescribeMacModificationTasks": { - "input_token": 
"NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MacModificationTasks" - }, - "DescribeCapacityBlockStatus": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityBlockStatuses" - }, - "DescribeCapacityBlocks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityBlocks" - }, - "DescribeImageReferences": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ImageReferences" - }, - "DescribeImageUsageReportEntries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ImageUsageReportEntries" - }, - "DescribeImageUsageReports": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ImageUsageReports" - }, - "DescribeCapacityManagerDataExports": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CapacityManagerDataExports" - }, - "GetCapacityManagerMetricData": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MetricDataResults" - }, - "GetCapacityManagerMetricDimensions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MetricDimensionResults" - }, - "DescribeIpamPrefixListResolverTargets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamPrefixListResolverTargets" - }, - "DescribeIpamPrefixListResolvers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamPrefixListResolvers" - }, - "GetIpamPrefixListResolverRules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Rules" - }, - "GetIpamPrefixListResolverVersionEntries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Entries" - }, - "GetIpamPrefixListResolverVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpamPrefixListResolverVersions" - }, - "DescribeVpnConcentrators": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "VpnConcentrators" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/paginators-1.sdk-extras.json deleted file mode 100644 index 823bbb5..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/paginators-1.sdk-extras.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetNetworkInsightsAccessScopeAnalysisFindings": { - "non_aggregate_keys": [ - "AnalysisStatus", - "NetworkInsightsAccessScopeAnalysisId" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/service-2.json.gz deleted file mode 100644 index a1ce34d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/waiters-2.json deleted file mode 100644 
index 31f9d15..0000000 --- a/venv/Lib/site-packages/botocore/data/ec2/2016-11-15/waiters-2.json +++ /dev/null @@ -1,801 +0,0 @@ -{ - "version": 2, - "waiters": { - "InstanceExists": { - "delay": 5, - "maxAttempts": 40, - "operation": "DescribeInstances", - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Reservations[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "BundleTaskComplete": { - "delay": 15, - "operation": "DescribeBundleTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "complete", - "matcher": "pathAll", - "state": "success", - "argument": "BundleTasks[].State" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "BundleTasks[].State" - } - ] - }, - "ConversionTaskCancelled": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskCompleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelled", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - }, - { - "expected": "cancelling", - "matcher": "pathAny", - "state": "failure", - "argument": "ConversionTasks[].State" - } - ] - }, - "ConversionTaskDeleted": { - "delay": 15, - "operation": "DescribeConversionTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "ConversionTasks[].State" - } - ] - }, - "CustomerGatewayAvailable": { - "delay": 15, - "operation": "DescribeCustomerGateways", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CustomerGateways[].State" - } - ] - }, - "ExportTaskCancelled": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "cancelled", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ExportTaskCompleted": { - "delay": 15, - "operation": "DescribeExportTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ExportTasks[].State" - } - ] - }, - "ImageExists": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "matcher": "path", - "expected": true, - "argument": "length(Images[]) > `0`", - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidAMIID.NotFound", - "state": "retry" - } - ] - }, - "ImageAvailable": { - "operation": "DescribeImages", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Images[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Images[].State", - "expected": "failed" - } - ] - }, - "ImageUsageReportAvailable": { - "operation": 
"DescribeImageUsageReports", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "ImageUsageReports[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "ImageUsageReports[].State", - "expected": "failed" - } - ] - }, - "InstanceRunning": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "running", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "shutting-down", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].InstanceStatus.Status", - "expected": "ok" - }, - { - "matcher": "error", - "expected": "InvalidInstanceID.NotFound", - "state": "retry" - } - ] - }, - "InstanceStopped": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "stopped", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "terminated", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InstanceTerminated": { - "delay": 15, - "operation": "DescribeInstances", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "terminated", - "matcher": "pathAll", - "state": "success", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - }, - { - "expected": "stopping", - "matcher": "pathAny", - "state": "failure", - "argument": "Reservations[].Instances[].State.Name" - } - ] - }, - "InternetGatewayExists": { - "operation": "DescribeInternetGateways", - "delay": 5, - "maxAttempts": 6, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(InternetGateways[].InternetGatewayId) > `0`" - }, - { - "expected": "InvalidInternetGateway.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "KeyPairExists": { - "operation": "DescribeKeyPairs", - "delay": 5, - "maxAttempts": 6, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(KeyPairs[].KeyName) > `0`" - }, - { - "expected": "InvalidKeyPair.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "NatGatewayAvailable": { - "operation": "DescribeNatGateways", - "delay": 15, - "maxAttempts": 40, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "NatGateways[].State", - "expected": "available" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "failed" - }, - { - 
"state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleting" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "NatGateways[].State", - "expected": "deleted" - }, - { - "state": "retry", - "matcher": "error", - "expected": "NatGatewayNotFound" - } - ] - }, - "NatGatewayDeleted": { - "operation": "DescribeNatGateways", - "delay": 15, - "maxAttempts": 40, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "NatGateways[].State", - "expected": "deleted" - }, - { - "state": "success", - "matcher": "error", - "expected": "NatGatewayNotFound" - } - ] - }, - "NetworkInterfaceAvailable": { - "operation": "DescribeNetworkInterfaces", - "delay": 20, - "maxAttempts": 10, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "NetworkInterfaces[].Status" - }, - { - "expected": "InvalidNetworkInterfaceID.NotFound", - "matcher": "error", - "state": "failure" - } - ] - }, - "PasswordDataAvailable": { - "operation": "GetPasswordData", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "length(PasswordData) > `0`", - "expected": true - } - ] - }, - "SnapshotCompleted": { - "delay": 15, - "operation": "DescribeSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].State" - }, - { - "expected": "error", - "matcher": "pathAny", - "state": "failure", - "argument": "Snapshots[].State" - } - ] - }, - "SnapshotImported": { - "delay": 15, - "operation": "DescribeImportSnapshotTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "completed", - "matcher": "pathAll", - "state": "success", - "argument": "ImportSnapshotTasks[].SnapshotTaskDetail.Status" - }, - { - "expected": "error", - "matcher": "pathAny", - "state": "failure", - "argument": "ImportSnapshotTasks[].SnapshotTaskDetail.Status" - } - ] - }, - "SecurityGroupExists": { - "operation": "DescribeSecurityGroups", - "delay": 5, - "maxAttempts": 6, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(SecurityGroups[].GroupId) > `0`" - }, - { - "expected": "InvalidGroup.NotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "SecurityGroupVpcAssociationAssociated": { - "delay": 10, - "maxAttempts": 7, - "operation": "DescribeSecurityGroupVpcAssociations", - "acceptors": [ - { - "expected": "associated", - "matcher": "pathAll", - "state": "success", - "argument": "SecurityGroupVpcAssociations[].State" - }, - { - "expected": "associating", - "matcher": "pathAny", - "state": "retry", - "argument": "SecurityGroupVpcAssociations[].State" - }, - { - "expected": "association-failed", - "matcher": "pathAny", - "state": "failure", - "argument": "SecurityGroupVpcAssociations[].State" - } - ] - }, - "SecurityGroupVpcAssociationDisassociated": { - "delay": 10, - "maxAttempts": 7, - "operation": "DescribeSecurityGroupVpcAssociations", - "acceptors": [ - { - "expected": "disassociated", - "matcher": "pathAll", - "state": "success", - "argument": "SecurityGroupVpcAssociations[].State" - }, - { - "expected": "disassociating", - "matcher": "pathAny", - "state": "retry", - "argument": "SecurityGroupVpcAssociations[].State" - }, - { - "expected": "disassociation-failed", - "matcher": "pathAny", - "state": "failure", - "argument": "SecurityGroupVpcAssociations[].State" - }, - { - "expected": true, - 
"matcher": "path", - "state": "success", - "argument": "length(SecurityGroupVpcAssociations[]) == `0`" - } - ] - }, - "SpotInstanceRequestFulfilled": { - "operation": "DescribeSpotInstanceRequests", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "fulfilled" - }, - { - "state": "success", - "matcher": "pathAll", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "request-canceled-and-instance-running" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "schedule-expired" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "canceled-before-fulfillment" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "bad-parameters" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "SpotInstanceRequests[].Status.Code", - "expected": "system-error" - }, - { - "state": "retry", - "matcher": "error", - "expected": "InvalidSpotInstanceRequestID.NotFound" - } - ] - }, - "StoreImageTaskComplete": { - "delay": 5, - "operation": "DescribeStoreImageTasks", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "Completed", - "matcher": "pathAll", - "state": "success", - "argument": "StoreImageTaskResults[].StoreTaskState" - }, - { - "expected": "Failed", - "matcher": "pathAny", - "state": "failure", - "argument": "StoreImageTaskResults[].StoreTaskState" - }, - { - "expected": "InProgress", - "matcher": "pathAny", - "state": "retry", - "argument": "StoreImageTaskResults[].StoreTaskState" - } - ] - }, - "SubnetAvailable": { - "delay": 15, - "operation": "DescribeSubnets", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Subnets[].State" - } - ] - }, - "SystemStatusOk": { - "operation": "DescribeInstanceStatus", - "maxAttempts": 40, - "delay": 15, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "InstanceStatuses[].SystemStatus.Status", - "expected": "ok" - } - ] - }, - "VolumeAvailable": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VolumeDeleted": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "matcher": "error", - "expected": "InvalidVolume.NotFound", - "state": "success" - } - ] - }, - "VolumeInUse": { - "delay": 15, - "operation": "DescribeVolumes", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "in-use", - "matcher": "pathAll", - "state": "success", - "argument": "Volumes[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Volumes[].State" - } - ] - }, - "VpcAvailable": { - "delay": 15, - "operation": "DescribeVpcs", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Vpcs[].State" - } - ] - }, - "VpcExists": { - "operation": "DescribeVpcs", - "delay": 1, - "maxAttempts": 5, 
- "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidVpcID.NotFound", - "state": "retry" - } - ] - }, - "VpnConnectionAvailable": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpnConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpnConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpnConnections[].State" - }, - { - "expected": "pending", - "matcher": "pathAny", - "state": "failure", - "argument": "VpnConnections[].State" - } - ] - }, - "VpcPeeringConnectionExists": { - "delay": 15, - "operation": "DescribeVpcPeeringConnections", - "maxAttempts": 40, - "acceptors": [ - { - "matcher": "status", - "expected": 200, - "state": "success" - }, - { - "matcher": "error", - "expected": "InvalidVpcPeeringConnectionID.NotFound", - "state": "retry" - } - ] - }, - "VpcPeeringConnectionDeleted": { - "delay": 15, - "operation": "DescribeVpcPeeringConnections", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "VpcPeeringConnections[].Status.Code" - }, - { - "matcher": "error", - "expected": "InvalidVpcPeeringConnectionID.NotFound", - "state": "success" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 93e02d3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/examples-1.json b/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/paginators-1.json deleted file mode 100644 index b9dbda4..0000000 --- a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "DescribeImageTags": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "imageTagDetails" - }, - "DescribeImages": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "imageDetails" - }, - "DescribeRegistries": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "registries" - }, - "DescribeRepositories": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "repositories" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/service-2.json.gz deleted file mode 100644 index c51c6ce..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ecr-public/2020-10-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0ec56a9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/examples-1.json b/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/examples-1.json deleted file mode 100644 index 7daf57f..0000000 --- a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/examples-1.json +++ /dev/null @@ -1,195 +0,0 @@ -{ - "version": "1.0", - "examples": { - "BatchDeleteImage": [ - { - "input": { - "imageIds": [ - { - "imageTag": "precise" - } - ], - "repositoryName": "ubuntu" - }, - "output": { - "failures": [ - - ], - "imageIds": [ - { - "imageDigest": "sha256:examplee6d1e504117a17000003d3753086354a38375961f2e665416ef4b1b2f", - "imageTag": "precise" - } - ] - }, - "comments": { - }, - "description": "This example deletes images with the tags precise and trusty in a repository called ubuntu in the default registry for an account.", - "id": "batchdeleteimages-example-1470860541707", - "title": "To delete multiple images" - } - ], - "BatchGetImage": [ - { - "input": { - "imageIds": [ - { - "imageTag": "precise" - } - ], - "repositoryName": "ubuntu" - }, - "output": { - "failures": [ - - ], - "images": [ - { - "imageId": { - "imageDigest": "sha256:example76bdff6d83a09ba2a818f0d00000063724a9ac3ba5019c56f74ebf42a", - "imageTag": "precise" - }, - "imageManifest": "{\n \"schemaVersion\": 1,\n \"name\": \"ubuntu\",\n \"tag\": \"precise\",\n...", - "registryId": "244698725403", - "repositoryName": "ubuntu" - } - ] - }, - "comments": { - "output": { - "imageManifest": "In this example, the imageManifest in the output JSON has been truncated." - } - }, - "description": "This example obtains information for an image with a specified image digest ID from the repository named ubuntu in the current account.", - "id": "batchgetimage-example-1470862771437", - "title": "To obtain multiple images in a single request" - } - ], - "CreateRepository": [ - { - "input": { - "repositoryName": "project-a/nginx-web-app" - }, - "output": { - "repository": { - "registryId": "012345678901", - "repositoryArn": "arn:aws:ecr:us-west-2:012345678901:repository/project-a/nginx-web-app", - "repositoryName": "project-a/nginx-web-app" - } - }, - "comments": { - "output": { - "imageManifest": "In this example, the imageManifest in the output JSON has been truncated." - } - }, - "description": "This example creates a repository called nginx-web-app inside the project-a namespace in the default registry for an account.", - "id": "createrepository-example-1470863688724", - "title": "To create a new repository" - } - ], - "DeleteRepository": [ - { - "input": { - "force": true, - "repositoryName": "ubuntu" - }, - "output": { - "repository": { - "registryId": "012345678901", - "repositoryArn": "arn:aws:ecr:us-west-2:012345678901:repository/ubuntu", - "repositoryName": "ubuntu" - } - }, - "comments": { - "output": { - "imageManifest": "In this example, the imageManifest in the output JSON has been truncated." 
- } - }, - "description": "This example force deletes a repository named ubuntu in the default registry for an account. The force parameter is required if the repository contains images.", - "id": "deleterepository-example-1470863805703", - "title": "To force delete a repository" - } - ], - "DeleteRepositoryPolicy": [ - { - "input": { - "repositoryName": "ubuntu" - }, - "output": { - "policyText": "{ ... }", - "registryId": "012345678901", - "repositoryName": "ubuntu" - }, - "comments": { - }, - "description": "This example deletes the policy associated with the repository named ubuntu in the current account.", - "id": "deleterepositorypolicy-example-1470866943748", - "title": "To delete the policy associated with a repository" - } - ], - "DescribeRepositories": [ - { - "input": { - }, - "output": { - "repositories": [ - { - "registryId": "012345678910", - "repositoryArn": "arn:aws:ecr:us-west-2:012345678910:repository/ubuntu", - "repositoryName": "ubuntu" - }, - { - "registryId": "012345678910", - "repositoryArn": "arn:aws:ecr:us-west-2:012345678910:repository/test", - "repositoryName": "test" - } - ] - }, - "comments": { - "output": { - } - }, - "description": "The following example obtains a list and description of all repositories in the default registry to which the current user has access.", - "id": "describe-repositories-1470856017467", - "title": "To describe all repositories in the current account" - } - ], - "GetRepositoryPolicy": [ - { - "input": { - "repositoryName": "ubuntu" - }, - "output": { - "policyText": "{\n \"Version\" : \"2008-10-17\",\n \"Statement\" : [ {\n \"Sid\" : \"new statement\",\n \"Effect\" : \"Allow\",\n \"Principal\" : {\n \"AWS\" : \"arn:aws:iam::012345678901:role/CodeDeployDemo\"\n },\n\"Action\" : [ \"ecr:GetDownloadUrlForLayer\", \"ecr:BatchGetImage\", \"ecr:BatchCheckLayerAvailability\" ]\n } ]\n}", - "registryId": "012345678901", - "repositoryName": "ubuntu" - }, - "comments": { - }, - "description": "This example obtains the repository policy for the repository named ubuntu.", - "id": "getrepositorypolicy-example-1470867669211", - "title": "To get the current policy for a repository" - } - ], - "ListImages": [ - { - "input": { - "repositoryName": "ubuntu" - }, - "output": { - "imageIds": [ - { - "imageDigest": "sha256:764f63476bdff6d83a09ba2a818f0d35757063724a9ac3ba5019c56f74ebf42a", - "imageTag": "precise" - } - ] - }, - "comments": { - }, - "description": "This example lists all of the images in the repository named ubuntu in the default registry in the current account. 
", - "id": "listimages-example-1470868161594", - "title": "To list all images in a repository" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/paginators-1.json b/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/paginators-1.json deleted file mode 100644 index a2161d3..0000000 --- a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/paginators-1.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "pagination": { - "ListImages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageIds" - }, - "DescribeImages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageDetails" - }, - "DescribeRepositories": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "repositories" - }, - "DescribeImageScanFindings": { - "input_token": "nextToken", - "limit_key": "maxResults", - "non_aggregate_keys": [ - "registryId", - "repositoryName", - "imageId", - "imageScanStatus", - "imageScanFindings" - ], - "output_token": "nextToken", - "result_key": [ - "imageScanFindings.findings", - "imageScanFindings.enhancedFindings" - ] - }, - "GetLifecyclePolicyPreview": { - "input_token": "nextToken", - "limit_key": "maxResults", - "non_aggregate_keys": [ - "registryId", - "repositoryName", - "lifecyclePolicyText", - "status", - "summary" - ], - "output_token": "nextToken", - "result_key": "previewResults" - }, - "DescribePullThroughCacheRules": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "pullThroughCacheRules" - }, - "DescribeRepositoryCreationTemplates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "non_aggregate_keys": [ - "registryId" - ], - "output_token": "nextToken", - "result_key": "repositoryCreationTemplates" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/service-2.json.gz deleted file mode 100644 index 5e798ec..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/waiters-2.json b/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/waiters-2.json deleted file mode 100644 index 9ef9608..0000000 --- a/venv/Lib/site-packages/botocore/data/ecr/2015-09-21/waiters-2.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "version": 2, - "waiters": { - "ImageScanComplete": { - "description": "Wait until an image scan is complete and findings can be accessed", - "operation": "DescribeImageScanFindings", - "delay": 5, - "maxAttempts": 60, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "imageScanStatus.status", - "expected": "COMPLETE" - }, - { - "state": "failure", - "matcher": "path", - "argument": "imageScanStatus.status", - "expected": "FAILED" - } - ] - }, - "LifecyclePolicyPreviewComplete": { - "description": "Wait until a lifecycle policy preview request is complete and results can be accessed", - "operation": "GetLifecyclePolicyPreview", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "status", - "expected": "COMPLETE" - }, - { - "state": "failure", - "matcher": "path", - "argument": "status", - "expected": "FAILED" - } - ] - } - } -} \ No newline at end of file diff --git 
a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/endpoint-rule-set-1.json.gz deleted file mode 100644 index 98b8428..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/examples-1.json b/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/examples-1.json deleted file mode 100644 index 0fbf7b3..0000000 --- a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/examples-1.json +++ /dev/null @@ -1,1137 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateCluster": [ - { - "input": { - "clusterName": "my_cluster" - }, - "output": { - "cluster": { - "activeServicesCount": 0, - "clusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/my_cluster", - "clusterName": "my_cluster", - "pendingTasksCount": 0, - "registeredContainerInstancesCount": 0, - "runningTasksCount": 0, - "status": "ACTIVE" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a cluster in your default region.", - "id": "to-create-a-new-cluster-1472514079365", - "title": "To create a new cluster" - } - ], - "CreateService": [ - { - "input": { - "desiredCount": 10, - "serviceName": "ecs-simple-service", - "taskDefinition": "hello_world" - }, - "output": { - "service": { - "clusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/default", - "createdAt": "2016-08-29T16:13:47.298Z", - "deploymentConfiguration": { - "maximumPercent": 200, - "minimumHealthyPercent": 100 - }, - "deployments": [ - { - "createdAt": "2016-08-29T16:13:47.298Z", - "desiredCount": 10, - "id": "ecs-svc/9223370564342348388", - "pendingCount": 0, - "runningCount": 0, - "status": "PRIMARY", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:6", - "updatedAt": "2016-08-29T16:13:47.298Z" - }, - { - "createdAt": "2016-08-29T15:52:44.481Z", - "desiredCount": 0, - "id": "ecs-svc/9223370564343611322", - "pendingCount": 0, - "runningCount": 0, - "status": "ACTIVE", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:6", - "updatedAt": "2016-08-29T16:11:38.941Z" - } - ], - "desiredCount": 10, - "events": [ - - ], - "loadBalancers": [ - - ], - "pendingCount": 0, - "runningCount": 0, - "serviceArn": "arn:aws:ecs:us-east-1:012345678910:service/ecs-simple-service", - "serviceName": "ecs-simple-service", - "status": "ACTIVE", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:6" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a service in your default region called ``ecs-simple-service``. 
The service uses the ``hello_world`` task definition and it maintains 10 copies of that task.", - "id": "to-create-a-new-service-1472512584282", - "title": "To create a new service" - }, - { - "input": { - "desiredCount": 10, - "loadBalancers": [ - { - "containerName": "simple-app", - "containerPort": 80, - "loadBalancerName": "EC2Contai-EcsElast-15DCDAURT3ZO2" - } - ], - "role": "ecsServiceRole", - "serviceName": "ecs-simple-service-elb", - "taskDefinition": "console-sample-app-static" - }, - "output": { - "service": { - "clusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/default", - "createdAt": "2016-08-29T16:02:54.884Z", - "deploymentConfiguration": { - "maximumPercent": 200, - "minimumHealthyPercent": 100 - }, - "deployments": [ - { - "createdAt": "2016-08-29T16:02:54.884Z", - "desiredCount": 10, - "id": "ecs-svc/9223370564343000923", - "pendingCount": 0, - "runningCount": 0, - "status": "PRIMARY", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/console-sample-app-static:6", - "updatedAt": "2016-08-29T16:02:54.884Z" - } - ], - "desiredCount": 10, - "events": [ - - ], - "loadBalancers": [ - { - "containerName": "simple-app", - "containerPort": 80, - "loadBalancerName": "EC2Contai-EcsElast-15DCDAURT3ZO2" - } - ], - "pendingCount": 0, - "roleArn": "arn:aws:iam::012345678910:role/ecsServiceRole", - "runningCount": 0, - "serviceArn": "arn:aws:ecs:us-east-1:012345678910:service/ecs-simple-service-elb", - "serviceName": "ecs-simple-service-elb", - "status": "ACTIVE", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/console-sample-app-static:6" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a service in your default region called ``ecs-simple-service-elb``. The service uses the ``ecs-demo`` task definition and it maintains 10 copies of that task. You must reference an existing load balancer in the same region by its name.", - "id": "to-create-a-new-service-behind-a-load-balancer-1472512484823", - "title": "To create a new service behind a load balancer" - } - ], - "DeleteAccountSetting": [ - { - "input": { - "name": "serviceLongArnFormat" - }, - "output": { - "setting": { - "name": "serviceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the account setting for your user for the specified resource type.", - "id": "to-delete-the-account-setting-for-your-user-account-1549524548115", - "title": "To delete your account setting" - }, - { - "input": { - "name": "containerInstanceLongArnFormat", - "principalArn": "arn:aws:iam:::user/principalName" - }, - "output": { - "setting": { - "name": "containerInstanceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the account setting for a specific IAM user or IAM role for the specified resource type. 
Only the root user can view or modify the account settings for another user.", - "id": "to-delete-the-account-setting-for-a-specific-iam-user-or-iam-role-1549524612917", - "title": "To delete the account settings for a specific IAM user or IAM role" - } - ], - "DeleteCluster": [ - { - "input": { - "cluster": "my_cluster" - }, - "output": { - "cluster": { - "activeServicesCount": 0, - "clusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/my_cluster", - "clusterName": "my_cluster", - "pendingTasksCount": 0, - "registeredContainerInstancesCount": 0, - "runningTasksCount": 0, - "status": "INACTIVE" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes an empty cluster in your default region.", - "id": "to-delete-an-empty-cluster-1472512705352", - "title": "To delete an empty cluster" - } - ], - "DeleteService": [ - { - "input": { - "service": "my-http-service" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the my-http-service service. The service must have a desired count and running count of 0 before you can delete it.", - "id": "e8183e38-f86e-4390-b811-f74f30a6007d", - "title": "To delete a service" - } - ], - "DeregisterContainerInstance": [ - { - "input": { - "cluster": "default", - "containerInstance": "container_instance_UUID", - "force": true - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deregisters a container instance from the specified cluster in your default region. If there are still tasks running on the container instance, you must either stop those tasks before deregistering, or use the force option.", - "id": "bf624927-cf64-4f4b-8b7e-c024a4e682f6", - "title": "To deregister a container instance from a cluster" - } - ], - "DescribeClusters": [ - { - "input": { - "clusters": [ - "default" - ] - }, - "output": { - "clusters": [ - { - "clusterArn": "arn:aws:ecs:us-east-1:aws_account_id:cluster/default", - "clusterName": "default", - "status": "ACTIVE" - } - ], - "failures": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example provides a description of the specified cluster in your default region.", - "id": "ba88d100-9672-4231-80da-a4bd210bf728", - "title": "To describe a cluster" - } - ], - "DescribeContainerInstances": [ - { - "input": { - "cluster": "default", - "containerInstances": [ - "f2756532-8f13-4d53-87c9-aed50dc94cd7" - ] - }, - "output": { - "containerInstances": [ - { - "agentConnected": true, - "containerInstanceArn": "arn:aws:ecs:us-east-1:012345678910:container-instance/f2756532-8f13-4d53-87c9-aed50dc94cd7", - "ec2InstanceId": "i-807f3249", - "pendingTasksCount": 0, - "registeredResources": [ - { - "name": "CPU", - "type": "INTEGER", - "doubleValue": 0.0, - "integerValue": 2048, - "longValue": 0 - }, - { - "name": "MEMORY", - "type": "INTEGER", - "doubleValue": 0.0, - "integerValue": 3768, - "longValue": 0 - }, - { - "name": "PORTS", - "type": "STRINGSET", - "doubleValue": 0.0, - "integerValue": 0, - "longValue": 0, - "stringSetValue": [ - "2376", - "22", - "51678", - "2375" - ] - } - ], - "remainingResources": [ - { - "name": "CPU", - "type": "INTEGER", - "doubleValue": 0.0, - "integerValue": 1948, - "longValue": 0 - }, - { - "name": "MEMORY", - "type": "INTEGER", - "doubleValue": 0.0, - "integerValue": 3668, - "longValue": 0 - }, - { - "name": "PORTS", - "type": "STRINGSET", - "doubleValue": 0.0, - "integerValue": 0, - 
"longValue": 0, - "stringSetValue": [ - "2376", - "22", - "80", - "51678", - "2375" - ] - } - ], - "runningTasksCount": 1, - "status": "ACTIVE" - } - ], - "failures": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example provides a description of the specified container instance in your default region, using the container instance UUID as an identifier.", - "id": "c8f439de-eb27-4269-8ca7-2c0a7ba75ab0", - "title": "To describe container instance" - } - ], - "DescribeServices": [ - { - "input": { - "services": [ - "ecs-simple-service" - ] - }, - "output": { - "failures": [ - - ], - "services": [ - { - "clusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/default", - "createdAt": "2016-08-29T16:25:52.130Z", - "deploymentConfiguration": { - "maximumPercent": 200, - "minimumHealthyPercent": 100 - }, - "deployments": [ - { - "createdAt": "2016-08-29T16:25:52.130Z", - "desiredCount": 1, - "id": "ecs-svc/9223370564341623665", - "pendingCount": 0, - "runningCount": 0, - "status": "PRIMARY", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:6", - "updatedAt": "2016-08-29T16:25:52.130Z" - } - ], - "desiredCount": 1, - "events": [ - { - "createdAt": "2016-08-29T16:25:58.520Z", - "id": "38c285e5-d335-4b68-8b15-e46dedc8e88d", - "message": "(service ecs-simple-service) was unable to place a task because no container instance met all of its requirements. The closest matching (container-instance 3f4de1c5-ffdd-4954-af7e-75b4be0c8841) is already using a port required by your task. For more information, see the Troubleshooting section of the Amazon ECS Developer Guide." - } - ], - "loadBalancers": [ - - ], - "pendingCount": 0, - "runningCount": 0, - "serviceArn": "arn:aws:ecs:us-east-1:012345678910:service/ecs-simple-service", - "serviceName": "ecs-simple-service", - "status": "ACTIVE", - "taskDefinition": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:6" - } - ] - }, - "comments": { - "input": { - }, - "output": { - "services[0].events[0].message": "In this example, there is a service event that shows unavailable cluster resources." 
- } - }, - "description": "This example provides descriptive information about the service named ``ecs-simple-service``.", - "id": "to-describe-a-service-1472513256350", - "title": "To describe a service" - } - ], - "DescribeTaskDefinition": [ - { - "input": { - "taskDefinition": "hello_world:8" - }, - "output": { - "taskDefinition": { - "containerDefinitions": [ - { - "name": "wordpress", - "cpu": 10, - "environment": [ - - ], - "essential": true, - "image": "wordpress", - "links": [ - "mysql" - ], - "memory": 500, - "mountPoints": [ - - ], - "portMappings": [ - { - "containerPort": 80, - "hostPort": 80 - } - ], - "volumesFrom": [ - - ] - }, - { - "name": "mysql", - "cpu": 10, - "environment": [ - { - "name": "MYSQL_ROOT_PASSWORD", - "value": "password" - } - ], - "essential": true, - "image": "mysql", - "memory": 500, - "mountPoints": [ - - ], - "portMappings": [ - - ], - "volumesFrom": [ - - ] - } - ], - "family": "hello_world", - "revision": 8, - "taskDefinitionArn": "arn:aws:ecs:us-east-1::task-definition/hello_world:8", - "volumes": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example provides a description of the specified task definition.", - "id": "4c21eeb1-f1da-4a08-8c44-297fc8d0ea88", - "title": "To describe a task definition" - } - ], - "DescribeTasks": [ - { - "input": { - "tasks": [ - "c5cba4eb-5dad-405e-96db-71ef8eefe6a8" - ] - }, - "output": { - "failures": [ - - ], - "tasks": [ - { - "clusterArn": "arn:aws:ecs:::cluster/default", - "containerInstanceArn": "arn:aws:ecs:::container-instance/18f9eda5-27d7-4c19-b133-45adc516e8fb", - "containers": [ - { - "name": "ecs-demo", - "containerArn": "arn:aws:ecs:::container/7c01765b-c588-45b3-8290-4ba38bd6c5a6", - "lastStatus": "RUNNING", - "networkBindings": [ - { - "bindIP": "0.0.0.0", - "containerPort": 80, - "hostPort": 80 - } - ], - "taskArn": "arn:aws:ecs:::task/c5cba4eb-5dad-405e-96db-71ef8eefe6a8" - } - ], - "desiredStatus": "RUNNING", - "lastStatus": "RUNNING", - "overrides": { - "containerOverrides": [ - { - "name": "ecs-demo" - } - ] - }, - "startedBy": "ecs-svc/9223370608528463088", - "taskArn": "arn:aws:ecs:::task/c5cba4eb-5dad-405e-96db-71ef8eefe6a8", - "taskDefinitionArn": "arn:aws:ecs:::task-definition/amazon-ecs-sample:1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example provides a description of the specified task, using the task UUID as an identifier.", - "id": "a90b0cde-f965-4946-b55e-cfd8cc54e827", - "title": "To describe a task" - } - ], - "ListAccountSettings": [ - { - "input": { - "effectiveSettings": true - }, - "output": { - "settings": [ - { - "name": "containerInstanceLongArnFormat", - "value": "disabled", - "principalArn": "arn:aws:iam:::user/principalName" - }, - { - "name": "serviceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - }, - { - "name": "taskLongArnFormat", - "value": "disabled", - "principalArn": "arn:aws:iam:::user/principalName" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example displays the effective account settings for your account.", - "id": "to-view-your-account-settings-1549524118170", - "title": "To view your effective account settings" - }, - { - "input": { - "effectiveSettings": true, - "principalArn": "arn:aws:iam:::user/principalName" - }, - "output": { - "settings": [ - { - "name": "containerInstanceLongArnFormat", - "value": "disabled", - "principalArn": 
"arn:aws:iam:::user/principalName" - }, - { - "name": "serviceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - }, - { - "name": "taskLongArnFormat", - "value": "disabled", - "principalArn": "arn:aws:iam:::user/principalName" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example displays the effective account settings for the specified user or role.", - "id": "to-view-the-account-settings-for-a-specific-iam-user-or-iam-role-1549524237932", - "title": "To view the effective account settings for a specific IAM user or IAM role" - } - ], - "ListClusters": [ - { - "input": { - }, - "output": { - "clusterArns": [ - "arn:aws:ecs:us-east-1::cluster/test", - "arn:aws:ecs:us-east-1::cluster/default" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of your available clusters in your default region.", - "id": "e337d059-134f-4125-ba8e-4f499139facf", - "title": "To list your available clusters" - } - ], - "ListContainerInstances": [ - { - "input": { - "cluster": "default" - }, - "output": { - "containerInstanceArns": [ - "arn:aws:ecs:us-east-1::container-instance/f6bbb147-5370-4ace-8c73-c7181ded911f", - "arn:aws:ecs:us-east-1::container-instance/ffe3d344-77e2-476c-a4d0-bf560ad50acb" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of your available container instances in the specified cluster in your default region.", - "id": "62a82a94-713c-4e18-8420-1d2b2ba9d484", - "title": "To list your available container instances in a cluster" - } - ], - "ListServices": [ - { - "input": { - }, - "output": { - "serviceArns": [ - "arn:aws:ecs:us-east-1:012345678910:service/my-http-service" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the services running in the default cluster for an account.", - "id": "1d9a8037-4e0e-4234-a528-609656809a3a", - "title": "To list the services in a cluster" - } - ], - "ListTagsForResource": [ - { - "input": { - "resourceArn": "arn:aws:ecs:region:aws_account_id:cluster/dev" - }, - "output": { - "tags": [ - { - "key": "team", - "value": "dev" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the tags for the 'dev' cluster.", - "id": "to-list-the-tags-for-a-cluster-1540582700259", - "title": "To list the tags for a cluster." 
- } - ], - "ListTaskDefinitionFamilies": [ - { - "input": { - }, - "output": { - "families": [ - "node-js-app", - "web-timer", - "hpcc", - "hpcc-c4-8xlarge" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of your registered task definition families.", - "id": "b5c89769-1d94-4ca2-a79e-8069103c7f75", - "title": "To list your registered task definition families" - }, - { - "input": { - "familyPrefix": "hpcc" - }, - "output": { - "families": [ - "hpcc", - "hpcc-c4-8xlarge" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the task definition revisions that start with \"hpcc\".", - "id": "8a4cf9a6-42c1-4fe3-852d-99ac8968e11b", - "title": "To filter your registered task definition families" - } - ], - "ListTaskDefinitions": [ - { - "input": { - }, - "output": { - "taskDefinitionArns": [ - "arn:aws:ecs:us-east-1::task-definition/sleep300:2", - "arn:aws:ecs:us-east-1::task-definition/sleep360:1", - "arn:aws:ecs:us-east-1::task-definition/wordpress:3", - "arn:aws:ecs:us-east-1::task-definition/wordpress:4", - "arn:aws:ecs:us-east-1::task-definition/wordpress:5", - "arn:aws:ecs:us-east-1::task-definition/wordpress:6" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of your registered task definitions.", - "id": "b381ebaf-7eba-4d60-b99b-7f6ae49d3d60", - "title": "To list your registered task definitions" - }, - { - "input": { - "familyPrefix": "wordpress" - }, - "output": { - "taskDefinitionArns": [ - "arn:aws:ecs:us-east-1::task-definition/wordpress:3", - "arn:aws:ecs:us-east-1::task-definition/wordpress:4", - "arn:aws:ecs:us-east-1::task-definition/wordpress:5", - "arn:aws:ecs:us-east-1::task-definition/wordpress:6" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the task definition revisions of a specified family.", - "id": "734e7afd-753a-4bc2-85d0-badddce10910", - "title": "To list the registered task definitions in a family" - } - ], - "ListTasks": [ - { - "input": { - "cluster": "default" - }, - "output": { - "taskArns": [ - "arn:aws:ecs:us-east-1:012345678910:task/0cc43cdb-3bee-4407-9c26-c0e6ea5bee84", - "arn:aws:ecs:us-east-1:012345678910:task/6b809ef6-c67e-4467-921f-ee261c15a0a1" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of the tasks in a cluster.", - "id": "9a6ec707-1a77-45d0-b2eb-516b5dd9e924", - "title": "To list the tasks in a cluster" - }, - { - "input": { - "cluster": "default", - "containerInstance": "f6bbb147-5370-4ace-8c73-c7181ded911f" - }, - "output": { - "taskArns": [ - "arn:aws:ecs:us-east-1:012345678910:task/0cc43cdb-3bee-4407-9c26-c0e6ea5bee84" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the tasks of a specified container instance. 
Specifying a ``containerInstance`` value limits the results to tasks that belong to that container instance.", - "id": "024bf3b7-9cbb-44e3-848f-9d074e1fecce", - "title": "To list the tasks on a particular container instance" - } - ], - "PutAccountSetting": [ - { - "input": { - "name": "serviceLongArnFormat", - "value": "enabled" - }, - "output": { - "setting": { - "name": "serviceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies your account settings to opt in to the new ARN and resource ID format for Amazon ECS services. If you’re using this command as the root user, then changes apply to the entire AWS account, unless an IAM user or role explicitly overrides these settings for themselves.", - "id": "to-modify-the-account-settings-for-your-iam-user-account-1549523130939", - "title": "To modify your account settings" - }, - { - "input": { - "name": "containerInstanceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - }, - "output": { - "setting": { - "name": "containerInstanceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::user/principalName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the account setting for a specific IAM user or IAM role to opt in to the new ARN and resource ID format for Amazon ECS container instances. If you’re using this command as the root user, then changes apply to the entire AWS account, unless an IAM user or role explicitly overrides these settings for themselves.", - "id": "to-modify-the-account-settings-for-a-specific-iam-user-or-iam-role-1549523518390", - "title": "To modify the account settings for a specific IAM user or IAM role" - } - ], - "PutAccountSettingDefault": [ - { - "input": { - "name": "serviceLongArnFormat", - "value": "enabled" - }, - "output": { - "setting": { - "name": "serviceLongArnFormat", - "value": "enabled", - "principalArn": "arn:aws:iam:::root" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example modifies the default account setting for the specified resource for all IAM users or roles on an account. 
These changes apply to the entire AWS account, unless an IAM user or role explicitly overrides these settings for themselves.", - "id": "to-modify-the-default-account-settings-for-all-iam-users-or-roles-on-your-account-1549523794603", - "title": "To modify the default account settings for all IAM users or roles on an account" - } - ], - "RegisterTaskDefinition": [ - { - "input": { - "containerDefinitions": [ - { - "name": "sleep", - "command": [ - "sleep", - "360" - ], - "cpu": 10, - "essential": true, - "image": "busybox", - "memory": 10 - } - ], - "family": "sleep360", - "taskRoleArn": "", - "volumes": [ - - ] - }, - "output": { - "taskDefinition": { - "containerDefinitions": [ - { - "name": "sleep", - "command": [ - "sleep", - "360" - ], - "cpu": 10, - "environment": [ - - ], - "essential": true, - "image": "busybox", - "memory": 10, - "mountPoints": [ - - ], - "portMappings": [ - - ], - "volumesFrom": [ - - ] - } - ], - "family": "sleep360", - "revision": 1, - "taskDefinitionArn": "arn:aws:ecs:us-east-1::task-definition/sleep360:19", - "volumes": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example registers a task definition to the specified family.", - "id": "to-register-a-task-definition-1470764550877", - "title": "To register a task definition" - } - ], - "RunTask": [ - { - "input": { - "cluster": "default", - "taskDefinition": "sleep360:1" - }, - "output": { - "tasks": [ - { - "containerInstanceArn": "arn:aws:ecs:us-east-1::container-instance/ffe3d344-77e2-476c-a4d0-bf560ad50acb", - "containers": [ - { - "name": "sleep", - "containerArn": "arn:aws:ecs:us-east-1::container/58591c8e-be29-4ddf-95aa-ee459d4c59fd", - "lastStatus": "PENDING", - "taskArn": "arn:aws:ecs:us-east-1::task/a9f21ea7-c9f5-44b1-b8e6-b31f50ed33c0" - } - ], - "desiredStatus": "RUNNING", - "lastStatus": "PENDING", - "overrides": { - "containerOverrides": [ - { - "name": "sleep" - } - ] - }, - "taskArn": "arn:aws:ecs:us-east-1::task/a9f21ea7-c9f5-44b1-b8e6-b31f50ed33c0", - "taskDefinitionArn": "arn:aws:ecs:us-east-1::task-definition/sleep360:1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example runs the specified task definition on your default cluster.", - "id": "6f238c83-a133-42cd-ab3d-abeca0560445", - "title": "To run a task on your default cluster" - } - ], - "TagResource": [ - { - "input": { - "resourceArn": "arn:aws:ecs:region:aws_account_id:cluster/dev", - "tags": [ - { - "key": "team", - "value": "dev" - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example tags the 'dev' cluster with key 'team' and value 'dev'.", - "id": "to-tag-a-cluster-1540581863751", - "title": "To tag a cluster." - } - ], - "UntagResource": [ - { - "input": { - "resourceArn": "arn:aws:ecs:region:aws_account_id:cluster/dev", - "tagKeys": [ - "team" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the 'team' tag from the 'dev' cluster.", - "id": "to-untag-a-cluster-1540582546056", - "title": "To untag a cluster." 
- } - ], - "UpdateService": [ - { - "input": { - "service": "my-http-service", - "taskDefinition": "amazon-ecs-sample" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example updates the my-http-service service to use the amazon-ecs-sample task definition.", - "id": "cc9e8900-0cc2-44d2-8491-64d1d3d37887", - "title": "To change the task definition used in a service" - }, - { - "input": { - "desiredCount": 10, - "service": "my-http-service" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example updates the desired count of the my-http-service service to 10.", - "id": "9581d6c5-02e3-4140-8cc1-5a4301586633", - "title": "To change the number of tasks in a service" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/paginators-1.json b/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/paginators-1.json deleted file mode 100644 index cd66d4a..0000000 --- a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "ListClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "clusterArns" - }, - "ListContainerInstances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "containerInstanceArns" - }, - "ListTaskDefinitions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "taskDefinitionArns" - }, - "ListTaskDefinitionFamilies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "families" - }, - "ListTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "taskArns" - }, - "ListServices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "serviceArns" - }, - "ListAccountSettings": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "settings" - }, - "ListAttributes": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "attributes" - }, - "ListServicesByNamespace": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "serviceArns" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/service-2.json.gz deleted file mode 100644 index bdaa60d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/waiters-2.json b/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/waiters-2.json deleted file mode 100644 index 8a0b19d..0000000 --- a/venv/Lib/site-packages/botocore/data/ecs/2014-11-13/waiters-2.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "version": 2, - "waiters": { - "TasksRunning": { - "delay": 6, - "operation": "DescribeTasks", - "maxAttempts": 100, - "acceptors": [ - { - "expected": "STOPPED", - "matcher": "pathAny", - "state": "failure", - "argument": "tasks[].lastStatus" - }, - { - "expected": "MISSING", - "matcher": "pathAny", - "state": "failure", - "argument": "failures[].reason" - }, - { - "expected": "RUNNING", - "matcher": "pathAll", - "state": "success", - 
"argument": "tasks[].lastStatus" - } - ] - }, - "TasksStopped": { - "delay": 6, - "operation": "DescribeTasks", - "maxAttempts": 100, - "acceptors": [ - { - "expected": "STOPPED", - "matcher": "pathAll", - "state": "success", - "argument": "tasks[].lastStatus" - } - ] - }, - "ServicesStable": { - "delay": 15, - "operation": "DescribeServices", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "MISSING", - "matcher": "pathAny", - "state": "failure", - "argument": "failures[].reason" - }, - { - "expected": "DRAINING", - "matcher": "pathAny", - "state": "failure", - "argument": "services[].status" - }, - { - "expected": "INACTIVE", - "matcher": "pathAny", - "state": "failure", - "argument": "services[].status" - }, - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(services[?!(length(deployments) == `1` && runningCount == desiredCount)]) == `0`" - } - ] - }, - "ServicesInactive": { - "delay": 15, - "operation": "DescribeServices", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "MISSING", - "matcher": "pathAny", - "state": "failure", - "argument": "failures[].reason" - }, - { - "expected": "INACTIVE", - "matcher": "pathAny", - "state": "success", - "argument": "services[].status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/efs/2015-02-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index ce45f6b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/examples-1.json b/venv/Lib/site-packages/botocore/data/efs/2015-02-01/examples-1.json deleted file mode 100644 index f3c75b3..0000000 --- a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/examples-1.json +++ /dev/null @@ -1,294 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateFileSystem": [ - { - "input": { - "Backup": true, - "CreationToken": "tokenstring", - "Encrypted": true, - "PerformanceMode": "generalPurpose", - "Tags": [ - { - "Key": "Name", - "Value": "MyFileSystem" - } - ] - }, - "output": { - "CreationTime": "1481841524.0", - "CreationToken": "tokenstring", - "Encrypted": true, - "FileSystemId": "fs-01234567", - "LifeCycleState": "creating", - "NumberOfMountTargets": 0, - "OwnerId": "012345678912", - "PerformanceMode": "generalPurpose", - "SizeInBytes": { - "Value": 0 - }, - "Tags": [ - { - "Key": "Name", - "Value": "MyFileSystem" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation creates a new, encrypted file system with automatic backups enabled, and the default generalpurpose performance mode.", - "id": "to-create-a-new-file-system-1481840798547", - "title": "To create a new file system" - } - ], - "CreateMountTarget": [ - { - "input": { - "FileSystemId": "fs-01234567", - "SubnetId": "subnet-1234abcd" - }, - "output": { - "FileSystemId": "fs-01234567", - "IpAddress": "192.0.0.2", - "LifeCycleState": "creating", - "MountTargetId": "fsmt-12340abc", - "NetworkInterfaceId": "eni-cedf6789", - "OwnerId": "012345678912", - "SubnetId": "subnet-1234abcd" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation creates a new mount target for an EFS file system.", - "id": "to-create-a-new-mount-target-1481842289329", - "title": "To create a new mount target" - } - ], - "CreateTags": [ - { - "input": { - "FileSystemId": "fs-01234567", - "Tags": [ - { 
- "Key": "Name", - "Value": "MyFileSystem" - } - ] - }, - "comments": { - }, - "description": "This operation creates a new tag for an EFS file system.", - "id": "to-create-a-new-tag-1481843409357", - "title": "To create a new tag" - } - ], - "DeleteFileSystem": [ - { - "input": { - "FileSystemId": "fs-01234567" - }, - "comments": { - }, - "description": "This operation deletes an EFS file system.", - "id": "to-delete-a-file-system-1481847318348", - "title": "To delete a file system" - } - ], - "DeleteMountTarget": [ - { - "input": { - "MountTargetId": "fsmt-12340abc" - }, - "comments": { - }, - "description": "This operation deletes a mount target.", - "id": "to-delete-a-mount-target-1481847635607", - "title": "To delete a mount target" - } - ], - "DeleteTags": [ - { - "input": { - "FileSystemId": "fs-01234567", - "TagKeys": [ - "Name" - ] - }, - "comments": { - }, - "description": "This operation deletes tags for an EFS file system.", - "id": "to-delete-tags-for-an-efs-file-system-1481848189061", - "title": "To delete tags for an EFS file system" - } - ], - "DescribeFileSystems": [ - { - "input": { - }, - "output": { - "FileSystems": [ - { - "CreationTime": "1481841524.0", - "CreationToken": "tokenstring", - "FileSystemId": "fs-01234567", - "LifeCycleState": "available", - "Name": "MyFileSystem", - "NumberOfMountTargets": 1, - "OwnerId": "012345678912", - "PerformanceMode": "generalPurpose", - "SizeInBytes": { - "Value": 6144 - }, - "Tags": [ - { - "Key": "Name", - "Value": "MyFileSystem" - } - ] - } - ] - }, - "comments": { - }, - "description": "This operation describes all of the EFS file systems in an account.", - "id": "to-describe-an-efs-file-system-1481848448460", - "title": "To describe an EFS file system" - } - ], - "DescribeLifecycleConfiguration": [ - { - "input": { - "FileSystemId": "fs-01234567" - }, - "output": { - "LifecyclePolicies": [ - { - "TransitionToIA": "AFTER_30_DAYS" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation describes a file system's LifecycleConfiguration. EFS lifecycle management uses the LifecycleConfiguration object to identify which files to move to the EFS Infrequent Access (IA) storage class. 
", - "id": "to-describe-the-lifecycle-configuration-for-a-file-system-1551200664502", - "title": "To describe the lifecycle configuration for a file system" - } - ], - "DescribeMountTargetSecurityGroups": [ - { - "input": { - "MountTargetId": "fsmt-12340abc" - }, - "output": { - "SecurityGroups": [ - "sg-4567abcd" - ] - }, - "comments": { - }, - "description": "This operation describes all of the security groups for a file system's mount target.", - "id": "to-describe-the-security-groups-for-a-mount-target-1481849317823", - "title": "To describe the security groups for a mount target" - } - ], - "DescribeMountTargets": [ - { - "input": { - "FileSystemId": "fs-01234567" - }, - "output": { - "MountTargets": [ - { - "FileSystemId": "fs-01234567", - "IpAddress": "192.0.0.2", - "LifeCycleState": "available", - "MountTargetId": "fsmt-12340abc", - "NetworkInterfaceId": "eni-cedf6789", - "OwnerId": "012345678912", - "SubnetId": "subnet-1234abcd" - } - ] - }, - "comments": { - }, - "description": "This operation describes all of a file system's mount targets.", - "id": "to-describe-the-mount-targets-for-a-file-system-1481849958584", - "title": "To describe the mount targets for a file system" - } - ], - "DescribeTags": [ - { - "input": { - "FileSystemId": "fs-01234567" - }, - "output": { - "Tags": [ - { - "Key": "Name", - "Value": "MyFileSystem" - } - ] - }, - "comments": { - }, - "description": "This operation describes all of a file system's tags.", - "id": "to-describe-the-tags-for-a-file-system-1481850497090", - "title": "To describe the tags for a file system" - } - ], - "ModifyMountTargetSecurityGroups": [ - { - "input": { - "MountTargetId": "fsmt-12340abc", - "SecurityGroups": [ - "sg-abcd1234" - ] - }, - "comments": { - }, - "description": "This operation modifies the security groups associated with a mount target for a file system.", - "id": "to-modify-the-security-groups-associated-with-a-mount-target-for-a-file-system-1481850772562", - "title": "To modify the security groups associated with a mount target for a file system" - } - ], - "PutLifecycleConfiguration": [ - { - "input": { - "FileSystemId": "fs-01234567", - "LifecyclePolicies": [ - { - "TransitionToIA": "AFTER_30_DAYS" - } - ] - }, - "output": { - "LifecyclePolicies": [ - { - "TransitionToIA": "AFTER_30_DAYS" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation enables lifecycle management on a file system by creating a new LifecycleConfiguration object. A LifecycleConfiguration object defines when files in an Amazon EFS file system are automatically transitioned to the lower-cost EFS Infrequent Access (IA) storage class. 
A LifecycleConfiguration applies to all files in a file system.", - "id": "creates-a-new-lifecycleconfiguration-object-for-a-file-system-1551201594692", - "title": "Creates a new lifecycleconfiguration object for a file system" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/efs/2015-02-01/paginators-1.json deleted file mode 100644 index 047d3e2..0000000 --- a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "DescribeFileSystems": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "FileSystems" - }, - "DescribeMountTargets": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "MountTargets" - }, - "DescribeTags": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "Tags" - }, - "DescribeAccessPoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AccessPoints" - }, - "DescribeReplicationConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Replications" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/efs/2015-02-01/service-2.json.gz deleted file mode 100644 index cfac374..0000000 Binary files a/venv/Lib/site-packages/botocore/data/efs/2015-02-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1c8e891..0000000 Binary files a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/service-2.json.gz deleted file mode 100644 index 8ad6731..0000000 Binary files a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/waiters-2.json deleted file mode 100644 index 4b20636..0000000 --- a/venv/Lib/site-packages/botocore/data/eks-auth/2023-11-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/eks/2017-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index d184892..0000000 Binary files a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/eks/2017-11-01/examples-1.json deleted 
file mode 100644 index 8ea2517..0000000 --- a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/examples-1.json +++ /dev/null @@ -1,135 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateCluster": [ - { - "input": { - "version": "1.10", - "name": "prod", - "clientRequestToken": "1d2129a1-3d38-460a-9756-e5b91fddb951", - "resourcesVpcConfig": { - "securityGroupIds": [ - "sg-6979fe18" - ], - "subnetIds": [ - "subnet-6782e71e", - "subnet-e7e761ac" - ] - }, - "roleArn": "arn:aws:iam::012345678910:role/eks-service-role-AWSServiceRoleForAmazonEKS-J7ONKE3BQ4PI" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an Amazon EKS cluster called prod.", - "id": "to-create-a-new-cluster-1527868185648", - "title": "To create a new cluster" - } - ], - "DeleteCluster": [ - { - "input": { - "name": "devel" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command deletes a cluster named `devel` in your default region.", - "id": "to-delete-a-cluster-1527868641252", - "title": "To delete a cluster" - } - ], - "DescribeCluster": [ - { - "input": { - "name": "devel" - }, - "output": { - "cluster": { - "version": "1.10", - "name": "devel", - "arn": "arn:aws:eks:us-west-2:012345678910:cluster/devel", - "certificateAuthority": { - "data": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUN5RENDQWJDZ0F3SUJBZ0lCQURBTkJna3Foa2lHOXcwQkFRc0ZBREFWTVJNd0VRWURWUVFERXdwcmRXSmwKY201bGRHVnpNQjRYRFRFNE1EVXpNVEl6TVRFek1Wb1hEVEk0TURVeU9ESXpNVEV6TVZvd0ZURVRNQkVHQTFVRQpBeE1LYTNWaVpYSnVaWFJsY3pDQ0FTSXdEUVlKS29aSWh2Y05BUUVCQlFBRGdnRVBBRENDQVFvQ2dnRUJBTTZWCjVUaG4rdFcySm9Xa2hQMzRlVUZMNitaRXJOZGIvWVdrTmtDdWNGS2RaaXl2TjlMVmdvUmV2MjlFVFZlN1ZGbSsKUTJ3ZURyRXJiQyt0dVlibkFuN1ZLYmE3ay9hb1BHekZMdmVnb0t6b0M1N2NUdGVwZzRIazRlK2tIWHNaME10MApyb3NzcjhFM1ROeExETnNJTThGL1cwdjhsTGNCbWRPcjQyV2VuTjFHZXJnaDNSZ2wzR3JIazBnNTU0SjFWenJZCm9hTi8zODFUczlOTFF2QTBXb0xIcjBFRlZpTFdSZEoyZ3lXaC9ybDVyOFNDOHZaQXg1YW1BU0hVd01aTFpWRC8KTDBpOW4wRVM0MkpVdzQyQmxHOEdpd3NhTkJWV3lUTHZKclNhRXlDSHFtVVZaUTFDZkFXUjl0L3JleVVOVXM3TApWV1FqM3BFbk9RMitMSWJrc0RzQ0F3RUFBYU1qTUNFd0RnWURWUjBQQVFIL0JBUURBZ0trTUE4R0ExVWRFd0VCCi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFNZ3RsQ1dIQ2U2YzVHMXl2YlFTS0Q4K2hUalkKSm1NSG56L2EvRGt0WG9YUjFVQzIrZUgzT1BZWmVjRVZZZHVaSlZCckNNQ2VWR0ZkeWdBYlNLc1FxWDg0S2RXbAp1MU5QaERDSmEyRHliN2pVMUV6VThTQjFGZUZ5ZFE3a0hNS1E1blpBRVFQOTY4S01hSGUrSm0yQ2x1UFJWbEJVCjF4WlhTS1gzTVZ0K1Q0SU1EV2d6c3JRSjVuQkRjdEtLcUZtM3pKdVVubHo5ZEpVckdscEltMjVJWXJDckxYUFgKWkUwRUtRNWEzMHhkVWNrTHRGQkQrOEtBdFdqSS9yZUZPNzM1YnBMdVoyOTBaNm42QlF3elRrS0p4cnhVc3QvOAppNGsxcnlsaUdWMm5SSjBUYjNORkczNHgrYWdzYTRoSTFPbU90TFM0TmgvRXJxT3lIUXNDc2hEQUtKUT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=" - }, - "createdAt": 1527807879.988, - "endpoint": "https://A0DCCD80A04F01705DD065655C30CC3D.yl4.us-west-2.eks.amazonaws.com", - "resourcesVpcConfig": { - "securityGroupIds": [ - "sg-6979fe18" - ], - "subnetIds": [ - "subnet-6782e71e", - "subnet-e7e761ac" - ], - "vpcId": "vpc-950809ec" - }, - "roleArn": "arn:aws:iam::012345678910:role/eks-service-role-AWSServiceRoleForAmazonEKS-J7ONKE3BQ4PI", - "status": "ACTIVE" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example command provides a description of the specified cluster in your default region.", - "id": "to-describe-a-cluster-1527868708512", - "title": "To describe a cluster" - } - ], - "ListClusters": [ - { - "input": { - }, - "output": { - "clusters": [ - "devel", - "prod" - ] - }, - "comments": { 
- "input": { - }, - "output": { - } - }, - "description": "This example command lists all of your available clusters in your default region.", - "id": "to-list-your-available-clusters-1527868801040", - "title": "To list your available clusters" - } - ], - "ListTagsForResource": [ - { - "input": { - "resourceArn": "arn:aws:eks:us-west-2:012345678910:cluster/beta" - }, - "output": { - "tags": { - "aws:tag:domain": "beta" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all of the tags for the `beta` cluster.", - "id": "to-list-tags-for-a-cluster-1568666903378", - "title": "To list tags for a cluster" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/eks/2017-11-01/paginators-1.json deleted file mode 100644 index 652eef1..0000000 --- a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/paginators-1.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "pagination": { - "ListClusters": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "clusters" - }, - "ListUpdates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "updateIds" - }, - "ListNodegroups": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "nodegroups" - }, - "ListFargateProfiles": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "fargateProfileNames" - }, - "DescribeAddonVersions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "addons" - }, - "ListAddons": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "addons" - }, - "ListIdentityProviderConfigs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "identityProviderConfigs" - }, - "ListEksAnywhereSubscriptions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "subscriptions" - }, - "ListPodIdentityAssociations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "associations" - }, - "ListAccessEntries": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "accessEntries" - }, - "ListAccessPolicies": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "accessPolicies" - }, - "ListAssociatedAccessPolicies": { - "input_token": "nextToken", - "limit_key": "maxResults", - "non_aggregate_keys": [ - "clusterName", - "principalArn" - ], - "output_token": "nextToken", - "result_key": "associatedAccessPolicies" - }, - "ListInsights": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "insights" - }, - "DescribeClusterVersions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "clusterVersions" - }, - "ListCapabilities": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "capabilities" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/eks/2017-11-01/service-2.json.gz deleted file mode 
100644 index 83c8913..0000000 Binary files a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/service-2.sdk-extras.json b/venv/Lib/site-packages/botocore/data/eks/2017-11-01/service-2.sdk-extras.json deleted file mode 100644 index b636c21..0000000 --- a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/service-2.sdk-extras.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "version": 1.0, - "merge": { - "metadata": { - "serviceId":"EKS" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/eks/2017-11-01/waiters-2.json deleted file mode 100644 index c068909..0000000 --- a/venv/Lib/site-packages/botocore/data/eks/2017-11-01/waiters-2.json +++ /dev/null @@ -1,177 +0,0 @@ -{ - "version": 2, - "waiters": { - "ClusterActive": { - "delay": 30, - "operation": "DescribeCluster", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "DELETING", - "matcher": "path", - "state": "failure", - "argument": "cluster.status" - }, - { - "expected": "FAILED", - "matcher": "path", - "state": "failure", - "argument": "cluster.status" - }, - { - "expected": "ACTIVE", - "matcher": "path", - "state": "success", - "argument": "cluster.status" - } - ] - }, - "ClusterDeleted": { - "delay": 30, - "operation": "DescribeCluster", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "ACTIVE", - "matcher": "path", - "state": "failure", - "argument": "cluster.status" - }, - { - "expected": "CREATING", - "matcher": "path", - "state": "failure", - "argument": "cluster.status" - }, - { - "expected": "PENDING", - "matcher": "path", - "state": "failure", - "argument": "cluster.status" - }, - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "success" - } - ] - }, - "NodegroupActive": { - "delay": 30, - "operation": "DescribeNodegroup", - "maxAttempts": 80, - "acceptors": [ - { - "expected": "CREATE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "nodegroup.status" - }, - { - "expected": "ACTIVE", - "matcher": "path", - "state": "success", - "argument": "nodegroup.status" - } - ] - }, - "NodegroupDeleted": { - "delay": 30, - "operation": "DescribeNodegroup", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "DELETE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "nodegroup.status" - }, - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "success" - } - ] - }, - "AddonActive": { - "delay": 10, - "operation": "DescribeAddon", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "CREATE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "addon.status" - }, - { - "expected": "DEGRADED", - "matcher": "path", - "state": "failure", - "argument": "addon.status" - }, - { - "expected": "ACTIVE", - "matcher": "path", - "state": "success", - "argument": "addon.status" - } - ] - }, - "AddonDeleted": { - "delay": 10, - "operation": "DescribeAddon", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "DELETE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "addon.status" - }, - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "success" - } - ] - }, - "FargateProfileActive": { - "delay": 10, - "operation": "DescribeFargateProfile", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "CREATE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "fargateProfile.status" - }, - { - "expected": 
"ACTIVE", - "matcher": "path", - "state": "success", - "argument": "fargateProfile.status" - } - ] - }, - "FargateProfileDeleted": { - "delay": 30, - "operation": "DescribeFargateProfile", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "DELETE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "fargateProfile.status" - }, - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "success" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index d8d19de..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/paginators-1.json deleted file mode 100644 index 8724740..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/paginators-1.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "pagination": { - "DescribeCacheClusters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheClusters" - }, - "DescribeCacheEngineVersions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheEngineVersions" - }, - "DescribeCacheParameterGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheParameterGroups" - }, - "DescribeCacheParameters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Parameters" - }, - "DescribeCacheSecurityGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheSecurityGroups" - }, - "DescribeCacheSubnetGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheSubnetGroups" - }, - "DescribeEngineDefaultParameters": { - "input_token": "Marker", - "output_token": "EngineDefaults.Marker", - "limit_key": "MaxRecords", - "result_key": "EngineDefaults.Parameters" - }, - "DescribeEvents": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Events" - }, - "DescribeReservedCacheNodes": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedCacheNodes" - }, - "DescribeReservedCacheNodesOfferings": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedCacheNodesOfferings" - }, - "DescribeReplicationGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReplicationGroups" - }, - "DescribeSnapshots": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Snapshots" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/service-2.json.gz deleted file mode 100644 index 21ef48b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/waiters-2.json 
b/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/waiters-2.json deleted file mode 100644 index ccb904a..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticache/2014-09-30/waiters-2.json +++ /dev/null @@ -1,139 +0,0 @@ -{ - "version": 2, - "waiters": { - "CacheClusterAvailable": { - "delay": 30, - "operation": "DescribeCacheClusters", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "CacheClusters[].CacheClusterStatus" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - }, - { - "expected": "incompatible-network", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - }, - { - "expected": "restore-failed", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - } - ] - }, - "CacheClusterDeleted": { - "delay": 30, - "operation": "DescribeCacheClusters", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "CacheClusterNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "CacheClusters[].CacheClusterStatus" - } - ] - }, - "ReplicationGroupAvailable": { - "delay": 30, - "operation": "DescribeReplicationGroups", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "ReplicationGroups[].Status" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - }, - { - "expected": "incompatible-network", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - }, - { - "expected": "restore-failed", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - } - ] - }, - "ReplicationGroupDeleted": { - "delay": 30, - "operation": "DescribeReplicationGroups", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "ReplicationGroupNotFoundFault", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "ReplicationGroups[].Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6eb9b55..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/examples-1.json 
b/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/examples-1.json deleted file mode 100644 index f1d21bd..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/examples-1.json +++ /dev/null @@ -1,3149 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddTagsToResource": [ - { - "input": { - "ResourceName": "arn:aws:elasticache:us-east-1:1234567890:cluster:my-mem-cluster", - "Tags": [ - { - "Key": "APIVersion", - "Value": "20150202" - }, - { - "Key": "Service", - "Value": "ElastiCache" - } - ] - }, - "output": { - "TagList": [ - { - "Key": "APIVersion", - "Value": "20150202" - }, - { - "Key": "Service", - "Value": "ElastiCache" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Adds up to 10 tags, key/value pairs, to a cluster or snapshot resource.", - "id": "addtagstoresource-1482430264385", - "title": "AddTagsToResource" - } - ], - "AuthorizeCacheSecurityGroupIngress": [ - { - "input": { - "CacheSecurityGroupName": "my-sec-grp", - "EC2SecurityGroupName": "my-ec2-sec-grp", - "EC2SecurityGroupOwnerId": "1234567890" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Allows network ingress to a cache security group. Applications using ElastiCache must be running on Amazon EC2. Amazon EC2 security groups are used as the authorization mechanism.", - "id": "authorizecachecachesecuritygroupingress-1483046446206", - "title": "AuthorizeCacheCacheSecurityGroupIngress" - } - ], - "CopySnapshot": [ - { - "input": { - "SourceSnapshotName": "my-snapshot", - "TargetBucket": "", - "TargetSnapshotName": "my-snapshot-copy" - }, - "output": { - "Snapshot": { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-21T22:24:04.955Z", - "CacheClusterId": "my-redis4", - "CacheNodeType": "cache.m3.large", - "CacheParameterGroupName": "default.redis3.2", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeSnapshots": [ - { - "CacheNodeCreateTime": "2016-12-21T22:24:04.955Z", - "CacheNodeId": "0001", - "CacheSize": "3 MB", - "SnapshotCreateTime": "2016-12-28T07:00:52Z" - } - ], - "NumCacheNodes": 1, - "Port": 6379, - "PreferredAvailabilityZone": "us-east-1c", - "PreferredMaintenanceWindow": "tue:09:30-tue:10:30", - "SnapshotName": "my-snapshot-copy", - "SnapshotRetentionLimit": 7, - "SnapshotSource": "manual", - "SnapshotStatus": "creating", - "SnapshotWindow": "07:00-08:00", - "VpcId": "vpc-3820329f3" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Copies a snapshot to a specified name.", - "id": "copysnapshot-1482961393820", - "title": "CopySnapshot" - } - ], - "CreateCacheCluster": [ - { - "input": { - "AZMode": "cross-az", - "CacheClusterId": "my-memcached-cluster", - "CacheNodeType": "cache.r3.large", - "CacheSubnetGroupName": "default", - "Engine": "memcached", - "EngineVersion": "1.4.24", - "NumCacheNodes": 2, - "Port": 11211 - }, - "output": { - "CacheCluster": { - "AutoMinorVersionUpgrade": true, - "CacheClusterId": "my-memcached-cluster", - "CacheClusterStatus": "creating", - "CacheNodeType": "cache.r3.large", - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.memcached1.4", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https://console.aws.amazon.com/elasticache/home#client-download:", - "Engine": "memcached", - "EngineVersion": "1.4.24", - "NumCacheNodes": 2, - 
"PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "Multiple", - "PreferredMaintenanceWindow": "wed:09:00-wed:10:00" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a Memcached cluster with 2 nodes. ", - "id": "createcachecluster-1474994727381", - "title": "CreateCacheCluster" - }, - { - "input": { - "AutoMinorVersionUpgrade": true, - "CacheClusterId": "my-redis", - "CacheNodeType": "cache.r3.larage", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NumCacheNodes": 1, - "Port": 6379, - "PreferredAvailabilityZone": "us-east-1c", - "SnapshotRetentionLimit": 7 - }, - "output": { - "CacheCluster": { - "AutoMinorVersionUpgrade": true, - "CacheClusterId": "my-redis", - "CacheClusterStatus": "creating", - "CacheNodeType": "cache.m3.large", - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.redis3.2", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https: //console.aws.amazon.com/elasticache/home#client-download: ", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NumCacheNodes": 1, - "PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "us-east-1c", - "PreferredMaintenanceWindow": "fri: 05: 30-fri: 06: 30", - "SnapshotRetentionLimit": 7, - "SnapshotWindow": "10: 00-11: 00" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a Redis cluster with 1 node. ", - "id": "createcachecluster-1474994727381", - "title": "CreateCacheCluster" - } - ], - "CreateCacheParameterGroup": [ - { - "input": { - "CacheParameterGroupFamily": "redis2.8", - "CacheParameterGroupName": "custom-redis2-8", - "Description": "Custom Redis 2.8 parameter group." - }, - "output": { - "CacheParameterGroup": { - "CacheParameterGroupFamily": "redis2.8", - "CacheParameterGroupName": "custom-redis2-8", - "Description": "Custom Redis 2.8 parameter group." - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates the Amazon ElastiCache parameter group custom-redis2-8.", - "id": "createcacheparametergroup-1474997699362", - "title": "CreateCacheParameterGroup" - } - ], - "CreateCacheSecurityGroup": [ - { - "input": { - "CacheSecurityGroupName": "my-cache-sec-grp", - "Description": "Example ElastiCache security group." - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates an ElastiCache security group. 
ElastiCache security groups are only for clusters not running in an AWS VPC.", - "id": "createcachesecuritygroup-1483041506604", - "title": "CreateCacheSecurityGroup" - } - ], - "CreateCacheSubnetGroup": [ - { - "input": { - "CacheSubnetGroupDescription": "Sample subnet group", - "CacheSubnetGroupName": "my-sn-grp2", - "SubnetIds": [ - "subnet-6f28c982", - "subnet-bcd382f3", - "subnet-845b3e7c0" - ] - }, - "output": { - "CacheSubnetGroup": { - "CacheSubnetGroupDescription": "My subnet group.", - "CacheSubnetGroupName": "my-sn-grp", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-east-1a" - }, - "SubnetIdentifier": "subnet-6f28c982" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-bcd382f3" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1b" - }, - "SubnetIdentifier": "subnet-845b3e7c0" - } - ], - "VpcId": "vpc-91280df6" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a new cache subnet group.", - "id": "createcachesubnet-1483042274558", - "title": "CreateCacheSubnet" - } - ], - "CreateReplicationGroup": [ - { - "input": { - "AutomaticFailoverEnabled": true, - "CacheNodeType": "cache.m3.medium", - "Engine": "redis", - "EngineVersion": "2.8.24", - "NumCacheClusters": 3, - "ReplicationGroupDescription": "A Redis replication group.", - "ReplicationGroupId": "my-redis-rg", - "SnapshotRetentionLimit": 30 - }, - "output": { - "ReplicationGroup": { - "AutomaticFailover": "enabling", - "Description": "A Redis replication group.", - "MemberClusters": [ - "my-redis-rg-001", - "my-redis-rg-002", - "my-redis-rg-003" - ], - "PendingModifiedValues": { - }, - "ReplicationGroupId": "my-redis-rg", - "SnapshottingClusterId": "my-redis-rg-002", - "Status": "creating" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a Redis replication group with 3 nodes.", - "id": "createcachereplicationgroup-1474998730655", - "title": "CreateCacheReplicationGroup" - }, - { - "input": { - "AutoMinorVersionUpgrade": true, - "CacheNodeType": "cache.m3.medium", - "CacheParameterGroupName": "default.redis3.2.cluster.on", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeGroupConfiguration": [ - { - "PrimaryAvailabilityZone": "us-east-1c", - "ReplicaAvailabilityZones": [ - "us-east-1b" - ], - "ReplicaCount": 1, - "Slots": "0-8999" - }, - { - "PrimaryAvailabilityZone": "us-east-1a", - "ReplicaAvailabilityZones": [ - "us-east-1a", - "us-east-1c" - ], - "ReplicaCount": 2, - "Slots": "9000-16383" - } - ], - "NumNodeGroups": 2, - "ReplicationGroupDescription": "A multi-sharded replication group", - "ReplicationGroupId": "clustered-redis-rg", - "SnapshotRetentionLimit": 8 - }, - "output": { - "ReplicationGroup": { - "AutomaticFailover": "enabled", - "Description": "Sharded replication group", - "MemberClusters": [ - "rc-rg3-0001-001", - "rc-rg3-0001-002", - "rc-rg3-0002-001", - "rc-rg3-0002-002", - "rc-rg3-0002-003" - ], - "PendingModifiedValues": { - }, - "ReplicationGroupId": "clustered-redis-rg", - "SnapshotRetentionLimit": 8, - "SnapshotWindow": "05:30-06:30", - "Status": "creating" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a Redis (cluster mode enabled) replication group with two shards. 
One shard has one read replica node and the other shard has two read replicas.", - "id": "createreplicationgroup-1483657035585", - "title": "CreateReplicationGroup" - } - ], - "CreateSnapshot": [ - { - "input": { - "CacheClusterId": "onenoderedis", - "SnapshotName": "snapshot-1" - }, - "output": { - "Snapshot": { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2017-02-03T15:43:36.278Z", - "CacheClusterId": "onenoderedis", - "CacheNodeType": "cache.m3.medium", - "CacheParameterGroupName": "default.redis3.2", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeSnapshots": [ - { - "CacheNodeCreateTime": "2017-02-03T15:43:36.278Z", - "CacheNodeId": "0001", - "CacheSize": "" - } - ], - "NumCacheNodes": 1, - "Port": 6379, - "PreferredAvailabilityZone": "us-west-2c", - "PreferredMaintenanceWindow": "sat:08:00-sat:09:00", - "SnapshotName": "snapshot-1", - "SnapshotRetentionLimit": 1, - "SnapshotSource": "manual", - "SnapshotStatus": "creating", - "SnapshotWindow": "00:00-01:00", - "VpcId": "vpc-73c3cd17" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a snapshot of a non-clustered Redis cluster that has only one node.", - "id": "createsnapshot-1474999681024", - "title": "CreateSnapshot - NonClustered Redis, no read-replicas" - }, - { - "input": { - "CacheClusterId": "threenoderedis-001", - "SnapshotName": "snapshot-2" - }, - "output": { - "Snapshot": { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2017-02-03T15:43:36.278Z", - "CacheClusterId": "threenoderedis-001", - "CacheNodeType": "cache.m3.medium", - "CacheParameterGroupName": "default.redis3.2", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeSnapshots": [ - { - "CacheNodeCreateTime": "2017-02-03T15:43:36.278Z", - "CacheNodeId": "0001", - "CacheSize": "" - } - ], - "NumCacheNodes": 1, - "Port": 6379, - "PreferredAvailabilityZone": "us-west-2c", - "PreferredMaintenanceWindow": "sat:08:00-sat:09:00", - "SnapshotName": "snapshot-2", - "SnapshotRetentionLimit": 1, - "SnapshotSource": "manual", - "SnapshotStatus": "creating", - "SnapshotWindow": "00:00-01:00", - "VpcId": "vpc-73c3cd17" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a snapshot of a non-clustered Redis cluster that has only three nodes, primary and two read-replicas. 
CacheClusterId must be a specific node in the cluster.", - "id": "createsnapshot-1474999681024", - "title": "CreateSnapshot - NonClustered Redis, 2 read-replicas" - }, - { - "input": { - "ReplicationGroupId": "clusteredredis", - "SnapshotName": "snapshot-2x5" - }, - "output": { - "Snapshot": { - "AutoMinorVersionUpgrade": true, - "AutomaticFailover": "enabled", - "CacheNodeType": "cache.m3.medium", - "CacheParameterGroupName": "default.redis3.2.cluster.on", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeSnapshots": [ - { - "CacheSize": "", - "NodeGroupId": "0001" - }, - { - "CacheSize": "", - "NodeGroupId": "0002" - } - ], - "NumNodeGroups": 2, - "Port": 6379, - "PreferredMaintenanceWindow": "mon:09:30-mon:10:30", - "ReplicationGroupDescription": "Redis cluster with 2 shards.", - "ReplicationGroupId": "clusteredredis", - "SnapshotName": "snapshot-2x5", - "SnapshotRetentionLimit": 1, - "SnapshotSource": "manual", - "SnapshotStatus": "creating", - "SnapshotWindow": "12:00-13:00", - "VpcId": "vpc-73c3cd17" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a snapshot of a clustered Redis cluster that has 2 shards, each with a primary and 4 read-replicas.", - "id": "createsnapshot-clustered-redis-1486144841758", - "title": "CreateSnapshot-clustered Redis" - } - ], - "DeleteCacheCluster": [ - { - "input": { - "CacheClusterId": "my-memcached" - }, - "output": { - "CacheCluster": { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-22T16:05:17.314Z", - "CacheClusterId": "my-memcached", - "CacheClusterStatus": "deleting", - "CacheNodeType": "cache.r3.large", - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.memcached1.4", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https://console.aws.amazon.com/elasticache/home#client-download:", - "ConfigurationEndpoint": { - "Address": "my-memcached2.ameaqx.cfg.use1.cache.amazonaws.com", - "Port": 11211 - }, - "Engine": "memcached", - "EngineVersion": "1.4.24", - "NumCacheNodes": 2, - "PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "Multiple", - "PreferredMaintenanceWindow": "tue:07:30-tue:08:30" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes an Amazon ElastiCache cluster.", - "id": "deletecachecluster-1475010605291", - "title": "DeleteCacheCluster" - } - ], - "DeleteCacheParameterGroup": [ - { - "input": { - "CacheParameterGroupName": "custom-mem1-4" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the Amazon ElastiCache parameter group custom-mem1-4.", - "id": "deletecacheparametergroup-1475010933957", - "title": "DeleteCacheParameterGroup" - } - ], - "DeleteCacheSecurityGroup": [ - { - "input": { - "CacheSecurityGroupName": "my-sec-group" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes a cache security group.", - "id": "deletecachesecuritygroup-1483046967507", - "title": "DeleteCacheSecurityGroup" - } - ], - "DeleteCacheSubnetGroup": [ - { - "input": { - "CacheSubnetGroupName": "my-subnet-group" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the Amazon ElastiCache subnet group my-subnet-group.", - "id": "deletecachesubnetgroup-1475011431325", - "title": "DeleteCacheSubnetGroup" - } - ], - "DeleteReplicationGroup": [ - { - 
"input": { - "ReplicationGroupId": "my-redis-rg", - "RetainPrimaryCluster": false - }, - "output": { - "ReplicationGroup": { - "AutomaticFailover": "disabled", - "Description": "simple redis cluster", - "PendingModifiedValues": { - }, - "ReplicationGroupId": "my-redis-rg", - "Status": "deleting" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the Amazon ElastiCache replication group my-redis-rg.", - "id": "deletereplicationgroup-1475011641804", - "title": "DeleteReplicationGroup" - } - ], - "DeleteSnapshot": [ - { - "input": { - "SnapshotName": "snapshot-20161212" - }, - "output": { - "Snapshot": { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-21T22:27:12.543Z", - "CacheClusterId": "my-redis5", - "CacheNodeType": "cache.m3.large", - "CacheParameterGroupName": "default.redis3.2", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeSnapshots": [ - { - "CacheNodeCreateTime": "2016-12-21T22:27:12.543Z", - "CacheNodeId": "0001", - "CacheSize": "3 MB", - "SnapshotCreateTime": "2016-12-21T22:30:26Z" - } - ], - "NumCacheNodes": 1, - "Port": 6379, - "PreferredAvailabilityZone": "us-east-1c", - "PreferredMaintenanceWindow": "fri:05:30-fri:06:30", - "SnapshotName": "snapshot-20161212", - "SnapshotRetentionLimit": 7, - "SnapshotSource": "manual", - "SnapshotStatus": "deleting", - "SnapshotWindow": "10:00-11:00", - "VpcId": "vpc-91280df6" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the Redis snapshot snapshot-20160822.", - "id": "deletesnapshot-1475011945779", - "title": "DeleteSnapshot" - } - ], - "DescribeCacheClusters": [ - { - "input": { - "CacheClusterId": "my-mem-cluster" - }, - "output": { - "CacheClusters": [ - { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-21T21:59:43.794Z", - "CacheClusterId": "my-mem-cluster", - "CacheClusterStatus": "available", - "CacheNodeType": "cache.t2.medium", - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.memcached1.4", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https://console.aws.amazon.com/elasticache/home#client-download:", - "ConfigurationEndpoint": { - "Address": "my-mem-cluster.abcdef.cfg.use1.cache.amazonaws.com", - "Port": 11211 - }, - "Engine": "memcached", - "EngineVersion": "1.4.24", - "NumCacheNodes": 2, - "PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "Multiple", - "PreferredMaintenanceWindow": "wed:06:00-wed:07:00" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the details for up to 50 cache clusters.", - "id": "describecacheclusters-1475012269754", - "title": "DescribeCacheClusters" - }, - { - "input": { - "CacheClusterId": "my-mem-cluster", - "ShowCacheNodeInfo": true - }, - "output": { - "CacheClusters": [ - { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-21T21:59:43.794Z", - "CacheClusterId": "my-mem-cluster", - "CacheClusterStatus": "available", - "CacheNodeType": "cache.t2.medium", - "CacheNodes": [ - { - "CacheNodeCreateTime": "2016-12-21T21:59:43.794Z", - "CacheNodeId": "0001", - "CacheNodeStatus": "available", - "CustomerAvailabilityZone": "us-east-1b", - "Endpoint": { - "Address": "my-mem-cluster.ameaqx.0001.use1.cache.amazonaws.com", - "Port": 11211 - }, - "ParameterGroupStatus": "in-sync" - }, - { - 
"CacheNodeCreateTime": "2016-12-21T21:59:43.794Z", - "CacheNodeId": "0002", - "CacheNodeStatus": "available", - "CustomerAvailabilityZone": "us-east-1a", - "Endpoint": { - "Address": "my-mem-cluster.ameaqx.0002.use1.cache.amazonaws.com", - "Port": 11211 - }, - "ParameterGroupStatus": "in-sync" - } - ], - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.memcached1.4", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https://console.aws.amazon.com/elasticache/home#client-download:", - "ConfigurationEndpoint": { - "Address": "my-mem-cluster.ameaqx.cfg.use1.cache.amazonaws.com", - "Port": 11211 - }, - "Engine": "memcached", - "EngineVersion": "1.4.24", - "NumCacheNodes": 2, - "PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "Multiple", - "PreferredMaintenanceWindow": "wed:06:00-wed:07:00" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the details for the cache cluster my-mem-cluster.", - "id": "describecacheclusters-1475012269754", - "title": "DescribeCacheClusters" - } - ], - "DescribeCacheEngineVersions": [ - { - "input": { - }, - "output": { - "CacheEngineVersions": [ - { - "CacheEngineDescription": "memcached", - "CacheEngineVersionDescription": "memcached version 1.4.14", - "CacheParameterGroupFamily": "memcached1.4", - "Engine": "memcached", - "EngineVersion": "1.4.14" - }, - { - "CacheEngineDescription": "memcached", - "CacheEngineVersionDescription": "memcached version 1.4.24", - "CacheParameterGroupFamily": "memcached1.4", - "Engine": "memcached", - "EngineVersion": "1.4.24" - }, - { - "CacheEngineDescription": "memcached", - "CacheEngineVersionDescription": "memcached version 1.4.33", - "CacheParameterGroupFamily": "memcached1.4", - "Engine": "memcached", - "EngineVersion": "1.4.33" - }, - { - "CacheEngineDescription": "memcached", - "CacheEngineVersionDescription": "memcached version 1.4.5", - "CacheParameterGroupFamily": "memcached1.4", - "Engine": "memcached", - "EngineVersion": "1.4.5" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.6.13", - "CacheParameterGroupFamily": "redis2.6", - "Engine": "redis", - "EngineVersion": "2.6.13" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.19", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.19" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.21", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.21" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.22 R5", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.22" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.23 R4", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.23" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.24 R3", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.24" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.6", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.6" - }, - { - 
"CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 3.2.4", - "CacheParameterGroupFamily": "redis3.2", - "Engine": "redis", - "EngineVersion": "3.2.4" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the details for up to 25 Memcached and Redis cache engine versions.", - "id": "describecacheengineversions-1475012638790", - "title": "DescribeCacheEngineVersions" - }, - { - "input": { - "DefaultOnly": false, - "Engine": "redis", - "MaxRecords": 50 - }, - "output": { - "CacheEngineVersions": [ - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.6.13", - "CacheParameterGroupFamily": "redis2.6", - "Engine": "redis", - "EngineVersion": "2.6.13" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.19", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.19" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.21", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.21" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.22 R5", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.22" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.23 R4", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.23" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.24 R3", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.24" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 2.8.6", - "CacheParameterGroupFamily": "redis2.8", - "Engine": "redis", - "EngineVersion": "2.8.6" - }, - { - "CacheEngineDescription": "Redis", - "CacheEngineVersionDescription": "redis version 3.2.4", - "CacheParameterGroupFamily": "redis3.2", - "Engine": "redis", - "EngineVersion": "3.2.4" - } - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the details for up to 50 Redis cache engine versions.", - "id": "describecacheengineversions-1475012638790", - "title": "DescribeCacheEngineVersions" - } - ], - "DescribeCacheParameterGroups": [ - { - "input": { - "CacheParameterGroupName": "custom-mem1-4" - }, - "output": { - "CacheParameterGroups": [ - { - "CacheParameterGroupFamily": "memcached1.4", - "CacheParameterGroupName": "custom-mem1-4", - "Description": "Custom memcache param group" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a list of cache parameter group descriptions. 
If a cache parameter group name is specified, the list contains only the descriptions for that group.", - "id": "describecacheparametergroups-1483045457557", - "title": "DescribeCacheParameterGroups" - } - ], - "DescribeCacheParameters": [ - { - "input": { - "CacheParameterGroupName": "custom-redis2-8", - "MaxRecords": 100, - "Source": "user" - }, - "output": { - "Marker": "", - "Parameters": [ - { - "AllowedValues": "yes,no", - "ChangeType": "requires-reboot", - "DataType": "string", - "Description": "Apply rehashing or not.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "activerehashing", - "ParameterValue": "yes", - "Source": "system" - }, - { - "AllowedValues": "always,everysec,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "fsync policy for AOF persistence", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "appendfsync", - "ParameterValue": "everysec", - "Source": "system" - }, - { - "AllowedValues": "yes,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "Enable Redis persistence.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "appendonly", - "ParameterValue": "no", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Normal client output buffer hard limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-normal-hard-limit", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Normal client output buffer soft limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-normal-soft-limit", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Normal client output buffer soft limit in seconds.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-normal-soft-seconds", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Pubsub client output buffer hard limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-pubsub-hard-limit", - "ParameterValue": "33554432", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Pubsub client output buffer soft limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-pubsub-soft-limit", - "ParameterValue": "8388608", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Pubsub client output buffer soft limit in seconds.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-pubsub-soft-seconds", - "ParameterValue": "60", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Slave client output buffer soft limit in seconds.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-slave-soft-seconds", - 
"ParameterValue": "60", - "Source": "system" - }, - { - "AllowedValues": "yes,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "If enabled, clients who attempt to write to a read-only slave will be disconnected. Applicable to 2.8.23 and higher.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.23", - "ParameterName": "close-on-slave-write", - "ParameterValue": "yes", - "Source": "system" - }, - { - "AllowedValues": "1-1200000", - "ChangeType": "requires-reboot", - "DataType": "integer", - "Description": "Set the number of databases.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "databases", - "ParameterValue": "16", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The maximum number of hash entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "hash-max-ziplist-entries", - "ParameterValue": "512", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The threshold of biggest hash entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "hash-max-ziplist-value", - "ParameterValue": "64", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The maximum number of list entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "list-max-ziplist-entries", - "ParameterValue": "512", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The threshold of biggest list entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "list-max-ziplist-value", - "ParameterValue": "64", - "Source": "system" - }, - { - "AllowedValues": "5000", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Max execution time of a Lua script in milliseconds. 
0 for unlimited execution without warnings.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "lua-time-limit", - "ParameterValue": "5000", - "Source": "system" - }, - { - "AllowedValues": "1-65000", - "ChangeType": "requires-reboot", - "DataType": "integer", - "Description": "The maximum number of Redis clients.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxclients", - "ParameterValue": "65000", - "Source": "system" - }, - { - "AllowedValues": "volatile-lru,allkeys-lru,volatile-random,allkeys-random,volatile-ttl,noeviction", - "ChangeType": "immediate", - "DataType": "string", - "Description": "Max memory policy.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxmemory-policy", - "ParameterValue": "volatile-lru", - "Source": "system" - }, - { - "AllowedValues": "1-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Max memory samples.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxmemory-samples", - "ParameterValue": "3", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Maximum number of seconds within which the master must receive a ping from a slave to take writes. Use this parameter together with min-slaves-to-write to regulate when the master stops accepting writes. Setting this value to 0 means the master always takes writes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "min-slaves-max-lag", - "ParameterValue": "10", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Number of slaves that must be connected in order for master to take writes. Use this parameter together with min-slaves-max-lag to regulate when the master stops accepting writes. Setting this to 0 means the master always takes writes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "min-slaves-to-write", - "ParameterValue": "0", - "Source": "system" - }, - { - "ChangeType": "immediate", - "DataType": "string", - "Description": "The keyspace events for Redis to notify Pub/Sub clients about. By default all notifications are disabled", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "notify-keyspace-events", - "Source": "system" - }, - { - "AllowedValues": "16384-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The replication backlog size in bytes for PSYNC. This is the size of the buffer which accumulates slave data when slave is disconnected for some time, so that when slave reconnects again, only transfer the portion of data which the slave missed. Minimum value is 16K.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "repl-backlog-size", - "ParameterValue": "1048576", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The amount of time in seconds after the master no longer have any slaves connected for the master to free the replication backlog. 
A value of 0 means to never release the backlog.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "repl-backlog-ttl", - "ParameterValue": "3600", - "Source": "system" - }, - { - "AllowedValues": "11-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The timeout in seconds for bulk transfer I/O during sync and master timeout from the perspective of the slave, and slave timeout from the perspective of the master.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "repl-timeout", - "ParameterValue": "60", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The amount of memory reserved for non-cache memory usage, in bytes. You may want to increase this parameter for nodes with read replicas, AOF enabled, etc, to reduce swap usage.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "reserved-memory", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The limit in the size of the set in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "set-max-intset-entries", - "ParameterValue": "512", - "Source": "system" - }, - { - "AllowedValues": "yes,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "Configures if chaining of slaves is allowed", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "slave-allow-chaining", - "ParameterValue": "no", - "Source": "system" - }, - { - "AllowedValues": "-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The execution time, in microseconds, to exceed in order for the command to get logged. Note that a negative number disables the slow log, while a value of zero forces the logging of every command.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "slowlog-log-slower-than", - "ParameterValue": "10000", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The length of the slow log. There is no limit to this length. Just be aware that it will consume memory. 
You can reclaim memory used by the slow log with SLOWLOG RESET.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "slowlog-max-len", - "ParameterValue": "128", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "If non-zero, send ACKs every given number of seconds.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "tcp-keepalive", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0,20-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Close connection if client is idle for a given number of seconds, or never if 0.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "timeout", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The maximum number of sorted set entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "zset-max-ziplist-entries", - "ParameterValue": "128", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The threshold of biggest sorted set entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "zset-max-ziplist-value", - "ParameterValue": "64", - "Source": "system" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists up to 100 user parameter values for the parameter group custom.redis2.8.", - "id": "describecacheparameters-1475013576900", - "title": "DescribeCacheParameters" - } - ], - "DescribeCacheSecurityGroups": [ - { - "input": { - "CacheSecurityGroupName": "my-sec-group" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a list of cache security group descriptions. 
If a cache security group name is specified, the list contains only the description of that group.", - "id": "describecachesecuritygroups-1483047200801", - "title": "DescribeCacheSecurityGroups" - } - ], - "DescribeCacheSubnetGroups": [ - { - "input": { - "MaxRecords": 25 - }, - "output": { - "CacheSubnetGroups": [ - { - "CacheSubnetGroupDescription": "Default CacheSubnetGroup", - "CacheSubnetGroupName": "default", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-east-1a" - }, - "SubnetIdentifier": "subnet-1a2b3c4d" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-a1b2c3d4" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1e" - }, - "SubnetIdentifier": "subnet-abcd1234" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1b" - }, - "SubnetIdentifier": "subnet-1234abcd" - } - ], - "VpcId": "vpc-91280df6" - } - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes up to 25 cache subnet groups.", - "id": "describecachesubnetgroups-1482439214064", - "title": "DescribeCacheSubnetGroups" - } - ], - "DescribeEngineDefaultParameters": [ - { - "input": { - "CacheParameterGroupFamily": "redis2.8", - "MaxRecords": 25 - }, - "output": { - "EngineDefaults": { - "CacheNodeTypeSpecificParameters": [ - { - "AllowedValues": "0-", - "CacheNodeTypeSpecificValues": [ - { - "CacheNodeType": "cache.c1.xlarge", - "Value": "650117120" - }, - { - "CacheNodeType": "cache.m1.large", - "Value": "702545920" - }, - { - "CacheNodeType": "cache.m1.medium", - "Value": "309329920" - }, - { - "CacheNodeType": "cache.m1.small", - "Value": "94371840" - }, - { - "CacheNodeType": "cache.m1.xlarge", - "Value": "1488977920" - }, - { - "CacheNodeType": "cache.m2.2xlarge", - "Value": "3502243840" - }, - { - "CacheNodeType": "cache.m2.4xlarge", - "Value": "7088373760" - }, - { - "CacheNodeType": "cache.m2.xlarge", - "Value": "1709178880" - }, - { - "CacheNodeType": "cache.m3.2xlarge", - "Value": "2998927360" - }, - { - "CacheNodeType": "cache.m3.large", - "Value": "650117120" - }, - { - "CacheNodeType": "cache.m3.medium", - "Value": "309329920" - }, - { - "CacheNodeType": "cache.m3.xlarge", - "Value": "1426063360" - }, - { - "CacheNodeType": "cache.m4.10xlarge", - "Value": "16604761424" - }, - { - "CacheNodeType": "cache.m4.2xlarge", - "Value": "3188912636" - }, - { - "CacheNodeType": "cache.m4.4xlarge", - "Value": "6525729063" - }, - { - "CacheNodeType": "cache.m4.large", - "Value": "689259315" - }, - { - "CacheNodeType": "cache.m4.xlarge", - "Value": "1532850176" - }, - { - "CacheNodeType": "cache.r3.2xlarge", - "Value": "6081740800" - }, - { - "CacheNodeType": "cache.r3.4xlarge", - "Value": "12268339200" - }, - { - "CacheNodeType": "cache.r3.8xlarge", - "Value": "24536678400" - }, - { - "CacheNodeType": "cache.r3.large", - "Value": "1468006400" - }, - { - "CacheNodeType": "cache.r3.xlarge", - "Value": "3040870400" - }, - { - "CacheNodeType": "cache.t1.micro", - "Value": "14260633" - }, - { - "CacheNodeType": "cache.t2.medium", - "Value": "346134937" - }, - { - "CacheNodeType": "cache.t2.micro", - "Value": "58195968" - }, - { - "CacheNodeType": "cache.t2.small", - "Value": "166513868" - } - ], - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Slave client output buffer hard limit in bytes.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-slave-hard-limit", - "Source": "system" - }, - { - "AllowedValues": "0-", - 
"CacheNodeTypeSpecificValues": [ - { - "CacheNodeType": "cache.c1.xlarge", - "Value": "650117120" - }, - { - "CacheNodeType": "cache.m1.large", - "Value": "702545920" - }, - { - "CacheNodeType": "cache.m1.medium", - "Value": "309329920" - }, - { - "CacheNodeType": "cache.m1.small", - "Value": "94371840" - }, - { - "CacheNodeType": "cache.m1.xlarge", - "Value": "1488977920" - }, - { - "CacheNodeType": "cache.m2.2xlarge", - "Value": "3502243840" - }, - { - "CacheNodeType": "cache.m2.4xlarge", - "Value": "7088373760" - }, - { - "CacheNodeType": "cache.m2.xlarge", - "Value": "1709178880" - }, - { - "CacheNodeType": "cache.m3.2xlarge", - "Value": "2998927360" - }, - { - "CacheNodeType": "cache.m3.large", - "Value": "650117120" - }, - { - "CacheNodeType": "cache.m3.medium", - "Value": "309329920" - }, - { - "CacheNodeType": "cache.m3.xlarge", - "Value": "1426063360" - }, - { - "CacheNodeType": "cache.m4.10xlarge", - "Value": "16604761424" - }, - { - "CacheNodeType": "cache.m4.2xlarge", - "Value": "3188912636" - }, - { - "CacheNodeType": "cache.m4.4xlarge", - "Value": "6525729063" - }, - { - "CacheNodeType": "cache.m4.large", - "Value": "689259315" - }, - { - "CacheNodeType": "cache.m4.xlarge", - "Value": "1532850176" - }, - { - "CacheNodeType": "cache.r3.2xlarge", - "Value": "6081740800" - }, - { - "CacheNodeType": "cache.r3.4xlarge", - "Value": "12268339200" - }, - { - "CacheNodeType": "cache.r3.8xlarge", - "Value": "24536678400" - }, - { - "CacheNodeType": "cache.r3.large", - "Value": "1468006400" - }, - { - "CacheNodeType": "cache.r3.xlarge", - "Value": "3040870400" - }, - { - "CacheNodeType": "cache.t1.micro", - "Value": "14260633" - }, - { - "CacheNodeType": "cache.t2.medium", - "Value": "346134937" - }, - { - "CacheNodeType": "cache.t2.micro", - "Value": "58195968" - }, - { - "CacheNodeType": "cache.t2.small", - "Value": "166513868" - } - ], - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Slave client output buffer soft limit in bytes.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-slave-soft-limit", - "Source": "system" - }, - { - "AllowedValues": "0-", - "CacheNodeTypeSpecificValues": [ - { - "CacheNodeType": "cache.c1.xlarge", - "Value": "6501171200" - }, - { - "CacheNodeType": "cache.m1.large", - "Value": "7025459200" - }, - { - "CacheNodeType": "cache.m1.medium", - "Value": "3093299200" - }, - { - "CacheNodeType": "cache.m1.small", - "Value": "943718400" - }, - { - "CacheNodeType": "cache.m1.xlarge", - "Value": "14889779200" - }, - { - "CacheNodeType": "cache.m2.2xlarge", - "Value": "35022438400" - }, - { - "CacheNodeType": "cache.m2.4xlarge", - "Value": "70883737600" - }, - { - "CacheNodeType": "cache.m2.xlarge", - "Value": "17091788800" - }, - { - "CacheNodeType": "cache.m3.2xlarge", - "Value": "29989273600" - }, - { - "CacheNodeType": "cache.m3.large", - "Value": "6501171200" - }, - { - "CacheNodeType": "cache.m3.medium", - "Value": "2988441600" - }, - { - "CacheNodeType": "cache.m3.xlarge", - "Value": "14260633600" - }, - { - "CacheNodeType": "cache.m4.10xlarge", - "Value": "166047614239" - }, - { - "CacheNodeType": "cache.m4.2xlarge", - "Value": "31889126359" - }, - { - "CacheNodeType": "cache.m4.4xlarge", - "Value": "65257290629" - }, - { - "CacheNodeType": "cache.m4.large", - "Value": "6892593152" - }, - { - "CacheNodeType": "cache.m4.xlarge", - "Value": "15328501760" - }, - { - "CacheNodeType": "cache.r3.2xlarge", - "Value": "62495129600" - }, - { - "CacheNodeType": "cache.r3.4xlarge", - 
"Value": "126458265600" - }, - { - "CacheNodeType": "cache.r3.8xlarge", - "Value": "254384537600" - }, - { - "CacheNodeType": "cache.r3.large", - "Value": "14470348800" - }, - { - "CacheNodeType": "cache.r3.xlarge", - "Value": "30513561600" - }, - { - "CacheNodeType": "cache.t1.micro", - "Value": "142606336" - }, - { - "CacheNodeType": "cache.t2.medium", - "Value": "3461349376" - }, - { - "CacheNodeType": "cache.t2.micro", - "Value": "581959680" - }, - { - "CacheNodeType": "cache.t2.small", - "Value": "1665138688" - } - ], - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The maximum configurable amount of memory to use to store items, in bytes.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxmemory", - "Source": "system" - } - ], - "CacheParameterGroupFamily": "redis2.8", - "Marker": "bWluLXNsYXZlcy10by13cml0ZQ==", - "Parameters": [ - { - "AllowedValues": "yes,no", - "ChangeType": "requires-reboot", - "DataType": "string", - "Description": "Apply rehashing or not.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "activerehashing", - "ParameterValue": "yes", - "Source": "system" - }, - { - "AllowedValues": "always,everysec,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "fsync policy for AOF persistence", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "appendfsync", - "ParameterValue": "everysec", - "Source": "system" - }, - { - "AllowedValues": "yes,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "Enable Redis persistence.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "appendonly", - "ParameterValue": "no", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Normal client output buffer hard limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-normal-hard-limit", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Normal client output buffer soft limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-normal-soft-limit", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Normal client output buffer soft limit in seconds.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-normal-soft-seconds", - "ParameterValue": "0", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Pubsub client output buffer hard limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-pubsub-hard-limit", - "ParameterValue": "33554432", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Pubsub client output buffer soft limit in bytes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-pubsub-soft-limit", - "ParameterValue": "8388608", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Pubsub client output 
buffer soft limit in seconds.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-pubsub-soft-seconds", - "ParameterValue": "60", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Slave client output buffer soft limit in seconds.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "client-output-buffer-limit-slave-soft-seconds", - "ParameterValue": "60", - "Source": "system" - }, - { - "AllowedValues": "yes,no", - "ChangeType": "immediate", - "DataType": "string", - "Description": "If enabled, clients who attempt to write to a read-only slave will be disconnected. Applicable to 2.8.23 and higher.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.23", - "ParameterName": "close-on-slave-write", - "ParameterValue": "yes", - "Source": "system" - }, - { - "AllowedValues": "1-1200000", - "ChangeType": "requires-reboot", - "DataType": "integer", - "Description": "Set the number of databases.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "databases", - "ParameterValue": "16", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The maximum number of hash entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "hash-max-ziplist-entries", - "ParameterValue": "512", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The threshold of biggest hash entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "hash-max-ziplist-value", - "ParameterValue": "64", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The maximum number of list entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "list-max-ziplist-entries", - "ParameterValue": "512", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "The threshold of biggest list entries in order for the dataset to be compressed.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "list-max-ziplist-value", - "ParameterValue": "64", - "Source": "system" - }, - { - "AllowedValues": "5000", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Max execution time of a Lua script in milliseconds. 
0 for unlimited execution without warnings.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "lua-time-limit", - "ParameterValue": "5000", - "Source": "system" - }, - { - "AllowedValues": "1-65000", - "ChangeType": "requires-reboot", - "DataType": "integer", - "Description": "The maximum number of Redis clients.", - "IsModifiable": false, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxclients", - "ParameterValue": "65000", - "Source": "system" - }, - { - "AllowedValues": "volatile-lru,allkeys-lru,volatile-random,allkeys-random,volatile-ttl,noeviction", - "ChangeType": "immediate", - "DataType": "string", - "Description": "Max memory policy.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxmemory-policy", - "ParameterValue": "volatile-lru", - "Source": "system" - }, - { - "AllowedValues": "1-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Max memory samples.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "maxmemory-samples", - "ParameterValue": "3", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Maximum number of seconds within which the master must receive a ping from a slave to take writes. Use this parameter together with min-slaves-to-write to regulate when the master stops accepting writes. Setting this value to 0 means the master always takes writes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "min-slaves-max-lag", - "ParameterValue": "10", - "Source": "system" - }, - { - "AllowedValues": "0-", - "ChangeType": "immediate", - "DataType": "integer", - "Description": "Number of slaves that must be connected in order for master to take writes. Use this parameter together with min-slaves-max-lag to regulate when the master stops accepting writes. 
Setting this to 0 means the master always takes writes.", - "IsModifiable": true, - "MinimumEngineVersion": "2.8.6", - "ParameterName": "min-slaves-to-write", - "ParameterValue": "0", - "Source": "system" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns the default engine and system parameter information for the specified cache engine.", - "id": "describeenginedefaultparameters-1481738057686", - "title": "DescribeEngineDefaultParameters" - } - ], - "DescribeEvents": [ - { - "input": { - "Duration": 360, - "SourceType": "cache-cluster" - }, - "output": { - "Events": [ - { - "Date": "2016-12-22T16:27:56.088Z", - "Message": "Added cache node 0001 in availability zone us-east-1e", - "SourceIdentifier": "redis-cluster", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:27:56.078Z", - "Message": "Cache cluster created", - "SourceIdentifier": "redis-cluster", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:05:17.326Z", - "Message": "Added cache node 0002 in availability zone us-east-1c", - "SourceIdentifier": "my-memcached2", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:05:17.323Z", - "Message": "Added cache node 0001 in availability zone us-east-1e", - "SourceIdentifier": "my-memcached2", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:05:17.314Z", - "Message": "Cache cluster created", - "SourceIdentifier": "my-memcached2", - "SourceType": "cache-cluster" - } - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes all the cache-cluster events for the past 120 minutes.", - "id": "describeevents-1481843894757", - "title": "DescribeEvents" - }, - { - "input": { - "StartTime": "2016-12-22T15:00:00.000Z" - }, - "output": { - "Events": [ - { - "Date": "2016-12-22T21:35:46.674Z", - "Message": "Snapshot succeeded for snapshot with ID 'cr-bkup' of replication group with ID 'clustered-redis'", - "SourceIdentifier": "clustered-redis-0001-001", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:27:56.088Z", - "Message": "Added cache node 0001 in availability zone us-east-1e", - "SourceIdentifier": "redis-cluster", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:27:56.078Z", - "Message": "Cache cluster created", - "SourceIdentifier": "redis-cluster", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:05:17.326Z", - "Message": "Added cache node 0002 in availability zone us-east-1c", - "SourceIdentifier": "my-memcached2", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:05:17.323Z", - "Message": "Added cache node 0001 in availability zone us-east-1e", - "SourceIdentifier": "my-memcached2", - "SourceType": "cache-cluster" - }, - { - "Date": "2016-12-22T16:05:17.314Z", - "Message": "Cache cluster created", - "SourceIdentifier": "my-memcached2", - "SourceType": "cache-cluster" - } - ], - "Marker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes all the replication-group events from 3:00P to 5:00P on November 11, 2016.", - "id": "describeevents-1481843894757", - "title": "DescribeEvents" - } - ], - "DescribeReplicationGroups": [ - { - "input": { - }, - "output": { - "Marker": "", - "ReplicationGroups": [ - { - "AutomaticFailover": "enabled", - "Description": "Test cluster", - "MemberClusters": [ - "clustered-redis-0001-001", - "clustered-redis-0001-002", - "clustered-redis-0002-001", - "clustered-redis-0002-002" - ], - 
"NodeGroups": [ - { - "NodeGroupId": "0001", - "NodeGroupMembers": [ - { - "CacheClusterId": "clustered-redis-0001-001", - "CacheNodeId": "0001", - "PreferredAvailabilityZone": "us-east-1e" - }, - { - "CacheClusterId": "clustered-redis-0001-002", - "CacheNodeId": "0001", - "PreferredAvailabilityZone": "us-east-1c" - } - ], - "Status": "available" - }, - { - "NodeGroupId": "0002", - "NodeGroupMembers": [ - { - "CacheClusterId": "clustered-redis-0002-001", - "CacheNodeId": "0001", - "PreferredAvailabilityZone": "us-east-1c" - }, - { - "CacheClusterId": "clustered-redis-0002-002", - "CacheNodeId": "0001", - "PreferredAvailabilityZone": "us-east-1b" - } - ], - "Status": "available" - } - ], - "PendingModifiedValues": { - }, - "ReplicationGroupId": "clustered-redis", - "Status": "available" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the replication group myreplgroup.", - "id": "describereplicationgroups-1481742639427", - "title": "DescribeReplicationGroups" - } - ], - "DescribeReservedCacheNodes": [ - { - "input": { - "MaxRecords": 25 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about reserved cache nodes for this account, or about a specified reserved cache node. If the account has no reserved cache nodes, the operation returns an empty list, as shown here.", - "id": "describereservedcachenodes-1481742348045", - "title": "DescribeReservedCacheNodes" - } - ], - "DescribeReservedCacheNodesOfferings": [ - { - "input": { - "MaxRecords": 20 - }, - "output": { - "Marker": "1ef01f5b-433f-94ff-a530-61a56bfc8e7a", - "ReservedCacheNodesOfferings": [ - { - "CacheNodeType": "cache.m1.small", - "Duration": 94608000, - "FixedPrice": 157.0, - "OfferingType": "Medium Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "0167633d-37f6-4222-b872-b1f22eb79ba4", - "UsagePrice": 0.017 - }, - { - "CacheNodeType": "cache.m4.xlarge", - "Duration": 94608000, - "FixedPrice": 1248.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.077, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "02c04e13-baca-4e71-9ceb-620eed94827d", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.m2.4xlarge", - "Duration": 94608000, - "FixedPrice": 2381.0, - "OfferingType": "Medium Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "02e1755e-76e8-48e3-8d82-820a5726a458", - "UsagePrice": 0.276 - }, - { - "CacheNodeType": "cache.m1.small", - "Duration": 94608000, - "FixedPrice": 188.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.013, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "03315215-7b87-421a-a3dd-785021e4113f", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.m4.10xlarge", - "Duration": 31536000, - "FixedPrice": 6158.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - { - "RecurringChargeAmount": 1.125, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "05ffbb44-2ace-4476-a2a5-8ec99f866fb3", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.m1.small", - "Duration": 31536000, - "FixedPrice": 101.0, - "OfferingType": "Medium Utilization", - "ProductDescription": 
"redis", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "065c71ae-4a4e-4f1e-bebf-37525f4c6cb2", - "UsagePrice": 0.023 - }, - { - "CacheNodeType": "cache.m1.medium", - "Duration": 94608000, - "FixedPrice": 314.0, - "OfferingType": "Medium Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "06774b12-7f5e-48c1-907a-f286c63f327d", - "UsagePrice": 0.034 - }, - { - "CacheNodeType": "cache.m2.xlarge", - "Duration": 31536000, - "FixedPrice": 163.0, - "OfferingType": "Light Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "0924ac6b-847f-4761-ba6b-4290b2adf719", - "UsagePrice": 0.137 - }, - { - "CacheNodeType": "cache.m2.xlarge", - "Duration": 94608000, - "FixedPrice": 719.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.049, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "09eeb126-69b6-4d3f-8f94-ca3510629f53", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.r3.2xlarge", - "Duration": 94608000, - "FixedPrice": 4132.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.182, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "0a516ad8-557f-4310-9dd0-2448c2ff4d62", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.c1.xlarge", - "Duration": 94608000, - "FixedPrice": 875.0, - "OfferingType": "Light Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "0b0c1cc5-2177-4150-95d7-c67ec34dcb19", - "UsagePrice": 0.363 - }, - { - "CacheNodeType": "cache.m4.10xlarge", - "Duration": 94608000, - "FixedPrice": 12483.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.76, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "0c2b139b-1cff-43d0-8fba-0c753f9b1950", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.c1.xlarge", - "Duration": 31536000, - "FixedPrice": 1620.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.207, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "0c52115b-38cb-47a2-8dbc-e02e40b6a13f", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.m2.4xlarge", - "Duration": 94608000, - "FixedPrice": 2381.0, - "OfferingType": "Medium Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "12fcb19c-5416-4e1d-934f-28f1e2cb8599", - "UsagePrice": 0.276 - }, - { - "CacheNodeType": "cache.m4.xlarge", - "Duration": 31536000, - "FixedPrice": 616.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.112, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "13af20ad-914d-4d8b-9763-fa2e565f3549", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.r3.8xlarge", - "Duration": 94608000, - "FixedPrice": 16528.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.729, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "14da3d3f-b526-4dbf-b09b-355578b2a576", - 
"UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.m1.medium", - "Duration": 94608000, - "FixedPrice": 140.0, - "OfferingType": "Light Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "15d7018c-71fb-4717-8409-4bdcdca18da7", - "UsagePrice": 0.052 - }, - { - "CacheNodeType": "cache.m4.4xlarge", - "Duration": 94608000, - "FixedPrice": 4993.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.304, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "1ae7ec5f-a76e-49b6-822b-629b1768a13a", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.m3.2xlarge", - "Duration": 31536000, - "FixedPrice": 1772.0, - "OfferingType": "Heavy Utilization", - "ProductDescription": "redis", - "RecurringCharges": [ - { - "RecurringChargeAmount": 0.25, - "RecurringChargeFrequency": "Hourly" - } - ], - "ReservedCacheNodesOfferingId": "1d31242b-3925-48d1-b882-ce03204e6013", - "UsagePrice": 0.0 - }, - { - "CacheNodeType": "cache.t1.micro", - "Duration": 31536000, - "FixedPrice": 54.0, - "OfferingType": "Medium Utilization", - "ProductDescription": "memcached", - "RecurringCharges": [ - - ], - "ReservedCacheNodesOfferingId": "1ef01f5b-94ff-433f-a530-61a56bfc8e7a", - "UsagePrice": 0.008 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists available reserved cache node offerings.", - "id": "describereseredcachenodeofferings-1481742869998", - "title": "DescribeReseredCacheNodeOfferings" - }, - { - "input": { - "CacheNodeType": "cache.r3.large", - "Duration": "3", - "MaxRecords": 25, - "OfferingType": "Light Utilization", - "ReservedCacheNodesOfferingId": "" - }, - "output": { - "Marker": "", - "ReservedCacheNodesOfferings": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists available reserved cache node offerings for cache.r3.large nodes with a 3 year commitment.", - "id": "describereseredcachenodeofferings-1481742869998", - "title": "DescribeReseredCacheNodeOfferings" - }, - { - "input": { - "CacheNodeType": "", - "Duration": "", - "Marker": "", - "MaxRecords": 25, - "OfferingType": "", - "ProductDescription": "", - "ReservedCacheNodesOfferingId": "438012d3-4052-4cc7-b2e3-8d3372e0e706" - }, - "output": { - "Marker": "", - "ReservedCacheNodesOfferings": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists available reserved cache node offerings.", - "id": "describereseredcachenodeofferings-1481742869998", - "title": "DescribeReseredCacheNodeOfferings" - } - ], - "DescribeSnapshots": [ - { - "input": { - "SnapshotName": "snapshot-20161212" - }, - "output": { - "Marker": "", - "Snapshots": [ - { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-21T22:27:12.543Z", - "CacheClusterId": "my-redis5", - "CacheNodeType": "cache.m3.large", - "CacheParameterGroupName": "default.redis3.2", - "CacheSubnetGroupName": "default", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NodeSnapshots": [ - { - "CacheNodeCreateTime": "2016-12-21T22:27:12.543Z", - "CacheNodeId": "0001", - "CacheSize": "3 MB", - "SnapshotCreateTime": "2016-12-21T22:30:26Z" - } - ], - "NumCacheNodes": 1, - "Port": 6379, - "PreferredAvailabilityZone": "us-east-1c", - "PreferredMaintenanceWindow": "fri:05:30-fri:06:30", - "SnapshotName": "snapshot-20161212", - "SnapshotRetentionLimit": 7, - "SnapshotSource": "manual", - "SnapshotStatus": 
"available", - "SnapshotWindow": "10:00-11:00", - "VpcId": "vpc-91280df6" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the snapshot mysnapshot. By default.", - "id": "describesnapshots-1481743399584", - "title": "DescribeSnapshots" - } - ], - "ListAllowedNodeTypeModifications": [ - { - "input": { - "ReplicationGroupId": "myreplgroup" - }, - "output": { - "ScaleUpModifications": [ - "cache.m4.10xlarge", - "cache.m4.2xlarge", - "cache.m4.4xlarge", - "cache.m4.xlarge", - "cache.r3.2xlarge", - "cache.r3.4xlarge", - "cache.r3.8xlarge", - "cache.r3.xlarge" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all available node types that you can scale your Redis cluster's or replication group's current node type up to.", - "id": "listallowednodetypemodifications-1481748494872", - "title": "ListAllowedNodeTypeModifications" - }, - { - "input": { - "CacheClusterId": "mycluster" - }, - "output": { - "ScaleUpModifications": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all available node types that you can scale your Redis cluster's or replication group's current node type up to.", - "id": "listallowednodetypemodifications-1481748494872", - "title": "ListAllowedNodeTypeModifications" - } - ], - "ListTagsForResource": [ - { - "input": { - "ResourceName": "arn:aws:elasticache:us-west-2::cluster:mycluster" - }, - "output": { - "TagList": [ - { - "Key": "APIVersion", - "Value": "20150202" - }, - { - "Key": "Service", - "Value": "ElastiCache" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all cost allocation tags currently on the named resource. A cost allocation tag is a key-value pair where the key is case-sensitive and the value is optional. 
You can use cost allocation tags to categorize and track your AWS costs.", - "id": "listtagsforresource-1481748784584", - "title": "ListTagsForResource" - } - ], - "ModifyCacheCluster": [ - { - "input": { - "ApplyImmediately": true, - "CacheClusterId": "redis-cluster", - "SnapshotRetentionLimit": 14 - }, - "output": { - "CacheCluster": { - "AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-22T16:27:56.078Z", - "CacheClusterId": "redis-cluster", - "CacheClusterStatus": "available", - "CacheNodeType": "cache.r3.large", - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.redis3.2", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https://console.aws.amazon.com/elasticache/home#client-download:", - "Engine": "redis", - "EngineVersion": "3.2.4", - "NumCacheNodes": 1, - "PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "us-east-1e", - "PreferredMaintenanceWindow": "fri:09:00-fri:10:00", - "SnapshotRetentionLimit": 14, - "SnapshotWindow": "07:00-08:00" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Copies a snapshot to a specified name.", - "id": "modifycachecluster-1482962725919", - "title": "ModifyCacheCluster" - } - ], - "ModifyCacheParameterGroup": [ - { - "input": { - "CacheParameterGroupName": "custom-mem1-4", - "ParameterNameValues": [ - { - "ParameterName": "binding_protocol", - "ParameterValue": "ascii" - }, - { - "ParameterName": "chunk_size", - "ParameterValue": "96" - } - ] - }, - "output": { - "CacheParameterGroupName": "custom-mem1-4" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Modifies one or more parameter values in the specified parameter group. 
You cannot modify any default parameter group.", - "id": "modifycacheparametergroup-1482966746787", - "title": "ModifyCacheParameterGroup" - } - ], - "ModifyCacheSubnetGroup": [ - { - "input": { - "CacheSubnetGroupName": "my-sn-grp", - "SubnetIds": [ - "subnet-bcde2345" - ] - }, - "output": { - "CacheSubnetGroup": { - "CacheSubnetGroupDescription": "My subnet group.", - "CacheSubnetGroupName": "my-sn-grp", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-a1b2c3d4" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1e" - }, - "SubnetIdentifier": "subnet-1a2b3c4d" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1e" - }, - "SubnetIdentifier": "subnet-bcde2345" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1c" - }, - "SubnetIdentifier": "subnet-1234abcd" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-east-1b" - }, - "SubnetIdentifier": "subnet-abcd1234" - } - ], - "VpcId": "vpc-91280df6" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Modifies an existing ElastiCache subnet group.", - "id": "modifycachesubnetgroup-1483043446226", - "title": "ModifyCacheSubnetGroup" - } - ], - "ModifyReplicationGroup": [ - { - "input": { - "ApplyImmediately": true, - "ReplicationGroupDescription": "Modified replication group", - "ReplicationGroupId": "my-redis-rg", - "SnapshotRetentionLimit": 30, - "SnapshottingClusterId": "my-redis-rg-001" - }, - "output": { - "ReplicationGroup": { - "AutomaticFailover": "enabled", - "Description": "Modified replication group", - "MemberClusters": [ - "my-redis-rg-001", - "my-redis-rg-002", - "my-redis-rg-003" - ], - "NodeGroups": [ - { - "NodeGroupId": "0001", - "NodeGroupMembers": [ - { - "CacheClusterId": "my-redis-rg-001", - "CacheNodeId": "0001", - "CurrentRole": "primary", - "PreferredAvailabilityZone": "us-east-1b", - "ReadEndpoint": { - "Address": "my-redis-rg-001.abcdef.0001.use1.cache.amazonaws.com", - "Port": 6379 - } - }, - { - "CacheClusterId": "my-redis-rg-002", - "CacheNodeId": "0001", - "CurrentRole": "replica", - "PreferredAvailabilityZone": "us-east-1a", - "ReadEndpoint": { - "Address": "my-redis-rg-002.abcdef.0001.use1.cache.amazonaws.com", - "Port": 6379 - } - }, - { - "CacheClusterId": "my-redis-rg-003", - "CacheNodeId": "0001", - "CurrentRole": "replica", - "PreferredAvailabilityZone": "us-east-1c", - "ReadEndpoint": { - "Address": "my-redis-rg-003.abcdef.0001.use1.cache.amazonaws.com", - "Port": 6379 - } - } - ], - "PrimaryEndpoint": { - "Address": "my-redis-rg.abcdef.ng.0001.use1.cache.amazonaws.com", - "Port": 6379 - }, - "Status": "available" - } - ], - "PendingModifiedValues": { - }, - "ReplicationGroupId": "my-redis-rg", - "SnapshottingClusterId": "my-redis-rg-002", - "Status": "available" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "modifyreplicationgroup-1483039689581", - "title": "ModifyReplicationGroup" - } - ], - "PurchaseReservedCacheNodesOffering": [ - { - "input": { - "ReservedCacheNodesOfferingId": "1ef01f5b-94ff-433f-a530-61a56bfc8e7a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Allows you to purchase a reserved cache node offering.", - "id": "purchasereservedcachenodesofferings-1483040798484", - "title": "PurchaseReservedCacheNodesOfferings" - } - ], - "RebootCacheCluster": [ - { - "input": { - "CacheClusterId": "custom-mem1-4 ", - "CacheNodeIdsToReboot": [ - "0001", - "0002" - ] - }, - "output": { - "CacheCluster": { - 
"AutoMinorVersionUpgrade": true, - "CacheClusterCreateTime": "2016-12-21T21:59:43.794Z", - "CacheClusterId": "my-mem-cluster", - "CacheClusterStatus": "rebooting cache cluster nodes", - "CacheNodeType": "cache.t2.medium", - "CacheParameterGroup": { - "CacheNodeIdsToReboot": [ - - ], - "CacheParameterGroupName": "default.memcached1.4", - "ParameterApplyStatus": "in-sync" - }, - "CacheSecurityGroups": [ - - ], - "CacheSubnetGroupName": "default", - "ClientDownloadLandingPage": "https://console.aws.amazon.com/elasticache/home#client-download:", - "ConfigurationEndpoint": { - "Address": "my-mem-cluster.abcdef.cfg.use1.cache.amazonaws.com", - "Port": 11211 - }, - "Engine": "memcached", - "EngineVersion": "1.4.24", - "NumCacheNodes": 2, - "PendingModifiedValues": { - }, - "PreferredAvailabilityZone": "Multiple", - "PreferredMaintenanceWindow": "wed:06:00-wed:07:00" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Reboots the specified nodes in the names cluster.", - "id": "rebootcachecluster-1482969019505", - "title": "RebootCacheCluster" - } - ], - "RemoveTagsFromResource": [ - { - "input": { - "ResourceName": "arn:aws:elasticache:us-east-1:1234567890:cluster:my-mem-cluster", - "TagKeys": [ - "A", - "C", - "E" - ] - }, - "output": { - "TagList": [ - { - "Key": "B", - "Value": "Banana" - }, - { - "Key": "D", - "Value": "Dog" - }, - { - "Key": "F", - "Value": "Fox" - }, - { - "Key": "I", - "Value": "" - }, - { - "Key": "K", - "Value": "Kite" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Removes tags identified by a list of tag keys from the list of tags on the specified resource.", - "id": "removetagsfromresource-1483037920947", - "title": "RemoveTagsFromResource" - } - ], - "ResetCacheParameterGroup": [ - { - "input": { - "CacheParameterGroupName": "custom-mem1-4", - "ResetAllParameters": true - }, - "output": { - "CacheParameterGroupName": "custom-mem1-4" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Modifies the parameters of a cache parameter group to the engine or system default value.", - "id": "resetcacheparametergroup-1483038334014", - "title": "ResetCacheParameterGroup" - } - ], - "RevokeCacheSecurityGroupIngress": [ - { - "input": { - "CacheSecurityGroupName": "my-sec-grp", - "EC2SecurityGroupName": "my-ec2-sec-grp", - "EC2SecurityGroupOwnerId": "1234567890" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a list of cache security group descriptions. 
If a cache security group name is specified, the list contains only the description of that group.", - "id": "describecachesecuritygroups-1483047200801", - "title": "DescribeCacheSecurityGroups" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/paginators-1.json deleted file mode 100644 index 12368b9..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/paginators-1.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "pagination": { - "DescribeCacheClusters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheClusters" - }, - "DescribeCacheEngineVersions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheEngineVersions" - }, - "DescribeCacheParameterGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheParameterGroups" - }, - "DescribeCacheParameters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Parameters" - }, - "DescribeCacheSecurityGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheSecurityGroups" - }, - "DescribeCacheSubnetGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "CacheSubnetGroups" - }, - "DescribeEngineDefaultParameters": { - "input_token": "Marker", - "output_token": "EngineDefaults.Marker", - "limit_key": "MaxRecords", - "result_key": "EngineDefaults.Parameters" - }, - "DescribeEvents": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Events" - }, - "DescribeReservedCacheNodes": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedCacheNodes" - }, - "DescribeReservedCacheNodesOfferings": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedCacheNodesOfferings" - }, - "DescribeReplicationGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReplicationGroups" - }, - "DescribeSnapshots": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Snapshots" - }, - "DescribeServiceUpdates": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ServiceUpdates" - }, - "DescribeUpdateActions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "UpdateActions" - }, - "DescribeGlobalReplicationGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "GlobalReplicationGroups" - }, - "DescribeUserGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "UserGroups" - }, - "DescribeUsers": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Users" - }, - "DescribeServerlessCacheSnapshots": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ServerlessCacheSnapshots" - }, - "DescribeServerlessCaches": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": 
"ServerlessCaches" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/service-2.json.gz deleted file mode 100644 index 6a0b8e4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/waiters-2.json b/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/waiters-2.json deleted file mode 100644 index c177d7b..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticache/2015-02-02/waiters-2.json +++ /dev/null @@ -1,143 +0,0 @@ -{ - "version":2, - "waiters":{ - "CacheClusterAvailable":{ - "acceptors":[ - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"available", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"deleted", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"deleting", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"incompatible-network", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"restore-failed", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":15, - "description":"Wait until ElastiCache cluster is available.", - "maxAttempts":40, - "operation":"DescribeCacheClusters" - }, - "CacheClusterDeleted":{ - "acceptors":[ - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"deleted", - "matcher":"pathAll", - "state":"success" - }, - { - "expected":"CacheClusterNotFound", - "matcher":"error", - "state":"success" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"available", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"creating", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"incompatible-network", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"modifying", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"restore-failed", - "matcher":"pathAny", - "state":"failure" - }, - { - "argument":"CacheClusters[].CacheClusterStatus", - "expected":"snapshotting", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":15, - "description":"Wait until ElastiCache cluster is deleted.", - "maxAttempts":40, - "operation":"DescribeCacheClusters" - }, - "ReplicationGroupAvailable":{ - "acceptors":[ - { - "argument":"ReplicationGroups[].Status", - "expected":"available", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"ReplicationGroups[].Status", - "expected":"deleted", - "matcher":"pathAny", - "state":"failure" - } - ], - "delay":15, - "description":"Wait until ElastiCache replication group is available.", - "maxAttempts":40, - "operation":"DescribeReplicationGroups" - }, - "ReplicationGroupDeleted":{ - "acceptors":[ - { - "argument":"ReplicationGroups[].Status", - "expected":"deleted", - "matcher":"pathAll", - "state":"success" - }, - { - "argument":"ReplicationGroups[].Status", - "expected":"available", - "matcher":"pathAny", - "state":"failure" - }, - { - "expected":"ReplicationGroupNotFoundFault", - 
"matcher":"error", - "state":"success" - } - ], - "delay":15, - "description":"Wait until ElastiCache replication group is deleted.", - "maxAttempts":40, - "operation":"DescribeReplicationGroups" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index f94e6ef..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/examples-1.json b/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/examples-1.json deleted file mode 100644 index 0fded62..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/examples-1.json +++ /dev/null @@ -1,1109 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AbortEnvironmentUpdate": [ - { - "input": { - "EnvironmentName": "my-env" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following code aborts a running application version deployment for an environment named my-env:", - "id": "to-abort-a-deployment-1456267848227", - "title": "To abort a deployment" - } - ], - "CheckDNSAvailability": [ - { - "input": { - "CNAMEPrefix": "my-cname" - }, - "output": { - "Available": true, - "FullyQualifiedCNAME": "my-cname.us-west-2.elasticbeanstalk.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation checks the availability of the subdomain my-cname:", - "id": "to-check-the-availability-of-a-cname-1456268589537", - "title": "To check the availability of a CNAME" - } - ], - "CreateApplication": [ - { - "input": { - "ApplicationName": "my-app", - "Description": "my application" - }, - "output": { - "Application": { - "ApplicationName": "my-app", - "ConfigurationTemplates": [ - - ], - "DateCreated": "2015-02-12T18:32:21.181Z", - "DateUpdated": "2015-02-12T18:32:21.181Z", - "Description": "my application" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation creates a new application named my-app:", - "id": "to-create-a-new-application-1456268895683", - "title": "To create a new application" - } - ], - "CreateApplicationVersion": [ - { - "input": { - "ApplicationName": "my-app", - "AutoCreateApplication": true, - "Description": "my-app-v1", - "Process": true, - "SourceBundle": { - "S3Bucket": "my-bucket", - "S3Key": "sample.war" - }, - "VersionLabel": "v1" - }, - "output": { - "ApplicationVersion": { - "ApplicationName": "my-app", - "DateCreated": "2015-02-03T23:01:25.412Z", - "DateUpdated": "2015-02-03T23:01:25.412Z", - "Description": "my-app-v1", - "SourceBundle": { - "S3Bucket": "my-bucket", - "S3Key": "sample.war" - }, - "VersionLabel": "v1" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation creates a new version (v1) of an application named my-app:", - "id": "to-create-a-new-application-1456268895683", - "title": "To create a new application" - } - ], - "CreateConfigurationTemplate": [ - { - "input": { - "ApplicationName": "my-app", - "EnvironmentId": "e-rpqsewtp2j", - "TemplateName": "my-app-v1" - }, - "output": { - "ApplicationName": "my-app", - "DateCreated": "2015-08-12T18:40:39Z", - "DateUpdated": "2015-08-12T18:40:39Z", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 
8", - "TemplateName": "my-app-v1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation creates a configuration template named my-app-v1 from the settings applied to an environment with the id e-rpqsewtp2j:", - "id": "to-create-a-configuration-template-1456269283586", - "title": "To create a configuration template" - } - ], - "CreateEnvironment": [ - { - "input": { - "ApplicationName": "my-app", - "CNAMEPrefix": "my-app", - "EnvironmentName": "my-env", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "VersionLabel": "v1" - }, - "output": { - "ApplicationName": "my-app", - "CNAME": "my-app.elasticbeanstalk.com", - "DateCreated": "2015-02-03T23:04:54.479Z", - "DateUpdated": "2015-02-03T23:04:54.479Z", - "EnvironmentId": "e-izqpassy4h", - "EnvironmentName": "my-env", - "Health": "Grey", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "Status": "Launching", - "Tier": { - "Name": "WebServer", - "Type": "Standard", - "Version": " " - }, - "VersionLabel": "v1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation creates a new environment for version v1 of a java application named my-app:", - "id": "to-create-a-new-environment-for-an-application-1456269380396", - "title": "To create a new environment for an application" - } - ], - "CreateStorageLocation": [ - { - "output": { - "S3Bucket": "elasticbeanstalk-us-west-2-0123456789012" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation creates a new environment for version v1 of a java application named my-app:", - "id": "to-create-a-new-environment-for-an-application-1456269380396", - "title": "To create a new environment for an application" - } - ], - "DeleteApplication": [ - { - "input": { - "ApplicationName": "my-app" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation deletes an application named my-app:", - "id": "to-delete-an-application-1456269699366", - "title": "To delete an application" - } - ], - "DeleteApplicationVersion": [ - { - "input": { - "ApplicationName": "my-app", - "DeleteSourceBundle": true, - "VersionLabel": "22a0-stage-150819_182129" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation deletes an application version named 22a0-stage-150819_182129 for an application named my-app:", - "id": "to-delete-an-application-version-1456269792956", - "title": "To delete an application version" - } - ], - "DeleteConfigurationTemplate": [ - { - "input": { - "ApplicationName": "my-app", - "TemplateName": "my-template" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation deletes a configuration template named my-template for an application named my-app:", - "id": "to-delete-a-configuration-template-1456269836701", - "title": "To delete a configuration template" - } - ], - "DeleteEnvironmentConfiguration": [ - { - "input": { - "ApplicationName": "my-app", - "EnvironmentName": "my-env" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation deletes a draft configuration for an environment named my-env:", - "id": "to-delete-a-draft-configuration-1456269886654", - "title": "To delete a draft configuration" - } - ], - "DescribeApplicationVersions": [ - { - "input": { - "ApplicationName": "my-app", - "VersionLabels": [ - "v2" 
- ] - }, - "output": { - "ApplicationVersions": [ - { - "ApplicationName": "my-app", - "DateCreated": "2015-07-23T01:32:26.079Z", - "DateUpdated": "2015-07-23T01:32:26.079Z", - "Description": "update cover page", - "SourceBundle": { - "S3Bucket": "elasticbeanstalk-us-west-2-015321684451", - "S3Key": "my-app/5026-stage-150723_224258.war" - }, - "VersionLabel": "v2" - }, - { - "ApplicationName": "my-app", - "DateCreated": "2015-07-23T22:26:10.816Z", - "DateUpdated": "2015-07-23T22:26:10.816Z", - "Description": "initial version", - "SourceBundle": { - "S3Bucket": "elasticbeanstalk-us-west-2-015321684451", - "S3Key": "my-app/5026-stage-150723_222618.war" - }, - "VersionLabel": "v1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves information about an application version labeled v2:", - "id": "to-view-information-about-an-application-version-1456269947428", - "title": "To view information about an application version" - } - ], - "DescribeApplications": [ - { - "input": { - }, - "output": { - "Applications": [ - { - "ApplicationName": "ruby", - "ConfigurationTemplates": [ - - ], - "DateCreated": "2015-08-13T21:05:44.376Z", - "DateUpdated": "2015-08-13T21:05:44.376Z", - "Versions": [ - "Sample Application" - ] - }, - { - "ApplicationName": "pythonsample", - "ConfigurationTemplates": [ - - ], - "DateCreated": "2015-08-13T19:05:43.637Z", - "DateUpdated": "2015-08-13T19:05:43.637Z", - "Description": "Application created from the EB CLI using \"eb init\"", - "Versions": [ - "Sample Application" - ] - }, - { - "ApplicationName": "nodejs-example", - "ConfigurationTemplates": [ - - ], - "DateCreated": "2015-08-06T17:50:02.486Z", - "DateUpdated": "2015-08-06T17:50:02.486Z", - "Versions": [ - "add elasticache", - "First Release" - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves information about applications in the current region:", - "id": "to-view-a-list-of-applications-1456270027373", - "title": "To view a list of applications" - } - ], - "DescribeConfigurationOptions": [ - { - "input": { - "ApplicationName": "my-app", - "EnvironmentName": "my-env" - }, - "output": { - "Options": [ - { - "ChangeSeverity": "NoInterruption", - "DefaultValue": "30", - "MaxValue": 300, - "MinValue": 5, - "Name": "Interval", - "Namespace": "aws:elb:healthcheck", - "UserDefined": false, - "ValueType": "Scalar" - }, - { - "ChangeSeverity": "NoInterruption", - "DefaultValue": "2000000", - "MinValue": 0, - "Name": "LowerThreshold", - "Namespace": "aws:autoscaling:trigger", - "UserDefined": false, - "ValueType": "Scalar" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves descriptions of all available configuration options for an environment named my-env:", - "id": "to-view-configuration-options-for-an-environment-1456276763917", - "title": "To view configuration options for an environment" - } - ], - "DescribeConfigurationSettings": [ - { - "input": { - "ApplicationName": "my-app", - "EnvironmentName": "my-env" - }, - "output": { - "ConfigurationSettings": [ - { - "ApplicationName": "my-app", - "DateCreated": "2015-08-13T19:16:25Z", - "DateUpdated": "2015-08-13T23:30:07Z", - "DeploymentStatus": "deployed", - "Description": "Environment created from the EB CLI using \"eb create\"", - "EnvironmentName": "my-env", - "OptionSettings": [ - { - "Namespace": "aws:autoscaling:asg", - "OptionName": "Availability Zones", - 
"ResourceName": "AWSEBAutoScalingGroup", - "Value": "Any" - }, - { - "Namespace": "aws:autoscaling:asg", - "OptionName": "Cooldown", - "ResourceName": "AWSEBAutoScalingGroup", - "Value": "360" - }, - { - "Namespace": "aws:elb:policies", - "OptionName": "ConnectionDrainingTimeout", - "ResourceName": "AWSEBLoadBalancer", - "Value": "20" - }, - { - "Namespace": "aws:elb:policies", - "OptionName": "ConnectionSettingIdleTimeout", - "ResourceName": "AWSEBLoadBalancer", - "Value": "60" - } - ], - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8" - } - ] - }, - "comments": { - "input": { - }, - "output": { - "abbreviated": "Output is abbreviated" - } - }, - "description": "The following operation retrieves configuration settings for an environment named my-env:", - "id": "to-view-configurations-settings-for-an-environment-1456276924537", - "title": "To view configurations settings for an environment" - } - ], - "DescribeEnvironmentHealth": [ - { - "input": { - "AttributeNames": [ - "All" - ], - "EnvironmentName": "my-env" - }, - "output": { - "ApplicationMetrics": { - "Duration": 10, - "Latency": { - "P10": 0.001, - "P50": 0.001, - "P75": 0.002, - "P85": 0.003, - "P90": 0.003, - "P95": 0.004, - "P99": 0.004, - "P999": 0.004 - }, - "RequestCount": 45, - "StatusCodes": { - "Status2xx": 45, - "Status3xx": 0, - "Status4xx": 0, - "Status5xx": 0 - } - }, - "Causes": [ - - ], - "Color": "Green", - "EnvironmentName": "my-env", - "HealthStatus": "Ok", - "InstancesHealth": { - "Degraded": 0, - "Info": 0, - "NoData": 0, - "Ok": 1, - "Pending": 0, - "Severe": 0, - "Unknown": 0, - "Warning": 0 - }, - "RefreshedAt": "2015-08-20T21:09:18Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves overall health information for an environment named my-env:", - "id": "to-view-environment-health-1456277109510", - "title": "To view environment health" - } - ], - "DescribeEnvironmentResources": [ - { - "input": { - "EnvironmentName": "my-env" - }, - "output": { - "EnvironmentResources": { - "AutoScalingGroups": [ - { - "Name": "awseb-e-qu3fyyjyjs-stack-AWSEBAutoScalingGroup-QSB2ZO88SXZT" - } - ], - "EnvironmentName": "my-env", - "Instances": [ - { - "Id": "i-0c91c786" - } - ], - "LaunchConfigurations": [ - { - "Name": "awseb-e-qu3fyyjyjs-stack-AWSEBAutoScalingLaunchConfiguration-1UUVQIBC96TQ2" - } - ], - "LoadBalancers": [ - { - "Name": "awseb-e-q-AWSEBLoa-1EEPZ0K98BIF0" - } - ], - "Queues": [ - - ], - "Triggers": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves information about resources in an environment named my-env:", - "id": "to-view-information-about-the-aws-resources-in-your-environment-1456277206232", - "title": "To view information about the AWS resources in your environment" - } - ], - "DescribeEnvironments": [ - { - "input": { - "EnvironmentNames": [ - "my-env" - ] - }, - "output": { - "Environments": [ - { - "AbortableOperationInProgress": false, - "ApplicationName": "my-app", - "CNAME": "my-env.elasticbeanstalk.com", - "DateCreated": "2015-08-07T20:48:49.599Z", - "DateUpdated": "2015-08-12T18:16:55.019Z", - "EndpointURL": "awseb-e-w-AWSEBLoa-1483140XB0Q4L-109QXY8121.us-west-2.elb.amazonaws.com", - "EnvironmentId": "e-rpqsewtp2j", - "EnvironmentName": "my-env", - "Health": "Green", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "Status": "Ready", - "Tier": { - "Name": "WebServer", - "Type": "Standard", - 
"Version": " " - }, - "VersionLabel": "7f58-stage-150812_025409" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves information about an environment named my-env:", - "id": "to-view-information-about-an-environment-1456277288662", - "title": "To view information about an environment" - } - ], - "DescribeEvents": [ - { - "input": { - "EnvironmentName": "my-env" - }, - "output": { - "Events": [ - { - "ApplicationName": "my-app", - "EnvironmentName": "my-env", - "EventDate": "2015-08-20T07:06:53.535Z", - "Message": "Environment health has transitioned from Info to Ok.", - "Severity": "INFO" - }, - { - "ApplicationName": "my-app", - "EnvironmentName": "my-env", - "EventDate": "2015-08-20T07:06:02.049Z", - "Message": "Environment update completed successfully.", - "RequestId": "b7f3960b-4709-11e5-ba1e-07e16200da41", - "Severity": "INFO" - }, - { - "ApplicationName": "my-app", - "EnvironmentName": "my-env", - "EventDate": "2015-08-13T19:16:27.561Z", - "Message": "Using elasticbeanstalk-us-west-2-012445113685 as Amazon S3 storage bucket for environment data.", - "RequestId": "ca8dfbf6-41ef-11e5-988b-651aa638f46b", - "Severity": "INFO" - }, - { - "ApplicationName": "my-app", - "EnvironmentName": "my-env", - "EventDate": "2015-08-13T19:16:26.581Z", - "Message": "createEnvironment is starting.", - "RequestId": "cdfba8f6-41ef-11e5-988b-65638f41aa6b", - "Severity": "INFO" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves events for an environment named my-env:", - "id": "to-view-events-for-an-environment-1456277367589", - "title": "To view events for an environment" - } - ], - "DescribeInstancesHealth": [ - { - "input": { - "AttributeNames": [ - "All" - ], - "EnvironmentName": "my-env" - }, - "output": { - "InstanceHealthList": [ - { - "ApplicationMetrics": { - "Duration": 10, - "Latency": { - "P10": 0, - "P50": 0.001, - "P75": 0.002, - "P85": 0.003, - "P90": 0.004, - "P95": 0.005, - "P99": 0.006, - "P999": 0.006 - }, - "RequestCount": 48, - "StatusCodes": { - "Status2xx": 47, - "Status3xx": 0, - "Status4xx": 1, - "Status5xx": 0 - } - }, - "Causes": [ - - ], - "Color": "Green", - "HealthStatus": "Ok", - "InstanceId": "i-08691cc7", - "LaunchedAt": "2015-08-13T19:17:09Z", - "System": { - "CPUUtilization": { - "IOWait": 0.2, - "IRQ": 0, - "Idle": 97.8, - "Nice": 0.1, - "SoftIRQ": 0.1, - "System": 0.3, - "User": 1.5 - }, - "LoadAverage": [ - 0, - 0.02, - 0.05 - ] - } - } - ], - "RefreshedAt": "2015-08-20T21:09:08Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves health information for instances in an environment named my-env:", - "id": "to-view-environment-health-1456277424757", - "title": "To view environment health" - } - ], - "ListAvailableSolutionStacks": [ - { - "output": { - "SolutionStackDetails": [ - { - "PermittedFileTypes": [ - "zip" - ], - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Node.js" - } - ], - "SolutionStacks": [ - "64bit Amazon Linux 2015.03 v2.0.0 running Node.js", - "64bit Amazon Linux 2015.03 v2.0.0 running PHP 5.6", - "64bit Amazon Linux 2015.03 v2.0.0 running PHP 5.5", - "64bit Amazon Linux 2015.03 v2.0.0 running PHP 5.4", - "64bit Amazon Linux 2015.03 v2.0.0 running Python 3.4", - "64bit Amazon Linux 2015.03 v2.0.0 running Python 2.7", - "64bit Amazon Linux 2015.03 v2.0.0 running Python", - "64bit Amazon Linux 2015.03 v2.0.0 running Ruby 2.2 (Puma)", - 
"64bit Amazon Linux 2015.03 v2.0.0 running Ruby 2.2 (Passenger Standalone)", - "64bit Amazon Linux 2015.03 v2.0.0 running Ruby 2.1 (Puma)", - "64bit Amazon Linux 2015.03 v2.0.0 running Ruby 2.1 (Passenger Standalone)", - "64bit Amazon Linux 2015.03 v2.0.0 running Ruby 2.0 (Puma)", - "64bit Amazon Linux 2015.03 v2.0.0 running Ruby 2.0 (Passenger Standalone)", - "64bit Amazon Linux 2015.03 v2.0.0 running Ruby 1.9.3", - "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 7 Java 7", - "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 7 Java 6", - "64bit Windows Server Core 2012 R2 running IIS 8.5", - "64bit Windows Server 2012 R2 running IIS 8.5", - "64bit Windows Server 2012 running IIS 8", - "64bit Windows Server 2008 R2 running IIS 7.5", - "64bit Amazon Linux 2015.03 v2.0.0 running Docker 1.6.2", - "64bit Amazon Linux 2015.03 v2.0.0 running Multi-container Docker 1.6.2 (Generic)", - "64bit Debian jessie v2.0.0 running GlassFish 4.1 Java 8 (Preconfigured - Docker)", - "64bit Debian jessie v2.0.0 running GlassFish 4.0 Java 7 (Preconfigured - Docker)", - "64bit Debian jessie v2.0.0 running Go 1.4 (Preconfigured - Docker)", - "64bit Debian jessie v2.0.0 running Go 1.3 (Preconfigured - Docker)", - "64bit Debian jessie v2.0.0 running Python 3.4 (Preconfigured - Docker)" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation lists solution stacks for all currently available platform configurations and any that you have used in the past:", - "id": "to-view-solution-stacks-1456277504811", - "title": "To view solution stacks" - } - ], - "RebuildEnvironment": [ - { - "input": { - "EnvironmentName": "my-env" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation terminates and recreates the resources in an environment named my-env:", - "id": "to-rebuild-an-environment-1456277600918", - "title": "To rebuild an environment" - } - ], - "RequestEnvironmentInfo": [ - { - "input": { - "EnvironmentName": "my-env", - "InfoType": "tail" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation requests logs from an environment named my-env:", - "id": "to-request-tailed-logs-1456277657045", - "title": "To request tailed logs" - } - ], - "RestartAppServer": [ - { - "input": { - "EnvironmentName": "my-env" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation restarts application servers on all instances in an environment named my-env:", - "id": "to-restart-application-servers-1456277739302", - "title": "To restart application servers" - } - ], - "RetrieveEnvironmentInfo": [ - { - "input": { - "EnvironmentName": "my-env", - "InfoType": "tail" - }, - "output": { - "EnvironmentInfo": [ - { - "Ec2InstanceId": "i-09c1c867", - "InfoType": "tail", - "Message": "https://elasticbeanstalk-us-west-2-0123456789012.s3.amazonaws.com/resources/environments/logs/tail/e-fyqyju3yjs/i-09c1c867/TailLogs-1440109397703.out?AWSAccessKeyId=AKGPT4J56IAJ2EUBL5CQ&Expires=1440195891&Signature=n%2BEalOV6A2HIOx4Rcfb7LT16bBM%3D", - "SampleTimestamp": "2015-08-20T22:23:17.703Z" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation retrieves a link to logs from an environment named my-env:", - "id": "to-retrieve-tailed-logs-1456277792734", - "title": "To retrieve tailed logs" - } - ], - "SwapEnvironmentCNAMEs": [ - { - "input": { - 
"DestinationEnvironmentName": "my-env-green", - "SourceEnvironmentName": "my-env-blue" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation swaps the assigned subdomains of two environments:", - "id": "to-swap-environment-cnames-1456277839438", - "title": "To swap environment CNAMES" - } - ], - "TerminateEnvironment": [ - { - "input": { - "EnvironmentName": "my-env" - }, - "output": { - "AbortableOperationInProgress": false, - "ApplicationName": "my-app", - "CNAME": "my-env.elasticbeanstalk.com", - "DateCreated": "2015-08-12T18:52:53.622Z", - "DateUpdated": "2015-08-12T19:05:54.744Z", - "EndpointURL": "awseb-e-f-AWSEBLoa-1I9XUMP4-8492WNUP202574.us-west-2.elb.amazonaws.com", - "EnvironmentId": "e-fh2eravpns", - "EnvironmentName": "my-env", - "Health": "Grey", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "Status": "Terminating", - "Tier": { - "Name": "WebServer", - "Type": "Standard", - "Version": " " - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation terminates an Elastic Beanstalk environment named my-env:", - "id": "to-terminate-an-environment-1456277888556", - "title": "To terminate an environment" - } - ], - "UpdateApplication": [ - { - "input": { - "ApplicationName": "my-app", - "Description": "my Elastic Beanstalk application" - }, - "output": { - "Application": { - "ApplicationName": "my-app", - "ConfigurationTemplates": [ - - ], - "DateCreated": "2015-08-13T19:15:50.449Z", - "DateUpdated": "2015-08-20T22:34:56.195Z", - "Description": "my Elastic Beanstalk application", - "Versions": [ - "2fba-stage-150819_234450", - "bf07-stage-150820_214945", - "93f8", - "fd7c-stage-150820_000431", - "22a0-stage-150819_185942" - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation updates the description of an application named my-app:", - "id": "to-change-an-applications-description-1456277957075", - "title": "To change an application's description" - } - ], - "UpdateApplicationVersion": [ - { - "input": { - "ApplicationName": "my-app", - "Description": "new description", - "VersionLabel": "22a0-stage-150819_185942" - }, - "output": { - "ApplicationVersion": { - "ApplicationName": "my-app", - "DateCreated": "2015-08-19T18:59:17.646Z", - "DateUpdated": "2015-08-20T22:53:28.871Z", - "Description": "new description", - "SourceBundle": { - "S3Bucket": "elasticbeanstalk-us-west-2-0123456789012", - "S3Key": "my-app/22a0-stage-150819_185942.war" - }, - "VersionLabel": "22a0-stage-150819_185942" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation updates the description of an application version named 22a0-stage-150819_185942:", - "id": "to-change-an-application-versions-description-1456278019237", - "title": "To change an application version's description" - } - ], - "UpdateConfigurationTemplate": [ - { - "input": { - "ApplicationName": "my-app", - "OptionsToRemove": [ - { - "Namespace": "aws:elasticbeanstalk:healthreporting:system", - "OptionName": "ConfigDocument" - } - ], - "TemplateName": "my-template" - }, - "output": { - "ApplicationName": "my-app", - "DateCreated": "2015-08-20T22:39:31Z", - "DateUpdated": "2015-08-20T22:43:11Z", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "TemplateName": "my-template" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following 
operation removes the configured CloudWatch custom health metrics configuration ConfigDocument from a saved configuration template named my-template:", - "id": "to-update-a-configuration-template-1456278075300", - "title": "To update a configuration template" - } - ], - "UpdateEnvironment": [ - { - "input": { - "EnvironmentName": "my-env", - "VersionLabel": "v2" - }, - "output": { - "ApplicationName": "my-app", - "CNAME": "my-env.elasticbeanstalk.com", - "DateCreated": "2015-02-03T23:04:54.453Z", - "DateUpdated": "2015-02-03T23:12:29.119Z", - "EndpointURL": "awseb-e-i-AWSEBLoa-1RDLX6TC9VUAO-0123456789.us-west-2.elb.amazonaws.com", - "EnvironmentId": "e-szqipays4h", - "EnvironmentName": "my-env", - "Health": "Grey", - "SolutionStackName": "64bit Amazon Linux running Tomcat 7", - "Status": "Updating", - "Tier": { - "Name": "WebServer", - "Type": "Standard", - "Version": " " - }, - "VersionLabel": "v2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation updates an environment named \"my-env\" to version \"v2\" of the application to which it belongs:", - "id": "to-update-an-environment-to-a-new-version-1456278210718", - "title": "To update an environment to a new version" - }, - { - "input": { - "EnvironmentName": "my-env", - "OptionSettings": [ - { - "Namespace": "aws:elb:healthcheck", - "OptionName": "Interval", - "Value": "15" - }, - { - "Namespace": "aws:elb:healthcheck", - "OptionName": "Timeout", - "Value": "8" - }, - { - "Namespace": "aws:elb:healthcheck", - "OptionName": "HealthyThreshold", - "Value": "2" - }, - { - "Namespace": "aws:elb:healthcheck", - "OptionName": "UnhealthyThreshold", - "Value": "3" - } - ] - }, - "output": { - "AbortableOperationInProgress": true, - "ApplicationName": "my-app", - "CNAME": "my-env.elasticbeanstalk.com", - "DateCreated": "2015-08-07T20:48:49.599Z", - "DateUpdated": "2015-08-12T18:15:23.804Z", - "EndpointURL": "awseb-e-w-AWSEBLoa-14XB83101Q4L-104QXY80921.sa-east-1.elb.amazonaws.com", - "EnvironmentId": "e-wtp2rpqsej", - "EnvironmentName": "my-env", - "Health": "Grey", - "SolutionStackName": "64bit Amazon Linux 2015.03 v2.0.0 running Tomcat 8 Java 8", - "Status": "Updating", - "Tier": { - "Name": "WebServer", - "Type": "Standard", - "Version": " " - }, - "VersionLabel": "7f58-stage-150812_025409" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation configures several options in the aws:elb:loadbalancer namespace:", - "id": "to-configure-option-settings-1456278286349", - "title": "To configure option settings" - } - ], - "ValidateConfigurationSettings": [ - { - "input": { - "ApplicationName": "my-app", - "EnvironmentName": "my-env", - "OptionSettings": [ - { - "Namespace": "aws:elasticbeanstalk:healthreporting:system", - "OptionName": "ConfigDocument", - "Value": "{\"CloudWatchMetrics\": {\"Environment\": {\"ApplicationLatencyP99.9\": null,\"InstancesSevere\": 60,\"ApplicationLatencyP90\": 60,\"ApplicationLatencyP99\": null,\"ApplicationLatencyP95\": 60,\"InstancesUnknown\": 60,\"ApplicationLatencyP85\": 60,\"InstancesInfo\": null,\"ApplicationRequests2xx\": null,\"InstancesDegraded\": null,\"InstancesWarning\": 60,\"ApplicationLatencyP50\": 60,\"ApplicationRequestsTotal\": null,\"InstancesNoData\": null,\"InstancesPending\": 60,\"ApplicationLatencyP10\": null,\"ApplicationRequests5xx\": null,\"ApplicationLatencyP75\": null,\"InstancesOk\": 60,\"ApplicationRequests3xx\": null,\"ApplicationRequests4xx\": null},\"Instance\": 
{\"ApplicationLatencyP99.9\": null,\"ApplicationLatencyP90\": 60,\"ApplicationLatencyP99\": null,\"ApplicationLatencyP95\": null,\"ApplicationLatencyP85\": null,\"CPUUser\": 60,\"ApplicationRequests2xx\": null,\"CPUIdle\": null,\"ApplicationLatencyP50\": null,\"ApplicationRequestsTotal\": 60,\"RootFilesystemUtil\": null,\"LoadAverage1min\": null,\"CPUIrq\": null,\"CPUNice\": 60,\"CPUIowait\": 60,\"ApplicationLatencyP10\": null,\"LoadAverage5min\": null,\"ApplicationRequests5xx\": null,\"ApplicationLatencyP75\": 60,\"CPUSystem\": 60,\"ApplicationRequests3xx\": 60,\"ApplicationRequests4xx\": null,\"InstanceHealth\": null,\"CPUSoftirq\": 60}},\"Version\": 1}" - } - ] - }, - "output": { - "Messages": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation validates a CloudWatch custom metrics config document:", - "id": "to-validate-configuration-settings-1456278393654", - "title": "To validate configuration settings" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/paginators-1.json deleted file mode 100644 index 4f53c86..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "DescribeEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxRecords", - "result_key": "Events" - }, - "DescribeApplicationVersions": { - "input_token": "NextToken", - "limit_key": "MaxRecords", - "output_token": "NextToken", - "result_key": "ApplicationVersions" - }, - "DescribeEnvironmentManagedActionHistory": { - "input_token": "NextToken", - "limit_key": "MaxItems", - "output_token": "NextToken", - "result_key": "ManagedActionHistoryItems" - }, - "DescribeEnvironments": { - "input_token": "NextToken", - "limit_key": "MaxRecords", - "output_token": "NextToken", - "result_key": "Environments" - }, - "ListPlatformVersions": { - "input_token": "NextToken", - "limit_key": "MaxRecords", - "output_token": "NextToken", - "result_key": "PlatformSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/service-2.json.gz deleted file mode 100644 index 7f42b40..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/waiters-2.json deleted file mode 100644 index 4fb906b..0000000 --- a/venv/Lib/site-packages/botocore/data/elasticbeanstalk/2010-12-01/waiters-2.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "version": 2, - "waiters": { - "EnvironmentExists": { - "delay": 20, - "maxAttempts": 20, - "operation": "DescribeEnvironments", - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Environments[].Status", - "expected": "Ready" - }, - { - "state": "retry", - "matcher": "pathAll", - "argument": "Environments[].Status", - "expected": "Launching" - } - ] - }, - "EnvironmentUpdated": { - "delay": 20, - "maxAttempts": 20, - "operation": "DescribeEnvironments", - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Environments[].Status", - "expected": "Ready" - }, - { - "state": "retry", - "matcher": "pathAll", - 
"argument": "Environments[].Status", - "expected": "Updating" - } - ] - }, - "EnvironmentTerminated": { - "delay": 20, - "maxAttempts": 20, - "operation": "DescribeEnvironments", - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Environments[].Status", - "expected": "Terminated" - }, - { - "state": "retry", - "matcher": "pathAll", - "argument": "Environments[].Status", - "expected": "Terminating" - } - ] - } - } -} - diff --git a/venv/Lib/site-packages/botocore/data/elb/2012-06-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/elb/2012-06-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index b6f52f8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/elb/2012-06-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/elb/2012-06-01/examples-1.json b/venv/Lib/site-packages/botocore/data/elb/2012-06-01/examples-1.json deleted file mode 100644 index ce50fdd..0000000 --- a/venv/Lib/site-packages/botocore/data/elb/2012-06-01/examples-1.json +++ /dev/null @@ -1,1036 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddTags": [ - { - "input": { - "LoadBalancerNames": [ - "my-load-balancer" - ], - "Tags": [ - { - "Key": "project", - "Value": "lima" - }, - { - "Key": "department", - "Value": "digital-media" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds two tags to the specified load balancer.", - "id": "elb-add-tags-1", - "title": "To add tags to a load balancer" - } - ], - "ApplySecurityGroupsToLoadBalancer": [ - { - "input": { - "LoadBalancerName": "my-load-balancer", - "SecurityGroups": [ - "sg-fc448899" - ] - }, - "output": { - "SecurityGroups": [ - "sg-fc448899" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example associates a security group with the specified load balancer in a VPC.", - "id": "elb-apply-security-groups-to-load-balancer-1", - "title": "To associate a security group with a load balancer in a VPC" - } - ], - "AttachLoadBalancerToSubnets": [ - { - "input": { - "LoadBalancerName": "my-load-balancer", - "Subnets": [ - "subnet-0ecac448" - ] - }, - "output": { - "Subnets": [ - "subnet-15aaab61", - "subnet-0ecac448" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the specified subnet to the set of configured subnets for the specified load balancer.", - "id": "elb-attach-load-balancer-to-subnets-1", - "title": "To attach subnets to a load balancer" - } - ], - "ConfigureHealthCheck": [ - { - "input": { - "HealthCheck": { - "HealthyThreshold": 2, - "Interval": 30, - "Target": "HTTP:80/png", - "Timeout": 3, - "UnhealthyThreshold": 2 - }, - "LoadBalancerName": "my-load-balancer" - }, - "output": { - "HealthCheck": { - "HealthyThreshold": 2, - "Interval": 30, - "Target": "HTTP:80/png", - "Timeout": 3, - "UnhealthyThreshold": 2 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example specifies the health check settings used to evaluate the health of your backend EC2 instances.", - "id": "elb-configure-health-check-1", - "title": "To specify the health check settings for your backend EC2 instances" - } - ], - "CreateAppCookieStickinessPolicy": [ - { - "input": { - "CookieName": "my-app-cookie", - "LoadBalancerName": "my-load-balancer", - "PolicyName": "my-app-cookie-policy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example generates a 
"access-analyzer-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "access-analyzer.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "access-analyzer-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "access-analyzer.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "access-analyzer-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "access-analyzer-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "access-analyzer-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "access-analyzer-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "access-analyzer-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "access-analyzer-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "access-analyzer.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "access-analyzer.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "access-analyzer.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "access-analyzer.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "access-analyzer.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "access-analyzer-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "access-analyzer.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - 
"hostname" : "access-analyzer-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "access-analyzer.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "access-analyzer-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "access-analyzer.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "access-analyzer-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "access-analyzer.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "account" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "account.us-east-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "acm" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "acm-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "acm-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "acm-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "acm-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "acm-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "acm-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "acm-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "acm-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "acm-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "acm-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "acm-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "acm-fips.us-west-2.amazonaws.com" - } - } - }, - "acm-pca" : { - 
"defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "acm-pca-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "acm-pca-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "acm-pca-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "acm-pca-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "acm-pca-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "acm-pca-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "acm-pca-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "acm-pca-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "acm-pca-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "acm-pca-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "acm-pca-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "acm-pca-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "agreement-marketplace" : { - "endpoints" : { - "us-east-1" : { - "variants" : [ { - "hostname" : "agreement-marketplace.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "airflow" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "amplify" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { 
}, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "amplifybackend" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "amplifyuibuilder" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "aoss" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "api.detective" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "detective.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "detective.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "detective.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "detective.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "detective.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "detective.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "detective.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "api.detective-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "detective.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "detective.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - 
"eu-south-1" : { - "variants" : [ { - "hostname" : "detective.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "detective.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "detective.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "detective.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "detective.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "detective.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "detective.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "api.detective-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "api.detective-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "api.detective-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "api.detective-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.us-west-2.amazonaws.com" - } - } - }, - "api.ecr" : { - "defaults" : { - "variants" : [ { - "hostname" : "ecr-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "hostname" : "api.ecr.af-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "api.ecr.ap-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "api.ecr.ap-northeast-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-northeast-1.api.aws", 
- "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "api.ecr.ap-northeast-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "api.ecr.ap-northeast-3.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "api.ecr.ap-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "api.ecr.ap-south-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "api.ecr.ap-southeast-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "api.ecr.ap-southeast-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "hostname" : "api.ecr.ap-southeast-3.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "hostname" : "api.ecr.ap-southeast-4.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "hostname" : "api.ecr.ap-southeast-5.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "credentialScope" : { - "region" : "ap-southeast-7" - }, - "hostname" : "api.ecr.ap-southeast-7.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "api.ecr.ca-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "hostname" : "api.ecr.ca-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "dkr-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ecr-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "dkr-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ecr-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "dkr-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ecr-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "dkr-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, 
- "deprecated" : true, - "variants" : [ { - "hostname" : "ecr-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "api.ecr.eu-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "api.ecr.eu-central-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "api.ecr.eu-north-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "api.ecr.eu-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "api.ecr.eu-south-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "api.ecr.eu-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "api.ecr.eu-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "api.ecr.eu-west-3.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-dkr-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-east-1.amazonaws.com" - }, - "fips-dkr-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-east-2.amazonaws.com" - }, - "fips-dkr-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-west-1.amazonaws.com" - }, - "fips-dkr-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-west-2.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "api.ecr.il-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "hostname" : "api.ecr.me-central-1.amazonaws.com", - "variants" : [ { - 
"hostname" : "ecr.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "api.ecr.me-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "credentialScope" : { - "region" : "mx-central-1" - }, - "hostname" : "api.ecr.mx-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "api.ecr.sa-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.ecr.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ecr-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ecr.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "api.ecr.us-east-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ecr-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ecr.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "api.ecr.us-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ecr-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ecr.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.ecr.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ecr-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ecr.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "api.ecr-public" : { - "endpoints" : { - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.ecr-public.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-public.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.ecr-public.us-west-2.amazonaws.com" - } - } - }, - "api.iotdeviceadvisor" : { - "endpoints" : { - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "api.iotdeviceadvisor.ap-northeast-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "api.iotdeviceadvisor.eu-west-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.iotdeviceadvisor.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.iotdeviceadvisor.us-west-2.amazonaws.com" - } - } - }, - "api.iotwireless" : { - "endpoints" : { - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "api.iotwireless.ap-northeast-1.amazonaws.com" - }, - "ap-southeast-2" 
: { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "api.iotwireless.ap-southeast-2.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "api.iotwireless.eu-central-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "api.iotwireless.eu-west-1.amazonaws.com" - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "api.iotwireless.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.iotwireless.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.iotwireless.us-west-2.amazonaws.com" - } - } - }, - "api.mediatailor" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-3" : { }, - "me-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "api.pricing" : { - "defaults" : { - "credentialScope" : { - "service" : "pricing" - } - }, - "endpoints" : { - "ap-south-1" : { }, - "eu-central-1" : { }, - "us-east-1" : { } - } - }, - "api.sagemaker" : { - "defaults" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : 
[ { - "hostname" : "api-fips.sagemaker.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.us-west-2.amazonaws.com" - } - } - }, - "api.tunneling.iot" : { - "defaults" : { - "variants" : [ { - "dnsSuffix" : "amazonaws.com", - "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - }, { - "dnsSuffix" : "api.aws", - "hostname" : "api.iot-tunneling-fips.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "api.aws", - "hostname" : "api.iot-tunneling.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "ap-east-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : 
"api.tunneling.iot-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.us-east-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.us-east-2.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.us-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.us-west-2.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "apigateway" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "apigateway-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "apigateway-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "apigateway-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "apigateway-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "apigateway-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "apigateway-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : 
"apigateway-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "apigateway-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "apigateway-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "apigateway-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "apigateway-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "apigateway-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "app-integrations" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "appconfig" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "appflow" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "appflow-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "appflow-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : 
"appflow-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "appflow-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "appflow-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "appflow-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "appflow-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "appflow-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "applicationinsights" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "applicationinsights.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "applicationinsights.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - 
"ca-west-1" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "applicationinsights.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "applicationinsights.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "applicationinsights.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "applicationinsights.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "applicationinsights.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : 
"applicationinsights.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "appmesh" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "appmesh.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "appmesh.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "appmesh.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "appmesh.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "appmesh.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "appmesh.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "appmesh.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "appmesh.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "appmesh.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "appmesh-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "appmesh-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "appmesh.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "appmesh-fips.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "appmesh.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "appmesh.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "appmesh.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "appmesh.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "appmesh.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "appmesh.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "appmesh.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "appmesh.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "appmesh.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : 
"appmesh.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "appmesh.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "appmesh-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "appmesh-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "appmesh.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "appmesh-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "appmesh-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "appmesh-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "appmesh.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "appmesh-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "appmesh-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "appmesh-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "appmesh.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "appmesh-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "appmesh-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "appmesh-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "appmesh.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "appmesh-fips.us-west-2.amazonaws.com" - } - } - }, - "apprunner" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "apprunner-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "apprunner-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "apprunner-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "apprunner-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "apprunner-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "apprunner-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "appstream2" : { - "defaults" : { - "credentialScope" : { - "service" : "appstream" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - 
"fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "appstream2-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "appstream2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "appstream2-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { }, - "us-west-2" : { - "variants" : [ { - "hostname" : "appstream2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "appstream2-fips.us-west-2.amazonaws.com" - } - } - }, - "appsync" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "appsync.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "appsync.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "appsync.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "appsync.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "appsync.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "appsync.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "appsync.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "appsync.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "appsync.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "appsync.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "appsync.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "appsync.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "appsync.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "appsync.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "appsync.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "appsync.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "appsync.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "appsync.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "appsync.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "appsync.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "appsync.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ 
{ - "hostname" : "appsync.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "appsync.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "appsync.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "appsync.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "appsync.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "appsync.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "appsync.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "aps" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { }, - "ap-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { }, - "ap-southeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { }, - "eu-north-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "deprecated" : true - }, - "us-east-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "deprecated" : true - }, - "us-west-1" : { }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "deprecated" : true - } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - 
"us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "athena" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "athena.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "athena.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "athena.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "athena.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "athena.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "athena.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "athena.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "athena.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "athena.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "athena.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "athena.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "athena.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "athena-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "athena-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "athena.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "athena.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "athena.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "athena.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "athena.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "athena.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "athena.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "athena.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "athena-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "athena-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { 
- "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "athena-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "athena-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "athena-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "athena-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "athena.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "athena.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "athena.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "athena.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "athena-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "athena-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "athena-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "athena-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "auditmanager" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "auditmanager-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "auditmanager-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "auditmanager-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "auditmanager-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "auditmanager-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "auditmanager-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "auditmanager-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { 
- "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "auditmanager-fips.us-west-2.amazonaws.com" - } - } - }, - "autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "autoscaling-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "autoscaling-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "autoscaling-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "autoscaling-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "autoscaling-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "autoscaling-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "autoscaling-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "autoscaling-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "autoscaling-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "autoscaling-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "autoscaling-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "autoscaling-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "autoscaling-plans" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "backup" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - 
"ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "backup-gateway" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "batch" : { - "defaults" : { - "variants" : [ { - "hostname" : "fips.batch.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "fips.batch.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "fips.batch.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "fips.batch.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "fips.batch.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "fips.batch.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "fips.batch.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "fips.batch.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "fips.batch.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "bedrock" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "bedrock-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : 
"bedrock.ap-northeast-1.amazonaws.com" - }, - "bedrock-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "bedrock.ap-northeast-2.amazonaws.com" - }, - "bedrock-ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "bedrock.ap-northeast-3.amazonaws.com" - }, - "bedrock-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "bedrock.ap-south-1.amazonaws.com" - }, - "bedrock-ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "bedrock.ap-south-2.amazonaws.com" - }, - "bedrock-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "bedrock.ap-southeast-1.amazonaws.com" - }, - "bedrock-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "bedrock.ap-southeast-2.amazonaws.com" - }, - "bedrock-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "bedrock.ca-central-1.amazonaws.com" - }, - "bedrock-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "bedrock.eu-central-1.amazonaws.com" - }, - "bedrock-eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "bedrock.eu-central-2.amazonaws.com" - }, - "bedrock-eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "bedrock.eu-north-1.amazonaws.com" - }, - "bedrock-eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "bedrock.eu-south-1.amazonaws.com" - }, - "bedrock-eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "bedrock.eu-south-2.amazonaws.com" - }, - "bedrock-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "bedrock.eu-west-1.amazonaws.com" - }, - "bedrock-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "bedrock.eu-west-2.amazonaws.com" - }, - "bedrock-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "bedrock.eu-west-3.amazonaws.com" - }, - "bedrock-fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "bedrock-fips.ca-central-1.amazonaws.com" - }, - "bedrock-fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "bedrock-fips.us-east-1.amazonaws.com" - }, - "bedrock-fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "bedrock-fips.us-east-2.amazonaws.com" - }, - "bedrock-fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "bedrock-fips.us-west-2.amazonaws.com" - }, - "bedrock-runtime-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "bedrock-runtime.ap-northeast-1.amazonaws.com" - }, - "bedrock-runtime-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "bedrock-runtime.ap-northeast-2.amazonaws.com" - }, - "bedrock-runtime-ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "bedrock-runtime.ap-northeast-3.amazonaws.com" - }, - "bedrock-runtime-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "bedrock-runtime.ap-south-1.amazonaws.com" - }, - "bedrock-runtime-ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "bedrock-runtime.ap-south-2.amazonaws.com" - }, - 
"bedrock-runtime-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "bedrock-runtime.ap-southeast-1.amazonaws.com" - }, - "bedrock-runtime-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "bedrock-runtime.ap-southeast-2.amazonaws.com" - }, - "bedrock-runtime-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "bedrock-runtime.ca-central-1.amazonaws.com" - }, - "bedrock-runtime-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "bedrock-runtime.eu-central-1.amazonaws.com" - }, - "bedrock-runtime-eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "bedrock-runtime.eu-central-2.amazonaws.com" - }, - "bedrock-runtime-eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "bedrock-runtime.eu-north-1.amazonaws.com" - }, - "bedrock-runtime-eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "bedrock-runtime.eu-south-1.amazonaws.com" - }, - "bedrock-runtime-eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "bedrock-runtime.eu-south-2.amazonaws.com" - }, - "bedrock-runtime-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "bedrock-runtime.eu-west-1.amazonaws.com" - }, - "bedrock-runtime-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "bedrock-runtime.eu-west-2.amazonaws.com" - }, - "bedrock-runtime-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "bedrock-runtime.eu-west-3.amazonaws.com" - }, - "bedrock-runtime-fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "bedrock-runtime-fips.ca-central-1.amazonaws.com" - }, - "bedrock-runtime-fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "bedrock-runtime-fips.us-east-1.amazonaws.com" - }, - "bedrock-runtime-fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "bedrock-runtime-fips.us-east-2.amazonaws.com" - }, - "bedrock-runtime-fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "bedrock-runtime-fips.us-west-2.amazonaws.com" - }, - "bedrock-runtime-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "bedrock-runtime.sa-east-1.amazonaws.com" - }, - "bedrock-runtime-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "bedrock-runtime.us-east-1.amazonaws.com" - }, - "bedrock-runtime-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "bedrock-runtime.us-east-2.amazonaws.com" - }, - "bedrock-runtime-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "bedrock-runtime.us-west-2.amazonaws.com" - }, - "bedrock-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "bedrock.sa-east-1.amazonaws.com" - }, - "bedrock-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "bedrock.us-east-1.amazonaws.com" - }, - "bedrock-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "bedrock.us-east-2.amazonaws.com" - }, - "bedrock-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "bedrock.us-west-2.amazonaws.com" - }, - "ca-central-1" : { }, - "ca-west-1" : { }, - 
"eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "billingconductor" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "billingconductor.us-east-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "braket" : { - "endpoints" : { - "eu-north-1" : { - "variants" : [ { - "hostname" : "braket.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "braket.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "braket.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "braket.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "braket.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "budgets" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "budgets.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "cases" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-us-east-1" : { - "deprecated" : true - }, - "fips-us-west-2" : { - "deprecated" : true - }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - } - } - }, - "cassandra" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cassandra-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cassandra-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cassandra-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cassandra-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "catalog.marketplace" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "ce" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "ce.us-east-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "chime" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "chime.us-east-1.amazonaws.com", - "protocols" : [ "https" ] - } - }, - 
"isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "cleanrooms" : { - "endpoints" : { - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "cleanrooms.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "cleanrooms.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "cleanrooms.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "cleanrooms.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "cleanrooms.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "cleanrooms.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "cleanrooms.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "cleanrooms.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cleanrooms-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cleanrooms-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cleanrooms-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cleanrooms-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cleanrooms-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cleanrooms.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cleanrooms-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cleanrooms-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cleanrooms.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cleanrooms-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cleanrooms-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cleanrooms.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cloud9" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "cloud9-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloud9-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - 
"variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "cloud9-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cloud9-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cloud9-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "cloud9-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cloud9-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "cloud9-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloud9-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "cloud9-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloud9-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "cloud9-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloud9-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "cloud9-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloud9-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - } - } - }, - "cloudcontrolapi" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - 
"hostname" : "cloudcontrolapi.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : 
"cloudcontrolapi.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "clouddirectory" : { - "endpoints" : { - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "cloudformation" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cloudformation-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cloudformation-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cloudformation-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cloudformation-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "cloudformation-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - 
"deprecated" : true, - "hostname" : "cloudformation-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cloudformation-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cloudformation-fips.us-west-2.amazonaws.com" - } - } - }, - "cloudfront" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "cloudfront.amazonaws.com", - "protocols" : [ "http", "https" ] - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "cloudhsm" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "cloudhsmv2" : { - "defaults" : { - "credentialScope" : { - "service" : "cloudhsm" - } - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "cloudhsmv2.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.me-south-1.api.aws", - "tags" : [ 
"dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cloudhsmv2.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cloudsearch" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "cloudtrail" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "cloudtrail-data" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "codeartifact" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - 
"ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "codebuild" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "codebuild-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "codebuild-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "codebuild-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "codebuild-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "codebuild-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "codebuild-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "codebuild-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "codebuild-fips.us-west-2.amazonaws.com" - } - } - }, - "codecatalyst" : { - "endpoints" : { - "aws-global" : { - "hostname" : "codecatalyst.global.api.aws" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "codecommit" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "codecommit-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.ca-central-1.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "codecommit-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "codecommit-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - 
"us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "codecommit-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "codecommit-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-west-2.amazonaws.com" - } - } - }, - "codedeploy" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "codedeploy-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "codedeploy-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "codedeploy-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "codedeploy-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "codedeploy-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "codedeploy-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "codedeploy-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "codedeploy-fips.us-west-2.amazonaws.com" - } - } - }, - "codeguru-profiler" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "codeguru-reviewer" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "codepipeline" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, 
- "ca-central-1" : { - "variants" : [ { - "hostname" : "codepipeline-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "codepipeline-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "codepipeline-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "codepipeline-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "codepipeline-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "codestar-connections" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "codestar-notifications" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "cognito-identity" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "cognito-identity.af-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-northeast-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-northeast-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-northeast-3.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - 
"ap-south-2" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-south-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-southeast-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-southeast-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-southeast-3.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-southeast-4.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "cognito-identity.ap-southeast-5.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "cognito-identity.ca-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "cognito-identity.ca-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-central-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-north-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-south-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-west-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "cognito-identity.eu-west-3.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cognito-identity-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cognito-identity-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "cognito-identity-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cognito-identity-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "cognito-identity.il-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "cognito-identity.me-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "cognito-identity.me-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "cognito-identity.sa-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : 
"cognito-identity-fips.us-east-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-identity-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-identity.us-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cognito-identity-fips.us-east-2.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-identity-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-identity.us-east-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "cognito-identity-fips.us-west-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-identity-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-identity.us-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cognito-identity-fips.us-west-2.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-identity-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-identity.us-west-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cognito-idp" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "cognito-idp.af-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-northeast-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-northeast-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-northeast-3.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-south-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-southeast-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-southeast-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-southeast-3.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-southeast-4.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "cognito-idp.ap-southeast-5.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "cognito-idp.ca-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "cognito-idp.ca-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-central-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : 
[ { - "hostname" : "cognito-idp.eu-north-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-south-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-west-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "cognito-idp.eu-west-3.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "cognito-idp-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "cognito-idp-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "cognito-idp-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "cognito-idp-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "cognito-idp.il-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "cognito-idp.me-central-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "cognito-idp.me-south-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "cognito-idp.sa-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "cognito-idp-fips.us-east-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-idp-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-idp.us-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "cognito-idp-fips.us-east-2.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-idp-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-idp.us-east-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "cognito-idp-fips.us-west-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-idp-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-idp.us-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "cognito-idp-fips.us-west-2.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-idp-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-idp.us-west-2.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cognito-sync" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "comprehend" : { - 
"defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "comprehend.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "comprehend.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "comprehend.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "comprehend.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "comprehend.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "comprehend-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "comprehend-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "comprehend.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "comprehend.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "comprehend.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "comprehend.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "comprehend-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "comprehend-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "comprehend-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "comprehend-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "comprehend-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "comprehend-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "comprehend.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "comprehend-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "comprehend-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "comprehend.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "comprehend-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "comprehend-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "comprehend.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "comprehendmedical" : { - "endpoints" : { - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "comprehendmedical-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "comprehendmedical-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : 
"comprehendmedical-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "comprehendmedical-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "comprehendmedical-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "comprehendmedical-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "comprehendmedical-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "comprehendmedical-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "compute-optimizer" : { - "endpoints" : { - "af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "hostname" : "compute-optimizer.af-south-1.amazonaws.com" - }, - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "compute-optimizer.ap-east-1.amazonaws.com" - }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "compute-optimizer.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "compute-optimizer.ap-northeast-2.amazonaws.com" - }, - "ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "compute-optimizer.ap-northeast-3.amazonaws.com" - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "compute-optimizer.ap-south-1.amazonaws.com" - }, - "ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "compute-optimizer.ap-south-2.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "compute-optimizer.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "compute-optimizer.ap-southeast-2.amazonaws.com" - }, - "ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "hostname" : "compute-optimizer.ap-southeast-3.amazonaws.com" - }, - "ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "hostname" : "compute-optimizer.ap-southeast-4.amazonaws.com" - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "compute-optimizer.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "compute-optimizer.eu-central-1.amazonaws.com" - }, - "eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "compute-optimizer.eu-central-2.amazonaws.com" - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "compute-optimizer.eu-north-1.amazonaws.com" - }, - "eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "compute-optimizer.eu-south-1.amazonaws.com" - }, - "eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "compute-optimizer.eu-south-2.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "compute-optimizer.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : 
"compute-optimizer.eu-west-2.amazonaws.com" - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "compute-optimizer.eu-west-3.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "compute-optimizer.il-central-1.amazonaws.com" - }, - "me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "hostname" : "compute-optimizer.me-central-1.amazonaws.com" - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "compute-optimizer.me-south-1.amazonaws.com" - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "compute-optimizer.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "compute-optimizer.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "compute-optimizer.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "compute-optimizer.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "compute-optimizer.us-west-2.amazonaws.com" - } - } - }, - "config" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "config-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "config-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "config-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "config-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "config-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "config-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "config-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "config-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "connect" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "connect-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - 
"credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "connect-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "connect-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "connect-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "connect-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "connect-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "connect-campaigns" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "connect-campaigns-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "connect-campaigns-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "connect-campaigns-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "connect-campaigns-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "contact-lens" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "controltower" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "controltower-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "controltower-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "controltower-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "controltower-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "controltower-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "controltower-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "controltower-fips.us-east-2.amazonaws.com", - 
"tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "controltower-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "controltower-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "controltower-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "controltower-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "controltower-fips.us-west-2.amazonaws.com" - } - } - }, - "cost-optimization-hub" : { - "endpoints" : { - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "cost-optimization-hub.us-east-1.amazonaws.com" - } - } - }, - "cur" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "data-ats.iot" : { - "defaults" : { - "credentialScope" : { - "service" : "iotdata" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "data.iot" : { - "defaults" : { - "credentialScope" : { - "service" : "iotdata" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : 
{ - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-west-2.amazonaws.com" - }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "data.jobs.iot" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "data.mediastore" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "databrew" : { - "endpoints" : { - "af-south-1" 
: { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "databrew-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "databrew-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "databrew-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "databrew-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "databrew-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "databrew-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "databrew-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "databrew-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "dataexchange" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "datapipeline" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-2" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "datasync" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "datasync.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "datasync.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "datasync.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "datasync.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "datasync.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "datasync.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "datasync.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "datasync.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "datasync.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "datasync.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "datasync.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "datasync.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - 
"ap-southeast-7" : { - "variants" : [ { - "hostname" : "datasync.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "datasync-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "datasync-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "datasync.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "datasync.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "datasync.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "datasync.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "datasync.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "datasync.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "datasync.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "datasync.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "datasync.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "datasync.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "datasync.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "datasync.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "datasync.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "datasync-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : 
"datasync.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "datasync-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "datasync-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "datasync-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "datazone" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "datazone.ap-northeast-1.api.aws" - }, - "ap-northeast-2" : { - "hostname" : "datazone.ap-northeast-2.api.aws" - }, - "ap-northeast-3" : { - "hostname" : "datazone.ap-northeast-3.api.aws" - }, - "ap-south-1" : { - "hostname" : "datazone.ap-south-1.api.aws" - }, - "ap-south-2" : { - "hostname" : "datazone.ap-south-2.api.aws" - }, - "ap-southeast-1" : { - "hostname" : "datazone.ap-southeast-1.api.aws" - }, - "ap-southeast-2" : { - "hostname" : "datazone.ap-southeast-2.api.aws" - }, - "ap-southeast-3" : { - "hostname" : "datazone.ap-southeast-3.api.aws" - }, - "ap-southeast-4" : { - "hostname" : "datazone.ap-southeast-4.api.aws" - }, - "ap-southeast-5" : { - "hostname" : "datazone.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "hostname" : "datazone.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "datazone.ca-central-1.api.aws", - "variants" : [ { - "hostname" : "datazone-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "hostname" : "datazone.ca-west-1.api.aws" - }, - "eu-central-1" : { - "hostname" : "datazone.eu-central-1.api.aws" - }, - "eu-central-2" : { }, - "eu-north-1" : { - "hostname" : "datazone.eu-north-1.api.aws" - }, - "eu-south-1" : { - "hostname" : "datazone.eu-south-1.api.aws" - }, - "eu-west-1" : { - "hostname" : "datazone.eu-west-1.api.aws" - }, - "eu-west-2" : { - "hostname" : "datazone.eu-west-2.api.aws" - }, - "eu-west-3" : { - "hostname" : "datazone.eu-west-3.api.aws" - }, - "il-central-1" : { - "hostname" : "datazone.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "datazone.me-central-1.api.aws" - }, - "me-south-1" : { - "hostname" : "datazone.me-south-1.api.aws" - }, - "mx-central-1" : { - "hostname" : "datazone.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "datazone.sa-east-1.api.aws" - }, - "us-east-1" : { - "hostname" : "datazone.us-east-1.api.aws", - "variants" : [ { - "hostname" : "datazone-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "hostname" : "datazone.us-east-2.api.aws", - "variants" : [ { - "hostname" : "datazone-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "hostname" : "datazone.us-west-1.api.aws" - }, - "us-west-2" : { - 
"hostname" : "datazone.us-west-2.api.aws", - "variants" : [ { - "hostname" : "datazone-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "dax" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "devicefarm" : { - "endpoints" : { - "us-west-2" : { } - } - }, - "devops-guru" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "devops-guru-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "devops-guru-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "devops-guru-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "devops-guru-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "devops-guru-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "devops-guru-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "devops-guru-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "devops-guru-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "devops-guru-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "devops-guru-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "directconnect" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "directconnect-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "directconnect-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - 
"hostname" : "directconnect-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "directconnect-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "directconnect-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "directconnect-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "directconnect-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "discovery" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "dlm" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "dlm.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "dlm.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "dlm.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "dlm.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "dlm.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "dlm.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "dlm.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "dlm.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "dlm.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "dlm.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "dlm.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "dlm.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "dlm.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "dlm-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "dlm-fips.ca-west-1.api.aws", - "tags" : [ 
"dualstack", "fips" ] - }, { - "hostname" : "dlm.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "dlm.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "dlm.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "dlm.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "dlm.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "dlm.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "dlm.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "dlm.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "dlm.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "dlm.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "dlm.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "dlm.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "dlm.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "dlm.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "dlm-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "dlm-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "dlm-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "dlm-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "dms" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "dms" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "dms-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "dms-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "dms-fips.us-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - 
"variants" : [ { - "hostname" : "dms-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "dms-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "dms-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "dms-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "dms-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "dms-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "dms-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "dms-fips.us-west-2.amazonaws.com" - } - } - }, - "docdb" : { - "endpoints" : { - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "rds.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "rds.ap-northeast-2.amazonaws.com" - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "rds.ap-south-1.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "rds.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "rds.ap-southeast-2.amazonaws.com" - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "rds.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "rds.eu-central-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "rds.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "rds.eu-west-2.amazonaws.com" - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "rds.eu-west-3.amazonaws.com" - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "rds.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "rds.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "rds.us-east-2.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "rds.us-west-2.amazonaws.com" - } - } - }, - "drs" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : 
"drs-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "drs-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "drs-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "drs-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "drs-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "drs-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "drs-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "drs-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ds" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ds-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "ds-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ds-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ds-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "dynamodb" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, 
- "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "dynamodb-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "dynamodb-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "local" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "localhost:8000", - "protocols" : [ "http" ] - }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "dynamodb-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "dynamodb-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "dynamodb-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "dynamodb-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.us-west-2.amazonaws.com" - } - } - }, - "ebs" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ebs-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "ebs-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ebs-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, 
- "hostname" : "ebs-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ebs-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ebs-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ebs-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ebs-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ebs-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ebs-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ebs-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ebs-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ec2" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "ec2.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "ec2.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "ec2.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "ec2.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "ec2.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "ec2.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "ec2.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ec2-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ec2.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "ec2-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "ec2.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "ec2.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "ec2.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "ec2.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "ec2.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "ec2.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : 
"ca-central-1" - }, - "deprecated" : true, - "hostname" : "ec2-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "ec2-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ec2-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ec2-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ec2-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ec2-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { - "variants" : [ { - "hostname" : "ec2.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "ec2.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ec2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ec2.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ec2-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ec2.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ec2-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ec2.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ec2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ec2.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "ecs" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ecs-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ecs-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ecs-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ecs-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ecs-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ecs-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] 
- } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ecs-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ecs-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "edge.sagemaker" : { - "endpoints" : { - "ap-northeast-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "fips.eks.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "fips.eks.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "fips.eks.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "fips.eks.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "fips.eks.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "fips.eks.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "fips.eks.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "fips.eks.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "fips.eks.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "eks-auth" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "hostname" : "eks-auth.af-south-1.api.aws" - }, - "ap-east-1" : { - "hostname" : "eks-auth.ap-east-1.api.aws" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "eks-auth.ap-northeast-1.api.aws" - }, - "ap-northeast-2" : { - "hostname" : "eks-auth.ap-northeast-2.api.aws" - }, - "ap-northeast-3" : { - "hostname" : "eks-auth.ap-northeast-3.api.aws" - }, - "ap-south-1" : { - "hostname" : "eks-auth.ap-south-1.api.aws" - }, - "ap-south-2" : { - "hostname" : "eks-auth.ap-south-2.api.aws" - }, - "ap-southeast-1" : { - "hostname" : "eks-auth.ap-southeast-1.api.aws" - }, - "ap-southeast-2" : { - "hostname" : "eks-auth.ap-southeast-2.api.aws" - }, - "ap-southeast-3" : { - "hostname" : "eks-auth.ap-southeast-3.api.aws" - }, - "ap-southeast-4" : { - "hostname" : "eks-auth.ap-southeast-4.api.aws" - }, - "ap-southeast-5" : { - "hostname" : 
"eks-auth.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "hostname" : "eks-auth.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "eks-auth.ca-central-1.api.aws" - }, - "ca-west-1" : { - "hostname" : "eks-auth.ca-west-1.api.aws" - }, - "eu-central-1" : { - "hostname" : "eks-auth.eu-central-1.api.aws" - }, - "eu-central-2" : { - "hostname" : "eks-auth.eu-central-2.api.aws" - }, - "eu-north-1" : { - "hostname" : "eks-auth.eu-north-1.api.aws" - }, - "eu-south-1" : { - "hostname" : "eks-auth.eu-south-1.api.aws" - }, - "eu-south-2" : { - "hostname" : "eks-auth.eu-south-2.api.aws" - }, - "eu-west-1" : { - "hostname" : "eks-auth.eu-west-1.api.aws" - }, - "eu-west-2" : { - "hostname" : "eks-auth.eu-west-2.api.aws" - }, - "eu-west-3" : { - "hostname" : "eks-auth.eu-west-3.api.aws" - }, - "il-central-1" : { - "hostname" : "eks-auth.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "eks-auth.me-central-1.api.aws" - }, - "me-south-1" : { - "hostname" : "eks-auth.me-south-1.api.aws" - }, - "mx-central-1" : { - "hostname" : "eks-auth.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "eks-auth.sa-east-1.api.aws" - }, - "us-east-1" : { - "hostname" : "eks-auth.us-east-1.api.aws" - }, - "us-east-2" : { - "hostname" : "eks-auth.us-east-2.api.aws" - }, - "us-west-1" : { - "hostname" : "eks-auth.us-west-1.api.aws" - }, - "us-west-2" : { - "hostname" : "eks-auth.us-west-2.api.aws" - } - } - }, - "elasticache" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "elasticache-fips.us-west-1.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "elasticache-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "elasticache-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "elasticache-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "elasticache-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "elasticache-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "elasticache-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "elasticache-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "elasticache-fips.us-west-2.amazonaws.com" - } - } - }, - 
"elasticbeanstalk" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "elasticbeanstalk-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "elasticbeanstalk-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "elasticbeanstalk-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "elasticbeanstalk-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { }, - "me-south-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk-fips.us-east-1.amazonaws.com", - 
"tags" : [ "fips" ] - }, { - "hostname" : "elasticbeanstalk-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "elasticbeanstalk.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "elasticbeanstalk-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticbeanstalk-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "elasticbeanstalk.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticbeanstalk-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "elasticbeanstalk.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "elasticbeanstalk-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticbeanstalk-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "elasticbeanstalk.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.af-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-northeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-northeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-northeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-southeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-southeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-southeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-southeast-4.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-southeast-5.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ap-southeast-7.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - 
"eu-central-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-central-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-north-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-west-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.af-south-1.amazonaws.com" - }, - "fips-ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-east-1.amazonaws.com" - }, - "fips-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-northeast-1.amazonaws.com" - }, - "fips-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-northeast-2.amazonaws.com" - }, - "fips-ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-northeast-3.amazonaws.com" - }, - "fips-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-south-1.amazonaws.com" - }, - "fips-ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-south-2.amazonaws.com" - }, - "fips-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-southeast-1.amazonaws.com" - }, - "fips-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-southeast-2.amazonaws.com" - }, - "fips-ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-southeast-3.amazonaws.com" - }, - "fips-ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-southeast-4.amazonaws.com" - }, - "fips-ap-southeast-5" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-southeast-5.amazonaws.com" - }, - "fips-ap-southeast-7" : { - "credentialScope" : { - "region" : "ap-southeast-7" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ap-southeast-7.amazonaws.com" - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - 
"deprecated" : true, - "hostname" : "elasticfilesystem-fips.ca-west-1.amazonaws.com" - }, - "fips-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-central-1.amazonaws.com" - }, - "fips-eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-central-2.amazonaws.com" - }, - "fips-eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-north-1.amazonaws.com" - }, - "fips-eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-south-1.amazonaws.com" - }, - "fips-eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-south-2.amazonaws.com" - }, - "fips-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-west-1.amazonaws.com" - }, - "fips-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-west-2.amazonaws.com" - }, - "fips-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-west-3.amazonaws.com" - }, - "fips-il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.il-central-1.amazonaws.com" - }, - "fips-me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.me-central-1.amazonaws.com" - }, - "fips-me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.me-south-1.amazonaws.com" - }, - "fips-mx-central-1" : { - "credentialScope" : { - "region" : "mx-central-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.mx-central-1.amazonaws.com" - }, - "fips-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.sa-east-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.il-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.me-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.me-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.mx-central-1.amazonaws.com", - 
"tags" : [ "fips" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.sa-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticloadbalancing" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "elasticloadbalancing-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "elasticloadbalancing-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "elasticloadbalancing-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "elasticloadbalancing-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "elasticloadbalancing-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "elasticloadbalancing-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "elasticloadbalancing-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "elasticloadbalancing-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticmapreduce" : { - "defaults" : { - "protocols" : [ "https" ], - "sslCommonName" : "{region}.{service}.{dnsSuffix}" - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - 
"eu-central-1" : { - "sslCommonName" : "{service}.{region}.{dnsSuffix}" - }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "sslCommonName" : "{service}.{region}.{dnsSuffix}", - "variants" : [ { - "hostname" : "elasticmapreduce-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "elasticmapreduce-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticmapreduce.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "elasticmapreduce-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "email" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "email-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "email-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "email-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "email-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "email-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "email-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { 
- "hostname" : "email-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "email-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "email-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "email-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "emr-containers" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "emr-containers-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "emr-containers-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "emr-containers-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "emr-containers-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "emr-containers-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "emr-containers-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "emr-containers-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "emr-containers-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "emr-containers-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "emr-containers-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "emr-serverless" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "emr-serverless-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "emr-serverless-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "emr-serverless-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : 
"us-east-2" - }, - "deprecated" : true, - "hostname" : "emr-serverless-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "emr-serverless-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "emr-serverless-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "emr-serverless-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "emr-serverless-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "emr-serverless-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "emr-serverless-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "entitlement.marketplace" : { - "defaults" : { - "credentialScope" : { - "service" : "aws-marketplace" - } - }, - "endpoints" : { - "us-east-1" : { - "variants" : [ { - "hostname" : "entitlement-marketplace.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "es" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "aos.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "aos.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "aos.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "aos.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "aos.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "aos.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "aos.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "aos.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "aos.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "aos.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "aos.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "aos.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "aos.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "aos.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "aos.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "aos.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "aos.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : 
"aos.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "aos.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "aos.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "aos.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "aos.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "aos.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "es-fips.us-west-1.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "aos.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "aos.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "aos.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "aos.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "aos.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "aos.us-east-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "es-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "es-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "aos.us-east-2.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "es-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "es-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "aos.us-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "es-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "es-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "aos.us-west-2.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "es-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "es-fips.us-west-2.amazonaws.com" - } - } - }, - "events" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "events.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "events.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "events.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "events.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "events.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "events.ap-south-1.api.aws", - "tags" : [ 
"dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "events.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "events.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "events.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "events.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "events.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "events.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "events.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "events.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "events.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "events.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "events.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "events.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "events.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "events.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "events.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "events.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "events.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "events-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "events-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "events-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "events-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "events.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "events.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "events.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "events.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "events.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "events-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "events-fips.us-east-1.api.aws", - 
"tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "events.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "events-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "events-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "events.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "events-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "events-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "events.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "events-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "events-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "events.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "evidently" : { - "endpoints" : { - "ap-northeast-1" : { - "hostname" : "evidently.ap-northeast-1.amazonaws.com" - }, - "ap-southeast-1" : { - "hostname" : "evidently.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "hostname" : "evidently.ap-southeast-2.amazonaws.com" - }, - "eu-central-1" : { - "hostname" : "evidently.eu-central-1.amazonaws.com" - }, - "eu-north-1" : { - "hostname" : "evidently.eu-north-1.amazonaws.com" - }, - "eu-west-1" : { - "hostname" : "evidently.eu-west-1.amazonaws.com" - }, - "us-east-1" : { - "hostname" : "evidently.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "hostname" : "evidently.us-east-2.amazonaws.com" - }, - "us-west-2" : { - "hostname" : "evidently.us-west-2.amazonaws.com" - } - } - }, - "finspace" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "finspace-api" : { - "endpoints" : { - "ca-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "firehose" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "firehose.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "firehose.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "firehose.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "firehose.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "firehose.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "firehose.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "firehose.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "firehose.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "firehose.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "firehose.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - 
"hostname" : "firehose.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "firehose.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "firehose.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "firehose.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "firehose.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "firehose.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "firehose.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "firehose.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "firehose.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "firehose.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "firehose.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "firehose.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "firehose-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "firehose-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "firehose-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "firehose-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "firehose.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "firehose.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "firehose.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "firehose.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "firehose.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "firehose-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "firehose-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "firehose.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "firehose-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "firehose-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "firehose.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "firehose-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "firehose-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - 
}, { - "hostname" : "firehose.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "firehose-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "firehose-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "firehose.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "fms" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "fms-fips.af-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "fms-fips.ap-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "fms-fips.ap-northeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "fms-fips.ap-northeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-3" : { }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "fms-fips.ap-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-2" : { }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "fms-fips.ap-southeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "fms-fips.ap-southeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "fms-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "fms-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "fms-fips.eu-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "fms-fips.eu-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-2" : { }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "fms-fips.eu-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "fms-fips.eu-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "fms-fips.eu-west-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.af-south-1.amazonaws.com" - }, - "fips-ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.ap-east-1.amazonaws.com" - }, - "fips-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.ap-northeast-1.amazonaws.com" - }, - "fips-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "deprecated" : true, - "hostname" : "fms-fips.ap-northeast-2.amazonaws.com" - }, - "fips-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.ap-south-1.amazonaws.com" - }, - "fips-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.ap-southeast-1.amazonaws.com" - }, - "fips-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "deprecated" : 
true, - "hostname" : "fms-fips.ap-southeast-2.amazonaws.com" - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.ca-west-1.amazonaws.com" - }, - "fips-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.eu-central-1.amazonaws.com" - }, - "fips-eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.eu-south-1.amazonaws.com" - }, - "fips-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.eu-west-1.amazonaws.com" - }, - "fips-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "deprecated" : true, - "hostname" : "fms-fips.eu-west-2.amazonaws.com" - }, - "fips-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "deprecated" : true, - "hostname" : "fms-fips.eu-west-3.amazonaws.com" - }, - "fips-me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.me-south-1.amazonaws.com" - }, - "fips-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.sa-east-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "fms-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "fms-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { - "variants" : [ { - "hostname" : "fms-fips.me-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "fms-fips.sa-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "fms-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "fms-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "fms-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "fms-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "forecast" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "forecast-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "forecast-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : 
"forecast-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "forecast-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "forecast-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "forecast-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "forecastquery" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "forecastquery-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "forecastquery-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "forecastquery-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "forecastquery-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "forecastquery-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "forecastquery-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "frauddetector" : { - "endpoints" : { - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "fsx" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "fsx-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "fsx-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.ca-west-1.amazonaws.com" - }, - "fips-prod-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.ca-central-1.amazonaws.com" - }, - "fips-prod-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.ca-west-1.amazonaws.com" - }, - "fips-prod-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-east-1.amazonaws.com" - }, - "fips-prod-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-east-2.amazonaws.com" - }, - "fips-prod-us-west-1" : { - "credentialScope" : { - "region" : 
"us-west-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-west-1.amazonaws.com" - }, - "fips-prod-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-west-2.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "prod-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "prod-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "prod-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "prod-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "prod-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "prod-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "fsx-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "fsx-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "fsx-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "fsx-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "gamelift" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "gameliftstreams" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "hostname" : 
"gameliftstreams.af-south-1.api.aws" - }, - "ap-east-1" : { - "hostname" : "gameliftstreams.ap-east-1.api.aws" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "gameliftstreams.ap-northeast-1.api.aws" - }, - "ap-northeast-2" : { - "hostname" : "gameliftstreams.ap-northeast-2.api.aws" - }, - "ap-northeast-3" : { - "hostname" : "gameliftstreams.ap-northeast-3.api.aws" - }, - "ap-south-1" : { - "hostname" : "gameliftstreams.ap-south-1.api.aws" - }, - "ap-south-2" : { - "hostname" : "gameliftstreams.ap-south-2.api.aws" - }, - "ap-southeast-1" : { - "hostname" : "gameliftstreams.ap-southeast-1.api.aws" - }, - "ap-southeast-2" : { - "hostname" : "gameliftstreams.ap-southeast-2.api.aws" - }, - "ap-southeast-3" : { - "hostname" : "gameliftstreams.ap-southeast-3.api.aws" - }, - "ap-southeast-4" : { - "hostname" : "gameliftstreams.ap-southeast-4.api.aws" - }, - "ap-southeast-5" : { - "hostname" : "gameliftstreams.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "hostname" : "gameliftstreams.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "gameliftstreams.ca-central-1.api.aws" - }, - "ca-west-1" : { - "hostname" : "gameliftstreams.ca-west-1.api.aws" - }, - "eu-central-1" : { - "hostname" : "gameliftstreams.eu-central-1.api.aws" - }, - "eu-central-2" : { - "hostname" : "gameliftstreams.eu-central-2.api.aws" - }, - "eu-north-1" : { - "hostname" : "gameliftstreams.eu-north-1.api.aws" - }, - "eu-south-1" : { - "hostname" : "gameliftstreams.eu-south-1.api.aws" - }, - "eu-south-2" : { - "hostname" : "gameliftstreams.eu-south-2.api.aws" - }, - "eu-west-1" : { - "hostname" : "gameliftstreams.eu-west-1.api.aws" - }, - "eu-west-2" : { - "hostname" : "gameliftstreams.eu-west-2.api.aws" - }, - "eu-west-3" : { - "hostname" : "gameliftstreams.eu-west-3.api.aws" - }, - "il-central-1" : { - "hostname" : "gameliftstreams.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "gameliftstreams.me-central-1.api.aws" - }, - "me-south-1" : { - "hostname" : "gameliftstreams.me-south-1.api.aws" - }, - "mx-central-1" : { - "hostname" : "gameliftstreams.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "gameliftstreams.sa-east-1.api.aws" - }, - "us-east-1" : { - "hostname" : "gameliftstreams.us-east-1.api.aws" - }, - "us-east-2" : { - "hostname" : "gameliftstreams.us-east-2.api.aws" - }, - "us-west-1" : { - "hostname" : "gameliftstreams.us-west-1.api.aws" - }, - "us-west-2" : { - "hostname" : "gameliftstreams.us-west-2.api.aws" - } - } - }, - "geo" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "glacier" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "glacier-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : 
"ca-central-1" - }, - "deprecated" : true, - "hostname" : "glacier-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-west-2.amazonaws.com" - }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "glacier-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "glacier-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "glacier-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "glacier-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "globalaccelerator" : { - "endpoints" : { - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "globalaccelerator-fips.us-west-2.amazonaws.com" - } - } - }, - "glue" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "glue.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "glue.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "glue.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "glue.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "glue.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "glue.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "glue.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "glue.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "glue.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "glue.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "glue.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "glue.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "glue.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "glue.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "glue.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "glue.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - 
"hostname" : "glue.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "glue.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "glue.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "glue.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "glue.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "glue.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "glue.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "glue-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "glue-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "glue-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "glue-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "glue.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "glue.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "glue.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "glue.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "glue.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "glue-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "glue-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "glue.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "glue-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "glue-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "glue.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "glue-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "glue-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "glue.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "glue-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "glue-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "glue.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "grafana" : { - "endpoints" : { - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "grafana.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "grafana.ap-northeast-2.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - 
"hostname" : "grafana.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "grafana.ap-southeast-2.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "grafana.eu-central-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "grafana.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "grafana.eu-west-2.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "grafana.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "grafana.us-east-2.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "grafana.us-west-2.amazonaws.com" - } - } - }, - "greengrass" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "greengrass-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "greengrass-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "greengrass-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "greengrass-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "greengrass-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "greengrass-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "greengrass-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "greengrass-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - }, - "isRegionalized" : true - }, - "groundstation" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "groundstation-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "groundstation-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : 
"groundstation-fips.us-west-2.amazonaws.com" - }, - "me-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "groundstation-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "groundstation-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "groundstation-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "groundstation-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "groundstation-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "groundstation-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - } - } - }, - "guardduty" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "guardduty-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "guardduty-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "guardduty-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "guardduty-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "guardduty-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "guardduty-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "guardduty-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "guardduty-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "guardduty-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "guardduty-fips.us-west-2.amazonaws.com" - } - }, - "isRegionalized" : true - }, - "health" : { - "defaults" : { - "protocols" : [ "https" ], - "sslCommonName" : "health.us-east-1.amazonaws.com" - }, - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "global.health.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - 
"deprecated" : true, - "hostname" : "health-fips.us-east-2.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "health-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "healthlake" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-south-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "iam" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "iam.amazonaws.com", - "variants" : [ { - "hostname" : "iam-fips.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "iam.global.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "aws-global-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "iam-fips.amazonaws.com" - }, - "iam" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "iam-fips.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "iam-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "iam-fips.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "identity-chime" : { - "endpoints" : { - "eu-central-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "identity-chime-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "identity-chime-fips.us-east-1.amazonaws.com" - } - } - }, - "identitystore" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "importexport" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1", - "service" : "IngestionService" - }, - "hostname" : "importexport.amazonaws.com", - "signatureVersions" : [ "v2", "v4" ] - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "ingest.timestream" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "ingest-fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ingest.timestream-fips.us-east-1.amazonaws.com" - }, - "ingest-fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ingest.timestream-fips.us-east-2.amazonaws.com" - }, - "ingest-fips-us-west-2" : { - "credentialScope" : 
{ - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ingest.timestream-fips.us-west-2.amazonaws.com" - }, - "ingest-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ingest.timestream-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ingest-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ingest.timestream-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ingest-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ingest.timestream-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "inspector" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "inspector-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "inspector-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "inspector-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "inspector-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "inspector-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "inspector-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "inspector-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "inspector-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "inspector2" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "inspector2-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "inspector2-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "inspector2-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "inspector2-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { 
}, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "inspector2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "inspector2-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "inspector2-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "inspector2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "internetmonitor" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "hostname" : "internetmonitor.af-south-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "hostname" : "internetmonitor.ap-east-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "internetmonitor.ap-northeast-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "hostname" : "internetmonitor.ap-northeast-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "hostname" : "internetmonitor.ap-northeast-3.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "hostname" : "internetmonitor.ap-south-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "hostname" : "internetmonitor.ap-south-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "hostname" : "internetmonitor.ap-southeast-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "hostname" : "internetmonitor.ap-southeast-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "hostname" : "internetmonitor.ap-southeast-3.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "hostname" : "internetmonitor.ap-southeast-4.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "hostname" : "internetmonitor.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "hostname" : "internetmonitor.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "internetmonitor.ca-central-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "internetmonitor-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "internetmonitor.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "hostname" : "internetmonitor.ca-west-1.api.aws" - }, - "eu-central-1" : { - "hostname" 
: "internetmonitor.eu-central-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "hostname" : "internetmonitor.eu-central-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "hostname" : "internetmonitor.eu-north-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "hostname" : "internetmonitor.eu-south-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "hostname" : "internetmonitor.eu-south-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "hostname" : "internetmonitor.eu-west-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "hostname" : "internetmonitor.eu-west-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "hostname" : "internetmonitor.eu-west-3.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "hostname" : "internetmonitor.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "internetmonitor.me-central-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "hostname" : "internetmonitor.me-south-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "hostname" : "internetmonitor.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "internetmonitor.sa-east-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "hostname" : "internetmonitor.us-east-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "internetmonitor-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "internetmonitor.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "hostname" : "internetmonitor.us-east-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "internetmonitor-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "internetmonitor.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "hostname" : "internetmonitor.us-west-1.api.aws", - "variants" : [ { - "hostname" : "internetmonitor-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "internetmonitor-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "internetmonitor.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "hostname" : "internetmonitor.us-west-2.api.aws", - "variants" : [ { - "hostname" : "internetmonitor-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "internetmonitor-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : 
"internetmonitor.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "iot" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "iot-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "deprecated" : true, - "hostname" : "iot-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "deprecated" : true, - "hostname" : "iot-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "deprecated" : true, - "hostname" : "iot-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "deprecated" : true, - "hostname" : "iot-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "deprecated" : true, - "hostname" : "iot-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "iot-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "iot-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "iot-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "iot-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotanalytics" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "iotevents" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "iotevents-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "iotevents-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "iotevents-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "iotevents-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "iotevents-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "iotevents-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "iotevents-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "iotevents-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ioteventsdata" : { - "endpoints" : { - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "data.iotevents.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : 
"data.iotevents.ap-northeast-2.amazonaws.com" - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "data.iotevents.ap-south-1.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "data.iotevents.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "data.iotevents.ap-southeast-2.amazonaws.com" - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "data.iotevents.ca-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "data.iotevents-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "data.iotevents.eu-central-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "data.iotevents.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "data.iotevents.eu-west-2.amazonaws.com" - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "data.iotevents-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "data.iotevents-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "data.iotevents-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "data.iotevents-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "data.iotevents.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "data.iotevents-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "data.iotevents.us-east-2.amazonaws.com", - "variants" : [ { - "hostname" : "data.iotevents-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "data.iotevents.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "data.iotevents-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotfleetwise" : { - "endpoints" : { - "ap-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - } - } - }, - "iotsecuredtunneling" : { - "defaults" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : 
"api.tunneling.iot-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com" - }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotsitewise" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "iotsitewise-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "iotsitewise-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "iotsitewise-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "iotsitewise-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "iotsitewise-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "iotsitewise-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "iotsitewise-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "iotsitewise-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotthingsgraph" : { - "defaults" : { - "credentialScope" : { - "service" : "iotthingsgraph" - } - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-2" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "iottwinmaker" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "api-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "api.iottwinmaker.ap-northeast-1.amazonaws.com" - }, - "api-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "api.iottwinmaker.ap-northeast-2.amazonaws.com" - }, - "api-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - 
"hostname" : "api.iottwinmaker.ap-south-1.amazonaws.com" - }, - "api-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "api.iottwinmaker.ap-southeast-1.amazonaws.com" - }, - "api-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "api.iottwinmaker.ap-southeast-2.amazonaws.com" - }, - "api-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "api.iottwinmaker.eu-central-1.amazonaws.com" - }, - "api-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "api.iottwinmaker.eu-west-1.amazonaws.com" - }, - "api-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.iottwinmaker.us-east-1.amazonaws.com" - }, - "api-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.iottwinmaker.us-west-2.amazonaws.com" - }, - "data-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "data.iottwinmaker.ap-northeast-1.amazonaws.com" - }, - "data-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "data.iottwinmaker.ap-northeast-2.amazonaws.com" - }, - "data-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "data.iottwinmaker.ap-south-1.amazonaws.com" - }, - "data-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "data.iottwinmaker.ap-southeast-1.amazonaws.com" - }, - "data-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "data.iottwinmaker.ap-southeast-2.amazonaws.com" - }, - "data-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "data.iottwinmaker.eu-central-1.amazonaws.com" - }, - "data-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "data.iottwinmaker.eu-west-1.amazonaws.com" - }, - "data-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "data.iottwinmaker.us-east-1.amazonaws.com" - }, - "data-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "data.iottwinmaker.us-west-2.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "fips-api-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.iottwinmaker-fips.us-east-1.amazonaws.com" - }, - "fips-api-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.iottwinmaker-fips.us-west-2.amazonaws.com" - }, - "fips-data-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "data.iottwinmaker-fips.us-east-1.amazonaws.com" - }, - "fips-data-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "data.iottwinmaker-fips.us-west-2.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "iottwinmaker-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "iottwinmaker-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "iottwinmaker-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "iottwinmaker-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotwireless" : { - "endpoints" : { 
- "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "api.iotwireless.ap-northeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "api.iotwireless.ap-southeast-2.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "api.iotwireless.eu-west-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "api.iotwireless.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "api.iotwireless.us-west-2.amazonaws.com" - } - } - }, - "ivs" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "ivschat" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "ivsrealtime" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "kafka" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "kafka-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "kafka-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "kafka-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "kafka-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "kafka-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "kafka-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "kafka-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "kafka-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "kafka-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "kafka-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "kafka-fips.us-west-1.amazonaws.com", - 
"tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "kafka-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kafkaconnect" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "kendra" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "kendra-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "kendra-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "kendra-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "kendra-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "kendra-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "kendra-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "kendra-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "kendra-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kendra-ranking" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "hostname" : "kendra-ranking.af-south-1.api.aws" - }, - "ap-east-1" : { - "hostname" : "kendra-ranking.ap-east-1.api.aws" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "kendra-ranking.ap-northeast-1.api.aws" - }, - "ap-northeast-2" : { - "hostname" : "kendra-ranking.ap-northeast-2.api.aws" - }, - "ap-northeast-3" : { - "hostname" : "kendra-ranking.ap-northeast-3.api.aws" - }, - "ap-south-1" : { - "hostname" : "kendra-ranking.ap-south-1.api.aws" - }, - "ap-south-2" : { - "hostname" : "kendra-ranking.ap-south-2.api.aws" - }, - "ap-southeast-1" : { - "hostname" : "kendra-ranking.ap-southeast-1.api.aws" - }, - "ap-southeast-2" : { - "hostname" : "kendra-ranking.ap-southeast-2.api.aws" - }, - "ap-southeast-3" : { - "hostname" : "kendra-ranking.ap-southeast-3.api.aws" - }, - "ap-southeast-4" : { - "hostname" : "kendra-ranking.ap-southeast-4.api.aws" - }, - "ap-southeast-5" : { - "hostname" : "kendra-ranking.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - 
"ap-southeast-7" : { - "hostname" : "kendra-ranking.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "kendra-ranking.ca-central-1.api.aws", - "variants" : [ { - "hostname" : "kendra-ranking-fips.ca-central-1.api.aws", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "hostname" : "kendra-ranking.ca-west-1.api.aws" - }, - "eu-central-2" : { - "hostname" : "kendra-ranking.eu-central-2.api.aws" - }, - "eu-north-1" : { - "hostname" : "kendra-ranking.eu-north-1.api.aws" - }, - "eu-south-1" : { - "hostname" : "kendra-ranking.eu-south-1.api.aws" - }, - "eu-south-2" : { - "hostname" : "kendra-ranking.eu-south-2.api.aws" - }, - "eu-west-1" : { - "hostname" : "kendra-ranking.eu-west-1.api.aws" - }, - "eu-west-3" : { - "hostname" : "kendra-ranking.eu-west-3.api.aws" - }, - "il-central-1" : { - "hostname" : "kendra-ranking.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "kendra-ranking.me-central-1.api.aws" - }, - "me-south-1" : { - "hostname" : "kendra-ranking.me-south-1.api.aws" - }, - "mx-central-1" : { - "hostname" : "kendra-ranking.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "kendra-ranking.sa-east-1.api.aws" - }, - "us-east-1" : { - "hostname" : "kendra-ranking.us-east-1.api.aws", - "variants" : [ { - "hostname" : "kendra-ranking-fips.us-east-1.api.aws", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "hostname" : "kendra-ranking.us-east-2.api.aws", - "variants" : [ { - "hostname" : "kendra-ranking-fips.us-east-2.api.aws", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "hostname" : "kendra-ranking.us-west-1.api.aws" - }, - "us-west-2" : { - "hostname" : "kendra-ranking.us-west-2.api.aws", - "variants" : [ { - "hostname" : "kendra-ranking-fips.us-west-2.api.aws", - "tags" : [ "fips" ] - } ] - } - } - }, - "kinesis" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "kinesis-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "kinesis-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "kinesis-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "kinesis-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "kinesis-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "kinesis-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "kinesis-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { 
- "variants" : [ { - "hostname" : "kinesis-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kinesisanalytics" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kinesisvideo" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-5" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "kms" : { - "endpoints" : { - "ProdFips" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-central-2.amazonaws.com" - }, - "af-south-1" : { - "variants" : [ { - "hostname" : "kms-fips.af-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "af-south-1-fips" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "deprecated" : true, - 
"hostname" : "kms-fips.af-south-1.amazonaws.com" - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.ap-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-1-fips" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-east-1.amazonaws.com" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "kms-fips.ap-northeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-1-fips" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "kms-fips.ap-northeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-2-fips" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-northeast-2.amazonaws.com" - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "kms-fips.ap-northeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-3-fips" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-northeast-3.amazonaws.com" - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "kms-fips.ap-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-1-fips" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-south-1.amazonaws.com" - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "kms-fips.ap-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-2-fips" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-south-2.amazonaws.com" - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "kms-fips.ap-southeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-1-fips" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "kms-fips.ap-southeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-2-fips" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-southeast-2.amazonaws.com" - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "kms-fips.ap-southeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-3-fips" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-southeast-3.amazonaws.com" - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "kms-fips.ap-southeast-4.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-4-fips" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-southeast-4.amazonaws.com" - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "kms-fips.ap-southeast-5.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-5-fips" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-southeast-5.amazonaws.com" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "kms-fips.ap-southeast-7.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-7-fips" : { - "credentialScope" : { - "region" : 
"ap-southeast-7" - }, - "deprecated" : true, - "hostname" : "kms-fips.ap-southeast-7.amazonaws.com" - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "kms-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "kms-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "kms-fips.eu-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1-fips" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-central-1.amazonaws.com" - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "kms-fips.eu-central-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-2-fips" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-central-2.amazonaws.com" - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "kms-fips.eu-north-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-north-1-fips" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-north-1.amazonaws.com" - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "kms-fips.eu-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-1-fips" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-south-1.amazonaws.com" - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "kms-fips.eu-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-2-fips" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-south-2.amazonaws.com" - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "kms-fips.eu-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-1-fips" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "kms-fips.eu-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-2-fips" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-west-2.amazonaws.com" - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "kms-fips.eu-west-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-3-fips" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-west-3.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "kms-fips.il-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "il-central-1-fips" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.il-central-1.amazonaws.com" - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "kms-fips.me-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-central-1-fips" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.me-central-1.amazonaws.com" - }, - "me-south-1" : { - "variants" : [ { - 
"hostname" : "kms-fips.me-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-south-1-fips" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.me-south-1.amazonaws.com" - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "kms-fips.mx-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "mx-central-1-fips" : { - "credentialScope" : { - "region" : "mx-central-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.mx-central-1.amazonaws.com" - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.sa-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "sa-east-1-fips" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "kms-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "kms-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-west-2.amazonaws.com" - } - } - }, - "lakeformation" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "lakeformation.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "lakeformation.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "lakeformation.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "lakeformation.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "lakeformation.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "lakeformation.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "lakeformation.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "lakeformation.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "lakeformation.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "lakeformation.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "lakeformation.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "lakeformation.ap-southeast-5.api.aws", - "tags" : [ "dualstack" 
] - } ] - }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "lakeformation.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "lakeformation.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "lakeformation.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "lakeformation.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "lakeformation.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "lakeformation.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "lakeformation.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "lakeformation.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "lakeformation.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "lakeformation.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "lakeformation.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "lakeformation-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "lakeformation-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "lakeformation-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "lakeformation-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "lakeformation.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "lakeformation.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "lakeformation.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "lakeformation.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "lakeformation.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "lakeformation-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lakeformation-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "lakeformation.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "lakeformation-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lakeformation-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "lakeformation.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "lakeformation-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lakeformation-fips.us-west-1.api.aws", - "tags" : [ 
"dualstack", "fips" ] - }, { - "hostname" : "lakeformation.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "lakeformation-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lakeformation-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "lakeformation.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "lambda" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "lambda.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "lambda.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "lambda.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "lambda.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "lambda.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "lambda.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "lambda.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "lambda.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "lambda.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "lambda.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "lambda.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "lambda.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "lambda.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "lambda.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "lambda.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "lambda.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "lambda.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "lambda.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "lambda.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "lambda.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "lambda.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "lambda.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "lambda.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "lambda-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : 
{ - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "lambda-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "lambda-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "lambda-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "lambda.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "lambda.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "lambda.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "lambda.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "lambda.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "lambda-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lambda.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "lambda-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lambda.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "lambda-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lambda.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "lambda-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lambda.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "license-manager" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "license-manager-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "license-manager-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "license-manager-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "license-manager-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "license-manager-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "license-manager-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - 
"hostname" : "license-manager-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "license-manager-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "license-manager-linux-subscriptions" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "license-manager-linux-subscriptions-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "license-manager-linux-subscriptions-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "license-manager-linux-subscriptions-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "license-manager-linux-subscriptions-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "license-manager-linux-subscriptions-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "license-manager-linux-subscriptions-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "license-manager-linux-subscriptions-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "license-manager-linux-subscriptions-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "license-manager-user-subscriptions" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "license-manager-user-subscriptions-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "license-manager-user-subscriptions-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "license-manager-user-subscriptions-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - 
"credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "license-manager-user-subscriptions-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "license-manager-user-subscriptions-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "license-manager-user-subscriptions-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "license-manager-user-subscriptions-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "license-manager-user-subscriptions-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "lightsail" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "logs" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "logs.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "logs.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "logs.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "logs.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "logs.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "logs.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "logs.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "logs.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "logs.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "logs.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "logs.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "logs-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "logs-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "logs.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "logs.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "logs.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - 
"variants" : [ { - "hostname" : "logs.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "logs.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "logs.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "logs.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "logs.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "logs-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "logs-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "logs-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "logs-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "logs-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "logs-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "logs.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "logs.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "logs.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "logs.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "logs-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "logs-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "logs-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "logs-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "lookoutequipment" : { - "endpoints" : { - "ap-northeast-2" : { }, - "eu-west-1" : { }, - "us-east-1" : { } - } - }, - "m2" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "deprecated" : true - }, - "fips-us-east-1" : { - "deprecated" : true - }, - "fips-us-east-2" : { - "deprecated" : true - }, - "fips-us-west-1" : { - "deprecated" : true - 
}, - "fips-us-west-2" : { - "deprecated" : true - }, - "il-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - } - } - }, - "machinelearning" : { - "endpoints" : { - "eu-west-1" : { }, - "us-east-1" : { } - } - }, - "macie2" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "macie2.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "macie2.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "macie2.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "macie2.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "macie2.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "macie2.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "macie2.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "macie2.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "macie2.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "macie2.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "macie2.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "macie2.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "macie2.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "macie2.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "macie2.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "macie2-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "macie2-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "macie2-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "macie2-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "macie2.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "macie2.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "macie2.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "macie2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "macie2-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { 
- "hostname" : "macie2.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "macie2-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "macie2-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "macie2.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "macie2-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "macie2-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "macie2.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "macie2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "macie2-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "macie2.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "managedblockchain" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { } - } - }, - "managedblockchain-query" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "marketplacecommerceanalytics" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "media-pipelines-chime" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "media-pipelines-chime-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "media-pipelines-chime-fips.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "media-pipelines-chime-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "media-pipelines-chime-fips.us-west-2.amazonaws.com" - } - } - }, - "mediaconnect" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "mediaconvert" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "mediaconvert.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-southeast-1.api.aws", - "tags" : [ 
"dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "mediaconvert.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "mediaconvert-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "mediaconvert-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "mediaconvert.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "mediaconvert.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "mediaconvert.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "mediaconvert.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "mediaconvert.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "mediaconvert.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "mediaconvert-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "mediaconvert-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "mediaconvert-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "mediaconvert-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "mediaconvert-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "mediaconvert.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "mediaconvert.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "mediaconvert-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "mediaconvert-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "mediaconvert.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "mediaconvert-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "mediaconvert-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "mediaconvert.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "mediaconvert-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "mediaconvert-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "mediaconvert.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "mediaconvert-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "mediaconvert-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "mediaconvert.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } 
] - } - } - }, - "medialive" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "medialive-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "medialive-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "medialive-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "medialive-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "medialive-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "medialive-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "mediapackage" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "mediapackage-vod" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "mediapackagev2" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "mediapackagev2-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "mediapackagev2-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "mediapackagev2-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "mediapackagev2-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "mediapackagev2-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : 
"mediapackagev2-fips.us-west-2.amazonaws.com" - }, - "me-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "mediapackagev2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "mediapackagev2-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "mediapackagev2-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "mediapackagev2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "mediastore" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "meetings-chime" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "meetings-chime-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "meetings-chime-fips.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "il-central-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "meetings-chime-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "meetings-chime-fips.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "meetings-chime-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "meetings-chime-fips.us-west-2.amazonaws.com" - } - } - }, - "memory-db" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "memory-db-fips.us-west-1.amazonaws.com" - }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "messaging-chime" : { - "endpoints" : { - "eu-central-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "messaging-chime-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "messaging-chime-fips.us-east-1.amazonaws.com" - } - } - }, - "metering.marketplace" : { - "defaults" : { - "credentialScope" : { - "service" : "aws-marketplace" - } - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-northeast-1.api.aws", - "tags" : [ 
"dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "metering-marketplace.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "metering-marketplace.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "metering-marketplace.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - 
"ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "metrics-fips.sagemaker.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "metrics-fips.sagemaker.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "metrics-fips.sagemaker.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "metrics-fips.sagemaker.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "metrics-fips.sagemaker.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "metrics-fips.sagemaker.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "metrics-fips.sagemaker.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "metrics-fips.sagemaker.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "metrics-fips.sagemaker.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "metrics-fips.sagemaker.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "metrics-fips.sagemaker.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "metrics-fips.sagemaker.us-west-2.amazonaws.com" - } - } - }, - "mgh" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "mgn" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "mgn-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "mgn-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - 
"region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "mgn-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "mgn-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "mgn-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "mgn-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "mgn-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "mgn-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "migrationhub-orchestrator" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "migrationhub-strategy" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "mobileanalytics" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "models-v2-lex" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "models.lex" : { - "defaults" : { - "credentialScope" : { - "service" : "lex" - }, - "variants" : [ { - "hostname" : "models-fips.lex.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "models-fips.lex.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "models-fips.lex.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "models-fips.lex.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "models-fips.lex.us-west-2.amazonaws.com" - } - } - }, - "monitoring" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "monitoring-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - 
"hostname" : "monitoring-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "monitoring-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "monitoring-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "monitoring-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "monitoring-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "monitoring-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "monitoring-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "mq" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "mq-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "mq-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "mq-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "mq-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "mq-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "mq-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "mq-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "mq-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "mturk-requester" : { - "endpoints" : { - "sandbox" : { - "hostname" : "mturk-requester-sandbox.us-east-1.amazonaws.com" - }, - "us-east-1" : { } - }, - "isRegionalized" : false - }, - "neptune" : { - "endpoints" : { - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "rds.ap-east-1.amazonaws.com" - }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "rds.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "rds.ap-northeast-2.amazonaws.com" - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - 
"hostname" : "rds.ap-south-1.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "rds.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "rds.ap-southeast-2.amazonaws.com" - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "rds.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "rds.eu-central-1.amazonaws.com" - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "rds.eu-north-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "rds.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "rds.eu-west-2.amazonaws.com" - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "rds.eu-west-3.amazonaws.com" - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "rds.me-south-1.amazonaws.com" - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "rds.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "rds.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "rds.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "rds.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "rds.us-west-2.amazonaws.com" - } - } - }, - "network-firewall" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "network-firewall-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - 
"variants" : [ { - "hostname" : "network-firewall-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "network-firewall-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "network-firewall-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "network-firewall-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "networkmanager" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "networkmanager.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "networkmanager-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "networkmanager-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "networkmanager.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-aws-global" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "networkmanager-fips.us-west-2.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "notifications" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "hostname" : "notifications.af-south-1.api.aws" - }, - "ap-east-1" : { - "hostname" : "notifications.ap-east-1.api.aws" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "notifications.ap-northeast-1.api.aws" - }, - "ap-northeast-2" : { - "hostname" : "notifications.ap-northeast-2.api.aws" - }, - "ap-northeast-3" : { - "hostname" : "notifications.ap-northeast-3.api.aws" - }, - "ap-south-1" : { - "hostname" : "notifications.ap-south-1.api.aws" - }, - "ap-south-2" : { - "hostname" : "notifications.ap-south-2.api.aws" - }, - "ap-southeast-1" : { - "hostname" : "notifications.ap-southeast-1.api.aws" - }, - "ap-southeast-2" : { - "hostname" : "notifications.ap-southeast-2.api.aws" - }, - "ap-southeast-3" : { - "hostname" : "notifications.ap-southeast-3.api.aws" - }, - "ap-southeast-4" : { - "hostname" : "notifications.ap-southeast-4.api.aws" - }, - "ap-southeast-5" : { - "hostname" : "notifications.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "hostname" : "notifications.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "notifications.ca-central-1.api.aws" - }, - "ca-west-1" : { - "hostname" : "notifications.ca-west-1.api.aws" - }, - "eu-central-1" : { - "hostname" : "notifications.eu-central-1.api.aws" - }, - "eu-central-2" : { - "hostname" : "notifications.eu-central-2.api.aws" - }, - "eu-north-1" : { - "hostname" : "notifications.eu-north-1.api.aws" - }, - "eu-south-1" : { - "hostname" : "notifications.eu-south-1.api.aws" - }, - "eu-south-2" : { - "hostname" : "notifications.eu-south-2.api.aws" - }, - "eu-west-1" : { - "hostname" : "notifications.eu-west-1.api.aws" - }, - "eu-west-2" : { - "hostname" : "notifications.eu-west-2.api.aws" - }, - "eu-west-3" : { - "hostname" : "notifications.eu-west-3.api.aws" - }, - "il-central-1" : { - "hostname" : "notifications.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "notifications.me-central-1.api.aws" - }, - "me-south-1" : { - "hostname" : "notifications.me-south-1.api.aws" - }, - "mx-central-1" : { - "hostname" : 
"notifications.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "notifications.sa-east-1.api.aws" - }, - "us-east-1" : { - "hostname" : "notifications.us-east-1.api.aws" - }, - "us-east-2" : { - "hostname" : "notifications.us-east-2.api.aws" - }, - "us-west-1" : { - "hostname" : "notifications.us-west-1.api.aws" - }, - "us-west-2" : { - "hostname" : "notifications.us-west-2.api.aws" - } - } - }, - "notifications-contacts" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "notifications-contacts.us-east-1.api.aws" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "nova-act" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "oam" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "oidc" : { - "endpoints" : { - "af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "hostname" : "oidc.af-south-1.amazonaws.com" - }, - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "oidc.ap-east-1.amazonaws.com" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "oidc.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "oidc.ap-northeast-2.amazonaws.com" - }, - "ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "oidc.ap-northeast-3.amazonaws.com" - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "oidc.ap-south-1.amazonaws.com" - }, - "ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "oidc.ap-south-2.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "oidc.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "oidc.ap-southeast-2.amazonaws.com" - }, - "ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "hostname" : "oidc.ap-southeast-3.amazonaws.com" - }, - "ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "hostname" : "oidc.ap-southeast-4.amazonaws.com" - }, - "ap-southeast-5" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "hostname" : "oidc.ap-southeast-5.amazonaws.com" - }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "oidc.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "hostname" : "oidc.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" 
: "eu-central-1" - }, - "hostname" : "oidc.eu-central-1.amazonaws.com" - }, - "eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "oidc.eu-central-2.amazonaws.com" - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "oidc.eu-north-1.amazonaws.com" - }, - "eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "oidc.eu-south-1.amazonaws.com" - }, - "eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "oidc.eu-south-2.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "oidc.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "oidc.eu-west-2.amazonaws.com" - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "oidc.eu-west-3.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "oidc.il-central-1.amazonaws.com" - }, - "me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "hostname" : "oidc.me-central-1.amazonaws.com" - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "oidc.me-south-1.amazonaws.com" - }, - "mx-central-1" : { }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "oidc.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "oidc.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "oidc.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "oidc.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "oidc.us-west-2.amazonaws.com" - } - } - }, - "omics" : { - "endpoints" : { - "ap-northeast-2" : { }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "omics.ap-southeast-1.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "omics.eu-central-1.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "omics.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "omics.eu-west-2.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "omics-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "omics-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "omics.il-central-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "omics.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "omics-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "omics.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "omics-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "organizations" : { - "endpoints" : { - "aws-global" : { - "credentialScope" 
: { - "region" : "us-east-1" - }, - "hostname" : "organizations.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "organizations-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "organizations-fips.us-east-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "osis" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "outposts" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "outposts-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "outposts-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "outposts-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "outposts-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "outposts-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "outposts-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "outposts-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "outposts-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "outposts-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "outposts-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "participant.connect" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "participant.connect-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "participant.connect-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "participant.connect-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - 
"us-west-2" : { - "variants" : [ { - "hostname" : "participant.connect-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "partnercentral-channel" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "personalize" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "pi" : { - "endpoints" : { - "af-south-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "protocols" : [ "https" ], - "variants" : [ { - 
"hostname" : "pi.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "pi-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "pi-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "pi-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "pi-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "pi-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "pi-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : 
"pi-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "pinpoint" : { - "defaults" : { - "credentialScope" : { - "service" : "mobiletargeting" - } - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "pinpoint.ca-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "pinpoint-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "pinpoint-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "pinpoint-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "pinpoint-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "pinpoint-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "pinpoint.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "pinpoint-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "pinpoint.us-east-2.amazonaws.com", - "variants" : [ { - "hostname" : "pinpoint-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "pinpoint.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "pinpoint-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "pipes" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "polly" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "polly.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "polly.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "polly.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "polly.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "polly.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "polly.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "polly.ap-southeast-1.api.aws", - 
"tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "polly.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "polly.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "polly-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "polly-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "polly.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "polly.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "polly.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "polly.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "polly.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "polly.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "polly.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "polly-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "polly-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "polly-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "polly-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "polly-fips.us-west-2.amazonaws.com" - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "polly.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "polly.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "polly-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "polly-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "polly.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "polly-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "polly-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "polly.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "polly-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "polly-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "polly.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "polly-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "polly-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "polly.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "portal.sso" : { - "endpoints" : { - 
"af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "hostname" : "portal.sso.af-south-1.amazonaws.com" - }, - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "portal.sso.ap-east-1.amazonaws.com" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "portal.sso.ap-northeast-1.amazonaws.com" - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "portal.sso.ap-northeast-2.amazonaws.com" - }, - "ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "portal.sso.ap-northeast-3.amazonaws.com" - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "portal.sso.ap-south-1.amazonaws.com" - }, - "ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "portal.sso.ap-south-2.amazonaws.com" - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "portal.sso.ap-southeast-1.amazonaws.com" - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "portal.sso.ap-southeast-2.amazonaws.com" - }, - "ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "hostname" : "portal.sso.ap-southeast-3.amazonaws.com" - }, - "ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "hostname" : "portal.sso.ap-southeast-4.amazonaws.com" - }, - "ap-southeast-5" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "hostname" : "portal.sso.ap-southeast-5.amazonaws.com" - }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "portal.sso.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "hostname" : "portal.sso.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "portal.sso.eu-central-1.amazonaws.com" - }, - "eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "portal.sso.eu-central-2.amazonaws.com" - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "portal.sso.eu-north-1.amazonaws.com" - }, - "eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "portal.sso.eu-south-1.amazonaws.com" - }, - "eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "portal.sso.eu-south-2.amazonaws.com" - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "portal.sso.eu-west-1.amazonaws.com" - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "portal.sso.eu-west-2.amazonaws.com" - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "portal.sso.eu-west-3.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "portal.sso.il-central-1.amazonaws.com" - }, - "me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "hostname" : "portal.sso.me-central-1.amazonaws.com" - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "portal.sso.me-south-1.amazonaws.com" - }, - "mx-central-1" : { }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - 
"hostname" : "portal.sso.sa-east-1.amazonaws.com" - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "portal.sso.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "portal.sso.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "portal.sso.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "portal.sso.us-west-2.amazonaws.com" - } - } - }, - "profile" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "profile-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "profile-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "profile-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "profile-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "profile-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "profile-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "proton" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "qbusiness" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "hostname" : "qbusiness.af-south-1.api.aws" - }, - "ap-east-1" : { - "hostname" : "qbusiness.ap-east-1.api.aws" - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "hostname" : "qbusiness.ap-northeast-1.api.aws" - }, - "ap-northeast-2" : { - "hostname" : "qbusiness.ap-northeast-2.api.aws" - }, - "ap-northeast-3" : { - "hostname" : "qbusiness.ap-northeast-3.api.aws" - }, - "ap-south-1" : { - "hostname" : "qbusiness.ap-south-1.api.aws" - }, - "ap-south-2" : { - "hostname" : "qbusiness.ap-south-2.api.aws" - }, - "ap-southeast-1" : { - "hostname" : "qbusiness.ap-southeast-1.api.aws" - }, - "ap-southeast-2" : { - "hostname" : "qbusiness.ap-southeast-2.api.aws" - }, - "ap-southeast-3" : { - "hostname" : "qbusiness.ap-southeast-3.api.aws" - }, - "ap-southeast-4" : { - "hostname" : "qbusiness.ap-southeast-4.api.aws" - }, - "ap-southeast-5" : { - "hostname" : "qbusiness.ap-southeast-5.api.aws" - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "hostname" : "qbusiness.ap-southeast-7.api.aws" - }, - "ca-central-1" : { - "hostname" : "qbusiness.ca-central-1.api.aws" - }, - "ca-west-1" : { - "hostname" : "qbusiness.ca-west-1.api.aws" - }, - "eu-central-1" : { - "hostname" : "qbusiness.eu-central-1.api.aws" - }, - "eu-central-2" : { - "hostname" : "qbusiness.eu-central-2.api.aws" - }, - "eu-north-1" : { - "hostname" : "qbusiness.eu-north-1.api.aws" - 
}, - "eu-south-1" : { - "hostname" : "qbusiness.eu-south-1.api.aws" - }, - "eu-south-2" : { - "hostname" : "qbusiness.eu-south-2.api.aws" - }, - "eu-west-1" : { - "hostname" : "qbusiness.eu-west-1.api.aws" - }, - "eu-west-2" : { - "hostname" : "qbusiness.eu-west-2.api.aws" - }, - "eu-west-3" : { - "hostname" : "qbusiness.eu-west-3.api.aws" - }, - "il-central-1" : { - "hostname" : "qbusiness.il-central-1.api.aws" - }, - "me-central-1" : { - "hostname" : "qbusiness.me-central-1.api.aws" - }, - "me-south-1" : { - "hostname" : "qbusiness.me-south-1.api.aws" - }, - "mx-central-1" : { - "hostname" : "qbusiness.mx-central-1.api.aws" - }, - "sa-east-1" : { - "hostname" : "qbusiness.sa-east-1.api.aws" - }, - "us-east-1" : { - "hostname" : "qbusiness.us-east-1.api.aws" - }, - "us-east-2" : { - "hostname" : "qbusiness.us-east-2.api.aws" - }, - "us-west-1" : { - "hostname" : "qbusiness.us-west-1.api.aws" - }, - "us-west-2" : { - "hostname" : "qbusiness.us-west-2.api.aws" - } - } - }, - "query.timestream" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "quicksight" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "ram" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ram-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ram-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : 
[ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ram-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "ram-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ram-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ram-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ram-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ram-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ram-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ram-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ram-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ram-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - } - } - }, - "rbin" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "rbin.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "rbin.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "rbin.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "rbin.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "rbin.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "rbin.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : 
"rbin.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "rbin.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "rbin.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "rbin.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "rbin.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "rbin.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "rbin-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "rbin-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "rbin.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "rbin.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "rbin.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "rbin.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "rbin.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "rbin.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "rbin.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "rbin.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "rbin.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "rbin.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "rbin.me-south-1.api.aws", - "tags" : 
[ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "rbin.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "rbin-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "rbin-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "rds" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "rds-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "rds-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "rds-fips.ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.ca-central-1.amazonaws.com" - }, - "rds-fips.ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.ca-west-1.amazonaws.com" - }, - "rds-fips.us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-east-1.amazonaws.com" - }, - "rds-fips.us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-east-2.amazonaws.com" - }, - "rds-fips.us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-west-1.amazonaws.com" - }, - "rds-fips.us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-west-2.amazonaws.com" - }, - "rds.ca-central-1" : { - 
"credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rds-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rds.ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rds-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rds.us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rds-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rds.us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rds-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rds.us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rds-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rds.us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rds-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "sa-east-1" : { }, - "us-east-1" : { - "sslCommonName" : "{service}.{dnsSuffix}", - "variants" : [ { - "hostname" : "rds-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "rds-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "rds-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "rds-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rds-fips.us-west-2.amazonaws.com" - } - } - }, - "rds-data" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rds-data-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rds-data-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rds-data-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rds-data-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "rds-data-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "rds-data-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { 
- "variants" : [ { - "hostname" : "rds-data-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "rds-data-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "redshift" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "redshift-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "redshift-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "redshift-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "redshift-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "redshift-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "redshift-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "redshift-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "redshift-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "redshift-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "redshift-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "redshift-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "redshift-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "redshift-serverless" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.ca-central-1.amazonaws.com" - }, - 
"fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "rekognition" : { - "endpoints" : { - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "rekognition.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "rekognition.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "rekognition.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "rekognition.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "rekognition.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "rekognition-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rekognition-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rekognition.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "rekognition.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "rekognition.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "rekognition.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "rekognition.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "rekognition.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "rekognition-fips.ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.ca-central-1.amazonaws.com" - }, - "rekognition-fips.us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-east-1.amazonaws.com" - }, - "rekognition-fips.us-east-2" : { - 
"credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-east-2.amazonaws.com" - }, - "rekognition-fips.us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-west-1.amazonaws.com" - }, - "rekognition-fips.us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-west-2.amazonaws.com" - }, - "rekognition.ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rekognition-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rekognition.us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rekognition-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rekognition.us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rekognition-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rekognition.us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rekognition-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "rekognition.us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rekognition-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "rekognition-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rekognition-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rekognition.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "rekognition-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rekognition-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rekognition.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "rekognition-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rekognition-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rekognition.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "rekognition-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rekognition-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rekognition.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-west-2.amazonaws.com" - } - } - }, - "resiliencehub" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - 
"hostname" : "resiliencehub.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "resiliencehub.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "resiliencehub.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "resiliencehub.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "resiliencehub.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "resiliencehub.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "resiliencehub.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "resiliencehub.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "resiliencehub.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "resiliencehub.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "resiliencehub.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "resiliencehub.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "resiliencehub.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "resiliencehub.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "resiliencehub.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "resiliencehub.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "resiliencehub.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "resiliencehub.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "resiliencehub.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "resiliencehub.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "resource-explorer-2" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "resource-explorer-2-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resource-explorer-2-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "resource-explorer-2-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resource-explorer-2-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - 
"eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "resource-explorer-2-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "resource-explorer-2-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "resource-explorer-2-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "resource-explorer-2-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "resource-explorer-2-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "resource-explorer-2-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "resource-explorer-2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resource-explorer-2-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "resource-explorer-2-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resource-explorer-2-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "resource-explorer-2-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resource-explorer-2-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "resource-explorer-2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resource-explorer-2-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - } - } - }, - "resource-groups" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "resource-groups-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "resource-groups-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "resource-groups-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "resource-groups-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - 
"mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "resource-groups-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "resource-groups-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "resource-groups-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "resource-groups-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "rolesanywhere" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "rolesanywhere-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "rolesanywhere-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "rolesanywhere-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "rolesanywhere-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "rolesanywhere-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "rolesanywhere-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "rolesanywhere-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "rolesanywhere-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "route53" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "route53.amazonaws.com", - "variants" : [ { - "hostname" : "route53-fips.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "route53-fips.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "route53-recovery-control-config" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "route53-recovery-control-config.us-west-2.amazonaws.com" - } - } - }, - "route53domains" : { - "endpoints" : { - "us-east-1" : { } - } - }, - "route53profiles" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "route53profiles.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "route53profiles.ap-east-1.api.aws", - "tags" : [ 
"dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "route53profiles.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "route53profiles.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "route53profiles.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "route53profiles.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "route53profiles.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "route53profiles.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "route53profiles.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "route53profiles.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "route53profiles.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "route53profiles-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "route53profiles-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "route53profiles.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "route53profiles.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "route53profiles.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "route53profiles.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "route53profiles.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "route53profiles.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "route53profiles.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "route53profiles.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "route53profiles.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "route53profiles.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "route53profiles.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "route53profiles.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "route53profiles-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } 
] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "route53profiles-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "route53profiles-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "route53profiles-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "route53resolver" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "route53resolver.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "route53resolver.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "route53resolver.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "route53resolver.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "route53resolver.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "route53resolver.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "route53resolver.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "route53resolver.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "route53resolver.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "route53resolver.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "route53resolver.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "route53resolver.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "route53resolver.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "route53resolver-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53resolver.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "route53resolver-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "route53resolver-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53resolver.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : 
"route53resolver-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "route53resolver.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "route53resolver.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "route53resolver.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "route53resolver.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "route53resolver.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "route53resolver.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "route53resolver.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "route53resolver.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "route53resolver.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "route53resolver.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "route53resolver.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "route53resolver.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "route53resolver.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "route53resolver-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53resolver.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "route53resolver-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "route53resolver-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53resolver.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "route53resolver-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "route53resolver-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53resolver.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "route53resolver-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "route53resolver-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53resolver.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2-fips" : { - 
"credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "route53resolver-fips.us-west-2.amazonaws.com" - } - } - }, - "rum" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "runtime-v2-lex" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "runtime.lex" : { - "defaults" : { - "credentialScope" : { - "service" : "lex" - }, - "variants" : [ { - "hostname" : "runtime-fips.lex.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "runtime-fips.lex.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "runtime-fips.lex.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "runtime-fips.lex.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "runtime-fips.lex.us-west-2.amazonaws.com" - } - } - }, - "runtime.sagemaker" : { - "defaults" : { - "variants" : [ { - "hostname" : "runtime-fips.sagemaker.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "runtime-fips.sagemaker.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "runtime-fips.sagemaker.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "runtime-fips.sagemaker.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, 
[Elided: a large block of deleted lines from a bundled AWS SDK endpoint-metadata JSON file (endpoints.json-style). The removed content enumerates per-region endpoints, with FIPS and dualstack hostname variants and deprecated *-fips aliases, for services including route53resolver, rum, runtime-v2-lex, runtime.lex, runtime.sagemaker, s3, s3-control, s3-outposts, sagemaker-geospatial, savingsplans, scheduler, schemas, sdb, secretsmanager, securityhub, securitylake, serverlessrepo, servicecatalog, servicecatalog-appregistry, servicediscovery, servicequotas, shield, signer, simspaceweaver, sms-voice, and snowball; the hunk continues beyond this excerpt.]
"credentialScope" : { - "region" : "eu-south-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.eu-south-1.amazonaws.com" - }, - "fips-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.eu-west-1.amazonaws.com" - }, - "fips-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "deprecated" : true, - "hostname" : "snowball-fips.eu-west-2.amazonaws.com" - }, - "fips-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "deprecated" : true, - "hostname" : "snowball-fips.eu-west-3.amazonaws.com" - }, - "fips-il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.il-central-1.amazonaws.com" - }, - "fips-me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.me-central-1.amazonaws.com" - }, - "fips-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.sa-east-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "snowball-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "snowball-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "snowball-fips.il-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.il-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "snowball-fips.me-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.me-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "snowball-fips.sa-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.sa-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "snowball-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "snowball-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "snowball-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } 
] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "snowball-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "sns" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "sns.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "sns.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "sns.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "sns.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "sns.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "sns.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "sns.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "sns.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "sns.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "sns.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "sns.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "sns.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "sns.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "sns.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "sns-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sns.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "sns.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "sns.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "sns.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "sns.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "sns.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "sns.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "sns.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "sns.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "sns-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - 
"hostname" : "sns-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "sns-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "sns-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "sns-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "sns.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "sns.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "sns.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "sns.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "sns.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "sns-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sns.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "sns-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sns.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "sns-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sns.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "sns-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sns.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "sqs" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "sslCommonName" : "{region}.queue.{dnsSuffix}" - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "sqs.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "sqs.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "sqs.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "sqs.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "sqs.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "sqs.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "sqs.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "sqs.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "sqs.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "sqs.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "sqs.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "sqs.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - 
"ap-southeast-7" : { - "variants" : [ { - "hostname" : "sqs.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "sqs-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sqs.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "sqs-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sqs.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "sqs.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "sqs.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "sqs.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "sqs.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "sqs.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "sqs.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "sqs.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "sqs.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "sqs-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "sqs-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "sqs-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "sqs-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "sqs-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "sqs-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "sqs.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "sqs.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "sqs.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "sqs.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "sqs.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "sslCommonName" : "queue.{dnsSuffix}", - "variants" : [ { - "hostname" : "sqs-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sqs.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "sqs-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sqs.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "sqs-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" 
: "sqs.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "sqs-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sqs.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "ssm" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ssm-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "ssm-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ssm-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "ssm-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ssm-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ssm-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ssm-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ssm-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ssm-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ssm-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ssm-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ssm-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ssm-contacts" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ssm-contacts-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ssm-contacts-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ssm-contacts-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : 
"ssm-contacts-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ssm-contacts-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ssm-contacts-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ssm-contacts-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ssm-contacts-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ssm-incidents" : { - "endpoints" : { - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "ssm-incidents.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "ssm-incidents.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ssm-incidents-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-incidents-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-incidents.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "ssm-incidents.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "ssm-incidents.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ssm-incidents-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ssm-incidents-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ssm-incidents-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ssm-incidents-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ssm-incidents-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "ssm-incidents.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ssm-incidents-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-incidents-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-incidents.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - 
"variants" : [ { - "hostname" : "ssm-incidents-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-incidents-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-incidents.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ssm-incidents-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-incidents-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-incidents.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ssm-incidents-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-incidents-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-incidents.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "ssm-quicksetup" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ssm-quicksetup-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ssm-quicksetup-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ssm-quicksetup-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ssm-quicksetup-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ssm-quicksetup-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ssm-quicksetup-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ssm-quicksetup-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ssm-quicksetup-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ssm-quicksetup-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ssm-quicksetup-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ssm-sap" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "ssm-sap.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { 
- "hostname" : "ssm-sap.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "ssm-sap.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "ssm-sap-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-sap-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-sap.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "ssm-sap.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "ssm-sap-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "ssm-sap-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "ssm-sap-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "ssm-sap-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "ssm-sap-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "ssm-sap.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "ssm-sap.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "ssm-sap.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "ssm-sap.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "ssm-sap-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-sap-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-sap.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "ssm-sap-fips.us-east-2.amazonaws.com", 
- "tags" : [ "fips" ] - }, { - "hostname" : "ssm-sap-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-sap.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "ssm-sap-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-sap-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-sap.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "ssm-sap-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ssm-sap-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ssm-sap.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "sso" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "states" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "states-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "states-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "states-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "states-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "states-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "states-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "states-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "states-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : 
"states-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "states-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "states-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "states-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "storagegateway" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "storagegateway-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.ca-central-1.amazonaws.com" - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "storagegateway-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1-fips" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.ca-west-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "storagegateway-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "storagegateway-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "storagegateway-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "storagegateway-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-west-2.amazonaws.com" - } - } - }, - "streams.dynamodb" : { - "defaults" : { - "credentialScope" : { - "service" : "dynamodb" - }, - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { 
}, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "local" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "localhost:8000", - "protocols" : [ "http" ] - }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "sts" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "sts.amazonaws.com" - }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "sts-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "sts-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "sts-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "sts-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "sts-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "sts-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "sts-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "sts-fips.us-west-2.amazonaws.com" - } - }, - "partitionEndpoint" : "aws-global" - }, - "support" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "support.us-east-1.amazonaws.com" - } - }, - "partitionEndpoint" : "aws-global" - }, - "supportapp" : { - "endpoints" : { - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "swf" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "swf-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "swf-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - 
"eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "swf-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "swf-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "swf-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "swf-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "synthetics" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "synthetics.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "synthetics.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "synthetics.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "synthetics.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-3" : { - "variants" : [ { - "hostname" : "synthetics.ap-northeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "synthetics.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-2" : { - "variants" : [ { - "hostname" : "synthetics.ap-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "synthetics.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "synthetics.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-3" : { - "variants" : [ { - "hostname" : "synthetics.ap-southeast-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-4" : { - "variants" : [ { - "hostname" : "synthetics.ap-southeast-4.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-5" : { - "variants" : [ { - "hostname" : "synthetics.ap-southeast-5.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "variants" : [ { - "hostname" : "synthetics.ap-southeast-7.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "synthetics-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : 
"synthetics-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "synthetics-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "synthetics-fips.ca-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.ca-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "synthetics.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-2" : { - "variants" : [ { - "hostname" : "synthetics.eu-central-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "synthetics.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-1" : { - "variants" : [ { - "hostname" : "synthetics.eu-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "synthetics.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "synthetics.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "synthetics.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "synthetics.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "variants" : [ { - "hostname" : "synthetics.il-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-central-1" : { - "variants" : [ { - "hostname" : "synthetics.me-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "synthetics.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "mx-central-1" : { - "variants" : [ { - "hostname" : "synthetics.mx-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "synthetics.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "synthetics-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "synthetics-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "synthetics-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : 
"synthetics-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "synthetics-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "synthetics-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "synthetics-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "synthetics-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "tagging" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "tax" : { - "endpoints" : { - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "tax.us-east-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "textract" : { - "endpoints" : { - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "textract.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "textract.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "textract.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "textract.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "textract-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "textract.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-south-2" : { - "variants" : [ { - "hostname" : "textract.eu-south-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "textract.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "textract.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "textract.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "textract-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - 
"deprecated" : true, - "hostname" : "textract-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "textract-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "textract-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "textract-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "textract-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "textract-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "textract-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "textract-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "thinclient" : { - "endpoints" : { - "ap-south-1" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "tnb" : { - "endpoints" : { - "ap-northeast-2" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-south-2" : { }, - "eu-west-3" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "transcribe" : { - "defaults" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "fips.transcribe.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "transcribe.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-east-1" : { - "variants" : [ { - "hostname" : "transcribe.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "transcribe.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "transcribe.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "transcribe.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "transcribe.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "transcribe.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "fips.transcribe.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : 
"transcribe.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "transcribe.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "transcribe.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "transcribe.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "transcribe.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "transcribe.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-west-2.amazonaws.com" - }, - "me-south-1" : { - "variants" : [ { - "hostname" : "transcribe.me-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "transcribe.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribe.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribe.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribe.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribe.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transcribestreaming" : { - "endpoints" : { - "af-south-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.af-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "transcribestreaming.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.ap-south-1.api.aws", - "tags" : [ 
"dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "transcribestreaming.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "transcribestreaming-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribestreaming-fips.ca-central-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribestreaming.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "transcribestreaming.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "transcribestreaming-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "transcribestreaming-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "transcribestreaming-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "transcribestreaming-fips.us-west-2.amazonaws.com" - }, - "sa-east-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.sa-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "transcribestreaming-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribestreaming-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribestreaming.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "transcribestreaming-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribestreaming-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribestreaming.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "transcribestreaming-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribestreaming-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribestreaming.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transfer" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "transfer-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "transfer-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, 
- "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "transfer-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "transfer-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "transfer-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "transfer-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "transfer-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "transfer-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "transfer-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "transfer-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "transfer-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "transfer-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "translate" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "ap-east-1" : { - "variants" : [ { - "hostname" : "translate.ap-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-1" : { - "variants" : [ { - "hostname" : "translate.ap-northeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-northeast-2" : { - "variants" : [ { - "hostname" : "translate.ap-northeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-south-1" : { - "variants" : [ { - "hostname" : "translate.ap-south-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-1" : { - "variants" : [ { - "hostname" : "translate.ap-southeast-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ap-southeast-2" : { - "variants" : [ { - "hostname" : "translate.ap-southeast-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "translate.ca-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-central-1" : { - "variants" : [ { - "hostname" : "translate.eu-central-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-north-1" : { - "variants" : [ { - "hostname" : "translate.eu-north-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-1" : { - "variants" : [ { - "hostname" : "translate.eu-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-2" : { - "variants" : [ { - "hostname" : "translate.eu-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "eu-west-3" : { - "variants" : [ { - "hostname" : "translate.eu-west-3.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "translate-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "translate-fips.us-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - 
}, { - "hostname" : "translate.us-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "translate-fips.us-east-1.amazonaws.com" - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "translate-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "translate-fips.us-east-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "translate.us-east-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-east-2-fips" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "translate-fips.us-east-2.amazonaws.com" - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "translate-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "translate-fips.us-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "translate.us-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-1-fips" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "translate-fips.us-west-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "translate-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "translate-fips.us-west-2.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "translate.us-west-2.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "translate-fips.us-west-2.amazonaws.com" - } - } - }, - "trustedadvisor" : { - "endpoints" : { - "ap-northeast-2" : { }, - "ap-southeast-2" : { }, - "eu-west-1" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "trustedadvisor-fips.us-east-1.api.aws" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "trustedadvisor-fips.us-east-2.api.aws" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "trustedadvisor-fips.us-west-2.api.aws" - }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, - "verifiedpermissions" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.ca-west-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : 
true, - "hostname" : "verifiedpermissions-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "voice-chime" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "voice-chime-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1-fips" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "voice-chime-fips.ca-central-1.amazonaws.com" - }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "voice-chime-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "voice-chime-fips.us-east-1.amazonaws.com" - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "voice-chime-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2-fips" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "voice-chime-fips.us-west-2.amazonaws.com" - } - } - }, - "voiceid" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "hostname" : "voiceid-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "voiceid-fips.ca-central-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "voiceid-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "voiceid-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "voiceid-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "voiceid-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "vpc-lattice" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : 
{ }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "waf" : { - "endpoints" : { - "aws" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "waf-fips.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "aws-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "waf-fips.amazonaws.com" - }, - "aws-global" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "waf.amazonaws.com", - "variants" : [ { - "hostname" : "waf-fips.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "aws-global-fips" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "waf-fips.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-global" - }, - "waf-regional" : { - "endpoints" : { - "af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "hostname" : "waf-regional.af-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.af-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "waf-regional.ap-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "waf-regional.ap-northeast-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-northeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "waf-regional.ap-northeast-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-northeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "waf-regional.ap-northeast-3.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-northeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "waf-regional.ap-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "waf-regional.ap-south-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "waf-regional.ap-southeast-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-southeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "waf-regional.ap-southeast-2.amazonaws.com", - "variants" : [ { - "hostname" : 
"waf-regional-fips.ap-southeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "hostname" : "waf-regional.ap-southeast-3.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-southeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "hostname" : "waf-regional.ap-southeast-4.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ap-southeast-4.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "waf-regional.ca-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "waf-regional.eu-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "waf-regional.eu-central-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-central-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "waf-regional.eu-north-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-north-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "waf-regional.eu-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : "waf-regional.eu-south-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "waf-regional.eu-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "waf-regional.eu-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "waf-regional.eu-west-3.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.eu-west-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.af-south-1.amazonaws.com" - }, - "fips-ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-east-1.amazonaws.com" - }, - "fips-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-northeast-1.amazonaws.com" - }, - "fips-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-northeast-2.amazonaws.com" - }, - "fips-ap-northeast-3" : { - "credentialScope" : { - "region" : 
"ap-northeast-3" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-northeast-3.amazonaws.com" - }, - "fips-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-south-1.amazonaws.com" - }, - "fips-ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-south-2.amazonaws.com" - }, - "fips-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-southeast-1.amazonaws.com" - }, - "fips-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-southeast-2.amazonaws.com" - }, - "fips-ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-southeast-3.amazonaws.com" - }, - "fips-ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ap-southeast-4.amazonaws.com" - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.ca-central-1.amazonaws.com" - }, - "fips-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-central-1.amazonaws.com" - }, - "fips-eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-central-2.amazonaws.com" - }, - "fips-eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-north-1.amazonaws.com" - }, - "fips-eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-south-1.amazonaws.com" - }, - "fips-eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-south-2.amazonaws.com" - }, - "fips-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-west-1.amazonaws.com" - }, - "fips-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-west-2.amazonaws.com" - }, - "fips-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.eu-west-3.amazonaws.com" - }, - "fips-il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.il-central-1.amazonaws.com" - }, - "fips-me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.me-central-1.amazonaws.com" - }, - "fips-me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.me-south-1.amazonaws.com" - }, - "fips-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.sa-east-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - 
"credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "waf-regional.il-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.il-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "hostname" : "waf-regional.me-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.me-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "waf-regional.me-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.me-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : "waf-regional.sa-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.sa-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "waf-regional.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "waf-regional.us-east-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "waf-regional.us-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "waf-regional.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "wafv2" : { - "endpoints" : { - "af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "hostname" : "wafv2.af-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.af-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "hostname" : "wafv2.ap-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-east-2" : { }, - "ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "hostname" : "wafv2.ap-northeast-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-northeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "hostname" : "wafv2.ap-northeast-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-northeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "hostname" : "wafv2.ap-northeast-3.amazonaws.com", - 
"variants" : [ { - "hostname" : "wafv2-fips.ap-northeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "hostname" : "wafv2.ap-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "hostname" : "wafv2.ap-south-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "hostname" : "wafv2.ap-southeast-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-southeast-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "hostname" : "wafv2.ap-southeast-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-southeast-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "hostname" : "wafv2.ap-southeast-3.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-southeast-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "hostname" : "wafv2.ap-southeast-4.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-southeast-4.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-5" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "hostname" : "wafv2.ap-southeast-5.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-southeast-5.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { - "credentialScope" : { - "region" : "ap-southeast-7" - }, - "hostname" : "wafv2.ap-southeast-7.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ap-southeast-7.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "hostname" : "wafv2.ca-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ca-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "hostname" : "wafv2.ca-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.ca-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "hostname" : "wafv2.eu-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-central-2" : { - "credentialScope" : { - "region" : "eu-central-2" - }, - "hostname" : "wafv2.eu-central-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-central-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "hostname" : "wafv2.eu-north-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-north-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "hostname" : "wafv2.eu-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "hostname" : 
"wafv2.eu-south-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-south-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "hostname" : "wafv2.eu-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "hostname" : "wafv2.eu-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "hostname" : "wafv2.eu-west-3.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.eu-west-3.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-af-south-1" : { - "credentialScope" : { - "region" : "af-south-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.af-south-1.amazonaws.com" - }, - "fips-ap-east-1" : { - "credentialScope" : { - "region" : "ap-east-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-east-1.amazonaws.com" - }, - "fips-ap-northeast-1" : { - "credentialScope" : { - "region" : "ap-northeast-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-northeast-1.amazonaws.com" - }, - "fips-ap-northeast-2" : { - "credentialScope" : { - "region" : "ap-northeast-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-northeast-2.amazonaws.com" - }, - "fips-ap-northeast-3" : { - "credentialScope" : { - "region" : "ap-northeast-3" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-northeast-3.amazonaws.com" - }, - "fips-ap-south-1" : { - "credentialScope" : { - "region" : "ap-south-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-south-1.amazonaws.com" - }, - "fips-ap-south-2" : { - "credentialScope" : { - "region" : "ap-south-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-south-2.amazonaws.com" - }, - "fips-ap-southeast-1" : { - "credentialScope" : { - "region" : "ap-southeast-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-southeast-1.amazonaws.com" - }, - "fips-ap-southeast-2" : { - "credentialScope" : { - "region" : "ap-southeast-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-southeast-2.amazonaws.com" - }, - "fips-ap-southeast-3" : { - "credentialScope" : { - "region" : "ap-southeast-3" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-southeast-3.amazonaws.com" - }, - "fips-ap-southeast-4" : { - "credentialScope" : { - "region" : "ap-southeast-4" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-southeast-4.amazonaws.com" - }, - "fips-ap-southeast-5" : { - "credentialScope" : { - "region" : "ap-southeast-5" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-southeast-5.amazonaws.com" - }, - "fips-ap-southeast-7" : { - "credentialScope" : { - "region" : "ap-southeast-7" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ap-southeast-7.amazonaws.com" - }, - "fips-ca-central-1" : { - "credentialScope" : { - "region" : "ca-central-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ca-central-1.amazonaws.com" - }, - "fips-ca-west-1" : { - "credentialScope" : { - "region" : "ca-west-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.ca-west-1.amazonaws.com" - }, - "fips-eu-central-1" : { - "credentialScope" : { - "region" : "eu-central-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-central-1.amazonaws.com" - }, - "fips-eu-central-2" : { - "credentialScope" : { - "region" : 
"eu-central-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-central-2.amazonaws.com" - }, - "fips-eu-north-1" : { - "credentialScope" : { - "region" : "eu-north-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-north-1.amazonaws.com" - }, - "fips-eu-south-1" : { - "credentialScope" : { - "region" : "eu-south-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-south-1.amazonaws.com" - }, - "fips-eu-south-2" : { - "credentialScope" : { - "region" : "eu-south-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-south-2.amazonaws.com" - }, - "fips-eu-west-1" : { - "credentialScope" : { - "region" : "eu-west-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-west-1.amazonaws.com" - }, - "fips-eu-west-2" : { - "credentialScope" : { - "region" : "eu-west-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-west-2.amazonaws.com" - }, - "fips-eu-west-3" : { - "credentialScope" : { - "region" : "eu-west-3" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.eu-west-3.amazonaws.com" - }, - "fips-il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.il-central-1.amazonaws.com" - }, - "fips-me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.me-central-1.amazonaws.com" - }, - "fips-me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.me-south-1.amazonaws.com" - }, - "fips-mx-central-1" : { - "credentialScope" : { - "region" : "mx-central-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.mx-central-1.amazonaws.com" - }, - "fips-sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.sa-east-1.amazonaws.com" - }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { - "credentialScope" : { - "region" : "il-central-1" - }, - "hostname" : "wafv2.il-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.il-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-central-1" : { - "credentialScope" : { - "region" : "me-central-1" - }, - "hostname" : "wafv2.me-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.me-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "me-south-1" : { - "credentialScope" : { - "region" : "me-south-1" - }, - "hostname" : "wafv2.me-south-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.me-south-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "mx-central-1" : { - "credentialScope" : { - "region" : "mx-central-1" - }, - "hostname" : "wafv2.mx-central-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.mx-central-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "sa-east-1" : { - "credentialScope" : { - "region" : "sa-east-1" - }, - "hostname" : 
"wafv2.sa-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.sa-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "hostname" : "wafv2.us-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "hostname" : "wafv2.us-east-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "hostname" : "wafv2.us-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "hostname" : "wafv2.us-west-2.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "wellarchitected" : { - "endpoints" : { - "ap-east-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "me-south-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } - } - }, - "wisdom" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "eu-central-1" : { }, - "eu-west-2" : { }, - "fips-ca-central-1" : { - "deprecated" : true - }, - "fips-us-east-1" : { - "deprecated" : true - }, - "fips-us-west-2" : { - "deprecated" : true - }, - "ui-ap-northeast-1" : { }, - "ui-ap-northeast-2" : { }, - "ui-ap-southeast-1" : { }, - "ui-ap-southeast-2" : { }, - "ui-ca-central-1" : { }, - "ui-eu-central-1" : { }, - "ui-eu-west-2" : { }, - "ui-us-east-1" : { }, - "ui-us-west-2" : { }, - "us-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - } - } - }, - "workdocs" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-west-1" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "workdocs-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "workdocs-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "workdocs-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "workdocs-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "workmail" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "eu-west-1" : { }, - "us-east-1" : { }, - "us-west-2" : { } - } - }, - "workspaces" : { - "endpoints" : { - "af-south-1" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - 
"deprecated" : true, - "hostname" : "workspaces-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "workspaces-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "workspaces-web" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-south-1" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ca-central-1" : { }, - "eu-central-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "workspaces-web-fips.us-east-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "workspaces-web-fips.us-west-2.amazonaws.com" - }, - "us-east-1" : { - "variants" : [ { - "hostname" : "workspaces-web-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "workspaces-web-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "xray" : { - "endpoints" : { - "af-south-1" : { }, - "ap-east-1" : { }, - "ap-east-2" : { }, - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-northeast-3" : { }, - "ap-south-1" : { }, - "ap-south-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "ap-southeast-3" : { }, - "ap-southeast-4" : { }, - "ap-southeast-5" : { }, - "ap-southeast-6" : { }, - "ap-southeast-7" : { }, - "ca-central-1" : { }, - "ca-west-1" : { }, - "eu-central-1" : { }, - "eu-central-2" : { }, - "eu-north-1" : { }, - "eu-south-1" : { }, - "eu-south-2" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "eu-west-3" : { }, - "fips-us-east-1" : { - "credentialScope" : { - "region" : "us-east-1" - }, - "deprecated" : true, - "hostname" : "xray-fips.us-east-1.amazonaws.com" - }, - "fips-us-east-2" : { - "credentialScope" : { - "region" : "us-east-2" - }, - "deprecated" : true, - "hostname" : "xray-fips.us-east-2.amazonaws.com" - }, - "fips-us-west-1" : { - "credentialScope" : { - "region" : "us-west-1" - }, - "deprecated" : true, - "hostname" : "xray-fips.us-west-1.amazonaws.com" - }, - "fips-us-west-2" : { - "credentialScope" : { - "region" : "us-west-2" - }, - "deprecated" : true, - "hostname" : "xray-fips.us-west-2.amazonaws.com" - }, - "il-central-1" : { }, - "me-central-1" : { }, - "me-south-1" : { }, - "mx-central-1" : { }, - "sa-east-1" : { }, - "us-east-1" : { - "variants" : [ { - "hostname" : "xray-fips.us-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-east-2" : { - "variants" : [ { - "hostname" : "xray-fips.us-east-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-1" : { - "variants" : [ { - "hostname" : "xray-fips.us-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-west-2" : { - "variants" : [ { - "hostname" : "xray-fips.us-west-2.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - }, { - "dnsSuffix" : 
"api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "dnsSuffix" : "amazonaws.com.cn", - "partition" : "aws-cn", - "partitionName" : "AWS China", - "regionRegex" : "^cn\\-\\w+\\-\\d+$", - "regions" : { - "cn-north-1" : { - "description" : "China (Beijing)" - }, - "cn-northwest-1" : { - "description" : "China (Ningxia)" - } - }, - "services" : { - "access-analyzer" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "access-analyzer.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "access-analyzer.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "account" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "account.cn-northwest-1.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "acm" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "acm-pca" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "airflow" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "api.ecr" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "api.ecr.cn-north-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "ecr.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "api.ecr.cn-northwest-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "ecr.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "api.pricing" : { - "defaults" : { - "credentialScope" : { - "service" : "pricing" - } - }, - "endpoints" : { - "cn-northwest-1" : { } - } - }, - "api.sagemaker" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "api.tunneling.iot" : { - "defaults" : { - "variants" : [ { - "dnsSuffix" : "amazonaws.com.cn", - "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - }, { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "api.iot-tunneling-fips.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "api.iot-tunneling.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "apigateway" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - 
"applicationinsights" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "applicationinsights.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "applicationinsights.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "appmesh" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "appmesh.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "appmesh.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "appsync" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "appsync.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "appsync.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "athena" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "athena.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "athena.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "autoscaling-plans" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "backup" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "batch" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "budgets" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "budgets.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "cassandra" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "ce" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "ce.cn-northwest-1.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "cloudcontrolapi" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cloudformation" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "cloudfront" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "cloudfront.cn-northwest-1.amazonaws.com.cn", - "protocols" : [ "http", "https" ] - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "cloudtrail" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "codebuild" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "codecommit" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - 
}, - "codedeploy" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "codepipeline" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "cognito-identity" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "cognito-identity.cn-north-1.amazonaws.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "compute-optimizer" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "compute-optimizer.cn-north-1.amazonaws.com.cn" - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "compute-optimizer.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "config" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "cur" : { - "endpoints" : { - "cn-northwest-1" : { } - } - }, - "data-ats.iot" : { - "defaults" : { - "credentialScope" : { - "service" : "iotdata" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "data.ats.iot.cn-north-1.amazonaws.com.cn", - "protocols" : [ "https" ] - }, - "cn-northwest-1" : { } - } - }, - "data.iot" : { - "defaults" : { - "credentialScope" : { - "service" : "iotdata" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "data.jobs.iot" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "databrew" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "datasync" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "datasync.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "datasync.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "datazone" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "datazone.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "datazone.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "dax" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "directconnect" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "dlm" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "dlm.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "dlm.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "dms" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "docdb" : { - "endpoints" : { - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "rds.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "ds" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "dynamodb" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "ebs" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "ec2" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - 
"cn-northwest-1" : { } - } - }, - "ecs" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "eks-auth" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "eks-auth.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "eks-auth.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "elasticache" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "elasticbeanstalk" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "elasticbeanstalk.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.cn-north-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.cn-northwest-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "fips-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.cn-north-1.amazonaws.com.cn" - }, - "fips-cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "elasticloadbalancing" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "elasticmapreduce" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "emr-containers" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "emr-serverless" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "entitlement.marketplace" : { - "endpoints" : { - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "entitlement-marketplace.cn-northwest-1.amazonaws.com.cn", - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "entitlement-marketplace.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "es" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "aos.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "aos.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "events" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "events.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - 
"hostname" : "events.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "firehose" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "firehose.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "firehose.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "fms" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "fsx" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "gamelift" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "gameliftstreams" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "gameliftstreams.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "gameliftstreams.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "glacier" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "glue" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "glue.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "glue.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "greengrass" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { } - }, - "isRegionalized" : true - }, - "guardduty" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - }, - "isRegionalized" : true - }, - "health" : { - "defaults" : { - "protocols" : [ "https" ], - "sslCommonName" : "health.cn-northwest-1.amazonaws.com.cn" - }, - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "global.health.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "iam" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "iam.cn-north-1.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "identitystore" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "inspector2" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "internetmonitor" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "internetmonitor.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "internetmonitor.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "iot" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "iotanalytics" : { - "endpoints" : { - "cn-north-1" : { } - } - }, - "iotevents" : { - "endpoints" : { - "cn-north-1" : { } - } - }, - "ioteventsdata" : { - "endpoints" : { - "cn-north-1" : { 
- "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "data.iotevents.cn-north-1.amazonaws.com.cn" - } - } - }, - "iotsecuredtunneling" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "iotsitewise" : { - "endpoints" : { - "cn-north-1" : { } - } - }, - "iottwinmaker" : { - "endpoints" : { - "api-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "api.iottwinmaker.cn-north-1.amazonaws.com.cn" - }, - "cn-north-1" : { }, - "data-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "data.iottwinmaker.cn-north-1.amazonaws.com.cn" - } - } - }, - "kafka" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "kafkaconnect" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "kendra-ranking" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "kendra-ranking.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "kendra-ranking.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "kinesis" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "kinesisanalytics" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "kinesisvideo" : { - "endpoints" : { - "cn-north-1" : { } - } - }, - "kms" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "lakeformation" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "lakeformation.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "lakeformation.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "lambda" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "lambda.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "lambda.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "license-manager" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "license-manager-linux-subscriptions" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "logs" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "logs.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "logs.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "mediaconvert" : { - "endpoints" : { - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "mediaconvert.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "memory-db" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "metering.marketplace" : { - "defaults" : { - "credentialScope" : { - "service" : "aws-marketplace" - } - }, - "endpoints" : { - "cn-northwest-1" : { } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "monitoring" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - 
"cn-northwest-1" : { } - } - }, - "mq" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "neptune" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "rds.cn-north-1.amazonaws.com.cn" - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "rds.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "network-firewall" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "notifications" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "notifications.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "notifications.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "oam" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "oidc" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "oidc.cn-north-1.amazonaws.com.cn" - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "oidc.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "organizations" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "organizations.cn-northwest-1.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "personalize" : { - "endpoints" : { - "cn-north-1" : { } - } - }, - "pi" : { - "endpoints" : { - "cn-north-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "pipes" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "polly" : { - "endpoints" : { - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "polly.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "portal.sso" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "portal.sso.cn-north-1.amazonaws.com.cn" - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "portal.sso.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "qbusiness" : { - "defaults" : { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "variants" : [ { - "dnsSuffix" : "api.amazonwebservices.com.cn", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "hostname" : "qbusiness.cn-north-1.api.amazonwebservices.com.cn" - }, - "cn-northwest-1" : { - "hostname" : "qbusiness.cn-northwest-1.api.amazonwebservices.com.cn" - } - } - }, - "quicksight" : { - "endpoints" : { - "cn-north-1" : { } - } - }, - "ram" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - } - } - }, - "rbin" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "rbin.cn-north-1.api.amazonwebservices.com.cn", - "tags" 
: [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "rbin.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "rds" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "redshift" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "redshift-serverless" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "resource-groups" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "rolesanywhere" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "route53" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "route53.amazonaws.com.cn" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-cn-global" - }, - "route53profiles" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "route53resolver" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "route53resolver.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "route53resolver.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "runtime.sagemaker" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "s3" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.com.cn", - "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "s3.dualstack.cn-north-1.amazonaws.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "s3.dualstack.cn-northwest-1.amazonaws.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "s3-control" : { - "defaults" : { - "protocols" : [ "https" ], - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.com.cn", - "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "s3-control.cn-north-1.amazonaws.com.cn", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control.dualstack.cn-north-1.amazonaws.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "s3-control.cn-northwest-1.amazonaws.com.cn", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control.dualstack.cn-northwest-1.amazonaws.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "savingsplans" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "savingsplans.cn-north-1.amazonaws.com.cn" - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "savingsplans.cn-northwest-1.amazonaws.com.cn" - } - }, - "isRegionalized" : true - }, - "scheduler" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "schemas" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ 
{ - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - } ] - } - } - }, - "securityhub" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "serverlessrepo" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { - "protocols" : [ "https" ] - }, - "cn-northwest-1" : { - "protocols" : [ "https" ] - } - } - }, - "servicecatalog" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "servicediscovery" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "servicediscovery.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "servicediscovery.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "servicequotas" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "signer" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { }, - "verification-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "verification.signer.cn-north-1.amazonaws.com.cn" - }, - "verification-cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "verification.signer.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "snowball" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "snowball-fips.cn-north-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "snowball-fips.cn-northwest-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "fips-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.cn-north-1.amazonaws.com.cn" - }, - "fips-cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "sns" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "sns.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "sns.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "sqs" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "sslCommonName" : "{region}.queue.{dnsSuffix}" - }, - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "sqs.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "sqs.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "ssm" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "sso" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "states" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "states.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "states.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "storagegateway" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "streams.dynamodb" : { - "defaults" : 
{ - "credentialScope" : { - "service" : "dynamodb" - }, - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "sts" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "support" : { - "endpoints" : { - "aws-cn-global" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "support.cn-north-1.amazonaws.com.cn" - } - }, - "partitionEndpoint" : "aws-cn-global" - }, - "swf" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "synthetics" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "synthetics.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "synthetics.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "tagging" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "transcribe" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "cn.transcribe.cn-north-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "transcribe.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "cn.transcribe.cn-northwest-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "transcribe.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transcribestreaming" : { - "endpoints" : { - "cn-north-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.cn-north-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - }, - "cn-northwest-1" : { - "variants" : [ { - "hostname" : "transcribestreaming.cn-northwest-1.api.amazonwebservices.com.cn", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transfer" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "verifiedpermissions" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - }, - "waf-regional" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "waf-regional.cn-north-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "waf-regional-fips.cn-north-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "waf-regional.cn-northwest-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "waf-regional-fips.cn-northwest-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "fips-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.cn-north-1.amazonaws.com.cn" - }, - "fips-cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "wafv2" : { - "endpoints" : { - "cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "hostname" : "wafv2.cn-north-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : "wafv2-fips.cn-north-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "hostname" : "wafv2.cn-northwest-1.amazonaws.com.cn", - "variants" : [ { - "hostname" : 
"wafv2-fips.cn-northwest-1.amazonaws.com.cn", - "tags" : [ "fips" ] - } ] - }, - "fips-cn-north-1" : { - "credentialScope" : { - "region" : "cn-north-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.cn-north-1.amazonaws.com.cn" - }, - "fips-cn-northwest-1" : { - "credentialScope" : { - "region" : "cn-northwest-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.cn-northwest-1.amazonaws.com.cn" - } - } - }, - "workspaces" : { - "endpoints" : { - "cn-northwest-1" : { } - } - }, - "xray" : { - "endpoints" : { - "cn-north-1" : { }, - "cn-northwest-1" : { } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.com", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - }, { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "api.aws", - "hostname" : "{service}.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "dnsSuffix" : "amazonaws.com", - "partition" : "aws-us-gov", - "partitionName" : "AWS GovCloud (US)", - "regionRegex" : "^us\\-gov\\-\\w+\\-\\d+$", - "regions" : { - "us-gov-east-1" : { - "description" : "AWS GovCloud (US-East)" - }, - "us-gov-west-1" : { - "description" : "AWS GovCloud (US-West)" - } - }, - "services" : { - "access-analyzer" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "access-analyzer.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "access-analyzer.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "access-analyzer.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "access-analyzer.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "access-analyzer.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "access-analyzer.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "access-analyzer.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "access-analyzer.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "access-analyzer.us-gov-west-1.amazonaws.com" - } - } - }, - "acm" : { - "defaults" : { - "variants" : [ { - "hostname" : "acm.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "acm.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "acm.us-gov-west-1.amazonaws.com" - } - } - }, - "acm-pca" : { - "defaults" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "acm-pca.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "acm-pca.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, 
- "deprecated" : true, - "hostname" : "acm-pca.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "acm-pca.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "acm-pca.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "aoss" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "api.detective" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "api.detective-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "api.detective-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "detective-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "detective.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "api.detective-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "api.ecr" : { - "defaults" : { - "variants" : [ { - "hostname" : "ecr-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "dkr-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "dkr-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-dkr-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com" - }, - "fips-dkr-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com" - }, - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "api.ecr.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ecr-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "ecr.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "api.ecr.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ecr-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : 
"ecr.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "api.sagemaker" : { - "defaults" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "api-fips.sagemaker.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "api-fips.sagemaker.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1-fips-secondary" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "api.sagemaker.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1-secondary" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "api.sagemaker.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "api.tunneling.iot" : { - "defaults" : { - "variants" : [ { - "dnsSuffix" : "amazonaws.com", - "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - }, { - "dnsSuffix" : "api.aws", - "hostname" : "api.iot-tunneling-fips.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "api.aws", - "hostname" : "api.iot-tunneling.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "api.iot-tunneling-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "api.iot-tunneling.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "apigateway" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "appconfig.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "appconfig.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "appconfig.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "appconfig.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "appconfigdata" : { - 
"endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "appconfigdata.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "appconfigdata.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "appconfigdata.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "appconfigdata.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "application-autoscaling.us-gov-east-1.amazonaws.com", - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "application-autoscaling.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "deprecated" : true, - "hostname" : "application-autoscaling.us-gov-east-1.amazonaws.com", - "protocols" : [ "http", "https" ] - }, - "us-gov-west-1" : { - "hostname" : "application-autoscaling.us-gov-west-1.amazonaws.com", - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "application-autoscaling.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "deprecated" : true, - "hostname" : "application-autoscaling.us-gov-west-1.amazonaws.com", - "protocols" : [ "http", "https" ] - } - } - }, - "applicationinsights" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "applicationinsights-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "applicationinsights-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "applicationinsights-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "applicationinsights.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "appstream2" : { - "defaults" : { - "credentialScope" : { - "service" : "appstream" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "appstream2-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "appstream2-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "appstream2-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "appstream2-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - 
"hostname" : "appstream2-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "aps" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "athena" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "athena-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "athena-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "athena-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "athena-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "athena-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "athena.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "autoscaling" : { - "defaults" : { - "variants" : [ { - "hostname" : "autoscaling.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "protocols" : [ "http", "https" ] - }, - "us-gov-west-1" : { - "protocols" : [ "http", "https" ] - } - } - }, - "autoscaling-plans" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "autoscaling-plans.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "deprecated" : true, - "hostname" : "autoscaling-plans.us-gov-east-1.amazonaws.com", - "protocols" : [ "http", "https" ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "autoscaling-plans.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "deprecated" : true, - "hostname" : "autoscaling-plans.us-gov-west-1.amazonaws.com", - "protocols" : [ "http", "https" ] - } - } - }, - "backup" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "backup-gateway" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "batch" : { - "defaults" : { - "variants" : [ { - "hostname" : "batch.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "batch.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "batch.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "batch.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "batch.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "bedrock" : { - "endpoints" : { - "bedrock-fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "bedrock-fips.us-gov-east-1.amazonaws.com" - }, - "bedrock-fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "bedrock-fips.us-gov-west-1.amazonaws.com" - }, - "bedrock-runtime-fips-us-gov-east-1" : 
{ - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "bedrock-runtime-fips.us-gov-east-1.amazonaws.com" - }, - "bedrock-runtime-fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "bedrock-runtime-fips.us-gov-west-1.amazonaws.com" - }, - "bedrock-runtime-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "bedrock-runtime.us-gov-east-1.amazonaws.com" - }, - "bedrock-runtime-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "bedrock-runtime.us-gov-west-1.amazonaws.com" - }, - "bedrock-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "bedrock.us-gov-east-1.amazonaws.com" - }, - "bedrock-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "bedrock.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "cassandra" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "cassandra.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "cassandra.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "cassandra.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "cassandra.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "cassandra.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "cassandra.us-gov-west-1.amazonaws.com" - } - } - }, - "cloudcontrolapi" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "cloudcontrolapi-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "cloudcontrolapi-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cloudcontrolapi-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cloudcontrolapi.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "clouddirectory" : { - "endpoints" : { - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "clouddirectory.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "clouddirectory.us-gov-west-1.amazonaws.com" - } - } - }, - "cloudformation" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "cloudformation.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "cloudformation.us-gov-east-1.amazonaws.com", - "tags" : [ 
"fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "cloudformation.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "cloudformation.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "cloudformation.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "cloudformation.us-gov-west-1.amazonaws.com" - } - } - }, - "cloudhsm" : { - "endpoints" : { - "us-gov-west-1" : { } - } - }, - "cloudhsmv2" : { - "defaults" : { - "credentialScope" : { - "service" : "cloudhsm" - } - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "cloudhsmv2.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cloudtrail" : { - "defaults" : { - "variants" : [ { - "hostname" : "cloudtrail.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "cloudtrail.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "cloudtrail.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "codebuild" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "codebuild-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "codebuild-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "codebuild-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "codebuild-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "codecommit" : { - "endpoints" : { - "fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "codecommit-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "codecommit-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "codecommit-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "codedeploy" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "codedeploy-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : 
{ - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "codedeploy-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "codedeploy-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "codedeploy-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "codepipeline" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "codepipeline-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "codepipeline-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "codepipeline-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "codestar-connections" : { - "endpoints" : { - "us-gov-east-1" : { } - } - }, - "cognito-identity" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "cognito-identity-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "cognito-identity.us-gov-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "cognito-identity-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-identity-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-identity.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "cognito-idp" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "cognito-idp-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "cognito-idp.us-gov-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "cognito-idp-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "cognito-idp-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "cognito-idp.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "comprehend" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "comprehend-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "comprehend-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "comprehend-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "comprehend.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "comprehendmedical" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "comprehendmedical-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "comprehendmedical-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" 
] - } ] - } - } - }, - "compute-optimizer" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "compute-optimizer-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "compute-optimizer-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "config" : { - "defaults" : { - "variants" : [ { - "hostname" : "config.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "config.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "config.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "config.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "config.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "connect" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "connect.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "connect.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "controltower" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "controltower-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "controltower-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "controltower-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "controltower-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "data-ats.iot" : { - "defaults" : { - "credentialScope" : { - "service" : "iotdata" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "data.iot" : { - "defaults" : { - "credentialScope" : { - "service" : "iotdata" - }, - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "service" : "iotdata" - }, - "deprecated" : true, - "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : 
"data.iot-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "data.jobs.iot" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "data.jobs.iot-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "data.jobs.iot-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "databrew" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "databrew.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "databrew.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "datasync" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "datasync-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "datasync-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "datasync-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "datasync.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "datazone" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "datazone.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "datazone.us-gov-west-1.api.aws" - } - } - }, - "directconnect" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "directconnect-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "directconnect-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "directconnect-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "dlm" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "dlm-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "dlm.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : 
"us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "dlm.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "dlm-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "dlm.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "dlm.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "dlm.us-gov-west-1.amazonaws.com" - } - } - }, - "dms" : { - "defaults" : { - "variants" : [ { - "hostname" : "dms.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "dms" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "dms.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "dms-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "dms.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "dms.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "dms.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "dms.us-gov-west-1.amazonaws.com" - } - } - }, - "docdb" : { - "endpoints" : { - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "rds.us-gov-west-1.amazonaws.com" - } - } - }, - "drs" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "drs-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "drs-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "drs-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "drs-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ds" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "dynamodb" : { - "defaults" : { - "variants" : [ { - "hostname" : "dynamodb.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "dynamodb-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : 
[ { - "hostname" : "dynamodb-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "dynamodb-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "ebs" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "ec2" : { - "defaults" : { - "variants" : [ { - "hostname" : "ec2.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "ec2.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "ec2.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "ec2.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "ec2.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "ecs" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "ecs-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ecs-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "ecs-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "ecs-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "eks.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "eks.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "eks.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "eks.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "eks.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "eks-auth" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "eks-auth.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "eks-auth.us-gov-west-1.api.aws" - } - } - }, - "elasticache" : { - "defaults" : { - "variants" : [ { - "hostname" : "elasticache.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "elasticache.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "elasticache.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "elasticache.us-gov-west-1.amazonaws.com" - } - } - }, - "elasticbeanstalk" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : 
"elasticbeanstalk.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "elasticbeanstalk.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticbeanstalk.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "elasticbeanstalk.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "elasticbeanstalk.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "elasticbeanstalk.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticbeanstalk.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "elasticbeanstalk.us-gov-west-1.amazonaws.com" - } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticloadbalancing" : { - "defaults" : { - "variants" : [ { - "hostname" : "elasticloadbalancing.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "elasticloadbalancing.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "elasticloadbalancing.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "elasticloadbalancing.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "elasticloadbalancing.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticmapreduce" : { - "defaults" : { - "variants" : [ { - "hostname" : "elasticmapreduce.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticmapreduce.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "elasticmapreduce.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "elasticmapreduce.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "email" : { - 
"endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "email-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "email-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "email-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "email-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "emr-containers" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "emr-containers.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "emr-containers.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "emr-containers.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "emr-containers.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "emr-serverless" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "emr-serverless.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "emr-serverless.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "emr-serverless.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "emr-serverless.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "es" : { - "endpoints" : { - "fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "es-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "aos.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "es-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "es-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "aos.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "es-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "es-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "events" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "events.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "events.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "events.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "events.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "firehose" : { - "endpoints" : { - 
"fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "firehose-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "firehose-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "firehose-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "firehose-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "fms" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "fms-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "fms-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "fms-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "fsx" : { - "endpoints" : { - "fips-prod-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com" - }, - "fips-prod-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com" - }, - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com" - }, - "prod-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "prod-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "gameliftstreams" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "gameliftstreams.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "gameliftstreams.us-gov-west-1.api.aws" - } - } - }, - "geo" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "geo-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "geo-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "glacier" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - 
"deprecated" : true, - "hostname" : "glacier.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "glacier.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "glacier.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "glacier.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "glue" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "glue-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "glue-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "glue-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "glue-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "glue.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "glue-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "glue-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "glue.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "greengrass" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "dataplane-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "greengrass-ats.iot.us-gov-east-1.amazonaws.com" - }, - "dataplane-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "greengrass-ats.iot.us-gov-west-1.amazonaws.com" - }, - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "greengrass.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "greengrass.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "greengrass.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "greengrass.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - }, - "isRegionalized" : true - }, - "guardduty" : { - "defaults" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "guardduty.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "guardduty.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "guardduty.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "guardduty.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "guardduty.us-gov-west-1.amazonaws.com" - } - }, - "isRegionalized" : true - }, - "health" : { - "defaults" : { - "protocols" : [ "https" ], - "sslCommonName" : "health.us-gov-west-1.amazonaws.com" - }, - "endpoints" : { - "aws-us-gov-global" : { - 
"credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "global.health.us-gov.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "health-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "health-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iam" : { - "endpoints" : { - "aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "iam.us-gov.amazonaws.com", - "variants" : [ { - "hostname" : "iam.us-gov.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "aws-us-gov-global-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "iam.us-gov.amazonaws.com" - }, - "iam-govcloud" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "iam.us-gov.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "iam-govcloud-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "iam.us-gov.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-us-gov-global" - }, - "identitystore" : { - "defaults" : { - "variants" : [ { - "hostname" : "identitystore.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "identitystore.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "identitystore.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "identitystore.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "identitystore.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ingest.timestream" : { - "endpoints" : { - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "ingest.timestream.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ingest.timestream.us-gov-west-1.amazonaws.com" - } - } - }, - "inspector" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "inspector-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "inspector-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "inspector-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "inspector-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "inspector2" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "inspector2-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "inspector2-fips.us-gov-west-1.amazonaws.com" - }, - 
"us-gov-east-1" : { - "variants" : [ { - "hostname" : "inspector2-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "inspector2-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "internetmonitor" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "internetmonitor.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "internetmonitor.us-gov-west-1.api.aws" - } - } - }, - "iot" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "deprecated" : true, - "hostname" : "iot-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "deprecated" : true, - "hostname" : "iot-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "iot-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "iot-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotevents" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "iotevents-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "iotevents-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ioteventsdata" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "data.iotevents-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "data.iotevents.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "data.iotevents-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotsecuredtunneling" : { - "defaults" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iotsitewise" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "iotsitewise-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "iotsitewise-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "iottwinmaker" : { - "endpoints" : { - "api-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "api.iottwinmaker.us-gov-west-1.amazonaws.com" - }, - "data-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "data.iottwinmaker.us-gov-west-1.amazonaws.com" - }, 
- "fips-api-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "api.iottwinmaker-fips.us-gov-west-1.amazonaws.com" - }, - "fips-data-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "data.iottwinmaker-fips.us-gov-west-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "iottwinmaker-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "iottwinmaker-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kafka" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "kafka.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "kafka.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "kafka.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "kafka.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "kafka.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "kafka.us-gov-west-1.amazonaws.com" - } - } - }, - "kafkaconnect" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "kendra" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "kendra-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "kendra-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kendra-ranking" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "kendra-ranking.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "kendra-ranking.us-gov-west-1.api.aws" - } - } - }, - "kinesis" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "kinesis.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "kinesis.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "kinesis.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "kinesis.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "kinesis.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "kinesis.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kinesisanalytics" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "kinesisanalytics-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : 
"kinesisanalytics-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "kinesisanalytics-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kinesisvideo" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "kinesisvideo-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "kinesisvideo-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "kinesisvideo-fips.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "kinesisvideo-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "kinesisvideo-fips.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "kinesisvideo-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "kms" : { - "endpoints" : { - "ProdFips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "lakeformation" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "lakeformation-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "lakeformation-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "lakeformation-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lakeformation-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "lakeformation.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "lakeformation-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lakeformation-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "lakeformation.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "lambda" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "lambda-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "lambda-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : 
"lambda-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lambda.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "lambda-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "lambda.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "license-manager" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "license-manager-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "license-manager-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "license-manager-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "license-manager-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "license-manager-linux-subscriptions" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "license-manager-user-subscriptions" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "logs" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "logs.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "logs.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "logs.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "logs.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "logs.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "m2" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "deprecated" : true - }, - "fips-us-gov-west-1" : { - "deprecated" : true - }, - "us-gov-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - } - } - }, - "managedblockchain" : { - "endpoints" : { - "us-gov-west-1" : { } - } - }, - "mediaconvert" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "mediaconvert.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "mediaconvert.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "mediaconvert.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "mediaconvert.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - } - } - }, - "meetings-chime" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "meetings-chime-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "meetings-chime-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "meetings-chime-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : 
"us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "meetings-chime-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "memory-db" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "metering.marketplace" : { - "defaults" : { - "credentialScope" : { - "service" : "aws-marketplace" - } - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "metering-marketplace.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "mgn" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "mgn-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "mgn-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "mgn-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "mgn-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "models-v2-lex" : { - "endpoints" : { - "us-gov-west-1" : { } - } - }, - "models.lex" : { - "defaults" : { - "credentialScope" : { - "service" : "lex" - }, - "variants" : [ { - "hostname" : "models-fips.lex.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "models-fips.lex.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "models-fips.lex.us-gov-west-1.amazonaws.com" - } - } - }, - "monitoring" : { - "defaults" : { - "variants" : [ { - "hostname" : "monitoring.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "monitoring.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "monitoring.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "monitoring.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "monitoring.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "mq" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "mq-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "mq-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "mq-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "mq-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "neptune" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "rds.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - 
"credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "rds.us-gov-west-1.amazonaws.com" - } - } - }, - "network-firewall" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "network-firewall-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "network-firewall-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "network-firewall-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "networkmanager" : { - "endpoints" : { - "aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "networkmanager.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "networkmanager.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "networkmanager.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "networkmanager.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "fips-aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "networkmanager.us-gov-west-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-us-gov-global" - }, - "notifications" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "notifications.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "notifications.us-gov-west-1.api.aws" - } - } - }, - "oam" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "oidc" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "oidc.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "oidc.us-gov-west-1.amazonaws.com" - } - } - }, - "organizations" : { - "endpoints" : { - "aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "organizations.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "organizations.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "organizations.us-gov-west-1.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-us-gov-global" - }, - "outposts" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "outposts.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "outposts.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "outposts.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "outposts.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "participant.connect" : { 
- "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "participant.connect.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "participant.connect.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "pi" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "pi-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "pi-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "pi-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "pi-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "pi.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "pinpoint" : { - "defaults" : { - "credentialScope" : { - "service" : "mobiletargeting" - } - }, - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "pinpoint-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "pinpoint.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "pinpoint-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "polly" : { - "endpoints" : { - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "polly-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "polly-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "polly-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "polly.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "portal.sso" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "portal.sso.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "portal.sso.us-gov-west-1.amazonaws.com" - } - } - }, - "qbusiness" : { - "defaults" : { - "dnsSuffix" : "api.aws", - "variants" : [ { - "dnsSuffix" : "api.aws", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "hostname" : "qbusiness.us-gov-east-1.api.aws" - }, - "us-gov-west-1" : { - "hostname" : "qbusiness.us-gov-west-1.api.aws" - } - } - }, - "query.timestream" : { - "endpoints" : { - "us-gov-west-1" : { } - } - }, - "quicksight" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "ram" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "ram.us-gov-east-1.amazonaws.com", - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram.us-gov-east-1.amazonaws.com", - 
"tags" : [ "fips" ] - }, { - "hostname" : "ramus-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "ram.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "ram.us-gov-west-1.amazonaws.com", - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "hostname" : "ram.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "ramus-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ram.us-gov-west-1.amazonaws.com" - } - } - }, - "rbin" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rbin-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rbin.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "rds" : { - "defaults" : { - "variants" : [ { - "hostname" : "rds.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "rds.us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "rds.us-gov-east-1.amazonaws.com" - }, - "rds.us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "rds.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "rds.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "rds.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "rds.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "rds.us-gov-west-1.amazonaws.com" - } - } - }, - "redshift" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "redshift.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "redshift.us-gov-west-1.amazonaws.com" - } - } - }, - "redshift-serverless" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "redshift-serverless-fips.us-gov-west-1.amazonaws.com" - }, - 
"us-gov-east-1" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "redshift-serverless-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "rekognition" : { - "endpoints" : { - "rekognition-fips.us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com" - }, - "rekognition.us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "rekognition-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "rekognition.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "resiliencehub" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "resiliencehub-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "resiliencehub-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "resiliencehub-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resiliencehub-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "resiliencehub.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "resiliencehub-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "resiliencehub-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "resiliencehub.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "resource-groups" : { - "defaults" : { - "variants" : [ { - "hostname" : "resource-groups.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "resource-groups.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "resource-groups.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "resource-groups.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "resource-groups.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "rolesanywhere" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "rolesanywhere-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "rolesanywhere-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - 
"hostname" : "rolesanywhere-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "rolesanywhere-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "route53" : { - "endpoints" : { - "aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "route53.us-gov.amazonaws.com", - "variants" : [ { - "hostname" : "route53.us-gov.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "fips-aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "route53.us-gov.amazonaws.com" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-us-gov-global" - }, - "route53profiles" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "route53profiles-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "route53profiles-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "route53profiles.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "route53resolver" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "route53resolver.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "route53resolver.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "deprecated" : true, - "hostname" : "route53resolver.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "route53resolver.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "route53resolver.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - }, { - "hostname" : "route53resolver.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "deprecated" : true, - "hostname" : "route53resolver.us-gov-west-1.amazonaws.com" - } - } - }, - "rum" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "runtime-v2-lex" : { - "endpoints" : { - "us-gov-west-1" : { } - } - }, - "runtime.lex" : { - "defaults" : { - "credentialScope" : { - "service" : "lex" - }, - "variants" : [ { - "hostname" : "runtime-fips.lex.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "runtime-fips.lex.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "runtime-fips.lex.us-gov-west-1.amazonaws.com" - } - } - }, - "runtime.sagemaker" : { - "defaults" : { - "variants" : [ { - "hostname" : "runtime.sagemaker.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "runtime.sagemaker.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "runtime.sagemaker.us-gov-west-1.amazonaws.com" - } - } - }, - "s3" : { - "defaults" : { - "signatureVersions" : [ "s3", "s3v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.com", - "hostname" : 
"{service}-fips.dualstack.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "amazonaws.com", - "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "s3-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "s3-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "hostname" : "s3.us-gov-east-1.amazonaws.com", - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "s3-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "s3.dualstack.us-gov-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "hostname" : "s3.us-gov-west-1.amazonaws.com", - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "s3-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "s3.dualstack.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "s3-control" : { - "defaults" : { - "protocols" : [ "https" ], - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.com", - "hostname" : "{service}-fips.dualstack.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "amazonaws.com", - "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "s3-control.us-gov-east-1.amazonaws.com", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control-fips.dualstack.us-gov-east-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-control-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "s3-control.dualstack.us-gov-east-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "s3-control-fips.us-gov-east-1.amazonaws.com", - "signatureVersions" : [ "s3v4" ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "s3-control.us-gov-west-1.amazonaws.com", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control-fips.dualstack.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-control-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "s3-control.dualstack.us-gov-west-1.amazonaws.com", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "s3-control-fips.us-gov-west-1.amazonaws.com", - "signatureVersions" : [ "s3v4" ] - } - } - }, - "s3-outposts" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "deprecated" : true - }, - "fips-us-gov-west-1" : { - "deprecated" : true - }, - "us-gov-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - } - } - }, - "scheduler" : { - "endpoints" : { - "us-gov-east-1" : { }, - 
"us-gov-west-1" : { } - } - }, - "schemas" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "deprecated" : true - }, - "us-gov-west-1" : { - "variants" : [ { - "tags" : [ "dualstack" ] - }, { - "tags" : [ "dualstack", "fips" ] - }, { - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "deprecated" : true - } - } - }, - "securityhub" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "securityhub-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "securityhub-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "securityhub-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "securityhub.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "securityhub-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "securityhub.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "securitylake" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "securitylake.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "securitylake.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "securitylake.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "securitylake.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "securitylake.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "securitylake.us-gov-west-1.amazonaws.com" - } - } - }, - "serverlessrepo" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-gov-east-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "serverlessrepo.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "serverlessrepo.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "serverlessrepo.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "serverlessrepo.us-gov-west-1.amazonaws.com" - } - } - }, - "servicecatalog" : { - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "servicecatalog-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "servicecatalog-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "servicecatalog-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - 
"us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "servicecatalog-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "servicecatalog-appregistry" : { - "defaults" : { - "variants" : [ { - "hostname" : "servicecatalog-appregistry.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "servicediscovery" : { - "endpoints" : { - "servicediscovery" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "servicediscovery-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "servicediscovery-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "servicediscovery-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "servicediscovery.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "servicediscovery-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "servicediscovery-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "servicediscovery.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "servicequotas" : { - "defaults" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "servicequotas.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "servicequotas.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "servicequotas.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "servicequotas.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "servicequotas.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "signer" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "signer-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "signer-fips.us-gov-west-1.amazonaws.com" - }, - "fips-verification-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "verification.signer-fips.us-gov-east-1.amazonaws.com" - }, - "fips-verification-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "verification.signer-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "signer-fips.us-gov-east-1.amazonaws.com", - 
"tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "signer-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "verification-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "verification.signer.us-gov-east-1.amazonaws.com" - }, - "verification-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "verification.signer.us-gov-west-1.amazonaws.com" - } - } - }, - "simspaceweaver" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "simspaceweaver.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "simspaceweaver.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "simspaceweaver.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "simspaceweaver.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "sms-voice" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "sms-voice-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "sms-voice-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "sms-voice-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sms-voice-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "sms-voice.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "sms-voice-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "sms-voice-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "sms-voice.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "snowball" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "snowball-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "snowball-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "snowball-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "snowball-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "snowball.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "sns" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "sns.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "sns.us-gov-west-1.amazonaws.com" - }, - 
"us-gov-east-1" : { - "variants" : [ { - "hostname" : "sns.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "sns.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "sqs" : { - "defaults" : { - "variants" : [ { - "hostname" : "sqs.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "sqs.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "protocols" : [ "http", "https" ], - "sslCommonName" : "{region}.queue.{dnsSuffix}", - "variants" : [ { - "hostname" : "sqs.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "ssm" : { - "defaults" : { - "variants" : [ { - "hostname" : "ssm.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "ssm.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "ssm.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "ssm.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "ssm.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "sso" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "sso.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "sso.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "sso.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "sso.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "sso.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "sso.us-gov-west-1.amazonaws.com" - } - } - }, - "states" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "states-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "states.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "states-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "states.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "storagegateway" : { - "endpoints" : { - "fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : 
"storagegateway-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "storagegateway-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "streams.dynamodb" : { - "defaults" : { - "credentialScope" : { - "service" : "dynamodb" - }, - "variants" : [ { - "hostname" : "streams.dynamodb.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "sts" : { - "defaults" : { - "variants" : [ { - "hostname" : "sts.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "sts.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "sts.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "sts.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "sts.us-gov-west-1.amazonaws.com" - } - } - }, - "support" : { - "endpoints" : { - "aws-us-gov-global" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "support.us-gov-west-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "support.us-gov-west-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "support.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - }, - "partitionEndpoint" : "aws-us-gov-global" - }, - "swf" : { - "endpoints" : { - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "swf.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "swf.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-east-1-fips" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "swf.us-gov-east-1.amazonaws.com" - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "swf.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "swf.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "swf.us-gov-west-1.amazonaws.com" - } - } - }, - "synthetics" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "synthetics-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "synthetics-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : 
"synthetics-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "synthetics-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "synthetics-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "synthetics.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "tagging" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "textract" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "textract-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "textract-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "textract-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "textract-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "textract-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "textract.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transcribe" : { - "defaults" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "fips.transcribe.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribe.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribe-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribe.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transcribestreaming" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "transcribestreaming-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "transcribestreaming-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "transcribestreaming-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribestreaming-fips.us-gov-east-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribestreaming.us-gov-east-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - 
"us-gov-west-1" : { - "variants" : [ { - "hostname" : "transcribestreaming-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "transcribestreaming-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "transcribestreaming.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "transfer" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "transfer-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "transfer-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "transfer-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "transfer-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "translate" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "translate-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - }, { - "hostname" : "translate-fips.us-gov-west-1.api.aws", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "translate.us-gov-west-1.api.aws", - "tags" : [ "dualstack" ] - } ] - }, - "us-gov-west-1-fips" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "translate-fips.us-gov-west-1.amazonaws.com" - } - } - }, - "verifiedpermissions" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "verifiedpermissions-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "verifiedpermissions-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "waf-regional" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "waf-regional-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "waf-regional.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "waf-regional.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "waf-regional-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "wafv2" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "wafv2-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : 
"wafv2-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "hostname" : "wafv2.us-gov-east-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "hostname" : "wafv2.us-gov-west-1.amazonaws.com", - "variants" : [ { - "hostname" : "wafv2-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "wellarchitected" : { - "endpoints" : { - "us-gov-east-1" : { }, - "us-gov-west-1" : { } - } - }, - "workspaces" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "workspaces-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "workspaces-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - }, - "xray" : { - "endpoints" : { - "fips-us-gov-east-1" : { - "credentialScope" : { - "region" : "us-gov-east-1" - }, - "deprecated" : true, - "hostname" : "xray-fips.us-gov-east-1.amazonaws.com" - }, - "fips-us-gov-west-1" : { - "credentialScope" : { - "region" : "us-gov-west-1" - }, - "deprecated" : true, - "hostname" : "xray-fips.us-gov-west-1.amazonaws.com" - }, - "us-gov-east-1" : { - "variants" : [ { - "hostname" : "xray-fips.us-gov-east-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - }, - "us-gov-west-1" : { - "variants" : [ { - "hostname" : "xray-fips.us-gov-west-1.amazonaws.com", - "tags" : [ "fips" ] - } ] - } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "c2s.ic.gov", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "dnsSuffix" : "c2s.ic.gov", - "partition" : "aws-iso", - "partitionName" : "AWS ISO (US)", - "regionRegex" : "^us\\-iso\\-\\w+\\-\\d+$", - "regions" : { - "us-iso-east-1" : { - "description" : "US ISO East" - }, - "us-iso-west-1" : { - "description" : "US ISO WEST" - } - }, - "services" : { - "acm" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "agreement-marketplace" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "agreement-marketplace-fips.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "agreement-marketplace-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "api.ecr" : { - "endpoints" : { - "us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "api.ecr.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "hostname" : "api.ecr.us-iso-west-1.c2s.ic.gov" - } - } - }, - "api.pricing" : { - "defaults" : { - "credentialScope" : { - "service" : "pricing" - } - }, - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "api.sagemaker" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "apigateway" : { - "endpoints" : { - 
"us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "athena" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "autoscaling" : { - "endpoints" : { - "us-iso-east-1" : { - "protocols" : [ "http", "https" ] - }, - "us-iso-west-1" : { } - } - }, - "backup" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "batch" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "bedrock" : { - "endpoints" : { - "bedrock-runtime-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "bedrock-runtime.us-iso-east-1.c2s.ic.gov" - }, - "bedrock-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "bedrock.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { } - } - }, - "budgets" : { - "endpoints" : { - "aws-iso-global" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "budgets.c2s.ic.gov" - }, - "us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "budgets.c2s.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-global" - }, - "ce" : { - "endpoints" : { - "aws-iso-global" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "ce.us-iso-east-1.c2s.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-global" - }, - "cloudcontrolapi" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "cloudformation" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "cloudtrail" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "codebuild" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "codedeploy" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "comprehend" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "comprehend-fips.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "comprehend-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "config" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "config-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - 
"region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "config-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "config-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "config-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "datapipeline" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "datasync" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "datasync-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "datasync-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "datasync-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "directconnect" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "dlm" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "dms" : { - "defaults" : { - "variants" : [ { - "hostname" : "dms.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "dms" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "dms.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "dms-fips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "dms.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-east-1-fips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "dms.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1-fips" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "dms.us-iso-west-1.c2s.ic.gov" - } - } - }, - "ds" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "dynamodb" : { - "endpoints" : { - "us-iso-east-1" : { - "protocols" : [ "http", "https" ] - }, - "us-iso-west-1" : { } - } - }, - "ebs" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "ec2" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "ecs" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "elasticache" : { - "endpoints" : { - "us-iso-east-1" : { }, - 
"us-iso-west-1" : { } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticloadbalancing" : { - "endpoints" : { - "us-iso-east-1" : { - "protocols" : [ "http", "https" ] - }, - "us-iso-west-1" : { } - } - }, - "elasticmapreduce" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "protocols" : [ "https" ], - "variants" : [ { - "hostname" : "elasticmapreduce.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "es" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "events" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "firehose" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "fsx" : { - "endpoints" : { - "fips-prod-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov" - }, - "prod-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { } - } - }, - "glacier" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "glacier-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "glacier-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "glue" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "guardduty" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-iso-east-1" : { } - }, - "isRegionalized" : true - }, - "health" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "iam" : { - "endpoints" : { - 
"aws-iso-global" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "iam.us-iso-east-1.c2s.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-global" - }, - "kinesis" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "kinesisanalytics" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "kinesisvideo" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "kms" : { - "endpoints" : { - "ProdFips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-east-1-fips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1-fips" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-iso-west-1.c2s.ic.gov" - } - } - }, - "lakeformation" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "lambda" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "license-manager" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "logs" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "medialive" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "medialive-fips.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "medialive-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "mediapackage" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "monitoring" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "network-firewall" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "oam" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "organizations" : { - "endpoints" : { - "aws-iso-global" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "organizations.us-iso-east-1.c2s.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-global" - }, - "outposts" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "pi" : { - "endpoints" : { - "us-iso-east-1" : { - "protocols" : [ "https" ] - }, - "us-iso-west-1" : { - "protocols" : [ "https" ] - } - } - }, - "ram" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "rbin" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "rds" : { - "endpoints" : { - 
"rds.us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "rds.us-iso-east-1.c2s.ic.gov" - }, - "rds.us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "rds.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "rds.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-east-1-fips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "rds.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "rds.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1-fips" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "rds.us-iso-west-1.c2s.ic.gov" - } - } - }, - "redshift" : { - "endpoints" : { - "us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "redshift.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "hostname" : "redshift.us-iso-west-1.c2s.ic.gov" - } - } - }, - "resource-groups" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "route53" : { - "endpoints" : { - "aws-iso-global" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "route53.c2s.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-global" - }, - "route53resolver" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "runtime.sagemaker" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "s3" : { - "defaults" : { - "signatureVersions" : [ "s3v4" ] - }, - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "s3-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "s3-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "protocols" : [ "http", "https" ], - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-fips.dualstack.us-iso-east-1.c2s.ic.gov", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "s3-fips.dualstack.us-iso-west-1.c2s.ic.gov", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "s3-control" : { - "defaults" : { - "protocols" : [ "https" ], - "signatureVersions" : [ "s3v4" ] - }, - "endpoints" : { - "us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "hostname" : "s3-control.us-iso-east-1.c2s.ic.gov", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control-fips.dualstack.us-iso-east-1.c2s.ic.gov", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-control-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - }, { - "hostname" : "s3-control.dualstack.us-iso-east-1.c2s.ic.gov", - "tags" : [ "dualstack" ] - } ] - }, - "us-iso-east-1-fips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "s3-control-fips.us-iso-east-1.c2s.ic.gov", - "signatureVersions" : [ "s3v4" ] - }, - "us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - 
}, - "hostname" : "s3-control.us-iso-west-1.c2s.ic.gov", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control-fips.dualstack.us-iso-west-1.c2s.ic.gov", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-control-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - }, { - "hostname" : "s3-control.dualstack.us-iso-west-1.c2s.ic.gov", - "tags" : [ "dualstack" ] - } ] - }, - "us-iso-west-1-fips" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "s3-control-fips.us-iso-west-1.c2s.ic.gov", - "signatureVersions" : [ "s3v4" ] - } - } - }, - "s3-outposts" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "deprecated" : true - }, - "us-iso-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - } - } - }, - "scheduler" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "us-iso-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-iso-east-1-fips" : { - "deprecated" : true - }, - "us-iso-west-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1-fips" : { - "deprecated" : true - } - } - }, - "securityhub" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "servicediscovery" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "servicequotas" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "snowball" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "sns" : { - "endpoints" : { - "us-iso-east-1" : { - "protocols" : [ "http", "https" ] - }, - "us-iso-west-1" : { } - } - }, - "sqs" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "protocols" : [ "http", "https" ], - "variants" : [ { - "hostname" : "sqs.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "sqs.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "ssm" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "states" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "states-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "states-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "states-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "states-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "storagegateway" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "streams.dynamodb" : { - "defaults" : { - "credentialScope" : { - "service" : "dynamodb" - } - }, - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "sts" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "support" : { - "endpoints" : { - "aws-iso-global" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - 
"hostname" : "support.us-iso-east-1.c2s.ic.gov" - } - }, - "partitionEndpoint" : "aws-iso-global" - }, - "swf" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "swf-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "swf-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "synthetics" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "tagging" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - }, - "textract" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "transcribe" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "fips.transcribe.us-iso-east-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "fips.transcribe.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "transcribestreaming" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "translate" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "translate-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-east-1-fips" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "translate-fips.us-iso-east-1.c2s.ic.gov" - } - } - }, - "wafv2" : { - "endpoints" : { - "us-iso-east-1" : { } - } - }, - "workspaces" : { - "endpoints" : { - "fips-us-iso-east-1" : { - "credentialScope" : { - "region" : "us-iso-east-1" - }, - "deprecated" : true, - "hostname" : "workspaces-fips.us-iso-east-1.c2s.ic.gov" - }, - "fips-us-iso-west-1" : { - "credentialScope" : { - "region" : "us-iso-west-1" - }, - "deprecated" : true, - "hostname" : "workspaces-fips.us-iso-west-1.c2s.ic.gov" - }, - "us-iso-east-1" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-iso-east-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-iso-west-1" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-iso-west-1.c2s.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "xray" : { - "endpoints" : { - "us-iso-east-1" : { }, - "us-iso-west-1" : { } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "sc2s.sgov.gov", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "dnsSuffix" : "sc2s.sgov.gov", - "partition" : "aws-iso-b", - "partitionName" : "AWS ISOB (US)", - "regionRegex" : "^us\\-isob\\-\\w+\\-\\d+$", - "regions" : { - "us-isob-east-1" : { - "description" : "US ISOB East (Ohio)" - }, - "us-isob-west-1" : { - "description" : "US ISOB West" - } - }, - "services" : { - "agreement-marketplace" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "api.ecr" : { - "endpoints" : { - "us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "api.ecr.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-west-1" : { } - } - }, - "api.pricing" 
: { - "defaults" : { - "credentialScope" : { - "service" : "pricing" - } - }, - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "api.sagemaker" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "apigateway" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "athena" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "backup" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "batch" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "bedrock" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "budgets" : { - "endpoints" : { - "aws-iso-b-global" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "budgets.global.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "budgets.global.sc2s.sgov.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-b-global" - }, - "ce" : { - "endpoints" : { - "aws-iso-b-global" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "ce.us-isob-east-1.sc2s.sgov.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-b-global" - }, - "cloudcontrolapi" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "cloudformation" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "cloudtrail" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "cloudtrail-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "cloudtrail-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "codebuild" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "codedeploy" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "config" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "config-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "config-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "datasync" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "directconnect" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "dlm" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "dms" : { - "defaults" : { - "variants" : [ { - "hostname" : "dms.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "endpoints" : { - "dms" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - 
"dms-fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-east-1-fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-west-1" : { } - } - }, - "ds" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "ds-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "ds-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "dynamodb" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "ebs" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "ec2" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "ecs" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "elasticache" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticloadbalancing" : { - "endpoints" : { - "us-isob-east-1" : { - "protocols" : [ "https" ] - }, - "us-isob-west-1" : { } - } - }, - "elasticmapreduce" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "elasticmapreduce.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "elasticmapreduce.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "es" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "events" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "firehose" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "fsx" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "glacier" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "glacier-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "glacier-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "glue" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "guardduty" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "health" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "iam" : { - "endpoints" : { - "aws-iso-b-global" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "iam.us-isob-east-1.sc2s.sgov.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : 
"aws-iso-b-global" - }, - "kinesis" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "kinesisanalytics" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "kms" : { - "endpoints" : { - "ProdFips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-east-1-fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-west-1" : { } - } - }, - "lakeformation" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "lambda" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "license-manager" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "logs" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "medialive" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "medialive-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "medialive-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "mediapackage" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "metering.marketplace" : { - "defaults" : { - "credentialScope" : { - "service" : "aws-marketplace" - } - }, - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "monitoring" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "network-firewall" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "oam" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "organizations" : { - "endpoints" : { - "aws-iso-b-global" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "organizations.us-isob-east-1.sc2s.sgov.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-b-global" - }, - "outposts" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "pi" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "ram" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "rbin" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "rbin-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "rbin-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "rds" : { - "endpoints" : { - "rds.us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "rds.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "rds.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-east-1-fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "rds.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-west-1" : { } - } - }, - "redshift" : { - "endpoints" : { - "us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : 
"redshift.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-west-1" : { } - } - }, - "resource-groups" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "route53" : { - "endpoints" : { - "aws-iso-b-global" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "route53.sc2s.sgov.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-b-global" - }, - "route53resolver" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "runtime.sagemaker" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "s3" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "signatureVersions" : [ "s3v4" ] - }, - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "s3-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "s3-fips.dualstack.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "s3-control" : { - "defaults" : { - "protocols" : [ "https" ], - "signatureVersions" : [ "s3v4" ] - }, - "endpoints" : { - "us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "s3-control.us-isob-east-1.sc2s.sgov.gov", - "signatureVersions" : [ "s3v4" ], - "variants" : [ { - "hostname" : "s3-control-fips.dualstack.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "dualstack", "fips" ] - }, { - "hostname" : "s3-control-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - }, { - "hostname" : "s3-control.dualstack.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "dualstack" ] - } ] - }, - "us-isob-east-1-fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "s3-control-fips.us-isob-east-1.sc2s.sgov.gov", - "signatureVersions" : [ "s3v4" ] - } - } - }, - "s3-outposts" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "deprecated" : true - }, - "us-isob-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - } - } - }, - "scheduler" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "us-isob-east-1" : { - "variants" : [ { - "tags" : [ "fips" ] - } ] - }, - "us-isob-east-1-fips" : { - "deprecated" : true - }, - "us-isob-west-1" : { } - } - }, - "securityhub" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "servicediscovery" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "servicequotas" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "snowball" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "sns" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "sqs" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "sslCommonName" : "{region}.queue.{dnsSuffix}" - }, - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "sqs.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "ssm" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "states" : { - "endpoints" : { - 
"fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "states-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "states-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "storagegateway" : { - "endpoints" : { - "fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "storagegateway-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-east-1-fips" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "storagegateway-fips.us-isob-east-1.sc2s.sgov.gov" - } - } - }, - "streams.dynamodb" : { - "defaults" : { - "credentialScope" : { - "service" : "dynamodb" - }, - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "sts" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "support" : { - "endpoints" : { - "aws-iso-b-global" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "hostname" : "support.us-isob-east-1.sc2s.sgov.gov" - } - }, - "partitionEndpoint" : "aws-iso-b-global" - }, - "swf" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "swf-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "swf-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isob-west-1" : { } - } - }, - "synthetics" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "tagging" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - }, - "wafv2" : { - "endpoints" : { - "us-isob-east-1" : { } - } - }, - "workspaces" : { - "endpoints" : { - "fips-us-isob-east-1" : { - "credentialScope" : { - "region" : "us-isob-east-1" - }, - "deprecated" : true, - "hostname" : "workspaces-fips.us-isob-east-1.sc2s.sgov.gov" - }, - "us-isob-east-1" : { - "variants" : [ { - "hostname" : "workspaces-fips.us-isob-east-1.sc2s.sgov.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "xray" : { - "endpoints" : { - "us-isob-east-1" : { }, - "us-isob-west-1" : { } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "cloud.adc-e.uk", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "dnsSuffix" : "cloud.adc-e.uk", - "partition" : "aws-iso-e", - "partitionName" : "AWS ISOE (Europe)", - "regionRegex" : "^eu\\-isoe\\-\\w+\\-\\d+$", - "regions" : { - "eu-isoe-west-1" : { - "description" : "EU ISOE West" - } - }, - "services" : { - "access-analyzer" : { - "endpoints" : { - "eu-isoe-west-1" : { - "variants" : [ { - "hostname" : "access-analyzer.eu-isoe-west-1.api.cloud-aws.adc-e.uk", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "acm" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "acm-pca" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "api.ecr" : { - "endpoints" : { - "eu-isoe-west-1" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : 
"api.ecr.eu-isoe-west-1.cloud.adc-e.uk" - } - } - }, - "api.pricing" : { - "defaults" : { - "credentialScope" : { - "service" : "pricing" - } - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "apigateway" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "athena" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "batch" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "budgets" : { - "endpoints" : { - "aws-iso-e-global" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "budgets.global.cloud.adc-e.uk" - }, - "eu-isoe-west-1" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "budgets.global.cloud.adc-e.uk" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-e-global" - }, - "cloudcontrolapi" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "cloudformation" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "cloudtrail" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "cloudtrail-data" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "codedeploy" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "compute-optimizer" : { - "endpoints" : { - "eu-isoe-west-1" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "compute-optimizer.eu-isoe-west-1.cloud.adc-e.uk" - } - } - }, - "config" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "cost-optimization-hub" : { - "endpoints" : { - "eu-isoe-west-1" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "cost-optimization-hub.eu-isoe-west-1.cloud.adc-e.uk" - } - } - }, - "directconnect" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "dlm" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "dms" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "ds" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "dynamodb" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "ebs" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "ec2" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "ecs" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "elasticache" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "eu-isoe-west-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.eu-isoe-west-1.cloud.adc-e.uk", - "tags" : [ "fips" ] - } ] - }, - "fips-eu-isoe-west-1" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.eu-isoe-west-1.cloud.adc-e.uk" - } - } - }, - "elasticloadbalancing" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "elasticmapreduce" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - 
"emr-serverless" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "es" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "events" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "firehose" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "glue" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "kinesis" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "kms" : { - "endpoints" : { - "ProdFips" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-isoe-west-1.cloud.adc-e.uk" - }, - "eu-isoe-west-1" : { - "variants" : [ { - "hostname" : "kms-fips.eu-isoe-west-1.cloud.adc-e.uk", - "tags" : [ "fips" ] - } ] - }, - "eu-isoe-west-1-fips" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.eu-isoe-west-1.cloud.adc-e.uk" - } - } - }, - "lakeformation" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "lambda" : { - "endpoints" : { - "eu-isoe-west-1" : { - "variants" : [ { - "hostname" : "lambda.eu-isoe-west-1.api.cloud-aws.adc-e.uk", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "license-manager" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "logs" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "monitoring" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "oam" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "organizations" : { - "endpoints" : { - "aws-iso-e-global" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "organizations.eu-isoe-west-1.cloud.adc-e.uk" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-e-global" - }, - "pi" : { - "endpoints" : { - "eu-isoe-west-1" : { - "protocols" : [ "https" ] - } - } - }, - "pipes" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "ram" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "rbin" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "rds" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "redshift" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "redshift-serverless" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "resource-groups" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "route53" : { - "endpoints" : { - "aws-iso-e-global" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "route53.cloud.adc-e.uk" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-e-global" - }, - "route53profiles" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "route53resolver" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "s3" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "signatureVersions" : [ "s3v4" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "savingsplans" : { - "endpoints" : { - "aws-iso-e-global" : { - "credentialScope" : { - "region" : "eu-isoe-west-1" - }, - "hostname" : "savingsplans.cloud.adc-e.uk" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-e-global" - }, - "scheduler" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "schemas" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "servicecatalog" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "servicediscovery" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "servicequotas" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - 
"eu-isoe-west-1" : { } - } - }, - "sns" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "sqs" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "sslCommonName" : "{region}.queue.{dnsSuffix}" - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "ssm" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "states" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "streams.dynamodb" : { - "defaults" : { - "credentialScope" : { - "service" : "dynamodb" - }, - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "sts" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "swf" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "synthetics" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "tagging" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "trustedadvisor" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - }, - "xray" : { - "endpoints" : { - "eu-isoe-west-1" : { } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "csp.hci.ic.gov", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - } ] - }, - "dnsSuffix" : "csp.hci.ic.gov", - "partition" : "aws-iso-f", - "partitionName" : "AWS ISOF", - "regionRegex" : "^us\\-isof\\-\\w+\\-\\d+$", - "regions" : { - "us-isof-east-1" : { - "description" : "US ISOF EAST" - }, - "us-isof-south-1" : { - "description" : "US ISOF SOUTH" - } - }, - "services" : { - "access-analyzer" : { - "endpoints" : { - "us-isof-east-1" : { - "variants" : [ { - "hostname" : "access-analyzer.us-isof-east-1.api.aws.hci.ic.gov", - "tags" : [ "dualstack" ] - } ] - }, - "us-isof-south-1" : { - "variants" : [ { - "hostname" : "access-analyzer.us-isof-south-1.api.aws.hci.ic.gov", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "acm" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "acm-pca" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "api.ecr" : { - "endpoints" : { - "us-isof-east-1" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "hostname" : "api.ecr.us-isof-east-1.csp.hci.ic.gov" - }, - "us-isof-south-1" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "api.ecr.us-isof-south-1.csp.hci.ic.gov" - } - } - }, - "api.pricing" : { - "defaults" : { - "credentialScope" : { - "service" : "pricing" - } - }, - "endpoints" : { - "us-isof-south-1" : { } - } - }, - "api.sagemaker" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "application-autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "athena" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "autoscaling" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "backup" : { - "endpoints" : { - 
"us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "batch" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "budgets" : { - "endpoints" : { - "aws-iso-f-global" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "budgets.global.csp.hci.ic.gov" - }, - "us-isof-south-1" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "budgets.global.csp.hci.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-f-global" - }, - "ce" : { - "endpoints" : { - "aws-iso-f-global" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "ce.us-isof-south-1.csp.hci.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-f-global" - }, - "cloudcontrolapi" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "cloudformation" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "cloudtrail" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "cloudtrail-data" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "codebuild" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "codedeploy" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "codepipeline" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "comprehend" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "compute-optimizer" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "compute-optimizer.us-isof-south-1.csp.hci.ic.gov" - } - } - }, - "config" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "cost-optimization-hub" : { - "endpoints" : { - "us-isof-south-1" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "cost-optimization-hub.us-isof-south-1.csp.hci.ic.gov" - } - } - }, - "directconnect" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "dlm" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "dms" : { - "endpoints" : { - "dms" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "variants" : [ { - "hostname" : "dms.us-isof-east-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "dms-fips" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-isof-east-1.csp.hci.ic.gov" - }, - "us-isof-east-1" : { - "variants" : [ { - "hostname" : "dms.us-isof-east-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isof-east-1-fips" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "hostname" : "dms.us-isof-east-1.csp.hci.ic.gov" - }, - "us-isof-south-1" : { - "variants" : [ { - "hostname" : "dms.us-isof-south-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isof-south-1-fips" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "deprecated" : true, - "hostname" : "dms.us-isof-south-1.csp.hci.ic.gov" - } - } - }, - "ds" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "dynamodb" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - 
"us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "ebs" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "ec2" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "ecs" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "eks" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "elasticache" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "fips-us-isof-east-1" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-isof-east-1.csp.hci.ic.gov" - }, - "fips-us-isof-south-1" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "deprecated" : true, - "hostname" : "elasticfilesystem-fips.us-isof-south-1.csp.hci.ic.gov" - }, - "us-isof-east-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-isof-east-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isof-south-1" : { - "variants" : [ { - "hostname" : "elasticfilesystem-fips.us-isof-south-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "elasticloadbalancing" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "elasticmapreduce" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "es" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "events" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "firehose" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "fsx" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "glue" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "guardduty" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - }, - "isRegionalized" : true - }, - "iam" : { - "endpoints" : { - "aws-iso-f-global" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "iam.us-isof-south-1.csp.hci.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-f-global" - }, - "identitystore" : { - "endpoints" : { - "us-isof-east-1" : { } - } - }, - "kinesis" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "kms" : { - "endpoints" : { - "ProdFips" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-isof-east-1.csp.hci.ic.gov" - }, - "us-isof-east-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-isof-east-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isof-east-1-fips" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-isof-east-1.csp.hci.ic.gov" - }, - "us-isof-south-1" : { - "variants" : [ { - "hostname" : "kms-fips.us-isof-south-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isof-south-1-fips" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "deprecated" : true, - "hostname" : "kms-fips.us-isof-south-1.csp.hci.ic.gov" - } - } - }, - "lakeformation" : { - "endpoints" : { - "us-isof-east-1" : { }, - 
"us-isof-south-1" : { } - } - }, - "lambda" : { - "endpoints" : { - "us-isof-east-1" : { - "variants" : [ { - "hostname" : "lambda.us-isof-east-1.api.aws.hci.ic.gov", - "tags" : [ "dualstack" ] - } ] - }, - "us-isof-south-1" : { - "variants" : [ { - "hostname" : "lambda.us-isof-south-1.api.aws.hci.ic.gov", - "tags" : [ "dualstack" ] - } ] - } - } - }, - "license-manager" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "logs" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "monitoring" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "oam" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "organizations" : { - "endpoints" : { - "aws-iso-f-global" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "organizations.us-isof-south-1.csp.hci.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-f-global" - }, - "pi" : { - "endpoints" : { - "us-isof-east-1" : { - "protocols" : [ "https" ] - }, - "us-isof-south-1" : { - "protocols" : [ "https" ] - } - } - }, - "pipes" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "quicksight" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "ram" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "rbin" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "rds" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "redshift" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "redshift-serverless" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "rekognition" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "resource-groups" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "rolesanywhere" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "route53" : { - "endpoints" : { - "aws-iso-f-global" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "route53.csp.hci.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-f-global" - }, - "route53profiles" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "route53resolver" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "runtime.sagemaker" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "s3" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "signatureVersions" : [ "s3v4" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "savingsplans" : { - "endpoints" : { - "aws-iso-f-global" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "hostname" : "savingsplans.csp.hci.ic.gov" - } - }, - "isRegionalized" : false, - "partitionEndpoint" : "aws-iso-f-global" - }, - "scheduler" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "schemas" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "servicediscovery" : { - 
"endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "servicequotas" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "sns" : { - "defaults" : { - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "sqs" : { - "defaults" : { - "protocols" : [ "http", "https" ], - "sslCommonName" : "{region}.queue.{dnsSuffix}" - }, - "endpoints" : { - "fips-us-isof-east-1" : { - "credentialScope" : { - "region" : "us-isof-east-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-isof-east-1.csp.hci.ic.gov" - }, - "fips-us-isof-south-1" : { - "credentialScope" : { - "region" : "us-isof-south-1" - }, - "deprecated" : true, - "hostname" : "sqs.us-isof-south-1.csp.hci.ic.gov" - }, - "us-isof-east-1" : { - "variants" : [ { - "hostname" : "sqs.us-isof-east-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - }, - "us-isof-south-1" : { - "variants" : [ { - "hostname" : "sqs.us-isof-south-1.csp.hci.ic.gov", - "tags" : [ "fips" ] - } ] - } - } - }, - "ssm" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "states" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "streams.dynamodb" : { - "defaults" : { - "credentialScope" : { - "service" : "dynamodb" - }, - "protocols" : [ "http", "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "sts" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "swf" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "synthetics" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "tagging" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "textract" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "transcribe" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "transcribestreaming" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "translate" : { - "defaults" : { - "protocols" : [ "https" ] - }, - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - }, - "trustedadvisor" : { - "endpoints" : { - "us-isof-south-1" : { } - } - }, - "xray" : { - "endpoints" : { - "us-isof-east-1" : { }, - "us-isof-south-1" : { } - } - } - } - }, { - "defaults" : { - "hostname" : "{service}.{region}.{dnsSuffix}", - "protocols" : [ "https" ], - "signatureVersions" : [ "v4" ], - "variants" : [ { - "dnsSuffix" : "amazonaws.eu", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "fips" ] - }, { - "dnsSuffix" : "api.amazonwebservices.eu", - "hostname" : "{service}-fips.{region}.{dnsSuffix}", - "tags" : [ "dualstack", "fips" ] - }, { - "dnsSuffix" : "api.amazonwebservices.eu", - "hostname" : "{service}.{region}.{dnsSuffix}", - "tags" : [ "dualstack" ] - } ] - }, - "dnsSuffix" : "amazonaws.eu", - "partition" : "aws-eusc", - "partitionName" : "AWS EUSC", - "regionRegex" : "^eusc\\-(de)\\-\\w+\\-\\d+$", - "regions" : { - "eusc-de-east-1" : { - "description" : "AWS European Sovereign Cloud (Germany)" - } - }, - "services" : { - "access-analyzer" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "acm" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "acm-pca" : { - "endpoints" : { - 
"eusc-de-east-1" : { } - } - }, - "agreement-marketplace" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "api.ecr" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "api.pricing" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "api.sagemaker" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "appconfig" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "appconfigdata" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "application-autoscaling" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "arc-zonal-shift" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "athena" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "autoscaling" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "backup" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "batch" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "bedrock" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "cloudcontrolapi" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "cloudformation" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "cloudtrail" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "codedeploy" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "compute-optimizer" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "config" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "cost-optimization-hub" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "datasync" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "datazone" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "directconnect" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "dlm" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "dms" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "ds" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "dynamodb" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "ebs" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "ec2" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "ecs" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "eks" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "eks-auth" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "elasticache" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "elasticfilesystem" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "elasticloadbalancing" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "elasticmapreduce" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "email" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "entitlement.marketplace" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "es" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "events" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "firehose" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "gameliftstreams" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "glue" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "guardduty" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "health" : { - "endpoints" : { - "eusc-de-east-1" : { - "deprecated" : true - } - } - }, - "identitystore" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "internetmonitor" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "kafka" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "kendra-ranking" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "kinesis" : { 
- "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "kinesisanalytics" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "kms" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "lakeformation" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "lambda" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "license-manager" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "logs" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "metering.marketplace" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "metrics.sagemaker" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "monitoring" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "notifications" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "oam" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "pi" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "qbusiness" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "ram" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "rbin" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "rds" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "redshift" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "resource-groups" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "rolesanywhere" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "route53resolver" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "s3" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "s3-control" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "scheduler" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "secretsmanager" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "securityhub" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "servicediscovery" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "servicequotas" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "signer" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "sms-voice" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "sns" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "sqs" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "ssm" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "states" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "storagegateway" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "streams.dynamodb" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "sts" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "swf" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "synthetics" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "tagging" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "transfer" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "trustedadvisor" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "wafv2" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - }, - "xray" : { - "endpoints" : { - "eusc-de-east-1" : { } - } - } - } - } ], - "version" : 3 -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 10c368a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/paginators-1.json deleted file mode 100644 index b34d382..0000000 --- a/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListMatchingJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobs" - }, - "ListMatchingWorkflows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowSummaries" - }, - "ListSchemaMappings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "schemaList" - }, - "ListIdMappingJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobs" - }, - "ListIdMappingWorkflows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowSummaries" - }, - "ListProviderServices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "providerServiceSummaries" - }, - "ListIdNamespaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "idNamespaceSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/service-2.json.gz deleted file mode 100644 index e42fb33..0000000 Binary files a/venv/Lib/site-packages/botocore/data/entityresolution/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/es/2015-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/es/2015-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9fe5115..0000000 Binary files a/venv/Lib/site-packages/botocore/data/es/2015-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/es/2015-01-01/examples-1.json b/venv/Lib/site-packages/botocore/data/es/2015-01-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/es/2015-01-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/es/2015-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/es/2015-01-01/paginators-1.json deleted file mode 100644 index 4c0f24e..0000000 --- a/venv/Lib/site-packages/botocore/data/es/2015-01-01/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListElasticsearchInstanceTypes": { - "result_key": "ElasticsearchInstanceTypes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListElasticsearchVersions": { - "result_key": "ElasticsearchVersions", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "DescribeReservedElasticsearchInstanceOfferings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ReservedElasticsearchInstanceOfferings" - }, - "DescribeReservedElasticsearchInstances": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ReservedElasticsearchInstances" - }, - 
"GetUpgradeHistory": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "UpgradeHistories" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/es/2015-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/es/2015-01-01/service-2.json.gz deleted file mode 100644 index 91a2850..0000000 Binary files a/venv/Lib/site-packages/botocore/data/es/2015-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/events/2014-02-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/events/2014-02-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9059c65..0000000 Binary files a/venv/Lib/site-packages/botocore/data/events/2014-02-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/events/2014-02-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/events/2014-02-03/service-2.json.gz deleted file mode 100644 index 3595b10..0000000 Binary files a/venv/Lib/site-packages/botocore/data/events/2014-02-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/events/2015-10-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/events/2015-10-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 774b7f5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/events/2015-10-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/events/2015-10-07/examples-1.json b/venv/Lib/site-packages/botocore/data/events/2015-10-07/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/events/2015-10-07/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/events/2015-10-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/events/2015-10-07/paginators-1.json deleted file mode 100644 index 501a322..0000000 --- a/venv/Lib/site-packages/botocore/data/events/2015-10-07/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListRuleNamesByTarget": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "RuleNames" - }, - "ListRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Rules" - }, - "ListTargetsByRule": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Targets" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/events/2015-10-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/events/2015-10-07/service-2.json.gz deleted file mode 100644 index 1a1ba49..0000000 Binary files a/venv/Lib/site-packages/botocore/data/events/2015-10-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index bf6a324..0000000 Binary files a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/examples-1.json b/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/paginators-1.json deleted file mode 100644 index c72d3cb..0000000 --- a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListExperiments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "experiments" - }, - "ListFeatures": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "features" - }, - "ListLaunches": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "launches" - }, - "ListProjects": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "projects" - }, - "ListSegmentReferences": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "referencedBy" - }, - "ListSegments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "segments" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/service-2.json.gz deleted file mode 100644 index 9b2e4f6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/evidently/2021-02-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/evs/2023-07-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 003027c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/evs/2023-07-27/paginators-1.json deleted file mode 100644 index c5e0850..0000000 --- a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListEnvironmentHosts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environmentHosts" - }, - "ListEnvironmentVlans": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environmentVlans" - }, - "ListEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environmentSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/evs/2023-07-27/service-2.json.gz deleted file mode 100644 index 3f34915..0000000 Binary files a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/waiters-2.json b/venv/Lib/site-packages/botocore/data/evs/2023-07-27/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/evs/2023-07-27/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git 
"result_key": "UserDefinedFunctions", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "GetCrawlerMetrics": { - "result_key": "CrawlerMetricsList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "GetConnections": { - "result_key": "ConnectionList", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "GetSecurityConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityConfigurations" - }, - "GetPartitionIndexes": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "PartitionIndexDescriptorList" - }, - "GetResourcePolicies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "GetResourcePoliciesResponseList" - }, - "ListRegistries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Registries" - }, - "ListSchemaVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Schemas" - }, - "ListSchemas": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Schemas" - }, - "ListUsageProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Profiles" - }, - "GetWorkflowRuns": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Runs" - }, - "ListBlueprints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Blueprints" - }, - "ListJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "JobNames" - }, - "ListTriggers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TriggerNames" - }, - "ListWorkflows": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Workflows" - }, - "ListTableOptimizerRuns": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TableOptimizerRuns" - }, - "DescribeEntity": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Fields" - }, - "ListConnectionTypes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ConnectionTypes" - }, - "ListEntities": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Entities" - }, - "ListMaterializedViewRefreshTaskRuns": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MaterializedViewRefreshTaskRuns" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/glue/2017-03-31/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/glue/2017-03-31/paginators-1.sdk-extras.json deleted file mode 100644 index 4d788b1..0000000 --- a/venv/Lib/site-packages/botocore/data/glue/2017-03-31/paginators-1.sdk-extras.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "version": 1, - "merge": { - "pagination": { - "ListTableOptimizerRuns": { - "non_aggregate_keys": [ - "CatalogId", - "DatabaseName", - "TableName" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/glue/2017-03-31/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/glue/2017-03-31/service-2.json.gz deleted file mode 100644 index f667e14..0000000 Binary files a/venv/Lib/site-packages/botocore/data/glue/2017-03-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/endpoint-rule-set-1.json.gz deleted file mode 100644 index 16a9ec5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/examples-1.json b/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/paginators-1.json b/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/paginators-1.json deleted file mode 100644 index 55d05f2..0000000 --- a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListPermissions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "permissions" - }, - "ListWorkspaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workspaces" - }, - "ListVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "grafanaVersions" - }, - "ListWorkspaceServiceAccountTokens": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "serviceAccountTokens" - }, - "ListWorkspaceServiceAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "serviceAccounts" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/paginators-1.sdk-extras.json deleted file mode 100644 index 421e2f6..0000000 --- a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/paginators-1.sdk-extras.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListWorkspaceServiceAccounts": { - "non_aggregate_keys": [ - "workspaceId" - ] - }, - "ListWorkspaceServiceAccountTokens": { - "non_aggregate_keys": [ - "serviceAccountId", - "workspaceId" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/service-2.json.gz deleted file mode 100644 index 2ae1e6c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/grafana/2020-08-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 302bb9c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/paginators-1.json deleted file 
mode 100644 index 303b438..0000000 --- a/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/paginators-1.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "pagination": { - "ListBulkDeploymentDetailedReports": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Deployments" - }, - "ListBulkDeployments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BulkDeployments" - }, - "ListConnectorDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListConnectorDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - }, - "ListCoreDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListCoreDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - }, - "ListDeployments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Deployments" - }, - "ListDeviceDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListDeviceDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - }, - "ListFunctionDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListFunctionDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - }, - "ListGroupVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Groups" - }, - "ListLoggerDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListLoggerDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - }, - "ListResourceDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListResourceDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - }, - "ListSubscriptionDefinitionVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListSubscriptionDefinitions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Definitions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/service-2.json.gz deleted file mode 100644 index 4caba19..0000000 Binary files a/venv/Lib/site-packages/botocore/data/greengrass/2017-06-07/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 302bb9c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/examples-1.json b/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/paginators-1.json deleted file mode 100644 index 2e2af05..0000000 --- a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListComponentVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "componentVersions" - }, - "ListComponents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "components" - }, - "ListCoreDevices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "coreDevices" - }, - "ListDeployments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "deployments" - }, - "ListEffectiveDeployments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "effectiveDeployments" - }, - "ListInstalledComponents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "installedComponents" - }, - "ListClientDevicesAssociatedWithCoreDevice": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "associatedClientDevices" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/service-2.json.gz deleted file mode 100644 index f2d78cf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/greengrassv2/2020-11-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index f311ea3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/examples-1.json b/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/paginators-1.json deleted file mode 100644 index 0ead110..0000000 --- 
a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListConfigs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "configList" - }, - "ListContacts": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "contactList" - }, - "ListDataflowEndpointGroups": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "dataflowEndpointGroupList" - }, - "ListMissionProfiles": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "missionProfileList" - }, - "ListGroundStations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "groundStationList" - }, - "ListSatellites": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "satellites" - }, - "ListEphemerides": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "ephemerides" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/service-2.json.gz deleted file mode 100644 index 00efd2c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/waiters-2.json b/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/waiters-2.json deleted file mode 100644 index c0080e2..0000000 --- a/venv/Lib/site-packages/botocore/data/groundstation/2019-05-23/waiters-2.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ContactScheduled" : { - "description" : "Waits until a contact has been scheduled", - "delay" : 5, - "maxAttempts" : 180, - "operation" : "DescribeContact", - "acceptors" : [ { - "matcher" : "path", - "argument" : "contactStatus", - "state" : "failure", - "expected" : "FAILED_TO_SCHEDULE" - }, { - "matcher" : "path", - "argument" : "contactStatus", - "state" : "success", - "expected" : "SCHEDULED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index fa7ddb6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/examples-1.json b/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/paginators-1.json deleted file mode 100644 index e38f5e2..0000000 --- a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/paginators-1.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "pagination": { - "ListDetectors": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - 
"result_key": "DetectorIds" - }, - "ListFindings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FindingIds" - }, - "ListIPSets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "IpSetIds" - }, - "ListThreatIntelSets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ThreatIntelSetIds" - }, - "ListInvitations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Invitations" - }, - "ListMembers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Members" - }, - "ListFilters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "FilterNames" - }, - "ListOrganizationAdminAccounts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AdminAccounts" - }, - "DescribeMalwareScans": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Scans" - }, - "ListCoverage": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Resources" - }, - "ListThreatEntitySets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ThreatEntitySetIds" - }, - "ListTrustedEntitySets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrustedEntitySetIds" - }, - "ListMalwareScans": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Scans" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/service-2.json.gz deleted file mode 100644 index 200ddb2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/guardduty/2017-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/health/2016-08-04/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/health/2016-08-04/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7c74cf9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/health/2016-08-04/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/health/2016-08-04/examples-1.json b/venv/Lib/site-packages/botocore/data/health/2016-08-04/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/health/2016-08-04/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/health/2016-08-04/paginators-1.json b/venv/Lib/site-packages/botocore/data/health/2016-08-04/paginators-1.json deleted file mode 100644 index 5109481..0000000 --- a/venv/Lib/site-packages/botocore/data/health/2016-08-04/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "DescribeAffectedEntities": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "entities" - }, - "DescribeEventAggregates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "eventAggregates" - }, - "DescribeEvents": { - 
"input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "events" - }, - "DescribeEventTypes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "eventTypes" - }, - "DescribeAffectedAccountsForOrganization": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "affectedAccounts", - "non_aggregate_keys": [ - "eventScopeCode" - ] - }, - "DescribeAffectedEntitiesForOrganization": { - "input_token": "nextToken", - "limit_key": "maxResults", - "non_aggregate_keys": [ - "failedSet" - ], - "output_token": "nextToken", - "result_key": "entities" - }, - "DescribeEventsForOrganization": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "events" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/health/2016-08-04/service-2.json.gz b/venv/Lib/site-packages/botocore/data/health/2016-08-04/service-2.json.gz deleted file mode 100644 index e1da764..0000000 Binary files a/venv/Lib/site-packages/botocore/data/health/2016-08-04/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6c53c82..0000000 Binary files a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/examples-1.json b/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/service-2.json.gz deleted file mode 100644 index 2df3ee9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/waiters-2.json deleted file mode 100644 index ba6478f..0000000 --- a/venv/Lib/site-packages/botocore/data/healthlake/2017-07-01/waiters-2.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "version": 2, - "waiters": { - "FHIRDatastoreActive": { - "operation": "DescribeFHIRDatastore", - "delay": 60, - "maxAttempts": 360, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "DatastoreProperties.DatastoreStatus", - "expected": "ACTIVE" - }, - { - "state": "failure", - "matcher": "path", - "argument": "DatastoreProperties.DatastoreStatus", - "expected": "CREATE_FAILED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "DatastoreProperties.DatastoreStatus", - "expected": "DELETED" - } - ] - }, - "FHIRDatastoreDeleted": { - "operation": "DescribeFHIRDatastore", - 
"delay": 120, - "maxAttempts": 360, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "DatastoreProperties.DatastoreStatus", - "expected": "DELETED" - } - ] - }, - "FHIRExportJobCompleted": { - "operation": "DescribeFHIRExportJob", - "delay": 120, - "maxAttempts": 360, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "ExportJobProperties.JobStatus", - "expected": "COMPLETED" - }, - { - "state": "success", - "matcher": "path", - "argument": "ExportJobProperties.JobStatus", - "expected": "COMPLETED_WITH_ERRORS" - }, - { - "state": "failure", - "matcher": "path", - "argument": "ExportJobProperties.JobStatus", - "expected": "CANCEL_COMPLETED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "ExportJobProperties.JobStatus", - "expected": "FAILED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "ExportJobProperties.JobStatus", - "expected": "CANCEL_FAILED" - } - ] - }, - "FHIRImportJobCompleted": { - "operation": "DescribeFHIRImportJob", - "delay": 120, - "maxAttempts": 720, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "ImportJobProperties.JobStatus", - "expected": "COMPLETED" - }, - { - "state": "success", - "matcher": "path", - "argument": "ImportJobProperties.JobStatus", - "expected": "COMPLETED_WITH_ERRORS" - }, - { - "state": "failure", - "matcher": "path", - "argument": "ImportJobProperties.JobStatus", - "expected": "FAILED" - } - ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iam/2010-05-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index be9ec48..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/examples-1.json b/venv/Lib/site-packages/botocore/data/iam/2010-05-08/examples-1.json deleted file mode 100644 index cd3a94a..0000000 --- a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/examples-1.json +++ /dev/null @@ -1,1577 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddClientIDToOpenIDConnectProvider": [ - { - "input": { - "ClientID": "my-application-ID", - "OpenIDConnectProviderArn": "arn:aws:iam::123456789012:oidc-provider/server.example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following add-client-id-to-open-id-connect-provider command adds the client ID my-application-ID to the OIDC provider named server.example.com:", - "id": "028e91f4-e2a6-4d59-9e3b-4965a3fb19be", - "title": "To add a client ID (audience) to an Open-ID Connect (OIDC) provider" - } - ], - "AddRoleToInstanceProfile": [ - { - "input": { - "InstanceProfileName": "Webserver", - "RoleName": "S3Access" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command adds the role named S3Access to the instance profile named Webserver:", - "id": "c107fac3-edb6-4827-8a71-8863ec91c81f", - "title": "To add a role to an instance profile" - } - ], - "AddUserToGroup": [ - { - "input": { - "GroupName": "Admins", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command adds an IAM user named Bob to the IAM group named Admins:", - "id": "619c7e6b-09f8-4036-857b-51a6ea5027ca", - "title": "To add a user to an IAM group" - } - ], - "AttachGroupPolicy": [ - { - "input": { - "GroupName": 
"Finance", - "PolicyArn": "arn:aws:iam::aws:policy/ReadOnlyAccess" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command attaches the AWS managed policy named ReadOnlyAccess to the IAM group named Finance.", - "id": "87551489-86f0-45db-9889-759936778f2b", - "title": "To attach a managed policy to an IAM group" - } - ], - "AttachRolePolicy": [ - { - "input": { - "PolicyArn": "arn:aws:iam::aws:policy/ReadOnlyAccess", - "RoleName": "ReadOnlyRole" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command attaches the AWS managed policy named ReadOnlyAccess to the IAM role named ReadOnlyRole.", - "id": "3e1b8c7c-99c8-4fc4-a20c-131fe3f22c7e", - "title": "To attach a managed policy to an IAM role" - } - ], - "AttachUserPolicy": [ - { - "input": { - "PolicyArn": "arn:aws:iam::aws:policy/AdministratorAccess", - "UserName": "Alice" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command attaches the AWS managed policy named AdministratorAccess to the IAM user named Alice.", - "id": "1372ebd8-9475-4b1a-a479-23b6fd4b8b3e", - "title": "To attach a managed policy to an IAM user" - } - ], - "ChangePassword": [ - { - "input": { - "NewPassword": "]35d/{pB9Fo9wJ", - "OldPassword": "3s0K_;xh4~8XXI" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command changes the password for the current IAM user.", - "id": "3a80c66f-bffb-46df-947c-1e8fa583b470", - "title": "To change the password for your IAM user" - } - ], - "CreateAccessKey": [ - { - "input": { - "UserName": "Bob" - }, - "output": { - "AccessKey": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", - "CreateDate": "2015-03-09T18:39:23.411Z", - "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", - "Status": "Active", - "UserName": "Bob" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command creates an access key (access key ID and secret access key) for the IAM user named Bob.", - "id": "1fbb3211-4cf2-41db-8c20-ba58d9f5802d", - "title": "To create an access key for an IAM user" - } - ], - "CreateAccountAlias": [ - { - "input": { - "AccountAlias": "examplecorp" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command associates the alias examplecorp to your AWS account.", - "id": "5adaf6fb-94fc-4ca2-b825-2fbc2062add1", - "title": "To create an account alias" - } - ], - "CreateGroup": [ - { - "input": { - "GroupName": "Admins" - }, - "output": { - "Group": { - "Arn": "arn:aws:iam::123456789012:group/Admins", - "CreateDate": "2015-03-09T20:30:24.940Z", - "GroupId": "AIDGPMS9RO4H3FEXAMPLE", - "GroupName": "Admins", - "Path": "/" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command creates an IAM group named Admins.", - "id": "d5da2a90-5e69-4ef7-8ae8-4c33dc21fd21", - "title": "To create an IAM group" - } - ], - "CreateInstanceProfile": [ - { - "input": { - "InstanceProfileName": "Webserver" - }, - "output": { - "InstanceProfile": { - "Arn": "arn:aws:iam::123456789012:instance-profile/Webserver", - "CreateDate": "2015-03-09T20:33:19.626Z", - "InstanceProfileId": "AIPAJMBYC7DLSPEXAMPLE", - "InstanceProfileName": "Webserver", - "Path": "/", - "Roles": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command creates an instance profile named Webserver that is ready to have a 
role attached and then be associated with an EC2 instance.", - "id": "5d84e6ae-5921-4e39-8454-10232cd9ff9a", - "title": "To create an instance profile" - } - ], - "CreateLoginProfile": [ - { - "input": { - "Password": "h]6EszR}vJ*m", - "PasswordResetRequired": true, - "UserName": "Bob" - }, - "output": { - "LoginProfile": { - "CreateDate": "2015-03-10T20:55:40.274Z", - "PasswordResetRequired": true, - "UserName": "Bob" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command changes IAM user Bob's password and sets the flag that required Bob to change the password the next time he signs in.", - "id": "c63795bc-3444-40b3-89df-83c474ef88be", - "title": "To create an instance profile" - } - ], - "CreateOpenIDConnectProvider": [ - { - "input": { - "ClientIDList": [ - "my-application-id" - ], - "ThumbprintList": [ - "3768084dfb3d2b68b7897bf5f565da8efEXAMPLE" - ], - "Url": "https://server.example.com" - }, - "output": { - "OpenIDConnectProviderArn": "arn:aws:iam::123456789012:oidc-provider/server.example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example defines a new OIDC provider in IAM with a client ID of my-application-id and pointing at the server with a URL of https://server.example.com.", - "id": "4e4a6bff-cc97-4406-922e-0ab4a82cdb63", - "title": "To create an instance profile" - } - ], - "CreateRole": [ - { - "input": { - "AssumeRolePolicyDocument": "", - "Path": "/", - "RoleName": "Test-Role" - }, - "output": { - "Role": { - "Arn": "arn:aws:iam::123456789012:role/Test-Role", - "AssumeRolePolicyDocument": "", - "CreateDate": "2013-06-07T20:43:32.821Z", - "Path": "/", - "RoleId": "AKIAIOSFODNN7EXAMPLE", - "RoleName": "Test-Role" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command creates a role named Test-Role and attaches a trust policy that you must convert from JSON to a string. 
Upon success, the response includes the same policy as a URL-encoded JSON string.", - "id": "eaaa4b5f-51f1-4f73-b0d3-30127040eff8", - "title": "To create an IAM role" - } - ], - "CreateUser": [ - { - "input": { - "UserName": "Bob" - }, - "output": { - "User": { - "Arn": "arn:aws:iam::123456789012:user/Bob", - "CreateDate": "2013-06-08T03:20:41.270Z", - "Path": "/", - "UserId": "AKIAIOSFODNN7EXAMPLE", - "UserName": "Bob" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following create-user command creates an IAM user named Bob in the current account.", - "id": "eb15f90b-e5f5-4af8-a594-e4e82b181a62", - "title": "To create an IAM user" - } - ], - "DeleteAccessKey": [ - { - "input": { - "AccessKeyId": "AKIDPMS9RO4H3FEXAMPLE", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command deletes one access key (access key ID and secret access key) assigned to the IAM user named Bob.", - "id": "61a785a7-d30a-415a-ae18-ab9236e56871", - "title": "To delete an access key for an IAM user" - } - ], - "DeleteAccountAlias": [ - { - "input": { - "AccountAlias": "mycompany" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the alias mycompany from the current AWS account:", - "id": "7abeca65-04a8-4500-a890-47f1092bf766", - "title": "To delete an account alias" - } - ], - "DeleteAccountPasswordPolicy": [ - { - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the password policy from the current AWS account:", - "id": "9ddf755e-495c-49bc-ae3b-ea6cc9b8ebcf", - "title": "To delete the current account password policy" - } - ], - "DeleteGroupPolicy": [ - { - "input": { - "GroupName": "Admins", - "PolicyName": "ExamplePolicy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command deletes the policy named ExamplePolicy from the group named Admins:", - "id": "e683f2bd-98a4-4fe0-bb66-33169c692d4a", - "title": "To delete a policy from an IAM group" - } - ], - "DeleteInstanceProfile": [ - { - "input": { - "InstanceProfileName": "ExampleInstanceProfile" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command deletes the instance profile named ExampleInstanceProfile", - "id": "12d74fb8-3433-49db-8171-a1fc764e354d", - "title": "To delete an instance profile" - } - ], - "DeleteLoginProfile": [ - { - "input": { - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command deletes the password for the IAM user named Bob.", - "id": "1fe57059-fc73-42e2-b992-517b7d573b5c", - "title": "To delete a password for an IAM user" - } - ], - "DeleteRole": [ - { - "input": { - "RoleName": "Test-Role" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the role named Test-Role.", - "id": "053cdf74-9bda-44b8-bdbb-140fd5a32603", - "title": "To delete an IAM role" - } - ], - "DeleteRolePolicy": [ - { - "input": { - "PolicyName": "ExamplePolicy", - "RoleName": "Test-Role" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the policy named ExamplePolicy from the role named Test-Role.", - "id": "9c667336-fde3-462c-b8f3-950800821e27", - "title": "To remove a policy from an IAM role" - } - ], - "DeleteSigningCertificate": [ - { - "input": { - "CertificateId": 
"TA7SMP42TDN5Z26OBPJE7EXAMPLE", - "UserName": "Anika" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command deletes the specified signing certificate for the IAM user named Anika.", - "id": "e3357586-ba9c-4070-b35b-d1a899b71987", - "title": "To delete a signing certificate for an IAM user" - } - ], - "DeleteUser": [ - { - "input": { - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the IAM user named Bob from the current account.", - "id": "a13dc3f9-59fe-42d9-abbb-fb98b204fdf0", - "title": "To delete an IAM user" - } - ], - "DeleteUserPolicy": [ - { - "input": { - "PolicyName": "ExamplePolicy", - "UserName": "Juan" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following delete-user-policy command removes the specified policy from the IAM user named Juan:", - "id": "34f07ddc-9bc1-4f52-bc59-cd0a3ccd06c8", - "title": "To remove a policy from an IAM user" - } - ], - "DeleteVirtualMFADevice": [ - { - "input": { - "SerialNumber": "arn:aws:iam::123456789012:mfa/ExampleName" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following delete-virtual-mfa-device command removes the specified MFA device from the current AWS account.", - "id": "2933b08b-dbe7-4b89-b8c1-fdf75feea1ee", - "title": "To remove a virtual MFA device" - } - ], - "GenerateOrganizationsAccessReport": [ - { - "input": { - "EntityPath": "o-a1b2c3d4e5/r-f6g7h8i9j0example/ou-1a2b3c-k9l8m7n6o5example" - }, - "output": { - "JobId": "examplea-1234-b567-cde8-90fg123abcd4" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation generates a report for the organizational unit ou-rge0-awexample", - "id": "generateorganizationsaccessreport-ou", - "title": "To generate a service last accessed data report for an organizational unit" - } - ], - "GenerateServiceLastAccessedDetails": [ - { - "input": { - "Arn": "arn:aws:iam::123456789012:policy/ExamplePolicy1" - }, - "output": { - "JobId": "examplef-1305-c245-eba4-71fe298bcda7" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation generates a report for the policy: ExamplePolicy1", - "id": "generateaccessdata-policy-1541695178514", - "title": "To generate a service last accessed data report for a policy" - } - ], - "GetAccountPasswordPolicy": [ - { - "output": { - "PasswordPolicy": { - "AllowUsersToChangePassword": false, - "ExpirePasswords": false, - "HardExpiry": false, - "MaxPasswordAge": 90, - "MinimumPasswordLength": 8, - "PasswordReusePrevention": 12, - "RequireLowercaseCharacters": false, - "RequireNumbers": true, - "RequireSymbols": true, - "RequireUppercaseCharacters": false - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command displays details about the password policy for the current AWS account.", - "id": "5e4598c7-c425-431f-8af1-19073b3c4a5f", - "title": "To see the current account password policy" - } - ], - "GetAccountSummary": [ - { - "output": { - "SummaryMap": { - "AccessKeysPerUserQuota": 2, - "AccountAccessKeysPresent": 1, - "AccountMFAEnabled": 0, - "AccountSigningCertificatesPresent": 0, - "AttachedPoliciesPerGroupQuota": 10, - "AttachedPoliciesPerRoleQuota": 10, - "AttachedPoliciesPerUserQuota": 10, - "GlobalEndpointTokenVersion": 2, - "GroupPolicySizeQuota": 5120, - "Groups": 15, - "GroupsPerUserQuota": 10, - "GroupsQuota": 100, - 
"MFADevices": 6, - "MFADevicesInUse": 3, - "Policies": 8, - "PoliciesQuota": 1000, - "PolicySizeQuota": 5120, - "PolicyVersionsInUse": 22, - "PolicyVersionsInUseQuota": 10000, - "ServerCertificates": 1, - "ServerCertificatesQuota": 20, - "SigningCertificatesPerUserQuota": 2, - "UserPolicySizeQuota": 2048, - "Users": 27, - "UsersQuota": 5000, - "VersionsPerPolicyQuota": 5 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command returns information about the IAM entity quotas and usage in the current AWS account.", - "id": "9d8447af-f344-45de-8219-2cebc3cce7f2", - "title": "To get information about IAM entity quotas and usage in the current account" - } - ], - "GetInstanceProfile": [ - { - "input": { - "InstanceProfileName": "ExampleInstanceProfile" - }, - "output": { - "InstanceProfile": { - "Arn": "arn:aws:iam::336924118301:instance-profile/ExampleInstanceProfile", - "CreateDate": "2013-06-12T23:52:02Z", - "InstanceProfileId": "AID2MAB8DPLSRHEXAMPLE", - "InstanceProfileName": "ExampleInstanceProfile", - "Path": "/", - "Roles": [ - { - "Arn": "arn:aws:iam::336924118301:role/Test-Role", - "AssumeRolePolicyDocument": "", - "CreateDate": "2013-01-09T06:33:26Z", - "Path": "/", - "RoleId": "AIDGPMS9RO4H3FEXAMPLE", - "RoleName": "Test-Role" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command gets information about the instance profile named ExampleInstanceProfile.", - "id": "463b9ba5-18cc-4608-9ccb-5a7c6b6e5fe7", - "title": "To get information about an instance profile" - } - ], - "GetLoginProfile": [ - { - "input": { - "UserName": "Anika" - }, - "output": { - "LoginProfile": { - "CreateDate": "2012-09-21T23:03:39Z", - "UserName": "Anika" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command gets information about the password for the IAM user named Anika.", - "id": "d6b580cc-909f-4925-9caa-d425cbc1ad47", - "title": "To get password information for an IAM user" - } - ], - "GetOrganizationsAccessReport": [ - { - "input": { - "JobId": "examplea-1234-b567-cde8-90fg123abcd4" - }, - "output": { - "AccessDetails": [ - { - "EntityPath": "o-a1b2c3d4e5/r-f6g7h8i9j0example/ou-1a2b3c-k9l8m7n6o5example/111122223333", - "LastAuthenticatedTime": "2019-05-25T16:29:52Z", - "Region": "us-east-1", - "ServiceName": "Amazon DynamoDB", - "ServiceNamespace": "dynamodb", - "TotalAuthenticatedEntities": 2 - }, - { - "EntityPath": "o-a1b2c3d4e5/r-f6g7h8i9j0example/ou-1a2b3c-k9l8m7n6o5example/123456789012", - "LastAuthenticatedTime": "2019-06-15T13:12:06Z", - "Region": "us-east-1", - "ServiceName": "AWS Identity and Access Management", - "ServiceNamespace": "iam", - "TotalAuthenticatedEntities": 4 - }, - { - "ServiceName": "Amazon Simple Storage Service", - "ServiceNamespace": "s3", - "TotalAuthenticatedEntities": 0 - } - ], - "IsTruncated": false, - "JobCompletionDate": "2019-06-18T19:47:35.241Z", - "JobCreationDate": "2019-06-18T19:47:31.466Z", - "JobStatus": "COMPLETED", - "NumberOfServicesAccessible": 3, - "NumberOfServicesNotAccessed": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation gets details about the report with the job ID: examplea-1234-b567-cde8-90fg123abcd4", - "id": "getorganizationsaccessreport-ou", - "title": "To get details from a previously generated organizational unit report" - } - ], - "GetRole": [ - { - "input": { - "RoleName": "Test-Role" - }, - "output": { - "Role": { - "Arn": 
"arn:aws:iam::123456789012:role/Test-Role", - "AssumeRolePolicyDocument": "", - "CreateDate": "2013-04-18T05:01:58Z", - "MaxSessionDuration": 3600, - "Path": "/", - "RoleId": "AROADBQP57FF2AEXAMPLE", - "RoleLastUsed": { - "LastUsedDate": "2019-11-18T05:01:58Z", - "Region": "us-east-1" - }, - "RoleName": "Test-Role" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command gets information about the role named Test-Role.", - "id": "5b7d03a6-340c-472d-aa77-56425950d8b0", - "title": "To get information about an IAM role" - } - ], - "GetServiceLastAccessedDetails": [ - { - "input": { - "JobId": "examplef-1305-c245-eba4-71fe298bcda7" - }, - "output": { - "IsTruncated": false, - "JobCompletionDate": "2018-10-24T19:47:35.241Z", - "JobCreationDate": "2018-10-24T19:47:31.466Z", - "JobStatus": "COMPLETED", - "ServicesLastAccessed": [ - { - "LastAuthenticated": "2018-10-24T19:11:00Z", - "LastAuthenticatedEntity": "arn:aws:iam::123456789012:user/AWSExampleUser01", - "ServiceName": "AWS Identity and Access Management", - "ServiceNamespace": "iam", - "TotalAuthenticatedEntities": 2 - }, - { - "ServiceName": "Amazon Simple Storage Service", - "ServiceNamespace": "s3", - "TotalAuthenticatedEntities": 0 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation gets details about the report with the job ID: examplef-1305-c245-eba4-71fe298bcda7", - "id": "getserviceaccessdetails-policy-1541696298085", - "title": "To get details from a previously-generated report" - } - ], - "GetServiceLastAccessedDetailsWithEntities": [ - { - "input": { - "JobId": "examplef-1305-c245-eba4-71fe298bcda7", - "ServiceNamespace": "iam" - }, - "output": { - "EntityDetailsList": [ - { - "EntityInfo": { - "Arn": "arn:aws:iam::123456789012:user/AWSExampleUser01", - "Id": "AIDAEX2EXAMPLEB6IGCDC", - "Name": "AWSExampleUser01", - "Path": "/", - "Type": "USER" - }, - "LastAuthenticated": "2018-10-24T19:10:00Z" - }, - { - "EntityInfo": { - "Arn": "arn:aws:iam::123456789012:role/AWSExampleRole01", - "Id": "AROAEAEXAMPLEIANXSIU4", - "Name": "AWSExampleRole01", - "Path": "/", - "Type": "ROLE" - } - } - ], - "IsTruncated": false, - "JobCompletionDate": "2018-10-24T19:47:35.241Z", - "JobCreationDate": "2018-10-24T19:47:31.466Z", - "JobStatus": "COMPLETED" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation returns details about the entities that attempted to access the IAM service.", - "id": "getserviceaccessdetailsentity-policy-1541697621384", - "title": "To get sntity details from a previously-generated report" - } - ], - "GetUser": [ - { - "input": { - "UserName": "Bob" - }, - "output": { - "User": { - "Arn": "arn:aws:iam::123456789012:user/Bob", - "CreateDate": "2012-09-21T23:03:13Z", - "Path": "/", - "UserId": "AKIAIOSFODNN7EXAMPLE", - "UserName": "Bob" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command gets information about the IAM user named Bob.", - "id": "ede000a1-9e4c-40db-bd0a-d4f95e41a6ab", - "title": "To get information about an IAM user" - } - ], - "ListAccessKeys": [ - { - "input": { - "UserName": "Alice" - }, - "output": { - "AccessKeyMetadata": [ - { - "AccessKeyId": "AKIA111111111EXAMPLE", - "CreateDate": "2016-12-01T22:19:58Z", - "Status": "Active", - "UserName": "Alice" - }, - { - "AccessKeyId": "AKIA222222222EXAMPLE", - "CreateDate": "2016-12-01T22:20:01Z", - "Status": "Active", - "UserName": "Alice" - } - ] - }, - 
"comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the access keys IDs for the IAM user named Alice.", - "id": "15571463-ebea-411a-a021-1c76bd2a3625", - "title": "To list the access key IDs for an IAM user" - } - ], - "ListAccountAliases": [ - { - "input": { - }, - "output": { - "AccountAliases": [ - "exmaple-corporation" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the aliases for the current account.", - "id": "e27b457a-16f9-4e05-a006-3df7b3472741", - "title": "To list account aliases" - } - ], - "ListGroupPolicies": [ - { - "input": { - "GroupName": "Admins" - }, - "output": { - "PolicyNames": [ - "AdminRoot", - "KeyPolicy" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the names of in-line policies that are embedded in the IAM group named Admins.", - "id": "02de5095-2410-4d3a-ac1b-cc40234af68f", - "title": "To list the in-line policies for an IAM group" - } - ], - "ListGroups": [ - { - "input": { - }, - "output": { - "Groups": [ - { - "Arn": "arn:aws:iam::123456789012:group/Admins", - "CreateDate": "2016-12-15T21:40:08.121Z", - "GroupId": "AGPA1111111111EXAMPLE", - "GroupName": "Admins", - "Path": "/division_abc/subdivision_xyz/" - }, - { - "Arn": "arn:aws:iam::123456789012:group/division_abc/subdivision_xyz/product_1234/engineering/Test", - "CreateDate": "2016-11-30T14:10:01.156Z", - "GroupId": "AGP22222222222EXAMPLE", - "GroupName": "Test", - "Path": "/division_abc/subdivision_xyz/product_1234/engineering/" - }, - { - "Arn": "arn:aws:iam::123456789012:group/division_abc/subdivision_xyz/product_1234/Managers", - "CreateDate": "2016-06-12T20:14:52.032Z", - "GroupId": "AGPI3333333333EXAMPLE", - "GroupName": "Managers", - "Path": "/division_abc/subdivision_xyz/product_1234/" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the IAM groups in the current account:", - "id": "b3ab1380-2a21-42fb-8e85-503f65512c66", - "title": "To list the IAM groups for the current account" - } - ], - "ListGroupsForUser": [ - { - "input": { - "UserName": "Bob" - }, - "output": { - "Groups": [ - { - "Arn": "arn:aws:iam::123456789012:group/division_abc/subdivision_xyz/product_1234/engineering/Test", - "CreateDate": "2016-11-30T14:10:01.156Z", - "GroupId": "AGP2111111111EXAMPLE", - "GroupName": "Test", - "Path": "/division_abc/subdivision_xyz/product_1234/engineering/" - }, - { - "Arn": "arn:aws:iam::123456789012:group/division_abc/subdivision_xyz/product_1234/Managers", - "CreateDate": "2016-06-12T20:14:52.032Z", - "GroupId": "AGPI222222222SEXAMPLE", - "GroupName": "Managers", - "Path": "/division_abc/subdivision_xyz/product_1234/" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command displays the groups that the IAM user named Bob belongs to.", - "id": "278ec2ee-fc28-4136-83fb-433af0ae46a2", - "title": "To list the groups that an IAM user belongs to" - } - ], - "ListPoliciesGrantingServiceAccess": [ - { - "input": { - "Arn": "arn:aws:iam::123456789012:user/ExampleUser01", - "ServiceNamespaces": [ - "iam", - "ec2" - ] - }, - "output": { - "IsTruncated": false, - "PoliciesGrantingServiceAccess": [ - { - "Policies": [ - { - "PolicyArn": "arn:aws:iam::123456789012:policy/ExampleIamPolicy", - "PolicyName": "ExampleIamPolicy", - "PolicyType": "MANAGED" - }, - { - "EntityName": "AWSExampleGroup1", - "EntityType": 
"GROUP", - "PolicyName": "ExampleGroup1Policy", - "PolicyType": "INLINE" - } - ], - "ServiceNamespace": "iam" - }, - { - "Policies": [ - { - "PolicyArn": "arn:aws:iam::123456789012:policy/ExampleEc2Policy", - "PolicyName": "ExampleEc2Policy", - "PolicyType": "MANAGED" - } - ], - "ServiceNamespace": "ec2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following operation lists policies that allow ExampleUser01 to access IAM or EC2.", - "id": "listpoliciesaccess-user-1541698749508", - "title": "To list policies that allow access to a service" - } - ], - "ListRoleTags": [ - { - "input": { - "RoleName": "taggedrole1" - }, - "output": { - "IsTruncated": false, - "Tags": [ - { - "Key": "Dept", - "Value": "12345" - }, - { - "Key": "Team", - "Value": "Accounting" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to list the tags attached to a role.", - "id": "to-list-the-tags-attached-to-an-iam-role-1506719238376", - "title": "To list the tags attached to an IAM role" - } - ], - "ListSigningCertificates": [ - { - "input": { - "UserName": "Bob" - }, - "output": { - "Certificates": [ - { - "CertificateBody": "-----BEGIN CERTIFICATE----------END CERTIFICATE-----", - "CertificateId": "TA7SMP42TDN5Z26OBPJE7EXAMPLE", - "Status": "Active", - "UploadDate": "2013-06-06T21:40:08Z", - "UserName": "Bob" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the signing certificates for the IAM user named Bob.", - "id": "b4c10256-4fc9-457e-b3fd-4a110d4d73dc", - "title": "To list the signing certificates for an IAM user" - } - ], - "ListUserTags": [ - { - "input": { - "UserName": "anika" - }, - "output": { - "IsTruncated": false, - "Tags": [ - { - "Key": "Dept", - "Value": "12345" - }, - { - "Key": "Team", - "Value": "Accounting" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to list the tags attached to a user.", - "id": "to-list-the-tags-attached-to-an-iam-user-1506719473186", - "title": "To list the tags attached to an IAM user" - } - ], - "ListUsers": [ - { - "input": { - }, - "output": { - "Users": [ - { - "Arn": "arn:aws:iam::123456789012:user/division_abc/subdivision_xyz/engineering/Juan", - "CreateDate": "2012-09-05T19:38:48Z", - "PasswordLastUsed": "2016-09-08T21:47:36Z", - "Path": "/division_abc/subdivision_xyz/engineering/", - "UserId": "AID2MAB8DPLSRHEXAMPLE", - "UserName": "Juan" - }, - { - "Arn": "arn:aws:iam::123456789012:user/division_abc/subdivision_xyz/engineering/Anika", - "CreateDate": "2014-04-09T15:43:45Z", - "PasswordLastUsed": "2016-09-24T16:18:07Z", - "Path": "/division_abc/subdivision_xyz/engineering/", - "UserId": "AIDIODR4TAW7CSEXAMPLE", - "UserName": "Anika" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the IAM users in the current account.", - "id": "9edfbd73-03d8-4d8a-9a79-76c85e8c8298", - "title": "To list IAM users" - } - ], - "ListVirtualMFADevices": [ - { - "input": { - }, - "output": { - "VirtualMFADevices": [ - { - "SerialNumber": "arn:aws:iam::123456789012:mfa/ExampleMFADevice" - }, - { - "SerialNumber": "arn:aws:iam::123456789012:mfa/Juan" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command lists the virtual MFA devices that have been configured for the current account.", - "id": 
"54f9ac18-5100-4070-bec4-fe5f612710d5", - "title": "To list virtual MFA devices" - } - ], - "PutGroupPolicy": [ - { - "input": { - "GroupName": "Admins", - "PolicyDocument": "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"*\",\"Resource\":\"*\"}}", - "PolicyName": "AllPerms" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command adds a policy named AllPerms to the IAM group named Admins.", - "id": "4bc17418-758f-4d0f-ab0c-4d00265fec2e", - "title": "To add a policy to a group" - } - ], - "PutRolePolicy": [ - { - "input": { - "PolicyDocument": "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"*\"}}", - "PolicyName": "S3AccessPolicy", - "RoleName": "S3Access" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command adds a permissions policy to the role named Test-Role.", - "id": "de62fd00-46c7-4601-9e0d-71d5fbb11ecb", - "title": "To attach a permissions policy to an IAM role" - } - ], - "PutUserPolicy": [ - { - "input": { - "PolicyDocument": "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"*\",\"Resource\":\"*\"}}", - "PolicyName": "AllAccessPolicy", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command attaches a policy to the IAM user named Bob.", - "id": "2551ffc6-3576-4d39-823f-30b60bffc2c7", - "title": "To attach a policy to an IAM user" - } - ], - "RemoveRoleFromInstanceProfile": [ - { - "input": { - "InstanceProfileName": "ExampleInstanceProfile", - "RoleName": "Test-Role" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the role named Test-Role from the instance profile named ExampleInstanceProfile.", - "id": "6d9f46f1-9f4a-4873-b403-51a85c5c627c", - "title": "To remove a role from an instance profile" - } - ], - "RemoveUserFromGroup": [ - { - "input": { - "GroupName": "Admins", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command removes the user named Bob from the IAM group named Admins.", - "id": "fb54d5b4-0caf-41d8-af0e-10a84413f174", - "title": "To remove a user from an IAM group" - } - ], - "SetSecurityTokenServicePreferences": [ - { - "input": { - "GlobalEndpointTokenVersion": "v2Token" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command sets the STS global endpoint token to version 2. 
Version 2 tokens are valid in all Regions.", - "id": "61a785a7-d30a-415a-ae18-ab9236e56871", - "title": "To delete an access key for an IAM user" - } - ], - "TagRole": [ - { - "input": { - "RoleName": "taggedrole", - "Tags": [ - { - "Key": "Dept", - "Value": "Accounting" - }, - { - "Key": "CostCenter", - "Value": "12345" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to add tags to an existing role.", - "id": "to-add-a-tag-key-and-value-to-an-iam-role-1506718791513", - "title": "To add a tag key and value to an IAM role" - } - ], - "TagUser": [ - { - "input": { - "Tags": [ - { - "Key": "Dept", - "Value": "Accounting" - }, - { - "Key": "CostCenter", - "Value": "12345" - } - ], - "UserName": "anika" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to add tags to an existing user.", - "id": "to-add-a-tag-key-and-value-to-an-iam-user-1506719044227", - "title": "To add a tag key and value to an IAM user" - } - ], - "UntagRole": [ - { - "input": { - "RoleName": "taggedrole", - "TagKeys": [ - "Dept" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to remove a tag with the key 'Dept' from a role named 'taggedrole'.", - "id": "to-remove-a-tag-from-an-iam-role-1506719589943", - "title": "To remove a tag from an IAM role" - } - ], - "UntagUser": [ - { - "input": { - "TagKeys": [ - "Dept" - ], - "UserName": "anika" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to remove tags that are attached to a user named 'anika'.", - "id": "to-remove-a-tag-from-an-iam-user-1506719725554", - "title": "To remove a tag from an IAM user" - } - ], - "UpdateAccessKey": [ - { - "input": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", - "Status": "Inactive", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command deactivates the specified access key (access key ID and secret access key) for the IAM user named Bob.", - "id": "02b556fd-e673-49b7-ab6b-f2f9035967d0", - "title": "To activate or deactivate an access key for an IAM user" - } - ], - "UpdateAccountPasswordPolicy": [ - { - "input": { - "MinimumPasswordLength": 8, - "RequireNumbers": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command sets the password policy to require a minimum length of eight characters and to require one or more numbers in the password:", - "id": "c263a1af-37dc-4423-8dba-9790284ef5e0", - "title": "To set or change the current account password policy" - } - ], - "UpdateAssumeRolePolicy": [ - { - "input": { - "PolicyDocument": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Service\":[\"ec2.amazonaws.com\"]},\"Action\":[\"sts:AssumeRole\"]}]}", - "RoleName": "S3AccessForEC2Instances" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command updates the role trust policy for the role named Test-Role:", - "id": "c9150063-d953-4e99-9576-9685872006c6", - "title": "To update the trust policy for an IAM role" - } - ], - "UpdateGroup": [ - { - "input": { - "GroupName": "Test", - "NewGroupName": "Test-1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command changes the name of the IAM group Test to Test-1.", - "id": 
"f0cf1662-91ae-4278-a80e-7db54256ccba", - "title": "To rename an IAM group" - } - ], - "UpdateLoginProfile": [ - { - "input": { - "Password": "SomeKindOfPassword123!@#", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command creates or changes the password for the IAM user named Bob.", - "id": "036d9498-ecdb-4ed6-a8d8-366c383d1487", - "title": "To change the password for an IAM user" - } - ], - "UpdateSigningCertificate": [ - { - "input": { - "CertificateId": "TA7SMP42TDN5Z26OBPJE7EXAMPLE", - "Status": "Inactive", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command changes the status of a signing certificate for a user named Bob to Inactive.", - "id": "829aee7b-efc5-4b3b-84a5-7f899b38018d", - "title": "To change the active status of a signing certificate for an IAM user" - } - ], - "UpdateUser": [ - { - "input": { - "NewUserName": "Robert", - "UserName": "Bob" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command changes the name of the IAM user Bob to Robert. It does not change the user's path.", - "id": "275d53ed-347a-44e6-b7d0-a96276154352", - "title": "To change an IAM user's name" - } - ], - "UploadServerCertificate": [ - { - "input": { - "CertificateBody": "-----BEGIN CERTIFICATE----------END CERTIFICATE-----", - "Path": "/company/servercerts/", - "PrivateKey": "-----BEGIN DSA PRIVATE KEY----------END DSA PRIVATE KEY-----", - "ServerCertificateName": "ProdServerCert" - }, - "output": { - "ServerCertificateMetadata": { - "Arn": "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert", - "Expiration": "2012-05-08T01:02:03.004Z", - "Path": "/company/servercerts/", - "ServerCertificateId": "ASCA1111111111EXAMPLE", - "ServerCertificateName": "ProdServerCert", - "UploadDate": "2010-05-08T01:02:03.004Z" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following upload-server-certificate command uploads a server certificate to your AWS account:", - "id": "06eab6d1-ebf2-4bd9-839d-f7508b9a38b6", - "title": "To upload a server certificate to your AWS account" - } - ], - "UploadSigningCertificate": [ - { - "input": { - "CertificateBody": "-----BEGIN CERTIFICATE----------END CERTIFICATE-----", - "UserName": "Bob" - }, - "output": { - "Certificate": { - "CertificateBody": "-----BEGIN CERTIFICATE----------END CERTIFICATE-----", - "CertificateId": "ID123456789012345EXAMPLE", - "Status": "Active", - "UploadDate": "2015-06-06T21:40:08.121Z", - "UserName": "Bob" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following command uploads a signing certificate for the IAM user named Bob.", - "id": "e67489b6-7b73-4e30-9ed3-9a9e0231e458", - "title": "To upload a signing certificate for an IAM user" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/iam/2010-05-08/paginators-1.json deleted file mode 100644 index 91c09a2..0000000 --- a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/paginators-1.json +++ /dev/null @@ -1,254 +0,0 @@ -{ - "pagination": { - "GetAccountAuthorizationDetails": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": [ - "UserDetailList", - "GroupDetailList", - "RoleDetailList", - "Policies" - ] - }, - "GetGroup": { - "input_token": 
"Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Users", - "non_aggregate_keys": [ - "Group" - ] - }, - "ListAccessKeys": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "AccessKeyMetadata" - }, - "ListAccountAliases": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "AccountAliases" - }, - "ListAttachedGroupPolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "AttachedPolicies" - }, - "ListAttachedRolePolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "AttachedPolicies" - }, - "ListAttachedUserPolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "AttachedPolicies" - }, - "ListEntitiesForPolicy": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": [ - "PolicyGroups", - "PolicyUsers", - "PolicyRoles" - ] - }, - "ListGroupPolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "PolicyNames" - }, - "ListGroups": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Groups" - }, - "ListGroupsForUser": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Groups" - }, - "ListInstanceProfiles": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "InstanceProfiles" - }, - "ListInstanceProfilesForRole": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "InstanceProfiles" - }, - "ListMFADevices": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "MFADevices" - }, - "ListPolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Policies" - }, - "ListPolicyVersions": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Versions" - }, - "ListRolePolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "PolicyNames" - }, - "ListRoles": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Roles" - }, - "ListServerCertificates": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "ServerCertificateMetadataList" - }, - "ListSigningCertificates": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Certificates" - }, - "ListSSHPublicKeys": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": 
"Marker", - "result_key": "SSHPublicKeys" - }, - "ListUserPolicies": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "PolicyNames" - }, - "ListUsers": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Users" - }, - "ListVirtualMFADevices": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "VirtualMFADevices" - }, - "SimulateCustomPolicy": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "EvaluationResults" - }, - "SimulatePrincipalPolicy": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "EvaluationResults" - }, - "ListUserTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListInstanceProfileTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListMFADeviceTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListOpenIDConnectProviderTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListPolicyTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListRoleTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListSAMLProviderTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - }, - "ListServerCertificateTags": { - "input_token": "Marker", - "limit_key": "MaxItems", - "more_results": "IsTruncated", - "output_token": "Marker", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iam/2010-05-08/service-2.json.gz deleted file mode 100644 index ef44bfc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/waiters-2.json b/venv/Lib/site-packages/botocore/data/iam/2010-05-08/waiters-2.json deleted file mode 100644 index 6248041..0000000 --- a/venv/Lib/site-packages/botocore/data/iam/2010-05-08/waiters-2.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "version": 2, - "waiters": { - "InstanceProfileExists": { - "delay": 1, - "operation": "GetInstanceProfile", - "maxAttempts": 40, - "acceptors": [ - { - "expected": 200, - "matcher": "status", - "state": "success" - }, - { - "state": "retry", - "matcher": "status", - "expected": 404 - } - ] - }, - "UserExists": { - "delay": 1, - "operation": "GetUser", - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "status", - "expected": 200 - }, - { - "state": "retry", - "matcher": "error", - "expected": "NoSuchEntity" - } - ] - }, - "RoleExists": { - "delay": 1, - "operation": "GetRole", - "maxAttempts": 20, - 
"acceptors": [ - { - "state": "success", - "matcher": "status", - "expected": 200 - }, - { - "state": "retry", - "matcher": "error", - "expected": "NoSuchEntity" - } - ] - }, - "PolicyExists": { - "delay": 1, - "operation": "GetPolicy", - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "status", - "expected": 200 - }, - { - "state": "retry", - "matcher": "error", - "expected": "NoSuchEntity" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index f44a510..0000000 Binary files a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/examples-1.json b/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/paginators-1.json deleted file mode 100644 index 766e7c4..0000000 --- a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListGroupMemberships": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupMemberships" - }, - "ListGroupMembershipsForMember": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupMemberships" - }, - "ListGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Groups" - }, - "ListUsers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Users" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/service-2.json.gz deleted file mode 100644 index 1f35a6b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/identitystore/2020-06-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index cda088d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/examples-1.json b/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/paginators-1.json deleted file mode 100644 index f259294..0000000 --- a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/paginators-1.json +++ 
/dev/null @@ -1,129 +0,0 @@ -{ - "pagination": { - "ListComponentBuildVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "componentSummaryList" - }, - "ListComponents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "componentVersionList" - }, - "ListContainerRecipes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "containerRecipeSummaryList" - }, - "ListDistributionConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "distributionConfigurationSummaryList" - }, - "ListImageBuildVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageSummaryList" - }, - "ListImagePackages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imagePackageList" - }, - "ListImagePipelineImages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageSummaryList" - }, - "ListImagePipelines": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imagePipelineList" - }, - "ListImageRecipes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageRecipeSummaryList" - }, - "ListImageScanFindingAggregations": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "responses" - }, - "ListImageScanFindings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - }, - "ListImages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageVersionList" - }, - "ListInfrastructureConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "infrastructureConfigurationSummaryList" - }, - "ListLifecycleExecutionResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "resources" - }, - "ListLifecycleExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "lifecycleExecutions" - }, - "ListLifecyclePolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "lifecyclePolicySummaryList" - }, - "ListWaitingWorkflowSteps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "steps" - }, - "ListWorkflowBuildVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowSummaryList" - }, - "ListWorkflowExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowExecutions" - }, - "ListWorkflowStepExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "steps" - }, - "ListWorkflows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowVersionList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/paginators-1.sdk-extras.json 
b/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/paginators-1.sdk-extras.json deleted file mode 100644 index 4106060..0000000 --- a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/paginators-1.sdk-extras.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListComponentBuildVersions": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListComponents": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListContainerRecipes": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListDistributionConfigurations": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImageBuildVersions": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImagePackages": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImagePipelineImages": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImagePipelines": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImageRecipes": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImageScanFindingAggregations": { - "non_aggregate_keys": [ - "requestId", - "aggregationType" - ] - }, - "ListImageScanFindings": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListImages": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListInfrastructureConfigurations": { - "non_aggregate_keys": [ - "requestId" - ] - }, - "ListLifecycleExecutionResources": { - "non_aggregate_keys": [ - "lifecycleExecutionId", - "lifecycleExecutionState" - ] - }, - "ListWorkflowExecutions": { - "non_aggregate_keys": [ - "requestId", - "imageBuildVersionArn", - "message" - ] - }, - "ListWorkflowStepExecutions": { - "non_aggregate_keys": [ - "requestId", - "workflowBuildVersionArn", - "workflowExecutionId", - "imageBuildVersionArn", - "message" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/service-2.json.gz deleted file mode 100644 index d16b9ff..0000000 Binary files a/venv/Lib/site-packages/botocore/data/imagebuilder/2019-12-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 87339bf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/paginators-1.json deleted file mode 100644 index 702385e..0000000 --- a/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/paginators-1.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "pagination": { - "ListJobs": { - "input_token": "Marker", - "output_token": "Jobs[-1].JobId", - "more_results": "IsTruncated", - "limit_key": "MaxJobs", - "result_key": "Jobs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/service-2.json.gz deleted file mode 100644 index 96a3af7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/importexport/2010-06-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/endpoint-rule-set-1.json.gz 
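(Note on the imagebuilder paginators-1.sdk-extras.json removed just above: it layers extra metadata onto the base pagination model, and keys listed under non_aggregate_keys, such as requestId, are not concatenated page by page but reported once alongside the aggregated result. An illustrative sketch of where that shows up, assuming working credentials and at least one image version in the account:

import boto3

ib = boto3.client("imagebuilder")

# build_full_result() walks every page and concatenates the result_key
# ("imageVersionList"); "requestId" is a non_aggregate_key, so it appears
# once in the merged dictionary instead of once per page.
full = ib.get_paginator("list_images").paginate().build_full_result()
print(len(full.get("imageVersionList", [])), "image versions")
print("requestId:", full.get("requestId"))
)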
b/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3defba8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/service-2.json.gz deleted file mode 100644 index d89fb48..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector-scan/2023-08-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector/2015-08-18/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/inspector/2015-08-18/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5414227..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector/2015-08-18/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector/2015-08-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/inspector/2015-08-18/service-2.json.gz deleted file mode 100644 index 40d86cb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector/2015-08-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/endpoint-rule-set-1.json.gz deleted file mode 100644 index b690b3c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/examples-1.json b/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/examples-1.json deleted file mode 100644 index 05b541f..0000000 --- a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/examples-1.json +++ /dev/null @@ -1,1148 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddAttributesToFindings": [ - { - "input": { - "attributes": [ - { - "key": "Example", - "value": "example" - } - ], - "findingArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-8l1VIE0D/run/0-Z02cjjug/finding/0-T8yM9mEU" - ] - }, - "output": { - "failedItems": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Assigns attributes (key and value pairs) to the findings that are specified by the ARNs of the findings.", - "id": "add-attributes-to-findings-1481063856401", - "title": "Add attributes to findings" - } - ], - "CreateAssessmentTarget": [ - { - "input": { - "assessmentTargetName": "ExampleAssessmentTarget", - "resourceGroupArn": "arn:aws:inspector:us-west-2:123456789012:resourcegroup/0-AB6DMKnv" - }, - "output": { - "assessmentTargetArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a new assessment target using the ARN of the resource group that is generated by CreateResourceGroup. You can create up to 50 assessment targets per AWS account. 
You can run up to 500 concurrent agents per AWS account.", - "id": "create-assessment-target-1481063953657", - "title": "Create assessment target" - } - ], - "CreateAssessmentTemplate": [ - { - "input": { - "assessmentTargetArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX", - "assessmentTemplateName": "ExampleAssessmentTemplate", - "durationInSeconds": 180, - "rulesPackageArns": [ - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-11B9DBXp" - ], - "userAttributesForFindings": [ - { - "key": "Example", - "value": "example" - } - ] - }, - "output": { - "assessmentTemplateArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-it5r2S4T" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates an assessment template for the assessment target that is specified by the ARN of the assessment target.", - "id": "create-assessment-template-1481064046719", - "title": "Create assessment template" - } - ], - "CreateResourceGroup": [ - { - "input": { - "resourceGroupTags": [ - { - "key": "Name", - "value": "example" - } - ] - }, - "output": { - "resourceGroupArn": "arn:aws:inspector:us-west-2:123456789012:resourcegroup/0-AB6DMKnv" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a resource group using the specified set of tags (key and value pairs) that are used to select the EC2 instances to be included in an Amazon Inspector assessment target. The created resource group is then used to create an Amazon Inspector assessment target. ", - "id": "create-resource-group-1481064169037", - "title": "Create resource group" - } - ], - "DeleteAssessmentRun": [ - { - "input": { - "assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-it5r2S4T/run/0-11LMTAVe" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the assessment run that is specified by the ARN of the assessment run.", - "id": "delete-assessment-run-1481064251629", - "title": "Delete assessment run" - } - ], - "DeleteAssessmentTarget": [ - { - "input": { - "assessmentTargetArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the assessment target that is specified by the ARN of the assessment target.", - "id": "delete-assessment-target-1481064309029", - "title": "Delete assessment target" - } - ], - "DeleteAssessmentTemplate": [ - { - "input": { - "assessmentTemplateArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-it5r2S4T" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the assessment template that is specified by the ARN of the assessment template.", - "id": "delete-assessment-template-1481064364074", - "title": "Delete assessment template" - } - ], - "DescribeAssessmentRuns": [ - { - "input": { - "assessmentRunArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE" - ] - }, - "output": { - "assessmentRuns": [ - { - "name": "Run 1 for ExampleAssessmentTemplate", - "arn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE", - "assessmentTemplateArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw", - "completedAt": "1458680301.4", - "createdAt": "1458680170.035", - "dataCollected": true, - "durationInSeconds": 3600, - "findingCounts": { - "High": 14, - "Informational": 0, - 
"Low": 0, - "Medium": 2, - "Undefined": 0 - }, - "notifications": [ - - ], - "rulesPackageArns": [ - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-X1KXtawP" - ], - "startedAt": "1458680170.161", - "state": "COMPLETED", - "stateChangedAt": "1458680301.4", - "stateChanges": [ - { - "state": "CREATED", - "stateChangedAt": "1458680170.035" - }, - { - "state": "START_DATA_COLLECTION_PENDING", - "stateChangedAt": "1458680170.065" - }, - { - "state": "START_DATA_COLLECTION_IN_PROGRESS", - "stateChangedAt": "1458680170.096" - }, - { - "state": "COLLECTING_DATA", - "stateChangedAt": "1458680170.161" - }, - { - "state": "STOP_DATA_COLLECTION_PENDING", - "stateChangedAt": "1458680239.883" - }, - { - "state": "DATA_COLLECTED", - "stateChangedAt": "1458680299.847" - }, - { - "state": "EVALUATING_RULES", - "stateChangedAt": "1458680300.099" - }, - { - "state": "COMPLETED", - "stateChangedAt": "1458680301.4" - } - ], - "userAttributesForFindings": [ - - ] - } - ], - "failedItems": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the assessment runs that are specified by the ARNs of the assessment runs.", - "id": "describte-assessment-runs-1481064424352", - "title": "Describte assessment runs" - } - ], - "DescribeAssessmentTargets": [ - { - "input": { - "assessmentTargetArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq" - ] - }, - "output": { - "assessmentTargets": [ - { - "name": "ExampleAssessmentTarget", - "arn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq", - "createdAt": "1458074191.459", - "resourceGroupArn": "arn:aws:inspector:us-west-2:123456789012:resourcegroup/0-PyGXopAI", - "updatedAt": "1458074191.459" - } - ], - "failedItems": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the assessment targets that are specified by the ARNs of the assessment targets.", - "id": "describte-assessment-targets-1481064527735", - "title": "Describte assessment targets" - } - ], - "DescribeAssessmentTemplates": [ - { - "input": { - "assessmentTemplateArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw" - ] - }, - "output": { - "assessmentTemplates": [ - { - "name": "ExampleAssessmentTemplate", - "arn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw", - "assessmentRunCount": 0, - "assessmentTargetArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq", - "createdAt": "1458074191.844", - "durationInSeconds": 3600, - "rulesPackageArns": [ - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-X1KXtawP" - ], - "userAttributesForFindings": [ - - ] - } - ], - "failedItems": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the assessment templates that are specified by the ARNs of the assessment templates.", - "id": "describte-assessment-templates-1481064606829", - "title": "Describte assessment templates" - } - ], - "DescribeCrossAccountAccessRole": [ - { - "output": { - "registeredAt": "1458069182.826", - "roleArn": "arn:aws:iam::123456789012:role/inspector", - "valid": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the IAM role that enables Amazon Inspector to access your AWS account.", - "id": "describte-cross-account-access-role-1481064682267", - "title": "Describte cross account access role" - } - ], - "DescribeFindings": [ - { - "input": { - "findingArns": [ - 
"arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE/finding/0-HwPnsDm4" - ] - }, - "output": { - "failedItems": { - }, - "findings": [ - { - "arn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE/finding/0-HwPnsDm4", - "assetAttributes": { - "ipv4Addresses": [ - - ], - "schemaVersion": 1 - }, - "assetType": "ec2-instance", - "attributes": [ - - ], - "confidence": 10, - "createdAt": "1458680301.37", - "description": "Amazon Inspector did not find any potential security issues during this assessment.", - "indicatorOfCompromise": false, - "numericSeverity": 0, - "recommendation": "No remediation needed.", - "schemaVersion": 1, - "service": "Inspector", - "serviceAttributes": { - "assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE", - "rulesPackageArn": "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-X1KXtawP", - "schemaVersion": 1 - }, - "severity": "Informational", - "title": "No potential security issues found", - "updatedAt": "1458680301.37", - "userAttributes": [ - - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the findings that are specified by the ARNs of the findings.", - "id": "describte-findings-1481064771803", - "title": "Describe findings" - } - ], - "DescribeResourceGroups": [ - { - "input": { - "resourceGroupArns": [ - "arn:aws:inspector:us-west-2:123456789012:resourcegroup/0-PyGXopAI" - ] - }, - "output": { - "failedItems": { - }, - "resourceGroups": [ - { - "arn": "arn:aws:inspector:us-west-2:123456789012:resourcegroup/0-PyGXopAI", - "createdAt": "1458074191.098", - "tags": [ - { - "key": "Name", - "value": "example" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the resource groups that are specified by the ARNs of the resource groups.", - "id": "describe-resource-groups-1481065787743", - "title": "Describe resource groups" - } - ], - "DescribeRulesPackages": [ - { - "input": { - "rulesPackageArns": [ - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-JJOtZiqQ" - ] - }, - "output": { - "failedItems": { - }, - "rulesPackages": [ - { - "version": "1.1", - "name": "Security Best Practices", - "arn": "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-JJOtZiqQ", - "description": "The rules in this package help determine whether your systems are configured securely.", - "provider": "Amazon Web Services, Inc." 
- } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the rules packages that are specified by the ARNs of the rules packages.", - "id": "describe-rules-packages-1481069641979", - "title": "Describe rules packages" - } - ], - "GetTelemetryMetadata": [ - { - "input": { - "assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE" - }, - "output": { - "telemetryMetadata": [ - { - "count": 2, - "dataSize": 345, - "messageType": "InspectorDuplicateProcess" - }, - { - "count": 3, - "dataSize": 255, - "messageType": "InspectorTimeEventMsg" - }, - { - "count": 4, - "dataSize": 1082, - "messageType": "InspectorNetworkInterface" - }, - { - "count": 2, - "dataSize": 349, - "messageType": "InspectorDnsEntry" - }, - { - "count": 11, - "dataSize": 2514, - "messageType": "InspectorDirectoryInfoMsg" - }, - { - "count": 1, - "dataSize": 179, - "messageType": "InspectorTcpV6ListeningPort" - }, - { - "count": 101, - "dataSize": 10949, - "messageType": "InspectorTerminal" - }, - { - "count": 26, - "dataSize": 5916, - "messageType": "InspectorUser" - }, - { - "count": 282, - "dataSize": 32148, - "messageType": "InspectorDynamicallyLoadedCodeModule" - }, - { - "count": 18, - "dataSize": 10172, - "messageType": "InspectorCreateProcess" - }, - { - "count": 3, - "dataSize": 8001, - "messageType": "InspectorProcessPerformance" - }, - { - "count": 1, - "dataSize": 360, - "messageType": "InspectorOperatingSystem" - }, - { - "count": 6, - "dataSize": 546, - "messageType": "InspectorStopProcess" - }, - { - "count": 1, - "dataSize": 1553, - "messageType": "InspectorInstanceMetaData" - }, - { - "count": 2, - "dataSize": 434, - "messageType": "InspectorTcpV4Connection" - }, - { - "count": 474, - "dataSize": 2960322, - "messageType": "InspectorPackageInfo" - }, - { - "count": 3, - "dataSize": 2235, - "messageType": "InspectorSystemPerformance" - }, - { - "count": 105, - "dataSize": 46048, - "messageType": "InspectorCodeModule" - }, - { - "count": 1, - "dataSize": 182, - "messageType": "InspectorUdpV6ListeningPort" - }, - { - "count": 2, - "dataSize": 371, - "messageType": "InspectorUdpV4ListeningPort" - }, - { - "count": 18, - "dataSize": 8362, - "messageType": "InspectorKernelModule" - }, - { - "count": 29, - "dataSize": 48788, - "messageType": "InspectorConfigurationInfo" - }, - { - "count": 1, - "dataSize": 79, - "messageType": "InspectorMonitoringStart" - }, - { - "count": 5, - "dataSize": 0, - "messageType": "InspectorSplitMsgBegin" - }, - { - "count": 51, - "dataSize": 4593, - "messageType": "InspectorGroup" - }, - { - "count": 1, - "dataSize": 184, - "messageType": "InspectorTcpV4ListeningPort" - }, - { - "count": 1159, - "dataSize": 3146579, - "messageType": "Total" - }, - { - "count": 5, - "dataSize": 0, - "messageType": "InspectorSplitMsgEnd" - }, - { - "count": 1, - "dataSize": 612, - "messageType": "InspectorLoadImageInProcess" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Information about the data that is collected for the specified assessment run.", - "id": "get-telemetry-metadata-1481066021297", - "title": "Get telemetry metadata" - } - ], - "ListAssessmentRunAgents": [ - { - "input": { - "assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE", - "maxResults": 123 - }, - "output": { - "assessmentRunAgents": [ - { - "agentHealth": "HEALTHY", - "agentHealthCode": "RUNNING", - "agentId": "i-49113b93", - 
"assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE", - "telemetryMetadata": [ - { - "count": 2, - "dataSize": 345, - "messageType": "InspectorDuplicateProcess" - }, - { - "count": 3, - "dataSize": 255, - "messageType": "InspectorTimeEventMsg" - }, - { - "count": 4, - "dataSize": 1082, - "messageType": "InspectorNetworkInterface" - }, - { - "count": 2, - "dataSize": 349, - "messageType": "InspectorDnsEntry" - }, - { - "count": 11, - "dataSize": 2514, - "messageType": "InspectorDirectoryInfoMsg" - }, - { - "count": 1, - "dataSize": 179, - "messageType": "InspectorTcpV6ListeningPort" - }, - { - "count": 101, - "dataSize": 10949, - "messageType": "InspectorTerminal" - }, - { - "count": 26, - "dataSize": 5916, - "messageType": "InspectorUser" - }, - { - "count": 282, - "dataSize": 32148, - "messageType": "InspectorDynamicallyLoadedCodeModule" - }, - { - "count": 18, - "dataSize": 10172, - "messageType": "InspectorCreateProcess" - }, - { - "count": 3, - "dataSize": 8001, - "messageType": "InspectorProcessPerformance" - }, - { - "count": 1, - "dataSize": 360, - "messageType": "InspectorOperatingSystem" - }, - { - "count": 6, - "dataSize": 546, - "messageType": "InspectorStopProcess" - }, - { - "count": 1, - "dataSize": 1553, - "messageType": "InspectorInstanceMetaData" - }, - { - "count": 2, - "dataSize": 434, - "messageType": "InspectorTcpV4Connection" - }, - { - "count": 474, - "dataSize": 2960322, - "messageType": "InspectorPackageInfo" - }, - { - "count": 3, - "dataSize": 2235, - "messageType": "InspectorSystemPerformance" - }, - { - "count": 105, - "dataSize": 46048, - "messageType": "InspectorCodeModule" - }, - { - "count": 1, - "dataSize": 182, - "messageType": "InspectorUdpV6ListeningPort" - }, - { - "count": 2, - "dataSize": 371, - "messageType": "InspectorUdpV4ListeningPort" - }, - { - "count": 18, - "dataSize": 8362, - "messageType": "InspectorKernelModule" - }, - { - "count": 29, - "dataSize": 48788, - "messageType": "InspectorConfigurationInfo" - }, - { - "count": 1, - "dataSize": 79, - "messageType": "InspectorMonitoringStart" - }, - { - "count": 5, - "dataSize": 0, - "messageType": "InspectorSplitMsgBegin" - }, - { - "count": 51, - "dataSize": 4593, - "messageType": "InspectorGroup" - }, - { - "count": 1, - "dataSize": 184, - "messageType": "InspectorTcpV4ListeningPort" - }, - { - "count": 1159, - "dataSize": 3146579, - "messageType": "Total" - }, - { - "count": 5, - "dataSize": 0, - "messageType": "InspectorSplitMsgEnd" - }, - { - "count": 1, - "dataSize": 612, - "messageType": "InspectorLoadImageInProcess" - } - ] - } - ], - "nextToken": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the agents of the assessment runs that are specified by the ARNs of the assessment runs.", - "id": "list-assessment-run-agents-1481918140642", - "title": "List assessment run agents" - } - ], - "ListAssessmentRuns": [ - { - "input": { - "assessmentTemplateArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw" - ], - "maxResults": 123 - }, - "output": { - "assessmentRunArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE", - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-v5D6fI3v" - ], - "nextToken": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the assessment runs that correspond to the assessment templates that are specified 
by the ARNs of the assessment templates.", - "id": "list-assessment-runs-1481066340844", - "title": "List assessment runs" - } - ], - "ListAssessmentTargets": [ - { - "input": { - "maxResults": 123 - }, - "output": { - "assessmentTargetArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq" - ], - "nextToken": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the ARNs of the assessment targets within this AWS account. ", - "id": "list-assessment-targets-1481066540849", - "title": "List assessment targets" - } - ], - "ListAssessmentTemplates": [ - { - "input": { - "assessmentTargetArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq" - ], - "maxResults": 123 - }, - "output": { - "assessmentTemplateArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw", - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-Uza6ihLh" - ], - "nextToken": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the assessment templates that correspond to the assessment targets that are specified by the ARNs of the assessment targets.", - "id": "list-assessment-templates-1481066623520", - "title": "List assessment templates" - } - ], - "ListEventSubscriptions": [ - { - "input": { - "maxResults": 123, - "resourceArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-7sbz2Kz0" - }, - "output": { - "nextToken": "1", - "subscriptions": [ - { - "eventSubscriptions": [ - { - "event": "ASSESSMENT_RUN_COMPLETED", - "subscribedAt": "1459455440.867" - } - ], - "resourceArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-7sbz2Kz0", - "topicArn": "arn:aws:sns:us-west-2:123456789012:exampletopic" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all the event subscriptions for the assessment template that is specified by the ARN of the assessment template. 
", - "id": "list-event-subscriptions-1481068376945", - "title": "List event subscriptions" - } - ], - "ListFindings": [ - { - "input": { - "assessmentRunArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE" - ], - "maxResults": 123 - }, - "output": { - "findingArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-MKkpXXPE/finding/0-HwPnsDm4", - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-4r1V2mAw/run/0-v5D6fI3v/finding/0-tyvmqBLy" - ], - "nextToken": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists findings that are generated by the assessment runs that are specified by the ARNs of the assessment runs.", - "id": "list-findings-1481066840611", - "title": "List findings" - } - ], - "ListRulesPackages": [ - { - "input": { - "maxResults": 123 - }, - "output": { - "nextToken": "1", - "rulesPackageArns": [ - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-9hgA516p", - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-H5hpSawc", - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-JJOtZiqQ", - "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-vg5GGHSD" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all available Amazon Inspector rules packages.", - "id": "list-rules-packages-1481066954883", - "title": "List rules packages" - } - ], - "ListTagsForResource": [ - { - "input": { - "resourceArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-gcwFliYu" - }, - "output": { - "tags": [ - { - "key": "Name", - "value": "Example" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists all tags associated with an assessment template.", - "id": "list-tags-for-resource-1481067025240", - "title": "List tags for resource" - } - ], - "PreviewAgents": [ - { - "input": { - "maxResults": 123, - "previewAgentsArn": "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq" - }, - "output": { - "agentPreviews": [ - { - "agentId": "i-49113b93" - } - ], - "nextToken": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Previews the agents installed on the EC2 instances that are part of the specified assessment target.", - "id": "preview-agents-1481067101888", - "title": "Preview agents" - } - ], - "RegisterCrossAccountAccessRole": [ - { - "input": { - "roleArn": "arn:aws:iam::123456789012:role/inspector" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Registers the IAM role that Amazon Inspector uses to list your EC2 instances at the start of the assessment run or when you call the PreviewAgents action.", - "id": "register-cross-account-access-role-1481067178301", - "title": "Register cross account access role" - } - ], - "RemoveAttributesFromFindings": [ - { - "input": { - "attributeKeys": [ - "key=Example,value=example" - ], - "findingArns": [ - "arn:aws:inspector:us-west-2:123456789012:target/0-0kFIPusq/template/0-8l1VIE0D/run/0-Z02cjjug/finding/0-T8yM9mEU" - ] - }, - "output": { - "failedItems": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Removes entire attributes (key and value pairs) from the findings that are specified by the ARNs of the findings where an attribute with the specified key exists.", - "id": "remove-attributes-from-findings-1481067246548", - "title": "Remove attributes from findings" - } - ], - 
"SetTagsForResource": [ - { - "input": { - "resourceArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-7sbz2Kz0", - "tags": [ - { - "key": "Example", - "value": "example" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Sets tags (key and value pairs) to the assessment template that is specified by the ARN of the assessment template.", - "id": "set-tags-for-resource-1481067329646", - "title": "Set tags for resource" - } - ], - "StartAssessmentRun": [ - { - "input": { - "assessmentRunName": "examplerun", - "assessmentTemplateArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-it5r2S4T" - }, - "output": { - "assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-it5r2S4T/run/0-jOoroxyY" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Starts the assessment run specified by the ARN of the assessment template. For this API to function properly, you must not exceed the limit of running up to 500 concurrent agents per AWS account.", - "id": "start-assessment-run-1481067407484", - "title": "Start assessment run" - } - ], - "StopAssessmentRun": [ - { - "input": { - "assessmentRunArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-it5r2S4T/run/0-11LMTAVe" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Stops the assessment run that is specified by the ARN of the assessment run.", - "id": "stop-assessment-run-1481067502857", - "title": "Stop assessment run" - } - ], - "SubscribeToEvent": [ - { - "input": { - "event": "ASSESSMENT_RUN_COMPLETED", - "resourceArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-7sbz2Kz0", - "topicArn": "arn:aws:sns:us-west-2:123456789012:exampletopic" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Enables the process of sending Amazon Simple Notification Service (SNS) notifications about a specified event to a specified SNS topic.", - "id": "subscribe-to-event-1481067686031", - "title": "Subscribe to event" - } - ], - "UnsubscribeFromEvent": [ - { - "input": { - "event": "ASSESSMENT_RUN_COMPLETED", - "resourceArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX/template/0-7sbz2Kz0", - "topicArn": "arn:aws:sns:us-west-2:123456789012:exampletopic" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Disables the process of sending Amazon Simple Notification Service (SNS) notifications about a specified event to a specified SNS topic.", - "id": "unsubscribe-from-event-1481067781705", - "title": "Unsubscribe from event" - } - ], - "UpdateAssessmentTarget": [ - { - "input": { - "assessmentTargetArn": "arn:aws:inspector:us-west-2:123456789012:target/0-nvgVhaxX", - "assessmentTargetName": "Example", - "resourceGroupArn": "arn:aws:inspector:us-west-2:123456789012:resourcegroup/0-yNbgL5Pt" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates the assessment target that is specified by the ARN of the assessment target.", - "id": "update-assessment-target-1481067866692", - "title": "Update assessment target" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/paginators-1.json b/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/paginators-1.json deleted file mode 100644 index 8dec041..0000000 --- a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/paginators-1.json +++ /dev/null @@ -1,58 +0,0 
@@ -{ - "pagination": { - "ListFindings": { - "result_key": "findingArns", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListAssessmentTemplates": { - "result_key": "assessmentTemplateArns", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "PreviewAgents": { - "result_key": "agentPreviews", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListEventSubscriptions": { - "result_key": "subscriptions", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListRulesPackages": { - "result_key": "rulesPackageArns", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListAssessmentRunAgents": { - "result_key": "assessmentRunAgents", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListAssessmentRuns": { - "result_key": "assessmentRunArns", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListAssessmentTargets": { - "result_key": "assessmentTargetArns", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "ListExclusions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "exclusionArns" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/service-2.json.gz b/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/service-2.json.gz deleted file mode 100644 index 85c778e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector/2016-02-16/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9b8e7d2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/examples-1.json b/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/paginators-1.json deleted file mode 100644 index 0df0287..0000000 --- a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/paginators-1.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "pagination": { - "ListAccountPermissions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "permissions" - }, - "ListCoverage": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "coveredResources" - }, - "ListCoverageStatistics": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "countsByGroup" - }, - "ListDelegatedAdminAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "delegatedAdminAccounts" - }, - "ListFilters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": 
"maxResults", - "result_key": "filters" - }, - "ListFindingAggregations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "responses" - }, - "ListFindings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - }, - "ListMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListUsageTotals": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "totals" - }, - "SearchVulnerabilities": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "vulnerabilities" - }, - "GetCisScanResultDetails": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "scanResultDetails" - }, - "ListCisScanConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "scanConfigurations" - }, - "ListCisScanResultsAggregatedByChecks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "checkAggregations" - }, - "ListCisScanResultsAggregatedByTargetResource": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "targetResourceAggregations" - }, - "ListCisScans": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "scans" - }, - "GetClustersForImage": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "cluster" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/paginators-1.sdk-extras.json deleted file mode 100644 index b01a0bf..0000000 --- a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/paginators-1.sdk-extras.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListFindingAggregations": { - "non_aggregate_keys": [ - "aggregationType" - ] - }, - "ListCoverageStatistics": { - "non_aggregate_keys": [ - "totalCounts" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/service-2.json.gz deleted file mode 100644 index 0d90c4f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/inspector2/2020-06-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3054702..0000000 Binary files a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/paginators-1.json deleted file mode 100644 index c08ea4f..0000000 --- a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListHealthEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "HealthEvents" - }, 
- "ListMonitors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Monitors" - }, - "ListInternetEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InternetEvents" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/service-2.json.gz deleted file mode 100644 index 382c323..0000000 Binary files a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/waiters-2.json b/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/internetmonitor/2021-06-03/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index a6f9d81..0000000 Binary files a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/paginators-1.json deleted file mode 100644 index 642fc32..0000000 --- a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListInvoiceUnits": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InvoiceUnits" - }, - "ListInvoiceSummaries": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InvoiceSummaries" - }, - "ListProcurementPortalPreferences": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProcurementPortalPreferences" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/service-2.json.gz deleted file mode 100644 index ef01d04..0000000 Binary files a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/invoicing/2024-12-01/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 212430e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/examples-1.json b/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/examples-1.json 
+++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/paginators-1.json deleted file mode 100644 index 26d4a56..0000000 --- a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListRetainedMessages": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "retainedTopics" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/service-2.json.gz deleted file mode 100644 index 1a9c488..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot-data/2015-05-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index fd1aa74..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/examples-1.json b/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/service-2.json.gz deleted file mode 100644 index a333348..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot-jobs-data/2017-09-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1ec03c9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/paginators-1.json deleted file mode 100644 index 0815ead..0000000 --- a/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/paginators-1.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "pagination": { - "ListCredentialLockers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListDestinations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DestinationList" - }, - "ListEventLogConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - 
"limit_key": "MaxResults", - "result_key": "EventLogConfigurationList" - }, - "ListManagedThingSchemas": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListManagedThings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListNotificationConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "NotificationConfigurationList" - }, - "ListOtaTaskConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListOtaTaskExecutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ExecutionSummaries" - }, - "ListOtaTasks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tasks" - }, - "ListProvisioningProfiles": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListSchemaVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListAccountAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListCloudConnectors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListConnectorDestinations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConnectorDestinationList" - }, - "ListDeviceDiscoveries": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListDiscoveredDevices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListManagedThingAccountAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/service-2.json.gz deleted file mode 100644 index fc7213f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot-managed-integrations/2025-03-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iot/2015-05-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 58bc5ff..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/examples-1.json b/venv/Lib/site-packages/botocore/data/iot/2015-05-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/iot/2015-05-28/paginators-1.json deleted file mode 100644 index 4b227cd..0000000 --- 
a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/paginators-1.json +++ /dev/null @@ -1,390 +0,0 @@ -{ - "pagination": { - "ListCACertificates": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "certificates" - }, - "ListCertificates": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "certificates" - }, - "ListCertificatesByCA": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "certificates" - }, - "ListOutgoingCertificates": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "outgoingCertificates" - }, - "ListPolicies": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "policies" - }, - "ListPolicyPrincipals": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "principals" - }, - "ListPrincipalPolicies": { - "input_token": "marker", - "output_token": "nextMarker", - "limit_key": "pageSize", - "result_key": "policies" - }, - "ListPrincipalThings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "things" - }, - "ListThingTypes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "thingTypes" - }, - "ListThings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "things" - }, - "ListTopicRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "rules" - }, - "ListActiveViolations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "activeViolations" - }, - "ListAttachedPolicies": { - "input_token": "marker", - "limit_key": "pageSize", - "output_token": "nextMarker", - "result_key": "policies" - }, - "ListAuditFindings": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "findings" - }, - "ListAuditTasks": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "tasks" - }, - "ListAuthorizers": { - "input_token": "marker", - "limit_key": "pageSize", - "output_token": "nextMarker", - "result_key": "authorizers" - }, - "ListBillingGroups": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "billingGroups" - }, - "ListIndices": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "indexNames" - }, - "ListJobExecutionsForJob": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "executionSummaries" - }, - "ListJobExecutionsForThing": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "executionSummaries" - }, - "ListJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobs" - }, - "ListOTAUpdates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "otaUpdates" - }, - "ListRoleAliases": { - "input_token": "marker", - "limit_key": "pageSize", - "output_token": "nextMarker", - "result_key": "roleAliases" - }, - "ListScheduledAudits": { - 
"input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "scheduledAudits" - }, - "ListSecurityProfiles": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "securityProfileIdentifiers" - }, - "ListSecurityProfilesForTarget": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "securityProfileTargetMappings" - }, - "ListStreams": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "streams" - }, - "ListTagsForResource": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "tags" - }, - "ListTargetsForPolicy": { - "input_token": "marker", - "limit_key": "pageSize", - "output_token": "nextMarker", - "result_key": "targets" - }, - "ListTargetsForSecurityProfile": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "securityProfileTargets" - }, - "ListThingGroups": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "thingGroups" - }, - "ListThingGroupsForThing": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "thingGroups" - }, - "ListThingRegistrationTasks": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "taskIds" - }, - "ListThingsInBillingGroup": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "things" - }, - "ListThingsInThingGroup": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "things" - }, - "ListV2LoggingLevels": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "logTargetConfigurations" - }, - "ListViolationEvents": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "violationEvents" - }, - "ListAuditMitigationActionsExecutions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "actionsExecutions" - }, - "ListAuditMitigationActionsTasks": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "tasks" - }, - "ListAuditSuppressions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "suppressions" - }, - "ListDimensions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "dimensionNames" - }, - "ListDomainConfigurations": { - "input_token": "marker", - "limit_key": "pageSize", - "output_token": "nextMarker", - "result_key": "domainConfigurations" - }, - "ListMitigationActions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "actionIdentifiers" - }, - "ListProvisioningTemplateVersions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "versions" - }, - "ListProvisioningTemplates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "templates" - }, - "ListThingRegistrationTaskReports": { - "input_token": "nextToken", - "limit_key": "maxResults", - "non_aggregate_keys": [ - "reportType" - ], - 
"output_token": "nextToken", - "result_key": "resourceLinks" - }, - "ListTopicRuleDestinations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "destinationSummaries" - }, - "ListThingPrincipals": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "principals" - }, - "GetBehaviorModelTrainingSummaries": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "ListCustomMetrics": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "metricNames" - }, - "ListDetectMitigationActionsExecutions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "actionsExecutions" - }, - "ListDetectMitigationActionsTasks": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "tasks" - }, - "ListFleetMetrics": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "fleetMetrics" - }, - "ListJobTemplates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobTemplates" - }, - "ListMetricValues": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "metricDatumList" - }, - "ListManagedJobTemplates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "managedJobTemplates" - }, - "ListRelatedResourcesForAuditFinding": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "relatedResources" - }, - "ListPackageVersions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "packageVersionSummaries" - }, - "ListPackages": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "packageSummaries" - }, - "ListSbomValidationResults": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "validationResultSummaries" - }, - "ListPrincipalThingsV2": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "principalThingObjects" - }, - "ListThingPrincipalsV2": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "thingPrincipalObjects" - }, - "ListCommandExecutions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "commandExecutions" - }, - "ListCommands": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "commands" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iot/2015-05-28/service-2.json.gz deleted file mode 100644 index 4560784..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iot/2015-05-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0feb415..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/examples-1.json b/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/paginators-1.json deleted file mode 100644 index d1bfaaa..0000000 --- a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListChannels": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "channelSummaries" - }, - "ListDatasetContents": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "datasetContentSummaries" - }, - "ListDatasets": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "datasetSummaries" - }, - "ListDatastores": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "datastoreSummaries" - }, - "ListPipelines": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "pipelineSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/service-2.json.gz deleted file mode 100644 index a559260..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotanalytics/2017-11-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5c0ebf8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/examples-1.json b/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/service-2.json.gz deleted file mode 100644 index 41c7573..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotdeviceadvisor/2020-09-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4fae74b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/examples-1.json b/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/service-2.json.gz deleted file mode 100644 index 7393910..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotevents-data/2018-10-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index d609fa5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/examples-1.json b/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/service-2.json.gz deleted file mode 100644 index 95c3ebc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotevents/2018-07-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6679d27..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/paginators-1.json deleted file mode 100644 index cc8dcba..0000000 --- a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/paginators-1.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "pagination": { - "GetVehicleStatus": { - "input_token": "nextToken", - 
"output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "campaigns" - }, - "ListCampaigns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "campaignSummaries" - }, - "ListDecoderManifestNetworkInterfaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "networkInterfaces" - }, - "ListDecoderManifestSignals": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "signalDecoders" - }, - "ListDecoderManifests": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "summaries" - }, - "ListFleets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "fleetSummaries" - }, - "ListFleetsForVehicle": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "fleets" - }, - "ListModelManifestNodes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "nodes" - }, - "ListModelManifests": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "summaries" - }, - "ListSignalCatalogNodes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "nodes" - }, - "ListSignalCatalogs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "summaries" - }, - "ListVehicles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "vehicleSummaries" - }, - "ListVehiclesInFleet": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "vehicles" - }, - "ListStateTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "summaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/service-2.json.gz deleted file mode 100644 index 9417ec8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/waiters-2.json b/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/iotfleetwise/2021-06-17/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7a63ec1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/examples-1.json b/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } 
-} diff --git a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/service-2.json.gz deleted file mode 100644 index d2668ba..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotsecuretunneling/2018-10-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 70ba132..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/examples-1.json b/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/paginators-1.json deleted file mode 100644 index 9e685d7..0000000 --- a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/paginators-1.json +++ /dev/null @@ -1,166 +0,0 @@ -{ - "pagination": { - "GetAssetPropertyAggregates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aggregatedValues" - }, - "GetAssetPropertyValueHistory": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetPropertyValueHistory" - }, - "ListAccessPolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accessPolicySummaries" - }, - "ListAssetModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetModelSummaries" - }, - "ListAssets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetSummaries" - }, - "ListAssociatedAssets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetSummaries" - }, - "ListDashboards": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dashboardSummaries" - }, - "ListGateways": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "gatewaySummaries" - }, - "ListPortals": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "portalSummaries" - }, - "ListProjectAssets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetIds" - }, - "ListProjects": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "projectSummaries" - }, - 
"ListAssetRelationships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetRelationshipSummaries" - }, - "GetInterpolatedAssetPropertyValues": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "interpolatedAssetPropertyValues" - }, - "ListTimeSeries": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "TimeSeriesSummaries" - }, - "ListBulkImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobSummaries" - }, - "ListAssetModelProperties": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetModelPropertySummaries" - }, - "ListAssetProperties": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetPropertySummaries" - }, - "ExecuteQuery": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "rows" - }, - "ListActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "actionSummaries" - }, - "ListAssetModelCompositeModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assetModelCompositeModelSummaries" - }, - "ListCompositionRelationships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "compositionRelationshipSummaries" - }, - "ListDatasets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "datasetSummaries" - }, - "ListComputationModelDataBindingUsages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataBindingUsageSummaries" - }, - "ListComputationModelResolveToResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "computationModelResolveToResourceSummaries" - }, - "ListComputationModels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "computationModelSummaries" - }, - "ListExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "executionSummaries" - }, - "ListInterfaceRelationships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "interfaceRelationshipSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/paginators-1.sdk-extras.json deleted file mode 100644 index 77dcd65..0000000 --- a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ExecuteQuery": { - "non_aggregate_keys": [ - "columns" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/service-2.json.gz deleted file mode 100644 index 360b8c4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/waiters-2.json b/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/waiters-2.json deleted file mode 100644 index e51df5f..0000000 --- a/venv/Lib/site-packages/botocore/data/iotsitewise/2019-12-02/waiters-2.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "version": 2, - "waiters": { - "AssetModelNotExists": { - "delay": 3, - "maxAttempts": 20, - "operation": "DescribeAssetModel", - "acceptors": [ - { - "state": "success", - "matcher": "error", - "expected": "ResourceNotFoundException" - } - ] - }, - "AssetModelActive": { - "delay": 3, - "maxAttempts": 20, - "operation": "DescribeAssetModel", - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "assetModelStatus.state", - "expected": "ACTIVE" - }, - { - "state": "failure", - "matcher": "path", - "argument": "assetModelStatus.state", - "expected": "FAILED" - } - ] - }, - "AssetNotExists": { - "delay": 3, - "maxAttempts": 20, - "operation": "DescribeAsset", - "acceptors": [ - { - "state": "success", - "matcher": "error", - "expected": "ResourceNotFoundException" - } - ] - }, - "AssetActive": { - "delay": 3, - "maxAttempts": 20, - "operation": "DescribeAsset", - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "assetStatus.state", - "expected": "ACTIVE" - }, - { - "state": "failure", - "matcher": "path", - "argument": "assetStatus.state", - "expected": "FAILED" - } - ] - }, - "PortalNotExists": { - "delay": 3, - "maxAttempts": 20, - "operation": "DescribePortal", - "acceptors": [ - { - "state": "success", - "matcher": "error", - "expected": "ResourceNotFoundException" - } - ] - }, - "PortalActive": { - "delay": 3, - "maxAttempts": 20, - "operation": "DescribePortal", - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "portalStatus.state", - "expected": "ACTIVE" - } - ] - } - } - } diff --git a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index f3b4f33..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/examples-1.json b/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/paginators-1.json deleted file mode 100644 index bc92f84..0000000 --- a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "GetFlowTemplateRevisions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "GetSystemTemplateRevisions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "ListFlowExecutionMessages": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "messages" - }, - "ListTagsForResource": { - "input_token": "nextToken", - 
"limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "tags" - }, - "SearchEntities": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "descriptions" - }, - "SearchFlowExecutions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "SearchFlowTemplates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "SearchSystemInstances": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "SearchSystemTemplates": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "summaries" - }, - "SearchThings": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "things" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/service-2.json.gz deleted file mode 100644 index dec48ed..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotthingsgraph/2018-09-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5fd071e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/examples-1.json b/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/service-2.json.gz deleted file mode 100644 index bd9f847..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/waiters-2.json b/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/iottwinmaker/2021-11-29/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index ea18626..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/examples-1.json b/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/service-2.json.gz deleted file mode 100644 index 014167f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/iotwireless/2020-11-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5f032b5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/paginators-1.json deleted file mode 100644 index 8eb0208..0000000 --- a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListPublicKeys": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "publicKeys" - }, - "ListIngestConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "ingestConfigurations" - }, - "ListParticipantReplicas": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "replicas" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/service-2.json.gz deleted file mode 100644 index 36e47a3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/ivs-realtime/2020-07-14/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 56b0cf5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/examples-1.json b/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/paginators-1.json deleted file mode 100644 index 572d1c7..0000000 --- a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListChannels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "channels" - }, - "ListStreamKeys": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "streamKeys" - }, - "ListStreams": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "streams" - }, - "ListPlaybackKeyPairs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "keyPairs" - }, - "ListRecordingConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recordingConfigurations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/service-2.json.gz deleted file mode 100644 index 396d462..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ivs/2020-07-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index b8c587b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/examples-1.json b/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/service-2.json.gz deleted file mode 100644 index 09cdbee..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/ivschat/2020-07-14/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index ed98ea1..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/paginators-1.json deleted file mode 100644 index 158c50c..0000000 --- a/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/paginators-1.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "pagination": { - "ListClusters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ClusterInfoList" - }, - "ListNodes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "NodeInfoList" - }, - "ListConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Configurations" - }, - "ListClusterOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClusterOperationInfoList" - }, - "ListConfigurationRevisions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Revisions" - }, - "ListKafkaVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "KafkaVersions" - }, - "ListScramSecrets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SecretArnList" - }, - "ListClustersV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClusterInfoList" - }, - "ListVpcConnections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VpcConnections" - }, - "ListClientVpcConnections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClientVpcConnections" - }, - "ListClusterOperationsV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClusterOperationInfoList" - }, - "ListReplicators": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Replicators" - }, - "ListTopics": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Topics" - }, - "DescribeTopicPartitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Partitions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/service-2.json.gz deleted file mode 100644 index dac5bad..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kafka/2018-11-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index ceab029..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/examples-1.json b/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/paginators-1.json deleted file mode 100644 index d84b26f..0000000 --- a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListConnectors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "connectors" - }, - "ListCustomPlugins": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "customPlugins" - }, - "ListWorkerConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workerConfigurations" - }, - "ListConnectorOperations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "connectorOperations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/service-2.json.gz deleted file mode 100644 index 7d16af5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/kafkaconnect/2021-09-14/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6427638..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/service-2.json.gz deleted file mode 100644 index 88b2d92..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kendra-ranking/2022-10-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index f58eaab..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/examples-1.json b/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/service-2.json.gz deleted file mode 100644 index 2cabb90..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kendra/2019-02-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 62ab3ab..0000000 Binary files a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/examples-1.json b/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/paginators-1.json deleted file mode 100644 index 4145b28..0000000 --- a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListKeyspaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "keyspaces" - }, - "ListTables": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tables" - }, - "ListTagsForResource": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tags" - }, - "ListTypes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "types" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/service-2.json.gz deleted file mode 100644 index d3e616e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/waiters-2.json deleted file mode 100644 index 4b20636..0000000 --- a/venv/Lib/site-packages/botocore/data/keyspaces/2022-02-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/endpoint-rule-set-1.json.gz deleted file mode 100644 index 65767bc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/paginators-1.json b/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/paginators-1.json deleted file mode 100644 index 570c7a7..0000000 --- a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "GetStream": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "shards" - }, - "ListStreams": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "streams" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/paginators-1.sdk-extras.json deleted file mode 100644 index 166cb24..0000000 --- a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/paginators-1.sdk-extras.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetStream": { - "non_aggregate_keys": [ - "streamStatus", - "streamLabel", - "creationRequestDateTime", - "keyspaceName", - "tableName", - "streamArn", - "streamViewType" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/service-2.json.gz b/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/service-2.json.gz deleted file mode 100644 index e5558d9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/keyspacesstreams/2024-09-09/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7624d63..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/paginators-1.json deleted file mode 100644 index a9a7041..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListFragments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Fragments" - }, - "GetImages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Images" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/service-2.json.gz deleted file mode 100644 index ce9c217..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-archived-media/2017-09-30/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7624d63..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/service-2.json.gz deleted file mode 100644 index 46e5eae..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-media/2017-09-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7624d63..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/service-2.json.gz deleted file mode 100644 index ff6a53b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-signaling/2019-12-04/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2429463..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/service-2.json.gz deleted file mode 100644 index 94f7e05..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis-video-webrtc-storage/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index dc73164..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/paginators-1.json deleted file mode 100644 index 3d680e6..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/paginators-1.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "pagination": { - "DescribeStream": { - "input_token": "ExclusiveStartShardId", - "limit_key": "Limit", - "more_results": "StreamDescription.HasMoreShards", - "output_token": "StreamDescription.Shards[-1].ShardId", - "result_key": "StreamDescription.Shards", - "non_aggregate_keys": [ - "StreamDescription.StreamARN", - "StreamDescription.StreamName", - "StreamDescription.StreamStatus", - "StreamDescription.RetentionPeriodHours", - "StreamDescription.EnhancedMonitoring", - "StreamDescription.EncryptionType", - "StreamDescription.KeyId", - "StreamDescription.StreamCreationTimestamp" - ] - }, - "ListStreams": { - "input_token": "NextToken", - "limit_key": "Limit", - "more_results": "HasMoreStreams", - "output_token": "NextToken", - "result_key": [ - "StreamNames", - "StreamSummaries" - ] - }, - "ListShards": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Shards" - }, - "ListStreamConsumers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Consumers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/service-2.json.gz deleted file mode 100644 index f80e884..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/waiters-2.json b/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/waiters-2.json deleted file mode 100644 index d61efe4..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesis/2013-12-02/waiters-2.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - 
"version": 2, - "waiters": { - "StreamExists": { - "delay": 10, - "operation": "DescribeStream", - "maxAttempts": 18, - "acceptors": [ - { - "expected": "ACTIVE", - "matcher": "path", - "state": "success", - "argument": "StreamDescription.StreamStatus" - } - ] - }, - "StreamNotExists": { - "delay": 10, - "operation": "DescribeStream", - "maxAttempts": 18, - "acceptors": [ - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "success" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7750e32..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/service-2.json.gz deleted file mode 100644 index 334e8a9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesisanalytics/2015-08-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7750e32..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/paginators-1.json deleted file mode 100644 index eb315fd..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListApplicationSnapshots": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "SnapshotSummaries" - }, - "ListApplications": { - "input_token": "NextToken", - "limit_key": "Limit", - "output_token": "NextToken", - "result_key": "ApplicationSummaries" - }, - "ListApplicationOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", 
- "result_key": "ApplicationOperationInfoList" - }, - "ListApplicationVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "ApplicationVersionSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json.gz deleted file mode 100644 index c64d936..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesisanalyticsv2/2018-05-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7624d63..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/examples-1.json b/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/paginators-1.json deleted file mode 100644 index 9d83731..0000000 --- a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListStreams": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StreamInfoList" - }, - "ListSignalingChannels": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ChannelInfoList" - }, - "DescribeMappedResourceConfiguration": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MappedResourceConfigurationList" - }, - "ListEdgeAgentConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EdgeConfigs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/service-2.json.gz deleted file mode 100644 index f2b292a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kinesisvideo/2017-09-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/kms/2014-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1060eab..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/kms/2014-11-01/examples-1.json deleted file mode 100644 index c770d0e..0000000 --- a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/examples-1.json +++ /dev/null @@ -1,1750 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CancelKeyDeletion": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "KeyId": 
"arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose deletion you are canceling. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - }, - "output": { - "KeyId": "The ARN of the KMS key whose deletion you canceled." - } - }, - "description": "The following example cancels deletion of the specified KMS key.", - "id": "to-cancel-deletion-of-a-cmk-1477428535102", - "title": "To cancel deletion of a KMS key" - } - ], - "ConnectCustomKeyStore": [ - { - "input": { - "CustomKeyStoreId": "cks-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - "CustomKeyStoreId": "The ID of the AWS KMS custom key store." - }, - "output": { - } - }, - "description": "This example connects an AWS KMS custom key store to its AWS CloudHSM cluster. This operation does not return any data. To verify that the custom key store is connected, use the DescribeCustomKeyStores operation.", - "id": "to-connect-a-custom-key-store-to-its-cloudhsm-cluster-1628626947750", - "title": "To connect a custom key store to its CloudHSM cluster" - } - ], - "CreateAlias": [ - { - "input": { - "AliasName": "alias/ExampleAlias", - "TargetKeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "AliasName": "The alias to create. Aliases must begin with 'alias/'. Do not use aliases that begin with 'alias/aws' because they are reserved for use by AWS.", - "TargetKeyId": "The identifier of the KMS key whose alias you are creating. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example creates an alias for the specified KMS key.", - "id": "to-create-an-alias-1477505685119", - "title": "To create an alias" - } - ], - "CreateCustomKeyStore": [ - { - "input": { - "CloudHsmClusterId": "cluster-1a23b4cdefg", - "CustomKeyStoreName": "ExampleKeyStore", - "KeyStorePassword": "kmsPswd", - "TrustAnchorCertificate": "" - }, - "output": { - "CustomKeyStoreId": "cks-1234567890abcdef0" - }, - "comments": { - "input": { - "CloudHsmClusterId": "The ID of the CloudHSM cluster.", - "CustomKeyStoreName": "A friendly name for the custom key store.", - "KeyStorePassword": "The password for the kmsuser CU account in the specified cluster.", - "TrustAnchorCertificate": "The content of the customerCA.crt file that you created when you initialized the cluster." - }, - "output": { - "CustomKeyStoreId": "The ID of the new custom key store." 
- } - }, - "description": "This example creates a custom key store that is associated with an AWS CloudHSM cluster.", - "id": "to-create-an-aws-cloudhsm-custom-key-store-1628627769469", - "title": "To create an AWS CloudHSM custom key store" - } - ], - "CreateGrant": [ - { - "input": { - "GranteePrincipal": "arn:aws:iam::111122223333:role/ExampleRole", - "KeyId": "arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Operations": [ - "Encrypt", - "Decrypt" - ] - }, - "output": { - "GrantId": "0c237476b39f8bc44e45212e08498fbe3151305030726c0590dd8d3e9f3d6a60", - "GrantToken": "AQpAM2RhZTk1MGMyNTk2ZmZmMzEyYWVhOWViN2I1MWM4Mzc0MWFiYjc0ZDE1ODkyNGFlNTIzODZhMzgyZjBlNGY3NiKIAgEBAgB4Pa6VDCWW__MSrqnre1HIN0Grt00ViSSuUjhqOC8OT3YAAADfMIHcBgkqhkiG9w0BBwaggc4wgcsCAQAwgcUGCSqGSIb3DQEHATAeBglghkgBZQMEAS4wEQQMmqLyBTAegIn9XlK5AgEQgIGXZQjkBcl1dykDdqZBUQ6L1OfUivQy7JVYO2-ZJP7m6f1g8GzV47HX5phdtONAP7K_HQIflcgpkoCqd_fUnE114mSmiagWkbQ5sqAVV3ov-VeqgrvMe5ZFEWLMSluvBAqdjHEdMIkHMlhlj4ENZbzBfo9Wxk8b8SnwP4kc4gGivedzFXo-dwN8fxjjq_ZZ9JFOj2ijIbj5FyogDCN0drOfi8RORSEuCEmPvjFRMFAwcmwFkN2NPp89amA" - }, - "comments": { - "input": { - "GranteePrincipal": "The identity that is given permission to perform the operations specified in the grant.", - "KeyId": "The identifier of the KMS key to which the grant applies. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key.", - "Operations": "A list of operations that the grant allows." - }, - "output": { - "GrantId": "The unique identifier of the grant.", - "GrantToken": "The grant token." - } - }, - "description": "The following example creates a grant that allows the specified IAM role to encrypt data with the specified KMS key.", - "id": "to-create-a-grant-1477972226782", - "title": "To create a grant" - } - ], - "CreateKey": [ - { - "input": { - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": "2017-07-05T14:04:55-07:00", - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "SYMMETRIC_DEFAULT", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": false, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "Tags": "One or more tags. Each tag consists of a tag key and a tag value." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "The following example creates a symmetric KMS key for encryption and decryption. 
No parameters are required for this operation.", - "id": "to-create-a-cmk-1478028992966", - "title": "To create a KMS key" - }, - { - "input": { - "KeySpec": "RSA_4096", - "KeyUsage": "ENCRYPT_DECRYPT" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": "2021-04-05T14:04:55-07:00", - "CustomerMasterKeySpec": "RSA_4096", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "RSAES_OAEP_SHA_1", - "RSAES_OAEP_SHA_256" - ], - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "RSA_4096", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": false, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "KeySpec": "Describes the type of key material in the KMS key.", - "KeyUsage": "The cryptographic operations for which you can use the KMS key." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "This example creates a KMS key that contains an asymmetric RSA key pair for encryption and decryption. The key spec and key usage can't be changed after the key is created.", - "id": "to-create-an-asymmetric-rsa-kms-key-for-encryption-and-decryption-1630533897833", - "title": "To create an asymmetric RSA KMS key for encryption and decryption" - }, - { - "input": { - "KeySpec": "ECC_NIST_P521", - "KeyUsage": "SIGN_VERIFY" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": "2019-12-02T07:48:55-07:00", - "CustomerMasterKeySpec": "ECC_NIST_P521", - "Description": "", - "Enabled": true, - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "ECC_NIST_P521", - "KeyState": "Enabled", - "KeyUsage": "SIGN_VERIFY", - "MultiRegion": false, - "Origin": "AWS_KMS", - "SigningAlgorithms": [ - "ECDSA_SHA_512" - ] - } - }, - "comments": { - "input": { - "KeySpec": "Describes the type of key material in the KMS key.", - "KeyUsage": "The cryptographic operations for which you can use the KMS key." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "This example creates a KMS key that contains an asymmetric elliptic curve (ECC) key pair for signing and verification. The key usage is required even though \"SIGN_VERIFY\" is the only valid value for ECC KMS keys. 
The key spec and key usage can't be changed after the key is created.", - "id": "to-create-an-asymmetric-elliptic-curve-kms-key-for-signing-and-verification-1630541089401", - "title": "To create an asymmetric elliptic curve KMS key for signing and verification" - }, - { - "input": { - "MultiRegion": true - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-west-2:111122223333:key/mrk-1234abcd12ab34cd56ef12345678990ab", - "CreationDate": "2021-09-02T016:15:21-09:00", - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "mrk-1234abcd12ab34cd56ef12345678990ab", - "KeyManager": "CUSTOMER", - "KeySpec": "SYMMETRIC_DEFAULT", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": true, - "MultiRegionConfiguration": { - "MultiRegionKeyType": "PRIMARY", - "PrimaryKey": { - "Arn": "arn:aws:kms:us-west-2:111122223333:key/mrk-1234abcd12ab34cd56ef12345678990ab", - "Region": "us-west-2" - }, - "ReplicaKeys": [ - - ] - }, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "MultiRegion": "Indicates whether the KMS key is a multi-Region (True) or regional (False) key." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "This example creates a multi-Region primary symmetric encryption key. Because the default values for all parameters create a symmetric encryption key, only the MultiRegion parameter is required for this KMS key.", - "id": "to-create-a-multi-region-primary-kms-key-1630599158567", - "title": "To create a multi-Region primary KMS key" - }, - { - "input": { - "Origin": "EXTERNAL" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": "2019-12-02T07:48:55-07:00", - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": false, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "SYMMETRIC_DEFAULT", - "KeyState": "PendingImport", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": false, - "Origin": "EXTERNAL" - } - }, - "comments": { - "input": { - "Origin": "The source of the key material for the KMS key." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "This example creates a KMS key with no key material. When the operation is complete, you can import your own key material into the KMS key. To create this KMS key, set the Origin parameter to EXTERNAL. 
", - "id": "to-create-a-kms-key-for-imported-key-material-1630603607560", - "title": "To create a KMS key for imported key material" - }, - { - "input": { - "CustomKeyStoreId": "cks-1234567890abcdef0", - "Origin": "AWS_CLOUDHSM" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CloudHsmClusterId": "cluster-1a23b4cdefg", - "CreationDate": "2019-12-02T07:48:55-07:00", - "CustomKeyStoreId": "cks-1234567890abcdef0", - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "SYMMETRIC_DEFAULT", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": false, - "Origin": "AWS_CLOUDHSM" - } - }, - "comments": { - "input": { - "CustomKeyStoreId": "Identifies the custom key store that hosts the KMS key.", - "Origin": "Indicates the source of the key material for the KMS key." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "This example creates a KMS key in the specified custom key store. The operation creates the KMS key and its metadata in AWS KMS and the key material in the AWS CloudHSM cluster associated with the custom key store. This example requires the Origin and CustomKeyStoreId parameters.", - "id": "to-create-a-kms-key-in-a-custom-key-store-1630604382908", - "title": "To create a KMS key in a custom key store" - }, - { - "input": { - "KeySpec": "HMAC_384", - "KeyUsage": "GENERATE_VERIFY_MAC" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": "2022-04-05T14:04:55-07:00", - "CustomerMasterKeySpec": "HMAC_384", - "Description": "", - "Enabled": true, - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "HMAC_384", - "KeyState": "Enabled", - "KeyUsage": "GENERATE_VERIFY_MAC", - "MacAlgorithms": [ - "HMAC_SHA_384" - ], - "MultiRegion": false, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "KeySpec": "Describes the type of key material in the KMS key.", - "KeyUsage": "The cryptographic operations for which you can use the KMS key." - }, - "output": { - "KeyMetadata": "Detailed information about the KMS key that this operation creates." - } - }, - "description": "This example creates a 384-bit symmetric HMAC KMS key. The GENERATE_VERIFY_MAC key usage value is required even though it's the only valid value for HMAC KMS keys. The key spec and key usage can't be changed after the key is created. ", - "id": "to-create-an-hmac-kms-key-1630628752841", - "title": "To create an HMAC KMS key" - } - ], - "Decrypt": [ - { - "input": { - "CiphertextBlob": "", - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Plaintext": "" - }, - "comments": { - "input": { - "CiphertextBlob": "The encrypted data (ciphertext).", - "KeyId": "A key identifier for the KMS key to use to decrypt the data." - }, - "output": { - "KeyId": "The Amazon Resource Name (ARN) of the KMS key that was used to decrypt the data.", - "Plaintext": "The decrypted (plaintext) data." 
- } - }, - "description": "The following example decrypts data that was encrypted with a KMS key.", - "id": "to-decrypt-data-1478281622886", - "title": "To decrypt data" - } - ], - "DeleteAlias": [ - { - "input": { - "AliasName": "alias/ExampleAlias" - }, - "comments": { - "input": { - "AliasName": "The alias to delete." - } - }, - "description": "The following example deletes the specified alias.", - "id": "to-delete-an-alias-1478285209338", - "title": "To delete an alias" - } - ], - "DeleteCustomKeyStore": [ - { - "input": { - "CustomKeyStoreId": "cks-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - "CustomKeyStoreId": "The ID of the custom key store to be deleted." - }, - "output": { - } - }, - "description": "This example deletes a custom key store from AWS KMS. This operation does not delete the AWS CloudHSM cluster that was associated with the CloudHSM cluster. This operation doesn't return any data. To verify that the operation was successful, use the DescribeCustomKeyStores operation. ", - "id": "to-delete-a-custom-key-store-from-aws-kms-1628630837145", - "title": "To delete a custom key store from AWS KMS" - } - ], - "DeleteImportedKeyMaterial": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose imported key material you are deleting. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example deletes the imported key material from the specified KMS key.", - "id": "to-delete-imported-key-material-1478561674507", - "title": "To delete imported key material" - } - ], - "DescribeCustomKeyStores": [ - { - "input": { - }, - "output": { - "CustomKeyStores": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - "CustomKeyStores": "Details about each custom key store in the account and Region." - } - }, - "description": "This example gets detailed information about all AWS KMS custom key stores in an AWS account and Region. To get all key stores, do not enter a custom key store name or ID.", - "id": "to-get-detailed-information-about-custom-key-stores-in-the-account-and-region-1628628556811", - "title": "To get detailed information about custom key stores in the account and Region" - }, - { - "input": { - "CustomKeyStoreName": "ExampleKeyStore" - }, - "output": { - "CustomKeyStores": [ - { - "CloudHsmClusterId": "cluster-1a23b4cdefg", - "ConnectionState": "CONNECTED", - "CreationDate": "1.499288695918E9", - "CustomKeyStoreId": "cks-1234567890abcdef0", - "CustomKeyStoreName": "ExampleKeyStore", - "TrustAnchorCertificate": "" - } - ] - }, - "comments": { - "input": { - "CustomKeyStoreName": "The friendly name of the custom key store." - }, - "output": { - "CustomKeyStores": "Detailed information about the specified custom key store." - } - }, - "description": "This example gets detailed information about a particular AWS KMS custom key store that is associate with an AWS CloudHSM cluster. To limit the output to a particular custom key store, provide the custom key store name or ID. ", - "id": "to-get-detailed-information-about-a-custom-key-store-associated-with-a-cloudhsm-cluster-1628628885843", - "title": "To get detailed information about a custom key store associated with a CloudHSM cluster." 
- } - ], - "DescribeKey": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": "2017-07-05T14:04:55-07:00", - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "SYMMETRIC_DEFAULT", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": false, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "KeyId": "An identifier for the KMS key. You can use the key ID, key ARN, alias name, alias ARN of the KMS key." - }, - "output": { - "KeyMetadata": "An object that contains information about the specified KMS key." - } - }, - "description": "The following example gets metadata for a symmetric encryption KMS key.", - "id": "get-key-details-1478565820907", - "title": "To get details about a KMS key" - }, - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": 1571767572.317, - "CustomerMasterKeySpec": "RSA_2048", - "Description": "", - "Enabled": false, - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeySpec": "RSA_2048", - "KeyState": "Disabled", - "KeyUsage": "SIGN_VERIFY", - "MultiRegion": false, - "Origin": "AWS_KMS", - "SigningAlgorithms": [ - "RSASSA_PKCS1_V1_5_SHA_256", - "RSASSA_PKCS1_V1_5_SHA_384", - "RSASSA_PKCS1_V1_5_SHA_512", - "RSASSA_PSS_SHA_256", - "RSASSA_PSS_SHA_384", - "RSASSA_PSS_SHA_512" - ] - } - }, - "comments": { - "input": { - "KeyId": "An identifier for the KMS key. You can use the key ID, key ARN, alias name, alias ARN of the KMS key." - }, - "output": { - "KeyMetadata": "An object that contains information about the specified KMS key." 
- } - }, - "description": "The following example gets metadata for an asymmetric RSA KMS key used for signing and verification.", - "id": "to-get-details-about-an-rsa-asymmetric-kms-key-1637971611761", - "title": "To get details about an RSA asymmetric KMS key" - }, - { - "input": { - "KeyId": "arn:aws:kms:ap-northeast-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:ap-northeast-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "CreationDate": 1586329200.918, - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "mrk-1234abcd12ab34cd56ef1234567890ab", - "KeyManager": "CUSTOMER", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": true, - "MultiRegionConfiguration": { - "MultiRegionKeyType": "PRIMARY", - "PrimaryKey": { - "Arn": "arn:aws:kms:us-west-2:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "Region": "us-west-2" - }, - "ReplicaKeys": [ - { - "Arn": "arn:aws:kms:eu-west-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "Region": "eu-west-1" - }, - { - "Arn": "arn:aws:kms:ap-northeast-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "Region": "ap-northeast-1" - }, - { - "Arn": "arn:aws:kms:sa-east-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "Region": "sa-east-1" - } - ] - }, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "KeyId": "An identifier for the KMS key. You can use the key ID, key ARN, alias name, alias ARN of the KMS key." - }, - "output": { - "KeyMetadata": "An object that contains information about the specified KMS key." - } - }, - "description": "The following example gets metadata for a multi-Region replica key. This multi-Region key is a symmetric encryption key. DescribeKey returns information about the primary key and all of its replicas.", - "id": "to-get-details-about-a-multi-region-key-1637969624239", - "title": "To get details about a multi-Region key" - }, - { - "input": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "KeyMetadata": { - "AWSAccountId": "123456789012", - "Arn": "arn:aws:kms:us-west-2:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "CreationDate": 1566160362.664, - "CustomerMasterKeySpec": "HMAC_256", - "Description": "Development test key", - "Enabled": true, - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyManager": "CUSTOMER", - "KeyState": "Enabled", - "KeyUsage": "GENERATE_VERIFY_MAC", - "MacAlgorithms": [ - "HMAC_SHA_256" - ], - "MultiRegion": false, - "Origin": "AWS_KMS" - } - }, - "comments": { - "input": { - "KeyId": "An identifier for the KMS key. You can use the key ID, key ARN, alias name, alias ARN of the KMS key." - }, - "output": { - "KeyMetadata": "An object that contains information about the specified KMS key." - } - }, - "description": "The following example gets the metadata of an HMAC KMS key. ", - "id": "to-get-details-about-an-hmac-kms-key-1637970472619", - "title": "To get details about an HMAC KMS key" - } - ], - "DisableKey": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to disable. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." 
- } - }, - "description": "The following example disables the specified KMS key.", - "id": "to-disable-a-cmk-1478566583659", - "title": "To disable a KMS key" - } - ], - "DisableKeyRotation": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose key material will no longer be rotated. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example disables automatic annual rotation of the key material for the specified KMS key.", - "id": "to-disable-automatic-rotation-of-key-material-1478624396092", - "title": "To disable automatic rotation of key material" - } - ], - "DisconnectCustomKeyStore": [ - { - "input": { - "CustomKeyStoreId": "cks-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - "CustomKeyStoreId": "The ID of the custom key store." - }, - "output": { - } - }, - "description": "This example disconnects an AWS KMS custom key store from its AWS CloudHSM cluster. This operation doesn't return any data. To verify that the custom key store is disconnected, use the DescribeCustomKeyStores operation.", - "id": "to-disconnect-a-custom-key-store-from-its-cloudhsm-cluster-1628627955156", - "title": "To disconnect a custom key store from its CloudHSM cluster" - } - ], - "EnableKey": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to enable. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example enables the specified KMS key.", - "id": "to-enable-a-cmk-1478627501129", - "title": "To enable a KMS key" - } - ], - "EnableKeyRotation": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose key material will be rotated annually. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example enables automatic annual rotation of the key material for the specified KMS key.", - "id": "to-enable-automatic-rotation-of-key-material-1478629109677", - "title": "To enable automatic rotation of key material" - } - ], - "Encrypt": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "Plaintext": "" - }, - "output": { - "CiphertextBlob": "", - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to use for encryption. You can use the key ID or Amazon Resource Name (ARN) of the KMS key, or the name or ARN of an alias that refers to the KMS key.", - "Plaintext": "The data to encrypt." - }, - "output": { - "CiphertextBlob": "The encrypted data (ciphertext).", - "KeyId": "The ARN of the KMS key that was used to encrypt the data." - } - }, - "description": "The following example encrypts data with the specified KMS key.", - "id": "to-encrypt-data-1478906026012", - "title": "To encrypt data" - } - ], - "GenerateDataKey": [ - { - "input": { - "KeyId": "alias/ExampleAlias", - "KeySpec": "AES_256" - }, - "output": { - "CiphertextBlob": "", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Plaintext": "" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to use to encrypt the data key. 
You can use the key ID or Amazon Resource Name (ARN) of the KMS key, or the name or ARN of an alias that refers to the KMS key.", - "KeySpec": "Specifies the type of data key to return." - }, - "output": { - "CiphertextBlob": "The encrypted data key.", - "KeyId": "The ARN of the KMS key that was used to encrypt the data key.", - "Plaintext": "The unencrypted (plaintext) data key." - } - }, - "description": "The following example generates a 256-bit symmetric data encryption key (data key) in two formats. One is the unencrypted (plainext) data key, and the other is the data key encrypted with the specified KMS key.", - "id": "to-generate-a-data-key-1478912956062", - "title": "To generate a data key" - } - ], - "GenerateDataKeyPair": [ - { - "input": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyPairSpec": "RSA_3072" - }, - "output": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyPairSpec": "RSA_3072", - "PrivateKeyCiphertextBlob": "", - "PrivateKeyPlaintext": "", - "PublicKey": "" - }, - "comments": { - "input": { - "KeyId": "The key ID of the symmetric encryption KMS key that encrypts the private RSA key in the data key pair.", - "KeyPairSpec": "The requested key spec of the RSA data key pair." - }, - "output": { - "KeyId": "The key ARN of the symmetric encryption KMS key that was used to encrypt the private key.", - "KeyPairSpec": "The actual key spec of the RSA data key pair.", - "PrivateKeyCiphertextBlob": "The encrypted private key of the RSA data key pair.", - "PrivateKeyPlaintext": "The plaintext private key of the RSA data key pair.", - "PublicKey": "The public key (plaintext) of the RSA data key pair." - } - }, - "description": "This example generates an RSA data key pair for encryption and decryption. The operation returns a plaintext public key and private key, and a copy of the private key that is encrypted under a symmetric encryption KMS key that you specify.", - "id": "to-generate-an-rsa-key-pair-for-encryption-and-decryption-1628619376878", - "title": "To generate an RSA key pair for encryption and decryption" - } - ], - "GenerateDataKeyPairWithoutPlaintext": [ - { - "input": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyPairSpec": "ECC_NIST_P521" - }, - "output": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "KeyPairSpec": "ECC_NIST_P521", - "PrivateKeyCiphertextBlob": "", - "PublicKey": "" - }, - "comments": { - "input": { - "KeyId": "The symmetric encryption KMS key that encrypts the private key of the ECC data key pair.", - "KeyPairSpec": "The requested key spec of the ECC asymmetric data key pair." - }, - "output": { - "KeyId": "The key ARN of the symmetric encryption KMS key that encrypted the private key in the ECC asymmetric data key pair.", - "KeyPairSpec": "The actual key spec of the ECC asymmetric data key pair.", - "PrivateKeyCiphertextBlob": "The encrypted private key of the asymmetric ECC data key pair.", - "PublicKey": "The public key (plaintext)." - } - }, - "description": "This example returns an asymmetric elliptic curve (ECC) data key pair. The private key is encrypted under the symmetric encryption KMS key that you specify. 
This operation doesn't return a plaintext (unencrypted) private key.", - "id": "to-generate-an-asymmetric-data-key-pair-without-a-plaintext-key-1628620971564", - "title": "To generate an asymmetric data key pair without a plaintext key" - } - ], - "GenerateDataKeyWithoutPlaintext": [ - { - "input": { - "KeyId": "alias/ExampleAlias", - "KeySpec": "AES_256" - }, - "output": { - "CiphertextBlob": "", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to use to encrypt the data key. You can use the key ID or Amazon Resource Name (ARN) of the KMS key, or the name or ARN of an alias that refers to the KMS key.", - "KeySpec": "Specifies the type of data key to return." - }, - "output": { - "CiphertextBlob": "The encrypted data key.", - "KeyId": "The ARN of the KMS key that was used to encrypt the data key." - } - }, - "description": "The following example generates an encrypted copy of a 256-bit symmetric data encryption key (data key). The data key is encrypted with the specified KMS key.", - "id": "to-generate-an-encrypted-data-key-1478914121134", - "title": "To generate an encrypted data key" - } - ], - "GenerateMac": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "MacAlgorithm": "HMAC_SHA_384", - "Message": "Hello World" - }, - "output": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Mac": "", - "MacAlgorithm": "HMAC_SHA_384" - }, - "comments": { - "input": { - "KeyId": "The HMAC KMS key input to the HMAC algorithm.", - "MacAlgorithm": "The HMAC algorithm requested for the operation.", - "Message": "The message input to the HMAC algorithm." - }, - "output": { - "KeyId": "The key ARN of the HMAC KMS key used in the operation.", - "Mac": "The HMAC tag that results from this operation.", - "MacAlgorithm": "The HMAC algorithm used in the operation." - } - }, - "description": "This example generates an HMAC for a message, an HMAC KMS key, and a MAC algorithm. The algorithm must be supported by the specified HMAC KMS key.", - "id": "to-generate-an-hmac-for-a-message-1631570135665", - "title": "To generate an HMAC for a message" - } - ], - "GenerateRandom": [ - { - "input": { - "NumberOfBytes": 32 - }, - "output": { - "Plaintext": "" - }, - "comments": { - "input": { - "NumberOfBytes": "The length of the random data, specified in number of bytes." - }, - "output": { - "Plaintext": "The random data." - } - }, - "description": "The following example generates 32 bytes of random data.", - "id": "to-generate-random-data-1479163645600", - "title": "To generate random data" - } - ], - "GetKeyPolicy": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "PolicyName": "default" - }, - "output": { - "Policy": "{\n \"Version\" : \"2012-10-17\",\n \"Id\" : \"key-default-1\",\n \"Statement\" : [ {\n \"Sid\" : \"Enable IAM User Permissions\",\n \"Effect\" : \"Allow\",\n \"Principal\" : {\n \"AWS\" : \"arn:aws:iam::111122223333:root\"\n },\n \"Action\" : \"kms:*\",\n \"Resource\" : \"*\"\n } ]\n}" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose key policy you want to retrieve. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key.", - "PolicyName": "The name of the key policy to retrieve." - }, - "output": { - "Policy": "The key policy document." 
- } - }, - "description": "The following example retrieves the key policy for the specified KMS key.", - "id": "to-retrieve-a-key-policy-1479170128325", - "title": "To retrieve a key policy" - } - ], - "GetKeyRotationStatus": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "KeyRotationEnabled": true - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose key material rotation status you want to retrieve. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - }, - "output": { - "KeyRotationEnabled": "A boolean that indicates the key material rotation status. Returns true when automatic annual rotation of the key material is enabled, or false when it is not." - } - }, - "description": "The following example retrieves the status of automatic annual rotation of the key material for the specified KMS key.", - "id": "to-retrieve-the-rotation-status-for-a-cmk-1479172287408", - "title": "To retrieve the rotation status for a KMS key" - } - ], - "GetParametersForImport": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "WrappingAlgorithm": "RSAES_OAEP_SHA_1", - "WrappingKeySpec": "RSA_2048" - }, - "output": { - "ImportToken": "", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "ParametersValidTo": "2016-12-01T14:52:17-08:00", - "PublicKey": "" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key for which to retrieve the public key and import token. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key.", - "WrappingAlgorithm": "The algorithm that you will use to encrypt the key material before importing it.", - "WrappingKeySpec": "The type of wrapping key (public key) to return in the response." - }, - "output": { - "ImportToken": "The import token to send with a subsequent ImportKeyMaterial request.", - "KeyId": "The ARN of the KMS key for which you are retrieving the public key and import token. This is the same KMS key specified in the request.", - "ParametersValidTo": "The time at which the import token and public key are no longer valid.", - "PublicKey": "The public key to use to encrypt the key material before importing it." - } - }, - "description": "The following example retrieves the public key and import token for the specified KMS key.", - "id": "to-retrieve-the-public-key-and-import-token-for-a-cmk-1480626483211", - "title": "To retrieve the public key and import token for a KMS key" - } - ], - "GetPublicKey": [ - { - "input": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/0987dcba-09fe-87dc-65ba-ab0987654321" - }, - "output": { - "CustomerMasterKeySpec": "RSA_4096", - "EncryptionAlgorithms": [ - "RSAES_OAEP_SHA_1", - "RSAES_OAEP_SHA_256" - ], - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/0987dcba-09fe-87dc-65ba-ab0987654321", - "KeyUsage": "ENCRYPT_DECRYPT", - "PublicKey": "" - }, - "comments": { - "input": { - "KeyId": "The key ARN of the asymmetric KMS key." - }, - "output": { - "CustomerMasterKeySpec": "The key spec of the asymmetric KMS key from which the public key was downloaded.", - "EncryptionAlgorithms": "The encryption algorithms supported by the asymmetric KMS key that was downloaded.", - "KeyId": "The key ARN of the asymmetric KMS key from which the public key was downloaded.", - "KeyUsage": "The key usage of the asymmetric KMS key from which the public key was downloaded.", - "PublicKey": "The public key (plaintext) of the asymmetric KMS key." 
- } - }, - "description": "This example gets the public key of an asymmetric RSA KMS key used for encryption and decryption. The operation returns the key spec, key usage, and encryption or signing algorithms to help you use the public key correctly outside of AWS KMS.", - "id": "to-download-the-public-key-of-an-asymmetric-kms-key-1628621691873", - "title": "To download the public key of an asymmetric KMS key" - } - ], - "ImportKeyMaterial": [ - { - "input": { - "EncryptedKeyMaterial": "", - "ExpirationModel": "KEY_MATERIAL_DOES_NOT_EXPIRE", - "ImportToken": "", - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "EncryptedKeyMaterial": "The encrypted key material to import.", - "ExpirationModel": "A value that specifies whether the key material expires.", - "ImportToken": "The import token that you received in the response to a previous GetParametersForImport request.", - "KeyId": "The identifier of the KMS key to import the key material into. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example imports key material into the specified KMS key.", - "id": "to-import-key-material-into-a-cmk-1480630551969", - "title": "To import key material into a KMS key" - } - ], - "ListAliases": [ - { - "output": { - "Aliases": [ - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/aws/acm", - "AliasName": "alias/aws/acm", - "TargetKeyId": "da03f6f7-d279-427a-9cae-de48d07e5b66" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/aws/ebs", - "AliasName": "alias/aws/ebs", - "TargetKeyId": "25a217e7-7170-4b8c-8bf6-045ea5f70e5b" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/aws/rds", - "AliasName": "alias/aws/rds", - "TargetKeyId": "7ec3104e-c3f2-4b5c-bf42-bfc4772c6685" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/aws/redshift", - "AliasName": "alias/aws/redshift", - "TargetKeyId": "08f7a25a-69e2-4fb5-8f10-393db27326fa" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/aws/s3", - "AliasName": "alias/aws/s3", - "TargetKeyId": "d2b0f1a3-580d-4f79-b836-bc983be8cfa5" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/example1", - "AliasName": "alias/example1", - "TargetKeyId": "4da1e216-62d0-46c5-a7c0-5f3a3d2f8046" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/example2", - "AliasName": "alias/example2", - "TargetKeyId": "f32fef59-2cc2-445b-8573-2d73328acbee" - }, - { - "AliasArn": "arn:aws:kms:us-east-2:111122223333:alias/example3", - "AliasName": "alias/example3", - "TargetKeyId": "1374ef38-d34e-4d5f-b2c9-4e0daee38855" - } - ], - "Truncated": false - }, - "comments": { - "output": { - "Aliases": "A list of aliases, including the key ID of the KMS key that each alias refers to.", - "Truncated": "A boolean that indicates whether there are more items in the list. Returns true when there are more items, or false when there are not." 
- } - }, - "description": "The following example lists aliases.", - "id": "to-list-aliases-1480729693349", - "title": "To list aliases" - } - ], - "ListGrants": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "Grants": [ - { - "CreationDate": "2016-10-25T14:37:41-07:00", - "GrantId": "91ad875e49b04a9d1f3bdeb84d821f9db6ea95e1098813f6d47f0c65fbe2a172", - "GranteePrincipal": "acm.us-east-2.amazonaws.com", - "IssuingAccount": "arn:aws:iam::111122223333:root", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Operations": [ - "Encrypt", - "ReEncryptFrom", - "ReEncryptTo" - ], - "RetiringPrincipal": "acm.us-east-2.amazonaws.com" - }, - { - "CreationDate": "2016-10-25T14:37:41-07:00", - "GrantId": "a5d67d3e207a8fc1f4928749ee3e52eb0440493a8b9cf05bbfad91655b056200", - "GranteePrincipal": "acm.us-east-2.amazonaws.com", - "IssuingAccount": "arn:aws:iam::111122223333:root", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Operations": [ - "ReEncryptFrom", - "ReEncryptTo" - ], - "RetiringPrincipal": "acm.us-east-2.amazonaws.com" - }, - { - "CreationDate": "2016-10-25T14:37:41-07:00", - "GrantId": "c541aaf05d90cb78846a73b346fc43e65be28b7163129488c738e0c9e0628f4f", - "GranteePrincipal": "acm.us-east-2.amazonaws.com", - "IssuingAccount": "arn:aws:iam::111122223333:root", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Operations": [ - "Encrypt", - "ReEncryptFrom", - "ReEncryptTo" - ], - "RetiringPrincipal": "acm.us-east-2.amazonaws.com" - }, - { - "CreationDate": "2016-10-25T14:37:41-07:00", - "GrantId": "dd2052c67b4c76ee45caf1dc6a1e2d24e8dc744a51b36ae2f067dc540ce0105c", - "GranteePrincipal": "acm.us-east-2.amazonaws.com", - "IssuingAccount": "arn:aws:iam::111122223333:root", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Operations": [ - "Encrypt", - "ReEncryptFrom", - "ReEncryptTo" - ], - "RetiringPrincipal": "acm.us-east-2.amazonaws.com" - } - ], - "Truncated": true - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose grants you want to list. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - }, - "output": { - "Grants": "A list of grants.", - "Truncated": "A boolean that indicates whether there are more items in the list. Returns true when there are more items, or false when there are not." - } - }, - "description": "The following example lists grants for the specified KMS key.", - "id": "to-list-grants-for-a-cmk-1481067365389", - "title": "To list grants for a KMS key" - } - ], - "ListKeyPolicies": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "PolicyNames": [ - "default" - ], - "Truncated": false - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose key policies you want to list. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - }, - "output": { - "PolicyNames": "A list of key policy names.", - "Truncated": "A boolean that indicates whether there are more items in the list. Returns true when there are more items, or false when there are not." 
- } - }, - "description": "The following example lists key policies for the specified KMS key.", - "id": "to-list-key-policies-for-a-cmk-1481069780998", - "title": "To list key policies for a KMS key" - } - ], - "ListKeys": [ - { - "output": { - "Keys": [ - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/0d990263-018e-4e65-a703-eff731de951e", - "KeyId": "0d990263-018e-4e65-a703-eff731de951e" - }, - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/144be297-0ae1-44ac-9c8f-93cd8c82f841", - "KeyId": "144be297-0ae1-44ac-9c8f-93cd8c82f841" - }, - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/21184251-b765-428e-b852-2c7353e72571", - "KeyId": "21184251-b765-428e-b852-2c7353e72571" - }, - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/214fe92f-5b03-4ae1-b350-db2a45dbe10c", - "KeyId": "214fe92f-5b03-4ae1-b350-db2a45dbe10c" - }, - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/339963f2-e523-49d3-af24-a0fe752aa458", - "KeyId": "339963f2-e523-49d3-af24-a0fe752aa458" - }, - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/b776a44b-df37-4438-9be4-a27494e4271a", - "KeyId": "b776a44b-df37-4438-9be4-a27494e4271a" - }, - { - "KeyArn": "arn:aws:kms:us-east-2:111122223333:key/deaf6c9e-cf2c-46a6-bf6d-0b6d487cffbb", - "KeyId": "deaf6c9e-cf2c-46a6-bf6d-0b6d487cffbb" - } - ], - "Truncated": false - }, - "comments": { - "output": { - "Keys": "A list of KMS keys, including the key ID and Amazon Resource Name (ARN) of each one.", - "Truncated": "A boolean that indicates whether there are more items in the list. Returns true when there are more items, or false when there are not." - } - }, - "description": "The following example lists KMS keys.", - "id": "to-list-cmks-1481071643069", - "title": "To list KMS keys" - } - ], - "ListResourceTags": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "output": { - "Tags": [ - { - "TagKey": "CostCenter", - "TagValue": "87654" - }, - { - "TagKey": "CreatedBy", - "TagValue": "ExampleUser" - }, - { - "TagKey": "Purpose", - "TagValue": "Test" - } - ], - "Truncated": false - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose tags you are listing. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - }, - "output": { - "Tags": "A list of tags.", - "Truncated": "A boolean that indicates whether there are more items in the list. Returns true when there are more items, or false when there are not." - } - }, - "description": "The following example lists tags for a KMS key.", - "id": "to-list-tags-for-a-cmk-1483996855796", - "title": "To list tags for a KMS key" - } - ], - "ListRetirableGrants": [ - { - "input": { - "RetiringPrincipal": "arn:aws:iam::111122223333:role/ExampleRole" - }, - "output": { - "Grants": [ - { - "CreationDate": "2016-12-07T11:09:35-08:00", - "GrantId": "0c237476b39f8bc44e45212e08498fbe3151305030726c0590dd8d3e9f3d6a60", - "GranteePrincipal": "arn:aws:iam::111122223333:role/ExampleRole", - "IssuingAccount": "arn:aws:iam::444455556666:root", - "KeyId": "arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Operations": [ - "Decrypt", - "Encrypt" - ], - "RetiringPrincipal": "arn:aws:iam::111122223333:role/ExampleRole" - } - ], - "Truncated": false - }, - "comments": { - "input": { - "RetiringPrincipal": "The retiring principal whose grants you want to list. Use the Amazon Resource Name (ARN) of a principal such as an AWS account (root), IAM user, federated user, or assumed role user." 
- }, - "output": { - "Grants": "A list of grants that the specified principal can retire.", - "Truncated": "A boolean that indicates whether there are more items in the list. Returns true when there are more items, or false when there are not." - } - }, - "description": "The following example lists the grants that the specified principal (identity) can retire.", - "id": "to-list-grants-that-the-specified-principal-can-retire-1481140499620", - "title": "To list grants that the specified principal can retire" - } - ], - "PutKeyPolicy": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "Policy": "{\n \"Version\": \"2012-10-17\",\n \"Id\": \"custom-policy-2016-12-07\",\n \"Statement\": [\n {\n \"Sid\": \"Enable IAM User Permissions\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": \"arn:aws:iam::111122223333:root\"\n },\n \"Action\": \"kms:*\",\n \"Resource\": \"*\"\n },\n {\n \"Sid\": \"Allow access for Key Administrators\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": [\n \"arn:aws:iam::111122223333:user/ExampleAdminUser\",\n \"arn:aws:iam::111122223333:role/ExampleAdminRole\"\n ]\n },\n \"Action\": [\n \"kms:Create*\",\n \"kms:Describe*\",\n \"kms:Enable*\",\n \"kms:List*\",\n \"kms:Put*\",\n \"kms:Update*\",\n \"kms:Revoke*\",\n \"kms:Disable*\",\n \"kms:Get*\",\n \"kms:Delete*\",\n \"kms:ScheduleKeyDeletion\",\n \"kms:CancelKeyDeletion\"\n ],\n \"Resource\": \"*\"\n },\n {\n \"Sid\": \"Allow use of the key\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": \"arn:aws:iam::111122223333:role/ExamplePowerUserRole\"\n },\n \"Action\": [\n \"kms:Encrypt\",\n \"kms:Decrypt\",\n \"kms:ReEncrypt*\",\n \"kms:GenerateDataKey*\",\n \"kms:DescribeKey\"\n ],\n \"Resource\": \"*\"\n },\n {\n \"Sid\": \"Allow attachment of persistent resources\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": \"arn:aws:iam::111122223333:role/ExamplePowerUserRole\"\n },\n \"Action\": [\n \"kms:CreateGrant\",\n \"kms:ListGrants\",\n \"kms:RevokeGrant\"\n ],\n \"Resource\": \"*\",\n \"Condition\": {\n \"Bool\": {\n \"kms:GrantIsForAWSResource\": \"true\"\n }\n }\n }\n ]\n}\n", - "PolicyName": "default" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to attach the key policy to. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key.", - "Policy": "The key policy document.", - "PolicyName": "The name of the key policy." - } - }, - "description": "The following example attaches a key policy to the specified KMS key.", - "id": "to-attach-a-key-policy-to-a-cmk-1481147345018", - "title": "To attach a key policy to a KMS key" - } - ], - "ReEncrypt": [ - { - "input": { - "CiphertextBlob": "", - "DestinationKeyId": "0987dcba-09fe-87dc-65ba-ab0987654321" - }, - "output": { - "CiphertextBlob": "", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/0987dcba-09fe-87dc-65ba-ab0987654321", - "SourceKeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "CiphertextBlob": "The data to reencrypt.", - "DestinationKeyId": "The identifier of the KMS key to use to reencrypt the data. You can use any valid key identifier.", - "SourceKeyId": "The identifier of the KMS key to use to decrypt the data. You can use any valid key identifier." - }, - "output": { - "CiphertextBlob": "The reencrypted data.", - "KeyId": "The ARN of the KMS key that was used to reencrypt the data.", - "SourceKeyId": "The ARN of the KMS key that was originally used to encrypt the data." 
- } - }, - "description": "The following example reencrypts data with the specified KMS key.", - "id": "to-reencrypt-data-1481230358001", - "title": "To reencrypt data" - } - ], - "ReplicateKey": [ - { - "input": { - "KeyId": "arn:aws:kms:us-east-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "ReplicaRegion": "us-west-2" - }, - "output": { - "ReplicaKeyMetadata": { - "AWSAccountId": "111122223333", - "Arn": "arn:aws:kms:us-west-2:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "CreationDate": 1607472987.918, - "CustomerMasterKeySpec": "SYMMETRIC_DEFAULT", - "Description": "", - "Enabled": true, - "EncryptionAlgorithms": [ - "SYMMETRIC_DEFAULT" - ], - "KeyId": "mrk-1234abcd12ab34cd56ef1234567890ab", - "KeyManager": "CUSTOMER", - "KeyState": "Enabled", - "KeyUsage": "ENCRYPT_DECRYPT", - "MultiRegion": true, - "MultiRegionConfiguration": { - "MultiRegionKeyType": "REPLICA", - "PrimaryKey": { - "Arn": "arn:aws:kms:us-east-1:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "Region": "us-east-1" - }, - "ReplicaKeys": [ - { - "Arn": "arn:aws:kms:us-west-2:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab", - "Region": "us-west-2" - } - ] - }, - "Origin": "AWS_KMS" - }, - "ReplicaPolicy": "{\n \"Version\" : \"2012-10-17\",\n \"Id\" : \"key-default-1\",...}", - "ReplicaTags": [ - - ] - }, - "comments": { - "input": { - "KeyId": "The key ID or key ARN of the multi-Region primary key", - "ReplicaRegion": "The Region of the new replica." - }, - "output": { - "ReplicaKeyMetadata": "An object that displays detailed information about the replica key.", - "ReplicaPolicy": "The key policy of the replica key. If you don't specify a key policy, the replica key gets the default key policy for a KMS key.", - "ReplicaTags": "The tags on the replica key, if any." - } - }, - "description": "This example creates a multi-Region replica key in us-west-2 of a multi-Region primary key in us-east-1. ", - "id": "to-replicate-a-multi-region-key-in-a-different-aws-region-1628622402887", - "title": "To replicate a multi-Region key in a different AWS Region" - } - ], - "RetireGrant": [ - { - "input": { - "GrantId": "0c237476b39f8bc44e45212e08498fbe3151305030726c0590dd8d3e9f3d6a60", - "KeyId": "arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "GrantId": "The identifier of the grant to retire.", - "KeyId": "The Amazon Resource Name (ARN) of the KMS key associated with the grant." - } - }, - "description": "The following example retires a grant.", - "id": "to-retire-a-grant-1481327028297", - "title": "To retire a grant" - } - ], - "RevokeGrant": [ - { - "input": { - "GrantId": "0c237476b39f8bc44e45212e08498fbe3151305030726c0590dd8d3e9f3d6a60", - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "GrantId": "The identifier of the grant to revoke.", - "KeyId": "The identifier of the KMS key associated with the grant. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." 
- } - }, - "description": "The following example revokes a grant.", - "id": "to-revoke-a-grant-1481329549302", - "title": "To revoke a grant" - } - ], - "ScheduleKeyDeletion": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "PendingWindowInDays": 7 - }, - "output": { - "DeletionDate": "2016-12-17T16:00:00-08:00", - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key to schedule for deletion. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key.", - "PendingWindowInDays": "The waiting period, specified in number of days. After the waiting period ends, KMS deletes the KMS key." - }, - "output": { - "DeletionDate": "The date and time after which KMS deletes the KMS key.", - "KeyId": "The ARN of the KMS key that is scheduled for deletion." - } - }, - "description": "The following example schedules the specified KMS key for deletion.", - "id": "to-schedule-a-cmk-for-deletion-1481331111094", - "title": "To schedule a KMS key for deletion" - } - ], - "Sign": [ - { - "input": { - "KeyId": "alias/ECC_signing_key", - "Message": "", - "MessageType": "RAW", - "SigningAlgorithm": "ECDSA_SHA_384" - }, - "output": { - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "Signature": "", - "SigningAlgorithm": "ECDSA_SHA_384" - }, - "comments": { - "input": { - "KeyId": "The asymmetric KMS key to be used to generate the digital signature. This example uses an alias of the KMS key.", - "Message": "Message to be signed. Use Base-64 for the CLI.", - "MessageType": "Indicates whether the message is RAW or a DIGEST.", - "SigningAlgorithm": "The requested signing algorithm. This must be an algorithm that the KMS key supports." - }, - "output": { - "KeyId": "The key ARN of the asymmetric KMS key that was used to sign the message.", - "Signature": "The digital signature of the message.", - "SigningAlgorithm": "The actual signing algorithm that was used to generate the signature." - } - }, - "description": "This operation uses the private key in an asymmetric elliptic curve (ECC) KMS key to generate a digital signature for a given message.", - "id": "to-digitally-sign-a-message-with-an-asymmetric-kms-key-1628631433832", - "title": "To digitally sign a message with an asymmetric KMS key." - } - ], - "TagResource": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "Tags": [ - { - "TagKey": "Purpose", - "TagValue": "Test" - } - ] - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key you are tagging. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key.", - "Tags": "A list of tags." - } - }, - "description": "The following example tags a KMS key.", - "id": "to-tag-a-cmk-1483997246518", - "title": "To tag a KMS key" - } - ], - "UntagResource": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "TagKeys": [ - "Purpose", - "CostCenter" - ] - }, - "comments": { - "input": { - "KeyId": "The identifier of the KMS key whose tags you are removing.", - "TagKeys": "A list of tag keys. Provide only the tag keys, not the tag values." 
- } - }, - "description": "The following example removes tags from a KMS key.", - "id": "to-remove-tags-from-a-cmk-1483997590962", - "title": "To remove tags from a KMS key" - } - ], - "UpdateAlias": [ - { - "input": { - "AliasName": "alias/ExampleAlias", - "TargetKeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "AliasName": "The alias to update.", - "TargetKeyId": "The identifier of the KMS key that the alias will refer to after this operation succeeds. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example updates the specified alias to refer to the specified KMS key.", - "id": "to-update-an-alias-1481572726920", - "title": "To update an alias" - } - ], - "UpdateCustomKeyStore": [ - { - "input": { - "CustomKeyStoreId": "cks-1234567890abcdef0", - "KeyStorePassword": "ExamplePassword" - }, - "output": { - }, - "comments": { - "input": { - "CustomKeyStoreId": "The ID of the custom key store that you are updating.", - "KeyStorePassword": "The password for the kmsuser crypto user in the CloudHSM cluster." - }, - "output": { - } - }, - "description": "This example tells KMS the password for the kmsuser crypto user in the AWS CloudHSM cluster that is associated with the AWS KMS custom key store. (It does not change the password in the CloudHSM cluster.) This operation does not return any data.", - "id": "to-edit-the-properties-of-a-custom-key-store-1628629851834", - "title": "To edit the password of a custom key store" - }, - { - "input": { - "CustomKeyStoreId": "cks-1234567890abcdef0", - "NewCustomKeyStoreName": "DevelopmentKeys" - }, - "output": { - }, - "comments": { - "input": { - "CustomKeyStoreId": "The ID of the custom key store that you are updating.", - "NewCustomKeyStoreName": "A new friendly name for the custom key store." - }, - "output": { - } - }, - "description": "This example changes the friendly name of the AWS KMS custom key store to the name that you specify. This operation does not return any data. To verify that the operation worked, use the DescribeCustomKeyStores operation.", - "id": "to-edit-the-friendly-name-of-a-custom-key-store-1630451340904", - "title": "To edit the friendly name of a custom key store" - }, - { - "input": { - "CloudHsmClusterId": "cluster-1a23b4cdefg", - "CustomKeyStoreId": "cks-1234567890abcdef0" - }, - "output": { - }, - "comments": { - "input": { - "CloudHsmClusterId": "The ID of the AWS CloudHSM cluster that you want to associate with the custom key store. This cluster must be related to the original CloudHSM cluster for this key store.", - "CustomKeyStoreId": "The ID of the custom key store that you are updating." - }, - "output": { - } - }, - "description": "This example changes the cluster that is associated with a custom key store to a related cluster, such as a different backup of the same cluster. This operation does not return any data. To verify that the operation worked, use the DescribeCustomKeyStores operation.", - "id": "to-associate-the-custom-key-store-with-a-different-but-related-aws-cloudhsm-cluster-1630451842438", - "title": "To associate the custom key store with a different, but related, AWS CloudHSM cluster." 
- } - ], - "UpdateKeyDescription": [ - { - "input": { - "Description": "Example description that indicates the intended use of this KMS key.", - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab" - }, - "comments": { - "input": { - "Description": "The updated description.", - "KeyId": "The identifier of the KMS key whose description you are updating. You can use the key ID or the Amazon Resource Name (ARN) of the KMS key." - } - }, - "description": "The following example updates the description of the specified KMS key.", - "id": "to-update-the-description-of-a-cmk-1481574808619", - "title": "To update the description of a KMS key" - } - ], - "Verify": [ - { - "input": { - "KeyId": "alias/ECC_signing_key", - "Message": "", - "MessageType": "RAW", - "Signature": "", - "SigningAlgorithm": "ECDSA_SHA_384" - }, - "output": { - "KeyId": "arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "SignatureValid": true, - "SigningAlgorithm": "ECDSA_SHA_384" - }, - "comments": { - "input": { - "KeyId": "The asymmetric KMS key to be used to verify the digital signature. This example uses an alias to identify the KMS key.", - "Message": "The message that was signed.", - "MessageType": "Indicates whether the message is RAW or a DIGEST.", - "Signature": "The signature to be verified.", - "SigningAlgorithm": "The signing algorithm to be used to verify the signature." - }, - "output": { - "KeyId": "The key ARN of the asymmetric KMS key that was used to verify the digital signature.", - "SignatureValid": "A value of 'true' Indicates that the signature was verified. If verification fails, the call to Verify fails.", - "SigningAlgorithm": "The signing algorithm that was used to verify the signature." - } - }, - "description": "This operation uses the public key in an elliptic curve (ECC) asymmetric key to verify a digital signature within AWS KMS. ", - "id": "to-use-an-asymmetric-kms-key-to-verify-a-digital-signature-1628633365663", - "title": "To use an asymmetric KMS key to verify a digital signature" - } - ], - "VerifyMac": [ - { - "input": { - "KeyId": "1234abcd-12ab-34cd-56ef-1234567890ab", - "Mac": "", - "MacAlgorithm": "HMAC_SHA_384", - "Message": "Hello World" - }, - "output": { - "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "MacAlgorithm": "HMAC_SHA_384", - "MacValid": true - }, - "comments": { - "input": { - "KeyId": "The HMAC KMS key input to the HMAC algorithm.", - "Mac": "The HMAC to be verified.", - "MacAlgorithm": "The HMAC algorithm requested for the operation.", - "Message": "The message input to the HMAC algorithm." - }, - "output": { - "KeyId": "The key ARN of the HMAC key used in the operation.", - "MacAlgorithm": "The HMAC algorithm used in the operation.", - "MacValid": "A value of 'true' indicates that verification succeeded. If verification fails, the call to VerifyMac fails." - } - }, - "description": "This example verifies an HMAC for a particular message, HMAC KMS keys, and MAC algorithm. 
A value of 'true' in the MacValid value in the response indicates that the HMAC is valid.", - "id": "to-verify-an-hmac-1631570863401", - "title": "To verify an HMAC" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/kms/2014-11-01/paginators-1.json deleted file mode 100644 index 0c48130..0000000 --- a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/paginators-1.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "pagination": { - "ListAliases": { - "limit_key": "Limit", - "input_token": "Marker", - "output_token": "NextMarker", - "more_results": "Truncated", - "result_key": "Aliases" - }, - "ListGrants": { - "limit_key": "Limit", - "input_token": "Marker", - "output_token": "NextMarker", - "more_results": "Truncated", - "result_key": "Grants" - }, - "ListKeyPolicies": { - "limit_key": "Limit", - "input_token": "Marker", - "output_token": "NextMarker", - "more_results": "Truncated", - "result_key": "PolicyNames" - }, - "ListKeys": { - "limit_key": "Limit", - "input_token": "Marker", - "output_token": "NextMarker", - "more_results": "Truncated", - "result_key": "Keys" - }, - "DescribeCustomKeyStores": { - "input_token": "Marker", - "limit_key": "Limit", - "more_results": "Truncated", - "output_token": "NextMarker", - "result_key": "CustomKeyStores" - }, - "ListResourceTags": { - "input_token": "Marker", - "limit_key": "Limit", - "more_results": "Truncated", - "output_token": "NextMarker", - "result_key": "Tags" - }, - "ListRetirableGrants": { - "input_token": "Marker", - "limit_key": "Limit", - "more_results": "Truncated", - "output_token": "NextMarker", - "result_key": "Grants" - }, - "ListKeyRotations": { - "input_token": "Marker", - "limit_key": "Limit", - "more_results": "Truncated", - "output_token": "NextMarker", - "result_key": "Rotations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/kms/2014-11-01/service-2.json.gz deleted file mode 100644 index cb60336..0000000 Binary files a/venv/Lib/site-packages/botocore/data/kms/2014-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1237b3a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/examples-1.json b/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/paginators-1.json deleted file mode 100644 index 5970057..0000000 --- a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "GetWorkUnits": { - "input_token": "NextToken", - "limit_key": "PageSize", - "output_token": "NextToken", - "result_key": "WorkUnitRanges" - }, - "ListDataCellsFilter": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - 
"result_key": "DataCellsFilters" - }, - "ListLFTags": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LFTags" - }, - "SearchDatabasesByLFTags": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DatabaseList" - }, - "SearchTablesByLFTags": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TableList" - }, - "ListLFTagExpressions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LFTagExpressions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/paginators-1.sdk-extras.json deleted file mode 100644 index aea980d..0000000 --- a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetWorkUnits": { - "non_aggregate_keys": [ - "QueryId" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/service-2.json.gz deleted file mode 100644 index e23411b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lakeformation/2017-03-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lambda/2014-11-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lambda/2014-11-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index f62b16b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lambda/2014-11-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lambda/2014-11-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lambda/2014-11-11/service-2.json.gz deleted file mode 100644 index 251a1a6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lambda/2014-11-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index c1fc88d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/examples-1.json b/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/examples-1.json deleted file mode 100644 index c33c1bb..0000000 --- a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/examples-1.json +++ /dev/null @@ -1,1513 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddLayerVersionPermission": [ - { - "input": { - "Action": "lambda:GetLayerVersion", - "LayerName": "my-layer", - "Principal": "223456789012", - "StatementId": "xaccount", - "VersionNumber": 1 - }, - "output": { - "RevisionId": "35d87451-f796-4a3f-a618-95a3671b0a0c", - "Statement": "{\"Sid\":\"xaccount\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::223456789012:root\"},\"Action\":\"lambda:GetLayerVersion\",\"Resource\":\"arn:aws:lambda:us-east-2:123456789012:layer:my-layer:1\"}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example grants permission for the account 223456789012 to use version 1 of a layer named my-layer.", - 
"id": "to-add-permissions-to-a-layer-version-1586479797163", - "title": "To add permissions to a layer version" - } - ], - "AddPermission": [ - { - "input": { - "Action": "lambda:InvokeFunction", - "FunctionName": "my-function", - "Principal": "s3.amazonaws.com", - "SourceAccount": "123456789012", - "SourceArn": "arn:aws:s3:::my-bucket-1xpuxmplzrlbh/*", - "StatementId": "s3" - }, - "output": { - "Statement": "{\"Sid\":\"s3\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"s3.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-2:123456789012:function:my-function\",\"Condition\":{\"StringEquals\":{\"AWS:SourceAccount\":\"123456789012\"},\"ArnLike\":{\"AWS:SourceArn\":\"arn:aws:s3:::my-bucket-1xpuxmplzrlbh\"}}}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds permission for Amazon S3 to invoke a Lambda function named my-function for notifications from a bucket named my-bucket-1xpuxmplzrlbh in account 123456789012.", - "id": "add-permission-1474651469455", - "title": "To grant Amazon S3 permission to invoke a function" - }, - { - "input": { - "Action": "lambda:InvokeFunction", - "FunctionName": "my-function", - "Principal": "223456789012", - "StatementId": "xaccount" - }, - "output": { - "Statement": "{\"Sid\":\"xaccount\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::223456789012:root\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-2:123456789012:function:my-function\"}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds permission for account 223456789012 invoke a Lambda function named my-function.", - "id": "add-permission-1474651469456", - "title": "To grant another account permission to invoke a function" - } - ], - "CreateAlias": [ - { - "input": { - "Description": "alias for live version of function", - "FunctionName": "my-function", - "FunctionVersion": "1", - "Name": "LIVE" - }, - "output": { - "AliasArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:LIVE", - "Description": "alias for live version of function", - "FunctionVersion": "1", - "Name": "LIVE", - "RevisionId": "873282ed-xmpl-4dc8-a069-d0c647e470c6" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an alias named LIVE that points to version 1 of the my-function Lambda function.", - "id": "to-create-an-alias-for-a-lambda-function-1586480324259", - "title": "To create an alias for a Lambda function" - } - ], - "CreateEventSourceMapping": [ - { - "input": { - "BatchSize": 5, - "EventSourceArn": "arn:aws:sqs:us-west-2:123456789012:my-queue", - "FunctionName": "my-function" - }, - "output": { - "BatchSize": 5, - "EventSourceArn": "arn:aws:sqs:us-west-2:123456789012:my-queue", - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "LastModified": 1569284520.333, - "State": "Creating", - "StateTransitionReason": "USER_INITIATED", - "UUID": "a1b2c3d4-5678-90ab-cdef-11111EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a mapping between an SQS queue and the my-function Lambda function.", - "id": "to-create-a-mapping-between-an-event-source-and-an-aws-lambda-function-1586480555467", - "title": "To create a mapping between an event source and an AWS Lambda function" - } - ], - "CreateFunction": [ - { - "input": { - "Code": { - "S3Bucket": "my-bucket-1xpuxmplzrlbh", - 
"S3Key": "function.zip" - }, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": "arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "MemorySize": 256, - "Publish": true, - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "Tags": { - "DEPARTMENT": "Assets" - }, - "Timeout": 15, - "TracingConfig": { - "Mode": "Active" - } - }, - "output": { - "CodeSha256": "YFgDgEKG3ugvF1+pX64gV6tu9qNuIYNUdgJm8nCxsm4=", - "CodeSize": 5797206, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": "arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "LastModified": "2020-04-10T19:06:32.563+0000", - "LastUpdateStatus": "Successful", - "MemorySize": 256, - "RevisionId": "b75dcd81-xmpl-48a8-a75a-93ba8b5b9727", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "State": "Active", - "Timeout": 15, - "TracingConfig": { - "Mode": "Active" - }, - "Version": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a function with a deployment package in Amazon S3 and enables X-Ray tracing and environment variable encryption.", - "id": "to-create-a-function-1586492061186", - "title": "To create a function" - } - ], - "DeleteAlias": [ - { - "input": { - "FunctionName": "my-function", - "Name": "BLUE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an alias named BLUE from a function named my-function", - "id": "to-delete-a-lambda-function-alias-1481660370804", - "title": "To delete a Lambda function alias" - } - ], - "DeleteEventSourceMapping": [ - { - "input": { - "UUID": "14e0db71-xmpl-4eb5-b481-8945cf9d10c2" - }, - "output": { - "BatchSize": 5, - "EventSourceArn": "arn:aws:sqs:us-west-2:123456789012:my-queue", - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function", - "LastModified": "2016-11-21T19:49:20.006+0000", - "State": "Enabled", - "StateTransitionReason": "USER_INITIATED", - "UUID": "14e0db71-xmpl-4eb5-b481-8945cf9d10c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an event source mapping. 
To get a mapping's UUID, use ListEventSourceMappings.", - "id": "to-delete-a-lambda-function-event-source-mapping-1481658973862", - "title": "To delete a Lambda function event source mapping" - } - ], - "DeleteFunction": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes version 1 of a Lambda function named my-function.", - "id": "to-delete-a-lambda-function-1481648553696", - "title": "To delete a version of a Lambda function" - } - ], - "DeleteFunctionConcurrency": [ - { - "input": { - "FunctionName": "my-function" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes the reserved concurrent execution limit from a function named my-function.", - "id": "to-remove-the-reserved-concurrent-execution-limit-from-a-function-1586480714680", - "title": "To remove the reserved concurrent execution limit from a function" - } - ], - "DeleteFunctionEventInvokeConfig": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "GREEN" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes the asynchronous invocation configuration for the GREEN alias of a function named my-function.", - "id": "to-delete-an-asynchronous-invocation-configuration-1586481102187", - "title": "To delete an asynchronous invocation configuration" - } - ], - "DeleteLayerVersion": [ - { - "input": { - "LayerName": "my-layer", - "VersionNumber": 2 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes version 2 of a layer named my-layer.", - "id": "to-delete-a-version-of-a-lambda-layer-1586481157547", - "title": "To delete a version of a Lambda layer" - } - ], - "DeleteProvisionedConcurrencyConfig": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "GREEN" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes the provisioned concurrency configuration for the GREEN alias of a function named my-function.", - "id": "to-delete-a-provisioned-concurrency-configuration-1586481032551", - "title": "To delete a provisioned concurrency configuration" - } - ], - "GetAccountSettings": [ - { - "input": { - }, - "output": { - "AccountLimit": { - "CodeSizeUnzipped": 262144000, - "CodeSizeZipped": 52428800, - "ConcurrentExecutions": 1000, - "TotalCodeSize": 80530636800, - "UnreservedConcurrentExecutions": 1000 - }, - "AccountUsage": { - "FunctionCount": 4, - "TotalCodeSize": 9426 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation takes no parameters and returns details about storage and concurrency quotas in the current Region.", - "id": "to-get-account-settings-1481657495274", - "title": "To get account settings" - } - ], - "GetAlias": [ - { - "input": { - "FunctionName": "my-function", - "Name": "BLUE" - }, - "output": { - "AliasArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function:BLUE", - "Description": "Production environment BLUE.", - "FunctionVersion": "3", - "Name": "BLUE", - "RevisionId": "594f41fb-xmpl-4c20-95c7-6ca5f2a92c93" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns details about an alias named BLUE for a function named my-function", - "id": "to-retrieve-a-lambda-function-alias-1481648742254", - "title": "To get a Lambda function alias" - } 
- ], - "GetEventSourceMapping": [ - { - "input": { - "UUID": "14e0db71-xmpl-4eb5-b481-8945cf9d10c2" - }, - "output": { - "BatchSize": 500, - "BisectBatchOnFunctionError": false, - "DestinationConfig": { - }, - "EventSourceArn": "arn:aws:sqs:us-east-2:123456789012:mySQSqueue", - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:myFunction", - "LastModified": "2016-11-21T19:49:20.006+0000", - "LastProcessingResult": "No records processed", - "MaximumRecordAgeInSeconds": 604800, - "MaximumRetryAttempts": 10000, - "State": "Creating", - "StateTransitionReason": "User action", - "UUID": "14e0db71-xmpl-4eb5-b481-8945cf9d10c2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns details about an event source mapping. To get a mapping's UUID, use ListEventSourceMappings.", - "id": "to-get-a-lambda-functions-event-source-mapping-1481661622799", - "title": "To get a Lambda function's event source mapping" - } - ], - "GetFunction": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "1" - }, - "output": { - "Code": { - "Location": "https://awslambda-us-west-2-tasks.s3.us-west-2.amazonaws.com/snapshots/123456789012/my-function-e7d9d1ed-xmpl-4f79-904a-4b87f2681f30?versionId=sH3TQwBOaUy...", - "RepositoryType": "S3" - }, - "Configuration": { - "CodeSha256": "YFgDgEKG3ugvF1+pX64gV6tu9qNuIYNUdgJm8nCxsm4=", - "CodeSize": 5797206, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": "arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "LastModified": "2020-04-10T19:06:32.563+0000", - "LastUpdateStatus": "Successful", - "MemorySize": 256, - "RevisionId": "b75dcd81-xmpl-48a8-a75a-93ba8b5b9727", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "State": "Active", - "Timeout": 15, - "TracingConfig": { - "Mode": "Active" - }, - "Version": "$LATEST" - }, - "Tags": { - "DEPARTMENT": "Assets" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns code and configuration details for version 1 of a function named my-function.", - "id": "to-get-a-lambda-function-1481661622799", - "title": "To get a Lambda function" - } - ], - "GetFunctionConcurrency": [ - { - "input": { - "FunctionName": "my-function" - }, - "output": { - "ReservedConcurrentExecutions": 250 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the reserved concurrency setting for a function named my-function.", - "id": "to-get-the-reserved-concurrency-setting-for-a-function-1586481279992", - "title": "To get the reserved concurrency setting for a function" - } - ], - "GetFunctionConfiguration": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "1" - }, - "output": { - "CodeSha256": "YFgDgEKG3ugvF1+pX64gV6tu9qNuIYNUdgJm8nCxsm4=", - "CodeSize": 5797206, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": 
"arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "LastModified": "2020-04-10T19:06:32.563+0000", - "LastUpdateStatus": "Successful", - "MemorySize": 256, - "RevisionId": "b75dcd81-xmpl-48a8-a75a-93ba8b5b9727", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "State": "Active", - "Timeout": 15, - "TracingConfig": { - "Mode": "Active" - }, - "Version": "$LATEST" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns and configuration details for version 1 of a function named my-function.", - "id": "to-get-a-lambda-functions-event-source-mapping-1481661622799", - "title": "To get a Lambda function's event source mapping" - } - ], - "GetFunctionEventInvokeConfig": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "BLUE" - }, - "output": { - "DestinationConfig": { - "OnFailure": { - "Destination": "arn:aws:sqs:us-east-2:123456789012:failed-invocations" - }, - "OnSuccess": { - } - }, - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:BLUE", - "LastModified": "2016-11-21T19:49:20.006+0000", - "MaximumEventAgeInSeconds": 3600, - "MaximumRetryAttempts": 0 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the asynchronous invocation configuration for the BLUE alias of a function named my-function.", - "id": "to-get-an-asynchronous-invocation-configuration-1586481338463", - "title": "To get an asynchronous invocation configuration" - } - ], - "GetLayerVersion": [ - { - "input": { - "LayerName": "my-layer", - "VersionNumber": 1 - }, - "output": { - "CompatibleRuntimes": [ - "python3.6", - "python3.7" - ], - "Content": { - "CodeSha256": "tv9jJO+rPbXUUXuRKi7CwHzKtLDkDRJLB3cC3Z/ouXo=", - "CodeSize": 169, - "Location": "https://awslambda-us-east-2-layers.s3.us-east-2.amazonaws.com/snapshots/123456789012/my-layer-4aaa2fbb-ff77-4b0a-ad92-5b78a716a96a?versionId=27iWyA73cCAYqyH..." - }, - "CreatedDate": "2018-11-14T23:03:52.894+0000", - "Description": "My Python layer", - "LayerArn": "arn:aws:lambda:us-east-2:123456789012:layer:my-layer", - "LayerVersionArn": "arn:aws:lambda:us-east-2:123456789012:layer:my-layer:1", - "LicenseInfo": "MIT", - "Version": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns information for version 1 of a layer named my-layer.", - "id": "to-get-information-about-a-lambda-layer-version-1586481457839", - "title": "To get information about a Lambda layer version" - } - ], - "GetLayerVersionByArn": [ - { - "input": { - "Arn": "arn:aws:lambda:ca-central-1:123456789012:layer:blank-python-lib:3" - }, - "output": { - "CompatibleRuntimes": [ - "python3.8" - ], - "Content": { - "CodeSha256": "6x+xmpl/M3BnQUk7gS9sGmfeFsR/npojXoA3fZUv4eU=", - "CodeSize": 9529009, - "Location": "https://awslambda-us-east-2-layers.s3.us-east-2.amazonaws.com/snapshots/123456789012/blank-python-lib-e5212378-xmpl-44ee-8398-9d8ec5113949?versionId=WbZnvf..." 
- }, - "CreatedDate": "2020-03-31T00:35:18.949+0000", - "Description": "Dependencies for the blank-python sample app.", - "LayerArn": "arn:aws:lambda:us-east-2:123456789012:layer:blank-python-lib", - "LayerVersionArn": "arn:aws:lambda:us-east-2:123456789012:layer:blank-python-lib:3", - "Version": 3 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns information about the layer version with the specified Amazon Resource Name (ARN).", - "id": "to-get-information-about-a-lambda-layer-version-1586481457839", - "title": "To get information about a Lambda layer version" - } - ], - "GetPolicy": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "1" - }, - "output": { - "Policy": "{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"xaccount\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::123456789012:root\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-2:123456789012:function:my-function:1\"}]}", - "RevisionId": "4843f2f6-7c59-4fda-b484-afd0bc0e22b8" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the resource-based policy for version 1 of a Lambda function named my-function.", - "id": "to-retrieve-a-lambda-function-policy-1481649319053", - "title": "To retrieve a Lambda function policy" - } - ], - "GetProvisionedConcurrencyConfig": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "BLUE" - }, - "output": { - "AllocatedProvisionedConcurrentExecutions": 100, - "AvailableProvisionedConcurrentExecutions": 100, - "LastModified": "2019-12-31T20:28:49+0000", - "RequestedProvisionedConcurrentExecutions": 100, - "Status": "READY" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns details for the provisioned concurrency configuration for the BLUE alias of the specified function.", - "id": "to-get-a-provisioned-concurrency-configuration-1586490192690", - "title": "To get a provisioned concurrency configuration" - }, - { - "input": { - "FunctionName": "my-function", - "Qualifier": "BLUE" - }, - "output": { - "AllocatedProvisionedConcurrentExecutions": 100, - "AvailableProvisionedConcurrentExecutions": 100, - "LastModified": "2019-12-31T20:28:49+0000", - "RequestedProvisionedConcurrentExecutions": 100, - "Status": "READY" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example displays details for the provisioned concurrency configuration for the BLUE alias of the specified function.", - "id": "to-view-a-provisioned-concurrency-configuration-1586490192690", - "title": "To view a provisioned concurrency configuration" - } - ], - "Invoke": [ - { - "input": { - "FunctionName": "my-function", - "Payload": "{}", - "Qualifier": "1" - }, - "output": { - "Payload": "200 SUCCESS", - "StatusCode": 200 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example invokes version 1 of a function named my-function with an empty event payload.", - "id": "to-invoke-a-lambda-function-1481659683915", - "title": "To invoke a Lambda function" - }, - { - "input": { - "FunctionName": "my-function", - "InvocationType": "Event", - "Payload": "{}", - "Qualifier": "1" - }, - "output": { - "Payload": "", - "StatusCode": 202 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example invokes version 1 of a function named my-function 
asynchronously.", - "id": "to-invoke-a-lambda-function-async-1481659683915", - "title": "To invoke a Lambda function asynchronously" - } - ], - "InvokeAsync": [ - { - "input": { - "FunctionName": "my-function", - "InvokeArgs": "{}" - }, - "output": { - "Status": 202 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example invokes a Lambda function asynchronously", - "id": "to-invoke-a-lambda-function-asynchronously-1481649694923", - "title": "To invoke a Lambda function asynchronously" - } - ], - "ListAliases": [ - { - "input": { - "FunctionName": "my-function" - }, - "output": { - "Aliases": [ - { - "AliasArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function:BETA", - "Description": "Production environment BLUE.", - "FunctionVersion": "2", - "Name": "BLUE", - "RevisionId": "a410117f-xmpl-494e-8035-7e204bb7933b", - "RoutingConfig": { - "AdditionalVersionWeights": { - "1": 0.7 - } - } - }, - { - "AliasArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function:LIVE", - "Description": "Production environment GREEN.", - "FunctionVersion": "1", - "Name": "GREEN", - "RevisionId": "21d40116-xmpl-40ba-9360-3ea284da1bb5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a list of aliases for a function named my-function.", - "id": "to-list-a-functions-aliases-1481650199732", - "title": "To list a function's aliases" - } - ], - "ListEventSourceMappings": [ - { - "input": { - "FunctionName": "my-function" - }, - "output": { - "EventSourceMappings": [ - { - "BatchSize": 5, - "EventSourceArn": "arn:aws:sqs:us-west-2:123456789012:mySQSqueue", - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "LastModified": 1569284520.333, - "State": "Enabled", - "StateTransitionReason": "USER_INITIATED", - "UUID": "a1b2c3d4-5678-90ab-cdef-11111EXAMPLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a list of the event source mappings for a function named my-function.", - "id": "to-list-the-event-source-mappings-for-a-function-1586490285906", - "title": "To list the event source mappings for a function" - } - ], - "ListFunctionEventInvokeConfigs": [ - { - "input": { - "FunctionName": "my-function" - }, - "output": { - "FunctionEventInvokeConfigs": [ - { - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:GREEN", - "LastModified": 1577824406.719, - "MaximumEventAgeInSeconds": 1800, - "MaximumRetryAttempts": 2 - }, - { - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:BLUE", - "LastModified": 1577824396.653, - "MaximumEventAgeInSeconds": 3600, - "MaximumRetryAttempts": 0 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a list of asynchronous invocation configurations for a function named my-function.", - "id": "to-view-a-list-of-asynchronous-invocation-configurations-1586490355611", - "title": "To view a list of asynchronous invocation configurations" - } - ], - "ListFunctions": [ - { - "input": { - }, - "output": { - "Functions": [ - { - "CodeSha256": "dBG9m8SGdmlEjw/JYXlhhvCrAv5TxvXsbL/RMr0fT/I=", - "CodeSize": 294, - "Description": "", - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:helloworld", - "FunctionName": "helloworld", - "Handler": "helloworld.handler", - "LastModified": "2019-09-23T18:32:33.857+0000", - "MemorySize": 128, - "RevisionId": 
"1718e831-badf-4253-9518-d0644210af7b", - "Role": "arn:aws:iam::123456789012:role/service-role/MyTestFunction-role-zgur6bf4", - "Runtime": "nodejs10.x", - "Timeout": 3, - "TracingConfig": { - "Mode": "PassThrough" - }, - "Version": "$LATEST" - }, - { - "CodeSha256": "sU0cJ2/hOZevwV/lTxCuQqK3gDZP3i8gUoqUUVRmY6E=", - "CodeSize": 266, - "Description": "", - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "LastModified": "2019-10-01T16:47:28.490+0000", - "MemorySize": 256, - "RevisionId": "93017fc9-59cb-41dc-901b-4845ce4bf668", - "Role": "arn:aws:iam::123456789012:role/service-role/helloWorldPython-role-uy3l9qyq", - "Runtime": "nodejs10.x", - "Timeout": 3, - "TracingConfig": { - "Mode": "PassThrough" - }, - "Version": "$LATEST", - "VpcConfig": { - "SecurityGroupIds": [ - - ], - "SubnetIds": [ - - ], - "VpcId": "" - } - } - ], - "NextMarker": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation returns a list of Lambda functions.", - "id": "to-get-a-list-of-lambda-functions-1481650507425", - "title": "To get a list of Lambda functions" - } - ], - "ListLayerVersions": [ - { - "input": { - "LayerName": "blank-java-lib" - }, - "output": { - "LayerVersions": [ - { - "CompatibleRuntimes": [ - "java8" - ], - "CreatedDate": "2020-03-18T23:38:42.284+0000", - "Description": "Dependencies for the blank-java sample app.", - "LayerVersionArn": "arn:aws:lambda:us-east-2:123456789012:layer:blank-java-lib:7", - "Version": 7 - }, - { - "CompatibleRuntimes": [ - "java8" - ], - "CreatedDate": "2020-03-17T07:24:21.960+0000", - "Description": "Dependencies for the blank-java sample app.", - "LayerVersionArn": "arn:aws:lambda:us-east-2:123456789012:layer:blank-java-lib:6", - "Version": 6 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example displays information about the versions for the layer named blank-java-lib", - "id": "to-list-versions-of-a-layer-1586490857297", - "title": "To list versions of a layer" - } - ], - "ListLayers": [ - { - "input": { - "CompatibleRuntime": "python3.7" - }, - "output": { - "Layers": [ - { - "LatestMatchingVersion": { - "CompatibleRuntimes": [ - "python3.6", - "python3.7" - ], - "CreatedDate": "2018-11-15T00:37:46.592+0000", - "Description": "My layer", - "LayerVersionArn": "arn:aws:lambda:us-east-2:123456789012:layer:my-layer:2", - "Version": 2 - }, - "LayerArn": "arn:aws:lambda:us-east-2:123456789012:layer:my-layer", - "LayerName": "my-layer" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns information about layers that are compatible with the Python 3.7 runtime.", - "id": "to-list-the-layers-that-are-compatible-with-your-functions-runtime-1586490857297", - "title": "To list the layers that are compatible with your function's runtime" - } - ], - "ListProvisionedConcurrencyConfigs": [ - { - "input": { - "FunctionName": "my-function" - }, - "output": { - "ProvisionedConcurrencyConfigs": [ - { - "AllocatedProvisionedConcurrentExecutions": 100, - "AvailableProvisionedConcurrentExecutions": 100, - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:GREEN", - "LastModified": "2019-12-31T20:29:00+0000", - "RequestedProvisionedConcurrentExecutions": 100, - "Status": "READY" - }, - { - "AllocatedProvisionedConcurrentExecutions": 100, - "AvailableProvisionedConcurrentExecutions": 100, - "FunctionArn": 
"arn:aws:lambda:us-east-2:123456789012:function:my-function:BLUE", - "LastModified": "2019-12-31T20:28:49+0000", - "RequestedProvisionedConcurrentExecutions": 100, - "Status": "READY" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a list of provisioned concurrency configurations for a function named my-function.", - "id": "to-get-a-list-of-provisioned-concurrency-configurations-1586491032592", - "title": "To get a list of provisioned concurrency configurations" - } - ], - "ListTags": [ - { - "input": { - "Resource": "arn:aws:lambda:us-west-2:123456789012:function:my-function" - }, - "output": { - "Tags": { - "Category": "Web Tools", - "Department": "Sales" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example displays the tags attached to the my-function Lambda function.", - "id": "to-retrieve-the-list-of-tags-for-a-lambda-function-1586491111498", - "title": "To retrieve the list of tags for a Lambda function" - } - ], - "ListVersionsByFunction": [ - { - "input": { - "FunctionName": "my-function" - }, - "output": { - "Versions": [ - { - "CodeSha256": "YFgDgEKG3ugvF1+pX64gV6tu9qNuIYNUdgJm8nCxsm4=", - "CodeSize": 5797206, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": "arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "LastModified": "2020-04-10T19:06:32.563+0000", - "MemorySize": 256, - "RevisionId": "850ca006-2d98-4ff4-86db-8766e9d32fe9", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "Timeout": 15, - "TracingConfig": { - "Mode": "Active" - }, - "Version": "$LATEST" - }, - { - "CodeSha256": "YFgDgEKG3ugvF1+pX64gV6tu9qNuIYNUdgJm8nCxsm4=", - "CodeSize": 5797206, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": "arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "LastModified": "2020-04-10T19:06:32.563+0000", - "MemorySize": 256, - "RevisionId": "b75dcd81-xmpl-48a8-a75a-93ba8b5b9727", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "Timeout": 5, - "TracingConfig": { - "Mode": "Active" - }, - "Version": "1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a list of versions of a function named my-function", - "id": "to-list-versions-1481650603750", - "title": "To list versions of a function" - } - ], - "PublishLayerVersion": [ - { - "input": { - "CompatibleRuntimes": [ - "python3.6", - "python3.7" - ], - "Content": { - "S3Bucket": "lambda-layers-us-west-2-123456789012", - "S3Key": "layer.zip" - }, - "Description": "My Python layer", - "LayerName": "my-layer", - "LicenseInfo": "MIT" - }, - "output": { - "CompatibleRuntimes": [ - "python3.6", - "python3.7" - ], - "Content": { - "CodeSha256": "tv9jJO+rPbXUUXuRKi7CwHzKtLDkDRJLB3cC3Z/ouXo=", - "CodeSize": 169, - "Location": 
"https://awslambda-us-west-2-layers.s3.us-west-2.amazonaws.com/snapshots/123456789012/my-layer-4aaa2fbb-ff77-4b0a-ad92-5b78a716a96a?versionId=27iWyA73cCAYqyH..." - }, - "CreatedDate": "2018-11-14T23:03:52.894+0000", - "Description": "My Python layer", - "LayerArn": "arn:aws:lambda:us-west-2:123456789012:layer:my-layer", - "LayerVersionArn": "arn:aws:lambda:us-west-2:123456789012:layer:my-layer:1", - "LicenseInfo": "MIT", - "Version": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a new Python library layer version. The command retrieves the layer content a file named layer.zip in the specified S3 bucket.", - "id": "to-create-a-lambda-layer-version-1586491213595", - "title": "To create a Lambda layer version" - } - ], - "PublishVersion": [ - { - "input": { - "CodeSha256": "", - "Description": "", - "FunctionName": "myFunction" - }, - "output": { - "CodeSha256": "YFgDgEKG3ugvF1+pX64gV6tu9qNuIYNUdgJm8nCxsm4=", - "CodeSize": 5797206, - "Description": "Process image objects from Amazon S3.", - "Environment": { - "Variables": { - "BUCKET": "my-bucket-1xpuxmplzrlbh", - "PREFIX": "inbound" - } - }, - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "KMSKeyArn": "arn:aws:kms:us-west-2:123456789012:key/b0844d6c-xmpl-4463-97a4-d49f50839966", - "LastModified": "2020-04-10T19:06:32.563+0000", - "LastUpdateStatus": "Successful", - "MemorySize": 256, - "RevisionId": "b75dcd81-xmpl-48a8-a75a-93ba8b5b9727", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "State": "Active", - "Timeout": 5, - "TracingConfig": { - "Mode": "Active" - }, - "Version": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation publishes a version of a Lambda function", - "id": "to-publish-a-version-of-a-lambda-function-1481650704986", - "title": "To publish a version of a Lambda function" - } - ], - "PutFunctionConcurrency": [ - { - "input": { - "FunctionName": "my-function", - "ReservedConcurrentExecutions": 100 - }, - "output": { - "ReservedConcurrentExecutions": 100 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example configures 100 reserved concurrent executions for the my-function function.", - "id": "to-configure-a-reserved-concurrency-limit-for-a-function-1586491405956", - "title": "To configure a reserved concurrency limit for a function" - } - ], - "PutFunctionEventInvokeConfig": [ - { - "input": { - "FunctionName": "my-function", - "MaximumEventAgeInSeconds": 3600, - "MaximumRetryAttempts": 0 - }, - "output": { - "DestinationConfig": { - "OnFailure": { - }, - "OnSuccess": { - } - }, - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:$LATEST", - "LastModified": "2016-11-21T19:49:20.006+0000", - "MaximumEventAgeInSeconds": 3600, - "MaximumRetryAttempts": 0 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets a maximum event age of one hour and disables retries for the specified function.", - "id": "to-configure-error-handling-for-asynchronous-invocation-1586491524021", - "title": "To configure error handling for asynchronous invocation" - } - ], - "PutProvisionedConcurrencyConfig": [ - { - "input": { - "FunctionName": "my-function", - "ProvisionedConcurrentExecutions": 100, - "Qualifier": "BLUE" - }, - "output": { - "AllocatedProvisionedConcurrentExecutions": 0, - 
"LastModified": "2019-11-21T19:32:12+0000", - "RequestedProvisionedConcurrentExecutions": 100, - "Status": "IN_PROGRESS" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example allocates 100 provisioned concurrency for the BLUE alias of the specified function.", - "id": "to-allocate-provisioned-concurrency-1586491651377", - "title": "To allocate provisioned concurrency" - } - ], - "RemoveLayerVersionPermission": [ - { - "input": { - "LayerName": "my-layer", - "StatementId": "xaccount", - "VersionNumber": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes permission for an account to configure a layer version.", - "id": "to-delete-layer-version-permissions-1586491829416", - "title": "To delete layer-version permissions" - } - ], - "RemovePermission": [ - { - "input": { - "FunctionName": "my-function", - "Qualifier": "PROD", - "StatementId": "xaccount" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example removes a permissions statement named xaccount from the PROD alias of a function named my-function.", - "id": "to-remove-a-lambda-functions-permissions-1481661337021", - "title": "To remove a Lambda function's permissions" - } - ], - "TagResource": [ - { - "input": { - "Resource": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "Tags": { - "DEPARTMENT": "Department A" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds a tag with the key name DEPARTMENT and a value of 'Department A' to the specified Lambda function.", - "id": "to-add-tags-to-an-existing-lambda-function-1586491890446", - "title": "To add tags to an existing Lambda function" - } - ], - "UntagResource": [ - { - "input": { - "Resource": "arn:aws:lambda:us-west-2:123456789012:function:my-function", - "TagKeys": [ - "DEPARTMENT" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example removes the tag with the key name DEPARTMENT tag from the my-function Lambda function.", - "id": "to-remove-tags-from-an-existing-lambda-function-1586491956425", - "title": "To remove tags from an existing Lambda function" - } - ], - "UpdateAlias": [ - { - "input": { - "FunctionName": "my-function", - "FunctionVersion": "2", - "Name": "BLUE", - "RoutingConfig": { - "AdditionalVersionWeights": { - "1": 0.7 - } - } - }, - "output": { - "AliasArn": "arn:aws:lambda:us-west-2:123456789012:function:my-function:BLUE", - "Description": "Production environment BLUE.", - "FunctionVersion": "2", - "Name": "BLUE", - "RevisionId": "594f41fb-xmpl-4c20-95c7-6ca5f2a92c93", - "RoutingConfig": { - "AdditionalVersionWeights": { - "1": 0.7 - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example updates the alias named BLUE to send 30% of traffic to version 2 and 70% to version 1.", - "id": "to-update-a-function-alias-1481650817950", - "title": "To update a function alias" - } - ], - "UpdateEventSourceMapping": [ - { - "input": { - "BatchSize": 123, - "Enabled": true, - "FunctionName": "myFunction", - "UUID": "1234xCy789012" - }, - "output": { - "BatchSize": 123, - "EventSourceArn": "arn:aws:s3:::examplebucket/*", - "FunctionArn": "arn:aws:lambda:us-west-2:123456789012:function:myFunction", - "LastModified": "2016-11-21T19:49:20.006+0000", - "LastProcessingResult": "", - "State": "", - "StateTransitionReason": "", - "UUID": "1234xCy789012" - 
}, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation updates a Lambda function event source mapping", - "id": "to-update-a-lambda-function-event-source-mapping-1481650907413", - "title": "To update a Lambda function event source mapping" - } - ], - "UpdateFunctionCode": [ - { - "input": { - "FunctionName": "my-function", - "S3Bucket": "my-bucket-1xpuxmplzrlbh", - "S3Key": "function.zip" - }, - "output": { - "CodeSha256": "PFn4S+er27qk+UuZSTKEQfNKG/XNn7QJs90mJgq6oH8=", - "CodeSize": 308, - "Description": "", - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "LastModified": "2019-08-14T22:26:11.234+0000", - "MemorySize": 128, - "RevisionId": "873282ed-xmpl-4dc8-a069-d0c647e470c6", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "Timeout": 3, - "TracingConfig": { - "Mode": "PassThrough" - }, - "Version": "$LATEST" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example replaces the code of the unpublished ($LATEST) version of a function named my-function with the contents of the specified zip file in Amazon S3.", - "id": "to-update-a-lambda-functions-code-1481650992672", - "title": "To update a Lambda function's code" - } - ], - "UpdateFunctionConfiguration": [ - { - "input": { - "FunctionName": "my-function", - "MemorySize": 256 - }, - "output": { - "CodeSha256": "PFn4S+er27qk+UuZSTKEQfNKG/XNn7QJs90mJgq6oH8=", - "CodeSize": 308, - "Description": "", - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function", - "FunctionName": "my-function", - "Handler": "index.handler", - "LastModified": "2019-08-14T22:26:11.234+0000", - "MemorySize": 256, - "RevisionId": "873282ed-xmpl-4dc8-a069-d0c647e470c6", - "Role": "arn:aws:iam::123456789012:role/lambda-role", - "Runtime": "nodejs12.x", - "Timeout": 3, - "TracingConfig": { - "Mode": "PassThrough" - }, - "Version": "$LATEST" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example modifies the memory size to be 256 MB for the unpublished ($LATEST) version of a function named my-function.", - "id": "to-update-a-lambda-functions-configuration-1481651096447", - "title": "To update a Lambda function's configuration" - } - ], - "UpdateFunctionEventInvokeConfig": [ - { - "input": { - "DestinationConfig": { - "OnFailure": { - "Destination": "arn:aws:sqs:us-east-2:123456789012:destination" - } - }, - "FunctionName": "my-function" - }, - "output": { - "DestinationConfig": { - "OnFailure": { - "Destination": "arn:aws:sqs:us-east-2:123456789012:destination" - }, - "OnSuccess": { - } - }, - "FunctionArn": "arn:aws:lambda:us-east-2:123456789012:function:my-function:$LATEST", - "LastModified": 1573687896.493, - "MaximumEventAgeInSeconds": 3600, - "MaximumRetryAttempts": 0 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds an on-failure destination to the existing asynchronous invocation configuration for a function named my-function.", - "id": "to-update-an-asynchronous-invocation-configuration-1586492061186", - "title": "To update an asynchronous invocation configuration" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/paginators-1.json deleted file mode 100644 index a8a19ab..0000000 --- 
a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "ListEventSourceMappings": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "EventSourceMappings" - }, - "ListFunctions": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "Functions" - }, - "ListAliases": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "Aliases" - }, - "ListLayerVersions": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "LayerVersions" - }, - "ListLayers": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "Layers" - }, - "ListVersionsByFunction": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "Versions" - }, - "ListFunctionEventInvokeConfigs": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "FunctionEventInvokeConfigs" - }, - "ListProvisionedConcurrencyConfigs": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "ProvisionedConcurrencyConfigs" - }, - "ListCodeSigningConfigs": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "CodeSigningConfigs" - }, - "ListFunctionsByCodeSigningConfig": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "FunctionArns" - }, - "ListFunctionUrlConfigs": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextMarker", - "result_key": "FunctionUrlConfigs" - }, - "ListCapacityProviders": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "CapacityProviders" - }, - "ListFunctionVersionsByCapacityProvider": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "FunctionVersions" - }, - "GetDurableExecutionHistory": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "Events" - }, - "GetDurableExecutionState": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "Operations" - }, - "ListDurableExecutionsByFunction": { - "input_token": "Marker", - "output_token": "NextMarker", - "limit_key": "MaxItems", - "result_key": "DurableExecutions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/paginators-1.sdk-extras.json deleted file mode 100644 index 98cc059..0000000 --- a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListFunctionVersionsByCapacityProvider": { - "non_aggregate_keys": [ - "CapacityProviderArn" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/service-2.json.gz deleted file mode 100644 index 295794b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/service-2.json.gz and /dev/null differ diff --git 
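The paginator definitions deleted above (Marker/NextMarker tokens, the MaxItems limit, result keys such as Functions) are what drive botocore's get_paginator machinery. A typical use, sketched here purely for illustration, looks like:

    import boto3

    lambda_client = boto3.client("lambda")

    # ListFunctions pagination: botocore threads Marker/NextMarker automatically
    # and exposes each page's results under the configured "Functions" key.
    paginator = lambda_client.get_paginator("list_functions")
    for page in paginator.paginate(PaginationConfig={"MaxItems": 50}):
        for fn in page["Functions"]:
            print(fn["FunctionName"])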
a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/waiters-2.json b/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/waiters-2.json deleted file mode 100644 index 92defa1..0000000 --- a/venv/Lib/site-packages/botocore/data/lambda/2015-03-31/waiters-2.json +++ /dev/null @@ -1,129 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "FunctionActive" : { - "description" : "Waits for the function's State to be Active. This waiter uses GetFunctionConfiguration API. This should be used after new function creation.", - "delay" : 5, - "maxAttempts" : 60, - "operation" : "GetFunctionConfiguration", - "acceptors" : [ { - "matcher" : "path", - "argument" : "State", - "state" : "success", - "expected" : "Active" - }, { - "matcher" : "path", - "argument" : "State", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "path", - "argument" : "State", - "state" : "retry", - "expected" : "Pending" - } ] - }, - "FunctionActiveV2" : { - "description" : "Waits for the function's State to be Active. This waiter uses GetFunction API. This should be used after new function creation.", - "delay" : 1, - "maxAttempts" : 300, - "operation" : "GetFunction", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Configuration.State", - "state" : "success", - "expected" : "Active" - }, { - "matcher" : "path", - "argument" : "Configuration.State", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "path", - "argument" : "Configuration.State", - "state" : "retry", - "expected" : "Pending" - } ] - }, - "FunctionExists" : { - "delay" : 1, - "maxAttempts" : 20, - "operation" : "GetFunction", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : false - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ResourceNotFoundException" - } ] - }, - "FunctionUpdated" : { - "description" : "Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunctionConfiguration API. This should be used after function updates.", - "delay" : 5, - "maxAttempts" : 60, - "operation" : "GetFunctionConfiguration", - "acceptors" : [ { - "matcher" : "path", - "argument" : "LastUpdateStatus", - "state" : "success", - "expected" : "Successful" - }, { - "matcher" : "path", - "argument" : "LastUpdateStatus", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "path", - "argument" : "LastUpdateStatus", - "state" : "retry", - "expected" : "InProgress" - } ] - }, - "FunctionUpdatedV2" : { - "description" : "Waits for the function's LastUpdateStatus to be Successful. This waiter uses GetFunction API. This should be used after function updates.", - "delay" : 1, - "maxAttempts" : 300, - "operation" : "GetFunction", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Configuration.LastUpdateStatus", - "state" : "success", - "expected" : "Successful" - }, { - "matcher" : "path", - "argument" : "Configuration.LastUpdateStatus", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "path", - "argument" : "Configuration.LastUpdateStatus", - "state" : "retry", - "expected" : "InProgress" - } ] - }, - "PublishedVersionActive" : { - "description" : "Waits for the published version's State to be Active. This waiter uses GetFunctionConfiguration API. 
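The waiter definitions above (FunctionActive, FunctionUpdatedV2, and so on) describe polling loops over GetFunction/GetFunctionConfiguration. As a rough usage sketch, not code from this project, a caller would block on one like this:

    import boto3

    lambda_client = boto3.client("lambda")

    # FunctionUpdatedV2 polls GetFunction until Configuration.LastUpdateStatus
    # reaches Successful (success), Failed (failure), or attempts are exhausted.
    lambda_client.update_function_configuration(FunctionName="my-function", MemorySize=256)
    lambda_client.get_waiter("function_updated_v2").wait(FunctionName="my-function")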
This should be used after new version is published.", - "delay" : 5, - "maxAttempts" : 312, - "operation" : "GetFunctionConfiguration", - "acceptors" : [ { - "matcher" : "path", - "argument" : "State", - "state" : "success", - "expected" : "Active" - }, { - "matcher" : "path", - "argument" : "State", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "path", - "argument" : "State", - "state" : "retry", - "expected" : "Pending" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index b15b266..0000000 Binary files a/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/paginators-1.json deleted file mode 100644 index 7f61095..0000000 --- a/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListDeploymentEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "deploymentEvents" - }, - "ListDeployments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "deployments" - }, - "ListWorkloadDeploymentPatterns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workloadDeploymentPatterns" - }, - "ListWorkloads": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workloads" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/service-2.json.gz deleted file mode 100644 index ede913d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/launch-wizard/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index f98305a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/examples-1.json b/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/examples-1.json deleted file mode 100644 index 0982d97..0000000 --- a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/examples-1.json +++ /dev/null @@ -1,758 +0,0 @@ -{ - "version": "1.0", - "examples": { - "GetBot": [ - { - "input": { - "name": "DocOrderPizza", - "versionOrAlias": "$LATEST" - }, - "output": { - "version": "$LATEST", - "name": "DocOrderPizzaBot", - "abortStatement": { - "messages": [ - { - "content": "I don't understand. Can you try again?", - "contentType": "PlainText" - }, - { - "content": "I'm sorry, I don't understand.", - "contentType": "PlainText" - } - ] - }, - "checksum": "20172ee3-fa06-49b2-bbc5-667c090303e9", - "childDirected": true, - "clarificationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "I'm sorry, I didn't hear that. 
Can you repeate what you just said?", - "contentType": "PlainText" - }, - { - "content": "Can you say that again?", - "contentType": "PlainText" - } - ] - }, - "createdDate": 1494360160.133, - "description": "Orders a pizza from a local pizzeria.", - "idleSessionTTLInSeconds": 300, - "intents": [ - { - "intentName": "DocOrderPizza", - "intentVersion": "$LATEST" - } - ], - "lastUpdatedDate": 1494360160.133, - "locale": "en-US", - "status": "NOT_BUILT" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to get configuration information for a bot.", - "id": "to-get-information-about-a-bot-1494431724188", - "title": "To get information about a bot" - } - ], - "GetBots": [ - { - "input": { - "maxResults": 5, - "nextToken": "" - }, - "output": { - "bots": [ - { - "version": "$LATEST", - "name": "DocOrderPizzaBot", - "createdDate": 1494360160.133, - "description": "Orders a pizza from a local pizzeria.", - "lastUpdatedDate": 1494360160.133, - "status": "NOT_BUILT" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to get a list of all of the bots in your account.", - "id": "to-get-a-list-of-bots-1494432220036", - "title": "To get a list of bots" - } - ], - "GetIntent": [ - { - "input": { - "version": "$LATEST", - "name": "DocOrderPizza" - }, - "output": { - "version": "$LATEST", - "name": "DocOrderPizza", - "checksum": "ca9bc13d-afc8-4706-bbaf-091f7a5935d6", - "conclusionStatement": { - "messages": [ - { - "content": "All right, I ordered you a {Crust} crust {Type} pizza with {Sauce} sauce.", - "contentType": "PlainText" - }, - { - "content": "OK, your {Crust} crust {Type} pizza with {Sauce} sauce is on the way.", - "contentType": "PlainText" - } - ], - "responseCard": "foo" - }, - "confirmationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "Should I order your {Crust} crust {Type} pizza with {Sauce} sauce?", - "contentType": "PlainText" - } - ] - }, - "createdDate": 1494359783.453, - "description": "Order a pizza from a local pizzeria.", - "fulfillmentActivity": { - "type": "ReturnIntent" - }, - "lastUpdatedDate": 1494359783.453, - "rejectionStatement": { - "messages": [ - { - "content": "Ok, I'll cancel your order.", - "contentType": "PlainText" - }, - { - "content": "I cancelled your order.", - "contentType": "PlainText" - } - ] - }, - "sampleUtterances": [ - "Order me a pizza.", - "Order me a {Type} pizza.", - "I want a {Crust} crust {Type} pizza", - "I want a {Crust} crust {Type} pizza with {Sauce} sauce." - ], - "slots": [ - { - "name": "Type", - "description": "The type of pizza to order.", - "priority": 1, - "sampleUtterances": [ - "Get me a {Type} pizza.", - "A {Type} pizza please.", - "I'd like a {Type} pizza." - ], - "slotConstraint": "Required", - "slotType": "DocPizzaType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "What type of pizza would you like?", - "contentType": "PlainText" - }, - { - "content": "Vegie or cheese pizza?", - "contentType": "PlainText" - }, - { - "content": "I can get you a vegie or a cheese pizza.", - "contentType": "PlainText" - } - ] - } - }, - { - "name": "Crust", - "description": "The type of pizza crust to order.", - "priority": 2, - "sampleUtterances": [ - "Make it a {Crust} crust.", - "I'd like a {Crust} crust." 
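The GetIntent example above (name DocOrderPizza, version $LATEST) maps onto the lex-models client in boto3; a minimal sketch, reusing only names from the example data, would be:

    import boto3

    lex_models = boto3.client("lex-models")

    # Read the $LATEST version of the DocOrderPizza intent, including its slots.
    intent = lex_models.get_intent(name="DocOrderPizza", version="$LATEST")
    print(intent["name"], [slot["name"] for slot in intent["slots"]])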
- ], - "slotConstraint": "Required", - "slotType": "DocPizzaCrustType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "What type of crust would you like?", - "contentType": "PlainText" - }, - { - "content": "Thick or thin crust?", - "contentType": "PlainText" - } - ] - } - }, - { - "name": "Sauce", - "description": "The type of sauce to use on the pizza.", - "priority": 3, - "sampleUtterances": [ - "Make it {Sauce} sauce.", - "I'd like {Sauce} sauce." - ], - "slotConstraint": "Required", - "slotType": "DocPizzaSauceType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "White or red sauce?", - "contentType": "PlainText" - }, - { - "content": "Garlic or tomato sauce?", - "contentType": "PlainText" - } - ] - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to get information about an intent.", - "id": "to-get-a-information-about-an-intent-1494432574147", - "title": "To get a information about an intent" - } - ], - "GetIntents": [ - { - "input": { - "maxResults": 10, - "nextToken": "" - }, - "output": { - "intents": [ - { - "version": "$LATEST", - "name": "DocOrderPizza", - "createdDate": 1494359783.453, - "description": "Order a pizza from a local pizzeria.", - "lastUpdatedDate": 1494359783.453 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to get a list of all of the intents in your account.", - "id": "to-get-a-list-of-intents-1494432416363", - "title": "To get a list of intents" - } - ], - "GetSlotType": [ - { - "input": { - "version": "$LATEST", - "name": "DocPizzaCrustType" - }, - "output": { - "version": "$LATEST", - "name": "DocPizzaCrustType", - "checksum": "210b3d5a-90a3-4b22-ac7e-f50c2c71095f", - "createdDate": 1494359274.403, - "description": "Available crust types", - "enumerationValues": [ - { - "value": "thick" - }, - { - "value": "thin" - } - ], - "lastUpdatedDate": 1494359274.403 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to get information about a slot type.", - "id": "to-get-information-about-a-slot-type-1494432961004", - "title": "To get information about a slot type" - } - ], - "GetSlotTypes": [ - { - "input": { - "maxResults": 10, - "nextToken": "" - }, - "output": { - "slotTypes": [ - { - "version": "$LATEST", - "name": "DocPizzaCrustType", - "createdDate": 1494359274.403, - "description": "Available crust types", - "lastUpdatedDate": 1494359274.403 - }, - { - "version": "$LATEST", - "name": "DocPizzaSauceType", - "createdDate": 1494356442.23, - "description": "Available pizza sauces", - "lastUpdatedDate": 1494356442.23 - }, - { - "version": "$LATEST", - "name": "DocPizzaType", - "createdDate": 1494359198.656, - "description": "Available pizzas", - "lastUpdatedDate": 1494359198.656 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to get a list of all of the slot types in your account.", - "id": "to-get-a-list-of-slot-types-1494432757458", - "title": "To get a list of slot types" - } - ], - "PutBot": [ - { - "input": { - "name": "DocOrderPizzaBot", - "abortStatement": { - "messages": [ - { - "content": "I don't understand. 
Can you try again?", - "contentType": "PlainText" - }, - { - "content": "I'm sorry, I don't understand.", - "contentType": "PlainText" - } - ] - }, - "childDirected": true, - "clarificationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "I'm sorry, I didn't hear that. Can you repeat what you just said?", - "contentType": "PlainText" - }, - { - "content": "Can you say that again?", - "contentType": "PlainText" - } - ] - }, - "description": "Orders a pizza from a local pizzeria.", - "idleSessionTTLInSeconds": 300, - "intents": [ - { - "intentName": "DocOrderPizza", - "intentVersion": "$LATEST" - } - ], - "locale": "en-US", - "processBehavior": "SAVE" - }, - "output": { - "version": "$LATEST", - "name": "DocOrderPizzaBot", - "abortStatement": { - "messages": [ - { - "content": "I don't understand. Can you try again?", - "contentType": "PlainText" - }, - { - "content": "I'm sorry, I don't understand.", - "contentType": "PlainText" - } - ] - }, - "checksum": "20172ee3-fa06-49b2-bbc5-667c090303e9", - "childDirected": true, - "clarificationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "I'm sorry, I didn't hear that. Can you repeate what you just said?", - "contentType": "PlainText" - }, - { - "content": "Can you say that again?", - "contentType": "PlainText" - } - ] - }, - "createdDate": 1494360160.133, - "description": "Orders a pizza from a local pizzeria.", - "idleSessionTTLInSeconds": 300, - "intents": [ - { - "intentName": "DocOrderPizza", - "intentVersion": "$LATEST" - } - ], - "lastUpdatedDate": 1494360160.133, - "locale": "en-US", - "status": "NOT_BUILT" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to create a bot for ordering pizzas.", - "id": "to-create-a-bot-1494360003886", - "title": "To create a bot" - } - ], - "PutIntent": [ - { - "input": { - "name": "DocOrderPizza", - "conclusionStatement": { - "messages": [ - { - "content": "All right, I ordered you a {Crust} crust {Type} pizza with {Sauce} sauce.", - "contentType": "PlainText" - }, - { - "content": "OK, your {Crust} crust {Type} pizza with {Sauce} sauce is on the way.", - "contentType": "PlainText" - } - ], - "responseCard": "foo" - }, - "confirmationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "Should I order your {Crust} crust {Type} pizza with {Sauce} sauce?", - "contentType": "PlainText" - } - ] - }, - "description": "Order a pizza from a local pizzeria.", - "fulfillmentActivity": { - "type": "ReturnIntent" - }, - "rejectionStatement": { - "messages": [ - { - "content": "Ok, I'll cancel your order.", - "contentType": "PlainText" - }, - { - "content": "I cancelled your order.", - "contentType": "PlainText" - } - ] - }, - "sampleUtterances": [ - "Order me a pizza.", - "Order me a {Type} pizza.", - "I want a {Crust} crust {Type} pizza", - "I want a {Crust} crust {Type} pizza with {Sauce} sauce." - ], - "slots": [ - { - "name": "Type", - "description": "The type of pizza to order.", - "priority": 1, - "sampleUtterances": [ - "Get me a {Type} pizza.", - "A {Type} pizza please.", - "I'd like a {Type} pizza." 
- ], - "slotConstraint": "Required", - "slotType": "DocPizzaType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "What type of pizza would you like?", - "contentType": "PlainText" - }, - { - "content": "Vegie or cheese pizza?", - "contentType": "PlainText" - }, - { - "content": "I can get you a vegie or a cheese pizza.", - "contentType": "PlainText" - } - ] - } - }, - { - "name": "Crust", - "description": "The type of pizza crust to order.", - "priority": 2, - "sampleUtterances": [ - "Make it a {Crust} crust.", - "I'd like a {Crust} crust." - ], - "slotConstraint": "Required", - "slotType": "DocPizzaCrustType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "What type of crust would you like?", - "contentType": "PlainText" - }, - { - "content": "Thick or thin crust?", - "contentType": "PlainText" - } - ] - } - }, - { - "name": "Sauce", - "description": "The type of sauce to use on the pizza.", - "priority": 3, - "sampleUtterances": [ - "Make it {Sauce} sauce.", - "I'd like {Sauce} sauce." - ], - "slotConstraint": "Required", - "slotType": "DocPizzaSauceType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "White or red sauce?", - "contentType": "PlainText" - }, - { - "content": "Garlic or tomato sauce?", - "contentType": "PlainText" - } - ] - } - } - ] - }, - "output": { - "version": "$LATEST", - "name": "DocOrderPizza", - "checksum": "ca9bc13d-afc8-4706-bbaf-091f7a5935d6", - "conclusionStatement": { - "messages": [ - { - "content": "All right, I ordered you a {Crust} crust {Type} pizza with {Sauce} sauce.", - "contentType": "PlainText" - }, - { - "content": "OK, your {Crust} crust {Type} pizza with {Sauce} sauce is on the way.", - "contentType": "PlainText" - } - ], - "responseCard": "foo" - }, - "confirmationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "Should I order your {Crust} crust {Type} pizza with {Sauce} sauce?", - "contentType": "PlainText" - } - ] - }, - "createdDate": 1494359783.453, - "description": "Order a pizza from a local pizzeria.", - "fulfillmentActivity": { - "type": "ReturnIntent" - }, - "lastUpdatedDate": 1494359783.453, - "rejectionStatement": { - "messages": [ - { - "content": "Ok, I'll cancel your order.", - "contentType": "PlainText" - }, - { - "content": "I cancelled your order.", - "contentType": "PlainText" - } - ] - }, - "sampleUtterances": [ - "Order me a pizza.", - "Order me a {Type} pizza.", - "I want a {Crust} crust {Type} pizza", - "I want a {Crust} crust {Type} pizza with {Sauce} sauce." - ], - "slots": [ - { - "name": "Sauce", - "description": "The type of sauce to use on the pizza.", - "priority": 3, - "sampleUtterances": [ - "Make it {Sauce} sauce.", - "I'd like {Sauce} sauce." - ], - "slotConstraint": "Required", - "slotType": "DocPizzaSauceType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "White or red sauce?", - "contentType": "PlainText" - }, - { - "content": "Garlic or tomato sauce?", - "contentType": "PlainText" - } - ] - } - }, - { - "name": "Type", - "description": "The type of pizza to order.", - "priority": 1, - "sampleUtterances": [ - "Get me a {Type} pizza.", - "A {Type} pizza please.", - "I'd like a {Type} pizza." 
- ], - "slotConstraint": "Required", - "slotType": "DocPizzaType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "What type of pizza would you like?", - "contentType": "PlainText" - }, - { - "content": "Vegie or cheese pizza?", - "contentType": "PlainText" - }, - { - "content": "I can get you a vegie or a cheese pizza.", - "contentType": "PlainText" - } - ] - } - }, - { - "name": "Crust", - "description": "The type of pizza crust to order.", - "priority": 2, - "sampleUtterances": [ - "Make it a {Crust} crust.", - "I'd like a {Crust} crust." - ], - "slotConstraint": "Required", - "slotType": "DocPizzaCrustType", - "slotTypeVersion": "$LATEST", - "valueElicitationPrompt": { - "maxAttempts": 1, - "messages": [ - { - "content": "What type of crust would you like?", - "contentType": "PlainText" - }, - { - "content": "Thick or thin crust?", - "contentType": "PlainText" - } - ] - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to create an intent for ordering pizzas.", - "id": "to-create-an-intent-1494358144659", - "title": "To create an intent" - } - ], - "PutSlotType": [ - { - "input": { - "name": "PizzaSauceType", - "description": "Available pizza sauces", - "enumerationValues": [ - { - "value": "red" - }, - { - "value": "white" - } - ] - }, - "output": { - "version": "$LATEST", - "name": "DocPizzaSauceType", - "checksum": "cfd00ed1-775d-4357-947c-aca7e73b44ba", - "createdDate": 1494356442.23, - "description": "Available pizza sauces", - "enumerationValues": [ - { - "value": "red" - }, - { - "value": "white" - } - ], - "lastUpdatedDate": 1494356442.23 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to create a slot type that describes pizza sauces.", - "id": "to-create-a-slot-type-1494357262258", - "title": "To Create a Slot Type" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/paginators-1.json deleted file mode 100644 index 02d2308..0000000 --- a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "GetSlotTypeVersions": { - "result_key": "slotTypes", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetSlotTypes": { - "result_key": "slotTypes", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetIntents": { - "result_key": "intents", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetBotChannelAssociations": { - "result_key": "botChannelAssociations", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetBots": { - "result_key": "bots", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetBuiltinSlotTypes": { - "result_key": "slotTypes", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetIntentVersions": { - "result_key": "intents", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetBotAliases": { - "result_key": "BotAliases", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetBuiltinIntents": { - "result_key": "intents", - 
"output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - }, - "GetBotVersions": { - "result_key": "bots", - "output_token": "nextToken", - "input_token": "nextToken", - "limit_key": "maxResults" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/service-2.json.gz deleted file mode 100644 index d872c2e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lex-models/2017-04-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index a4ae3f7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/examples-1.json b/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/service-2.json.gz deleted file mode 100644 index bcf88be..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lex-runtime/2016-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index bc5c0fe..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/examples-1.json b/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/service-2.json.gz deleted file mode 100644 index 1e5a0e6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/waiters-2.json 
b/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/waiters-2.json deleted file mode 100644 index 1ec9604..0000000 --- a/venv/Lib/site-packages/botocore/data/lexv2-models/2020-08-07/waiters-2.json +++ /dev/null @@ -1,255 +0,0 @@ -{ - "version":2, - "waiters":{ - "BotAvailable":{ - "delay":10, - "operation":"DescribeBot", - "maxAttempts":35, - "description":"Wait until a bot is available", - "acceptors":[ - { - "expected":"Available", - "matcher":"path", - "state":"success", - "argument":"botStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"botStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"botStatus" - }, - { - "expected":"Inactive", - "matcher":"path", - "state":"failure", - "argument":"botStatus" - } - ] - }, - "BotAliasAvailable":{ - "delay":10, - "operation":"DescribeBotAlias", - "maxAttempts":35, - "description":"Wait until a bot alias is available", - "acceptors":[ - { - "expected":"Available", - "matcher":"path", - "state":"success", - "argument":"botAliasStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"botAliasStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"botAliasStatus" - } - ] - }, - "BotExportCompleted":{ - "delay":10, - "operation":"DescribeExport", - "maxAttempts":35, - "description":"Wait until a bot has been exported", - "acceptors":[ - { - "expected":"Completed", - "matcher":"path", - "state":"success", - "argument":"exportStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"exportStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"exportStatus" - } - ] - }, - "BotImportCompleted":{ - "delay":10, - "operation":"DescribeImport", - "maxAttempts":35, - "description":"Wait until a bot has been imported", - "acceptors":[ - { - "expected":"Completed", - "matcher":"path", - "state":"success", - "argument":"importStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"importStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"importStatus" - } - ] - }, - "BotLocaleBuilt":{ - "delay":10, - "operation":"DescribeBotLocale", - "maxAttempts":35, - "description":"Wait until a bot locale is built", - "acceptors":[ - { - "expected":"Built", - "matcher":"path", - "state":"success", - "argument":"botLocaleStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - }, - { - "expected":"NotBuilt", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - } - ] - }, - "BotLocaleExpressTestingAvailable":{ - "delay":10, - "operation":"DescribeBotLocale", - "maxAttempts":35, - "description":"Wait until a bot locale build is ready for express testing", - "acceptors":[ - { - "expected":"Built", - "matcher":"path", - "state":"success", - "argument":"botLocaleStatus" - }, - { - "expected":"ReadyExpressTesting", - "matcher":"path", - "state":"success", - "argument":"botLocaleStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - }, - { - "expected":"NotBuilt", - "matcher":"path", - "state":"failure", - 
"argument":"botLocaleStatus" - } - ] - }, - "BotVersionAvailable":{ - "delay":10, - "operation":"DescribeBotVersion", - "maxAttempts":35, - "description":"Wait until a bot version is available", - "acceptors":[ - { - "expected":"Available", - "matcher":"path", - "state":"success", - "argument":"botStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"botStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"botStatus" - }, - { - "state":"retry", - "matcher":"status", - "expected":404 - } - ] - }, - "BotLocaleCreated":{ - "delay":10, - "operation":"DescribeBotLocale", - "maxAttempts":35, - "description":"Wait unit a bot locale is created", - "acceptors":[ - { - "expected":"Built", - "matcher":"path", - "state":"success", - "argument":"botLocaleStatus" - }, - { - "expected":"ReadyExpressTesting", - "matcher":"path", - "state":"success", - "argument":"botLocaleStatus" - }, - { - "expected":"NotBuilt", - "matcher":"path", - "state":"success", - "argument":"botLocaleStatus" - }, - { - "expected":"Deleting", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - }, - { - "expected":"Failed", - "matcher":"path", - "state":"failure", - "argument":"botLocaleStatus" - } - ] - } - } -} - diff --git a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 019b634..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/examples-1.json b/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/service-2.json.gz deleted file mode 100644 index cd1bf07..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lexv2-runtime/2020-08-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index e658dda..0000000 Binary files a/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/paginators-1.json deleted file mode 100644 index 6d74d22..0000000 --- 
a/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListLinuxSubscriptionInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Instances" - }, - "ListLinuxSubscriptions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Subscriptions" - }, - "ListRegisteredSubscriptionProviders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegisteredSubscriptionProviders" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/service-2.json.gz deleted file mode 100644 index 7724156..0000000 Binary files a/venv/Lib/site-packages/botocore/data/license-manager-linux-subscriptions/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index b35a059..0000000 Binary files a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/paginators-1.json deleted file mode 100644 index 413006e..0000000 --- a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListIdentityProviders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IdentityProviderSummaries" - }, - "ListInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceSummaries" - }, - "ListProductSubscriptions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProductUserSummaries" - }, - "ListUserAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceUserSummaries" - }, - "ListLicenseServerEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "LicenseServerEndpoints" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/service-2.json.gz deleted file mode 100644 index c222662..0000000 Binary files a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/license-manager-user-subscriptions/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - 
"version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index bab4e88..0000000 Binary files a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/examples-1.json b/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/paginators-1.json deleted file mode 100644 index 03a3ca4..0000000 --- a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListAssociationsForLicenseConfiguration": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LicenseConfigurationAssociations" - }, - "ListLicenseConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LicenseConfigurations" - }, - "ListLicenseSpecificationsForResource": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LicenseSpecifications" - }, - "ListResourceInventory": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ResourceInventoryList" - }, - "ListUsageForLicenseConfiguration": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LicenseConfigurationUsageList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/service-2.json.gz deleted file mode 100644 index 76528e1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/license-manager/2018-08-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index a73a928..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/examples-1.json b/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/paginators-1.json deleted file mode 100644 index fbea938..0000000 --- a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/paginators-1.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "pagination": { - "GetActiveNames": { - "input_token": "pageToken", - "output_token": 
"nextPageToken", - "result_key": "activeNames" - }, - "GetBlueprints": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "blueprints" - }, - "GetBundles": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "bundles" - }, - "GetDomains": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "domains" - }, - "GetInstanceSnapshots": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "instanceSnapshots" - }, - "GetInstances": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "instances" - }, - "GetKeyPairs": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "keyPairs" - }, - "GetOperations": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "operations" - }, - "GetStaticIps": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "staticIps" - }, - "GetCloudFormationStackRecords": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "cloudFormationStackRecords" - }, - "GetDiskSnapshots": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "diskSnapshots" - }, - "GetDisks": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "disks" - }, - "GetExportSnapshotRecords": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "exportSnapshotRecords" - }, - "GetLoadBalancers": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "loadBalancers" - }, - "GetRelationalDatabaseBlueprints": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "blueprints" - }, - "GetRelationalDatabaseBundles": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "bundles" - }, - "GetRelationalDatabaseEvents": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "relationalDatabaseEvents" - }, - "GetRelationalDatabaseParameters": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "parameters" - }, - "GetRelationalDatabaseSnapshots": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "relationalDatabaseSnapshots" - }, - "GetRelationalDatabases": { - "input_token": "pageToken", - "output_token": "nextPageToken", - "result_key": "relationalDatabases" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/service-2.json.gz deleted file mode 100644 index 2463ff9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lightsail/2016-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/location/2020-11-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/location/2020-11-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 746e8cd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/location/2020-11-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/location/2020-11-19/examples-1.json b/venv/Lib/site-packages/botocore/data/location/2020-11-19/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/location/2020-11-19/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { 
- } -} diff --git a/venv/Lib/site-packages/botocore/data/location/2020-11-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/location/2020-11-19/paginators-1.json deleted file mode 100644 index 55e77c9..0000000 --- a/venv/Lib/site-packages/botocore/data/location/2020-11-19/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "GetDevicePositionHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "DevicePositions", - "limit_key": "MaxResults" - }, - "ListGeofenceCollections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ListGeofences": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Entries", - "limit_key": "MaxResults" - }, - "ListMaps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ListPlaceIndexes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ListTrackerConsumers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConsumerArns" - }, - "ListTrackers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ListDevicePositions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ListRouteCalculators": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ListKeys": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entries" - }, - "ForecastGeofenceEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ForecastedEvents" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/location/2020-11-19/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/location/2020-11-19/paginators-1.sdk-extras.json deleted file mode 100644 index 2aba0bf..0000000 --- a/venv/Lib/site-packages/botocore/data/location/2020-11-19/paginators-1.sdk-extras.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ForecastGeofenceEvents": { - "non_aggregate_keys": [ - "DistanceUnit", - "SpeedUnit" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/location/2020-11-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/location/2020-11-19/service-2.json.gz deleted file mode 100644 index b7c73ff..0000000 Binary files a/venv/Lib/site-packages/botocore/data/location/2020-11-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/logs/2014-03-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 94cd6ec..0000000 Binary files a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/examples-1.json b/venv/Lib/site-packages/botocore/data/logs/2014-03-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/logs/2014-03-28/paginators-1.json deleted file mode 100644 index ac2062d..0000000 --- a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/paginators-1.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "pagination": { - "DescribeDestinations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "destinations" - }, - "DescribeLogGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "logGroups" - }, - "DescribeLogStreams": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "logStreams" - }, - "DescribeMetricFilters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "metricFilters" - }, - "DescribeSubscriptionFilters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "subscriptionFilters" - }, - "FilterLogEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": [ - "events", - "searchedLogStreams" - ] - }, - "DescribeExportTasks": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "exportTasks" - }, - "DescribeQueries": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "queries" - }, - "DescribeResourcePolicies": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "resourcePolicies" - }, - "DescribeDeliveries": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "deliveries" - }, - "DescribeDeliveryDestinations": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "deliveryDestinations" - }, - "DescribeDeliverySources": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "deliverySources" - }, - "ListAnomalies": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "anomalies" - }, - "ListLogAnomalyDetectors": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "anomalyDetectors" - }, - "DescribeConfigurationTemplates": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "configurationTemplates" - }, - "ListLogGroupsForQuery": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "logGroupIdentifiers" - }, - "GetScheduledQueryHistory": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "triggerHistory" - }, - "ListScheduledQueries": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "scheduledQueries" - }, - "ListSourcesForS3TableIntegration": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "sources" - }, - "ListAggregateLogGroupSummaries": { - "input_token": "nextToken", - "limit_key": "limit", - "output_token": "nextToken", - "result_key": "aggregateLogGroupSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/paginators-1.sdk-extras.json 
b/venv/Lib/site-packages/botocore/data/logs/2014-03-28/paginators-1.sdk-extras.json deleted file mode 100644 index 23b4966..0000000 --- a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/paginators-1.sdk-extras.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetScheduledQueryHistory": { - "non_aggregate_keys": [ - "name", - "scheduledQueryArn" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/logs/2014-03-28/service-2.json.gz deleted file mode 100644 index 72c99e0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/logs/2014-03-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4ddc4e7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/examples-1.json b/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/service-2.json.gz deleted file mode 100644 index ba49910..0000000 Binary files a/venv/Lib/site-packages/botocore/data/lookoutequipment/2020-12-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/m2/2021-04-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/m2/2021-04-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9496f00..0000000 Binary files a/venv/Lib/site-packages/botocore/data/m2/2021-04-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/m2/2021-04-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/m2/2021-04-28/paginators-1.json deleted file mode 100644 index a6ccf11..0000000 --- a/venv/Lib/site-packages/botocore/data/m2/2021-04-28/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListApplicationVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "applicationVersions" - }, - "ListApplications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "applications" - }, - "ListBatchJobDefinitions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "batchJobDefinitions" - }, - "ListBatchJobExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "batchJobExecutions" - }, - 
"ListDataSetImportHistory": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataSetImportTasks" - }, - "ListDataSets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataSets" - }, - "ListDeployments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "deployments" - }, - "ListEngineVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "engineVersions" - }, - "ListEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environments" - }, - "ListDataSetExportHistory": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataSetExportTasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/m2/2021-04-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/m2/2021-04-28/service-2.json.gz deleted file mode 100644 index db5c704..0000000 Binary files a/venv/Lib/site-packages/botocore/data/m2/2021-04-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2ed9838..0000000 Binary files a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/examples-1.json b/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/paginators-1.json deleted file mode 100644 index c13ce65..0000000 --- a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "DescribeBatchPredictions": { - "limit_key": "Limit", - "output_token": "NextToken", - "input_token": "NextToken", - "result_key": "Results" - }, - "DescribeDataSources": { - "limit_key": "Limit", - "output_token": "NextToken", - "input_token": "NextToken", - "result_key": "Results" - }, - "DescribeEvaluations": { - "limit_key": "Limit", - "output_token": "NextToken", - "input_token": "NextToken", - "result_key": "Results" - }, - "DescribeMLModels": { - "limit_key": "Limit", - "output_token": "NextToken", - "input_token": "NextToken", - "result_key": "Results" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/service-2.json.gz b/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/service-2.json.gz deleted file mode 100644 index a1c3b5a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/waiters-2.json b/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/waiters-2.json deleted file mode 100644 index da6b1c9..0000000 --- 
a/venv/Lib/site-packages/botocore/data/machinelearning/2014-12-12/waiters-2.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "version": 2, - "waiters": { - "DataSourceAvailable": { - "delay": 30, - "operation": "DescribeDataSources", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "COMPLETED", - "matcher": "pathAll", - "state": "success", - "argument": "Results[].Status" - }, - { - "expected": "FAILED", - "matcher": "pathAny", - "state": "failure", - "argument": "Results[].Status" - } - ] - }, - "MLModelAvailable": { - "delay": 30, - "operation": "DescribeMLModels", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "COMPLETED", - "matcher": "pathAll", - "state": "success", - "argument": "Results[].Status" - }, - { - "expected": "FAILED", - "matcher": "pathAny", - "state": "failure", - "argument": "Results[].Status" - } - ] - }, - "EvaluationAvailable": { - "delay": 30, - "operation": "DescribeEvaluations", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "COMPLETED", - "matcher": "pathAll", - "state": "success", - "argument": "Results[].Status" - }, - { - "expected": "FAILED", - "matcher": "pathAny", - "state": "failure", - "argument": "Results[].Status" - } - ] - }, - "BatchPredictionAvailable": { - "delay": 30, - "operation": "DescribeBatchPredictions", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "COMPLETED", - "matcher": "pathAll", - "state": "success", - "argument": "Results[].Status" - }, - { - "expected": "FAILED", - "matcher": "pathAny", - "state": "failure", - "argument": "Results[].Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2cd2ee1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/paginators-1.json deleted file mode 100644 index 8037ac7..0000000 --- a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/paginators-1.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "pagination": { - "DescribeBuckets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "buckets" - }, - "GetUsageStatistics": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "records", - "non_aggregate_keys": [ - "timeRange" - ] - }, - "ListClassificationJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListCustomDataIdentifiers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListFindings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findingIds" - }, - "ListFindingsFilters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findingsFilterListItems" - }, - "ListInvitations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "invitations" - }, - "ListMembers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "members" - }, - "ListOrganizationAdminAccounts": { - "input_token": 
"nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "adminAccounts" - }, - "SearchResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "matchingResources" - }, - "ListClassificationScopes": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "classificationScopes" - }, - "ListAllowLists": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "allowLists" - }, - "ListManagedDataIdentifiers": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "items" - }, - "ListResourceProfileDetections": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "detections" - }, - "ListSensitivityInspectionTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sensitivityInspectionTemplates" - }, - "ListResourceProfileArtifacts": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "artifacts" - }, - "ListAutomatedDiscoveryAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/service-2.json.gz deleted file mode 100644 index 5faa892..0000000 Binary files a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/waiters-2.json deleted file mode 100644 index 12c4a4a..0000000 --- a/venv/Lib/site-packages/botocore/data/macie2/2020-01-01/waiters-2.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "version": 2, - "waiters": { - "FindingRevealed": { - "description": "Wait until the sensitive data occurrences are ready.", - "delay": 2, - "maxAttempts": 60, - "operation": "GetSensitiveDataOccurrences", - "acceptors": [ - { - "matcher": "path", - "argument": "status", - "state": "success", - "expected": "SUCCESS" - }, - { - "matcher": "path", - "argument": "status", - "state": "success", - "expected": "ERROR" - } - ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index b8518d5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/paginators-1.json deleted file mode 100644 index a87e968..0000000 --- a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/paginators-1.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "pagination": { - "ListAddonInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "AddonInstances" - }, - "ListAddonSubscriptions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "AddonSubscriptions" - }, - "ListArchiveExports": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - 
"result_key": "Exports" - }, - "ListArchiveSearches": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "Searches" - }, - "ListArchives": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "Archives" - }, - "ListIngressPoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "IngressPoints" - }, - "ListRelays": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "Relays" - }, - "ListRuleSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "RuleSets" - }, - "ListTrafficPolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "TrafficPolicies" - }, - "ListAddressListImportJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "ImportJobs" - }, - "ListAddressLists": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "AddressLists" - }, - "ListMembersOfAddressList": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "Addresses" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/service-2.json.gz deleted file mode 100644 index 6bee678..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/waiters-2.json b/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/mailmanager/2023-10-17/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/endpoint-rule-set-1.json.gz deleted file mode 100644 index 16346bb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/paginators-1.json b/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/paginators-1.json deleted file mode 100644 index 61eb591..0000000 --- a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListTokenBalances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tokenBalances" - }, - "ListTransactionEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "events" - }, - "ListTransactions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "transactions" - }, - "ListAssetContracts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "contracts" - }, - "ListFilteredTransactionEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - 
"limit_key": "maxResults", - "result_key": "events" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/service-2.json.gz b/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/service-2.json.gz deleted file mode 100644 index b6f56b4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/waiters-2.json b/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/managedblockchain-query/2023-05-04/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/endpoint-rule-set-1.json.gz deleted file mode 100644 index be01935..0000000 Binary files a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/examples-1.json b/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/paginators-1.json b/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/paginators-1.json deleted file mode 100644 index 8d30a03..0000000 --- a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListAccessors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Accessors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/service-2.json.gz b/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/service-2.json.gz deleted file mode 100644 index 6e8c440..0000000 Binary files a/venv/Lib/site-packages/botocore/data/managedblockchain/2018-09-24/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 44eef09..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/service-2.json.gz deleted file mode 100644 index 6340c14..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-agreement/2020-03-01/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index baf2068..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/examples-1.json b/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/paginators-1.json deleted file mode 100644 index 8bbef96..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListChangeSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ChangeSetSummaryList" - }, - "ListEntities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EntitySummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/service-2.json.gz deleted file mode 100644 index 0bbc111..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-catalog/2018-09-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 46f5fa3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/service-2.json.gz deleted file mode 100644 index 4ec9f00..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-deployment/2023-01-25/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index 72f8ce8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/examples-1.json b/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/paginators-1.json b/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/paginators-1.json deleted file mode 100644 index 8dbf525..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "GetEntitlements": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Entitlements" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/service-2.json.gz deleted file mode 100644 index 239b291..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-entitlement/2017-01-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 195639c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/service-2.json.gz deleted file mode 100644 index 55a83f1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplace-reporting/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index e479193..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/examples-1.json b/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/service-2.json.gz deleted file mode 100644 index 62f8a35..0000000 Binary files a/venv/Lib/site-packages/botocore/data/marketplacecommerceanalytics/2015-07-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4c4198a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/paginators-1.json deleted file mode 100644 index 16736a6..0000000 --- a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListFlows": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Flows" - }, - "ListEntitlements": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Entitlements" - }, - "ListOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Offerings" - }, - "ListReservations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "ListBridges": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Bridges" - }, - "ListGatewayInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Instances" - }, - "ListGateways": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Gateways" - }, - "ListRouterInputs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RouterInputs" - }, - "ListRouterNetworkInterfaces": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RouterNetworkInterfaces" - }, - "ListRouterOutputs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RouterOutputs" - } 
- } -} diff --git a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/service-2.json.gz deleted file mode 100644 index a7f11c8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/waiters-2.json deleted file mode 100644 index c01bd1c..0000000 --- a/venv/Lib/site-packages/botocore/data/mediaconnect/2018-11-14/waiters-2.json +++ /dev/null @@ -1,317 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "FlowActive" : { - "description" : "Wait until a flow is active", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "DescribeFlow", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "failure", - "expected" : "STANDBY" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "FlowDeleted" : { - "description" : "Wait until a flow is deleted", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "DescribeFlow", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "NotFoundException" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "retry", - "expected" : "DELETING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "FlowStandby" : { - "description" : "Wait until a flow is in standby mode", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "DescribeFlow", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "success", - "expected" : "STANDBY" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "retry", - "expected" : "STOPPING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "Flow.Status", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "InputActive" : { - "description" : "Wait until the Input is ACTIVE", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterInput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - 
"state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "InputDeleted" : { - "description" : "Wait until the Input is deleted", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterInput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "retry", - "expected" : "DELETING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "failure", - "expected" : "ERROR" - }, { - "matcher" : "error", - "state" : "success", - "expected" : "NotFoundException" - } ] - }, - "InputStandby" : { - "description" : "Wait until the Input is STANDBY", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterInput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "success", - "expected" : "STANDBY" - }, { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "retry", - "expected" : "STOPPING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "RouterInput.State", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "OutputActive" : { - "description" : "Wait until the Output is ACTIVE", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterOutput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "OutputDeleted" : { - "description" : "Wait until the Output is deleted", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterOutput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "retry", - "expected" : "DELETING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "failure", - "expected" : "ERROR" - }, { - "matcher" : "error", - "state" : "success", - "expected" : "NotFoundException" - } ] - }, - "OutputRouted" : { - "description" : "Wait until the Output is ROUTED", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterOutput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterOutput.RoutedState", - "state" : "success", - "expected" : "ROUTED" - }, { - "matcher" : "path", - "argument" : "RouterOutput.RoutedState", - "state" : "retry", - "expected" : "ROUTING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - 
"matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - } ] - }, - "OutputStandby" : { - "description" : "Wait until the Output is STANDBY", - "delay" : 3, - "maxAttempts" : 40, - "operation" : "GetRouterOutput", - "acceptors" : [ { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "success", - "expected" : "STANDBY" - }, { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "retry", - "expected" : "STOPPING" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "InternalServerErrorException" - }, { - "matcher" : "error", - "state" : "retry", - "expected" : "ServiceUnavailableException" - }, { - "matcher" : "path", - "argument" : "RouterOutput.State", - "state" : "failure", - "expected" : "ERROR" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index 715ed94..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/paginators-1.json deleted file mode 100644 index 1f93efd..0000000 --- a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "DescribeEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Endpoints" - }, - "ListJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Jobs" - }, - "ListPresets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Presets" - }, - "ListJobTemplates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobTemplates" - }, - "ListQueues": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Queues" - }, - "SearchJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Jobs" - }, - "ListVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Versions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/paginators-1.sdk-extras.json deleted file mode 100644 index c121990..0000000 --- a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/paginators-1.sdk-extras.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListQueues": { - "non_aggregate_keys": [ - "TotalConcurrentJobs", - "UnallocatedConcurrentJobs" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/service-2.json.gz deleted file mode 100644 index 0c65089..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediaconvert/2017-08-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index ca76616..0000000 Binary files a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/paginators-1.json deleted file mode 100644 index 2a60014..0000000 --- a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/paginators-1.json +++ /dev/null @@ -1,142 +0,0 @@ -{ - "pagination": { - "ListInputs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Inputs" - }, - "ListChannels": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Channels" - }, - "ListInputSecurityGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InputSecurityGroups" - }, - "ListOfferings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Offerings" - }, - "ListReservations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Reservations" - }, - "DescribeSchedule": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScheduleActions" - }, - "ListMultiplexPrograms": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MultiplexPrograms" - }, - "ListMultiplexes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Multiplexes" - }, - "ListInputDevices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InputDevices" - }, - "ListInputDeviceTransfers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InputDeviceTransfers" - }, - "ListSignalMaps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SignalMaps" - }, - "ListCloudWatchAlarmTemplates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CloudWatchAlarmTemplates" - }, - "ListCloudWatchAlarmTemplateGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CloudWatchAlarmTemplateGroups" - }, - "ListEventBridgeRuleTemplates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EventBridgeRuleTemplates" - }, - "ListEventBridgeRuleTemplateGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EventBridgeRuleTemplateGroups" - }, - "ListNodes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Nodes" - }, - "ListClusters": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Clusters" - }, - "ListChannelPlacementGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ChannelPlacementGroups" - }, - "ListNetworks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - 
"result_key": "Networks" - }, - "ListSdiSources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SdiSources" - }, - "ListAlerts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Alerts" - }, - "ListClusterAlerts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Alerts" - }, - "ListMultiplexAlerts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Alerts" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/service-2.json.gz deleted file mode 100644 index c170b32..0000000 Binary files a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/waiters-2.json deleted file mode 100644 index d95e09a..0000000 --- a/venv/Lib/site-packages/botocore/data/medialive/2017-10-14/waiters-2.json +++ /dev/null @@ -1,624 +0,0 @@ -{ - "version": 2, - "waiters": { - "ChannelCreated": { - "description": "Wait until a channel has been created", - "operation": "DescribeChannel", - "delay": 3, - "maxAttempts": 5, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "IDLE" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "CREATING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "CREATE_FAILED" - } - ] - }, - "ChannelRunning": { - "description": "Wait until a channel is running", - "operation": "DescribeChannel", - "delay": 5, - "maxAttempts": 120, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "RUNNING" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "STARTING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ChannelStopped": { - "description": "Wait until a channel has is stopped", - "operation": "DescribeChannel", - "delay": 5, - "maxAttempts": 60, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "IDLE" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "STOPPING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ChannelDeleted": { - "description": "Wait until a channel has been deleted", - "operation": "DescribeChannel", - "delay": 5, - "maxAttempts": 84, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DELETED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DELETING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "InputAttached": { - "description": "Wait until an input has been attached", - "operation": "DescribeInput", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "ATTACHED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DETACHED" - }, - { - "state": "retry", - "matcher": 
"status", - "expected": 500 - } - ] - }, - "InputDetached": { - "description": "Wait until an input has been detached", - "operation": "DescribeInput", - "delay": 5, - "maxAttempts": 84, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DETACHED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "CREATING" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "ATTACHED" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "InputDeleted": { - "description": "Wait until an input has been deleted", - "operation": "DescribeInput", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DELETED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DELETING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "MultiplexCreated": { - "description": "Wait until a multiplex has been created", - "operation": "DescribeMultiplex", - "delay": 3, - "maxAttempts": 5, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "IDLE" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "CREATING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "CREATE_FAILED" - } - ] - }, - "MultiplexRunning": { - "description": "Wait until a multiplex is running", - "operation": "DescribeMultiplex", - "delay": 5, - "maxAttempts": 120, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "RUNNING" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "STARTING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "MultiplexStopped": { - "description": "Wait until a multiplex has is stopped", - "operation": "DescribeMultiplex", - "delay": 5, - "maxAttempts": 28, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "IDLE" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "STOPPING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "MultiplexDeleted": { - "description": "Wait until a multiplex has been deleted", - "operation": "DescribeMultiplex", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DELETED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DELETING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "SignalMapCreated": { - "description": "Wait until a signal map has been created", - "delay": 5, - "maxAttempts": 60, - "operation": "GetSignalMap", - "acceptors": [ - { - "matcher": "path", - "argument": "Status", - "state": "success", - "expected": "CREATE_COMPLETE" - }, - { - "matcher": "path", - "argument": "Status", - "state": "retry", - "expected": "CREATE_IN_PROGRESS" - }, - { - "matcher": "path", - "argument": "Status", - "state": "failure", - "expected": "CREATE_FAILED" - } - ] - }, - "SignalMapMonitorDeleted": { - "description": "Wait until a signal map's monitor has been deleted", - "delay": 5, - "maxAttempts": 120, - "operation": 
"GetSignalMap", - "acceptors": [ - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "success", - "expected": "DELETE_COMPLETE" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "retry", - "expected": "DELETE_IN_PROGRESS" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "failure", - "expected": "DELETE_FAILED" - } - ] - }, - "SignalMapMonitorDeployed": { - "description": "Wait until a signal map's monitor has been deployed", - "delay": 5, - "maxAttempts": 120, - "operation": "GetSignalMap", - "acceptors": [ - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "success", - "expected": "DRY_RUN_DEPLOYMENT_COMPLETE" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "success", - "expected": "DEPLOYMENT_COMPLETE" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "retry", - "expected": "DRY_RUN_DEPLOYMENT_IN_PROGRESS" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "retry", - "expected": "DEPLOYMENT_IN_PROGRESS" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "failure", - "expected": "DRY_RUN_DEPLOYMENT_FAILED" - }, - { - "matcher": "path", - "argument": "MonitorDeployment.Status", - "state": "failure", - "expected": "DEPLOYMENT_FAILED" - } - ] - }, - "SignalMapUpdated": { - "description": "Wait until a signal map has been updated", - "delay": 5, - "maxAttempts": 60, - "operation": "GetSignalMap", - "acceptors": [ - { - "matcher": "path", - "argument": "Status", - "state": "success", - "expected": "UPDATE_COMPLETE" - }, - { - "matcher": "path", - "argument": "Status", - "state": "retry", - "expected": "UPDATE_IN_PROGRESS" - }, - { - "matcher": "path", - "argument": "Status", - "state": "failure", - "expected": "UPDATE_FAILED" - }, - { - "matcher": "path", - "argument": "Status", - "state": "failure", - "expected": "UPDATE_REVERTED" - } - ] - }, - "ClusterCreated": { - "description": "Wait until a cluster has been created", - "operation": "DescribeCluster", - "delay": 3, - "maxAttempts": 5, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "ACTIVE" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "CREATING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "CREATE_FAILED" - } - ] - }, - "ClusterDeleted": { - "description": "Wait until a cluster has been deleted", - "operation": "DescribeCluster", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DELETED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DELETING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "NodeRegistered": { - "description": "Wait until a node has been registered", - "operation": "DescribeNode", - "delay": 3, - "maxAttempts": 5, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "ACTIVE" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "REGISTERING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 404 - }, - { - "state": "failure", - "matcher": "path", - "argument": "State", - "expected": "REGISTRATION_FAILED" - }, - { 
- "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "NodeDeregistered": { - "description": "Wait until a node has been deregistered", - "operation": "DescribeNode", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DEREGISTERED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DEREGISTERING" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DRAINING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ChannelPlacementGroupAssigned": { - "description": "Wait until the channel placement group has been assigned", - "operation": "DescribeChannelPlacementGroup", - "delay": 3, - "maxAttempts": 5, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "ASSIGNED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "ASSIGNING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ChannelPlacementGroupUnassigned": { - "description": "Wait until the channel placement group has been unassigned", - "operation": "DescribeChannelPlacementGroup", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "UNASSIGNED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "UNASSIGNING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ChannelPlacementGroupDeleted": { - "description": "Wait until the channel placement group has been deleted", - "operation": "DescribeChannelPlacementGroup", - "delay": 5, - "maxAttempts": 20, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "State", - "expected": "DELETED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "State", - "expected": "DELETING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 50540b7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/paginators-1.json deleted file mode 100644 index df498b9..0000000 --- a/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListAssets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Assets" - }, - "ListPackagingConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PackagingConfigurations" - }, - "ListPackagingGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PackagingGroups" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/service-2.json.gz deleted file mode 100644 index 266e2b7..0000000 
Binary files a/venv/Lib/site-packages/botocore/data/mediapackage-vod/2018-11-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index d7974b7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/paginators-1.json deleted file mode 100644 index 24e4410..0000000 --- a/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListChannels": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Channels" - }, - "ListOriginEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OriginEndpoints" - }, - "ListHarvestJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "HarvestJobs" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/service-2.json.gz deleted file mode 100644 index a4b9b1e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediapackage/2017-10-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3aadf6a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/paginators-1.json deleted file mode 100644 index 531aa7c..0000000 --- a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListChannelGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListChannels": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListOriginEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListHarvestJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/service-2.json.gz deleted file mode 100644 index 0503fa7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/waiters-2.json b/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/waiters-2.json deleted file mode 100644 index 138579f..0000000 --- 
a/venv/Lib/site-packages/botocore/data/mediapackagev2/2022-12-25/waiters-2.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "HarvestJobFinished" : { - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetHarvestJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Status", - "state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "Status", - "state" : "success", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "Status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "Status", - "state" : "retry", - "expected" : "QUEUED" - }, { - "matcher" : "path", - "argument" : "Status", - "state" : "retry", - "expected" : "IN_PROGRESS" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 594949c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/examples-1.json b/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/paginators-1.json deleted file mode 100644 index 7b1c0f7..0000000 --- a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListItems": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/service-2.json.gz deleted file mode 100644 index 66bedeb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediastore-data/2017-09-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index e03b97c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/examples-1.json b/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/paginators-1.json deleted file mode 100644 index ed3ece0..0000000 --- a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListContainers": { - "input_token": 
"NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Containers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/service-2.json.gz deleted file mode 100644 index 0c1fe0e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediastore/2017-09-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index f4743ca..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/paginators-1.json deleted file mode 100644 index fe39ff8..0000000 --- a/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "ListPlaybackConfigurations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "GetChannelSchedule": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListChannels": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListSourceLocations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListVodSources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListAlerts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListPrefetchSchedules": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - }, - "ListLiveSources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/service-2.json.gz deleted file mode 100644 index 63d0ad4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mediatailor/2018-04-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index b2576fa..0000000 Binary files a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/paginators-1.json deleted file mode 100644 index 807d052..0000000 --- a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListDICOMImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "jobSummaries" - }, - 
"ListDatastores": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "datastoreSummaries" - }, - "ListImageSetVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageSetPropertiesList" - }, - "SearchImageSets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "imageSetsMetadataSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/paginators-1.sdk-extras.json deleted file mode 100644 index 383da17..0000000 --- a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "SearchImageSets": { - "non_aggregate_keys": [ - "sort" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/service-2.json.gz deleted file mode 100644 index e2bd1f1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/waiters-2.json b/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/medical-imaging/2023-07-19/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8af9645..0000000 Binary files a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/examples-1.json b/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/paginators-1.json deleted file mode 100644 index abafc00..0000000 --- a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/paginators-1.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "pagination": { - "DescribeACLs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ACLs" - }, - "DescribeClusters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Clusters" - }, - "DescribeEngineVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EngineVersions" - }, - "DescribeEvents": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Events" - }, - "DescribeParameterGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ParameterGroups" - }, - "DescribeParameters": { - 
"input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Parameters" - }, - "DescribeReservedNodes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ReservedNodes" - }, - "DescribeReservedNodesOfferings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ReservedNodesOfferings" - }, - "DescribeServiceUpdates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ServiceUpdates" - }, - "DescribeSnapshots": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Snapshots" - }, - "DescribeSubnetGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SubnetGroups" - }, - "DescribeUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Users" - }, - "DescribeMultiRegionClusters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MultiRegionClusters" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/service-2.json.gz deleted file mode 100644 index 9171621..0000000 Binary files a/venv/Lib/site-packages/botocore/data/memorydb/2021-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9bd8cdf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/examples-1.json b/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/service-2.json.gz deleted file mode 100644 index cb139ff..0000000 Binary files a/venv/Lib/site-packages/botocore/data/meteringmarketplace/2016-01-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index aed6dcb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/examples-1.json 
b/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/paginators-1.json deleted file mode 100644 index db029bf..0000000 --- a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListCreatedArtifacts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CreatedArtifactList" - }, - "ListDiscoveredResources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DiscoveredResourceList" - }, - "ListMigrationTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MigrationTaskSummaryList" - }, - "ListProgressUpdateStreams": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ProgressUpdateStreamSummaryList" - }, - "ListApplicationStates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ApplicationStateList" - }, - "ListMigrationTaskUpdates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "MigrationTaskUpdateList" - }, - "ListSourceResources": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SourceResourceList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/service-2.json.gz deleted file mode 100644 index e4a85a8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mgh/2017-05-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 58f6688..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/examples-1.json b/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/paginators-1.json deleted file mode 100644 index 6cdc06a..0000000 --- a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "DescribeJobLogItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeReplicationConfigurationTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - 
"DescribeSourceServers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeVcenterClients": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "DescribeLaunchConfigurationTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListApplications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListSourceServerActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListTemplateActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListWaves": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListExportErrors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListExports": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListImportErrors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListImports": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListManagedAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListConnectors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/service-2.json.gz deleted file mode 100644 index f4d4aa4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/mgn/2020-02-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3f65149..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/examples-1.json b/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/paginators-1.json deleted file mode 100644 index 79ae0ff..0000000 --- a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ApplicationSummaryList" - }, - "ListEnvironmentVpcs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EnvironmentVpcList" - }, - "ListEnvironments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EnvironmentSummaryList" - }, - "ListRoutes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RouteSummaryList" - }, - "ListServices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/service-2.json.gz deleted file mode 100644 index bd6035c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migration-hub-refactor-spaces/2021-10-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 282a464..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/examples-1.json b/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/service-2.json.gz deleted file mode 100644 index 5119a57..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migrationhub-config/2019-06-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index b1b4ee7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/paginators-1.json deleted file mode 100644 index 4c45222..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListPlugins": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "plugins" - }, - "ListTemplateStepGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templateStepGroupSummary" - }, - "ListTemplateSteps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templateStepSummaryList" - }, - "ListTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templateSummary" - }, - "ListWorkflowStepGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowStepGroupsSummary" - }, - "ListWorkflowSteps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowStepsSummary" - }, - "ListWorkflows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "migrationWorkflowSummary" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/service-2.json.gz deleted file mode 100644 index ef0b6a6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/waiters-2.json b/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhuborchestrator/2021-08-28/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 496fe68..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/examples-1.json b/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/paginators-1.json deleted file mode 100644 index 889a45e..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "GetServerDetails": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "associatedApplications" - }, - "ListApplicationComponents": { - 
"input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "applicationComponentInfos" - }, - "ListCollectors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "Collectors" - }, - "ListImportFileTask": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "taskInfos" - }, - "ListServers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "serverInfos" - }, - "ListAnalyzableServers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "analyzableServers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/paginators-1.sdk-extras.json deleted file mode 100644 index 2524463..0000000 --- a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetServerDetails": { - "non_aggregate_keys": [ - "serverDetail" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/service-2.json.gz deleted file mode 100644 index b6e97c1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/migrationhubstrategy/2020-02-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index dc8da06..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/paginators-1.json deleted file mode 100644 index f3f8b1b..0000000 --- a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListApprovalTeams": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ApprovalTeams" - }, - "ListIdentitySources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IdentitySources" - }, - "ListPolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Policies" - }, - "ListPolicyVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PolicyVersions" - }, - "ListResourcePolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourcePolicies" - }, - "ListSessions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Sessions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/service-2.json.gz deleted file mode 100644 index 47aaa67..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/mpa/2022-07-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mq/2017-11-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mq/2017-11-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 73c22ae..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mq/2017-11-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mq/2017-11-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/mq/2017-11-27/paginators-1.json deleted file mode 100644 index 5516073..0000000 --- a/venv/Lib/site-packages/botocore/data/mq/2017-11-27/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListBrokers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BrokerSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mq/2017-11-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mq/2017-11-27/service-2.json.gz deleted file mode 100644 index 81d78b6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mq/2017-11-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index e982bdc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/examples-1.json b/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/paginators-1.json deleted file mode 100644 index ea50cac..0000000 --- a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "ListAssignmentsForHIT": { - "result_key": "Assignments", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListQualificationTypes": { - "result_key": "QualificationTypes", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListHITs": { - "result_key": "HITs", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListWorkerBlocks": { - "result_key": "WorkerBlocks", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListReviewableHITs": { - "result_key": "HITs", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListHITsForQualificationType": { - "result_key": "HITs", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListQualificationRequests": { - "result_key": "QualificationRequests", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - 
}, - "ListWorkersWithQualificationType": { - "result_key": "Qualifications", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListBonusPayments": { - "result_key": "BonusPayments", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/service-2.json.gz deleted file mode 100644 index 9ac0d9c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mturk/2017-01-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index c601c9d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/paginators-1.json deleted file mode 100644 index 27dae07..0000000 --- a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListTaskInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TaskInstances" - }, - "ListWorkflowRuns": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "WorkflowRuns" - }, - "ListWorkflowVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "WorkflowVersions" - }, - "ListWorkflows": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Workflows" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/service-2.json.gz deleted file mode 100644 index 50cd72e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/mwaa-serverless/2024-07-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index f8908c8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/examples-1.json b/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/paginators-1.json deleted file mode 100644 index 5e218e4..0000000 --- a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListEnvironments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Environments" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/service-2.json.gz deleted file mode 100644 index 28ad8cf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/mwaa/2020-07-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index e540d12..0000000 Binary files a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/paginators-1.json deleted file mode 100644 index 7ce2e6d..0000000 --- a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListGraphSnapshots": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "graphSnapshots" - }, - "ListGraphs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "graphs" - }, - "ListImportTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tasks" - }, - "ListPrivateGraphEndpoints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "privateGraphEndpoints" - }, - "ListExportTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/service-2.json.gz deleted file mode 100644 index c8a26de..0000000 Binary files a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/waiters-2.json b/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/waiters-2.json deleted file mode 100644 index b3b59fb..0000000 --- a/venv/Lib/site-packages/botocore/data/neptune-graph/2023-11-29/waiters-2.json +++ /dev/null @@ -1,229 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ExportTaskCancelled" : { - "description" : "Wait until Export Task is Cancelled", - "delay" : 60, - "maxAttempts" : 60, - "operation" : "GetExportTask", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status != 'CANCELLING' && status != 'CANCELLED'", - "state" : "failure", - "expected" : true - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "CANCELLED" - } ] - }, - "ExportTaskSuccessful" : { - "description" : "Wait until Export Task is Successful", - "delay" : 60, - "maxAttempts" : 480, - "operation" : "GetExportTask", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - 
"state" : "failure", - "expected" : "CANCELLING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "SUCCEEDED" - } ] - }, - "GraphAvailable" : { - "description" : "Wait until Graph is Available", - "delay" : 60, - "maxAttempts" : 480, - "operation" : "GetGraph", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "AVAILABLE" - } ] - }, - "GraphDeleted" : { - "description" : "Wait until Graph is Deleted", - "delay" : 60, - "maxAttempts" : 60, - "operation" : "GetGraph", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status != 'DELETING'", - "state" : "failure", - "expected" : true - }, { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - } ] - }, - "GraphSnapshotAvailable" : { - "description" : "Wait until GraphSnapshot is Available", - "delay" : 60, - "maxAttempts" : 120, - "operation" : "GetGraphSnapshot", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "AVAILABLE" - } ] - }, - "GraphSnapshotDeleted" : { - "description" : "Wait until GraphSnapshot is Deleted", - "delay" : 60, - "maxAttempts" : 60, - "operation" : "GetGraphSnapshot", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status != 'DELETING'", - "state" : "failure", - "expected" : true - }, { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - } ] - }, - "GraphStopped" : { - "description" : "Wait until Graph is Stopped", - "delay" : 20, - "maxAttempts" : 90, - "operation" : "GetGraph", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "STOPPED" - }, { - "matcher" : "path", - "argument" : "status != 'STOPPING'", - "state" : "failure", - "expected" : true - } ] - }, - "ImportTaskCancelled" : { - "description" : "Wait until Import Task is Cancelled", - "delay" : 60, - "maxAttempts" : 60, - "operation" : "GetImportTask", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status != 'CANCELLING' && status != 'CANCELLED'", - "state" : "failure", - "expected" : true - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "CANCELLED" - } ] - }, - "ImportTaskSuccessful" : { - "description" : "Wait until Import Task is Successful", - "delay" : 60, - "maxAttempts" : 480, - "operation" : "GetImportTask", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ROLLING_BACK" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : 
"success", - "expected" : "SUCCEEDED" - } ] - }, - "PrivateGraphEndpointAvailable" : { - "description" : "Wait until PrivateGraphEndpoint is Available", - "delay" : 10, - "maxAttempts" : 180, - "operation" : "GetPrivateGraphEndpoint", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "AVAILABLE" - } ] - }, - "PrivateGraphEndpointDeleted" : { - "description" : "Wait until PrivateGraphEndpoint is Deleted", - "delay" : 10, - "maxAttempts" : 180, - "operation" : "GetPrivateGraphEndpoint", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status != 'DELETING'", - "state" : "failure", - "expected" : true - }, { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index cfb467b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/examples-1.json b/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/paginators-1.json deleted file mode 100644 index 1de1303..0000000 --- a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "DescribeDBEngineVersions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBEngineVersions" - }, - "DescribeDBInstances": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBInstances" - }, - "DescribeDBParameterGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBParameterGroups" - }, - "DescribeDBParameters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Parameters" - }, - "DescribeDBSubnetGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBSubnetGroups" - }, - "DescribeEngineDefaultParameters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "EngineDefaults.Marker", - "result_key": "EngineDefaults.Parameters" - }, - "DescribeEventSubscriptions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "EventSubscriptionsList" - }, - "DescribeEvents": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Events" - }, - "DescribeOrderableDBInstanceOptions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "OrderableDBInstanceOptions" - }, - "DescribeDBClusterParameterGroups": { - "input_token": 
"Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterParameterGroups" - }, - "DescribeDBClusterParameters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Parameters" - }, - "DescribeDBClusterSnapshots": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterSnapshots" - }, - "DescribeDBClusters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusters" - }, - "DescribePendingMaintenanceActions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "PendingMaintenanceActions" - }, - "DescribeDBClusterEndpoints": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterEndpoints" - }, - "DescribeGlobalClusters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "GlobalClusters" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/service-2.json.gz deleted file mode 100644 index 26a9325..0000000 Binary files a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/service-2.sdk-extras.json b/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/service-2.sdk-extras.json deleted file mode 100644 index 85e8a10..0000000 --- a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/service-2.sdk-extras.json +++ /dev/null @@ -1,23 +0,0 @@ - { - "version": 1.0, - "merge": { - "shapes": { - "CopyDBClusterSnapshotMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "
The ID of the region that contains the snapshot to be copied." - } - } - }, - "CreateDBClusterMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "The ID of the region that contains the source for the db cluster.
    " - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/waiters-2.json b/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/waiters-2.json deleted file mode 100644 index e75f03b..0000000 --- a/venv/Lib/site-packages/botocore/data/neptune/2014-10-31/waiters-2.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "version": 2, - "waiters": { - "DBInstanceAvailable": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - }, - "DBInstanceDeleted": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "DBInstanceNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6c5a0b6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/service-2.json.gz deleted file mode 100644 index 76f47be..0000000 Binary files a/venv/Lib/site-packages/botocore/data/neptunedata/2023-08-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index 331a62c..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/examples-1.json b/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/paginators-1.json deleted file mode 100644 index 66d35f2..0000000 --- a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/paginators-1.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "pagination": { - "ListFirewallPolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FirewallPolicies" - }, - "ListFirewalls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Firewalls" - }, - "ListRuleGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RuleGroups" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - }, - "ListTLSInspectionConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TLSInspectionConfigurations" - }, - "GetAnalysisReportResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AnalysisReportResults" - }, - "ListAnalysisReports": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AnalysisReports" - }, - "ListFlowOperationResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Flows" - }, - "ListFlowOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FlowOperations" - }, - "ListVpcEndpointAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VpcEndpointAssociations" - }, - "ListProxies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Proxies" - }, - "ListProxyConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProxyConfigurations" - }, - "ListProxyRuleGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProxyRuleGroups" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/paginators-1.sdk-extras.json deleted file mode 100644 index a0bc4ff..0000000 --- a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/paginators-1.sdk-extras.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetAnalysisReportResults": { - "non_aggregate_keys": [ - "EndTime", - "ReportTime", - "Status", - "AnalysisType", - "StartTime" - ] - }, - "ListFlowOperationResults": { - "non_aggregate_keys": [ - 
"FirewallArn", - "FlowOperationId", - "AvailabilityZone", - "FlowOperationStatus", - "FlowRequestTimestamp", - "StatusMessage", - "VpcEndpointAssociationArn", - "VpcEndpointId" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/service-2.json.gz b/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/service-2.json.gz deleted file mode 100644 index e361209..0000000 Binary files a/venv/Lib/site-packages/botocore/data/network-firewall/2020-11-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index e5d09cc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/paginators-1.json deleted file mode 100644 index b41617d..0000000 --- a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "GetQueryResultsMonitorTopContributors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "topContributors" - }, - "GetQueryResultsWorkloadInsightsTopContributors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "topContributors" - }, - "GetQueryResultsWorkloadInsightsTopContributorsData": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "datapoints" - }, - "ListMonitors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "monitors" - }, - "ListScopes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "scopes" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/paginators-1.sdk-extras.json deleted file mode 100644 index 8fe3188..0000000 --- a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/paginators-1.sdk-extras.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetQueryResultsMonitorTopContributors": { - "non_aggregate_keys": [ - "unit" - ] - }, - "GetQueryResultsWorkloadInsightsTopContributorsData": { - "non_aggregate_keys": [ - "unit" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/service-2.json.gz deleted file mode 100644 index e06e834..0000000 Binary files a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/waiters-2.json b/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/networkflowmonitor/2023-04-19/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index 996579f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/examples-1.json b/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/paginators-1.json b/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/paginators-1.json deleted file mode 100644 index b93e201..0000000 --- a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/paginators-1.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "pagination": { - "DescribeGlobalNetworks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GlobalNetworks" - }, - "GetCustomerGatewayAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CustomerGatewayAssociations" - }, - "GetDevices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Devices" - }, - "GetLinkAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "LinkAssociations" - }, - "GetLinks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Links" - }, - "GetSites": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Sites" - }, - "GetTransitGatewayRegistrations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TransitGatewayRegistrations" - }, - "GetConnections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Connections" - }, - "GetTransitGatewayConnectPeerAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TransitGatewayConnectPeerAssociations" - }, - "GetNetworkResourceCounts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "NetworkResourceCounts" - }, - "GetNetworkResourceRelationships": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Relationships" - }, - "GetNetworkResources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "NetworkResources" - }, - "GetNetworkTelemetry": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "NetworkTelemetry" - }, - "GetConnectPeerAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConnectPeerAssociations" - }, - "GetCoreNetworkChangeSet": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CoreNetworkChanges" - }, - "ListAttachments": { - 
"input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Attachments" - }, - "ListConnectPeers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConnectPeers" - }, - "ListCoreNetworkPolicyVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CoreNetworkPolicyVersions" - }, - "ListCoreNetworks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CoreNetworks" - }, - "GetCoreNetworkChangeEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CoreNetworkChangeEvents" - }, - "ListPeerings": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Peerings" - }, - "ListAttachmentRoutingPolicyAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AttachmentRoutingPolicyAssociations" - }, - "ListCoreNetworkPrefixListAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PrefixListAssociations" - }, - "ListCoreNetworkRoutingInformation": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CoreNetworkRoutingInformation" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/service-2.json.gz deleted file mode 100644 index 6c1bc4b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/networkmanager/2019-07-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 60d5634..0000000 Binary files a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/paginators-1.json deleted file mode 100644 index d885a0f..0000000 --- a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListMonitors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "monitors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/service-2.json.gz deleted file mode 100644 index 3325d45..0000000 Binary files a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/networkmonitor/2023-08-01/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 628c7f7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/paginators-1.json deleted file mode 100644 index b167aaa..0000000 --- a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "ListChannels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "channels" - }, - "ListEventRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "eventRules" - }, - "ListNotificationConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "notificationConfigurations" - }, - "ListNotificationEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "notificationEvents" - }, - "ListNotificationHubs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "notificationHubs" - }, - "ListManagedNotificationChannelAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "channelAssociations" - }, - "ListManagedNotificationChildEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "managedNotificationChildEvents" - }, - "ListManagedNotificationConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "managedNotificationConfigurations" - }, - "ListManagedNotificationEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "managedNotificationEvents" - }, - "ListMemberAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "memberAccounts" - }, - "ListOrganizationalUnits": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "organizationalUnits" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/service-2.json.gz deleted file mode 100644 index 8497acd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/notifications/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 329a3e4..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/paginators-1.json deleted file mode 100644 index 39f96a3..0000000 --- a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListEmailContacts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "emailContacts" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/service-2.json.gz deleted file mode 100644 index c73d26c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/notificationscontacts/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index 543c0a2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/paginators-1.json deleted file mode 100644 index d50ded2..0000000 --- a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListActs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "actSummaries" - }, - "ListSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessionSummaries" - }, - "ListWorkflowDefinitions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowDefinitionSummaries" - }, - "ListWorkflowRuns": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workflowRunSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/service-2.json.gz deleted file mode 100644 index 2414cbe..0000000 Binary files a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/waiters-2.json b/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/nova-act/2025-08-22/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/oam/2022-06-10/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/oam/2022-06-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index bfa15ca..0000000 Binary files a/venv/Lib/site-packages/botocore/data/oam/2022-06-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/oam/2022-06-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/oam/2022-06-10/paginators-1.json deleted file mode 100644 index 3595f00..0000000 --- a/venv/Lib/site-packages/botocore/data/oam/2022-06-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListAttachedLinks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListLinks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - }, - "ListSinks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/oam/2022-06-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/oam/2022-06-10/service-2.json.gz deleted file mode 100644 index 7fb46ee..0000000 Binary files a/venv/Lib/site-packages/botocore/data/oam/2022-06-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index bbe09dd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/paginators-1.json deleted file mode 100644 index 6fe945f..0000000 --- a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListResourceTelemetry": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TelemetryConfigurations" - }, - "ListResourceTelemetryForOrganization": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TelemetryConfigurations" - }, - "ListTelemetryRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TelemetryRuleSummaries" - }, - "ListTelemetryRulesForOrganization": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TelemetryRuleSummaries" - }, - "ListCentralizationRulesForOrganization": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CentralizationRuleSummaries" - }, - "ListS3TableIntegrations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IntegrationSummaries" - }, - "ListTelemetryPipelines": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/service-2.json.gz deleted file mode 100644 index 1532818..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/observabilityadmin/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/odb/2024-08-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 865feff..0000000 Binary files a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/odb/2024-08-20/paginators-1.json deleted file mode 100644 index 6bda94b..0000000 --- a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "ListAutonomousVirtualMachines": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "autonomousVirtualMachines" - }, - "ListCloudAutonomousVmClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "cloudAutonomousVmClusters" - }, - "ListCloudExadataInfrastructures": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "cloudExadataInfrastructures" - }, - "ListCloudVmClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "cloudVmClusters" - }, - "ListDbNodes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dbNodes" - }, - "ListDbServers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dbServers" - }, - "ListDbSystemShapes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dbSystemShapes" - }, - "ListGiVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "giVersions" - }, - "ListOdbNetworks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "odbNetworks" - }, - "ListOdbPeeringConnections": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "odbPeeringConnections" - }, - "ListSystemVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "systemVersions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/odb/2024-08-20/service-2.json.gz deleted file mode 100644 index e537306..0000000 Binary files a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/waiters-2.json b/venv/Lib/site-packages/botocore/data/odb/2024-08-20/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/odb/2024-08-20/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { 
- } -} diff --git a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/omics/2022-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index ae16cf7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/omics/2022-11-28/paginators-1.json deleted file mode 100644 index 853a523..0000000 --- a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/paginators-1.json +++ /dev/null @@ -1,136 +0,0 @@ -{ - "pagination": { - "ListAnnotationImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "annotationImportJobs" - }, - "ListAnnotationStores": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "annotationStores" - }, - "ListReadSetActivationJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "activationJobs" - }, - "ListReadSetExportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "exportJobs" - }, - "ListReadSetImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "importJobs" - }, - "ListReadSets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "readSets" - }, - "ListReferenceImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "importJobs" - }, - "ListReferenceStores": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "referenceStores" - }, - "ListReferences": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "references" - }, - "ListRunGroups": { - "input_token": "startingToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListRunTasks": { - "input_token": "startingToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListRuns": { - "input_token": "startingToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListSequenceStores": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sequenceStores" - }, - "ListVariantImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "variantImportJobs" - }, - "ListVariantStores": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "variantStores" - }, - "ListWorkflows": { - "input_token": "startingToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListMultipartReadSetUploads": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "uploads" - }, - "ListReadSetUploadParts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "parts" - }, - "ListAnnotationStoreVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - 
"result_key": "annotationStoreVersions" - }, - "ListShares": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "shares" - }, - "ListRunCaches": { - "input_token": "startingToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListWorkflowVersions": { - "input_token": "startingToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/omics/2022-11-28/service-2.json.gz deleted file mode 100644 index e8a03f5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/waiters-2.json b/venv/Lib/site-packages/botocore/data/omics/2022-11-28/waiters-2.json deleted file mode 100644 index c11adf7..0000000 --- a/venv/Lib/site-packages/botocore/data/omics/2022-11-28/waiters-2.json +++ /dev/null @@ -1,573 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "AnnotationImportJobCreated" : { - "description" : "Wait until an annotation import is completed", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetAnnotationImportJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "SUBMITTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - } ] - }, - "AnnotationStoreCreated" : { - "description" : "Wait until an annotation store is created", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetAnnotationStore", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "AnnotationStoreDeleted" : { - "description" : "Wait until an annotation store is deleted.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetAnnotationStore", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "DELETING" - } ] - }, - "AnnotationStoreVersionCreated" : { - "description" : "Wait until an annotation store version is created", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetAnnotationStoreVersion", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : 
"FAILED" - } ] - }, - "AnnotationStoreVersionDeleted" : { - "description" : "Wait until an annotation store version is deleted.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetAnnotationStoreVersion", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "DELETING" - } ] - }, - "ReadSetActivationJobCompleted" : { - "description" : "Wait until a job is completed.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetReadSetActivationJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "SUBMITTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CANCELLING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "COMPLETED_WITH_FAILURES" - } ] - }, - "ReadSetExportJobCompleted" : { - "description" : "Wait until a job is completed.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetReadSetExportJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "SUBMITTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CANCELLING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "COMPLETED_WITH_FAILURES" - } ] - }, - "ReadSetImportJobCompleted" : { - "description" : "Wait until a job is completed.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetReadSetImportJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "SUBMITTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CANCELLING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "COMPLETED_WITH_FAILURES" - } ] - }, - "ReferenceImportJobCompleted" : { - "description" : "Wait until a job is completed.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetReferenceImportJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - 
"state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "SUBMITTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CANCELLING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "COMPLETED_WITH_FAILURES" - } ] - }, - "RunCompleted" : { - "description" : "Wait until a run is completed.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetRun", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "PENDING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "RUNNING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "STOPPING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "RunRunning" : { - "description" : "Wait until a run is running.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetRun", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "RUNNING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "PENDING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CANCELLED" - } ] - }, - "TaskCompleted" : { - "description" : "Wait until a task is completed.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetRunTask", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "PENDING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "RUNNING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "STOPPING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "TaskRunning" : { - "description" : "Wait until a task is running.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetRunTask", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "RUNNING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "PENDING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "STARTING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : 
"CANCELLED" - } ] - }, - "VariantImportJobCreated" : { - "description" : "Wait until variant import is completed", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetVariantImportJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "SUBMITTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "IN_PROGRESS" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "CANCELLED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "COMPLETED" - } ] - }, - "VariantStoreCreated" : { - "description" : "Wait until a variant store is created", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetVariantStore", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "VariantStoreDeleted" : { - "description" : "Wait until a variant store is deleted.", - "delay" : 30, - "maxAttempts" : 20, - "operation" : "GetVariantStore", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "DELETING" - } ] - }, - "WorkflowActive" : { - "description" : "Wait until a workflow is active.", - "delay" : 3, - "maxAttempts" : 10, - "operation" : "GetWorkflow", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "WorkflowVersionActive" : { - "description" : "Wait until a workflow version is active.", - "delay" : 3, - "maxAttempts" : 10, - "operation" : "GetWorkflowVersion", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9fe5115..0000000 Binary files a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/examples-1.json b/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/examples-1.json deleted file mode 100644 
index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/paginators-1.json deleted file mode 100644 index 1f134cc..0000000 --- a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListApplications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "ApplicationSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/service-2.json.gz deleted file mode 100644 index 56f9980..0000000 Binary files a/venv/Lib/site-packages/botocore/data/opensearch/2021-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8c2bad9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/service-2.json.gz deleted file mode 100644 index 03d6cf0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/opensearchserverless/2021-11-01/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5f28a62..0000000 Binary files a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/examples-1.json b/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/examples-1.json deleted file mode 100644 index 8e39290..0000000 --- a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/examples-1.json +++ /dev/null @@ -1,1409 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AcceptHandshake": [ - { - "input": { - "HandshakeId": "h-examplehandshakeid111" - }, - "output": { - "Handshake": { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - 
"ExpirationTimestamp": "20170228T1215Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "juan@example.com", - "Type": "EMAIL" - } - ], - "RequestedTimestamp": "20170214T1215Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@amazon.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Org Master Account" - }, - { - "Type": "ORGANIZATION_FEATURE_SET", - "Value": "ALL" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "ACCOUNT", - "Value": "222222222222" - } - ], - "State": "ACCEPTED" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Bill is the owner of an organization, and he invites Juan's account (222222222222) to join his organization. The following example shows Juan's account accepting the handshake and thus agreeing to the invitation.", - "id": "to-accept-a-handshake-from-another-account-1472500561150", - "title": "To accept a handshake from another account" - } - ], - "AttachPolicy": [ - { - "input": { - "PolicyId": "p-examplepolicyid111", - "TargetId": "ou-examplerootid111-exampleouid111" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to attach a service control policy (SCP) to an OU:\n", - "id": "to-attach-a-policy-to-an-ou", - "title": "To attach a policy to an OU" - }, - { - "input": { - "PolicyId": "p-examplepolicyid111", - "TargetId": "333333333333" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to attach a service control policy (SCP) to an account:\n", - "id": "to-attach-a-policy-to-an-account", - "title": "To attach a policy to an account" - } - ], - "CancelHandshake": [ - { - "input": { - "HandshakeId": "h-examplehandshakeid111" - }, - "output": { - "Handshake": { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "20170228T1215Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "susan@example.com", - "Type": "EMAIL" - } - ], - "RequestedTimestamp": "20170214T1215Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@example.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Master Account" - }, - { - "Type": "ORGANIZATION_FEATURE_SET", - "Value": "CONSOLIDATED_BILLING" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "ACCOUNT", - "Value": "222222222222" - }, - { - "Type": "NOTES", - "Value": "This is a request for Susan's account to join Bob's organization." - } - ], - "State": "CANCELED" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Bill previously sent an invitation to Susan's account to join his organization. He changes his mind and decides to cancel the invitation before Susan accepts it. 
The following example shows Bill's cancellation:\n", - "id": "to-cancel-a-handshake-sent-to-a-member-account-1472501320506", - "title": "To cancel a handshake sent to a member account" - } - ], - "CreateAccount": [ - { - "input": { - "AccountName": "Production Account", - "Email": "susan@example.com" - }, - "output": { - "CreateAccountStatus": { - "Id": "car-examplecreateaccountrequestid111", - "State": "IN_PROGRESS" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The owner of an organization creates a member account in the organization. The following example shows that when the organization owner creates the member account, the account is preconfigured with the name \"Production Account\" and an owner email address of susan@example.com. An IAM role is automatically created using the default name because the roleName parameter is not used. AWS Organizations sends Susan a \"Welcome to AWS\" email:\n\n", - "id": "to-create-a-new-account-that-is-automatically-part-of-the-organization-1472501463507", - "title": "To create a new account that is automatically part of the organization" - } - ], - "CreateOrganization": [ - { - "input": { - }, - "output": { - "Organization": { - "Arn": "arn:aws:organizations::111111111111:organization/o-exampleorgid", - "AvailablePolicyTypes": [ - { - "Status": "ENABLED", - "Type": "SERVICE_CONTROL_POLICY" - } - ], - "FeatureSet": "ALL", - "Id": "o-exampleorgid", - "MasterAccountArn": "arn:aws:organizations::111111111111:account/o-exampleorgid/111111111111", - "MasterAccountEmail": "bill@example.com", - "MasterAccountId": "111111111111" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Bill wants to create an organization using credentials from account 111111111111. The following example shows that the account becomes the master account in the new organization. 
Because he does not specify a feature set, the new organization defaults to all features enabled and service control policies enabled on the root:\n\n", - "id": "to-create-a-new-organization-with-all-features enabled", - "title": "To create a new organization with all features enabled" - }, - { - "input": { - "FeatureSet": "CONSOLIDATED_BILLING" - }, - "output": { - "Organization": { - "Arn": "arn:aws:organizations::111111111111:organization/o-exampleorgid", - "AvailablePolicyTypes": [ - - ], - "FeatureSet": "CONSOLIDATED_BILLING", - "Id": "o-exampleorgid", - "MasterAccountArn": "arn:aws:organizations::111111111111:account/o-exampleorgid/111111111111", - "MasterAccountEmail": "bill@example.com", - "MasterAccountId": "111111111111" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "In the following example, Bill creates an organization using credentials from account 111111111111, and configures the organization to support only the consolidated billing feature set:\n\n", - "id": "to-create-a-new-organization-with-consolidated-billing-features-only", - "title": "To create a new organization with consolidated billing features only" - } - ], - "CreateOrganizationalUnit": [ - { - "input": { - "Name": "AccountingOU", - "ParentId": "r-examplerootid111" - }, - "output": { - "OrganizationalUnit": { - "Arn": "arn:aws:organizations::111111111111:ou/o-exampleorgid/ou-examplerootid111-exampleouid111", - "Id": "ou-examplerootid111-exampleouid111", - "Name": "AccountingOU" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to create an OU that is named AccountingOU. The new OU is directly under the root.:\n\n", - "id": "to-create-a-new-organizational-unit", - "title": "To create a new organization unit" - } - ], - "CreatePolicy": [ - { - "input": { - "Content": "{\\\"Version\\\":\\\"2012-10-17\\\",\\\"Statement\\\":{\\\"Effect\\\":\\\"Allow\\\",\\\"Action\\\":\\\"s3:*\\\"}}", - "Description": "Enables admins of attached accounts to delegate all S3 permissions", - "Name": "AllowAllS3Actions", - "Type": "SERVICE_CONTROL_POLICY" - }, - "output": { - "Policy": { - "Content": "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\"}}", - "PolicySummary": { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid111", - "Description": "Allows delegation of all S3 actions", - "Name": "AllowAllS3Actions", - "Type": "SERVICE_CONTROL_POLICY" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to create a service control policy (SCP) that is named AllowAllS3Actions. The JSON string in the content parameter specifies the content in the policy. 
The parameter string is escaped with backslashes to ensure that the embedded double quotes in the JSON policy are treated as literals in the parameter, which itself is surrounded by double quotes:\n\n", - "id": "to-create-a-service-control-policy", - "title": "To create a service control policy" - } - ], - "DeclineHandshake": [ - { - "input": { - "HandshakeId": "h-examplehandshakeid111" - }, - "output": { - "Handshake": { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "2016-12-15T19:27:58Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "222222222222", - "Type": "ACCOUNT" - }, - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - } - ], - "RequestedTimestamp": "2016-11-30T19:27:58Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@example.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Master Account" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "ACCOUNT", - "Value": "222222222222" - }, - { - "Type": "NOTES", - "Value": "This is an invitation to Susan's account to join the Bill's organization." - } - ], - "State": "DECLINED" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows Susan declining an invitation to join Bill's organization. The DeclineHandshake operation returns a handshake object, showing that the state is now DECLINED:", - "id": "to-decline-a-handshake-sent-from-the-master-account-1472502666967", - "title": "To decline a handshake sent from the master account" - } - ], - "DeleteOrganizationalUnit": [ - { - "input": { - "OrganizationalUnitId": "ou-examplerootid111-exampleouid111" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to delete an OU. The example assumes that you previously removed all accounts and other OUs from the OU:\n\n", - "id": "to-delete-an-organizational-unit", - "title": "To delete an organization unit" - } - ], - "DeletePolicy": [ - { - "input": { - "PolicyId": "p-examplepolicyid111" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to delete a policy from an organization. The example assumes that you previously detached the policy from all entities:\n\n", - "id": "to-delete-a-policy", - "title": "To delete a policy" - } - ], - "DescribeAccount": [ - { - "input": { - "AccountId": "555555555555" - }, - "output": { - "Account": { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/555555555555", - "Email": "anika@example.com", - "Id": "555555555555", - "Name": "Beta Account" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows a user in the master account (111111111111) asking for details about account 555555555555:", - "id": "to-get-the-details-about-an-account-1472503166868", - "title": "To get the details about an account" - } - ], - "DescribeCreateAccountStatus": [ - { - "input": { - "CreateAccountRequestId": "car-exampleaccountcreationrequestid" - }, - "output": { - "CreateAccountStatus": { - "AccountId": "333333333333", - "Id": "car-exampleaccountcreationrequestid", - "State": "SUCCEEDED" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request the status about a previous request to create an account in an organization. 
This operation can be called only by a principal from the organization's master account. In the example, the specified \"createAccountRequestId\" comes from the response of the original call to \"CreateAccount\":", - "id": "to-get-information-about-a-request-to-create-an-account-1472503727223", - "title": "To get information about a request to create an account" - } - ], - "DescribeHandshake": [ - { - "input": { - "HandshakeId": "h-examplehandshakeid111" - }, - "output": { - "Handshake": { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "2016-11-30T17:24:58.046Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "333333333333", - "Type": "ACCOUNT" - } - ], - "RequestedTimestamp": "2016-11-30T17:24:58.046Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@example.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Master Account" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "ACCOUNT", - "Value": "333333333333" - } - ], - "State": "OPEN" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to request details about a handshake. The handshake ID comes either from the original call to \"InviteAccountToOrganization\", or from a call to \"ListHandshakesForAccount\" or \"ListHandshakesForOrganization\":", - "id": "to-get-information-about-a-handshake-1472503400505", - "title": "To get information about a handshake" - } - ], - "DescribeOrganization": [ - { - "output": { - "Organization": { - "Arn": "arn:aws:organizations::111111111111:organization/o-exampleorgid", - "AvailablePolicyTypes": [ - { - "Status": "ENABLED", - "Type": "SERVICE_CONTROL_POLICY" - } - ], - "FeatureSet": "ALL", - "Id": "o-exampleorgid", - "MasterAccountArn": "arn:aws:organizations::111111111111:account/o-exampleorgid/111111111111", - "MasterAccountEmail": "bill@example.com" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request information about the current user's organization:/n/n", - "id": "to-get-information-about-an-organization-1472503400505", - "title": "To get information about an organization" - } - ], - "DescribeOrganizationalUnit": [ - { - "input": { - "OrganizationalUnitId": "ou-examplerootid111-exampleouid111" - }, - "output": { - "OrganizationalUnit": { - "Arn": "arn:aws:organizations::111111111111:ou/o-exampleorgid/ou-examplerootid111-exampleouid111", - "Id": "ou-examplerootid111-exampleouid111", - "Name": "Accounting Group" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request details about an OU:/n/n", - "id": "to-get-information-about-an-organizational-unit", - "title": "To get information about an organizational unit" - } - ], - "DescribePolicy": [ - { - "input": { - "PolicyId": "p-examplepolicyid111" - }, - "output": { - "Policy": { - "Content": "{\\n \\\"Version\\\": \\\"2012-10-17\\\",\\n \\\"Statement\\\": [\\n {\\n \\\"Effect\\\": \\\"Allow\\\",\\n \\\"Action\\\": \\\"*\\\",\\n \\\"Resource\\\": \\\"*\\\"\\n }\\n ]\\n}", - "PolicySummary": { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid111", - "AwsManaged": false, - "Description": "Enables admins to delegate S3 permissions", - "Id": 
"p-examplepolicyid111", - "Name": "AllowAllS3Actions", - "Type": "SERVICE_CONTROL_POLICY" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request information about a policy:/n/n", - "id": "to-get-information-about-a-policy", - "title": "To get information about a policy" - } - ], - "DetachPolicy": [ - { - "input": { - "PolicyId": "p-examplepolicyid111", - "TargetId": "ou-examplerootid111-exampleouid111" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to detach a policy from an OU:/n/n", - "id": "to-detach-a-policy-from-a-root-ou-or-account", - "title": "To detach a policy from a root, OU, or account" - } - ], - "DisablePolicyType": [ - { - "input": { - "PolicyType": "SERVICE_CONTROL_POLICY", - "RootId": "r-examplerootid111" - }, - "output": { - "Root": { - "Arn": "arn:aws:organizations::111111111111:root/o-exampleorgid/r-examplerootid111", - "Id": "r-examplerootid111", - "Name": "Root", - "PolicyTypes": [ - - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to disable the service control policy (SCP) policy type in a root. The response shows that the PolicyTypes response element no longer includes SERVICE_CONTROL_POLICY:/n/n", - "id": "to-disable-a-policy-type-in-a-root", - "title": "To disable a policy type in a root" - } - ], - "EnableAllFeatures": [ - { - "input": { - }, - "output": { - "Handshake": { - "Action": "ENABLE_ALL_FEATURES", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/enable_all_features/h-examplehandshakeid111", - "ExpirationTimestamp": "2017-02-28T09:35:40.05Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - } - ], - "RequestedTimestamp": "2017-02-13T09:35:40.05Z", - "Resources": [ - { - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - } - ], - "State": "REQUESTED" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows the administrator asking all the invited accounts in the organization to approve enabling all features in the organization. AWS Organizations sends an email to the address that is registered with every invited member account asking the owner to approve the change by accepting the handshake that is sent. After all invited member accounts accept the handshake, the organization administrator can finalize the change to enable all features, and those with appropriate permissions can create policies and apply them to roots, OUs, and accounts:/n/n", - "id": "to-enable-all-features-in-an-organization", - "title": "To enable all features in an organization" - } - ], - "EnablePolicyType": [ - { - "input": { - "PolicyType": "SERVICE_CONTROL_POLICY", - "RootId": "r-examplerootid111" - }, - "output": { - "Root": { - "Arn": "arn:aws:organizations::111111111111:root/o-exampleorgid/r-examplerootid111", - "Id": "r-examplerootid111", - "Name": "Root", - "PolicyTypes": [ - { - "Status": "ENABLED", - "Type": "SERVICE_CONTROL_POLICY" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to enable the service control policy (SCP) policy type in a root. 
The output shows a root object with a PolicyTypes response element showing that SCPs are now enabled:/n/n", - "id": "to-enable-a-policy-type-in-a-root", - "title": "To enable a policy type in a root" - } - ], - "InviteAccountToOrganization": [ - { - "input": { - "Notes": "This is a request for Juan's account to join Bill's organization", - "Target": { - "Id": "juan@example.com", - "Type": "EMAIL" - } - }, - "output": { - "Handshake": { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "2017-02-16T09:36:05.02Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "juan@example.com", - "Type": "EMAIL" - } - ], - "RequestedTimestamp": "2017-02-01T09:36:05.02Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@amazon.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Org Master Account" - }, - { - "Type": "ORGANIZATION_FEATURE_SET", - "Value": "FULL" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "EMAIL", - "Value": "juan@example.com" - } - ], - "State": "OPEN" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows the admin of the master account owned by bill@example.com inviting the account owned by juan@example.com to join an organization.", - "id": "to-invite-an-account-to-join-an-organization-1472508594110", - "title": "To invite an account to join an organization" - } - ], - "LeaveOrganization": [ - { - "comments": { - "input": { - }, - "output": { - } - }, - "description": "TThe following example shows how to remove your member account from an organization:", - "id": "to-leave-an-organization-as-a-member-account-1472508784736", - "title": "To leave an organization as a member account" - } - ], - "ListAccounts": [ - { - "input": { - }, - "output": { - "Accounts": [ - { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/111111111111", - "Email": "bill@example.com", - "Id": "111111111111", - "JoinedMethod": "INVITED", - "JoinedTimestamp": "20161215T193015Z", - "Name": "Master Account", - "Status": "ACTIVE" - }, - { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/222222222222", - "Email": "alice@example.com", - "Id": "222222222222", - "JoinedMethod": "INVITED", - "JoinedTimestamp": "20161215T210221Z", - "Name": "Developer Account", - "Status": "ACTIVE" - }, - { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/333333333333", - "Email": "juan@example.com", - "Id": "333333333333", - "JoinedMethod": "INVITED", - "JoinedTimestamp": "20161215T210347Z", - "Name": "Test Account", - "Status": "ACTIVE" - }, - { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/444444444444", - "Email": "anika@example.com", - "Id": "444444444444", - "JoinedMethod": "INVITED", - "JoinedTimestamp": "20161215T210332Z", - "Name": "Production Account", - "Status": "ACTIVE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to request a list of the accounts in an organization:", - "id": "to-retrieve-a-list-of-all-of-the-accounts-in-an-organization-1472509590974", - "title": "To retrieve a list of all of the accounts in an organization" - } - ], - "ListAccountsForParent": [ - { - "input": { - "ParentId": "ou-examplerootid111-exampleouid111" - }, - "output": { - "Accounts": [ - { - 
"Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/333333333333", - "Email": "juan@example.com", - "Id": "333333333333", - "JoinedMethod": "INVITED", - "JoinedTimestamp": 1481835795.536, - "Name": "Development Account", - "Status": "ACTIVE" - }, - { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/444444444444", - "Email": "anika@example.com", - "Id": "444444444444", - "JoinedMethod": "INVITED", - "JoinedTimestamp": 1481835812.143, - "Name": "Test Account", - "Status": "ACTIVE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request a list of the accounts in an OU:/n/n", - "id": "to-retrieve-a-list-of-all-of-the-accounts-in-a-root-or-ou-1472509590974", - "title": "To retrieve a list of all of the accounts in a root or OU" - } - ], - "ListChildren": [ - { - "input": { - "ChildType": "ORGANIZATIONAL_UNIT", - "ParentId": "ou-examplerootid111-exampleouid111" - }, - "output": { - "Children": [ - { - "Id": "ou-examplerootid111-exampleouid111", - "Type": "ORGANIZATIONAL_UNIT" - }, - { - "Id": "ou-examplerootid111-exampleouid222", - "Type": "ORGANIZATIONAL_UNIT" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request a list of the child OUs in a parent root or OU:/n/n", - "id": "to-retrieve-a-list-of-all-of-the-child-accounts-and-OUs-in-a-parent-container", - "title": "To retrieve a list of all of the child accounts and OUs in a parent root or OU" - } - ], - "ListCreateAccountStatus": [ - { - "input": { - "States": [ - "SUCCEEDED" - ] - }, - "output": { - "CreateAccountStatuses": [ - { - "AccountId": "444444444444", - "AccountName": "Developer Test Account", - "CompletedTimestamp": "2017-01-15T13:45:23.6Z", - "Id": "car-exampleaccountcreationrequestid1", - "RequestedTimestamp": "2017-01-15T13:45:23.01Z", - "State": "SUCCEEDED" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows a user requesting a list of only the completed account creation requests made for the current organization:", - "id": "to-get-a-list-of-completed-account-creation-requests-made-in-the-organization", - "title": "To get a list of completed account creation requests made in the organization" - }, - { - "input": { - "States": [ - "IN_PROGRESS" - ] - }, - "output": { - "CreateAccountStatuses": [ - { - "AccountName": "Production Account", - "Id": "car-exampleaccountcreationrequestid2", - "RequestedTimestamp": "2017-01-15T13:45:23.01Z", - "State": "IN_PROGRESS" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows a user requesting a list of only the in-progress account creation requests made for the current organization:", - "id": "to-get-a-list-of-all-account-creation-requests-made-in-the-organization-1472509174532", - "title": "To get a list of all account creation requests made in the organization" - } - ], - "ListHandshakesForAccount": [ - { - "output": { - "Handshakes": [ - { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "2017-01-28T14:35:23.3Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "juan@example.com", - "Type": "EMAIL" - } - ], - "RequestedTimestamp": "2017-01-13T14:35:23.3Z", - "Resources": [ - { - "Resources": [ - { - "Type": 
"MASTER_EMAIL", - "Value": "bill@amazon.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Org Master Account" - }, - { - "Type": "ORGANIZATION_FEATURE_SET", - "Value": "FULL" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "EMAIL", - "Value": "juan@example.com" - } - ], - "State": "OPEN" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to get a list of handshakes that are associated with the account of the credentials used to call the operation:", - "id": "to-retrieve-a-list-of-the-handshakes-sent-to-an-account-1472510214747", - "title": "To retrieve a list of the handshakes sent to an account" - } - ], - "ListHandshakesForOrganization": [ - { - "output": { - "Handshakes": [ - { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "2017-01-28T14:35:23.3Z", - "Id": "h-examplehandshakeid111", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "juan@example.com", - "Type": "EMAIL" - } - ], - "RequestedTimestamp": "2017-01-13T14:35:23.3Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@amazon.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Org Master Account" - }, - { - "Type": "ORGANIZATION_FEATURE_SET", - "Value": "FULL" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "EMAIL", - "Value": "juan@example.com" - } - ], - "State": "OPEN" - }, - { - "Action": "INVITE", - "Arn": "arn:aws:organizations::111111111111:handshake/o-exampleorgid/invite/h-examplehandshakeid111", - "ExpirationTimestamp": "2017-01-28T14:35:23.3Z", - "Id": "h-examplehandshakeid222", - "Parties": [ - { - "Id": "o-exampleorgid", - "Type": "ORGANIZATION" - }, - { - "Id": "anika@example.com", - "Type": "EMAIL" - } - ], - "RequestedTimestamp": "2017-01-13T14:35:23.3Z", - "Resources": [ - { - "Resources": [ - { - "Type": "MASTER_EMAIL", - "Value": "bill@example.com" - }, - { - "Type": "MASTER_NAME", - "Value": "Master Account" - } - ], - "Type": "ORGANIZATION", - "Value": "o-exampleorgid" - }, - { - "Type": "EMAIL", - "Value": "anika@example.com" - }, - { - "Type": "NOTES", - "Value": "This is an invitation to Anika's account to join Bill's organization." 
- } - ], - "State": "ACCEPTED" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to get a list of handshakes associated with the current organization:", - "id": "to-retrieve-a-list-of-the-handshakes-associated-with-an-organization-1472511206653", - "title": "To retrieve a list of the handshakes associated with an organization" - } - ], - "ListOrganizationalUnitsForParent": [ - { - "input": { - "ParentId": "r-examplerootid111" - }, - "output": { - "OrganizationalUnits": [ - { - "Arn": "arn:aws:organizations::111111111111:ou/o-exampleorgid/ou-examlerootid111-exampleouid111", - "Id": "ou-examplerootid111-exampleouid111", - "Name": "Development" - }, - { - "Arn": "arn:aws:organizations::111111111111:ou/o-exampleorgid/ou-examlerootid111-exampleouid222", - "Id": "ou-examplerootid111-exampleouid222", - "Name": "Production" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to get a list of OUs in a specified root:/n/n", - "id": "to-retrieve-a-list-of-all-of-the-OUs-in-a-parent-container", - "title": "To retrieve a list of all of the child OUs in a parent root or OU" - } - ], - "ListParents": [ - { - "input": { - "ChildId": "444444444444" - }, - "output": { - "Parents": [ - { - "Id": "ou-examplerootid111-exampleouid111", - "Type": "ORGANIZATIONAL_UNIT" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to list the root or OUs that contain account 444444444444:/n/n", - "id": "to-retrieve-a-list-of-all-of-the-parents-of-a-child-ou-or-account", - "title": "To retrieve a list of all of the parents of a child OU or account" - } - ], - "ListPolicies": [ - { - "input": { - "Filter": "SERVICE_CONTROL_POLICY" - }, - "output": { - "Policies": [ - { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid111", - "AwsManaged": false, - "Description": "Enables account admins to delegate permissions for any S3 actions to users and roles in their accounts.", - "Id": "p-examplepolicyid111", - "Name": "AllowAllS3Actions", - "Type": "SERVICE_CONTROL_POLICY" - }, - { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid222", - "AwsManaged": false, - "Description": "Enables account admins to delegate permissions for any EC2 actions to users and roles in their accounts.", - "Id": "p-examplepolicyid222", - "Name": "AllowAllEC2Actions", - "Type": "SERVICE_CONTROL_POLICY" - }, - { - "Arn": "arn:aws:organizations::aws:policy/service_control_policy/p-FullAWSAccess", - "AwsManaged": true, - "Description": "Allows access to every operation", - "Id": "p-FullAWSAccess", - "Name": "FullAWSAccess", - "Type": "SERVICE_CONTROL_POLICY" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to get a list of service control policies (SCPs):/n/n", - "id": "to-retrieve-a-list-of--policies-in-the-organization", - "title": "To retrieve a list policies in the organization" - } - ], - "ListPoliciesForTarget": [ - { - "input": { - "Filter": "SERVICE_CONTROL_POLICY", - "TargetId": "444444444444" - }, - "output": { - "Policies": [ - { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid222", - "AwsManaged": false, - "Description": "Enables account admins to delegate permissions for any EC2 actions to users and 
roles in their accounts.", - "Id": "p-examplepolicyid222", - "Name": "AllowAllEC2Actions", - "Type": "SERVICE_CONTROL_POLICY" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to get a list of all service control policies (SCPs) of the type specified by the Filter parameter, that are directly attached to an account. The returned list does not include policies that apply to the account because of inheritance from its location in an OU hierarchy:/n/n", - "id": "to-retrieve-a-list-of-policies-attached-to-a-root-ou-or-account", - "title": "To retrieve a list policies attached to a root, OU, or account" - } - ], - "ListRoots": [ - { - "input": { - }, - "output": { - "Roots": [ - { - "Arn": "arn:aws:organizations::111111111111:root/o-exampleorgid/r-examplerootid111", - "Id": "r-examplerootid111", - "Name": "Root", - "PolicyTypes": [ - { - "Status": "ENABLED", - "Type": "SERVICE_CONTROL_POLICY" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to get the list of the roots in the current organization:/n/n", - "id": "to-retrieve-a-list-of-roots-in-the-organization", - "title": "To retrieve a list of roots in the organization" - } - ], - "ListTargetsForPolicy": [ - { - "input": { - "PolicyId": "p-FullAWSAccess" - }, - "output": { - "Targets": [ - { - "Arn": "arn:aws:organizations::111111111111:root/o-exampleorgid/r-examplerootid111", - "Name": "Root", - "TargetId": "r-examplerootid111", - "Type": "ROOT" - }, - { - "Arn": "arn:aws:organizations::111111111111:account/o-exampleorgid/333333333333;", - "Name": "Developer Test Account", - "TargetId": "333333333333", - "Type": "ACCOUNT" - }, - { - "Arn": "arn:aws:organizations::111111111111:ou/o-exampleorgid/ou-examplerootid111-exampleouid111", - "Name": "Accounting", - "TargetId": "ou-examplerootid111-exampleouid111", - "Type": "ORGANIZATIONAL_UNIT" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to get the list of roots, OUs, and accounts to which the specified policy is attached:/n/n", - "id": "to-retrieve-a-list-of-roots-ous-and-accounts-to-which-a-policy-is-attached", - "title": "To retrieve a list of roots, OUs, and accounts to which a policy is attached" - } - ], - "MoveAccount": [ - { - "input": { - "AccountId": "333333333333", - "DestinationParentId": "ou-examplerootid111-exampleouid111", - "SourceParentId": "r-examplerootid111" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to move a member account from the root to an OU:/n/n", - "id": "to-move-an-ou-or-account-to-another-ou-or-the-root", - "title": "To move an OU or account to another OU or the root" - } - ], - "RemoveAccountFromOrganization": [ - { - "input": { - "AccountId": "333333333333" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to remove an account from an organization:", - "id": "to-remove-an-account-from-an-organization-as-the-master-account", - "title": "To remove an account from an organization as the master account" - } - ], - "UpdateOrganizationalUnit": [ - { - "input": { - "Name": "AccountingOU", - "OrganizationalUnitId": "ou-examplerootid111-exampleouid111" - }, - "output": { - "OrganizationalUnit": { - "Arn": "arn:aws:organizations::111111111111:ou/o-exampleorgid/ou-examplerootid111-exampleouid111", - "Id": 
"ou-examplerootid111-exampleouid111", - "Name": "AccountingOU" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to rename an OU. The output confirms the new name:/n/n", - "id": "to-rename-an-organizational-unit", - "title": "To rename an organizational unit" - } - ], - "UpdatePolicy": [ - { - "input": { - "Description": "This description replaces the original.", - "Name": "Renamed-Policy", - "PolicyId": "p-examplepolicyid111" - }, - "output": { - "Policy": { - "Content": "{ \"Version\": \"2012-10-17\", \"Statement\": { \"Effect\": \"Allow\", \"Action\": \"ec2:*\", \"Resource\": \"*\" } }", - "PolicySummary": { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid111", - "AwsManaged": false, - "Description": "This description replaces the original.", - "Id": "p-examplepolicyid111", - "Name": "Renamed-Policy", - "Type": "SERVICE_CONTROL_POLICY" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to rename a policy and give it a new description and new content. The output confirms the new name and description text:/n/n", - "id": "to-update-the-details-of-a-policy", - "title": "To update the details of a policy" - }, - { - "input": { - "Content": "{ \\\"Version\\\": \\\"2012-10-17\\\", \\\"Statement\\\": {\\\"Effect\\\": \\\"Allow\\\", \\\"Action\\\": \\\"s3:*\\\", \\\"Resource\\\": \\\"*\\\" } }", - "PolicyId": "p-examplepolicyid111" - }, - "output": { - "Policy": { - "Content": "{ \\\"Version\\\": \\\"2012-10-17\\\", \\\"Statement\\\": { \\\"Effect\\\": \\\"Allow\\\", \\\"Action\\\": \\\"s3:*\\\", \\\"Resource\\\": \\\"*\\\" } }", - "PolicySummary": { - "Arn": "arn:aws:organizations::111111111111:policy/o-exampleorgid/service_control_policy/p-examplepolicyid111", - "AwsManaged": false, - "Description": "This description replaces the original.", - "Id": "p-examplepolicyid111", - "Name": "Renamed-Policy", - "Type": "SERVICE_CONTROL_POLICY" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to replace the JSON text of the SCP from the preceding example with a new JSON policy text string that allows S3 actions instead of EC2 actions:/n/n", - "id": "to-update-the-content-of-a-policy", - "title": "To update the content of a policy" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/paginators-1.json deleted file mode 100644 index 7d04f56..0000000 --- a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/paginators-1.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "pagination": { - "ListAccounts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Accounts" - }, - "ListAccountsForParent": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Accounts" - }, - "ListChildren": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Children" - }, - "ListCreateAccountStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CreateAccountStatuses" - }, - "ListHandshakesForAccount": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Handshakes" - }, 
- "ListHandshakesForOrganization": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Handshakes" - }, - "ListOrganizationalUnitsForParent": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OrganizationalUnits" - }, - "ListParents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Parents" - }, - "ListPolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Policies" - }, - "ListPoliciesForTarget": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Policies" - }, - "ListRoots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Roots" - }, - "ListTargetsForPolicy": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Targets" - }, - "ListAWSServiceAccessForOrganization": { - "result_key": "EnabledServicePrincipals", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListDelegatedAdministrators": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DelegatedAdministrators" - }, - "ListDelegatedServicesForAccount": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DelegatedServices" - }, - "ListAccountsWithInvalidEffectivePolicy": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Accounts" - }, - "ListEffectivePolicyValidationErrors": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EffectivePolicyValidationErrors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/paginators-1.sdk-extras.json deleted file mode 100644 index 16e6eac..0000000 --- a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/paginators-1.sdk-extras.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListAccountsWithInvalidEffectivePolicy": { - "non_aggregate_keys": [ - "PolicyType" - ] - }, - "ListEffectivePolicyValidationErrors": { - "non_aggregate_keys": [ - "PolicyType", - "AccountId", - "EvaluationTimestamp", - "Path" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/service-2.json.gz deleted file mode 100644 index 83d68ac..0000000 Binary files a/venv/Lib/site-packages/botocore/data/organizations/2016-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/osis/2022-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/osis/2022-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index b07a869..0000000 Binary files a/venv/Lib/site-packages/botocore/data/osis/2022-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/osis/2022-01-01/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/osis/2022-01-01/paginators-1.json deleted file mode 100644 index ace2992..0000000 --- a/venv/Lib/site-packages/botocore/data/osis/2022-01-01/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListPipelineEndpointConnections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineEndpointConnections" - }, - "ListPipelineEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineEndpoints" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/osis/2022-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/osis/2022-01-01/service-2.json.gz deleted file mode 100644 index e031ac3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/osis/2022-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8924098..0000000 Binary files a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/examples-1.json b/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/paginators-1.json deleted file mode 100644 index 8ccc34f..0000000 --- a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "GetOutpostInstanceTypes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceTypes" - }, - "ListAssets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Assets" - }, - "ListCatalogItems": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CatalogItems" - }, - "ListOrders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Orders" - }, - "ListOutposts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Outposts" - }, - "ListSites": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Sites" - }, - "GetOutpostSupportedInstanceTypes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceTypes" - }, - "ListCapacityTasks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CapacityTasks" - }, - "ListAssetInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AssetInstances" - }, - "ListBlockingInstancesForCapacityTask": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BlockingInstances" - }, - "GetOutpostBillingInformation": { - "input_token": "NextToken", - 
"output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Subscriptions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/paginators-1.sdk-extras.json deleted file mode 100644 index c020d0a..0000000 --- a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/paginators-1.sdk-extras.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetOutpostInstanceTypes": { - "non_aggregate_keys": [ - "OutpostArn", - "OutpostId" - ] - }, - "GetOutpostBillingInformation": { - "non_aggregate_keys": [ - "ContractEndDate" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/service-2.json.gz deleted file mode 100644 index 3cf554d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/outposts/2019-12-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/endpoint-rule-set-1.json.gz deleted file mode 100644 index 710f0b9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/examples-1.json b/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/paginators-1.json b/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/service-2.json.gz b/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/service-2.json.gz deleted file mode 100644 index d3507af..0000000 Binary files a/venv/Lib/site-packages/botocore/data/panorama/2019-07-24/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partitions.json b/venv/Lib/site-packages/botocore/data/partitions.json deleted file mode 100644 index c789264..0000000 --- a/venv/Lib/site-packages/botocore/data/partitions.json +++ /dev/null @@ -1,267 +0,0 @@ -{ - "partitions" : [ { - "id" : "aws", - "outputs" : { - "dnsSuffix" : "amazonaws.com", - "dualStackDnsSuffix" : "api.aws", - "implicitGlobalRegion" : "us-east-1", - "name" : "aws", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", - "regions" : { - "af-south-1" : { - "description" : "Africa (Cape Town)" - }, - "ap-east-1" : { - "description" : "Asia Pacific (Hong Kong)" - }, - "ap-east-2" : { - "description" : "Asia Pacific (Taipei)" - }, - "ap-northeast-1" : { - "description" : "Asia Pacific (Tokyo)" - }, - "ap-northeast-2" : { - "description" : "Asia Pacific (Seoul)" - }, - "ap-northeast-3" : { - "description" : "Asia Pacific (Osaka)" - }, - "ap-south-1" : { - "description" : "Asia Pacific (Mumbai)" - }, - "ap-south-2" : { - "description" : "Asia Pacific (Hyderabad)" - }, - 
"ap-southeast-1" : { - "description" : "Asia Pacific (Singapore)" - }, - "ap-southeast-2" : { - "description" : "Asia Pacific (Sydney)" - }, - "ap-southeast-3" : { - "description" : "Asia Pacific (Jakarta)" - }, - "ap-southeast-4" : { - "description" : "Asia Pacific (Melbourne)" - }, - "ap-southeast-5" : { - "description" : "Asia Pacific (Malaysia)" - }, - "ap-southeast-6" : { - "description" : "Asia Pacific (New Zealand)" - }, - "ap-southeast-7" : { - "description" : "Asia Pacific (Thailand)" - }, - "aws-global" : { - "description" : "aws global region" - }, - "ca-central-1" : { - "description" : "Canada (Central)" - }, - "ca-west-1" : { - "description" : "Canada West (Calgary)" - }, - "eu-central-1" : { - "description" : "Europe (Frankfurt)" - }, - "eu-central-2" : { - "description" : "Europe (Zurich)" - }, - "eu-north-1" : { - "description" : "Europe (Stockholm)" - }, - "eu-south-1" : { - "description" : "Europe (Milan)" - }, - "eu-south-2" : { - "description" : "Europe (Spain)" - }, - "eu-west-1" : { - "description" : "Europe (Ireland)" - }, - "eu-west-2" : { - "description" : "Europe (London)" - }, - "eu-west-3" : { - "description" : "Europe (Paris)" - }, - "il-central-1" : { - "description" : "Israel (Tel Aviv)" - }, - "me-central-1" : { - "description" : "Middle East (UAE)" - }, - "me-south-1" : { - "description" : "Middle East (Bahrain)" - }, - "mx-central-1" : { - "description" : "Mexico (Central)" - }, - "sa-east-1" : { - "description" : "South America (Sao Paulo)" - }, - "us-east-1" : { - "description" : "US East (N. Virginia)" - }, - "us-east-2" : { - "description" : "US East (Ohio)" - }, - "us-west-1" : { - "description" : "US West (N. California)" - }, - "us-west-2" : { - "description" : "US West (Oregon)" - } - } - }, { - "id" : "aws-cn", - "outputs" : { - "dnsSuffix" : "amazonaws.com.cn", - "dualStackDnsSuffix" : "api.amazonwebservices.com.cn", - "implicitGlobalRegion" : "cn-northwest-1", - "name" : "aws-cn", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^cn\\-\\w+\\-\\d+$", - "regions" : { - "aws-cn-global" : { - "description" : "aws-cn global region" - }, - "cn-north-1" : { - "description" : "China (Beijing)" - }, - "cn-northwest-1" : { - "description" : "China (Ningxia)" - } - } - }, { - "id" : "aws-eusc", - "outputs" : { - "dnsSuffix" : "amazonaws.eu", - "dualStackDnsSuffix" : "api.amazonwebservices.eu", - "implicitGlobalRegion" : "eusc-de-east-1", - "name" : "aws-eusc", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^eusc\\-(de)\\-\\w+\\-\\d+$", - "regions" : { - "eusc-de-east-1" : { - "description" : "EU (Germany)" - } - } - }, { - "id" : "aws-iso", - "outputs" : { - "dnsSuffix" : "c2s.ic.gov", - "dualStackDnsSuffix" : "api.aws.ic.gov", - "implicitGlobalRegion" : "us-iso-east-1", - "name" : "aws-iso", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^us\\-iso\\-\\w+\\-\\d+$", - "regions" : { - "aws-iso-global" : { - "description" : "aws-iso global region" - }, - "us-iso-east-1" : { - "description" : "US ISO East" - }, - "us-iso-west-1" : { - "description" : "US ISO WEST" - } - } - }, { - "id" : "aws-iso-b", - "outputs" : { - "dnsSuffix" : "sc2s.sgov.gov", - "dualStackDnsSuffix" : "api.aws.scloud", - "implicitGlobalRegion" : "us-isob-east-1", - "name" : "aws-iso-b", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^us\\-isob\\-\\w+\\-\\d+$", - "regions" : { - "aws-iso-b-global" : { - "description" : "aws-iso-b global region" - }, - "us-isob-east-1" : 
{ - "description" : "US ISOB East (Ohio)" - }, - "us-isob-west-1" : { - "description" : "US ISOB West" - } - } - }, { - "id" : "aws-iso-e", - "outputs" : { - "dnsSuffix" : "cloud.adc-e.uk", - "dualStackDnsSuffix" : "api.cloud-aws.adc-e.uk", - "implicitGlobalRegion" : "eu-isoe-west-1", - "name" : "aws-iso-e", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^eu\\-isoe\\-\\w+\\-\\d+$", - "regions" : { - "aws-iso-e-global" : { - "description" : "aws-iso-e global region" - }, - "eu-isoe-west-1" : { - "description" : "EU ISOE West" - } - } - }, { - "id" : "aws-iso-f", - "outputs" : { - "dnsSuffix" : "csp.hci.ic.gov", - "dualStackDnsSuffix" : "api.aws.hci.ic.gov", - "implicitGlobalRegion" : "us-isof-south-1", - "name" : "aws-iso-f", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^us\\-isof\\-\\w+\\-\\d+$", - "regions" : { - "aws-iso-f-global" : { - "description" : "aws-iso-f global region" - }, - "us-isof-east-1" : { - "description" : "US ISOF EAST" - }, - "us-isof-south-1" : { - "description" : "US ISOF SOUTH" - } - } - }, { - "id" : "aws-us-gov", - "outputs" : { - "dnsSuffix" : "amazonaws.com", - "dualStackDnsSuffix" : "api.aws", - "implicitGlobalRegion" : "us-gov-west-1", - "name" : "aws-us-gov", - "supportsDualStack" : true, - "supportsFIPS" : true - }, - "regionRegex" : "^us\\-gov\\-\\w+\\-\\d+$", - "regions" : { - "aws-us-gov-global" : { - "description" : "aws-us-gov global region" - }, - "us-gov-east-1" : { - "description" : "AWS GovCloud (US-East)" - }, - "us-gov-west-1" : { - "description" : "AWS GovCloud (US-West)" - } - } - } ], - "version" : "1.1" -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/endpoint-rule-set-1.json.gz deleted file mode 100644 index b450037..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/paginators-1.json b/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/paginators-1.json deleted file mode 100644 index aa4ba4c..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/paginators-1.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "pagination": { - "ListConnectionInvitations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConnectionInvitationSummaries" - }, - "ListConnections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConnectionSummaries" - }, - "ListPartners": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "PartnerSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/service-2.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/service-2.json.gz deleted file mode 100644 index ab1849d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/waiters-2.json b/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- 
a/venv/Lib/site-packages/botocore/data/partnercentral-account/2025-04-04/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 693d46d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/paginators-1.json deleted file mode 100644 index 348398a..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListBenefitAllocations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BenefitAllocationSummaries" - }, - "ListBenefitApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BenefitApplicationSummaries" - }, - "ListBenefits": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "BenefitSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/service-2.json.gz deleted file mode 100644 index 0e25c4a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-benefits/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/endpoint-rule-set-1.json.gz deleted file mode 100644 index 329e1a9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/paginators-1.json b/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/paginators-1.json deleted file mode 100644 index 1e923f4..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListChannelHandshakes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListProgramManagementAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListRelationships": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/service-2.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/service-2.json.gz deleted file mode 100644 index 5498000..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/waiters-2.json b/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-channel/2024-03-18/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5ba945f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/paginators-1.json deleted file mode 100644 index b7373ff..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "ListEngagementInvitations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EngagementInvitationSummaries" - }, - "ListOpportunities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OpportunitySummaries" - }, - "ListSolutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SolutionSummaries" - }, - "ListEngagementByAcceptingInvitationTasks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TaskSummaries" - }, - "ListEngagementFromOpportunityTasks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TaskSummaries" - }, - "ListEngagementMembers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EngagementMemberList" - }, - "ListEngagementResourceAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EngagementResourceAssociationSummaries" - }, - "ListEngagements": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EngagementSummaryList" - }, - "ListResourceSnapshotJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceSnapshotJobSummaries" - }, - "ListResourceSnapshots": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceSnapshotSummaries" - }, - "ListOpportunityFromEngagementTasks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TaskSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/service-2.json.gz deleted file mode 100644 index 00dee42..0000000 Binary files a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/partnercentral-selling/2022-07-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index c6c041d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/service-2.json.gz deleted file mode 100644 index 53636cd3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/waiters-2.json b/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/payment-cryptography-data/2022-02-03/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 239449e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/paginators-1.json deleted file mode 100644 index 02af499..0000000 --- a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListAliases": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Aliases" - }, - "ListKeys": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Keys" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/service-2.json.gz deleted file mode 100644 index 19b9a91..0000000 Binary files a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/waiters-2.json b/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/payment-cryptography/2021-09-14/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index fe33b60..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/paginators-1.json deleted file mode 100644 index 8923477..0000000 --- a/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListConnectors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Connectors" - }, - "ListDirectoryRegistrations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DirectoryRegistrations" - }, - "ListServicePrincipalNames": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServicePrincipalNames" - }, - "ListTemplateGroupAccessControlEntries": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AccessControlEntries" - }, - "ListTemplates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Templates" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/service-2.json.gz deleted file mode 100644 index 204997c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pca-connector-ad/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index c8ca4d7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/paginators-1.json deleted file mode 100644 index 7a913db..0000000 --- a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListChallengeMetadata": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Challenges" - }, - "ListConnectors": { - "input_token": "NextToken", - "output_token": 
"NextToken", - "limit_key": "MaxResults", - "result_key": "Connectors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/service-2.json.gz deleted file mode 100644 index 5e54e83..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/pca-connector-scep/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0429576..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/paginators-1.json deleted file mode 100644 index 555a266..0000000 --- a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "clusters" - }, - "ListComputeNodeGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "computeNodeGroups" - }, - "ListQueues": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "queues" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/service-2.json.gz deleted file mode 100644 index 01645be..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/pcs/2023-02-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index a9f509d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/examples-1.json b/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/paginators-1.json 
deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/service-2.json.gz deleted file mode 100644 index 4644568..0000000 Binary files a/venv/Lib/site-packages/botocore/data/personalize-events/2018-03-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6bef30c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/examples-1.json b/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/service-2.json.gz deleted file mode 100644 index 7361898..0000000 Binary files a/venv/Lib/site-packages/botocore/data/personalize-runtime/2018-05-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0f617ec..0000000 Binary files a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/examples-1.json b/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/paginators-1.json deleted file mode 100644 index ea43852..0000000 --- a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "ListCampaigns": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "campaigns" - }, - "ListDatasetGroups": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "datasetGroups" - }, - "ListDatasetImportJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - 
"output_token": "nextToken", - "result_key": "datasetImportJobs" - }, - "ListDatasets": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "datasets" - }, - "ListEventTrackers": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "eventTrackers" - }, - "ListRecipes": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "recipes" - }, - "ListSchemas": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "schemas" - }, - "ListSolutionVersions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "solutionVersions" - }, - "ListSolutions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "solutions" - }, - "ListBatchInferenceJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "batchInferenceJobs" - }, - "ListDatasetExportJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "datasetExportJobs" - }, - "ListFilters": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "Filters" - }, - "ListBatchSegmentJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "batchSegmentJobs" - }, - "ListRecommenders": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "recommenders" - }, - "ListMetricAttributionMetrics": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "metrics" - }, - "ListMetricAttributions": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "metricAttributions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/service-2.json.gz deleted file mode 100644 index a834a37..0000000 Binary files a/venv/Lib/site-packages/botocore/data/personalize/2018-05-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pi/2018-02-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 021d873..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/examples-1.json b/venv/Lib/site-packages/botocore/data/pi/2018-02-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/pi/2018-02-27/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pi/2018-02-27/service-2.json.gz deleted file 
mode 100644 index e351dec..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pi/2018-02-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9749d50..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/examples-1.json b/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/paginators-1.json deleted file mode 100644 index f2693b1..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "GetDedicatedIps": { - "input_token": "NextToken", - "limit_key": "PageSize", - "output_token": "NextToken", - "result_key": "DedicatedIps" - }, - "ListConfigurationSets": { - "input_token": "NextToken", - "limit_key": "PageSize", - "output_token": "NextToken", - "result_key": "ConfigurationSets" - }, - "ListDedicatedIpPools": { - "input_token": "NextToken", - "limit_key": "PageSize", - "output_token": "NextToken", - "result_key": "DedicatedIpPools" - }, - "ListDeliverabilityTestReports": { - "input_token": "NextToken", - "limit_key": "PageSize", - "output_token": "NextToken", - "result_key": "DeliverabilityTestReports" - }, - "ListEmailIdentities": { - "input_token": "NextToken", - "limit_key": "PageSize", - "output_token": "NextToken", - "result_key": "EmailIdentities" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/service-2.json.gz deleted file mode 100644 index ef17954..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint-email/2018-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index 794728f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/examples-1.json b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.json deleted file mode 100644 index 8c4ea30..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.json +++ /dev/null @@ -1,136 +0,0 
@@ -{ - "pagination": { - "DescribeAccountAttributes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AccountAttributes" - }, - "DescribeAccountLimits": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AccountLimits" - }, - "DescribeConfigurationSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConfigurationSets" - }, - "DescribeKeywords": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Keywords" - }, - "DescribeOptOutLists": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OptOutLists" - }, - "DescribeOptedOutNumbers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OptedOutNumbers" - }, - "DescribePhoneNumbers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PhoneNumbers" - }, - "DescribePools": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Pools" - }, - "DescribeSenderIds": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SenderIds" - }, - "DescribeSpendLimits": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpendLimits" - }, - "ListPoolOriginationIdentities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OriginationIdentities" - }, - "DescribeRegistrationAttachments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationAttachments" - }, - "DescribeRegistrationFieldDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationFieldDefinitions" - }, - "DescribeRegistrationFieldValues": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationFieldValues" - }, - "DescribeRegistrationSectionDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationSectionDefinitions" - }, - "DescribeRegistrationTypeDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationTypeDefinitions" - }, - "DescribeRegistrationVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationVersions" - }, - "DescribeRegistrations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Registrations" - }, - "DescribeVerifiedDestinationNumbers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VerifiedDestinationNumbers" - }, - "ListRegistrationAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RegistrationAssociations" - }, - "DescribeProtectConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProtectConfigurations" - }, - "ListProtectConfigurationRuleSetNumberOverrides": { - 
"input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RuleSetNumberOverrides" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.sdk-extras.json deleted file mode 100644 index 581feaa..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/paginators-1.sdk-extras.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "DescribeKeywords": { - "non_aggregate_keys": [ - "OriginationIdentity", - "OriginationIdentityArn" - ] - }, - "DescribeRegistrationFieldDefinitions": { - "non_aggregate_keys": [ - "RegistrationType" - ] - }, - "DescribeRegistrationFieldValues": { - "non_aggregate_keys": [ - "RegistrationId", - "RegistrationArn", - "VersionNumber" - ] - }, - "DescribeRegistrationSectionDefinitions": { - "non_aggregate_keys": [ - "RegistrationType" - ] - }, - "DescribeRegistrationVersions": { - "non_aggregate_keys": [ - "RegistrationId", - "RegistrationArn" - ] - }, - "DescribeOptedOutNumbers": { - "non_aggregate_keys": [ - "OptOutListArn", - "OptOutListName" - ] - }, - "ListPoolOriginationIdentities": { - "non_aggregate_keys": [ - "PoolArn", - "PoolId" - ] - }, - "ListProtectConfigurationRuleSetNumberOverrides": { - "non_aggregate_keys": [ - "ProtectConfigurationId", - "ProtectConfigurationArn" - ] - }, - "ListRegistrationAssociations": { - "non_aggregate_keys": [ - "RegistrationId", - "RegistrationArn", - "RegistrationType" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/service-2.json.gz deleted file mode 100644 index e73c86d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/waiters-2.json b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice-v2/2022-03-31/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice/2018-09-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice/2018-09-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index c3f0377..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice/2018-09-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice/2018-09-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice/2018-09-05/service-2.json.gz deleted file mode 100644 index ccd2314..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint-sms-voice/2018-09-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index fe6d6c9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/examples-1.json b/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/service-2.json.gz deleted file mode 100644 index a2f47d0..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pinpoint/2016-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 09ac3b6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/paginators-1.json deleted file mode 100644 index 4663077..0000000 --- a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListPipes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Pipes" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/service-2.json.gz deleted file mode 100644 index c8ff48b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/waiters-2.json b/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/pipes/2015-10-07/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/polly/2016-06-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0f36b1d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/examples-1.json b/venv/Lib/site-packages/botocore/data/polly/2016-06-10/examples-1.json deleted file mode 100644 index a0e354e..0000000 --- a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/examples-1.json +++ /dev/null @@ -1,171 +0,0 @@ -{ - "version": "1.0", - "examples": { - "DeleteLexicon": [ - { - "input": { - "Name": "example" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes a specified pronunciation lexicon stored in an AWS Region.", - "id": "to-delete-a-lexicon-1481922498332", - "title": "To delete a lexicon" - } - ], - "DescribeVoices": [ - { - "input": { - "LanguageCode": "en-GB" - }, - "output": { - "Voices": [ - { - "Gender": "Female", - "Id": "Emma", - "LanguageCode": "en-GB", - "LanguageName": "British English", - "Name": "Emma" - }, - { - "Gender": "Male", - "Id": "Brian", - "LanguageCode": "en-GB", - "LanguageName": "British English", - "Name": "Brian" - }, - { - "Gender": "Female", - "Id": 
"Amy", - "LanguageCode": "en-GB", - "LanguageName": "British English", - "Name": "Amy" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns the list of voices that are available for use when requesting speech synthesis. Displayed languages are those within the specified language code. If no language code is specified, voices for all available languages are displayed.", - "id": "to-describe-available-voices-1482180557753", - "title": "To describe available voices" - } - ], - "GetLexicon": [ - { - "input": { - "Name": "" - }, - "output": { - "Lexicon": { - "Content": "\r\n\r\n \r\n W3C\r\n World Wide Web Consortium\r\n \r\n", - "Name": "example" - }, - "LexiconAttributes": { - "Alphabet": "ipa", - "LanguageCode": "en-US", - "LastModified": 1478542980.117, - "LexemesCount": 1, - "LexiconArn": "arn:aws:polly:us-east-1:123456789012:lexicon/example", - "Size": 503 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns the content of the specified pronunciation lexicon stored in an AWS Region.", - "id": "to-retrieve-a-lexicon-1481912870836", - "title": "To retrieve a lexicon" - } - ], - "ListLexicons": [ - { - "input": { - }, - "output": { - "Lexicons": [ - { - "Attributes": { - "Alphabet": "ipa", - "LanguageCode": "en-US", - "LastModified": 1478542980.117, - "LexemesCount": 1, - "LexiconArn": "arn:aws:polly:us-east-1:123456789012:lexicon/example", - "Size": 503 - }, - "Name": "example" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a list of pronunciation lexicons stored in an AWS Region.", - "id": "to-list-all-lexicons-in-a-region-1481842106487", - "title": "To list all lexicons in a region" - } - ], - "PutLexicon": [ - { - "input": { - "Content": "", - "Name": "W3C" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Stores a pronunciation lexicon in an AWS Region.", - "id": "to-save-a-lexicon-1482272584088", - "title": "To save a lexicon" - } - ], - "SynthesizeSpeech": [ - { - "input": { - "LexiconNames": [ - "example" - ], - "OutputFormat": "mp3", - "SampleRate": "8000", - "Text": "All Gaul is divided into three parts", - "TextType": "text", - "VoiceId": "Joanna" - }, - "output": { - "AudioStream": "TEXT", - "ContentType": "audio/mpeg", - "RequestCharacters": 37 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Synthesizes plain text or SSML into a file of human-like speech.", - "id": "to-synthesize-speech-1482186064046", - "title": "To synthesize speech" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/polly/2016-06-10/paginators-1.json deleted file mode 100644 index dc76e7c..0000000 --- a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/paginators-1.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "pagination": { - "DescribeVoices": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Voices" - }, - "ListLexicons": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Lexicons" - }, - "ListSpeechSynthesisTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SynthesisTasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/polly/2016-06-10/service-2.json.gz deleted file mode 100644 index 6bb5cc3..0000000 
Binary files a/venv/Lib/site-packages/botocore/data/polly/2016-06-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 457eb4c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/examples-1.json b/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/examples-1.json deleted file mode 100644 index abc1c59..0000000 --- a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/examples-1.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "version": "1.0", - "examples": { - "DescribeServices": [ - { - "input": { - "FormatVersion": "aws_v1", - "MaxResults": 1, - "ServiceCode": "AmazonEC2" - }, - "output": { - "FormatVersion": "aws_v1", - "NextToken": "abcdefg123", - "Services": [ - { - "AttributeNames": [ - "volumeType", - "maxIopsvolume", - "instanceCapacity10xlarge", - "locationType", - "operation" - ], - "ServiceCode": "AmazonEC2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Retrieves the service for the given Service Code.", - "id": "to-retrieve-service-metadata", - "title": "To retrieve a list of services and service codes" - } - ], - "GetAttributeValues": [ - { - "input": { - "AttributeName": "volumeType", - "MaxResults": 2, - "ServiceCode": "AmazonEC2" - }, - "output": { - "AttributeValues": [ - { - "Value": "Throughput Optimized HDD" - }, - { - "Value": "Provisioned IOPS" - } - ], - "NextToken": "GpgauEXAMPLEezucl5LV0w==:7GzYJ0nw0DBTJ2J66EoTIIynE6O1uXwQtTRqioJzQadBnDVgHPzI1en4BUQnPCLpzeBk9RQQAWaFieA4+DapFAGLgk+Z/9/cTw9GldnPOHN98+FdmJP7wKU3QQpQ8MQr5KOeBkIsAqvAQYdL0DkL7tHwPtE5iCEByAmg9gcC/yBU1vAOsf7R3VaNN4M5jMDv3woSWqASSIlBVB6tgW78YL22KhssoItM/jWW+aP6Jqtq4mldxp/ct6DWAl+xLFwHU/CbketimPPXyqHF3/UXDw==" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation returns a list of values available for the given attribute.", - "id": "to-retreive-attribute-values", - "title": "To retrieve a list of attribute values" - } - ], - "GetProducts": [ - { - "input": { - "Filters": [ - { - "Field": "ServiceCode", - "Type": "TERM_MATCH", - "Value": "AmazonEC2" - }, - { - "Field": "volumeType", - "Type": "TERM_MATCH", - "Value": "Provisioned IOPS" - } - ], - "FormatVersion": "aws_v1", - "MaxResults": 1 - }, - "output": { - "FormatVersion": "aws_v1", - "NextToken": "57r3EXAMPLEujbzWfHF7Ciw==:ywSmZsD3mtpQmQLQ5XfOsIMkYybSj+vAT+kGmwMFq+K9DGmIoJkz7lunVeamiOPgthdWSO2a7YKojCO+zY4dJmuNl2QvbNhXs+AJ2Ufn7xGmJncNI2TsEuAsVCUfTAvAQNcwwamtk6XuZ4YdNnooV62FjkV3ZAn40d9+wAxV7+FImvhUHi/+f8afgZdGh2zPUlH8jlV9uUtj0oHp8+DhPUuHXh+WBII1E/aoKpPSm3c=", - "PriceList": [ - "{\"product\":{\"productFamily\":\"Storage\",\"attributes\":{\"storageMedia\":\"SSD-backed\",\"maxThroughputvolume\":\"320 MB/sec\",\"volumeType\":\"Provisioned IOPS\",\"maxIopsvolume\":\"20000\",\"servicecode\":\"AmazonEC2\",\"usagetype\":\"CAN1-EBS:VolumeUsage.piops\",\"locationType\":\"AWS Region\",\"location\":\"Canada (Central)\",\"servicename\":\"Amazon Elastic Compute Cloud\",\"maxVolumeSize\":\"16 
TiB\",\"operation\":\"\"},\"sku\":\"WQGC34PB2AWS8R4U\"},\"serviceCode\":\"AmazonEC2\",\"terms\":{\"OnDemand\":{\"WQGC34PB2AWS8R4U.JRTCKXETXF\":{\"priceDimensions\":{\"WQGC34PB2AWS8R4U.JRTCKXETXF.6YS6EN2CT7\":{\"unit\":\"GB-Mo\",\"endRange\":\"Inf\",\"description\":\"$0.138 per GB-month of Provisioned IOPS SSD (io1) provisioned storage - Canada (Central)\",\"appliesTo\":[],\"rateCode\":\"WQGC34PB2AWS8R4U.JRTCKXETXF.6YS6EN2CT7\",\"beginRange\":\"0\",\"pricePerUnit\":{\"USD\":\"0.1380000000\"}}},\"sku\":\"WQGC34PB2AWS8R4U\",\"effectiveDate\":\"2017-08-01T00:00:00Z\",\"offerTermCode\":\"JRTCKXETXF\",\"termAttributes\":{}}}},\"version\":\"20170901182201\",\"publicationDate\":\"2017-09-01T18:22:01Z\"}" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation returns a list of products that match the given criteria.", - "id": "to-retrieve-available products", - "title": "To retrieve a list of products" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/paginators-1.json deleted file mode 100644 index 0f2ce4e..0000000 --- a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "DescribeServices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Services", - "non_aggregate_keys": [ - "FormatVersion" - ] - }, - "GetAttributeValues": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AttributeValues" - }, - "GetProducts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PriceList", - "non_aggregate_keys": [ - "FormatVersion" - ] - }, - "ListPriceLists": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PriceLists" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/service-2.json.gz deleted file mode 100644 index 2bdf482..0000000 Binary files a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/pricing/2017-10-15/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/proton/2020-07-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 907b69b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/examples-1.json b/venv/Lib/site-packages/botocore/data/proton/2020-07-20/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/proton/2020-07-20/paginators-1.json deleted 
file mode 100644 index a52075c..0000000 --- a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/paginators-1.json +++ /dev/null @@ -1,121 +0,0 @@ -{ - "pagination": { - "ListEnvironmentAccountConnections": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environmentAccountConnections" - }, - "ListEnvironmentTemplateVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templateVersions" - }, - "ListEnvironmentTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templates" - }, - "ListEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environments" - }, - "ListServiceInstances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "serviceInstances" - }, - "ListServiceTemplateVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templateVersions" - }, - "ListServiceTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templates" - }, - "ListServices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "services" - }, - "ListTagsForResource": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tags" - }, - "ListEnvironmentOutputs": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "outputs" - }, - "ListEnvironmentProvisionedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "provisionedResources" - }, - "ListRepositories": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "repositories" - }, - "ListRepositorySyncDefinitions": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "syncDefinitions" - }, - "ListServiceInstanceOutputs": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "outputs" - }, - "ListServiceInstanceProvisionedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "provisionedResources" - }, - "ListServicePipelineOutputs": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "outputs" - }, - "ListServicePipelineProvisionedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "provisionedResources" - }, - "ListComponentOutputs": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "outputs" - }, - "ListComponentProvisionedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "provisionedResources" - }, - "ListComponents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "components" - }, - "ListDeployments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "deployments" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/proton/2020-07-20/service-2.json.gz deleted file mode 100644 index b9de556..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/waiters-2.json b/venv/Lib/site-packages/botocore/data/proton/2020-07-20/waiters-2.json deleted file mode 100644 index f99a6fe..0000000 --- a/venv/Lib/site-packages/botocore/data/proton/2020-07-20/waiters-2.json +++ /dev/null @@ -1,208 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ComponentDeleted" : { - "description" : "Wait until a Component is deleted. Use this after invoking DeleteComponent", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetComponent", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "component.deploymentStatus", - "state" : "failure", - "expected" : "DELETE_FAILED" - } ] - }, - "ComponentDeployed" : { - "description" : "Wait until a Component is deployed. Use this after invoking CreateComponent or UpdateComponent", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetComponent", - "acceptors" : [ { - "matcher" : "path", - "argument" : "component.deploymentStatus", - "state" : "success", - "expected" : "SUCCEEDED" - }, { - "matcher" : "path", - "argument" : "component.deploymentStatus", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "EnvironmentDeployed" : { - "description" : "Wait until an Environment is deployed. Use this after invoking CreateEnvironment or UpdateEnvironment", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetEnvironment", - "acceptors" : [ { - "matcher" : "path", - "argument" : "environment.deploymentStatus", - "state" : "success", - "expected" : "SUCCEEDED" - }, { - "matcher" : "path", - "argument" : "environment.deploymentStatus", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "EnvironmentTemplateVersionRegistered" : { - "description" : "Wait until an EnvironmentTemplateVersion is registered. Use this after invoking CreateEnvironmentTemplateVersion", - "delay" : 2, - "maxAttempts" : 150, - "operation" : "GetEnvironmentTemplateVersion", - "acceptors" : [ { - "matcher" : "path", - "argument" : "environmentTemplateVersion.status", - "state" : "success", - "expected" : "DRAFT" - }, { - "matcher" : "path", - "argument" : "environmentTemplateVersion.status", - "state" : "success", - "expected" : "PUBLISHED" - }, { - "matcher" : "path", - "argument" : "environmentTemplateVersion.status", - "state" : "failure", - "expected" : "REGISTRATION_FAILED" - } ] - }, - "ServiceCreated" : { - "description" : "Wait until an Service has deployed its instances and possibly pipeline. 
Use this after invoking CreateService", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetService", - "acceptors" : [ { - "matcher" : "path", - "argument" : "service.status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "CREATE_FAILED_CLEANUP_COMPLETE" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "CREATE_FAILED_CLEANUP_FAILED" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "CREATE_FAILED" - } ] - }, - "ServiceDeleted" : { - "description" : "Wait until a Service, its instances, and possibly pipeline have been deleted after DeleteService is invoked", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetService", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "DELETE_FAILED" - } ] - }, - "ServiceInstanceDeployed" : { - "description" : "Wait until a ServiceInstance is deployed. Use this after invoking CreateService or UpdateServiceInstance", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetServiceInstance", - "acceptors" : [ { - "matcher" : "path", - "argument" : "serviceInstance.deploymentStatus", - "state" : "success", - "expected" : "SUCCEEDED" - }, { - "matcher" : "path", - "argument" : "serviceInstance.deploymentStatus", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "ServicePipelineDeployed" : { - "description" : "Wait until an ServicePipeline is deployed. Use this after invoking CreateService or UpdateServicePipeline", - "delay" : 10, - "maxAttempts" : 360, - "operation" : "GetService", - "acceptors" : [ { - "matcher" : "path", - "argument" : "service.pipeline.deploymentStatus", - "state" : "success", - "expected" : "SUCCEEDED" - }, { - "matcher" : "path", - "argument" : "service.pipeline.deploymentStatus", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "ServiceTemplateVersionRegistered" : { - "description" : "Wait until a ServiceTemplateVersion is registered. 
Use this after invoking CreateServiceTemplateVersion", - "delay" : 2, - "maxAttempts" : 150, - "operation" : "GetServiceTemplateVersion", - "acceptors" : [ { - "matcher" : "path", - "argument" : "serviceTemplateVersion.status", - "state" : "success", - "expected" : "DRAFT" - }, { - "matcher" : "path", - "argument" : "serviceTemplateVersion.status", - "state" : "success", - "expected" : "PUBLISHED" - }, { - "matcher" : "path", - "argument" : "serviceTemplateVersion.status", - "state" : "failure", - "expected" : "REGISTRATION_FAILED" - } ] - }, - "ServiceUpdated" : { - "description" : "Wait until a Service, its instances, and possibly pipeline have been deployed after UpdateService is invoked", - "delay" : 5, - "maxAttempts" : 999, - "operation" : "GetService", - "acceptors" : [ { - "matcher" : "path", - "argument" : "service.status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "UPDATE_FAILED_CLEANUP_COMPLETE" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "UPDATE_FAILED_CLEANUP_FAILED" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "UPDATE_FAILED" - }, { - "matcher" : "path", - "argument" : "service.status", - "state" : "failure", - "expected" : "UPDATE_COMPLETE_CLEANUP_FAILED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index aaad303..0000000 Binary files a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/paginators-1.json deleted file mode 100644 index 0d13e6c..0000000 --- a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListLibraryItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "libraryItems" - }, - "ListQApps": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "limit", - "result_key": "apps" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/service-2.json.gz deleted file mode 100644 index d768008..0000000 Binary files a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/waiters-2.json b/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/qapps/2023-11-27/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index d2200bc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/paginators-1.json deleted file mode 100644 index 47e8498..0000000 --- a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/paginators-1.json +++ /dev/null @@ -1,124 +0,0 @@ -{ - "pagination": { - "GetChatControlsConfiguration": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "topicConfigurations" - }, - "ListApplications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "applications" - }, - "ListConversations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "conversations" - }, - "ListDataSourceSyncJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "history" - }, - "ListDataSources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataSources" - }, - "ListDocuments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "documentDetailList" - }, - "ListGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListIndices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "indices" - }, - "ListMessages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "messages" - }, - "ListPlugins": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "plugins" - }, - "ListRetrievers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "retrievers" - }, - "ListWebExperiences": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "webExperiences" - }, - "ListAttachments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "attachments" - }, - "ListDataAccessors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataAccessors" - }, - "ListPluginActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListPluginTypeActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListPluginTypeMetadata": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "SearchRelevantContent": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "relevantContent" - }, - "ListSubscriptions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "subscriptions" - }, - "ListChatResponseConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "chatResponseConfigurations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/paginators-1.sdk-extras.json deleted file mode 100644 index b5a1e6e..0000000 --- 
a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/paginators-1.sdk-extras.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetChatControlsConfiguration": { - "non_aggregate_keys": [ - "responseScope", - "blockedPhrases", - "creatorModeConfiguration", - "orchestrationConfiguration", - "hallucinationReductionConfiguration" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/service-2.json.gz deleted file mode 100644 index 404b987..0000000 Binary files a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/waiters-2.json b/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/qbusiness/2023-11-27/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9229433..0000000 Binary files a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/paginators-1.json deleted file mode 100644 index 4cd8e1c..0000000 --- a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/paginators-1.json +++ /dev/null @@ -1,136 +0,0 @@ -{ - "pagination": { - "ListAssistantAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assistantAssociationSummaries" - }, - "ListAssistants": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assistantSummaries" - }, - "ListContents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "contentSummaries" - }, - "ListImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "importJobSummaries" - }, - "ListKnowledgeBases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "knowledgeBaseSummaries" - }, - "ListQuickResponses": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "quickResponseSummaries" - }, - "QueryAssistant": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "results" - }, - "SearchContent": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "contentSummaries" - }, - "SearchQuickResponses": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "results" - }, - "SearchSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessionSummaries" - }, - "ListContentAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "contentAssociationSummaries" - }, - "ListAIAgentVersions": { - "input_token": 
"nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aiAgentVersionSummaries" - }, - "ListAIAgents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aiAgentSummaries" - }, - "ListAIPromptVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aiPromptVersionSummaries" - }, - "ListAIPrompts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aiPromptSummaries" - }, - "ListMessageTemplateVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "messageTemplateVersionSummaries" - }, - "ListMessageTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "messageTemplateSummaries" - }, - "SearchMessageTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "results" - }, - "ListAIGuardrailVersions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aiGuardrailVersionSummaries" - }, - "ListAIGuardrails": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "aiGuardrailSummaries" - }, - "ListMessages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "messages" - }, - "ListSpans": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "spans" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/service-2.json.gz deleted file mode 100644 index c2f12c4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/waiters-2.json b/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/qconnect/2020-10-19/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index d5dcc3b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/examples-1.json b/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/paginators-1.json deleted file mode 100644 index 9045a11..0000000 --- a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/paginators-1.json +++ /dev/null @@ -1,238 +0,0 @@ -{ - "pagination": { - "ListAnalyses": { - "input_token": "NextToken", - "output_token": "NextToken", - 
"limit_key": "MaxResults", - "result_key": "AnalysisSummaryList" - }, - "ListDashboardVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DashboardVersionSummaryList" - }, - "ListDashboards": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DashboardSummaryList" - }, - "ListDataSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataSetSummaries" - }, - "ListDataSources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataSources" - }, - "ListIngestions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Ingestions" - }, - "ListNamespaces": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Namespaces" - }, - "ListTemplateAliases": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TemplateAliasList" - }, - "ListTemplateVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TemplateVersionSummaryList" - }, - "ListTemplates": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TemplateSummaryList" - }, - "ListThemeVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ThemeVersionSummaryList" - }, - "ListThemes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ThemeSummaryList" - }, - "SearchAnalyses": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AnalysisSummaryList" - }, - "SearchDashboards": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DashboardSummaryList" - }, - "SearchDataSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataSetSummaries" - }, - "SearchDataSources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DataSourceSummaries" - }, - "ListAssetBundleExportJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AssetBundleExportJobSummaryList" - }, - "ListAssetBundleImportJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AssetBundleImportJobSummaryList" - }, - "ListGroupMemberships": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupMemberList" - }, - "ListGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupList" - }, - "ListIAMPolicyAssignments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IAMPolicyAssignments" - }, - "ListIAMPolicyAssignmentsForUser": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActiveAssignments" - }, - "ListUserGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupList" - }, - "ListUsers": { - 
"input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "UserList" - }, - "SearchGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupList" - }, - "DescribeFolderPermissions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Permissions" - }, - "DescribeFolderResolvedPermissions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Permissions" - }, - "ListFolderMembers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FolderMemberList" - }, - "ListFolders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FolderSummaryList" - }, - "SearchFolders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FolderSummaryList" - }, - "ListRoleMemberships": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MembersList" - }, - "ListFoldersForResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Folders" - }, - "ListBrands": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Brands" - }, - "ListCustomPermissions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CustomPermissionsList" - }, - "SearchTopics": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TopicSummaryList" - }, - "ListActionConnectors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActionConnectorSummaries" - }, - "ListFlows": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FlowSummaryList" - }, - "SearchActionConnectors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActionConnectorSummaries" - }, - "SearchFlows": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FlowSummaryList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/paginators-1.sdk-extras.json deleted file mode 100644 index b87908d..0000000 --- a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/paginators-1.sdk-extras.json +++ /dev/null @@ -1,239 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListAnalyses": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListDashboardVersions": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListTemplateAliases": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListTemplateVersions": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListTemplates": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListThemeVersions": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListThemes": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchAnalyses": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchDashboards": { - 
"non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchDataSets": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchDataSources": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListNamespaces": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListIngestions": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListDataSources": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListDataSets": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListDashboards": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListAssetBundleExportJobs": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListAssetBundleImportJobs": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListGroupMemberships": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListGroups": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListIAMPolicyAssignments": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListIAMPolicyAssignmentsForUser": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListUserGroups": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListUsers": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchGroups": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListFolders": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListFolderMembers": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchFolders": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "DescribeFolderPermissions": { - "non_aggregate_keys": [ - "Status", - "RequestId", - "Arn", - "FolderId" - ] - }, - "DescribeFolderResolvedPermissions": { - "non_aggregate_keys": [ - "Status", - "RequestId", - "Arn", - "FolderId" - ] - }, - "ListRoleMemberships": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListFoldersForResource": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListCustomPermissions": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchTopics": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListActionConnectors": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "ListFlows": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchActionConnectors": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - }, - "SearchFlows": { - "non_aggregate_keys": [ - "Status", - "RequestId" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/service-2.json.gz deleted file mode 100644 index 7e6bfe5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/quicksight/2018-04-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ram/2018-01-04/endpoint-rule-set-1.json.gz deleted file mode 100644 index 088f256..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/examples-1.json b/venv/Lib/site-packages/botocore/data/ram/2018-01-04/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/paginators-1.json b/venv/Lib/site-packages/botocore/data/ram/2018-01-04/paginators-1.json deleted file mode 100644 index ec438a0..0000000 --- a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "GetResourcePolicies": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "policies" - }, - "GetResourceShareAssociations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "resourceShareAssociations" - }, - "GetResourceShareInvitations": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "resourceShareInvitations" - }, - "GetResourceShares": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "resourceShares" - }, - "ListPrincipals": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "principals" - }, - "ListResources": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "resources" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ram/2018-01-04/service-2.json.gz deleted file mode 100644 index f5730ed..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ram/2018-01-04/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index e296f05..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/examples-1.json b/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/paginators-1.json deleted file mode 100644 index bdbfafb..0000000 --- a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Rules" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/service-2.json.gz deleted file mode 100644 index dd9ea15..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rbin/2021-06-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 10e243d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/endpoint-rule-set-1.json.gz and 
/dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/examples-1.json b/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/service-2.json.gz deleted file mode 100644 index 1eb3d14..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rds-data/2018-08-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rds/2014-09-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index a1dab0f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/rds/2014-09-01/paginators-1.json deleted file mode 100644 index 76c4f3a..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/paginators-1.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "pagination": { - "DescribeDBEngineVersions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBEngineVersions" - }, - "DescribeDBInstances": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBInstances" - }, - "DescribeDBLogFiles": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DescribeDBLogFiles" - }, - "DescribeDBParameterGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBParameterGroups" - }, - "DescribeDBParameters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Parameters" - }, - "DescribeDBSecurityGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBSecurityGroups" - }, - "DescribeDBSnapshots": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBSnapshots" - }, - "DescribeDBSubnetGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBSubnetGroups" - }, - "DescribeEngineDefaultParameters": { - "input_token": "Marker", - "output_token": "EngineDefaults.Marker", - "limit_key": "MaxRecords", - "result_key": "EngineDefaults.Parameters" - }, - "DescribeEventSubscriptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "EventSubscriptionsList" - }, - "DescribeEvents": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Events" - }, - "DescribeOptionGroupOptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - 
"result_key": "OptionGroupOptions" - }, - "DescribeOptionGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "OptionGroupsList" - }, - "DescribeOrderableDBInstanceOptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "OrderableDBInstanceOptions" - }, - "DescribeReservedDBInstances": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedDBInstances" - }, - "DescribeReservedDBInstancesOfferings": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedDBInstancesOfferings" - }, - "DownloadDBLogFilePortion": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "NumberOfLines", - "more_results": "AdditionalDataPending", - "result_key": "LogFileData" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rds/2014-09-01/service-2.json.gz deleted file mode 100644 index 66e64f9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/rds/2014-09-01/waiters-2.json deleted file mode 100644 index b015007..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-09-01/waiters-2.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "version": 2, - "waiters": { - "DBInstanceAvailable": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - }, - "DBInstanceDeleted": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "deleted", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - } - } -} diff 
--git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index cfb467b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/examples-1.json b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/examples-1.json deleted file mode 100644 index e72a328..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/examples-1.json +++ /dev/null @@ -1,1951 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AddSourceIdentifierToSubscription": [ - { - "input": { - "SourceIdentifier": "mymysqlinstance", - "SubscriptionName": "mymysqleventsubscription" - }, - "output": { - "EventSubscription": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example add a source identifier to an event notification subscription.", - "id": "add-source-identifier-to-subscription-93fb6a15-0a59-4577-a7b5-e12db9752c14", - "title": "To add a source identifier to an event notification subscription" - } - ], - "AddTagsToResource": [ - { - "input": { - "ResourceName": "arn:aws:rds:us-east-1:992648334831:og:mymysqloptiongroup", - "Tags": [ - { - "Key": "Staging", - "Value": "LocationDB" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds a tag to an option group.", - "id": "add-tags-to-resource-fa99ef50-228b-449d-b893-ca4d4e9768ab", - "title": "To add tags to a resource" - } - ], - "ApplyPendingMaintenanceAction": [ - { - "input": { - "ApplyAction": "system-update", - "OptInType": "immediate", - "ResourceIdentifier": "arn:aws:rds:us-east-1:992648334831:db:mymysqlinstance" - }, - "output": { - "ResourcePendingMaintenanceActions": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example immediately applies a pending system update to a DB instance.", - "id": "apply-pending-maintenance-action-2a026047-8bbb-47fc-b695-abad9f308c24", - "title": "To apply a pending maintenance action" - } - ], - "AuthorizeDBSecurityGroupIngress": [ - { - "input": { - "CIDRIP": "203.0.113.5/32", - "DBSecurityGroupName": "mydbsecuritygroup" - }, - "output": { - "DBSecurityGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example authorizes access to the specified security group by the specified CIDR block.", - "id": "authorize-db-security-group-ingress-ebf9ab91-8912-4b07-a32e-ca150668164f", - "title": "To authorize DB security group integress" - } - ], - "CopyDBClusterParameterGroup": [ - { - "input": { - "SourceDBClusterParameterGroupIdentifier": "mydbclusterparametergroup", - "TargetDBClusterParameterGroupDescription": "My DB cluster parameter group copy", - "TargetDBClusterParameterGroupIdentifier": "mydbclusterparametergroup-copy" - }, - "output": { - "DBClusterParameterGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies a DB cluster parameter group.", - "id": "copy-db-cluster-parameter-group-6fefaffe-cde9-4dba-9f0b-d3f593572fe4", - "title": "To copy a DB cluster parameter group" - } - ], - "CopyDBClusterSnapshot": [ - { - "input": { - "SourceDBClusterSnapshotIdentifier": "rds:sample-cluster-2016-09-14-10-38", - "TargetDBClusterSnapshotIdentifier": "cluster-snapshot-copy-1" - }, - "output": { - "DBClusterSnapshot": { - } - }, - 
"comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example copies an automated snapshot of a DB cluster to a new DB cluster snapshot.", - "id": "to-copy-a-db-cluster-snapshot-1473879770564", - "title": "To copy a DB cluster snapshot" - } - ], - "CopyDBParameterGroup": [ - { - "input": { - "SourceDBParameterGroupIdentifier": "mymysqlparametergroup", - "TargetDBParameterGroupDescription": "My MySQL parameter group copy", - "TargetDBParameterGroupIdentifier": "mymysqlparametergroup-copy" - }, - "output": { - "DBParameterGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies a DB parameter group.", - "id": "copy-db-parameter-group-610d4dba-2c87-467f-ae5d-edd7f8e47349", - "title": "To copy a DB parameter group" - } - ], - "CopyDBSnapshot": [ - { - "input": { - "SourceDBSnapshotIdentifier": "mydbsnapshot", - "TargetDBSnapshotIdentifier": "mydbsnapshot-copy" - }, - "output": { - "DBSnapshot": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies a DB snapshot.", - "id": "copy-db-snapshot-1b2f0210-bc67-415d-9822-6eecf447dc86", - "title": "To copy a DB snapshot" - } - ], - "CopyOptionGroup": [ - { - "input": { - "SourceOptionGroupIdentifier": "mymysqloptiongroup", - "TargetOptionGroupDescription": "My MySQL option group copy", - "TargetOptionGroupIdentifier": "mymysqloptiongroup-copy" - }, - "output": { - "OptionGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example copies an option group.", - "id": "copy-option-group-8d5c01c3-8846-4e9c-a4b0-1b7237f7d0ec", - "title": "To copy an option group" - } - ], - "CreateDBCluster": [ - { - "input": { - "AvailabilityZones": [ - "us-east-1a" - ], - "BackupRetentionPeriod": 1, - "DBClusterIdentifier": "mydbcluster", - "DBClusterParameterGroupName": "mydbclusterparametergroup", - "DatabaseName": "myauroradb", - "Engine": "aurora", - "EngineVersion": "5.6.10a", - "MasterUserPassword": "mypassword", - "MasterUsername": "myuser", - "Port": 3306, - "StorageEncrypted": true - }, - "output": { - "DBCluster": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB cluster.", - "id": "create-db-cluster-423b998d-eba9-40dd-8e19-96c5b6e5f31d", - "title": "To create a DB cluster" - } - ], - "CreateDBClusterParameterGroup": [ - { - "input": { - "DBClusterParameterGroupName": "mydbclusterparametergroup", - "DBParameterGroupFamily": "aurora5.6", - "Description": "My DB cluster parameter group" - }, - "output": { - "DBClusterParameterGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB cluster parameter group.", - "id": "create-db-cluster-parameter-group-8eb1c3ae-1965-4262-afe3-ee134c4430b1", - "title": "To create a DB cluster parameter group" - } - ], - "CreateDBClusterSnapshot": [ - { - "input": { - "DBClusterIdentifier": "mydbcluster", - "DBClusterSnapshotIdentifier": "mydbclustersnapshot" - }, - "output": { - "DBClusterSnapshot": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB cluster snapshot.", - "id": "create-db-cluster-snapshot-", - "title": "To create a DB cluster snapshot" - } - ], - "CreateDBInstance": [ - { - "input": { - "AllocatedStorage": 5, - "DBInstanceClass": "db.t2.micro", - "DBInstanceIdentifier": "mymysqlinstance", - "Engine": "MySQL", - "MasterUserPassword": "MyPassword", - 
"MasterUsername": "MyUser" - }, - "output": { - "DBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB instance.", - "id": "create-db-instance-57eb5d16-8bf8-4c84-9709-1700322b37b9", - "title": "To create a DB instance." - } - ], - "CreateDBInstanceReadReplica": [ - { - "input": { - "AvailabilityZone": "us-east-1a", - "CopyTagsToSnapshot": true, - "DBInstanceClass": "db.t2.micro", - "DBInstanceIdentifier": "mydbreadreplica", - "PubliclyAccessible": true, - "SourceDBInstanceIdentifier": "mymysqlinstance", - "StorageType": "gp2", - "Tags": [ - { - "Key": "mydbreadreplicakey", - "Value": "mydbreadreplicavalue" - } - ] - }, - "output": { - "DBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB instance read replica.", - "id": "create-db-instance-read-replica-81b41cd5-2871-4dae-bc59-3e264449d5fe", - "title": "To create a DB instance read replica." - } - ], - "CreateDBParameterGroup": [ - { - "input": { - "DBParameterGroupFamily": "mysql5.6", - "DBParameterGroupName": "mymysqlparametergroup", - "Description": "My MySQL parameter group" - }, - "output": { - "DBParameterGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB parameter group.", - "id": "create-db-parameter-group-42afcc37-12e9-4b6a-a55c-b8a141246e87", - "title": "To create a DB parameter group." - } - ], - "CreateDBSecurityGroup": [ - { - "input": { - "DBSecurityGroupDescription": "My DB security group", - "DBSecurityGroupName": "mydbsecuritygroup" - }, - "output": { - "DBSecurityGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB security group.", - "id": "create-db-security-group-41b6786a-539e-42a5-a645-a8bc3cf99353", - "title": "To create a DB security group." - } - ], - "CreateDBSnapshot": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance", - "DBSnapshotIdentifier": "mydbsnapshot" - }, - "output": { - "DBSnapshot": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB snapshot.", - "id": "create-db-snapshot-e10e0e2c-9ac4-426d-9b17-6b6a3e382ce2", - "title": "To create a DB snapshot." - } - ], - "CreateDBSubnetGroup": [ - { - "input": { - "DBSubnetGroupDescription": "My DB subnet group", - "DBSubnetGroupName": "mydbsubnetgroup", - "SubnetIds": [ - "subnet-1fab8a69", - "subnet-d43a468c" - ] - }, - "output": { - "DBSubnetGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a DB subnet group.", - "id": "create-db-subnet-group-c3d162c2-0ec4-4955-ba89-18967615fdb8", - "title": "To create a DB subnet group." 
- } - ], - "CreateEventSubscription": [ - { - "input": { - "Enabled": true, - "EventCategories": [ - "availability" - ], - "SnsTopicArn": "arn:aws:sns:us-east-1:992648334831:MyDemoSNSTopic", - "SourceIds": [ - "mymysqlinstance" - ], - "SourceType": "db-instance", - "SubscriptionName": "mymysqleventsubscription" - }, - "output": { - "EventSubscription": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an event notification subscription.", - "id": "create-event-subscription-00dd0ee6-0e0f-4a38-ae83-e5f2ded5f69a", - "title": "To create an event notification subscription" - } - ], - "CreateOptionGroup": [ - { - "input": { - "EngineName": "MySQL", - "MajorEngineVersion": "5.6", - "OptionGroupDescription": "My MySQL 5.6 option group", - "OptionGroupName": "mymysqloptiongroup" - }, - "output": { - "OptionGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an option group.", - "id": "create-option-group-a7708c87-1b79-4a5e-a762-21cf8fc62b78", - "title": "To create an option group" - } - ], - "DeleteDBCluster": [ - { - "input": { - "DBClusterIdentifier": "mydbcluster", - "SkipFinalSnapshot": true - }, - "output": { - "DBCluster": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB cluster.", - "id": "delete-db-cluster-927fc2c8-6c67-4075-b1ba-75490be0f7d6", - "title": "To delete a DB cluster." - } - ], - "DeleteDBClusterParameterGroup": [ - { - "input": { - "DBClusterParameterGroupName": "mydbclusterparametergroup" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB cluster parameter group.", - "id": "delete-db-cluster-parameter-group-364f5555-ba0a-4cc8-979c-e769098924fc", - "title": "To delete a DB cluster parameter group." - } - ], - "DeleteDBClusterSnapshot": [ - { - "input": { - "DBClusterSnapshotIdentifier": "mydbclustersnapshot" - }, - "output": { - "DBClusterSnapshot": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB cluster snapshot.", - "id": "delete-db-cluster-snapshot-c67e0d95-670e-4fb5-af90-6d9a70a91b07", - "title": "To delete a DB cluster snapshot." - } - ], - "DeleteDBInstance": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance", - "SkipFinalSnapshot": true - }, - "output": { - "DBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB instance.", - "id": "delete-db-instance-4412e650-949c-488a-b32a-7d3038ebccc4", - "title": "To delete a DB instance." 
- } - ], - "DeleteDBParameterGroup": [ - { - "input": { - "DBParameterGroupName": "mydbparamgroup3" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a DB parameter group.", - "id": "to-delete-a-db-parameter-group-1473888796509", - "title": "To delete a DB parameter group" - } - ], - "DeleteDBSecurityGroup": [ - { - "input": { - "DBSecurityGroupName": "mysecgroup" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a DB security group.", - "id": "to-delete-a-db-security-group-1473960141889", - "title": "To delete a DB security group" - } - ], - "DeleteDBSnapshot": [ - { - "input": { - "DBSnapshotIdentifier": "mydbsnapshot" - }, - "output": { - "DBSnapshot": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB snapshot.", - "id": "delete-db-snapshot-505d6b4e-8ced-479c-856a-c460a33fe07b", - "title": "To delete a DB cluster snapshot." - } - ], - "DeleteDBSubnetGroup": [ - { - "input": { - "DBSubnetGroupName": "mydbsubnetgroup" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB subnetgroup.", - "id": "delete-db-subnet-group-4ae00375-511e-443d-a01d-4b9f552244aa", - "title": "To delete a DB subnet group." - } - ], - "DeleteEventSubscription": [ - { - "input": { - "SubscriptionName": "myeventsubscription" - }, - "output": { - "EventSubscription": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified DB event subscription.", - "id": "delete-db-event-subscription-d33567e3-1d5d-48ff-873f-0270453f4a75", - "title": "To delete a DB event subscription." - } - ], - "DeleteOptionGroup": [ - { - "input": { - "OptionGroupName": "mydboptiongroup" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified option group.", - "id": "delete-db-option-group-578be2be-3095-431a-9ea4-9a3c3b0daef4", - "title": "To delete an option group." 
- } - ], - "DescribeAccountAttributes": [ - { - "input": { - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists account attributes.", - "id": "describe-account-attributes-683d3ff7-5524-421a-8da5-e88f1ea2222b", - "title": "To list account attributes" - } - ], - "DescribeCertificates": [ - { - "input": { - "CertificateIdentifier": "rds-ca-2015", - "MaxRecords": 20 - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists up to 20 certificates for the specified certificate identifier.", - "id": "describe-certificates-9d71a70d-7908-4444-b43f-321d842c62dc", - "title": "To list certificates" - } - ], - "DescribeDBClusterParameterGroups": [ - { - "input": { - "DBClusterParameterGroupName": "mydbclusterparametergroup" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists settings for the specified DB cluster parameter group.", - "id": "describe-db-cluster-parameter-groups-cf9c6e66-664e-4f57-8e29-a9080abfc013", - "title": "To list DB cluster parameter group settings" - } - ], - "DescribeDBClusterParameters": [ - { - "input": { - "DBClusterParameterGroupName": "mydbclusterparametergroup", - "Source": "system" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists system parameters for the specified DB cluster parameter group.", - "id": "describe-db-cluster-parameters-98043c28-e489-41a7-b118-bfd96dc779a1", - "title": "To list DB cluster parameters" - } - ], - "DescribeDBClusterSnapshotAttributes": [ - { - "input": { - "DBClusterSnapshotIdentifier": "mydbclustersnapshot" - }, - "output": { - "DBClusterSnapshotAttributesResult": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists attributes for the specified DB cluster snapshot.", - "id": "describe-db-cluster-snapshot-attributes-6752ade3-0c7b-4b06-a8e4-b76bf4e2d3571", - "title": "To list DB cluster snapshot attributes" - } - ], - "DescribeDBClusterSnapshots": [ - { - "input": { - "DBClusterSnapshotIdentifier": "mydbclustersnapshot", - "SnapshotType": "manual" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists settings for the specified, manually-created cluster snapshot.", - "id": "describe-db-cluster-snapshots-52f38af1-3431-4a51-9a6a-e6bb8c961b32", - "title": "To list DB cluster snapshots" - } - ], - "DescribeDBClusters": [ - { - "input": { - "DBClusterIdentifier": "mynewdbcluster" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists settings for the specified DB cluster.", - "id": "describe-db-clusters-7aae8861-cb95-4b3b-9042-f62df7698635", - "title": "To list DB clusters" - } - ], - "DescribeDBEngineVersions": [ - { - "input": { - "DBParameterGroupFamily": "mysql5.6", - "DefaultOnly": true, - "Engine": "mysql", - "EngineVersion": "5.6", - "ListSupportedCharacterSets": true - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists settings for the specified DB engine version.", - "id": "describe-db-engine-versions-8e698cf2-2162-425a-a854-111cdaceb52b", - "title": "To list DB engine version settings" - } - ], - "DescribeDBInstances": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": 
{ - } - }, - "description": "This example lists settings for the specified DB instance.", - "id": "describe-db-instances-0e11a8c5-4ec3-4463-8cbf-f7254d04c4fc", - "title": "To list DB instance settings" - } - ], - "DescribeDBLogFiles": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance", - "FileLastWritten": 1470873600000, - "FileSize": 0, - "FilenameContains": "error" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists matching log file names for the specified DB instance, file name pattern, last write date in POSIX time with milleseconds, and minimum file size.", - "id": "describe-db-log-files-5f002d8d-5c1d-44c2-b5f4-bd284c0f1285", - "title": "To list DB log file names" - } - ], - "DescribeDBParameterGroups": [ - { - "input": { - "DBParameterGroupName": "mymysqlparametergroup" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information about the specified DB parameter group.", - "id": "describe-db-parameter-groups-", - "title": "To list information about DB parameter groups" - } - ], - "DescribeDBParameters": [ - { - "input": { - "DBParameterGroupName": "mymysqlparametergroup", - "MaxRecords": 20, - "Source": "system" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for up to the first 20 system parameters for the specified DB parameter group.", - "id": "describe-db-parameters-09db4201-ef4f-4d97-a4b5-d71c0715b901", - "title": "To list information about DB parameters" - } - ], - "DescribeDBSecurityGroups": [ - { - "input": { - "DBSecurityGroupName": "mydbsecuritygroup" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists settings for the specified security group.", - "id": "describe-db-security-groups-66fe9ea1-17dd-4275-b82e-f771cee0c849", - "title": "To list DB security group settings" - } - ], - "DescribeDBSnapshotAttributes": [ - { - "input": { - "DBSnapshotIdentifier": "mydbsnapshot" - }, - "output": { - "DBSnapshotAttributesResult": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists attributes for the specified DB snapshot.", - "id": "describe-db-snapshot-attributes-1d4fb750-34f6-4e43-8b3d-b2751d796a95", - "title": "To list DB snapshot attributes" - } - ], - "DescribeDBSnapshots": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance", - "IncludePublic": false, - "IncludeShared": true, - "SnapshotType": "manual" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all manually-created, shared snapshots for the specified DB instance.", - "id": "describe-db-snapshots-2c935989-a1ef-4c85-aea4-1d0f45f17f26", - "title": "To list DB snapshot attributes" - } - ], - "DescribeDBSubnetGroups": [ - { - "input": { - "DBSubnetGroupName": "mydbsubnetgroup" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information about the specified DB subnet group.", - "id": "describe-db-subnet-groups-1d97b340-682f-4dd6-9653-8ed72a8d1221", - "title": "To list information about DB subnet groups" - } - ], - "DescribeEngineDefaultClusterParameters": [ - { - "input": { - "DBParameterGroupFamily": "aurora5.6" - }, - "output": { - "EngineDefaults": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This 
example lists default parameters for the specified DB cluster engine.", - "id": "describe-engine-default-cluster-parameters-f130374a-7bee-434b-b51d-da20b6e000e0", - "title": "To list default parameters for a DB cluster engine" - } - ], - "DescribeEngineDefaultParameters": [ - { - "input": { - "DBParameterGroupFamily": "mysql5.6" - }, - "output": { - "EngineDefaults": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists default parameters for the specified DB engine.", - "id": "describe-engine-default-parameters-35d5108e-1d44-4fac-8aeb-04b8fdfface1", - "title": "To list default parameters for a DB engine" - } - ], - "DescribeEventCategories": [ - { - "input": { - "SourceType": "db-instance" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists all DB instance event categories.", - "id": "describe-event-categories-97bd4c77-12da-4be6-b42f-edf77771428b", - "title": "To list event categories." - } - ], - "DescribeEventSubscriptions": [ - { - "input": { - "SubscriptionName": "mymysqleventsubscription" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for the specified DB event notification subscription.", - "id": "describe-event-subscriptions-11184a82-e58a-4d0c-b558-f3a7489e0850", - "title": "To list information about DB event notification subscriptions" - } - ], - "DescribeEvents": [ - { - "input": { - "Duration": 10080, - "EventCategories": [ - "backup" - ], - "SourceIdentifier": "mymysqlinstance", - "SourceType": "db-instance" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all backup-related events for the specified DB instance for the past 7 days (7 days * 24 hours * 60 minutes = 10,080 minutes).", - "id": "describe-events-3836e5ed-3913-4f76-8452-c77fcad5016b", - "title": "To list information about events" - } - ], - "DescribeOptionGroupOptions": [ - { - "input": { - "EngineName": "mysql", - "MajorEngineVersion": "5.6" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all option group options for the specified DB engine.", - "id": "describe-option-group-options-30d735a4-81f1-49e4-b3f2-5dc45d50c8ed", - "title": "To list information about DB option group options" - } - ], - "DescribeOptionGroups": [ - { - "input": { - "EngineName": "mysql", - "MajorEngineVersion": "5.6" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all option groups for the specified DB engine.", - "id": "describe-option-groups-4ef478a1-66d5-45f2-bec3-e608720418a4", - "title": "To list information about DB option groups" - } - ], - "DescribeOrderableDBInstanceOptions": [ - { - "input": { - "DBInstanceClass": "db.t2.micro", - "Engine": "mysql", - "EngineVersion": "5.6.27", - "LicenseModel": "general-public-license", - "Vpc": true - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all orderable DB instance options for the specified DB engine, engine version, DB instance class, license model, and VPC settings.", - "id": "describe-orderable-db-instance-options-7444d3ed-82eb-42b9-9ed9-896b8c27a782", - "title": "To list information about orderable DB instance options" - } - ], - 
"DescribePendingMaintenanceActions": [ - { - "input": { - "ResourceIdentifier": "arn:aws:rds:us-east-1:992648334831:db:mymysqlinstance" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all pending maintenance actions for the specified DB instance.", - "id": "describe-pending-maintenance-actions-e6021f7e-58ae-49cc-b874-11996176835c", - "title": "To list information about pending maintenance actions" - } - ], - "DescribeReservedDBInstances": [ - { - "input": { - "DBInstanceClass": "db.t2.micro", - "Duration": "1y", - "MultiAZ": false, - "OfferingType": "No Upfront", - "ProductDescription": "mysql" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all reserved DB instances for the specified DB instance class, duration, product, offering type, and availability zone settings.", - "id": "describe-reserved-db-instances-d45adaca-2e30-407c-a0f3-aa7b98bea17f", - "title": "To list information about reserved DB instances" - } - ], - "DescribeReservedDBInstancesOfferings": [ - { - "input": { - "DBInstanceClass": "db.t2.micro", - "Duration": "1y", - "MultiAZ": false, - "OfferingType": "No Upfront", - "ProductDescription": "mysql" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for all reserved DB instance offerings for the specified DB instance class, duration, product, offering type, and availability zone settings.", - "id": "describe-reserved-db-instances-offerings-9de7d1fd-d6a6-4a72-84ae-b2ef58d47d8d", - "title": "To list information about reserved DB instance offerings" - } - ], - "DescribeSourceRegions": [ - { - "input": { - }, - "output": { - "SourceRegions": [ - { - "Endpoint": "https://rds.ap-northeast-1.amazonaws.com", - "RegionName": "ap-northeast-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.ap-northeast-2.amazonaws.com", - "RegionName": "ap-northeast-2", - "Status": "available" - }, - { - "Endpoint": "https://rds.ap-south-1.amazonaws.com", - "RegionName": "ap-south-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.ap-southeast-1.amazonaws.com", - "RegionName": "ap-southeast-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.ap-southeast-2.amazonaws.com", - "RegionName": "ap-southeast-2", - "Status": "available" - }, - { - "Endpoint": "https://rds.eu-central-1.amazonaws.com", - "RegionName": "eu-central-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.eu-west-1.amazonaws.com", - "RegionName": "eu-west-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.sa-east-1.amazonaws.com", - "RegionName": "sa-east-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.us-west-1.amazonaws.com", - "RegionName": "us-west-1", - "Status": "available" - }, - { - "Endpoint": "https://rds.us-west-2.amazonaws.com", - "RegionName": "us-west-2", - "Status": "available" - } - ] - }, - "comments": { - }, - "description": "To list the AWS regions where a Read Replica can be created.", - "id": "to-describe-source-regions-1473457722410", - "title": "To describe source regions" - } - ], - "DownloadDBLogFilePortion": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance", - "LogFileName": "mysqlUpgrade" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information for the specified log file for the specified DB 
instance.", - "id": "download-db-log-file-portion-54a82731-a441-4fc7-a010-8eccae6fa202", - "title": "To list information about DB log files" - } - ], - "FailoverDBCluster": [ - { - "input": { - "DBClusterIdentifier": "myaurorainstance-cluster", - "TargetDBInstanceIdentifier": "myaurorareplica" - }, - "output": { - "DBCluster": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example performs a failover for the specified DB cluster to the specified DB instance.", - "id": "failover-db-cluster-9e7f2f93-d98c-42c7-bb0e-d6c485c096d6", - "title": "To perform a failover for a DB cluster" - } - ], - "ListTagsForResource": [ - { - "input": { - "ResourceName": "arn:aws:rds:us-east-1:992648334831:og:mymysqloptiongroup" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists information about all tags associated with the specified DB option group.", - "id": "list-tags-for-resource-8401f3c2-77cd-4f90-bfd5-b523f0adcc2f", - "title": "To list information about tags associated with a resource" - } - ], - "ModifyDBCluster": [ - { - "input": { - "ApplyImmediately": true, - "DBClusterIdentifier": "mydbcluster", - "MasterUserPassword": "mynewpassword", - "NewDBClusterIdentifier": "mynewdbcluster", - "PreferredBackupWindow": "04:00-04:30", - "PreferredMaintenanceWindow": "Tue:05:00-Tue:05:30" - }, - "output": { - "DBCluster": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example changes the specified settings for the specified DB cluster.", - "id": "modify-db-cluster-a370ee1b-768d-450a-853b-707cb1ab663d", - "title": "To change DB cluster settings" - } - ], - "ModifyDBClusterParameterGroup": [ - { - "input": { - "DBClusterParameterGroupName": "mydbclusterparametergroup", - "Parameters": [ - { - "ApplyMethod": "immediate", - "ParameterName": "time_zone", - "ParameterValue": "America/Phoenix" - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example immediately changes the specified setting for the specified DB cluster parameter group.", - "id": "modify-db-cluster-parameter-group-f9156bc9-082a-442e-8d12-239542c1a113", - "title": "To change DB cluster parameter group settings" - } - ], - "ModifyDBClusterSnapshotAttribute": [ - { - "input": { - "AttributeName": "restore", - "DBClusterSnapshotIdentifier": "manual-cluster-snapshot1", - "ValuesToAdd": [ - "123451234512", - "123456789012" - ], - "ValuesToRemove": [ - "all" - ] - }, - "output": { - "DBClusterSnapshotAttributesResult": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example gives two AWS accounts access to a manual DB cluster snapshot and ensures that the DB cluster snapshot is private by removing the value \"all\".", - "id": "to-add-or-remove-access-to-a-manual-db-cluster-snapshot-1473889426431", - "title": "To add or remove access to a manual DB cluster snapshot" - } - ], - "ModifyDBInstance": [ - { - "input": { - "AllocatedStorage": 10, - "ApplyImmediately": true, - "BackupRetentionPeriod": 1, - "DBInstanceClass": "db.t2.small", - "DBInstanceIdentifier": "mymysqlinstance", - "MasterUserPassword": "mynewpassword", - "PreferredBackupWindow": "04:00-04:30", - "PreferredMaintenanceWindow": "Tue:05:00-Tue:05:30" - }, - "output": { - "DBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example immediately changes the specified settings for the 
specified DB instance.", - "id": "modify-db-instance-6979a368-6254-467b-8a8d-61103f4fcde9", - "title": "To change DB instance settings" - } - ], - "ModifyDBParameterGroup": [ - { - "input": { - "DBParameterGroupName": "mymysqlparametergroup", - "Parameters": [ - { - "ApplyMethod": "immediate", - "ParameterName": "time_zone", - "ParameterValue": "America/Phoenix" - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example immediately changes the specified setting for the specified DB parameter group.", - "id": "modify-db-parameter-group-f3a4e52a-68e4-4b88-b559-f912d34c457a", - "title": "To change DB parameter group settings" - } - ], - "ModifyDBSnapshotAttribute": [ - { - "input": { - "AttributeName": "restore", - "DBSnapshotIdentifier": "mydbsnapshot", - "ValuesToAdd": [ - "all" - ] - }, - "output": { - "DBSnapshotAttributesResult": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds the specified attribute for the specified DB snapshot.", - "id": "modify-db-snapshot-attribute-2e66f120-2b21-4a7c-890b-4474da88bde6", - "title": "To change DB snapshot attributes" - } - ], - "ModifyDBSubnetGroup": [ - { - "input": { - "DBSubnetGroupName": "mydbsubnetgroup", - "SubnetIds": [ - "subnet-70e1975a", - "subnet-747a5c49" - ] - }, - "output": { - "DBSubnetGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example changes the specified setting for the specified DB subnet group.", - "id": "modify-db-subnet-group-e34a97d9-8fe6-4239-a4ed-ad6e73a956b0", - "title": "To change DB subnet group settings" - } - ], - "ModifyEventSubscription": [ - { - "input": { - "Enabled": true, - "EventCategories": [ - "deletion", - "low storage" - ], - "SourceType": "db-instance", - "SubscriptionName": "mymysqleventsubscription" - }, - "output": { - "EventSubscription": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example changes the specified setting for the specified event notification subscription.", - "id": "modify-event-subscription-405ac869-1f02-42cd-b8f4-6950a435f30e", - "title": "To change event notification subscription settings" - } - ], - "ModifyOptionGroup": [ - { - "input": { - "ApplyImmediately": true, - "OptionGroupName": "myawsuser-og02", - "OptionsToInclude": [ - { - "DBSecurityGroupMemberships": [ - "default" - ], - "OptionName": "MEMCACHED" - } - ] - }, - "output": { - "OptionGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds an option to an option group.", - "id": "to-modify-an-option-group-1473890247875", - "title": "To modify an option group" - } - ], - "PromoteReadReplica": [ - { - "input": { - "BackupRetentionPeriod": 1, - "DBInstanceIdentifier": "mydbreadreplica", - "PreferredBackupWindow": "03:30-04:00" - }, - "output": { - "DBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example promotes the specified read replica and sets its backup retention period and preferred backup window.", - "id": "promote-read-replica-cc580039-c55d-4035-838a-def4a1ae4181", - "title": "To promote a read replica" - } - ], - "PurchaseReservedDBInstancesOffering": [ - { - "input": { - "ReservedDBInstanceId": "myreservationid", - "ReservedDBInstancesOfferingId": "fb29428a-646d-4390-850e-5fe89926e727" - }, - "output": { - "ReservedDBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - 
} - }, - "description": "This example purchases a reserved DB instance offering that matches the specified settings.", - "id": "purchase-reserved-db-instances-offfering-f423c736-8413-429b-ba13-850fd4fa4dcd", - "title": "To purchase a reserved DB instance offering" - } - ], - "RebootDBInstance": [ - { - "input": { - "DBInstanceIdentifier": "mymysqlinstance", - "ForceFailover": false - }, - "output": { - "DBInstance": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example reboots the specified DB instance without forcing a failover.", - "id": "reboot-db-instance-b9ce8a0a-2920-451d-a1f3-01d288aa7366", - "title": "To reboot a DB instance" - } - ], - "RemoveSourceIdentifierFromSubscription": [ - { - "input": { - "SourceIdentifier": "mymysqlinstance", - "SubscriptionName": "myeventsubscription" - }, - "output": { - "EventSubscription": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example removes the specified source identifier from the specified DB event subscription.", - "id": "remove-source-identifier-from-subscription-30d25493-c19d-4cf7-b4e5-68371d0d8770", - "title": "To remove a source identifier from a DB event subscription" - } - ], - "RemoveTagsFromResource": [ - { - "input": { - "ResourceName": "arn:aws:rds:us-east-1:992648334831:og:mydboptiongroup", - "TagKeys": [ - "MyKey" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example removes the specified tag associated with the specified DB option group.", - "id": "remove-tags-from-resource-49f00574-38f6-4d01-ac89-d3c668449ce3", - "title": "To remove tags from a resource" - } - ], - "ResetDBClusterParameterGroup": [ - { - "input": { - "DBClusterParameterGroupName": "mydbclusterparametergroup", - "ResetAllParameters": true - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets all parameters for the specified DB cluster parameter group to their default values.", - "id": "reset-db-cluster-parameter-group-b04aeaf7-7f73-49e1-9bb4-857573ea3ee4", - "title": "To reset the values of a DB cluster parameter group" - } - ], - "ResetDBParameterGroup": [ - { - "input": { - "DBParameterGroupName": "mydbparametergroup", - "ResetAllParameters": true - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example resets all parameters for the specified DB parameter group to their default values.", - "id": "reset-db-parameter-group-ed2ed723-de0d-4824-8af5-3c65fa130abf", - "title": "To reset the values of a DB parameter group" - } - ], - "RestoreDBClusterFromSnapshot": [ - { - "input": { - "DBClusterIdentifier": "restored-cluster1", - "Engine": "aurora", - "SnapshotIdentifier": "sample-cluster-snapshot1" - }, - "output": { - "DBCluster": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example restores an Amazon Aurora DB cluster from a DB cluster snapshot.", - "id": "to-restore-an-amazon-aurora-db-cluster-from-a-db-cluster-snapshot-1473958144325", - "title": "To restore an Amazon Aurora DB cluster from a DB cluster snapshot" - } - ], - "RestoreDBClusterToPointInTime": [ - { - "input": { - "DBClusterIdentifier": "sample-restored-cluster1", - "RestoreToTime": "2016-09-13T18:45:00Z", - "SourceDBClusterIdentifier": "sample-cluster1" - }, - "output": { - "DBCluster": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example 
restores a DB cluster to a new DB cluster at a point in time from the source DB cluster.", - "id": "to-restore-a-db-cluster-to-a-point-in-time-1473962082214", - "title": "To restore a DB cluster to a point in time." - } - ], - "RestoreDBInstanceFromDBSnapshot": [ - { - "input": { - "DBInstanceIdentifier": "mysqldb-restored", - "DBSnapshotIdentifier": "rds:mysqldb-2014-04-22-08-15" - }, - "output": { - "DBInstance": { - "AllocatedStorage": 200, - "AutoMinorVersionUpgrade": true, - "AvailabilityZone": "us-west-2b", - "BackupRetentionPeriod": 7, - "CACertificateIdentifier": "rds-ca-2015", - "CopyTagsToSnapshot": false, - "DBInstanceArn": "arn:aws:rds:us-west-2:123456789012:db:mysqldb-restored", - "DBInstanceClass": "db.t2.small", - "DBInstanceIdentifier": "mysqldb-restored", - "DBInstanceStatus": "available", - "DBName": "sample", - "DBParameterGroups": [ - { - "DBParameterGroupName": "default.mysql5.6", - "ParameterApplyStatus": "in-sync" - } - ], - "DBSecurityGroups": [ - - ], - "DBSubnetGroup": { - "DBSubnetGroupDescription": "default", - "DBSubnetGroupName": "default", - "SubnetGroupStatus": "Complete", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-west-2a" - }, - "SubnetIdentifier": "subnet-77e8db03", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-west-2b" - }, - "SubnetIdentifier": "subnet-c39989a1", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-west-2c" - }, - "SubnetIdentifier": "subnet-4b267b0d", - "SubnetStatus": "Active" - } - ], - "VpcId": "vpc-c1c5b3a3" - }, - "DbInstancePort": 0, - "DbiResourceId": "db-VNZUCCBTEDC4WR7THXNJO72HVQ", - "DomainMemberships": [ - - ], - "Engine": "mysql", - "EngineVersion": "5.6.27", - "LicenseModel": "general-public-license", - "MasterUsername": "mymasteruser", - "MonitoringInterval": 0, - "MultiAZ": false, - "OptionGroupMemberships": [ - { - "OptionGroupName": "default:mysql-5-6", - "Status": "in-sync" - } - ], - "PendingModifiedValues": { - }, - "PreferredBackupWindow": "12:58-13:28", - "PreferredMaintenanceWindow": "tue:10:16-tue:10:46", - "PubliclyAccessible": true, - "ReadReplicaDBInstanceIdentifiers": [ - - ], - "StorageEncrypted": false, - "StorageType": "gp2", - "VpcSecurityGroups": [ - { - "Status": "active", - "VpcSecurityGroupId": "sg-e5e5b0d2" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example restores a DB instance from a DB snapshot.", - "id": "to-restore-a-db-instance-from-a-db-snapshot-1473961657311", - "title": "To restore a DB instance from a DB snapshot." 
- } - ], - "RestoreDBInstanceToPointInTime": [ - { - "input": { - "RestoreTime": "2016-09-13T18:45:00Z", - "SourceDBInstanceIdentifier": "mysql-sample", - "TargetDBInstanceIdentifier": "mysql-sample-restored" - }, - "output": { - "DBInstance": { - "AllocatedStorage": 200, - "AutoMinorVersionUpgrade": true, - "AvailabilityZone": "us-west-2b", - "BackupRetentionPeriod": 7, - "CACertificateIdentifier": "rds-ca-2015", - "CopyTagsToSnapshot": false, - "DBInstanceArn": "arn:aws:rds:us-west-2:123456789012:db:mysql-sample-restored", - "DBInstanceClass": "db.t2.small", - "DBInstanceIdentifier": "mysql-sample-restored", - "DBInstanceStatus": "available", - "DBName": "sample", - "DBParameterGroups": [ - { - "DBParameterGroupName": "default.mysql5.6", - "ParameterApplyStatus": "in-sync" - } - ], - "DBSecurityGroups": [ - - ], - "DBSubnetGroup": { - "DBSubnetGroupDescription": "default", - "DBSubnetGroupName": "default", - "SubnetGroupStatus": "Complete", - "Subnets": [ - { - "SubnetAvailabilityZone": { - "Name": "us-west-2a" - }, - "SubnetIdentifier": "subnet-77e8db03", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-west-2b" - }, - "SubnetIdentifier": "subnet-c39989a1", - "SubnetStatus": "Active" - }, - { - "SubnetAvailabilityZone": { - "Name": "us-west-2c" - }, - "SubnetIdentifier": "subnet-4b267b0d", - "SubnetStatus": "Active" - } - ], - "VpcId": "vpc-c1c5b3a3" - }, - "DbInstancePort": 0, - "DbiResourceId": "db-VNZUCCBTEDC4WR7THXNJO72HVQ", - "DomainMemberships": [ - - ], - "Engine": "mysql", - "EngineVersion": "5.6.27", - "LicenseModel": "general-public-license", - "MasterUsername": "mymasteruser", - "MonitoringInterval": 0, - "MultiAZ": false, - "OptionGroupMemberships": [ - { - "OptionGroupName": "default:mysql-5-6", - "Status": "in-sync" - } - ], - "PendingModifiedValues": { - }, - "PreferredBackupWindow": "12:58-13:28", - "PreferredMaintenanceWindow": "tue:10:16-tue:10:46", - "PubliclyAccessible": true, - "ReadReplicaDBInstanceIdentifiers": [ - - ], - "StorageEncrypted": false, - "StorageType": "gp2", - "VpcSecurityGroups": [ - { - "Status": "active", - "VpcSecurityGroupId": "sg-e5e5b0d2" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example restores a DB instance to a new DB instance at a point in time from the source DB instance.", - "id": "to-restore-a-db-instance-to-a-point-in-time-1473962652154", - "title": "To restore a DB instance to a point in time." 
- } - ], - "RevokeDBSecurityGroupIngress": [ - { - "input": { - "CIDRIP": "203.0.113.5/32", - "DBSecurityGroupName": "mydbsecuritygroup" - }, - "output": { - "DBSecurityGroup": { - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example revokes ingress for the specified CIDR block associated with the specified DB security group.", - "id": "revoke-db-security-group-ingress-ce5b2c1c-bd4e-4809-b04a-6d78ec448813", - "title": "To revoke ingress for a DB security group" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/paginators-1.json deleted file mode 100644 index edf4103..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/paginators-1.json +++ /dev/null @@ -1,251 +0,0 @@ -{ - "pagination": { - "DescribeCertificates": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Certificates" - }, - "DescribeDBClusterBacktracks": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBClusterBacktracks" - }, - "DescribeDBClusterParameterGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBClusterParameterGroups" - }, - "DescribeDBClusterParameters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Parameters" - }, - "DescribeDBClusterSnapshots": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBClusterSnapshots" - }, - "DescribeDBClusters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusters" - }, - "DescribeDBEngineVersions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBEngineVersions" - }, - "DescribeDBInstanceAutomatedBackups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBInstanceAutomatedBackups" - }, - "DescribeDBInstances": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBInstances" - }, - "DescribeDBLogFiles": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DescribeDBLogFiles" - }, - "DescribeDBParameterGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBParameterGroups" - }, - "DescribeDBParameters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Parameters" - }, - "DescribeDBSecurityGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBSecurityGroups" - }, - "DescribeDBSnapshots": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBSnapshots" - }, - "DescribeDBSubnetGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "DBSubnetGroups" - }, - "DescribeEngineDefaultClusterParameters": { - "input_token": "Marker", - "output_token": "EngineDefaults.Marker", - "limit_key": "MaxRecords", - "result_key": "EngineDefaults.Parameters" - }, - "DescribeEngineDefaultParameters": { - "input_token": "Marker", - "output_token": "EngineDefaults.Marker", - "limit_key": "MaxRecords", - "result_key": 
"EngineDefaults.Parameters" - }, - "DescribeEventSubscriptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "EventSubscriptionsList" - }, - "DescribeEvents": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Events" - }, - "DescribeGlobalClusters": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "GlobalClusters" - }, - "DescribeOptionGroupOptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "OptionGroupOptions" - }, - "DescribeOptionGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "OptionGroupsList" - }, - "DescribeOrderableDBInstanceOptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "OrderableDBInstanceOptions" - }, - "DescribePendingMaintenanceActions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "PendingMaintenanceActions" - }, - "DescribeReservedDBInstances": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedDBInstances" - }, - "DescribeReservedDBInstancesOfferings": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedDBInstancesOfferings" - }, - "DescribeSourceRegions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "SourceRegions" - }, - "DownloadDBLogFilePortion": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "NumberOfLines", - "more_results": "AdditionalDataPending", - "result_key": "LogFileData" - }, - "DescribeDBClusterEndpoints": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterEndpoints" - }, - "DescribeDBProxies": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBProxies" - }, - "DescribeDBProxyTargetGroups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "TargetGroups" - }, - "DescribeDBProxyTargets": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Targets" - }, - "DescribeExportTasks": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ExportTasks" - }, - "DescribeDBProxyEndpoints": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBProxyEndpoints" - }, - "DescribeBlueGreenDeployments": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "BlueGreenDeployments" - }, - "DescribeDBClusterAutomatedBackups": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBClusterAutomatedBackups" - }, - "DescribeIntegrations": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Integrations" - }, - "DescribeDBSnapshotTenantDatabases": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBSnapshotTenantDatabases" - }, - "DescribeTenantDatabases": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "TenantDatabases" - }, 
- "DescribeDBRecommendations": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBRecommendations" - }, - "DescribeDBMajorEngineVersions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DBMajorEngineVersions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/paginators-1.sdk-extras.json deleted file mode 100644 index 5016e02..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "DescribeCertificates": { - "non_aggregate_keys": [ - "DefaultCertificateForNewLaunches" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/service-2.json.gz deleted file mode 100644 index cf10de3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/service-2.sdk-extras.json b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/service-2.sdk-extras.json deleted file mode 100644 index 36aea1f..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/service-2.sdk-extras.json +++ /dev/null @@ -1,47 +0,0 @@ - { - "version": 1.0, - "merge": { - "shapes": { - "CopyDBClusterSnapshotMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "
The ID of the region that contains the snapshot to be copied." - } - } - }, - "CreateDBClusterMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "The ID of the region that contains the source for the db cluster." - } - } - }, - "CopyDBSnapshotMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "The ID of the region that contains the snapshot to be copied." - } - } - }, - "CreateDBInstanceReadReplicaMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "The ID of the region that contains the source for the read replica." - } - } - }, - "StartDBInstanceAutomatedBackupsReplicationMessage": { - "members": { - "SourceRegion": { - "shape": "String", - "documentation": "The ID of the region that contains the source for the db instance.
    " - } - } - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/waiters-2.json b/venv/Lib/site-packages/botocore/data/rds/2014-10-31/waiters-2.json deleted file mode 100644 index d91adea..0000000 --- a/venv/Lib/site-packages/botocore/data/rds/2014-10-31/waiters-2.json +++ /dev/null @@ -1,412 +0,0 @@ -{ - "version": 2, - "waiters": { - "DBInstanceAvailable": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - }, - "DBInstanceDeleted": { - "delay": 30, - "operation": "DescribeDBInstances", - "maxAttempts": 60, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(DBInstances) == `0`" - }, - { - "expected": "DBInstanceNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBInstances[].DBInstanceStatus" - } - ] - }, - "DBSnapshotAvailable": { - "delay": 30, - "operation": "DescribeDBSnapshots", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - } - ] - }, - "DBSnapshotDeleted": { - "delay": 30, - "operation": "DescribeDBSnapshots", - "maxAttempts": 60, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(DBSnapshots) == `0`" - }, - { - "expected": "DBSnapshotNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" 
- }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBSnapshots[].Status" - } - ] - }, - "DBClusterSnapshotAvailable": { - "delay": 30, - "operation": "DescribeDBClusterSnapshots", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - } - ] - }, - "DBClusterSnapshotDeleted": { - "delay": 30, - "operation": "DescribeDBClusterSnapshots", - "maxAttempts": 60, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(DBClusterSnapshots) == `0`" - }, - { - "expected": "DBClusterSnapshotNotFoundFault", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusterSnapshots[].Status" - } - ] - }, - "DBClusterAvailable": { - "delay": 30, - "operation": "DescribeDBClusters", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBClusters[].Status" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - } - ] - }, - "DBClusterDeleted": { - "delay": 30, - "operation": "DescribeDBClusters", - "maxAttempts": 60, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(DBClusters) == `0`" - }, - { - "expected": "DBClusterNotFoundFault", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - }, - { - "expected": "rebooting", - "matcher": "pathAny", - "state": "failure", - 
"argument": "DBClusters[].Status" - }, - { - "expected": "resetting-master-credentials", - "matcher": "pathAny", - "state": "failure", - "argument": "DBClusters[].Status" - } - ] - }, - "TenantDatabaseAvailable": { - "delay": 30, - "operation": "DescribeTenantDatabases", - "maxAttempts": 60, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "TenantDatabases[].Status" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "TenantDatabases[].Status" - }, - { - "expected": "incompatible-parameters", - "matcher": "pathAny", - "state": "failure", - "argument": "TenantDatabases[].Status" - }, - { - "expected": "incompatible-restore", - "matcher": "pathAny", - "state": "failure", - "argument": "TenantDatabases[].Status" - } - ] - }, - "TenantDatabaseDeleted": { - "delay": 30, - "operation": "DescribeTenantDatabases", - "maxAttempts": 60, - "acceptors": [ - { - "expected": true, - "matcher": "path", - "state": "success", - "argument": "length(TenantDatabases) == `0`" - }, - { - "expected": "DBInstanceNotFoundFault", - "matcher": "error", - "state": "success" - } - ] - }, - "DBSnapshotCompleted": { - "delay": 15, - "operation": "DescribeDBSnapshots", - "maxAttempts": 40, - "acceptors": [ - { - "expected": "DBSnapshotNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "DBSnapshots[].Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 86c48ea..0000000 Binary files a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/examples-1.json b/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/paginators-1.json deleted file mode 100644 index ba46aa4..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/paginators-1.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "pagination": { - "DescribeTable": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ColumnList" - }, - "GetStatementResult": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Records" - }, - "ListDatabases": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Databases" - }, - "ListSchemas": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Schemas" - }, - "ListStatements": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Statements" - }, - "ListTables": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Tables" - }, - "GetStatementResultV2": { - "input_token": "NextToken", - "output_token": "NextToken", - 
"result_key": "Records" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/paginators-1.sdk-extras.json deleted file mode 100644 index 8a06016..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/paginators-1.sdk-extras.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetStatementResult": { - "non_aggregate_keys": [ - "ColumnMetadata", - "TotalNumRows" - ] - }, - "GetStatementResultV2": { - "non_aggregate_keys": [ - "ColumnMetadata", - "TotalNumRows", - "ResultFormat" - ] - }, - "DescribeTable": { - "non_aggregate_keys": [ - "TableName" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/service-2.json.gz deleted file mode 100644 index 1645c16..0000000 Binary files a/venv/Lib/site-packages/botocore/data/redshift-data/2019-12-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/endpoint-rule-set-1.json.gz deleted file mode 100644 index 697e2d4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/paginators-1.json b/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/paginators-1.json deleted file mode 100644 index 0b5e6ab..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/paginators-1.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "pagination": { - "ListEndpointAccess": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "endpoints" - }, - "ListNamespaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "namespaces" - }, - "ListRecoveryPoints": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recoveryPoints" - }, - "ListSnapshots": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "snapshots" - }, - "ListUsageLimits": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "usageLimits" - }, - "ListWorkgroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "workgroups" - }, - "ListTableRestoreStatus": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tableRestoreStatuses" - }, - "ListCustomDomainAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "associations" - }, - "ListScheduledActions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "scheduledActions" - }, - "ListSnapshotCopyConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "snapshotCopyConfigurations" - }, - "ListManagedWorkgroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": 
"managedWorkgroups" - }, - "ListTracks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tracks" - }, - "ListReservationOfferings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "reservationOfferingsList" - }, - "ListReservations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "reservationsList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/service-2.json.gz b/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/service-2.json.gz deleted file mode 100644 index 88f175c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/redshift-serverless/2021-04-21/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 923ae06..0000000 Binary files a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/examples-1.json b/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/paginators-1.json deleted file mode 100644 index 97559e4..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/paginators-1.json +++ /dev/null @@ -1,226 +0,0 @@ -{ - "pagination": { - "DescribeClusterParameterGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ParameterGroups" - }, - "DescribeClusterParameters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Parameters" - }, - "DescribeClusterSecurityGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ClusterSecurityGroups" - }, - "DescribeClusterSnapshots": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Snapshots" - }, - "DescribeClusterSubnetGroups": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ClusterSubnetGroups" - }, - "DescribeClusterVersions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ClusterVersions" - }, - "DescribeClusters": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Clusters" - }, - "DescribeDefaultClusterParameters": { - "input_token": "Marker", - "output_token": "DefaultClusterParameters.Marker", - "limit_key": "MaxRecords", - "result_key": "DefaultClusterParameters.Parameters" - }, - "DescribeEventSubscriptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "EventSubscriptionsList" - }, - "DescribeEvents": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "Events" - }, - "DescribeHsmClientCertificates": { - 
"input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "HsmClientCertificates" - }, - "DescribeHsmConfigurations": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "HsmConfigurations" - }, - "DescribeOrderableClusterOptions": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "OrderableClusterOptions" - }, - "DescribeReservedNodeOfferings": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedNodeOfferings" - }, - "DescribeReservedNodes": { - "input_token": "Marker", - "output_token": "Marker", - "limit_key": "MaxRecords", - "result_key": "ReservedNodes" - }, - "DescribeClusterDbRevisions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ClusterDbRevisions" - }, - "DescribeClusterTracks": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "MaintenanceTracks" - }, - "DescribeSnapshotCopyGrants": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "SnapshotCopyGrants" - }, - "DescribeSnapshotSchedules": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "SnapshotSchedules" - }, - "DescribeTableRestoreStatus": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "TableRestoreStatusDetails" - }, - "DescribeTags": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "TaggedResources" - }, - "GetReservedNodeExchangeOfferings": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ReservedNodeOfferings" - }, - "DescribeNodeConfigurationOptions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "NodeConfigurationOptionList" - }, - "DescribeScheduledActions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ScheduledActions" - }, - "DescribeUsageLimits": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "UsageLimits" - }, - "DescribeEndpointAccess": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "EndpointAccessList" - }, - "DescribeEndpointAuthorization": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "EndpointAuthorizationList" - }, - "DescribeDataShares": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DataShares" - }, - "DescribeDataSharesForConsumer": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DataShares" - }, - "DescribeDataSharesForProducer": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "DataShares" - }, - "DescribeReservedNodeExchangeStatus": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ReservedNodeExchangeStatusDetails" - }, - "GetReservedNodeExchangeConfigurationOptions": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "ReservedNodeConfigurationOptionList" - }, - "DescribeCustomDomainAssociations": 
{ - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Associations" - }, - "DescribeInboundIntegrations": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "InboundIntegrations" - }, - "DescribeRedshiftIdcApplications": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "RedshiftIdcApplications" - }, - "ListRecommendations": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Recommendations" - }, - "DescribeIntegrations": { - "input_token": "Marker", - "limit_key": "MaxRecords", - "output_token": "Marker", - "result_key": "Integrations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/service-2.json.gz deleted file mode 100644 index 627ca45..0000000 Binary files a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/waiters-2.json deleted file mode 100644 index 164e9b0..0000000 --- a/venv/Lib/site-packages/botocore/data/redshift/2012-12-01/waiters-2.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "version": 2, - "waiters": { - "ClusterAvailable": { - "delay": 60, - "operation": "DescribeClusters", - "maxAttempts": 30, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Clusters[].ClusterStatus" - }, - { - "expected": "deleting", - "matcher": "pathAny", - "state": "failure", - "argument": "Clusters[].ClusterStatus" - }, - { - "expected": "ClusterNotFound", - "matcher": "error", - "state": "retry" - } - ] - }, - "ClusterDeleted": { - "delay": 60, - "operation": "DescribeClusters", - "maxAttempts": 30, - "acceptors": [ - { - "expected": "ClusterNotFound", - "matcher": "error", - "state": "success" - }, - { - "expected": "creating", - "matcher": "pathAny", - "state": "failure", - "argument": "Clusters[].ClusterStatus" - }, - { - "expected": "modifying", - "matcher": "pathAny", - "state": "failure", - "argument": "Clusters[].ClusterStatus" - } - ] - }, - "ClusterRestored": { - "operation": "DescribeClusters", - "maxAttempts": 30, - "delay": 60, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "Clusters[].RestoreStatus.Status", - "expected": "completed" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "Clusters[].ClusterStatus", - "expected": "deleting" - } - ] - }, - "SnapshotAvailable": { - "delay": 15, - "operation": "DescribeClusterSnapshots", - "maxAttempts": 20, - "acceptors": [ - { - "expected": "available", - "matcher": "pathAll", - "state": "success", - "argument": "Snapshots[].Status" - }, - { - "expected": "failed", - "matcher": "pathAny", - "state": "failure", - "argument": "Snapshots[].Status" - }, - { - "expected": "deleted", - "matcher": "pathAny", - "state": "failure", - "argument": "Snapshots[].Status" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3928359..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/examples-1.json b/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/examples-1.json deleted file mode 100644 index 039e04d..0000000 --- a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/examples-1.json +++ /dev/null @@ -1,651 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CompareFaces": [ - { - "input": { - "SimilarityThreshold": 90, - "SourceImage": { - "S3Object": { - "Bucket": "mybucket", - "Name": "mysourceimage" - } - }, - "TargetImage": { - "S3Object": { - "Bucket": "mybucket", - "Name": "mytargetimage" - } - } - }, - "output": { - "FaceMatches": [ - { - "Face": { - "BoundingBox": { - "Height": 0.33481481671333313, - "Left": 0.31888890266418457, - "Top": 0.4933333396911621, - "Width": 0.25 - }, - "Confidence": 99.9991226196289 - }, - "Similarity": 100 - } - ], - "SourceImageFace": { - "BoundingBox": { - "Height": 0.33481481671333313, - "Left": 0.31888890266418457, - "Top": 0.4933333396911621, - "Width": 0.25 - }, - "Confidence": 99.9991226196289 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation compares the largest face detected in the source image with each face detected in the target image.", - "id": "to-compare-two-images-1482181985581", - "title": "To compare two images" - } - ], - "CreateCollection": [ - { - "input": { - "CollectionId": "myphotos" - }, - "output": { - "CollectionArn": "aws:rekognition:us-west-2:123456789012:collection/myphotos", - "StatusCode": 200 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation creates a Rekognition collection for storing image data.", - "id": "to-create-a-collection-1481833313674", - "title": "To create a collection" - } - ], - "DeleteCollection": [ - { - "input": { - "CollectionId": "myphotos" - }, - "output": { - "StatusCode": 200 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation deletes a Rekognition collection.", - "id": "to-delete-a-collection-1481838179973", - "title": "To delete a collection" - } - ], - "DeleteFaces": [ - { - "input": { - "CollectionId": "myphotos", - "FaceIds": [ - "ff43d742-0c13-5d16-a3e8-03d3f58e980b" - ] - }, - "output": { - "DeletedFaces": [ - "ff43d742-0c13-5d16-a3e8-03d3f58e980b" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation deletes one or more faces from a Rekognition collection.", - "id": "to-delete-a-face-1482182799377", - "title": "To delete a face" - } - ], - "DetectFaces": [ - { - "input": { - "Image": { - "S3Object": { - "Bucket": "mybucket", - "Name": "myphoto" - } - } - }, - "output": { - "FaceDetails": [ - { - "BoundingBox": { - "Height": 0.18000000715255737, - "Left": 0.5555555820465088, - "Top": 0.33666667342185974, - "Width": 0.23999999463558197 - }, - "Confidence": 100, - "Landmarks": [ - { - "Type": "eyeLeft", - "X": 0.6394737362861633, - "Y": 0.40819624066352844 - }, - { - "Type": "eyeRight", - "X": 0.7266660928726196, - "Y": 0.41039225459098816 - }, - { - "Type": "eyeRight", - "X": 0.6912462115287781, - "Y": 0.44240960478782654 - }, - { - "Type": "mouthDown", - "X": 0.6306198239326477, - "Y": 0.46700039505958557 - }, - { - "Type": "mouthUp", - "X": 0.7215608954429626, - "Y": 0.47114261984825134 - } - ], - "Pose": { - "Pitch": 4.050806522369385, - "Roll": 0.9950747489929199, - "Yaw": 13.693790435791016 - }, - "Quality": { - "Brightness": 37.60169982910156, - "Sharpness": 80 - } - } - ], - "OrientationCorrection": 
"ROTATE_0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation detects faces in an image stored in an AWS S3 bucket.", - "id": "to-detect-faces-in-an-image-1481841782793", - "title": "To detect faces in an image" - } - ], - "DetectLabels": [ - { - "input": { - "Image": { - "S3Object": { - "Bucket": "mybucket", - "Name": "myphoto" - } - }, - "MaxLabels": 123, - "MinConfidence": 70 - }, - "output": { - "Labels": [ - { - "Confidence": 99.25072479248047, - "Name": "People" - }, - { - "Confidence": 99.25074005126953, - "Name": "Person" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation detects labels in the supplied image", - "id": "to-detect-labels-1481834255770", - "title": "To detect labels" - } - ], - "IndexFaces": [ - { - "input": { - "CollectionId": "myphotos", - "DetectionAttributes": [ - - ], - "ExternalImageId": "myphotoid", - "Image": { - "S3Object": { - "Bucket": "mybucket", - "Name": "myphoto" - } - } - }, - "output": { - "FaceRecords": [ - { - "Face": { - "BoundingBox": { - "Height": 0.33481481671333313, - "Left": 0.31888890266418457, - "Top": 0.4933333396911621, - "Width": 0.25 - }, - "Confidence": 99.9991226196289, - "FaceId": "ff43d742-0c13-5d16-a3e8-03d3f58e980b", - "ImageId": "465f4e93-763e-51d0-b030-b9667a2d94b1" - }, - "FaceDetail": { - "BoundingBox": { - "Height": 0.33481481671333313, - "Left": 0.31888890266418457, - "Top": 0.4933333396911621, - "Width": 0.25 - }, - "Confidence": 99.9991226196289, - "Landmarks": [ - { - "Type": "eyeLeft", - "X": 0.3976764678955078, - "Y": 0.6248345971107483 - }, - { - "Type": "eyeRight", - "X": 0.4810936450958252, - "Y": 0.6317117214202881 - }, - { - "Type": "noseLeft", - "X": 0.41986238956451416, - "Y": 0.7111940383911133 - }, - { - "Type": "mouthDown", - "X": 0.40525302290916443, - "Y": 0.7497701048851013 - }, - { - "Type": "mouthUp", - "X": 0.4753248989582062, - "Y": 0.7558549642562866 - } - ], - "Pose": { - "Pitch": -9.713645935058594, - "Roll": 4.707281112670898, - "Yaw": -24.438663482666016 - }, - "Quality": { - "Brightness": 29.23358917236328, - "Sharpness": 80 - } - } - }, - { - "Face": { - "BoundingBox": { - "Height": 0.32592591643333435, - "Left": 0.5144444704055786, - "Top": 0.15111111104488373, - "Width": 0.24444444477558136 - }, - "Confidence": 99.99950408935547, - "FaceId": "8be04dba-4e58-520d-850e-9eae4af70eb2", - "ImageId": "465f4e93-763e-51d0-b030-b9667a2d94b1" - }, - "FaceDetail": { - "BoundingBox": { - "Height": 0.32592591643333435, - "Left": 0.5144444704055786, - "Top": 0.15111111104488373, - "Width": 0.24444444477558136 - }, - "Confidence": 99.99950408935547, - "Landmarks": [ - { - "Type": "eyeLeft", - "X": 0.6006892323493958, - "Y": 0.290842205286026 - }, - { - "Type": "eyeRight", - "X": 0.6808141469955444, - "Y": 0.29609042406082153 - }, - { - "Type": "noseLeft", - "X": 0.6395332217216492, - "Y": 0.3522595763206482 - }, - { - "Type": "mouthDown", - "X": 0.5892083048820496, - "Y": 0.38689887523651123 - }, - { - "Type": "mouthUp", - "X": 0.674560010433197, - "Y": 0.394125759601593 - } - ], - "Pose": { - "Pitch": -4.683138370513916, - "Roll": 2.1029529571533203, - "Yaw": 6.716655254364014 - }, - "Quality": { - "Brightness": 34.951698303222656, - "Sharpness": 160 - } - } - } - ], - "OrientationCorrection": "ROTATE_0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation detects faces in an image and adds them to the specified Rekognition collection.", - "id": 
"to-add-a-face-to-a-collection-1482179542923", - "title": "To add a face to a collection" - } - ], - "ListCollections": [ - { - "input": { - }, - "output": { - "CollectionIds": [ - "myphotos" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation returns a list of Rekognition collections.", - "id": "to-list-the-collections-1482179199088", - "title": "To list the collections" - } - ], - "ListFaces": [ - { - "input": { - "CollectionId": "myphotos", - "MaxResults": 20 - }, - "output": { - "Faces": [ - { - "BoundingBox": { - "Height": 0.18000000715255737, - "Left": 0.5555559992790222, - "Top": 0.336667001247406, - "Width": 0.23999999463558197 - }, - "Confidence": 100, - "FaceId": "1c62e8b5-69a7-5b7d-b3cd-db4338a8a7e7", - "ImageId": "147fdf82-7a71-52cf-819b-e786c7b9746e" - }, - { - "BoundingBox": { - "Height": 0.16555599868297577, - "Left": 0.30963000655174255, - "Top": 0.7066670060157776, - "Width": 0.22074100375175476 - }, - "Confidence": 100, - "FaceId": "29a75abe-397b-5101-ba4f-706783b2246c", - "ImageId": "147fdf82-7a71-52cf-819b-e786c7b9746e" - }, - { - "BoundingBox": { - "Height": 0.3234420120716095, - "Left": 0.3233329951763153, - "Top": 0.5, - "Width": 0.24222199618816376 - }, - "Confidence": 99.99829864501953, - "FaceId": "38271d79-7bc2-5efb-b752-398a8d575b85", - "ImageId": "d5631190-d039-54e4-b267-abd22c8647c5" - }, - { - "BoundingBox": { - "Height": 0.03555560111999512, - "Left": 0.37388700246810913, - "Top": 0.2477779984474182, - "Width": 0.04747769981622696 - }, - "Confidence": 99.99210357666016, - "FaceId": "3b01bef0-c883-5654-ba42-d5ad28b720b3", - "ImageId": "812d9f04-86f9-54fc-9275-8d0dcbcb6784" - }, - { - "BoundingBox": { - "Height": 0.05333330109715462, - "Left": 0.2937690019607544, - "Top": 0.35666701197624207, - "Width": 0.07121659815311432 - }, - "Confidence": 99.99919891357422, - "FaceId": "4839a608-49d0-566c-8301-509d71b534d1", - "ImageId": "812d9f04-86f9-54fc-9275-8d0dcbcb6784" - }, - { - "BoundingBox": { - "Height": 0.3249259889125824, - "Left": 0.5155559778213501, - "Top": 0.1513350009918213, - "Width": 0.24333299696445465 - }, - "Confidence": 99.99949645996094, - "FaceId": "70008e50-75e4-55d0-8e80-363fb73b3a14", - "ImageId": "d5631190-d039-54e4-b267-abd22c8647c5" - }, - { - "BoundingBox": { - "Height": 0.03777780011296272, - "Left": 0.7002969980239868, - "Top": 0.18777799606323242, - "Width": 0.05044509842991829 - }, - "Confidence": 99.92639923095703, - "FaceId": "7f5f88ed-d684-5a88-b0df-01e4a521552b", - "ImageId": "812d9f04-86f9-54fc-9275-8d0dcbcb6784" - }, - { - "BoundingBox": { - "Height": 0.05555560067296028, - "Left": 0.13946600258350372, - "Top": 0.46333301067352295, - "Width": 0.07270029932260513 - }, - "Confidence": 99.99469757080078, - "FaceId": "895b4e2c-81de-5902-a4bd-d1792bda00b2", - "ImageId": "812d9f04-86f9-54fc-9275-8d0dcbcb6784" - }, - { - "BoundingBox": { - "Height": 0.3259260058403015, - "Left": 0.5144439935684204, - "Top": 0.15111100673675537, - "Width": 0.24444399774074554 - }, - "Confidence": 99.99949645996094, - "FaceId": "8be04dba-4e58-520d-850e-9eae4af70eb2", - "ImageId": "465f4e93-763e-51d0-b030-b9667a2d94b1" - }, - { - "BoundingBox": { - "Height": 0.18888899683952332, - "Left": 0.3783380091190338, - "Top": 0.2355560064315796, - "Width": 0.25222599506378174 - }, - "Confidence": 99.9999008178711, - "FaceId": "908544ad-edc3-59df-8faf-6a87cc256cf5", - "ImageId": "3c731605-d772-541a-a5e7-0375dbc68a07" - }, - { - "BoundingBox": { - "Height": 0.33481499552726746, - "Left": 0.31888899207115173, - "Top": 
0.49333301186561584, - "Width": 0.25 - }, - "Confidence": 99.99909973144531, - "FaceId": "ff43d742-0c13-5d16-a3e8-03d3f58e980b", - "ImageId": "465f4e93-763e-51d0-b030-b9667a2d94b1" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation lists the faces in a Rekognition collection.", - "id": "to-list-the-faces-in-a-collection-1482181416530", - "title": "To list the faces in a collection" - } - ], - "SearchFaces": [ - { - "input": { - "CollectionId": "myphotos", - "FaceId": "70008e50-75e4-55d0-8e80-363fb73b3a14", - "FaceMatchThreshold": 90, - "MaxFaces": 10 - }, - "output": { - "FaceMatches": [ - { - "Face": { - "BoundingBox": { - "Height": 0.3259260058403015, - "Left": 0.5144439935684204, - "Top": 0.15111100673675537, - "Width": 0.24444399774074554 - }, - "Confidence": 99.99949645996094, - "FaceId": "8be04dba-4e58-520d-850e-9eae4af70eb2", - "ImageId": "465f4e93-763e-51d0-b030-b9667a2d94b1" - }, - "Similarity": 99.97222137451172 - }, - { - "Face": { - "BoundingBox": { - "Height": 0.16555599868297577, - "Left": 0.30963000655174255, - "Top": 0.7066670060157776, - "Width": 0.22074100375175476 - }, - "Confidence": 100, - "FaceId": "29a75abe-397b-5101-ba4f-706783b2246c", - "ImageId": "147fdf82-7a71-52cf-819b-e786c7b9746e" - }, - "Similarity": 97.04154968261719 - }, - { - "Face": { - "BoundingBox": { - "Height": 0.18888899683952332, - "Left": 0.3783380091190338, - "Top": 0.2355560064315796, - "Width": 0.25222599506378174 - }, - "Confidence": 99.9999008178711, - "FaceId": "908544ad-edc3-59df-8faf-6a87cc256cf5", - "ImageId": "3c731605-d772-541a-a5e7-0375dbc68a07" - }, - "Similarity": 95.94520568847656 - } - ], - "SearchedFaceId": "70008e50-75e4-55d0-8e80-363fb73b3a14" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation searches for matching faces in the collection the supplied face belongs to.", - "id": "to-delete-a-face-1482182799377", - "title": "To delete a face" - } - ], - "SearchFacesByImage": [ - { - "input": { - "CollectionId": "myphotos", - "FaceMatchThreshold": 95, - "Image": { - "S3Object": { - "Bucket": "mybucket", - "Name": "myphoto" - } - }, - "MaxFaces": 5 - }, - "output": { - "FaceMatches": [ - { - "Face": { - "BoundingBox": { - "Height": 0.3234420120716095, - "Left": 0.3233329951763153, - "Top": 0.5, - "Width": 0.24222199618816376 - }, - "Confidence": 99.99829864501953, - "FaceId": "38271d79-7bc2-5efb-b752-398a8d575b85", - "ImageId": "d5631190-d039-54e4-b267-abd22c8647c5" - }, - "Similarity": 99.97036743164062 - } - ], - "SearchedFaceBoundingBox": { - "Height": 0.33481481671333313, - "Left": 0.31888890266418457, - "Top": 0.4933333396911621, - "Width": 0.25 - }, - "SearchedFaceConfidence": 99.9991226196289 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation searches for faces in a Rekognition collection that match the largest face in an S3 bucket stored image.", - "id": "to-search-for-faces-matching-a-supplied-image-1482175994491", - "title": "To search for faces matching a supplied image" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/paginators-1.json deleted file mode 100644 index 436503d..0000000 --- a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListCollections": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": 
"NextToken", - "result_key": [ - "CollectionIds", - "FaceModelVersions" - ] - }, - "ListFaces": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Faces", - "non_aggregate_keys": [ - "FaceModelVersion" - ] - }, - "ListStreamProcessors": { - "result_key": "StreamProcessors", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "DescribeProjectVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ProjectVersionDescriptions" - }, - "DescribeProjects": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ProjectDescriptions" - }, - "ListDatasetEntries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DatasetEntries" - }, - "ListDatasetLabels": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DatasetLabelDescriptions" - }, - "ListProjectPolicies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ProjectPolicies" - }, - "ListUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Users" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/service-2.json.gz deleted file mode 100644 index d8d70c7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/waiters-2.json b/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/waiters-2.json deleted file mode 100644 index c67dc62..0000000 --- a/venv/Lib/site-packages/botocore/data/rekognition/2016-06-27/waiters-2.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "version": 2, - "waiters": { - "ProjectVersionTrainingCompleted": { - "description": "Wait until the ProjectVersion training completes.", - "operation": "DescribeProjectVersions", - "delay": 120, - "maxAttempts": 360, - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "ProjectVersionDescriptions[].Status", - "expected": "TRAINING_COMPLETED" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "ProjectVersionDescriptions[].Status", - "expected": "TRAINING_FAILED" - } - ] - }, - "ProjectVersionRunning": { - "description": "Wait until the ProjectVersion is running.", - "delay": 30, - "maxAttempts": 40, - "operation": "DescribeProjectVersions", - "acceptors": [ - { - "state": "success", - "matcher": "pathAll", - "argument": "ProjectVersionDescriptions[].Status", - "expected": "RUNNING" - }, - { - "state": "failure", - "matcher": "pathAny", - "argument": "ProjectVersionDescriptions[].Status", - "expected": "FAILED" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/endpoint-rule-set-1.json.gz deleted file mode 100644 index d7008d7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/paginators-1.json deleted file mode 100644 index 005e53f..0000000 --- a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListSpaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "spaces" - }, - "ListChannels": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "channels" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/service-2.json.gz b/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/service-2.json.gz deleted file mode 100644 index fa80c69..0000000 Binary files a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/waiters-2.json b/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/waiters-2.json deleted file mode 100644 index 3439ab1..0000000 --- a/venv/Lib/site-packages/botocore/data/repostspace/2022-05-13/waiters-2.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ChannelCreated" : { - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetChannel", - "acceptors" : [ { - "matcher" : "path", - "argument" : "channelStatus", - "state" : "success", - "expected" : "CREATED" - }, { - "matcher" : "path", - "argument" : "channelStatus", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "path", - "argument" : "channelStatus", - "state" : "retry", - "expected" : "CREATING" - } ] - }, - "ChannelDeleted" : { - "delay" : 2, - "maxAttempts" : 60, - "operation" : "GetChannel", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "channelStatus", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "channelStatus", - "state" : "failure", - "expected" : "DELETE_FAILED" - }, { - "matcher" : "path", - "argument" : "channelStatus", - "state" : "retry", - "expected" : "DELETING" - } ] - }, - "SpaceCreated" : { - "delay" : 300, - "maxAttempts" : 24, - "operation" : "GetSpace", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "CREATED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "CREATING" - } ] - }, - "SpaceDeleted" : { - "delay" : 300, - "maxAttempts" : 24, - "operation" : "GetSpace", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETE_FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "retry", - "expected" : "DELETING" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0f1cc3d..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/examples-1.json b/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/paginators-1.json deleted file mode 100644 index 665877a..0000000 --- a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListAppAssessmentResourceDrifts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "resourceDrifts" - }, - "ListResourceGroupingRecommendations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "groupingRecommendations" - }, - "ListMetrics": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "rows" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/service-2.json.gz deleted file mode 100644 index 3271cd3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resiliencehub/2020-04-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2089235..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/paginators-1.json deleted file mode 100644 index 205f17d..0000000 --- a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/paginators-1.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "pagination": { - "ListIndexes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Indexes" - }, - "ListSupportedResourceTypes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceTypes" - }, - "ListViews": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Views" - }, - "Search": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Resources" - }, - "ListIndexesForMembers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Indexes" - }, - "ListResources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Resources" - }, - "ListManagedViews": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ManagedViews" - }, - "GetResourceExplorerSetup": { - "input_token": "NextToken", - "output_token": 
"NextToken", - "limit_key": "MaxResults", - "result_key": "Regions" - }, - "ListServiceIndexes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Indexes" - }, - "ListServiceViews": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ServiceViews" - }, - "ListStreamingAccessForServices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "StreamingAccessForServices" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/paginators-1.sdk-extras.json deleted file mode 100644 index 92a00fe..0000000 --- a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/paginators-1.sdk-extras.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "Search": { - "non_aggregate_keys": [ - "ViewArn", - "Count" - ] - }, - "ListResources": { - "non_aggregate_keys": [ - "ViewArn" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/service-2.json.gz deleted file mode 100644 index 916f26c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/waiters-2.json b/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/resource-explorer-2/2022-07-28/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index dc5b4e3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/examples-1.json b/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/paginators-1.json deleted file mode 100644 index 1210885..0000000 --- a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListGroups": { - "result_key": [ - "GroupIdentifiers", - "Groups" - ], - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "SearchResources": { - "result_key": "ResourceIdentifiers", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListGroupResources": { - "result_key": [ - "ResourceIdentifiers", - "Resources" - ], - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListGroupingStatuses": { 
- "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "GroupingStatuses" - }, - "ListTagSyncTasks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TagSyncTasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/paginators-1.sdk-extras.json deleted file mode 100644 index eaa5232..0000000 --- a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListGroupingStatuses": { - "non_aggregate_keys": [ - "Group" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/service-2.json.gz deleted file mode 100644 index f4c4fb4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resource-groups/2017-11-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3208043..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/examples-1.json b/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/paginators-1.json deleted file mode 100644 index bbf7282..0000000 --- a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/paginators-1.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "pagination": { - "GetResources": { - "input_token": "PaginationToken", - "limit_key": "ResourcesPerPage", - "output_token": "PaginationToken", - "result_key": "ResourceTagMappingList" - }, - "GetTagKeys": { - "input_token": "PaginationToken", - "output_token": "PaginationToken", - "result_key": "TagKeys" - }, - "GetTagValues": { - "input_token": "PaginationToken", - "output_token": "PaginationToken", - "result_key": "TagValues" - }, - "GetComplianceSummary": { - "input_token": "PaginationToken", - "limit_key": "MaxResults", - "output_token": "PaginationToken", - "result_key": "SummaryList" - }, - "ListRequiredTags": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RequiredTags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/service-2.json.gz deleted file mode 100644 index 0a654d9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/resourcegroupstaggingapi/2017-01-26/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 92bf790..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/paginators-1.json deleted file mode 100644 index 9729861..0000000 --- a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/paginators-1.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "pagination": { - "ListCrls": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "crls" - }, - "ListProfiles": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "profiles" - }, - "ListSubjects": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "subjects" - }, - "ListTrustAnchors": { - "input_token": "nextToken", - "output_token": "nextToken", - "result_key": "trustAnchors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/service-2.json.gz deleted file mode 100644 index f577e5a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/rolesanywhere/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 99ce79a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/examples-1.json b/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/paginators-1.json deleted file mode 100644 index a2ef01b..0000000 --- a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListRoutingControls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RoutingControls" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/service-2.json.gz deleted file mode 100644 index 05952c5..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/route53-recovery-cluster/2019-12-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index c88876d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/paginators-1.json deleted file mode 100644 index 024682b..0000000 --- a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListAssociatedRoute53HealthChecks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "HealthCheckIds" - }, - "ListClusters": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Clusters" - }, - "ListControlPanels": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ControlPanels" - }, - "ListRoutingControls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RoutingControls" - }, - "ListSafetyRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SafetyRules" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/service-2.json.gz deleted file mode 100644 index 7c88531..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/waiters-2.json b/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/waiters-2.json deleted file mode 100644 index 1794757..0000000 --- a/venv/Lib/site-packages/botocore/data/route53-recovery-control-config/2020-11-02/waiters-2.json +++ /dev/null @@ -1,152 +0,0 @@ -{ - "version": 2, - "waiters": { - "ClusterCreated": { - "description": "Wait until a cluster is created", - "operation": "DescribeCluster", - "delay": 5, - "maxAttempts": 26, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "Cluster.Status", - "expected": "DEPLOYED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "Cluster.Status", - "expected": "PENDING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ClusterDeleted": { - "description": "Wait for a cluster to be deleted", - "operation": "DescribeCluster", - "delay": 5, - "maxAttempts": 26, - "acceptors": [ - { - "state": "success", - "matcher": "status", - "expected": 404 - }, - { - "state": "retry", - "matcher": "path", - "argument": "Cluster.Status", - "expected": "PENDING_DELETION" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ControlPanelCreated": { - "description": "Wait until a control panel is created", - "operation": "DescribeControlPanel", - "delay": 5, 
- "maxAttempts": 26, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "ControlPanel.Status", - "expected": "DEPLOYED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "ControlPanel.Status", - "expected": "PENDING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "ControlPanelDeleted": { - "description": "Wait until a control panel is deleted", - "operation": "DescribeControlPanel", - "delay": 5, - "maxAttempts": 26, - "acceptors": [ - { - "state": "success", - "matcher": "status", - "expected": 404 - }, - { - "state": "retry", - "matcher": "path", - "argument": "ControlPanel.Status", - "expected": "PENDING_DELETION" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "RoutingControlCreated": { - "description": "Wait until a routing control is created", - "operation": "DescribeRoutingControl", - "delay": 5, - "maxAttempts": 26, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "RoutingControl.Status", - "expected": "DEPLOYED" - }, - { - "state": "retry", - "matcher": "path", - "argument": "RoutingControl.Status", - "expected": "PENDING" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - }, - "RoutingControlDeleted": { - "description": "Wait for a routing control to be deleted", - "operation": "DescribeRoutingControl", - "delay": 5, - "maxAttempts": 26, - "acceptors": [ - { - "state": "success", - "matcher": "status", - "expected": 404 - }, - { - "state": "retry", - "matcher": "path", - "argument": "RoutingControl.Status", - "expected": "PENDING_DELETION" - }, - { - "state": "retry", - "matcher": "status", - "expected": 500 - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 20e40d7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/paginators-1.json deleted file mode 100644 index a71f088..0000000 --- a/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/paginators-1.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "pagination": { - "ListReadinessChecks": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReadinessChecks" - }, - "ListResourceSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceSets" - }, - "ListCells": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Cells" - }, - "ListRecoveryGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RecoveryGroups" - }, - "ListRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Rules" - }, - "ListCrossAccountAuthorizations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CrossAccountAuthorizations" - }, - "GetCellReadinessSummary": { - "input_token": "NextToken", - "output_token": "NextToken", - 
"limit_key": "MaxResults", - "result_key": "ReadinessChecks", - "non_aggregate_keys": [ - "Readiness" - ] - }, - "GetRecoveryGroupReadinessSummary": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ReadinessChecks", - "non_aggregate_keys": [ - "Readiness" - ] - }, - "GetReadinessCheckStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Resources", - "non_aggregate_keys": [ - "Readiness", - "Messages" - ] - }, - "GetReadinessCheckResourceStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Rules", - "non_aggregate_keys": [ - "Readiness" - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/service-2.json.gz deleted file mode 100644 index 1446957..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53-recovery-readiness/2019-12-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53/2013-04-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6addefe..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/examples-1.json b/venv/Lib/site-packages/botocore/data/route53/2013-04-01/examples-1.json deleted file mode 100644 index d757c2b..0000000 --- a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/examples-1.json +++ /dev/null @@ -1,762 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AssociateVPCWithHostedZone": [ - { - "input": { - "Comment": "", - "HostedZoneId": "Z3M3LMPEXAMPLE", - "VPC": { - "VPCId": "vpc-1a2b3c4d", - "VPCRegion": "us-east-2" - } - }, - "output": { - "ChangeInfo": { - "Comment": "", - "Id": "/change/C3HC6WDB2UANE2", - "Status": "INSYNC", - "SubmittedAt": "2017-01-31T01:36:41.958Z" - } - }, - "comments": { - "input": { - }, - "output": { - "Status": "Valid values are PENDING and INSYNC.", - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example associates the VPC with ID vpc-1a2b3c4d with the hosted zone with ID Z3M3LMPEXAMPLE.", - "id": "to-associate-a-vpc-with-a-hosted-zone-1484069228699", - "title": "To associate a VPC with a hosted zone" - } - ], - "ChangeResourceRecordSets": [ - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.44" - } - ], - "TTL": 60, - "Type": "A" - } - } - ], - "Comment": "Web server for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "Web server for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "TTL": "The amount of time in seconds that you want DNS resolvers to cache the values in this resource record set before submitting another request to Route 53", - "Value": "The value that is applicable to the value of Type. 
For example, if Type is A, Value is an IPv4 address" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates a resource record set that routes Internet traffic to a resource with an IP address of 192.0.2.44.", - "id": "to-create-update-or-delete-resource-record-sets-1484344703668", - "title": "To create a basic resource record set" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "HealthCheckId": "abcdef11-2222-3333-4444-555555fedcba", - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.44" - } - ], - "SetIdentifier": "Seattle data center", - "TTL": 60, - "Type": "A", - "Weight": 100 - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "HealthCheckId": "abcdef66-7777-8888-9999-000000fedcba", - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.45" - } - ], - "SetIdentifier": "Portland data center", - "TTL": 60, - "Type": "A", - "Weight": 200 - } - } - ], - "Comment": "Web servers for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "Web servers for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "TTL": "The amount of time in seconds that you want DNS resolvers to cache the values in this resource record set before submitting another request to Route 53. TTLs must be the same for all weighted resource record sets that have the same name and type.", - "Value": "The value that is applicable to the value of Type. For example, if Type is A, Value is an IPv4 address" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates two weighted resource record sets. The resource with a Weight of 100 will get 1/3rd of traffic (100/100+200), and the other resource will get the rest of the traffic for example.com.", - "id": "to-create-weighted-resource-record-sets-1484348208522", - "title": "To create weighted resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "d123rk29d0stfj.cloudfront.net", - "EvaluateTargetHealth": false, - "HostedZoneId": "Z2FDTNDATAQYW2" - }, - "Name": "example.com", - "Type": "A" - } - } - ], - "Comment": "CloudFront distribution for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "CloudFront distribution for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "DNSName": "The DNS name assigned to the resource", - "HostedZoneId": "Depends on the type of resource that you want to route traffic to", - "Type": "A or AAAA, depending on the type of resource that you want to route traffic to" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." 
- } - }, - "description": "The following example creates an alias resource record set that routes traffic to a CloudFront distribution.", - "id": "to-create-an-alias-resource-record-set-1484348404062", - "title": "To create an alias resource record set" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-123456789.us-east-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z3AADJGX6KTTL2" - }, - "Name": "example.com", - "SetIdentifier": "Ohio region", - "Type": "A", - "Weight": 100 - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-987654321.us-west-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z1H1FL5HABSF5" - }, - "Name": "example.com", - "SetIdentifier": "Oregon region", - "Type": "A", - "Weight": 200 - } - } - ], - "Comment": "ELB load balancers for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "ELB load balancers for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "DNSName": "The DNS name assigned to the resource", - "HostedZoneId": "Depends on the type of resource that you want to route traffic to", - "Type": "A or AAAA, depending on the type of resource that you want to route traffic to" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates two weighted alias resource record sets that route traffic to ELB load balancers. The resource with a Weight of 100 will get 1/3rd of traffic (100/100+200), and the other resource will get the rest of the traffic for example.com.", - "id": "to-create-weighted-alias-resource-record-sets-1484349467416", - "title": "To create weighted alias resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "HealthCheckId": "abcdef11-2222-3333-4444-555555fedcba", - "Name": "example.com", - "Region": "us-east-2", - "ResourceRecords": [ - { - "Value": "192.0.2.44" - } - ], - "SetIdentifier": "Ohio region", - "TTL": 60, - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "HealthCheckId": "abcdef66-7777-8888-9999-000000fedcba", - "Name": "example.com", - "Region": "us-west-2", - "ResourceRecords": [ - { - "Value": "192.0.2.45" - } - ], - "SetIdentifier": "Oregon region", - "TTL": 60, - "Type": "A" - } - } - ], - "Comment": "EC2 instances for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "EC2 instances for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "TTL": "The amount of time in seconds that you want DNS resolvers to cache the values in this resource record set before submitting another request to Route 53", - "Value": "The value that is applicable to the value of Type. For example, if Type is A, Value is an IPv4 address" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." 
- } - }, - "description": "The following example creates two latency resource record sets that route traffic to EC2 instances. Traffic for example.com is routed either to the Ohio region or the Oregon region, depending on the latency between the user and those regions.", - "id": "to-create-latency-resource-record-sets-1484350219917", - "title": "To create latency resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-123456789.us-east-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z3AADJGX6KTTL2" - }, - "Name": "example.com", - "Region": "us-east-2", - "SetIdentifier": "Ohio region", - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-987654321.us-west-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z1H1FL5HABSF5" - }, - "Name": "example.com", - "Region": "us-west-2", - "SetIdentifier": "Oregon region", - "Type": "A" - } - } - ], - "Comment": "ELB load balancers for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "ELB load balancers for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "DNSName": "The DNS name assigned to the resource", - "HostedZoneId": "Depends on the type of resource that you want to route traffic to", - "Type": "A or AAAA, depending on the type of resource that you want to route traffic to" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates two latency alias resource record sets that route traffic for example.com to ELB load balancers. Requests are routed either to the Ohio region or the Oregon region, depending on the latency between the user and those regions.", - "id": "to-create-latency-alias-resource-record-sets-1484601774179", - "title": "To create latency alias resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "Failover": "PRIMARY", - "HealthCheckId": "abcdef11-2222-3333-4444-555555fedcba", - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.44" - } - ], - "SetIdentifier": "Ohio region", - "TTL": 60, - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "Failover": "SECONDARY", - "HealthCheckId": "abcdef66-7777-8888-9999-000000fedcba", - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.45" - } - ], - "SetIdentifier": "Oregon region", - "TTL": 60, - "Type": "A" - } - } - ], - "Comment": "Failover configuration for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "Failover configuration for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "TTL": "The amount of time in seconds that you want DNS resolvers to cache the values in this resource record set before submitting another request to Route 53", - "Value": "The value that is applicable to the value of Type. 
For example, if Type is A, Value is an IPv4 address" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates primary and secondary failover resource record sets that route traffic to EC2 instances. Traffic is generally routed to the primary resource, in the Ohio region. If that resource is unavailable, traffic is routed to the secondary resource, in the Oregon region.", - "id": "to-create-failover-resource-record-sets-1484604541740", - "title": "To create failover resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-123456789.us-east-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z3AADJGX6KTTL2" - }, - "Failover": "PRIMARY", - "Name": "example.com", - "SetIdentifier": "Ohio region", - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-987654321.us-west-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z1H1FL5HABSF5" - }, - "Failover": "SECONDARY", - "Name": "example.com", - "SetIdentifier": "Oregon region", - "Type": "A" - } - } - ], - "Comment": "Failover alias configuration for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "Failover alias configuration for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "DNSName": "The DNS name assigned to the resource", - "HostedZoneId": "Depends on the type of resource that you want to route traffic to", - "Type": "A or AAAA, depending on the type of resource that you want to route traffic to" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates primary and secondary failover alias resource record sets that route traffic to ELB load balancers. Traffic is generally routed to the primary resource, in the Ohio region. 
If that resource is unavailable, traffic is routed to the secondary resource, in the Oregon region.", - "id": "to-create-failover-alias-resource-record-sets-1484607497724", - "title": "To create failover alias resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "GeoLocation": { - "ContinentCode": "NA" - }, - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.44" - } - ], - "SetIdentifier": "North America", - "TTL": 60, - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "GeoLocation": { - "ContinentCode": "SA" - }, - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.45" - } - ], - "SetIdentifier": "South America", - "TTL": 60, - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "GeoLocation": { - "ContinentCode": "EU" - }, - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.46" - } - ], - "SetIdentifier": "Europe", - "TTL": 60, - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "GeoLocation": { - "CountryCode": "*" - }, - "Name": "example.com", - "ResourceRecords": [ - { - "Value": "192.0.2.47" - } - ], - "SetIdentifier": "Other locations", - "TTL": 60, - "Type": "A" - } - } - ], - "Comment": "Geolocation configuration for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "Geolocation configuration for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "TTL": "The amount of time in seconds that you want DNS resolvers to cache the values in this resource record set before submitting another request to Route 53", - "Value": "The value that is applicable to the value of Type. For example, if Type is A, Value is an IPv4 address" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates four geolocation resource record sets that use IPv4 addresses to route traffic to resources such as web servers running on EC2 instances. 
Traffic is routed to one of four IP addresses, for North America (NA), for South America (SA), for Europe (EU), and for all other locations (*).", - "id": "to-create-geolocation-resource-record-sets-1484612462466", - "title": "To create geolocation resource record sets" - }, - { - "input": { - "ChangeBatch": { - "Changes": [ - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-123456789.us-east-2.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z3AADJGX6KTTL2" - }, - "GeoLocation": { - "ContinentCode": "NA" - }, - "Name": "example.com", - "SetIdentifier": "North America", - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-234567890.sa-east-1.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z2P70J7HTTTPLU" - }, - "GeoLocation": { - "ContinentCode": "SA" - }, - "Name": "example.com", - "SetIdentifier": "South America", - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-234567890.eu-central-1.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z215JYRZR1TBD5" - }, - "GeoLocation": { - "ContinentCode": "EU" - }, - "Name": "example.com", - "SetIdentifier": "Europe", - "Type": "A" - } - }, - { - "Action": "CREATE", - "ResourceRecordSet": { - "AliasTarget": { - "DNSName": "example-com-234567890.ap-southeast-1.elb.amazonaws.com ", - "EvaluateTargetHealth": true, - "HostedZoneId": "Z1LMS91P8CMLE5" - }, - "GeoLocation": { - "CountryCode": "*" - }, - "Name": "example.com", - "SetIdentifier": "Other locations", - "Type": "A" - } - } - ], - "Comment": "Geolocation alias configuration for example.com" - }, - "HostedZoneId": "Z3M3LMPEXAMPLE" - }, - "output": { - "ChangeInfo": { - "Comment": "Geolocation alias configuration for example.com", - "Id": "/change/C2682N5HXP0BZ4", - "Status": "PENDING", - "SubmittedAt": "2017-02-10T01:36:41.958Z" - } - }, - "comments": { - "input": { - "Action": "Valid values: CREATE, DELETE, UPSERT", - "DNSName": "The DNS name assigned to the resource", - "HostedZoneId": "Depends on the type of resource that you want to route traffic to", - "Type": "A or AAAA, depending on the type of resource that you want to route traffic to" - }, - "output": { - "SubmittedAt": "The date and time are in Coordinated Universal Time (UTC) and ISO 8601 format." - } - }, - "description": "The following example creates four geolocation alias resource record sets that route traffic to ELB load balancers. Traffic is routed to one of four IP addresses, for North America (NA), for South America (SA), for Europe (EU), and for all other locations (*).", - "id": "to-create-geolocation-alias-resource-record-sets-1484612871203", - "title": "To create geolocation alias resource record sets" - } - ], - "ChangeTagsForResource": [ - { - "input": { - "AddTags": [ - { - "Key": "apex", - "Value": "3874" - }, - { - "Key": "acme", - "Value": "4938" - } - ], - "RemoveTagKeys": [ - "Nadir" - ], - "ResourceId": "Z3M3LMPEXAMPLE", - "ResourceType": "hostedzone" - }, - "output": { - }, - "comments": { - "input": { - "ResourceType": "Valid values are healthcheck and hostedzone." 
- }, - "output": { - } - }, - "description": "The following example adds two tags and removes one tag from the hosted zone with ID Z3M3LMPEXAMPLE.", - "id": "to-add-or-remove-tags-from-a-hosted-zone-or-health-check-1484084752409", - "title": "To add or remove tags from a hosted zone or health check" - } - ], - "GetHostedZone": [ - { - "input": { - "Id": "Z3M3LMPEXAMPLE" - }, - "output": { - "DelegationSet": { - "NameServers": [ - "ns-2048.awsdns-64.com", - "ns-2049.awsdns-65.net", - "ns-2050.awsdns-66.org", - "ns-2051.awsdns-67.co.uk" - ] - }, - "HostedZone": { - "CallerReference": "C741617D-04E4-F8DE-B9D7-0D150FC61C2E", - "Config": { - "PrivateZone": false - }, - "Id": "/hostedzone/Z3M3LMPEXAMPLE", - "Name": "myawsbucket.com.", - "ResourceRecordSetCount": 8 - } - }, - "comments": { - "input": { - }, - "output": { - "Id": "The ID of the hosted zone that you specified in the GetHostedZone request.", - "Name": "The name of the hosted zone.", - "NameServers": "The servers that you specify in your domain configuration.", - "PrivateZone": "True if this is a private hosted zone, false if it's a public hosted zone." - } - }, - "description": "The following example gets information about the Z3M3LMPEXAMPLE hosted zone.", - "id": "to-get-information-about-a-hosted-zone-1481752361124", - "title": "To get information about a hosted zone" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53/2013-04-01/paginators-1.json deleted file mode 100644 index 2c37096..0000000 --- a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/paginators-1.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "pagination": { - "ListHealthChecks": { - "input_token": "Marker", - "output_token": "NextMarker", - "more_results": "IsTruncated", - "limit_key": "MaxItems", - "result_key": "HealthChecks" - }, - "ListHostedZones": { - "input_token": "Marker", - "output_token": "NextMarker", - "more_results": "IsTruncated", - "limit_key": "MaxItems", - "result_key": "HostedZones" - }, - "ListResourceRecordSets": { - "more_results": "IsTruncated", - "limit_key": "MaxItems", - "result_key": "ResourceRecordSets", - "input_token": [ - "StartRecordName", - "StartRecordType", - "StartRecordIdentifier" - ], - "output_token": [ - "NextRecordName", - "NextRecordType", - "NextRecordIdentifier" - ] - }, - "ListVPCAssociationAuthorizations": { - "input_token": "NextToken", - "output_token": "NextToken", - "non_aggregate_keys": [ - "HostedZoneId" - ], - "result_key": [ - "VPCs" - ] - }, - "ListQueryLoggingConfigs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "QueryLoggingConfigs" - }, - "ListCidrBlocks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CidrBlocks" - }, - "ListCidrCollections": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CidrCollections" - }, - "ListCidrLocations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CidrLocations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53/2013-04-01/service-2.json.gz deleted file mode 100644 index 36ff283..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/route53/2013-04-01/waiters-2.json deleted file mode 100644 index 94aad39..0000000 --- a/venv/Lib/site-packages/botocore/data/route53/2013-04-01/waiters-2.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 2, - "waiters": { - "ResourceRecordSetsChanged": { - "delay": 30, - "maxAttempts": 60, - "operation": "GetChange", - "acceptors": [ - { - "matcher": "path", - "expected": "INSYNC", - "argument": "ChangeInfo.Status", - "state": "success" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index a52fbb3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/examples-1.json b/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/paginators-1.json deleted file mode 100644 index c2f5cbc..0000000 --- a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/paginators-1.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "version": "1.0", - "pagination": { - "ListDomains": { - "limit_key": "MaxItems", - "input_token": "Marker", - "output_token": "NextPageMarker", - "result_key": "Domains" - }, - "ListOperations": { - "limit_key": "MaxItems", - "input_token": "Marker", - "output_token": "NextPageMarker", - "result_key": "Operations" - }, - "ViewBilling": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextPageMarker", - "result_key": "BillingRecords" - }, - "ListPrices": { - "input_token": "Marker", - "limit_key": "MaxItems", - "output_token": "NextPageMarker", - "result_key": "Prices" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/service-2.json.gz deleted file mode 100644 index 68ccddb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53domains/2014-05-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3ed7f54..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/paginators-1.json deleted file mode 100644 index abc8b60..0000000 --- a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "ListAccessSources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": 
"accessSources" - }, - "ListAccessTokens": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accessTokens" - }, - "ListDNSViews": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dnsViews" - }, - "ListFirewallDomainLists": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "firewallDomainLists" - }, - "ListFirewallDomains": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "domains" - }, - "ListFirewallRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "firewallRules" - }, - "ListGlobalResolvers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "globalResolvers" - }, - "ListHostedZoneAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "hostedZoneAssociations" - }, - "ListManagedFirewallDomainLists": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "managedFirewallDomainLists" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/service-2.json.gz deleted file mode 100644 index a0404e8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/waiters-2.json b/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/route53globalresolver/2022-09-27/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4efda4c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/paginators-1.json deleted file mode 100644 index a321bb7..0000000 --- a/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListProfileAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProfileAssociations" - }, - "ListProfileResourceAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProfileResourceAssociations" - }, - "ListProfiles": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProfileSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/service-2.json.gz deleted file mode 100644 index a3afff8..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/route53profiles/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9e49f98..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/examples-1.json b/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/paginators-1.json deleted file mode 100644 index d652943..0000000 --- a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListResolverEndpointIpAddresses": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "IpAddresses" - }, - "ListResolverEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverEndpoints" - }, - "ListResolverQueryLogConfigAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverQueryLogConfigAssociations" - }, - "ListResolverQueryLogConfigs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverQueryLogConfigs" - }, - "ListResolverRuleAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverRuleAssociations" - }, - "ListResolverRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverRules" - }, - "ListResolverDnssecConfigs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverDnssecConfigs" - }, - "ListFirewallConfigs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FirewallConfigs" - }, - "ListFirewallDomainLists": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FirewallDomainLists" - }, - "ListFirewallDomains": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Domains" - }, - "ListFirewallRuleGroupAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FirewallRuleGroupAssociations" - }, - "ListFirewallRuleGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FirewallRuleGroups" - }, - "ListFirewallRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FirewallRules" - }, - "ListResolverConfigs": { 
- "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResolverConfigs" - }, - "ListOutpostResolvers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OutpostResolvers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/paginators-1.sdk-extras.json deleted file mode 100644 index 6808793..0000000 --- a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/paginators-1.sdk-extras.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListResolverEndpointIpAddresses": { - "non_aggregate_keys": [ - "MaxResults" - ] - }, - "ListResolverEndpoints": { - "non_aggregate_keys": [ - "MaxResults" - ] - }, - "ListResolverQueryLogConfigAssociations": { - "non_aggregate_keys": [ - "TotalCount", - "TotalFilteredCount" - ] - }, - "ListResolverQueryLogConfigs": { - "non_aggregate_keys": [ - "TotalCount", - "TotalFilteredCount" - ] - }, - "ListResolverRuleAssociations": { - "non_aggregate_keys": [ - "MaxResults" - ] - }, - "ListResolverRules": { - "non_aggregate_keys": [ - "MaxResults" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/service-2.json.gz deleted file mode 100644 index d94aef2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/route53resolver/2018-04-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index fd635a6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/paginators-1.json deleted file mode 100644 index 3c45764..0000000 --- a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListLinks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "links" - }, - "ListRequesterGateways": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "gatewayIds" - }, - "ListResponderGateways": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "gatewayIds" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/service-2.json.gz deleted file mode 100644 index b12ddc1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/waiters-2.json deleted file mode 100644 index 5e25124..0000000 --- a/venv/Lib/site-packages/botocore/data/rtbfabric/2023-05-15/waiters-2.json +++ /dev/null @@ -1,193 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "InboundExternalLinkActive" : { - "delay" : 30, - "maxAttempts" : 5, - 
"operation" : "GetInboundExternalLink", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "REJECTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ISOLATED" - } ] - }, - "LinkAccepted" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetLink", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACCEPTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "REJECTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETED" - } ] - }, - "LinkActive" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetLink", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "REJECTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETED" - } ] - }, - "OutboundExternalLinkActive" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetOutboundExternalLink", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "REJECTED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "FAILED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ISOLATED" - } ] - }, - "RequesterGatewayActive" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetRequesterGateway", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "RequesterGatewayDeleted" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetRequesterGateway", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "ResponderGatewayActive" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetResponderGateway", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ERROR" - } ] - }, - "ResponderGatewayDeleted" : { - "delay" : 30, - "maxAttempts" : 5, - "operation" : 
"GetResponderGateway", - "acceptors" : [ { - "matcher" : "path", - "argument" : "status", - "state" : "success", - "expected" : "DELETED" - }, { - "matcher" : "path", - "argument" : "status", - "state" : "failure", - "expected" : "ERROR" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/rum/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 97e27fc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/examples-1.json b/venv/Lib/site-packages/botocore/data/rum/2018-05-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/rum/2018-05-10/paginators-1.json deleted file mode 100644 index 1a04492..0000000 --- a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "GetAppMonitorData": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Events" - }, - "ListAppMonitors": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AppMonitorSummaries" - }, - "BatchGetRumMetricDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MetricDefinitions" - }, - "ListRumMetricsDestinations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Destinations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/rum/2018-05-10/service-2.json.gz deleted file mode 100644 index 4a34e72..0000000 Binary files a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/rum/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/rum/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 34b713b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/examples-1.json b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/examples-1.json deleted file mode 100644 index 38a47bb..0000000 --- a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/examples-1.json +++ /dev/null @@ -1,1843 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AbortMultipartUpload": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "bigobject", - "UploadId": "xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - 
"description": "The following example aborts a multipart upload.", - "id": "to-abort-a-multipart-upload-1481853354987", - "title": "To abort a multipart upload" - } - ], - "CompleteMultipartUpload": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "bigobject", - "MultipartUpload": { - "Parts": [ - { - "ETag": "\"d8c2eafd90c266e19ab9dcacc479f8af\"", - "PartNumber": "1" - }, - { - "ETag": "\"d8c2eafd90c266e19ab9dcacc479f8af\"", - "PartNumber": "2" - } - ] - }, - "UploadId": "7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" - }, - "output": { - "Bucket": "acexamplebucket", - "ETag": "\"4d9031c7644d8081c2829f4ea23c55f7-2\"", - "Key": "bigobject", - "Location": "https://examplebucket.s3.amazonaws.com/bigobject" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example completes a multipart upload.", - "id": "to-complete-multipart-upload-1481851590483", - "title": "To complete multipart upload" - } - ], - "CopyObject": [ - { - "input": { - "Bucket": "destinationbucket", - "CopySource": "/sourcebucket/HappyFacejpg", - "Key": "HappyFaceCopyjpg" - }, - "output": { - "CopyObjectResult": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "LastModified": "2016-12-15T17:38:53.000Z" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example copies an object from one bucket to another.", - "id": "to-copy-an-object-1481823186878", - "title": "To copy an object" - } - ], - "CreateBucket": [ - { - "input": { - "Bucket": "examplebucket", - "CreateBucketConfiguration": { - "LocationConstraint": "eu-west-1" - } - }, - "output": { - "Location": "http://examplebucket.s3.amazonaws.com/" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a bucket. The request specifies an AWS region where to create the bucket.", - "id": "to-create-a-bucket-in-a-specific-region-1483399072992", - "title": "To create a bucket in a specific region" - }, - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "Location": "/examplebucket" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a bucket.", - "id": "to-create-a-bucket--1472851826060", - "title": "To create a bucket " - } - ], - "CreateMultipartUpload": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "largeobject" - }, - "output": { - "Bucket": "examplebucket", - "Key": "largeobject", - "UploadId": "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6ZQjEs.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example initiates a multipart upload.", - "id": "to-initiate-a-multipart-upload-1481836794513", - "title": "To initiate a multipart upload" - } - ], - "DeleteBucket": [ - { - "input": { - "Bucket": "forrandall2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes the specified bucket.", - "id": "to-delete-a-bucket-1473108514262", - "title": "To delete a bucket" - } - ], - "DeleteBucketCors": [ - { - "input": { - "Bucket": "examplebucket" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes CORS configuration on a bucket.", - "id": "to-delete-cors-configuration-on-a-bucket-1483042856112", - "title": "To delete cors configuration on a bucket." 
- } - ], - "DeleteBucketLifecycle": [ - { - "input": { - "Bucket": "examplebucket" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes lifecycle configuration on a bucket.", - "id": "to-delete-lifecycle-configuration-on-a-bucket-1483043310583", - "title": "To delete lifecycle configuration on a bucket." - } - ], - "DeleteBucketPolicy": [ - { - "input": { - "Bucket": "examplebucket" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes bucket policy on the specified bucket.", - "id": "to-delete-bucket-policy-1483043406577", - "title": "To delete bucket policy" - } - ], - "DeleteBucketReplication": [ - { - "input": { - "Bucket": "example" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes replication configuration set on bucket.", - "id": "to-delete-bucket-replication-configuration-1483043684668", - "title": "To delete bucket replication configuration" - } - ], - "DeleteBucketTagging": [ - { - "input": { - "Bucket": "examplebucket" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes bucket tags.", - "id": "to-delete-bucket-tags-1483043846509", - "title": "To delete bucket tags" - } - ], - "DeleteBucketWebsite": [ - { - "input": { - "Bucket": "examplebucket" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes bucket website configuration.", - "id": "to-delete-bucket-website-configuration-1483043937825", - "title": "To delete bucket website configuration" - } - ], - "DeleteObject": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "objectkey.jpg" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an object from an S3 bucket.", - "id": "to-delete-an-object-1472850136595", - "title": "To delete an object" - }, - { - "input": { - "Bucket": "ExampleBucket", - "Key": "HappyFace.jpg" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an object from a non-versioned bucket.", - "id": "to-delete-an-object-from-a-non-versioned-bucket-1481588533089", - "title": "To delete an object (from a non-versioned bucket)" - } - ], - "DeleteObjectTagging": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg", - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" - }, - "output": { - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example removes tag set associated with the specified object version. The request specifies both the object key and object version.", - "id": "to-remove-tag-set-from-an-object-version-1483145285913", - "title": "To remove tag set from an object version" - }, - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - "VersionId": "null" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example removes tag set associated with the specified object. 
If the bucket is versioning enabled, the operation removes tag set from the latest object version.", - "id": "to-remove-tag-set-from-an-object-1483145342862", - "title": "To remove tag set from an object" - } - ], - "DeleteObjects": [ - { - "input": { - "Bucket": "examplebucket", - "Delete": { - "Objects": [ - { - "Key": "HappyFace.jpg", - "VersionId": "2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b" - }, - { - "Key": "HappyFace.jpg", - "VersionId": "yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd" - } - ], - "Quiet": false - } - }, - "output": { - "Deleted": [ - { - "Key": "HappyFace.jpg", - "VersionId": "yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd" - }, - { - "Key": "HappyFace.jpg", - "VersionId": "2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes objects from a bucket. The request specifies object versions. S3 deletes specific object versions and returns the key and versions of deleted objects in the response.", - "id": "to-delete-multiple-object-versions-from-a-versioned-bucket-1483147087737", - "title": "To delete multiple object versions from a versioned bucket" - }, - { - "input": { - "Bucket": "examplebucket", - "Delete": { - "Objects": [ - { - "Key": "objectkey1" - }, - { - "Key": "objectkey2" - } - ], - "Quiet": false - } - }, - "output": { - "Deleted": [ - { - "DeleteMarker": "true", - "DeleteMarkerVersionId": "A._w1z6EFiCF5uhtQMDal9JDkID9tQ7F", - "Key": "objectkey1" - }, - { - "DeleteMarker": "true", - "DeleteMarkerVersionId": "iOd_ORxhkKe_e8G8_oSGxt2PjsCZKlkt", - "Key": "objectkey2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes objects from a bucket. The bucket is versioned, and the request does not specify the object version to delete. In this case, all versions remain in the bucket and S3 adds a delete marker.", - "id": "to-delete-multiple-objects-from-a-versioned-bucket-1483146248805", - "title": "To delete multiple objects from a versioned bucket" - } - ], - "GetBucketCors": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "CORSRules": [ - { - "AllowedHeaders": [ - "Authorization" - ], - "AllowedMethods": [ - "GET" - ], - "AllowedOrigins": [ - "*" - ], - "MaxAgeSeconds": 3000 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns cross-origin resource sharing (CORS) configuration set on a bucket.", - "id": "to-get-cors-configuration-set-on-a-bucket-1481596855475", - "title": "To get cors configuration set on a bucket" - } - ], - "GetBucketLifecycle": [ - { - "input": { - "Bucket": "acl1" - }, - "output": { - "Rules": [ - { - "Expiration": { - "Days": 1 - }, - "ID": "delete logs", - "Prefix": "123/", - "Status": "Enabled" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example gets ACL on the specified bucket.", - "id": "to-get-a-bucket-acl-1474413606503", - "title": "To get a bucket acl" - } - ], - "GetBucketLifecycleConfiguration": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "Rules": [ - { - "ID": "Rule for TaxDocs/", - "Prefix": "TaxDocs", - "Status": "Enabled", - "Transitions": [ - { - "Days": 365, - "StorageClass": "STANDARD_IA" - } - ] - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves lifecycle configuration on set on a bucket. 
", - "id": "to-get-lifecycle-configuration-on-a-bucket-1481666063200", - "title": "To get lifecycle configuration on a bucket" - } - ], - "GetBucketLocation": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "LocationConstraint": "us-west-2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns bucket location.", - "id": "to-get-bucket-location-1481594573609", - "title": "To get bucket location" - } - ], - "GetBucketNotification": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "QueueConfiguration": { - "Event": "s3:ObjectCreated:Put", - "Events": [ - "s3:ObjectCreated:Put" - ], - "Id": "MDQ2OGQ4NDEtOTBmNi00YTM4LTk0NzYtZDIwN2I3NWQ1NjIx", - "Queue": "arn:aws:sqs:us-east-1:acct-id:S3ObjectCreatedEventQueue" - }, - "TopicConfiguration": { - "Event": "s3:ObjectCreated:Copy", - "Events": [ - "s3:ObjectCreated:Copy" - ], - "Id": "YTVkMWEzZGUtNTY1NS00ZmE2LWJjYjktMmRlY2QwODFkNTJi", - "Topic": "arn:aws:sns:us-east-1:acct-id:S3ObjectCreatedEventTopic" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns notification configuration set on a bucket.", - "id": "to-get-notification-configuration-set-on-a-bucket-1481594028667", - "title": "To get notification configuration set on a bucket" - }, - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "QueueConfiguration": { - "Event": "s3:ObjectCreated:Put", - "Events": [ - "s3:ObjectCreated:Put" - ], - "Id": "MDQ2OGQ4NDEtOTBmNi00YTM4LTk0NzYtZDIwN2I3NWQ1NjIx", - "Queue": "arn:aws:sqs:us-east-1:acct-id:S3ObjectCreatedEventQueue" - }, - "TopicConfiguration": { - "Event": "s3:ObjectCreated:Copy", - "Events": [ - "s3:ObjectCreated:Copy" - ], - "Id": "YTVkMWEzZGUtNTY1NS00ZmE2LWJjYjktMmRlY2QwODFkNTJi", - "Topic": "arn:aws:sns:us-east-1:acct-id:S3ObjectCreatedEventTopic" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns notification configuration set on a bucket.", - "id": "to-get-notification-configuration-set-on-a-bucket-1481594028667", - "title": "To get notification configuration set on a bucket" - } - ], - "GetBucketPolicy": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "Policy": "{\"Version\":\"2008-10-17\",\"Id\":\"LogPolicy\",\"Statement\":[{\"Sid\":\"Enables the log delivery group to publish logs to your bucket \",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"111122223333\"},\"Action\":[\"s3:GetBucketAcl\",\"s3:GetObjectAcl\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::policytest1/*\",\"arn:aws:s3:::policytest1\"]}]}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns bucket policy associated with a bucket.", - "id": "to-get-bucket-policy-1481595098424", - "title": "To get bucket policy" - } - ], - "GetBucketReplication": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "ReplicationConfiguration": { - "Role": "arn:aws:iam::acct-id:role/example-role", - "Rules": [ - { - "Destination": { - "Bucket": "arn:aws:s3:::destination-bucket" - }, - "ID": "MWIwNTkwZmItMTE3MS00ZTc3LWJkZDEtNzRmODQwYzc1OTQy", - "Prefix": "Tax", - "Status": "Enabled" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns replication configuration set on a bucket.", - "id": "to-get-replication-configuration-set-on-a-bucket-1481593597175", - "title": "To get replication configuration set on a 
bucket" - } - ], - "GetBucketRequestPayment": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "Payer": "BucketOwner" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves bucket versioning configuration.", - "id": "to-get-bucket-versioning-configuration-1483037183929", - "title": "To get bucket versioning configuration" - } - ], - "GetBucketTagging": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "TagSet": [ - { - "Key": "key1", - "Value": "value1" - }, - { - "Key": "key2", - "Value": "value2" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns tag set associated with a bucket", - "id": "to-get-tag-set-associated-with-a-bucket-1481593232107", - "title": "To get tag set associated with a bucket" - } - ], - "GetBucketVersioning": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "MFADelete": "Disabled", - "Status": "Enabled" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves bucket versioning configuration.", - "id": "to-get-bucket-versioning-configuration-1483037183929", - "title": "To get bucket versioning configuration" - } - ], - "GetBucketWebsite": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "ErrorDocument": { - "Key": "error.html" - }, - "IndexDocument": { - "Suffix": "index.html" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves website configuration of a bucket.", - "id": "to-get-bucket-website-configuration-1483037016926", - "title": "To get bucket website configuration" - } - ], - "GetObject": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - "AcceptRanges": "bytes", - "ContentLength": "3191", - "ContentType": "image/jpeg", - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "LastModified": "Thu, 15 Dec 2016 01:19:41 GMT", - "Metadata": { - }, - "TagCount": 2, - "VersionId": "null" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves an object for an S3 bucket.", - "id": "to-retrieve-an-object-1481827837012", - "title": "To retrieve an object" - }, - { - "input": { - "Bucket": "examplebucket", - "Key": "SampleFile.txt", - "Range": "bytes=0-9" - }, - "output": { - "AcceptRanges": "bytes", - "ContentLength": "10", - "ContentRange": "bytes 0-9/43", - "ContentType": "text/plain", - "ETag": "\"0d94420ffd0bc68cd3d152506b97a9cc\"", - "LastModified": "Thu, 09 Oct 2014 22:57:28 GMT", - "Metadata": { - }, - "VersionId": "null" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves an object for an S3 bucket. 
The request specifies the range header to retrieve a specific byte range.", - "id": "to-retrieve-a-byte-range-of-an-object--1481832674603", - "title": "To retrieve a byte range of an object " - } - ], - "GetObjectAcl": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - "Grants": [ - { - "Grantee": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", - "Type": "CanonicalUser" - }, - "Permission": "WRITE" - }, - { - "Grantee": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", - "Type": "CanonicalUser" - }, - "Permission": "WRITE_ACP" - }, - { - "Grantee": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", - "Type": "CanonicalUser" - }, - "Permission": "READ" - }, - { - "Grantee": { - "DisplayName": "owner-display-name", - "ID": "852b113eexamplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc", - "Type": "CanonicalUser" - }, - "Permission": "READ_ACP" - } - ], - "Owner": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves access control list (ACL) of an object.", - "id": "to-retrieve-object-acl-1481833557740", - "title": "To retrieve object ACL" - } - ], - "GetObjectTagging": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - "TagSet": [ - { - "Key": "Key4", - "Value": "Value4" - }, - { - "Key": "Key3", - "Value": "Value3" - } - ], - "VersionId": "null" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves tag set of an object.", - "id": "to-retrieve-tag-set-of-an-object-1481833847896", - "title": "To retrieve tag set of an object" - }, - { - "input": { - "Bucket": "examplebucket", - "Key": "exampleobject", - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" - }, - "output": { - "TagSet": [ - { - "Key": "Key1", - "Value": "Value1" - } - ], - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves tag set of an object. 
The request specifies object version.", - "id": "to-retrieve-tag-set-of-a-specific-object-version-1483400283663", - "title": "To retrieve tag set of a specific object version" - } - ], - "GetObjectTorrent": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves torrent files of an object.", - "id": "to-retrieve-torrent-files-for-an-object-1481834115959", - "title": "To retrieve torrent files for an object" - } - ], - "HeadBucket": [ - { - "input": { - "Bucket": "acl1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation checks to see if a bucket exists.", - "id": "to-determine-if-bucket-exists-1473110292262", - "title": "To determine if bucket exists" - } - ], - "HeadObject": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - "AcceptRanges": "bytes", - "ContentLength": "3191", - "ContentType": "image/jpeg", - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "LastModified": "Thu, 15 Dec 2016 01:19:41 GMT", - "Metadata": { - }, - "VersionId": "null" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves an object metadata.", - "id": "to-retrieve-metadata-of-an-object-without-returning-the-object-itself-1481834820480", - "title": "To retrieve metadata of an object without returning the object itself" - } - ], - "ListMultipartUploads": [ - { - "input": { - "Bucket": "examplebucket" - }, - "output": { - "Uploads": [ - { - "Initiated": "2014-05-01T05:40:58.000Z", - "Initiator": { - "DisplayName": "display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Key": "JavaFile", - "Owner": { - "DisplayName": "display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "StorageClass": "STANDARD", - "UploadId": "examplelUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--" - }, - { - "Initiated": "2014-05-01T05:41:27.000Z", - "Initiator": { - "DisplayName": "display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Key": "JavaFile", - "Owner": { - "DisplayName": "display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "StorageClass": "STANDARD", - "UploadId": "examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists in-progress multipart uploads on a specific bucket.", - "id": "to-list-in-progress-multipart-uploads-on-a-bucket-1481852775260", - "title": "To list in-progress multipart uploads on a bucket" - }, - { - "input": { - "Bucket": "examplebucket", - "KeyMarker": "nextkeyfrompreviousresponse", - "MaxUploads": "2", - "UploadIdMarker": "valuefrompreviousresponse" - }, - "output": { - "Bucket": "acl1", - "IsTruncated": true, - "KeyMarker": "", - "MaxUploads": "2", - "NextKeyMarker": "someobjectkey", - "NextUploadIdMarker": "examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--", - "UploadIdMarker": "", - "Uploads": [ - { - "Initiated": "2014-05-01T05:40:58.000Z", - "Initiator": { - "DisplayName": "ownder-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Key": "JavaFile", - "Owner": { - "DisplayName": 
"mohanataws", - "ID": "852b113e7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "StorageClass": "STANDARD", - "UploadId": "gZ30jIqlUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--" - }, - { - "Initiated": "2014-05-01T05:41:27.000Z", - "Initiator": { - "DisplayName": "ownder-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Key": "JavaFile", - "Owner": { - "DisplayName": "ownder-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "StorageClass": "STANDARD", - "UploadId": "b7tZSqIlo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example specifies the upload-id-marker and key-marker from previous truncated response to retrieve next setup of multipart uploads.", - "id": "list-next-set-of-multipart-uploads-when-previous-result-is-truncated-1482428106748", - "title": "List next set of multipart uploads when previous result is truncated" - } - ], - "ListObjectVersions": [ - { - "input": { - "Bucket": "examplebucket", - "Prefix": "HappyFace.jpg" - }, - "output": { - "Versions": [ - { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "IsLatest": true, - "Key": "HappyFace.jpg", - "LastModified": "2016-12-15T01:19:41.000Z", - "Owner": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Size": 3191, - "StorageClass": "STANDARD", - "VersionId": "null" - }, - { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "IsLatest": false, - "Key": "HappyFace.jpg", - "LastModified": "2016-12-13T00:58:26.000Z", - "Owner": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Size": 3191, - "StorageClass": "STANDARD", - "VersionId": "PHtexPGjH2y.zBgT8LmB7wwLI2mpbz.k" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example return versions of an object with specific key name prefix. The request limits the number of items returned to two. If there are are more than two object version, S3 returns NextToken in the response. 
You can specify this token value in your next request to fetch next set of object versions.", - "id": "to-list-object-versions-1481910996058", - "title": "To list object versions" - } - ], - "ListObjects": [ - { - "input": { - "Bucket": "examplebucket", - "MaxKeys": "2" - }, - "output": { - "Contents": [ - { - "ETag": "\"70ee1738b6b21e2c8a43f3a5ab0eee71\"", - "Key": "example1.jpg", - "LastModified": "2014-11-21T19:40:05.000Z", - "Owner": { - "DisplayName": "myname", - "ID": "12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Size": 11, - "StorageClass": "STANDARD" - }, - { - "ETag": "\"9c8af9a76df052144598c115ef33e511\"", - "Key": "example2.jpg", - "LastModified": "2013-11-15T01:10:49.000Z", - "Owner": { - "DisplayName": "myname", - "ID": "12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Size": 713193, - "StorageClass": "STANDARD" - } - ], - "NextMarker": "eyJNYXJrZXIiOiBudWxsLCAiYm90b190cnVuY2F0ZV9hbW91bnQiOiAyfQ==" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example list two objects in a bucket.", - "id": "to-list-objects-in-a-bucket-1473447646507", - "title": "To list objects in a bucket" - } - ], - "ListObjectsV2": [ - { - "input": { - "Bucket": "examplebucket", - "MaxKeys": "2" - }, - "output": { - "Contents": [ - { - "ETag": "\"70ee1738b6b21e2c8a43f3a5ab0eee71\"", - "Key": "happyface.jpg", - "LastModified": "2014-11-21T19:40:05.000Z", - "Size": 11, - "StorageClass": "STANDARD" - }, - { - "ETag": "\"becf17f89c30367a9a44495d62ed521a-1\"", - "Key": "test.jpg", - "LastModified": "2014-05-02T04:51:50.000Z", - "Size": 4192256, - "StorageClass": "STANDARD" - } - ], - "IsTruncated": true, - "KeyCount": "2", - "MaxKeys": "2", - "Name": "examplebucket", - "NextContinuationToken": "1w41l63U0xa8q7smH50vCxyTQqdxo69O3EmK28Bi5PcROI4wI/EyIJg==", - "Prefix": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves object list. The request specifies max keys to limit response to include only 2 object keys. ", - "id": "to-get-object-list", - "title": "To get object list" - } - ], - "ListParts": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "bigobject", - "UploadId": "example7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" - }, - "output": { - "Initiator": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Owner": { - "DisplayName": "owner-display-name", - "ID": "examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc" - }, - "Parts": [ - { - "ETag": "\"d8c2eafd90c266e19ab9dcacc479f8af\"", - "LastModified": "2016-12-16T00:11:42.000Z", - "PartNumber": "1", - "Size": 26246026 - }, - { - "ETag": "\"d8c2eafd90c266e19ab9dcacc479f8af\"", - "LastModified": "2016-12-16T00:15:01.000Z", - "PartNumber": "2", - "Size": 26246026 - } - ], - "StorageClass": "STANDARD" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists parts uploaded for a specific multipart upload.", - "id": "to-list-parts-of-a-multipart-upload-1481852006923", - "title": "To list parts of a multipart upload." 
- } - ], - "PutBucketAcl": [ - { - "input": { - "Bucket": "examplebucket", - "GrantFullControl": "id=examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484", - "GrantWrite": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example replaces existing ACL on a bucket. The ACL grants the bucket owner (specified using the owner ID) and write permission to the LogDelivery group. Because this is a replace operation, you must specify all the grants in your request. To incrementally add or remove ACL grants, you might use the console.", - "id": "put-bucket-acl-1482260397033", - "title": "Put bucket acl" - } - ], - "PutBucketCors": [ - { - "input": { - "Bucket": "", - "CORSConfiguration": { - "CORSRules": [ - { - "AllowedHeaders": [ - "*" - ], - "AllowedMethods": [ - "PUT", - "POST", - "DELETE" - ], - "AllowedOrigins": [ - "http://www.example.com" - ], - "ExposeHeaders": [ - "x-amz-server-side-encryption" - ], - "MaxAgeSeconds": 3000 - }, - { - "AllowedHeaders": [ - "Authorization" - ], - "AllowedMethods": [ - "GET" - ], - "AllowedOrigins": [ - "*" - ], - "MaxAgeSeconds": 3000 - } - ] - }, - "ContentMD5": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example enables PUT, POST, and DELETE requests from www.example.com, and enables GET requests from any domain.", - "id": "to-set-cors-configuration-on-a-bucket-1483037818805", - "title": "To set cors configuration on a bucket." - } - ], - "PutBucketLifecycleConfiguration": [ - { - "input": { - "Bucket": "examplebucket", - "LifecycleConfiguration": { - "Rules": [ - { - "Expiration": { - "Days": 3650 - }, - "Filter": { - "Prefix": "documents/" - }, - "ID": "TestOnly", - "Status": "Enabled", - "Transitions": [ - { - "Days": 365, - "StorageClass": "GLACIER" - } - ] - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example replaces existing lifecycle configuration, if any, on the specified bucket. ", - "id": "put-bucket-lifecycle-1482264533092", - "title": "Put bucket lifecycle" - } - ], - "PutBucketLogging": [ - { - "input": { - "Bucket": "sourcebucket", - "BucketLoggingStatus": { - "LoggingEnabled": { - "TargetBucket": "targetbucket", - "TargetGrants": [ - { - "Grantee": { - "Type": "Group", - "URI": "http://acs.amazonaws.com/groups/global/AllUsers" - }, - "Permission": "READ" - } - ], - "TargetPrefix": "MyBucketLogs/" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets logging policy on a bucket. 
For the Log Delivery group to deliver logs to the destination bucket, it needs permission for the READ_ACP action which the policy grants.", - "id": "set-logging-configuration-for-a-bucket-1482269119909", - "title": "Set logging configuration for a bucket" - } - ], - "PutBucketNotificationConfiguration": [ - { - "input": { - "Bucket": "examplebucket", - "NotificationConfiguration": { - "TopicConfigurations": [ - { - "Events": [ - "s3:ObjectCreated:*" - ], - "TopicArn": "arn:aws:sns:us-west-2:123456789012:s3-notification-topic" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets notification configuration on a bucket to publish the object created events to an SNS topic.", - "id": "set-notification-configuration-for-a-bucket-1482270296426", - "title": "Set notification configuration for a bucket" - } - ], - "PutBucketPolicy": [ - { - "input": { - "Bucket": "examplebucket", - "Policy": "{\"Version\": \"2012-10-17\", \"Statement\": [{ \"Sid\": \"id-1\",\"Effect\": \"Allow\",\"Principal\": {\"AWS\": \"arn:aws:iam::123456789012:root\"}, \"Action\": [ \"s3:PutObject\",\"s3:PutObjectAcl\"], \"Resource\": [\"arn:aws:s3:::acl3/*\" ] } ]}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets a permission policy on a bucket.", - "id": "set-bucket-policy-1482448903302", - "title": "Set bucket policy" - } - ], - "PutBucketReplication": [ - { - "input": { - "Bucket": "examplebucket", - "ReplicationConfiguration": { - "Role": "arn:aws:iam::123456789012:role/examplerole", - "Rules": [ - { - "Destination": { - "Bucket": "arn:aws:s3:::destinationbucket", - "StorageClass": "STANDARD" - }, - "Prefix": "", - "Status": "Enabled" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets replication configuration on a bucket.", - "id": "id-1", - "title": "Set replication configuration on a bucket" - } - ], - "PutBucketRequestPayment": [ - { - "input": { - "Bucket": "examplebucket", - "RequestPaymentConfiguration": { - "Payer": "Requester" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets request payment configuration on a bucket so that person requesting the download is charged.", - "id": "set-request-payment-configuration-on-a-bucket-1482343596680", - "title": "Set request payment configuration on a bucket." - } - ], - "PutBucketTagging": [ - { - "input": { - "Bucket": "examplebucket", - "Tagging": { - "TagSet": [ - { - "Key": "Key1", - "Value": "Value1" - }, - { - "Key": "Key2", - "Value": "Value2" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets tags on a bucket. Any existing tags are replaced.", - "id": "set-tags-on-a-bucket-1482346269066", - "title": "Set tags on a bucket" - } - ], - "PutBucketVersioning": [ - { - "input": { - "Bucket": "examplebucket", - "VersioningConfiguration": { - "MFADelete": "Disabled", - "Status": "Enabled" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets versioning configuration on bucket. 
The configuration enables versioning on the bucket.", - "id": "set-versioning-configuration-on-a-bucket-1482344186279", - "title": "Set versioning configuration on a bucket" - } - ], - "PutBucketWebsite": [ - { - "input": { - "Bucket": "examplebucket", - "ContentMD5": "", - "WebsiteConfiguration": { - "ErrorDocument": { - "Key": "error.html" - }, - "IndexDocument": { - "Suffix": "index.html" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds website configuration to a bucket.", - "id": "set-website-configuration-on-a-bucket-1482346836261", - "title": "Set website configuration on a bucket" - } - ], - "PutObject": [ - { - "input": { - "Body": "filetoupload", - "Bucket": "examplebucket", - "Key": "objectkey" - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "Bvq0EDKxOcXLJXNo_Lkz37eM3R4pfzyQ" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an object. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-create-an-object-1483147613675", - "title": "To create an object." - }, - { - "input": { - "Body": "HappyFace.jpg", - "Bucket": "examplebucket", - "Key": "HappyFace.jpg", - "ServerSideEncryption": "AES256", - "StorageClass": "STANDARD_IA" - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "ServerSideEncryption": "AES256", - "VersionId": "CG612hodqujkf8FaaNfp8U..FIhLROcp" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads an object. The request specifies optional request headers to directs S3 to use specific storage class and use server-side encryption.", - "id": "to-upload-an-object-(specify-optional-headers)", - "title": "To upload an object (specify optional headers)" - }, - { - "input": { - "ACL": "authenticated-read", - "Body": "filetoupload", - "Bucket": "examplebucket", - "Key": "exampleobject" - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "Kirh.unyZwjQ69YxcQLA8z4F5j3kJJKr" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads and object. The request specifies optional canned ACL (access control list) to all READ access to authenticated users. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-upload-an-object-and-specify-canned-acl-1483397779571", - "title": "To upload an object and specify canned ACL." - }, - { - "input": { - "Body": "HappyFace.jpg", - "Bucket": "examplebucket", - "Key": "HappyFace.jpg" - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "tpf3zF08nBplQK1XLOefGskR7mGDwcDk" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads an object to a versioning-enabled bucket. The source file is specified using Windows file syntax. S3 returns VersionId of the newly created object.", - "id": "to-upload-an-object-1481760101010", - "title": "To upload an object" - }, - { - "input": { - "Body": "filetoupload", - "Bucket": "examplebucket", - "Key": "exampleobject", - "Metadata": { - "metadata1": "value1", - "metadata2": "value2" - } - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "pSKidl4pHBiNwukdbcPXAIs.sshFFOc0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an object. 
The request also specifies optional metadata. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-upload-object-and-specify-user-defined-metadata-1483396974757", - "title": "To upload object and specify user-defined metadata" - }, - { - "input": { - "Body": "c:\\HappyFace.jpg", - "Bucket": "examplebucket", - "Key": "HappyFace.jpg", - "Tagging": "key1=value1&key2=value2" - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "psM2sYY4.o1501dSx8wMvnkOzSBB.V4a" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads an object. The request specifies optional object tags. The bucket is versioned, therefore S3 returns version ID of the newly created object.", - "id": "to-upload-an-object-and-specify-optional-tags-1481762310955", - "title": "To upload an object and specify optional tags" - }, - { - "input": { - "Body": "filetoupload", - "Bucket": "examplebucket", - "Key": "exampleobject", - "ServerSideEncryption": "AES256", - "Tagging": "key1=value1&key2=value2" - }, - "output": { - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "ServerSideEncryption": "AES256", - "VersionId": "Ri.vC6qVlA4dEnjgRV4ZHsHoFIjqEMNt" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads and object. The request specifies the optional server-side encryption option. The request also specifies optional object tags. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-upload-an-object-and-specify-server-side-encryption-and-object-tags-1483398331831", - "title": "To upload an object and specify server-side encryption and object tags" - } - ], - "PutObjectAcl": [ - { - "input": { - "AccessControlPolicy": { - }, - "Bucket": "examplebucket", - "GrantFullControl": "emailaddress=user1@example.com,emailaddress=user2@example.com", - "GrantRead": "uri=http://acs.amazonaws.com/groups/global/AllUsers", - "Key": "HappyFace.jpg" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds grants to an object ACL. 
The first permission grants user1 and user2 FULL_CONTROL and the AllUsers group READ permission.", - "id": "to-grant-permissions-using-object-acl-1481835549285", - "title": "To grant permissions using object ACL" - } - ], - "PutObjectTagging": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "HappyFace.jpg", - "Tagging": { - "TagSet": [ - { - "Key": "Key3", - "Value": "Value3" - }, - { - "Key": "Key4", - "Value": "Value4" - } - ] - } - }, - "output": { - "VersionId": "null" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds tags to an existing object.", - "id": "to-add-tags-to-an-existing-object-1481764668793", - "title": "To add tags to an existing object" - } - ], - "RestoreObject": [ - { - "input": { - "Bucket": "examplebucket", - "Key": "archivedobjectkey", - "RestoreRequest": { - "Days": 1, - "GlacierJobParameters": { - "Tier": "Expedited" - } - } - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example restores for one day an archived copy of an object back into Amazon S3 bucket.", - "id": "to-restore-an-archived-object-1483049329953", - "title": "To restore an archived object" - } - ], - "UploadPart": [ - { - "input": { - "Body": "fileToUpload", - "Bucket": "examplebucket", - "Key": "examplelargeobject", - "PartNumber": "1", - "UploadId": "xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--" - }, - "output": { - "ETag": "\"d8c2eafd90c266e19ab9dcacc479f8af\"" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads part 1 of a multipart upload. The example specifies a file name for the part data. The Upload ID is same that is returned by the initiate multipart upload.", - "id": "to-upload-a-part-1481847914943", - "title": "To upload a part" - } - ], - "UploadPartCopy": [ - { - "input": { - "Bucket": "examplebucket", - "CopySource": "/bucketname/sourceobjectkey", - "Key": "examplelargeobject", - "PartNumber": "1", - "UploadId": "exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--" - }, - "output": { - "CopyPartResult": { - "ETag": "\"b0c6f0e7e054ab8fa2536a2677f8734d\"", - "LastModified": "2016-12-29T21:24:43.000Z" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads a part of a multipart upload by copying data from an existing object as data source.", - "id": "to-upload-a-part-by-copying-data-from-an-existing-object-as-data-source-1483046746348", - "title": "To upload a part by copying data from an existing object as data source" - }, - { - "input": { - "Bucket": "examplebucket", - "CopySource": "/bucketname/sourceobjectkey", - "CopySourceRange": "bytes=1-100000", - "Key": "examplelargeobject", - "PartNumber": "2", - "UploadId": "exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--" - }, - "output": { - "CopyPartResult": { - "ETag": "\"65d16d19e65a7508a51f043180edcc36\"", - "LastModified": "2016-12-29T21:44:28.000Z" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example uploads a part of a multipart upload by copying a specified byte range from an existing object as data source.", - "id": "to-upload-a-part-by-copying-byte-range-from-an-existing-object-as-data-source-1483048068594", - "title": "To upload a part by copying byte range from an existing object as data source" - } - ] - } -} 
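Editor's note on the hunk above: examples-1.json is the canned request/response sample data botocore ships per S3 operation (it feeds SDK/CLI documentation, not the wire protocol). As a minimal sketch of what one of the removed samples corresponds to in client code — assuming default AWS credentials and a hypothetical bucket name taken from the sample data, neither of which belongs to this project:

import boto3

# "examplebucket" is a placeholder from the deleted sample data, not a real resource.
s3 = boto3.client("s3")
resp = s3.list_objects_v2(Bucket="examplebucket", MaxKeys=2)
for obj in resp.get("Contents", []):
    print(obj["Key"], obj["Size"], obj["StorageClass"])
# If resp["IsTruncated"] is true, pass resp["NextContinuationToken"] as
# ContinuationToken on the next call to fetch the following page of keys.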
diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/paginators-1.json deleted file mode 100644 index b1b4320..0000000 --- a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/paginators-1.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "pagination": { - "ListMultipartUploads": { - "limit_key": "MaxUploads", - "more_results": "IsTruncated", - "output_token": [ - "NextKeyMarker", - "NextUploadIdMarker" - ], - "input_token": [ - "KeyMarker", - "UploadIdMarker" - ], - "result_key": [ - "Uploads", - "CommonPrefixes" - ] - }, - "ListObjectVersions": { - "more_results": "IsTruncated", - "limit_key": "MaxKeys", - "output_token": [ - "NextKeyMarker", - "NextVersionIdMarker" - ], - "input_token": [ - "KeyMarker", - "VersionIdMarker" - ], - "result_key": [ - "Versions", - "DeleteMarkers", - "CommonPrefixes" - ] - }, - "ListObjects": { - "more_results": "IsTruncated", - "limit_key": "MaxKeys", - "output_token": "NextMarker || Contents[-1].Key", - "input_token": "Marker", - "result_key": [ - "Contents", - "CommonPrefixes" - ] - }, - "ListObjectsV2": { - "more_results": "IsTruncated", - "limit_key": "MaxKeys", - "output_token": "NextContinuationToken", - "input_token": "ContinuationToken", - "result_key": [ - "Contents", - "CommonPrefixes" - ] - }, - "ListParts": { - "more_results": "IsTruncated", - "limit_key": "MaxParts", - "output_token": "NextPartNumberMarker", - "input_token": "PartNumberMarker", - "result_key": "Parts" - }, - "ListDirectoryBuckets": { - "input_token": "ContinuationToken", - "limit_key": "MaxDirectoryBuckets", - "output_token": "ContinuationToken", - "result_key": "Buckets" - }, - "ListBuckets": { - "input_token": "ContinuationToken", - "limit_key": "MaxBuckets", - "output_token": "ContinuationToken", - "result_key": "Buckets" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/paginators-1.sdk-extras.json deleted file mode 100644 index 39e1360..0000000 --- a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/paginators-1.sdk-extras.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListBuckets": { - "non_aggregate_keys": [ - "Owner", - "Prefix" - ] - }, - "ListMultipartUploads": { - "non_aggregate_keys": [ - "RequestCharged", - "Prefix" - ] - }, - "ListObjectVersions": { - "non_aggregate_keys": [ - "RequestCharged", - "Prefix" - ] - }, - "ListObjects": { - "non_aggregate_keys": [ - "RequestCharged", - "Prefix" - ] - }, - "ListObjectsV2": { - "non_aggregate_keys": [ - "RequestCharged", - "Prefix" - ] - }, - "ListParts": { - "non_aggregate_keys": [ - "ChecksumAlgorithm", - "Initiator", - "Owner", - "StorageClass", - "ChecksumType" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/service-2.json.gz deleted file mode 100644 index 363fa55..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/service-2.sdk-extras.json b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/service-2.sdk-extras.json deleted file mode 100644 index d04e9d0..0000000 --- a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/service-2.sdk-extras.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "version": 1.0, - "merge": { - "shapes": { - "Expires":{"type":"timestamp"} - } - } -} 
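Editor's note on the hunk above: the deleted paginators-1.json is the metadata (input_token/output_token/limit_key/result_key per operation) that backs boto3's get_paginator(). A minimal sketch of how that data is consumed, again assuming default credentials and a placeholder bucket name:

import boto3

s3 = boto3.client("s3")
# The paginator name matches the operation key in paginators-1.json.
paginator = s3.get_paginator("list_objects_v2")
pages = paginator.paginate(
    Bucket="examplebucket",                 # placeholder, not a project resource
    PaginationConfig={"PageSize": 100},     # forwarded via the MaxKeys limit_key
)
for page in pages:
    for obj in page.get("Contents", []):    # "Contents" is the configured result_key
        print(obj["Key"])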
diff --git a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/s3/2006-03-01/waiters-2.json deleted file mode 100644 index b508a8f..0000000 --- a/venv/Lib/site-packages/botocore/data/s3/2006-03-01/waiters-2.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "version": 2, - "waiters": { - "BucketExists": { - "delay": 5, - "operation": "HeadBucket", - "maxAttempts": 20, - "acceptors": [ - { - "expected": 200, - "matcher": "status", - "state": "success" - }, - { - "expected": 301, - "matcher": "status", - "state": "success" - }, - { - "expected": 403, - "matcher": "status", - "state": "success" - }, - { - "expected": 404, - "matcher": "status", - "state": "retry" - } - ] - }, - "BucketNotExists": { - "delay": 5, - "operation": "HeadBucket", - "maxAttempts": 20, - "acceptors": [ - { - "expected": 404, - "matcher": "status", - "state": "success" - } - ] - }, - "ObjectExists": { - "delay": 5, - "operation": "HeadObject", - "maxAttempts": 20, - "acceptors": [ - { - "expected": 200, - "matcher": "status", - "state": "success" - }, - { - "expected": 404, - "matcher": "status", - "state": "retry" - } - ] - }, - "ObjectNotExists": { - "delay": 5, - "operation": "HeadObject", - "maxAttempts": 20, - "acceptors": [ - { - "expected": 404, - "matcher": "status", - "state": "success" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index 29d7f61..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/examples-1.json b/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/paginators-1.json deleted file mode 100644 index d843965..0000000 --- a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListAccessPointsForObjectLambda": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ObjectLambdaAccessPointList" - }, - "ListCallerAccessGrants": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CallerAccessGrantsList" - }, - "ListAccessPointsForDirectoryBuckets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AccessPointList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/service-2.json.gz deleted file mode 100644 index 01b4c73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3control/2018-08-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index f930b65..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/examples-1.json b/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/paginators-1.json deleted file mode 100644 index 5a8fa86..0000000 --- a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Endpoints" - }, - "ListSharedEndpoints": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Endpoints" - }, - "ListOutpostsWithS3": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Outposts" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/service-2.json.gz deleted file mode 100644 index fe19899..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3outposts/2017-07-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8b0f46c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/paginators-1.json deleted file mode 100644 index 2215b7b..0000000 --- a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListNamespaces": { - "input_token": "continuationToken", - "output_token": "continuationToken", - "limit_key": "maxNamespaces", - "result_key": "namespaces" - }, - "ListTableBuckets": { - "input_token": "continuationToken", - "output_token": "continuationToken", - "limit_key": "maxBuckets", - "result_key": "tableBuckets" - }, - "ListTables": { - "input_token": "continuationToken", - "output_token": "continuationToken", - "limit_key": "maxTables", - "result_key": "tables" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/service-2.json.gz deleted file mode 100644 index dcd7618..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/s3tables/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6e2f803..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/paginators-1.json deleted file mode 100644 index 90e3363..0000000 --- a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListIndexes": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "indexes" - }, - "ListVectorBuckets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "vectorBuckets" - }, - "ListVectors": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "vectors" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/service-2.json.gz deleted file mode 100644 index 6eab34b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/waiters-2.json b/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/s3vectors/2025-07-15/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2aa64c5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/examples-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/paginators-1.json deleted file mode 100644 index b19128c..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListHumanLoops": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "HumanLoopSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/service-2.json.gz deleted file mode 100644 index 435cdd6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-a2i-runtime/2019-11-07/service-2.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index aa1ee8d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/examples-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/service-2.json.gz deleted file mode 100644 index d9e5b98..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-edge/2020-09-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 805e8fa..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/examples-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/service-2.json.gz deleted file mode 100644 index 6c05e94..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-featurestore-runtime/2020-07-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6f128cd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/paginators-1.json deleted file mode 100644 index 8802d94..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListEarthObservationJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EarthObservationJobSummaries" - }, - "ListRasterDataCollections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RasterDataCollectionSummaries" - }, - "ListVectorEnrichmentJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "VectorEnrichmentJobSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/service-2.json.gz deleted file mode 100644 index b394429..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-geospatial/2020-05-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9657e3c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/service-2.json.gz deleted file mode 100644 index 8949e55..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-metrics/2022-09-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/endpoint-rule-set-1.json.gz deleted file mode 100644 index ab26a3f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/examples-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git 
a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/service-2.json.gz deleted file mode 100644 index c47851e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker-runtime/2017-05-13/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/endpoint-rule-set-1.json.gz deleted file mode 100644 index 8aca85f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/examples-1.json b/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/paginators-1.json b/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/paginators-1.json deleted file mode 100644 index 0b965cb..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/paginators-1.json +++ /dev/null @@ -1,502 +0,0 @@ -{ - "pagination": { - "ListTrainingJobs": { - "result_key": "TrainingJobSummaries", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListEndpoints": { - "result_key": "Endpoints", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListEndpointConfigs": { - "result_key": "EndpointConfigs", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListNotebookInstances": { - "result_key": "NotebookInstances", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListTags": { - "result_key": "Tags", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListModels": { - "result_key": "Models", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListAlgorithms": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AlgorithmSummaryList" - }, - "ListCodeRepositories": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CodeRepositorySummaryList" - }, - "ListCompilationJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CompilationJobSummaries" - }, - "ListHyperParameterTuningJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "HyperParameterTuningJobSummaries" - }, - "ListLabelingJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LabelingJobSummaryList" - }, - "ListLabelingJobsForWorkteam": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LabelingJobSummaryList" - }, - "ListModelPackages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ModelPackageSummaryList" - }, - "ListNotebookInstanceLifecycleConfigs": { - "input_token": "NextToken", - "limit_key": 
"MaxResults", - "output_token": "NextToken", - "result_key": "NotebookInstanceLifecycleConfigs" - }, - "ListSubscribedWorkteams": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SubscribedWorkteams" - }, - "ListTrainingJobsForHyperParameterTuningJob": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TrainingJobSummaries" - }, - "ListTransformJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TransformJobSummaries" - }, - "ListWorkteams": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Workteams" - }, - "Search": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Results" - }, - "ListApps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Apps" - }, - "ListAutoMLJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AutoMLJobSummaries" - }, - "ListCandidatesForAutoMLJob": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Candidates" - }, - "ListDomains": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Domains" - }, - "ListExperiments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ExperimentSummaries" - }, - "ListFlowDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FlowDefinitionSummaries" - }, - "ListHumanTaskUis": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "HumanTaskUiSummaries" - }, - "ListMonitoringExecutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MonitoringExecutionSummaries" - }, - "ListMonitoringSchedules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MonitoringScheduleSummaries" - }, - "ListProcessingJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProcessingJobSummaries" - }, - "ListTrialComponents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrialComponentSummaries" - }, - "ListTrials": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrialSummaries" - }, - "ListUserProfiles": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "UserProfiles" - }, - "ListWorkforces": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Workforces" - }, - "ListImageVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ImageVersions" - }, - "ListImages": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Images" - }, - "ListActions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActionSummaries" - }, - "ListAppImageConfigs": { - "input_token": 
"NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AppImageConfigs" - }, - "ListArtifacts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ArtifactSummaries" - }, - "ListAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AssociationSummaries" - }, - "ListContexts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ContextSummaries" - }, - "ListFeatureGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FeatureGroupSummaries" - }, - "ListModelPackageGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ModelPackageGroupSummaryList" - }, - "ListPipelineExecutionSteps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineExecutionSteps" - }, - "ListPipelineExecutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineExecutionSummaries" - }, - "ListPipelineParametersForExecution": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineParameters" - }, - "ListPipelines": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PipelineSummaries" - }, - "ListDataQualityJobDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobDefinitionSummaries" - }, - "ListDeviceFleets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DeviceFleetSummaries" - }, - "ListDevices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DeviceSummaries" - }, - "ListEdgePackagingJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "EdgePackagingJobSummaries" - }, - "ListModelBiasJobDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobDefinitionSummaries" - }, - "ListModelExplainabilityJobDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobDefinitionSummaries" - }, - "ListModelQualityJobDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobDefinitionSummaries" - }, - "ListStudioLifecycleConfigs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "StudioLifecycleConfigs" - }, - "ListInferenceRecommendationsJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InferenceRecommendationsJobs" - }, - "ListLineageGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "LineageGroupSummaries" - }, - "ListModelMetadata": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ModelMetadataSummaries" - }, - "ListEdgeDeploymentPlans": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": 
"EdgeDeploymentPlanSummaries" - }, - "ListStageDevices": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DeviceDeploymentSummaries" - }, - "ListInferenceRecommendationsJobSteps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Steps" - }, - "ListInferenceExperiments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InferenceExperiments" - }, - "ListModelCardExportJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ModelCardExportJobSummaries" - }, - "ListModelCardVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ModelCardVersionSummaryList" - }, - "ListModelCards": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ModelCardSummaries" - }, - "ListMonitoringAlertHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MonitoringAlertHistory" - }, - "ListMonitoringAlerts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "MonitoringAlertSummaries" - }, - "ListSpaces": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Spaces" - }, - "ListAliases": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SageMakerImageVersionAliases" - }, - "ListResourceCatalogs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ResourceCatalogs" - }, - "ListClusterNodes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClusterNodeSummaries" - }, - "ListClusters": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClusterSummaries" - }, - "ListInferenceComponents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InferenceComponents" - }, - "ListMlflowTrackingServers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrackingServerSummaries" - }, - "ListOptimizationJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OptimizationJobSummaries" - }, - "ListClusterSchedulerConfigs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ClusterSchedulerConfigSummaries" - }, - "ListComputeQuotas": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ComputeQuotaSummaries" - }, - "ListPartnerApps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Summaries" - }, - "ListTrainingPlans": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrainingPlanSummaries" - }, - "CreateHubContentPresignedUrls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AuthorizedUrlConfigs" - }, - "ListPipelineVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": 
"MaxResults", - "result_key": "PipelineVersionSummaries" - }, - "ListClusterEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Events" - }, - "ListUltraServersByReservedCapacity": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "UltraServers" - }, - "ListMlflowApps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Summaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/paginators-1.sdk-extras.json deleted file mode 100644 index 5ab3083..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "Search": { - "non_aggregate_keys": [ - "TotalHits" - ] - } - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/service-2.json.gz deleted file mode 100644 index 0983321..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/waiters-2.json b/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/waiters-2.json deleted file mode 100644 index d01aa3f..0000000 --- a/venv/Lib/site-packages/botocore/data/sagemaker/2017-07-24/waiters-2.json +++ /dev/null @@ -1,260 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "EndpointDeleted" : { - "delay" : 30, - "maxAttempts" : 60, - "operation" : "DescribeEndpoint", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ValidationException" - }, { - "matcher" : "path", - "argument" : "EndpointStatus", - "state" : "failure", - "expected" : "Failed" - } ] - }, - "EndpointInService" : { - "delay" : 30, - "maxAttempts" : 120, - "operation" : "DescribeEndpoint", - "acceptors" : [ { - "matcher" : "path", - "argument" : "EndpointStatus", - "state" : "success", - "expected" : "InService" - }, { - "matcher" : "path", - "argument" : "EndpointStatus", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "ImageCreated" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeImage", - "acceptors" : [ { - "matcher" : "path", - "argument" : "ImageStatus", - "state" : "success", - "expected" : "CREATED" - }, { - "matcher" : "path", - "argument" : "ImageStatus", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "ImageDeleted" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeImage", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "ImageStatus", - "state" : "failure", - "expected" : "DELETE_FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "ImageUpdated" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeImage", - "acceptors" : [ { - "matcher" : "path", - "argument" : "ImageStatus", - "state" : "success", - "expected" : "CREATED" - }, { - "matcher" : 
"path", - "argument" : "ImageStatus", - "state" : "failure", - "expected" : "UPDATE_FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "ImageVersionCreated" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeImageVersion", - "acceptors" : [ { - "matcher" : "path", - "argument" : "ImageVersionStatus", - "state" : "success", - "expected" : "CREATED" - }, { - "matcher" : "path", - "argument" : "ImageVersionStatus", - "state" : "failure", - "expected" : "CREATE_FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "ImageVersionDeleted" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeImageVersion", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "ImageVersionStatus", - "state" : "failure", - "expected" : "DELETE_FAILED" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "NotebookInstanceDeleted" : { - "delay" : 30, - "maxAttempts" : 60, - "operation" : "DescribeNotebookInstance", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ValidationException" - }, { - "matcher" : "path", - "argument" : "NotebookInstanceStatus", - "state" : "failure", - "expected" : "Failed" - } ] - }, - "NotebookInstanceInService" : { - "delay" : 30, - "maxAttempts" : 60, - "operation" : "DescribeNotebookInstance", - "acceptors" : [ { - "matcher" : "path", - "argument" : "NotebookInstanceStatus", - "state" : "success", - "expected" : "InService" - }, { - "matcher" : "path", - "argument" : "NotebookInstanceStatus", - "state" : "failure", - "expected" : "Failed" - } ] - }, - "NotebookInstanceStopped" : { - "delay" : 30, - "maxAttempts" : 60, - "operation" : "DescribeNotebookInstance", - "acceptors" : [ { - "matcher" : "path", - "argument" : "NotebookInstanceStatus", - "state" : "success", - "expected" : "Stopped" - }, { - "matcher" : "path", - "argument" : "NotebookInstanceStatus", - "state" : "failure", - "expected" : "Failed" - } ] - }, - "ProcessingJobCompletedOrStopped" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeProcessingJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "ProcessingJobStatus", - "state" : "success", - "expected" : "Completed" - }, { - "matcher" : "path", - "argument" : "ProcessingJobStatus", - "state" : "success", - "expected" : "Stopped" - }, { - "matcher" : "path", - "argument" : "ProcessingJobStatus", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "TrainingJobCompletedOrStopped" : { - "delay" : 120, - "maxAttempts" : 180, - "operation" : "DescribeTrainingJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "TrainingJobStatus", - "state" : "success", - "expected" : "Completed" - }, { - "matcher" : "path", - "argument" : "TrainingJobStatus", - "state" : "success", - "expected" : "Stopped" - }, { - "matcher" : "path", - "argument" : "TrainingJobStatus", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - }, - "TransformJobCompletedOrStopped" : { - "delay" : 60, - "maxAttempts" : 60, - "operation" : "DescribeTransformJob", - "acceptors" : [ { - "matcher" : "path", - "argument" : "TransformJobStatus", - "state" : "success", - 
"expected" : "Completed" - }, { - "matcher" : "path", - "argument" : "TransformJobStatus", - "state" : "success", - "expected" : "Stopped" - }, { - "matcher" : "path", - "argument" : "TransformJobStatus", - "state" : "failure", - "expected" : "Failed" - }, { - "matcher" : "error", - "state" : "failure", - "expected" : "ValidationException" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6e82230..0000000 Binary files a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/examples-1.json b/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/service-2.json.gz deleted file mode 100644 index 9fb9493..0000000 Binary files a/venv/Lib/site-packages/botocore/data/savingsplans/2019-06-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index cab57ba..0000000 Binary files a/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/paginators-1.json deleted file mode 100644 index 93b98ee..0000000 --- a/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListScheduleGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ScheduleGroups" - }, - "ListSchedules": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Schedules" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/service-2.json.gz deleted file mode 100644 index d458bfc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/scheduler/2021-06-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index 5b86fbe..0000000 Binary files a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/paginators-1.json deleted file mode 100644 index ef2fe19..0000000 --- a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListDiscoverers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Discoverers" - }, - "ListRegistries": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Registries" - }, - "ListSchemaVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "SchemaVersions" - }, - "ListSchemas": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Schemas" - }, - "SearchSchemas": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "Limit", - "result_key": "Schemas" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/service-2.json.gz deleted file mode 100644 index 210fead..0000000 Binary files a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/waiters-2.json b/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/waiters-2.json deleted file mode 100644 index 4f642f6..0000000 --- a/venv/Lib/site-packages/botocore/data/schemas/2019-12-02/waiters-2.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "version": 2, - "waiters": { - "CodeBindingExists": { - "description": "Wait until code binding is generated", - "delay": 2, - "operation": "DescribeCodeBinding", - "maxAttempts": 30, - "acceptors": [ - { - "expected": "CREATE_COMPLETE", - "matcher": "path", - "state": "success", - "argument": "Status" - }, - { - "expected": "CREATE_IN_PROGRESS", - "matcher": "path", - "state": "retry", - "argument": "Status" - }, - { - "expected": "CREATE_FAILED", - "matcher": "path", - "state": "failure", - "argument": "Status" - }, - { - "matcher": "error", - "expected": "NotFoundException", - "state": "failure" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 06988a1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/paginators-1.json deleted file mode 100644 index 2362098..0000000 --- a/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/paginators-1.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "pagination": { - "ListDomains": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxNumberOfDomains", - "result_key": "DomainNames" - }, - "Select": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/service-2.json.gz deleted file mode 100644 index 7751329..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sdb/2009-04-15/service-2.json.gz and /dev/null differ diff 
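
Aside: the chunks around this point delete botocore's bundled service-data files (paginator, waiter, and SDK default-configuration definitions) from the committed venv/ directory; removing them from version control does not change the behavior of an installed boto3/botocore, which ships its own copies. As a rough, hedged sketch of how this data is normally consumed (the client names, endpoint name, and page size below are placeholders, not anything from this repository, and running it requires real AWS credentials):

import boto3
from botocore.config import Config

# defaults_mode selects a row from sdk-default-configuration.json ("legacy", "standard", ...)
cfg = Config(defaults_mode="standard")

# Waiter names from waiters-2.json map to snake_case, e.g. "EndpointInService" -> "endpoint_in_service";
# the file's delay/maxAttempts/acceptors control how DescribeEndpoint is polled.
sagemaker = boto3.client("sagemaker", config=cfg)
sagemaker.get_waiter("endpoint_in_service").wait(EndpointName="example-endpoint")  # placeholder endpoint name

# Paginator entries (input_token/output_token/limit_key/result_key) drive get_paginator();
# result_key names the list each page exposes ("Schedules" for the scheduler ListSchedules entry above).
scheduler = boto3.client("scheduler", config=cfg)
for page in scheduler.get_paginator("list_schedules").paginate(PaginationConfig={"PageSize": 50}):
    for schedule in page["Schedules"]:
        print(schedule["Name"])
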
--git a/venv/Lib/site-packages/botocore/data/sdk-default-configuration.json b/venv/Lib/site-packages/botocore/data/sdk-default-configuration.json deleted file mode 100644 index 3db13b2..0000000 --- a/venv/Lib/site-packages/botocore/data/sdk-default-configuration.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "version": 1, - "base": { - "retryMode": "standard", - "stsRegionalEndpoints": "regional", - "s3UsEast1RegionalEndpoints": "regional", - "connectTimeoutInMillis": 1100, - "tlsNegotiationTimeoutInMillis": 1100 - }, - "modes": { - "standard": { - "connectTimeoutInMillis": { - "override": 3100 - }, - "tlsNegotiationTimeoutInMillis": { - "override": 3100 - } - }, - "in-region": { - }, - "cross-region": { - "connectTimeoutInMillis": { - "override": 3100 - }, - "tlsNegotiationTimeoutInMillis": { - "override": 3100 - } - }, - "mobile": { - "connectTimeoutInMillis": { - "override": 30000 - }, - "tlsNegotiationTimeoutInMillis": { - "override": 30000 - } - } - }, - "documentation": { - "modes": { - "standard": "The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", - "in-region": "The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", - "cross-region": "The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", - "mobile": "The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", - "auto": "The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically. Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application", - "legacy": "The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode" - }, - "configuration": { - "retryMode": "A retry mode specifies how the SDK attempts retries. See Retry Mode", - "stsRegionalEndpoints": "Specifies how the SDK determines the AWS service endpoint that it uses to talk to the AWS Security Token Service (AWS STS). See Setting STS Regional endpoints", - "s3UsEast1RegionalEndpoints": "Specifies how the SDK determines the AWS service endpoint that it uses to talk to the Amazon S3 for the us-east-1 region", - "connectTimeoutInMillis": "The amount of time after making an initial connection attempt on a socket, where if the client does not receive a completion of the connect handshake, the client gives up and fails the operation", - "tlsNegotiationTimeoutInMillis": "The maximum amount of time that a TLS handshake is allowed to take from the time the CLIENT HELLO message is sent to the time the client and server have fully negotiated ciphers and exchanged keys
    " - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/endpoint-rule-set-1.json.gz deleted file mode 100644 index 538b4aa..0000000 Binary files a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/examples-1.json b/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/examples-1.json deleted file mode 100644 index 43a3ec4..0000000 --- a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/examples-1.json +++ /dev/null @@ -1,596 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CancelRotateSecret": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "Name" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to cancel rotation for a secret. The operation sets the RotationEnabled field to false and cancels all scheduled rotations. To resume scheduled rotations, you must re-enable rotation by calling the rotate-secret operation.", - "id": "to-cancel-scheduled-rotation-for-a-secret-1523996016032", - "title": "To cancel scheduled rotation for a secret" - } - ], - "CreateSecret": [ - { - "input": { - "ClientRequestToken": "EXAMPLE1-90ab-cdef-fedc-ba987SECRET1", - "Description": "My test database secret created with the CLI", - "Name": "MyTestDatabaseSecret", - "SecretString": "{\"username\":\"david\",\"password\":\"EXAMPLE-PASSWORD\"}" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "VersionId": "EXAMPLE1-90ab-cdef-fedc-ba987SECRET1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to create a secret. The credentials stored in the encrypted secret value are retrieved from a file on disk named mycreds.json.", - "id": "to-create-a-basic-secret-1523996473658", - "title": "To create a basic secret" - } - ], - "DeleteResourcePolicy": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseMasterSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to delete the resource-based policy that is attached to a secret.", - "id": "to-delete-the-resource-based-policy-attached-to-a-secret-1530209419204", - "title": "To delete the resource-based policy attached to a secret" - } - ], - "DeleteSecret": [ - { - "input": { - "RecoveryWindowInDays": 7, - "SecretId": "MyTestDatabaseSecret1" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "DeletionDate": "1524085349.095", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to delete a secret. The secret stays in your account in a deprecated and inaccessible state until the recovery window ends. 
After the date and time in the DeletionDate response field has passed, you can no longer recover this secret with restore-secret.", - "id": "to-delete-a-secret-1523996905092", - "title": "To delete a secret" - } - ], - "DescribeSecret": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Description": "My test database secret", - "KmsKeyId": "arn:aws:kms:us-west-2:123456789012:key/EXAMPLE1-90ab-cdef-fedc-ba987KMSKEY1", - "LastAccessedDate": "1523923200", - "LastChangedDate": 1523477145.729, - "LastRotatedDate": 1525747253.72, - "Name": "MyTestDatabaseSecret", - "RotationEnabled": true, - "RotationLambdaARN": "arn:aws:lambda:us-west-2:123456789012:function:MyTestRotationLambda", - "RotationRules": { - "AutomaticallyAfterDays": 14, - "Duration": "2h", - "ScheduleExpression": "cron(0 16 1,15 * ? *)" - }, - "Tags": [ - { - "Key": "SecondTag", - "Value": "AnotherValue" - }, - { - "Key": "FirstTag", - "Value": "SomeValue" - } - ], - "VersionIdsToStages": { - "EXAMPLE1-90ab-cdef-fedc-ba987EXAMPLE": [ - "AWSPREVIOUS" - ], - "EXAMPLE2-90ab-cdef-fedc-ba987EXAMPLE": [ - "AWSCURRENT" - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to get the details about a secret.", - "id": "to-retrieve-the-details-of-a-secret-1524000138629", - "title": "To retrieve the details of a secret" - } - ], - "GetRandomPassword": [ - { - "input": { - "IncludeSpace": true, - "PasswordLength": 20, - "RequireEachIncludedType": true - }, - "output": { - "RandomPassword": "EXAMPLE-PASSWORD" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to request a randomly generated password. This example includes the optional flags to require spaces and at least one character of each included type. 
It specifies a length of 20 characters.", - "id": "to-generate-a-random-password-1524000546092", - "title": "To generate a random password" - } - ], - "GetResourcePolicy": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "ResourcePolicy": "{\n\"Version\":\"2012-10-17\",\n\"Statement\":[{\n\"Effect\":\"Allow\",\n\"Principal\":{\n\"AWS\":\"arn:aws:iam::123456789012:root\"\n},\n\"Action\":\"secretsmanager:GetSecretValue\",\n\"Resource\":\"*\"\n}]\n}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to retrieve the resource-based policy that is attached to a secret.", - "id": "to-retrieve-the-resource-based-policy-attached-to-a-secret-1530209677536", - "title": "To retrieve the resource-based policy attached to a secret" - } - ], - "GetSecretValue": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "CreatedDate": 1523477145.713, - "Name": "MyTestDatabaseSecret", - "SecretString": "{\n \"username\":\"david\",\n \"password\":\"EXAMPLE-PASSWORD\"\n}\n", - "VersionId": "EXAMPLE1-90ab-cdef-fedc-ba987SECRET1", - "VersionStages": [ - "AWSPREVIOUS" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to retrieve a secret string value.", - "id": "to-retrieve-the-encrypted-secret-value-of-a-secret-1524000702484", - "title": "To retrieve the encrypted secret value of a secret" - } - ], - "ListSecretVersionIds": [ - { - "input": { - "IncludeDeprecated": true, - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "Versions": [ - { - "CreatedDate": 1523477145.713, - "VersionId": "EXAMPLE1-90ab-cdef-fedc-ba987EXAMPLE", - "VersionStages": [ - "AWSPREVIOUS" - ] - }, - { - "CreatedDate": 1523486221.391, - "VersionId": "EXAMPLE2-90ab-cdef-fedc-ba987EXAMPLE", - "VersionStages": [ - "AWSCURRENT" - ] - }, - { - "CreatedDate": 1511974462.36, - "VersionId": "EXAMPLE3-90ab-cdef-fedc-ba987EXAMPLE;" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to retrieve a list of all of the versions of a secret, including those without any staging labels.", - "id": "to-list-all-of-the-secret-versions-associated-with-a-secret-1524000999164", - "title": "To list all of the secret versions associated with a secret" - } - ], - "ListSecrets": [ - { - "input": { - }, - "output": { - "SecretList": [ - { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Description": "My test database secret", - "LastChangedDate": 1523477145.729, - "Name": "MyTestDatabaseSecret", - "SecretVersionsToStages": { - "EXAMPLE1-90ab-cdef-fedc-ba987EXAMPLE": [ - "AWSCURRENT" - ] - } - }, - { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret1-d4e5f6", - "Description": "Another secret created for a different database", - "LastChangedDate": 1523482025.685, - "Name": "MyTestDatabaseSecret1", - "SecretVersionsToStages": { - "EXAMPLE2-90ab-cdef-fedc-ba987EXAMPLE": [ - "AWSCURRENT" - ] - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example 
shows how to list all of the secrets in your account.", - "id": "to-list-the-secrets-in-your-account-1524001246087", - "title": "To list the secrets in your account" - } - ], - "PutResourcePolicy": [ - { - "input": { - "ResourcePolicy": "{\n\"Version\":\"2012-10-17\",\n\"Statement\":[{\n\"Effect\":\"Allow\",\n\"Principal\":{\n\"AWS\":\"arn:aws:iam::123456789012:root\"\n},\n\"Action\":\"secretsmanager:GetSecretValue\",\n\"Resource\":\"*\"\n}]\n}", - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to add a resource-based policy to a secret.", - "id": "to-add-a-resource-based-policy-to-a-secret-1530209881839", - "title": "To add a resource-based policy to a secret" - } - ], - "PutSecretValue": [ - { - "input": { - "ClientRequestToken": "EXAMPLE2-90ab-cdef-fedc-ba987EXAMPLE", - "SecretId": "MyTestDatabaseSecret", - "SecretString": "{\"username\":\"david\",\"password\":\"EXAMPLE-PASSWORD\"}" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "VersionId": "EXAMPLE2-90ab-cdef-fedc-ba987EXAMPLE", - "VersionStages": [ - "AWSCURRENT" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to create a new version of the secret. Alternatively, you can use the update-secret command.", - "id": "to-store-a-secret-value-in-a-new-version-of-a-secret-1524001393971", - "title": "To store a secret value in a new version of a secret" - } - ], - "RestoreSecret": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to restore a secret that you previously scheduled for deletion.", - "id": "to-restore-a-previously-deleted-secret-1524001513930", - "title": "To restore a previously deleted secret" - } - ], - "RotateSecret": [ - { - "input": { - "RotationLambdaARN": "arn:aws:lambda:us-west-2:123456789012:function:MyTestDatabaseRotationLambda", - "RotationRules": { - "Duration": "2h", - "ScheduleExpression": "cron(0 16 1,15 * ? *)" - }, - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "VersionId": "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example configures rotation for a secret using a cron expression. The first rotation happens immediately after the changes are stored in the secret. The rotation schedule is the first and 15th day of every month. 
The rotation window begins at 4:00 PM UTC and ends at 6:00 PM.", - "id": "to-configure-rotation-for-a-secret-1524001629475", - "title": "To configure rotation for a secret" - }, - { - "input": { - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "VersionId": "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example requests an immediate invocation of the secret's Lambda rotation function. It assumes that the specified secret already has rotation configured. The rotation function runs asynchronously in the background.", - "id": "to-request-an-immediate-rotation-for-a-secret-1524001949004", - "title": "To request an immediate rotation for a secret" - } - ], - "TagResource": [ - { - "input": { - "SecretId": "MyExampleSecret", - "Tags": [ - { - "Key": "FirstTag", - "Value": "SomeValue" - }, - { - "Key": "SecondTag", - "Value": "AnotherValue" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to attach two tags each with a Key and Value to a secret. There is no output from this API. To see the result, use the DescribeSecret operation.", - "id": "to-add-tags-to-a-secret-1524002106718", - "title": "To add tags to a secret" - } - ], - "UntagResource": [ - { - "input": { - "SecretId": "MyTestDatabaseSecret", - "TagKeys": [ - "FirstTag", - "SecondTag" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to remove two tags from a secret's metadata. For each, both the tag and the associated value are removed. There is no output from this API. To see the result, use the DescribeSecret operation.", - "id": "to-remove-tags-from-a-secret-1524002239065", - "title": "To remove tags from a secret" - } - ], - "UpdateSecret": [ - { - "input": { - "ClientRequestToken": "EXAMPLE1-90ab-cdef-fedc-ba987EXAMPLE", - "Description": "This is a new description for the secret.", - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to modify the description of a secret.", - "id": "to-update-the-description-of-a-secret-1524002349094", - "title": "To update the description of a secret" - }, - { - "input": { - "KmsKeyId": "arn:aws:kms:us-west-2:123456789012:key/EXAMPLE2-90ab-cdef-fedc-ba987EXAMPLE", - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows how to update the KMS customer managed key (CMK) used to encrypt the secret value. 
The KMS CMK must be in the same region as the secret.", - "id": "to-update-the-kms-key-associated-with-a-secret-1524002421563", - "title": "To update the KMS key associated with a secret" - }, - { - "input": { - "SecretId": "MyTestDatabaseSecret", - "SecretString": "{JSON STRING WITH CREDENTIALS}" - }, - "output": { - "ARN": "aws:arn:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret", - "VersionId": "EXAMPLE1-90ab-cdef-fedc-ba987EXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to create a new version of the secret by updating the SecretString field. Alternatively, you can use the put-secret-value operation.", - "id": "to-create-a-new-version-of-the-encrypted-secret-value-1524004651836", - "title": "To create a new version of the encrypted secret value" - } - ], - "UpdateSecretVersionStage": [ - { - "input": { - "MoveToVersionId": "EXAMPLE1-90ab-cdef-fedc-ba987SECRET1", - "SecretId": "MyTestDatabaseSecret", - "VersionStage": "STAGINGLABEL1" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to add a staging label to a version of a secret. You can review the results by running the operation ListSecretVersionIds and viewing the VersionStages response field for the affected version.", - "id": "to-add-a-staging-label-attached-to-a-version-of-a-secret-1524004783841", - "title": "To add a staging label attached to a version of a secret" - }, - { - "input": { - "RemoveFromVersionId": "EXAMPLE1-90ab-cdef-fedc-ba987SECRET1", - "SecretId": "MyTestDatabaseSecret", - "VersionStage": "STAGINGLABEL1" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to delete a staging label that is attached to a version of a secret. You can review the results by running the operation ListSecretVersionIds and viewing the VersionStages response field for the affected version.", - "id": "to-delete-a-staging-label-attached-to-a-version-of-a-secret-1524004862181", - "title": "To delete a staging label attached to a version of a secret" - }, - { - "input": { - "MoveToVersionId": "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2", - "RemoveFromVersionId": "EXAMPLE1-90ab-cdef-fedc-ba987SECRET1", - "SecretId": "MyTestDatabaseSecret", - "VersionStage": "AWSCURRENT" - }, - "output": { - "ARN": "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3", - "Name": "MyTestDatabaseSecret" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows you how to move a staging label that is attached to one version of a secret to a different version. 
You can review the results by running the operation ListSecretVersionIds and viewing the VersionStages response field for the affected version.", - "id": "to-move-a-staging-label-from-one-version-of-a-secret-to-another-1524004963841", - "title": "To move a staging label from one version of a secret to another" - } - ], - "ValidateResourcePolicy": [ - { - "input": { - "ResourcePolicy": "{\n\"Version\":\"2012-10-17\",\n\"Statement\":[{\n\"Effect\":\"Allow\",\n\"Principal\":{\n\"AWS\":\"arn:aws:iam::123456789012:root\"\n},\n\"Action\":\"secretsmanager:GetSecretValue\",\n\"Resource\":\"*\"\n}]\n}", - "SecretId": "MyTestDatabaseSecret" - }, - "output": { - "PolicyValidationPassed": true, - "ValidationErrors": [ - - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows how to validate a resource-based policy to a secret.", - "id": "to-validate-the-resource-policy-of-a-secret-1524000138629", - "title": "To validate a resource-based policy to a secret" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/paginators-1.json b/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/paginators-1.json deleted file mode 100644 index 0f62e8e..0000000 --- a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListSecrets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecretList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/service-2.json.gz b/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/service-2.json.gz deleted file mode 100644 index 998d726..0000000 Binary files a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/service-2.sdk-extras.json b/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/service-2.sdk-extras.json deleted file mode 100644 index dc78f89..0000000 --- a/venv/Lib/site-packages/botocore/data/secretsmanager/2017-10-17/service-2.sdk-extras.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "version": 1.0, - "merge": { - "metadata": { - "serviceId": "Secrets Manager" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 10f8c83..0000000 Binary files a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/paginators-1.json deleted file mode 100644 index 9f78bef..0000000 --- a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListCaseEdits": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListCases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListComments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListMemberships": { - "input_token": "nextToken", - "output_token": 
"nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListInvestigations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "investigationActions" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/paginators-1.sdk-extras.json deleted file mode 100644 index 74820d5..0000000 --- a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/paginators-1.sdk-extras.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "version": 1, - "merge": { - "pagination": { - "ListCaseEdits": { - "non_aggregate_keys": [ - "total" - ] - }, - "ListCases": { - "non_aggregate_keys": [ - "total" - ] - }, - "ListComments": { - "non_aggregate_keys": [ - "total" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/service-2.json.gz deleted file mode 100644 index ddb2769..0000000 Binary files a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/security-ir/2018-05-10/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 64d0a28..0000000 Binary files a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/examples-1.json b/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/paginators-1.json deleted file mode 100644 index 76fbaa1..0000000 --- a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/paginators-1.json +++ /dev/null @@ -1,142 +0,0 @@ -{ - "pagination": { - "GetEnabledStandards": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StandardsSubscriptions" - }, - "GetFindings": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Findings" - }, - "GetInsights": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Insights" - }, - "ListEnabledProductsForImport": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ProductSubscriptions" - }, - "ListInvitations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Invitations" - }, - "ListMembers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Members" - }, - 
"DescribeActionTargets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActionTargets" - }, - "DescribeProducts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Products" - }, - "DescribeStandards": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Standards" - }, - "DescribeStandardsControls": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Controls" - }, - "ListOrganizationAdminAccounts": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AdminAccounts" - }, - "ListFindingAggregators": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FindingAggregators" - }, - "ListSecurityControlDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SecurityControlDefinitions" - }, - "ListStandardsControlAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "StandardsControlAssociationSummaries" - }, - "GetFindingHistory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Records" - }, - "ListConfigurationPolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConfigurationPolicySummaries" - }, - "ListConfigurationPolicyAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConfigurationPolicyAssociationSummaries" - }, - "DescribeProductsV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ProductsV2" - }, - "GetFindingsV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Findings" - }, - "GetResourcesV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Resources" - }, - "ListAggregatorsV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AggregatorsV2" - }, - "GetFindingsTrendsV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrendsMetrics" - }, - "GetResourcesTrendsV2": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrendsMetrics" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/paginators-1.sdk-extras.json deleted file mode 100644 index 73ef2ee..0000000 --- a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/paginators-1.sdk-extras.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListOrganizationAdminAccounts": { - "non_aggregate_keys": [ - "Feature" - ] - }, - "GetFindingsTrendsV2": { - "non_aggregate_keys": [ - "Granularity" - ] - }, - "GetResourcesTrendsV2": { - "non_aggregate_keys": [ - "Granularity" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/service-2.json.gz deleted file mode 100644 index fa8b0f4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/securityhub/2018-10-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index fafe052..0000000 Binary files a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/paginators-1.json deleted file mode 100644 index 19e482b..0000000 --- a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "GetDataLakeSources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataLakeSources" - }, - "ListDataLakeExceptions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "exceptions" - }, - "ListLogSources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sources" - }, - "ListSubscribers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "subscribers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/paginators-1.sdk-extras.json deleted file mode 100644 index 41ae7fe..0000000 --- a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "GetDataLakeSources": { - "non_aggregate_keys": [ - "dataLakeArn" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/service-2.json.gz deleted file mode 100644 index 13ca62c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/securitylake/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6890675..0000000 Binary files a/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/paginators-1.json deleted file mode 100644 index a39e547..0000000 --- a/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListApplicationDependencies": { - "input_token": "NextToken", - "limit_key": "MaxItems", - "output_token": "NextToken", - "result_key": "Dependencies" - }, - "ListApplicationVersions": { - "input_token": "NextToken", - "limit_key": "MaxItems", - "output_token": "NextToken", - "result_key": "Versions" - }, - "ListApplications": { - "input_token": "NextToken", - "limit_key": "MaxItems", - 
"output_token": "NextToken", - "result_key": "Applications" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/service-2.json.gz deleted file mode 100644 index 93c9eed..0000000 Binary files a/venv/Lib/site-packages/botocore/data/serverlessrepo/2017-09-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/endpoint-rule-set-1.json.gz deleted file mode 100644 index 13ff5a4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/examples-1.json b/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/paginators-1.json b/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/paginators-1.json deleted file mode 100644 index e0d4547..0000000 --- a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListAWSDefaultServiceQuotas": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Quotas" - }, - "ListRequestedServiceQuotaChangeHistory": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RequestedQuotas" - }, - "ListRequestedServiceQuotaChangeHistoryByQuota": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RequestedQuotas" - }, - "ListServiceQuotaIncreaseRequestsInTemplate": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ServiceQuotaIncreaseRequestInTemplateList" - }, - "ListServiceQuotas": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Quotas" - }, - "ListServices": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Services" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/service-2.json.gz b/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/service-2.json.gz deleted file mode 100644 index 8058adf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/service-quotas/2019-06-24/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1095a16..0000000 Binary files a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/examples-1.json b/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/paginators-1.json b/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/paginators-1.json deleted file mode 100644 index 55281fb..0000000 --- a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListApplications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "applications" - }, - "ListAssociatedAttributeGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "attributeGroups" - }, - "ListAssociatedResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "resources" - }, - "ListAttributeGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "attributeGroups" - }, - "ListAttributeGroupsForApplication": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "attributeGroupsDetails" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/service-2.json.gz b/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/service-2.json.gz deleted file mode 100644 index 28b2e36..0000000 Binary files a/venv/Lib/site-packages/botocore/data/servicecatalog-appregistry/2020-06-24/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index e41b8b5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/examples-1.json b/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/paginators-1.json deleted file mode 100644 index 5770fef..0000000 --- a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/paginators-1.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "pagination": { - "SearchProductsAsAdmin": { - "result_key": "ProductViewDetails", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListAcceptedPortfolioShares": { - "result_key": "PortfolioDetails", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListPortfolios": { - "result_key": "PortfolioDetails", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListConstraintsForPortfolio": { - "result_key": "ConstraintDetails", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListLaunchPaths": { - 
"result_key": "LaunchPathSummaries", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListTagOptions": { - "result_key": "TagOptionDetails", - "output_token": "PageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListPortfoliosForProduct": { - "result_key": "PortfolioDetails", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListPrincipalsForPortfolio": { - "result_key": "Principals", - "output_token": "NextPageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListResourcesForTagOption": { - "result_key": "ResourceDetails", - "output_token": "PageToken", - "input_token": "PageToken", - "limit_key": "PageSize" - }, - "ListOrganizationPortfolioAccess": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "OrganizationNodes" - }, - "ListProvisionedProductPlans": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "ProvisionedProductPlans" - }, - "ListProvisioningArtifactsForServiceAction": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "ProvisioningArtifactViews" - }, - "ListRecordHistory": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "RecordDetails" - }, - "ListServiceActions": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "ServiceActionSummaries" - }, - "ListServiceActionsForProvisioningArtifact": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "ServiceActionSummaries" - }, - "ScanProvisionedProducts": { - "input_token": "PageToken", - "limit_key": "PageSize", - "output_token": "NextPageToken", - "result_key": "ProvisionedProducts" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/service-2.json.gz deleted file mode 100644 index 8d40f2b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/servicecatalog/2015-12-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4c2e620..0000000 Binary files a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/examples-1.json b/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/examples-1.json deleted file mode 100644 index cc99fe4..0000000 --- a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/examples-1.json +++ /dev/null @@ -1,672 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateHttpNamespace": [ - { - "input": { - "CreatorRequestId": "example-creator-request-id-0001", - "Description": "Example.com AWS Cloud Map HTTP Namespace", - "Name": "example-http.com" - }, - "output": { - "OperationId": "httpvoqozuhfet5kzxoxg-a-response-example" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates an HTTP namespace.", - "id": "createhttpnamespace-example-1590114811304", - "title": 
"CreateHttpNamespace example" - } - ], - "CreatePrivateDnsNamespace": [ - { - "input": { - "CreatorRequestId": "eedd6892-50f3-41b2-8af9-611d6e1d1a8c", - "Name": "example.com", - "Vpc": "vpc-1c56417b" - }, - "output": { - "OperationId": "gv4g5meo7ndmeh4fqskygvk23d2fijwa-k9302yzd" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Create private DNS namespace", - "id": "example-create-private-dns-namespace-1587058592930", - "title": "Example: Create private DNS namespace" - } - ], - "CreatePublicDnsNamespace": [ - { - "input": { - "CreatorRequestId": "example-creator-request-id-0003", - "Description": "Example.com AWS Cloud Map Public DNS Namespace", - "Name": "example-public-dns.com" - }, - "output": { - "OperationId": "dns2voqozuhfet5kzxoxg-a-response-example" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example creates a public namespace based on DNS.", - "id": "createpublicdnsnamespace-example-1590114940910", - "title": "CreatePublicDnsNamespace example" - } - ], - "CreateService": [ - { - "input": { - "CreatorRequestId": "567c1193-6b00-4308-bd57-ad38a8822d25", - "DnsConfig": { - "DnsRecords": [ - { - "TTL": 60, - "Type": "A" - } - ], - "NamespaceId": "ns-ylexjili4cdxy3xm", - "RoutingPolicy": "MULTIVALUE" - }, - "Name": "myservice", - "NamespaceId": "ns-ylexjili4cdxy3xm" - }, - "output": { - "Service": { - "Arn": "arn:aws:servicediscovery:us-west-2:123456789012:service/srv-p5zdwlg5uvvzjita", - "CreateDate": 1587081768.334, - "CreatorRequestId": "567c1193-6b00-4308-bd57-ad38a8822d25", - "DnsConfig": { - "DnsRecords": [ - { - "TTL": 60, - "Type": "A" - } - ], - "NamespaceId": "ns-ylexjili4cdxy3xm", - "RoutingPolicy": "MULTIVALUE" - }, - "Id": "srv-p5zdwlg5uvvzjita", - "Name": "myservice", - "NamespaceId": "ns-ylexjili4cdxy3xm" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Create service", - "id": "example-create-service-1587235913584", - "title": "Example: Create service" - } - ], - "DeleteNamespace": [ - { - "input": { - "Id": "ns-ylexjili4cdxy3xm" - }, - "output": { - "OperationId": "gv4g5meo7ndmeh4fqskygvk23d2fijwa-k98y6drk" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Delete namespace", - "id": "example-delete-namespace-1587416093508", - "title": "Example: Delete namespace" - } - ], - "DeleteService": [ - { - "input": { - "Id": "srv-p5zdwlg5uvvzjita" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Delete service", - "id": "example-delete-service-1587416462902", - "title": "Example: Delete service" - } - ], - "DeregisterInstance": [ - { - "input": { - "InstanceId": "myservice-53", - "ServiceId": "srv-p5zdwlg5uvvzjita" - }, - "output": { - "OperationId": "4yejorelbukcjzpnr6tlmrghsjwpngf4-k98rnaiq" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Deregister a service instance", - "id": "example-deregister-a-service-instance-1587416305738", - "title": "Example: Deregister a service instance" - } - ], - "DiscoverInstances": [ - { - "input": { - "HealthStatus": "ALL", - "MaxResults": 10, - "NamespaceName": "example.com", - "ServiceName": "myservice" - }, - "output": { - "Instances": [ - { - "Attributes": { - "AWS_INSTANCE_IPV4": "172.2.1.3", - "AWS_INSTANCE_PORT": "808" - }, - "HealthStatus": "UNKNOWN", - "InstanceId": "myservice-53", - "NamespaceName": "example.com", - "ServiceName": "myservice" - } - ] - }, - "comments": { - 
"input": { - }, - "output": { - } - }, - "description": "Example: Discover registered instances", - "id": "example-discover-registered-instances-1587236343568", - "title": "Example: Discover registered instances" - } - ], - "GetInstance": [ - { - "input": { - "InstanceId": "i-abcd1234", - "ServiceId": "srv-e4anhexample0004" - }, - "output": { - "Instance": { - "Attributes": { - "AWS_INSTANCE_IPV4": "192.0.2.44", - "AWS_INSTANCE_PORT": "80", - "color": "green", - "region": "us-west-2", - "stage": "beta" - }, - "Id": "i-abcd1234" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets information about a specified instance.", - "id": "getinstance-example-1590115065598", - "title": "GetInstance example" - } - ], - "GetInstancesHealthStatus": [ - { - "input": { - "ServiceId": "srv-e4anhexample0004" - }, - "output": { - "Status": { - "i-abcd1234": "HEALTHY", - "i-abcd1235": "UNHEALTHY" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets the current health status of one or more instances that are associate with a specified service.", - "id": "getinstanceshealthstatus-example-1590115176146", - "title": "GetInstancesHealthStatus example" - } - ], - "GetNamespace": [ - { - "input": { - "Id": "ns-e4anhexample0004" - }, - "output": { - "Namespace": { - "Arn": "arn:aws:servicediscovery:us-west-2: 123456789120:namespace/ns-e1tpmexample0001", - "CreateDate": "20181118T211712Z", - "CreatorRequestId": "example-creator-request-id-0001", - "Description": "Example.com AWS Cloud Map HTTP Namespace", - "Id": "ns-e1tpmexample0001", - "Name": "example-http.com", - "Properties": { - "DnsProperties": { - }, - "HttpProperties": { - "HttpName": "example-http.com" - } - }, - "Type": "HTTP" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets information about a specified namespace.", - "id": "getnamespace-example-1590115383708", - "title": "GetNamespace example" - } - ], - "GetOperation": [ - { - "input": { - "OperationId": "gv4g5meo7ndmeh4fqskygvk23d2fijwa-k9302yzd" - }, - "output": { - "Operation": { - "CreateDate": 1587055860.121, - "Id": "gv4g5meo7ndmeh4fqskygvk23d2fijwa-k9302yzd", - "Status": "SUCCESS", - "Targets": { - "NAMESPACE": "ns-ylexjili4cdxy3xm" - }, - "Type": "CREATE_NAMESPACE", - "UpdateDate": 1587055900.469 - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Get operation result", - "id": "example-get-operation-result-1587073807124", - "title": "Example: Get operation result" - } - ], - "GetService": [ - { - "input": { - "Id": "srv-e4anhexample0004" - }, - "output": { - "Service": { - "Arn": "arn:aws:servicediscovery:us-west-2:123456789120:service/srv-e4anhexample0004", - "CreateDate": "20181118T211707Z", - "CreatorRequestId": "example-creator-request-id-0004", - "Description": "Example.com AWS Cloud Map HTTP Service", - "HealthCheckConfig": { - "FailureThreshold": 3, - "ResourcePath": "/", - "Type": "HTTPS" - }, - "Id": "srv-e4anhexample0004", - "Name": "example-http-service", - "NamespaceId": "ns-e4anhexample0004" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets the settings for a specified service.", - "id": "getservice-example-1590117234294", - "title": "GetService Example" - } - ], - "ListInstances": [ - { - "input": { - "ServiceId": "srv-qzpwvt2tfqcegapy" - }, - "output": { - "Instances": [ - { - "Attributes": { - "AWS_INSTANCE_IPV4": "172.2.1.3", - 
"AWS_INSTANCE_PORT": "808" - }, - "Id": "i-06bdabbae60f65a4e" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: List service instances", - "id": "example-list-service-instances-1587236237008", - "title": "Example: List service instances" - } - ], - "ListNamespaces": [ - { - "input": { - }, - "output": { - "Namespaces": [ - { - "Arn": "arn:aws:servicediscovery:us-west-2:123456789012:namespace/ns-a3ccy2e7e3a7rile", - "CreateDate": 1585354387.357, - "Id": "ns-a3ccy2e7e3a7rile", - "Name": "local", - "Properties": { - "DnsProperties": { - "HostedZoneId": "Z06752353VBUDTC32S84S" - }, - "HttpProperties": { - "HttpName": "local" - } - }, - "Type": "DNS_PRIVATE" - }, - { - "Arn": "arn:aws:servicediscovery:us-west-2:123456789012:namespace/ns-pocfyjtrsmwtvcxx", - "CreateDate": 1586468974.698, - "Description": "My second namespace", - "Id": "ns-pocfyjtrsmwtvcxx", - "Name": "My-second-namespace", - "Properties": { - "DnsProperties": { - }, - "HttpProperties": { - "HttpName": "My-second-namespace" - } - }, - "Type": "HTTP" - }, - { - "Arn": "arn:aws:servicediscovery:us-west-2:123456789012:namespace/ns-ylexjili4cdxy3xm", - "CreateDate": 1587055896.798, - "Id": "ns-ylexjili4cdxy3xm", - "Name": "example.com", - "Properties": { - "DnsProperties": { - "HostedZoneId": "Z09983722P0QME1B3KC8I" - }, - "HttpProperties": { - "HttpName": "example.com" - } - }, - "Type": "DNS_PRIVATE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: List namespaces", - "id": "example-list-namespaces-1587401553154", - "title": "Example: List namespaces" - } - ], - "ListOperations": [ - { - "input": { - "Filters": [ - { - "Condition": "IN", - "Name": "STATUS", - "Values": [ - "PENDING", - "SUCCESS" - ] - } - ] - }, - "output": { - "Operations": [ - { - "Id": "76yy8ovhpdz0plmjzbsnqgnrqvpv2qdt-kexample", - "Status": "SUCCESS" - }, - { - "Id": "prysnyzpji3u2ciy45nke83x2zanl7yk-dexample", - "Status": "SUCCESS" - }, - { - "Id": "ko4ekftir7kzlbechsh7xvcdgcpk66gh-7example", - "Status": "PENDING" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example gets the operations that have a STATUS of either PENDING or SUCCESS.", - "id": "listoperations-example-1590117354396", - "title": "ListOperations Example" - } - ], - "ListServices": [ - { - "input": { - }, - "output": { - "Services": [ - { - "Arn": "arn:aws:servicediscovery:us-west-2:123456789012:service/srv-p5zdwlg5uvvzjita", - "CreateDate": 1587081768.334, - "DnsConfig": { - "DnsRecords": [ - { - "TTL": 60, - "Type": "A" - } - ], - "RoutingPolicy": "MULTIVALUE" - }, - "Id": "srv-p5zdwlg5uvvzjita", - "Name": "myservice" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: List services", - "id": "example-list-services-1587236889840", - "title": "Example: List services" - } - ], - "ListTagsForResource": [ - { - "input": { - "ResourceARN": "arn:aws:servicediscovery:us-east-1:123456789012:namespace/ns-ylexjili4cdxy3xm" - }, - "output": { - "Tags": [ - { - "Key": "Project", - "Value": "Zeta" - }, - { - "Key": "Department", - "Value": "Engineering" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example lists the tags of a resource.", - "id": "listtagsforresource-example-1590093928416", - "title": "ListTagsForResource example" - } - ], - "RegisterInstance": [ - { - "input": { - "Attributes": { - "AWS_INSTANCE_IPV4": "172.2.1.3", - "AWS_INSTANCE_PORT": "808" - }, - 
"CreatorRequestId": "7a48a98a-72e6-4849-bfa7-1a458e030d7b", - "InstanceId": "myservice-53", - "ServiceId": "srv-p5zdwlg5uvvzjita" - }, - "output": { - "OperationId": "4yejorelbukcjzpnr6tlmrghsjwpngf4-k95yg2u7" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Example: Register Instance", - "id": "example-register-instance-1587236116314", - "title": "Example: Register Instance" - } - ], - "TagResource": [ - { - "input": { - "ResourceARN": "arn:aws:servicediscovery:us-east-1:123456789012:namespace/ns-ylexjili4cdxy3xm", - "Tags": [ - { - "Key": "Department", - "Value": "Engineering" - }, - { - "Key": "Project", - "Value": "Zeta" - } - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example adds \"Department\" and \"Project\" tags to a resource.", - "id": "tagresource-example-1590093532240", - "title": "TagResource example" - } - ], - "UntagResource": [ - { - "input": { - "ResourceARN": "arn:aws:servicediscovery:us-east-1:123456789012:namespace/ns-ylexjili4cdxy3xm", - "TagKeys": [ - "Project", - "Department" - ] - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example removes the \"Department\" and \"Project\" tags from a resource.", - "id": "untagresource-example-1590094024672", - "title": "UntagResource example" - } - ], - "UpdateInstanceCustomHealthStatus": [ - { - "input": { - "InstanceId": "i-abcd1234", - "ServiceId": "srv-e4anhexample0004", - "Status": "HEALTHY" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example submits a request to change the health status of an instance associated with a service with a custom health check to HEALTHY.", - "id": "updateinstancecustomhealthstatus-example-1590118408574", - "title": "UpdateInstanceCustomHealthStatus Example" - } - ], - "UpdateService": [ - { - "input": { - "Id": "srv-e4anhexample0004", - "Service": { - "DnsConfig": { - "DnsRecords": [ - { - "TTL": 60, - "Type": "A" - } - ] - }, - "HealthCheckConfig": { - "FailureThreshold": 2, - "ResourcePath": "/", - "Type": "HTTP" - } - } - }, - "output": { - "OperationId": "m35hsdrkxwjffm3xef4bxyy6vc3ewakx-jdn3y5g5" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example submits a request to replace the DnsConfig and HealthCheckConfig settings of a specified service.", - "id": "updateservice-example-1590117830880", - "title": "UpdateService Example" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/paginators-1.json b/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/paginators-1.json deleted file mode 100644 index f58df70..0000000 --- a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListServices": { - "result_key": "Services", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListInstances": { - "result_key": "Instances", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListNamespaces": { - "result_key": "Namespaces", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListOperations": { - "result_key": "Operations", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - } - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/paginators-1.sdk-extras.json b/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/paginators-1.sdk-extras.json deleted file mode 100644 index 1e17fb6..0000000 --- a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/paginators-1.sdk-extras.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1.0, - "merge": { - "pagination": { - "ListInstances": { - "non_aggregate_keys": [ - "ResourceOwner" - ] - } - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/service-2.json.gz b/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/service-2.json.gz deleted file mode 100644 index a8946a6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/servicediscovery/2017-03-14/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ses/2010-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9749d50..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/examples-1.json b/venv/Lib/site-packages/botocore/data/ses/2010-12-01/examples-1.json deleted file mode 100644 index e569033..0000000 --- a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/examples-1.json +++ /dev/null @@ -1,1021 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CloneReceiptRuleSet": [ - { - "input": { - "OriginalRuleSetName": "RuleSetToClone", - "RuleSetName": "RuleSetToCreate" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a receipt rule set by cloning an existing one:", - "id": "clonereceiptruleset-1469055039770", - "title": "CloneReceiptRuleSet" - } - ], - "CreateReceiptFilter": [ - { - "input": { - "Filter": { - "IpFilter": { - "Cidr": "1.2.3.4/24", - "Policy": "Allow" - }, - "Name": "MyFilter" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a new IP address filter:", - "id": "createreceiptfilter-1469122681253", - "title": "CreateReceiptFilter" - } - ], - "CreateReceiptRule": [ - { - "input": { - "After": "", - "Rule": { - "Actions": [ - { - "S3Action": { - "BucketName": "MyBucket", - "ObjectKeyPrefix": "email" - } - } - ], - "Enabled": true, - "Name": "MyRule", - "ScanEnabled": true, - "TlsPolicy": "Optional" - }, - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a new receipt rule:", - "id": "createreceiptrule-1469122946515", - "title": "CreateReceiptRule" - } - ], - "CreateReceiptRuleSet": [ - { - "input": { - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an empty receipt rule set:", - "id": "createreceiptruleset-1469058761646", - "title": "CreateReceiptRuleSet" - } - ], - "DeleteIdentity": [ - { - "input": { - "Identity": "user@example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an identity from the list of identities that have been submitted for verification with Amazon SES:", - "id": "deleteidentity-1469047858906", - "title": "DeleteIdentity" - } - ], - "DeleteIdentityPolicy": [ - { - "input": { - "Identity": "user@example.com", - "PolicyName": 
"MyPolicy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a sending authorization policy for an identity:", - "id": "deleteidentitypolicy-1469055282499", - "title": "DeleteIdentityPolicy" - } - ], - "DeleteReceiptFilter": [ - { - "input": { - "FilterName": "MyFilter" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an IP address filter:", - "id": "deletereceiptfilter-1469055456835", - "title": "DeleteReceiptFilter" - } - ], - "DeleteReceiptRule": [ - { - "input": { - "RuleName": "MyRule", - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a receipt rule:", - "id": "deletereceiptrule-1469055563599", - "title": "DeleteReceiptRule" - } - ], - "DeleteReceiptRuleSet": [ - { - "input": { - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a receipt rule set:", - "id": "deletereceiptruleset-1469055713690", - "title": "DeleteReceiptRuleSet" - } - ], - "DeleteVerifiedEmailAddress": [ - { - "input": { - "EmailAddress": "user@example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an email address from the list of identities that have been submitted for verification with Amazon SES:", - "id": "deleteverifiedemailaddress-1469051086444", - "title": "DeleteVerifiedEmailAddress" - } - ], - "DescribeActiveReceiptRuleSet": [ - { - "input": { - }, - "output": { - "Metadata": { - "CreatedTimestamp": "2016-07-15T16:25:59.607Z", - "Name": "default-rule-set" - }, - "Rules": [ - { - "Actions": [ - { - "S3Action": { - "BucketName": "MyBucket", - "ObjectKeyPrefix": "email" - } - } - ], - "Enabled": true, - "Name": "MyRule", - "ScanEnabled": true, - "TlsPolicy": "Optional" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the metadata and receipt rules for the receipt rule set that is currently active:", - "id": "describeactivereceiptruleset-1469121611502", - "title": "DescribeActiveReceiptRuleSet" - } - ], - "DescribeReceiptRule": [ - { - "input": { - "RuleName": "MyRule", - "RuleSetName": "MyRuleSet" - }, - "output": { - "Rule": { - "Actions": [ - { - "S3Action": { - "BucketName": "MyBucket", - "ObjectKeyPrefix": "email" - } - } - ], - "Enabled": true, - "Name": "MyRule", - "ScanEnabled": true, - "TlsPolicy": "Optional" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a receipt rule:", - "id": "describereceiptrule-1469055813118", - "title": "DescribeReceiptRule" - } - ], - "DescribeReceiptRuleSet": [ - { - "input": { - "RuleSetName": "MyRuleSet" - }, - "output": { - "Metadata": { - "CreatedTimestamp": "2016-07-15T16:25:59.607Z", - "Name": "MyRuleSet" - }, - "Rules": [ - { - "Actions": [ - { - "S3Action": { - "BucketName": "MyBucket", - "ObjectKeyPrefix": "email" - } - } - ], - "Enabled": true, - "Name": "MyRule", - "ScanEnabled": true, - "TlsPolicy": "Optional" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the metadata and receipt rules of a receipt rule set:", - "id": "describereceiptruleset-1469121240385", - "title": "DescribeReceiptRuleSet" - } - ], - "GetAccountSendingEnabled": [ - { - "output": { - "Enabled": true - }, - "comments": 
{ - "input": { - }, - "output": { - } - }, - "description": "The following example returns if sending status for an account is enabled. (true / false):", - "id": "getaccountsendingenabled-1469047741333", - "title": "GetAccountSendingEnabled" - } - ], - "GetIdentityDkimAttributes": [ - { - "input": { - "Identities": [ - "example.com", - "user@example.com" - ] - }, - "output": { - "DkimAttributes": { - "example.com": { - "DkimEnabled": true, - "DkimTokens": [ - "EXAMPLEjcs5xoyqytjsotsijas7236gr", - "EXAMPLEjr76cvoc6mysspnioorxsn6ep", - "EXAMPLEkbmkqkhlm2lyz77ppkulerm4k" - ], - "DkimVerificationStatus": "Success" - }, - "user@example.com": { - "DkimEnabled": false, - "DkimVerificationStatus": "NotStarted" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example retrieves the Amazon SES Easy DKIM attributes for a list of identities:", - "id": "getidentitydkimattributes-1469050695628", - "title": "GetIdentityDkimAttributes" - } - ], - "GetIdentityMailFromDomainAttributes": [ - { - "input": { - "Identities": [ - "example.com" - ] - }, - "output": { - "MailFromDomainAttributes": { - "example.com": { - "BehaviorOnMXFailure": "UseDefaultValue", - "MailFromDomain": "bounces.example.com", - "MailFromDomainStatus": "Success" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the custom MAIL FROM attributes for an identity:", - "id": "getidentitymailfromdomainattributes-1469123114860", - "title": "GetIdentityMailFromDomainAttributes" - } - ], - "GetIdentityNotificationAttributes": [ - { - "input": { - "Identities": [ - "example.com" - ] - }, - "output": { - "NotificationAttributes": { - "example.com": { - "BounceTopic": "arn:aws:sns:us-east-1:EXAMPLE65304:ExampleTopic", - "ComplaintTopic": "arn:aws:sns:us-east-1:EXAMPLE65304:ExampleTopic", - "DeliveryTopic": "arn:aws:sns:us-east-1:EXAMPLE65304:ExampleTopic", - "ForwardingEnabled": true, - "HeadersInBounceNotificationsEnabled": false, - "HeadersInComplaintNotificationsEnabled": false, - "HeadersInDeliveryNotificationsEnabled": false - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the notification attributes for an identity:", - "id": "getidentitynotificationattributes-1469123466947", - "title": "GetIdentityNotificationAttributes" - } - ], - "GetIdentityPolicies": [ - { - "input": { - "Identity": "example.com", - "PolicyNames": [ - "MyPolicy" - ] - }, - "output": { - "Policies": { - "MyPolicy": "{\"Version\":\"2008-10-17\",\"Statement\":[{\"Sid\":\"stmt1469123904194\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::123456789012:root\"},\"Action\":[\"ses:SendEmail\",\"ses:SendRawEmail\"],\"Resource\":\"arn:aws:ses:us-east-1:EXAMPLE65304:identity/example.com\"}]}" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a sending authorization policy for an identity:", - "id": "getidentitypolicies-1469123949351", - "title": "GetIdentityPolicies" - } - ], - "GetIdentityVerificationAttributes": [ - { - "input": { - "Identities": [ - "example.com" - ] - }, - "output": { - "VerificationAttributes": { - "example.com": { - "VerificationStatus": "Success", - "VerificationToken": "EXAMPLE3VYb9EDI2nTOQRi/Tf6MI/6bD6THIGiP1MVY=" - } - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the verification status and the verification token for a domain 
identity:", - "id": "getidentityverificationattributes-1469124205897", - "title": "GetIdentityVerificationAttributes" - } - ], - "GetSendQuota": [ - { - "output": { - "Max24HourSend": 200, - "MaxSendRate": 1, - "SentLast24Hours": 1 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the Amazon SES sending limits for an AWS account:", - "id": "getsendquota-1469047324508", - "title": "GetSendQuota" - } - ], - "GetSendStatistics": [ - { - "output": { - "SendDataPoints": [ - { - "Bounces": 0, - "Complaints": 0, - "DeliveryAttempts": 5, - "Rejects": 0, - "Timestamp": "2016-07-13T22:43:00Z" - }, - { - "Bounces": 0, - "Complaints": 0, - "DeliveryAttempts": 3, - "Rejects": 0, - "Timestamp": "2016-07-13T23:13:00Z" - }, - { - "Bounces": 0, - "Complaints": 0, - "DeliveryAttempts": 1, - "Rejects": 0, - "Timestamp": "2016-07-13T21:13:00Z" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns Amazon SES sending statistics:", - "id": "getsendstatistics-1469047741329", - "title": "GetSendStatistics" - } - ], - "ListIdentities": [ - { - "input": { - "IdentityType": "EmailAddress", - "MaxItems": 123, - "NextToken": "" - }, - "output": { - "Identities": [ - "user@example.com" - ], - "NextToken": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists the email address identities that have been submitted for verification with Amazon SES:", - "id": "listidentities-1469048638493", - "title": "ListIdentities" - } - ], - "ListIdentityPolicies": [ - { - "input": { - "Identity": "example.com" - }, - "output": { - "PolicyNames": [ - "MyPolicy" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a list of sending authorization policies that are attached to an identity:", - "id": "listidentitypolicies-1469124417674", - "title": "ListIdentityPolicies" - } - ], - "ListReceiptFilters": [ - { - "output": { - "Filters": [ - { - "IpFilter": { - "Cidr": "1.2.3.4/24", - "Policy": "Block" - }, - "Name": "MyFilter" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists the IP address filters that are associated with an AWS account:", - "id": "listreceiptfilters-1469120786789", - "title": "ListReceiptFilters" - } - ], - "ListReceiptRuleSets": [ - { - "input": { - "NextToken": "" - }, - "output": { - "NextToken": "", - "RuleSets": [ - { - "CreatedTimestamp": "2016-07-15T16:25:59.607Z", - "Name": "MyRuleSet" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists the receipt rule sets that exist under an AWS account:", - "id": "listreceiptrulesets-1469121037235", - "title": "ListReceiptRuleSets" - } - ], - "ListVerifiedEmailAddresses": [ - { - "output": { - "VerifiedEmailAddresses": [ - "user1@example.com", - "user2@example.com" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example lists all email addresses that have been submitted for verification with Amazon SES:", - "id": "listverifiedemailaddresses-1469051402570", - "title": "ListVerifiedEmailAddresses" - } - ], - "PutIdentityPolicy": [ - { - "input": { - "Identity": "example.com", - "Policy": 
"{\"Version\":\"2008-10-17\",\"Statement\":[{\"Sid\":\"stmt1469123904194\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::123456789012:root\"},\"Action\":[\"ses:SendEmail\",\"ses:SendRawEmail\"],\"Resource\":\"arn:aws:ses:us-east-1:EXAMPLE65304:identity/example.com\"}]}", - "PolicyName": "MyPolicy" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example adds a sending authorization policy to an identity:", - "id": "putidentitypolicy-1469124560016", - "title": "PutIdentityPolicy" - } - ], - "ReorderReceiptRuleSet": [ - { - "input": { - "RuleNames": [ - "MyRule", - "MyOtherRule" - ], - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example reorders the receipt rules within a receipt rule set:", - "id": "reorderreceiptruleset-1469058156806", - "title": "ReorderReceiptRuleSet" - } - ], - "SendEmail": [ - { - "input": { - "Destination": { - "BccAddresses": [ - - ], - "CcAddresses": [ - "recipient3@example.com" - ], - "ToAddresses": [ - "recipient1@example.com", - "recipient2@example.com" - ] - }, - "Message": { - "Body": { - "Html": { - "Charset": "UTF-8", - "Data": "This message body contains HTML formatting. It can, for example, contain links like this one: Amazon SES Developer Guide." - }, - "Text": { - "Charset": "UTF-8", - "Data": "This is the message body in text format." - } - }, - "Subject": { - "Charset": "UTF-8", - "Data": "Test email" - } - }, - "ReplyToAddresses": [ - - ], - "ReturnPath": "", - "ReturnPathArn": "", - "Source": "sender@example.com", - "SourceArn": "" - }, - "output": { - "MessageId": "EXAMPLE78603177f-7a5433e7-8edb-42ae-af10-f0181f34d6ee-000000" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sends a formatted email:", - "id": "sendemail-1469049656296", - "title": "SendEmail" - } - ], - "SendRawEmail": [ - { - "input": { - "Destinations": [ - - ], - "FromArn": "", - "RawMessage": { - "Data": "From: sender@example.com\\nTo: recipient@example.com\\nSubject: Test email (contains an attachment)\\nMIME-Version: 1.0\\nContent-type: Multipart/Mixed; boundary=\"NextPart\"\\n\\n--NextPart\\nContent-Type: text/plain\\n\\nThis is the message body.\\n\\n--NextPart\\nContent-Type: text/plain;\\nContent-Disposition: attachment; filename=\"attachment.txt\"\\n\\nThis is the text in the attachment.\\n\\n--NextPart--" - }, - "ReturnPathArn": "", - "Source": "", - "SourceArn": "" - }, - "output": { - "MessageId": "EXAMPLEf3f73d99b-c63fb06f-d263-41f8-a0fb-d0dc67d56c07-000000" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sends an email with an attachment:", - "id": "sendrawemail-1469118548649", - "title": "SendRawEmail" - } - ], - "SetActiveReceiptRuleSet": [ - { - "input": { - "RuleSetName": "RuleSetToActivate" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets the active receipt rule set:", - "id": "setactivereceiptruleset-1469058391329", - "title": "SetActiveReceiptRuleSet" - } - ], - "SetIdentityDkimEnabled": [ - { - "input": { - "DkimEnabled": true, - "Identity": "user@example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example configures Amazon SES to Easy DKIM-sign the email sent from an identity:", - "id": "setidentitydkimenabled-1469057485202", - "title": "SetIdentityDkimEnabled" - } - ], - "SetIdentityFeedbackForwardingEnabled": [ 
- { - "input": { - "ForwardingEnabled": true, - "Identity": "user@example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example configures Amazon SES to forward an identity's bounces and complaints via email:", - "id": "setidentityfeedbackforwardingenabled-1469056811329", - "title": "SetIdentityFeedbackForwardingEnabled" - } - ], - "SetIdentityHeadersInNotificationsEnabled": [ - { - "input": { - "Enabled": true, - "Identity": "user@example.com", - "NotificationType": "Bounce" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example configures Amazon SES to include the original email headers in the Amazon SNS bounce notifications for an identity:", - "id": "setidentityheadersinnotificationsenabled-1469057295001", - "title": "SetIdentityHeadersInNotificationsEnabled" - } - ], - "SetIdentityMailFromDomain": [ - { - "input": { - "BehaviorOnMXFailure": "UseDefaultValue", - "Identity": "user@example.com", - "MailFromDomain": "bounces.example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example configures Amazon SES to use a custom MAIL FROM domain for an identity:", - "id": "setidentitymailfromdomain-1469057693908", - "title": "SetIdentityMailFromDomain" - } - ], - "SetIdentityNotificationTopic": [ - { - "input": { - "Identity": "user@example.com", - "NotificationType": "Bounce", - "SnsTopic": "arn:aws:sns:us-west-2:111122223333:MyTopic" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets the Amazon SNS topic to which Amazon SES will publish bounce, complaint, and/or delivery notifications for emails sent with the specified identity as the Source:", - "id": "setidentitynotificationtopic-1469057854966", - "title": "SetIdentityNotificationTopic" - } - ], - "SetReceiptRulePosition": [ - { - "input": { - "After": "PutRuleAfterThisRule", - "RuleName": "RuleToReposition", - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example sets the position of a receipt rule in a receipt rule set:", - "id": "setreceiptruleposition-1469058530550", - "title": "SetReceiptRulePosition" - } - ], - "UpdateAccountSendingEnabled": [ - { - "input": { - "Enabled": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example updated the sending status for this account.", - "id": "updateaccountsendingenabled-1469047741333", - "title": "UpdateAccountSendingEnabled" - } - ], - "UpdateConfigurationSetReputationMetricsEnabled": [ - { - "input": { - "ConfigurationSetName": "foo", - "Enabled": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Set the reputationMetricsEnabled flag for a specific configuration set.", - "id": "updateconfigurationsetreputationmetricsenabled-2362747741333", - "title": "UpdateConfigurationSetReputationMetricsEnabled" - } - ], - "UpdateConfigurationSetSendingEnabled": [ - { - "input": { - "ConfigurationSetName": "foo", - "Enabled": true - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Set the sending enabled flag for a specific configuration set.", - "id": "updateconfigurationsetsendingenabled-2362747741333", - "title": "UpdateConfigurationSetReputationMetricsEnabled" - } - ], - "UpdateReceiptRule": [ - { - "input": { - "Rule": { - "Actions": [ - { - "S3Action": { - "BucketName": "MyBucket", - "ObjectKeyPrefix": 
"email" - } - } - ], - "Enabled": true, - "Name": "MyRule", - "ScanEnabled": true, - "TlsPolicy": "Optional" - }, - "RuleSetName": "MyRuleSet" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example updates a receipt rule to use an Amazon S3 action:", - "id": "updatereceiptrule-1469051756940", - "title": "UpdateReceiptRule" - } - ], - "VerifyDomainDkim": [ - { - "input": { - "Domain": "example.com" - }, - "output": { - "DkimTokens": [ - "EXAMPLEq76owjnks3lnluwg65scbemvw", - "EXAMPLEi3dnsj67hstzaj673klariwx2", - "EXAMPLEwfbtcukvimehexktmdtaz6naj" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example generates DKIM tokens for a domain that has been verified with Amazon SES:", - "id": "verifydomaindkim-1469049503083", - "title": "VerifyDomainDkim" - } - ], - "VerifyDomainIdentity": [ - { - "input": { - "Domain": "example.com" - }, - "output": { - "VerificationToken": "eoEmxw+YaYhb3h3iVJHuXMJXqeu1q1/wwmvjuEXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example starts the domain verification process with Amazon SES:", - "id": "verifydomainidentity-1469049165936", - "title": "VerifyDomainIdentity" - } - ], - "VerifyEmailAddress": [ - { - "input": { - "EmailAddress": "user@example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example starts the email address verification process with Amazon SES:", - "id": "verifyemailaddress-1469048849187", - "title": "VerifyEmailAddress" - } - ], - "VerifyEmailIdentity": [ - { - "input": { - "EmailAddress": "user@example.com" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example starts the email address verification process with Amazon SES:", - "id": "verifyemailidentity-1469049068623", - "title": "VerifyEmailIdentity" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/ses/2010-12-01/paginators-1.json deleted file mode 100644 index 1eb0054..0000000 --- a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/paginators-1.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "pagination": { - "ListIdentities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxItems", - "result_key": "Identities" - }, - "ListCustomVerificationEmailTemplates": { - "result_key": "CustomVerificationEmailTemplates", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListConfigurationSets": { - "input_token": "NextToken", - "limit_key": "MaxItems", - "output_token": "NextToken", - "result_key": "ConfigurationSets" - }, - "ListReceiptRuleSets": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "RuleSets" - }, - "ListTemplates": { - "input_token": "NextToken", - "limit_key": "MaxItems", - "output_token": "NextToken", - "result_key": "TemplatesMetadata" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ses/2010-12-01/service-2.json.gz deleted file mode 100644 index 3c80869..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/ses/2010-12-01/waiters-2.json deleted file mode 100644 index b585d30..0000000 --- 
a/venv/Lib/site-packages/botocore/data/ses/2010-12-01/waiters-2.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 2, - "waiters": { - "IdentityExists": { - "delay": 3, - "operation": "GetIdentityVerificationAttributes", - "maxAttempts": 20, - "acceptors": [ - { - "expected": "Success", - "matcher": "pathAll", - "state": "success", - "argument": "VerificationAttributes.*.VerificationStatus" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index ab4d407..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/examples-1.json b/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/paginators-1.json deleted file mode 100644 index efa658c..0000000 --- a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListMultiRegionEndpoints": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "MultiRegionEndpoints" - }, - "ListReputationEntities": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "ReputationEntities" - }, - "ListResourceTenants": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "ResourceTenants" - }, - "ListTenantResources": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "TenantResources" - }, - "ListTenants": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "PageSize", - "result_key": "Tenants" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/service-2.json.gz deleted file mode 100644 index b57b6c8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sesv2/2019-09-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/shield/2016-06-02/endpoint-rule-set-1.json.gz deleted file mode 100644 index eaaced4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/examples-1.json b/venv/Lib/site-packages/botocore/data/shield/2016-06-02/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/paginators-1.json b/venv/Lib/site-packages/botocore/data/shield/2016-06-02/paginators-1.json deleted file mode 100644 index c5ded64..0000000 --- a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/paginators-1.json +++ /dev/null @@ 
-1,16 +0,0 @@ -{ - "pagination": { - "ListProtections": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Protections" - }, - "ListAttacks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AttackSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/service-2.json.gz b/venv/Lib/site-packages/botocore/data/shield/2016-06-02/service-2.json.gz deleted file mode 100644 index 0e5c670..0000000 Binary files a/venv/Lib/site-packages/botocore/data/shield/2016-06-02/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/signer/2017-08-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 720f575..0000000 Binary files a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/examples-1.json b/venv/Lib/site-packages/botocore/data/signer/2017-08-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/signer/2017-08-25/paginators-1.json deleted file mode 100644 index 1e049e7..0000000 --- a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListSigningJobs": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "jobs" - }, - "ListSigningPlatforms": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "platforms" - }, - "ListSigningProfiles": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "profiles" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/signer/2017-08-25/service-2.json.gz deleted file mode 100644 index ca8c4bf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/waiters-2.json b/venv/Lib/site-packages/botocore/data/signer/2017-08-25/waiters-2.json deleted file mode 100644 index a0890ad..0000000 --- a/venv/Lib/site-packages/botocore/data/signer/2017-08-25/waiters-2.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "version": 2, - "waiters": { - "SuccessfulSigningJob": { - "delay": 20, - "operation": "DescribeSigningJob", - "maxAttempts": 25, - "acceptors": [ - { - "expected": "Succeeded", - "matcher": "path", - "state": "success", - "argument": "status" - }, - { - "expected": "Failed", - "matcher": "path", - "state": "failure", - "argument": "status" - }, - { - "expected": "ResourceNotFoundException", - "matcher": "error", - "state": "failure" - } - ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/signin/2023-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/signin/2023-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index c5a9ad4..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/signin/2023-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/signin/2023-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/signin/2023-01-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/signin/2023-01-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/signin/2023-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/signin/2023-01-01/service-2.json.gz deleted file mode 100644 index 0d90555..0000000 Binary files a/venv/Lib/site-packages/botocore/data/signin/2023-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3149f50..0000000 Binary files a/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/service-2.json.gz deleted file mode 100644 index e3e6741..0000000 Binary files a/venv/Lib/site-packages/botocore/data/simspaceweaver/2022-10-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sms-voice/2018-09-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sms-voice/2018-09-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index d341a98..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sms-voice/2018-09-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sms-voice/2018-09-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sms-voice/2018-09-05/service-2.json.gz deleted file mode 100644 index ccd2314..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sms-voice/2018-09-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/endpoint-rule-set-1.json.gz deleted file mode 100644 index c3a9432..0000000 Binary files a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/examples-1.json b/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/paginators-1.json b/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/paginators-1.json deleted file mode 100644 
index 8b11209..0000000 --- a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListDeviceResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "resources" - }, - "ListDevices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "devices" - }, - "ListExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "executions" - }, - "ListTasks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "tasks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/service-2.json.gz b/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/service-2.json.gz deleted file mode 100644 index 761047d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/snow-device-management/2021-08-04/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2fbb3c6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/examples-1.json b/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/examples-1.json deleted file mode 100644 index 2b13f7b..0000000 --- a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/examples-1.json +++ /dev/null @@ -1,442 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CancelCluster": [ - { - "input": { - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000" - }, - "comments": { - }, - "description": "This operation cancels a cluster job. You can only cancel a cluster job while it's in the AwaitingQuorum status.", - "id": "to-cancel-a-cluster-job-1482533760554", - "title": "To cancel a cluster job" - } - ], - "CancelJob": [ - { - "input": { - "JobId": "JID123e4567-e89b-12d3-a456-426655440000" - }, - "comments": { - }, - "description": "This operation cancels a job. You can only cancel a job before its JobState value changes to PreparingAppliance.", - "id": "to-cancel-a-job-for-a-snowball-device-1482534699477", - "title": "To cancel a job for a Snowball device" - } - ], - "CreateAddress": [ - { - "input": { - "Address": { - "City": "Seattle", - "Company": "My Company's Name", - "Country": "USA", - "Name": "My Name", - "PhoneNumber": "425-555-5555", - "PostalCode": "98101", - "StateOrProvince": "WA", - "Street1": "123 Main Street" - } - }, - "output": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b" - }, - "comments": { - }, - "description": "This operation creates an address for a job. Addresses are validated at the time of creation. The address you provide must be located within the serviceable area of your region. 
If the address is invalid or unsupported, then an exception is thrown.", - "id": "to-create-an-address-for-a-job-1482535416294", - "title": "To create an address for a job" - } - ], - "CreateCluster": [ - { - "input": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "Description": "MyCluster", - "JobType": "LOCAL_USE", - "KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456", - "Notification": { - "JobStatesToNotify": [ - - ], - "NotifyAll": false - }, - "Resources": { - "S3Resources": [ - { - "BucketArn": "arn:aws:s3:::MyBucket", - "KeyRange": { - } - } - ] - }, - "RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role", - "ShippingOption": "SECOND_DAY", - "SnowballType": "EDGE" - }, - "output": { - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000" - }, - "comments": { - }, - "description": "Creates an empty cluster. Each cluster supports five nodes. You use the CreateJob action separately to create the jobs for each of these nodes. The cluster does not ship until these five node jobs have been created.", - "id": "to-create-a-cluster-1482864724077", - "title": "To create a cluster" - } - ], - "CreateJob": [ - { - "input": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "Description": "My Job", - "JobType": "IMPORT", - "KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456", - "Notification": { - "JobStatesToNotify": [ - - ], - "NotifyAll": false - }, - "Resources": { - "S3Resources": [ - { - "BucketArn": "arn:aws:s3:::MyBucket", - "KeyRange": { - } - } - ] - }, - "RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role", - "ShippingOption": "SECOND_DAY", - "SnowballCapacityPreference": "T80", - "SnowballType": "STANDARD" - }, - "output": { - "JobId": "JID123e4567-e89b-12d3-a456-426655440000" - }, - "comments": { - }, - "description": "Creates a job to import or export data between Amazon S3 and your on-premises data center. Your AWS account must have the right trust policies and permissions in place to create a job for Snowball. If you're creating a job for a node in a cluster, you only need to provide the clusterId value; the other job attributes are inherited from the cluster.", - "id": "to-create-a-job-1482864834886", - "title": "To create a job" - } - ], - "DescribeAddress": [ - { - "input": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b" - }, - "output": { - "Address": { - "AddressId": "ADID5643ec50-3eec-4eb3-9be6-9374c10eb51b", - "City": "Seattle", - "Company": "My Company", - "Country": "US", - "Name": "My Name", - "PhoneNumber": "425-555-5555", - "PostalCode": "98101", - "StateOrProvince": "WA", - "Street1": "123 Main Street" - } - }, - "comments": { - }, - "description": "This operation describes an address for a job.", - "id": "to-describe-an-address-for-a-job-1482538608745", - "title": "To describe an address for a job" - } - ], - "DescribeAddresses": [ - { - "input": { - }, - "output": { - "Addresses": [ - { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "City": "Seattle", - "Company": "My Company", - "Country": "US", - "Name": "My Name", - "PhoneNumber": "425-555-5555", - "PostalCode": "98101", - "StateOrProvince": "WA", - "Street1": "123 Main Street" - } - ] - }, - "comments": { - }, - "description": "This operation describes all the addresses that you've created for AWS Snowball. 
Calling this API in one of the US regions will return addresses from the list of all addresses associated with this account in all US regions.", - "id": "to-describe-all-the-addresses-youve-created-for-aws-snowball-1482538936603", - "title": "To describe all the addresses you've created for AWS Snowball" - } - ], - "DescribeCluster": [ - { - "input": { - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000" - }, - "output": { - "ClusterMetadata": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000", - "ClusterState": "Pending", - "CreationDate": "1480475517.0", - "Description": "MyCluster", - "JobType": "LOCAL_USE", - "KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456", - "Notification": { - "JobStatesToNotify": [ - - ], - "NotifyAll": false - }, - "Resources": { - "S3Resources": [ - { - "BucketArn": "arn:aws:s3:::MyBucket", - "KeyRange": { - } - } - ] - }, - "RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role", - "ShippingOption": "SECOND_DAY" - } - }, - "comments": { - }, - "description": "Returns information about a specific cluster including shipping information, cluster status, and other important metadata.", - "id": "to-describe-a-cluster-1482864218396", - "title": "To describe a cluster" - } - ], - "DescribeJob": [ - { - "input": { - "JobId": "JID123e4567-e89b-12d3-a456-426655440000" - }, - "output": { - "JobMetadata": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "CreationDate": "1475626164", - "Description": "My Job", - "JobId": "JID123e4567-e89b-12d3-a456-426655440000", - "JobState": "New", - "JobType": "IMPORT", - "KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456", - "Notification": { - "JobStatesToNotify": [ - - ], - "NotifyAll": false - }, - "Resources": { - "S3Resources": [ - { - "BucketArn": "arn:aws:s3:::MyBucket", - "KeyRange": { - } - } - ] - }, - "RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role", - "ShippingDetails": { - "ShippingOption": "SECOND_DAY" - }, - "SnowballCapacityPreference": "T80", - "SnowballType": "STANDARD" - } - }, - "comments": { - }, - "description": "This operation describes a job you've created for AWS Snowball.", - "id": "to-describe-a-job-youve-created-for-aws-snowball-1482539500180", - "title": "To describe a job you've created for AWS Snowball" - } - ], - "GetJobManifest": [ - { - "input": { - "JobId": "JID123e4567-e89b-12d3-a456-426655440000" - }, - "output": { - "ManifestURI": "https://awsie-frosty-manifests-prod.s3.amazonaws.com/JID123e4567-e89b-12d3-a456-426655440000_manifest.bin?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20161224T005115Z&X-Amz-SignedHeaders=..." - }, - "comments": { - }, - "description": "Returns a link to an Amazon S3 presigned URL for the manifest file associated with the specified JobId value. You can access the manifest file for up to 60 minutes after this request has been made. To access the manifest file after 60 minutes have passed, you'll have to make another call to the GetJobManifest action.\n\nThe manifest is an encrypted file that you can download after your job enters the WithCustomer status. The manifest is decrypted by using the UnlockCode code value, when you pass both values to the Snowball through the Snowball client when the client is started for the first time.\n\nAs a best practice, we recommend that you don't save a copy of an UnlockCode value in the same location as the manifest file for that job. 
Saving these separately helps prevent unauthorized parties from gaining access to the Snowball associated with that job.\n\nThe credentials of a given job, including its manifest file and unlock code, expire 90 days after the job is created.", - "id": "to-get-the-manifest-for-a-job-youve-created-for-aws-snowball-1482540389246", - "title": "To get the manifest for a job you've created for AWS Snowball" - } - ], - "GetJobUnlockCode": [ - { - "input": { - "JobId": "JID123e4567-e89b-12d3-a456-426655440000" - }, - "output": { - "UnlockCode": "12345-abcde-56789-fghij-01234" - }, - "comments": { - }, - "description": "Returns the UnlockCode code value for the specified job. A particular UnlockCode value can be accessed for up to 90 days after the associated job has been created.\n\nThe UnlockCode value is a 29-character code with 25 alphanumeric characters and 4 hyphens. This code is used to decrypt the manifest file when it is passed along with the manifest to the Snowball through the Snowball client when the client is started for the first time.\n\nAs a best practice, we recommend that you don't save a copy of the UnlockCode in the same location as the manifest file for that job. Saving these separately helps prevent unauthorized parties from gaining access to the Snowball associated with that job.", - "id": "to-get-the-unlock-code-for-a-job-youve-created-for-aws-snowball-1482541987286", - "title": "To get the unlock code for a job you've created for AWS Snowball" - } - ], - "GetSnowballUsage": [ - { - "input": { - }, - "output": { - "SnowballLimit": 1, - "SnowballsInUse": 0 - }, - "comments": { - }, - "description": "Returns information about the Snowball service limit for your account, and also the number of Snowballs your account has in use.\n\nThe default service limit for the number of Snowballs that you can have at one time is 1. If you want to increase your service limit, contact AWS Support.", - "id": "to-see-your-snowball-service-limit-and-the-number-of-snowballs-you-have-in-use-1482863394588", - "title": "To see your Snowball service limit and the number of Snowballs you have in use" - } - ], - "ListClusterJobs": [ - { - "input": { - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000" - }, - "output": { - "JobListEntries": [ - { - "CreationDate": "1480475524.0", - "Description": "MyClustrer-node-001", - "IsMaster": false, - "JobId": "JID123e4567-e89b-12d3-a456-426655440000", - "JobState": "New", - "JobType": "LOCAL_USE", - "SnowballType": "EDGE" - }, - { - "CreationDate": "1480475525.0", - "Description": "MyClustrer-node-002", - "IsMaster": false, - "JobId": "JID123e4567-e89b-12d3-a456-426655440001", - "JobState": "New", - "JobType": "LOCAL_USE", - "SnowballType": "EDGE" - }, - { - "CreationDate": "1480475525.0", - "Description": "MyClustrer-node-003", - "IsMaster": false, - "JobId": "JID123e4567-e89b-12d3-a456-426655440002", - "JobState": "New", - "JobType": "LOCAL_USE", - "SnowballType": "EDGE" - }, - { - "CreationDate": "1480475525.0", - "Description": "MyClustrer-node-004", - "IsMaster": false, - "JobId": "JID123e4567-e89b-12d3-a456-426655440003", - "JobState": "New", - "JobType": "LOCAL_USE", - "SnowballType": "EDGE" - }, - { - "CreationDate": "1480475525.0", - "Description": "MyClustrer-node-005", - "IsMaster": false, - "JobId": "JID123e4567-e89b-12d3-a456-426655440004", - "JobState": "New", - "JobType": "LOCAL_USE", - "SnowballType": "EDGE" - } - ] - }, - "comments": { - }, - "description": "Returns an array of JobListEntry objects of the specified length. 
Each JobListEntry object is for a job in the specified cluster and contains a job's state, a job's ID, and other information.", - "id": "to-get-a-list-of-jobs-in-a-cluster-that-youve-created-for-aws-snowball-1482863105773", - "title": "To get a list of jobs in a cluster that you've created for AWS Snowball" - } - ], - "ListClusters": [ - { - "input": { - }, - "output": { - "ClusterListEntries": [ - { - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000", - "ClusterState": "Pending", - "CreationDate": "1480475517.0", - "Description": "MyCluster" - } - ] - }, - "comments": { - }, - "description": "Returns an array of ClusterListEntry objects of the specified length. Each ClusterListEntry object contains a cluster's state, a cluster's ID, and other important status information.", - "id": "to-get-a-list-of-clusters-that-youve-created-for-aws-snowball-1482862223003", - "title": "To get a list of clusters that you've created for AWS Snowball" - } - ], - "ListJobs": [ - { - "input": { - }, - "output": { - "JobListEntries": [ - { - "CreationDate": "1460678186.0", - "Description": "MyJob", - "IsMaster": false, - "JobId": "JID123e4567-e89b-12d3-a456-426655440000", - "JobState": "New", - "JobType": "IMPORT", - "SnowballType": "STANDARD" - } - ] - }, - "comments": { - }, - "description": "Returns an array of JobListEntry objects of the specified length. Each JobListEntry object contains a job's state, a job's ID, and a value that indicates whether the job is a job part, in the case of export jobs. Calling this API action in one of the US regions will return jobs from the list of all jobs associated with this account in all US regions.", - "id": "to-get-a-list-of-jobs-that-youve-created-for-aws-snowball-1482542167627", - "title": "To get a list of jobs that you've created for AWS Snowball" - } - ], - "UpdateCluster": [ - { - "input": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "ClusterId": "CID123e4567-e89b-12d3-a456-426655440000", - "Description": "updated-cluster-name" - }, - "comments": { - }, - "description": "This action allows you to update certain parameters for a cluster. Once the cluster changes to a different state, usually within 60 minutes of it being created, this action is no longer available.", - "id": "to-update-a-cluster-1482863900595", - "title": "To update a cluster" - } - ], - "UpdateJob": [ - { - "input": { - "AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b", - "Description": "updated-job-name", - "JobId": "JID123e4567-e89b-12d3-a456-426655440000", - "ShippingOption": "NEXT_DAY", - "SnowballCapacityPreference": "T100" - }, - "comments": { - }, - "description": "This action allows you to update certain parameters for a job. 
Once the job changes to a different job state, usually within 60 minutes of the job being created, this action is no longer available.", - "id": "to-update-a-job-1482863556886", - "title": "To update a job" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/paginators-1.json deleted file mode 100644 index 05a7ea8..0000000 --- a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListJobs": { - "limit_key": "MaxResults", - "output_token": "NextToken", - "input_token": "NextToken", - "result_key": "JobListEntries" - }, - "DescribeAddresses": { - "limit_key": "MaxResults", - "output_token": "NextToken", - "input_token": "NextToken", - "result_key": "Addresses" - }, - "ListClusterJobs": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "JobListEntries" - }, - "ListClusters": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ClusterListEntries" - }, - "ListCompatibleImages": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CompatibleImages" - }, - "ListLongTermPricing": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "LongTermPricingEntries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/service-2.json.gz deleted file mode 100644 index aaf9ca6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/snowball/2016-06-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sns/2010-03-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index 38d3d9e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/examples-1.json b/venv/Lib/site-packages/botocore/data/sns/2010-03-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/sns/2010-03-31/paginators-1.json deleted file mode 100644 index 5be5250..0000000 --- a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "ListEndpointsByPlatformApplication": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Endpoints" - }, - "ListPlatformApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "PlatformApplications" - }, - "ListSubscriptions": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Subscriptions" - }, - "ListSubscriptionsByTopic": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Subscriptions" - }, - "ListTopics": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Topics" - }, - "ListPhoneNumbersOptedOut": { - "input_token": "nextToken", - "output_token": 
"nextToken", - "result_key": "phoneNumbers" - }, - "ListOriginationNumbers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PhoneNumbers" - }, - "ListSMSSandboxPhoneNumbers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PhoneNumbers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sns/2010-03-31/service-2.json.gz deleted file mode 100644 index e08fa27..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sns/2010-03-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 73131ba..0000000 Binary files a/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/paginators-1.json deleted file mode 100644 index a137743..0000000 --- a/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListLinkedWhatsAppBusinessAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "linkedAccounts" - }, - "ListWhatsAppMessageTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "templates" - }, - "ListWhatsAppTemplateLibrary": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "metaLibraryTemplates" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/service-2.json.gz deleted file mode 100644 index 38529d9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/socialmessaging/2024-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index a9e50d4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/examples-1.json b/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/paginators-1.json b/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/paginators-1.json deleted file mode 100644 index 7c22d43..0000000 --- a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListDeadLetterSourceQueues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "queueUrls" - }, - "ListQueues": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", 
- "result_key": "QueueUrls" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/service-2.json.gz deleted file mode 100644 index c2d996a..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sqs/2012-11-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/endpoint-rule-set-1.json.gz deleted file mode 100644 index ca052fd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/examples-1.json b/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/examples-1.json deleted file mode 100644 index d7c714d..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/examples-1.json +++ /dev/null @@ -1,714 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AcceptPage": [ - { - "input": { - "AcceptCode": "425440", - "AcceptType": "READ", - "PageId": "arn:aws:ssm-contacts:us-east-2:682428703967:page/akuam/94ea0c7b-56d9-46c3-b84a-a37c8b067ad3" - }, - "output": { - }, - "comments": { - }, - "description": "The following accept-page operation uses an accept code sent to the contact channel to accept a page.", - "id": "to-accept-a-page-during-and-engagement-1630357840187", - "title": "To accept a page during and engagement" - } - ], - "ActivateContactChannel": [ - { - "input": { - "ActivationCode": "466136", - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d" - }, - "output": { - }, - "comments": { - }, - "description": "The following activate-contact-channel example activates a contact channel and makes it usable as part of an incident.", - "id": "activate-a-contacts-contact-channel-1630359780075", - "title": "Activate a contact's contact channel" - } - ], - "CreateContact": [ - { - "input": { - "Alias": "akuam", - "DisplayName": "Akua Mansa", - "Plan": { - "Stages": [ - - ] - }, - "Type": "PERSONAL" - }, - "output": { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam" - }, - "comments": { - }, - "description": "The following create-contact example creates a contact in your environment with a blank plan. The plan can be updated after creating contact channels. Use the create-contact-channel operation with the output ARN of this command. After you have created contact channels for this contact use update-contact to update the plan.", - "id": "to-create-a-contact-1630360152750", - "title": "To create a contact" - } - ], - "CreateContactChannel": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam", - "DeliveryAddress": { - "SimpleAddress": "+15005550199" - }, - "Name": "akuas sms-test", - "Type": "SMS" - }, - "output": { - "ContactChannelArn": "arn:aws:ssm-contacts:us-east-1:111122223333:contact-channel/akuam/02f506b9-ea5d-4764-af89-2daa793ff024" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a contact channel of type SMS for the contact Akua Mansa. 
Contact channels can be created of type SMS, EMAIL, or VOICE.", - "id": "to-create-a-contact-channel-1630360447010", - "title": "To create a contact channel" - } - ], - "DeactivateContactChannel": [ - { - "input": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d" - }, - "output": { - }, - "comments": { - }, - "description": "The following ``deactivate-contact-channel`` example deactivates a contact channel. Deactivating a contact channel means the contact channel will no longer be paged during an incident. You can also reactivate a contact channel at any time using the activate-contact-channel operation.", - "id": "to-deactivate-a-contact-channel-1630360853894", - "title": "To deactivate a contact channel" - } - ], - "DeleteContact": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/alejr" - }, - "output": { - }, - "comments": { - }, - "description": "The following delete-contact example deletes a contact. The contact will no longer be reachable from any escalation plan that refers to them.", - "id": "to-delete-a-contact-1630361093863", - "title": "To delete a contact" - } - ], - "DeleteContactChannel": [ - { - "input": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-1:111122223333:contact-channel/akuam/13149bad-52ee-45ea-ae1e-45857f78f9b2" - }, - "output": { - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following delete-contact-channel example deletes a contact channel. Deleting a contact channel ensures the contact channel will not be paged during an incident.", - "id": "to-delete-a-contact-channel-1630364616682", - "title": "To delete a contact channel" - } - ], - "DescribeEngagement": [ - { - "input": { - "EngagementId": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/example_escalation/69e40ce1-8dbb-4d57-8962-5fbe7fc53356" - }, - "output": { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/example_escalation", - "Content": "Testing engagements", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/example_escalation/69e40ce1-8dbb-4d57-8962-5fbe7fc53356", - "PublicContent": "Testing engagements", - "PublicSubject": "test", - "Sender": "tester", - "StartTime": "2021-05-18T18:25:41.151000+00:00", - "Subject": "test" - }, - "comments": { - }, - "description": "The following describe-engagement example lists the details of an engagement to a contact or escalation plan. 
The subject and content are sent to the contact channels.", - "id": "to-describe-the-details-of-an-engagement-1630364719475", - "title": "To describe the details of an engagement" - } - ], - "DescribePage": [ - { - "input": { - "PageId": "arn:aws:ssm-contacts:us-east-2:111122223333:page/akuam/ad0052bd-e606-498a-861b-25726292eb93" - }, - "output": { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "Content": "Testing engagements", - "DeliveryTime": "2021-05-18T18:43:55.265000+00:00", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/78a29753-3674-4ac5-9f83-0468563567f0", - "PageArn": "arn:aws:ssm-contacts:us-east-2:111122223333:page/akuam/ad0052bd-e606-498a-861b-25726292eb93", - "PublicContent": "Testing engagements", - "PublicSubject": "test", - "ReadTime": "2021-05-18T18:43:55.708000+00:00", - "Sender": "tester", - "SentTime": "2021-05-18T18:43:29.301000+00:00", - "Subject": "test" - }, - "comments": { - }, - "description": "The following describe-page example lists details of a page to a contact channel. The page will include the subject and content provided.", - "id": "to-list-the-details-of-a-page-to-a-contact-channel-1630364907282", - "title": "To list the details of a page to a contact channel" - } - ], - "GetContact": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam" - }, - "output": { - "Alias": "akuam", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "DisplayName": "Akua Mansa", - "Plan": { - "Stages": [ - { - "DurationInMinutes": 5, - "Targets": [ - { - "ChannelTargetInfo": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/beb25840-5ac8-4644-95cc-7a8de390fa65", - "RetryIntervalInMinutes": 1 - } - } - ] - }, - { - "DurationInMinutes": 5, - "Targets": [ - { - "ChannelTargetInfo": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/49f3c24d-5f9f-4638-ae25-3f49e04229ad", - "RetryIntervalInMinutes": 1 - } - } - ] - }, - { - "DurationInMinutes": 5, - "Targets": [ - { - "ChannelTargetInfo": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/77d4f447-f619-4954-afff-85551e369c2a", - "RetryIntervalInMinutes": 1 - } - } - ] - } - ] - }, - "Type": "PERSONAL" - }, - "comments": { - }, - "description": "The following get-contact example describes a contact.", - "id": "example-1-to-describe-a-contact-plan-1630365360005", - "title": "Example 1: To describe a contact plan" - }, - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/example_escalation" - }, - "output": { - "Alias": "example_escalation", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/example_escalation", - "DisplayName": "Example Escalation Plan", - "Plan": { - "Stages": [ - { - "DurationInMinutes": 5, - "Targets": [ - { - "ContactTargetInfo": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "IsEssential": true - } - } - ] - }, - { - "DurationInMinutes": 5, - "Targets": [ - { - "ContactTargetInfo": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/alejr", - "IsEssential": false - } - } - ] - }, - { - "DurationInMinutes": 0, - "Targets": [ - { - "ContactTargetInfo": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/anasi", - "IsEssential": false - } - } - ] - } - ] - }, - "Type": "ESCALATION" - }, - "comments": { - }, - "description": "The following get-contact 
example describes an escalation plan.", - "id": "example-2-to-describe-an-escalation-plan-1630365515731", - "title": "Example 2: To describe an escalation plan" - } - ], - "GetContactChannel": [ - { - "input": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d" - }, - "output": { - "ActivationStatus": "ACTIVATED", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "ContactChannelArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d", - "DeliveryAddress": { - "SimpleAddress": "+15005550199" - }, - "Name": "akuas sms", - "Type": "SMS" - }, - "comments": { - }, - "description": "The following get-contact-channel example lists the details of a contact channel.", - "id": "to-list-the-details-of-a-contact-channel-1630365682730", - "title": "To list the details of a contact channel" - } - ], - "GetContactPolicy": [ - { - "input": { - "ContactArn": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam" - }, - "output": { - "ContactArn": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam", - "Policy": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"SharePolicyForDocumentationDralia\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"222233334444\"},\"Action\":[\"ssm-contacts:GetContact\",\"ssm-contacts:StartEngagement\",\"ssm-contacts:DescribeEngagement\",\"ssm-contacts:ListPagesByEngagement\",\"ssm-contacts:StopEngagement\"],\"Resource\":[\"arn:aws:ssm-contacts:*:111122223333:contact/akuam\",\"arn:aws:ssm-contacts:*:111122223333:engagement/akuam/*\"]}]}" - }, - "comments": { - }, - "description": "The following get-contact-policy example lists the resource policies associated with the specified contact.", - "id": "to-list-the-details-of-a-contact-channel-1630365682730", - "title": "To list the resource policies of a contact" - } - ], - "ListContactChannels": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam" - }, - "output": { - "ContactChannels": [ - { - "ActivationStatus": "ACTIVATED", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "ContactChannelArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d", - "DeliveryAddress": { - "SimpleAddress": "+15005550100" - }, - "Name": "akuas sms", - "Type": "SMS" - } - ] - }, - "comments": { - }, - "description": "The following list-contact-channels example lists the available contact channels of the specified contact.", - "id": "to-list-the-contact-channels-of-a-contact-1630366544252", - "title": "To list the contact channels of a contact" - } - ], - "ListContacts": [ - { - "input": { - }, - "output": { - "Contacts": [ - { - "Alias": "akuam", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "DisplayName": "Akua Mansa", - "Type": "PERSONAL" - }, - { - "Alias": "alejr", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/alejr", - "DisplayName": "Alejandro Rosalez", - "Type": "PERSONAL" - }, - { - "Alias": "anasi", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/anasi", - "DisplayName": "Ana Carolina Silva", - "Type": "PERSONAL" - }, - { - "Alias": "example_escalation", - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/example_escalation", - "DisplayName": "Example Escalation", - "Type": "ESCALATION" - } - ] - }, - "comments": { - }, - "description": "The following 
list-contacts example lists the contacts and escalation plans in your account.", - "id": "to-list-all-escalation-plans-and-contacts-1630367103082", - "title": "To list all escalation plans and contacts" - } - ], - "ListEngagements": [ - { - "input": { - }, - "output": { - "Engagements": [ - { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/91792571-0b53-4821-9f73-d25d13d9e529", - "Sender": "cli", - "StartTime": "2021-05-18T20:37:50.300000+00:00" - }, - { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/78a29753-3674-4ac5-9f83-0468563567f0", - "Sender": "cli", - "StartTime": "2021-05-18T18:40:26.666000+00:00" - }, - { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/example_escalation", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/example_escalation/69e40ce1-8dbb-4d57-8962-5fbe7fc53356", - "Sender": "cli", - "StartTime": "2021-05-18T18:25:41.151000+00:00" - }, - { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/607ced0e-e8fa-4ea7-8958-a237b8803f8f", - "Sender": "cli", - "StartTime": "2021-05-18T18:20:58.093000+00:00" - } - ] - }, - "comments": { - }, - "description": "The following list-engagements example lists engagements to escalation plans and contacts. You can also list engagements for a single incident.", - "id": "to-list-all-engagements-1630367432635", - "title": "To list all engagements" - } - ], - "ListPageReceipts": [ - { - "input": { - "PageId": "arn:aws:ssm-contacts:us-east-2:111122223333:page/akuam/94ea0c7b-56d9-46c3-b84a-a37c8b067ad3" - }, - "output": { - "Receipts": [ - { - "ContactChannelArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d", - "ReceiptInfo": "425440", - "ReceiptTime": "2021-05-18T20:42:57.485000+00:00", - "ReceiptType": "DELIVERED" - }, - { - "ContactChannelArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d", - "ReceiptInfo": "425440", - "ReceiptTime": "2021-05-18T20:42:57.907000+00:00", - "ReceiptType": "READ" - }, - { - "ContactChannelArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/fc7405c4-46b2-48b7-87b2-93e2f225b90d", - "ReceiptInfo": "SM6656c19132f1465f9c9c1123a5dde7c9", - "ReceiptTime": "2021-05-18T20:40:52.962000+00:00", - "ReceiptType": "SENT" - } - ] - }, - "comments": { - }, - "description": "The following command-name example lists whether a page was received or not by a contact.", - "id": "to-list-page-receipts-1630367706869", - "title": "To list page receipts" - } - ], - "ListPagesByContact": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam" - }, - "output": { - "Pages": [ - { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "DeliveryTime": "2021-05-18T18:43:55.265000+00:00", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/78a29753-3674-4ac5-9f83-0468563567f0", - "PageArn": "arn:aws:ssm-contacts:us-east-2:111122223333:page/akuam/ad0052bd-e606-498a-861b-25726292eb93", - "ReadTime": "2021-05-18T18:43:55.708000+00:00", - "Sender": "cli", - "SentTime": "2021-05-18T18:43:29.301000+00:00" - } - ] - }, - "comments": { - }, - 
"description": "The following list-pages-by-contact example lists all pages to the specified contact.", - "id": "to-list-pages-by-contact-1630435789132", - "title": "To list pages by contact" - } - ], - "ListPagesByEngagement": [ - { - "input": { - "EngagementId": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/78a29753-3674-4ac5-9f83-0468563567f0" - }, - "output": { - "Pages": [ - { - "ContactArn": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/78a29753-3674-4ac5-9f83-0468563567f0", - "PageArn": "arn:aws:ssm-contacts:us-east-2:111122223333:page/akuam/ad0052bd-e606-498a-861b-25726292eb93", - "Sender": "cli", - "SentTime": "2021-05-18T18:40:27.245000+00:00" - } - ] - }, - "comments": { - }, - "description": "The following list-pages-by-engagement example lists the pages that occurred while engaging the defined engagement plan.", - "id": "to-list-pages-to-contact-channels-started-from-an-engagement-1630435864674", - "title": "To list pages to contact channels started from an engagement." - } - ], - "ListTagsForResource": [ - { - "input": { - "ResourceARN": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam" - }, - "output": { - "Tags": [ - { - "Key": "group1", - "Value": "1" - } - ] - }, - "comments": { - }, - "description": "The following list-tags-for-resource example lists the tags of the specified contact.", - "id": "to-list-tags-for-a-contact-1630436051681", - "title": "To list tags for a contact" - } - ], - "PutContactPolicy": [ - { - "input": { - "ContactArn": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam", - "Policy": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"ExampleResourcePolicy\",\"Action\":[\"ssm-contacts:GetContact\",\"ssm-contacts:StartEngagement\",\"ssm-contacts:DescribeEngagement\",\"ssm-contacts:ListPagesByEngagement\",\"ssm-contacts:StopEngagement\"],\"Principal\":{\"AWS\":\"222233334444\"},\"Effect\":\"Allow\",\"Resource\":[\"arn:aws:ssm-contacts:*:111122223333:contact/akuam\",\"arn:aws:ssm-contacts:*:111122223333:engagement/akuam/*\"]}]}" - }, - "output": { - }, - "comments": { - }, - "description": "The following put-contact-policy example adds a resource policy to the contact Akua that shares the contact and related engagements with the principal.", - "id": "to-share-a-contact-and-engagements-1630436278898", - "title": "To share a contact and engagements" - } - ], - "SendActivationCode": [ - { - "input": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-1:111122223333:contact-channel/akuam/8ddae2d1-12c8-4e45-b852-c8587266c400" - }, - "output": { - }, - "comments": { - }, - "description": "The following send-activation-code example sends an activation code and message to the specified contact channel.", - "id": "to-send-an-activation-code-1630436453574", - "title": "To send an activation code" - } - ], - "StartEngagement": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "Content": "Testing engagements", - "PublicContent": "Testing engagements", - "PublicSubject": "test", - "Sender": "tester", - "Subject": "test" - }, - "output": { - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/akuam/607ced0e-e8fa-4ea7-8958-a237b8803f8f" - }, - "comments": { - }, - "description": "The following start-engagement pages contact's contact channels. Sender, subject, public-subject, and public-content are all free from fields. 
Incident Manager sends the subject and content to the provided VOICE or EMAIL contact channels. Incident Manager sends the public-subject and public-content to the provided SMS contact channels. Sender is used to track who started the engagement.", - "id": "example-1-to-page-a-contacts-contact-channels-1630436634872", - "title": "Example 1: To page a contact's contact channels" - }, - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/example_escalation", - "Content": "Testing engagements", - "PublicContent": "Testing engagements", - "PublicSubject": "test", - "Sender": "tester", - "Subject": "test" - }, - "output": { - "EngagementArn": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/example_escalation/69e40ce1-8dbb-4d57-8962-5fbe7fc53356" - }, - "comments": { - }, - "description": "The following start-engagement engages contact's through an escalation plan. Each contact is paged according to their engagement plan.", - "id": "example-2-to-page-a-contact-in-the-provided-escalation-plan-1630436808480", - "title": "Example 2: To page a contact in the provided escalation plan." - } - ], - "StopEngagement": [ - { - "input": { - "EngagementId": "arn:aws:ssm-contacts:us-east-2:111122223333:engagement/example_escalation/69e40ce1-8dbb-4d57-8962-5fbe7fc53356" - }, - "output": { - }, - "comments": { - }, - "description": "The following stop-engagement example stops an engagement from paging further contacts and contact channels.", - "id": "to-stop-an-engagement-1630436882864", - "title": "To stop an engagement" - } - ], - "TagResource": [ - { - "input": { - "ResourceARN": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam", - "Tags": [ - { - "Key": "group1", - "Value": "1" - } - ] - }, - "output": { - }, - "comments": { - }, - "description": "The following tag-resource example tags a specified contact with the provided tag key value pair.", - "id": "to-tag-a-contact-1630437124572", - "title": "To tag a contact" - } - ], - "UntagResource": [ - { - "input": { - "ResourceARN": "arn:aws:ssm-contacts:us-east-1:111122223333:contact/akuam", - "TagKeys": [ - "group1" - ] - }, - "output": { - }, - "comments": { - }, - "description": "The following untag-resource example removes the group1 tag from the specified contact.", - "id": "to-remove-tags-from-a-contact-1630437251110", - "title": "To remove tags from a contact" - } - ], - "UpdateContact": [ - { - "input": { - "ContactId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact/akuam", - "Plan": { - "Stages": [ - { - "DurationInMinutes": 5, - "Targets": [ - { - "ChannelTargetInfo": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/beb25840-5ac8-4644-95cc-7a8de390fa65", - "RetryIntervalInMinutes": 1 - } - } - ] - }, - { - "DurationInMinutes": 5, - "Targets": [ - { - "ChannelTargetInfo": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/49f3c24d-5f9f-4638-ae25-3f49e04229ad", - "RetryIntervalInMinutes": 1 - } - } - ] - }, - { - "DurationInMinutes": 5, - "Targets": [ - { - "ChannelTargetInfo": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/77d4f447-f619-4954-afff-85551e369c2a", - "RetryIntervalInMinutes": 1 - } - } - ] - } - ] - } - }, - "output": { - }, - "comments": { - }, - "description": "The following update-contact example updates the engagement plan of the contact Akua to include the three types of contacts channels. 
This is done after creating contact channels for Akua.", - "id": "to-update-the-engagement-plan-of-contact-1630437436599", - "title": "To update the engagement plan of contact" - } - ], - "UpdateContactChannel": [ - { - "input": { - "ContactChannelId": "arn:aws:ssm-contacts:us-east-2:111122223333:contact-channel/akuam/49f3c24d-5f9f-4638-ae25-3f49e04229ad", - "DeliveryAddress": { - "SimpleAddress": "+15005550198" - }, - "Name": "akuas voice channel" - }, - "output": { - }, - "comments": { - }, - "description": "The following update-contact-channel example updates the name and delivery address of a contact channel.", - "id": "to-update-a-contact-channel-1630437610256", - "title": "To update a contact channel" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/paginators-1.json b/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/paginators-1.json deleted file mode 100644 index 621bde8..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/paginators-1.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "pagination": { - "ListContactChannels": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ContactChannels" - }, - "ListContacts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Contacts" - }, - "ListEngagements": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Engagements" - }, - "ListPageReceipts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Receipts" - }, - "ListPagesByContact": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Pages" - }, - "ListPagesByEngagement": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Pages" - }, - "ListPageResolutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "PageResolutions" - }, - "ListPreviewRotationShifts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RotationShifts" - }, - "ListRotationOverrides": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RotationOverrides" - }, - "ListRotationShifts": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "RotationShifts" - }, - "ListRotations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Rotations" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/service-2.json.gz deleted file mode 100644 index 56f4743..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-contacts/2021-05-03/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index d5266fa..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/service-2.json.gz deleted file mode 100644 index 88a5689..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-guiconnect/2021-05-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index dca6436..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/examples-1.json b/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/paginators-1.json deleted file mode 100644 index 662c714..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/paginators-1.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "pagination": { - "GetResourcePolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "resourcePolicies" - }, - "ListIncidentRecords": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "incidentRecordSummaries" - }, - "ListRelatedItems": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "relatedItems" - }, - "ListReplicationSets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "replicationSetArns" - }, - "ListResponsePlans": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "responsePlanSummaries" - }, - "ListTimelineEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "eventSummaries" - }, - "ListIncidentFindings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "findings" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/service-2.json.gz deleted file mode 100644 index 5f45fdb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/waiters-2.json b/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/waiters-2.json deleted file mode 100644 index 47c19b3..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-incidents/2018-05-10/waiters-2.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "version" : 2, - "waiters" : { - 
"WaitForReplicationSetActive" : { - "description" : "Wait for a replication set to become ACTIVE", - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetReplicationSet", - "acceptors" : [ { - "matcher" : "path", - "argument" : "replicationSet.status", - "state" : "success", - "expected" : "ACTIVE" - }, { - "matcher" : "path", - "argument" : "replicationSet.status", - "state" : "retry", - "expected" : "CREATING" - }, { - "matcher" : "path", - "argument" : "replicationSet.status", - "state" : "retry", - "expected" : "UPDATING" - }, { - "matcher" : "path", - "argument" : "replicationSet.status", - "state" : "failure", - "expected" : "FAILED" - } ] - }, - "WaitForReplicationSetDeleted" : { - "description" : "Wait for a replication set to be deleted", - "delay" : 30, - "maxAttempts" : 5, - "operation" : "GetReplicationSet", - "acceptors" : [ { - "matcher" : "error", - "state" : "success", - "expected" : "ResourceNotFoundException" - }, { - "matcher" : "path", - "argument" : "replicationSet.status", - "state" : "retry", - "expected" : "DELETING" - }, { - "matcher" : "path", - "argument" : "replicationSet.status", - "state" : "failure", - "expected" : "FAILED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7f33281..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/paginators-1.json deleted file mode 100644 index fed0f28..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListConfigurationManagers": { - "input_token": "StartingToken", - "output_token": "NextToken", - "limit_key": "MaxItems", - "result_key": "ConfigurationManagersList" - }, - "ListConfigurations": { - "input_token": "StartingToken", - "output_token": "NextToken", - "limit_key": "MaxItems", - "result_key": "ConfigurationsList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/service-2.json.gz deleted file mode 100644 index b2bcfba..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-quicksetup/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index bbc02e6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/paginators-1.json deleted file mode 100644 index 52391d7..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/paginators-1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "pagination": { - "ListApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Applications" - }, - "ListComponents": { - "input_token": "NextToken", - "output_token": 
"NextToken", - "limit_key": "MaxResults", - "result_key": "Components" - }, - "ListDatabases": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Databases" - }, - "ListOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Operations" - }, - "ListOperationEvents": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "OperationEvents" - }, - "ListConfigurationCheckDefinitions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConfigurationChecks" - }, - "ListConfigurationCheckOperations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ConfigurationCheckOperations" - }, - "ListSubCheckResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SubCheckResults" - }, - "ListSubCheckRuleResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "RuleResults" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/service-2.json.gz deleted file mode 100644 index 844845d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm-sap/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/endpoint-rule-set-1.json.gz deleted file mode 100644 index f097f63..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/examples-1.json b/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/paginators-1.json b/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/paginators-1.json deleted file mode 100644 index 3aafccc..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/paginators-1.json +++ /dev/null @@ -1,304 +0,0 @@ -{ - "pagination": { - "ListAssociations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Associations" - }, - "ListCommandInvocations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "CommandInvocations" - }, - "ListCommands": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Commands" - }, - "ListDocuments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DocumentIdentifiers" - }, - "DescribeInstanceInformation": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceInformationList" - }, - "DescribeActivations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ActivationList" - }, - "DescribeParameters": { - "input_token": "NextToken", - "output_token": 
"NextToken", - "limit_key": "MaxResults", - "result_key": "Parameters" - }, - "DescribeAssociationExecutions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AssociationExecutions" - }, - "DescribeAssociationExecutionTargets": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AssociationExecutionTargets" - }, - "GetInventory": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Entities" - }, - "GetParametersByPath": { - "result_key": "Parameters", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "GetParameterHistory": { - "result_key": "Parameters", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "DescribeAutomationExecutions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AutomationExecutionMetadataList" - }, - "DescribeAutomationStepExecutions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "StepExecutions" - }, - "DescribeAvailablePatches": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Patches" - }, - "DescribeEffectiveInstanceAssociations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Associations" - }, - "DescribeEffectivePatchesForPatchBaseline": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "EffectivePatches" - }, - "DescribeInstanceAssociationsStatus": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceAssociationStatusInfos" - }, - "DescribeInstancePatchStates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstancePatchStates" - }, - "DescribeInstancePatchStatesForPatchGroup": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstancePatchStates" - }, - "DescribeInstancePatches": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Patches" - }, - "DescribeInventoryDeletions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InventoryDeletions" - }, - "DescribeMaintenanceWindowExecutionTaskInvocations": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WindowExecutionTaskInvocationIdentities" - }, - "DescribeMaintenanceWindowExecutionTasks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WindowExecutionTaskIdentities" - }, - "DescribeMaintenanceWindowExecutions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WindowExecutions" - }, - "DescribeMaintenanceWindowSchedule": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScheduledWindowExecutions" - }, - "DescribeMaintenanceWindowTargets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Targets" - }, - "DescribeMaintenanceWindowTasks": { - "input_token": 
"NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tasks" - }, - "DescribeMaintenanceWindows": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WindowIdentities" - }, - "DescribeMaintenanceWindowsForTarget": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "WindowIdentities" - }, - "DescribePatchBaselines": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "BaselineIdentities" - }, - "DescribePatchGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Mappings" - }, - "DescribeSessions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Sessions" - }, - "GetInventorySchema": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Schemas" - }, - "ListAssociationVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AssociationVersions" - }, - "ListComplianceItems": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ComplianceItems" - }, - "ListComplianceSummaries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ComplianceSummaryItems" - }, - "ListDocumentVersions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "DocumentVersions" - }, - "ListResourceComplianceSummaries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ResourceComplianceSummaryItems" - }, - "ListResourceDataSync": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ResourceDataSyncItems" - }, - "DescribeOpsItems": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "OpsItemSummaries" - }, - "DescribePatchProperties": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Properties" - }, - "GetOpsSummary": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Entities" - }, - "ListOpsItemEvents": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Summaries" - }, - "ListOpsMetadata": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "OpsMetadataList" - }, - "ListOpsItemRelatedItems": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Summaries" - }, - "GetResourcePolicies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Policies" - }, - "DescribeInstanceProperties": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "InstanceProperties" - }, - "ListNodes": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Nodes" - }, - "ListNodesSummary": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Summary" - } - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/service-2.json.gz b/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/service-2.json.gz deleted file mode 100644 index 262be3e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/waiters-2.json b/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/waiters-2.json deleted file mode 100644 index 43f5237..0000000 --- a/venv/Lib/site-packages/botocore/data/ssm/2014-11-06/waiters-2.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "version": 2, - "waiters": { - "CommandExecuted": { - "delay": 5, - "operation": "GetCommandInvocation", - "maxAttempts": 20, - "acceptors": [ - { - "expected": "Pending", - "matcher": "path", - "state": "retry", - "argument": "Status" - }, - { - "expected": "InProgress", - "matcher": "path", - "state": "retry", - "argument": "Status" - }, - { - "expected": "Delayed", - "matcher": "path", - "state": "retry", - "argument": "Status" - }, - { - "expected": "Success", - "matcher": "path", - "state": "success", - "argument": "Status" - }, - { - "expected": "Cancelled", - "matcher": "path", - "state": "failure", - "argument": "Status" - }, - { - "expected": "TimedOut", - "matcher": "path", - "state": "failure", - "argument": "Status" - }, - { - "expected": "Failed", - "matcher": "path", - "state": "failure", - "argument": "Status" - }, - { - "expected": "Cancelling", - "matcher": "path", - "state": "failure", - "argument": "Status" - }, - { - "state": "retry", - "matcher": "error", - "expected": "InvocationDoesNotExist" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index c67ca73..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/examples-1.json b/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/paginators-1.json deleted file mode 100644 index d2c8b68..0000000 --- a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/paginators-1.json +++ /dev/null @@ -1,121 +0,0 @@ -{ - "pagination": { - "ListAccountAssignmentCreationStatus": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AccountAssignmentsCreationStatus" - }, - "ListAccountAssignmentDeletionStatus": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AccountAssignmentsDeletionStatus" - }, - "ListAccountAssignments": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AccountAssignments" - }, - "ListAccountsForProvisionedPermissionSet": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AccountIds" - }, - "ListInstances": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - 
"result_key": "Instances" - }, - "ListManagedPoliciesInPermissionSet": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AttachedManagedPolicies" - }, - "ListPermissionSetProvisioningStatus": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PermissionSetsProvisioningStatus" - }, - "ListPermissionSets": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PermissionSets" - }, - "ListPermissionSetsProvisionedToAccount": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "PermissionSets" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListCustomerManagedPolicyReferencesInPermissionSet": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "CustomerManagedPolicyReferences" - }, - "ListAccountAssignmentsForPrincipal": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AccountAssignments" - }, - "ListApplicationAccessScopes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Scopes" - }, - "ListApplicationAssignments": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ApplicationAssignments" - }, - "ListApplicationAssignmentsForPrincipal": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ApplicationAssignments" - }, - "ListApplicationAuthenticationMethods": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "AuthenticationMethods" - }, - "ListApplicationGrants": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Grants" - }, - "ListApplicationProviders": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "ApplicationProviders" - }, - "ListApplications": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Applications" - }, - "ListTrustedTokenIssuers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "TrustedTokenIssuers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/service-2.json.gz deleted file mode 100644 index fa8d7a3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/waiters-2.json b/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/sso-admin/2020-07-20/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index 2792772..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/examples-1.json b/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/service-2.json.gz deleted file mode 100644 index 08859f8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sso-oidc/2019-06-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sso/2019-06-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index fec0023..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/examples-1.json b/venv/Lib/site-packages/botocore/data/sso/2019-06-10/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/sso/2019-06-10/paginators-1.json deleted file mode 100644 index daaed6f..0000000 --- a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListAccountRoles": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "roleList" - }, - "ListAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accountList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sso/2019-06-10/service-2.json.gz deleted file mode 100644 index e2ee055..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sso/2019-06-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0e517d9..0000000 Binary files a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/examples-1.json b/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/paginators-1.json deleted file mode 100644 index fb8eb5e..0000000 --- a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "GetExecutionHistory": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "events" - }, - "ListActivities": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "activities" - }, - "ListExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "executions" - }, - "ListStateMachines": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "stateMachines" - }, - "ListMapRuns": { - "input_token": "nextToken", - "limit_key": "maxResults", - "output_token": "nextToken", - "result_key": "mapRuns" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/service-2.json.gz b/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/service-2.json.gz deleted file mode 100644 index 1c1b4d7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/stepfunctions/2016-11-23/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index db175d7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/examples-1.json b/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/examples-1.json deleted file mode 100644 index 7cc0d7d..0000000 --- a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/examples-1.json +++ /dev/null @@ -1,1381 +0,0 @@ -{ - "version": "1.0", - "examples": { - "ActivateGateway": [ - { - "input": { - "ActivationKey": "29AV1-3OFV9-VVIUB-NKT0I-LRO6V", - "GatewayName": "My_Gateway", - "GatewayRegion": "us-east-1", - "GatewayTimezone": "GMT-12:00", - "GatewayType": "STORED", - "MediumChangerType": "AWS-Gateway-VTL", - "TapeDriveType": "IBM-ULT3580-TD5" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Activates the gateway you previously deployed on your host.", - "id": "to-activate-the-gateway-1471281611207", - "title": "To activate the gateway" - } - ], - "AddCache": [ - { - "input": { - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:03:00.0-scsi-0:0:1:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example shows a request that activates a gateway-stored volume.", - "id": "to-add-a-cache-1471043606854", - "title": "To add a cache" - } - ], - "AddTagsToResource": [ - { - "input": { - "ResourceARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B", - "Tags": [ - { - "Key": "Dev Gatgeway Region", - "Value": "East Coast" - } - ] - }, - "output": { - "ResourceARN": 
"arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Adds one or more tags to the specified resource.", - "id": "to-add-tags-to-resource-1471283689460", - "title": "To add tags to resource" - } - ], - "AddUploadBuffer": [ - { - "input": { - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:03:00.0-scsi-0:0:1:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Configures one or more gateway local disks as upload buffer for a specified gateway.", - "id": "to-add-upload-buffer-on-local-disk-1471293902847", - "title": "To add upload buffer on local disk" - } - ], - "AddWorkingStorage": [ - { - "input": { - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:03:00.0-scsi-0:0:1:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Configures one or more gateway local disks as working storage for a gateway. (Working storage is also referred to as upload buffer.)", - "id": "to-add-storage-on-local-disk-1471294305401", - "title": "To add storage on local disk" - } - ], - "CancelArchival": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/AMZN01A2A4" - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/AMZN01A2A4" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Cancels archiving of a virtual tape to the virtual tape shelf (VTS) after the archiving process is initiated.", - "id": "to-cancel-virtual-tape-archiving-1471294865203", - "title": "To cancel virtual tape archiving" - } - ], - "CancelRetrieval": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/AMZN01A2A4" - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/AMZN01A2A4" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Cancels retrieval of a virtual tape from the virtual tape shelf (VTS) to a gateway after the retrieval process is initiated.", - "id": "to-cancel-virtual-tape-retrieval-1471295704491", - "title": "To cancel virtual tape retrieval" - } - ], - "CreateCachediSCSIVolume": [ - { - "input": { - "ClientToken": "cachedvol112233", - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "NetworkInterfaceId": "10.1.1.1", - "SnapshotId": "snap-f47b7b94", - "TargetName": "my-volume", - "VolumeSizeInBytes": 536870912000 - }, - "output": { - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a cached volume on a specified cached gateway.", - "id": "to-create-a-cached-iscsi-volume-1471296661787", - "title": "To create a cached iSCSI volume" - } - 
], - "CreateSnapshot": [ - { - "input": { - "SnapshotDescription": "My root volume snapshot as of 10/03/2017", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "output": { - "SnapshotId": "snap-78e22663", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Initiates an ad-hoc snapshot of a gateway volume.", - "id": "to-create-a-snapshot-of-a-gateway-volume-1471301469561", - "title": "To create a snapshot of a gateway volume" - } - ], - "CreateSnapshotFromVolumeRecoveryPoint": [ - { - "input": { - "SnapshotDescription": "My root volume snapshot as of 2017-06-30T10:10:10.000Z", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "output": { - "SnapshotId": "snap-78e22663", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB", - "VolumeRecoveryPointTime": "2017-06-30T10:10:10.000Z" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Initiates a snapshot of a gateway from a volume recovery point.", - "id": "to-create-a-snapshot-of-a-gateway-volume-1471301469561", - "title": "To create a snapshot of a gateway volume" - } - ], - "CreateStorediSCSIVolume": [ - { - "input": { - "DiskId": "pci-0000:03:00.0-scsi-0:0:0:0", - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "NetworkInterfaceId": "10.1.1.1", - "PreserveExistingData": true, - "SnapshotId": "snap-f47b7b94", - "TargetName": "my-volume" - }, - "output": { - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB", - "VolumeSizeInBytes": 1099511627776 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a stored volume on a specified stored gateway.", - "id": "to-create-a-stored-iscsi-volume-1471367662813", - "title": "To create a stored iSCSI volume" - } - ], - "CreateTapeWithBarcode": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "TapeBarcode": "TEST12345", - "TapeSizeInBytes": 107374182400 - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST12345" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates a virtual tape by using your own barcode.", - "id": "to-create-a-virtual-tape-using-a-barcode-1471371842452", - "title": "To create a virtual tape using a barcode" - } - ], - "CreateTapes": [ - { - "input": { - "ClientToken": "77777", - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "NumTapesToCreate": 3, - "TapeBarcodePrefix": "TEST", - "TapeSizeInBytes": 107374182400 - }, - "output": { - "TapeARNs": [ - "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST38A29D", - "arn:aws:storagegateway:us-east-1:204469490176:tape/TEST3AA29F", - "arn:aws:storagegateway:us-east-1:204469490176:tape/TEST3BA29E" - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Creates one or more virtual tapes.", - "id": "to-create-a-virtual-tape-1471372061659", - "title": "To create a virtual tape" - } - ], - "DeleteBandwidthRateLimit": [ - { - "input": { - "BandwidthType": "All", - "GatewayARN": 
"arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the bandwidth rate limits of a gateway; either the upload or download limit, or both.", - "id": "to-delete-bandwidth-rate-limits-of-gateway-1471373225520", - "title": "To delete bandwidth rate limits of gateway" - } - ], - "DeleteChapCredentials": [ - { - "input": { - "InitiatorName": "iqn.1991-05.com.microsoft:computername.domain.example.com", - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - }, - "output": { - "InitiatorName": "iqn.1991-05.com.microsoft:computername.domain.example.com", - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes Challenge-Handshake Authentication Protocol (CHAP) credentials for a specified iSCSI target and initiator pair.", - "id": "to-delete-chap-credentials-1471375025612", - "title": "To delete CHAP credentials" - } - ], - "DeleteGateway": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation deletes the gateway, but not the gateway's VM from the host computer.", - "id": "to-delete-a-gatgeway-1471381697333", - "title": "To delete a gatgeway" - } - ], - "DeleteSnapshotSchedule": [ - { - "input": { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "output": { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This action enables you to delete a snapshot schedule for a volume.", - "id": "to-delete-a-snapshot-of-a-volume-1471382234377", - "title": "To delete a snapshot of a volume" - } - ], - "DeleteTape": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:204469490176:gateway/sgw-12A3456B", - "TapeARN": "arn:aws:storagegateway:us-east-1:204469490176:tape/TEST05A2A0" - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:204469490176:tape/TEST05A2A0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example deletes the specified virtual tape.", - "id": "to-delete-a-virtual-tape-1471382444157", - "title": "To delete a virtual tape" - } - ], - "DeleteTapeArchive": [ - { - "input": { - "TapeARN": "arn:aws:storagegateway:us-east-1:204469490176:tape/TEST05A2A0" - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:204469490176:tape/TEST05A2A0" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the specified virtual tape from the virtual tape shelf (VTS).", - "id": "to-delete-a-virtual-tape-from-the-shelf-vts-1471383964329", - "title": "To delete a virtual tape from the shelf (VTS)" - } - ], - "DeleteVolume": [ - { - "input": { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "output": { - "VolumeARN": 
"arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Deletes the specified gateway volume that you previously created using the CreateCachediSCSIVolume or CreateStorediSCSIVolume API.", - "id": "to-delete-a-gateway-volume-1471384418416", - "title": "To delete a gateway volume" - } - ], - "DescribeBandwidthRateLimit": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "AverageDownloadRateLimitInBitsPerSec": 204800, - "AverageUploadRateLimitInBitsPerSec": 102400, - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a value for a bandwidth rate limit if set. If not set, then only the gateway ARN is returned.", - "id": "to-describe-the-bandwidth-rate-limits-of-a-gateway-1471384826404", - "title": "To describe the bandwidth rate limits of a gateway" - } - ], - "DescribeCache": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "CacheAllocatedInBytes": 2199023255552, - "CacheDirtyPercentage": 0.07, - "CacheHitPercentage": 99.68, - "CacheMissPercentage": 0.32, - "CacheUsedPercentage": 0.07, - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:04:00.0-scsi-0:1:0:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the cache of a gateway.", - "id": "to-describe-cache-information-1471385756036", - "title": "To describe cache information" - } - ], - "DescribeCachediSCSIVolumes": [ - { - "input": { - "VolumeARNs": [ - "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - ] - }, - "output": { - "CachediSCSIVolumes": [ - { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB", - "VolumeId": "vol-1122AABB", - "VolumeSizeInBytes": 1099511627776, - "VolumeStatus": "AVAILABLE", - "VolumeType": "CACHED iSCSI", - "VolumeiSCSIAttributes": { - "ChapEnabled": true, - "LunNumber": 1, - "NetworkInterfaceId": "10.243.43.207", - "NetworkInterfacePort": 3260, - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a description of the gateway cached iSCSI volumes specified in the request.", - "id": "to-describe-gateway-cached-iscsi-volumes-1471458094649", - "title": "To describe gateway cached iSCSI volumes" - } - ], - "DescribeChapCredentials": [ - { - "input": { - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - }, - "output": { - "ChapCredentials": [ - { - "InitiatorName": "iqn.1991-05.com.microsoft:computername.domain.example.com", - "SecretToAuthenticateInitiator": "111111111111", - "SecretToAuthenticateTarget": "222222222222", - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns an array of Challenge-Handshake Authentication Protocol (CHAP) credentials information for a specified iSCSI target, one 
for each target-initiator pair.", - "id": "to-describe-chap-credetnitals-for-an-iscsi-1471467462967", - "title": "To describe CHAP credetnitals for an iSCSI" - } - ], - "DescribeGatewayInformation": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "GatewayId": "sgw-AABB1122", - "GatewayName": "My_Gateway", - "GatewayNetworkInterfaces": [ - { - "Ipv4Address": "10.35.69.216" - } - ], - "GatewayState": "STATE_RUNNING", - "GatewayTimezone": "GMT-8:00", - "GatewayType": "STORED", - "LastSoftwareUpdate": "2016-01-02T16:00:00", - "NextUpdateAvailabilityDate": "2017-01-02T16:00:00" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns metadata about a gateway such as its name, network interfaces, configured time zone, and the state (whether the gateway is running or not).", - "id": "to-describe-metadata-about-the-gateway-1471467849079", - "title": "To describe metadata about the gateway" - } - ], - "DescribeMaintenanceStartTime": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "DayOfWeek": 2, - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "HourOfDay": 15, - "MinuteOfHour": 35, - "Timezone": "GMT+7:00" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns your gateway's weekly maintenance start time including the day and time of the week.", - "id": "to-describe-gateways-maintenance-start-time-1471470727387", - "title": "To describe gateway's maintenance start time" - } - ], - "DescribeSnapshotSchedule": [ - { - "input": { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "output": { - "Description": "sgw-AABB1122:vol-AABB1122:Schedule", - "RecurrenceInHours": 24, - "StartAt": 6, - "Timezone": "GMT+7:00", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Describes the snapshot schedule for the specified gateway volume including intervals at which snapshots are automatically initiated.", - "id": "to-describe-snapshot-schedule-for-gateway-volume-1471471139538", - "title": "To describe snapshot schedule for gateway volume" - } - ], - "DescribeStorediSCSIVolumes": [ - { - "input": { - "VolumeARNs": [ - "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - ] - }, - "output": { - "StorediSCSIVolumes": [ - { - "PreservedExistingData": false, - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB", - "VolumeDiskId": "pci-0000:03:00.0-scsi-0:0:0:0", - "VolumeId": "vol-1122AABB", - "VolumeProgress": 23.7, - "VolumeSizeInBytes": 1099511627776, - "VolumeStatus": "BOOTSTRAPPING", - "VolumeiSCSIAttributes": { - "ChapEnabled": true, - "NetworkInterfaceId": "10.243.43.207", - "NetworkInterfacePort": 3260, - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - } - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns the description of the gateway volumes specified in the request belonging to the same gateway.", - "id": "to-describe-the-volumes-of-a-gateway-1471472640660", - "title": "To 
describe the volumes of a gateway" - } - ], - "DescribeTapeArchives": [ - { - "input": { - "Limit": 123, - "Marker": "1", - "TapeARNs": [ - "arn:aws:storagegateway:us-east-1:999999999999:tape/AM08A1AD", - "arn:aws:storagegateway:us-east-1:999999999999:tape/AMZN01A2A4" - ] - }, - "output": { - "Marker": "1", - "TapeArchives": [ - { - "CompletionTime": "2016-12-16T13:50Z", - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999:tape/AM08A1AD", - "TapeBarcode": "AM08A1AD", - "TapeSizeInBytes": 107374182400, - "TapeStatus": "ARCHIVED" - }, - { - "CompletionTime": "2016-12-16T13:59Z", - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999:tape/AMZN01A2A4", - "TapeBarcode": "AMZN01A2A4", - "TapeSizeInBytes": 429496729600, - "TapeStatus": "ARCHIVED" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a description of specified virtual tapes in the virtual tape shelf (VTS).", - "id": "to-describe-virtual-tapes-in-the-vts-1471473188198", - "title": "To describe virtual tapes in the VTS" - } - ], - "DescribeTapeRecoveryPoints": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "Limit": 1, - "Marker": "1" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "Marker": "1", - "TapeRecoveryPointInfos": [ - { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999:tape/AMZN01A2A4", - "TapeRecoveryPointTime": "2016-12-16T13:50Z", - "TapeSizeInBytes": 1471550497, - "TapeStatus": "AVAILABLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a list of virtual tape recovery points that are available for the specified gateway-VTL.", - "id": "to-describe-virtual-tape-recovery-points-1471542042026", - "title": "To describe virtual tape recovery points" - } - ], - "DescribeTapes": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "Limit": 2, - "Marker": "1", - "TapeARNs": [ - "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST04A2A1", - "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST05A2A0" - ] - }, - "output": { - "Marker": "1", - "Tapes": [ - { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST04A2A1", - "TapeBarcode": "TEST04A2A1", - "TapeSizeInBytes": 107374182400, - "TapeStatus": "AVAILABLE" - }, - { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST05A2A0", - "TapeBarcode": "TEST05A2A0", - "TapeSizeInBytes": 107374182400, - "TapeStatus": "AVAILABLE" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a description of the specified Amazon Resource Name (ARN) of virtual tapes. 
If a TapeARN is not specified, returns a description of all virtual tapes.", - "id": "to-describe-virtual-tapes-associated-with-gateway-1471629287727", - "title": "To describe virtual tape(s) associated with gateway" - } - ], - "DescribeUploadBuffer": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:04:00.0-scsi-0:1:0:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "UploadBufferAllocatedInBytes": 0, - "UploadBufferUsedInBytes": 161061273600 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the upload buffer of a gateway including disk IDs and the amount of upload buffer space allocated/used.", - "id": "to-describe-upload-buffer-of-gateway-1471631099003", - "title": "To describe upload buffer of gateway" - }, - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:04:00.0-scsi-0:1:0:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "UploadBufferAllocatedInBytes": 161061273600, - "UploadBufferUsedInBytes": 0 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns information about the upload buffer of a gateway including disk IDs and the amount of upload buffer space allocated and used.", - "id": "to-describe-upload-buffer-of-a-gateway--1471904566370", - "title": "To describe upload buffer of a gateway" - } - ], - "DescribeVTLDevices": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "Limit": 123, - "Marker": "1", - "VTLDeviceARNs": [ - - ] - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "Marker": "1", - "VTLDevices": [ - { - "DeviceiSCSIAttributes": { - "ChapEnabled": false, - "NetworkInterfaceId": "10.243.43.207", - "NetworkInterfacePort": 3260, - "TargetARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:sgw-1fad4876-mediachanger" - }, - "VTLDeviceARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/device/AMZN_SGW-1FAD4876_MEDIACHANGER_00001", - "VTLDeviceProductIdentifier": "L700", - "VTLDeviceType": "Medium Changer", - "VTLDeviceVendor": "STK" - }, - { - "DeviceiSCSIAttributes": { - "ChapEnabled": false, - "NetworkInterfaceId": "10.243.43.209", - "NetworkInterfacePort": 3260, - "TargetARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:sgw-1fad4876-tapedrive-01" - }, - "VTLDeviceARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/device/AMZN_SGW-1FAD4876_TAPEDRIVE_00001", - "VTLDeviceProductIdentifier": "ULT3580-TD5", - "VTLDeviceType": "Tape Drive", - "VTLDeviceVendor": "IBM" - }, - { - "DeviceiSCSIAttributes": { - "ChapEnabled": false, - "NetworkInterfaceId": "10.243.43.209", - "NetworkInterfacePort": 3260, - "TargetARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:sgw-1fad4876-tapedrive-02" - }, - "VTLDeviceARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/device/AMZN_SGW-1FAD4876_TAPEDRIVE_00002", - "VTLDeviceProductIdentifier": "ULT3580-TD5", - "VTLDeviceType": "Tape Drive", - "VTLDeviceVendor": "IBM" - } - ] 
- }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Returns a description of virtual tape library (VTL) devices for the specified gateway.", - "id": "to-describe-virtual-tape-library-vtl-devices-of-a-single-gateway-1471906071410", - "title": "To describe virtual tape library (VTL) devices of a single gateway" - } - ], - "DescribeWorkingStorage": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "DiskIds": [ - "pci-0000:03:00.0-scsi-0:0:0:0", - "pci-0000:03:00.0-scsi-0:0:1:0" - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "WorkingStorageAllocatedInBytes": 2199023255552, - "WorkingStorageUsedInBytes": 789207040 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation is supported only for the gateway-stored volume architecture. This operation is deprecated in cached-volumes API version (20120630). Use DescribeUploadBuffer instead.", - "id": "to-describe-the-working-storage-of-a-gateway-depreciated-1472070842332", - "title": "To describe the working storage of a gateway [Depreciated]" - } - ], - "DisableGateway": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Disables a gateway when the gateway is no longer functioning. Use this operation for a gateway-VTL that is not reachable or not functioning.", - "id": "to-disable-a-gateway-when-it-is-no-longer-functioning-1472076046936", - "title": "To disable a gateway when it is no longer functioning" - } - ], - "ListGateways": [ - { - "input": { - "Limit": 2, - "Marker": "1" - }, - "output": { - "Gateways": [ - { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-23A4567C" - } - ], - "Marker": "1" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists gateways owned by an AWS account in a specified region as requested. 
Results are sorted by gateway ARN up to a maximum of 100 gateways.", - "id": "to-lists-region-specific-gateways-per-aws-account-1472077860657", - "title": "To lists region specific gateways per AWS account" - } - ], - "ListLocalDisks": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "Disks": [ - { - "DiskAllocationType": "CACHE_STORAGE", - "DiskId": "pci-0000:03:00.0-scsi-0:0:0:0", - "DiskNode": "SCSI(0:0)", - "DiskPath": "/dev/sda", - "DiskSizeInBytes": 1099511627776, - "DiskStatus": "missing" - }, - { - "DiskAllocationResource": "", - "DiskAllocationType": "UPLOAD_BUFFER", - "DiskId": "pci-0000:03:00.0-scsi-0:0:1:0", - "DiskNode": "SCSI(0:1)", - "DiskPath": "/dev/sdb", - "DiskSizeInBytes": 1099511627776, - "DiskStatus": "present" - } - ], - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The request returns a list of all disks, specifying which are configured as working storage, cache storage, or stored volume or not configured at all.", - "id": "to-list-the-gateways-local-disks-1472079564618", - "title": "To list the gateway's local disks" - } - ], - "ListTagsForResource": [ - { - "input": { - "Limit": 1, - "Marker": "1", - "ResourceARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B" - }, - "output": { - "Marker": "1", - "ResourceARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B", - "Tags": [ - { - "Key": "Dev Gatgeway Region", - "Value": "East Coast" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the tags that have been added to the specified resource.", - "id": "to-list-tags-that-have-been-added-to-a-resource-1472080268972", - "title": "To list tags that have been added to a resource" - } - ], - "ListVolumeRecoveryPoints": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "VolumeRecoveryPointInfos": [ - { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB", - "VolumeRecoveryPointTime": "2012-09-04T21:08:44.627Z", - "VolumeSizeInBytes": 536870912000 - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the recovery points for a specified gateway in which all data of the volume is consistent and can be used to create a snapshot.", - "id": "to-list-recovery-points-for-a-gateway-1472143015088", - "title": "To list recovery points for a gateway" - } - ], - "ListVolumes": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "Limit": 2, - "Marker": "1" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "Marker": "1", - "VolumeInfos": [ - { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "GatewayId": "sgw-12A3456B", - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB", - "VolumeId": "vol-1122AABB", - "VolumeSizeInBytes": 107374182400, - "VolumeType": "STORED" - }, - { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-13B4567C", - "GatewayId": "sgw-gw-13B4567C", - "VolumeARN": 
"arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-13B4567C/volume/vol-3344CCDD", - "VolumeId": "vol-1122AABB", - "VolumeSizeInBytes": 107374182400, - "VolumeType": "STORED" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the iSCSI stored volumes of a gateway. Results are sorted by volume ARN up to a maximum of 100 volumes.", - "id": "to-list-the-iscsi-stored-volumes-of-a-gateway-1472145723653", - "title": "To list the iSCSI stored volumes of a gateway" - } - ], - "RemoveTagsFromResource": [ - { - "input": { - "ResourceARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B", - "TagKeys": [ - "Dev Gatgeway Region", - "East Coast" - ] - }, - "output": { - "ResourceARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-11A2222B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Lists the iSCSI stored volumes of a gateway. Removes one or more tags from the specified resource.", - "id": "to-remove-tags-from-a-resource-1472147210553", - "title": "To remove tags from a resource" - } - ], - "ResetCache": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-13B4567C" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-13B4567C" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Resets all cache disks that have encountered a error and makes the disks available for reconfiguration as cache storage.", - "id": "to-reset-cache-disks-in-error-status-1472148909807", - "title": "To reset cache disks in error status" - } - ], - "RetrieveTapeArchive": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST0AA2AF" - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST0AA2AF" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Retrieves an archived virtual tape from the virtual tape shelf (VTS) to a gateway-VTL. Virtual tapes archived in the VTS are not associated with any gateway.", - "id": "to-retrieve-an-archived-tape-from-the-vts-1472149812358", - "title": "To retrieve an archived tape from the VTS" - } - ], - "RetrieveTapeRecoveryPoint": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST0AA2AF" - }, - "output": { - "TapeARN": "arn:aws:storagegateway:us-east-1:999999999999:tape/TEST0AA2AF" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Retrieves the recovery point for the specified virtual tape.", - "id": "to-retrieve-the-recovery-point-of-a-virtual-tape-1472150014805", - "title": "To retrieve the recovery point of a virtual tape" - } - ], - "SetLocalConsolePassword": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B", - "LocalConsolePassword": "PassWordMustBeAtLeast6Chars." 
- }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Sets the password for your VM local console.", - "id": "to-set-a-password-for-your-vm-1472150202632", - "title": "To set a password for your VM" - } - ], - "ShutdownGateway": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This operation shuts down the gateway service component running in the storage gateway's virtual machine (VM) and not the VM.", - "id": "to-shut-down-a-gateway-service-1472150508835", - "title": "To shut down a gateway service" - } - ], - "StartGateway": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Starts a gateway service that was previously shut down.", - "id": "to-start-a-gateway-service-1472150722315", - "title": "To start a gateway service" - } - ], - "UpdateBandwidthRateLimit": [ - { - "input": { - "AverageDownloadRateLimitInBitsPerSec": 102400, - "AverageUploadRateLimitInBitsPerSec": 51200, - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates the bandwidth rate limits of a gateway. Both the upload and download bandwidth rate limit can be set, or either one of the two. 
If a new limit is not set, the existing rate limit remains.", - "id": "to-update-the-bandwidth-rate-limits-of-a-gateway-1472151016202", - "title": "To update the bandwidth rate limits of a gateway" - } - ], - "UpdateChapCredentials": [ - { - "input": { - "InitiatorName": "iqn.1991-05.com.microsoft:computername.domain.example.com", - "SecretToAuthenticateInitiator": "111111111111", - "SecretToAuthenticateTarget": "222222222222", - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - }, - "output": { - "InitiatorName": "iqn.1991-05.com.microsoft:computername.domain.example.com", - "TargetARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/target/iqn.1997-05.com.amazon:myvolume" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates the Challenge-Handshake Authentication Protocol (CHAP) credentials for a specified iSCSI target.", - "id": "to-update-chap-credentials-for-an-iscsi-target-1472151325795", - "title": "To update CHAP credentials for an iSCSI target" - } - ], - "UpdateGatewayInformation": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "GatewayName": "MyGateway2", - "GatewayTimezone": "GMT-12:00" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "GatewayName": "" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates a gateway's metadata, which includes the gateway's name and time zone.", - "id": "to-update-a-gateways-metadata-1472151688693", - "title": "To update a gateway's metadata" - } - ], - "UpdateGatewaySoftwareNow": [ - { - "input": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates the gateway virtual machine (VM) software. The request immediately triggers the software update.", - "id": "to-update-a-gateways-vm-software-1472152020929", - "title": "To update a gateway's VM software" - } - ], - "UpdateMaintenanceStartTime": [ - { - "input": { - "DayOfWeek": 2, - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B", - "HourOfDay": 0, - "MinuteOfHour": 30 - }, - "output": { - "GatewayARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates a gateway's weekly maintenance start time information, including day and time of the week. 
The maintenance time is in your gateway's time zone.", - "id": "to-update-a-gateways-maintenance-start-time-1472152552031", - "title": "To update a gateway's maintenance start time" - } - ], - "UpdateSnapshotSchedule": [ - { - "input": { - "Description": "Hourly snapshot", - "RecurrenceInHours": 1, - "StartAt": 0, - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "output": { - "VolumeARN": "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates a snapshot schedule configured for a gateway volume.", - "id": "to-update-a-volume-snapshot-schedule-1472152757068", - "title": "To update a volume snapshot schedule" - } - ], - "UpdateVTLDeviceType": [ - { - "input": { - "DeviceType": "Medium Changer", - "VTLDeviceARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/device/AMZN_SGW-1FAD4876_MEDIACHANGER_00001" - }, - "output": { - "VTLDeviceARN": "arn:aws:storagegateway:us-east-1:999999999999:gateway/sgw-12A3456B/device/AMZN_SGW-1FAD4876_MEDIACHANGER_00001" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "Updates the type of medium changer in a gateway-VTL after a gateway-VTL is activated.", - "id": "to-update-a-vtl-device-type-1472153012967", - "title": "To update a VTL device type" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/paginators-1.json deleted file mode 100644 index e43b890..0000000 --- a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/paginators-1.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "pagination": { - "DescribeTapeArchives": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "TapeArchives" - }, - "DescribeTapeRecoveryPoints": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "TapeRecoveryPointInfos" - }, - "DescribeTapes": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Tapes" - }, - "DescribeVTLDevices": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "VTLDevices" - }, - "ListGateways": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Gateways" - }, - "ListVolumes": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "VolumeInfos" - }, - "ListTapes": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "TapeInfos" - }, - "ListFileShares": { - "input_token": "Marker", - "limit_key": "Limit", - "non_aggregate_keys": [ - "Marker" - ], - "output_token": "NextMarker", - "result_key": "FileShareInfoList" - }, - "ListTagsForResource": { - "input_token": "Marker", - "limit_key": "Limit", - "non_aggregate_keys": [ - "ResourceARN" - ], - "output_token": "Marker", - "result_key": "Tags" - }, - "ListTapePools": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "PoolInfos" - }, - "ListFileSystemAssociations": { - "input_token": "Marker", - "limit_key": "Limit", - "non_aggregate_keys": [ - "Marker" - ], - "output_token": "NextMarker", - "result_key": "FileSystemAssociationSummaryList" - }, - "ListCacheReports": { - "input_token": "Marker", - 
"output_token": "Marker", - "result_key": "CacheReportList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/service-2.json.gz deleted file mode 100644 index 80100bc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/storagegateway/2013-06-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/sts/2011-06-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7c2302e..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/examples-1.json b/venv/Lib/site-packages/botocore/data/sts/2011-06-15/examples-1.json deleted file mode 100644 index 7396aef..0000000 --- a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/examples-1.json +++ /dev/null @@ -1,271 +0,0 @@ -{ - "version": "1.0", - "examples": { - "AssumeRole": [ - { - "input": { - "ExternalId": "123ABC", - "Policy": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"Stmt1\",\"Effect\":\"Allow\",\"Action\":\"s3:ListAllMyBuckets\",\"Resource\":\"*\"}]}", - "RoleArn": "arn:aws:iam::123456789012:role/demo", - "RoleSessionName": "testAssumeRoleSession", - "Tags": [ - { - "Key": "Project", - "Value": "Unicorn" - }, - { - "Key": "Team", - "Value": "Automation" - }, - { - "Key": "Cost-Center", - "Value": "12345" - } - ], - "TransitiveTagKeys": [ - "Project", - "Cost-Center" - ] - }, - "output": { - "AssumedRoleUser": { - "Arn": "arn:aws:sts::123456789012:assumed-role/demo/Bob", - "AssumedRoleId": "ARO123EXAMPLE123:Bob" - }, - "Credentials": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", - "Expiration": "2011-07-15T23:28:33.359Z", - "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", - "SessionToken": "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" - }, - "PackedPolicySize": 8 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "to-assume-a-role-1480532402212", - "title": "To assume a role" - } - ], - "AssumeRoleWithSAML": [ - { - "input": { - "DurationSeconds": 3600, - "PrincipalArn": "arn:aws:iam::123456789012:saml-provider/SAML-test", - "RoleArn": "arn:aws:iam::123456789012:role/TestSaml", - "SAMLAssertion": "VERYLONGENCODEDASSERTIONEXAMPLExzYW1sOkF1ZGllbmNlPmJsYW5rPC9zYW1sOkF1ZGllbmNlPjwvc2FtbDpBdWRpZW5jZVJlc3RyaWN0aW9uPjwvc2FtbDpDb25kaXRpb25zPjxzYW1sOlN1YmplY3Q+PHNhbWw6TmFtZUlEIEZvcm1hdD0idXJuOm9hc2lzOm5hbWVzOnRjOlNBTUw6Mi4wOm5hbWVpZC1mb3JtYXQ6dHJhbnNpZW50Ij5TYW1sRXhhbXBsZTwvc2FtbDpOYW1lSUQ+PHNhbWw6U3ViamVjdENvbmZpcm1hdGlvbiBNZXRob2Q9InVybjpvYXNpczpuYW1lczp0YzpTQU1MOjIuMDpjbTpiZWFyZXIiPjxzYW1sOlN1YmplY3RDb25maXJtYXRpb25EYXRhIE5vdE9uT3JBZnRlcj0iMjAxOS0xMS0wMVQyMDoyNTowNS4xNDVaIiBSZWNpcGllbnQ9Imh0dHBzOi8vc2lnbmluLmF3cy5hbWF6b24uY29tL3NhbWwiLz48L3NhbWw6U3ViamVjdENvbmZpcm1hdGlvbj48L3NhbWw6U3ViamVjdD48c2FtbDpBdXRoblN0YXRlbWVudCBBdXRoPD94bWwgdmpSZXNwb25zZT4=" - }, - "output": { - "AssumedRoleUser": { - "Arn": "arn:aws:sts::123456789012:assumed-role/TestSaml", - "AssumedRoleId": "ARO456EXAMPLE789:TestSaml" - }, - "Audience": 
"https://signin.aws.amazon.com/saml", - "Credentials": { - "AccessKeyId": "ASIAV3ZUEFP6EXAMPLE", - "Expiration": "2019-11-01T20:26:47Z", - "SecretAccessKey": "8P+SQvWIuLnKhh8d++jpw0nNmQRBZvNEXAMPLEKEY", - "SessionToken": "IQoJb3JpZ2luX2VjEOz////////////////////wEXAMPLEtMSJHMEUCIDoKK3JH9uGQE1z0sINr5M4jk+Na8KHDcCYRVjJCZEvOAiEA3OvJGtw1EcViOleS2vhs8VdCKFJQWPQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" - }, - "Issuer": "https://integ.example.com/idp/shibboleth", - "NameQualifier": "SbdGOnUkh1i4+EXAMPLExL/jEvs=", - "PackedPolicySize": 6, - "Subject": "SamlExample", - "SubjectType": "transient" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "to-assume-role-with-saml-14882749597814", - "title": "To assume a role using a SAML assertion" - } - ], - "AssumeRoleWithWebIdentity": [ - { - "input": { - "DurationSeconds": 3600, - "Policy": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"Stmt1\",\"Effect\":\"Allow\",\"Action\":\"s3:ListAllMyBuckets\",\"Resource\":\"*\"}]}", - "ProviderId": "www.amazon.com", - "RoleArn": "arn:aws:iam::123456789012:role/FederatedWebIdentityRole", - "RoleSessionName": "app1", - "WebIdentityToken": "Atza%7CIQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ" - }, - "output": { - "AssumedRoleUser": { - "Arn": "arn:aws:sts::123456789012:assumed-role/FederatedWebIdentityRole/app1", - "AssumedRoleId": "AROACLKWSDQRAOEXAMPLE:app1" - }, - "Audience": "client.5498841531868486423.1548@apps.example.com", - "Credentials": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", - "Expiration": "2014-10-24T23:00:23Z", - "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", - "SessionToken": "AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IEXAMPLE" - }, - "PackedPolicySize": 123, - "Provider": "www.amazon.com", - "SubjectFromWebIdentityToken": "amzn1.account.AF6RHO7KZU5XRVQJGXK6HEXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "to-assume-a-role-as-an-openid-connect-federated-user-1480533445696", - "title": "To assume a role as an OpenID Connect-federated user" - } - ], - "DecodeAuthorizationMessage": [ - { - "input": { - "EncodedMessage": "" - }, - "output": { - "DecodedMessage": "{\"allowed\": \"false\",\"explicitDeny\": \"false\",\"matchedStatements\": \"\",\"failures\": \"\",\"context\": {\"principal\": {\"id\": \"AIDACKCEVSQ6C2EXAMPLE\",\"name\": \"Bob\",\"arn\": \"arn:aws:iam::123456789012:user/Bob\"},\"action\": \"ec2:StopInstances\",\"resource\": \"arn:aws:ec2:us-east-1:123456789012:instance/i-dd01c9bd\",\"conditions\": [{\"item\": {\"key\": \"ec2:Tenancy\",\"values\": [\"default\"]},{\"item\": {\"key\": \"ec2:ResourceTag/elasticbeanstalk:environment-name\",\"values\": [\"Default-Environment\"]}},(Additional items ...)]}}" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "to-decode-information-about-an-authorization-status-of-a-request-1480533854499", - "title": "To decode information about an authorization status of a request" - } - ], - "GetCallerIdentity": [ - { - "input": { - }, - "output": { - 
"Account": "123456789012", - "Arn": "arn:aws:iam::123456789012:user/Alice", - "UserId": "AKIAI44QH8DHBEXAMPLE" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows a request and response made with the credentials for a user named Alice in the AWS account 123456789012.", - "id": "to-get-details-about-a-calling-iam-user-1480540050376", - "title": "To get details about a calling IAM user" - }, - { - "input": { - }, - "output": { - "Account": "123456789012", - "Arn": "arn:aws:sts::123456789012:assumed-role/my-role-name/my-role-session-name", - "UserId": "AKIAI44QH8DHBEXAMPLE:my-role-session-name" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows a request and response made with temporary credentials created by AssumeRole. The name of the assumed role is my-role-name, and the RoleSessionName is set to my-role-session-name.", - "id": "to-get-details-about-a-calling-user-federated-with-assumerole-1480540158545", - "title": "To get details about a calling user federated with AssumeRole" - }, - { - "input": { - }, - "output": { - "Account": "123456789012", - "Arn": "arn:aws:sts::123456789012:federated-user/my-federated-user-name", - "UserId": "123456789012:my-federated-user-name" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "This example shows a request and response made with temporary credentials created by using GetFederationToken. The Name parameter is set to my-federated-user-name.", - "id": "to-get-details-about-a-calling-user-federated-with-getfederationtoken-1480540231316", - "title": "To get details about a calling user federated with GetFederationToken" - } - ], - "GetFederationToken": [ - { - "input": { - "DurationSeconds": 3600, - "Name": "testFedUserSession", - "Policy": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"Stmt1\",\"Effect\":\"Allow\",\"Action\":\"s3:ListAllMyBuckets\",\"Resource\":\"*\"}]}", - "Tags": [ - { - "Key": "Project", - "Value": "Pegasus" - }, - { - "Key": "Cost-Center", - "Value": "98765" - } - ] - }, - "output": { - "Credentials": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", - "Expiration": "2011-07-15T23:28:33.359Z", - "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", - "SessionToken": "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" - }, - "FederatedUser": { - "Arn": "arn:aws:sts::123456789012:federated-user/Bob", - "FederatedUserId": "123456789012:Bob" - }, - "PackedPolicySize": 8 - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "to-get-temporary-credentials-for-a-role-by-using-getfederationtoken-1480540749900", - "title": "To get temporary credentials for a role by using GetFederationToken" - } - ], - "GetSessionToken": [ - { - "input": { - "DurationSeconds": 3600, - "SerialNumber": "YourMFASerialNumber", - "TokenCode": "123456" - }, - "output": { - "Credentials": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", - "Expiration": "2011-07-11T19:55:29.611Z", - "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", - "SessionToken": 
"AQoEXAMPLEH4aoAH0gNCAPyJxz4BlCFFxWNE1OPTgk5TthT+FvwqnKwRcOIfrRh3c/LTo6UDdyJwOOvEVPvLXCrrrUtdnniCEXAMPLE/IvU1dYUg2RVAJBanLiHb4IgRmpRV3zrkuWJOgQs8IZZaIv2BXIa2R4OlgkBN9bkUDNCJiBeb/AXlzBBko7b15fjrBs2+cTQtpZ3CYWFXG8C5zqx37wnOE49mRl/+OtkIKGO7fAE" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "", - "id": "to-get-temporary-credentials-for-an-iam-user-or-an-aws-account-1480540814038", - "title": "To get temporary credentials for an IAM user or an AWS account" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/sts/2011-06-15/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/sts/2011-06-15/service-2.json.gz deleted file mode 100644 index 1caed60..0000000 Binary files a/venv/Lib/site-packages/botocore/data/sts/2011-06-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 77f6816..0000000 Binary files a/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/paginators-1.json deleted file mode 100644 index cd55b30..0000000 --- a/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListDataIntegrationFlows": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flows" - }, - "ListDataLakeDatasets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "datasets" - }, - "ListInstances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "instances" - }, - "ListDataIntegrationEvents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "events" - }, - "ListDataIntegrationFlowExecutions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "flowExecutions" - }, - "ListDataLakeNamespaces": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "namespaces" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/service-2.json.gz deleted file mode 100644 index 5195032..0000000 Binary files a/venv/Lib/site-packages/botocore/data/supplychain/2024-01-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/endpoint-rule-set-1.json.gz deleted file mode 100644 index dde1cf2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/endpoint-rule-set-1.json.gz and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/paginators-1.json b/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/service-2.json.gz b/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/service-2.json.gz deleted file mode 100644 index 7eb5b12..0000000 Binary files a/venv/Lib/site-packages/botocore/data/support-app/2021-08-20/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/support/2013-04-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/support/2013-04-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index a074465..0000000 Binary files a/venv/Lib/site-packages/botocore/data/support/2013-04-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/support/2013-04-15/examples-1.json b/venv/Lib/site-packages/botocore/data/support/2013-04-15/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/support/2013-04-15/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/support/2013-04-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/support/2013-04-15/paginators-1.json deleted file mode 100644 index 11bdb62..0000000 --- a/venv/Lib/site-packages/botocore/data/support/2013-04-15/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "DescribeCases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "cases" - }, - "DescribeCommunications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "communications" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/support/2013-04-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/support/2013-04-15/service-2.json.gz deleted file mode 100644 index 1d4f6fd..0000000 Binary files a/venv/Lib/site-packages/botocore/data/support/2013-04-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/swf/2012-01-25/endpoint-rule-set-1.json.gz deleted file mode 100644 index 057110f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/examples-1.json b/venv/Lib/site-packages/botocore/data/swf/2012-01-25/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/paginators-1.json b/venv/Lib/site-packages/botocore/data/swf/2012-01-25/paginators-1.json deleted file mode 100644 index e92bfeb..0000000 --- a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/paginators-1.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "pagination": { - "GetWorkflowExecutionHistory": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "events" - }, - 
"ListActivityTypes": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "typeInfos" - }, - "ListClosedWorkflowExecutions": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "executionInfos" - }, - "ListDomains": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "domainInfos" - }, - "ListOpenWorkflowExecutions": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "executionInfos" - }, - "ListWorkflowTypes": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "typeInfos" - }, - "PollForDecisionTask": { - "limit_key": "maximumPageSize", - "input_token": "nextPageToken", - "output_token": "nextPageToken", - "result_key": "events", - "non_aggregate_keys": [ - "taskToken", - "startedEventId", - "workflowExecution", - "workflowType", - "previousStartedEventId" - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/service-2.json.gz b/venv/Lib/site-packages/botocore/data/swf/2012-01-25/service-2.json.gz deleted file mode 100644 index 87a8136..0000000 Binary files a/venv/Lib/site-packages/botocore/data/swf/2012-01-25/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/endpoint-rule-set-1.json.gz deleted file mode 100644 index 1398819..0000000 Binary files a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/examples-1.json b/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/paginators-1.json b/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/service-2.json.gz b/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/service-2.json.gz deleted file mode 100644 index 89b07e2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/synthetics/2017-10-11/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/endpoint-rule-set-1.json.gz deleted file mode 100644 index fd4b9ba..0000000 Binary files a/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/paginators-1.json b/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/paginators-1.json deleted file mode 100644 index bd57abc..0000000 --- a/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - 
"pagination": { - "ListTaxRegistrations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accountDetails" - }, - "ListSupplementalTaxRegistrations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "taxRegistrations" - }, - "ListTaxExemptions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "taxExemptionDetailsMap" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/service-2.json.gz b/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/service-2.json.gz deleted file mode 100644 index 96f5d33..0000000 Binary files a/venv/Lib/site-packages/botocore/data/taxsettings/2018-05-10/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/textract/2018-06-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 46b2441..0000000 Binary files a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/examples-1.json b/venv/Lib/site-packages/botocore/data/textract/2018-06-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/textract/2018-06-27/paginators-1.json deleted file mode 100644 index f0d0405..0000000 --- a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/paginators-1.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "pagination": { - "ListAdapterVersions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AdapterVersions" - }, - "ListAdapters": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Adapters" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/textract/2018-06-27/service-2.json.gz deleted file mode 100644 index 598e803..0000000 Binary files a/venv/Lib/site-packages/botocore/data/textract/2018-06-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3c7f2d8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/paginators-1.json deleted file mode 100644 index e0b4f3c..0000000 --- a/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListDbInstances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDbParameterGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": 
"maxResults", - "result_key": "items" - }, - "ListDbClusters": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDbInstancesForCluster": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/service-2.json.gz deleted file mode 100644 index e75302c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/timestream-influxdb/2023-01-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4be91a7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/paginators-1.json deleted file mode 100644 index a20456b..0000000 --- a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "Query": { - "input_token": "NextToken", - "limit_key": "MaxRows", - "non_aggregate_keys": [ - "ColumnInfo", - "QueryId", - "QueryStatus", - "QueryInsightsResponse" - ], - "output_token": "NextToken", - "result_key": "Rows" - }, - "ListScheduledQueries": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ScheduledQueries" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/service-2.json.gz deleted file mode 100644 index 29b0691..0000000 Binary files a/venv/Lib/site-packages/botocore/data/timestream-query/2018-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 3077b58..0000000 Binary files a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/examples-1.json b/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/service-2.json.gz deleted file mode 100644 index 9244f6b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/timestream-write/2018-11-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9fdcacb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/paginators-1.json b/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/paginators-1.json deleted file mode 100644 index 18ac477..0000000 --- a/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/paginators-1.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pagination": { - "ListSolFunctionInstances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "functionInstances" - }, - "ListSolFunctionPackages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "functionPackages" - }, - "ListSolNetworkInstances": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "networkInstances" - }, - "ListSolNetworkOperations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "networkOperations" - }, - "ListSolNetworkPackages": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "networkPackages" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/service-2.json.gz b/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/service-2.json.gz deleted file mode 100644 index 7bffb37..0000000 Binary files a/venv/Lib/site-packages/botocore/data/tnb/2008-10-21/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index bd070bf..0000000 Binary files a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/examples-1.json b/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/paginators-1.json +++ /dev/null @@ -1,3 
+0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/service-2.json.gz deleted file mode 100644 index c4621a5..0000000 Binary files a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/waiters-2.json deleted file mode 100644 index db0329a..0000000 --- a/venv/Lib/site-packages/botocore/data/transcribe/2017-10-26/waiters-2.json +++ /dev/null @@ -1,138 +0,0 @@ -{ - "version": 2, - "waiters": { - "CallAnalyticsJobCompleted": { - "operation": "GetCallAnalyticsJob", - "delay": 10, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "CallAnalyticsJob.CallAnalyticsJobStatus", - "expected": "COMPLETED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "CallAnalyticsJob.CallAnalyticsJobStatus", - "expected": "FAILED" - } - ] - }, - "LanguageModelCompleted": { - "operation": "DescribeLanguageModel", - "delay": 120, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "LanguageModel.ModelStatus", - "expected": "COMPLETED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "LanguageModel.ModelStatus", - "expected": "FAILED" - } - ] - }, - "MedicalScribeJobCompleted": { - "operation": "GetMedicalScribeJob", - "delay": 10, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "MedicalScribeJob.MedicalScribeJobStatus", - "expected": "COMPLETED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "MedicalScribeJob.MedicalScribeJobStatus", - "expected": "FAILED" - } - ] - }, - "MedicalTranscriptionJobCompleted": { - "operation": "GetMedicalTranscriptionJob", - "delay": 10, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "MedicalTranscriptionJob.TranscriptionJobStatus", - "expected": "COMPLETED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "MedicalTranscriptionJob.TranscriptionJobStatus", - "expected": "FAILED" - } - ] - }, - "MedicalVocabularyReady": { - "operation": "GetMedicalVocabulary", - "delay": 10, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "VocabularyState", - "expected": "READY" - }, - { - "state": "failure", - "matcher": "path", - "argument": "VocabularyState", - "expected": "FAILED" - } - ] - }, - "TranscriptionJobCompleted": { - "operation": "GetTranscriptionJob", - "delay": 10, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "TranscriptionJob.TranscriptionJobStatus", - "expected": "COMPLETED" - }, - { - "state": "failure", - "matcher": "path", - "argument": "TranscriptionJob.TranscriptionJobStatus", - "expected": "FAILED" - } - ] - }, - "VocabularyReady": { - "operation": "GetVocabulary", - "delay": 10, - "maxAttempts": 180, - "acceptors": [ - { - "state": "success", - "matcher": "path", - "argument": "VocabularyState", - "expected": "READY" - }, - { - "state": "failure", - "matcher": "path", - "argument": "VocabularyState", - "expected": "FAILED" - } - ] - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/endpoint-rule-set-1.json.gz 
b/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/endpoint-rule-set-1.json.gz deleted file mode 100644 index b92e204..0000000 Binary files a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/examples-1.json b/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/paginators-1.json b/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/paginators-1.json deleted file mode 100644 index a343b61..0000000 --- a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/paginators-1.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "pagination": { - "ListServers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Servers" - }, - "ListAccesses": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ServerId" - ], - "output_token": "NextToken", - "result_key": "Accesses" - }, - "ListExecutions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "WorkflowId" - ], - "output_token": "NextToken", - "result_key": "Executions" - }, - "ListSecurityPolicies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "SecurityPolicyNames" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "Arn" - ], - "output_token": "NextToken", - "result_key": "Tags" - }, - "ListUsers": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "non_aggregate_keys": [ - "ServerId" - ], - "output_token": "NextToken", - "result_key": "Users" - }, - "ListWorkflows": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Workflows" - }, - "ListAgreements": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Agreements" - }, - "ListCertificates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Certificates" - }, - "ListConnectors": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Connectors" - }, - "ListProfiles": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Profiles" - }, - "ListFileTransferResults": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FileTransferResults" - }, - "ListWebApps": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "WebApps" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/service-2.json.gz b/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/service-2.json.gz deleted file mode 100644 index 3ce8b5f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/waiters-2.json b/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/waiters-2.json deleted file mode 100644 
index ddcd604..0000000 --- a/venv/Lib/site-packages/botocore/data/transfer/2018-11-05/waiters-2.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "version" : 2, - "waiters" : { - "ServerOffline" : { - "delay" : 30, - "maxAttempts" : 120, - "operation" : "DescribeServer", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Server.State", - "state" : "success", - "expected" : "OFFLINE" - }, { - "matcher" : "path", - "argument" : "Server.State", - "state" : "failure", - "expected" : "STOP_FAILED" - } ] - }, - "ServerOnline" : { - "delay" : 30, - "maxAttempts" : 120, - "operation" : "DescribeServer", - "acceptors" : [ { - "matcher" : "path", - "argument" : "Server.State", - "state" : "success", - "expected" : "ONLINE" - }, { - "matcher" : "path", - "argument" : "Server.State", - "state" : "failure", - "expected" : "START_FAILED" - } ] - } - } -} \ No newline at end of file diff --git a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/translate/2017-07-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 70bcbce..0000000 Binary files a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/examples-1.json b/venv/Lib/site-packages/botocore/data/translate/2017-07-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/translate/2017-07-01/paginators-1.json deleted file mode 100644 index 6898cd4..0000000 --- a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/paginators-1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "pagination": { - "ListTerminologies": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "TerminologyPropertiesList" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/translate/2017-07-01/service-2.json.gz deleted file mode 100644 index a7054f7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/translate/2017-07-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/endpoint-rule-set-1.json.gz deleted file mode 100644 index e5f7c9d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/paginators-1.json b/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/paginators-1.json deleted file mode 100644 index 0ac4c7b..0000000 --- a/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListChecks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "checkSummaries" - }, - "ListOrganizationRecommendationAccounts": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "accountRecommendationLifecycleSummaries" - }, - 
"ListOrganizationRecommendationResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "organizationRecommendationResourceSummaries" - }, - "ListOrganizationRecommendations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "organizationRecommendationSummaries" - }, - "ListRecommendationResources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendationResourceSummaries" - }, - "ListRecommendations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "recommendationSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/service-2.json.gz b/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/service-2.json.gz deleted file mode 100644 index 2cde047..0000000 Binary files a/venv/Lib/site-packages/botocore/data/trustedadvisor/2022-09-15/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index f1a6f88..0000000 Binary files a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/paginators-1.json deleted file mode 100644 index 4314d71..0000000 --- a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/paginators-1.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "pagination": { - "ListIdentitySources": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "identitySources" - }, - "ListPolicies": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policies" - }, - "ListPolicyStores": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policyStores" - }, - "ListPolicyTemplates": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "policyTemplates" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/service-2.json.gz deleted file mode 100644 index aec456f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/verifiedpermissions/2021-12-01/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/endpoint-rule-set-1.json.gz deleted file mode 100644 index cc231cc..0000000 Binary files a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/endpoint-rule-set-1.json.gz and /dev/null 
differ diff --git a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/examples-1.json b/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/paginators-1.json b/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/paginators-1.json deleted file mode 100644 index 49dd7cc..0000000 --- a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/paginators-1.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "pagination": { - "ListDomains": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "DomainSummaries" - }, - "ListFraudsterRegistrationJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobSummaries" - }, - "ListSpeakerEnrollmentJobs": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "JobSummaries" - }, - "ListSpeakers": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "SpeakerSummaries" - }, - "ListFraudsters": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "FraudsterSummaries" - }, - "ListWatchlists": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "WatchlistSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/service-2.json.gz b/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/service-2.json.gz deleted file mode 100644 index 1e3590b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/voice-id/2021-09-27/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/endpoint-rule-set-1.json.gz deleted file mode 100644 index ab7b5a8..0000000 Binary files a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/paginators-1.json b/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/paginators-1.json deleted file mode 100644 index 7bb8082..0000000 --- a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/paginators-1.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "pagination": { - "ListAccessLogSubscriptions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListListeners": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListRules": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListServiceNetworkServiceAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListServiceNetworkVpcAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListServiceNetworks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": 
"maxResults", - "result_key": "items" - }, - "ListServices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListTargetGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListTargets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListResourceConfigurations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListResourceEndpointAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListResourceGateways": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListServiceNetworkResourceAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListServiceNetworkVpcEndpointAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - }, - "ListDomainVerifications": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/service-2.json.gz b/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/service-2.json.gz deleted file mode 100644 index 124c89f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/waiters-2.json b/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/vpc-lattice/2022-11-30/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/endpoint-rule-set-1.json.gz deleted file mode 100644 index 18dc36c..0000000 Binary files a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/examples-1.json b/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/examples-1.json deleted file mode 100644 index eee5b6f..0000000 --- a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/examples-1.json +++ /dev/null @@ -1,1017 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateIPSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MyIPSetFriendlyName" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "IPSet": { - "IPSetDescriptors": [ - { - "Type": "IPV4", - "Value": "192.0.2.44/32" - } - ], - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Name": "MyIPSetFriendlyName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an IP match set named MyIPSetFriendlyName.", - "id": "createipset-1472501003122", - "title": "To create an IP set" - } - ], - "CreateRule": [ - { - "input": { - "ChangeToken": 
"abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "MetricName": "WAFByteHeaderRule", - "Name": "WAFByteHeaderRule" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Rule": { - "MetricName": "WAFByteHeaderRule", - "Name": "WAFByteHeaderRule", - "Predicates": [ - { - "DataId": "MyByteMatchSetID", - "Negated": false, - "Type": "ByteMatch" - } - ], - "RuleId": "WAFRule-1-Example" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a rule named WAFByteHeaderRule.", - "id": "createrule-1474072675555", - "title": "To create a rule" - } - ], - "CreateSizeConstraintSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MySampleSizeConstraintSet" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SizeConstraintSet": { - "Name": "MySampleSizeConstraintSet", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SizeConstraints": [ - { - "ComparisonOperator": "GT", - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "Size": 0, - "TextTransformation": "NONE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates size constraint set named MySampleSizeConstraintSet.", - "id": "createsizeconstraint-1474299140754", - "title": "To create a size constraint" - } - ], - "CreateSqlInjectionMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MySQLInjectionMatchSet" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SqlInjectionMatchSet": { - "Name": "MySQLInjectionMatchSet", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SqlInjectionMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a SQL injection match set named MySQLInjectionMatchSet.", - "id": "createsqlinjectionmatchset-1474492796105", - "title": "To create a SQL injection match set" - } - ], - "CreateWebACL": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "DefaultAction": { - "Type": "ALLOW" - }, - "MetricName": "CreateExample", - "Name": "CreateExample" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "WebACL": { - "DefaultAction": { - "Type": "ALLOW" - }, - "MetricName": "CreateExample", - "Name": "CreateExample", - "Rules": [ - { - "Action": { - "Type": "ALLOW" - }, - "Priority": 1, - "RuleId": "WAFRule-1-Example" - } - ], - "WebACLId": "example-46da-4444-5555-example" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a web ACL named CreateExample.", - "id": "createwebacl-1472061481310", - "title": "To create a web ACL" - } - ], - "CreateXssMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MySampleXssMatchSet" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "XssMatchSet": { - "Name": "MySampleXssMatchSet", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "XssMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an XSS match set named MySampleXssMatchSet.", - "id": 
"createxssmatchset-1474560868500", - "title": "To create an XSS match set" - } - ], - "DeleteByteMatchSet": [ - { - "input": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5", - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a byte match set with the ID exampleIDs3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletebytematchset-1473367566229", - "title": "To delete a byte match set" - } - ], - "DeleteIPSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an IP match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deleteipset-1472767434306", - "title": "To delete an IP set" - } - ], - "DeleteRule": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "RuleId": "WAFRule-1-Example" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a rule with the ID WAFRule-1-Example.", - "id": "deleterule-1474073108749", - "title": "To delete a rule" - } - ], - "DeleteSizeConstraintSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a size constraint set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletesizeconstraintset-1474299857905", - "title": "To delete a size constraint set" - } - ], - "DeleteSqlInjectionMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a SQL injection match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletesqlinjectionmatchset-1474493373197", - "title": "To delete a SQL injection match set" - } - ], - "DeleteWebACL": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "WebACLId": "example-46da-4444-5555-example" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a web ACL with the ID example-46da-4444-5555-example.", - "id": "deletewebacl-1472767755931", - "title": "To delete a web ACL" - } - ], - "DeleteXssMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an XSS match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletexssmatchset-1474561302618", - "title": "To delete an XSS 
match set" - } - ], - "GetByteMatchSet": [ - { - "input": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ByteMatchSet": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5", - "ByteMatchTuples": [ - { - "FieldToMatch": { - "Data": "referer", - "Type": "HEADER" - }, - "PositionalConstraint": "CONTAINS", - "TargetString": "badrefer1", - "TextTransformation": "NONE" - } - ], - "Name": "ByteMatchNameExample" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a byte match set with the ID exampleIDs3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getbytematchset-1473273311532", - "title": "To get a byte match set" - } - ], - "GetChangeToken": [ - { - "input": { - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a change token to use for a create, update or delete operation.", - "id": "get-change-token-example-1471635120794", - "title": "To get a change token" - } - ], - "GetChangeTokenStatus": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "output": { - "ChangeTokenStatus": "PENDING" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the status of a change token with the ID abcd12f2-46da-4fdb-b8d5-fbd4c466928f.", - "id": "getchangetokenstatus-1474658417107", - "title": "To get the change token status" - } - ], - "GetIPSet": [ - { - "input": { - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "IPSet": { - "IPSetDescriptors": [ - { - "Type": "IPV4", - "Value": "192.0.2.44/32" - } - ], - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Name": "MyIPSetFriendlyName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of an IP match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getipset-1474658688675", - "title": "To get an IP set" - } - ], - "GetRule": [ - { - "input": { - "RuleId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "Rule": { - "MetricName": "WAFByteHeaderRule", - "Name": "WAFByteHeaderRule", - "Predicates": [ - { - "DataId": "MyByteMatchSetID", - "Negated": false, - "Type": "ByteMatch" - } - ], - "RuleId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a rule with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getrule-1474659238790", - "title": "To get a rule" - } - ], - "GetSampledRequests": [ - { - "input": { - "MaxItems": 100, - "RuleId": "WAFRule-1-Example", - "TimeWindow": { - "EndTime": "2016-09-27T15:50Z", - "StartTime": "2016-09-27T15:50Z" - }, - "WebAclId": "createwebacl-1472061481310" - }, - "output": { - "PopulationSize": 50, - "SampledRequests": [ - { - "Action": "BLOCK", - "Request": { - "ClientIP": "192.0.2.44", - "Country": "US", - "HTTPVersion": "HTTP/1.1", - "Headers": [ - { - "Name": "User-Agent", - "Value": "BadBot " - } - ], - "Method": "HEAD" - }, - "Timestamp": "2016-09-27T14:55Z", - "Weight": 1 - } - ], - "TimeWindow": { - "EndTime": "2016-09-27T15:50Z", - "StartTime": "2016-09-27T14:50Z" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns detailed information about 100 
requests --a sample-- that AWS WAF randomly selects from among the first 5,000 requests that your AWS resource received between the time period 2016-09-27T15:50Z to 2016-09-27T15:50Z.", - "id": "getsampledrequests-1474927997195", - "title": "To get a sampled requests" - } - ], - "GetSizeConstraintSet": [ - { - "input": { - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "SizeConstraintSet": { - "Name": "MySampleSizeConstraintSet", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SizeConstraints": [ - { - "ComparisonOperator": "GT", - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "Size": 0, - "TextTransformation": "NONE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a size constraint match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getsizeconstraintset-1475005422493", - "title": "To get a size constraint set" - } - ], - "GetSqlInjectionMatchSet": [ - { - "input": { - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "SqlInjectionMatchSet": { - "Name": "MySQLInjectionMatchSet", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SqlInjectionMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a SQL injection match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getsqlinjectionmatchset-1475005940137", - "title": "To get a SQL injection match set" - } - ], - "GetWebACL": [ - { - "input": { - "WebACLId": "createwebacl-1472061481310" - }, - "output": { - "WebACL": { - "DefaultAction": { - "Type": "ALLOW" - }, - "MetricName": "CreateExample", - "Name": "CreateExample", - "Rules": [ - { - "Action": { - "Type": "ALLOW" - }, - "Priority": 1, - "RuleId": "WAFRule-1-Example" - } - ], - "WebACLId": "createwebacl-1472061481310" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a web ACL with the ID createwebacl-1472061481310.", - "id": "getwebacl-1475006348525", - "title": "To get a web ACL" - } - ], - "GetXssMatchSet": [ - { - "input": { - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "XssMatchSet": { - "Name": "MySampleXssMatchSet", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "XssMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of an XSS match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getxssmatchset-1475187879017", - "title": "To get an XSS match set" - } - ], - "ListIPSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "IPSets": [ - { - "IPSetId": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MyIPSetFriendlyName" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 IP match sets.", - "id": "listipsets-1472235676229", - "title": "To list IP sets" - } - ], - "ListRules": [ - { - "input": { - "Limit": 100 - }, - "output": { - "Rules": [ - { - "Name": "WAFByteHeaderRule", - "RuleId": "WAFRule-1-Example" - } 
- ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 rules.", - "id": "listrules-1475258406433", - "title": "To list rules" - } - ], - "ListSizeConstraintSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "SizeConstraintSets": [ - { - "Name": "MySampleSizeConstraintSet", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 size contraint match sets.", - "id": "listsizeconstraintsets-1474300067597", - "title": "To list a size constraint sets" - } - ], - "ListSqlInjectionMatchSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "SqlInjectionMatchSets": [ - { - "Name": "MySQLInjectionMatchSet", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 SQL injection match sets.", - "id": "listsqlinjectionmatchset-1474493560103", - "title": "To list SQL injection match sets" - } - ], - "ListWebACLs": [ - { - "input": { - "Limit": 100 - }, - "output": { - "WebACLs": [ - { - "Name": "WebACLexample", - "WebACLId": "webacl-1472061481310" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 web ACLs.", - "id": "listwebacls-1475258732691", - "title": "To list Web ACLs" - } - ], - "ListXssMatchSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "XssMatchSets": [ - { - "Name": "MySampleXssMatchSet", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 XSS match sets.", - "id": "listxssmatchsets-1474561481168", - "title": "To list XSS match sets" - } - ], - "UpdateByteMatchSet": [ - { - "input": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5", - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Updates": [ - { - "Action": "DELETE", - "ByteMatchTuple": { - "FieldToMatch": { - "Data": "referer", - "Type": "HEADER" - }, - "PositionalConstraint": "CONTAINS", - "TargetString": "badrefer1", - "TextTransformation": "NONE" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a ByteMatchTuple object (filters) in an byte match set with the ID exampleIDs3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatebytematchset-1475259074558", - "title": "To update a byte match set" - } - ], - "UpdateIPSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "IPSetDescriptor": { - "Type": "IPV4", - "Value": "192.0.2.44/32" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an IPSetDescriptor object in an IP match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updateipset-1475259733625", - "title": "To update an IP set" - } - ], - "UpdateRule": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "RuleId": 
"example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "Predicate": { - "DataId": "MyByteMatchSetID", - "Negated": false, - "Type": "ByteMatch" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a Predicate object in a rule with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updaterule-1475260064720", - "title": "To update a rule" - } - ], - "UpdateSizeConstraintSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "SizeConstraint": { - "ComparisonOperator": "GT", - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "Size": 0, - "TextTransformation": "NONE" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a SizeConstraint object (filters) in a size constraint set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatesizeconstraintset-1475531697891", - "title": "To update a size constraint set" - } - ], - "UpdateSqlInjectionMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "SqlInjectionMatchTuple": { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a SqlInjectionMatchTuple object (filters) in a SQL injection match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatesqlinjectionmatchset-1475532094686", - "title": "To update a SQL injection match set" - } - ], - "UpdateWebACL": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "DefaultAction": { - "Type": "ALLOW" - }, - "Updates": [ - { - "Action": "DELETE", - "ActivatedRule": { - "Action": { - "Type": "ALLOW" - }, - "Priority": 1, - "RuleId": "WAFRule-1-Example" - } - } - ], - "WebACLId": "webacl-1472061481310" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an ActivatedRule object in a WebACL with the ID webacl-1472061481310.", - "id": "updatewebacl-1475533627385", - "title": "To update a Web ACL" - } - ], - "UpdateXssMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Updates": [ - { - "Action": "DELETE", - "XssMatchTuple": { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - } - ], - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an XssMatchTuple object (filters) in an XssMatchSet with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatexssmatchset-1475534098881", - "title": "To update an XSS match set" - } - ] - } -} diff --git 
a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/paginators-1.json b/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/service-2.json.gz b/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/service-2.json.gz deleted file mode 100644 index 17852ca..0000000 Binary files a/venv/Lib/site-packages/botocore/data/waf-regional/2016-11-28/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/waf/2015-08-24/endpoint-rule-set-1.json.gz deleted file mode 100644 index e87e34f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/examples-1.json b/venv/Lib/site-packages/botocore/data/waf/2015-08-24/examples-1.json deleted file mode 100644 index eee5b6f..0000000 --- a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/examples-1.json +++ /dev/null @@ -1,1017 +0,0 @@ -{ - "version": "1.0", - "examples": { - "CreateIPSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MyIPSetFriendlyName" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "IPSet": { - "IPSetDescriptors": [ - { - "Type": "IPV4", - "Value": "192.0.2.44/32" - } - ], - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Name": "MyIPSetFriendlyName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an IP match set named MyIPSetFriendlyName.", - "id": "createipset-1472501003122", - "title": "To create an IP set" - } - ], - "CreateRule": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "MetricName": "WAFByteHeaderRule", - "Name": "WAFByteHeaderRule" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Rule": { - "MetricName": "WAFByteHeaderRule", - "Name": "WAFByteHeaderRule", - "Predicates": [ - { - "DataId": "MyByteMatchSetID", - "Negated": false, - "Type": "ByteMatch" - } - ], - "RuleId": "WAFRule-1-Example" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a rule named WAFByteHeaderRule.", - "id": "createrule-1474072675555", - "title": "To create a rule" - } - ], - "CreateSizeConstraintSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MySampleSizeConstraintSet" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SizeConstraintSet": { - "Name": "MySampleSizeConstraintSet", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SizeConstraints": [ - { - "ComparisonOperator": "GT", - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "Size": 0, - "TextTransformation": "NONE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates size constraint set named MySampleSizeConstraintSet.", - "id": "createsizeconstraint-1474299140754", - "title": "To create a size constraint" - } - ], - "CreateSqlInjectionMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - 
"Name": "MySQLInjectionMatchSet" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SqlInjectionMatchSet": { - "Name": "MySQLInjectionMatchSet", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SqlInjectionMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a SQL injection match set named MySQLInjectionMatchSet.", - "id": "createsqlinjectionmatchset-1474492796105", - "title": "To create a SQL injection match set" - } - ], - "CreateWebACL": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "DefaultAction": { - "Type": "ALLOW" - }, - "MetricName": "CreateExample", - "Name": "CreateExample" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "WebACL": { - "DefaultAction": { - "Type": "ALLOW" - }, - "MetricName": "CreateExample", - "Name": "CreateExample", - "Rules": [ - { - "Action": { - "Type": "ALLOW" - }, - "Priority": 1, - "RuleId": "WAFRule-1-Example" - } - ], - "WebACLId": "example-46da-4444-5555-example" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates a web ACL named CreateExample.", - "id": "createwebacl-1472061481310", - "title": "To create a web ACL" - } - ], - "CreateXssMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MySampleXssMatchSet" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "XssMatchSet": { - "Name": "MySampleXssMatchSet", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "XssMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example creates an XSS match set named MySampleXssMatchSet.", - "id": "createxssmatchset-1474560868500", - "title": "To create an XSS match set" - } - ], - "DeleteByteMatchSet": [ - { - "input": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5", - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a byte match set with the ID exampleIDs3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletebytematchset-1473367566229", - "title": "To delete a byte match set" - } - ], - "DeleteIPSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an IP match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deleteipset-1472767434306", - "title": "To delete an IP set" - } - ], - "DeleteRule": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "RuleId": "WAFRule-1-Example" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a rule with the ID WAFRule-1-Example.", - "id": "deleterule-1474073108749", - "title": "To delete a rule" - } - ], - 
"DeleteSizeConstraintSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a size constraint set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletesizeconstraintset-1474299857905", - "title": "To delete a size constraint set" - } - ], - "DeleteSqlInjectionMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a SQL injection match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletesqlinjectionmatchset-1474493373197", - "title": "To delete a SQL injection match set" - } - ], - "DeleteWebACL": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "WebACLId": "example-46da-4444-5555-example" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a web ACL with the ID example-46da-4444-5555-example.", - "id": "deletewebacl-1472767755931", - "title": "To delete a web ACL" - } - ], - "DeleteXssMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an XSS match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "deletexssmatchset-1474561302618", - "title": "To delete an XSS match set" - } - ], - "GetByteMatchSet": [ - { - "input": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ByteMatchSet": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5", - "ByteMatchTuples": [ - { - "FieldToMatch": { - "Data": "referer", - "Type": "HEADER" - }, - "PositionalConstraint": "CONTAINS", - "TargetString": "badrefer1", - "TextTransformation": "NONE" - } - ], - "Name": "ByteMatchNameExample" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a byte match set with the ID exampleIDs3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getbytematchset-1473273311532", - "title": "To get a byte match set" - } - ], - "GetChangeToken": [ - { - "input": { - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns a change token to use for a create, update or delete operation.", - "id": "get-change-token-example-1471635120794", - "title": "To get a change token" - } - ], - "GetChangeTokenStatus": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "output": { - "ChangeTokenStatus": "PENDING" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the status of a change token with the ID abcd12f2-46da-4fdb-b8d5-fbd4c466928f.", - "id": 
"getchangetokenstatus-1474658417107", - "title": "To get the change token status" - } - ], - "GetIPSet": [ - { - "input": { - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "IPSet": { - "IPSetDescriptors": [ - { - "Type": "IPV4", - "Value": "192.0.2.44/32" - } - ], - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Name": "MyIPSetFriendlyName" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of an IP match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getipset-1474658688675", - "title": "To get an IP set" - } - ], - "GetRule": [ - { - "input": { - "RuleId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "Rule": { - "MetricName": "WAFByteHeaderRule", - "Name": "WAFByteHeaderRule", - "Predicates": [ - { - "DataId": "MyByteMatchSetID", - "Negated": false, - "Type": "ByteMatch" - } - ], - "RuleId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a rule with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getrule-1474659238790", - "title": "To get a rule" - } - ], - "GetSampledRequests": [ - { - "input": { - "MaxItems": 100, - "RuleId": "WAFRule-1-Example", - "TimeWindow": { - "EndTime": "2016-09-27T15:50Z", - "StartTime": "2016-09-27T15:50Z" - }, - "WebAclId": "createwebacl-1472061481310" - }, - "output": { - "PopulationSize": 50, - "SampledRequests": [ - { - "Action": "BLOCK", - "Request": { - "ClientIP": "192.0.2.44", - "Country": "US", - "HTTPVersion": "HTTP/1.1", - "Headers": [ - { - "Name": "User-Agent", - "Value": "BadBot " - } - ], - "Method": "HEAD" - }, - "Timestamp": "2016-09-27T14:55Z", - "Weight": 1 - } - ], - "TimeWindow": { - "EndTime": "2016-09-27T15:50Z", - "StartTime": "2016-09-27T14:50Z" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns detailed information about 100 requests --a sample-- that AWS WAF randomly selects from among the first 5,000 requests that your AWS resource received between the time period 2016-09-27T15:50Z to 2016-09-27T15:50Z.", - "id": "getsampledrequests-1474927997195", - "title": "To get a sampled requests" - } - ], - "GetSizeConstraintSet": [ - { - "input": { - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "SizeConstraintSet": { - "Name": "MySampleSizeConstraintSet", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SizeConstraints": [ - { - "ComparisonOperator": "GT", - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "Size": 0, - "TextTransformation": "NONE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a size constraint match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getsizeconstraintset-1475005422493", - "title": "To get a size constraint set" - } - ], - "GetSqlInjectionMatchSet": [ - { - "input": { - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "SqlInjectionMatchSet": { - "Name": "MySQLInjectionMatchSet", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "SqlInjectionMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - 
"description": "The following example returns the details of a SQL injection match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getsqlinjectionmatchset-1475005940137", - "title": "To get a SQL injection match set" - } - ], - "GetWebACL": [ - { - "input": { - "WebACLId": "createwebacl-1472061481310" - }, - "output": { - "WebACL": { - "DefaultAction": { - "Type": "ALLOW" - }, - "MetricName": "CreateExample", - "Name": "CreateExample", - "Rules": [ - { - "Action": { - "Type": "ALLOW" - }, - "Priority": 1, - "RuleId": "WAFRule-1-Example" - } - ], - "WebACLId": "createwebacl-1472061481310" - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of a web ACL with the ID createwebacl-1472061481310.", - "id": "getwebacl-1475006348525", - "title": "To get a web ACL" - } - ], - "GetXssMatchSet": [ - { - "input": { - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "XssMatchSet": { - "Name": "MySampleXssMatchSet", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "XssMatchTuples": [ - { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - ] - } - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns the details of an XSS match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "getxssmatchset-1475187879017", - "title": "To get an XSS match set" - } - ], - "ListIPSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "IPSets": [ - { - "IPSetId": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Name": "MyIPSetFriendlyName" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 IP match sets.", - "id": "listipsets-1472235676229", - "title": "To list IP sets" - } - ], - "ListRules": [ - { - "input": { - "Limit": 100 - }, - "output": { - "Rules": [ - { - "Name": "WAFByteHeaderRule", - "RuleId": "WAFRule-1-Example" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 rules.", - "id": "listrules-1475258406433", - "title": "To list rules" - } - ], - "ListSizeConstraintSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "SizeConstraintSets": [ - { - "Name": "MySampleSizeConstraintSet", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 size contraint match sets.", - "id": "listsizeconstraintsets-1474300067597", - "title": "To list a size constraint sets" - } - ], - "ListSqlInjectionMatchSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "SqlInjectionMatchSets": [ - { - "Name": "MySQLInjectionMatchSet", - "SqlInjectionMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 SQL injection match sets.", - "id": "listsqlinjectionmatchset-1474493560103", - "title": "To list SQL injection match sets" - } - ], - "ListWebACLs": [ - { - "input": { - "Limit": 100 - }, - "output": { - "WebACLs": [ - { - "Name": "WebACLexample", - "WebACLId": "webacl-1472061481310" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of 
up to 100 web ACLs.", - "id": "listwebacls-1475258732691", - "title": "To list Web ACLs" - } - ], - "ListXssMatchSets": [ - { - "input": { - "Limit": 100 - }, - "output": { - "XssMatchSets": [ - { - "Name": "MySampleXssMatchSet", - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - } - ] - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example returns an array of up to 100 XSS match sets.", - "id": "listxssmatchsets-1474561481168", - "title": "To list XSS match sets" - } - ], - "UpdateByteMatchSet": [ - { - "input": { - "ByteMatchSetId": "exampleIDs3t-46da-4fdb-b8d5-abc321j569j5", - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Updates": [ - { - "Action": "DELETE", - "ByteMatchTuple": { - "FieldToMatch": { - "Data": "referer", - "Type": "HEADER" - }, - "PositionalConstraint": "CONTAINS", - "TargetString": "badrefer1", - "TextTransformation": "NONE" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a ByteMatchTuple object (filters) in an byte match set with the ID exampleIDs3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatebytematchset-1475259074558", - "title": "To update a byte match set" - } - ], - "UpdateIPSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "IPSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "IPSetDescriptor": { - "Type": "IPV4", - "Value": "192.0.2.44/32" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an IPSetDescriptor object in an IP match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updateipset-1475259733625", - "title": "To update an IP set" - } - ], - "UpdateRule": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "RuleId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "Predicate": { - "DataId": "MyByteMatchSetID", - "Negated": false, - "Type": "ByteMatch" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a Predicate object in a rule with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updaterule-1475260064720", - "title": "To update a rule" - } - ], - "UpdateSizeConstraintSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SizeConstraintSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "SizeConstraint": { - "ComparisonOperator": "GT", - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "Size": 0, - "TextTransformation": "NONE" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a SizeConstraint object (filters) in a size constraint set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatesizeconstraintset-1475531697891", - "title": "To update a size constraint set" - } - ], - "UpdateSqlInjectionMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "SqlInjectionMatchSetId": 
"example1ds3t-46da-4fdb-b8d5-abc321j569j5", - "Updates": [ - { - "Action": "DELETE", - "SqlInjectionMatchTuple": { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - } - ] - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes a SqlInjectionMatchTuple object (filters) in a SQL injection match set with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatesqlinjectionmatchset-1475532094686", - "title": "To update a SQL injection match set" - } - ], - "UpdateWebACL": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "DefaultAction": { - "Type": "ALLOW" - }, - "Updates": [ - { - "Action": "DELETE", - "ActivatedRule": { - "Action": { - "Type": "ALLOW" - }, - "Priority": 1, - "RuleId": "WAFRule-1-Example" - } - } - ], - "WebACLId": "webacl-1472061481310" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an ActivatedRule object in a WebACL with the ID webacl-1472061481310.", - "id": "updatewebacl-1475533627385", - "title": "To update a Web ACL" - } - ], - "UpdateXssMatchSet": [ - { - "input": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f", - "Updates": [ - { - "Action": "DELETE", - "XssMatchTuple": { - "FieldToMatch": { - "Type": "QUERY_STRING" - }, - "TextTransformation": "URL_DECODE" - } - } - ], - "XssMatchSetId": "example1ds3t-46da-4fdb-b8d5-abc321j569j5" - }, - "output": { - "ChangeToken": "abcd12f2-46da-4fdb-b8d5-fbd4c466928f" - }, - "comments": { - "input": { - }, - "output": { - } - }, - "description": "The following example deletes an XssMatchTuple object (filters) in an XssMatchSet with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.", - "id": "updatexssmatchset-1475534098881", - "title": "To update an XSS match set" - } - ] - } -} diff --git a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/paginators-1.json b/venv/Lib/site-packages/botocore/data/waf/2015-08-24/paginators-1.json deleted file mode 100644 index 9f2eba8..0000000 --- a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/paginators-1.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "pagination": { - "ListByteMatchSets": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "ByteMatchSets" - }, - "ListIPSets": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "IPSets" - }, - "ListRules": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "Rules" - }, - "ListSizeConstraintSets": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "SizeConstraintSets" - }, - "ListSqlInjectionMatchSets": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "SqlInjectionMatchSets" - }, - "ListWebACLs": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "WebACLs" - }, - "ListXssMatchSets": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "limit_key": "Limit", - "result_key": "XssMatchSets" - }, - "GetRateBasedRuleManagedKeys": { - "input_token": "NextMarker", - "output_token": "NextMarker", - "result_key": "ManagedKeys" - }, - "ListActivatedRulesInRuleGroup": { - 
"input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "ActivatedRules" - }, - "ListGeoMatchSets": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "GeoMatchSets" - }, - "ListLoggingConfigurations": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "LoggingConfigurations" - }, - "ListRateBasedRules": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "Rules" - }, - "ListRegexMatchSets": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "RegexMatchSets" - }, - "ListRegexPatternSets": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "RegexPatternSets" - }, - "ListRuleGroups": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "RuleGroups" - }, - "ListSubscribedRuleGroups": { - "input_token": "NextMarker", - "limit_key": "Limit", - "output_token": "NextMarker", - "result_key": "RuleGroups" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/service-2.json.gz b/venv/Lib/site-packages/botocore/data/waf/2015-08-24/service-2.json.gz deleted file mode 100644 index a7994f1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/waf/2015-08-24/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9630892..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/examples-1.json b/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/paginators-1.json b/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/service-2.json.gz b/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/service-2.json.gz deleted file mode 100644 index b70e8eb..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wafv2/2019-07-29/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/endpoint-rule-set-1.json.gz deleted file mode 100644 index 7a9fcb2..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/examples-1.json b/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/examples-1.json +++ /dev/null @@ -1,5 
+0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/paginators-1.json b/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/service-2.json.gz b/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/service-2.json.gz deleted file mode 100644 index 80a2672..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wellarchitected/2020-03-31/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 0676094..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/paginators-1.json deleted file mode 100644 index f51f71e..0000000 --- a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/paginators-1.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "pagination": { - "ListBlockedGuestUsers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "blocklist" - }, - "ListBots": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "bots" - }, - "ListDevicesForUser": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "devices" - }, - "ListGuestUsers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "guestlist" - }, - "ListNetworks": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "networks" - }, - "ListSecurityGroupUsers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "users" - }, - "ListSecurityGroups": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "securityGroups" - }, - "ListUsers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "users" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/service-2.json.gz deleted file mode 100644 index eee17b4..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/waiters-2.json b/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/wickr/2024-02-01/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/endpoint-rule-set-1.json.gz deleted file mode 100644 index f15abe9..0000000 Binary files 
a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/examples-1.json b/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/paginators-1.json b/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/paginators-1.json deleted file mode 100644 index 60b1dca..0000000 --- a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListAssistantAssociations": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assistantAssociationSummaries" - }, - "ListAssistants": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "assistantSummaries" - }, - "ListContents": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "contentSummaries" - }, - "ListKnowledgeBases": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "knowledgeBaseSummaries" - }, - "QueryAssistant": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "results" - }, - "SearchContent": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "contentSummaries" - }, - "SearchSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessionSummaries" - }, - "ListImportJobs": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "importJobSummaries" - }, - "ListQuickResponses": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "quickResponseSummaries" - }, - "SearchQuickResponses": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "results" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/service-2.json.gz b/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/service-2.json.gz deleted file mode 100644 index c87d495..0000000 Binary files a/venv/Lib/site-packages/botocore/data/wisdom/2020-10-19/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index acbad5f..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/examples-1.json b/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/paginators-1.json 
b/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/paginators-1.json deleted file mode 100644 index ff2f410..0000000 --- a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/paginators-1.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "pagination": { - "DescribeDocumentVersions": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "DocumentVersions" - }, - "DescribeFolderContents": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": [ - "Folders", - "Documents" - ] - }, - "DescribeUsers": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Users" - }, - "DescribeActivities": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "UserActivities" - }, - "DescribeComments": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Comments" - }, - "DescribeGroups": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Groups" - }, - "DescribeNotificationSubscriptions": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Subscriptions" - }, - "DescribeResourcePermissions": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Principals" - }, - "DescribeRootFolders": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Folders" - }, - "SearchResources": { - "input_token": "Marker", - "limit_key": "Limit", - "output_token": "Marker", - "result_key": "Items" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/service-2.json.gz deleted file mode 100644 index 717e8a7..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workdocs/2016-05-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 4b700df..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/examples-1.json b/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/paginators-1.json deleted file mode 100644 index a4d5bd2..0000000 --- a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/paginators-1.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "pagination": { - "ListUsers": { - "result_key": "Users", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListGroupMembers": { - "result_key": "Members", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListOrganizations": { - "result_key": "OrganizationSummaries", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListGroups": { - "result_key": 
"Groups", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListResources": { - "result_key": "Resources", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListAliases": { - "result_key": "Aliases", - "output_token": "NextToken", - "input_token": "NextToken", - "limit_key": "MaxResults" - }, - "ListMailboxPermissions": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Permissions" - }, - "ListResourceDelegates": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Delegates" - }, - "ListAvailabilityConfigurations": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "AvailabilityConfigurations" - }, - "ListPersonalAccessTokens": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "PersonalAccessTokenSummaries" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/service-2.json.gz deleted file mode 100644 index 1e09721..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workmail/2017-10-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/endpoint-rule-set-1.json.gz deleted file mode 100644 index 32b5df1..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/examples-1.json b/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/paginators-1.json b/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/paginators-1.json deleted file mode 100644 index ea14245..0000000 --- a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/paginators-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pagination": {} -} diff --git a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/service-2.json.gz deleted file mode 100644 index 398ca0d..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workmailmessageflow/2019-05-01/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/endpoint-rule-set-1.json.gz deleted file mode 100644 index 75a3c60..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/paginators-1.json b/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/paginators-1.json deleted file mode 100644 index 988974d..0000000 --- 
a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListInstanceTypes": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "InstanceTypes" - }, - "ListRegions": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "Regions" - }, - "ListWorkspaceInstances": { - "input_token": "NextToken", - "output_token": "NextToken", - "limit_key": "MaxResults", - "result_key": "WorkspaceInstances" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/service-2.json.gz deleted file mode 100644 index 8b15d17..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/waiters-2.json b/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/workspaces-instances/2022-07-26/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/endpoint-rule-set-1.json.gz deleted file mode 100644 index 764ff05..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/paginators-1.json b/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/paginators-1.json deleted file mode 100644 index 9fe21a4..0000000 --- a/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListDevices": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "devices" - }, - "ListEnvironments": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "environments" - }, - "ListSoftwareSets": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "softwareSets" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/service-2.json.gz deleted file mode 100644 index 0f214a6..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces-thin-client/2023-08-22/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 9e0cb02..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/examples-1.json b/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- 
a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/paginators-1.json deleted file mode 100644 index 73aba4d..0000000 --- a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/paginators-1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "pagination": { - "ListSessions": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessions" - }, - "ListDataProtectionSettings": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "dataProtectionSettings" - }, - "ListSessionLoggers": { - "input_token": "nextToken", - "output_token": "nextToken", - "limit_key": "maxResults", - "result_key": "sessionLoggers" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/service-2.json.gz deleted file mode 100644 index a21bea3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/waiters-2.json b/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/waiters-2.json deleted file mode 100644 index 13f60ee..0000000 --- a/venv/Lib/site-packages/botocore/data/workspaces-web/2020-07-08/waiters-2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 2, - "waiters": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/endpoint-rule-set-1.json.gz deleted file mode 100644 index 6edfb4b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/examples-1.json b/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/paginators-1.json b/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/paginators-1.json deleted file mode 100644 index 01176ff..0000000 --- a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/paginators-1.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "pagination": { - "DescribeWorkspaceBundles": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Bundles" - }, - "DescribeWorkspaceDirectories": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Directories" - }, - "DescribeWorkspaces": { - "limit_key": "Limit", - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Workspaces" - }, - "DescribeAccountModifications": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "AccountModifications" - }, - "DescribeIpGroups": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "Result" - }, - "DescribeWorkspaceImages": { - "input_token": "NextToken", - "limit_key": 
"MaxResults", - "output_token": "NextToken", - "result_key": "Images" - }, - "DescribeWorkspacesConnectionStatus": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "WorkspacesConnectionStatus" - }, - "ListAvailableManagementCidrRanges": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "ManagementCidrRanges" - }, - "ListAccountLinks": { - "input_token": "NextToken", - "limit_key": "MaxResults", - "output_token": "NextToken", - "result_key": "AccountLinks" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/service-2.json.gz b/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/service-2.json.gz deleted file mode 100644 index 3f5043b..0000000 Binary files a/venv/Lib/site-packages/botocore/data/workspaces/2015-04-08/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/endpoint-rule-set-1.json.gz b/venv/Lib/site-packages/botocore/data/xray/2016-04-12/endpoint-rule-set-1.json.gz deleted file mode 100644 index e5c14b3..0000000 Binary files a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/endpoint-rule-set-1.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/examples-1.json b/venv/Lib/site-packages/botocore/data/xray/2016-04-12/examples-1.json deleted file mode 100644 index 0ea7e3b..0000000 --- a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/examples-1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": "1.0", - "examples": { - } -} diff --git a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/paginators-1.json b/venv/Lib/site-packages/botocore/data/xray/2016-04-12/paginators-1.json deleted file mode 100644 index 0f65898..0000000 --- a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/paginators-1.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "pagination": { - "BatchGetTraces": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Traces", - "non_aggregate_keys": [ - "UnprocessedTraceIds" - ] - }, - "GetServiceGraph": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Services", - "non_aggregate_keys": [ - "StartTime", - "EndTime", - "ContainsOldGroupVersions" - ] - }, - "GetTraceGraph": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Services" - }, - "GetTraceSummaries": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "TraceSummaries", - "non_aggregate_keys": [ - "TracesProcessedCount", - "ApproximateTime" - ] - }, - "GetGroups": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Groups" - }, - "GetSamplingRules": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "SamplingRuleRecords" - }, - "GetSamplingStatisticSummaries": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "SamplingStatisticSummaries" - }, - "GetTimeSeriesServiceStatistics": { - "input_token": "NextToken", - "non_aggregate_keys": [ - "ContainsOldGroupVersions" - ], - "output_token": "NextToken", - "result_key": "TimeSeriesServiceStatistics" - }, - "ListResourcePolicies": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "ResourcePolicies" - }, - "ListTagsForResource": { - "input_token": "NextToken", - "output_token": "NextToken", - "result_key": "Tags" - } - } -} diff --git a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/service-2.json.gz 
b/venv/Lib/site-packages/botocore/data/xray/2016-04-12/service-2.json.gz deleted file mode 100644 index 6174395..0000000 Binary files a/venv/Lib/site-packages/botocore/data/xray/2016-04-12/service-2.json.gz and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/discovery.py b/venv/Lib/site-packages/botocore/discovery.py deleted file mode 100644 index 3d16e97..0000000 --- a/venv/Lib/site-packages/botocore/discovery.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import logging -import time -import weakref - -from botocore import xform_name -from botocore.exceptions import BotoCoreError, ConnectionError, HTTPClientError -from botocore.model import OperationNotFoundError -from botocore.utils import CachedProperty - -logger = logging.getLogger(__name__) - - -class EndpointDiscoveryException(BotoCoreError): - pass - - -class EndpointDiscoveryRequired(EndpointDiscoveryException): - """Endpoint Discovery is disabled but is required for this operation.""" - - fmt = 'Endpoint Discovery is not enabled but this operation requires it.' - - -class EndpointDiscoveryRefreshFailed(EndpointDiscoveryException): - """Endpoint Discovery failed to the refresh the known endpoints.""" - - fmt = 'Endpoint Discovery failed to refresh the required endpoints.' 
- - -def block_endpoint_discovery_required_operations(model, **kwargs): - endpoint_discovery = model.endpoint_discovery - if endpoint_discovery and endpoint_discovery.get('required'): - raise EndpointDiscoveryRequired() - - -class EndpointDiscoveryModel: - def __init__(self, service_model): - self._service_model = service_model - - @CachedProperty - def discovery_operation_name(self): - discovery_operation = self._service_model.endpoint_discovery_operation - return xform_name(discovery_operation.name) - - @CachedProperty - def discovery_operation_keys(self): - discovery_operation = self._service_model.endpoint_discovery_operation - keys = [] - if discovery_operation.input_shape: - keys = list(discovery_operation.input_shape.members.keys()) - return keys - - def discovery_required_for(self, operation_name): - try: - operation_model = self._service_model.operation_model( - operation_name - ) - return operation_model.endpoint_discovery.get('required', False) - except OperationNotFoundError: - return False - - def discovery_operation_kwargs(self, **kwargs): - input_keys = self.discovery_operation_keys - # Operation and Identifiers are only sent if there are Identifiers - if not kwargs.get('Identifiers'): - kwargs.pop('Operation', None) - kwargs.pop('Identifiers', None) - return {k: v for k, v in kwargs.items() if k in input_keys} - - def gather_identifiers(self, operation, params): - return self._gather_ids(operation.input_shape, params) - - def _gather_ids(self, shape, params, ids=None): - # Traverse the input shape and corresponding parameters, gathering - # any input fields labeled as an endpoint discovery id - if ids is None: - ids = {} - for member_name, member_shape in shape.members.items(): - if member_shape.metadata.get('endpointdiscoveryid'): - ids[member_name] = params[member_name] - elif ( - member_shape.type_name == 'structure' and member_name in params - ): - self._gather_ids(member_shape, params[member_name], ids) - return ids - - -class EndpointDiscoveryManager: - def __init__( - self, client, cache=None, current_time=None, always_discover=True - ): - if cache is None: - cache = {} - self._cache = cache - self._failed_attempts = {} - if current_time is None: - current_time = time.time - self._time = current_time - self._always_discover = always_discover - - # This needs to be a weak ref in order to prevent memory leaks on - # python 2.6 - self._client = weakref.proxy(client) - self._model = EndpointDiscoveryModel(client.meta.service_model) - - def _parse_endpoints(self, response): - endpoints = response['Endpoints'] - current_time = self._time() - for endpoint in endpoints: - cache_time = endpoint.get('CachePeriodInMinutes') - endpoint['Expiration'] = current_time + cache_time * 60 - return endpoints - - def _cache_item(self, value): - if isinstance(value, dict): - return tuple(sorted(value.items())) - else: - return value - - def _create_cache_key(self, **kwargs): - kwargs = self._model.discovery_operation_kwargs(**kwargs) - return tuple(self._cache_item(v) for k, v in sorted(kwargs.items())) - - def gather_identifiers(self, operation, params): - return self._model.gather_identifiers(operation, params) - - def delete_endpoints(self, **kwargs): - cache_key = self._create_cache_key(**kwargs) - if cache_key in self._cache: - del self._cache[cache_key] - - def _describe_endpoints(self, **kwargs): - # This is effectively a proxy to whatever name/kwargs the service - # supports for endpoint discovery. 
- kwargs = self._model.discovery_operation_kwargs(**kwargs) - operation_name = self._model.discovery_operation_name - discovery_operation = getattr(self._client, operation_name) - logger.debug('Discovering endpoints with kwargs: %s', kwargs) - return discovery_operation(**kwargs) - - def _get_current_endpoints(self, key): - if key not in self._cache: - return None - now = self._time() - return [e for e in self._cache[key] if now < e['Expiration']] - - def _refresh_current_endpoints(self, **kwargs): - cache_key = self._create_cache_key(**kwargs) - try: - response = self._describe_endpoints(**kwargs) - endpoints = self._parse_endpoints(response) - self._cache[cache_key] = endpoints - self._failed_attempts.pop(cache_key, None) - return endpoints - except (ConnectionError, HTTPClientError): - self._failed_attempts[cache_key] = self._time() + 60 - return None - - def _recently_failed(self, cache_key): - if cache_key in self._failed_attempts: - now = self._time() - if now < self._failed_attempts[cache_key]: - return True - del self._failed_attempts[cache_key] - return False - - def _select_endpoint(self, endpoints): - return endpoints[0]['Address'] - - def describe_endpoint(self, **kwargs): - operation = kwargs['Operation'] - discovery_required = self._model.discovery_required_for(operation) - - if not self._always_discover and not discovery_required: - # Discovery set to only run on required operations - logger.debug( - 'Optional discovery disabled. Skipping discovery for Operation: %s', - operation, - ) - return None - - # Get the endpoint for the provided operation and identifiers - cache_key = self._create_cache_key(**kwargs) - endpoints = self._get_current_endpoints(cache_key) - if endpoints: - return self._select_endpoint(endpoints) - # All known endpoints are stale - recently_failed = self._recently_failed(cache_key) - if not recently_failed: - # We haven't failed to discover recently, go ahead and refresh - endpoints = self._refresh_current_endpoints(**kwargs) - if endpoints: - return self._select_endpoint(endpoints) - # Discovery has failed recently, do our best to get an endpoint - logger.debug('Endpoint Discovery has failed for: %s', kwargs) - stale_entries = self._cache.get(cache_key, None) - if stale_entries: - # We have stale entries, use those while discovery is failing - return self._select_endpoint(stale_entries) - if discovery_required: - # It looks strange to be checking recently_failed again but, - # this informs us as to whether or not we tried to refresh earlier - if recently_failed: - # Discovery is required and we haven't already refreshed - endpoints = self._refresh_current_endpoints(**kwargs) - if endpoints: - return self._select_endpoint(endpoints) - # No endpoints even refresh, raise hard error - raise EndpointDiscoveryRefreshFailed() - # Discovery is optional, just use the default endpoint for now - return None - - -class EndpointDiscoveryHandler: - def __init__(self, manager): - self._manager = manager - - def register(self, events, service_id): - events.register( - f'before-parameter-build.{service_id}', self.gather_identifiers - ) - events.register_first( - f'request-created.{service_id}', self.discover_endpoint - ) - events.register(f'needs-retry.{service_id}', self.handle_retries) - - def gather_identifiers(self, params, model, context, **kwargs): - endpoint_discovery = model.endpoint_discovery - # Only continue if the operation supports endpoint discovery - if endpoint_discovery is None: - return - ids = self._manager.gather_identifiers(model, params) - 
context['discovery'] = {'identifiers': ids} - - def discover_endpoint(self, request, operation_name, **kwargs): - ids = request.context.get('discovery', {}).get('identifiers') - if ids is None: - return - endpoint = self._manager.describe_endpoint( - Operation=operation_name, Identifiers=ids - ) - if endpoint is None: - logger.debug('Failed to discover and inject endpoint') - return - if not endpoint.startswith('http'): - endpoint = 'https://' + endpoint - logger.debug('Injecting discovered endpoint: %s', endpoint) - request.url = endpoint - - def handle_retries(self, request_dict, response, operation, **kwargs): - if response is None: - return None - - _, response = response - status = response.get('ResponseMetadata', {}).get('HTTPStatusCode') - error_code = response.get('Error', {}).get('Code') - if status != 421 and error_code != 'InvalidEndpointException': - return None - - context = request_dict.get('context', {}) - ids = context.get('discovery', {}).get('identifiers') - if ids is None: - return None - - # Delete the cached endpoints, forcing a refresh on retry - # TODO: Improve eviction behavior to only evict the bad endpoint if - # there are multiple. This will almost certainly require a lock. - self._manager.delete_endpoints( - Operation=operation.name, Identifiers=ids - ) - return 0 diff --git a/venv/Lib/site-packages/botocore/docs/__init__.py b/venv/Lib/site-packages/botocore/docs/__init__.py deleted file mode 100644 index 844f5de..0000000 --- a/venv/Lib/site-packages/botocore/docs/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore.docs.service import ServiceDocumenter - -DEPRECATED_SERVICE_NAMES = {'sms-voice'} - - -def generate_docs(root_dir, session): - """Generates the reference documentation for botocore - - This will go through every available AWS service and output ReSTructured - text files documenting each service. - - :param root_dir: The directory to write the reference files to. Each - service's reference documentation is loacated at - root_dir/reference/services/service-name.rst - """ - # Create the root directory where all service docs live. - services_dir_path = os.path.join(root_dir, 'reference', 'services') - if not os.path.exists(services_dir_path): - os.makedirs(services_dir_path) - - # Prevents deprecated service names from being generated in docs. - available_services = [ - service - for service in session.get_available_services() - if service not in DEPRECATED_SERVICE_NAMES - ] - - # Generate reference docs and write them out. - for service_name in available_services: - docs = ServiceDocumenter( - service_name, session, services_dir_path - ).document_service() - - # Write the main service documentation page. 
- # Path: /reference/services//index.rst - service_file_path = os.path.join( - services_dir_path, f'{service_name}.rst' - ) - with open(service_file_path, 'wb') as f: - f.write(docs) diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 71ca571..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/client.cpython-312.pyc deleted file mode 100644 index 854d76c..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/docstring.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/docstring.cpython-312.pyc deleted file mode 100644 index 363473c..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/docstring.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/example.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/example.cpython-312.pyc deleted file mode 100644 index c59e61a..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/example.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/method.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/method.cpython-312.pyc deleted file mode 100644 index a9e2e23..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/method.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/paginator.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/paginator.cpython-312.pyc deleted file mode 100644 index f7fa97a..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/paginator.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/params.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/params.cpython-312.pyc deleted file mode 100644 index 9ded756..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/params.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/service.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/service.cpython-312.pyc deleted file mode 100644 index 18fcd86..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/service.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/shape.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/shape.cpython-312.pyc deleted file mode 100644 index 051f3ac..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/shape.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/sharedexample.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/sharedexample.cpython-312.pyc deleted file mode 100644 index 926907f..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/sharedexample.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/translator.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/translator.cpython-312.pyc 
deleted file mode 100644 index 4ee5ceb..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/translator.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 9fa4793..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/__pycache__/waiter.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/__pycache__/waiter.cpython-312.pyc deleted file mode 100644 index 9da93ab..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/__pycache__/waiter.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/__init__.py b/venv/Lib/site-packages/botocore/docs/bcdoc/__init__.py deleted file mode 100644 index b687f69..0000000 --- a/venv/Lib/site-packages/botocore/docs/bcdoc/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -__version__ = '0.16.0' diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 69b31e4..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/docstringparser.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/docstringparser.cpython-312.pyc deleted file mode 100644 index 82648cb..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/docstringparser.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/restdoc.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/restdoc.cpython-312.pyc deleted file mode 100644 index 6f1560e..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/restdoc.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/style.cpython-312.pyc b/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/style.cpython-312.pyc deleted file mode 100644 index a5df2bb..0000000 Binary files a/venv/Lib/site-packages/botocore/docs/bcdoc/__pycache__/style.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/docstringparser.py b/venv/Lib/site-packages/botocore/docs/bcdoc/docstringparser.py deleted file mode 100644 index ebe16ba..0000000 --- a/venv/Lib/site-packages/botocore/docs/bcdoc/docstringparser.py +++ /dev/null @@ -1,315 +0,0 @@ -# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. 
A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from html.parser import HTMLParser -from itertools import zip_longest - -PRIORITY_PARENT_TAGS = ('code', 'a') -OMIT_NESTED_TAGS = ('span', 'i', 'code', 'a') -OMIT_SELF_TAGS = ('i', 'b') -HTML_BLOCK_DISPLAY_TAGS = ('p', 'note', 'ul', 'li') - - -class DocStringParser(HTMLParser): - """ - A simple HTML parser. Focused on converting the subset of HTML - that appears in the documentation strings of the JSON models into - simple ReST format. - """ - - def __init__(self, doc): - self.tree = None - self.doc = doc - super().__init__() - - def reset(self): - HTMLParser.reset(self) - self.tree = HTMLTree(self.doc) - - def feed(self, data): - super().feed(data) - self.tree.write() - self.tree = HTMLTree(self.doc) - - def close(self): - super().close() - # Write if there is anything remaining. - self.tree.write() - self.tree = HTMLTree(self.doc) - - def handle_starttag(self, tag, attrs): - self.tree.add_tag(tag, attrs=attrs) - - def handle_endtag(self, tag): - self.tree.add_tag(tag, is_start=False) - - def handle_data(self, data): - self.tree.add_data(data) - - -class HTMLTree: - """ - A tree which handles HTML nodes. Designed to work with a python HTML parser, - meaning that the current_node will be the most recently opened tag. When - a tag is closed, the current_node moves up to the parent node. - """ - - def __init__(self, doc): - self.doc = doc - self.head = StemNode() - self.current_node = self.head - self.unhandled_tags = [] - - def add_tag(self, tag, attrs=None, is_start=True): - if not self._doc_has_handler(tag, is_start): - self.unhandled_tags.append(tag) - return - - if is_start: - node = TagNode(tag, attrs) - self.current_node.add_child(node) - self.current_node = node - else: - self.current_node = self.current_node.parent - - def _doc_has_handler(self, tag, is_start): - if is_start: - handler_name = f'start_{tag}' - else: - handler_name = f'end_{tag}' - - return hasattr(self.doc.style, handler_name) - - def add_data(self, data): - self.current_node.add_child(DataNode(data)) - - def write(self): - self.head.write(self.doc) - - -class Node: - def __init__(self, parent=None): - self.parent = parent - - def write(self, doc): - raise NotImplementedError - - -class StemNode(Node): - def __init__(self, parent=None): - super().__init__(parent) - self.children = [] - - def add_child(self, child): - child.parent = self - self.children.append(child) - - def write(self, doc): - self.collapse_whitespace() - self._write_children(doc) - - def _write_children(self, doc): - for child, next_child in zip_longest(self.children, self.children[1:]): - if isinstance(child, TagNode) and next_child is not None: - child.write(doc, next_child) - else: - child.write(doc) - - def is_whitespace(self): - return all(child.is_whitespace() for child in self.children) - - def startswith_whitespace(self): - return self.children and self.children[0].startswith_whitespace() - - def endswith_whitespace(self): - return self.children and self.children[-1].endswith_whitespace() - - def lstrip(self): - while self.children and self.children[0].is_whitespace(): - self.children = self.children[1:] - if self.children: - self.children[0].lstrip() - - def rstrip(self): - while 
self.children and self.children[-1].is_whitespace(): - self.children = self.children[:-1] - if self.children: - self.children[-1].rstrip() - - def collapse_whitespace(self): - """Remove collapsible white-space from HTML. - - HTML in docstrings often contains extraneous white-space around tags, - for readability. Browsers would collapse this white-space before - rendering. If not removed before conversion to RST where white-space is - part of the syntax, for example for indentation, it can result in - incorrect output. - """ - self.lstrip() - self.rstrip() - for child in self.children: - child.collapse_whitespace() - - -class TagNode(StemNode): - """ - A generic Tag node. It will verify that handlers exist before writing. - """ - - def __init__(self, tag, attrs=None, parent=None): - super().__init__(parent) - self.attrs = attrs - self.tag = tag - - def _has_nested_tags(self): - # Returns True if any children are TagNodes and False otherwise. - return any(isinstance(child, TagNode) for child in self.children) - - def write(self, doc, next_child=None): - prioritize_nested_tags = ( - self.tag in OMIT_SELF_TAGS and self._has_nested_tags() - ) - prioritize_parent_tag = ( - isinstance(self.parent, TagNode) - and self.parent.tag in PRIORITY_PARENT_TAGS - and self.tag in OMIT_NESTED_TAGS - ) - if prioritize_nested_tags or prioritize_parent_tag: - self._write_children(doc) - return - - self._write_start(doc) - self._write_children(doc) - self._write_end(doc, next_child) - - def collapse_whitespace(self): - """Remove collapsible white-space. - - All tags collapse internal whitespace. Block-display HTML tags also - strip all leading and trailing whitespace. - - Approximately follows the specification used in browsers: - https://www.w3.org/TR/css-text-3/#white-space-rules - https://developer.mozilla.org/en-US/docs/Web/API/Document_Object_Model/Whitespace - """ - if self.tag in HTML_BLOCK_DISPLAY_TAGS: - self.lstrip() - self.rstrip() - # Collapse whitespace in situations like `` foo`` into - # `` foo``. - for prev, cur in zip(self.children[:-1], self.children[1:]): - if ( - isinstance(prev, DataNode) - and prev.endswith_whitespace() - and cur.startswith_whitespace() - ): - cur.lstrip() - # Same logic, but for situations like ``bar ``: - for cur, nxt in zip(self.children[:-1], self.children[1:]): - if ( - isinstance(nxt, DataNode) - and cur.endswith_whitespace() - and nxt.startswith_whitespace() - ): - cur.rstrip() - # Recurse into children - for child in self.children: - child.collapse_whitespace() - - def _write_start(self, doc): - handler_name = f'start_{self.tag}' - if hasattr(doc.style, handler_name): - getattr(doc.style, handler_name)(self.attrs) - - def _write_end(self, doc, next_child): - handler_name = f'end_{self.tag}' - if hasattr(doc.style, handler_name): - if handler_name == 'end_a': - # We use lookahead to determine if a space is needed after a link node - getattr(doc.style, handler_name)(next_child) - else: - getattr(doc.style, handler_name)() - - -class DataNode(Node): - """ - A Node that contains only string data. 
- """ - - def __init__(self, data, parent=None): - super().__init__(parent) - if not isinstance(data, str): - raise ValueError(f"Expecting string type, {type(data)} given.") - self._leading_whitespace = '' - self._trailing_whitespace = '' - self._stripped_data = '' - if data == '': - return - if data.isspace(): - self._trailing_whitespace = data - return - first_non_space = next( - idx for idx, ch in enumerate(data) if not ch.isspace() - ) - last_non_space = len(data) - next( - idx for idx, ch in enumerate(reversed(data)) if not ch.isspace() - ) - self._leading_whitespace = data[:first_non_space] - self._trailing_whitespace = data[last_non_space:] - self._stripped_data = data[first_non_space:last_non_space] - - @property - def data(self): - return ( - f'{self._leading_whitespace}{self._stripped_data}' - f'{self._trailing_whitespace}' - ) - - def is_whitespace(self): - return self._stripped_data == '' and ( - self._leading_whitespace != '' or self._trailing_whitespace != '' - ) - - def startswith_whitespace(self): - return self._leading_whitespace != '' or ( - self._stripped_data == '' and self._trailing_whitespace != '' - ) - - def endswith_whitespace(self): - return self._trailing_whitespace != '' or ( - self._stripped_data == '' and self._leading_whitespace != '' - ) - - def lstrip(self): - if self._leading_whitespace != '': - self._leading_whitespace = '' - elif self._stripped_data == '': - self.rstrip() - - def rstrip(self): - if self._trailing_whitespace != '': - self._trailing_whitespace = '' - elif self._stripped_data == '': - self.lstrip() - - def collapse_whitespace(self): - """Noop, ``DataNode.write`` always collapses whitespace""" - return - - def write(self, doc): - words = doc.translate_words(self._stripped_data.split()) - str_data = ( - f'{self._leading_whitespace}{" ".join(words)}' - f'{self._trailing_whitespace}' - ) - if str_data != '': - doc.handle_data(str_data) diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/restdoc.py b/venv/Lib/site-packages/botocore/docs/bcdoc/restdoc.py deleted file mode 100644 index 3868126..0000000 --- a/venv/Lib/site-packages/botocore/docs/bcdoc/restdoc.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import logging -import os -import re - -from botocore.compat import OrderedDict -from botocore.docs.bcdoc.docstringparser import DocStringParser -from botocore.docs.bcdoc.style import ReSTStyle - -DEFAULT_AWS_DOCS_LINK = 'https://docs.aws.amazon.com/index.html' -DOCUMENTATION_LINK_REGEX = re.compile( - r'`AWS API Documentation ' - r'`_' -) -LARGE_SECTION_MESSAGE = """ - - **{}** - :: - - # This section is too large to render. - # Please see the AWS API Documentation linked below. 
- - {} - """ -LOG = logging.getLogger('bcdocs') -SECTION_LINE_LIMIT_CONFIG = { - 'response-example': {'name': 'Response Syntax', 'line_limit': 1500}, - 'description': {'name': 'Response Structure', 'line_limit': 5000}, - 'request-example': {'name': 'Request Syntax', 'line_limit': 1500}, - 'request-params': {'name': 'Parameters', 'line_limit': 5000}, -} -SECTION_METHOD_PATH_DEPTH = { - 'client-api': 4, - 'paginator-api': 3, - 'waiter-api': 3, -} - - -class ReSTDocument: - def __init__(self, target='man'): - self.style = ReSTStyle(self) - self.target = target - self.parser = DocStringParser(self) - self.keep_data = True - self.do_translation = False - self.translation_map = {} - self.hrefs = {} - self._writes = [] - self._last_doc_string = None - - def _write(self, s): - if self.keep_data and s is not None: - self._writes.append(s) - - def write(self, content): - """ - Write content into the document. - """ - self._write(content) - - def writeln(self, content): - """ - Write content on a newline. - """ - self._write(f'{self.style.spaces()}{content}\n') - - def peek_write(self): - """ - Returns the last content written to the document without - removing it from the stack. - """ - return self._writes[-1] - - def pop_write(self): - """ - Removes and returns the last content written to the stack. - """ - return self._writes.pop() if len(self._writes) > 0 else None - - def push_write(self, s): - """ - Places new content on the stack. - """ - self._writes.append(s) - - def getvalue(self): - """ - Returns the current content of the document as a string. - """ - if self.hrefs: - self.style.new_paragraph() - for refname, link in self.hrefs.items(): - self.style.link_target_definition(refname, link) - return ''.join(self._writes).encode('utf-8') - - def translate_words(self, words): - return [self.translation_map.get(w, w) for w in words] - - def handle_data(self, data): - if data and self.keep_data: - self._write(data) - - def include_doc_string(self, doc_string): - if doc_string: - try: - start = len(self._writes) - self.parser.feed(doc_string) - self.parser.close() - end = len(self._writes) - self._last_doc_string = (start, end) - except Exception: - LOG.debug('Error parsing doc string', exc_info=True) - LOG.debug(doc_string) - - def remove_last_doc_string(self): - # Removes all writes inserted by last doc string - if self._last_doc_string is not None: - start, end = self._last_doc_string - del self._writes[start:end] - - -class DocumentStructure(ReSTDocument): - def __init__(self, name, section_names=None, target='man', context=None): - """Provides a Hierarichial structure to a ReSTDocument - - You can write to it similiar to as you can to a ReSTDocument but - has an innate structure for more orginaztion and abstraction. - - :param name: The name of the document - :param section_names: A list of sections to be included - in the document. - :param target: The target documentation of the Document structure - :param context: A dictionary of data to store with the strucuture. These - are only stored per section not the entire structure. 
- """ - super().__init__(target=target) - self._name = name - self._structure = OrderedDict() - self._path = [self._name] - self._context = {} - if context is not None: - self._context = context - if section_names is not None: - self._generate_structure(section_names) - - @property - def name(self): - """The name of the document structure""" - return self._name - - @property - def path(self): - """ - A list of where to find a particular document structure in the - overlying document structure. - """ - return self._path - - @path.setter - def path(self, value): - self._path = value - - @property - def available_sections(self): - return list(self._structure) - - @property - def context(self): - return self._context - - def _generate_structure(self, section_names): - for section_name in section_names: - self.add_new_section(section_name) - - def add_new_section(self, name, context=None): - """Adds a new section to the current document structure - - This document structure will be considered a section to the - current document structure but will in itself be an entirely - new document structure that can be written to and have sections - as well - - :param name: The name of the section. - :param context: A dictionary of data to store with the strucuture. These - are only stored per section not the entire structure. - :rtype: DocumentStructure - :returns: A new document structure to add to but lives as a section - to the document structure it was instantiated from. - """ - # Add a new section - section = self.__class__( - name=name, target=self.target, context=context - ) - section.path = self.path + [name] - # Indent the section apporpriately as well - section.style.indentation = self.style.indentation - section.translation_map = self.translation_map - section.hrefs = self.hrefs - self._structure[name] = section - return section - - def get_section(self, name): - """Retrieve a section""" - return self._structure[name] - - def has_section(self, name): - return name in self._structure - - def delete_section(self, name): - """Delete a section""" - del self._structure[name] - - def flush_structure(self, docs_link=None): - """Flushes a doc structure to a ReSTructed string - - The document is flushed out in a DFS style where sections and their - subsections' values are added to the string as they are visited. - """ - # We are at the root flush the links at the beginning of the - # document - path_length = len(self.path) - if path_length == 1: - if self.hrefs: - self.style.new_paragraph() - for refname, link in self.hrefs.items(): - self.style.link_target_definition(refname, link) - # Clear docs_link at the correct depth to prevent passing a non-related link. - elif path_length == SECTION_METHOD_PATH_DEPTH.get(self.path[1]): - docs_link = None - value = self.getvalue() - for name, section in self._structure.items(): - # Checks is the AWS API Documentation link has been generated. - # If it has been generated, it gets passed as a the doc_link parameter. - match = DOCUMENTATION_LINK_REGEX.search(value.decode()) - docs_link = ( - f'{match.group(0)}\n\n'.encode() if match else docs_link - ) - value += section.flush_structure(docs_link) - - # Replace response/request sections if the line number exceeds our limit. - # The section is replaced with a message linking to AWS API Documentation. 
- line_count = len(value.splitlines()) - section_config = SECTION_LINE_LIMIT_CONFIG.get(self.name) - aws_docs_link = ( - docs_link.decode() - if docs_link is not None - else DEFAULT_AWS_DOCS_LINK - ) - if section_config and line_count > section_config['line_limit']: - value = LARGE_SECTION_MESSAGE.format( - section_config['name'], aws_docs_link - ).encode() - return value - - def getvalue(self): - return ''.join(self._writes).encode('utf-8') - - def remove_all_sections(self): - self._structure = OrderedDict() - - def clear_text(self): - self._writes = [] - - def add_title_section(self, title): - title_section = self.add_new_section('title') - title_section.style.h1(title) - return title_section - - def write_to_file(self, full_path, file_name): - if not os.path.exists(full_path): - os.makedirs(full_path) - sub_resource_file_path = os.path.join(full_path, f'{file_name}.rst') - with open(sub_resource_file_path, 'wb') as f: - f.write(self.flush_structure()) diff --git a/venv/Lib/site-packages/botocore/docs/bcdoc/style.py b/venv/Lib/site-packages/botocore/docs/bcdoc/style.py deleted file mode 100644 index 205d238..0000000 --- a/venv/Lib/site-packages/botocore/docs/bcdoc/style.py +++ /dev/null @@ -1,447 +0,0 @@ -# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import logging - -logger = logging.getLogger('bcdocs') -# Terminal punctuation where a space is not needed before. -PUNCTUATION_CHARACTERS = ('.', ',', '?', '!', ':', ';') - - -class BaseStyle: - def __init__(self, doc, indent_width=2): - self.doc = doc - self.indent_width = indent_width - self._indent = 0 - self.keep_data = True - - @property - def indentation(self): - return self._indent - - @indentation.setter - def indentation(self, value): - self._indent = value - - def new_paragraph(self): - return f'\n{self.spaces()}' - - def indent(self): - self._indent += 1 - - def dedent(self): - if self._indent > 0: - self._indent -= 1 - - def spaces(self): - return ' ' * (self._indent * self.indent_width) - - def bold(self, s): - return s - - def ref(self, link, title=None): - return link - - def h2(self, s): - return s - - def h3(self, s): - return s - - def underline(self, s): - return s - - def italics(self, s): - return s - - def add_trailing_space_to_previous_write(self): - # Adds a trailing space if none exists. This is mainly used for - # ensuring inline code and links are separated from surrounding text. 
- last_write = self.doc.pop_write() - if last_write is None: - last_write = '' - if last_write != '' and last_write[-1] != ' ': - last_write += ' ' - self.doc.push_write(last_write) - - -class ReSTStyle(BaseStyle): - def __init__(self, doc, indent_width=2): - BaseStyle.__init__(self, doc, indent_width) - self.do_p = True - self.a_href = None - self.list_depth = 0 - - def new_paragraph(self): - self.doc.write(f'\n\n{self.spaces()}') - - def new_line(self): - self.doc.write(f'\n{self.spaces()}') - - def _start_inline(self, markup): - # Insert space between any directly adjacent bold and italic inlines to - # avoid situations like ``**abc***def*``. - try: - last_write = self.doc.peek_write() - except IndexError: - pass - else: - if last_write in ('*', '**') and markup in ('*', '**'): - self.doc.write(' ') - self.doc.write(markup) - - def _end_inline(self, markup): - # Remove empty and self-closing tags like ```` and ````. - # If we simply translate that directly then we end up with something - # like ****, which rst will assume is a heading instead of an empty - # bold. - last_write = self.doc.pop_write() - if last_write == markup: - return - self.doc.push_write(last_write) - self.doc.write(markup) - - def start_bold(self, attrs=None): - self._start_inline('**') - - def end_bold(self): - self._end_inline('**') - - def start_b(self, attrs=None): - self.doc.do_translation = True - self.start_bold(attrs) - - def end_b(self): - self.doc.do_translation = False - self.end_bold() - - def bold(self, s): - if s: - self.start_bold() - self.doc.write(s) - self.end_bold() - - def ref(self, title, link=None): - if link is None: - link = title - self.doc.write(f':doc:`{title} <{link}>`') - - def _heading(self, s, border_char): - border = border_char * len(s) - self.new_paragraph() - self.doc.write(f'{border}\n{s}\n{border}') - self.new_paragraph() - - def h1(self, s): - self._heading(s, '*') - - def h2(self, s): - self._heading(s, '=') - - def h3(self, s): - self._heading(s, '-') - - def start_italics(self, attrs=None): - self._start_inline('*') - - def end_italics(self): - self._end_inline('*') - - def italics(self, s): - if s: - self.start_italics() - self.doc.write(s) - self.end_italics() - - def start_p(self, attrs=None): - if self.do_p: - self.doc.write(f'\n\n{self.spaces()}') - - def end_p(self): - if self.do_p: - self.doc.write(f'\n\n{self.spaces()}') - - def start_code(self, attrs=None): - self.doc.do_translation = True - self.add_trailing_space_to_previous_write() - self._start_inline('``') - - def end_code(self): - self.doc.do_translation = False - self._end_inline('``') - - def code(self, s): - if s: - self.start_code() - self.doc.write(s) - self.end_code() - - def start_note(self, attrs=None): - self.new_paragraph() - self.doc.write('.. note::') - self.indent() - self.new_paragraph() - - def end_note(self): - self.dedent() - self.new_paragraph() - - def start_important(self, attrs=None): - self.new_paragraph() - self.doc.write('.. warning::') - self.indent() - self.new_paragraph() - - def end_important(self): - self.dedent() - self.new_paragraph() - - def start_danger(self, attrs=None): - self.new_paragraph() - self.doc.write('.. danger::') - self.indent() - self.new_paragraph() - - def end_danger(self): - self.dedent() - self.new_paragraph() - - def start_a(self, attrs=None): - # Write an empty space to guard against zero whitespace - # before an "a" tag. 
Example: hiExample - self.add_trailing_space_to_previous_write() - if attrs: - for attr_key, attr_value in attrs: - if attr_key == 'href': - # Removes unnecessary whitespace around the href link. - # Example: Example - self.a_href = attr_value.strip() - self.doc.write('`') - else: - # There are some model documentation that - # looks like this: DescribeInstances. - # In this case we just write out an empty - # string. - self.doc.write(' ') - self.doc.do_translation = True - - def link_target_definition(self, refname, link): - self.doc.writeln(f'.. _{refname}: {link}') - - def sphinx_reference_label(self, label, text=None): - if text is None: - text = label - if self.doc.target == 'html': - self.doc.write(f':ref:`{text} <{label}>`') - else: - self.doc.write(text) - - def _clean_link_text(self): - doc = self.doc - # Pop till we reach the link start character to retrieve link text. - last_write = doc.pop_write() - while not last_write.startswith('`'): - last_write = doc.pop_write() + last_write - if last_write != '': - # Remove whitespace from the start of link text. - if last_write.startswith('` '): - last_write = f'`{last_write[1:].lstrip(" ")}' - doc.push_write(last_write) - - def end_a(self, next_child=None): - self.doc.do_translation = False - if self.a_href: - self._clean_link_text() - last_write = self.doc.pop_write() - last_write = last_write.rstrip(' ') - if last_write and last_write != '`': - if ':' in last_write: - last_write = last_write.replace(':', r'\:') - self.doc.push_write(last_write) - self.doc.push_write(f' <{self.a_href}>`__') - elif last_write == '`': - # Look at start_a(). It will do a self.doc.write('`') - # which is the start of the link title. If that is the - # case then there was no link text. We should just - # use an inline link. The syntax of this is - # ``_ - self.doc.push_write(f'`<{self.a_href}>`__') - else: - self.doc.push_write(self.a_href) - self.doc.hrefs[self.a_href] = self.a_href - self.doc.write('`__') - self.a_href = None - - def start_i(self, attrs=None): - self.doc.do_translation = True - self.start_italics() - - def end_i(self): - self.doc.do_translation = False - self.end_italics() - - def start_li(self, attrs=None): - self.new_line() - self.do_p = False - self.doc.write('* ') - - def end_li(self): - self.do_p = True - self.new_line() - - def li(self, s): - if s: - self.start_li() - self.doc.writeln(s) - self.end_li() - - def start_ul(self, attrs=None): - if self.list_depth != 0: - self.indent() - self.list_depth += 1 - self.new_paragraph() - - def end_ul(self): - self.list_depth -= 1 - if self.list_depth != 0: - self.dedent() - self.new_paragraph() - - def start_ol(self, attrs=None): - # TODO: Need to control the bullets used for LI items - if self.list_depth != 0: - self.indent() - self.list_depth += 1 - self.new_paragraph() - - def end_ol(self): - self.list_depth -= 1 - if self.list_depth != 0: - self.dedent() - self.new_paragraph() - - def start_examples(self, attrs=None): - self.doc.keep_data = False - - def end_examples(self): - self.doc.keep_data = True - - def start_fullname(self, attrs=None): - self.doc.keep_data = False - - def end_fullname(self): - self.doc.keep_data = True - - def start_codeblock(self, attrs=None): - self.doc.write('::') - self.indent() - self.new_paragraph() - - def end_codeblock(self): - self.dedent() - self.new_paragraph() - - def codeblock(self, code): - """ - Literal code blocks are introduced by ending a paragraph with - the special marker ::. 
The literal block must be indented - (and, like all paragraphs, separated from the surrounding - ones by blank lines). - """ - self.start_codeblock() - self.doc.writeln(code) - self.end_codeblock() - - def toctree(self): - if self.doc.target == 'html': - self.doc.write('\n.. toctree::\n') - self.doc.write(' :maxdepth: 1\n') - self.doc.write(' :titlesonly:\n\n') - else: - self.start_ul() - - def tocitem(self, item, file_name=None): - if self.doc.target == 'man': - self.li(item) - else: - if file_name: - self.doc.writeln(f' {file_name}') - else: - self.doc.writeln(f' {item}') - - def hidden_toctree(self): - if self.doc.target == 'html': - self.doc.write('\n.. toctree::\n') - self.doc.write(' :maxdepth: 1\n') - self.doc.write(' :hidden:\n\n') - - def hidden_tocitem(self, item): - if self.doc.target == 'html': - self.tocitem(item) - - def table_of_contents(self, title=None, depth=None): - self.doc.write('.. contents:: ') - if title is not None: - self.doc.writeln(title) - if depth is not None: - self.doc.writeln(f' :depth: {depth}') - - def start_sphinx_py_class(self, class_name): - self.new_paragraph() - self.doc.write(f'.. py:class:: {class_name}') - self.indent() - self.new_paragraph() - - def end_sphinx_py_class(self): - self.dedent() - self.new_paragraph() - - def start_sphinx_py_method(self, method_name, parameters=None): - self.new_paragraph() - content = f'.. py:method:: {method_name}' - if parameters is not None: - content += f'({parameters})' - self.doc.write(content) - self.indent() - self.new_paragraph() - - def end_sphinx_py_method(self): - self.dedent() - self.new_paragraph() - - def start_sphinx_py_attr(self, attr_name): - self.new_paragraph() - self.doc.write(f'.. py:attribute:: {attr_name}') - self.indent() - self.new_paragraph() - - def end_sphinx_py_attr(self): - self.dedent() - self.new_paragraph() - - def write_py_doc_string(self, docstring): - docstring_lines = docstring.splitlines() - for docstring_line in docstring_lines: - self.doc.writeln(docstring_line) - - def external_link(self, title, link): - if self.doc.target == 'html': - self.doc.write(f'`{title} <{link}>`_') - else: - self.doc.write(title) - - def internal_link(self, title, page): - if self.doc.target == 'html': - self.doc.write(f':doc:`{title} <{page}>`') - else: - self.doc.write(title) diff --git a/venv/Lib/site-packages/botocore/docs/client.py b/venv/Lib/site-packages/botocore/docs/client.py deleted file mode 100644 index 41e3742..0000000 --- a/venv/Lib/site-packages/botocore/docs/client.py +++ /dev/null @@ -1,453 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-import os - -from botocore import xform_name -from botocore.compat import OrderedDict -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.example import ResponseExampleDocumenter -from botocore.docs.method import ( - document_custom_method, - document_model_driven_method, - get_instance_public_methods, -) -from botocore.docs.params import ResponseParamsDocumenter -from botocore.docs.sharedexample import document_shared_examples -from botocore.docs.utils import DocumentedShape, get_official_service_name - - -def _allowlist_generate_presigned_url(method_name, service_name, **kwargs): - if method_name != 'generate_presigned_url': - return None - return service_name in ['s3'] - - -class ClientDocumenter: - _CLIENT_METHODS_FILTERS = [ - _allowlist_generate_presigned_url, - ] - - def __init__(self, client, root_docs_path, shared_examples=None): - self._client = client - self._client_class_name = self._client.__class__.__name__ - self._root_docs_path = root_docs_path - self._shared_examples = shared_examples - if self._shared_examples is None: - self._shared_examples = {} - self._service_name = self._client.meta.service_model.service_name - - def document_client(self, section): - """Documents a client and its methods - - :param section: The section to write to. - """ - self._add_title(section) - self._add_class_signature(section) - client_methods = self._get_client_methods() - self._add_client_intro(section, client_methods) - self._add_client_methods(client_methods) - - def _get_client_methods(self): - client_methods = get_instance_public_methods(self._client) - return self._filter_client_methods(client_methods) - - def _filter_client_methods(self, client_methods): - filtered_methods = {} - for method_name, method in client_methods.items(): - include = self._filter_client_method( - method=method, - method_name=method_name, - service_name=self._service_name, - ) - if include: - filtered_methods[method_name] = method - return filtered_methods - - def _filter_client_method(self, **kwargs): - # Apply each filter to the method - for filter in self._CLIENT_METHODS_FILTERS: - filter_include = filter(**kwargs) - # Use the first non-None value returned by any of the filters - if filter_include is not None: - return filter_include - # Otherwise default to including it - return True - - def _add_title(self, section): - section.style.h2('Client') - - def _add_client_intro(self, section, client_methods): - section = section.add_new_section('intro') - # Write out the top level description for the client. - official_service_name = get_official_service_name( - self._client.meta.service_model - ) - section.write( - f"A low-level client representing {official_service_name}" - ) - section.style.new_line() - section.include_doc_string( - self._client.meta.service_model.documentation - ) - - # Write out the client example instantiation. - self._add_client_creation_example(section) - - # List out all of the possible client methods. 
- section.style.dedent() - section.style.new_paragraph() - section.writeln('These are the available methods:') - section.style.toctree() - for method_name in sorted(client_methods): - section.style.tocitem(f'{self._service_name}/client/{method_name}') - - def _add_class_signature(self, section): - section.style.start_sphinx_py_class( - class_name=f'{self._client_class_name}.Client' - ) - - def _add_client_creation_example(self, section): - section.style.start_codeblock() - section.style.new_line() - section.write( - f'client = session.create_client(\'{self._service_name}\')' - ) - section.style.end_codeblock() - - def _add_client_methods(self, client_methods): - for method_name in sorted(client_methods): - # Create a new DocumentStructure for each client method and add contents. - method_doc_structure = DocumentStructure( - method_name, target='html' - ) - self._add_client_method( - method_doc_structure, method_name, client_methods[method_name] - ) - # Write client methods in individual/nested files. - # Path: /reference/services//client/.rst - client_dir_path = os.path.join( - self._root_docs_path, self._service_name, 'client' - ) - method_doc_structure.write_to_file(client_dir_path, method_name) - - def _add_client_method(self, section, method_name, method): - breadcrumb_section = section.add_new_section('breadcrumb') - breadcrumb_section.style.ref( - self._client_class_name, f'../../{self._service_name}' - ) - breadcrumb_section.write(f' / Client / {method_name}') - section.add_title_section(method_name) - method_section = section.add_new_section( - method_name, - context={'qualifier': f'{self._client_class_name}.Client.'}, - ) - if self._is_custom_method(method_name): - self._add_custom_method( - method_section, - method_name, - method, - ) - else: - self._add_model_driven_method(method_section, method_name) - - def _is_custom_method(self, method_name): - return method_name not in self._client.meta.method_to_api_mapping - - def _add_custom_method(self, section, method_name, method): - document_custom_method(section, method_name, method) - - def _add_method_exceptions_list(self, section, operation_model): - error_section = section.add_new_section('exceptions') - error_section.style.new_line() - error_section.style.bold('Exceptions') - error_section.style.new_line() - for error in operation_model.error_shapes: - class_name = ( - f'{self._client_class_name}.Client.exceptions.{error.name}' - ) - error_section.style.li(f':py:class:`{class_name}`') - - def _add_model_driven_method(self, section, method_name): - service_model = self._client.meta.service_model - operation_name = self._client.meta.method_to_api_mapping[method_name] - operation_model = service_model.operation_model(operation_name) - - example_prefix = f'response = client.{method_name}' - full_method_name = ( - f"{section.context.get('qualifier', '')}{method_name}" - ) - document_model_driven_method( - section, - full_method_name, - operation_model, - event_emitter=self._client.meta.events, - method_description=operation_model.documentation, - example_prefix=example_prefix, - ) - - # Add any modeled exceptions - if operation_model.error_shapes: - self._add_method_exceptions_list(section, operation_model) - - # Add the shared examples - shared_examples = self._shared_examples.get(operation_name) - if shared_examples: - document_shared_examples( - section, operation_model, example_prefix, shared_examples - ) - - -class ClientExceptionsDocumenter: - _USER_GUIDE_LINK = ( - 'https://boto3.amazonaws.com/' - 
'v1/documentation/api/latest/guide/error-handling.html' - ) - _GENERIC_ERROR_SHAPE = DocumentedShape( - name='Error', - type_name='structure', - documentation=('Normalized access to common exception attributes.'), - members=OrderedDict( - [ - ( - 'Code', - DocumentedShape( - name='Code', - type_name='string', - documentation=( - 'An identifier specifying the exception type.' - ), - ), - ), - ( - 'Message', - DocumentedShape( - name='Message', - type_name='string', - documentation=( - 'A descriptive message explaining why the exception ' - 'occured.' - ), - ), - ), - ] - ), - ) - - def __init__(self, client, root_docs_path): - self._client = client - self._client_class_name = self._client.__class__.__name__ - self._service_name = self._client.meta.service_model.service_name - self._root_docs_path = root_docs_path - - def document_exceptions(self, section): - self._add_title(section) - self._add_overview(section) - self._add_exceptions_list(section) - self._add_exception_classes() - - def _add_title(self, section): - section.style.h2('Client Exceptions') - - def _add_overview(self, section): - section.style.new_line() - section.write( - 'Client exceptions are available on a client instance ' - 'via the ``exceptions`` property. For more detailed instructions ' - 'and examples on the exact usage of client exceptions, see the ' - 'error handling ' - ) - section.style.external_link( - title='user guide', - link=self._USER_GUIDE_LINK, - ) - section.write('.') - section.style.new_line() - - def _exception_class_name(self, shape): - return f'{self._client_class_name}.Client.exceptions.{shape.name}' - - def _add_exceptions_list(self, section): - error_shapes = self._client.meta.service_model.error_shapes - if not error_shapes: - section.style.new_line() - section.write('This client has no modeled exception classes.') - section.style.new_line() - return - section.style.new_line() - section.writeln('The available client exceptions are:') - section.style.toctree() - for shape in error_shapes: - section.style.tocitem( - f'{self._service_name}/client/exceptions/{shape.name}' - ) - - def _add_exception_classes(self): - for shape in self._client.meta.service_model.error_shapes: - # Create a new DocumentStructure for each exception method and add contents. - exception_doc_structure = DocumentStructure( - shape.name, target='html' - ) - self._add_exception_class(exception_doc_structure, shape) - # Write exceptions in individual/nested files. 
- # Path: /reference/services//client/exceptions/.rst - exception_dir_path = os.path.join( - self._root_docs_path, - self._service_name, - 'client', - 'exceptions', - ) - exception_doc_structure.write_to_file( - exception_dir_path, shape.name - ) - - def _add_exception_class(self, section, shape): - breadcrumb_section = section.add_new_section('breadcrumb') - breadcrumb_section.style.ref( - self._client_class_name, f'../../../{self._service_name}' - ) - breadcrumb_section.write(f' / Client / exceptions / {shape.name}') - section.add_title_section(shape.name) - class_section = section.add_new_section(shape.name) - class_name = self._exception_class_name(shape) - class_section.style.start_sphinx_py_class(class_name=class_name) - self._add_top_level_documentation(class_section, shape) - self._add_exception_catch_example(class_section, shape) - self._add_response_attr(class_section, shape) - class_section.style.end_sphinx_py_class() - - def _add_top_level_documentation(self, section, shape): - if shape.documentation: - section.style.new_line() - section.include_doc_string(shape.documentation) - section.style.new_line() - - def _add_exception_catch_example(self, section, shape): - section.style.new_line() - section.style.bold('Example') - section.style.new_paragraph() - section.style.start_codeblock() - section.write('try:') - section.style.indent() - section.style.new_line() - section.write('...') - section.style.dedent() - section.style.new_line() - section.write(f'except client.exceptions.{shape.name} as e:') - section.style.indent() - section.style.new_line() - section.write('print(e.response)') - section.style.dedent() - section.style.end_codeblock() - - def _add_response_attr(self, section, shape): - response_section = section.add_new_section('response') - response_section.style.start_sphinx_py_attr('response') - self._add_response_attr_description(response_section) - self._add_response_example(response_section, shape) - self._add_response_params(response_section, shape) - response_section.style.end_sphinx_py_attr() - - def _add_response_attr_description(self, section): - section.style.new_line() - section.include_doc_string( - 'The parsed error response. All exceptions have a top level ' - '``Error`` key that provides normalized access to common ' - 'exception atrributes. All other keys are specific to this ' - 'service or exception class.' 
- ) - section.style.new_line() - - def _add_response_example(self, section, shape): - example_section = section.add_new_section('syntax') - example_section.style.new_line() - example_section.style.bold('Syntax') - example_section.style.new_paragraph() - documenter = ResponseExampleDocumenter( - service_name=self._service_name, - operation_name=None, - event_emitter=self._client.meta.events, - ) - documenter.document_example( - example_section, - shape, - include=[self._GENERIC_ERROR_SHAPE], - ) - - def _add_response_params(self, section, shape): - params_section = section.add_new_section('Structure') - params_section.style.new_line() - params_section.style.bold('Structure') - params_section.style.new_paragraph() - documenter = ResponseParamsDocumenter( - service_name=self._service_name, - operation_name=None, - event_emitter=self._client.meta.events, - ) - documenter.document_params( - params_section, - shape, - include=[self._GENERIC_ERROR_SHAPE], - ) - - -class ClientContextParamsDocumenter: - _CONFIG_GUIDE_LINK = ( - 'https://boto3.amazonaws.com/' - 'v1/documentation/api/latest/guide/configuration.html' - ) - - OMITTED_CONTEXT_PARAMS = { - 's3': ( - 'Accelerate', - 'DisableMultiRegionAccessPoints', - 'ForcePathStyle', - 'UseArnRegion', - ), - 's3control': ('UseArnRegion',), - } - - def __init__(self, service_name, context_params): - self._service_name = service_name - self._context_params = context_params - - def document_context_params(self, section): - self._add_title(section) - self._add_overview(section) - self._add_context_params_list(section) - - def _add_title(self, section): - section.style.h2('Client Context Parameters') - - def _add_overview(self, section): - section.style.new_line() - section.write( - 'Client context parameters are configurable on a client ' - 'instance via the ``client_context_params`` parameter in the ' - '``Config`` object. For more detailed instructions and examples ' - 'on the exact usage of context params see the ' - ) - section.style.external_link( - title='configuration guide', - link=self._CONFIG_GUIDE_LINK, - ) - section.write('.') - section.style.new_line() - - def _add_context_params_list(self, section): - section.style.new_line() - sn = f'``{self._service_name}``' - section.writeln(f'The available {sn} client context params are:') - for param in self._context_params: - section.style.new_line() - name = f'``{xform_name(param.name)}``' - section.write(f'* {name} ({param.type}) - {param.documentation}') diff --git a/venv/Lib/site-packages/botocore/docs/docstring.py b/venv/Lib/site-packages/botocore/docs/docstring.py deleted file mode 100644 index 93b2e6b..0000000 --- a/venv/Lib/site-packages/botocore/docs/docstring.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
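The Client Context Parameters section above points readers at passing these flags through the client_context_params key of a Config object. A hedged sketch of that usage; the parameter name here is a placeholder, since the real names are generated per service from the model:

    import botocore.session
    from botocore.config import Config

    session = botocore.session.get_session()
    # 'example_param' is a placeholder; real names are the snake_cased
    # context params listed on the generated page for each service.
    client = session.create_client(
        's3',
        region_name='us-east-1',
        config=Config(client_context_params={'example_param': True}),
    )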
-from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.method import document_model_driven_method -from botocore.docs.paginator import document_paginate_method -from botocore.docs.waiter import document_wait_method - - -class LazyLoadedDocstring(str): - """Used for lazily loading docstrings - - You can instantiate this class and assign it to a __doc__ value. - The docstring will not be generated till accessed via __doc__ or - help(). Note that all docstring classes **must** subclass from - this class. It cannot be used directly as a docstring. - """ - - def __init__(self, *args, **kwargs): - """ - The args and kwargs are the same as the underlying document - generation function. These just get proxied to the underlying - function. - """ - super().__init__() - self._gen_args = args - self._gen_kwargs = kwargs - self._docstring = None - - def __new__(cls, *args, **kwargs): - # Needed in order to sub class from str with args and kwargs - return super().__new__(cls) - - def _write_docstring(self, *args, **kwargs): - raise NotImplementedError( - '_write_docstring is not implemented. Please subclass from ' - 'this class and provide your own _write_docstring method' - ) - - def expandtabs(self, tabsize=8): - """Expands tabs to spaces - - So this is a big hack in order to get lazy loaded docstring work - for the ``help()``. In the ``help()`` function, ``pydoc`` and - ``inspect`` are used. At some point the ``inspect.cleandoc`` - method is called. To clean the docs ``expandtabs`` is called - and that is where we override the method to generate and return the - docstrings. - """ - if self._docstring is None: - self._generate() - return self._docstring.expandtabs(tabsize) - - def __str__(self): - return self._generate() - - # __doc__ of target will use either __repr__ or __str__ of this class. - __repr__ = __str__ - - def _generate(self): - # Generate the docstring if it is not already cached. - if self._docstring is None: - self._docstring = self._create_docstring() - return self._docstring - - def _create_docstring(self): - docstring_structure = DocumentStructure('docstring', target='html') - # Call the document method function with the args and kwargs - # passed to the class. - self._write_docstring( - docstring_structure, *self._gen_args, **self._gen_kwargs - ) - return docstring_structure.flush_structure().decode('utf-8') - - -class ClientMethodDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_model_driven_method(*args, **kwargs) - - -class WaiterDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_wait_method(*args, **kwargs) - - -class PaginatorDocstring(LazyLoadedDocstring): - def _write_docstring(self, *args, **kwargs): - document_paginate_method(*args, **kwargs) diff --git a/venv/Lib/site-packages/botocore/docs/example.py b/venv/Lib/site-packages/botocore/docs/example.py deleted file mode 100644 index cb43db5..0000000 --- a/venv/Lib/site-packages/botocore/docs/example.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. 
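The LazyLoadedDocstring removed above works by subclassing str and deferring rendering until __str__ is hit or until the expandtabs() call that help() reaches via inspect.cleandoc(). A standalone sketch of the same pattern, using generic names rather than botocore's:

    class LazyDocstring(str):
        """Sketch of the lazy-docstring pattern: text is built on first use."""

        def __new__(cls, *args, **kwargs):
            # Ignore the generator args so the underlying str stays empty.
            return super().__new__(cls)

        def __init__(self, generator, *args, **kwargs):
            super().__init__()
            self._generator = generator
            self._args = args
            self._kwargs = kwargs
            self._rendered = None

        def _generate(self):
            if self._rendered is None:
                self._rendered = self._generator(*self._args, **self._kwargs)
            return self._rendered

        def expandtabs(self, tabsize=8):
            # help() -> pydoc -> inspect.cleandoc() -> expandtabs(): render here.
            return self._generate().expandtabs(tabsize)

        def __str__(self):
            return self._generate()


    def build_docs(name):
        return f"Docs for {name}, generated on first access."


    def example():
        pass


    example.__doc__ = LazyDocstring(build_docs, "example")
    print(example.__doc__)  # triggers _generate() via __str__

Assigning the instance to a function's __doc__ keeps import time cheap while still giving help() the full text.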
See the License for the specific -# language governing permissions and limitations under the License. -from botocore.docs.shape import ShapeDocumenter -from botocore.docs.utils import py_default - - -class BaseExampleDocumenter(ShapeDocumenter): - def document_example( - self, section, shape, prefix=None, include=None, exclude=None - ): - """Generates an example based on a shape - - :param section: The section to write the documentation to. - - :param shape: The shape of the operation. - - :param prefix: Anything to be included before the example - - :type include: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include: The parameter shapes to include in the documentation. - - :type exclude: List of the names of the parameters to exclude. - :param exclude: The names of the parameters to exclude from - documentation. - """ - history = [] - section.style.new_line() - section.style.start_codeblock() - if prefix is not None: - section.write(prefix) - self.traverse_and_document_shape( - section=section, - shape=shape, - history=history, - include=include, - exclude=exclude, - ) - final_blank_line_section = section.add_new_section('final-blank-line') - final_blank_line_section.style.new_line() - - def document_recursive_shape(self, section, shape, **kwargs): - section.write('{\'... recursive ...\'}') - - def document_shape_default( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - py_type = self._get_special_py_default(shape) - if py_type is None: - py_type = py_default(shape.type_name) - - if self._context.get('streaming_shape') == shape: - py_type = 'StreamingBody()' - section.write(py_type) - - def document_shape_type_string( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - if 'enum' in shape.metadata: - for i, enum in enumerate(shape.metadata['enum']): - section.write(f'\'{enum}\'') - if i < len(shape.metadata['enum']) - 1: - section.write('|') - else: - self.document_shape_default(section, shape, history) - - def document_shape_type_list( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - param_shape = shape.member - list_section = section.add_new_section('list-value') - self._start_nested_param(list_section, '[') - param_section = list_section.add_new_section( - 'member', context={'shape': param_shape.name} - ) - self.traverse_and_document_shape( - section=param_section, shape=param_shape, history=history - ) - ending_comma_section = list_section.add_new_section('ending-comma') - ending_comma_section.write(',') - ending_bracket_section = list_section.add_new_section('ending-bracket') - self._end_nested_param(ending_bracket_section, ']') - - def document_shape_type_structure( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - if not shape.members: - section.write('{}') - return - - section = section.add_new_section('structure-value') - self._start_nested_param(section, '{') - - input_members = self._add_members_to_shape(shape.members, include) - - for i, param in enumerate(input_members): - if exclude and param in exclude: - continue - param_section = section.add_new_section(param) - param_section.write(f'\'{param}\': ') - param_shape = input_members[param] - param_value_section = param_section.add_new_section( - 'member-value', context={'shape': param_shape.name} - ) - self.traverse_and_document_shape( - section=param_value_section, - shape=param_shape, - history=history, - name=param, - ) - if i < len(input_members) - 1: - 
ending_comma_section = param_section.add_new_section( - 'ending-comma' - ) - ending_comma_section.write(',') - ending_comma_section.style.new_line() - self._end_structure(section, '{', '}') - - def document_shape_type_map( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - map_section = section.add_new_section('map-value') - self._start_nested_param(map_section, '{') - value_shape = shape.value - key_section = map_section.add_new_section( - 'key', context={'shape': shape.key.name} - ) - key_section.write('\'string\': ') - value_section = map_section.add_new_section( - 'value', context={'shape': value_shape.name} - ) - self.traverse_and_document_shape( - section=value_section, shape=value_shape, history=history - ) - end_bracket_section = map_section.add_new_section('ending-bracket') - self._end_nested_param(end_bracket_section, '}') - - def _add_members_to_shape(self, members, include): - if include: - members = members.copy() - for param in include: - members[param.name] = param - return members - - def _start_nested_param(self, section, start=None): - if start is not None: - section.write(start) - section.style.indent() - section.style.indent() - section.style.new_line() - - def _end_nested_param(self, section, end=None): - section.style.dedent() - section.style.dedent() - section.style.new_line() - if end is not None: - section.write(end) - - def _end_structure(self, section, start, end): - # If there are no members in the strucuture, then make sure the - # start and the end bracket are on the same line, by removing all - # previous text and writing the start and end. - if not section.available_sections: - section.clear_text() - section.write(start + end) - self._end_nested_param(section) - else: - end_bracket_section = section.add_new_section('ending-bracket') - self._end_nested_param(end_bracket_section, end) - - -class ResponseExampleDocumenter(BaseExampleDocumenter): - EVENT_NAME = 'response-example' - - def document_shape_type_event_stream( - self, section, shape, history, **kwargs - ): - section.write('EventStream(') - self.document_shape_type_structure(section, shape, history, **kwargs) - end_section = section.add_new_section('event-stream-end') - end_section.write(')') - - -class RequestExampleDocumenter(BaseExampleDocumenter): - EVENT_NAME = 'request-example' - - def document_shape_type_structure( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - param_format = '\'%s\'' - operator = ': ' - start = '{' - end = '}' - - if len(history) <= 1: - operator = '=' - start = '(' - end = ')' - param_format = '%s' - section = section.add_new_section('structure-value') - self._start_nested_param(section, start) - input_members = self._add_members_to_shape(shape.members, include) - - for i, param in enumerate(input_members): - if exclude and param in exclude: - continue - param_section = section.add_new_section(param) - param_section.write(param_format % param) - param_section.write(operator) - param_shape = input_members[param] - param_value_section = param_section.add_new_section( - 'member-value', context={'shape': param_shape.name} - ) - self.traverse_and_document_shape( - section=param_value_section, - shape=param_shape, - history=history, - name=param, - ) - if i < len(input_members) - 1: - ending_comma_section = param_section.add_new_section( - 'ending-comma' - ) - ending_comma_section.write(',') - ending_comma_section.style.new_line() - self._end_structure(section, start, end) diff --git 
a/venv/Lib/site-packages/botocore/docs/method.py b/venv/Lib/site-packages/botocore/docs/method.py deleted file mode 100644 index 5db906c..0000000 --- a/venv/Lib/site-packages/botocore/docs/method.py +++ /dev/null @@ -1,328 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import inspect -import types - -from botocore.docs.example import ( - RequestExampleDocumenter, - ResponseExampleDocumenter, -) -from botocore.docs.params import ( - RequestParamsDocumenter, - ResponseParamsDocumenter, -) - -AWS_DOC_BASE = 'https://docs.aws.amazon.com/goto/WebAPI' - - -def get_instance_public_methods(instance): - """Retrieves an objects public methods - - :param instance: The instance of the class to inspect - :rtype: dict - :returns: A dictionary that represents an instance's methods where - the keys are the name of the methods and the - values are the handler to the method. - """ - instance_members = inspect.getmembers(instance) - instance_methods = {} - for name, member in instance_members: - if not name.startswith('_'): - if inspect.ismethod(member): - instance_methods[name] = member - return instance_methods - - -def document_model_driven_signature( - section, name, operation_model, include=None, exclude=None -): - """Documents the signature of a model-driven method - - :param section: The section to write the documentation to. - - :param name: The name of the method - - :param operation_model: The operation model for the method - - :type include: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include: The parameter shapes to include in the documentation. - - :type exclude: List of the names of the parameters to exclude. - :param exclude: The names of the parameters to exclude from - documentation. - """ - params = {} - if operation_model.input_shape: - params = operation_model.input_shape.members - - parameter_names = list(params.keys()) - - if include is not None: - for member in include: - parameter_names.append(member.name) - - if exclude is not None: - for member in exclude: - if member in parameter_names: - parameter_names.remove(member) - - signature_params = '' - if parameter_names: - signature_params = '**kwargs' - section.style.start_sphinx_py_method(name, signature_params) - - -def document_custom_signature( - section, name, method, include=None, exclude=None -): - """Documents the signature of a custom method - - :param section: The section to write the documentation to. - - :param name: The name of the method - - :param method: The handle to the method being documented - - :type include: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include: The parameter shapes to include in the documentation. - - :type exclude: List of the names of the parameters to exclude. - :param exclude: The names of the parameters to exclude from - documentation. 
- """ - signature = inspect.signature(method) - # "raw" class methods are FunctionType and they include "self" param - # object methods are MethodType and they skip the "self" param - if isinstance(method, types.FunctionType): - self_param = next(iter(signature.parameters)) - self_kind = signature.parameters[self_param].kind - # safety check that we got the right parameter - assert self_kind == inspect.Parameter.POSITIONAL_OR_KEYWORD - new_params = signature.parameters.copy() - del new_params[self_param] - signature = signature.replace(parameters=new_params.values()) - signature_params = str(signature).lstrip('(') - signature_params = signature_params.rstrip(')') - section.style.start_sphinx_py_method(name, signature_params) - - -def document_custom_method(section, method_name, method): - """Documents a non-data driven method - - :param section: The section to write the documentation to. - - :param method_name: The name of the method - - :param method: The handle to the method being documented - """ - full_method_name = f"{section.context.get('qualifier', '')}{method_name}" - document_custom_signature(section, full_method_name, method) - method_intro_section = section.add_new_section('method-intro') - method_intro_section.writeln('') - doc_string = inspect.getdoc(method) - if doc_string is not None: - method_intro_section.style.write_py_doc_string(doc_string) - - -def document_model_driven_method( - section, - method_name, - operation_model, - event_emitter, - method_description=None, - example_prefix=None, - include_input=None, - include_output=None, - exclude_input=None, - exclude_output=None, - document_output=True, - include_signature=True, -): - """Documents an individual method - - :param section: The section to write to - - :param method_name: The name of the method - - :param operation_model: The model of the operation - - :param event_emitter: The event emitter to use to emit events - - :param example_prefix: The prefix to use in the method example. - - :type include_input: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include_input: The parameter shapes to include in the - input documentation. - - :type include_output: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include_input: The parameter shapes to include in the - output documentation. - - :type exclude_input: List of the names of the parameters to exclude. - :param exclude_input: The names of the parameters to exclude from - input documentation. - - :type exclude_output: List of the names of the parameters to exclude. - :param exclude_input: The names of the parameters to exclude from - output documentation. - - :param document_output: A boolean flag to indicate whether to - document the output. - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - # Add the signature if specified. - if include_signature: - document_model_driven_signature( - section, - method_name, - operation_model, - include=include_input, - exclude=exclude_input, - ) - - # Add the description for the method. - method_intro_section = section.add_new_section('method-intro') - method_intro_section.include_doc_string(method_description) - if operation_model.deprecated: - method_intro_section.style.start_danger() - method_intro_section.writeln( - 'This operation is deprecated and may not function as ' - 'expected. 
This operation should not be used going forward ' - 'and is only kept for the purpose of backwards compatiblity.' - ) - method_intro_section.style.end_danger() - service_uid = operation_model.service_model.metadata.get('uid') - if service_uid is not None: - method_intro_section.style.new_paragraph() - method_intro_section.write("See also: ") - link = f"{AWS_DOC_BASE}/{service_uid}/{operation_model.name}" - method_intro_section.style.external_link( - title="AWS API Documentation", link=link - ) - method_intro_section.writeln('') - - # Add the example section. - example_section = section.add_new_section('request-example') - example_section.style.new_paragraph() - example_section.style.bold('Request Syntax') - - context = { - 'special_shape_types': { - 'streaming_input_shape': operation_model.get_streaming_input(), - 'streaming_output_shape': operation_model.get_streaming_output(), - 'eventstream_output_shape': operation_model.get_event_stream_output(), - }, - } - - if operation_model.input_shape: - RequestExampleDocumenter( - service_name=operation_model.service_model.service_name, - operation_name=operation_model.name, - event_emitter=event_emitter, - context=context, - ).document_example( - example_section, - operation_model.input_shape, - prefix=example_prefix, - include=include_input, - exclude=exclude_input, - ) - else: - example_section.style.new_paragraph() - example_section.style.start_codeblock() - example_section.write(example_prefix + '()') - - # Add the request parameter documentation. - request_params_section = section.add_new_section('request-params') - if operation_model.input_shape: - RequestParamsDocumenter( - service_name=operation_model.service_model.service_name, - operation_name=operation_model.name, - event_emitter=event_emitter, - context=context, - ).document_params( - request_params_section, - operation_model.input_shape, - include=include_input, - exclude=exclude_input, - ) - - # Add the return value documentation - return_section = section.add_new_section('return') - return_section.style.new_line() - if operation_model.output_shape is not None and document_output: - return_section.write(':rtype: dict') - return_section.style.new_line() - return_section.write(':returns: ') - return_section.style.indent() - return_section.style.new_line() - - # If the operation is an event stream, describe the tagged union - event_stream_output = operation_model.get_event_stream_output() - if event_stream_output: - event_section = return_section.add_new_section('event-stream') - event_section.style.new_paragraph() - event_section.write( - 'The response of this operation contains an ' - ':class:`.EventStream` member. When iterated the ' - ':class:`.EventStream` will yield events based on the ' - 'structure below, where only one of the top level keys ' - 'will be present for any given event.' 
- ) - event_section.style.new_line() - - # Add an example return value - return_example_section = return_section.add_new_section( - 'response-example' - ) - return_example_section.style.new_line() - return_example_section.style.bold('Response Syntax') - return_example_section.style.new_paragraph() - ResponseExampleDocumenter( - service_name=operation_model.service_model.service_name, - operation_name=operation_model.name, - event_emitter=event_emitter, - context=context, - ).document_example( - return_example_section, - operation_model.output_shape, - include=include_output, - exclude=exclude_output, - ) - - # Add a description for the return value - return_description_section = return_section.add_new_section( - 'description' - ) - return_description_section.style.new_line() - return_description_section.style.bold('Response Structure') - return_description_section.style.new_paragraph() - ResponseParamsDocumenter( - service_name=operation_model.service_model.service_name, - operation_name=operation_model.name, - event_emitter=event_emitter, - context=context, - ).document_params( - return_description_section, - operation_model.output_shape, - include=include_output, - exclude=exclude_output, - ) - else: - return_section.write(':returns: None') diff --git a/venv/Lib/site-packages/botocore/docs/paginator.py b/venv/Lib/site-packages/botocore/docs/paginator.py deleted file mode 100644 index 2c9b300..0000000 --- a/venv/Lib/site-packages/botocore/docs/paginator.py +++ /dev/null @@ -1,241 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore import xform_name -from botocore.compat import OrderedDict -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.method import document_model_driven_method -from botocore.docs.utils import DocumentedShape -from botocore.utils import get_service_module_name - - -class PaginatorDocumenter: - def __init__(self, client, service_paginator_model, root_docs_path): - self._client = client - self._client_class_name = self._client.__class__.__name__ - self._service_name = self._client.meta.service_model.service_name - self._service_paginator_model = service_paginator_model - self._root_docs_path = root_docs_path - self._USER_GUIDE_LINK = ( - 'https://boto3.amazonaws.com/' - 'v1/documentation/api/latest/guide/paginators.html' - ) - - def document_paginators(self, section): - """Documents the various paginators for a service - - param section: The section to write to. - """ - section.style.h2('Paginators') - self._add_overview(section) - section.style.new_line() - section.writeln('The available paginators are:') - section.style.toctree() - - paginator_names = sorted( - self._service_paginator_model._paginator_config - ) - - # List the available paginators and then document each paginator. - for paginator_name in paginator_names: - section.style.tocitem( - f'{self._service_name}/paginator/{paginator_name}' - ) - # Create a new DocumentStructure for each paginator and add contents. 
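The paginator pages assembled here tell readers to fetch a paginator with get_paginator and drive it through paginate. A short sketch of that generated pattern, using the S3 list_objects_v2 paginator as an illustrative choice and the PaginationConfig members documented in the paginate-method section just below:

    import botocore.session

    session = botocore.session.get_session()
    client = session.create_client('s3', region_name='us-east-1')

    # The generated page writes: paginator = client.get_paginator('<snake_cased_name>')
    paginator = client.get_paginator('list_objects_v2')
    response_iterator = paginator.paginate(
        Bucket='example-bucket',    # bucket name is an assumption
        PaginationConfig={
            'MaxItems': 100,        # total items across all pages
            'PageSize': 25,         # per-request page size (only if the paginator has a limit_key)
            'StartingToken': None,  # a NextToken from a previous run, if resuming
        },
    )
    for page in response_iterator:
        print(page.get('KeyCount'))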
- paginator_doc_structure = DocumentStructure( - paginator_name, target='html' - ) - self._add_paginator(paginator_doc_structure, paginator_name) - # Write paginators in individual/nested files. - # Path: /reference/services//paginator/.rst - paginator_dir_path = os.path.join( - self._root_docs_path, self._service_name, 'paginator' - ) - paginator_doc_structure.write_to_file( - paginator_dir_path, paginator_name - ) - - def _add_paginator(self, section, paginator_name): - breadcrumb_section = section.add_new_section('breadcrumb') - breadcrumb_section.style.ref( - self._client_class_name, f'../../{self._service_name}' - ) - breadcrumb_section.write(f' / Paginator / {paginator_name}') - section.add_title_section(paginator_name) - - # Docment the paginator class - paginator_section = section.add_new_section(paginator_name) - paginator_section.style.start_sphinx_py_class( - class_name=( - f'{self._client_class_name}.Paginator.{paginator_name}' - ) - ) - paginator_section.style.start_codeblock() - paginator_section.style.new_line() - - # Document how to instantiate the paginator. - paginator_section.write( - f"paginator = client.get_paginator('{xform_name(paginator_name)}')" - ) - paginator_section.style.end_codeblock() - paginator_section.style.new_line() - # Get the pagination model for the particular paginator. - paginator_config = self._service_paginator_model.get_paginator( - paginator_name - ) - document_paginate_method( - section=paginator_section, - paginator_name=paginator_name, - event_emitter=self._client.meta.events, - service_model=self._client.meta.service_model, - paginator_config=paginator_config, - )
- - def _add_overview(self, section): - section.style.new_line() - section.write( - 'Paginators are available on a client instance ' - 'via the ``get_paginator`` method. For more detailed instructions ' - 'and examples on the usage of paginators, see the ' - 'paginators ' - ) - section.style.external_link( - title='user guide', - link=self._USER_GUIDE_LINK, - ) - section.write('.') - section.style.new_line() - - -def document_paginate_method( - section, - paginator_name, - event_emitter, - service_model, - paginator_config, - include_signature=True, -): - """Documents the paginate method of a paginator - - :param section: The section to write to - - :param paginator_name: The name of the paginator. It is snake cased. - - :param event_emitter: The event emitter to use to emit events - - :param service_model: The service model - - :param paginator_config: The paginator config associated to a particular - paginator. - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - # Retrieve the operation model of the underlying operation. - operation_model = service_model.operation_model(paginator_name) - - # Add representations of the request and response parameters - # we want to include in the description of the paginate method. - # These are parameters we expose via the botocore interface. - pagination_config_members = OrderedDict() - - pagination_config_members['MaxItems'] = DocumentedShape( - name='MaxItems', - type_name='integer', - documentation=( - '<p>The total number of items to return. If the total ' - 'number of items available is more than the value ' - 'specified in max-items then a <code>NextToken</code> ' - 'will be provided in the output that you can use to ' - 'resume pagination.</p>' - ), - )
- - if paginator_config.get('limit_key', None): - pagination_config_members['PageSize'] = DocumentedShape( - name='PageSize', - type_name='integer', - documentation='<p>The size of each page.</p>', - ) - - pagination_config_members['StartingToken'] = DocumentedShape( - name='StartingToken', - type_name='string', - documentation=( - '<p>A token to specify where to start paginating. ' - 'This is the <code>NextToken</code> from a previous ' - 'response.</p>' - ), - ) - - botocore_pagination_params = [ - DocumentedShape( - name='PaginationConfig', - type_name='structure', - documentation=( - '<p>A dictionary that provides parameters to control ' - 'pagination.</p>' - ), - members=pagination_config_members, - ) - ] - - botocore_pagination_response_params = [ - DocumentedShape( - name='NextToken', - type_name='string', - documentation=('<p>A token to resume pagination.</p>'), - ) - ]
- - service_pagination_params = [] - - # Add the normal input token of the method to a list - # of input paramters that we wish to hide since we expose our own. - if isinstance(paginator_config['input_token'], list): - service_pagination_params += paginator_config['input_token'] - else: - service_pagination_params.append(paginator_config['input_token']) - - # Hide the limit key in the documentation. - if paginator_config.get('limit_key', None): - service_pagination_params.append(paginator_config['limit_key']) - - # Hide the output tokens in the documentation. - service_pagination_response_params = [] - if isinstance(paginator_config['output_token'], list): - service_pagination_response_params += paginator_config['output_token'] - else: - service_pagination_response_params.append( - paginator_config['output_token'] - ) - - paginate_description = ( - 'Creates an iterator that will paginate through responses ' - f'from :py:meth:`{get_service_module_name(service_model)}.Client.{xform_name(paginator_name)}`.' - ) - - document_model_driven_method( - section, - 'paginate', - operation_model, - event_emitter=event_emitter, - method_description=paginate_description, - example_prefix='response_iterator = paginator.paginate', - include_input=botocore_pagination_params, - include_output=botocore_pagination_response_params, - exclude_input=service_pagination_params, - exclude_output=service_pagination_response_params, - include_signature=include_signature, - ) diff --git a/venv/Lib/site-packages/botocore/docs/params.py b/venv/Lib/site-packages/botocore/docs/params.py deleted file mode 100644 index 7e0f398..0000000 --- a/venv/Lib/site-packages/botocore/docs/params.py +++ /dev/null @@ -1,302 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from botocore.docs.shape import ShapeDocumenter -from botocore.docs.utils import py_type_name - - -class BaseParamsDocumenter(ShapeDocumenter): - def document_params(self, section, shape, include=None, exclude=None): - """Fills out the documentation for a section given a model shape. - - :param section: The section to write the documentation to. - - :param shape: The shape of the operation. - - :type include: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include: The parameter shapes to include in the documentation. - - :type exclude: List of the names of the parameters to exclude. - :param exclude: The names of the parameters to exclude from - documentation.
- """ - history = [] - self.traverse_and_document_shape( - section=section, - shape=shape, - history=history, - name=None, - include=include, - exclude=exclude, - ) - - def document_recursive_shape(self, section, shape, **kwargs): - self._add_member_documentation(section, shape, **kwargs) - - def document_shape_default( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - self._add_member_documentation(section, shape, **kwargs) - - def document_shape_type_list( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - self._add_member_documentation(section, shape, **kwargs) - param_shape = shape.member - param_section = section.add_new_section( - param_shape.name, context={'shape': shape.member.name} - ) - self._start_nested_param(param_section) - self.traverse_and_document_shape( - section=param_section, - shape=param_shape, - history=history, - name=None, - ) - section = section.add_new_section('end-list') - self._end_nested_param(section) - - def document_shape_type_map( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - self._add_member_documentation(section, shape, **kwargs) - - key_section = section.add_new_section( - 'key', context={'shape': shape.key.name} - ) - self._start_nested_param(key_section) - self._add_member_documentation(key_section, shape.key) - - param_section = section.add_new_section( - shape.value.name, context={'shape': shape.value.name} - ) - param_section.style.indent() - self._start_nested_param(param_section) - self.traverse_and_document_shape( - section=param_section, - shape=shape.value, - history=history, - name=None, - ) - - end_section = section.add_new_section('end-map') - self._end_nested_param(end_section) - self._end_nested_param(end_section) - - def document_shape_type_structure( - self, - section, - shape, - history, - include=None, - exclude=None, - name=None, - **kwargs, - ): - members = self._add_members_to_shape(shape.members, include) - self._add_member_documentation(section, shape, name=name) - for param in members: - if exclude and param in exclude: - continue - param_shape = members[param] - param_section = section.add_new_section( - param, context={'shape': param_shape.name} - ) - self._start_nested_param(param_section) - self.traverse_and_document_shape( - section=param_section, - shape=param_shape, - history=history, - name=param, - ) - section = section.add_new_section('end-structure') - self._end_nested_param(section) - - def _add_member_documentation(self, section, shape, **kwargs): - pass - - def _add_members_to_shape(self, members, include): - if include: - members = members.copy() - for param in include: - members[param.name] = param - return members - - def _document_non_top_level_param_type(self, type_section, shape): - special_py_type = self._get_special_py_type_name(shape) - py_type = py_type_name(shape.type_name) - - type_format = '(%s) --' - if special_py_type is not None: - # Special type can reference a linked class. - # Italicizing it blows away the link. 
- type_section.write(type_format % special_py_type) - else: - type_section.style.italics(type_format % py_type) - type_section.write(' ') - - def _start_nested_param(self, section): - section.style.indent() - section.style.new_line() - - def _end_nested_param(self, section): - section.style.dedent() - section.style.new_line() - - -class ResponseParamsDocumenter(BaseParamsDocumenter): - """Generates the description for the response parameters""" - - EVENT_NAME = 'response-params' - - def _add_member_documentation(self, section, shape, name=None, **kwargs): - name_section = section.add_new_section('param-name') - name_section.write('- ') - if name is not None: - name_section.style.bold(f'{name}') - name_section.write(' ') - type_section = section.add_new_section('param-type') - self._document_non_top_level_param_type(type_section, shape) - - documentation_section = section.add_new_section('param-documentation') - if shape.documentation: - documentation_section.style.indent() - if getattr(shape, 'is_tagged_union', False): - tagged_union_docs = section.add_new_section( - 'param-tagged-union-docs' - ) - note = ( - '.. note::' - ' This is a Tagged Union structure. Only one of the ' - ' following top level keys will be set: %s. ' - ' If a client receives an unknown member it will ' - ' set ``SDK_UNKNOWN_MEMBER`` as the top level key, ' - ' which maps to the name or tag of the unknown ' - ' member. The structure of ``SDK_UNKNOWN_MEMBER`` is ' - ' as follows' - ) - tagged_union_members_str = ', '.join( - [f'``{key}``' for key in shape.members.keys()] - ) - unknown_code_example = ( - '\'SDK_UNKNOWN_MEMBER\': {\'name\': \'UnknownMemberName\'}' - ) - tagged_union_docs.write(note % (tagged_union_members_str)) - example = section.add_new_section('param-unknown-example') - example.style.codeblock(unknown_code_example) - documentation_section.include_doc_string(shape.documentation) - section.style.new_paragraph() - - def document_shape_type_event_stream( - self, section, shape, history, **kwargs - ): - self.document_shape_type_structure(section, shape, history, **kwargs) - - -class RequestParamsDocumenter(BaseParamsDocumenter): - """Generates the description for the request parameters""" - - EVENT_NAME = 'request-params' - - def document_shape_type_structure( - self, section, shape, history, include=None, exclude=None, **kwargs - ): - if len(history) > 1: - self._add_member_documentation(section, shape, **kwargs) - section.style.indent() - members = self._add_members_to_shape(shape.members, include) - for i, param in enumerate(members): - if exclude and param in exclude: - continue - param_shape = members[param] - param_section = section.add_new_section( - param, context={'shape': param_shape.name} - ) - param_section.style.new_line() - is_required = param in shape.required_members - self.traverse_and_document_shape( - section=param_section, - shape=param_shape, - history=history, - name=param, - is_required=is_required, - ) - section = section.add_new_section('end-structure') - if len(history) > 1: - section.style.dedent() - section.style.new_line() - - def _add_member_documentation( - self, - section, - shape, - name=None, - is_top_level_param=False, - is_required=False, - **kwargs, - ): - py_type = self._get_special_py_type_name(shape) - if py_type is None: - py_type = py_type_name(shape.type_name) - if is_top_level_param: - type_section = section.add_new_section('param-type') - type_section.write(f':type {name}: {py_type}') - end_type_section = type_section.add_new_section('end-param-type') - 
end_type_section.style.new_line() - name_section = section.add_new_section('param-name') - name_section.write(f':param {name}: ') - - else: - name_section = section.add_new_section('param-name') - name_section.write('- ') - if name is not None: - name_section.style.bold(f'{name}') - name_section.write(' ') - type_section = section.add_new_section('param-type') - self._document_non_top_level_param_type(type_section, shape) - - if is_required: - is_required_section = section.add_new_section('is-required') - is_required_section.style.indent() - is_required_section.style.bold('[REQUIRED]') - is_required_section.write(' ') - if shape.documentation: - documentation_section = section.add_new_section( - 'param-documentation' - ) - documentation_section.style.indent() - if getattr(shape, 'is_tagged_union', False): - tagged_union_docs = section.add_new_section( - 'param-tagged-union-docs' - ) - note = ( - '.. note::' - ' This is a Tagged Union structure. Only one of the ' - ' following top level keys can be set: %s. ' - ) - tagged_union_members_str = ', '.join( - [f'``{key}``' for key in shape.members.keys()] - ) - tagged_union_docs.write(note % (tagged_union_members_str)) - documentation_section.include_doc_string(shape.documentation) - self._add_special_trait_documentation(documentation_section, shape) - end_param_section = section.add_new_section('end-param') - end_param_section.style.new_paragraph() - - def _add_special_trait_documentation(self, section, shape): - if 'idempotencyToken' in shape.metadata: - self._append_idempotency_documentation(section) - - def _append_idempotency_documentation(self, section): - docstring = 'This field is autopopulated if not provided.' - section.write(docstring) diff --git a/venv/Lib/site-packages/botocore/docs/service.py b/venv/Lib/site-packages/botocore/docs/service.py deleted file mode 100644 index d20a889..0000000 --- a/venv/Lib/site-packages/botocore/docs/service.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.client import ( - ClientContextParamsDocumenter, - ClientDocumenter, - ClientExceptionsDocumenter, -) -from botocore.docs.paginator import PaginatorDocumenter -from botocore.docs.waiter import WaiterDocumenter -from botocore.exceptions import DataNotFoundError - - -class ServiceDocumenter: - def __init__(self, service_name, session, root_docs_path): - self._session = session - self._service_name = service_name - self._root_docs_path = root_docs_path - - self._client = self._session.create_client( - service_name, - region_name='us-east-1', - aws_access_key_id='foo', - aws_secret_access_key='bar', - ) - self._event_emitter = self._client.meta.events - - self.sections = [ - 'title', - 'client-api', - 'client-exceptions', - 'paginator-api', - 'waiter-api', - 'client-context-params', - ] - - def document_service(self): - """Documents an entire service. - - :returns: The reStructured text of the documented service. 
- """ - doc_structure = DocumentStructure( - self._service_name, section_names=self.sections, target='html' - ) - self.title(doc_structure.get_section('title')) - self.client_api(doc_structure.get_section('client-api')) - self.client_exceptions(doc_structure.get_section('client-exceptions')) - self.paginator_api(doc_structure.get_section('paginator-api')) - self.waiter_api(doc_structure.get_section('waiter-api')) - context_params_section = doc_structure.get_section( - 'client-context-params' - ) - self.client_context_params(context_params_section) - return doc_structure.flush_structure() - - def title(self, section): - section.style.h1(self._client.__class__.__name__) - self._event_emitter.emit( - f"docs.title.{self._service_name}", section=section - ) - - def table_of_contents(self, section): - section.style.table_of_contents(title='Table of Contents', depth=2) - - def client_api(self, section): - examples = None - try: - examples = self.get_examples(self._service_name) - except DataNotFoundError: - pass - - ClientDocumenter( - self._client, self._root_docs_path, examples - ).document_client(section) - - def client_exceptions(self, section): - ClientExceptionsDocumenter( - self._client, self._root_docs_path - ).document_exceptions(section) - - def paginator_api(self, section): - try: - service_paginator_model = self._session.get_paginator_model( - self._service_name - ) - except DataNotFoundError: - return - if service_paginator_model._paginator_config: - paginator_documenter = PaginatorDocumenter( - self._client, service_paginator_model, self._root_docs_path - ) - paginator_documenter.document_paginators(section) - - def waiter_api(self, section): - if self._client.waiter_names: - service_waiter_model = self._session.get_waiter_model( - self._service_name - ) - waiter_documenter = WaiterDocumenter( - self._client, service_waiter_model, self._root_docs_path - ) - waiter_documenter.document_waiters(section) - - def get_examples(self, service_name, api_version=None): - loader = self._session.get_component('data_loader') - examples = loader.load_service_model( - service_name, 'examples-1', api_version - ) - return examples['examples'] - - def client_context_params(self, section): - omitted_params = ClientContextParamsDocumenter.OMITTED_CONTEXT_PARAMS - params_to_omit = omitted_params.get(self._service_name, []) - service_model = self._client.meta.service_model - raw_context_params = service_model.client_context_parameters - context_params = [ - p for p in raw_context_params if p.name not in params_to_omit - ] - if context_params: - context_param_documenter = ClientContextParamsDocumenter( - self._service_name, context_params - ) - context_param_documenter.document_context_params(section) diff --git a/venv/Lib/site-packages/botocore/docs/shape.py b/venv/Lib/site-packages/botocore/docs/shape.py deleted file mode 100644 index 640a5d1..0000000 --- a/venv/Lib/site-packages/botocore/docs/shape.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
- - -# NOTE: This class should not be instantiated and its -# ``traverse_and_document_shape`` method called directly. It should be -# inherited from a Documenter class with the appropriate methods -# and attributes. -from botocore.utils import is_json_value_header - - -class ShapeDocumenter: - EVENT_NAME = '' - - def __init__( - self, service_name, operation_name, event_emitter, context=None - ): - self._service_name = service_name - self._operation_name = operation_name - self._event_emitter = event_emitter - self._context = context - if context is None: - self._context = {'special_shape_types': {}} - - def traverse_and_document_shape( - self, - section, - shape, - history, - include=None, - exclude=None, - name=None, - is_required=False, - ): - """Traverses and documents a shape - - Will take a self class and call its appropriate methods as a shape - is traversed. - - :param section: The section to document. - - :param history: A list of the names of the shapes that have been - traversed. - - :type include: Dictionary where keys are parameter names and - values are the shapes of the parameter names. - :param include: The parameter shapes to include in the documentation. - - :type exclude: List of the names of the parameters to exclude. - :param exclude: The names of the parameters to exclude from - documentation. - - :param name: The name of the shape. - - :param is_required: If the shape is a required member. - """ - param_type = shape.type_name - if getattr(shape, 'serialization', {}).get('eventstream'): - param_type = 'event_stream' - if shape.name in history: - self.document_recursive_shape(section, shape, name=name) - else: - history.append(shape.name) - is_top_level_param = len(history) == 2 - if hasattr(shape, 'is_document_type') and shape.is_document_type: - param_type = 'document' - getattr( - self, - f"document_shape_type_{param_type}", - self.document_shape_default, - )( - section, - shape, - history=history, - name=name, - include=include, - exclude=exclude, - is_top_level_param=is_top_level_param, - is_required=is_required, - ) - if is_top_level_param: - self._event_emitter.emit( - f"docs.{self.EVENT_NAME}.{self._service_name}.{self._operation_name}.{name}", - section=section, - ) - at_overlying_method_section = len(history) == 1 - if at_overlying_method_section: - self._event_emitter.emit( - f"docs.{self.EVENT_NAME}.{self._service_name}.{self._operation_name}.complete-section", - section=section, - ) - history.pop() - - def _get_special_py_default(self, shape): - special_defaults = { - 'document_type': '{...}|[...]|123|123.4|\'string\'|True|None', - 'jsonvalue_header': '{...}|[...]|123|123.4|\'string\'|True|None', - 'streaming_input_shape': 'b\'bytes\'|file', - 'streaming_output_shape': 'StreamingBody()', - 'eventstream_output_shape': 'EventStream()', - } - return self._get_value_for_special_type(shape, special_defaults) - - def _get_special_py_type_name(self, shape): - special_type_names = { - 'document_type': ':ref:`document`', - 'jsonvalue_header': 'JSON serializable', - 'streaming_input_shape': 'bytes or seekable file-like object', - 'streaming_output_shape': ':class:`.StreamingBody`', - 'eventstream_output_shape': ':class:`.EventStream`', - } - return self._get_value_for_special_type(shape, special_type_names) - - def _get_value_for_special_type(self, shape, special_type_map): - if is_json_value_header(shape): - return special_type_map['jsonvalue_header'] - if hasattr(shape, 'is_document_type') and shape.is_document_type: - return special_type_map['document_type'] - 
for special_type, marked_shape in self._context[ - 'special_shape_types' - ].items(): - if special_type in special_type_map: - if shape == marked_shape: - return special_type_map[special_type] - return None diff --git a/venv/Lib/site-packages/botocore/docs/sharedexample.py b/venv/Lib/site-packages/botocore/docs/sharedexample.py deleted file mode 100644 index 29d3df5..0000000 --- a/venv/Lib/site-packages/botocore/docs/sharedexample.py +++ /dev/null @@ -1,227 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import numbers -import re - -from botocore.docs.utils import escape_controls -from botocore.utils import parse_timestamp - - -class SharedExampleDocumenter: - def document_shared_example( - self, example, prefix, section, operation_model - ): - """Documents a single shared example based on its definition. - - :param example: The model of the example - - :param prefix: The prefix to use in the method example. - - :param section: The section to write to. - - :param operation_model: The model of the operation used in the example - """ - section.style.new_paragraph() - section.write(example.get('description')) - section.style.new_line() - self.document_input( - section, example, prefix, operation_model.input_shape - ) - self.document_output(section, example, operation_model.output_shape) - - def document_input(self, section, example, prefix, shape): - input_section = section.add_new_section('input') - input_section.style.start_codeblock() - if prefix is not None: - input_section.write(prefix) - params = example.get('input', {}) - comments = example.get('comments') - if comments: - comments = comments.get('input') - param_section = input_section.add_new_section('parameters') - self._document_params(param_section, params, comments, [], shape) - closing_section = input_section.add_new_section('input-close') - closing_section.style.new_line() - closing_section.style.new_line() - closing_section.write('print(response)') - closing_section.style.end_codeblock() - - def document_output(self, section, example, shape): - output_section = section.add_new_section('output') - output_section.style.new_line() - output_section.write('Expected Output:') - output_section.style.new_line() - output_section.style.start_codeblock() - params = example.get('output', {}) - - # There might not be an output, but we will return metadata anyway - params['ResponseMetadata'] = {"...": "..."} - comments = example.get('comments') - if comments: - comments = comments.get('output') - self._document_dict(output_section, params, comments, [], shape, True) - closing_section = output_section.add_new_section('output-close') - closing_section.style.end_codeblock() - - def _document(self, section, value, comments, path, shape): - """ - :param section: The section to add the docs to. - - :param value: The input / output values representing the parameters that - are included in the example. - - :param comments: The dictionary containing all the comments to be - applied to the example. 
- - :param path: A list describing where the documenter is in traversing the - parameters. This is used to find the equivalent location - in the comments dictionary. - """ - if isinstance(value, dict): - self._document_dict(section, value, comments, path, shape) - elif isinstance(value, list): - self._document_list(section, value, comments, path, shape) - elif isinstance(value, numbers.Number): - self._document_number(section, value, path) - elif shape and shape.type_name == 'timestamp': - self._document_datetime(section, value, path) - else: - self._document_str(section, value, path) - - def _document_dict( - self, section, value, comments, path, shape, top_level=False - ): - dict_section = section.add_new_section('dict-value') - self._start_nested_value(dict_section, '{') - for key, val in value.items(): - path.append(f'.{key}') - item_section = dict_section.add_new_section(key) - item_section.style.new_line() - item_comment = self._get_comment(path, comments) - if item_comment: - item_section.write(item_comment) - item_section.style.new_line() - item_section.write(f"'{key}': ") - - # Shape could be none if there is no output besides ResponseMetadata - item_shape = None - if shape: - if shape.type_name == 'structure': - item_shape = shape.members.get(key) - elif shape.type_name == 'map': - item_shape = shape.value - self._document(item_section, val, comments, path, item_shape) - path.pop() - dict_section_end = dict_section.add_new_section('ending-brace') - self._end_nested_value(dict_section_end, '}') - if not top_level: - dict_section_end.write(',') - - def _document_params(self, section, value, comments, path, shape): - param_section = section.add_new_section('param-values') - self._start_nested_value(param_section, '(') - for key, val in value.items(): - path.append(f'.{key}') - item_section = param_section.add_new_section(key) - item_section.style.new_line() - item_comment = self._get_comment(path, comments) - if item_comment: - item_section.write(item_comment) - item_section.style.new_line() - item_section.write(key + '=') - - # Shape could be none if there are no input parameters - item_shape = None - if shape: - item_shape = shape.members.get(key) - self._document(item_section, val, comments, path, item_shape) - path.pop() - param_section_end = param_section.add_new_section('ending-parenthesis') - self._end_nested_value(param_section_end, ')') - - def _document_list(self, section, value, comments, path, shape): - list_section = section.add_new_section('list-section') - self._start_nested_value(list_section, '[') - item_shape = shape.member - for index, val in enumerate(value): - item_section = list_section.add_new_section(index) - item_section.style.new_line() - path.append(f'[{index}]') - item_comment = self._get_comment(path, comments) - if item_comment: - item_section.write(item_comment) - item_section.style.new_line() - self._document(item_section, val, comments, path, item_shape) - path.pop() - list_section_end = list_section.add_new_section('ending-bracket') - self._end_nested_value(list_section_end, '],') - - def _document_str(self, section, value, path): - # We do the string conversion because this might accept a type that - # we don't specifically address. 
- safe_value = escape_controls(value) - section.write(f"'{safe_value}',") - - def _document_number(self, section, value, path): - section.write(f"{str(value)},") - - def _document_datetime(self, section, value, path): - datetime_tuple = parse_timestamp(value).timetuple() - datetime_str = str(datetime_tuple[0]) - for i in range(1, len(datetime_tuple)): - datetime_str += ", " + str(datetime_tuple[i]) - section.write(f"datetime({datetime_str}),") - - def _get_comment(self, path, comments): - key = re.sub(r'^\.', '', ''.join(path)) - if comments and key in comments: - return '# ' + comments[key] - else: - return '' - - def _start_nested_value(self, section, start): - section.write(start) - section.style.indent() - section.style.indent() - - def _end_nested_value(self, section, end): - section.style.dedent() - section.style.dedent() - section.style.new_line() - section.write(end) - - -def document_shared_examples( - section, operation_model, example_prefix, shared_examples -): - """Documents the shared examples - - :param section: The section to write to. - - :param operation_model: The model of the operation. - - :param example_prefix: The prefix to use in the method example. - - :param shared_examples: The shared JSON examples from the model. - """ - container_section = section.add_new_section('shared-examples') - container_section.style.new_paragraph() - container_section.style.bold('Examples') - documenter = SharedExampleDocumenter() - for example in shared_examples: - documenter.document_shared_example( - example=example, - section=container_section.add_new_section(example['id']), - prefix=example_prefix, - operation_model=operation_model, - ) diff --git a/venv/Lib/site-packages/botocore/docs/translator.py b/venv/Lib/site-packages/botocore/docs/translator.py deleted file mode 100644 index 0b0a308..0000000 --- a/venv/Lib/site-packages/botocore/docs/translator.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -from docutils import nodes -from sphinx.locale import admonitionlabels -from sphinx.writers.html5 import HTML5Translator as SphinxHTML5Translator - - -class BotoHTML5Translator(SphinxHTML5Translator): - """Extension of Sphinx's ``HTML5Translator`` for Botocore documentation.""" - - IGNORE_IMPLICIT_HEADINGS = [ - '[REQUIRED]', - ] - - def visit_admonition(self, node, name=""): - """Uses the h3 tag for admonition titles instead of the p tag.""" - self.body.append( - self.starttag(node, "div", CLASS=("admonition " + name)) - ) - if name: - title = ( - f"

<h3 class='admonition-title'> {admonitionlabels[name]}</h3>
    " - ) - self.body.append(title) - - def is_implicit_heading(self, node): - """Determines if a node is an implicit heading. - - An implicit heading is represented by a paragraph node whose only - child is a strong node with text that isnt in `IGNORE_IMPLICIT_HEADINGS`. - """ - return ( - len(node) == 1 - and isinstance(node[0], nodes.strong) - and len(node[0]) == 1 - and isinstance(node[0][0], nodes.Text) - and node[0][0].astext() not in self.IGNORE_IMPLICIT_HEADINGS - ) - - def visit_paragraph(self, node): - """Visit a paragraph HTML element. - - Replaces implicit headings with an h3 tag and defers to default - behavior for normal paragraph elements. - """ - if self.is_implicit_heading(node): - text = node[0][0] - self.body.append(f'

<h3>{text}</h3>
    \n') - # Do not visit the current nodes children or call its depart method. - raise nodes.SkipNode - else: - super().visit_paragraph(node) diff --git a/venv/Lib/site-packages/botocore/docs/utils.py b/venv/Lib/site-packages/botocore/docs/utils.py deleted file mode 100644 index 161e260..0000000 --- a/venv/Lib/site-packages/botocore/docs/utils.py +++ /dev/null @@ -1,225 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import re -from collections import namedtuple - - -def py_type_name(type_name): - """Get the Python type name for a given model type. - - >>> py_type_name('list') - 'list' - >>> py_type_name('structure') - 'dict' - - :rtype: string - """ - return { - 'blob': 'bytes', - 'character': 'string', - 'double': 'float', - 'long': 'integer', - 'map': 'dict', - 'structure': 'dict', - 'timestamp': 'datetime', - }.get(type_name, type_name) - - -def py_default(type_name): - """Get the Python default value for a given model type. - - >>> py_default('string') - '\'string\'' - >>> py_default('list') - '[...]' - >>> py_default('unknown') - '...' - - :rtype: string - """ - return { - 'double': '123.0', - 'long': '123', - 'integer': '123', - 'string': "'string'", - 'blob': "b'bytes'", - 'boolean': 'True|False', - 'list': '[...]', - 'map': '{...}', - 'structure': '{...}', - 'timestamp': 'datetime(2015, 1, 1)', - }.get(type_name, '...') - - -def get_official_service_name(service_model): - """Generate the official name of an AWS Service - - :param service_model: The service model representing the service - """ - official_name = service_model.metadata.get('serviceFullName') - short_name = service_model.metadata.get('serviceAbbreviation', '') - if short_name.startswith('Amazon'): - short_name = short_name[7:] - if short_name.startswith('AWS'): - short_name = short_name[4:] - if short_name and short_name.lower() not in official_name.lower(): - official_name += f' ({short_name})' - return official_name - - -_DocumentedShape = namedtuple( - 'DocumentedShape', - [ - 'name', - 'type_name', - 'documentation', - 'metadata', - 'members', - 'required_members', - ], -) - - -class DocumentedShape(_DocumentedShape): - """Use this class to inject new shapes into a model for documentation""" - - def __new__( - cls, - name, - type_name, - documentation, - metadata=None, - members=None, - required_members=None, - ): - if metadata is None: - metadata = [] - if members is None: - members = [] - if required_members is None: - required_members = [] - return super().__new__( - cls, - name, - type_name, - documentation, - metadata, - members, - required_members, - ) - - -class AutoPopulatedParam: - def __init__(self, name, param_description=None): - self.name = name - self.param_description = param_description - if param_description is None: - self.param_description = ( - 'Please note that this parameter is automatically populated ' - 'if it is not provided. 
Including this parameter is not ' - 'required\n' - ) - - def document_auto_populated_param(self, event_name, section, **kwargs): - """Documents auto populated parameters - - It will remove any required marks for the parameter, remove the - parameter from the example, and add a snippet about the parameter - being autopopulated in the description. - """ - if event_name.startswith('docs.request-params'): - if self.name in section.available_sections: - section = section.get_section(self.name) - if 'is-required' in section.available_sections: - section.delete_section('is-required') - description_section = section.get_section( - 'param-documentation' - ) - description_section.writeln(self.param_description) - elif event_name.startswith('docs.request-example'): - section = section.get_section('structure-value') - if self.name in section.available_sections: - section.delete_section(self.name) - - -class HideParamFromOperations: - """Hides a single parameter from multiple operations. - - This method will remove a parameter from documentation and from - examples. This method is typically used for things that are - automatically populated because a user would be unable to provide - a value (e.g., a checksum of a serialized XML request body).""" - - def __init__(self, service_name, parameter_name, operation_names): - """ - :type service_name: str - :param service_name: Name of the service to modify. - - :type parameter_name: str - :param parameter_name: Name of the parameter to modify. - - :type operation_names: list - :param operation_names: Operation names to modify. - """ - self._parameter_name = parameter_name - self._params_events = set() - self._example_events = set() - # Build up the sets of relevant event names. - param_template = 'docs.request-params.%s.%s.complete-section' - example_template = 'docs.request-example.%s.%s.complete-section' - for name in operation_names: - self._params_events.add(param_template % (service_name, name)) - self._example_events.add(example_template % (service_name, name)) - - def hide_param(self, event_name, section, **kwargs): - if event_name in self._example_events: - # Modify the structure value for example events. 
- section = section.get_section('structure-value') - elif event_name not in self._params_events: - return - if self._parameter_name in section.available_sections: - section.delete_section(self._parameter_name) - - -class AppendParamDocumentation: - """Appends documentation to a specific parameter""" - - def __init__(self, parameter_name, doc_string): - self._parameter_name = parameter_name - self._doc_string = doc_string - - def append_documentation(self, event_name, section, **kwargs): - if self._parameter_name in section.available_sections: - section = section.get_section(self._parameter_name) - description_section = section.get_section('param-documentation') - description_section.writeln(self._doc_string) - - -_CONTROLS = { - '\n': '\\n', - '\r': '\\r', - '\t': '\\t', - '\b': '\\b', - '\f': '\\f', -} -# Combines all CONTROLS keys into a big or regular expression -_ESCAPE_CONTROLS_RE = re.compile('|'.join(map(re.escape, _CONTROLS))) - - -# Based on the match get the appropriate replacement from CONTROLS -def _CONTROLS_MATCH_HANDLER(match): - return _CONTROLS[match.group(0)] - - -def escape_controls(value): - return _ESCAPE_CONTROLS_RE.sub(_CONTROLS_MATCH_HANDLER, value) diff --git a/venv/Lib/site-packages/botocore/docs/waiter.py b/venv/Lib/site-packages/botocore/docs/waiter.py deleted file mode 100644 index 2918602..0000000 --- a/venv/Lib/site-packages/botocore/docs/waiter.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import os - -from botocore import xform_name -from botocore.compat import OrderedDict -from botocore.docs.bcdoc.restdoc import DocumentStructure -from botocore.docs.method import document_model_driven_method -from botocore.docs.utils import DocumentedShape -from botocore.utils import get_service_module_name - - -class WaiterDocumenter: - def __init__(self, client, service_waiter_model, root_docs_path): - self._client = client - self._client_class_name = self._client.__class__.__name__ - self._service_name = self._client.meta.service_model.service_name - self._service_waiter_model = service_waiter_model - self._root_docs_path = root_docs_path - self._USER_GUIDE_LINK = ( - 'https://boto3.amazonaws.com/' - 'v1/documentation/api/latest/guide/clients.html#waiters' - ) - - def document_waiters(self, section): - """Documents the various waiters for a service. - - :param section: The section to write to. - """ - section.style.h2('Waiters') - self._add_overview(section) - section.style.new_line() - section.writeln('The available waiters are:') - section.style.toctree() - for waiter_name in self._service_waiter_model.waiter_names: - section.style.tocitem(f'{self._service_name}/waiter/{waiter_name}') - # Create a new DocumentStructure for each waiter and add contents. - waiter_doc_structure = DocumentStructure( - waiter_name, target='html' - ) - self._add_single_waiter(waiter_doc_structure, waiter_name) - # Write waiters in individual/nested files. 
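The escape_controls helper above builds a single alternation regex from the _CONTROLS mapping so that every control character can be replaced in one substitution pass. The snippet below is a self-contained restatement of that same technique with standalone names, runnable on its own:

import re

# Map each control character to a printable escape sequence.
CONTROLS = {'\n': '\\n', '\r': '\\r', '\t': '\\t', '\b': '\\b', '\f': '\\f'}

# A single alternation pattern that matches any key of CONTROLS.
CONTROLS_RE = re.compile('|'.join(map(re.escape, CONTROLS)))


def escape_controls(value):
    """Replace every control character in value with its escaped form."""
    return CONTROLS_RE.sub(lambda match: CONTROLS[match.group(0)], value)


if __name__ == "__main__":
    print(escape_controls("line one\nline two\ttabbed"))
    # prints: line one\nline two\ttabbed  (with literal backslashes)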
- # Path: /reference/services//waiter/.rst - waiter_dir_path = os.path.join( - self._root_docs_path, self._service_name, 'waiter' - ) - waiter_doc_structure.write_to_file(waiter_dir_path, waiter_name) - - def _add_single_waiter(self, section, waiter_name): - breadcrumb_section = section.add_new_section('breadcrumb') - breadcrumb_section.style.ref( - self._client_class_name, f'../../{self._service_name}' - ) - breadcrumb_section.write(f' / Waiter / {waiter_name}') - section.add_title_section(waiter_name) - waiter_section = section.add_new_section(waiter_name) - waiter_section.style.start_sphinx_py_class( - class_name=f"{self._client_class_name}.Waiter.{waiter_name}" - ) - - # Add example on how to instantiate waiter. - waiter_section.style.start_codeblock() - waiter_section.style.new_line() - waiter_section.write( - f'waiter = client.get_waiter(\'{xform_name(waiter_name)}\')' - ) - waiter_section.style.end_codeblock() - - # Add information on the wait() method - waiter_section.style.new_line() - document_wait_method( - section=waiter_section, - waiter_name=waiter_name, - event_emitter=self._client.meta.events, - service_model=self._client.meta.service_model, - service_waiter_model=self._service_waiter_model, - ) - - def _add_overview(self, section): - section.style.new_line() - section.write( - 'Waiters are available on a client instance ' - 'via the ``get_waiter`` method. For more detailed instructions ' - 'and examples on the usage or waiters, see the ' - 'waiters ' - ) - section.style.external_link( - title='user guide', - link=self._USER_GUIDE_LINK, - ) - section.write('.') - section.style.new_line() - - -def document_wait_method( - section, - waiter_name, - event_emitter, - service_model, - service_waiter_model, - include_signature=True, -): - """Documents a the wait method of a waiter - - :param section: The section to write to - - :param waiter_name: The name of the waiter - - :param event_emitter: The event emitter to use to emit events - - :param service_model: The service model - - :param service_waiter_model: The waiter model associated to the service - - :param include_signature: Whether or not to include the signature. - It is useful for generating docstrings. - """ - waiter_model = service_waiter_model.get_waiter(waiter_name) - operation_model = service_model.operation_model(waiter_model.operation) - - waiter_config_members = OrderedDict() - - waiter_config_members['Delay'] = DocumentedShape( - name='Delay', - type_name='integer', - documentation=( - '

<p>The amount of time in seconds to wait between ' - f'attempts. Default: {waiter_model.delay}</p>
    ' - ), - ) - - waiter_config_members['MaxAttempts'] = DocumentedShape( - name='MaxAttempts', - type_name='integer', - documentation=( - '

<p>The maximum number of attempts to be made. ' - f'Default: {waiter_model.max_attempts}</p>
    ' - ), - ) - - botocore_waiter_params = [ - DocumentedShape( - name='WaiterConfig', - type_name='structure', - documentation=( - '

<p>A dictionary that provides parameters to control ' - 'waiting behavior.</p>
    ' - ), - members=waiter_config_members, - ) - ] - - wait_description = ( - f'Polls :py:meth:`{get_service_module_name(service_model)}.Client.' - f'{xform_name(waiter_model.operation)}` every {waiter_model.delay} ' - 'seconds until a successful state is reached. An error is ' - f'raised after {waiter_model.max_attempts} failed checks.' - ) - - document_model_driven_method( - section, - 'wait', - operation_model, - event_emitter=event_emitter, - method_description=wait_description, - example_prefix='waiter.wait', - include_input=botocore_waiter_params, - document_output=False, - include_signature=include_signature, - ) diff --git a/venv/Lib/site-packages/botocore/endpoint.py b/venv/Lib/site-packages/botocore/endpoint.py deleted file mode 100644 index f393de7..0000000 --- a/venv/Lib/site-packages/botocore/endpoint.py +++ /dev/null @@ -1,449 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import datetime -import logging -import os -import threading -import time -import uuid - -from botocore import parsers -from botocore.awsrequest import create_request_object -from botocore.compat import get_current_datetime -from botocore.exceptions import HTTPClientError -from botocore.history import get_global_history_recorder -from botocore.hooks import first_non_none_response -from botocore.httpchecksum import handle_checksum_body -from botocore.httpsession import URLLib3Session -from botocore.response import StreamingBody -from botocore.utils import ( - get_environ_proxies, - is_valid_endpoint_url, - is_valid_ipv6_endpoint_url, -) - -logger = logging.getLogger(__name__) -history_recorder = get_global_history_recorder() -DEFAULT_TIMEOUT = 60 -MAX_POOL_CONNECTIONS = 10 - - -def convert_to_response_dict(http_response, operation_model): - """Convert an HTTP response object to a request dict. - - This converts the HTTP response object to a dictionary. - - :type http_response: botocore.awsrequest.AWSResponse - :param http_response: The HTTP response from an AWS service request. - - :rtype: dict - :return: A response dictionary which will contain the following keys: - * headers (dict) - * status_code (int) - * body (string or file-like object) - - """ - response_dict = { - 'headers': http_response.headers, - 'status_code': http_response.status_code, - 'context': { - 'operation_name': operation_model.name, - }, - } - if response_dict['status_code'] >= 300: - response_dict['body'] = http_response.content - elif operation_model.has_event_stream_output: - response_dict['body'] = http_response.raw - elif operation_model.has_streaming_output: - length = response_dict['headers'].get('content-length') - response_dict['body'] = StreamingBody(http_response.raw, length) - else: - response_dict['body'] = http_response.content - return response_dict - - -class Endpoint: - """ - Represents an endpoint for a particular service in a specific - region. Only an endpoint can make requests. 
- - :ivar service: The Service object that describes this endpoints - service. - :ivar host: The fully qualified endpoint hostname. - :ivar session: The session object. - """ - - def __init__( - self, - host, - endpoint_prefix, - event_emitter, - response_parser_factory=None, - http_session=None, - ): - self._endpoint_prefix = endpoint_prefix - self._event_emitter = event_emitter - self.host = host - self._lock = threading.Lock() - if response_parser_factory is None: - response_parser_factory = parsers.ResponseParserFactory() - self._response_parser_factory = response_parser_factory - self.http_session = http_session - if self.http_session is None: - self.http_session = URLLib3Session() - - def __repr__(self): - return f'{self._endpoint_prefix}({self.host})' - - def close(self): - self.http_session.close() - - def make_request(self, operation_model, request_dict): - logger.debug( - "Making request for %s with params: %s", - operation_model, - request_dict, - ) - return self._send_request(request_dict, operation_model) - - def create_request(self, params, operation_model=None): - request = create_request_object(params) - if operation_model: - request.stream_output = any( - [ - operation_model.has_streaming_output, - operation_model.has_event_stream_output, - ] - ) - service_id = operation_model.service_model.service_id.hyphenize() - event_name = f'request-created.{service_id}.{operation_model.name}' - self._event_emitter.emit( - event_name, - request=request, - operation_name=operation_model.name, - ) - prepared_request = self.prepare_request(request) - return prepared_request - - def _encode_headers(self, headers): - # In place encoding of headers to utf-8 if they are unicode. - for key, value in headers.items(): - if isinstance(value, str): - headers[key] = value.encode('utf-8') - - def prepare_request(self, request): - self._encode_headers(request.headers) - return request.prepare() - - def _calculate_ttl( - self, response_received_timestamp, date_header, read_timeout - ): - local_timestamp = get_current_datetime() - date_conversion = datetime.datetime.strptime( - date_header, "%a, %d %b %Y %H:%M:%S %Z" - ) - estimated_skew = date_conversion - response_received_timestamp - ttl = ( - local_timestamp - + datetime.timedelta(seconds=read_timeout) - + estimated_skew - ) - return ttl.strftime('%Y%m%dT%H%M%SZ') - - def _set_ttl(self, retries_context, read_timeout, success_response): - response_date_header = success_response[0].headers.get('Date') - has_streaming_input = retries_context.get('has_streaming_input') - if response_date_header and not has_streaming_input: - try: - response_received_timestamp = get_current_datetime() - retries_context['ttl'] = self._calculate_ttl( - response_received_timestamp, - response_date_header, - read_timeout, - ) - except Exception: - logger.debug( - "Exception received when updating retries context with TTL", - exc_info=True, - ) - - def _update_retries_context(self, context, attempt, success_response=None): - retries_context = context.setdefault('retries', {}) - retries_context['attempt'] = attempt - if 'invocation-id' not in retries_context: - retries_context['invocation-id'] = str(uuid.uuid4()) - - if success_response: - read_timeout = context['client_config'].read_timeout - self._set_ttl(retries_context, read_timeout, success_response) - - def _send_request(self, request_dict, operation_model): - attempts = 1 - context = request_dict['context'] - self._update_retries_context(context, attempts) - request = self.create_request(request_dict, operation_model) 
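The _calculate_ttl and _set_ttl helpers above estimate clock skew from the server's Date header and record a retry TTL of "now + read timeout + estimated skew". A standalone sketch of that calculation follows; the function name is hypothetical, while the header format and output format match the code above:

import datetime


def calculate_retry_ttl(response_received, date_header, read_timeout):
    """Estimate when a retried request should give up.

    The server's Date header is compared against the local time at which the
    response was received to estimate clock skew; the TTL is then
    "now + read timeout + estimated skew".
    """
    server_time = datetime.datetime.strptime(
        date_header, "%a, %d %b %Y %H:%M:%S %Z"
    )
    estimated_skew = server_time - response_received
    ttl = (
        datetime.datetime.utcnow()
        + datetime.timedelta(seconds=read_timeout)
        + estimated_skew
    )
    return ttl.strftime("%Y%m%dT%H%M%SZ")


if __name__ == "__main__":
    received_at = datetime.datetime.utcnow()
    print(calculate_retry_ttl(received_at, "Wed, 01 Jan 2025 00:00:00 GMT", 60))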
- success_response, exception = self._get_response( - request, operation_model, context - ) - while self._needs_retry( - attempts, - operation_model, - request_dict, - success_response, - exception, - ): - attempts += 1 - self._update_retries_context(context, attempts, success_response) - # If there is a stream associated with the request, we need - # to reset it before attempting to send the request again. - # This will ensure that we resend the entire contents of the - # body. - request.reset_stream() - # Create a new request when retried (including a new signature). - request = self.create_request(request_dict, operation_model) - success_response, exception = self._get_response( - request, operation_model, context - ) - if ( - success_response is not None - and 'ResponseMetadata' in success_response[1] - ): - # We want to share num retries, not num attempts. - total_retries = attempts - 1 - success_response[1]['ResponseMetadata']['RetryAttempts'] = ( - total_retries - ) - if exception is not None: - raise exception - else: - return success_response - - def _get_response(self, request, operation_model, context): - # This will return a tuple of (success_response, exception) - # and success_response is itself a tuple of - # (http_response, parsed_dict). - # If an exception occurs then the success_response is None. - # If no exception occurs then exception is None. - success_response, exception = self._do_get_response( - request, operation_model, context - ) - kwargs_to_emit = { - 'response_dict': None, - 'parsed_response': None, - 'context': context, - 'exception': exception, - } - if success_response is not None: - http_response, parsed_response = success_response - kwargs_to_emit['parsed_response'] = parsed_response - kwargs_to_emit['response_dict'] = convert_to_response_dict( - http_response, operation_model - ) - service_id = operation_model.service_model.service_id.hyphenize() - self._event_emitter.emit( - f"response-received.{service_id}.{operation_model.name}", - **kwargs_to_emit, - ) - return success_response, exception - - def _do_get_response(self, request, operation_model, context): - try: - logger.debug("Sending http request: %s", request) - history_recorder.record( - 'HTTP_REQUEST', - { - 'method': request.method, - 'headers': request.headers, - 'streaming': operation_model.has_streaming_input, - 'url': request.url, - 'body': request.body, - }, - ) - service_id = operation_model.service_model.service_id.hyphenize() - event_name = f"before-send.{service_id}.{operation_model.name}" - responses = self._event_emitter.emit(event_name, request=request) - http_response = first_non_none_response(responses) - if http_response is None: - http_response = self._send(request) - except HTTPClientError as e: - return (None, e) - except Exception as e: - logger.debug( - "Exception received when sending HTTP request.", exc_info=True - ) - return (None, e) - # This returns the http_response and the parsed_data. 
- response_dict = convert_to_response_dict( - http_response, operation_model - ) - handle_checksum_body( - http_response, - response_dict, - context, - operation_model, - ) - - http_response_record_dict = response_dict.copy() - http_response_record_dict['streaming'] = ( - operation_model.has_streaming_output - ) - history_recorder.record('HTTP_RESPONSE', http_response_record_dict) - - protocol = operation_model.service_model.resolved_protocol - customized_response_dict = {} - self._event_emitter.emit( - f"before-parse.{service_id}.{operation_model.name}", - operation_model=operation_model, - response_dict=response_dict, - customized_response_dict=customized_response_dict, - ) - parser = self._response_parser_factory.create_parser(protocol) - parsed_response = parser.parse( - response_dict, operation_model.output_shape - ) - parsed_response.update(customized_response_dict) - # Do a second parsing pass to pick up on any modeled error fields - # NOTE: Ideally, we would push this down into the parser classes but - # they currently have no reference to the operation or service model - # The parsers should probably take the operation model instead of - # output shape but we can't change that now - if http_response.status_code >= 300: - self._add_modeled_error_fields( - response_dict, - parsed_response, - operation_model, - parser, - ) - history_recorder.record('PARSED_RESPONSE', parsed_response) - return (http_response, parsed_response), None - - def _add_modeled_error_fields( - self, - response_dict, - parsed_response, - operation_model, - parser, - ): - error_code = parsed_response.get("Error", {}).get("Code") - if error_code is None: - return - service_model = operation_model.service_model - error_shape = service_model.shape_for_error_code(error_code) - if error_shape is None: - return - modeled_parse = parser.parse(response_dict, error_shape) - # TODO: avoid naming conflicts with ResponseMetadata and Error - parsed_response.update(modeled_parse) - - def _needs_retry( - self, - attempts, - operation_model, - request_dict, - response=None, - caught_exception=None, - ): - service_id = operation_model.service_model.service_id.hyphenize() - event_name = f"needs-retry.{service_id}.{operation_model.name}" - responses = self._event_emitter.emit( - event_name, - response=response, - endpoint=self, - operation=operation_model, - attempts=attempts, - caught_exception=caught_exception, - request_dict=request_dict, - ) - handler_response = first_non_none_response(responses) - if handler_response is None: - return False - else: - # Request needs to be retried, and we need to sleep - # for the specified number of times. 
- logger.debug( - "Response received to retry, sleeping for %s seconds", - handler_response, - ) - time.sleep(handler_response) - return True - - def _send(self, request): - return self.http_session.send(request) - - -class EndpointCreator: - def __init__(self, event_emitter): - self._event_emitter = event_emitter - - def create_endpoint( - self, - service_model, - region_name, - endpoint_url, - verify=None, - response_parser_factory=None, - timeout=DEFAULT_TIMEOUT, - max_pool_connections=MAX_POOL_CONNECTIONS, - http_session_cls=URLLib3Session, - proxies=None, - socket_options=None, - client_cert=None, - proxies_config=None, - ): - if not is_valid_endpoint_url( - endpoint_url - ) and not is_valid_ipv6_endpoint_url(endpoint_url): - raise ValueError(f"Invalid endpoint: {endpoint_url}") - - if proxies is None: - proxies = self._get_proxies(endpoint_url) - endpoint_prefix = service_model.endpoint_prefix - - logger.debug('Setting %s timeout as %s', endpoint_prefix, timeout) - http_session = http_session_cls( - timeout=timeout, - proxies=proxies, - verify=self._get_verify_value(verify), - max_pool_connections=max_pool_connections, - socket_options=socket_options, - client_cert=client_cert, - proxies_config=proxies_config, - ) - - return Endpoint( - endpoint_url, - endpoint_prefix=endpoint_prefix, - event_emitter=self._event_emitter, - response_parser_factory=response_parser_factory, - http_session=http_session, - ) - - def _get_proxies(self, url): - # We could also support getting proxies from a config file, - # but for now proxy support is taken from the environment. - return get_environ_proxies(url) - - def _get_verify_value(self, verify): - # This is to account for: - # https://github.com/kennethreitz/requests/issues/1436 - # where we need to honor REQUESTS_CA_BUNDLE because we're creating our - # own request objects. - # First, if verify is not None, then the user explicitly specified - # a value so this automatically wins. - if verify is not None: - return verify - # Otherwise use the value from REQUESTS_CA_BUNDLE, or default to - # True if the env var does not exist. - return os.environ.get('REQUESTS_CA_BUNDLE', True) diff --git a/venv/Lib/site-packages/botocore/endpoint_provider.py b/venv/Lib/site-packages/botocore/endpoint_provider.py deleted file mode 100644 index d76f9ac..0000000 --- a/venv/Lib/site-packages/botocore/endpoint_provider.py +++ /dev/null @@ -1,723 +0,0 @@ -# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -""" -NOTE: All classes and functions in this module are considered private and are -subject to abrupt breaking changes. Please do not use them directly. 
- -To view the raw JSON that the objects in this module represent, please -go to any `endpoint-rule-set.json` file in /botocore/data/// -or you can look at the test files in /tests/unit/data/endpoints/valid-rules/ -""" - -import logging -import re -from enum import Enum -from string import Formatter -from typing import NamedTuple - -from botocore import xform_name -from botocore.compat import IPV4_RE, quote, urlparse -from botocore.exceptions import EndpointResolutionError -from botocore.utils import ( - ArnParser, - InvalidArnException, - is_valid_ipv4_endpoint_url, - is_valid_ipv6_endpoint_url, - lru_cache_weakref, - normalize_url_path, - percent_encode, -) - -logger = logging.getLogger(__name__) - -TEMPLATE_STRING_RE = re.compile(r"\{[a-zA-Z#]+\}") -GET_ATTR_RE = re.compile(r"(\w*)\[(\d+)\]") -VALID_HOST_LABEL_RE = re.compile( - r"^(?!-)[a-zA-Z\d-]{1,63}(?= len(value): - return None - return value[index] - else: - value = value[part] - return value - - def format_partition_output(self, partition): - output = partition["outputs"] - output["name"] = partition["id"] - return output - - def is_partition_match(self, region, partition): - matches_regex = re.match(partition["regionRegex"], region) is not None - return region in partition["regions"] or matches_regex - - def aws_partition(self, value): - """Match a region string to an AWS partition. - - :type value: str - :rtype: dict - """ - partitions = self.partitions_data['partitions'] - - if value is not None: - for partition in partitions: - if self.is_partition_match(value, partition): - return self.format_partition_output(partition) - - # return the default partition if no matches were found - aws_partition = partitions[0] - return self.format_partition_output(aws_partition) - - def aws_parse_arn(self, value): - """Parse and validate string for ARN components. - - :type value: str - :rtype: dict - """ - if value is None or not value.startswith("arn:"): - return None - - try: - arn_dict = ARN_PARSER.parse_arn(value) - except InvalidArnException: - return None - - # partition, resource, and service are required - if not all( - (arn_dict["partition"], arn_dict["service"], arn_dict["resource"]) - ): - return None - - arn_dict["accountId"] = arn_dict.pop("account") - - resource = arn_dict.pop("resource") - arn_dict["resourceId"] = resource.replace(":", "/").split("/") - - return arn_dict - - def is_valid_host_label(self, value, allow_subdomains): - """Evaluates whether a value is a valid host label per - RFC 1123. If allow_subdomains is True, split on `.` and validate - each component separately. - - :type value: str - :type allow_subdomains: bool - :rtype: bool - """ - if value is None or allow_subdomains is False and value.count(".") > 0: - return False - - if allow_subdomains is True: - return all( - self.is_valid_host_label(label, False) - for label in value.split(".") - ) - - return VALID_HOST_LABEL_RE.match(value) is not None - - def string_equals(self, value1, value2): - """Evaluates two string values for equality. - - :type value1: str - :type value2: str - :rtype: bool - """ - if not all(isinstance(val, str) for val in (value1, value2)): - msg = f"Both values must be strings, not {type(value1)} and {type(value2)}." - raise EndpointResolutionError(msg=msg) - return value1 == value2 - - def uri_encode(self, value): - """Perform percent-encoding on an input string. 
- - :type value: str - :rytpe: str - """ - if value is None: - return None - - return percent_encode(value) - - def parse_url(self, value): - """Parse a URL string into components. - - :type value: str - :rtype: dict - """ - if value is None: - return None - - url_components = urlparse(value) - try: - # url_parse may assign non-integer values to - # `port` and will fail when accessed. - url_components.port - except ValueError: - return None - - scheme = url_components.scheme - query = url_components.query - # URLs with queries are not supported - if scheme not in ("https", "http") or len(query) > 0: - return None - - path = url_components.path - normalized_path = quote(normalize_url_path(path)) - if not normalized_path.endswith("/"): - normalized_path = f"{normalized_path}/" - - return { - "scheme": scheme, - "authority": url_components.netloc, - "path": path, - "normalizedPath": normalized_path, - "isIp": is_valid_ipv4_endpoint_url(value) - or is_valid_ipv6_endpoint_url(value), - } - - def boolean_equals(self, value1, value2): - """Evaluates two boolean values for equality. - - :type value1: bool - :type value2: bool - :rtype: bool - """ - if not all(isinstance(val, bool) for val in (value1, value2)): - msg = f"Both arguments must be bools, not {type(value1)} and {type(value2)}." - raise EndpointResolutionError(msg=msg) - return value1 is value2 - - def is_ascii(self, value): - """Evaluates if a string only contains ASCII characters. - - :type value: str - :rtype: bool - """ - try: - value.encode("ascii") - return True - except UnicodeEncodeError: - return False - - def substring(self, value, start, stop, reverse): - """Computes a substring given the start index and end index. If `reverse` is - True, slice the string from the end instead. - - :type value: str - :type start: int - :type end: int - :type reverse: bool - :rtype: str - """ - if not isinstance(value, str): - msg = f"Input must be a string, not {type(value)}." - raise EndpointResolutionError(msg=msg) - if start >= stop or len(value) < stop or not self.is_ascii(value): - return None - - if reverse is True: - r_start = len(value) - stop - r_stop = len(value) - start - return value[r_start:r_stop] - - return value[start:stop] - - def _not(self, value): - """A function implementation of the logical operator `not`. - - :type value: Any - :rtype: bool - """ - return not value - - def aws_is_virtual_hostable_s3_bucket(self, value, allow_subdomains): - """Evaluates whether a value is a valid bucket name for virtual host - style bucket URLs. To pass, the value must meet the following criteria: - 1. is_valid_host_label(value) is True - 2. length between 3 and 63 characters (inclusive) - 3. does not contain uppercase characters - 4. is not formatted as an IP address - - If allow_subdomains is True, split on `.` and validate - each component separately. 
- - :type value: str - :type allow_subdomains: bool - :rtype: bool - """ - if ( - value is None - or len(value) < 3 - or value.lower() != value - or IPV4_RE.match(value) is not None - ): - return False - - return self.is_valid_host_label( - value, allow_subdomains=allow_subdomains - ) - - -# maintains backwards compatibility as `Library` was misspelled -# in earlier versions -RuleSetStandardLibary = RuleSetStandardLibrary - - -class BaseRule: - """Base interface for individual endpoint rules.""" - - def __init__(self, conditions, documentation=None): - self.conditions = conditions - self.documentation = documentation - - def evaluate(self, scope_vars, rule_lib): - raise NotImplementedError() - - def evaluate_conditions(self, scope_vars, rule_lib): - """Determine if all conditions in a rule are met. - - :type scope_vars: dict - :type rule_lib: RuleSetStandardLibrary - :rtype: bool - """ - for func_signature in self.conditions: - result = rule_lib.call_function(func_signature, scope_vars) - if result is False or result is None: - return False - return True - - -class RuleSetEndpoint(NamedTuple): - """A resolved endpoint object returned by a rule.""" - - url: str - properties: dict - headers: dict - - -class EndpointRule(BaseRule): - def __init__(self, endpoint, **kwargs): - super().__init__(**kwargs) - self.endpoint = endpoint - - def evaluate(self, scope_vars, rule_lib): - """Determine if conditions are met to provide a valid endpoint. - - :type scope_vars: dict - :rtype: RuleSetEndpoint - """ - if self.evaluate_conditions(scope_vars, rule_lib): - url = rule_lib.resolve_value(self.endpoint["url"], scope_vars) - properties = self.resolve_properties( - self.endpoint.get("properties", {}), - scope_vars, - rule_lib, - ) - headers = self.resolve_headers(scope_vars, rule_lib) - return RuleSetEndpoint( - url=url, properties=properties, headers=headers - ) - - return None - - def resolve_properties(self, properties, scope_vars, rule_lib): - """Traverse `properties` attribute, resolving any template strings. - - :type properties: dict/list/str - :type scope_vars: dict - :type rule_lib: RuleSetStandardLibrary - :rtype: dict - """ - if isinstance(properties, list): - return [ - self.resolve_properties(prop, scope_vars, rule_lib) - for prop in properties - ] - elif isinstance(properties, dict): - return { - key: self.resolve_properties(value, scope_vars, rule_lib) - for key, value in properties.items() - } - elif rule_lib.is_template(properties): - return rule_lib.resolve_template_string(properties, scope_vars) - - return properties - - def resolve_headers(self, scope_vars, rule_lib): - """Iterate through headers attribute resolving all values. - - :type scope_vars: dict - :type rule_lib: RuleSetStandardLibrary - :rtype: dict - """ - resolved_headers = {} - headers = self.endpoint.get("headers", {}) - - for header, values in headers.items(): - resolved_headers[header] = [ - rule_lib.resolve_value(item, scope_vars) for item in values - ] - return resolved_headers - - -class ErrorRule(BaseRule): - def __init__(self, error, **kwargs): - super().__init__(**kwargs) - self.error = error - - def evaluate(self, scope_vars, rule_lib): - """If an error rule's conditions are met, raise an error rule. 
- - :type scope_vars: dict - :type rule_lib: RuleSetStandardLibrary - :rtype: EndpointResolutionError - """ - if self.evaluate_conditions(scope_vars, rule_lib): - error = rule_lib.resolve_value(self.error, scope_vars) - raise EndpointResolutionError(msg=error) - return None - - -class TreeRule(BaseRule): - """A tree rule is non-terminal meaning it will never be returned to a provider. - Additionally this means it has no attributes that need to be resolved. - """ - - def __init__(self, rules, **kwargs): - super().__init__(**kwargs) - self.rules = [RuleCreator.create(**rule) for rule in rules] - - def evaluate(self, scope_vars, rule_lib): - """If a tree rule's conditions are met, iterate its sub-rules - and return first result found. - - :type scope_vars: dict - :type rule_lib: RuleSetStandardLibrary - :rtype: RuleSetEndpoint/EndpointResolutionError - """ - if self.evaluate_conditions(scope_vars, rule_lib): - for rule in self.rules: - # don't share scope_vars between rules - rule_result = rule.evaluate(scope_vars.copy(), rule_lib) - if rule_result: - return rule_result - return None - - -class RuleCreator: - endpoint = EndpointRule - error = ErrorRule - tree = TreeRule - - @classmethod - def create(cls, **kwargs): - """Create a rule instance from metadata. - - :rtype: TreeRule/EndpointRule/ErrorRule - """ - rule_type = kwargs.pop("type") - try: - rule_class = getattr(cls, rule_type) - except AttributeError: - raise EndpointResolutionError( - msg=f"Unknown rule type: {rule_type}. A rule must " - "be of type tree, endpoint or error." - ) - else: - return rule_class(**kwargs) - - -class ParameterType(Enum): - """Translation from `type` attribute to native Python type.""" - - string = str - boolean = bool - stringarray = tuple - - -class ParameterDefinition: - """The spec of an individual parameter defined in a RuleSet.""" - - def __init__( - self, - name, - parameter_type, - documentation=None, - builtIn=None, - default=None, - required=None, - deprecated=None, - ): - self.name = name - try: - self.parameter_type = getattr( - ParameterType, parameter_type.lower() - ).value - except AttributeError: - raise EndpointResolutionError( - msg=f"Unknown parameter type: {parameter_type}. " - "A parameter must be of type string, boolean, or stringarray." - ) - self.documentation = documentation - self.builtin = builtIn - self.default = default - self.required = required - self.deprecated = deprecated - - def validate_input(self, value): - """Perform base validation on parameter input. - - :type value: Any - :raises: EndpointParametersError - """ - - if not isinstance(value, self.parameter_type): - raise EndpointResolutionError( - msg=f"Value ({self.name}) is the wrong " - f"type. Must be {self.parameter_type}." - ) - if self.deprecated is not None: - depr_str = f"{self.name} has been deprecated." - msg = self.deprecated.get("message") - since = self.deprecated.get("since") - if msg: - depr_str += f"\n{msg}" - if since: - depr_str += f"\nDeprecated since {since}." 
- logger.info(depr_str) - - return None - - def process_input(self, value): - """Process input against spec, applying default if value is None.""" - if value is None: - if self.default is not None: - return self.default - if self.required: - raise EndpointResolutionError( - msg=f"Cannot find value for required parameter {self.name}" - ) - # in all other cases, the parameter will keep the value None - else: - self.validate_input(value) - return value - - -class RuleSet: - """Collection of rules to derive a routable service endpoint.""" - - def __init__( - self, version, parameters, rules, partitions, documentation=None - ): - self.version = version - self.parameters = self._ingest_parameter_spec(parameters) - self.rules = [RuleCreator.create(**rule) for rule in rules] - self.rule_lib = RuleSetStandardLibrary(partitions) - self.documentation = documentation - - def _ingest_parameter_spec(self, parameters): - return { - name: ParameterDefinition( - name, - spec["type"], - spec.get("documentation"), - spec.get("builtIn"), - spec.get("default"), - spec.get("required"), - spec.get("deprecated"), - ) - for name, spec in parameters.items() - } - - def process_input_parameters(self, input_params): - """Process each input parameter against its spec. - - :type input_params: dict - """ - for name, spec in self.parameters.items(): - value = spec.process_input(input_params.get(name)) - if value is not None: - input_params[name] = value - return None - - def evaluate(self, input_parameters): - """Evaluate input parameters against rules returning first match. - - :type input_parameters: dict - """ - self.process_input_parameters(input_parameters) - for rule in self.rules: - evaluation = rule.evaluate(input_parameters.copy(), self.rule_lib) - if evaluation is not None: - return evaluation - return None - - -class EndpointProvider: - """Derives endpoints from a RuleSet for given input parameters.""" - - def __init__(self, ruleset_data, partition_data): - self.ruleset = RuleSet(**ruleset_data, partitions=partition_data) - - @lru_cache_weakref(maxsize=CACHE_SIZE) - def resolve_endpoint(self, **input_parameters): - """Match input parameters to a rule. - - :type input_parameters: dict - :rtype: RuleSetEndpoint - """ - params_for_error = input_parameters.copy() - endpoint = self.ruleset.evaluate(input_parameters) - if endpoint is None: - param_string = "\n".join( - [f"{key}: {value}" for key, value in params_for_error.items()] - ) - raise EndpointResolutionError( - msg=f"No endpoint found for parameters:\n{param_string}" - ) - return endpoint diff --git a/venv/Lib/site-packages/botocore/errorfactory.py b/venv/Lib/site-packages/botocore/errorfactory.py deleted file mode 100644 index 6084e51..0000000 --- a/venv/Lib/site-packages/botocore/errorfactory.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
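The endpoint_provider module removed above resolves an endpoint by evaluating rules in order and returning the first rule whose conditions all hold, raising EndpointResolutionError when nothing matches. The sketch below is a drastically simplified, hypothetical illustration of that first-match evaluation; it does not model tree or error rules or botocore's actual rule objects:

class EndpointResolutionError(Exception):
    pass


def evaluate_rules(rules, params):
    """Return the endpoint of the first rule whose conditions all hold.

    Each rule is a (conditions, url_template) pair, where conditions is a
    list of callables over the parameter dict. This mirrors the first-match
    behaviour of RuleSet.evaluate above, without tree or error rules.
    """
    for conditions, url_template in rules:
        if all(condition(params) for condition in conditions):
            return url_template.format(**params)
    raise EndpointResolutionError(f"No endpoint found for parameters: {params}")


if __name__ == "__main__":
    rules = [
        ([lambda p: p.get("UseFIPS")], "https://example-fips.{Region}.amazonaws.com"),
        ([lambda p: True], "https://example.{Region}.amazonaws.com"),
    ]
    print(evaluate_rules(rules, {"Region": "us-east-1", "UseFIPS": False}))
    # prints: https://example.us-east-1.amazonaws.com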
-from botocore.exceptions import ClientError -from botocore.utils import get_service_module_name - - -class BaseClientExceptions: - ClientError = ClientError - - def __init__(self, code_to_exception): - """Base class for exceptions object on a client - - :type code_to_exception: dict - :param code_to_exception: Mapping of error codes (strings) to exception - class that should be raised when encountering a particular - error code. - """ - self._code_to_exception = code_to_exception - - def from_code(self, error_code): - """Retrieves the error class based on the error code - - This is helpful for identifying the exception class needing to be - caught based on the ClientError.parsed_reponse['Error']['Code'] value - - :type error_code: string - :param error_code: The error code associated to a ClientError exception - - :rtype: ClientError or a subclass of ClientError - :returns: The appropriate modeled exception class for that error - code. If the error code does not match any of the known - modeled exceptions then return a generic ClientError. - """ - return self._code_to_exception.get(error_code, self.ClientError) - - def __getattr__(self, name): - exception_cls_names = [ - exception_cls.__name__ - for exception_cls in self._code_to_exception.values() - ] - raise AttributeError( - rf"{self} object has no attribute {name}. " - rf"Valid exceptions are: {', '.join(exception_cls_names)}" - ) - - -class ClientExceptionsFactory: - def __init__(self): - self._client_exceptions_cache = {} - - def create_client_exceptions(self, service_model): - """Creates a ClientExceptions object for the particular service client - - :type service_model: botocore.model.ServiceModel - :param service_model: The service model for the client - - :rtype: object that subclasses from BaseClientExceptions - :returns: The exceptions object of a client that can be used - to grab the various different modeled exceptions. - """ - service_name = service_model.service_name - if service_name not in self._client_exceptions_cache: - client_exceptions = self._create_client_exceptions(service_model) - self._client_exceptions_cache[service_name] = client_exceptions - return self._client_exceptions_cache[service_name] - - def _create_client_exceptions(self, service_model): - cls_props = {} - code_to_exception = {} - for error_shape in service_model.error_shapes: - exception_name = str(error_shape.name) - exception_cls = type(exception_name, (ClientError,), {}) - cls_props[exception_name] = exception_cls - code = str(error_shape.error_code) - code_to_exception[code] = exception_cls - cls_name = str(get_service_module_name(service_model) + 'Exceptions') - client_exceptions_cls = type( - cls_name, (BaseClientExceptions,), cls_props - ) - return client_exceptions_cls(code_to_exception) diff --git a/venv/Lib/site-packages/botocore/eventstream.py b/venv/Lib/site-packages/botocore/eventstream.py deleted file mode 100644 index 865e65b..0000000 --- a/venv/Lib/site-packages/botocore/eventstream.py +++ /dev/null @@ -1,622 +0,0 @@ -# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. 
See the License for the specific -# language governing permissions and limitations under the License. -"""Binary Event Stream Decoding""" - -from binascii import crc32 -from struct import unpack - -from botocore.exceptions import EventStreamError - -# byte length of the prelude (total_length + header_length + prelude_crc) -_PRELUDE_LENGTH = 12 -_MAX_HEADERS_LENGTH = 128 * 1024 # 128 Kb -_MAX_PAYLOAD_LENGTH = 24 * 1024 * 1024 # 24 Mb - - -class ParserError(Exception): - """Base binary flow encoding parsing exception.""" - - pass - - -class DuplicateHeader(ParserError): - """Duplicate header found in the event.""" - - def __init__(self, header): - message = f'Duplicate header present: "{header}"' - super().__init__(message) - - -class InvalidHeadersLength(ParserError): - """Headers length is longer than the maximum.""" - - def __init__(self, length): - message = f'Header length of {length} exceeded the maximum of {_MAX_HEADERS_LENGTH}' - super().__init__(message) - - -class InvalidPayloadLength(ParserError): - """Payload length is longer than the maximum.""" - - def __init__(self, length): - message = f'Payload length of {length} exceeded the maximum of {_MAX_PAYLOAD_LENGTH}' - super().__init__(message) - - -class ChecksumMismatch(ParserError): - """Calculated checksum did not match the expected checksum.""" - - def __init__(self, expected, calculated): - message = f'Checksum mismatch: expected 0x{expected:08x}, calculated 0x{calculated:08x}' - super().__init__(message) - - -class NoInitialResponseError(ParserError): - """An event of type initial-response was not received. - - This exception is raised when the event stream produced no events or - the first event in the stream was not of the initial-response type. - """ - - def __init__(self): - message = 'First event was not of the initial-response type' - super().__init__(message) - - -class DecodeUtils: - """Unpacking utility functions used in the decoder. - - All methods on this class take raw bytes and return a tuple containing - the value parsed from the bytes and the number of bytes consumed to parse - that value. - """ - - UINT8_BYTE_FORMAT = '!B' - UINT16_BYTE_FORMAT = '!H' - UINT32_BYTE_FORMAT = '!I' - INT8_BYTE_FORMAT = '!b' - INT16_BYTE_FORMAT = '!h' - INT32_BYTE_FORMAT = '!i' - INT64_BYTE_FORMAT = '!q' - PRELUDE_BYTE_FORMAT = '!III' - - # uint byte size to unpack format - UINT_BYTE_FORMAT = { - 1: UINT8_BYTE_FORMAT, - 2: UINT16_BYTE_FORMAT, - 4: UINT32_BYTE_FORMAT, - } - - @staticmethod - def unpack_true(data): - """This method consumes none of the provided bytes and returns True. - - :type data: bytes - :param data: The bytes to parse from. This is ignored in this method. - - :rtype: tuple - :rtype: (bool, int) - :returns: The tuple (True, 0) - """ - return True, 0 - - @staticmethod - def unpack_false(data): - """This method consumes none of the provided bytes and returns False. - - :type data: bytes - :param data: The bytes to parse from. This is ignored in this method. - - :rtype: tuple - :rtype: (bool, int) - :returns: The tuple (False, 0) - """ - return False, 0 - - @staticmethod - def unpack_uint8(data): - """Parse an unsigned 8-bit integer from the bytes. - - :type data: bytes - :param data: The bytes to parse from. - - :rtype: (int, int) - :returns: A tuple containing the (parsed integer value, bytes consumed) - """ - value = unpack(DecodeUtils.UINT8_BYTE_FORMAT, data[:1])[0] - return value, 1 - - @staticmethod - def unpack_uint32(data): - """Parse an unsigned 32-bit integer from the bytes. 
- - :type data: bytes - :param data: The bytes to parse from. - - :rtype: (int, int) - :returns: A tuple containing the (parsed integer value, bytes consumed) - """ - value = unpack(DecodeUtils.UINT32_BYTE_FORMAT, data[:4])[0] - return value, 4 - - @staticmethod - def unpack_int8(data): - """Parse a signed 8-bit integer from the bytes. - - :type data: bytes - :param data: The bytes to parse from. - - :rtype: (int, int) - :returns: A tuple containing the (parsed integer value, bytes consumed) - """ - value = unpack(DecodeUtils.INT8_BYTE_FORMAT, data[:1])[0] - return value, 1 - - @staticmethod - def unpack_int16(data): - """Parse a signed 16-bit integer from the bytes. - - :type data: bytes - :param data: The bytes to parse from. - - :rtype: tuple - :rtype: (int, int) - :returns: A tuple containing the (parsed integer value, bytes consumed) - """ - value = unpack(DecodeUtils.INT16_BYTE_FORMAT, data[:2])[0] - return value, 2 - - @staticmethod - def unpack_int32(data): - """Parse a signed 32-bit integer from the bytes. - - :type data: bytes - :param data: The bytes to parse from. - - :rtype: tuple - :rtype: (int, int) - :returns: A tuple containing the (parsed integer value, bytes consumed) - """ - value = unpack(DecodeUtils.INT32_BYTE_FORMAT, data[:4])[0] - return value, 4 - - @staticmethod - def unpack_int64(data): - """Parse a signed 64-bit integer from the bytes. - - :type data: bytes - :param data: The bytes to parse from. - - :rtype: tuple - :rtype: (int, int) - :returns: A tuple containing the (parsed integer value, bytes consumed) - """ - value = unpack(DecodeUtils.INT64_BYTE_FORMAT, data[:8])[0] - return value, 8 - - @staticmethod - def unpack_byte_array(data, length_byte_size=2): - """Parse a variable length byte array from the bytes. - - The bytes are expected to be in the following format: - [ length ][0 ... length bytes] - where length is an unsigned integer represented in the smallest number - of bytes to hold the maximum length of the array. - - :type data: bytes - :param data: The bytes to parse from. - - :type length_byte_size: int - :param length_byte_size: The byte size of the preceding integer that - represents the length of the array. Supported values are 1, 2, and 4. - - :rtype: (bytes, int) - :returns: A tuple containing the (parsed byte array, bytes consumed). - """ - uint_byte_format = DecodeUtils.UINT_BYTE_FORMAT[length_byte_size] - length = unpack(uint_byte_format, data[:length_byte_size])[0] - bytes_end = length + length_byte_size - array_bytes = data[length_byte_size:bytes_end] - return array_bytes, bytes_end - - @staticmethod - def unpack_utf8_string(data, length_byte_size=2): - """Parse a variable length utf-8 string from the bytes. - - The bytes are expected to be in the following format: - [ length ][0 ... length bytes] - where length is an unsigned integer represented in the smallest number - of bytes to hold the maximum length of the array and the following - bytes are a valid utf-8 string. - - :type data: bytes - :param bytes: The bytes to parse from. - - :type length_byte_size: int - :param length_byte_size: The byte size of the preceding integer that - represents the length of the array. Supported values are 1, 2, and 4. - - :rtype: (str, int) - :returns: A tuple containing the (utf-8 string, bytes consumed). - """ - array_bytes, consumed = DecodeUtils.unpack_byte_array( - data, length_byte_size - ) - return array_bytes.decode('utf-8'), consumed - - @staticmethod - def unpack_uuid(data): - """Parse a 16-byte uuid from the bytes. 
- - :type data: bytes - :param data: The bytes to parse from. - - :rtype: (bytes, int) - :returns: A tuple containing the (uuid bytes, bytes consumed). - """ - return data[:16], 16 - - @staticmethod - def unpack_prelude(data): - """Parse the prelude for an event stream message from the bytes. - - The prelude for an event stream message has the following format: - [total_length][header_length][prelude_crc] - where each field is an unsigned 32-bit integer. - - :rtype: ((int, int, int), int) - :returns: A tuple of ((total_length, headers_length, prelude_crc), - consumed) - """ - return (unpack(DecodeUtils.PRELUDE_BYTE_FORMAT, data), _PRELUDE_LENGTH) - - -def _validate_checksum(data, checksum, crc=0): - # To generate the same numeric value across all Python versions and - # platforms use crc32(data) & 0xffffffff. - computed_checksum = crc32(data, crc) & 0xFFFFFFFF - if checksum != computed_checksum: - raise ChecksumMismatch(checksum, computed_checksum) - - -class MessagePrelude: - """Represents the prelude of an event stream message.""" - - def __init__(self, total_length, headers_length, crc): - self.total_length = total_length - self.headers_length = headers_length - self.crc = crc - - @property - def payload_length(self): - """Calculates the total payload length. - - The extra minus 4 bytes is for the message CRC. - - :rtype: int - :returns: The total payload length. - """ - return self.total_length - self.headers_length - _PRELUDE_LENGTH - 4 - - @property - def payload_end(self): - """Calculates the byte offset for the end of the message payload. - - The extra minus 4 bytes is for the message CRC. - - :rtype: int - :returns: The byte offset from the beginning of the event stream - message to the end of the payload. - """ - return self.total_length - 4 - - @property - def headers_end(self): - """Calculates the byte offset for the end of the message headers. - - :rtype: int - :returns: The byte offset from the beginning of the event stream - message to the end of the headers. - """ - return _PRELUDE_LENGTH + self.headers_length - - -class EventStreamMessage: - """Represents an event stream message.""" - - def __init__(self, prelude, headers, payload, crc): - self.prelude = prelude - self.headers = headers - self.payload = payload - self.crc = crc - - def to_response_dict(self, status_code=200): - message_type = self.headers.get(':message-type') - if message_type == 'error' or message_type == 'exception': - status_code = 400 - return { - 'status_code': status_code, - 'headers': self.headers, - 'body': self.payload, - } - - -class EventStreamHeaderParser: - """Parses the event headers from an event stream message. - - Expects all of the header data upfront and creates a dictionary of headers - to return. This object can be reused multiple times to parse the headers - from multiple event stream messages. 
- """ - - # Maps header type to appropriate unpacking function - # These unpacking functions return the value and the amount unpacked - _HEADER_TYPE_MAP = { - # boolean_true - 0: DecodeUtils.unpack_true, - # boolean_false - 1: DecodeUtils.unpack_false, - # byte - 2: DecodeUtils.unpack_int8, - # short - 3: DecodeUtils.unpack_int16, - # integer - 4: DecodeUtils.unpack_int32, - # long - 5: DecodeUtils.unpack_int64, - # byte_array - 6: DecodeUtils.unpack_byte_array, - # string - 7: DecodeUtils.unpack_utf8_string, - # timestamp - 8: DecodeUtils.unpack_int64, - # uuid - 9: DecodeUtils.unpack_uuid, - } - - def __init__(self): - self._data = None - - def parse(self, data): - """Parses the event stream headers from an event stream message. - - :type data: bytes - :param data: The bytes that correspond to the headers section of an - event stream message. - - :rtype: dict - :returns: A dictionary of header key, value pairs. - """ - self._data = data - return self._parse_headers() - - def _parse_headers(self): - headers = {} - while self._data: - name, value = self._parse_header() - if name in headers: - raise DuplicateHeader(name) - headers[name] = value - return headers - - def _parse_header(self): - name = self._parse_name() - value = self._parse_value() - return name, value - - def _parse_name(self): - name, consumed = DecodeUtils.unpack_utf8_string(self._data, 1) - self._advance_data(consumed) - return name - - def _parse_type(self): - type, consumed = DecodeUtils.unpack_uint8(self._data) - self._advance_data(consumed) - return type - - def _parse_value(self): - header_type = self._parse_type() - value_unpacker = self._HEADER_TYPE_MAP[header_type] - value, consumed = value_unpacker(self._data) - self._advance_data(consumed) - return value - - def _advance_data(self, consumed): - self._data = self._data[consumed:] - - -class EventStreamBuffer: - """Streaming based event stream buffer - - A buffer class that wraps bytes from an event stream providing parsed - messages as they become available via an iterable interface. - """ - - def __init__(self): - self._data = b'' - self._prelude = None - self._header_parser = EventStreamHeaderParser() - - def add_data(self, data): - """Add data to the buffer. 
- - :type data: bytes - :param data: The bytes to add to the buffer to be used when parsing - """ - self._data += data - - def _validate_prelude(self, prelude): - if prelude.headers_length > _MAX_HEADERS_LENGTH: - raise InvalidHeadersLength(prelude.headers_length) - - if prelude.payload_length > _MAX_PAYLOAD_LENGTH: - raise InvalidPayloadLength(prelude.payload_length) - - def _parse_prelude(self): - prelude_bytes = self._data[:_PRELUDE_LENGTH] - raw_prelude, _ = DecodeUtils.unpack_prelude(prelude_bytes) - prelude = MessagePrelude(*raw_prelude) - # The minus 4 removes the prelude crc from the bytes to be checked - _validate_checksum(prelude_bytes[: _PRELUDE_LENGTH - 4], prelude.crc) - self._validate_prelude(prelude) - return prelude - - def _parse_headers(self): - header_bytes = self._data[_PRELUDE_LENGTH : self._prelude.headers_end] - return self._header_parser.parse(header_bytes) - - def _parse_payload(self): - prelude = self._prelude - payload_bytes = self._data[prelude.headers_end : prelude.payload_end] - return payload_bytes - - def _parse_message_crc(self): - prelude = self._prelude - crc_bytes = self._data[prelude.payload_end : prelude.total_length] - message_crc, _ = DecodeUtils.unpack_uint32(crc_bytes) - return message_crc - - def _parse_message_bytes(self): - # The minus 4 includes the prelude crc to the bytes to be checked - message_bytes = self._data[ - _PRELUDE_LENGTH - 4 : self._prelude.payload_end - ] - return message_bytes - - def _validate_message_crc(self): - message_crc = self._parse_message_crc() - message_bytes = self._parse_message_bytes() - _validate_checksum(message_bytes, message_crc, crc=self._prelude.crc) - return message_crc - - def _parse_message(self): - crc = self._validate_message_crc() - headers = self._parse_headers() - payload = self._parse_payload() - message = EventStreamMessage(self._prelude, headers, payload, crc) - self._prepare_for_next_message() - return message - - def _prepare_for_next_message(self): - # Advance the data and reset the current prelude - self._data = self._data[self._prelude.total_length :] - self._prelude = None - - def next(self): - """Provides the next available message parsed from the stream - - :rtype: EventStreamMessage - :returns: The next event stream message - """ - if len(self._data) < _PRELUDE_LENGTH: - raise StopIteration() - - if self._prelude is None: - self._prelude = self._parse_prelude() - - if len(self._data) < self._prelude.total_length: - raise StopIteration() - - return self._parse_message() - - def __next__(self): - return self.next() - - def __iter__(self): - return self - - -class EventStream: - """Wrapper class for an event stream body. - - This wraps the underlying streaming body, parsing it for individual events - and yielding them as they come available through the iterator interface. - - The following example uses the S3 select API to get structured data out of - an object stored in S3 using an event stream. 
- - **Example:** - :: - from botocore.session import Session - - s3 = Session().create_client('s3') - response = s3.select_object_content( - Bucket='bucketname', - Key='keyname', - ExpressionType='SQL', - RequestProgress={'Enabled': True}, - Expression="SELECT * FROM S3Object s", - InputSerialization={'CSV': {}}, - OutputSerialization={'CSV': {}}, - ) - # This is the event stream in the response - event_stream = response['Payload'] - end_event_received = False - with open('output', 'wb') as f: - # Iterate over events in the event stream as they come - for event in event_stream: - # If we received a records event, write the data to a file - if 'Records' in event: - data = event['Records']['Payload'] - f.write(data) - # If we received a progress event, print the details - elif 'Progress' in event: - print(event['Progress']['Details']) - # End event indicates that the request finished successfully - elif 'End' in event: - print('Result is complete') - end_event_received = True - if not end_event_received: - raise Exception("End event not received, request incomplete.") - """ - - def __init__(self, raw_stream, output_shape, parser, operation_name): - self._raw_stream = raw_stream - self._output_shape = output_shape - self._operation_name = operation_name - self._parser = parser - self._event_generator = self._create_raw_event_generator() - - def __iter__(self): - for event in self._event_generator: - parsed_event = self._parse_event(event) - if parsed_event: - yield parsed_event - - def _create_raw_event_generator(self): - event_stream_buffer = EventStreamBuffer() - for chunk in self._raw_stream.stream(): - event_stream_buffer.add_data(chunk) - yield from event_stream_buffer - - def _parse_event(self, event): - response_dict = event.to_response_dict() - parsed_response = self._parser.parse(response_dict, self._output_shape) - if response_dict['status_code'] == 200: - return parsed_response - else: - raise EventStreamError(parsed_response, self._operation_name) - - def get_initial_response(self): - try: - initial_event = next(self._event_generator) - event_type = initial_event.headers.get(':event-type') - if event_type == 'initial-response': - return initial_event - except StopIteration: - pass - raise NoInitialResponseError() - - def close(self): - """Closes the underlying streaming body.""" - self._raw_stream.close() diff --git a/venv/Lib/site-packages/botocore/exceptions.py b/venv/Lib/site-packages/botocore/exceptions.py deleted file mode 100644 index 294bbf4..0000000 --- a/venv/Lib/site-packages/botocore/exceptions.py +++ /dev/null @@ -1,857 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
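The EventStream wrapper deleted above is essentially a convenience around EventStreamBuffer: it reads chunks from the raw HTTP body, feeds them to the buffer, and yields each parsed message. A minimal sketch of that driving pattern, assuming `chunks` is any iterable of raw response bytes (illustrative only, not part of this diff):

    from botocore.eventstream import EventStreamBuffer

    buffer = EventStreamBuffer()
    for chunk in chunks:          # bytes read from the streaming HTTP body
        buffer.add_data(chunk)
        for message in buffer:    # yields complete EventStreamMessage objects; stops when more data is needed
            print(message.headers.get(':event-type'), len(message.payload))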
- -from botocore.vendored import requests -from botocore.vendored.requests.packages import urllib3 - - -def _exception_from_packed_args(exception_cls, args=None, kwargs=None): - # This is helpful for reducing Exceptions that only accept kwargs as - # only positional arguments can be provided for __reduce__ - # Ideally, this would also be a class method on the BotoCoreError - # but instance methods cannot be pickled. - if args is None: - args = () - if kwargs is None: - kwargs = {} - return exception_cls(*args, **kwargs) - - -class BotoCoreError(Exception): - """ - The base exception class for BotoCore exceptions. - - :ivar msg: The descriptive message associated with the error. - """ - - fmt = 'An unspecified error occurred' - - def __init__(self, **kwargs): - msg = self.fmt.format(**kwargs) - Exception.__init__(self, msg) - self.kwargs = kwargs - - def __reduce__(self): - return _exception_from_packed_args, (self.__class__, None, self.kwargs) - - -class DataNotFoundError(BotoCoreError): - """ - The data associated with a particular path could not be loaded. - - :ivar data_path: The data path that the user attempted to load. - """ - - fmt = 'Unable to load data for: {data_path}' - - -class UnknownServiceError(DataNotFoundError): - """Raised when trying to load data for an unknown service. - - :ivar service_name: The name of the unknown service. - - """ - - fmt = ( - "Unknown service: '{service_name}'. Valid service names are: " - "{known_service_names}" - ) - - -class UnknownRegionError(BotoCoreError): - """Raised when trying to load data for an unknown region. - - :ivar region_name: The name of the unknown region. - - """ - - fmt = "Unknown region: '{region_name}'. {error_msg}" - - -class ApiVersionNotFoundError(BotoCoreError): - """ - The data associated with either the API version or a compatible one - could not be loaded. - - :ivar data_path: The data path that the user attempted to load. - :ivar api_version: The API version that the user attempted to load. - """ - - fmt = 'Unable to load data {data_path} for: {api_version}' - - -class HTTPClientError(BotoCoreError): - fmt = 'An HTTP Client raised an unhandled exception: {error}' - - def __init__(self, request=None, response=None, **kwargs): - self.request = request - self.response = response - super().__init__(**kwargs) - - def __reduce__(self): - return _exception_from_packed_args, ( - self.__class__, - (self.request, self.response), - self.kwargs, - ) - - -class ConnectionError(BotoCoreError): - fmt = 'An HTTP Client failed to establish a connection: {error}' - - -class InvalidIMDSEndpointError(BotoCoreError): - fmt = 'Invalid endpoint EC2 Instance Metadata endpoint: {endpoint}' - - -class InvalidIMDSEndpointModeError(BotoCoreError): - fmt = ( - 'Invalid EC2 Instance Metadata endpoint mode: {mode}' - ' Valid endpoint modes (case-insensitive): {valid_modes}.' - ) - - -class EndpointConnectionError(ConnectionError): - fmt = 'Could not connect to the endpoint URL: "{endpoint_url}"' - - -class SSLError(ConnectionError, requests.exceptions.SSLError): - fmt = 'SSL validation failed for {endpoint_url} {error}' - - -class ConnectionClosedError(HTTPClientError): - fmt = ( - 'Connection was closed before we received a valid response ' - 'from endpoint URL: "{endpoint_url}".' 
- ) - - -class ReadTimeoutError( - HTTPClientError, - requests.exceptions.ReadTimeout, - urllib3.exceptions.ReadTimeoutError, -): - fmt = 'Read timeout on endpoint URL: "{endpoint_url}"' - - -class ConnectTimeoutError(ConnectionError, requests.exceptions.ConnectTimeout): - fmt = 'Connect timeout on endpoint URL: "{endpoint_url}"' - - -class ProxyConnectionError(ConnectionError, requests.exceptions.ProxyError): - fmt = 'Failed to connect to proxy URL: "{proxy_url}"' - - -class ResponseStreamingError(HTTPClientError): - fmt = 'An error occurred while reading from response stream: {error}' - - -class NoCredentialsError(BotoCoreError): - """ - No credentials could be found. - """ - - fmt = 'Unable to locate credentials' - - -class NoAuthTokenError(BotoCoreError): - """ - No authorization token could be found. - """ - - fmt = 'Unable to locate authorization token' - - -class TokenRetrievalError(BotoCoreError): - """ - Error attempting to retrieve a token from a remote source. - - :ivar provider: The name of the token provider. - :ivar error_msg: The msg explaining why the token could not be retrieved. - - """ - - fmt = 'Error when retrieving token from {provider}: {error_msg}' - - -class PartialCredentialsError(BotoCoreError): - """ - Only partial credentials were found. - - :ivar cred_var: The missing credential variable name. - - """ - - fmt = 'Partial credentials found in {provider}, missing: {cred_var}' - - -class CredentialRetrievalError(BotoCoreError): - """ - Error attempting to retrieve credentials from a remote source. - - :ivar provider: The name of the credential provider. - :ivar error_msg: The msg explaining why credentials could not be - retrieved. - - """ - - fmt = 'Error when retrieving credentials from {provider}: {error_msg}' - - -class UnknownSignatureVersionError(BotoCoreError): - """ - Requested Signature Version is not known. - - :ivar signature_version: The name of the requested signature version. - """ - - fmt = 'Unknown Signature Version: {signature_version}.' - - -class ServiceNotInRegionError(BotoCoreError): - """ - The service is not available in requested region. - - :ivar service_name: The name of the service. - :ivar region_name: The name of the region. - """ - - fmt = 'Service {service_name} not available in region {region_name}' - - -class BaseEndpointResolverError(BotoCoreError): - """Base error for endpoint resolving errors. - - Should never be raised directly, but clients can catch - this exception if they want to generically handle any errors - during the endpoint resolution process. - - """ - - -class NoRegionError(BaseEndpointResolverError): - """No region was specified.""" - - fmt = 'You must specify a region.' - - -class EndpointVariantError(BaseEndpointResolverError): - """ - Could not construct modeled endpoint variant. - - :ivar error_msg: The message explaining why the modeled endpoint variant - is unable to be constructed. - - """ - - fmt = ( - 'Unable to construct a modeled endpoint with the following ' - 'variant(s) {tags}: ' - ) - - -class UnknownEndpointError(BaseEndpointResolverError, ValueError): - """ - Could not construct an endpoint. - - :ivar service_name: The name of the service. - :ivar region_name: The name of the region. - """ - - fmt = ( - 'Unable to construct an endpoint for ' - '{service_name} in region {region_name}' - ) - - -class UnknownFIPSEndpointError(BaseEndpointResolverError): - """ - Could not construct a FIPS endpoint. - - :ivar service_name: The name of the service. - :ivar region_name: The name of the region. 
- """ - - fmt = ( - 'The provided FIPS pseudo-region "{region_name}" is not known for ' - 'the service "{service_name}". A FIPS compliant endpoint cannot be ' - 'constructed.' - ) - - -class ProfileNotFound(BotoCoreError): - """ - The specified configuration profile was not found in the - configuration file. - - :ivar profile: The name of the profile the user attempted to load. - """ - - fmt = 'The config profile ({profile}) could not be found' - - -class ConfigParseError(BotoCoreError): - """ - The configuration file could not be parsed. - - :ivar path: The path to the configuration file. - """ - - fmt = 'Unable to parse config file: {path}' - - -class ConfigNotFound(BotoCoreError): - """ - The specified configuration file could not be found. - - :ivar path: The path to the configuration file. - """ - - fmt = 'The specified config file ({path}) could not be found.' - - -class MissingParametersError(BotoCoreError): - """ - One or more required parameters were not supplied. - - :ivar object: The object that has missing parameters. - This can be an operation or a parameter (in the - case of inner params). The str() of this object - will be used so it doesn't need to implement anything - other than str(). - :ivar missing: The names of the missing parameters. - """ - - fmt = ( - 'The following required parameters are missing for ' - '{object_name}: {missing}' - ) - - -class ValidationError(BotoCoreError): - """ - An exception occurred validating parameters. - - Subclasses must accept a ``value`` and ``param`` - argument in their ``__init__``. - - :ivar value: The value that was being validated. - :ivar param: The parameter that failed validation. - :ivar type_name: The name of the underlying type. - """ - - fmt = "Invalid value ('{value}') for param {param} of type {type_name} " - - -class ParamValidationError(BotoCoreError): - fmt = 'Parameter validation failed:\n{report}' - - -# These exceptions subclass from ValidationError so that code -# can just 'except ValidationError' to catch any possibly validation -# error. -class UnknownKeyError(ValidationError): - """ - Unknown key in a struct parameter. - - :ivar value: The value that was being checked. - :ivar param: The name of the parameter. - :ivar choices: The valid choices the value can be. - """ - - fmt = ( - "Unknown key '{value}' for param '{param}'. Must be one of: {choices}" - ) - - -class RangeError(ValidationError): - """ - A parameter value was out of the valid range. - - :ivar value: The value that was being checked. - :ivar param: The parameter that failed validation. - :ivar min_value: The specified minimum value. - :ivar max_value: The specified maximum value. - """ - - fmt = ( - 'Value out of range for param {param}: ' - '{min_value} <= {value} <= {max_value}' - ) - - -class UnknownParameterError(ValidationError): - """ - Unknown top level parameter. - - :ivar name: The name of the unknown parameter. - :ivar operation: The name of the operation. - :ivar choices: The valid choices the parameter name can be. - """ - - fmt = ( - "Unknown parameter '{name}' for operation {operation}. Must be one " - "of: {choices}" - ) - - -class InvalidRegionError(ValidationError, ValueError): - """ - Invalid region_name provided to client or resource. - - :ivar region_name: region_name that was being validated. - """ - - fmt = "Provided region_name '{region_name}' doesn't match a supported format." - - -class AliasConflictParameterError(ValidationError): - """ - Error when an alias is provided for a parameter as well as the original. 
- - :ivar original: The name of the original parameter. - :ivar alias: The name of the alias - :ivar operation: The name of the operation. - """ - - fmt = ( - "Parameter '{original}' and its alias '{alias}' were provided " - "for operation {operation}. Only one of them may be used." - ) - - -class UnknownServiceStyle(BotoCoreError): - """ - Unknown style of service invocation. - - :ivar service_style: The style requested. - """ - - fmt = 'The service style ({service_style}) is not understood.' - - -class PaginationError(BotoCoreError): - fmt = 'Error during pagination: {message}' - - -class OperationNotPageableError(BotoCoreError): - fmt = 'Operation cannot be paginated: {operation_name}' - - -class ChecksumError(BotoCoreError): - """The expected checksum did not match the calculated checksum.""" - - fmt = ( - 'Checksum {checksum_type} failed, expected checksum ' - '{expected_checksum} did not match calculated checksum ' - '{actual_checksum}.' - ) - - -class UnseekableStreamError(BotoCoreError): - """Need to seek a stream, but stream does not support seeking.""" - - fmt = ( - 'Need to rewind the stream {stream_object}, but stream ' - 'is not seekable.' - ) - - -class WaiterError(BotoCoreError): - """Waiter failed to reach desired state.""" - - fmt = 'Waiter {name} failed: {reason}' - - def __init__(self, name, reason, last_response): - super().__init__(name=name, reason=reason) - self.last_response = last_response - - -class IncompleteReadError(BotoCoreError): - """HTTP response did not return expected number of bytes.""" - - fmt = '{actual_bytes} read, but total bytes expected is {expected_bytes}.' - - -class InvalidExpressionError(BotoCoreError): - """Expression is either invalid or too complex.""" - - fmt = 'Invalid expression {expression}: Only dotted lookups are supported.' - - -class UnknownCredentialError(BotoCoreError): - """Tried to insert before/after an unregistered credential type.""" - - fmt = 'Credential named {name} not found.' 
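Every concrete exception above follows the same small pattern: the subclass supplies a `fmt` template and `BotoCoreError.__init__` formats it with the keyword arguments passed by the raiser, keeping them on `self.kwargs` for pickling. A short illustrative sketch using one of the classes defined above:

    from botocore.exceptions import ServiceNotInRegionError

    try:
        raise ServiceNotInRegionError(service_name='ec2', region_name='mars-east-1')
    except ServiceNotInRegionError as e:
        print(str(e))    # Service ec2 not available in region mars-east-1
        print(e.kwargs)  # {'service_name': 'ec2', 'region_name': 'mars-east-1'}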
- - -class WaiterConfigError(BotoCoreError): - """Error when processing waiter configuration.""" - - fmt = 'Error processing waiter config: {error_msg}' - - -class UnknownClientMethodError(BotoCoreError): - """Error when trying to access a method on a client that does not exist.""" - - fmt = 'Client does not have method: {method_name}' - - -class UnsupportedSignatureVersionError(BotoCoreError): - """Error when trying to use an unsupported Signature Version.""" - - fmt = 'Signature version(s) are not supported: {signature_version}' - - -class ClientError(Exception): - MSG_TEMPLATE = ( - 'An error occurred ({error_code}) when calling the {operation_name} ' - 'operation{retry_info}: {error_message}' - ) - - def __init__(self, error_response, operation_name): - retry_info = self._get_retry_info(error_response) - error = error_response.get('Error', {}) - msg = self.MSG_TEMPLATE.format( - error_code=error.get('Code', 'Unknown'), - error_message=error.get('Message', 'Unknown'), - operation_name=operation_name, - retry_info=retry_info, - ) - super().__init__(msg) - self.response = error_response - self.operation_name = operation_name - - def _get_retry_info(self, response): - retry_info = '' - if 'ResponseMetadata' in response: - metadata = response['ResponseMetadata'] - if metadata.get('MaxAttemptsReached', False): - if 'RetryAttempts' in metadata: - retry_info = ( - f" (reached max retries: {metadata['RetryAttempts']})" - ) - return retry_info - - def __reduce__(self): - # Subclasses of ClientError's are dynamically generated and - # cannot be pickled unless they are attributes of a - # module. So at the very least return a ClientError back. - return ClientError, (self.response, self.operation_name) - - -class EventStreamError(ClientError): - pass - - -class UnsupportedTLSVersionWarning(Warning): - """Warn when an openssl version that uses TLS 1.2 is required""" - - pass - - -class ImminentRemovalWarning(Warning): - pass - - -class InvalidDNSNameError(BotoCoreError): - """Error when virtual host path is forced on a non-DNS compatible bucket""" - - fmt = ( - 'Bucket named {bucket_name} is not DNS compatible. Virtual ' - 'hosted-style addressing cannot be used. The addressing style ' - 'can be configured by removing the addressing_style value ' - 'or setting that value to \'path\' or \'auto\' in the AWS Config ' - 'file or in the botocore.client.Config object.' - ) - - -class InvalidS3AddressingStyleError(BotoCoreError): - """Error when an invalid path style is specified""" - - fmt = ( - 'S3 addressing style {s3_addressing_style} is invalid. Valid options ' - 'are: \'auto\', \'virtual\', and \'path\'' - ) - - -class UnsupportedS3ArnError(BotoCoreError): - """Error when S3 ARN provided to Bucket parameter is not supported""" - - fmt = ( - 'S3 ARN {arn} provided to "Bucket" parameter is invalid. Only ' - 'ARNs for S3 access-points are supported.' - ) - - -class UnsupportedS3ControlArnError(BotoCoreError): - """Error when S3 ARN provided to S3 control parameter is not supported""" - - fmt = 'S3 ARN "{arn}" provided is invalid for this operation. {msg}' - - -class InvalidHostLabelError(BotoCoreError): - """Error when an invalid host label would be bound to an endpoint""" - - fmt = ( - 'Invalid host label to be bound to the hostname of the endpoint: ' - '"{label}".' 
- ) - - -class UnsupportedOutpostResourceError(BotoCoreError): - """Error when S3 Outpost ARN provided to Bucket parameter is incomplete""" - - fmt = ( - 'S3 Outpost ARN resource "{resource_name}" provided to "Bucket" ' - 'parameter is invalid. Only ARNs for S3 Outpost arns with an ' - 'access-point sub-resource are supported.' - ) - - -class UnsupportedS3ConfigurationError(BotoCoreError): - """Error when an unsupported configuration is used with access-points""" - - fmt = 'Unsupported configuration when using S3: {msg}' - - -class UnsupportedS3AccesspointConfigurationError(BotoCoreError): - """Error when an unsupported configuration is used with access-points""" - - fmt = 'Unsupported configuration when using S3 access-points: {msg}' - - -class InvalidEndpointDiscoveryConfigurationError(BotoCoreError): - """Error when invalid value supplied for endpoint_discovery_enabled""" - - fmt = ( - 'Unsupported configuration value for endpoint_discovery_enabled. ' - 'Expected one of ("true", "false", "auto") but got {config_value}.' - ) - - -class UnsupportedS3ControlConfigurationError(BotoCoreError): - """Error when an unsupported configuration is used with S3 Control""" - - fmt = 'Unsupported configuration when using S3 Control: {msg}' - - -class InvalidRetryConfigurationError(BotoCoreError): - """Error when invalid retry configuration is specified""" - - fmt = ( - 'Cannot provide retry configuration for "{retry_config_option}". ' - 'Valid retry configuration options are: {valid_options}' - ) - - -class InvalidMaxRetryAttemptsError(InvalidRetryConfigurationError): - """Error when invalid retry configuration is specified""" - - fmt = ( - 'Value provided to "max_attempts": {provided_max_attempts} must ' - 'be an integer greater than or equal to {min_value}.' - ) - - -class InvalidRetryModeError(InvalidRetryConfigurationError): - """Error when invalid retry mode configuration is specified""" - - fmt = ( - 'Invalid value provided to "mode": "{provided_retry_mode}" must ' - 'be one of: {valid_modes}' - ) - - -class InvalidS3UsEast1RegionalEndpointConfigError(BotoCoreError): - """Error for invalid s3 us-east-1 regional endpoints configuration""" - - fmt = ( - 'S3 us-east-1 regional endpoint option ' - '{s3_us_east_1_regional_endpoint_config} is ' - 'invalid. Valid options are: "legacy", "regional"' - ) - - -class InvalidSTSRegionalEndpointsConfigError(BotoCoreError): - """Error when invalid sts regional endpoints configuration is specified""" - - fmt = ( - 'STS regional endpoints option {sts_regional_endpoints_config} is ' - 'invalid. Valid options are: "legacy", "regional"' - ) - - -class StubResponseError(BotoCoreError): - fmt = ( - 'Error getting response stub for operation {operation_name}: {reason}' - ) - - -class StubAssertionError(StubResponseError, AssertionError): - pass - - -class UnStubbedResponseError(StubResponseError): - pass - - -class InvalidConfigError(BotoCoreError): - fmt = '{error_msg}' - - -class InfiniteLoopConfigError(InvalidConfigError): - fmt = ( - 'Infinite loop in credential configuration detected. Attempting to ' - 'load from profile {source_profile} which has already been visited. ' - 'Visited profiles: {visited_profiles}' - ) - - -class RefreshWithMFAUnsupportedError(BotoCoreError): - fmt = 'Cannot refresh credentials: MFA token required.' - - -class MD5UnavailableError(BotoCoreError): - fmt = "This system does not support MD5 generation." 
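Unlike the `fmt`-based classes, `ClientError` (defined a few lines up) wraps an already-parsed service error response, and the per-service exceptions built by `ClientExceptionsFactory` earlier in this diff are dynamically created subclasses of it. A hedged sketch of how callers typically construct and inspect one, with illustrative values:

    from botocore.exceptions import ClientError

    error_response = {
        'Error': {'Code': 'NoSuchKey', 'Message': 'The specified key does not exist.'},
        'ResponseMetadata': {'HTTPStatusCode': 404},
    }

    try:
        raise ClientError(error_response, 'GetObject')
    except ClientError as e:
        print(e.response['Error']['Code'])  # NoSuchKey
        print(e.operation_name)             # GetObject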
- - -class MissingDependencyException(BotoCoreError): - fmt = "Missing Dependency: {msg}" - - -class MetadataRetrievalError(BotoCoreError): - fmt = "Error retrieving metadata: {error_msg}" - - -class UndefinedModelAttributeError(Exception): - pass - - -class MissingServiceIdError(UndefinedModelAttributeError): - fmt = ( - "The model being used for the service {service_name} is missing the " - "serviceId metadata property, which is required." - ) - - def __init__(self, **kwargs): - msg = self.fmt.format(**kwargs) - Exception.__init__(self, msg) - self.kwargs = kwargs - - -class SSOError(BotoCoreError): - fmt = ( - "An unspecified error happened when resolving AWS credentials or an " - "access token from SSO." - ) - - -class SSOTokenLoadError(SSOError): - fmt = "Error loading SSO Token: {error_msg}" - - -class UnauthorizedSSOTokenError(SSOError): - fmt = ( - "The SSO session associated with this profile has expired or is " - "otherwise invalid. To refresh this SSO session run aws sso login " - "with the corresponding profile." - ) - - -class LoginError(BotoCoreError): - fmt = ( - "An unspecified error happened when resolving AWS credentials or " - "refreshing a login session profile." - ) - - -class LoginRefreshRequired(LoginError): - fmt = "Your session has expired or credentials have changed. Please reauthenticate using 'aws login'." - - -class LoginInsufficientPermissions(LoginError): - fmt = ( - "Unable to create or refresh login credentials due to insufficient " - "permissions. You may be missing permission for the 'signin:CreateOAuth2Token' action." - ) - - -class LoginTokenLoadError(LoginError): - fmt = "Error loading login session token: {error_msg}" - - -class LoginAuthorizationCodeError(LoginError): - fmt = "Error loading or redeeming a login authorization code: {error_msg} " - - -class CapacityNotAvailableError(BotoCoreError): - fmt = 'Insufficient request capacity available.' - - -class InvalidProxiesConfigError(BotoCoreError): - fmt = 'Invalid configuration value(s) provided for proxies_config.' - - -class InvalidDefaultsMode(BotoCoreError): - fmt = ( - 'Client configured with invalid defaults mode: {mode}. ' - 'Valid defaults modes include: {valid_modes}.' - ) - - -class AwsChunkedWrapperError(BotoCoreError): - fmt = '{error_msg}' - - -class FlexibleChecksumError(BotoCoreError): - fmt = '{error_msg}' - - -class InvalidEndpointConfigurationError(BotoCoreError): - fmt = 'Invalid endpoint configuration: {msg}' - - -class EndpointProviderError(BotoCoreError): - """Base error for the EndpointProvider class""" - - fmt = '{msg}' - - -class EndpointResolutionError(EndpointProviderError): - """Error when input parameters resolve to an error rule""" - - fmt = '{msg}' - - -class UnknownEndpointResolutionBuiltInName(EndpointProviderError): - fmt = 'Unknown builtin variable name: {name}' - - -class InvalidChecksumConfigError(BotoCoreError): - """Error when an invalid checksum config value is supplied.""" - - fmt = ( - 'Unsupported configuration value for {config_key}. ' - 'Expected one of {valid_options} but got {config_value}.' - ) - - -class UnsupportedServiceProtocolsError(BotoCoreError): - """Error when a service does not use any protocol supported by botocore.""" - - fmt = ( - 'Botocore supports {botocore_supported_protocols}, but service {service} only ' - 'supports {service_supported_protocols}.' 
- ) diff --git a/venv/Lib/site-packages/botocore/handlers.py b/venv/Lib/site-packages/botocore/handlers.py deleted file mode 100644 index 65f12db..0000000 --- a/venv/Lib/site-packages/botocore/handlers.py +++ /dev/null @@ -1,1727 +0,0 @@ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -"""Builtin event handlers. - -This module contains builtin handlers for events emitted by botocore. -""" - -import base64 -import copy -import logging -import os -import re -import uuid -import warnings -from io import BytesIO - -import botocore -import botocore.auth -from botocore import ( - retryhandler, # noqa: F401 - translate, # noqa: F401 - utils, -) -from botocore.args import ClientConfigString -from botocore.compat import ( - MD5_AVAILABLE, # noqa: F401 - ETree, - OrderedDict, - XMLParseError, - ensure_bytes, - get_md5, - json, - quote, - unquote, - unquote_str, - urlsplit, - urlunsplit, -) -from botocore.docs.utils import ( - AppendParamDocumentation, - AutoPopulatedParam, - HideParamFromOperations, -) -from botocore.endpoint_provider import VALID_HOST_LABEL_RE -from botocore.exceptions import ( - AliasConflictParameterError, - MissingServiceIdError, # noqa: F401 - ParamValidationError, - UnsupportedTLSVersionWarning, -) -from botocore.regions import EndpointResolverBuiltins -from botocore.serialize import TIMESTAMP_PRECISION_MILLISECOND -from botocore.signers import ( - add_dsql_generate_db_auth_token_methods, - add_generate_db_auth_token, - add_generate_presigned_post, - add_generate_presigned_url, -) -from botocore.useragent import register_feature_id -from botocore.utils import ( - SAFE_CHARS, - SERVICE_NAME_ALIASES, # noqa: F401 - ArnParser, - get_token_from_environment, - hyphenize_service_id, # noqa: F401 - is_global_accesspoint, # noqa: F401 - percent_encode, - switch_host_with_param, -) - -logger = logging.getLogger(__name__) - -REGISTER_FIRST = object() -REGISTER_LAST = object() -# From the S3 docs: -# The rules for bucket names in the US Standard region allow bucket names -# to be as long as 255 characters, and bucket names can contain any -# combination of uppercase letters, lowercase letters, numbers, periods -# (.), hyphens (-), and underscores (_). 
-VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$') -_ACCESSPOINT_ARN = ( - r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]' - r'[a-zA-Z0-9\-.]{1,63}$' -) -_OUTPOST_ARN = ( - r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]' - r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$' -) -VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN])) -# signing names used for the services s3 and s3-control, for example in -# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json -S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express') -VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$') - - -def handle_service_name_alias(service_name, **kwargs): - return SERVICE_NAME_ALIASES.get(service_name, service_name) - - -def add_recursion_detection_header(params, **kwargs): - has_lambda_name = 'AWS_LAMBDA_FUNCTION_NAME' in os.environ - trace_id = os.environ.get('_X_AMZN_TRACE_ID') - if has_lambda_name and trace_id: - headers = params['headers'] - if 'X-Amzn-Trace-Id' not in headers: - headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',') - - -def escape_xml_payload(params, **kwargs): - # Replace \r and \n with the escaped sequence over the whole XML document - # to avoid linebreak normalization modifying customer input when the - # document is parsed. Ideally, we would do this in ElementTree.tostring, - # but it doesn't allow us to override entity escaping for text fields. For - # this operation \r and \n can only appear in the XML document if they were - # passed as part of the customer input. - body = params['body'] - if b'\r' in body: - body = body.replace(b'\r', b' ') - if b'\n' in body: - body = body.replace(b'\n', b' ') - - params['body'] = body - - -def check_for_200_error(response, **kwargs): - """This function has been deprecated, but is kept for backwards compatibility.""" - # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html - # There are two opportunities for a copy request to return an error. One - # can occur when Amazon S3 receives the copy request and the other can - # occur while Amazon S3 is copying the files. If the error occurs before - # the copy operation starts, you receive a standard Amazon S3 error. If the - # error occurs during the copy operation, the error response is embedded in - # the 200 OK response. This means that a 200 OK response can contain either - # a success or an error. Make sure to design your application to parse the - # contents of the response and handle it appropriately. - # - # So this handler checks for this case. Even though the server sends a - # 200 response, conceptually this should be handled exactly like a - # 500 response (with respect to raising exceptions, retries, etc.) - # We're connected *before* all the other retry logic handlers, so as long - # as we switch the error code to 500, we'll retry the error as expected. - if response is None: - # A None response can happen if an exception is raised while - # trying to retrieve the response. See Endpoint._get_response(). 
- return - http_response, parsed = response - if _looks_like_special_case_error( - http_response.status_code, http_response.content - ): - logger.debug( - "Error found for response with 200 status code, " - "errors: %s, changing status code to " - "500.", - parsed, - ) - http_response.status_code = 500 - - -def _looks_like_special_case_error(status_code, body): - if status_code == 200 and body: - try: - parser = ETree.XMLParser( - target=ETree.TreeBuilder(), encoding='utf-8' - ) - parser.feed(body) - root = parser.close() - except XMLParseError: - # In cases of network disruptions, we may end up with a partial - # streamed response from S3. We need to treat these cases as - # 500 Service Errors and try again. - return True - if root.tag == 'Error': - return True - return False - - -def set_operation_specific_signer(context, signing_name, **kwargs): - """Choose the operation-specific signer. - - Individual operations may have a different auth type than the service as a - whole. This will most often manifest as operations that should not be - authenticated at all, but can include other auth modes such as sigv4 - without body signing. - """ - auth_type = context.get('auth_type') - - # Auth type will be None if the operation doesn't have a configured auth - # type. - if not auth_type: - return - - # Auth type will be the string value 'none' if the operation should not - # be signed at all. - if auth_type == 'none': - return botocore.UNSIGNED - - if auth_type == 'bearer': - return 'bearer' - - # If the operation needs an unsigned body, we set additional context - # allowing the signer to be aware of this. - if context.get('unsigned_payload') or auth_type == 'v4-unsigned-body': - context['payload_signing_enabled'] = False - - if auth_type.startswith('v4'): - if auth_type == 'v4-s3express': - return auth_type - - if auth_type == 'v4a': - # If sigv4a is chosen, we must add additional signing config for - # global signature. - region = _resolve_sigv4a_region(context) - signing = {'region': region, 'signing_name': signing_name} - if 'signing' in context: - context['signing'].update(signing) - else: - context['signing'] = signing - signature_version = 'v4a' - else: - signature_version = 'v4' - - # Signing names used by s3 and s3-control use customized signers "s3v4" - # and "s3v4a". - if signing_name in S3_SIGNING_NAMES: - signature_version = f's3{signature_version}' - - return signature_version - - -def _handle_sqs_compatible_error(parsed, context, **kwargs): - """ - Ensures backward compatibility for SQS errors. - - SQS's migration from the Query protocol to JSON was done prior to SDKs allowing a - service to support multiple protocols. Because of this, SQS is missing the "error" - key from its modeled exceptions, which is used by most query compatible services - to map error codes to the proper exception. Instead, SQS uses the error's shape name, - which is preserved in the QueryErrorCode key. 
- """ - parsed_error = parsed.get("Error", {}) - if not parsed_error: - return - - if query_code := parsed_error.get("QueryErrorCode"): - context['error_code_override'] = query_code - - -def _resolve_sigv4a_region(context): - region = None - if 'client_config' in context: - region = context['client_config'].sigv4a_signing_region_set - if not region and context.get('signing', {}).get('region'): - region = context['signing']['region'] - return region or '*' - - -def decode_console_output(parsed, **kwargs): - if 'Output' in parsed: - try: - # We're using 'replace' for errors because it is - # possible that console output contains non string - # chars we can't utf-8 decode. - value = base64.b64decode( - bytes(parsed['Output'], 'latin-1') - ).decode('utf-8', 'replace') - parsed['Output'] = value - except (ValueError, TypeError, AttributeError): - logger.debug('Error decoding base64', exc_info=True) - - -def generate_idempotent_uuid(params, model, **kwargs): - for name in model.idempotent_members: - if name not in params: - params[name] = str(uuid.uuid4()) - logger.debug( - "injecting idempotency token (%s) into param '%s'.", - params[name], - name, - ) - - -def decode_quoted_jsondoc(value): - try: - value = json.loads(unquote(value)) - except (ValueError, TypeError): - logger.debug('Error loading quoted JSON', exc_info=True) - return value - - -def json_decode_template_body(parsed, **kwargs): - if 'TemplateBody' in parsed: - try: - value = json.loads( - parsed['TemplateBody'], object_pairs_hook=OrderedDict - ) - parsed['TemplateBody'] = value - except (ValueError, TypeError): - logger.debug('error loading JSON', exc_info=True) - - -def validate_bucket_name(params, **kwargs): - if 'Bucket' not in params: - return - bucket = params['Bucket'] - if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket): - error_msg = ( - f'Invalid bucket name "{bucket}": Bucket name must match ' - f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching ' - f'the regex "{VALID_S3_ARN.pattern}"' - ) - raise ParamValidationError(report=error_msg) - - -def sse_md5(params, **kwargs): - """ - S3 server-side encryption requires the encryption key to be sent to the - server base64 encoded, as well as a base64-encoded MD5 hash of the - encryption key. This handler does both if the MD5 has not been set by - the caller. - """ - _sse_md5(params, 'SSECustomer') - - -def copy_source_sse_md5(params, **kwargs): - """ - S3 server-side encryption requires the encryption key to be sent to the - server base64 encoded, as well as a base64-encoded MD5 hash of the - encryption key. This handler does both if the MD5 has not been set by - the caller specifically if the parameter is for the copy-source sse-c key. 
- """ - _sse_md5(params, 'CopySourceSSECustomer') - - -def _sse_md5(params, sse_member_prefix='SSECustomer'): - if not _needs_s3_sse_customization(params, sse_member_prefix): - return - - sse_key_member = sse_member_prefix + 'Key' - sse_md5_member = sse_member_prefix + 'KeyMD5' - key_as_bytes = params[sse_key_member] - if isinstance(key_as_bytes, str): - key_as_bytes = key_as_bytes.encode('utf-8') - md5_val = get_md5(key_as_bytes, usedforsecurity=False).digest() - key_md5_str = base64.b64encode(md5_val).decode('utf-8') - key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8') - params[sse_key_member] = key_b64_encoded - params[sse_md5_member] = key_md5_str - - -def _needs_s3_sse_customization(params, sse_member_prefix): - return ( - params.get(sse_member_prefix + 'Key') is not None - and sse_member_prefix + 'KeyMD5' not in params - ) - - -def disable_signing(**kwargs): - """ - This handler disables request signing by setting the signer - name to a special sentinel value. - """ - return botocore.UNSIGNED - - -def add_expect_header(model, params, **kwargs): - if model.http.get('method', '') not in ['PUT', 'POST']: - return - if 'body' in params: - body = params['body'] - if hasattr(body, 'read'): - check_body = utils.ensure_boolean( - os.environ.get( - 'BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE', - False, - ) - ) - if check_body and utils.determine_content_length(body) == 0: - return - # Any file like object will use an expect 100-continue - # header regardless of size. - logger.debug("Adding expect 100 continue header to request.") - params['headers']['Expect'] = '100-continue' - - -class DeprecatedServiceDocumenter: - def __init__(self, replacement_service_name): - self._replacement_service_name = replacement_service_name - - def inject_deprecation_notice(self, section, event_name, **kwargs): - section.style.start_important() - section.write('This service client is deprecated. Please use ') - section.style.ref( - self._replacement_service_name, - self._replacement_service_name, - ) - section.write(' instead.') - section.style.end_important() - - -def document_copy_source_form(section, event_name, **kwargs): - if 'request-example' in event_name: - parent = section.get_section('structure-value') - param_line = parent.get_section('CopySource') - value_portion = param_line.get_section('member-value') - value_portion.clear_text() - value_portion.write( - "'string' or {'Bucket': 'string', " - "'Key': 'string', 'VersionId': 'string'}" - ) - elif 'request-params' in event_name: - param_section = section.get_section('CopySource') - type_section = param_section.get_section('param-type') - type_section.clear_text() - type_section.write(':type CopySource: str or dict') - doc_section = param_section.get_section('param-documentation') - doc_section.clear_text() - doc_section.write( - "The name of the source bucket, key name of the source object, " - "and optional version ID of the source object. You can either " - "provide this value as a string or a dictionary. The " - "string form is {bucket}/{key} or " - "{bucket}/{key}?versionId={versionId} if you want to copy a " - "specific version. You can also provide this value as a " - "dictionary. The dictionary format is recommended over " - "the string format because it is more explicit. The dictionary " - "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}." - " Note that the VersionId key is optional and may be omitted." 
- " To specify an S3 access point, provide the access point" - " ARN for the ``Bucket`` key in the copy source dictionary. If you" - " want to provide the copy source for an S3 access point as a" - " string instead of a dictionary, the ARN provided must be the" - " full S3 access point object ARN" - " (i.e. {accesspoint_arn}/object/{key})" - ) - - -def handle_copy_source_param(params, **kwargs): - """Convert CopySource param for CopyObject/UploadPartCopy. - - This handler will deal with two cases: - - * CopySource provided as a string. We'll make a best effort - to URL encode the key name as required. This will require - parsing the bucket and version id from the CopySource value - and only encoding the key. - * CopySource provided as a dict. In this case we're - explicitly given the Bucket, Key, and VersionId so we're - able to encode the key and ensure this value is serialized - and correctly sent to S3. - - """ - source = params.get('CopySource') - if source is None: - # The call will eventually fail but we'll let the - # param validator take care of this. It will - # give a better error message. - return - if isinstance(source, str): - params['CopySource'] = _quote_source_header(source) - elif isinstance(source, dict): - params['CopySource'] = _quote_source_header_from_dict(source) - - -def _quote_source_header_from_dict(source_dict): - try: - bucket = source_dict['Bucket'] - key = source_dict['Key'] - version_id = source_dict.get('VersionId') - if VALID_S3_ARN.search(bucket): - final = f'{bucket}/object/{key}' - else: - final = f'{bucket}/{key}' - except KeyError as e: - raise ParamValidationError( - report=f'Missing required parameter: {str(e)}' - ) - final = percent_encode(final, safe=SAFE_CHARS + '/') - if version_id is not None: - final += f'?versionId={version_id}' - return final - - -def _quote_source_header(value): - result = VERSION_ID_SUFFIX.search(value) - if result is None: - return percent_encode(value, safe=SAFE_CHARS + '/') - else: - first, version_id = value[: result.start()], value[result.start() :] - return percent_encode(first, safe=SAFE_CHARS + '/') + version_id - - -def _get_cross_region_presigned_url( - request_signer, request_dict, model, source_region, destination_region -): - # The better way to do this is to actually get the - # endpoint_resolver and get the endpoint_url given the - # source region. In this specific case, we know that - # we can safely replace the dest region with the source - # region because of the supported EC2 regions, but in - # general this is not a safe assumption to make. - # I think eventually we should try to plumb through something - # that allows us to resolve endpoints from regions. - request_dict_copy = copy.deepcopy(request_dict) - request_dict_copy['body']['DestinationRegion'] = destination_region - request_dict_copy['url'] = request_dict['url'].replace( - destination_region, source_region - ) - request_dict_copy['method'] = 'GET' - request_dict_copy['headers'] = {} - return request_signer.generate_presigned_url( - request_dict_copy, region_name=source_region, operation_name=model.name - ) - - -def _get_presigned_url_source_and_destination_regions(request_signer, params): - # Gets the source and destination regions to be used - destination_region = request_signer._region_name - source_region = params.get('SourceRegion') - return source_region, destination_region - - -def inject_presigned_url_ec2(params, request_signer, model, **kwargs): - # The customer can still provide this, so we should pass if they do. 
- if 'PresignedUrl' in params['body']: - return - src, dest = _get_presigned_url_source_and_destination_regions( - request_signer, params['body'] - ) - url = _get_cross_region_presigned_url( - request_signer, params, model, src, dest - ) - params['body']['PresignedUrl'] = url - # EC2 Requires that the destination region be sent over the wire in - # addition to the source region. - params['body']['DestinationRegion'] = dest - - -def inject_presigned_url_rds(params, request_signer, model, **kwargs): - # SourceRegion is not required for RDS operations, so it's possible that - # it isn't set. In that case it's probably a local copy so we don't need - # to do anything else. - if 'SourceRegion' not in params['body']: - return - - src, dest = _get_presigned_url_source_and_destination_regions( - request_signer, params['body'] - ) - - # Since SourceRegion isn't actually modeled for RDS, it needs to be - # removed from the request params before we send the actual request. - del params['body']['SourceRegion'] - - if 'PreSignedUrl' in params['body']: - return - - url = _get_cross_region_presigned_url( - request_signer, params, model, src, dest - ) - params['body']['PreSignedUrl'] = url - - -def json_decode_policies(parsed, model, **kwargs): - # Any time an IAM operation returns a policy document - # it is a string that is json that has been urlencoded, - # i.e urlencode(json.dumps(policy_document)). - # To give users something more useful, we will urldecode - # this value and json.loads() the result so that they have - # the policy document as a dictionary. - output_shape = model.output_shape - if output_shape is not None: - _decode_policy_types(parsed, model.output_shape) - - -def _decode_policy_types(parsed, shape): - # IAM consistently uses the policyDocumentType shape to indicate - # strings that have policy documents. - shape_name = 'policyDocumentType' - if shape.type_name == 'structure': - for member_name, member_shape in shape.members.items(): - if ( - member_shape.type_name == 'string' - and member_shape.name == shape_name - and member_name in parsed - ): - parsed[member_name] = decode_quoted_jsondoc( - parsed[member_name] - ) - elif member_name in parsed: - _decode_policy_types(parsed[member_name], member_shape) - if shape.type_name == 'list': - shape_member = shape.member - for item in parsed: - _decode_policy_types(item, shape_member) - - -def parse_get_bucket_location(parsed, http_response, **kwargs): - # s3.GetBucketLocation cannot be modeled properly. To - # account for this we just manually parse the XML document. - # The "parsed" passed in only has the ResponseMetadata - # filled out. This handler will fill in the LocationConstraint - # value. - if http_response.raw is None: - return - response_body = http_response.content - parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8') - parser.feed(response_body) - root = parser.close() - region = root.text - parsed['LocationConstraint'] = region - - -def base64_encode_user_data(params, **kwargs): - if 'UserData' in params: - if isinstance(params['UserData'], str): - # Encode it to bytes if it is text. - params['UserData'] = params['UserData'].encode('utf-8') - params['UserData'] = base64.b64encode(params['UserData']).decode( - 'utf-8' - ) - - -def document_base64_encoding(param): - description = ( - '**This value will be base64 encoded automatically. 
Do ' - 'not base64 encode this value prior to performing the ' - 'operation.**' - ) - append = AppendParamDocumentation(param, description) - return append.append_documentation - - -def validate_ascii_metadata(params, **kwargs): - """Verify S3 Metadata only contains ascii characters. - - From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html - - "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair - must conform to US-ASCII when using REST and UTF-8 when using SOAP or - browser-based uploads via POST." - - """ - metadata = params.get('Metadata') - if not metadata or not isinstance(metadata, dict): - # We have to at least type check the metadata as a dict type - # because this handler is called before param validation. - # We'll go ahead and return because the param validator will - # give a descriptive error message for us. - # We might need a post-param validation event. - return - for key, value in metadata.items(): - try: - key.encode('ascii') - value.encode('ascii') - except UnicodeEncodeError: - error_msg = ( - 'Non ascii characters found in S3 metadata ' - f'for key "{key}", value: "{value}". \nS3 metadata can only ' - 'contain ASCII characters. ' - ) - raise ParamValidationError(report=error_msg) - - -def fix_route53_ids(params, model, **kwargs): - """ - Check for and split apart Route53 resource IDs, setting - only the last piece. This allows the output of one operation - (e.g. ``'foo/1234'``) to be used as input in another - operation (e.g. it expects just ``'1234'``). - """ - input_shape = model.input_shape - if not input_shape or not hasattr(input_shape, 'members'): - return - - members = [ - name - for (name, shape) in input_shape.members.items() - if shape.name in ['ResourceId', 'DelegationSetId', 'ChangeId'] - ] - - for name in members: - if name in params: - orig_value = params[name] - params[name] = orig_value.split('/')[-1] - logger.debug('%s %s -> %s', name, orig_value, params[name]) - - -def inject_account_id(params, **kwargs): - if params.get('accountId') is None: - # Glacier requires accountId, but allows you - # to specify '-' for the current owners account. - # We add this default value if the user does not - # provide the accountId as a convenience. - params['accountId'] = '-' - - -def add_glacier_version(model, params, **kwargs): - request_dict = params - request_dict['headers']['x-amz-glacier-version'] = model.metadata[ - 'apiVersion' - ] - - -def add_accept_header(model, params, **kwargs): - if params['headers'].get('Accept', None) is None: - request_dict = params - request_dict['headers']['Accept'] = 'application/json' - - -def add_glacier_checksums(params, **kwargs): - """Add glacier checksums to the http request. - - This will add two headers to the http request: - - * x-amz-content-sha256 - * x-amz-sha256-tree-hash - - These values will only be added if they are not present - in the HTTP request. - - """ - request_dict = params - headers = request_dict['headers'] - body = request_dict['body'] - if isinstance(body, bytes): - # If the user provided a bytes type instead of a file - # like object, we're temporarily create a BytesIO object - # so we can use the util functions to calculate the - # checksums which assume file like objects. Note that - # we're not actually changing the body in the request_dict. 
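fix_route53_ids above keeps only the last path segment of Route53 resource IDs, so a value returned by one operation (for example '/hostedzone/Z123...') can be passed straight into another operation that expects the bare ID. A small standalone illustration of the same splitting (the sample IDs are invented):

def strip_route53_prefix(resource_id):
    # '/hostedzone/Z123456ABCDEFG' and 'Z123456ABCDEFG' both normalise
    # to the bare ID; values without a '/' pass through unchanged.
    return resource_id.split('/')[-1]

for value in ('/hostedzone/Z123456ABCDEFG',
              '/change/C2682N5HXP0BZ4',
              'Z123456ABCDEFG'):
    print(value, '->', strip_route53_prefix(value))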
- body = BytesIO(body) - starting_position = body.tell() - if 'x-amz-content-sha256' not in headers: - headers['x-amz-content-sha256'] = utils.calculate_sha256( - body, as_hex=True - ) - body.seek(starting_position) - if 'x-amz-sha256-tree-hash' not in headers: - headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body) - body.seek(starting_position) - - -def document_glacier_tree_hash_checksum(): - doc = ''' - This is a required field. - - Ideally you will want to compute this value with checksums from - previous uploaded parts, using the algorithm described in - `Glacier documentation `_. - - But if you prefer, you can also use botocore.utils.calculate_tree_hash() - to compute it from raw file by:: - - checksum = calculate_tree_hash(open('your_file.txt', 'rb')) - - ''' - return AppendParamDocumentation('checksum', doc).append_documentation - - -def document_cloudformation_get_template_return_type( - section, event_name, **kwargs -): - if 'response-params' in event_name: - template_body_section = section.get_section('TemplateBody') - type_section = template_body_section.get_section('param-type') - type_section.clear_text() - type_section.write('(*dict*) --') - elif 'response-example' in event_name: - parent = section.get_section('structure-value') - param_line = parent.get_section('TemplateBody') - value_portion = param_line.get_section('member-value') - value_portion.clear_text() - value_portion.write('{}') - - -def switch_host_machinelearning(request, **kwargs): - switch_host_with_param(request, 'PredictEndpoint') - - -def check_openssl_supports_tls_version_1_2(**kwargs): - import ssl - - try: - openssl_version_tuple = ssl.OPENSSL_VERSION_INFO - if openssl_version_tuple < (1, 0, 1): - warnings.warn( - f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not ' - 'support TLS 1.2, which is required for use of iot-data. ' - 'Please use python installed with openssl version 1.0.1 or ' - 'higher.', - UnsupportedTLSVersionWarning, - ) - # We cannot check the openssl version on python2.6, so we should just - # pass on this conveniency check. - except AttributeError: - pass - - -def change_get_to_post(request, **kwargs): - # This is useful when we need to change a potentially large GET request - # into a POST with x-www-form-urlencoded encoding. - if request.method == 'GET' and '?' in request.url: - request.headers['Content-Type'] = 'application/x-www-form-urlencoded' - request.method = 'POST' - request.url, request.data = request.url.split('?', 1) - - -def set_list_objects_encoding_type_url(params, context, **kwargs): - if 'EncodingType' not in params: - # We set this context so that we know it wasn't the customer that - # requested the encoding. - context['encoding_type_auto_set'] = True - params['EncodingType'] = 'url' - - -def decode_list_object(parsed, context, **kwargs): - # This is needed because we are passing url as the encoding type. Since the - # paginator is based on the key, we need to handle it before it can be - # round tripped. - # - # From the documentation: If you specify encoding-type request parameter, - # Amazon S3 includes this element in the response, and returns encoded key - # name values in the following response elements: - # Delimiter, Marker, Prefix, NextMarker, Key. 
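document_glacier_tree_hash_checksum above points readers at Glacier's SHA-256 tree-hash algorithm. As a rough sketch of that algorithm, written independently of botocore.utils.calculate_tree_hash: hash the body in 1 MiB chunks, then repeatedly hash concatenated pairs of digests until a single root digest remains, carrying an odd trailing digest up unchanged.

from hashlib import sha256
from io import BytesIO

MEBIBYTE = 1024 * 1024

def tree_hash(fileobj):
    # Leaf level: one SHA-256 digest per 1 MiB chunk of the body.
    digests = [sha256(chunk).digest()
               for chunk in iter(lambda: fileobj.read(MEBIBYTE), b'')]
    if not digests:
        digests = [sha256(b'').digest()]
    # Combine pairs of digests level by level until one digest remains.
    while len(digests) > 1:
        next_level = []
        for i in range(0, len(digests), 2):
            pair = digests[i:i + 2]
            if len(pair) == 2:
                next_level.append(sha256(pair[0] + pair[1]).digest())
            else:
                next_level.append(pair[0])  # odd digest carries up unchanged
        digests = next_level
    return digests[0].hex()

print(tree_hash(BytesIO(b'example archive contents')))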
- _decode_list_object( - top_level_keys=['Delimiter', 'Marker', 'NextMarker'], - nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')], - parsed=parsed, - context=context, - ) - - -def decode_list_object_v2(parsed, context, **kwargs): - # From the documentation: If you specify encoding-type request parameter, - # Amazon S3 includes this element in the response, and returns encoded key - # name values in the following response elements: - # Delimiter, Prefix, ContinuationToken, Key, and StartAfter. - _decode_list_object( - top_level_keys=['Delimiter', 'Prefix', 'StartAfter'], - nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')], - parsed=parsed, - context=context, - ) - - -def decode_list_object_versions(parsed, context, **kwargs): - # From the documentation: If you specify encoding-type request parameter, - # Amazon S3 includes this element in the response, and returns encoded key - # name values in the following response elements: - # KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter. - _decode_list_object( - top_level_keys=[ - 'KeyMarker', - 'NextKeyMarker', - 'Prefix', - 'Delimiter', - ], - nested_keys=[ - ('Versions', 'Key'), - ('DeleteMarkers', 'Key'), - ('CommonPrefixes', 'Prefix'), - ], - parsed=parsed, - context=context, - ) - - -def _decode_list_object(top_level_keys, nested_keys, parsed, context): - if parsed.get('EncodingType') == 'url' and context.get( - 'encoding_type_auto_set' - ): - # URL decode top-level keys in the response if present. - for key in top_level_keys: - if key in parsed: - parsed[key] = unquote_str(parsed[key]) - # URL decode nested keys from the response if present. - for top_key, child_key in nested_keys: - if top_key in parsed: - for member in parsed[top_key]: - member[child_key] = unquote_str(member[child_key]) - - -def convert_body_to_file_like_object(params, **kwargs): - if 'Body' in params: - if isinstance(params['Body'], str): - params['Body'] = BytesIO(ensure_bytes(params['Body'])) - elif isinstance(params['Body'], bytes): - params['Body'] = BytesIO(params['Body']) - - -def _add_parameter_aliases(handler_list): - # Mapping of original parameter to parameter alias. - # The key is ..parameter - # The first part of the key is used for event registration. - # The last part is the original parameter name and the value is the - # alias to expose in documentation. - aliases = { - 'ec2.*.Filter': 'Filters', - 'logs.CreateExportTask.from': 'fromTime', - 'cloudsearchdomain.Search.return': 'returnFields', - } - - for original, new_name in aliases.items(): - event_portion, original_name = original.rsplit('.', 1) - parameter_alias = ParameterAlias(original_name, new_name) - - # Add the handlers to the list of handlers. - # One handler is to handle when users provide the alias. - # The other handler is to update the documentation to show only - # the alias. - parameter_build_event_handler_tuple = ( - 'before-parameter-build.' + event_portion, - parameter_alias.alias_parameter_in_call, - REGISTER_FIRST, - ) - docs_event_handler_tuple = ( - 'docs.*.' 
+ event_portion + '.complete-section', - parameter_alias.alias_parameter_in_documentation, - ) - handler_list.append(parameter_build_event_handler_tuple) - handler_list.append(docs_event_handler_tuple) - - -class ParameterAlias: - def __init__(self, original_name, alias_name): - self._original_name = original_name - self._alias_name = alias_name - - def alias_parameter_in_call(self, params, model, **kwargs): - if model.input_shape: - # Only consider accepting the alias if it is modeled in the - # input shape. - if self._original_name in model.input_shape.members: - if self._alias_name in params: - if self._original_name in params: - raise AliasConflictParameterError( - original=self._original_name, - alias=self._alias_name, - operation=model.name, - ) - # Remove the alias parameter value and use the old name - # instead. - params[self._original_name] = params.pop(self._alias_name) - - def alias_parameter_in_documentation(self, event_name, section, **kwargs): - if event_name.startswith('docs.request-params'): - if self._original_name not in section.available_sections: - return - # Replace the name for parameter type - param_section = section.get_section(self._original_name) - param_type_section = param_section.get_section('param-type') - self._replace_content(param_type_section) - - # Replace the name for the parameter description - param_name_section = param_section.get_section('param-name') - self._replace_content(param_name_section) - elif event_name.startswith('docs.request-example'): - section = section.get_section('structure-value') - if self._original_name not in section.available_sections: - return - # Replace the name for the example - param_section = section.get_section(self._original_name) - self._replace_content(param_section) - - def _replace_content(self, section): - content = section.getvalue().decode('utf-8') - updated_content = content.replace( - self._original_name, self._alias_name - ) - section.clear_text() - section.write(updated_content) - - -class ClientMethodAlias: - def __init__(self, actual_name): - """Aliases a non-extant method to an existing method. - - :param actual_name: The name of the method that actually exists on - the client. - """ - self._actual = actual_name - - def __call__(self, client, **kwargs): - return getattr(client, self._actual) - - -# TODO: Remove this class as it is no longer used -class HeaderToHostHoister: - """Takes a header and moves it to the front of the hoststring.""" - - _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(? 1 and ArnParser.is_arn( - unquote(auth_path_parts[1]) - ): - request.auth_path = '/'.join(['', *auth_path_parts[2:]]) - - -def customize_endpoint_resolver_builtins( - builtins, model, params, context, **kwargs -): - """Modify builtin parameter values for endpoint resolver - - Modifies the builtins dict in place. Changes are in effect for one call. - The corresponding event is emitted only if at least one builtin parameter - value is required for endpoint resolution for the operation. - """ - bucket_name = params.get('Bucket') - bucket_is_arn = bucket_name is not None and ArnParser.is_arn(bucket_name) - # In some situations the host will return AuthorizationHeaderMalformed - # when the signing region of a sigv4 request is not the bucket's - # region (which is likely unknown by the user of GetBucketLocation). - # Avoid this by always using path-style addressing. 
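ParameterAlias above maps a user-facing alias (for example Filters) back onto the parameter name the service model actually defines (Filter), and rejects calls that supply both spellings. A condensed, dictionary-only sketch of that behaviour, without botocore's shape model:

def apply_alias(params, original_name, alias_name):
    # Move a value supplied under the alias back to the modelled name.
    if alias_name in params:
        if original_name in params:
            raise ValueError(
                f'Got both {original_name!r} and its alias {alias_name!r}'
            )
        params[original_name] = params.pop(alias_name)
    return params

call_params = {'Filters': [{'Name': 'instance-type', 'Values': ['t3.micro']}]}
print(apply_alias(call_params, 'Filter', 'Filters'))
# {'Filter': [{'Name': 'instance-type', 'Values': ['t3.micro']}]}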
- if model.name == 'GetBucketLocation': - builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True - # All situations where the bucket name is an ARN are not compatible - # with path style addressing. - elif bucket_is_arn: - builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False - - # Bucket names that are invalid host labels require path-style addressing. - # If path-style addressing was specifically requested, the default builtin - # value is already set. - path_style_required = ( - bucket_name is not None and not VALID_HOST_LABEL_RE.match(bucket_name) - ) - path_style_requested = builtins[ - EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE - ] - - # Path-style addressing is incompatible with the global endpoint for - # presigned URLs. If the bucket name is an ARN, the ARN's region should be - # used in the endpoint. - if ( - context.get('use_global_endpoint') - and not path_style_required - and not path_style_requested - and not bucket_is_arn - and not utils.is_s3express_bucket(bucket_name) - ): - builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global' - builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True - - -def remove_content_type_header_for_presigning(request, **kwargs): - if ( - request.context.get('is_presign_request') is True - and 'Content-Type' in request.headers - ): - del request.headers['Content-Type'] - - -def handle_expires_header( - operation_model, response_dict, customized_response_dict, **kwargs -): - if _has_expires_shape(operation_model.output_shape): - if expires_value := response_dict.get('headers', {}).get('Expires'): - customized_response_dict['ExpiresString'] = expires_value - try: - utils.parse_timestamp(expires_value) - except (ValueError, RuntimeError): - logger.warning( - 'Failed to parse the "Expires" member as a timestamp: %s. ' - 'The unparsed value is available in the response under "ExpiresString".', - expires_value, - ) - del response_dict['headers']['Expires'] - - -def _has_expires_shape(shape): - if not shape: - return False - return any( - member_shape.name == 'Expires' - and member_shape.serialization.get('name') == 'Expires' - for member_shape in shape.members.values() - ) - - -def document_expires_shape(section, event_name, **kwargs): - # Updates the documentation for S3 operations that include the 'Expires' member - # in their response structure. Documents a synthetic member 'ExpiresString' and - # includes a deprecation notice for 'Expires'. - if 'response-example' in event_name: - if not section.has_section('structure-value'): - return - parent = section.get_section('structure-value') - if not parent.has_section('Expires'): - return - param_line = parent.get_section('Expires') - param_line.add_new_section('ExpiresString') - new_param_line = param_line.get_section('ExpiresString') - new_param_line.write("'ExpiresString': 'string',") - new_param_line.style.new_line() - elif 'response-params' in event_name: - if not section.has_section('Expires'): - return - param_section = section.get_section('Expires') - # Add a deprecation notice for the "Expires" param - doc_section = param_section.get_section('param-documentation') - doc_section.style.start_note() - doc_section.write( - 'This member has been deprecated. Please use ``ExpiresString`` instead.' 
- ) - doc_section.style.end_note() - # Document the "ExpiresString" param - new_param_section = param_section.add_new_section('ExpiresString') - new_param_section.style.new_paragraph() - new_param_section.write('- **ExpiresString** *(string) --*') - new_param_section.style.indent() - new_param_section.style.new_paragraph() - new_param_section.write( - 'The raw, unparsed value of the ``Expires`` field.' - ) - - -def _handle_200_error(operation_model, response_dict, **kwargs): - # S3 can return a 200 response with an error embedded in the body. - # Convert the 200 to a 500 for retry resolution in ``_update_status_code``. - if not _should_handle_200_error(operation_model, response_dict): - # Operations with streaming response blobs are excluded as they - # can't be reliably distinguished from an S3 error. - return - if _looks_like_special_case_error( - response_dict['status_code'], response_dict['body'] - ): - response_dict['status_code'] = 500 - logger.debug( - "Error found for response with 200 status code: %s.", - response_dict['body'], - ) - - -def _should_handle_200_error(operation_model, response_dict): - output_shape = operation_model.output_shape - if ( - not response_dict - or operation_model.has_event_stream_output - or not output_shape - ): - return False - payload = output_shape.serialization.get('payload') - if payload is not None: - payload_shape = output_shape.members[payload] - if payload_shape.type_name in ('blob', 'string'): - return False - return True - - -def _update_status_code(response, **kwargs): - # Update the http_response status code when the parsed response has been - # modified in a handler. This enables retries for cases like ``_handle_200_error``. - if response is None: - return - http_response, parsed = response - parsed_status_code = parsed.get('ResponseMetadata', {}).get( - 'HTTPStatusCode', http_response.status_code - ) - if http_response.status_code != parsed_status_code: - http_response.status_code = parsed_status_code - - -def _handle_request_validation_mode_member(params, model, **kwargs): - client_config = kwargs.get("context", {}).get("client_config") - if client_config is None: - return - response_checksum_validation = client_config.response_checksum_validation - http_checksum = model.http_checksum - mode_member = http_checksum.get("requestValidationModeMember") - if ( - mode_member is not None - and response_checksum_validation == "when_supported" - ): - params.setdefault(mode_member, "ENABLED") - - -def _set_extra_headers_for_unsigned_request( - request, signature_version, **kwargs -): - # When sending a checksum in the trailer of an unsigned chunked request, S3 - # requires us to set the "X-Amz-Content-SHA256" header to "STREAMING-UNSIGNED-PAYLOAD-TRAILER". - checksum_context = request.context.get("checksum", {}) - algorithm = checksum_context.get("request_algorithm", {}) - in_trailer = algorithm.get("in") == "trailer" - headers = request.headers - if signature_version == botocore.UNSIGNED and in_trailer: - headers["X-Amz-Content-SHA256"] = "STREAMING-UNSIGNED-PAYLOAD-TRAILER" - - -def _set_auth_scheme_preference_signer(context, signing_name, **kwargs): - """ - Determines the appropriate signer to use based on the client configuration, - authentication scheme preferences, and the availability of a bearer token. 
- """ - client_config = context.get('client_config') - if client_config is None: - return - - signature_version = client_config.signature_version - auth_scheme_preference = client_config.auth_scheme_preference - auth_options = context.get('auth_options') - - signature_version_set_in_code = ( - isinstance(signature_version, ClientConfigString) - or signature_version is botocore.UNSIGNED - ) - auth_preference_set_in_code = isinstance( - auth_scheme_preference, ClientConfigString - ) - has_in_code_configuration = ( - signature_version_set_in_code or auth_preference_set_in_code - ) - - resolved_signature_version = signature_version - - # If signature version was not set in code, but an auth scheme preference - # is available, resolve it based on the preferred schemes and supported auth - # options for this service. - if ( - not signature_version_set_in_code - and auth_scheme_preference - and auth_options - ): - preferred_schemes = auth_scheme_preference.split(',') - resolved = botocore.auth.resolve_auth_scheme_preference( - preferred_schemes, auth_options - ) - resolved_signature_version = ( - botocore.UNSIGNED if resolved == 'none' else resolved - ) - - # Prefer 'bearer' signature version if a bearer token is available, and it - # is allowed for this service. This can override earlier resolution if the - # config object didn't explicitly set a signature version. - if _should_prefer_bearer_auth( - has_in_code_configuration, - signing_name, - resolved_signature_version, - auth_options, - ): - register_feature_id('BEARER_SERVICE_ENV_VARS') - resolved_signature_version = 'bearer' - - if resolved_signature_version == signature_version: - return None - return resolved_signature_version - - -def _should_prefer_bearer_auth( - has_in_code_configuration, - signing_name, - resolved_signature_version, - auth_options, -): - if signing_name not in get_bearer_auth_supported_services(): - return False - - if not auth_options or 'smithy.api#httpBearerAuth' not in auth_options: - return False - - has_token = get_token_from_environment(signing_name) is not None - - # Prefer 'bearer' if a bearer token is available, and either: - # Bearer was already resolved, or - # No auth-related values were explicitly set in code - return has_token and ( - resolved_signature_version == 'bearer' or not has_in_code_configuration - ) - - -def get_bearer_auth_supported_services(): - """ - Returns a set of services that support bearer token authentication. - These values correspond to the service's `signingName` property as defined - in model.py, falling back to `endpointPrefix` if `signingName` is not set. - - Warning: This is a private interface and is subject to abrupt breaking changes, - including removal, in any botocore release. It is not intended for external use, - and its usage outside of botocore is not advised or supported. - """ - return {'bedrock'} - - -# This is a list of (event_name, handler). -# When a Session is created, everything in this list will be -# automatically registered with that Session. 
- -BUILTIN_HANDLERS = [ - ('choose-service-name', handle_service_name_alias), - ( - 'getattr.mturk.list_hi_ts_for_qualification_type', - ClientMethodAlias('list_hits_for_qualification_type'), - ), - ( - 'getattr.socialmessaging.delete_whatsapp_media_message', - ClientMethodAlias('delete_whatsapp_message_media'), - ), - ( - 'before-parameter-build.s3.UploadPart', - convert_body_to_file_like_object, - REGISTER_LAST, - ), - ( - 'before-parameter-build.s3.PutObject', - convert_body_to_file_like_object, - REGISTER_LAST, - ), - ('creating-client-class', add_generate_presigned_url), - ('creating-client-class.s3', add_generate_presigned_post), - ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2), - ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation), - ('creating-client-class.qbusiness', remove_qbusiness_chat), - ( - 'creating-client-class.bedrock-runtime', - remove_bedrock_runtime_invoke_model_with_bidirectional_stream, - ), - ( - 'creating-serializer.bedrock-agentcore', - enable_millisecond_timestamp_precision, - ), - ('after-call.iam', json_decode_policies), - ('after-call.ec2.GetConsoleOutput', decode_console_output), - ('after-call.cloudformation.GetTemplate', json_decode_template_body), - ('after-call.s3.GetBucketLocation', parse_get_bucket_location), - ( - 'after-call.sqs.*', - _handle_sqs_compatible_error, - ), - ('before-parse.s3.*', handle_expires_header), - ('before-parse.s3.*', _handle_200_error, REGISTER_FIRST), - ('before-parameter-build', generate_idempotent_uuid), - ('before-parameter-build', _handle_request_validation_mode_member), - ('before-parameter-build.s3', validate_bucket_name), - ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model), - ( - 'before-parameter-build.s3.ListObjects', - set_list_objects_encoding_type_url, - ), - ( - 'before-parameter-build.s3.ListObjectsV2', - set_list_objects_encoding_type_url, - ), - ( - 'before-parameter-build.s3.ListObjectVersions', - set_list_objects_encoding_type_url, - ), - ('before-parameter-build.s3.CopyObject', handle_copy_source_param), - ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param), - ('before-parameter-build.s3.CopyObject', validate_ascii_metadata), - ('before-parameter-build.s3.PutObject', validate_ascii_metadata), - ( - 'before-parameter-build.s3.CreateMultipartUpload', - validate_ascii_metadata, - ), - ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model), - ('docs.*.s3.CopyObject.complete-section', document_copy_source_form), - ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form), - ('docs.response-example.s3.*.complete-section', document_expires_shape), - ('docs.response-params.s3.*.complete-section', document_expires_shape), - ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins), - ('before-call', add_recursion_detection_header), - ('before-call.s3', add_expect_header), - ('before-call.glacier', add_glacier_version), - ('before-call.apigateway', add_accept_header), - ('before-call.s3.DeleteObjects', escape_xml_payload), - ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload), - ('before-call.glacier.UploadArchive', add_glacier_checksums), - ('before-call.glacier.UploadMultipartPart', add_glacier_checksums), - ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2), - ('request-created', add_retry_headers), - ('request-created.machinelearning.Predict', switch_host_machinelearning), - ('needs-retry.s3.*', _update_status_code, REGISTER_FIRST), - 
('choose-signer.cognito-identity.GetId', disable_signing), - ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing), - ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing), - ( - 'choose-signer.cognito-identity.GetCredentialsForIdentity', - disable_signing, - ), - ('choose-signer.sts.AssumeRoleWithSAML', disable_signing), - ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing), - ('choose-signer', set_operation_specific_signer), - ('choose-signer', _set_auth_scheme_preference_signer), - ('before-parameter-build.s3.HeadObject', sse_md5), - ('before-parameter-build.s3.GetObject', sse_md5), - ('before-parameter-build.s3.PutObject', sse_md5), - ('before-parameter-build.s3.CopyObject', sse_md5), - ('before-parameter-build.s3.CopyObject', copy_source_sse_md5), - ('before-parameter-build.s3.CreateMultipartUpload', sse_md5), - ('before-parameter-build.s3.UploadPart', sse_md5), - ('before-parameter-build.s3.UploadPartCopy', sse_md5), - ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5), - ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5), - ('before-parameter-build.s3.SelectObjectContent', sse_md5), - ('before-parameter-build.ec2.RunInstances', base64_encode_user_data), - ( - 'before-parameter-build.autoscaling.CreateLaunchConfiguration', - base64_encode_user_data, - ), - ('before-parameter-build.route53', fix_route53_ids), - ('before-parameter-build.glacier', inject_account_id), - ('before-sign.s3', remove_arn_from_signing_path), - ('before-sign.s3', _set_extra_headers_for_unsigned_request), - ( - 'before-sign.polly.SynthesizeSpeech', - remove_content_type_header_for_presigning, - ), - ('after-call.s3.ListObjects', decode_list_object), - ('after-call.s3.ListObjectsV2', decode_list_object_v2), - ('after-call.s3.ListObjectVersions', decode_list_object_versions), - # Cloudsearchdomain search operation will be sent by HTTP POST - ('request-created.cloudsearchdomain.Search', change_get_to_post), - # Glacier documentation customizations - ( - 'docs.*.glacier.*.complete-section', - AutoPopulatedParam( - 'accountId', - 'Note: this parameter is set to "-" by' - 'default if no value is not specified.', - ).document_auto_populated_param, - ), - ( - 'docs.*.glacier.UploadArchive.complete-section', - AutoPopulatedParam('checksum').document_auto_populated_param, - ), - ( - 'docs.*.glacier.UploadMultipartPart.complete-section', - AutoPopulatedParam('checksum').document_auto_populated_param, - ), - ( - 'docs.request-params.glacier.CompleteMultipartUpload.complete-section', - document_glacier_tree_hash_checksum(), - ), - # Cloudformation documentation customizations - ( - 'docs.*.cloudformation.GetTemplate.complete-section', - document_cloudformation_get_template_return_type, - ), - # UserData base64 encoding documentation customizations - ( - 'docs.*.ec2.RunInstances.complete-section', - document_base64_encoding('UserData'), - ), - ( - 'docs.*.autoscaling.CreateLaunchConfiguration.complete-section', - document_base64_encoding('UserData'), - ), - # EC2 CopySnapshot documentation customizations - ( - 'docs.*.ec2.CopySnapshot.complete-section', - AutoPopulatedParam('PresignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.ec2.CopySnapshot.complete-section', - AutoPopulatedParam('DestinationRegion').document_auto_populated_param, - ), - # S3 SSE documentation modifications - ( - 'docs.*.s3.*.complete-section', - AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param, - ), - # S3 SSE Copy Source documentation modifications - ( - 
'docs.*.s3.*.complete-section', - AutoPopulatedParam( - 'CopySourceSSECustomerKeyMD5' - ).document_auto_populated_param, - ), - # Add base64 information to Lambda - ( - 'docs.*.lambda.UpdateFunctionCode.complete-section', - document_base64_encoding('ZipFile'), - ), - # The following S3 operations cannot actually accept a ContentMD5 - ( - 'docs.*.s3.*.complete-section', - HideParamFromOperations( - 's3', - 'ContentMD5', - [ - 'DeleteObjects', - 'PutBucketAcl', - 'PutBucketCors', - 'PutBucketLifecycle', - 'PutBucketLogging', - 'PutBucketNotification', - 'PutBucketPolicy', - 'PutBucketReplication', - 'PutBucketRequestPayment', - 'PutBucketTagging', - 'PutBucketVersioning', - 'PutBucketWebsite', - 'PutObjectAcl', - ], - ).hide_param, - ), - ############# - # DSQL - ############# - ('creating-client-class.dsql', add_dsql_generate_db_auth_token_methods), - ############# - # RDS - ############# - ('creating-client-class.rds', add_generate_db_auth_token), - ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds), - ('before-call.rds.CreateDBCluster', inject_presigned_url_rds), - ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds), - ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds), - ( - 'before-call.rds.StartDBInstanceAutomatedBackupsReplication', - inject_presigned_url_rds, - ), - # RDS PresignedUrl documentation customizations - ( - 'docs.*.rds.CopyDBClusterSnapshot.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.rds.CreateDBCluster.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.rds.CopyDBSnapshot.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.rds.CreateDBInstanceReadReplica.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ############# - # Neptune - ############# - ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds), - ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds), - # Neptune PresignedUrl documentation customizations - ( - 'docs.*.neptune.CopyDBClusterSnapshot.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.neptune.CreateDBCluster.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ############# - # DocDB - ############# - ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds), - ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds), - # DocDB PresignedUrl documentation customizations - ( - 'docs.*.docdb.CopyDBClusterSnapshot.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ( - 'docs.*.docdb.CreateDBCluster.complete-section', - AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, - ), - ('before-call', inject_api_version_header_if_needed), -] -_add_parameter_aliases(BUILTIN_HANDLERS) diff --git a/venv/Lib/site-packages/botocore/history.py b/venv/Lib/site-packages/botocore/history.py deleted file mode 100644 index 59d9481..0000000 --- a/venv/Lib/site-packages/botocore/history.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). 
You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import logging - -HISTORY_RECORDER = None -logger = logging.getLogger(__name__) - - -class BaseHistoryHandler: - def emit(self, event_type, payload, source): - raise NotImplementedError('emit()') - - -class HistoryRecorder: - def __init__(self): - self._enabled = False - self._handlers = [] - - def enable(self): - self._enabled = True - - def disable(self): - self._enabled = False - - def add_handler(self, handler): - self._handlers.append(handler) - - def record(self, event_type, payload, source='BOTOCORE'): - if self._enabled and self._handlers: - for handler in self._handlers: - try: - handler.emit(event_type, payload, source) - except Exception: - # Never let the process die because we had a failure in - # a record collection handler. - logger.debug( - "Exception raised in %s.", handler, exc_info=True - ) - - -def get_global_history_recorder(): - global HISTORY_RECORDER - if HISTORY_RECORDER is None: - HISTORY_RECORDER = HistoryRecorder() - return HISTORY_RECORDER diff --git a/venv/Lib/site-packages/botocore/hooks.py b/venv/Lib/site-packages/botocore/hooks.py deleted file mode 100644 index c672b1c..0000000 --- a/venv/Lib/site-packages/botocore/hooks.py +++ /dev/null @@ -1,660 +0,0 @@ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import copy -import logging -from collections import deque, namedtuple - -from botocore.compat import accepts_kwargs -from botocore.utils import EVENT_ALIASES - -logger = logging.getLogger(__name__) - - -_NodeList = namedtuple('NodeList', ['first', 'middle', 'last']) -_FIRST = 0 -_MIDDLE = 1 -_LAST = 2 - - -class NodeList(_NodeList): - def __copy__(self): - first_copy = copy.copy(self.first) - middle_copy = copy.copy(self.middle) - last_copy = copy.copy(self.last) - copied = NodeList(first_copy, middle_copy, last_copy) - return copied - - -def first_non_none_response(responses, default=None): - """Find first non None response in a list of tuples. - - This function can be used to find the first non None response from - handlers connected to an event. This is useful if you are interested - in the returned responses from event handlers. Example usage:: - - print(first_non_none_response([(func1, None), (func2, 'foo'), - (func3, 'bar')])) - # This will print 'foo' - - :type responses: list of tuples - :param responses: The responses from the ``EventHooks.emit`` method. - This is a list of tuples, and each tuple is - (handler, handler_response). - - :param default: If no non-None responses are found, then this default - value will be returned. - - :return: The first non-None response in the list of tuples. 
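The HistoryRecorder in the removed history.py only fans events out to handlers while it is enabled, and deliberately swallows handler exceptions. A short usage sketch; it assumes botocore is importable, and PrintingHandler is a made-up handler for the example:

from botocore.history import BaseHistoryHandler, HistoryRecorder

class PrintingHandler(BaseHistoryHandler):
    # Minimal handler: echo every recorded event.
    def emit(self, event_type, payload, source):
        print(f'[{source}] {event_type}: {payload}')

recorder = HistoryRecorder()
recorder.add_handler(PrintingHandler())
recorder.record('API_CALL', {'service': 's3'})  # dropped: recorder not enabled
recorder.enable()
recorder.record('API_CALL', {'service': 's3'})  # printed by PrintingHandler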
- - """ - for response in responses: - if response[1] is not None: - return response[1] - return default - - -class BaseEventHooks: - def emit(self, event_name, **kwargs): - """Call all handlers subscribed to an event. - - :type event_name: str - :param event_name: The name of the event to emit. - - :type **kwargs: dict - :param **kwargs: Arbitrary kwargs to pass through to the - subscribed handlers. The ``event_name`` will be injected - into the kwargs so it's not necessary to add this to **kwargs. - - :rtype: list of tuples - :return: A list of ``(handler_func, handler_func_return_value)`` - - """ - return [] - - def register( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - """Register an event handler for a given event. - - If a ``unique_id`` is given, the handler will not be registered - if a handler with the ``unique_id`` has already been registered. - - Handlers are called in the order they have been registered. - Note handlers can also be registered with ``register_first()`` - and ``register_last()``. All handlers registered with - ``register_first()`` are called before handlers registered - with ``register()`` which are called before handlers registered - with ``register_last()``. - - """ - self._verify_and_register( - event_name, - handler, - unique_id, - register_method=self._register, - unique_id_uses_count=unique_id_uses_count, - ) - - def register_first( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - """Register an event handler to be called first for an event. - - All event handlers registered with ``register_first()`` will - be called before handlers registered with ``register()`` and - ``register_last()``. - - """ - self._verify_and_register( - event_name, - handler, - unique_id, - register_method=self._register_first, - unique_id_uses_count=unique_id_uses_count, - ) - - def register_last( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - """Register an event handler to be called last for an event. - - All event handlers registered with ``register_last()`` will be called - after handlers registered with ``register_first()`` and ``register()``. - - """ - self._verify_and_register( - event_name, - handler, - unique_id, - register_method=self._register_last, - unique_id_uses_count=unique_id_uses_count, - ) - - def _verify_and_register( - self, - event_name, - handler, - unique_id, - register_method, - unique_id_uses_count, - ): - self._verify_is_callable(handler) - self._verify_accept_kwargs(handler) - register_method(event_name, handler, unique_id, unique_id_uses_count) - - def unregister( - self, - event_name, - handler=None, - unique_id=None, - unique_id_uses_count=False, - ): - """Unregister an event handler for a given event. - - If no ``unique_id`` was given during registration, then the - first instance of the event handler is removed (if the event - handler has been registered multiple times). - - """ - pass - - def _verify_is_callable(self, func): - if not callable(func): - raise ValueError(f"Event handler {func} must be callable.") - - def _verify_accept_kwargs(self, func): - """Verifies a callable accepts kwargs - - :type func: callable - :param func: A callable object. - - :returns: True, if ``func`` accepts kwargs, otherwise False. 
- - """ - try: - if not accepts_kwargs(func): - raise ValueError( - f"Event handler {func} must accept keyword " - f"arguments (**kwargs)" - ) - except TypeError: - return False - - -class HierarchicalEmitter(BaseEventHooks): - def __init__(self): - # We keep a reference to the handlers for quick - # read only access (we never modify self._handlers). - # A cache of event name to handler list. - self._lookup_cache = {} - self._handlers = _PrefixTrie() - # This is used to ensure that unique_id's are only - # registered once. - self._unique_id_handlers = {} - - def _emit(self, event_name, kwargs, stop_on_response=False): - """ - Emit an event with optional keyword arguments. - - :type event_name: string - :param event_name: Name of the event - :type kwargs: dict - :param kwargs: Arguments to be passed to the handler functions. - :type stop_on_response: boolean - :param stop_on_response: Whether to stop on the first non-None - response. If False, then all handlers - will be called. This is especially useful - to handlers which mutate data and then - want to stop propagation of the event. - :rtype: list - :return: List of (handler, response) tuples from all processed - handlers. - """ - responses = [] - # Invoke the event handlers from most specific - # to least specific, each time stripping off a dot. - handlers_to_call = self._lookup_cache.get(event_name) - if handlers_to_call is None: - handlers_to_call = self._handlers.prefix_search(event_name) - self._lookup_cache[event_name] = handlers_to_call - elif not handlers_to_call: - # Short circuit and return an empty response is we have - # no handlers to call. This is the common case where - # for the majority of signals, nothing is listening. - return [] - kwargs['event_name'] = event_name - responses = [] - for handler in handlers_to_call: - logger.debug('Event %s: calling handler %s', event_name, handler) - response = handler(**kwargs) - responses.append((handler, response)) - if stop_on_response and response is not None: - return responses - return responses - - def emit(self, event_name, **kwargs): - """ - Emit an event by name with arguments passed as keyword args. - - >>> responses = emitter.emit( - ... 'my-event.service.operation', arg1='one', arg2='two') - - :rtype: list - :return: List of (handler, response) tuples from all processed - handlers. - """ - return self._emit(event_name, kwargs) - - def emit_until_response(self, event_name, **kwargs): - """ - Emit an event by name with arguments passed as keyword args, - until the first non-``None`` response is received. This - method prevents subsequent handlers from being invoked. - - >>> handler, response = emitter.emit_until_response( - 'my-event.service.operation', arg1='one', arg2='two') - - :rtype: tuple - :return: The first (handler, response) tuple where the response - is not ``None``, otherwise (``None``, ``None``). 
- """ - responses = self._emit(event_name, kwargs, stop_on_response=True) - if responses: - return responses[-1] - else: - return (None, None) - - def _register( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - self._register_section( - event_name, - handler, - unique_id, - unique_id_uses_count, - section=_MIDDLE, - ) - - def _register_first( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - self._register_section( - event_name, - handler, - unique_id, - unique_id_uses_count, - section=_FIRST, - ) - - def _register_last( - self, event_name, handler, unique_id, unique_id_uses_count=False - ): - self._register_section( - event_name, handler, unique_id, unique_id_uses_count, section=_LAST - ) - - def _register_section( - self, event_name, handler, unique_id, unique_id_uses_count, section - ): - if unique_id is not None: - if unique_id in self._unique_id_handlers: - # We've already registered a handler using this unique_id - # so we don't need to register it again. - count = self._unique_id_handlers[unique_id].get('count', None) - if unique_id_uses_count: - if not count: - raise ValueError( - f"Initial registration of unique id {unique_id} was " - "specified to use a counter. Subsequent register " - "calls to unique id must specify use of a counter " - "as well." - ) - else: - self._unique_id_handlers[unique_id]['count'] += 1 - else: - if count: - raise ValueError( - f"Initial registration of unique id {unique_id} was " - "specified to not use a counter. Subsequent " - "register calls to unique id must specify not to " - "use a counter as well." - ) - return - else: - # Note that the trie knows nothing about the unique - # id. We track uniqueness in this class via the - # _unique_id_handlers. - self._handlers.append_item( - event_name, handler, section=section - ) - unique_id_handler_item = {'handler': handler} - if unique_id_uses_count: - unique_id_handler_item['count'] = 1 - self._unique_id_handlers[unique_id] = unique_id_handler_item - else: - self._handlers.append_item(event_name, handler, section=section) - # Super simple caching strategy for now, if we change the registrations - # clear the cache. This has the opportunity for smarter invalidations. - self._lookup_cache = {} - - def unregister( - self, - event_name, - handler=None, - unique_id=None, - unique_id_uses_count=False, - ): - if unique_id is not None: - try: - count = self._unique_id_handlers[unique_id].get('count', None) - except KeyError: - # There's no handler matching that unique_id so we have - # nothing to unregister. - return - if unique_id_uses_count: - if count is None: - raise ValueError( - f"Initial registration of unique id {unique_id} was specified to " - "use a counter. Subsequent unregister calls to unique " - "id must specify use of a counter as well." - ) - elif count == 1: - handler = self._unique_id_handlers.pop(unique_id)[ - 'handler' - ] - else: - self._unique_id_handlers[unique_id]['count'] -= 1 - return - else: - if count: - raise ValueError( - f"Initial registration of unique id {unique_id} was specified " - "to not use a counter. Subsequent unregister calls " - "to unique id must specify not to use a counter as " - "well." 
- ) - handler = self._unique_id_handlers.pop(unique_id)['handler'] - try: - self._handlers.remove_item(event_name, handler) - self._lookup_cache = {} - except ValueError: - pass - - def __copy__(self): - new_instance = self.__class__() - new_state = self.__dict__.copy() - new_state['_handlers'] = copy.copy(self._handlers) - new_state['_unique_id_handlers'] = copy.copy(self._unique_id_handlers) - new_instance.__dict__ = new_state - return new_instance - - -class EventAliaser(BaseEventHooks): - def __init__(self, event_emitter, event_aliases=None): - self._event_aliases = event_aliases - if event_aliases is None: - self._event_aliases = EVENT_ALIASES - self._alias_name_cache = {} - self._emitter = event_emitter - - def emit(self, event_name, **kwargs): - aliased_event_name = self._alias_event_name(event_name) - return self._emitter.emit(aliased_event_name, **kwargs) - - def emit_until_response(self, event_name, **kwargs): - aliased_event_name = self._alias_event_name(event_name) - return self._emitter.emit_until_response(aliased_event_name, **kwargs) - - def register( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - aliased_event_name = self._alias_event_name(event_name) - return self._emitter.register( - aliased_event_name, handler, unique_id, unique_id_uses_count - ) - - def register_first( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - aliased_event_name = self._alias_event_name(event_name) - return self._emitter.register_first( - aliased_event_name, handler, unique_id, unique_id_uses_count - ) - - def register_last( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - aliased_event_name = self._alias_event_name(event_name) - return self._emitter.register_last( - aliased_event_name, handler, unique_id, unique_id_uses_count - ) - - def unregister( - self, - event_name, - handler=None, - unique_id=None, - unique_id_uses_count=False, - ): - aliased_event_name = self._alias_event_name(event_name) - return self._emitter.unregister( - aliased_event_name, handler, unique_id, unique_id_uses_count - ) - - def _alias_event_name(self, event_name): - if event_name in self._alias_name_cache: - return self._alias_name_cache[event_name] - - for old_part, new_part in self._event_aliases.items(): - # We can't simply do a string replace for everything, otherwise we - # might end up translating substrings that we never intended to - # translate. When there aren't any dots in the old event name - # part, then we can quickly replace the item in the list if it's - # there. - event_parts = event_name.split('.') - if '.' not in old_part: - try: - # Theoretically a given event name could have the same part - # repeated, but in practice this doesn't happen - event_parts[event_parts.index(old_part)] = new_part - except ValueError: - continue - - # If there's dots in the name, it gets more complicated. Now we - # have to replace multiple sections of the original event. 
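_alias_event_name above rewrites event names piece by piece, so handlers registered against a renamed service still fire under the old name. A toy sketch of the single-part rename case; the alias mapping is invented for illustration and is not botocore's EVENT_ALIASES table:

EXAMPLE_ALIASES = {'old-service': 'new-service'}  # illustrative mapping only

def alias_event_name(event_name, aliases=EXAMPLE_ALIASES):
    parts = event_name.split('.')
    for old, new in aliases.items():
        if old in parts:
            parts[parts.index(old)] = new
    return '.'.join(parts)

print(alias_event_name('before-call.old-service.Operation'))
# before-call.new-service.Operation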
- elif old_part in event_name: - old_parts = old_part.split('.') - self._replace_subsection(event_parts, old_parts, new_part) - else: - continue - - new_name = '.'.join(event_parts) - logger.debug( - "Changing event name from %s to %s", event_name, new_name - ) - self._alias_name_cache[event_name] = new_name - return new_name - - self._alias_name_cache[event_name] = event_name - return event_name - - def _replace_subsection(self, sections, old_parts, new_part): - for i in range(len(sections)): - if ( - sections[i] == old_parts[0] - and sections[i : i + len(old_parts)] == old_parts - ): - sections[i : i + len(old_parts)] = [new_part] - return - - def __copy__(self): - return self.__class__( - copy.copy(self._emitter), copy.copy(self._event_aliases) - ) - - -class _PrefixTrie: - """Specialized prefix trie that handles wildcards. - - The prefixes in this case are based on dot separated - names so 'foo.bar.baz' is:: - - foo -> bar -> baz - - Wildcard support just means that having a key such as 'foo.bar.*.baz' will - be matched with a call to ``get_items(key='foo.bar.ANYTHING.baz')``. - - You can think of this prefix trie as the equivalent as defaultdict(list), - except that it can do prefix searches: - - foo.bar.baz -> A - foo.bar -> B - foo -> C - - Calling ``get_items('foo.bar.baz')`` will return [A + B + C], from - most specific to least specific. - - """ - - def __init__(self): - # Each dictionary can be though of as a node, where a node - # has values associated with the node, and children is a link - # to more nodes. So 'foo.bar' would have a 'foo' node with - # a 'bar' node as a child of foo. - # {'foo': {'children': {'bar': {...}}}}. - self._root = {'chunk': None, 'children': {}, 'values': None} - - def append_item(self, key, value, section=_MIDDLE): - """Add an item to a key. - - If a value is already associated with that key, the new - value is appended to the list for the key. - """ - key_parts = key.split('.') - current = self._root - for part in key_parts: - if part not in current['children']: - new_child = {'chunk': part, 'values': None, 'children': {}} - current['children'][part] = new_child - current = new_child - else: - current = current['children'][part] - if current['values'] is None: - current['values'] = NodeList([], [], []) - current['values'][section].append(value) - - def prefix_search(self, key): - """Collect all items that are prefixes of key. - - Prefix in this case are delineated by '.' characters so - 'foo.bar.baz' is a 3 chunk sequence of 3 "prefixes" ( - "foo", "bar", and "baz"). - - """ - collected = deque() - key_parts = key.split('.') - current = self._root - self._get_items(current, key_parts, collected, 0) - return collected - - def _get_items(self, starting_node, key_parts, collected, starting_index): - stack = [(starting_node, starting_index)] - key_parts_len = len(key_parts) - # Traverse down the nodes, where at each level we add the - # next part from key_parts as well as the wildcard element '*'. - # This means for each node we see we potentially add two more - # elements to our stack. - while stack: - current_node, index = stack.pop() - if current_node['values']: - # We're using extendleft because we want - # the values associated with the node furthest - # from the root to come before nodes closer - # to the root. extendleft() also adds its items - # in right-left order so .extendleft([1, 2, 3]) - # will result in final_list = [3, 2, 1], which is - # why we reverse the lists. 
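The _PrefixTrie docstring above describes dot-separated prefix matching that returns handlers from most specific to least specific. The trie exists to make that lookup cheap; a flat-dictionary mimic of the same result, ignoring wildcard keys, looks like this:

def prefix_search(handlers_by_key, event_name):
    # Yield handlers for 'foo.bar.baz', then 'foo.bar', then 'foo'.
    parts = event_name.split('.')
    results = []
    for end in range(len(parts), 0, -1):
        key = '.'.join(parts[:end])
        results.extend(handlers_by_key.get(key, []))
    return results

handlers = {'foo.bar.baz': ['A'], 'foo.bar': ['B'], 'foo': ['C']}
print(prefix_search(handlers, 'foo.bar.baz'))  # ['A', 'B', 'C']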
- node_list = current_node['values'] - complete_order = ( - node_list.first + node_list.middle + node_list.last - ) - collected.extendleft(reversed(complete_order)) - if not index == key_parts_len: - children = current_node['children'] - directs = children.get(key_parts[index]) - wildcard = children.get('*') - next_index = index + 1 - if wildcard is not None: - stack.append((wildcard, next_index)) - if directs is not None: - stack.append((directs, next_index)) - - def remove_item(self, key, value): - """Remove an item associated with a key. - - If the value is not associated with the key a ``ValueError`` - will be raised. If the key does not exist in the trie, a - ``ValueError`` will be raised. - - """ - key_parts = key.split('.') - current = self._root - self._remove_item(current, key_parts, value, index=0) - - def _remove_item(self, current_node, key_parts, value, index): - if current_node is None: - return - elif index < len(key_parts): - next_node = current_node['children'].get(key_parts[index]) - if next_node is not None: - self._remove_item(next_node, key_parts, value, index + 1) - if index == len(key_parts) - 1: - node_list = next_node['values'] - if value in node_list.first: - node_list.first.remove(value) - elif value in node_list.middle: - node_list.middle.remove(value) - elif value in node_list.last: - node_list.last.remove(value) - if not next_node['children'] and not next_node['values']: - # Then this is a leaf node with no values so - # we can just delete this link from the parent node. - # This makes subsequent search faster in the case - # where a key does not exist. - del current_node['children'][key_parts[index]] - else: - raise ValueError(f"key is not in trie: {'.'.join(key_parts)}") - - def __copy__(self): - # The fact that we're using a nested dict under the covers - # is an implementation detail, and the user shouldn't have - # to know that they'd normally need a deepcopy so we expose - # __copy__ instead of __deepcopy__. - new_copy = self.__class__() - copied_attrs = self._recursive_copy(self.__dict__) - new_copy.__dict__ = copied_attrs - return new_copy - - def _recursive_copy(self, node): - # We can't use copy.deepcopy because we actually only want to copy - # the structure of the trie, not the handlers themselves. - # Each node has a chunk, children, and values. - copied_node = {} - for key, value in node.items(): - if isinstance(value, NodeList): - copied_node[key] = copy.copy(value) - elif isinstance(value, dict): - copied_node[key] = self._recursive_copy(value) - else: - copied_node[key] = value - return copied_node diff --git a/venv/Lib/site-packages/botocore/httpchecksum.py b/venv/Lib/site-packages/botocore/httpchecksum.py deleted file mode 100644 index c4a6876..0000000 --- a/venv/Lib/site-packages/botocore/httpchecksum.py +++ /dev/null @@ -1,574 +0,0 @@ -# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -"""The interfaces in this module are not intended for public use. 
- -This module defines interfaces for applying checksums to HTTP requests within -the context of botocore. This involves both resolving the checksum to be used -based on client configuration and environment, as well as application of the -checksum to the request. -""" - -import base64 -import io -import logging -from binascii import crc32 -from hashlib import sha1, sha256 - -from botocore.compat import HAS_CRT, has_minimum_crt_version, urlparse -from botocore.exceptions import ( - AwsChunkedWrapperError, - FlexibleChecksumError, - MissingDependencyException, -) -from botocore.model import StructureShape -from botocore.response import StreamingBody -from botocore.useragent import register_feature_id -from botocore.utils import ( - conditionally_calculate_md5, - determine_content_length, - has_checksum_header, -) - -if HAS_CRT: - from awscrt import checksums as crt_checksums -else: - crt_checksums = None - -logger = logging.getLogger(__name__) - -DEFAULT_CHECKSUM_ALGORITHM = "CRC32" - - -class BaseChecksum: - _CHUNK_SIZE = 1024 * 1024 - - def update(self, chunk): - pass - - def digest(self): - pass - - def b64digest(self): - bs = self.digest() - return base64.b64encode(bs).decode("ascii") - - def _handle_fileobj(self, fileobj): - start_position = fileobj.tell() - for chunk in iter(lambda: fileobj.read(self._CHUNK_SIZE), b""): - self.update(chunk) - fileobj.seek(start_position) - - def handle(self, body): - if isinstance(body, (bytes, bytearray)): - self.update(body) - else: - self._handle_fileobj(body) - return self.b64digest() - - -class Crc32Checksum(BaseChecksum): - def __init__(self): - self._int_crc32 = 0 - - def update(self, chunk): - self._int_crc32 = crc32(chunk, self._int_crc32) & 0xFFFFFFFF - - def digest(self): - return self._int_crc32.to_bytes(4, byteorder="big") - - -class CrtCrc32Checksum(BaseChecksum): - # Note: This class is only used if the CRT is available - def __init__(self): - self._int_crc32 = 0 - - def update(self, chunk): - new_checksum = crt_checksums.crc32(chunk, self._int_crc32) - self._int_crc32 = new_checksum & 0xFFFFFFFF - - def digest(self): - return self._int_crc32.to_bytes(4, byteorder="big") - - -class CrtCrc32cChecksum(BaseChecksum): - # Note: This class is only used if the CRT is available - def __init__(self): - self._int_crc32c = 0 - - def update(self, chunk): - new_checksum = crt_checksums.crc32c(chunk, self._int_crc32c) - self._int_crc32c = new_checksum & 0xFFFFFFFF - - def digest(self): - return self._int_crc32c.to_bytes(4, byteorder="big") - - -class CrtCrc64NvmeChecksum(BaseChecksum): - # Note: This class is only used if the CRT is available - def __init__(self): - self._int_crc64nvme = 0 - - def update(self, chunk): - new_checksum = crt_checksums.crc64nvme(chunk, self._int_crc64nvme) - self._int_crc64nvme = new_checksum & 0xFFFFFFFFFFFFFFFF - - def digest(self): - return self._int_crc64nvme.to_bytes(8, byteorder="big") - - -class Sha1Checksum(BaseChecksum): - def __init__(self): - self._checksum = sha1() - - def update(self, chunk): - self._checksum.update(chunk) - - def digest(self): - return self._checksum.digest() - - -class Sha256Checksum(BaseChecksum): - def __init__(self): - self._checksum = sha256() - - def update(self, chunk): - self._checksum.update(chunk) - - def digest(self): - return self._checksum.digest() - - -class AwsChunkedWrapper: - _DEFAULT_CHUNK_SIZE = 1024 * 1024 - - def __init__( - self, - raw, - checksum_cls=None, - checksum_name="x-amz-checksum", - chunk_size=None, - ): - self._raw = raw - self._checksum_name = checksum_name 
- self._checksum_cls = checksum_cls - self._reset() - - if chunk_size is None: - chunk_size = self._DEFAULT_CHUNK_SIZE - self._chunk_size = chunk_size - - def _reset(self): - self._remaining = b"" - self._complete = False - self._checksum = None - if self._checksum_cls: - self._checksum = self._checksum_cls() - - def seek(self, offset, whence=0): - if offset != 0 or whence != 0: - raise AwsChunkedWrapperError( - error_msg="Can only seek to start of stream" - ) - self._reset() - self._raw.seek(0) - - def read(self, size=None): - # Normalize "read all" size values to None - if size is not None and size <= 0: - size = None - - # If the underlying body is done and we have nothing left then - # end the stream - if self._complete and not self._remaining: - return b"" - - # While we're not done and want more bytes - want_more_bytes = size is None or size > len(self._remaining) - while not self._complete and want_more_bytes: - self._remaining += self._make_chunk() - want_more_bytes = size is None or size > len(self._remaining) - - # If size was None, we want to return everything - if size is None: - size = len(self._remaining) - - # Return a chunk up to the size asked for - to_return = self._remaining[:size] - self._remaining = self._remaining[size:] - return to_return - - def _make_chunk(self): - # NOTE: Chunk size is not deterministic as read could return less. This - # means we cannot know the content length of the encoded aws-chunked - # stream ahead of time without ensuring a consistent chunk size - raw_chunk = self._raw.read(self._chunk_size) - hex_len = hex(len(raw_chunk))[2:].encode("ascii") - self._complete = not raw_chunk - - if self._checksum: - self._checksum.update(raw_chunk) - - if self._checksum and self._complete: - name = self._checksum_name.encode("ascii") - checksum = self._checksum.b64digest().encode("ascii") - return b"0\r\n%s:%s\r\n\r\n" % (name, checksum) - - return b"%s\r\n%s\r\n" % (hex_len, raw_chunk) - - def __iter__(self): - while not self._complete: - yield self._make_chunk() - - -class StreamingChecksumBody(StreamingBody): - def __init__(self, raw_stream, content_length, checksum, expected): - super().__init__(raw_stream, content_length) - self._checksum = checksum - self._expected = expected - - def read(self, amt=None): - chunk = super().read(amt=amt) - self._checksum.update(chunk) - if amt is None or (not chunk and amt > 0): - self._validate_checksum() - return chunk - - def readinto(self, b): - amount_read = super().readinto(b) - if amount_read == len(b): - view = b - else: - view = memoryview(b)[:amount_read] - self._checksum.update(view) - if amount_read == 0 and len(b) > 0: - self._validate_checksum() - return amount_read - - def _validate_checksum(self): - if self._checksum.digest() != base64.b64decode(self._expected): - error_msg = ( - f"Expected checksum {self._expected} did not match calculated " - f"checksum: {self._checksum.b64digest()}" - ) - raise FlexibleChecksumError(error_msg=error_msg) - - -def resolve_checksum_context(request, operation_model, params): - resolve_request_checksum_algorithm(request, operation_model, params) - resolve_response_checksum_algorithms(request, operation_model, params) - - -def resolve_request_checksum_algorithm( - request, - operation_model, - params, - supported_algorithms=None, -): - # If the header is already set by the customer, skip calculation - if has_checksum_header(request): - return - - checksum_context = request["context"].get("checksum", {}) - request_checksum_calculation = request["context"][ - "client_config" 
- ].request_checksum_calculation - http_checksum = operation_model.http_checksum - request_checksum_required = ( - operation_model.http_checksum_required - or http_checksum.get("requestChecksumRequired") - ) - algorithm_member = http_checksum.get("requestAlgorithmMember") - if algorithm_member and algorithm_member in params: - # If the client has opted into using flexible checksums and the - # request supports it, use that instead of checksum required - if supported_algorithms is None: - supported_algorithms = _SUPPORTED_CHECKSUM_ALGORITHMS - - algorithm_name = params[algorithm_member].lower() - if algorithm_name not in supported_algorithms: - if not HAS_CRT and algorithm_name in _CRT_CHECKSUM_ALGORITHMS: - raise MissingDependencyException( - msg=( - f"Using {algorithm_name.upper()} requires an " - "additional dependency. You will need to pip install " - "botocore[crt] before proceeding." - ) - ) - raise FlexibleChecksumError( - error_msg=f"Unsupported checksum algorithm: {algorithm_name}" - ) - elif request_checksum_required or ( - algorithm_member and request_checksum_calculation == "when_supported" - ): - # Don't use a default checksum for presigned requests. - if request["context"].get("is_presign_request"): - return - algorithm_name = DEFAULT_CHECKSUM_ALGORITHM.lower() - algorithm_member_header = _get_request_algorithm_member_header( - operation_model, request, algorithm_member - ) - if algorithm_member_header is not None: - checksum_context["request_algorithm_header"] = { - "name": algorithm_member_header, - "value": DEFAULT_CHECKSUM_ALGORITHM, - } - else: - return - - location_type = "header" - if ( - operation_model.has_streaming_input - and urlparse(request["url"]).scheme == "https" - ): - if request["context"]["client_config"].signature_version != 's3': - # Operations with streaming input must support trailers. - # We only support unsigned trailer checksums currently. As this - # disables payload signing we'll only use trailers over TLS. 
- location_type = "trailer" - - algorithm = { - "algorithm": algorithm_name, - "in": location_type, - "name": f"x-amz-checksum-{algorithm_name}", - } - - checksum_context["request_algorithm"] = algorithm - request["context"]["checksum"] = checksum_context - - -def _get_request_algorithm_member_header( - operation_model, request, algorithm_member -): - """Get the name of the header targeted by the "requestAlgorithmMember".""" - operation_input_shape = operation_model.input_shape - if not isinstance(operation_input_shape, StructureShape): - return - - algorithm_member_shape = operation_input_shape.members.get( - algorithm_member - ) - - if algorithm_member_shape: - return algorithm_member_shape.serialization.get("name") - - -def apply_request_checksum(request): - checksum_context = request.get("context", {}).get("checksum", {}) - algorithm = checksum_context.get("request_algorithm") - - if not algorithm: - return - - if algorithm == "conditional-md5": - # Special case to handle the http checksum required trait - conditionally_calculate_md5(request) - elif algorithm["in"] == "header": - _apply_request_header_checksum(request) - elif algorithm["in"] == "trailer": - _apply_request_trailer_checksum(request) - else: - raise FlexibleChecksumError( - error_msg="Unknown checksum variant: {}".format(algorithm["in"]) - ) - if "request_algorithm_header" in checksum_context: - request_algorithm_header = checksum_context["request_algorithm_header"] - request["headers"][request_algorithm_header["name"]] = ( - request_algorithm_header["value"] - ) - - -def _apply_request_header_checksum(request): - checksum_context = request.get("context", {}).get("checksum", {}) - algorithm = checksum_context.get("request_algorithm") - location_name = algorithm["name"] - if location_name in request["headers"]: - # If the header is already set by the customer, skip calculation - return - checksum_cls = _CHECKSUM_CLS.get(algorithm["algorithm"]) - digest = checksum_cls().handle(request["body"]) - request["headers"][location_name] = digest - _register_checksum_algorithm_feature_id(algorithm) - - -def _apply_request_trailer_checksum(request): - checksum_context = request.get("context", {}).get("checksum", {}) - algorithm = checksum_context.get("request_algorithm") - location_name = algorithm["name"] - checksum_cls = _CHECKSUM_CLS.get(algorithm["algorithm"]) - - headers = request["headers"] - body = request["body"] - - if location_name in headers: - # If the header is already set by the customer, skip calculation - return - - headers["Transfer-Encoding"] = "chunked" - if "Content-Encoding" in headers: - # We need to preserve the existing content encoding and add - # aws-chunked as a new content encoding. - headers["Content-Encoding"] += ",aws-chunked" - else: - headers["Content-Encoding"] = "aws-chunked" - headers["X-Amz-Trailer"] = location_name - _register_checksum_algorithm_feature_id(algorithm) - - content_length = determine_content_length(body) - if content_length is not None: - # Send the decoded content length if we can determine it. Some - # services such as S3 may require the decoded content length - headers["X-Amz-Decoded-Content-Length"] = str(content_length) - - if "Content-Length" in headers: - del headers["Content-Length"] - logger.debug( - "Removing the Content-Length header since 'chunked' is specified for Transfer-Encoding." 
- ) - - if isinstance(body, (bytes, bytearray)): - body = io.BytesIO(body) - - request["body"] = AwsChunkedWrapper( - body, - checksum_cls=checksum_cls, - checksum_name=location_name, - ) - - -def _register_checksum_algorithm_feature_id(algorithm): - checksum_algorithm_name = algorithm["algorithm"].upper() - if checksum_algorithm_name == "CRC64NVME": - checksum_algorithm_name = "CRC64" - checksum_algorithm_name_feature_id = ( - f"FLEXIBLE_CHECKSUMS_REQ_{checksum_algorithm_name}" - ) - register_feature_id(checksum_algorithm_name_feature_id) - - -def resolve_response_checksum_algorithms( - request, operation_model, params, supported_algorithms=None -): - http_checksum = operation_model.http_checksum - mode_member = http_checksum.get("requestValidationModeMember") - if mode_member and mode_member in params: - if supported_algorithms is None: - supported_algorithms = _SUPPORTED_CHECKSUM_ALGORITHMS - response_algorithms = { - a.lower() for a in http_checksum.get("responseAlgorithms", []) - } - - usable_algorithms = [] - for algorithm in _ALGORITHMS_PRIORITY_LIST: - if algorithm not in response_algorithms: - continue - if algorithm in supported_algorithms: - usable_algorithms.append(algorithm) - - checksum_context = request["context"].get("checksum", {}) - checksum_context["response_algorithms"] = usable_algorithms - request["context"]["checksum"] = checksum_context - - -def handle_checksum_body(http_response, response, context, operation_model): - headers = response["headers"] - checksum_context = context.get("checksum", {}) - algorithms = checksum_context.get("response_algorithms") - - if not algorithms: - return - - for algorithm in algorithms: - header_name = f"x-amz-checksum-{algorithm}" - # If the header is not found, check the next algorithm - if header_name not in headers: - continue - - # If a - is in the checksum this is not valid Base64. S3 returns - # checksums that include a -# suffix to indicate a checksum derived - # from the hash of all part checksums. We cannot wrap this response - if "-" in headers[header_name]: - continue - - if operation_model.has_streaming_output: - response["body"] = _handle_streaming_response( - http_response, response, algorithm - ) - else: - response["body"] = _handle_bytes_response( - http_response, response, algorithm - ) - - # Expose metadata that the checksum check actually occurred - checksum_context = response["context"].get("checksum", {}) - checksum_context["response_algorithm"] = algorithm - response["context"]["checksum"] = checksum_context - return - - logger.debug( - 'Skipping checksum validation. 
Response did not contain one of the following algorithms: %s.', - algorithms, - ) - - -def _handle_streaming_response(http_response, response, algorithm): - checksum_cls = _CHECKSUM_CLS.get(algorithm) - header_name = f"x-amz-checksum-{algorithm}" - return StreamingChecksumBody( - http_response.raw, - response["headers"].get("content-length"), - checksum_cls(), - response["headers"][header_name], - ) - - -def _handle_bytes_response(http_response, response, algorithm): - body = http_response.content - header_name = f"x-amz-checksum-{algorithm}" - checksum_cls = _CHECKSUM_CLS.get(algorithm) - checksum = checksum_cls() - checksum.update(body) - expected = response["headers"][header_name] - if checksum.digest() != base64.b64decode(expected): - error_msg = ( - f"Expected checksum {expected} did not match calculated " - f"checksum: {checksum.b64digest()}" - ) - raise FlexibleChecksumError(error_msg=error_msg) - return body - - -_CHECKSUM_CLS = { - "crc32": Crc32Checksum, - "sha1": Sha1Checksum, - "sha256": Sha256Checksum, -} -_CRT_CHECKSUM_ALGORITHMS = ["crc32", "crc32c", "crc64nvme"] -if HAS_CRT: - # Use CRT checksum implementations if available - _CRT_CHECKSUM_CLS = { - "crc32": CrtCrc32Checksum, - "crc32c": CrtCrc32cChecksum, - } - - if has_minimum_crt_version((0, 23, 4)): - # CRC64NVME support wasn't officially added until 0.23.4 - _CRT_CHECKSUM_CLS["crc64nvme"] = CrtCrc64NvmeChecksum - - _CHECKSUM_CLS.update(_CRT_CHECKSUM_CLS) - # Validate this list isn't out of sync with _CRT_CHECKSUM_CLS keys - assert all( - name in _CRT_CHECKSUM_ALGORITHMS for name in _CRT_CHECKSUM_CLS.keys() - ) -_SUPPORTED_CHECKSUM_ALGORITHMS = list(_CHECKSUM_CLS.keys()) -_ALGORITHMS_PRIORITY_LIST = ['crc64nvme', 'crc32c', 'crc32', 'sha1', 'sha256'] diff --git a/venv/Lib/site-packages/botocore/httpsession.py b/venv/Lib/site-packages/botocore/httpsession.py deleted file mode 100644 index 6d55e0e..0000000 --- a/venv/Lib/site-packages/botocore/httpsession.py +++ /dev/null @@ -1,512 +0,0 @@ -import logging -import os -import os.path -import socket -import sys -import warnings -from base64 import b64encode -from concurrent.futures import CancelledError - -from urllib3 import PoolManager, Timeout, proxy_from_url -from urllib3.exceptions import ( - ConnectTimeoutError as URLLib3ConnectTimeoutError, -) -from urllib3.exceptions import ( - LocationParseError, - NewConnectionError, - ProtocolError, - ProxyError, -) -from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError -from urllib3.exceptions import SSLError as URLLib3SSLError -from urllib3.util.retry import Retry -from urllib3.util.ssl_ import ( - OP_NO_COMPRESSION, - PROTOCOL_TLS, - OP_NO_SSLv2, - OP_NO_SSLv3, - is_ipaddress, - ssl, -) -from urllib3.util.url import parse_url - -try: - from urllib3.util.ssl_ import OP_NO_TICKET, PROTOCOL_TLS_CLIENT -except ImportError: - # Fallback directly to ssl for version of urllib3 before 1.26. - # They are available in the standard library starting in Python 3.6. - from ssl import OP_NO_TICKET, PROTOCOL_TLS_CLIENT - -try: - # pyopenssl will be removed in urllib3 2.0, we'll fall back to ssl_ at that point. - # This can be removed once our urllib3 floor is raised to >= 2.0. 
- with warnings.catch_warnings(): - warnings.simplefilter("ignore", category=DeprecationWarning) - # Always import the original SSLContext, even if it has been patched - from urllib3.contrib.pyopenssl import ( - orig_util_SSLContext as SSLContext, - ) -except (AttributeError, ImportError): - from urllib3.util.ssl_ import SSLContext - -try: - from urllib3.util.ssl_ import DEFAULT_CIPHERS -except ImportError: - # Defer to system configuration starting with - # urllib3 2.0. This will choose the ciphers provided by - # Openssl 1.1.1+ or secure system defaults. - DEFAULT_CIPHERS = None - -import botocore.awsrequest -from botocore.compat import ( - IPV6_ADDRZ_RE, - ensure_bytes, - filter_ssl_warnings, - unquote, - urlparse, -) -from botocore.exceptions import ( - ConnectionClosedError, - ConnectTimeoutError, - EndpointConnectionError, - HTTPClientError, - InvalidProxiesConfigError, - ProxyConnectionError, - ReadTimeoutError, - SSLError, -) - -filter_ssl_warnings() -logger = logging.getLogger(__name__) -DEFAULT_TIMEOUT = 60 -MAX_POOL_CONNECTIONS = 10 -DEFAULT_CA_BUNDLE = os.path.join(os.path.dirname(__file__), 'cacert.pem') - -try: - from certifi import where -except ImportError: - - def where(): - return DEFAULT_CA_BUNDLE - - -def get_cert_path(verify): - if verify is not True: - return verify - - cert_path = where() - logger.debug("Certificate path: %s", cert_path) - - return cert_path - - -def create_urllib3_context( - ssl_version=None, cert_reqs=None, options=None, ciphers=None -): - """This function is a vendored version of the same function in urllib3 - - We vendor this function to ensure that the SSL contexts we construct - always use the std lib SSLContext instead of pyopenssl. - """ - # PROTOCOL_TLS is deprecated in Python 3.10 - if not ssl_version or ssl_version == PROTOCOL_TLS: - ssl_version = PROTOCOL_TLS_CLIENT - - context = SSLContext(ssl_version) - - if ciphers: - context.set_ciphers(ciphers) - elif DEFAULT_CIPHERS: - context.set_ciphers(DEFAULT_CIPHERS) - - # Setting the default here, as we may have no ssl module on import - cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs - - if options is None: - options = 0 - # SSLv2 is easily broken and is considered harmful and dangerous - options |= OP_NO_SSLv2 - # SSLv3 has several problems and is now dangerous - options |= OP_NO_SSLv3 - # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ - # (issue urllib3#309) - options |= OP_NO_COMPRESSION - # TLSv1.2 only. Unless set explicitly, do not request tickets. - # This may save some bandwidth on wire, and although the ticket is encrypted, - # there is a risk associated with it being on wire, - # if the server is not rotating its ticketing keys properly. - options |= OP_NO_TICKET - - context.options |= options - - # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is - # necessary for conditional client cert authentication with TLS 1.3. - # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older - # versions of Python. 
We only enable on Python 3.7.4+ or if certificate - # verification is enabled to work around Python issue #37428 - # See: https://bugs.python.org/issue37428 - if ( - cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4) - ) and getattr(context, "post_handshake_auth", None) is not None: - context.post_handshake_auth = True - - def disable_check_hostname(): - if ( - getattr(context, "check_hostname", None) is not None - ): # Platform-specific: Python 3.2 - # We do our own verification, including fingerprints and alternative - # hostnames. So disable it here - context.check_hostname = False - - # The order of the below lines setting verify_mode and check_hostname - # matter due to safe-guards SSLContext has to prevent an SSLContext with - # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more - # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used - # or not so we don't know the initial state of the freshly created SSLContext. - if cert_reqs == ssl.CERT_REQUIRED: - context.verify_mode = cert_reqs - disable_check_hostname() - else: - disable_check_hostname() - context.verify_mode = cert_reqs - - # Enable logging of TLS session keys via defacto standard environment variable - # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. - if hasattr(context, "keylog_filename"): - sslkeylogfile = os.environ.get("SSLKEYLOGFILE") - if sslkeylogfile and not sys.flags.ignore_environment: - context.keylog_filename = sslkeylogfile - - return context - - -def ensure_boolean(val): - """Ensures a boolean value if a string or boolean is provided - - For strings, the value for True/False is case insensitive - """ - if isinstance(val, bool): - return val - else: - return val.lower() == 'true' - - -def mask_proxy_url(proxy_url): - """ - Mask proxy url credentials. - - :type proxy_url: str - :param proxy_url: The proxy url, i.e. https://username:password@proxy.com - - :return: Masked proxy url, i.e. https://***:***@proxy.com - """ - mask = '*' * 3 - parsed_url = urlparse(proxy_url) - if parsed_url.username: - proxy_url = proxy_url.replace(parsed_url.username, mask, 1) - if parsed_url.password: - proxy_url = proxy_url.replace(parsed_url.password, mask, 1) - return proxy_url - - -def _is_ipaddress(host): - """Wrap urllib3's is_ipaddress to support bracketed IPv6 addresses.""" - return is_ipaddress(host) or bool(IPV6_ADDRZ_RE.match(host)) - - -class ProxyConfiguration: - """Represents a proxy configuration dictionary and additional settings. - - This class represents a proxy configuration dictionary and provides utility - functions to retrieve well structured proxy urls and proxy headers from the - proxy configuration dictionary. 
- """ - - def __init__(self, proxies=None, proxies_settings=None): - if proxies is None: - proxies = {} - if proxies_settings is None: - proxies_settings = {} - - self._proxies = proxies - self._proxies_settings = proxies_settings - - def proxy_url_for(self, url): - """Retrieves the corresponding proxy url for a given url.""" - parsed_url = urlparse(url) - proxy = self._proxies.get(parsed_url.scheme) - if proxy: - proxy = self._fix_proxy_url(proxy) - return proxy - - def proxy_headers_for(self, proxy_url): - """Retrieves the corresponding proxy headers for a given proxy url.""" - headers = {} - username, password = self._get_auth_from_url(proxy_url) - if username and password: - basic_auth = self._construct_basic_auth(username, password) - headers['Proxy-Authorization'] = basic_auth - return headers - - @property - def settings(self): - return self._proxies_settings - - def _fix_proxy_url(self, proxy_url): - if proxy_url.startswith('http:') or proxy_url.startswith('https:'): - return proxy_url - elif proxy_url.startswith('//'): - return 'http:' + proxy_url - else: - return 'http://' + proxy_url - - def _construct_basic_auth(self, username, password): - auth_str = f'{username}:{password}' - encoded_str = b64encode(auth_str.encode('ascii')).strip().decode() - return f'Basic {encoded_str}' - - def _get_auth_from_url(self, url): - parsed_url = urlparse(url) - try: - return unquote(parsed_url.username), unquote(parsed_url.password) - except (AttributeError, TypeError): - return None, None - - -class URLLib3Session: - """A basic HTTP client that supports connection pooling and proxies. - - This class is inspired by requests.adapters.HTTPAdapter, but has been - boiled down to meet the use cases needed by botocore. For the most part - this classes matches the functionality of HTTPAdapter in requests v2.7.0 - (the same as our vendored version). The only major difference of note is - that we currently do not support sending chunked requests. While requests - v2.7.0 implemented this themselves, later version urllib3 support this - directly via a flag to urlopen so enabling it if needed should be trivial. 
- """ - - def __init__( - self, - verify=True, - proxies=None, - timeout=None, - max_pool_connections=MAX_POOL_CONNECTIONS, - socket_options=None, - client_cert=None, - proxies_config=None, - ): - self._verify = verify - self._proxy_config = ProxyConfiguration( - proxies=proxies, proxies_settings=proxies_config - ) - self._pool_classes_by_scheme = { - 'http': botocore.awsrequest.AWSHTTPConnectionPool, - 'https': botocore.awsrequest.AWSHTTPSConnectionPool, - } - if timeout is None: - timeout = DEFAULT_TIMEOUT - if not isinstance(timeout, (int, float)): - timeout = Timeout(connect=timeout[0], read=timeout[1]) - - self._cert_file = None - self._key_file = None - if isinstance(client_cert, str): - self._cert_file = client_cert - elif isinstance(client_cert, tuple): - self._cert_file, self._key_file = client_cert - - self._timeout = timeout - self._max_pool_connections = max_pool_connections - self._socket_options = socket_options - if socket_options is None: - self._socket_options = [] - self._proxy_managers = {} - self._manager = PoolManager(**self._get_pool_manager_kwargs()) - self._manager.pool_classes_by_scheme = self._pool_classes_by_scheme - - def _proxies_kwargs(self, **kwargs): - proxies_settings = self._proxy_config.settings - proxies_kwargs = { - 'use_forwarding_for_https': proxies_settings.get( - 'proxy_use_forwarding_for_https' - ), - **kwargs, - } - return {k: v for k, v in proxies_kwargs.items() if v is not None} - - def _get_pool_manager_kwargs(self, **extra_kwargs): - pool_manager_kwargs = { - 'timeout': self._timeout, - 'maxsize': self._max_pool_connections, - 'ssl_context': self._get_ssl_context(), - 'socket_options': self._socket_options, - 'cert_file': self._cert_file, - 'key_file': self._key_file, - } - pool_manager_kwargs.update(**extra_kwargs) - return pool_manager_kwargs - - def _get_ssl_context(self): - return create_urllib3_context() - - def _get_proxy_manager(self, proxy_url): - if proxy_url not in self._proxy_managers: - proxy_headers = self._proxy_config.proxy_headers_for(proxy_url) - proxy_ssl_context = self._setup_proxy_ssl_context(proxy_url) - proxy_manager_kwargs = self._get_pool_manager_kwargs( - proxy_headers=proxy_headers - ) - proxy_manager_kwargs.update( - self._proxies_kwargs(proxy_ssl_context=proxy_ssl_context) - ) - proxy_manager = proxy_from_url(proxy_url, **proxy_manager_kwargs) - proxy_manager.pool_classes_by_scheme = self._pool_classes_by_scheme - self._proxy_managers[proxy_url] = proxy_manager - - return self._proxy_managers[proxy_url] - - def _path_url(self, url): - parsed_url = urlparse(url) - path = parsed_url.path - if not path: - path = '/' - if parsed_url.query: - path = path + '?' 
+ parsed_url.query - return path - - def _setup_ssl_cert(self, conn, url, verify): - if url.lower().startswith('https') and verify: - conn.cert_reqs = 'CERT_REQUIRED' - conn.ca_certs = get_cert_path(verify) - else: - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None - - def _setup_proxy_ssl_context(self, proxy_url): - proxies_settings = self._proxy_config.settings - proxy_ca_bundle = proxies_settings.get('proxy_ca_bundle') - proxy_cert = proxies_settings.get('proxy_client_cert') - if proxy_ca_bundle is None and proxy_cert is None: - return None - - context = self._get_ssl_context() - try: - url = parse_url(proxy_url) - # urllib3 disables this by default but we need it for proper - # proxy tls negotiation when proxy_url is not an IP Address - if not _is_ipaddress(url.host): - context.check_hostname = True - if proxy_ca_bundle is not None: - context.load_verify_locations(cafile=proxy_ca_bundle) - - if isinstance(proxy_cert, tuple): - context.load_cert_chain(proxy_cert[0], keyfile=proxy_cert[1]) - elif isinstance(proxy_cert, str): - context.load_cert_chain(proxy_cert) - - return context - except (OSError, URLLib3SSLError, LocationParseError) as e: - raise InvalidProxiesConfigError(error=e) - - def _get_connection_manager(self, url, proxy_url=None): - if proxy_url: - manager = self._get_proxy_manager(proxy_url) - else: - manager = self._manager - return manager - - def _get_request_target(self, url, proxy_url): - has_proxy = proxy_url is not None - - if not has_proxy: - return self._path_url(url) - - # HTTP proxies expect the request_target to be the absolute url to know - # which host to establish a connection to. urllib3 also supports - # forwarding for HTTPS through the 'use_forwarding_for_https' parameter. - proxy_scheme = urlparse(proxy_url).scheme - using_https_forwarding_proxy = ( - proxy_scheme == 'https' - and self._proxies_kwargs().get('use_forwarding_for_https', False) - ) - - if using_https_forwarding_proxy or url.startswith('http:'): - return url - else: - return self._path_url(url) - - def _chunked(self, headers): - transfer_encoding = headers.get('Transfer-Encoding', b'') - transfer_encoding = ensure_bytes(transfer_encoding) - return transfer_encoding.lower() == b'chunked' - - def close(self): - self._manager.clear() - for manager in self._proxy_managers.values(): - manager.clear() - - def send(self, request): - try: - proxy_url = self._proxy_config.proxy_url_for(request.url) - manager = self._get_connection_manager(request.url, proxy_url) - conn = manager.connection_from_url(request.url) - self._setup_ssl_cert(conn, request.url, self._verify) - if ensure_boolean( - os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER', '') - ): - # This is currently an "experimental" feature which provides - # no guarantees of backwards compatibility. It may be subject - # to change or removal in any patch version. Anyone opting in - # to this feature should strictly pin botocore. 
- host = urlparse(request.url).hostname - conn.proxy_headers['host'] = host - - request_target = self._get_request_target(request.url, proxy_url) - urllib_response = conn.urlopen( - method=request.method, - url=request_target, - body=request.body, - headers=request.headers, - retries=Retry(False), - assert_same_host=False, - preload_content=False, - decode_content=False, - chunked=self._chunked(request.headers), - ) - - http_response = botocore.awsrequest.AWSResponse( - request.url, - urllib_response.status, - urllib_response.headers, - urllib_response, - ) - - if not request.stream_output: - # Cause the raw stream to be exhausted immediately. We do it - # this way instead of using preload_content because - # preload_content will never buffer chunked responses - http_response.content - - return http_response - except URLLib3SSLError as e: - raise SSLError(endpoint_url=request.url, error=e) - except (NewConnectionError, socket.gaierror) as e: - raise EndpointConnectionError(endpoint_url=request.url, error=e) - except ProxyError as e: - raise ProxyConnectionError( - proxy_url=mask_proxy_url(proxy_url), error=e - ) - except URLLib3ConnectTimeoutError as e: - raise ConnectTimeoutError(endpoint_url=request.url, error=e) - except URLLib3ReadTimeoutError as e: - raise ReadTimeoutError(endpoint_url=request.url, error=e) - except ProtocolError as e: - raise ConnectionClosedError( - error=e, request=request, endpoint_url=request.url - ) - except CancelledError: - raise - except Exception as e: - message = 'Exception received when sending urllib3 HTTP request' - logger.debug(message, exc_info=True) - raise HTTPClientError(error=e) diff --git a/venv/Lib/site-packages/botocore/loaders.py b/venv/Lib/site-packages/botocore/loaders.py deleted file mode 100644 index f5072a3..0000000 --- a/venv/Lib/site-packages/botocore/loaders.py +++ /dev/null @@ -1,525 +0,0 @@ -# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Module for loading various model files. - -This module provides the classes that are used to load models used -by botocore. This can include: - - * Service models (e.g. the model for EC2, S3, DynamoDB, etc.) - * Service model extras which customize the service models - * Other models associated with a service (pagination, waiters) - * Non service-specific config (Endpoint data, retry config) - -Loading a module is broken down into several steps: - - * Determining the path to load - * Search the data_path for files to load - * The mechanics of loading the file - * Searching for extras and applying them to the loaded file - -The last item is used so that other faster loading mechanism -besides the default JSON loader can be used. - -The Search Path -=============== - -Similar to how the PATH environment variable is to finding executables -and the PYTHONPATH environment variable is to finding python modules -to import, the botocore loaders have the concept of a data path exposed -through AWS_DATA_PATH. 
- -This enables end users to provide additional search paths where we -will attempt to load models outside of the models we ship with -botocore. When you create a ``Loader``, there are two paths -automatically added to the model search path: - - * /data/ - * ~/.aws/models - -The first value is the path where all the model files shipped with -botocore are located. - -The second path is so that users can just drop new model files in -``~/.aws/models`` without having to mess around with the AWS_DATA_PATH. - -The AWS_DATA_PATH using the platform specific path separator to -separate entries (typically ``:`` on linux and ``;`` on windows). - - -Directory Layout -================ - -The Loader expects a particular directory layout. In order for any -directory specified in AWS_DATA_PATH to be considered, it must have -this structure for service models:: - - - | - |-- servicename1 - | |-- 2012-10-25 - | |-- service-2.json - |-- ec2 - | |-- 2014-01-01 - | | |-- paginators-1.json - | | |-- service-2.json - | | |-- waiters-2.json - | |-- 2015-03-01 - | |-- paginators-1.json - | |-- service-2.json - | |-- waiters-2.json - | |-- service-2.sdk-extras.json - - -That is: - - * The root directory contains sub directories that are the name - of the services. - * Within each service directory, there's a sub directory for each - available API version. - * Within each API version, there are model specific files, including - (but not limited to): service-2.json, waiters-2.json, paginators-1.json - -The ``-1`` and ``-2`` suffix at the end of the model files denote which version -schema is used within the model. Even though this information is available in -the ``version`` key within the model, this version is also part of the filename -so that code does not need to load the JSON model in order to determine which -version to use. - -The ``sdk-extras`` and similar files represent extra data that needs to be -applied to the model after it is loaded. Data in these files might represent -information that doesn't quite fit in the original models, but is still needed -for the sdk. For instance, additional operation parameters might be added here -which don't represent the actual service api. -""" - -import logging -import os - -from botocore import BOTOCORE_ROOT -from botocore.compat import HAS_GZIP, OrderedDict, json -from botocore.exceptions import DataNotFoundError, UnknownServiceError -from botocore.utils import deep_merge - -_JSON_OPEN_METHODS = { - '.json': open, -} - - -if HAS_GZIP: - from gzip import open as gzip_open - - _JSON_OPEN_METHODS['.json.gz'] = gzip_open - - -logger = logging.getLogger(__name__) - - -def instance_cache(func): - """Cache the result of a method on a per instance basis. - - This is not a general purpose caching decorator. In order - for this to be used, it must be used on methods on an - instance, and that instance *must* provide a - ``self._cache`` dictionary. - - """ - - def _wrapper(self, *args, **kwargs): - key = (func.__name__,) + args - for pair in sorted(kwargs.items()): - key += pair - if key in self._cache: - return self._cache[key] - data = func(self, *args, **kwargs) - self._cache[key] = data - return data - - return _wrapper - - -class JSONFileLoader: - """Loader JSON files. - - This class can load the default format of models, which is a JSON file. - - """ - - def exists(self, file_path): - """Checks if the file exists. - - :type file_path: str - :param file_path: The full path to the file to load without - the '.json' extension. 
- - :return: True if file path exists, False otherwise. - - """ - for ext in _JSON_OPEN_METHODS: - if os.path.isfile(file_path + ext): - return True - return False - - def _load_file(self, full_path, open_method): - if not os.path.isfile(full_path): - return - - # By default the file will be opened with locale encoding on Python 3. - # We specify "utf8" here to ensure the correct behavior. - with open_method(full_path, 'rb') as fp: - payload = fp.read().decode('utf-8') - - logger.debug("Loading JSON file: %s", full_path) - return json.loads(payload, object_pairs_hook=OrderedDict) - - def load_file(self, file_path): - """Attempt to load the file path. - - :type file_path: str - :param file_path: The full path to the file to load without - the '.json' extension. - - :return: The loaded data if it exists, otherwise None. - - """ - for ext, open_method in _JSON_OPEN_METHODS.items(): - data = self._load_file(file_path + ext, open_method) - if data is not None: - return data - return None - - -def create_loader(search_path_string=None): - """Create a Loader class. - - This factory function creates a loader given a search string path. - - :type search_string_path: str - :param search_string_path: The AWS_DATA_PATH value. A string - of data path values separated by the ``os.path.pathsep`` value, - which is typically ``:`` on POSIX platforms and ``;`` on - windows. - - :return: A ``Loader`` instance. - - """ - if search_path_string is None: - return Loader() - paths = [] - extra_paths = search_path_string.split(os.pathsep) - for path in extra_paths: - path = os.path.expanduser(os.path.expandvars(path)) - paths.append(path) - return Loader(extra_search_paths=paths) - - -class Loader: - """Find and load data models. - - This class will handle searching for and loading data models. - - The main method used here is ``load_service_model``, which is a - convenience method over ``load_data`` and ``determine_latest_version``. - - """ - - FILE_LOADER_CLASS = JSONFileLoader - # The included models in botocore/data/ that we ship with botocore. - BUILTIN_DATA_PATH = os.path.join(BOTOCORE_ROOT, 'data') - # For convenience we automatically add ~/.aws/models to the data path. - CUSTOMER_DATA_PATH = os.path.join( - os.path.expanduser('~'), '.aws', 'models' - ) - BUILTIN_EXTRAS_TYPES = ['sdk'] - - def __init__( - self, - extra_search_paths=None, - file_loader=None, - cache=None, - include_default_search_paths=True, - include_default_extras=True, - ): - self._cache = {} - if file_loader is None: - file_loader = self.FILE_LOADER_CLASS() - self.file_loader = file_loader - if extra_search_paths is not None: - self._search_paths = extra_search_paths - else: - self._search_paths = [] - if include_default_search_paths: - self._search_paths.extend( - [self.CUSTOMER_DATA_PATH, self.BUILTIN_DATA_PATH] - ) - - self._extras_types = [] - if include_default_extras: - self._extras_types.extend(self.BUILTIN_EXTRAS_TYPES) - - self._extras_processor = ExtrasProcessor() - - @property - def search_paths(self): - return self._search_paths - - @property - def extras_types(self): - return self._extras_types - - @instance_cache - def list_available_services(self, type_name): - """List all known services. - - This will traverse the search path and look for all known - services. - - :type type_name: str - :param type_name: The type of the service (service-2, - paginators-1, waiters-2, etc). This is needed because - the list of available services depends on the service - type. 
For example, the latest API version available for - a resource-1.json file may not be the latest API version - available for a services-2.json file. - - :return: A list of all services. The list of services will - be sorted. - - """ - services = set() - for possible_path in self._potential_locations(): - # Any directory in the search path is potentially a service. - # We'll collect any initial list of potential services, - # but we'll then need to further process these directories - # by searching for the corresponding type_name in each - # potential directory. - possible_services = [ - d - for d in os.listdir(possible_path) - if os.path.isdir(os.path.join(possible_path, d)) - ] - for service_name in possible_services: - full_dirname = os.path.join(possible_path, service_name) - api_versions = os.listdir(full_dirname) - for api_version in api_versions: - full_load_path = os.path.join( - full_dirname, api_version, type_name - ) - if self.file_loader.exists(full_load_path): - services.add(service_name) - break - return sorted(services) - - @instance_cache - def determine_latest_version(self, service_name, type_name): - """Find the latest API version available for a service. - - :type service_name: str - :param service_name: The name of the service. - - :type type_name: str - :param type_name: The type of the service (service-2, - paginators-1, waiters-2, etc). This is needed because - the latest API version available can depend on the service - type. For example, the latest API version available for - a resource-1.json file may not be the latest API version - available for a services-2.json file. - - :rtype: str - :return: The latest API version. If the service does not exist - or does not have any available API data, then a - ``DataNotFoundError`` exception will be raised. - - """ - return max(self.list_api_versions(service_name, type_name)) - - @instance_cache - def list_api_versions(self, service_name, type_name): - """List all API versions available for a particular service type - - :type service_name: str - :param service_name: The name of the service - - :type type_name: str - :param type_name: The type name for the service (i.e service-2, - paginators-1, etc.) - - :rtype: list - :return: A list of API version strings in sorted order. - - """ - known_api_versions = set() - for possible_path in self._potential_locations( - service_name, must_exist=True, is_dir=True - ): - for dirname in os.listdir(possible_path): - full_path = os.path.join(possible_path, dirname, type_name) - # Only add to the known_api_versions if the directory - # contains a service-2, paginators-1, etc. file corresponding - # to the type_name passed in. - if self.file_loader.exists(full_path): - known_api_versions.add(dirname) - if not known_api_versions: - raise DataNotFoundError(data_path=service_name) - return sorted(known_api_versions) - - @instance_cache - def load_service_model(self, service_name, type_name, api_version=None): - """Load a botocore service model - - This is the main method for loading botocore models (e.g. a service - model, pagination configs, waiter configs, etc.). - - :type service_name: str - :param service_name: The name of the service (e.g ``ec2``, ``s3``). - - :type type_name: str - :param type_name: The model type. Valid types include, but are not - limited to: ``service-2``, ``paginators-1``, ``waiters-2``. - - :type api_version: str - :param api_version: The API version to load. If this is not - provided, then the latest API version will be used. 
- - :type load_extras: bool - :param load_extras: Whether or not to load the tool extras which - contain additional data to be added to the model. - - :raises: UnknownServiceError if there is no known service with - the provided service_name. - - :raises: DataNotFoundError if no data could be found for the - service_name/type_name/api_version. - - :return: The loaded data, as a python type (e.g. dict, list, etc). - """ - # Wrapper around the load_data. This will calculate the path - # to call load_data with. - known_services = self.list_available_services(type_name) - if service_name not in known_services: - raise UnknownServiceError( - service_name=service_name, - known_service_names=', '.join(sorted(known_services)), - ) - if api_version is None: - api_version = self.determine_latest_version( - service_name, type_name - ) - full_path = os.path.join(service_name, api_version, type_name) - model = self.load_data(full_path) - - # Load in all the extras - extras_data = self._find_extras(service_name, type_name, api_version) - self._extras_processor.process(model, extras_data) - - return model - - def _find_extras(self, service_name, type_name, api_version): - """Creates an iterator over all the extras data.""" - for extras_type in self.extras_types: - extras_name = f'{type_name}.{extras_type}-extras' - full_path = os.path.join(service_name, api_version, extras_name) - - try: - yield self.load_data(full_path) - except DataNotFoundError: - pass - - @instance_cache - def load_data_with_path(self, name): - """Same as ``load_data`` but returns file path as second return value. - - :type name: str - :param name: The data path, i.e ``ec2/2015-03-01/service-2``. - - :return: Tuple of the loaded data and the path to the data file - where the data was loaded from. If no data could be found then a - DataNotFoundError is raised. - """ - for possible_path in self._potential_locations(name): - found = self.file_loader.load_file(possible_path) - if found is not None: - return found, possible_path - - # We didn't find anything that matched on any path. - raise DataNotFoundError(data_path=name) - - def load_data(self, name): - """Load data given a data path. - - This is a low level method that will search through the various - search paths until it's able to load a value. This is typically - only needed to load *non* model files (such as _endpoints and - _retry). If you need to load model files, you should prefer - ``load_service_model``. Use ``load_data_with_path`` to get the - data path of the data file as second return value. - - :type name: str - :param name: The data path, i.e ``ec2/2015-03-01/service-2``. - - :return: The loaded data. If no data could be found then - a DataNotFoundError is raised. - """ - data, _ = self.load_data_with_path(name) - return data - - def _potential_locations(self, name=None, must_exist=False, is_dir=False): - # Will give an iterator over the full path of potential locations - # according to the search path. - for path in self.search_paths: - if os.path.isdir(path): - full_path = path - if name is not None: - full_path = os.path.join(path, name) - if not must_exist: - yield full_path - else: - if is_dir and os.path.isdir(full_path): - yield full_path - elif os.path.exists(full_path): - yield full_path - - def is_builtin_path(self, path): - """Whether a given path is within the package's data directory. 
- - This method can be used together with load_data_with_path(name) - to determine if data has been loaded from a file bundled with the - package, as opposed to a file in a separate location. - - :type path: str - :param path: The file path to check. - - :return: Whether the given path is within the package's data directory. - """ - path = os.path.expanduser(os.path.expandvars(path)) - return path.startswith(self.BUILTIN_DATA_PATH) - - -class ExtrasProcessor: - """Processes data from extras files into service models.""" - - def process(self, original_model, extra_models): - """Processes data from a list of loaded extras files into a model - - :type original_model: dict - :param original_model: The service model to load all the extras into. - - :type extra_models: iterable of dict - :param extra_models: A list of loaded extras models. - """ - for extras in extra_models: - self._process(original_model, extras) - - def _process(self, model, extra_model): - """Process a single extras model into a service model.""" - if 'merge' in extra_model: - deep_merge(model, extra_model['merge']) diff --git a/venv/Lib/site-packages/botocore/model.py b/venv/Lib/site-packages/botocore/model.py deleted file mode 100644 index b198652..0000000 --- a/venv/Lib/site-packages/botocore/model.py +++ /dev/null @@ -1,1006 +0,0 @@ -# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Abstractions to interact with service models.""" - -from collections import defaultdict -from typing import NamedTuple, Union - -from botocore.auth import resolve_auth_type -from botocore.compat import OrderedDict -from botocore.exceptions import ( - MissingServiceIdError, - UndefinedModelAttributeError, - UnsupportedServiceProtocolsError, -) -from botocore.utils import ( - PRIORITY_ORDERED_SUPPORTED_PROTOCOLS, - CachedProperty, - hyphenize_service_id, - instance_cache, -) - -NOT_SET = object() - - -class NoShapeFoundError(Exception): - pass - - -class InvalidShapeError(Exception): - pass - - -class OperationNotFoundError(Exception): - pass - - -class InvalidShapeReferenceError(Exception): - pass - - -class ServiceId(str): - def hyphenize(self): - return hyphenize_service_id(self) - - -class Shape: - """Object representing a shape from the service model.""" - - # To simplify serialization logic, all shape params that are - # related to serialization are moved from the top level hash into - # a 'serialization' hash. This list below contains the names of all - # the attributes that should be moved. 
- SERIALIZED_ATTRS = [ - 'locationName', - 'queryName', - 'flattened', - 'location', - 'payload', - 'streaming', - 'timestampFormat', - 'xmlNamespace', - 'resultWrapper', - 'xmlAttribute', - 'eventstream', - 'event', - 'eventheader', - 'eventpayload', - 'jsonvalue', - 'timestampFormat', - 'hostLabel', - ] - METADATA_ATTRS = [ - 'required', - 'min', - 'max', - 'pattern', - 'sensitive', - 'enum', - 'idempotencyToken', - 'error', - 'exception', - 'endpointdiscoveryid', - 'retryable', - 'document', - 'union', - 'contextParam', - 'clientContextParams', - 'requiresLength', - ] - MAP_TYPE = OrderedDict - - def __init__(self, shape_name, shape_model, shape_resolver=None): - """ - - :type shape_name: string - :param shape_name: The name of the shape. - - :type shape_model: dict - :param shape_model: The shape model. This would be the value - associated with the key in the "shapes" dict of the - service model (i.e ``model['shapes'][shape_name]``) - - :type shape_resolver: botocore.model.ShapeResolver - :param shape_resolver: A shape resolver object. This is used to - resolve references to other shapes. For scalar shape types - (string, integer, boolean, etc.), this argument is not - required. If a shape_resolver is not provided for a complex - type, then a ``ValueError`` will be raised when an attempt - to resolve a shape is made. - - """ - self.name = shape_name - self.type_name = shape_model['type'] - self.documentation = shape_model.get('documentation', '') - self._shape_model = shape_model - if shape_resolver is None: - # If a shape_resolver is not provided, we create an object - # that will throw errors if you attempt to resolve - # a shape. This is actually ok for scalar shapes - # because they don't need to resolve shapes and shouldn't - # be required to provide an object they won't use. - shape_resolver = UnresolvableShapeMap() - self._shape_resolver = shape_resolver - self._cache = {} - - @CachedProperty - def serialization(self): - """Serialization information about the shape. - - This contains information that may be needed for input serialization - or response parsing. This can include: - - * name - * queryName - * flattened - * location - * payload - * streaming - * xmlNamespace - * resultWrapper - * xmlAttribute - * jsonvalue - * timestampFormat - - :rtype: dict - :return: Serialization information about the shape. - - """ - model = self._shape_model - serialization = {} - for attr in self.SERIALIZED_ATTRS: - if attr in self._shape_model: - serialization[attr] = model[attr] - # For consistency, locationName is renamed to just 'name'. - if 'locationName' in serialization: - serialization['name'] = serialization.pop('locationName') - return serialization - - @CachedProperty - def metadata(self): - """Metadata about the shape. - - This requires optional information about the shape, including: - - * min - * max - * pattern - * enum - * sensitive - * required - * idempotencyToken - * document - * union - * contextParam - * clientContextParams - * requiresLength - - :rtype: dict - :return: Metadata about the shape. - - """ - model = self._shape_model - metadata = {} - for attr in self.METADATA_ATTRS: - if attr in self._shape_model: - metadata[attr] = model[attr] - return metadata - - @CachedProperty - def required_members(self): - """A list of members that are required. - - A structure shape can define members that are required. - This value will return a list of required members. If there - are no required members an empty list is returned. 
- - """ - return self.metadata.get('required', []) - - def _resolve_shape_ref(self, shape_ref): - return self._shape_resolver.resolve_shape_ref(shape_ref) - - def __repr__(self): - return f"<{self.__class__.__name__}({self.name})>" - - @property - def event_stream_name(self): - return None - - -class StructureShape(Shape): - @CachedProperty - def members(self): - members = self._shape_model.get('members', self.MAP_TYPE()) - # The members dict looks like: - # 'members': { - # 'MemberName': {'shape': 'shapeName'}, - # 'MemberName2': {'shape': 'shapeName'}, - # } - # We return a dict of member name to Shape object. - shape_members = self.MAP_TYPE() - for name, shape_ref in members.items(): - shape_members[name] = self._resolve_shape_ref(shape_ref) - return shape_members - - @CachedProperty - def event_stream_name(self): - for member_name, member in self.members.items(): - if member.serialization.get('eventstream'): - return member_name - return None - - @CachedProperty - def error_code(self): - if not self.metadata.get('exception', False): - return None - error_metadata = self.metadata.get("error", {}) - code = error_metadata.get("code") - if code: - return code - # Use the exception name if there is no explicit code modeled - return self.name - - @CachedProperty - def is_document_type(self): - return self.metadata.get('document', False) - - @CachedProperty - def is_tagged_union(self): - return self.metadata.get('union', False) - - -class ListShape(Shape): - @CachedProperty - def member(self): - return self._resolve_shape_ref(self._shape_model['member']) - - -class MapShape(Shape): - @CachedProperty - def key(self): - return self._resolve_shape_ref(self._shape_model['key']) - - @CachedProperty - def value(self): - return self._resolve_shape_ref(self._shape_model['value']) - - -class StringShape(Shape): - @CachedProperty - def enum(self): - return self.metadata.get('enum', []) - - -class StaticContextParameter(NamedTuple): - name: str - value: Union[bool, str] - - -class ContextParameter(NamedTuple): - name: str - member_name: str - - -class ClientContextParameter(NamedTuple): - name: str - type: str - documentation: str - - -class ServiceModel: - """ - - :ivar service_description: The parsed service description dictionary. - - """ - - def __init__(self, service_description, service_name=None): - """ - - :type service_description: dict - :param service_description: The service description model. This value - is obtained from a botocore.loader.Loader, or from directly loading - the file yourself:: - - service_description = json.load( - open('/path/to/service-description-model.json')) - model = ServiceModel(service_description) - - :type service_name: str - :param service_name: The name of the service. Normally this is - the endpoint prefix defined in the service_description. However, - you can override this value to provide a more convenient name. - This is done in a few places in botocore (ses instead of email, - emr instead of elasticmapreduce). If this value is not provided, - it will default to the endpointPrefix defined in the model. - - """ - self._service_description = service_description - # We want clients to be able to access metadata directly. 
- self.metadata = service_description.get('metadata', {}) - self._shape_resolver = ShapeResolver( - service_description.get('shapes', {}) - ) - self._signature_version = NOT_SET - self._service_name = service_name - self._instance_cache = {} - - def shape_for(self, shape_name, member_traits=None): - return self._shape_resolver.get_shape_by_name( - shape_name, member_traits - ) - - def shape_for_error_code(self, error_code): - return self._error_code_cache.get(error_code, None) - - @CachedProperty - def _error_code_cache(self): - error_code_cache = {} - for error_shape in self.error_shapes: - code = error_shape.error_code - error_code_cache[code] = error_shape - return error_code_cache - - def resolve_shape_ref(self, shape_ref): - return self._shape_resolver.resolve_shape_ref(shape_ref) - - @CachedProperty - def shape_names(self): - return list(self._service_description.get('shapes', {})) - - @CachedProperty - def error_shapes(self): - error_shapes = [] - for shape_name in self.shape_names: - error_shape = self.shape_for(shape_name) - if error_shape.metadata.get('exception', False): - error_shapes.append(error_shape) - return error_shapes - - @instance_cache - def operation_model(self, operation_name): - try: - model = self._service_description['operations'][operation_name] - except KeyError: - raise OperationNotFoundError(operation_name) - return OperationModel(model, self, operation_name) - - @CachedProperty - def documentation(self): - return self._service_description.get('documentation', '') - - @CachedProperty - def operation_names(self): - return list(self._service_description.get('operations', [])) - - @CachedProperty - def service_name(self): - """The name of the service. - - This defaults to the endpointPrefix defined in the service model. - However, this value can be overriden when a ``ServiceModel`` is - created. If a service_name was not provided when the ``ServiceModel`` - was created and if there is no endpointPrefix defined in the - service model, then an ``UndefinedModelAttributeError`` exception - will be raised. - - """ - if self._service_name is not None: - return self._service_name - else: - return self.endpoint_prefix - - @CachedProperty - def service_id(self): - try: - return ServiceId(self._get_metadata_property('serviceId')) - except UndefinedModelAttributeError: - raise MissingServiceIdError(service_name=self._service_name) - - @CachedProperty - def signing_name(self): - """The name to use when computing signatures. - - If the model does not define a signing name, this - value will be the endpoint prefix defined in the model. 
- """ - signing_name = self.metadata.get('signingName') - if signing_name is None: - signing_name = self.endpoint_prefix - return signing_name - - @CachedProperty - def api_version(self): - return self._get_metadata_property('apiVersion') - - @CachedProperty - def protocol(self): - return self._get_metadata_property('protocol') - - @CachedProperty - def protocols(self): - return self._get_metadata_property('protocols') - - @CachedProperty - def resolved_protocol(self): - # We need to ensure `protocols` exists in the metadata before attempting to - # access it directly since referencing service_model.protocols directly will - # raise an UndefinedModelAttributeError if protocols is not defined - if self.metadata.get('protocols'): - for protocol in PRIORITY_ORDERED_SUPPORTED_PROTOCOLS: - if protocol in self.protocols: - return protocol - raise UnsupportedServiceProtocolsError( - botocore_supported_protocols=PRIORITY_ORDERED_SUPPORTED_PROTOCOLS, - service_supported_protocols=self.protocols, - service=self.service_name, - ) - # If a service does not have a `protocols` trait, fall back to the legacy - # `protocol` trait - return self.protocol - - @CachedProperty - def endpoint_prefix(self): - return self._get_metadata_property('endpointPrefix') - - @CachedProperty - def endpoint_discovery_operation(self): - for operation in self.operation_names: - model = self.operation_model(operation) - if model.is_endpoint_discovery_operation: - return model - - @CachedProperty - def endpoint_discovery_required(self): - for operation in self.operation_names: - model = self.operation_model(operation) - if ( - model.endpoint_discovery is not None - and model.endpoint_discovery.get('required') - ): - return True - return False - - @CachedProperty - def client_context_parameters(self): - params = self._service_description.get('clientContextParams', {}) - return [ - ClientContextParameter( - name=param_name, - type=param_val['type'], - documentation=param_val['documentation'], - ) - for param_name, param_val in params.items() - ] - - def _get_metadata_property(self, name): - try: - return self.metadata[name] - except KeyError: - raise UndefinedModelAttributeError( - f'"{name}" not defined in the metadata of the model: {self}' - ) - - # Signature version is one of the rare properties - # that can be modified so a CachedProperty is not used here. - - @property - def signature_version(self): - if self._signature_version is NOT_SET: - signature_version = self.metadata.get('signatureVersion') - self._signature_version = signature_version - return self._signature_version - - @signature_version.setter - def signature_version(self, value): - self._signature_version = value - - @CachedProperty - def is_query_compatible(self): - return 'awsQueryCompatible' in self.metadata - - def __repr__(self): - return f'{self.__class__.__name__}({self.service_name})' - - -class OperationModel: - def __init__(self, operation_model, service_model, name=None): - """ - - :type operation_model: dict - :param operation_model: The operation model. This comes from the - service model, and is the value associated with the operation - name in the service model (i.e ``model['operations'][op_name]``). - - :type service_model: botocore.model.ServiceModel - :param service_model: The service model associated with the operation. - - :type name: string - :param name: The operation name. This is the operation name exposed to - the users of this model. 
This can potentially be different from - the "wire_name", which is the operation name that *must* by - provided over the wire. For example, given:: - - "CreateCloudFrontOriginAccessIdentity":{ - "name":"CreateCloudFrontOriginAccessIdentity2014_11_06", - ... - } - - The ``name`` would be ``CreateCloudFrontOriginAccessIdentity``, - but the ``self.wire_name`` would be - ``CreateCloudFrontOriginAccessIdentity2014_11_06``, which is the - value we must send in the corresponding HTTP request. - - """ - self._operation_model = operation_model - self._service_model = service_model - self._api_name = name - # Clients can access '.name' to get the operation name - # and '.metadata' to get the top level metdata of the service. - self._wire_name = operation_model.get('name') - self.metadata = service_model.metadata - self.http = operation_model.get('http', {}) - - @CachedProperty - def name(self): - if self._api_name is not None: - return self._api_name - else: - return self.wire_name - - @property - def wire_name(self): - """The wire name of the operation. - - In many situations this is the same value as the - ``name``, value, but in some services, the operation name - exposed to the user is different from the operation name - we send across the wire (e.g cloudfront). - - Any serialization code should use ``wire_name``. - - """ - return self._operation_model.get('name') - - @property - def service_model(self): - return self._service_model - - @CachedProperty - def documentation(self): - return self._operation_model.get('documentation', '') - - @CachedProperty - def deprecated(self): - return self._operation_model.get('deprecated', False) - - @CachedProperty - def endpoint_discovery(self): - # Explicit None default. An empty dictionary for this trait means it is - # enabled but not required to be used. - return self._operation_model.get('endpointdiscovery', None) - - @CachedProperty - def is_endpoint_discovery_operation(self): - return self._operation_model.get('endpointoperation', False) - - @CachedProperty - def input_shape(self): - if 'input' not in self._operation_model: - # Some operations do not accept any input and do not define an - # input shape. - return None - return self._service_model.resolve_shape_ref( - self._operation_model['input'] - ) - - @CachedProperty - def output_shape(self): - if 'output' not in self._operation_model: - # Some operations do not define an output shape, - # in which case we return None to indicate the - # operation has no expected output. 
- return None - return self._service_model.resolve_shape_ref( - self._operation_model['output'] - ) - - @CachedProperty - def idempotent_members(self): - input_shape = self.input_shape - if not input_shape: - return [] - - return [ - name - for (name, shape) in input_shape.members.items() - if 'idempotencyToken' in shape.metadata - and shape.metadata['idempotencyToken'] - ] - - @CachedProperty - def static_context_parameters(self): - params = self._operation_model.get('staticContextParams', {}) - return [ - StaticContextParameter(name=name, value=props.get('value')) - for name, props in params.items() - ] - - @CachedProperty - def context_parameters(self): - if not self.input_shape: - return [] - - return [ - ContextParameter( - name=shape.metadata['contextParam']['name'], - member_name=name, - ) - for name, shape in self.input_shape.members.items() - if 'contextParam' in shape.metadata - and 'name' in shape.metadata['contextParam'] - ] - - @CachedProperty - def operation_context_parameters(self): - return self._operation_model.get('operationContextParams', []) - - @CachedProperty - def request_compression(self): - return self._operation_model.get('requestcompression') - - @CachedProperty - def auth(self): - return self._operation_model.get('auth') - - @CachedProperty - def auth_type(self): - return self._operation_model.get('authtype') - - @CachedProperty - def resolved_auth_type(self): - if self.auth: - return resolve_auth_type(self.auth) - return self.auth_type - - @CachedProperty - def unsigned_payload(self): - return self._operation_model.get('unsignedPayload') - - @CachedProperty - def error_shapes(self): - shapes = self._operation_model.get("errors", []) - return list(self._service_model.resolve_shape_ref(s) for s in shapes) - - @CachedProperty - def endpoint(self): - return self._operation_model.get('endpoint') - - @CachedProperty - def http_checksum_required(self): - return self._operation_model.get('httpChecksumRequired', False) - - @CachedProperty - def http_checksum(self): - return self._operation_model.get('httpChecksum', {}) - - @CachedProperty - def has_event_stream_input(self): - return self.get_event_stream_input() is not None - - @CachedProperty - def has_event_stream_output(self): - return self.get_event_stream_output() is not None - - def get_event_stream_input(self): - return self._get_event_stream(self.input_shape) - - def get_event_stream_output(self): - return self._get_event_stream(self.output_shape) - - def _get_event_stream(self, shape): - """Returns the event stream member's shape if any or None otherwise.""" - if shape is None: - return None - event_name = shape.event_stream_name - if event_name: - return shape.members[event_name] - return None - - @CachedProperty - def has_streaming_input(self): - return self.get_streaming_input() is not None - - @CachedProperty - def has_streaming_output(self): - return self.get_streaming_output() is not None - - def get_streaming_input(self): - return self._get_streaming_body(self.input_shape) - - def get_streaming_output(self): - return self._get_streaming_body(self.output_shape) - - def _get_streaming_body(self, shape): - """Returns the streaming member's shape if any; or None otherwise.""" - if shape is None: - return None - payload = shape.serialization.get('payload') - if payload is not None: - payload_shape = shape.members[payload] - if payload_shape.type_name == 'blob': - return payload_shape - return None - - def __repr__(self): - return f'{self.__class__.__name__}(name={self.name})' - - -class ShapeResolver: - 
"""Resolves shape references.""" - - # Any type not in this mapping will default to the Shape class. - SHAPE_CLASSES = { - 'structure': StructureShape, - 'list': ListShape, - 'map': MapShape, - 'string': StringShape, - } - - def __init__(self, shape_map): - self._shape_map = shape_map - self._shape_cache = {} - - def get_shape_by_name(self, shape_name, member_traits=None): - try: - shape_model = self._shape_map[shape_name] - except KeyError: - raise NoShapeFoundError(shape_name) - try: - shape_cls = self.SHAPE_CLASSES.get(shape_model['type'], Shape) - except KeyError: - raise InvalidShapeError( - f"Shape is missing required key 'type': {shape_model}" - ) - if member_traits: - shape_model = shape_model.copy() - shape_model.update(member_traits) - result = shape_cls(shape_name, shape_model, self) - return result - - def resolve_shape_ref(self, shape_ref): - # A shape_ref is a dict that has a 'shape' key that - # refers to a shape name as well as any additional - # member traits that are then merged over the shape - # definition. For example: - # {"shape": "StringType", "locationName": "Foobar"} - if len(shape_ref) == 1 and 'shape' in shape_ref: - # It's just a shape ref with no member traits, we can avoid - # a .copy(). This is the common case so it's specifically - # called out here. - return self.get_shape_by_name(shape_ref['shape']) - else: - member_traits = shape_ref.copy() - try: - shape_name = member_traits.pop('shape') - except KeyError: - raise InvalidShapeReferenceError( - f"Invalid model, missing shape reference: {shape_ref}" - ) - return self.get_shape_by_name(shape_name, member_traits) - - -class UnresolvableShapeMap: - """A ShapeResolver that will throw ValueErrors when shapes are resolved.""" - - def get_shape_by_name(self, shape_name, member_traits=None): - raise ValueError( - f"Attempted to lookup shape '{shape_name}', but no shape map was provided." - ) - - def resolve_shape_ref(self, shape_ref): - raise ValueError( - f"Attempted to resolve shape '{shape_ref}', but no shape " - f"map was provided." - ) - - -class DenormalizedStructureBuilder: - """Build a StructureShape from a denormalized model. - - This is a convenience builder class that makes it easy to construct - ``StructureShape``s based on a denormalized model. - - It will handle the details of creating unique shape names and creating - the appropriate shape map needed by the ``StructureShape`` class. - - Example usage:: - - builder = DenormalizedStructureBuilder() - shape = builder.with_members({ - 'A': { - 'type': 'structure', - 'members': { - 'B': { - 'type': 'structure', - 'members': { - 'C': { - 'type': 'string', - } - } - } - } - } - }).build_model() - # ``shape`` is now an instance of botocore.model.StructureShape - - :type dict_type: class - :param dict_type: The dictionary type to use, allowing you to opt-in - to using OrderedDict or another dict type. This can - be particularly useful for testing when order - matters, such as for documentation. - - """ - - SCALAR_TYPES = ( - 'string', - 'integer', - 'boolean', - 'blob', - 'float', - 'timestamp', - 'long', - 'double', - 'char', - ) - - def __init__(self, name=None): - self.members = OrderedDict() - self._name_generator = ShapeNameGenerator() - if name is None: - self.name = self._name_generator.new_shape_name('structure') - - def with_members(self, members): - """ - - :type members: dict - :param members: The denormalized members. 
- - :return: self - - """ - self._members = members - return self - - def build_model(self): - """Build the model based on the provided members. - - :rtype: botocore.model.StructureShape - :return: The built StructureShape object. - - """ - shapes = OrderedDict() - denormalized = { - 'type': 'structure', - 'members': self._members, - } - self._build_model(denormalized, shapes, self.name) - resolver = ShapeResolver(shape_map=shapes) - return StructureShape( - shape_name=self.name, - shape_model=shapes[self.name], - shape_resolver=resolver, - ) - - def _build_model(self, model, shapes, shape_name): - if model['type'] == 'structure': - shapes[shape_name] = self._build_structure(model, shapes) - elif model['type'] == 'list': - shapes[shape_name] = self._build_list(model, shapes) - elif model['type'] == 'map': - shapes[shape_name] = self._build_map(model, shapes) - elif model['type'] in self.SCALAR_TYPES: - shapes[shape_name] = self._build_scalar(model) - else: - raise InvalidShapeError(f"Unknown shape type: {model['type']}") - - def _build_structure(self, model, shapes): - members = OrderedDict() - shape = self._build_initial_shape(model) - shape['members'] = members - - for name, member_model in model.get('members', OrderedDict()).items(): - member_shape_name = self._get_shape_name(member_model) - members[name] = {'shape': member_shape_name} - self._build_model(member_model, shapes, member_shape_name) - return shape - - def _build_list(self, model, shapes): - member_shape_name = self._get_shape_name(model) - shape = self._build_initial_shape(model) - shape['member'] = {'shape': member_shape_name} - self._build_model(model['member'], shapes, member_shape_name) - return shape - - def _build_map(self, model, shapes): - key_shape_name = self._get_shape_name(model['key']) - value_shape_name = self._get_shape_name(model['value']) - shape = self._build_initial_shape(model) - shape['key'] = {'shape': key_shape_name} - shape['value'] = {'shape': value_shape_name} - self._build_model(model['key'], shapes, key_shape_name) - self._build_model(model['value'], shapes, value_shape_name) - return shape - - def _build_initial_shape(self, model): - shape = { - 'type': model['type'], - } - if 'documentation' in model: - shape['documentation'] = model['documentation'] - for attr in Shape.METADATA_ATTRS: - if attr in model: - shape[attr] = model[attr] - return shape - - def _build_scalar(self, model): - return self._build_initial_shape(model) - - def _get_shape_name(self, model): - if 'shape_name' in model: - return model['shape_name'] - else: - return self._name_generator.new_shape_name(model['type']) - - -class ShapeNameGenerator: - """Generate unique shape names for a type. - - This class can be used in conjunction with the DenormalizedStructureBuilder - to generate unique shape names for a given type. - - """ - - def __init__(self): - self._name_cache = defaultdict(int) - - def new_shape_name(self, type_name): - """Generate a unique shape name. - - This method will guarantee a unique shape name each time it is - called with the same type. - - :: - - >>> s = ShapeNameGenerator() - >>> s.new_shape_name('structure') - 'StructureType1' - >>> s.new_shape_name('structure') - 'StructureType2' - >>> s.new_shape_name('list') - 'ListType1' - >>> s.new_shape_name('list') - 'ListType2' - - - :type type_name: string - :param type_name: The type name (structure, list, map, string, etc.) 
- - :rtype: string - :return: A unique shape name for the given type - - """ - self._name_cache[type_name] += 1 - current_index = self._name_cache[type_name] - return f'{type_name.capitalize()}Type{current_index}' diff --git a/venv/Lib/site-packages/botocore/monitoring.py b/venv/Lib/site-packages/botocore/monitoring.py deleted file mode 100644 index d57cf0a..0000000 --- a/venv/Lib/site-packages/botocore/monitoring.py +++ /dev/null @@ -1,586 +0,0 @@ -# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import json -import logging -import re -import time - -from botocore.compat import ensure_bytes, ensure_unicode, urlparse -from botocore.retryhandler import EXCEPTION_MAP as RETRYABLE_EXCEPTIONS - -logger = logging.getLogger(__name__) - - -class Monitor: - _EVENTS_TO_REGISTER = [ - 'before-parameter-build', - 'request-created', - 'response-received', - 'after-call', - 'after-call-error', - ] - - def __init__(self, adapter, publisher): - """Abstraction for monitoring clients API calls - - :param adapter: An adapter that takes event emitter events - and produces monitor events - - :param publisher: A publisher for generated monitor events - """ - self._adapter = adapter - self._publisher = publisher - - def register(self, event_emitter): - """Register an event emitter to the monitor""" - for event_to_register in self._EVENTS_TO_REGISTER: - event_emitter.register_last(event_to_register, self.capture) - - def capture(self, event_name, **payload): - """Captures an incoming event from the event emitter - - It will feed an event emitter event to the monitor's adaptor to create - a monitor event and then publish that event to the monitor's publisher. 
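Taken together, the classes in this module form botocore's client-side monitoring (CSM) pipeline: the adapter converts event-emitter events into monitor events, the serializer (``CSMSerializer``, defined further down) renders them as CSM JSON, and the publisher (``SocketPublisher``) sends them to a socket address. A rough wiring sketch, assuming an already-created botocore client named ``client``; the client id and socket address are illustrative::

    import socket

    from botocore.monitoring import (
        CSMSerializer,
        Monitor,
        MonitorEventAdapter,
        SocketPublisher,
    )

    adapter = MonitorEventAdapter()
    serializer = CSMSerializer(csm_client_id='my-app')
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    publisher = SocketPublisher(sock, '127.0.0.1', 31000, serializer)

    monitor = Monitor(adapter, publisher)
    monitor.register(client.meta.events)  # every API call now produces CSM events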
- """ - try: - monitor_event = self._adapter.feed(event_name, payload) - if monitor_event: - self._publisher.publish(monitor_event) - except Exception as e: - logger.debug( - 'Exception %s raised by client monitor in handling event %s', - e, - event_name, - exc_info=True, - ) - - -class MonitorEventAdapter: - def __init__(self, time=time.time): - """Adapts event emitter events to produce monitor events - - :type time: callable - :param time: A callable that produces the current time - """ - self._time = time - - def feed(self, emitter_event_name, emitter_payload): - """Feed an event emitter event to generate a monitor event - - :type emitter_event_name: str - :param emitter_event_name: The name of the event emitted - - :type emitter_payload: dict - :param emitter_payload: The payload to associated to the event - emitted - - :rtype: BaseMonitorEvent - :returns: A monitor event based on the event emitter events - fired - """ - return self._get_handler(emitter_event_name)(**emitter_payload) - - def _get_handler(self, event_name): - return getattr( - self, '_handle_' + event_name.split('.')[0].replace('-', '_') - ) - - def _handle_before_parameter_build(self, model, context, **kwargs): - context['current_api_call_event'] = APICallEvent( - service=model.service_model.service_id, - operation=model.wire_name, - timestamp=self._get_current_time(), - ) - - def _handle_request_created(self, request, **kwargs): - context = request.context - new_attempt_event = context[ - 'current_api_call_event' - ].new_api_call_attempt(timestamp=self._get_current_time()) - new_attempt_event.request_headers = request.headers - new_attempt_event.url = request.url - context['current_api_call_attempt_event'] = new_attempt_event - - def _handle_response_received( - self, parsed_response, context, exception, **kwargs - ): - attempt_event = context.pop('current_api_call_attempt_event') - attempt_event.latency = self._get_latency(attempt_event) - if parsed_response is not None: - attempt_event.http_status_code = parsed_response[ - 'ResponseMetadata' - ]['HTTPStatusCode'] - attempt_event.response_headers = parsed_response[ - 'ResponseMetadata' - ]['HTTPHeaders'] - attempt_event.parsed_error = parsed_response.get('Error') - else: - attempt_event.wire_exception = exception - return attempt_event - - def _handle_after_call(self, context, parsed, **kwargs): - context['current_api_call_event'].retries_exceeded = parsed[ - 'ResponseMetadata' - ].get('MaxAttemptsReached', False) - return self._complete_api_call(context) - - def _handle_after_call_error(self, context, exception, **kwargs): - # If the after-call-error was emitted and the error being raised - # was a retryable connection error, then the retries must have exceeded - # for that exception as this event gets emitted **after** retries - # happen. 
- context[ - 'current_api_call_event' - ].retries_exceeded = self._is_retryable_exception(exception) - return self._complete_api_call(context) - - def _is_retryable_exception(self, exception): - return isinstance( - exception, tuple(RETRYABLE_EXCEPTIONS['GENERAL_CONNECTION_ERROR']) - ) - - def _complete_api_call(self, context): - call_event = context.pop('current_api_call_event') - call_event.latency = self._get_latency(call_event) - return call_event - - def _get_latency(self, event): - return self._get_current_time() - event.timestamp - - def _get_current_time(self): - return int(self._time() * 1000) - - -class BaseMonitorEvent: - def __init__(self, service, operation, timestamp): - """Base monitor event - - :type service: str - :param service: A string identifying the service associated to - the event - - :type operation: str - :param operation: A string identifying the operation of service - associated to the event - - :type timestamp: int - :param timestamp: Epoch time in milliseconds from when the event began - """ - self.service = service - self.operation = operation - self.timestamp = timestamp - - def __repr__(self): - return f'{self.__class__.__name__}({self.__dict__!r})' - - def __eq__(self, other): - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return False - - -class APICallEvent(BaseMonitorEvent): - def __init__( - self, - service, - operation, - timestamp, - latency=None, - attempts=None, - retries_exceeded=False, - ): - """Monitor event for a single API call - - This event corresponds to a single client method call, which includes - every HTTP requests attempt made in order to complete the client call - - :type service: str - :param service: A string identifying the service associated to - the event - - :type operation: str - :param operation: A string identifying the operation of service - associated to the event - - :type timestamp: int - :param timestamp: Epoch time in milliseconds from when the event began - - :type latency: int - :param latency: The time in milliseconds to complete the client call - - :type attempts: list - :param attempts: The list of APICallAttempts associated to the - APICall - - :type retries_exceeded: bool - :param retries_exceeded: True if API call exceeded retries. False - otherwise - """ - super().__init__( - service=service, operation=operation, timestamp=timestamp - ) - self.latency = latency - self.attempts = attempts - if attempts is None: - self.attempts = [] - self.retries_exceeded = retries_exceeded - - def new_api_call_attempt(self, timestamp): - """Instantiates APICallAttemptEvent associated to the APICallEvent - - :type timestamp: int - :param timestamp: Epoch time in milliseconds to associate to the - APICallAttemptEvent - """ - attempt_event = APICallAttemptEvent( - service=self.service, operation=self.operation, timestamp=timestamp - ) - self.attempts.append(attempt_event) - return attempt_event - - -class APICallAttemptEvent(BaseMonitorEvent): - def __init__( - self, - service, - operation, - timestamp, - latency=None, - url=None, - http_status_code=None, - request_headers=None, - response_headers=None, - parsed_error=None, - wire_exception=None, - ): - """Monitor event for a single API call attempt - - This event corresponds to a single HTTP request attempt in completing - the entire client method call. 
- - :type service: str - :param service: A string identifying the service associated to - the event - - :type operation: str - :param operation: A string identifying the operation of service - associated to the event - - :type timestamp: int - :param timestamp: Epoch time in milliseconds from when the HTTP request - started - - :type latency: int - :param latency: The time in milliseconds to complete the HTTP request - whether it succeeded or failed - - :type url: str - :param url: The URL the attempt was sent to - - :type http_status_code: int - :param http_status_code: The HTTP status code of the HTTP response - if there was a response - - :type request_headers: dict - :param request_headers: The HTTP headers sent in making the HTTP - request - - :type response_headers: dict - :param response_headers: The HTTP headers returned in the HTTP response - if there was a response - - :type parsed_error: dict - :param parsed_error: The error parsed if the service returned an - error back - - :type wire_exception: Exception - :param wire_exception: The exception raised in sending the HTTP - request (i.e. ConnectionError) - """ - super().__init__( - service=service, operation=operation, timestamp=timestamp - ) - self.latency = latency - self.url = url - self.http_status_code = http_status_code - self.request_headers = request_headers - self.response_headers = response_headers - self.parsed_error = parsed_error - self.wire_exception = wire_exception - - -class CSMSerializer: - _MAX_CLIENT_ID_LENGTH = 255 - _MAX_EXCEPTION_CLASS_LENGTH = 128 - _MAX_ERROR_CODE_LENGTH = 128 - _MAX_USER_AGENT_LENGTH = 256 - _MAX_MESSAGE_LENGTH = 512 - _RESPONSE_HEADERS_TO_EVENT_ENTRIES = { - 'x-amzn-requestid': 'XAmznRequestId', - 'x-amz-request-id': 'XAmzRequestId', - 'x-amz-id-2': 'XAmzId2', - } - _AUTH_REGEXS = { - 'v4': re.compile( - r'AWS4-HMAC-SHA256 ' - r'Credential=(?P\w+)/\d+/' - r'(?P[a-z0-9-]+)/' - ), - 's3': re.compile(r'AWS (?P\w+):'), - } - _SERIALIZEABLE_EVENT_PROPERTIES = [ - 'service', - 'operation', - 'timestamp', - 'attempts', - 'latency', - 'retries_exceeded', - 'url', - 'request_headers', - 'http_status_code', - 'response_headers', - 'parsed_error', - 'wire_exception', - ] - - def __init__(self, csm_client_id): - """Serializes monitor events to CSM (Client Side Monitoring) format - - :type csm_client_id: str - :param csm_client_id: The application identifier to associate - to the serialized events - """ - self._validate_client_id(csm_client_id) - self.csm_client_id = csm_client_id - - def _validate_client_id(self, csm_client_id): - if len(csm_client_id) > self._MAX_CLIENT_ID_LENGTH: - raise ValueError( - f'The value provided for csm_client_id: {csm_client_id} exceeds ' - f'the maximum length of {self._MAX_CLIENT_ID_LENGTH} characters' - ) - - def serialize(self, event): - """Serializes a monitor event to the CSM format - - :type event: BaseMonitorEvent - :param event: The event to serialize to bytes - - :rtype: bytes - :returns: The CSM serialized form of the event - """ - event_dict = self._get_base_event_dict(event) - event_type = self._get_event_type(event) - event_dict['Type'] = event_type - for attr in self._SERIALIZEABLE_EVENT_PROPERTIES: - value = getattr(event, attr, None) - if value is not None: - getattr(self, '_serialize_' + attr)( - value, event_dict, event_type=event_type - ) - return ensure_bytes(json.dumps(event_dict, separators=(',', ':'))) - - def _get_base_event_dict(self, event): - return { - 'Version': 1, - 'ClientId': self.csm_client_id, - } - - def _serialize_service(self, 
service, event_dict, **kwargs): - event_dict['Service'] = service - - def _serialize_operation(self, operation, event_dict, **kwargs): - event_dict['Api'] = operation - - def _serialize_timestamp(self, timestamp, event_dict, **kwargs): - event_dict['Timestamp'] = timestamp - - def _serialize_attempts(self, attempts, event_dict, **kwargs): - event_dict['AttemptCount'] = len(attempts) - if attempts: - self._add_fields_from_last_attempt(event_dict, attempts[-1]) - - def _add_fields_from_last_attempt(self, event_dict, last_attempt): - if last_attempt.request_headers: - # It does not matter which attempt to use to grab the region - # for the ApiCall event, but SDKs typically do the last one. - region = self._get_region(last_attempt.request_headers) - if region is not None: - event_dict['Region'] = region - event_dict['UserAgent'] = self._get_user_agent( - last_attempt.request_headers - ) - if last_attempt.http_status_code is not None: - event_dict['FinalHttpStatusCode'] = last_attempt.http_status_code - if last_attempt.parsed_error is not None: - self._serialize_parsed_error( - last_attempt.parsed_error, event_dict, 'ApiCall' - ) - if last_attempt.wire_exception is not None: - self._serialize_wire_exception( - last_attempt.wire_exception, event_dict, 'ApiCall' - ) - - def _serialize_latency(self, latency, event_dict, event_type): - if event_type == 'ApiCall': - event_dict['Latency'] = latency - elif event_type == 'ApiCallAttempt': - event_dict['AttemptLatency'] = latency - - def _serialize_retries_exceeded( - self, retries_exceeded, event_dict, **kwargs - ): - event_dict['MaxRetriesExceeded'] = 1 if retries_exceeded else 0 - - def _serialize_url(self, url, event_dict, **kwargs): - event_dict['Fqdn'] = urlparse(url).netloc - - def _serialize_request_headers( - self, request_headers, event_dict, **kwargs - ): - event_dict['UserAgent'] = self._get_user_agent(request_headers) - if self._is_signed(request_headers): - event_dict['AccessKey'] = self._get_access_key(request_headers) - region = self._get_region(request_headers) - if region is not None: - event_dict['Region'] = region - if 'X-Amz-Security-Token' in request_headers: - event_dict['SessionToken'] = request_headers[ - 'X-Amz-Security-Token' - ] - - def _serialize_http_status_code( - self, http_status_code, event_dict, **kwargs - ): - event_dict['HttpStatusCode'] = http_status_code - - def _serialize_response_headers( - self, response_headers, event_dict, **kwargs - ): - for header, entry in self._RESPONSE_HEADERS_TO_EVENT_ENTRIES.items(): - if header in response_headers: - event_dict[entry] = response_headers[header] - - def _serialize_parsed_error( - self, parsed_error, event_dict, event_type, **kwargs - ): - field_prefix = 'Final' if event_type == 'ApiCall' else '' - event_dict[field_prefix + 'AwsException'] = self._truncate( - parsed_error['Code'], self._MAX_ERROR_CODE_LENGTH - ) - event_dict[field_prefix + 'AwsExceptionMessage'] = self._truncate( - parsed_error['Message'], self._MAX_MESSAGE_LENGTH - ) - - def _serialize_wire_exception( - self, wire_exception, event_dict, event_type, **kwargs - ): - field_prefix = 'Final' if event_type == 'ApiCall' else '' - event_dict[field_prefix + 'SdkException'] = self._truncate( - wire_exception.__class__.__name__, self._MAX_EXCEPTION_CLASS_LENGTH - ) - event_dict[field_prefix + 'SdkExceptionMessage'] = self._truncate( - str(wire_exception), self._MAX_MESSAGE_LENGTH - ) - - def _get_event_type(self, event): - if isinstance(event, APICallEvent): - return 'ApiCall' - elif isinstance(event, 
APICallAttemptEvent): - return 'ApiCallAttempt' - - def _get_access_key(self, request_headers): - auth_val = self._get_auth_value(request_headers) - _, auth_match = self._get_auth_match(auth_val) - return auth_match.group('access_key') - - def _get_region(self, request_headers): - if not self._is_signed(request_headers): - return None - auth_val = self._get_auth_value(request_headers) - signature_version, auth_match = self._get_auth_match(auth_val) - if signature_version != 'v4': - return None - return auth_match.group('signing_region') - - def _get_user_agent(self, request_headers): - return self._truncate( - ensure_unicode(request_headers.get('User-Agent', '')), - self._MAX_USER_AGENT_LENGTH, - ) - - def _is_signed(self, request_headers): - return 'Authorization' in request_headers - - def _get_auth_value(self, request_headers): - return ensure_unicode(request_headers['Authorization']) - - def _get_auth_match(self, auth_val): - for signature_version, regex in self._AUTH_REGEXS.items(): - match = regex.match(auth_val) - if match: - return signature_version, match - return None, None - - def _truncate(self, text, max_length): - if len(text) > max_length: - logger.debug( - 'Truncating following value to maximum length of %s: %s', - text, - max_length, - ) - return text[:max_length] - return text - - -class SocketPublisher: - _MAX_MONITOR_EVENT_LENGTH = 8 * 1024 - - def __init__(self, socket, host, port, serializer): - """Publishes monitor events to a socket - - :type socket: socket.socket - :param socket: The socket object to use to publish events - - :type host: string - :param host: The host to send events to - - :type port: integer - :param port: The port on the host to send events to - - :param serializer: The serializer to use to serialize the event - to a form that can be published to the socket. This must - have a `serialize()` method that accepts a monitor event - and return bytes - """ - self._socket = socket - self._address = (host, port) - self._serializer = serializer - - def publish(self, event): - """Publishes a specified monitor event - - :type event: BaseMonitorEvent - :param event: The monitor event to be sent - over the publisher's socket to the desired address. - """ - serialized_event = self._serializer.serialize(event) - if len(serialized_event) > self._MAX_MONITOR_EVENT_LENGTH: - logger.debug( - 'Serialized event of size %s exceeds the maximum length ' - 'allowed: %s. Not sending event to socket.', - len(serialized_event), - self._MAX_MONITOR_EVENT_LENGTH, - ) - return - self._socket.sendto(serialized_event, self._address) diff --git a/venv/Lib/site-packages/botocore/paginate.py b/venv/Lib/site-packages/botocore/paginate.py deleted file mode 100644 index 63b4a29..0000000 --- a/venv/Lib/site-packages/botocore/paginate.py +++ /dev/null @@ -1,728 +0,0 @@ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
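The module removed below implements botocore's pagination interface: opaque token encoding (``TokenEncoder``/``TokenDecoder``), page iteration (``PageIterator``), and the ``Paginator`` class used by clients. For orientation, typical use through a client looks roughly like this; the client construction, bucket name and operation are illustrative::

    import botocore.session

    client = botocore.session.get_session().create_client(
        's3', region_name='us-east-1'
    )
    paginator = client.get_paginator('list_objects_v2')

    pages = paginator.paginate(
        Bucket='my-bucket',
        PaginationConfig={'MaxItems': 100, 'PageSize': 25},
    )
    # Flat-map a JMESPath expression over every page of results.
    for key in pages.search('Contents[].Key'):
        print(key)

    # Or aggregate all pages (plus a resumable 'NextToken') into one dict:
    # result = paginator.paginate(Bucket='my-bucket').build_full_result()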
- -import base64 -import json -import logging -from functools import partial -from itertools import tee - -import jmespath - -from botocore.context import with_current_context -from botocore.exceptions import PaginationError -from botocore.useragent import register_feature_id -from botocore.utils import merge_dicts, set_value_from_jmespath - -log = logging.getLogger(__name__) - - -class TokenEncoder: - """Encodes dictionaries into opaque strings. - - This for the most part json dumps + base64 encoding, but also supports - having bytes in the dictionary in addition to the types that json can - handle by default. - - This is intended for use in encoding pagination tokens, which in some - cases can be complex structures and / or contain bytes. - """ - - def encode(self, token): - """Encodes a dictionary to an opaque string. - - :type token: dict - :param token: A dictionary containing pagination information, - particularly the service pagination token(s) but also other boto - metadata. - - :rtype: str - :returns: An opaque string - """ - try: - # Try just using json dumps first to avoid having to traverse - # and encode the dict. In 99.9999% of cases this will work. - json_string = json.dumps(token) - except (TypeError, UnicodeDecodeError): - # If normal dumping failed, go through and base64 encode all bytes. - encoded_token, encoded_keys = self._encode(token, []) - - # Save the list of all the encoded key paths. We can safely - # assume that no service will ever use this key. - encoded_token['boto_encoded_keys'] = encoded_keys - - # Now that the bytes are all encoded, dump the json. - json_string = json.dumps(encoded_token) - - # base64 encode the json string to produce an opaque token string. - return base64.b64encode(json_string.encode('utf-8')).decode('utf-8') - - def _encode(self, data, path): - """Encode bytes in given data, keeping track of the path traversed.""" - if isinstance(data, dict): - return self._encode_dict(data, path) - elif isinstance(data, list): - return self._encode_list(data, path) - elif isinstance(data, bytes): - return self._encode_bytes(data, path) - else: - return data, [] - - def _encode_list(self, data, path): - """Encode any bytes in a list, noting the index of what is encoded.""" - new_data = [] - encoded = [] - for i, value in enumerate(data): - new_path = path + [i] - new_value, new_encoded = self._encode(value, new_path) - new_data.append(new_value) - encoded.extend(new_encoded) - return new_data, encoded - - def _encode_dict(self, data, path): - """Encode any bytes in a dict, noting the index of what is encoded.""" - new_data = {} - encoded = [] - for key, value in data.items(): - new_path = path + [key] - new_value, new_encoded = self._encode(value, new_path) - new_data[key] = new_value - encoded.extend(new_encoded) - return new_data, encoded - - def _encode_bytes(self, data, path): - """Base64 encode a byte string.""" - return base64.b64encode(data).decode('utf-8'), [path] - - -class TokenDecoder: - """Decodes token strings back into dictionaries. - - This performs the inverse operation to the TokenEncoder, accepting - opaque strings and decoding them into a useable form. - """ - - def decode(self, token): - """Decodes an opaque string to a dictionary. - - :type token: str - :param token: A token string given by the botocore pagination - interface. - - :rtype: dict - :returns: A dictionary containing pagination information, - particularly the service pagination token(s) but also other boto - metadata. 
- """ - json_string = base64.b64decode(token.encode('utf-8')).decode('utf-8') - decoded_token = json.loads(json_string) - - # Remove the encoding metadata as it is read since it will no longer - # be needed. - encoded_keys = decoded_token.pop('boto_encoded_keys', None) - if encoded_keys is None: - return decoded_token - else: - return self._decode(decoded_token, encoded_keys) - - def _decode(self, token, encoded_keys): - """Find each encoded value and decode it.""" - for key in encoded_keys: - encoded = self._path_get(token, key) - decoded = base64.b64decode(encoded.encode('utf-8')) - self._path_set(token, key, decoded) - return token - - def _path_get(self, data, path): - """Return the nested data at the given path. - - For instance: - data = {'foo': ['bar', 'baz']} - path = ['foo', 0] - ==> 'bar' - """ - # jmespath isn't used here because it would be difficult to actually - # create the jmespath query when taking all of the unknowns of key - # structure into account. Gross though this is, it is simple and not - # very error prone. - d = data - for step in path: - d = d[step] - return d - - def _path_set(self, data, path, value): - """Set the value of a key in the given data. - - Example: - data = {'foo': ['bar', 'baz']} - path = ['foo', 1] - value = 'bin' - ==> data = {'foo': ['bar', 'bin']} - """ - container = self._path_get(data, path[:-1]) - container[path[-1]] = value - - -class PaginatorModel: - def __init__(self, paginator_config): - self._paginator_config = paginator_config['pagination'] - - def get_paginator(self, operation_name): - try: - single_paginator_config = self._paginator_config[operation_name] - except KeyError: - raise ValueError( - f"Paginator for operation does not exist: {operation_name}" - ) - return single_paginator_config - - -class PageIterator: - """An iterable object to paginate API results. - Please note it is NOT a python iterator. - Use ``iter`` to wrap this as a generator. 
- """ - - def __init__( - self, - method, - input_token, - output_token, - more_results, - result_keys, - non_aggregate_keys, - limit_key, - max_items, - starting_token, - page_size, - op_kwargs, - ): - self._method = method - self._input_token = input_token - self._output_token = output_token - self._more_results = more_results - self._result_keys = result_keys - self._max_items = max_items - self._limit_key = limit_key - self._starting_token = starting_token - self._page_size = page_size - self._op_kwargs = op_kwargs - self._resume_token = None - self._non_aggregate_key_exprs = non_aggregate_keys - self._non_aggregate_part = {} - self._token_encoder = TokenEncoder() - self._token_decoder = TokenDecoder() - - @property - def result_keys(self): - return self._result_keys - - @property - def resume_token(self): - """Token to specify to resume pagination.""" - return self._resume_token - - @resume_token.setter - def resume_token(self, value): - if not isinstance(value, dict): - raise ValueError(f"Bad starting token: {value}") - - if 'boto_truncate_amount' in value: - token_keys = sorted(self._input_token + ['boto_truncate_amount']) - else: - token_keys = sorted(self._input_token) - dict_keys = sorted(value.keys()) - - if token_keys == dict_keys: - self._resume_token = self._token_encoder.encode(value) - else: - raise ValueError(f"Bad starting token: {value}") - - @property - def non_aggregate_part(self): - return self._non_aggregate_part - - def __iter__(self): - current_kwargs = self._op_kwargs - previous_next_token = None - next_token = {key: None for key in self._input_token} - if self._starting_token is not None: - # If the starting token exists, populate the next_token with the - # values inside it. This ensures that we have the service's - # pagination token on hand if we need to truncate after the - # first response. - next_token = self._parse_starting_token()[0] - # The number of items from result_key we've seen so far. - total_items = 0 - first_request = True - primary_result_key = self.result_keys[0] - starting_truncation = 0 - self._inject_starting_params(current_kwargs) - while True: - response = self._make_request(current_kwargs) - parsed = self._extract_parsed_response(response) - if first_request: - # The first request is handled differently. We could - # possibly have a resume/starting token that tells us where - # to index into the retrieved page. - if self._starting_token is not None: - starting_truncation = self._handle_first_request( - parsed, primary_result_key, starting_truncation - ) - first_request = False - self._record_non_aggregate_key_values(parsed) - else: - # If this isn't the first request, we have already sliced into - # the first request and had to make additional requests after. - # We no longer need to add this to truncation. 
- starting_truncation = 0 - current_response = primary_result_key.search(parsed) - if current_response is None: - current_response = [] - num_current_response = len(current_response) - truncate_amount = 0 - if self._max_items is not None: - truncate_amount = ( - total_items + num_current_response - self._max_items - ) - if truncate_amount > 0: - self._truncate_response( - parsed, - primary_result_key, - truncate_amount, - starting_truncation, - next_token, - ) - yield response - break - else: - yield response - total_items += num_current_response - next_token = self._get_next_token(parsed) - if all(t is None for t in next_token.values()): - break - if ( - self._max_items is not None - and total_items == self._max_items - ): - # We're on a page boundary so we can set the current - # next token to be the resume token. - self.resume_token = next_token - break - if ( - previous_next_token is not None - and previous_next_token == next_token - ): - message = ( - f"The same next token was received twice: {next_token}" - ) - raise PaginationError(message=message) - self._inject_token_into_kwargs(current_kwargs, next_token) - previous_next_token = next_token - - def search(self, expression): - """Applies a JMESPath expression to a paginator - - Each page of results is searched using the provided JMESPath - expression. If the result is not a list, it is yielded - directly. If the result is a list, each element in the result - is yielded individually (essentially implementing a flatmap in - which the JMESPath search is the mapping function). - - :type expression: str - :param expression: JMESPath expression to apply to each page. - - :return: Returns an iterator that yields the individual - elements of applying a JMESPath expression to each page of - results. - """ - compiled = jmespath.compile(expression) - for page in self: - results = compiled.search(page) - if isinstance(results, list): - yield from results - else: - # Yield result directly if it is not a list. - yield results - - @with_current_context(partial(register_feature_id, 'PAGINATOR')) - def _make_request(self, current_kwargs): - return self._method(**current_kwargs) - - def _extract_parsed_response(self, response): - return response - - def _record_non_aggregate_key_values(self, response): - non_aggregate_keys = {} - for expression in self._non_aggregate_key_exprs: - result = expression.search(response) - set_value_from_jmespath( - non_aggregate_keys, expression.expression, result - ) - self._non_aggregate_part = non_aggregate_keys - - def _inject_starting_params(self, op_kwargs): - # If the user has specified a starting token we need to - # inject that into the operation's kwargs. - if self._starting_token is not None: - # Don't need to do anything special if there is no starting - # token specified. - next_token = self._parse_starting_token()[0] - self._inject_token_into_kwargs(op_kwargs, next_token) - if self._page_size is not None: - # Pass the page size as the parameter name for limiting - # page size, also known as the limit_key. - op_kwargs[self._limit_key] = self._page_size - - def _inject_token_into_kwargs(self, op_kwargs, next_token): - for name, token in next_token.items(): - if (token is not None) and (token != 'None'): - op_kwargs[name] = token - elif name in op_kwargs: - del op_kwargs[name] - - def _handle_first_request( - self, parsed, primary_result_key, starting_truncation - ): - # If the payload is an array or string, we need to slice into it - # and only return the truncated amount. 
- starting_truncation = self._parse_starting_token()[1] - all_data = primary_result_key.search(parsed) - if isinstance(all_data, (list, str)): - data = all_data[starting_truncation:] - else: - data = None - set_value_from_jmespath(parsed, primary_result_key.expression, data) - # We also need to truncate any secondary result keys - # because they were not truncated in the previous last - # response. - for token in self.result_keys: - if token == primary_result_key: - continue - sample = token.search(parsed) - if isinstance(sample, list): - empty_value = [] - elif isinstance(sample, str): - empty_value = '' - elif isinstance(sample, (int, float)): - # Even though we may be resuming from a truncated page, we - # still start from the actual numeric secondary result. For - # DynamoDB's Count/ScannedCount, this will still show how many - # items the server evaluated, even if the client is truncating - # due to a StartingToken. - empty_value = sample - else: - empty_value = None - set_value_from_jmespath(parsed, token.expression, empty_value) - return starting_truncation - - def _truncate_response( - self, - parsed, - primary_result_key, - truncate_amount, - starting_truncation, - next_token, - ): - original = primary_result_key.search(parsed) - if original is None: - original = [] - amount_to_keep = len(original) - truncate_amount - truncated = original[:amount_to_keep] - set_value_from_jmespath( - parsed, primary_result_key.expression, truncated - ) - # The issue here is that even though we know how much we've truncated - # we need to account for this globally including any starting - # left truncation. For example: - # Raw response: [0,1,2,3] - # Starting index: 1 - # Max items: 1 - # Starting left truncation: [1, 2, 3] - # End right truncation for max items: [1] - # However, even though we only kept 1, this is post - # left truncation so the next starting index should be 2, not 1 - # (left_truncation + amount_to_keep). - next_token['boto_truncate_amount'] = ( - amount_to_keep + starting_truncation - ) - self.resume_token = next_token - - def _get_next_token(self, parsed): - if self._more_results is not None: - if not self._more_results.search(parsed): - return {} - next_tokens = {} - for output_token, input_key in zip( - self._output_token, self._input_token - ): - next_token = output_token.search(parsed) - # We do not want to include any empty strings as actual tokens. - # Treat them as None. - if next_token: - next_tokens[input_key] = next_token - else: - next_tokens[input_key] = None - return next_tokens - - def result_key_iters(self): - teed_results = tee(self, len(self.result_keys)) - return [ - ResultKeyIterator(i, result_key) - for i, result_key in zip(teed_results, self.result_keys) - ] - - def build_full_result(self): - complete_result = {} - for response in self: - page = response - # We want to try to catch operation object pagination - # and format correctly for those. They come in the form - # of a tuple of two elements: (http_response, parsed_responsed). - # We want the parsed_response as that is what the page iterator - # uses. We can remove it though once operation objects are removed. - if isinstance(response, tuple) and len(response) == 2: - page = response[1] - # We're incrementally building the full response page - # by page. For each page in the response we need to - # inject the necessary components from the page - # into the complete_result. 
- for result_expression in self.result_keys: - # In order to incrementally update a result key - # we need to search the existing value from complete_result, - # then we need to search the _current_ page for the - # current result key value. Then we append the current - # value onto the existing value, and re-set that value - # as the new value. - result_value = result_expression.search(page) - if result_value is None: - continue - existing_value = result_expression.search(complete_result) - if existing_value is None: - # Set the initial result - set_value_from_jmespath( - complete_result, - result_expression.expression, - result_value, - ) - continue - # Now both result_value and existing_value contain something - if isinstance(result_value, list): - existing_value.extend(result_value) - elif isinstance(result_value, (int, float, str)): - # Modify the existing result with the sum or concatenation - set_value_from_jmespath( - complete_result, - result_expression.expression, - existing_value + result_value, - ) - merge_dicts(complete_result, self.non_aggregate_part) - if self.resume_token is not None: - complete_result['NextToken'] = self.resume_token - return complete_result - - def _parse_starting_token(self): - if self._starting_token is None: - return None - - # The starting token is a dict passed as a base64 encoded string. - next_token = self._starting_token - try: - next_token = self._token_decoder.decode(next_token) - index = 0 - if 'boto_truncate_amount' in next_token: - index = next_token.get('boto_truncate_amount') - del next_token['boto_truncate_amount'] - except (ValueError, TypeError): - next_token, index = self._parse_starting_token_deprecated() - return next_token, index - - def _parse_starting_token_deprecated(self): - """ - This handles parsing of old style starting tokens, and attempts to - coerce them into the new style. - """ - log.debug( - "Attempting to fall back to old starting token parser. For token: %s", - self._starting_token, - ) - if self._starting_token is None: - return None - - parts = self._starting_token.split('___') - next_token = [] - index = 0 - if len(parts) == len(self._input_token) + 1: - try: - index = int(parts.pop()) - except ValueError: - # This doesn't look like a valid old-style token, so we're - # passing it along as an opaque service token. - parts = [self._starting_token] - - for part in parts: - if part == 'None': - next_token.append(None) - else: - next_token.append(part) - return self._convert_deprecated_starting_token(next_token), index - - def _convert_deprecated_starting_token(self, deprecated_token): - """ - This attempts to convert a deprecated starting token into the new - style. - """ - len_deprecated_token = len(deprecated_token) - len_input_token = len(self._input_token) - if len_deprecated_token > len_input_token: - raise ValueError(f"Bad starting token: {self._starting_token}") - elif len_deprecated_token < len_input_token: - log.debug( - "Old format starting token does not contain all input " - "tokens. Setting the rest, in order, as None." 
- ) - for i in range(len_input_token - len_deprecated_token): - deprecated_token.append(None) - return dict(zip(self._input_token, deprecated_token)) - - -class Paginator: - PAGE_ITERATOR_CLS = PageIterator - - def __init__(self, method, pagination_config, model): - self._model = model - self._method = method - self._pagination_cfg = pagination_config - self._output_token = self._get_output_tokens(self._pagination_cfg) - self._input_token = self._get_input_tokens(self._pagination_cfg) - self._more_results = self._get_more_results_token(self._pagination_cfg) - self._non_aggregate_keys = self._get_non_aggregate_keys( - self._pagination_cfg - ) - self._result_keys = self._get_result_keys(self._pagination_cfg) - self._limit_key = self._get_limit_key(self._pagination_cfg) - - @property - def result_keys(self): - return self._result_keys - - def _get_non_aggregate_keys(self, config): - keys = [] - for key in config.get('non_aggregate_keys', []): - keys.append(jmespath.compile(key)) - return keys - - def _get_output_tokens(self, config): - output = [] - output_token = config['output_token'] - if not isinstance(output_token, list): - output_token = [output_token] - for config in output_token: - output.append(jmespath.compile(config)) - return output - - def _get_input_tokens(self, config): - input_token = self._pagination_cfg['input_token'] - if not isinstance(input_token, list): - input_token = [input_token] - return input_token - - def _get_more_results_token(self, config): - more_results = config.get('more_results') - if more_results is not None: - return jmespath.compile(more_results) - - def _get_result_keys(self, config): - result_key = config.get('result_key') - if result_key is not None: - if not isinstance(result_key, list): - result_key = [result_key] - result_key = [jmespath.compile(rk) for rk in result_key] - return result_key - - def _get_limit_key(self, config): - return config.get('limit_key') - - def paginate(self, **kwargs): - """Create paginator object for an operation. - - This returns an iterable object. Iterating over - this object will yield a single page of a response - at a time. - - """ - page_params = self._extract_paging_params(kwargs) - return self.PAGE_ITERATOR_CLS( - self._method, - self._input_token, - self._output_token, - self._more_results, - self._result_keys, - self._non_aggregate_keys, - self._limit_key, - page_params['MaxItems'], - page_params['StartingToken'], - page_params['PageSize'], - kwargs, - ) - - def _extract_paging_params(self, kwargs): - pagination_config = kwargs.pop('PaginationConfig', {}) - max_items = pagination_config.get('MaxItems', None) - if max_items is not None: - max_items = int(max_items) - page_size = pagination_config.get('PageSize', None) - if page_size is not None: - if self._limit_key is None: - raise PaginationError( - message="PageSize parameter is not supported for the " - "pagination interface for this operation." - ) - input_members = self._model.input_shape.members - limit_key_shape = input_members.get(self._limit_key) - if limit_key_shape.type_name == 'string': - if not isinstance(page_size, str): - page_size = str(page_size) - else: - page_size = int(page_size) - return { - 'MaxItems': max_items, - 'StartingToken': pagination_config.get('StartingToken', None), - 'PageSize': page_size, - } - - -class ResultKeyIterator: - """Iterates over the results of paginated responses. - - Each iterator is associated with a single result key. - Iterating over this object will give you each element in - the result key list. 
- - :param pages_iterator: An iterator that will give you - pages of results (a ``PageIterator`` class). - :param result_key: The JMESPath expression representing - the result key. - - """ - - def __init__(self, pages_iterator, result_key): - self._pages_iterator = pages_iterator - self.result_key = result_key - - def __iter__(self): - for page in self._pages_iterator: - results = self.result_key.search(page) - if results is None: - results = [] - yield from results diff --git a/venv/Lib/site-packages/botocore/parsers.py b/venv/Lib/site-packages/botocore/parsers.py deleted file mode 100644 index 9f1e604..0000000 --- a/venv/Lib/site-packages/botocore/parsers.py +++ /dev/null @@ -1,1484 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Response parsers for the various protocol types. - -The module contains classes that can take an HTTP response, and given -an output shape, parse the response into a dict according to the -rules in the output shape. - -There are many similarities amongst the different protocols with regard -to response parsing, and the code is structured in a way to avoid -code duplication when possible. The diagram below is a diagram -showing the inheritance hierarchy of the response classes. - -:: - - - +-------------------+ - | ResponseParser | - +-------------------+ - ^ ^ ^ ^ ^ - | | | | | - | | | | +--------------------------------------------+ - | | | +-----------------------------+ | - | | | | | - +--------------------+ | +----------------+ | | - | | | | | -+----------+----------+ +------+-------+ +-------+------+ +------+-------+ +------+--------+ -|BaseXMLResponseParser| |BaseRestParser| |BaseJSONParser| |BaseCBORParser| |BaseRpcV2Parser| -+---------------------+ +--------------+ +--------------+ +----------+---+ +-+-------------+ - ^ ^ ^ ^ ^ ^ ^ ^ - | | | | | | | | - | | | | | | | | - | ++----------+-+ +-+--------+---+ | +---+---------+-+ - | |RestXMLParser| |RestJSONParser| | |RpcV2CBORParser| - +-----+-----+ +-------------+ +--------------+ | +---+---------+-+ - |QueryParser| | - +-----------+ +----+-----+ - |JSONParser| - +----------+ - -The diagram above shows that there is a base class, ``ResponseParser`` that -contains logic that is similar amongst all the different protocols (``query``, -``json``, ``rest-json``, ``rest-xml``, ``smithy-rpc-v2-cbor``). Amongst the various services -there is shared logic that can be grouped several ways: - -* The ``query`` and ``rest-xml`` both have XML bodies that are parsed in the - same way. -* The ``json`` and ``rest-json`` protocols both have JSON bodies that are - parsed in the same way. -* The ``rest-json`` and ``rest-xml`` protocols have additional attributes - besides body parameters that are parsed the same (headers, query string, - status code). - -This is reflected in the class diagram above. 
The ``BaseXMLResponseParser`` -and the BaseJSONParser contain logic for parsing the XML/JSON body, -and the BaseRestParser contains logic for parsing out attributes that -come from other parts of the HTTP response. Classes like the -``RestXMLParser`` inherit from the ``BaseXMLResponseParser`` to get the -XML body parsing logic and the ``BaseRestParser`` to get the HTTP -header/status code/query string parsing. - -Additionally, there are event stream parsers that are used by the other parsers -to wrap streaming bodies that represent a stream of events. The -BaseEventStreamParser extends from ResponseParser and defines the logic for -parsing values from the headers and payload of a message from the underlying -binary encoding protocol. Currently, event streams support parsing bodies -encoded as JSON and XML through the following hierarchy. - - - +--------------+ - |ResponseParser| - +--------------+ - ^ ^ ^ - +--------------------+ | +------------------+ - | | | - +----------+----------+ +----------+----------+ +-------+------+ - |BaseXMLResponseParser| |BaseEventStreamParser| |BaseJSONParser| - +---------------------+ +---------------------+ +--------------+ - ^ ^ ^ ^ - | | | | - | | | | - +-+----------------+-+ +-+-----------------+-+ - |EventStreamXMLParser| |EventStreamJSONParser| - +--------------------+ +---------------------+ - -Return Values -============= - -Each call to ``parse()`` returns a dict has this form:: - - Standard Response - - { - "ResponseMetadata": {"RequestId": } - - } - - Error response - - { - "ResponseMetadata": {"RequestId": } - "Error": { - "Code": , - "Message": , - "Type": , - - } - } - -""" - -import base64 -import http.client -import io -import json -import logging -import os -import re -import struct - -from botocore.compat import ETree, XMLParseError -from botocore.eventstream import EventStream, NoInitialResponseError -from botocore.utils import ( - CachedProperty, - ensure_boolean, - is_json_value_header, - lowercase_dict, - merge_dicts, - parse_timestamp, -) - -LOG = logging.getLogger(__name__) - -DEFAULT_TIMESTAMP_PARSER = parse_timestamp - - -class ResponseParserFactory: - def __init__(self): - self._defaults = {} - - def set_parser_defaults(self, **kwargs): - """Set default arguments when a parser instance is created. - - You can specify any kwargs that are allowed by a ResponseParser - class. There are currently two arguments: - - * timestamp_parser - A callable that can parse a timestamp string - * blob_parser - A callable that can parse a blob type - - """ - self._defaults.update(kwargs) - - def create_parser(self, protocol_name): - parser_cls = PROTOCOL_PARSERS[protocol_name] - return parser_cls(**self._defaults) - - -def create_parser(protocol): - return ResponseParserFactory().create_parser(protocol) - - -def _text_content(func): - # This decorator hides the difference between - # an XML node with text or a plain string. It's used - # to ensure that scalar processing operates only on text - # strings, which allows the same scalar handlers to be used - # for XML nodes from the body and HTTP headers. - def _get_text_content(self, shape, node_or_string): - if hasattr(node_or_string, 'text'): - text = node_or_string.text - if text is None: - # If an XML node is empty , - # we want to parse that as an empty string, - # not as a null/None value. 
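# Illustrative sketch (not part of the removed botocore file): the factory defined
# earlier in this module maps each wire protocol named in the docstring to a concrete
# parser class. Runs against any installed copy of botocore.
from botocore.parsers import create_parser

for protocol in ('query', 'json', 'rest-json', 'rest-xml'):
    parser = create_parser(protocol)
    print(protocol, type(parser).__name__)
# query QueryParser / json JSONParser / rest-json RestJSONParser / rest-xml RestXMLParser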
- text = '' - else: - text = node_or_string - return func(self, shape, text) - - return _get_text_content - - -class ResponseParserError(Exception): - pass - - -class ResponseParser: - """Base class for response parsing. - - This class represents the interface that all ResponseParsers for the - various protocols must implement. - - This class will take an HTTP response and a model shape and parse the - HTTP response into a dictionary. - - There is a single public method exposed: ``parse``. See the ``parse`` - docstring for more info. - - """ - - DEFAULT_ENCODING = 'utf-8' - EVENT_STREAM_PARSER_CLS = None - # This is a list of known values for the 'location' key in the - # serialization dict. The location key tells us where in the response - # to parse the value. Members with locations that aren't in this list - # will be parsed from the body. - KNOWN_LOCATIONS = ('header', 'headers', 'statusCode') - - def __init__(self, timestamp_parser=None, blob_parser=None): - if timestamp_parser is None: - timestamp_parser = DEFAULT_TIMESTAMP_PARSER - self._timestamp_parser = timestamp_parser - if blob_parser is None: - blob_parser = self._default_blob_parser - self._blob_parser = blob_parser - self._event_stream_parser = None - if self.EVENT_STREAM_PARSER_CLS is not None: - self._event_stream_parser = self.EVENT_STREAM_PARSER_CLS( - timestamp_parser, blob_parser - ) - - def _default_blob_parser(self, value): - # Blobs are always returned as bytes type (this matters on python3). - # We don't decode this to a str because it's entirely possible that the - # blob contains binary data that actually can't be decoded. - return base64.b64decode(value) - - def parse(self, response, shape): - """Parse the HTTP response given a shape. - - :param response: The HTTP response dictionary. This is a dictionary - that represents the HTTP request. The dictionary must have the - following keys, ``body``, ``headers``, and ``status_code``. - - :param shape: The model shape describing the expected output. - :return: Returns a dictionary representing the parsed response - described by the model. In addition to the shape described from - the model, each response will also have a ``ResponseMetadata`` - which contains metadata about the response, which contains at least - two keys containing ``RequestId`` and ``HTTPStatusCode``. Some - responses may populate additional keys, but ``RequestId`` will - always be present. - - """ - LOG.debug('Response headers: %r', response['headers']) - LOG.debug('Response body:\n%r', response['body']) - if response['status_code'] >= 301: - if self._is_generic_error_response(response): - parsed = self._do_generic_error_parse(response) - elif self._is_modeled_error_shape(shape): - parsed = self._do_modeled_error_parse(response, shape) - # We don't want to decorate the modeled fields with metadata - return parsed - else: - parsed = self._do_error_parse(response, shape) - else: - parsed = self._do_parse(response, shape) - - # We don't want to decorate event stream responses with metadata - if shape and shape.serialization.get('eventstream'): - return parsed - - # Add ResponseMetadata if it doesn't exist and inject the HTTP - # status code and headers from the response. - if isinstance(parsed, dict): - response_metadata = parsed.get('ResponseMetadata', {}) - response_metadata['HTTPStatusCode'] = response['status_code'] - # Ensure that the http header keys are all lower cased. Older - # versions of urllib3 (< 1.11) would unintentionally do this for us - # (see urllib3#633). 
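# Illustrative sketch (not part of the removed botocore file): the dict-shaped HTTP
# response that parse() expects, run through a JSON parser with no output shape so that
# only ResponseMetadata (request id, status code, headers) is populated. The request id
# is illustrative.
from botocore.parsers import JSONParser

response = {
    'status_code': 200,
    'headers': {'x-amzn-requestid': 'req-123'},
    'body': b'{}',
    'context': {},
}
parsed = JSONParser().parse(response, shape=None)
print(parsed['ResponseMetadata'])
# {'RequestId': 'req-123', 'HTTPStatusCode': 200, 'HTTPHeaders': {'x-amzn-requestid': 'req-123'}}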
We need to do this conversion manually now. - headers = response['headers'] - response_metadata['HTTPHeaders'] = lowercase_dict(headers) - parsed['ResponseMetadata'] = response_metadata - self._add_checksum_response_metadata(response, response_metadata) - return parsed - - def _add_checksum_response_metadata(self, response, response_metadata): - checksum_context = response.get('context', {}).get('checksum', {}) - algorithm = checksum_context.get('response_algorithm') - if algorithm: - response_metadata['ChecksumAlgorithm'] = algorithm - - def _is_modeled_error_shape(self, shape): - return shape is not None and shape.metadata.get('exception', False) - - def _is_generic_error_response(self, response): - # There are times when a service will respond with a generic - # error response such as: - # 'Http/1.1 Service Unavailable' - # - # This can also happen if you're going through a proxy. - # In this case the protocol specific _do_error_parse will either - # fail to parse the response (in the best case) or silently succeed - # and treat the HTML above as an XML response and return - # non sensical parsed data. - # To prevent this case from happening we first need to check - # whether or not this response looks like the generic response. - if response['status_code'] >= 500: - if 'body' not in response or response['body'] is None: - return True - - body = response['body'].strip() - return body.startswith(b'') or not body - - def _do_generic_error_parse(self, response): - # There's not really much we can do when we get a generic - # html response. - LOG.debug( - "Received a non protocol specific error response from the " - "service, unable to populate error code and message." - ) - return { - 'Error': { - 'Code': str(response['status_code']), - 'Message': http.client.responses.get( - response['status_code'], '' - ), - }, - 'ResponseMetadata': {}, - } - - def _do_parse(self, response, shape): - raise NotImplementedError(f"{self.__class__.__name__}._do_parse") - - def _do_error_parse(self, response, shape): - raise NotImplementedError(f"{self.__class__.__name__}._do_error_parse") - - def _do_modeled_error_parse(self, response, shape, parsed): - raise NotImplementedError( - f"{self.__class__.__name__}._do_modeled_error_parse" - ) - - def _parse_shape(self, shape, node): - handler = getattr( - self, f'_handle_{shape.type_name}', self._default_handle - ) - return handler(shape, node) - - def _handle_list(self, shape, node): - # Enough implementations share list serialization that it's moved - # up here in the base class. - parsed = [] - member_shape = shape.member - for item in node: - parsed.append(self._parse_shape(member_shape, item)) - return parsed - - def _default_handle(self, shape, value): - return value - - def _create_event_stream(self, response, shape): - parser = self._event_stream_parser - name = response['context'].get('operation_name') - return EventStream(response['body'], shape, parser, name) - - def _get_first_key(self, value): - return list(value)[0] - - def _has_unknown_tagged_union_member(self, shape, value): - if shape.is_tagged_union: - cleaned_value = value.copy() - cleaned_value.pop("__type", None) - cleaned_value = { - k: v for k, v in cleaned_value.items() if v is not None - } - if len(cleaned_value) != 1: - error_msg = ( - "Invalid service response: %s must have one and only " - "one member set." 
- ) - raise ResponseParserError(error_msg % shape.name) - tag = self._get_first_key(cleaned_value) - serialized_member_names = [ - shape.members[member].serialization.get('name', member) - for member in shape.members - ] - if tag not in serialized_member_names: - LOG.info( - "Received a tagged union response with member unknown to client: %s. " - "Please upgrade SDK for full response support.", - tag, - ) - return True - return False - - def _handle_unknown_tagged_union_member(self, tag): - return {'SDK_UNKNOWN_MEMBER': {'name': tag}} - - def _do_query_compatible_error_parse(self, code, headers, error): - """ - Error response may contain an x-amzn-query-error header to translate - errors codes from former `query` services into other protocols. We use this - to do our lookup in the errorfactory for modeled errors. - """ - query_error = headers['x-amzn-query-error'] - query_error_components = query_error.split(';') - - if len(query_error_components) == 2 and query_error_components[0]: - error['Error']['QueryErrorCode'] = code - error['Error']['Type'] = query_error_components[1] - return query_error_components[0] - return code - - -class BaseXMLResponseParser(ResponseParser): - def __init__(self, timestamp_parser=None, blob_parser=None): - super().__init__(timestamp_parser, blob_parser) - self._namespace_re = re.compile('{.*}') - - def _handle_map(self, shape, node): - parsed = {} - key_shape = shape.key - value_shape = shape.value - key_location_name = key_shape.serialization.get('name') or 'key' - value_location_name = value_shape.serialization.get('name') or 'value' - if shape.serialization.get('flattened') and not isinstance(node, list): - node = [node] - for keyval_node in node: - for single_pair in keyval_node: - # Within each there's a and a - tag_name = self._node_tag(single_pair) - if tag_name == key_location_name: - key_name = self._parse_shape(key_shape, single_pair) - elif tag_name == value_location_name: - val_name = self._parse_shape(value_shape, single_pair) - else: - raise ResponseParserError(f"Unknown tag: {tag_name}") - parsed[key_name] = val_name - return parsed - - def _node_tag(self, node): - return self._namespace_re.sub('', node.tag) - - def _handle_list(self, shape, node): - # When we use _build_name_to_xml_node, repeated elements are aggregated - # into a list. However, we can't tell the difference between a scalar - # value and a single element flattened list. So before calling the - # real _handle_list, we know that "node" should actually be a list if - # it's flattened, and if it's not, then we make it a one element list. - if shape.serialization.get('flattened') and not isinstance(node, list): - node = [node] - return super()._handle_list(shape, node) - - def _handle_structure(self, shape, node): - parsed = {} - members = shape.members - if shape.metadata.get('exception', False): - node = self._get_error_root(node) - xml_dict = self._build_name_to_xml_node(node) - if self._has_unknown_tagged_union_member(shape, xml_dict): - tag = self._get_first_key(xml_dict) - return self._handle_unknown_tagged_union_member(tag) - for member_name in members: - member_shape = members[member_name] - location = member_shape.serialization.get('location') - if ( - location in self.KNOWN_LOCATIONS - or member_shape.serialization.get('eventheader') - ): - # All members with known locations have already been handled, - # so we don't need to parse these members. 
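# Illustrative sketch (not part of the removed botocore file): the x-amzn-query-error
# translation handled earlier in this file. Services migrated off the query protocol
# send "<code>;<type>" in that header so clients can keep raising the original modeled
# error. The header value below is an illustrative example only.
header_value = 'AWS.SimpleQueueService.NonExistentQueue;Sender'
components = header_value.split(';')
if len(components) == 2 and components[0]:
    code, error_type = components
    print(code, error_type)  # AWS.SimpleQueueService.NonExistentQueue Sender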
- continue - xml_name = self._member_key_name(member_shape, member_name) - member_node = xml_dict.get(xml_name) - if member_node is not None: - parsed[member_name] = self._parse_shape( - member_shape, member_node - ) - elif member_shape.serialization.get('xmlAttribute'): - attribs = {} - location_name = member_shape.serialization['name'] - for key, value in node.attrib.items(): - new_key = self._namespace_re.sub( - location_name.split(':')[0] + ':', key - ) - attribs[new_key] = value - if location_name in attribs: - parsed[member_name] = attribs[location_name] - return parsed - - def _get_error_root(self, original_root): - if self._node_tag(original_root) == 'ErrorResponse': - for child in original_root: - if self._node_tag(child) == 'Error': - return child - return original_root - - def _member_key_name(self, shape, member_name): - # This method is needed because we have to special case flattened list - # with a serialization name. If this is the case we use the - # locationName from the list's member shape as the key name for the - # surrounding structure. - if shape.type_name == 'list' and shape.serialization.get('flattened'): - list_member_serialized_name = shape.member.serialization.get( - 'name' - ) - if list_member_serialized_name is not None: - return list_member_serialized_name - serialized_name = shape.serialization.get('name') - if serialized_name is not None: - return serialized_name - return member_name - - def _build_name_to_xml_node(self, parent_node): - # If the parent node is actually a list. We should not be trying - # to serialize it to a dictionary. Instead, return the first element - # in the list. - if isinstance(parent_node, list): - return self._build_name_to_xml_node(parent_node[0]) - xml_dict = {} - for item in parent_node: - key = self._node_tag(item) - if key in xml_dict: - # If the key already exists, the most natural - # way to handle this is to aggregate repeated - # keys into a single list. - # 12 -> {'foo': [Node(1), Node(2)]} - if isinstance(xml_dict[key], list): - xml_dict[key].append(item) - else: - # Convert from a scalar to a list. - xml_dict[key] = [xml_dict[key], item] - else: - xml_dict[key] = item - return xml_dict - - def _parse_xml_string_to_dom(self, xml_string): - try: - parser = ETree.XMLParser( - target=ETree.TreeBuilder(), encoding=self.DEFAULT_ENCODING - ) - parser.feed(xml_string) - root = parser.close() - except XMLParseError as e: - raise ResponseParserError( - f"Unable to parse response ({e}), " - f"invalid XML received. 
Further retries may succeed:\n{xml_string}" - ) - return root - - def _replace_nodes(self, parsed): - for key, value in parsed.items(): - if list(value): - sub_dict = self._build_name_to_xml_node(value) - parsed[key] = self._replace_nodes(sub_dict) - else: - parsed[key] = value.text - return parsed - - @_text_content - def _handle_boolean(self, shape, text): - if text == 'true': - return True - else: - return False - - @_text_content - def _handle_float(self, shape, text): - return float(text) - - @_text_content - def _handle_timestamp(self, shape, text): - return self._timestamp_parser(text) - - @_text_content - def _handle_integer(self, shape, text): - return int(text) - - @_text_content - def _handle_string(self, shape, text): - return text - - @_text_content - def _handle_blob(self, shape, text): - return self._blob_parser(text) - - _handle_character = _handle_string - _handle_double = _handle_float - _handle_long = _handle_integer - - -class QueryParser(BaseXMLResponseParser): - def _do_error_parse(self, response, shape): - xml_contents = response['body'] - root = self._parse_xml_string_to_dom(xml_contents) - parsed = self._build_name_to_xml_node(root) - self._replace_nodes(parsed) - # Once we've converted xml->dict, we need to make one or two - # more adjustments to extract nested errors and to be consistent - # with ResponseMetadata for non-error responses: - # 1. {"Errors": {"Error": {...}}} -> {"Error": {...}} - # 2. {"RequestId": "id"} -> {"ResponseMetadata": {"RequestId": "id"}} - if 'Errors' in parsed: - parsed.update(parsed.pop('Errors')) - if 'RequestId' in parsed: - parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')} - return parsed - - def _do_modeled_error_parse(self, response, shape): - return self._parse_body_as_xml(response, shape, inject_metadata=False) - - def _do_parse(self, response, shape): - return self._parse_body_as_xml(response, shape, inject_metadata=True) - - def _parse_body_as_xml(self, response, shape, inject_metadata=True): - xml_contents = response['body'] - root = self._parse_xml_string_to_dom(xml_contents) - parsed = {} - if shape is not None: - start = root - if 'resultWrapper' in shape.serialization: - start = self._find_result_wrapped_shape( - shape.serialization['resultWrapper'], root - ) - parsed = self._parse_shape(shape, start) - if inject_metadata: - self._inject_response_metadata(root, parsed) - return parsed - - def _find_result_wrapped_shape(self, element_name, xml_root_node): - mapping = self._build_name_to_xml_node(xml_root_node) - return mapping[element_name] - - def _inject_response_metadata(self, node, inject_into): - mapping = self._build_name_to_xml_node(node) - child_node = mapping.get('ResponseMetadata') - if child_node is not None: - sub_mapping = self._build_name_to_xml_node(child_node) - for key, value in sub_mapping.items(): - sub_mapping[key] = value.text - inject_into['ResponseMetadata'] = sub_mapping - - -class EC2QueryParser(QueryParser): - def _inject_response_metadata(self, node, inject_into): - mapping = self._build_name_to_xml_node(node) - child_node = mapping.get('requestId') - if child_node is not None: - inject_into['ResponseMetadata'] = {'RequestId': child_node.text} - - def _do_error_parse(self, response, shape): - # EC2 errors look like: - # - # - # - # InvalidInstanceID.Malformed - # Invalid id: "1343124" - # - # - # 12345 - # - # This is different from QueryParser in that it's RequestID, - # not RequestId - original = super()._do_error_parse(response, shape) - if 'RequestID' in original: - 
original['ResponseMetadata'] = { - 'RequestId': original.pop('RequestID') - } - return original - - def _get_error_root(self, original_root): - for child in original_root: - if self._node_tag(child) == 'Errors': - for errors_child in child: - if self._node_tag(errors_child) == 'Error': - return errors_child - return original_root - - -class BaseJSONParser(ResponseParser): - def _handle_structure(self, shape, value): - final_parsed = {} - if shape.is_document_type: - final_parsed = value - else: - member_shapes = shape.members - if value is None: - # If the comes across the wire as "null" (None in python), - # we should be returning this unchanged, instead of as an - # empty dict. - return None - final_parsed = {} - if self._has_unknown_tagged_union_member(shape, value): - tag = self._get_first_key(value) - return self._handle_unknown_tagged_union_member(tag) - for member_name in member_shapes: - member_shape = member_shapes[member_name] - json_name = member_shape.serialization.get('name', member_name) - raw_value = value.get(json_name) - if raw_value is not None: - final_parsed[member_name] = self._parse_shape( - member_shapes[member_name], raw_value - ) - return final_parsed - - def _handle_map(self, shape, value): - parsed = {} - key_shape = shape.key - value_shape = shape.value - for key, value in value.items(): - actual_key = self._parse_shape(key_shape, key) - actual_value = self._parse_shape(value_shape, value) - parsed[actual_key] = actual_value - return parsed - - def _handle_blob(self, shape, value): - return self._blob_parser(value) - - def _handle_timestamp(self, shape, value): - return self._timestamp_parser(value) - - def _do_error_parse(self, response, shape): - body = self._parse_body_as_json(response['body']) - error = {"Error": {"Message": '', "Code": ''}, "ResponseMetadata": {}} - headers = response['headers'] - # Error responses can have slightly different structures for json. - # The basic structure is: - # - # {"__type":"ConnectClientException", - # "message":"The error message."} - - # The error message can either come in the 'message' or 'Message' key - # so we need to check for both. 
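# Illustrative sketch (not part of the removed botocore file): the two JSON error body
# variants described here. The human-readable message may arrive under either
# 'message' or 'Message', while '__type' carries the error code.
for body in (
    {'__type': 'ConnectClientException', 'message': 'The error message.'},
    {'__type': 'ConnectClientException', 'Message': 'The error message.'},
):
    message = body.get('message', body.get('Message', ''))
    print(body['__type'], '->', message)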
- error['Error']['Message'] = body.get( - 'message', body.get('Message', '') - ) - # if the message did not contain an error code - # include the response status code - response_code = response.get('status_code') - - code = body.get('__type', response_code and str(response_code)) - if code is not None: - # code has a couple forms as well: - # * "com.aws.dynamodb.vAPI#ProvisionedThroughputExceededException" - # * "ResourceNotFoundException" - if ':' in code: - code = code.split(':', 1)[0] - if '#' in code: - code = code.rsplit('#', 1)[1] - if 'x-amzn-query-error' in headers: - code = self._do_query_compatible_error_parse( - code, headers, error - ) - error['Error']['Code'] = code - self._inject_response_metadata(error, response['headers']) - return error - - def _inject_response_metadata(self, parsed, headers): - if 'x-amzn-requestid' in headers: - parsed.setdefault('ResponseMetadata', {})['RequestId'] = headers[ - 'x-amzn-requestid' - ] - - def _parse_body_as_json(self, body_contents): - if not body_contents: - return {} - body = body_contents.decode(self.DEFAULT_ENCODING) - try: - original_parsed = json.loads(body) - return original_parsed - except ValueError: - # if the body cannot be parsed, include - # the literal string as the message - return {'message': body} - - -class BaseCBORParser(ResponseParser): - INDEFINITE_ITEM_ADDITIONAL_INFO = 31 - BREAK_CODE = 0xFF - - @CachedProperty - def major_type_to_parsing_method_map(self): - return { - 0: self._parse_unsigned_integer, - 1: self._parse_negative_integer, - 2: self._parse_byte_string, - 3: self._parse_text_string, - 4: self._parse_array, - 5: self._parse_map, - 6: self._parse_tag, - 7: self._parse_simple_and_float, - } - - def get_peekable_stream_from_bytes(self, bytes): - return io.BufferedReader(io.BytesIO(bytes)) - - def parse_data_item(self, stream): - # CBOR data is divided into "data items", and each data item starts - # with an initial byte that describes how the following bytes should be parsed - initial_byte = self._read_bytes_as_int(stream, 1) - # The highest order three bits of the initial byte describe the CBOR major type - major_type = initial_byte >> 5 - # The lowest order 5 bits of the initial byte tells us more information about - # how the bytes should be parsed that will be used - additional_info = initial_byte & 0b00011111 - - if major_type in self.major_type_to_parsing_method_map: - method = self.major_type_to_parsing_method_map[major_type] - return method(stream, additional_info) - else: - raise ResponseParserError( - f"Unsupported inital byte found for data item- " - f"Major type:{major_type}, Additional info: " - f"{additional_info}" - ) - - # Major type 0 - unsigned integers - def _parse_unsigned_integer(self, stream, additional_info): - additional_info_to_num_bytes = { - 24: 1, - 25: 2, - 26: 4, - 27: 8, - } - # Values under 24 don't need a full byte to be stored; their values are - # instead stored as the "additional info" in the initial byte - if additional_info < 24: - return additional_info - elif additional_info in additional_info_to_num_bytes: - num_bytes = additional_info_to_num_bytes[additional_info] - return self._read_bytes_as_int(stream, num_bytes) - else: - raise ResponseParserError( - "Invalid CBOR integer returned from the service; unparsable " - f"additional info found for major type 0 or 1: {additional_info}" - ) - - # Major type 1 - negative integers - def _parse_negative_integer(self, stream, additional_info): - return -1 - self._parse_unsigned_integer(stream, additional_info) - - # Major 
type 2 - byte string - def _parse_byte_string(self, stream, additional_info): - if additional_info != self.INDEFINITE_ITEM_ADDITIONAL_INFO: - length = self._parse_unsigned_integer(stream, additional_info) - return self._read_from_stream(stream, length) - else: - chunks = [] - while True: - if self._handle_break_code(stream): - break - initial_byte = self._read_bytes_as_int(stream, 1) - additional_info = initial_byte & 0b00011111 - length = self._parse_unsigned_integer(stream, additional_info) - chunks.append(self._read_from_stream(stream, length)) - return b''.join(chunks) - - # Major type 3 - text string - def _parse_text_string(self, stream, additional_info): - return self._parse_byte_string(stream, additional_info).decode('utf-8') - - # Major type 4 - lists - def _parse_array(self, stream, additional_info): - if additional_info != self.INDEFINITE_ITEM_ADDITIONAL_INFO: - length = self._parse_unsigned_integer(stream, additional_info) - return [self.parse_data_item(stream) for _ in range(length)] - else: - items = [] - while not self._handle_break_code(stream): - items.append(self.parse_data_item(stream)) - return items - - # Major type 5 - maps - def _parse_map(self, stream, additional_info): - items = {} - if additional_info != self.INDEFINITE_ITEM_ADDITIONAL_INFO: - length = self._parse_unsigned_integer(stream, additional_info) - for _ in range(length): - self._parse_key_value_pair(stream, items) - return items - - else: - while not self._handle_break_code(stream): - self._parse_key_value_pair(stream, items) - return items - - def _parse_key_value_pair(self, stream, items): - key = self.parse_data_item(stream) - value = self.parse_data_item(stream) - if value is not None: - items[key] = value - - # Major type 6 is tags. The only tag we currently support is tag 1 for unix - # timestamps - def _parse_tag(self, stream, additional_info): - tag = self._parse_unsigned_integer(stream, additional_info) - value = self.parse_data_item(stream) - if tag == 1: # Epoch-based date/time in milliseconds - return self._parse_datetime(value) - else: - raise ResponseParserError( - f"Found CBOR tag not supported by botocore: {tag}" - ) - - def _parse_datetime(self, value): - if isinstance(value, (int, float)): - return self._timestamp_parser(value) - else: - raise ResponseParserError( - f"Unable to parse datetime value: {value}" - ) - - # Major type 7 includes floats and "simple" types. Supported simple types are - # currently boolean values, CBOR's null, and CBOR's undefined type. All other - # values are either floats or invalid. - def _parse_simple_and_float(self, stream, additional_info): - # For major type 7, values 20-23 correspond to CBOR "simple" values - additional_info_simple_values = { - 20: False, # CBOR false - 21: True, # CBOR true - 22: None, # CBOR null - 23: None, # CBOR undefined - } - # First we check if the additional info corresponds to a supported simple value - if additional_info in additional_info_simple_values: - return additional_info_simple_values[additional_info] - - # If it's not a simple value, we need to parse it into the correct format and - # number fo bytes - float_formats = { - 25: ('>e', 2), - 26: ('>f', 4), - 27: ('>d', 8), - } - - if additional_info in float_formats: - float_format, num_bytes = float_formats[additional_info] - return struct.unpack( - float_format, self._read_from_stream(stream, num_bytes) - )[0] - raise ResponseParserError( - f"Invalid additional info found for major type 7: {additional_info}. 
" - f"This indicates an unsupported simple type or an indefinite float value" - ) - - # This helper method is intended for use when parsing indefinite length items. - # It does nothing if the next byte is not the break code. If the next byte is - # the break code, it advances past that byte and returns True so the calling - # method knows to stop parsing that data item. - def _handle_break_code(self, stream): - if int.from_bytes(stream.peek(1)[:1], 'big') == self.BREAK_CODE: - stream.seek(1, os.SEEK_CUR) - return True - - def _read_bytes_as_int(self, stream, num_bytes): - byte = self._read_from_stream(stream, num_bytes) - return int.from_bytes(byte, 'big') - - def _read_from_stream(self, stream, num_bytes): - value = stream.read(num_bytes) - if len(value) != num_bytes: - raise ResponseParserError( - "End of stream reached; this indicates a " - "malformed CBOR response from the server or an " - "issue in botocore" - ) - return value - - -class BaseEventStreamParser(ResponseParser): - def _do_parse(self, response, shape): - final_parsed = {} - if shape.serialization.get('eventstream'): - event_type = response['headers'].get(':event-type') - event_shape = shape.members.get(event_type) - if event_shape: - final_parsed[event_type] = self._do_parse( - response, event_shape - ) - else: - self._parse_non_payload_attrs( - response, shape, shape.members, final_parsed - ) - self._parse_payload(response, shape, shape.members, final_parsed) - return final_parsed - - def _do_error_parse(self, response, shape): - exception_type = response['headers'].get(':exception-type') - exception_shape = shape.members.get(exception_type) - if exception_shape is not None: - original_parsed = self._initial_body_parse(response['body']) - body = self._parse_shape(exception_shape, original_parsed) - error = { - 'Error': { - 'Code': exception_type, - 'Message': body.get('Message', body.get('message', '')), - } - } - else: - error = { - 'Error': { - 'Code': response['headers'].get(':error-code', ''), - 'Message': response['headers'].get(':error-message', ''), - } - } - return error - - def _parse_payload(self, response, shape, member_shapes, final_parsed): - if shape.serialization.get('event'): - for name in member_shapes: - member_shape = member_shapes[name] - if member_shape.serialization.get('eventpayload'): - body = response['body'] - if member_shape.type_name == 'blob': - parsed_body = body - elif member_shape.type_name == 'string': - parsed_body = body.decode(self.DEFAULT_ENCODING) - else: - raw_parse = self._initial_body_parse(body) - parsed_body = self._parse_shape( - member_shape, raw_parse - ) - final_parsed[name] = parsed_body - return - # If we didn't find an explicit payload, use the current shape - original_parsed = self._initial_body_parse(response['body']) - body_parsed = self._parse_shape(shape, original_parsed) - final_parsed.update(body_parsed) - - def _parse_non_payload_attrs( - self, response, shape, member_shapes, final_parsed - ): - headers = response['headers'] - for name in member_shapes: - member_shape = member_shapes[name] - if member_shape.serialization.get('eventheader'): - if name in headers: - value = headers[name] - if member_shape.type_name == 'timestamp': - # Event stream timestamps are an in milleseconds so we - # divide by 1000 to convert to seconds. - value = self._timestamp_parser(value / 1000.0) - final_parsed[name] = value - - def _initial_body_parse(self, body_contents): - # This method should do the initial xml/json parsing of the - # body. 
We still need to walk the parsed body in order - # to convert types, but this method will do the first round - # of parsing. - raise NotImplementedError("_initial_body_parse") - - -class EventStreamJSONParser(BaseEventStreamParser, BaseJSONParser): - def _initial_body_parse(self, body_contents): - return self._parse_body_as_json(body_contents) - - -class EventStreamXMLParser(BaseEventStreamParser, BaseXMLResponseParser): - def _initial_body_parse(self, xml_string): - if not xml_string: - return ETree.Element('') - return self._parse_xml_string_to_dom(xml_string) - - -class EventStreamCBORParser(BaseEventStreamParser, BaseCBORParser): - def _initial_body_parse(self, body_contents): - if body_contents == b'': - return {} - return self.parse_data_item( - self.get_peekable_stream_from_bytes(body_contents) - ) - - -class JSONParser(BaseJSONParser): - EVENT_STREAM_PARSER_CLS = EventStreamJSONParser - - """Response parser for the "json" protocol.""" - - def _do_parse(self, response, shape): - parsed = {} - if shape is not None: - event_name = shape.event_stream_name - if event_name: - parsed = self._handle_event_stream(response, shape, event_name) - else: - parsed = self._handle_json_body(response['body'], shape) - self._inject_response_metadata(parsed, response['headers']) - return parsed - - def _do_modeled_error_parse(self, response, shape): - return self._handle_json_body(response['body'], shape) - - def _handle_event_stream(self, response, shape, event_name): - event_stream_shape = shape.members[event_name] - event_stream = self._create_event_stream(response, event_stream_shape) - try: - event = event_stream.get_initial_response() - except NoInitialResponseError: - error_msg = 'First event was not of type initial-response' - raise ResponseParserError(error_msg) - parsed = self._handle_json_body(event.payload, shape) - parsed[event_name] = event_stream - return parsed - - def _handle_json_body(self, raw_body, shape): - # The json.loads() gives us the primitive JSON types, - # but we need to traverse the parsed JSON data to convert - # to richer types (blobs, timestamps, etc. - parsed_json = self._parse_body_as_json(raw_body) - return self._parse_shape(shape, parsed_json) - - -class BaseRestParser(ResponseParser): - def _do_parse(self, response, shape): - final_parsed = {} - final_parsed['ResponseMetadata'] = self._populate_response_metadata( - response - ) - self._add_modeled_parse(response, shape, final_parsed) - return final_parsed - - def _add_modeled_parse(self, response, shape, final_parsed): - if shape is None: - return final_parsed - member_shapes = shape.members - self._parse_non_payload_attrs( - response, shape, member_shapes, final_parsed - ) - self._parse_payload(response, shape, member_shapes, final_parsed) - - def _do_modeled_error_parse(self, response, shape): - final_parsed = {} - self._add_modeled_parse(response, shape, final_parsed) - return final_parsed - - def _populate_response_metadata(self, response): - metadata = {} - headers = response['headers'] - if 'x-amzn-requestid' in headers: - metadata['RequestId'] = headers['x-amzn-requestid'] - elif 'x-amz-request-id' in headers: - metadata['RequestId'] = headers['x-amz-request-id'] - # HostId is what it's called whenever this value is returned - # in an XML response body, so to be consistent, we'll always - # call is HostId. 
- metadata['HostId'] = headers.get('x-amz-id-2', '') - return metadata - - def _parse_payload(self, response, shape, member_shapes, final_parsed): - if 'payload' in shape.serialization: - # If a payload is specified in the output shape, then only that - # shape is used for the body payload. - payload_member_name = shape.serialization['payload'] - body_shape = member_shapes[payload_member_name] - if body_shape.serialization.get('eventstream'): - body = self._create_event_stream(response, body_shape) - final_parsed[payload_member_name] = body - elif body_shape.type_name in ['string', 'blob']: - # This is a stream - body = response['body'] - if isinstance(body, bytes): - body = body.decode(self.DEFAULT_ENCODING) - final_parsed[payload_member_name] = body - else: - original_parsed = self._initial_body_parse(response['body']) - final_parsed[payload_member_name] = self._parse_shape( - body_shape, original_parsed - ) - else: - original_parsed = self._initial_body_parse(response['body']) - body_parsed = self._parse_shape(shape, original_parsed) - final_parsed.update(body_parsed) - - def _parse_non_payload_attrs( - self, response, shape, member_shapes, final_parsed - ): - headers = response['headers'] - for name in member_shapes: - member_shape = member_shapes[name] - location = member_shape.serialization.get('location') - if location is None: - continue - elif location == 'statusCode': - final_parsed[name] = self._parse_shape( - member_shape, response['status_code'] - ) - elif location == 'headers': - final_parsed[name] = self._parse_header_map( - member_shape, headers - ) - elif location == 'header': - header_name = member_shape.serialization.get('name', name) - if header_name in headers: - final_parsed[name] = self._parse_shape( - member_shape, headers[header_name] - ) - - def _parse_header_map(self, shape, headers): - # Note that headers are case insensitive, so we .lower() - # all header names and header prefixes. - parsed = {} - prefix = shape.serialization.get('name', '').lower() - for header_name in headers: - if header_name.lower().startswith(prefix): - # The key name inserted into the parsed hash - # strips off the prefix. - name = header_name[len(prefix) :] - parsed[name] = headers[header_name] - return parsed - - def _initial_body_parse(self, body_contents): - # This method should do the initial xml/json parsing of the - # body. We still need to walk the parsed body in order - # to convert types, but this method will do the first round - # of parsing. 
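# Illustrative sketch (not part of the removed botocore file): the header-map behaviour
# implemented above. Every header sharing the modeled prefix is collected into one dict
# with the prefix stripped; the x-amz-meta- prefix (S3 object metadata) is used here
# purely as an illustration.
headers = {'x-amz-meta-color': 'blue', 'x-amz-meta-owner': 'ops', 'content-length': '0'}
prefix = 'x-amz-meta-'
parsed = {
    name[len(prefix):]: value
    for name, value in headers.items()
    if name.lower().startswith(prefix)
}
print(parsed)  # {'color': 'blue', 'owner': 'ops'}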
- raise NotImplementedError("_initial_body_parse") - - def _handle_string(self, shape, value): - parsed = value - if is_json_value_header(shape): - decoded = base64.b64decode(value).decode(self.DEFAULT_ENCODING) - parsed = json.loads(decoded) - return parsed - - def _handle_list(self, shape, node): - location = shape.serialization.get('location') - if location == 'header' and not isinstance(node, list): - # List in headers may be a comma separated string as per RFC7230 - node = [e.strip() for e in node.split(',')] - return super()._handle_list(shape, node) - - -class BaseRpcV2Parser(ResponseParser): - def _do_parse(self, response, shape): - parsed = {} - if shape is not None: - event_stream_name = shape.event_stream_name - if event_stream_name: - parsed = self._handle_event_stream( - response, shape, event_stream_name - ) - else: - parsed = {} - self._parse_payload(response, shape, parsed) - parsed['ResponseMetadata'] = self._populate_response_metadata( - response - ) - return parsed - - def _add_modeled_parse(self, response, shape, final_parsed): - if shape is None: - return final_parsed - self._parse_payload(response, shape, final_parsed) - - def _do_modeled_error_parse(self, response, shape): - final_parsed = {} - self._add_modeled_parse(response, shape, final_parsed) - return final_parsed - - def _populate_response_metadata(self, response): - metadata = {} - headers = response['headers'] - if 'x-amzn-requestid' in headers: - metadata['RequestId'] = headers['x-amzn-requestid'] - return metadata - - def _handle_structure(self, shape, node): - parsed = {} - members = shape.members - if shape.is_tagged_union: - cleaned_value = node.copy() - cleaned_value.pop("__type", None) - cleaned_value = { - k: v for k, v in cleaned_value.items() if v is not None - } - if len(cleaned_value) != 1: - error_msg = ( - "Invalid service response: %s must have one and only " - "one member set." - ) - raise ResponseParserError(error_msg % shape.name) - for member_name in members: - member_shape = members[member_name] - member_node = node.get(member_name) - if member_node is not None: - parsed[member_name] = self._parse_shape( - member_shape, member_node - ) - return parsed - - def _parse_payload(self, response, shape, final_parsed): - original_parsed = self._initial_body_parse(response['body']) - body_parsed = self._parse_shape(shape, original_parsed) - final_parsed.update(body_parsed) - - def _initial_body_parse(self, body_contents): - # This method should do the initial parsing of the - # body. We still need to walk the parsed body in order - # to convert types, but this method will do the first round - # of parsing. - raise NotImplementedError("_initial_body_parse") - - -class RestJSONParser(BaseRestParser, BaseJSONParser): - EVENT_STREAM_PARSER_CLS = EventStreamJSONParser - - def _initial_body_parse(self, body_contents): - return self._parse_body_as_json(body_contents) - - def _do_error_parse(self, response, shape): - error = super()._do_error_parse(response, shape) - self._inject_error_code(error, response) - return error - - def _inject_error_code(self, error, response): - # The "Code" value can come from either a response - # header or a value in the JSON body. 
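# Illustrative sketch (not part of the removed botocore file): the rest-json error-code
# cleanup applied just below. An x-amzn-errortype header may carry a trailing URI and a
# namespace prefix, both of which are stripped before the code is reported. The header
# value shown is illustrative.
raw = 'ResourceNotFoundException:http://internal.amazon.com/coral/com.amazon.coral.validate/'
code = raw.split(':', 1)[0].rsplit('#', 1)[-1]
print(code)  # ResourceNotFoundException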
- body = self._initial_body_parse(response['body']) - code = None - if 'x-amzn-errortype' in response['headers']: - code = response['headers']['x-amzn-errortype'] - elif 'code' in body or 'Code' in body: - code = body.get('code', body.get('Code', '')) - if code is None: - return - if isinstance(code, str): - code = code.split(':', 1)[0].rsplit('#', 1)[-1] - error['Error']['Code'] = code - - def _handle_boolean(self, shape, value): - return ensure_boolean(value) - - def _handle_integer(self, shape, value): - return int(value) - - def _handle_float(self, shape, value): - return float(value) - - _handle_long = _handle_integer - _handle_double = _handle_float - - -class RpcV2CBORParser(BaseRpcV2Parser, BaseCBORParser): - EVENT_STREAM_PARSER_CLS = EventStreamCBORParser - - def _initial_body_parse(self, body_contents): - if body_contents == b'': - return body_contents - body_contents_stream = self.get_peekable_stream_from_bytes( - body_contents - ) - return self.parse_data_item(body_contents_stream) - - def _do_error_parse(self, response, shape): - body = self._initial_body_parse(response['body']) - error = { - "Error": { - "Message": body.get('message', body.get('Message', '')), - "Code": '', - }, - "ResponseMetadata": {}, - } - headers = response['headers'] - - code = body.get('__type') - if code is None: - response_code = response.get('status_code') - if response_code is not None: - code = str(response_code) - if code is not None: - if ':' in code: - code = code.split(':', 1)[0] - if '#' in code: - code = code.rsplit('#', 1)[1] - if 'x-amzn-query-error' in headers: - code = self._do_query_compatible_error_parse( - code, headers, error - ) - error['Error']['Code'] = code - if 'x-amzn-requestid' in headers: - error.setdefault('ResponseMetadata', {})['RequestId'] = headers[ - 'x-amzn-requestid' - ] - return error - - def _handle_event_stream(self, response, shape, event_name): - event_stream_shape = shape.members[event_name] - event_stream = self._create_event_stream(response, event_stream_shape) - try: - event = event_stream.get_initial_response() - except NoInitialResponseError: - error_msg = 'First event was not of type initial-response' - raise ResponseParserError(error_msg) - parsed = self._initial_body_parse(event.payload) - parsed[event_name] = event_stream - return parsed - - -class RestXMLParser(BaseRestParser, BaseXMLResponseParser): - EVENT_STREAM_PARSER_CLS = EventStreamXMLParser - - def _initial_body_parse(self, xml_string): - if not xml_string: - return ETree.Element('') - return self._parse_xml_string_to_dom(xml_string) - - def _do_error_parse(self, response, shape): - # We're trying to be service agnostic here, but S3 does have a slightly - # different response structure for its errors compared to other - # rest-xml serivces (route53/cloudfront). We handle this by just - # trying to parse both forms. - # First: - # - # - # Sender - # InvalidInput - # Invalid resource type: foo - # - # request-id - # - if response['body']: - # If the body ends up being invalid xml, the xml parser should not - # blow up. It should at least try to pull information about the - # the error response from other sources like the HTTP status code. 
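# Illustrative sketch (not part of the removed botocore file): an S3-style error body,
# the form handled by the Error-rooted branch of _parse_error_from_body just below, run
# through RestXMLParser.parse() with no output shape. Ids and message are illustrative.
from botocore.parsers import RestXMLParser

response = {
    'status_code': 404,
    'headers': {'x-amz-request-id': 'req-1', 'x-amz-id-2': 'host-1'},
    'body': (
        b'<Error><Code>NoSuchKey</Code><Message>The specified key does not exist.'
        b'</Message><RequestId>req-1</RequestId><HostId>host-1</HostId></Error>'
    ),
    'context': {},
}
parsed = RestXMLParser().parse(response, shape=None)
print(parsed['Error']['Code'])  # NoSuchKey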
- try: - return self._parse_error_from_body(response) - except ResponseParserError: - LOG.debug( - 'Exception caught when parsing error response body:', - exc_info=True, - ) - return self._parse_error_from_http_status(response) - - def _parse_error_from_http_status(self, response): - return { - 'Error': { - 'Code': str(response['status_code']), - 'Message': http.client.responses.get( - response['status_code'], '' - ), - }, - 'ResponseMetadata': { - 'RequestId': response['headers'].get('x-amz-request-id', ''), - 'HostId': response['headers'].get('x-amz-id-2', ''), - }, - } - - def _parse_error_from_body(self, response): - xml_contents = response['body'] - root = self._parse_xml_string_to_dom(xml_contents) - parsed = self._build_name_to_xml_node(root) - self._replace_nodes(parsed) - if root.tag == 'Error': - # This is an S3 error response. First we'll populate the - # response metadata. - metadata = self._populate_response_metadata(response) - # The RequestId and the HostId are already in the - # ResponseMetadata, but are also duplicated in the XML - # body. We don't need these values in both places, - # we'll just remove them from the parsed XML body. - parsed.pop('RequestId', '') - parsed.pop('HostId', '') - return {'Error': parsed, 'ResponseMetadata': metadata} - elif 'RequestId' in parsed: - # Other rest-xml services: - parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')} - default = {'Error': {'Message': '', 'Code': ''}} - merge_dicts(default, parsed) - return default - - @_text_content - def _handle_string(self, shape, text): - text = super()._handle_string(shape, text) - return text - - -PROTOCOL_PARSERS = { - 'ec2': EC2QueryParser, - 'query': QueryParser, - 'json': JSONParser, - 'rest-json': RestJSONParser, - 'rest-xml': RestXMLParser, - 'smithy-rpc-v2-cbor': RpcV2CBORParser, -} diff --git a/venv/Lib/site-packages/botocore/plugin.py b/venv/Lib/site-packages/botocore/plugin.py deleted file mode 100644 index a06a755..0000000 --- a/venv/Lib/site-packages/botocore/plugin.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -""" -NOTE: This module is considered private and is subject to abrupt breaking -changes without prior announcement. Please do not use it directly. -""" - -import importlib -import logging -import os -from contextvars import ContextVar -from dataclasses import dataclass -from typing import Optional - -log = logging.getLogger(__name__) - - -@dataclass -class PluginContext: - """ - Encapsulation of plugins tracked within the `_plugin_context` context variable. 
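# Illustrative sketch (not part of the removed botocore file): the hook a plugin module
# is expected to expose, per load_client_plugins further down, which imports each
# configured module and calls its initialize_client_plugin(client). The module body and
# event name below are hypothetical examples; the plugin mechanism is marked private
# and experimental in the header above.
def initialize_client_plugin(client):
    # Attach whatever behaviour the plugin provides to the client's event system.
    client.meta.events.register('before-send.s3', lambda **kwargs: None)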
- """ - - plugins: Optional[str] = None - - -_plugin_context = ContextVar("_plugin_context") - - -def get_plugin_context(): - """Get the current `_plugin_context` context variable if set, else None.""" - return _plugin_context.get(None) - - -def set_plugin_context(ctx): - """Set the current `_plugin_context` context variable.""" - token = _plugin_context.set(ctx) - return token - - -def reset_plugin_context(token): - """Reset the current `_plugin_context` context variable.""" - _plugin_context.reset(token) - - -def get_botocore_plugins(): - context = get_plugin_context() - if context is not None: - plugins = context.plugins - if plugins is None: - context.plugins = os.environ.get('BOTOCORE_EXPERIMENTAL__PLUGINS') - else: - return plugins - return os.environ.get('BOTOCORE_EXPERIMENTAL__PLUGINS') - - -def load_client_plugins(client, plugins): - for plugin_name, module_name in plugins.items(): - log.debug( - "Importing client plugin %s from module %s", - plugin_name, - module_name, - ) - try: - module = importlib.import_module(module_name) - module.initialize_client_plugin(client) - except ModuleNotFoundError: - log.debug( - "Failed to locate the following plugin module: %s.", - plugin_name, - ) - except Exception as e: - log.debug( - "Error raised during the loading of %s: %s", plugin_name, e - ) diff --git a/venv/Lib/site-packages/botocore/regions.py b/venv/Lib/site-packages/botocore/regions.py deleted file mode 100644 index db7ece7..0000000 --- a/venv/Lib/site-packages/botocore/regions.py +++ /dev/null @@ -1,869 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Resolves regions and endpoints. - -This module implements endpoint resolution, including resolving endpoints for a -given service and region and resolving the available endpoints for a service -in a specific AWS partition. -""" - -import copy -import logging -import re -from enum import Enum - -import jmespath - -from botocore import UNSIGNED, xform_name -from botocore.auth import AUTH_TYPE_MAPS, HAS_CRT -from botocore.crt import CRT_SUPPORTED_AUTH_TYPES -from botocore.endpoint_provider import EndpointProvider -from botocore.exceptions import ( - EndpointProviderError, - EndpointVariantError, - InvalidEndpointConfigurationError, - InvalidHostLabelError, - MissingDependencyException, - NoRegionError, - ParamValidationError, - UnknownEndpointResolutionBuiltInName, - UnknownRegionError, - UnknownSignatureVersionError, - UnsupportedS3AccesspointConfigurationError, - UnsupportedS3ConfigurationError, - UnsupportedS3ControlArnError, - UnsupportedS3ControlConfigurationError, -) -from botocore.useragent import register_feature_id -from botocore.utils import ensure_boolean, instance_cache - -LOG = logging.getLogger(__name__) -DEFAULT_URI_TEMPLATE = '{service}.{region}.{dnsSuffix}' # noqa -DEFAULT_SERVICE_DATA = {'endpoints': {}} - - -class BaseEndpointResolver: - """Resolves regions and endpoints. 
Must be subclassed.""" - - def construct_endpoint(self, service_name, region_name=None): - """Resolves an endpoint for a service and region combination. - - :type service_name: string - :param service_name: Name of the service to resolve an endpoint for - (e.g., s3) - - :type region_name: string - :param region_name: Region/endpoint name to resolve (e.g., us-east-1) - if no region is provided, the first found partition-wide endpoint - will be used if available. - - :rtype: dict - :return: Returns a dict containing the following keys: - - partition: (string, required) Resolved partition name - - endpointName: (string, required) Resolved endpoint name - - hostname: (string, required) Hostname to use for this endpoint - - sslCommonName: (string) sslCommonName to use for this endpoint. - - credentialScope: (dict) Signature version 4 credential scope - - region: (string) region name override when signing. - - service: (string) service name override when signing. - - signatureVersions: (list) A list of possible signature - versions, including s3, v4, v2, and s3v4 - - protocols: (list) A list of supported protocols - (e.g., http, https) - - ...: Other keys may be included as well based on the metadata - """ - raise NotImplementedError - - def get_available_partitions(self): - """Lists the partitions available to the endpoint resolver. - - :return: Returns a list of partition names (e.g., ["aws", "aws-cn"]). - """ - raise NotImplementedError - - def get_available_endpoints( - self, service_name, partition_name='aws', allow_non_regional=False - ): - """Lists the endpoint names of a particular partition. - - :type service_name: string - :param service_name: Name of a service to list endpoint for (e.g., s3) - - :type partition_name: string - :param partition_name: Name of the partition to limit endpoints to. - (e.g., aws for the public AWS endpoints, aws-cn for AWS China - endpoints, aws-us-gov for AWS GovCloud (US) Endpoints, etc. - - :type allow_non_regional: bool - :param allow_non_regional: Set to True to include endpoints that are - not regional endpoints (e.g., s3-external-1, - fips-us-gov-west-1, etc). - :return: Returns a list of endpoint names (e.g., ["us-east-1"]). - """ - raise NotImplementedError - - -class EndpointResolver(BaseEndpointResolver): - """Resolves endpoints based on partition endpoint metadata""" - - _UNSUPPORTED_DUALSTACK_PARTITIONS = ['aws-iso', 'aws-iso-b'] - - def __init__(self, endpoint_data, uses_builtin_data=False): - """ - :type endpoint_data: dict - :param endpoint_data: A dict of partition data. - - :type uses_builtin_data: boolean - :param uses_builtin_data: Whether the endpoint data originates in the - package's data directory. 
- """ - if 'partitions' not in endpoint_data: - raise ValueError('Missing "partitions" in endpoint data') - self._endpoint_data = endpoint_data - self.uses_builtin_data = uses_builtin_data - - def get_service_endpoints_data(self, service_name, partition_name='aws'): - for partition in self._endpoint_data['partitions']: - if partition['partition'] != partition_name: - continue - services = partition['services'] - if service_name not in services: - continue - return services[service_name]['endpoints'] - - def get_available_partitions(self): - result = [] - for partition in self._endpoint_data['partitions']: - result.append(partition['partition']) - return result - - def get_available_endpoints( - self, - service_name, - partition_name='aws', - allow_non_regional=False, - endpoint_variant_tags=None, - ): - result = [] - for partition in self._endpoint_data['partitions']: - if partition['partition'] != partition_name: - continue - services = partition['services'] - if service_name not in services: - continue - service_endpoints = services[service_name]['endpoints'] - for endpoint_name in service_endpoints: - is_regional_endpoint = endpoint_name in partition['regions'] - # Only regional endpoints can be modeled with variants - if endpoint_variant_tags and is_regional_endpoint: - variant_data = self._retrieve_variant_data( - service_endpoints[endpoint_name], endpoint_variant_tags - ) - if variant_data: - result.append(endpoint_name) - elif allow_non_regional or is_regional_endpoint: - result.append(endpoint_name) - return result - - def get_partition_dns_suffix( - self, partition_name, endpoint_variant_tags=None - ): - for partition in self._endpoint_data['partitions']: - if partition['partition'] == partition_name: - if endpoint_variant_tags: - variant = self._retrieve_variant_data( - partition.get('defaults'), endpoint_variant_tags - ) - if variant and 'dnsSuffix' in variant: - return variant['dnsSuffix'] - else: - return partition['dnsSuffix'] - return None - - def construct_endpoint( - self, - service_name, - region_name=None, - partition_name=None, - use_dualstack_endpoint=False, - use_fips_endpoint=False, - ): - if ( - service_name == 's3' - and use_dualstack_endpoint - and region_name is None - ): - region_name = 'us-east-1' - - if partition_name is not None: - valid_partition = None - for partition in self._endpoint_data['partitions']: - if partition['partition'] == partition_name: - valid_partition = partition - - if valid_partition is not None: - result = self._endpoint_for_partition( - valid_partition, - service_name, - region_name, - use_dualstack_endpoint, - use_fips_endpoint, - True, - ) - return result - return None - - # Iterate over each partition until a match is found. 
- for partition in self._endpoint_data['partitions']: - if use_dualstack_endpoint and ( - partition['partition'] - in self._UNSUPPORTED_DUALSTACK_PARTITIONS - ): - continue - result = self._endpoint_for_partition( - partition, - service_name, - region_name, - use_dualstack_endpoint, - use_fips_endpoint, - ) - if result: - return result - - def get_partition_for_region(self, region_name): - for partition in self._endpoint_data['partitions']: - if self._region_match(partition, region_name): - return partition['partition'] - raise UnknownRegionError( - region_name=region_name, - error_msg='No partition found for provided region_name.', - ) - - def _endpoint_for_partition( - self, - partition, - service_name, - region_name, - use_dualstack_endpoint, - use_fips_endpoint, - force_partition=False, - ): - partition_name = partition["partition"] - if ( - use_dualstack_endpoint - and partition_name in self._UNSUPPORTED_DUALSTACK_PARTITIONS - ): - error_msg = ( - "Dualstack endpoints are currently not supported" - f" for {partition_name} partition" - ) - raise EndpointVariantError(tags=['dualstack'], error_msg=error_msg) - - # Get the service from the partition, or an empty template. - service_data = partition['services'].get( - service_name, DEFAULT_SERVICE_DATA - ) - # Use the partition endpoint if no region is supplied. - if region_name is None: - if 'partitionEndpoint' in service_data: - region_name = service_data['partitionEndpoint'] - else: - raise NoRegionError() - - resolve_kwargs = { - 'partition': partition, - 'service_name': service_name, - 'service_data': service_data, - 'endpoint_name': region_name, - 'use_dualstack_endpoint': use_dualstack_endpoint, - 'use_fips_endpoint': use_fips_endpoint, - } - - # Attempt to resolve the exact region for this partition. - if region_name in service_data['endpoints']: - return self._resolve(**resolve_kwargs) - - # Check to see if the endpoint provided is valid for the partition. - if self._region_match(partition, region_name) or force_partition: - # Use the partition endpoint if set and not regionalized. 
- partition_endpoint = service_data.get('partitionEndpoint') - is_regionalized = service_data.get('isRegionalized', True) - if partition_endpoint and not is_regionalized: - LOG.debug( - 'Using partition endpoint for %s, %s: %s', - service_name, - region_name, - partition_endpoint, - ) - resolve_kwargs['endpoint_name'] = partition_endpoint - return self._resolve(**resolve_kwargs) - LOG.debug( - 'Creating a regex based endpoint for %s, %s', - service_name, - region_name, - ) - return self._resolve(**resolve_kwargs) - - def _region_match(self, partition, region_name): - if region_name in partition['regions']: - return True - if 'regionRegex' in partition: - return re.compile(partition['regionRegex']).match(region_name) - return False - - def _retrieve_variant_data(self, endpoint_data, tags): - variants = endpoint_data.get('variants', []) - for variant in variants: - if set(variant['tags']) == set(tags): - result = variant.copy() - return result - - def _create_tag_list(self, use_dualstack_endpoint, use_fips_endpoint): - tags = [] - if use_dualstack_endpoint: - tags.append('dualstack') - if use_fips_endpoint: - tags.append('fips') - return tags - - def _resolve_variant( - self, tags, endpoint_data, service_defaults, partition_defaults - ): - result = {} - for variants in [endpoint_data, service_defaults, partition_defaults]: - variant = self._retrieve_variant_data(variants, tags) - if variant: - self._merge_keys(variant, result) - return result - - def _resolve( - self, - partition, - service_name, - service_data, - endpoint_name, - use_dualstack_endpoint, - use_fips_endpoint, - ): - endpoint_data = service_data.get('endpoints', {}).get( - endpoint_name, {} - ) - - if endpoint_data.get('deprecated'): - LOG.warning( - 'Client is configured with the deprecated endpoint: %s', - endpoint_name, - ) - - service_defaults = service_data.get('defaults', {}) - partition_defaults = partition.get('defaults', {}) - tags = self._create_tag_list(use_dualstack_endpoint, use_fips_endpoint) - - if tags: - result = self._resolve_variant( - tags, endpoint_data, service_defaults, partition_defaults - ) - if result == {}: - error_msg = ( - f"Endpoint does not exist for {service_name} " - f"in region {endpoint_name}" - ) - raise EndpointVariantError(tags=tags, error_msg=error_msg) - self._merge_keys(endpoint_data, result) - else: - result = endpoint_data - - # If dnsSuffix has not already been consumed from a variant definition - if 'dnsSuffix' not in result: - result['dnsSuffix'] = partition['dnsSuffix'] - - result['partition'] = partition['partition'] - result['endpointName'] = endpoint_name - - # Merge in the service defaults then the partition defaults. 
- self._merge_keys(service_defaults, result) - self._merge_keys(partition_defaults, result) - - result['hostname'] = self._expand_template( - partition, - result['hostname'], - service_name, - endpoint_name, - result['dnsSuffix'], - ) - if 'sslCommonName' in result: - result['sslCommonName'] = self._expand_template( - partition, - result['sslCommonName'], - service_name, - endpoint_name, - result['dnsSuffix'], - ) - - return result - - def _merge_keys(self, from_data, result): - for key in from_data: - if key not in result: - result[key] = from_data[key] - - def _expand_template( - self, partition, template, service_name, endpoint_name, dnsSuffix - ): - return template.format( - service=service_name, region=endpoint_name, dnsSuffix=dnsSuffix - ) - - -class EndpointResolverBuiltins(str, Enum): - # The AWS Region configured for the SDK client (str) - AWS_REGION = "AWS::Region" - # Whether the UseFIPSEndpoint configuration option has been enabled for - # the SDK client (bool) - AWS_USE_FIPS = "AWS::UseFIPS" - # Whether the UseDualStackEndpoint configuration option has been enabled - # for the SDK client (bool) - AWS_USE_DUALSTACK = "AWS::UseDualStack" - # Whether the global endpoint should be used with STS, rather than the - # regional endpoint for us-east-1 (bool) - AWS_STS_USE_GLOBAL_ENDPOINT = "AWS::STS::UseGlobalEndpoint" - # Whether the global endpoint should be used with S3, rather than the - # regional endpoint for us-east-1 (bool) - AWS_S3_USE_GLOBAL_ENDPOINT = "AWS::S3::UseGlobalEndpoint" - # Whether S3 Transfer Acceleration has been requested (bool) - AWS_S3_ACCELERATE = "AWS::S3::Accelerate" - # Whether S3 Force Path Style has been enabled (bool) - AWS_S3_FORCE_PATH_STYLE = "AWS::S3::ForcePathStyle" - # Whether to use the ARN region or raise an error when ARN and client - # region differ (for s3 service only, bool) - AWS_S3_USE_ARN_REGION = "AWS::S3::UseArnRegion" - # Whether to use the ARN region or raise an error when ARN and client - # region differ (for s3-control service only, bool) - AWS_S3CONTROL_USE_ARN_REGION = 'AWS::S3Control::UseArnRegion' - # Whether multi-region access points (MRAP) should be disabled (bool) - AWS_S3_DISABLE_MRAP = "AWS::S3::DisableMultiRegionAccessPoints" - # Whether a custom endpoint has been configured (str) - SDK_ENDPOINT = "SDK::Endpoint" - # An AWS account ID that can be optionally configured for the SDK client (str) - ACCOUNT_ID = "AWS::Auth::AccountId" - # Whether an endpoint should include an account ID (str) - ACCOUNT_ID_ENDPOINT_MODE = "AWS::Auth::AccountIdEndpointMode" - - -class EndpointRulesetResolver: - """Resolves endpoints using a service's endpoint ruleset""" - - def __init__( - self, - endpoint_ruleset_data, - partition_data, - service_model, - builtins, - client_context, - event_emitter, - use_ssl=True, - requested_auth_scheme=None, - ): - self._provider = EndpointProvider( - ruleset_data=endpoint_ruleset_data, - partition_data=partition_data, - ) - self._param_definitions = self._provider.ruleset.parameters - self._service_model = service_model - self._builtins = builtins - self._client_context = client_context - self._event_emitter = event_emitter - self._use_ssl = use_ssl - self._requested_auth_scheme = requested_auth_scheme - self._instance_cache = {} - - def construct_endpoint( - self, - operation_model, - call_args, - request_context, - ): - """Invokes the provider with params defined in the service's ruleset""" - if call_args is None: - call_args = {} - - if request_context is None: - request_context = {} - - provider_params 
= self._get_provider_params( - operation_model, call_args, request_context - ) - LOG.debug( - 'Calling endpoint provider with parameters: %s', provider_params - ) - try: - provider_result = self._provider.resolve_endpoint( - **provider_params - ) - except EndpointProviderError as ex: - botocore_exception = self.ruleset_error_to_botocore_exception( - ex, provider_params - ) - if botocore_exception is None: - raise - else: - raise botocore_exception from ex - LOG.debug('Endpoint provider result: %s', provider_result.url) - - # The endpoint provider does not support non-secure transport. - if ( - not self._use_ssl - and provider_result.url.startswith('https://') - and 'Endpoint' not in provider_params - ): - provider_result = provider_result._replace( - url=f'http://{provider_result.url[8:]}' - ) - - # Multi-valued headers are not supported in botocore. Replace the list - # of values returned for each header with just its first entry, - # dropping any additionally entries. - provider_result = provider_result._replace( - headers={ - key: val[0] for key, val in provider_result.headers.items() - } - ) - - return provider_result - - def _get_provider_params( - self, operation_model, call_args, request_context - ): - """Resolve a value for each parameter defined in the service's ruleset - - The resolution order for parameter values is: - 1. Operation-specific static context values from the service definition - 2. Operation-specific dynamic context values from API parameters - 3. Client-specific context parameters - 4. Built-in values such as region, FIPS usage, ... - """ - provider_params = {} - # Builtin values can be customized for each operation by hooks - # subscribing to the ``before-endpoint-resolution.*`` event. - customized_builtins = self._get_customized_builtins( - operation_model, call_args, request_context - ) - for param_name, param_def in self._param_definitions.items(): - param_val = self._resolve_param_from_context( - param_name=param_name, - operation_model=operation_model, - call_args=call_args, - ) - if param_val is None and param_def.builtin is not None: - param_val = self._resolve_param_as_builtin( - builtin_name=param_def.builtin, - builtins=customized_builtins, - ) - if param_val is not None: - provider_params[param_name] = param_val - self._register_endpoint_feature_ids(param_name, param_val) - - return provider_params - - def _resolve_param_from_context( - self, param_name, operation_model, call_args - ): - static = self._resolve_param_as_static_context_param( - param_name, operation_model - ) - if static is not None: - return static - dynamic = self._resolve_param_as_dynamic_context_param( - param_name, operation_model, call_args - ) - if dynamic is not None: - return dynamic - operation_context_params = ( - self._resolve_param_as_operation_context_param( - param_name, operation_model, call_args - ) - ) - if operation_context_params is not None: - return operation_context_params - return self._resolve_param_as_client_context_param(param_name) - - def _resolve_param_as_static_context_param( - self, param_name, operation_model - ): - static_ctx_params = self._get_static_context_params(operation_model) - return static_ctx_params.get(param_name) - - def _resolve_param_as_dynamic_context_param( - self, param_name, operation_model, call_args - ): - dynamic_ctx_params = self._get_dynamic_context_params(operation_model) - if param_name in dynamic_ctx_params: - member_name = dynamic_ctx_params[param_name] - return call_args.get(member_name) - - def 
_resolve_param_as_client_context_param(self, param_name): - client_ctx_params = self._get_client_context_params() - if param_name in client_ctx_params: - client_ctx_varname = client_ctx_params[param_name] - return self._client_context.get(client_ctx_varname) - - def _resolve_param_as_operation_context_param( - self, param_name, operation_model, call_args - ): - operation_ctx_params = operation_model.operation_context_parameters - if param_name in operation_ctx_params: - path = operation_ctx_params[param_name]['path'] - return jmespath.search(path, call_args) - - def _resolve_param_as_builtin(self, builtin_name, builtins): - if builtin_name not in EndpointResolverBuiltins.__members__.values(): - raise UnknownEndpointResolutionBuiltInName(name=builtin_name) - builtin = builtins.get(builtin_name) - if callable(builtin): - return builtin() - return builtin - - @instance_cache - def _get_static_context_params(self, operation_model): - """Mapping of param names to static param value for an operation""" - return { - param.name: param.value - for param in operation_model.static_context_parameters - } - - @instance_cache - def _get_dynamic_context_params(self, operation_model): - """Mapping of param names to member names for an operation""" - return { - param.name: param.member_name - for param in operation_model.context_parameters - } - - @instance_cache - def _get_client_context_params(self): - """Mapping of param names to client configuration variable""" - return { - param.name: xform_name(param.name) - for param in self._service_model.client_context_parameters - } - - def _get_customized_builtins( - self, operation_model, call_args, request_context - ): - service_id = self._service_model.service_id.hyphenize() - customized_builtins = copy.copy(self._builtins) - # Handlers are expected to modify the builtins dict in place. - self._event_emitter.emit( - f'before-endpoint-resolution.{service_id}', - builtins=customized_builtins, - model=operation_model, - params=call_args, - context=request_context, - ) - return customized_builtins - - def auth_schemes_to_signing_ctx(self, auth_schemes): - """Convert an Endpoint's authSchemes property to a signing_context dict - - :type auth_schemes: list - :param auth_schemes: A list of dictionaries taken from the - ``authSchemes`` property of an Endpoint object returned by - ``EndpointProvider``. - - :rtype: str, dict - :return: Tuple of auth type string (to be used in - ``request_context['auth_type']``) and signing context dict (for use - in ``request_context['signing']``). - """ - if not isinstance(auth_schemes, list) or len(auth_schemes) == 0: - raise TypeError("auth_schemes must be a non-empty list.") - - LOG.debug( - 'Selecting from endpoint provider\'s list of auth schemes: %s. ' - 'User selected auth scheme is: "%s"', - ', '.join([f'"{s.get("name")}"' for s in auth_schemes]), - self._requested_auth_scheme, - ) - - if self._requested_auth_scheme == UNSIGNED: - return 'none', {} - - auth_schemes = [ - {**scheme, 'name': self._strip_sig_prefix(scheme['name'])} - for scheme in auth_schemes - ] - if self._requested_auth_scheme is not None: - try: - # Use the first scheme that matches the requested scheme, - # after accounting for naming differences between botocore and - # endpoint rulesets. Keep the requested name. 
- name, scheme = next( - (self._requested_auth_scheme, s) - for s in auth_schemes - if self._does_botocore_authname_match_ruleset_authname( - self._requested_auth_scheme, s['name'] - ) - ) - except StopIteration: - # For legacy signers, no match will be found. Do not raise an - # exception, instead default to the logic in botocore - # customizations. - return None, {} - else: - try: - name, scheme = next( - (s['name'], s) - for s in auth_schemes - if s['name'] in AUTH_TYPE_MAPS - ) - except StopIteration: - # If no auth scheme was specifically requested and an - # authSchemes list is present in the Endpoint object but none - # of the entries are supported, raise an exception. - fixable_with_crt = False - auth_type_options = [s['name'] for s in auth_schemes] - if not HAS_CRT: - fixable_with_crt = any( - scheme in CRT_SUPPORTED_AUTH_TYPES - for scheme in auth_type_options - ) - - if fixable_with_crt: - raise MissingDependencyException( - msg='This operation requires an additional dependency.' - ' Use pip install botocore[crt] before proceeding.' - ) - else: - raise UnknownSignatureVersionError( - signature_version=', '.join(auth_type_options) - ) - - signing_context = {} - if 'signingRegion' in scheme: - signing_context['region'] = scheme['signingRegion'] - elif 'signingRegionSet' in scheme: - if len(scheme['signingRegionSet']) > 0: - signing_context['region'] = ','.join( - scheme['signingRegionSet'] - ) - if 'signingName' in scheme: - signing_context.update(signing_name=scheme['signingName']) - if 'disableDoubleEncoding' in scheme: - signing_context['disableDoubleEncoding'] = ensure_boolean( - scheme['disableDoubleEncoding'] - ) - - LOG.debug( - 'Selected auth type "%s" as "%s" with signing context params: %s', - scheme['name'], # original name without "sig" - name, # chosen name can differ when `signature_version` is set - signing_context, - ) - return name, signing_context - - def _strip_sig_prefix(self, auth_name): - """Normalize auth type names by removing any "sig" prefix""" - return auth_name[3:] if auth_name.startswith('sig') else auth_name - - def _does_botocore_authname_match_ruleset_authname(self, botoname, rsname): - """ - Whether a valid string provided as signature_version parameter for - client construction refers to the same auth methods as a string - returned by the endpoint ruleset provider. This accounts for: - - * The ruleset prefixes auth names with "sig" - * The s3 and s3control rulesets don't distinguish between v4[a] and - s3v4[a] signers - * The v2, v3, and HMAC v1 based signers (s3, s3-*) are botocore legacy - features and do not exist in the rulesets - * Only characters up to the first dash are considered - - Example matches: - * v4, sigv4 - * v4, v4 - * s3v4, sigv4 - * s3v7, sigv7 (hypothetical example) - * s3v4a, sigv4a - * s3v4-query, sigv4 - - Example mismatches: - * v4a, sigv4 - * s3, sigv4 - * s3-presign-post, sigv4 - """ - rsname = self._strip_sig_prefix(rsname) - botoname = botoname.split('-')[0] - if botoname != 's3' and botoname.startswith('s3'): - botoname = botoname[2:] - return rsname == botoname - - def ruleset_error_to_botocore_exception(self, ruleset_exception, params): - """Attempts to translate ruleset errors to pre-existing botocore - exception types by string matching exception strings. 
- """ - msg = ruleset_exception.kwargs.get('msg') - if msg is None: - return - - if msg.startswith('Invalid region in ARN: '): - # Example message: - # "Invalid region in ARN: `us-we$t-2` (invalid DNS name)" - try: - label = msg.split('`')[1] - except IndexError: - label = msg - return InvalidHostLabelError(label=label) - - service_name = self._service_model.service_name - if service_name == 's3': - if ( - msg == 'S3 Object Lambda does not support S3 Accelerate' - or msg == 'Accelerate cannot be used with FIPS' - ): - return UnsupportedS3ConfigurationError(msg=msg) - if ( - msg.startswith('S3 Outposts does not support') - or msg.startswith('S3 MRAP does not support') - or msg.startswith('S3 Object Lambda does not support') - or msg.startswith('Access Points do not support') - or msg.startswith('Invalid configuration:') - or msg.startswith('Client was configured for partition') - ): - return UnsupportedS3AccesspointConfigurationError(msg=msg) - if msg.lower().startswith('invalid arn:'): - return ParamValidationError(report=msg) - if service_name == 's3control': - if msg.startswith('Invalid ARN:'): - arn = params.get('Bucket') - return UnsupportedS3ControlArnError(arn=arn, msg=msg) - if msg.startswith('Invalid configuration:') or msg.startswith( - 'Client was configured for partition' - ): - return UnsupportedS3ControlConfigurationError(msg=msg) - if msg == "AccountId is required but not set": - return ParamValidationError(report=msg) - if service_name == 'events': - if msg.startswith( - 'Invalid Configuration: FIPS is not supported with ' - 'EventBridge multi-region endpoints.' - ): - return InvalidEndpointConfigurationError(msg=msg) - if msg == 'EndpointId must be a valid host label.': - return InvalidEndpointConfigurationError(msg=msg) - return None - - def _register_endpoint_feature_ids(self, param_name, param_val): - if param_name == 'AccountIdEndpointMode': - register_feature_id(f'ACCOUNT_ID_MODE_{param_val.upper()}') - elif param_name == 'AccountId': - register_feature_id('RESOLVED_ACCOUNT_ID') diff --git a/venv/Lib/site-packages/botocore/response.py b/venv/Lib/site-packages/botocore/response.py deleted file mode 100644 index e123b54..0000000 --- a/venv/Lib/site-packages/botocore/response.py +++ /dev/null @@ -1,216 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import logging -from io import IOBase - -from urllib3.exceptions import ProtocolError as URLLib3ProtocolError -from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError - -from botocore import ( - ScalarTypes, # noqa: F401 - parsers, -) -from botocore.compat import ( - XMLParseError, # noqa: F401 - set_socket_timeout, -) -from botocore.exceptions import ( - IncompleteReadError, - ReadTimeoutError, - ResponseStreamingError, -) -from botocore.hooks import first_non_none_response # noqa - -logger = logging.getLogger(__name__) - - -class StreamingBody(IOBase): - """Wrapper class for an http response body. 
- - This provides a few additional conveniences that do not exist - in the urllib3 model: - - * Set the timeout on the socket (i.e read() timeouts) - * Auto validation of content length, if the amount of bytes - we read does not match the content length, an exception - is raised. - - """ - - _DEFAULT_CHUNK_SIZE = 1024 - - def __init__(self, raw_stream, content_length): - self._raw_stream = raw_stream - self._content_length = content_length - self._amount_read = 0 - - def __del__(self): - # Extending destructor in order to preserve the underlying raw_stream. - # The ability to add custom cleanup logic introduced in Python3.4+. - # https://www.python.org/dev/peps/pep-0442/ - pass - - def set_socket_timeout(self, timeout): - """Set the timeout seconds on the socket.""" - # The problem we're trying to solve is to prevent .read() calls from - # hanging. This can happen in rare cases. What we'd like to ideally - # do is set a timeout on the .read() call so that callers can retry - # the request. - # Unfortunately, this isn't currently possible in requests. - # See: https://github.com/kennethreitz/requests/issues/1803 - # So what we're going to do is reach into the guts of the stream and - # grab the socket object, which we can set the timeout on. We're - # putting in a check here so in case this interface goes away, we'll - # know. - try: - set_socket_timeout(self._raw_stream, timeout) - except AttributeError: - logger.exception( - "Cannot access the socket object of a streaming response. " - "It's possible the interface has changed." - ) - raise - - def readable(self): - try: - return self._raw_stream.readable() - except AttributeError: - return False - - def read(self, amt=None): - """Read at most amt bytes from the stream. - - If the amt argument is omitted, read all data. - """ - try: - chunk = self._raw_stream.read(amt) - except URLLib3ReadTimeoutError as e: - # TODO: the url will be None as urllib3 isn't setting it yet - raise ReadTimeoutError(endpoint_url=e.url, error=e) - except URLLib3ProtocolError as e: - raise ResponseStreamingError(error=e) - self._amount_read += len(chunk) - if amt is None or (not chunk and amt > 0): - # If the server sends empty contents or - # we ask to read all of the contents, then we know - # we need to verify the content length. - self._verify_content_length() - return chunk - - def readinto(self, b): - """Read bytes into a pre-allocated, writable bytes-like object b, and return the number of bytes read.""" - try: - amount_read = self._raw_stream.readinto(b) - except URLLib3ReadTimeoutError as e: - # TODO: the url will be None as urllib3 isn't setting it yet - raise ReadTimeoutError(endpoint_url=e.url, error=e) - except URLLib3ProtocolError as e: - raise ResponseStreamingError(error=e) - self._amount_read += amount_read - if amount_read == 0 and len(b) > 0: - # If the server sends empty contents then we know we need to verify - # the content length. 
- self._verify_content_length() - return amount_read - - def readlines(self): - return self._raw_stream.readlines() - - def __iter__(self): - """Return an iterator to yield 1k chunks from the raw stream.""" - return self.iter_chunks(self._DEFAULT_CHUNK_SIZE) - - def __next__(self): - """Return the next 1k chunk from the raw stream.""" - current_chunk = self.read(self._DEFAULT_CHUNK_SIZE) - if current_chunk: - return current_chunk - raise StopIteration() - - def __enter__(self): - return self._raw_stream - - def __exit__(self, type, value, traceback): - self._raw_stream.close() - - next = __next__ - - def iter_lines(self, chunk_size=_DEFAULT_CHUNK_SIZE, keepends=False): - """Return an iterator to yield lines from the raw stream. - - This is achieved by reading chunk of bytes (of size chunk_size) at a - time from the raw stream, and then yielding lines from there. - """ - pending = b'' - for chunk in self.iter_chunks(chunk_size): - lines = (pending + chunk).splitlines(True) - for line in lines[:-1]: - yield line.splitlines(keepends)[0] - pending = lines[-1] - if pending: - yield pending.splitlines(keepends)[0] - - def iter_chunks(self, chunk_size=_DEFAULT_CHUNK_SIZE): - """Return an iterator to yield chunks of chunk_size bytes from the raw - stream. - """ - while True: - current_chunk = self.read(chunk_size) - if current_chunk == b"": - break - yield current_chunk - - def _verify_content_length(self): - # See: https://github.com/kennethreitz/requests/issues/1855 - # Basically, our http library doesn't do this for us, so we have - # to do this ourself. - if self._content_length is not None and self._amount_read != int( - self._content_length - ): - raise IncompleteReadError( - actual_bytes=self._amount_read, - expected_bytes=int(self._content_length), - ) - - def tell(self): - return self._raw_stream.tell() - - def close(self): - """Close the underlying http response stream.""" - self._raw_stream.close() - - -def get_response(operation_model, http_response): - protocol = operation_model.service_model.resolved_protocol - response_dict = { - 'headers': http_response.headers, - 'status_code': http_response.status_code, - } - # TODO: Unfortunately, we have to have error logic here. - # If it looks like an error, in the streaming response case we - # need to actually grab the contents. - if response_dict['status_code'] >= 300: - response_dict['body'] = http_response.content - elif operation_model.has_streaming_output: - response_dict['body'] = StreamingBody( - http_response.raw, response_dict['headers'].get('content-length') - ) - else: - response_dict['body'] = http_response.content - - parser = parsers.create_parser(protocol) - return http_response, parser.parse( - response_dict, operation_model.output_shape - ) diff --git a/venv/Lib/site-packages/botocore/retries/__init__.py b/venv/Lib/site-packages/botocore/retries/__init__.py deleted file mode 100644 index a6d6b37..0000000 --- a/venv/Lib/site-packages/botocore/retries/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""New retry v2 handlers. - -This package obsoletes the botocore/retryhandler.py module and contains -new retry logic. 
- -""" diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 49d9df4..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/adaptive.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/adaptive.cpython-312.pyc deleted file mode 100644 index 2bb88a9..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/adaptive.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 735d569..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/bucket.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/bucket.cpython-312.pyc deleted file mode 100644 index f0233fb..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/bucket.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/quota.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/quota.cpython-312.pyc deleted file mode 100644 index 4afc539..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/quota.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/special.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/special.cpython-312.pyc deleted file mode 100644 index 40825c0..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/special.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/standard.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/standard.cpython-312.pyc deleted file mode 100644 index 61cd30d..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/standard.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/__pycache__/throttling.cpython-312.pyc b/venv/Lib/site-packages/botocore/retries/__pycache__/throttling.cpython-312.pyc deleted file mode 100644 index 13c9744..0000000 Binary files a/venv/Lib/site-packages/botocore/retries/__pycache__/throttling.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/retries/adaptive.py b/venv/Lib/site-packages/botocore/retries/adaptive.py deleted file mode 100644 index 5e638dd..0000000 --- a/venv/Lib/site-packages/botocore/retries/adaptive.py +++ /dev/null @@ -1,132 +0,0 @@ -import logging -import math -import threading - -from botocore.retries import bucket, standard, throttling - -logger = logging.getLogger(__name__) - - -def register_retry_handler(client): - clock = bucket.Clock() - rate_adjustor = throttling.CubicCalculator( - starting_max_rate=0, start_time=clock.current_time() - ) - token_bucket = bucket.TokenBucket(max_rate=1, clock=clock) - rate_clocker = RateClocker(clock) - throttling_detector = standard.ThrottlingErrorDetector( - retry_event_adapter=standard.RetryEventAdapter(), - ) - limiter = ClientRateLimiter( - rate_adjustor=rate_adjustor, - rate_clocker=rate_clocker, - token_bucket=token_bucket, - 
throttling_detector=throttling_detector, - clock=clock, - ) - client.meta.events.register( - 'before-send', - limiter.on_sending_request, - ) - client.meta.events.register( - 'needs-retry', - limiter.on_receiving_response, - ) - return limiter - - -class ClientRateLimiter: - _MAX_RATE_ADJUST_SCALE = 2.0 - - def __init__( - self, - rate_adjustor, - rate_clocker, - token_bucket, - throttling_detector, - clock, - ): - self._rate_adjustor = rate_adjustor - self._rate_clocker = rate_clocker - self._token_bucket = token_bucket - self._throttling_detector = throttling_detector - self._clock = clock - self._enabled = False - self._lock = threading.Lock() - - def on_sending_request(self, request, **kwargs): - if self._enabled: - self._token_bucket.acquire() - - # Hooked up to needs-retry. - def on_receiving_response(self, **kwargs): - measured_rate = self._rate_clocker.record() - timestamp = self._clock.current_time() - with self._lock: - if not self._throttling_detector.is_throttling_error(**kwargs): - new_rate = self._rate_adjustor.success_received(timestamp) - else: - if not self._enabled: - rate_to_use = measured_rate - else: - rate_to_use = min( - measured_rate, self._token_bucket.max_rate - ) - new_rate = self._rate_adjustor.error_received( - rate_to_use, timestamp - ) - logger.debug( - "Throttling response received, new send rate: %s " - "measured rate: %s, token bucket capacity " - "available: %s", - new_rate, - measured_rate, - self._token_bucket.available_capacity, - ) - self._enabled = True - self._token_bucket.max_rate = min( - new_rate, self._MAX_RATE_ADJUST_SCALE * measured_rate - ) - - -class RateClocker: - """Tracks the rate at which a client is sending a request.""" - - _DEFAULT_SMOOTHING = 0.8 - # Update the rate every _TIME_BUCKET_RANGE seconds. - _TIME_BUCKET_RANGE = 0.5 - - def __init__( - self, - clock, - smoothing=_DEFAULT_SMOOTHING, - time_bucket_range=_TIME_BUCKET_RANGE, - ): - self._clock = clock - self._measured_rate = 0 - self._smoothing = smoothing - self._last_bucket = math.floor(self._clock.current_time()) - self._time_bucket_scale = 1 / self._TIME_BUCKET_RANGE - self._count = 0 - self._lock = threading.Lock() - - def record(self, amount=1): - with self._lock: - t = self._clock.current_time() - bucket = ( - math.floor(t * self._time_bucket_scale) - / self._time_bucket_scale - ) - self._count += amount - if bucket > self._last_bucket: - current_rate = self._count / float(bucket - self._last_bucket) - self._measured_rate = (current_rate * self._smoothing) + ( - self._measured_rate * (1 - self._smoothing) - ) - self._count = 0 - self._last_bucket = bucket - return self._measured_rate - - @property - def measured_rate(self): - return self._measured_rate diff --git a/venv/Lib/site-packages/botocore/retries/base.py b/venv/Lib/site-packages/botocore/retries/base.py deleted file mode 100644 index 108bfed..0000000 --- a/venv/Lib/site-packages/botocore/retries/base.py +++ /dev/null @@ -1,26 +0,0 @@ -class BaseRetryBackoff: - def delay_amount(self, context): - """Calculate how long we should delay before retrying. - - :type context: RetryContext - - """ - raise NotImplementedError("delay_amount") - - -class BaseRetryableChecker: - """Base class for determining if a retry should happen. - - This base class checks for specific retryable conditions. - A single retryable checker doesn't necessarily indicate a retry - will happen. It's up to the ``RetryPolicy`` to use its - ``BaseRetryableCheckers`` to make the final decision on whether a retry - should happen. 
- """ - - def is_retryable(self, context): - """Returns True if retryable, False if not. - - :type context: RetryContext - """ - raise NotImplementedError("is_retryable") diff --git a/venv/Lib/site-packages/botocore/retries/bucket.py b/venv/Lib/site-packages/botocore/retries/bucket.py deleted file mode 100644 index 09d33c7..0000000 --- a/venv/Lib/site-packages/botocore/retries/bucket.py +++ /dev/null @@ -1,115 +0,0 @@ -"""This module implements token buckets used for client side throttling.""" - -import threading -import time - -from botocore.exceptions import CapacityNotAvailableError - - -class Clock: - def __init__(self): - pass - - def sleep(self, amount): - time.sleep(amount) - - def current_time(self): - return time.time() - - -class TokenBucket: - _MIN_RATE = 0.5 - - def __init__(self, max_rate, clock, min_rate=_MIN_RATE): - self._fill_rate = None - self._max_capacity = None - self._current_capacity = 0 - self._clock = clock - self._last_timestamp = None - self._min_rate = min_rate - self._lock = threading.Lock() - self._new_fill_rate_condition = threading.Condition(self._lock) - self.max_rate = max_rate - - @property - def max_rate(self): - return self._fill_rate - - @max_rate.setter - def max_rate(self, value): - with self._new_fill_rate_condition: - # Before we can change the rate we need to fill any pending - # tokens we might have based on the current rate. If we don't - # do this it means everything since the last recorded timestamp - # will accumulate at the rate we're about to set which isn't - # correct. - self._refill() - self._fill_rate = max(value, self._min_rate) - if value >= 1: - self._max_capacity = value - else: - self._max_capacity = 1 - # If we're scaling down, we also can't have a capacity that's - # more than our max_capacity. - self._current_capacity = min( - self._current_capacity, self._max_capacity - ) - self._new_fill_rate_condition.notify() - - @property - def max_capacity(self): - return self._max_capacity - - @property - def available_capacity(self): - return self._current_capacity - - def acquire(self, amount=1, block=True): - """Acquire token or return amount of time until next token available. - - If block is True, then this method will block until there's sufficient - capacity to acquire the desired amount. - - If block is False, then this method will return True is capacity - was successfully acquired, False otherwise. - - """ - with self._new_fill_rate_condition: - return self._acquire(amount=amount, block=block) - - def _acquire(self, amount, block): - self._refill() - if amount <= self._current_capacity: - self._current_capacity -= amount - return True - else: - if not block: - raise CapacityNotAvailableError() - # Not enough capacity. - sleep_amount = self._sleep_amount(amount) - while sleep_amount > 0: - # Until python3.2, wait() always returned None so we can't - # tell if a timeout occurred waiting on the cond var. - # Because of this we'll unconditionally call _refill(). - # The downside to this is that we were waken up via - # a notify(), we're calling unnecessarily calling _refill() an - # extra time. 
- self._new_fill_rate_condition.wait(sleep_amount) - self._refill() - sleep_amount = self._sleep_amount(amount) - self._current_capacity -= amount - return True - - def _sleep_amount(self, amount): - return (amount - self._current_capacity) / self._fill_rate - - def _refill(self): - timestamp = self._clock.current_time() - if self._last_timestamp is None: - self._last_timestamp = timestamp - return - current_capacity = self._current_capacity - fill_amount = (timestamp - self._last_timestamp) * self._fill_rate - new_capacity = min(self._max_capacity, current_capacity + fill_amount) - self._current_capacity = new_capacity - self._last_timestamp = timestamp diff --git a/venv/Lib/site-packages/botocore/retries/quota.py b/venv/Lib/site-packages/botocore/retries/quota.py deleted file mode 100644 index f039429..0000000 --- a/venv/Lib/site-packages/botocore/retries/quota.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Retry quota implementation.""" - -import threading - - -class RetryQuota: - INITIAL_CAPACITY = 500 - - def __init__(self, initial_capacity=INITIAL_CAPACITY, lock=None): - self._max_capacity = initial_capacity - self._available_capacity = initial_capacity - if lock is None: - lock = threading.Lock() - self._lock = lock - - def acquire(self, capacity_amount): - """Attempt to aquire a certain amount of capacity. - - If there's not sufficient amount of capacity available, ``False`` - is returned. Otherwise, ``True`` is returned, which indicates that - capacity was successfully allocated. - - """ - # The acquire() is only called when we encounter a retryable - # response so we aren't worried about locking the entire method. - with self._lock: - if capacity_amount > self._available_capacity: - return False - self._available_capacity -= capacity_amount - return True - - def release(self, capacity_amount): - """Release capacity back to the retry quota. - - The capacity being released will be truncated if necessary - to ensure the max capacity is never exceeded. - - """ - # Implementation note: The release() method is called as part - # of the "after-call" event, which means it gets invoked for - # every API call. In the common case where the request is - # successful and we're at full capacity, we can avoid locking. - # We can't exceed max capacity so there's no work we have to do. - if self._max_capacity == self._available_capacity: - return - with self._lock: - amount = min( - self._max_capacity - self._available_capacity, capacity_amount - ) - self._available_capacity += amount - - @property - def available_capacity(self): - return self._available_capacity diff --git a/venv/Lib/site-packages/botocore/retries/special.py b/venv/Lib/site-packages/botocore/retries/special.py deleted file mode 100644 index 9b78260..0000000 --- a/venv/Lib/site-packages/botocore/retries/special.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Special cased retries. - -These are additional retry cases we still have to handle from the legacy -retry handler. They don't make sense as part of the standard mode retry -module. Ideally we should be able to remove this module. - -""" - -import logging -from binascii import crc32 - -from botocore.retries.base import BaseRetryableChecker - -logger = logging.getLogger(__name__) - - -# TODO: This is an ideal candidate for the retryable trait once that's -# available. 
-class RetryIDPCommunicationError(BaseRetryableChecker): - _SERVICE_NAME = 'sts' - - def is_retryable(self, context): - service_name = context.operation_model.service_model.service_name - if service_name != self._SERVICE_NAME: - return False - error_code = context.get_error_code() - return error_code == 'IDPCommunicationError' - - -class RetryDDBChecksumError(BaseRetryableChecker): - _CHECKSUM_HEADER = 'x-amz-crc32' - _SERVICE_NAME = 'dynamodb' - - def is_retryable(self, context): - service_name = context.operation_model.service_model.service_name - if service_name != self._SERVICE_NAME: - return False - if context.http_response is None: - return False - checksum = context.http_response.headers.get(self._CHECKSUM_HEADER) - if checksum is None: - return False - actual_crc32 = crc32(context.http_response.content) & 0xFFFFFFFF - if actual_crc32 != int(checksum): - logger.debug( - "DynamoDB crc32 checksum does not match, " - "expected: %s, actual: %s", - checksum, - actual_crc32, - ) - return True diff --git a/venv/Lib/site-packages/botocore/retries/standard.py b/venv/Lib/site-packages/botocore/retries/standard.py deleted file mode 100644 index 8801530..0000000 --- a/venv/Lib/site-packages/botocore/retries/standard.py +++ /dev/null @@ -1,532 +0,0 @@ -"""Standard retry behavior. - -This contains the default standard retry behavior. -It provides consistent behavior with other AWS SDKs. - -The key base classes uses for retries: - - * ``BaseRetryableChecker`` - Use to check a specific condition that - indicates a retry should happen. This can include things like - max attempts, HTTP status code checks, error code checks etc. - * ``RetryBackoff`` - Use to determine how long we should backoff until - we retry a request. This is the class that will implement delay such - as exponential backoff. - * ``RetryPolicy`` - Main class that determines if a retry should - happen. It can combine data from a various BaseRetryableCheckers - to make a final call as to whether or not a retry should happen. - It then uses a ``BaseRetryBackoff`` to determine how long to delay. - * ``RetryHandler`` - The bridge between botocore's event system - used by endpoint.py to manage retries and the interfaces defined - in this module. - -This allows us to define an API that has minimal coupling to the event -based API used by botocore. - -""" - -import logging -import random - -from botocore.exceptions import ( - ConnectionError, - ConnectTimeoutError, - HTTPClientError, - ReadTimeoutError, -) -from botocore.retries import quota, special -from botocore.retries.base import BaseRetryableChecker, BaseRetryBackoff - -DEFAULT_MAX_ATTEMPTS = 3 -logger = logging.getLogger(__name__) - - -def register_retry_handler(client, max_attempts=DEFAULT_MAX_ATTEMPTS): - retry_quota = RetryQuotaChecker(quota.RetryQuota()) - - service_id = client.meta.service_model.service_id - service_event_name = service_id.hyphenize() - client.meta.events.register( - f'after-call.{service_event_name}', retry_quota.release_retry_quota - ) - - handler = RetryHandler( - retry_policy=RetryPolicy( - retry_checker=StandardRetryConditions(max_attempts=max_attempts), - retry_backoff=ExponentialBackoff(), - ), - retry_event_adapter=RetryEventAdapter(), - retry_quota=retry_quota, - ) - - unique_id = f'retry-config-{service_event_name}' - client.meta.events.register( - f'needs-retry.{service_event_name}', - handler.needs_retry, - unique_id=unique_id, - ) - return handler - - -class RetryHandler: - """Bridge between botocore's event system and this module. 
- - This class is intended to be hooked to botocore's event system - as an event handler. - """ - - def __init__(self, retry_policy, retry_event_adapter, retry_quota): - self._retry_policy = retry_policy - self._retry_event_adapter = retry_event_adapter - self._retry_quota = retry_quota - - def needs_retry(self, **kwargs): - """Connect as a handler to the needs-retry event.""" - retry_delay = None - context = self._retry_event_adapter.create_retry_context(**kwargs) - if self._retry_policy.should_retry(context): - # Before we can retry we need to ensure we have sufficient - # capacity in our retry quota. - if self._retry_quota.acquire_retry_quota(context): - retry_delay = self._retry_policy.compute_retry_delay(context) - logger.debug( - "Retry needed, retrying request after delay of: %s", - retry_delay, - ) - else: - logger.debug( - "Retry needed but retry quota reached, " - "not retrying request." - ) - else: - logger.debug("Not retrying request.") - self._retry_event_adapter.adapt_retry_response_from_context(context) - return retry_delay - - -class RetryEventAdapter: - """Adapter to existing retry interface used in the endpoints layer. - - This existing interface for determining if a retry needs to happen - is event based and used in ``botocore.endpoint``. The interface has - grown organically over the years and could use some cleanup. This - adapter converts that interface into the interface used by the - new retry strategies. - - """ - - def create_retry_context(self, **kwargs): - """Create context based on needs-retry kwargs.""" - response = kwargs['response'] - if response is None: - # If response is None it means that an exception was raised - # because we never received a response from the service. This - # could be something like a ConnectionError we get from our - # http layer. - http_response = None - parsed_response = None - else: - http_response, parsed_response = response - # This provides isolation between the kwargs emitted in the - # needs-retry event, and what this module uses to check for - # retries. - context = RetryContext( - attempt_number=kwargs['attempts'], - operation_model=kwargs['operation'], - http_response=http_response, - parsed_response=parsed_response, - caught_exception=kwargs['caught_exception'], - request_context=kwargs['request_dict']['context'], - ) - return context - - def adapt_retry_response_from_context(self, context): - """Modify response back to user back from context.""" - # This will mutate attributes that are returned back to the end - # user. We do it this way so that all the various retry classes - # don't mutate any input parameters from the needs-retry event. - metadata = context.get_retry_metadata() - if context.parsed_response is not None: - context.parsed_response.setdefault('ResponseMetadata', {}).update( - metadata - ) - - -# Implementation note: this is meant to encapsulate all the misc stuff -# that gets sent in the needs-retry event. This is mapped so that params -# are more clear and explicit. -class RetryContext: - """Normalize a response that we use to check if a retry should occur. - - This class smoothes over the different types of responses we may get - from a service including: - - * A modeled error response from the service that contains a service - code and error message. - * A raw HTTP response that doesn't contain service protocol specific - error keys. - * An exception received while attempting to retrieve a response. 
- This could be a ConnectionError we receive from our HTTP layer which - could represent that we weren't able to receive a response from - the service. - - This class guarantees that at least one of the above attributes will be - non None. - - This class is meant to provide a read-only view into the properties - associated with a possible retryable response. None of the properties - are meant to be modified directly. - - """ - - def __init__( - self, - attempt_number, - operation_model=None, - parsed_response=None, - http_response=None, - caught_exception=None, - request_context=None, - ): - # 1-based attempt number. - self.attempt_number = attempt_number - self.operation_model = operation_model - # This is the parsed response dictionary we get from parsing - # the HTTP response from the service. - self.parsed_response = parsed_response - # This is an instance of botocore.awsrequest.AWSResponse. - self.http_response = http_response - # This is a subclass of Exception that will be non None if - # an exception was raised when retrying to retrieve a response. - self.caught_exception = caught_exception - # This is the request context dictionary that's added to the - # request dict. This is used to story any additional state - # about the request. We use this for storing retry quota - # capacity. - if request_context is None: - request_context = {} - self.request_context = request_context - self._retry_metadata = {} - - # These are misc helper methods to avoid duplication in the various - # checkers. - def get_error_code(self): - """Check if there was a parsed response with an error code. - - If we could not find any error codes, ``None`` is returned. - - """ - if self.parsed_response is None: - return - error = self.parsed_response.get('Error', {}) - if not isinstance(error, dict): - return - return error.get('Code') - - def add_retry_metadata(self, **kwargs): - """Add key/value pairs to the retry metadata. - - This allows any objects during the retry process to add - metadata about any checks/validations that happened. - - This gets added to the response metadata in the retry handler. - - """ - self._retry_metadata.update(**kwargs) - - def get_retry_metadata(self): - return self._retry_metadata.copy() - - -class RetryPolicy: - def __init__(self, retry_checker, retry_backoff): - self._retry_checker = retry_checker - self._retry_backoff = retry_backoff - - def should_retry(self, context): - return self._retry_checker.is_retryable(context) - - def compute_retry_delay(self, context): - return self._retry_backoff.delay_amount(context) - - -class ExponentialBackoff(BaseRetryBackoff): - _BASE = 2 - _MAX_BACKOFF = 20 - - def __init__(self, max_backoff=20, random=random.random): - self._base = self._BASE - self._max_backoff = max_backoff - self._random = random - - def delay_amount(self, context): - """Calculates delay based on exponential backoff. - - This class implements truncated binary exponential backoff - with jitter:: - - t_i = rand(0, 1) * min(2 ** attempt, MAX_BACKOFF) - - where ``i`` is the request attempt (0 based). - - """ - # The context.attempt_number is a 1-based value, but we have - # to calculate the delay based on i based a 0-based value. We - # want the first delay to just be ``rand(0, 1)``. 
- return self._random() * min( - (self._base ** (context.attempt_number - 1)), - self._max_backoff, - ) - - -class MaxAttemptsChecker(BaseRetryableChecker): - def __init__(self, max_attempts): - self._max_attempts = max_attempts - - def is_retryable(self, context): - under_max_attempts = context.attempt_number < self._max_attempts - retries_context = context.request_context.get('retries') - if retries_context: - retries_context['max'] = max( - retries_context.get('max', 0), self._max_attempts - ) - if not under_max_attempts: - logger.debug("Max attempts of %s reached.", self._max_attempts) - context.add_retry_metadata(MaxAttemptsReached=True) - return under_max_attempts - - -class TransientRetryableChecker(BaseRetryableChecker): - _TRANSIENT_ERROR_CODES = [ - 'RequestTimeout', - 'RequestTimeoutException', - 'PriorRequestNotComplete', - ] - _TRANSIENT_STATUS_CODES = [500, 502, 503, 504] - _TRANSIENT_EXCEPTION_CLS = ( - ConnectionError, - HTTPClientError, - ) - - def __init__( - self, - transient_error_codes=None, - transient_status_codes=None, - transient_exception_cls=None, - ): - if transient_error_codes is None: - transient_error_codes = self._TRANSIENT_ERROR_CODES[:] - if transient_status_codes is None: - transient_status_codes = self._TRANSIENT_STATUS_CODES[:] - if transient_exception_cls is None: - transient_exception_cls = self._TRANSIENT_EXCEPTION_CLS - self._transient_error_codes = transient_error_codes - self._transient_status_codes = transient_status_codes - self._transient_exception_cls = transient_exception_cls - - def is_retryable(self, context): - if context.get_error_code() in self._transient_error_codes: - return True - if context.http_response is not None: - if ( - context.http_response.status_code - in self._transient_status_codes - ): - return True - if context.caught_exception is not None: - return isinstance( - context.caught_exception, self._transient_exception_cls - ) - return False - - -class ThrottledRetryableChecker(BaseRetryableChecker): - # This is the union of all error codes we've seen that represent - # a throttled error. - _THROTTLED_ERROR_CODES = [ - 'Throttling', - 'ThrottlingException', - 'ThrottledException', - 'RequestThrottledException', - 'TooManyRequestsException', - 'ProvisionedThroughputExceededException', - 'TransactionInProgressException', - 'RequestLimitExceeded', - 'BandwidthLimitExceeded', - 'LimitExceededException', - 'RequestThrottled', - 'SlowDown', - 'PriorRequestNotComplete', - 'EC2ThrottledException', - ] - - def __init__(self, throttled_error_codes=None): - if throttled_error_codes is None: - throttled_error_codes = self._THROTTLED_ERROR_CODES[:] - self._throttled_error_codes = throttled_error_codes - - def is_retryable(self, context): - # Only the error code from a parsed service response is used - # to determine if the response is a throttled response. - return context.get_error_code() in self._throttled_error_codes - - -class ModeledRetryableChecker(BaseRetryableChecker): - """Check if an error has been modeled as retryable.""" - - def __init__(self): - self._error_detector = ModeledRetryErrorDetector() - - def is_retryable(self, context): - error_code = context.get_error_code() - if error_code is None: - return False - return self._error_detector.detect_error_type(context) is not None - - -class ModeledRetryErrorDetector: - """Checks whether or not an error is a modeled retryable error.""" - - # There are return values from the detect_error_type() method. 
- TRANSIENT_ERROR = 'TRANSIENT_ERROR' - THROTTLING_ERROR = 'THROTTLING_ERROR' - # This class is lower level than ModeledRetryableChecker, which - # implements BaseRetryableChecker. This object allows you to distinguish - # between the various types of retryable errors. - - def detect_error_type(self, context): - """Detect the error type associated with an error code and model. - - This will either return: - - * ``self.TRANSIENT_ERROR`` - If the error is a transient error - * ``self.THROTTLING_ERROR`` - If the error is a throttling error - * ``None`` - If the error is neither type of error. - - """ - error_code = context.get_error_code() - op_model = context.operation_model - if op_model is None or not op_model.error_shapes: - return - for shape in op_model.error_shapes: - if shape.metadata.get('retryable') is not None: - # Check if this error code matches the shape. This can - # be either by name or by a modeled error code. - error_code_to_check = ( - shape.metadata.get('error', {}).get('code') or shape.name - ) - if error_code == error_code_to_check: - if shape.metadata['retryable'].get('throttling'): - return self.THROTTLING_ERROR - return self.TRANSIENT_ERROR - - -class ThrottlingErrorDetector: - def __init__(self, retry_event_adapter): - self._modeled_error_detector = ModeledRetryErrorDetector() - self._fixed_error_code_detector = ThrottledRetryableChecker() - self._retry_event_adapter = retry_event_adapter - - # This expects the kwargs from needs-retry to be passed through. - def is_throttling_error(self, **kwargs): - context = self._retry_event_adapter.create_retry_context(**kwargs) - if self._fixed_error_code_detector.is_retryable(context): - return True - error_type = self._modeled_error_detector.detect_error_type(context) - return error_type == self._modeled_error_detector.THROTTLING_ERROR - - -class StandardRetryConditions(BaseRetryableChecker): - """Concrete class that implements the standard retry policy checks. - - Specifically: - - not max_attempts and (transient or throttled or modeled_retry) - - """ - - def __init__(self, max_attempts=DEFAULT_MAX_ATTEMPTS): - # Note: This class is for convenience so you can have the - # standard retry condition in a single class. - self._max_attempts_checker = MaxAttemptsChecker(max_attempts) - self._additional_checkers = OrRetryChecker( - [ - TransientRetryableChecker(), - ThrottledRetryableChecker(), - ModeledRetryableChecker(), - OrRetryChecker( - [ - special.RetryIDPCommunicationError(), - special.RetryDDBChecksumError(), - ] - ), - ] - ) - - def is_retryable(self, context): - return self._max_attempts_checker.is_retryable( - context - ) and self._additional_checkers.is_retryable(context) - - -class OrRetryChecker(BaseRetryableChecker): - def __init__(self, checkers): - self._checkers = checkers - - def is_retryable(self, context): - return any(checker.is_retryable(context) for checker in self._checkers) - - -class RetryQuotaChecker: - _RETRY_COST = 5 - _NO_RETRY_INCREMENT = 1 - _TIMEOUT_RETRY_REQUEST = 10 - _TIMEOUT_EXCEPTIONS = (ConnectTimeoutError, ReadTimeoutError) - - # Implementation note: We're not making this a BaseRetryableChecker - # because this isn't just a check if we can retry. This also changes - # state so we have to careful when/how we call this. Making it - # a BaseRetryableChecker implies you can call .is_retryable(context) - # as many times as you want and not affect anything. 
- - def __init__(self, quota): - self._quota = quota - # This tracks the last amount - self._last_amount_acquired = None - - def acquire_retry_quota(self, context): - if self._is_timeout_error(context): - capacity_amount = self._TIMEOUT_RETRY_REQUEST - else: - capacity_amount = self._RETRY_COST - success = self._quota.acquire(capacity_amount) - if success: - # We add the capacity amount to the request context so we know - # how much to release later. The capacity amount can vary based - # on the error. - context.request_context['retry_quota_capacity'] = capacity_amount - return True - context.add_retry_metadata(RetryQuotaReached=True) - return False - - def _is_timeout_error(self, context): - return isinstance(context.caught_exception, self._TIMEOUT_EXCEPTIONS) - - # This is intended to be hooked up to ``after-call``. - def release_retry_quota(self, context, http_response, **kwargs): - # There's three possible options. - # 1. The HTTP response did not have a 2xx response. In that case we - # give no quota back. - # 2. The HTTP request was successful and was never retried. In - # that case we give _NO_RETRY_INCREMENT back. - # 3. The API call had retries, and we eventually receive an HTTP - # response with a 2xx status code. In that case we give back - # whatever quota was associated with the last acquisition. - if http_response is None: - return - status_code = http_response.status_code - if 200 <= status_code < 300: - if 'retry_quota_capacity' not in context: - self._quota.release(self._NO_RETRY_INCREMENT) - else: - capacity_amount = context['retry_quota_capacity'] - self._quota.release(capacity_amount) diff --git a/venv/Lib/site-packages/botocore/retries/throttling.py b/venv/Lib/site-packages/botocore/retries/throttling.py deleted file mode 100644 index 34ab417..0000000 --- a/venv/Lib/site-packages/botocore/retries/throttling.py +++ /dev/null @@ -1,55 +0,0 @@ -from collections import namedtuple - -CubicParams = namedtuple('CubicParams', ['w_max', 'k', 'last_fail']) - - -class CubicCalculator: - _SCALE_CONSTANT = 0.4 - _BETA = 0.7 - - def __init__( - self, - starting_max_rate, - start_time, - scale_constant=_SCALE_CONSTANT, - beta=_BETA, - ): - self._w_max = starting_max_rate - self._scale_constant = scale_constant - self._beta = beta - self._k = self._calculate_zero_point() - self._last_fail = start_time - - def _calculate_zero_point(self): - scaled_value = (self._w_max * (1 - self._beta)) / self._scale_constant - k = scaled_value ** (1 / 3.0) - return k - - def success_received(self, timestamp): - dt = timestamp - self._last_fail - new_rate = self._scale_constant * (dt - self._k) ** 3 + self._w_max - return new_rate - - def error_received(self, current_rate, timestamp): - # Consider not having this be the current measured rate. - - # We have a new max rate, which is the current rate we were sending - # at when we received an error response. - self._w_max = current_rate - self._k = self._calculate_zero_point() - self._last_fail = timestamp - return current_rate * self._beta - - def get_params_snapshot(self): - """Return a read-only object of the current cubic parameters. - - These parameters are intended to be used for debug/troubleshooting - purposes. These object is a read-only snapshot and cannot be used - to modify the behavior of the CUBIC calculations. - - New parameters may be added to this object in the future. 
- - """ - return CubicParams( - w_max=self._w_max, k=self._k, last_fail=self._last_fail - ) diff --git a/venv/Lib/site-packages/botocore/retryhandler.py b/venv/Lib/site-packages/botocore/retryhandler.py deleted file mode 100644 index c2eed1d..0000000 --- a/venv/Lib/site-packages/botocore/retryhandler.py +++ /dev/null @@ -1,416 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import functools -import logging -import random -from binascii import crc32 - -from botocore.exceptions import ( - ChecksumError, - ConnectionClosedError, - ConnectionError, - EndpointConnectionError, - ReadTimeoutError, -) - -logger = logging.getLogger(__name__) -# The only supported error for now is GENERAL_CONNECTION_ERROR -# which maps to requests generic ConnectionError. If we're able -# to get more specific exceptions from requests we can update -# this mapping with more specific exceptions. -EXCEPTION_MAP = { - 'GENERAL_CONNECTION_ERROR': [ - ConnectionError, - ConnectionClosedError, - ReadTimeoutError, - EndpointConnectionError, - ], -} - - -def delay_exponential(base, growth_factor, attempts): - """Calculate time to sleep based on exponential function. - - The format is:: - - base * growth_factor ^ (attempts - 1) - - If ``base`` is set to 'rand' then a random number between - 0 and 1 will be used as the base. - Base must be greater than 0, otherwise a ValueError will be - raised. - - """ - if base == 'rand': - base = random.random() - elif base <= 0: - raise ValueError( - f"The 'base' param must be greater than 0, got: {base}" - ) - time_to_sleep = base * (growth_factor ** (attempts - 1)) - return time_to_sleep - - -def create_exponential_delay_function(base, growth_factor): - """Create an exponential delay function based on the attempts. - - This is used so that you only have to pass it the attempts - parameter to calculate the delay. - - """ - return functools.partial( - delay_exponential, base=base, growth_factor=growth_factor - ) - - -def create_retry_handler(config, operation_name=None): - checker = create_checker_from_retry_config( - config, operation_name=operation_name - ) - action = create_retry_action_from_config( - config, operation_name=operation_name - ) - return RetryHandler(checker=checker, action=action) - - -def create_retry_action_from_config(config, operation_name=None): - # The spec has the possibility of supporting per policy - # actions, but right now, we assume this comes from the - # default section, which means that delay functions apply - # for every policy in the retry config (per service). 
- delay_config = config['__default__']['delay'] - if delay_config['type'] == 'exponential': - return create_exponential_delay_function( - base=delay_config['base'], - growth_factor=delay_config['growth_factor'], - ) - - -def create_checker_from_retry_config(config, operation_name=None): - checkers = [] - max_attempts = None - retryable_exceptions = [] - if '__default__' in config: - policies = config['__default__'].get('policies', []) - max_attempts = config['__default__']['max_attempts'] - for key in policies: - current_config = policies[key] - checkers.append(_create_single_checker(current_config)) - retry_exception = _extract_retryable_exception(current_config) - if retry_exception is not None: - retryable_exceptions.extend(retry_exception) - if operation_name is not None and config.get(operation_name) is not None: - operation_policies = config[operation_name]['policies'] - for key in operation_policies: - checkers.append(_create_single_checker(operation_policies[key])) - retry_exception = _extract_retryable_exception( - operation_policies[key] - ) - if retry_exception is not None: - retryable_exceptions.extend(retry_exception) - if len(checkers) == 1: - # Don't need to use a MultiChecker - return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts) - else: - multi_checker = MultiChecker(checkers) - return MaxAttemptsDecorator( - multi_checker, - max_attempts=max_attempts, - retryable_exceptions=tuple(retryable_exceptions), - ) - - -def _create_single_checker(config): - if 'response' in config['applies_when']: - return _create_single_response_checker( - config['applies_when']['response'] - ) - elif 'socket_errors' in config['applies_when']: - return ExceptionRaiser() - - -def _create_single_response_checker(response): - if 'service_error_code' in response: - checker = ServiceErrorCodeChecker( - status_code=response['http_status_code'], - error_code=response['service_error_code'], - ) - elif 'http_status_code' in response: - checker = HTTPStatusCodeChecker( - status_code=response['http_status_code'] - ) - elif 'crc32body' in response: - checker = CRC32Checker(header=response['crc32body']) - else: - # TODO: send a signal. - raise ValueError("Unknown retry policy") - return checker - - -def _extract_retryable_exception(config): - applies_when = config['applies_when'] - if 'crc32body' in applies_when.get('response', {}): - return [ChecksumError] - elif 'socket_errors' in applies_when: - exceptions = [] - for name in applies_when['socket_errors']: - exceptions.extend(EXCEPTION_MAP[name]) - return exceptions - - -class RetryHandler: - """Retry handler. - - The retry handler takes two params, ``checker`` object - and an ``action`` object. - - The ``checker`` object must be a callable object and based on a response - and an attempt number, determines whether or not sufficient criteria for - a retry has been met. If this is the case then the ``action`` object - (which also is a callable) determines what needs to happen in the event - of a retry. - - """ - - def __init__(self, checker, action): - self._checker = checker - self._action = action - - def __call__(self, attempts, response, caught_exception, **kwargs): - """Handler for a retry. - - Intended to be hooked up to an event handler (hence the **kwargs), - this will process retries appropriately. 
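The structure of the legacy retry configuration that create_retry_handler and create_checker_from_retry_config expect can be read off the parsing code above; a hand-written minimal example (field values are illustrative, not taken from botocore's shipped retry data):

retry_config = {
    '__default__': {
        'max_attempts': 5,
        'delay': {
            'type': 'exponential',
            'base': 'rand',          # 'rand' means a random base in [0, 1)
            'growth_factor': 2,
        },
        'policies': {
            'general_socket_errors': {
                'applies_when': {'socket_errors': ['GENERAL_CONNECTION_ERROR']},
            },
            'throttling': {
                'applies_when': {
                    'response': {
                        'http_status_code': 400,
                        'service_error_code': 'Throttling',
                    },
                },
            },
        },
    },
}
# create_retry_handler(retry_config) would combine these checks under a
# MaxAttemptsDecorator and pair them with an exponential delay function.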
- - """ - checker_kwargs = { - 'attempt_number': attempts, - 'response': response, - 'caught_exception': caught_exception, - } - if isinstance(self._checker, MaxAttemptsDecorator): - retries_context = kwargs['request_dict']['context'].get('retries') - checker_kwargs.update({'retries_context': retries_context}) - - if self._checker(**checker_kwargs): - result = self._action(attempts=attempts) - logger.debug("Retry needed, action of: %s", result) - return result - logger.debug("No retry needed.") - - -class BaseChecker: - """Base class for retry checkers. - - Each class is responsible for checking a single criteria that determines - whether or not a retry should not happen. - - """ - - def __call__(self, attempt_number, response, caught_exception): - """Determine if retry criteria matches. - - Note that either ``response`` is not None and ``caught_exception`` is - None or ``response`` is None and ``caught_exception`` is not None. - - :type attempt_number: int - :param attempt_number: The total number of times we've attempted - to send the request. - - :param response: The HTTP response (if one was received). - - :type caught_exception: Exception - :param caught_exception: Any exception that was caught while trying to - send the HTTP response. - - :return: True, if the retry criteria matches (and therefore a retry - should occur. False if the criteria does not match. - - """ - # The default implementation allows subclasses to not have to check - # whether or not response is None or not. - if response is not None: - return self._check_response(attempt_number, response) - elif caught_exception is not None: - return self._check_caught_exception( - attempt_number, caught_exception - ) - else: - raise ValueError("Both response and caught_exception are None.") - - def _check_response(self, attempt_number, response): - pass - - def _check_caught_exception(self, attempt_number, caught_exception): - pass - - -class MaxAttemptsDecorator(BaseChecker): - """Allow retries up to a maximum number of attempts. - - This will pass through calls to the decorated retry checker, provided - that the number of attempts does not exceed max_attempts. It will - also catch any retryable_exceptions passed in. Once max_attempts has - been exceeded, then False will be returned or the retryable_exceptions - that was previously being caught will be raised. 
- - """ - - def __init__(self, checker, max_attempts, retryable_exceptions=None): - self._checker = checker - self._max_attempts = max_attempts - self._retryable_exceptions = retryable_exceptions - - def __call__( - self, attempt_number, response, caught_exception, retries_context - ): - if retries_context: - retries_context['max'] = max( - retries_context.get('max', 0), self._max_attempts - ) - - should_retry = self._should_retry( - attempt_number, response, caught_exception - ) - if should_retry: - if attempt_number >= self._max_attempts: - # explicitly set MaxAttemptsReached - if response is not None and 'ResponseMetadata' in response[1]: - response[1]['ResponseMetadata']['MaxAttemptsReached'] = ( - True - ) - logger.debug( - "Reached the maximum number of retry attempts: %s", - attempt_number, - ) - return False - else: - return should_retry - else: - return False - - def _should_retry(self, attempt_number, response, caught_exception): - if self._retryable_exceptions and attempt_number < self._max_attempts: - try: - return self._checker( - attempt_number, response, caught_exception - ) - except self._retryable_exceptions as e: - logger.debug( - "retry needed, retryable exception caught: %s", - e, - exc_info=True, - ) - return True - else: - # If we've exceeded the max attempts we just let the exception - # propagate if one has occurred. - return self._checker(attempt_number, response, caught_exception) - - -class HTTPStatusCodeChecker(BaseChecker): - def __init__(self, status_code): - self._status_code = status_code - - def _check_response(self, attempt_number, response): - if response[0].status_code == self._status_code: - logger.debug( - "retry needed: retryable HTTP status code received: %s", - self._status_code, - ) - return True - else: - return False - - -class ServiceErrorCodeChecker(BaseChecker): - def __init__(self, status_code, error_code): - self._status_code = status_code - self._error_code = error_code - - def _check_response(self, attempt_number, response): - if response[0].status_code == self._status_code: - actual_error_code = response[1].get('Error', {}).get('Code') - if actual_error_code == self._error_code: - logger.debug( - "retry needed: matching HTTP status and error code seen: " - "%s, %s", - self._status_code, - self._error_code, - ) - return True - return False - - -class MultiChecker(BaseChecker): - def __init__(self, checkers): - self._checkers = checkers - - def __call__(self, attempt_number, response, caught_exception): - for checker in self._checkers: - checker_response = checker( - attempt_number, response, caught_exception - ) - if checker_response: - return checker_response - return False - - -class CRC32Checker(BaseChecker): - def __init__(self, header): - # The header where the expected crc32 is located. - self._header_name = header - - def _check_response(self, attempt_number, response): - http_response = response[0] - expected_crc = http_response.headers.get(self._header_name) - if expected_crc is None: - logger.debug( - "crc32 check skipped, the %s header is not " - "in the http response.", - self._header_name, - ) - else: - actual_crc32 = crc32(response[0].content) & 0xFFFFFFFF - if not actual_crc32 == int(expected_crc): - logger.debug( - "retry needed: crc32 check failed, expected != actual: " - "%s != %s", - int(expected_crc), - actual_crc32, - ) - raise ChecksumError( - checksum_type='crc32', - expected_checksum=int(expected_crc), - actual_checksum=actual_crc32, - ) - - -class ExceptionRaiser(BaseChecker): - """Raise any caught exceptions. 
- - This class will raise any non None ``caught_exception``. - - """ - - def _check_caught_exception(self, attempt_number, caught_exception): - # This is implementation specific, but this class is useful by - # coordinating with the MaxAttemptsDecorator. - # The MaxAttemptsDecorator has a list of exceptions it should catch - # and retry, but something needs to come along and actually raise the - # caught_exception. That's what this class is being used for. If - # the MaxAttemptsDecorator is not interested in retrying the exception - # then this exception just propagates out past the retry code. - raise caught_exception diff --git a/venv/Lib/site-packages/botocore/serialize.py b/venv/Lib/site-packages/botocore/serialize.py deleted file mode 100644 index 9f8d760..0000000 --- a/venv/Lib/site-packages/botocore/serialize.py +++ /dev/null @@ -1,1277 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -"""Protocol input serializes. - -This module contains classes that implement input serialization -for the various AWS protocol types. - -These classes essentially take user input, a model object that -represents what the expected input should look like, and it returns -a dictionary that contains the various parts of a request. A few -high level design decisions: - - -* Each protocol type maps to a separate class, all inherit from - ``Serializer``. -* The return value for ``serialize_to_request`` (the main entry - point) returns a dictionary that represents a request. This - will have keys like ``url_path``, ``query_string``, etc. This - is done so that it's a) easy to test and b) not tied to a - particular HTTP library. See the ``serialize_to_request`` docstring - for more details. - -Unicode -------- - -The input to the serializers should be text (str/unicode), not bytes, -with the exception of blob types. Those are assumed to be binary, -and if a str/unicode type is passed in, it will be encoded as utf-8. -""" - -import base64 -import calendar -import datetime -import decimal -import json -import math -import re -import struct -from xml.etree import ElementTree - -from botocore import validate -from botocore.compat import formatdate -from botocore.exceptions import ParamValidationError -from botocore.useragent import register_feature_id -from botocore.utils import ( - has_header, - is_json_value_header, - parse_to_aware_datetime, - percent_encode, -) - -# From the spec, the default timestamp format if not specified is iso8601. -DEFAULT_TIMESTAMP_FORMAT = 'iso8601' -ISO8601 = '%Y-%m-%dT%H:%M:%SZ' -# Same as ISO8601, but with microsecond precision. 
-ISO8601_MICRO = '%Y-%m-%dT%H:%M:%S.%fZ' -HOST_PREFIX_RE = re.compile(r"^[A-Za-z0-9\.\-]+$") - -TIMESTAMP_PRECISION_DEFAULT = 'default' -TIMESTAMP_PRECISION_MILLISECOND = 'millisecond' -TIMESTAMP_PRECISION_OPTIONS = ( - TIMESTAMP_PRECISION_DEFAULT, - TIMESTAMP_PRECISION_MILLISECOND, -) - - -def create_serializer( - protocol_name, - include_validation=True, - timestamp_precision=TIMESTAMP_PRECISION_DEFAULT, -): - """Create a serializer for the given protocol. - :param protocol_name: The protocol name to create a serializer for. - :type protocol_name: str - :param include_validation: Whether to include parameter validation. - :type include_validation: bool - :param timestamp_precision: Timestamp precision level. - - 'default': Microseconds for ISO timestamps, seconds for Unix and RFC - - 'millisecond': Millisecond precision (ISO/Unix), seconds for RFC - :type timestamp_precision: str - :return: A serializer instance for the given protocol. - """ - # TODO: Unknown protocols. - serializer = SERIALIZERS[protocol_name]( - timestamp_precision=timestamp_precision - ) - if include_validation: - validator = validate.ParamValidator() - serializer = validate.ParamValidationDecorator(validator, serializer) - return serializer - - -class Serializer: - DEFAULT_METHOD = 'POST' - # Clients can change this to a different MutableMapping - # (i.e OrderedDict) if they want. This is used in the - # compliance test to match the hash ordering used in the - # tests. - MAP_TYPE = dict - DEFAULT_ENCODING = 'utf-8' - - def __init__(self, timestamp_precision=TIMESTAMP_PRECISION_DEFAULT): - if timestamp_precision not in TIMESTAMP_PRECISION_OPTIONS: - raise ValueError( - f"Invalid timestamp precision found while creating serializer: {timestamp_precision}" - ) - self._timestamp_precision = timestamp_precision - - def serialize_to_request(self, parameters, operation_model): - """Serialize parameters into an HTTP request. - - This method takes user provided parameters and a shape - model and serializes the parameters to an HTTP request. - More specifically, this method returns information about - parts of the HTTP request, it does not enforce a particular - interface or standard for an HTTP request. It instead returns - a dictionary of: - - * 'url_path' - * 'host_prefix' - * 'query_string' - * 'headers' - * 'body' - * 'method' - - It is then up to consumers to decide how to map this to a Request - object of their HTTP library of choice. Below is an example - return value:: - - {'body': {'Action': 'OperationName', - 'Bar': 'val2', - 'Foo': 'val1', - 'Version': '2014-01-01'}, - 'headers': {}, - 'method': 'POST', - 'query_string': '', - 'host_prefix': 'value.', - 'url_path': '/'} - - :param parameters: The dictionary input parameters for the - operation (i.e the user input). - :param operation_model: The OperationModel object that describes - the operation. - """ - raise NotImplementedError("serialize_to_request") - - def _create_default_request(self): - # Creates a boilerplate default request dict that subclasses - # can use as a starting point. - serialized = { - 'url_path': '/', - 'query_string': '', - 'method': self.DEFAULT_METHOD, - 'headers': {}, - # An empty body is represented as an empty byte string. - 'body': b'', - } - return serialized - - # Some extra utility methods subclasses can use. 
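The two timestamp_precision levels described in the create_serializer docstring differ only in how ISO 8601 and Unix timestamps are rendered; a quick illustration with an arbitrary datetime (mirroring ISO8601_MICRO above and the millisecond truncation applied by the serializer's timestamp helpers):

import datetime

dt = datetime.datetime(2024, 1, 2, 3, 4, 5, 678901)

# 'default' precision keeps microseconds for ISO 8601 timestamps.
print(dt.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
# 2024-01-02T03:04:05.678901Z

# 'millisecond' precision truncates to three fractional digits.
print(dt.strftime('%Y-%m-%dT%H:%M:%S') + f'.{dt.microsecond // 1000:03d}Z')
# 2024-01-02T03:04:05.678Z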
- - def _timestamp_iso8601(self, value): - """Return ISO8601 timestamp with precision based on timestamp_precision.""" - # Smithy's standard is milliseconds, so we truncate the timestamp if the millisecond flag is set to true - if self._timestamp_precision == TIMESTAMP_PRECISION_MILLISECOND: - milliseconds = value.microsecond // 1000 - return ( - value.strftime('%Y-%m-%dT%H:%M:%S') + f'.{milliseconds:03d}Z' - ) - else: - # Otherwise we continue supporting microseconds in iso8601 for legacy reasons - if value.microsecond > 0: - timestamp_format = ISO8601_MICRO - else: - timestamp_format = ISO8601 - return value.strftime(timestamp_format) - - def _timestamp_unixtimestamp(self, value): - """Return unix timestamp with precision based on timestamp_precision.""" - # As of the addition of the precision flag, we support millisecond precision here as well - if self._timestamp_precision == TIMESTAMP_PRECISION_MILLISECOND: - base_timestamp = calendar.timegm(value.timetuple()) - milliseconds = (value.microsecond // 1000) / 1000.0 - return base_timestamp + milliseconds - else: - return int(calendar.timegm(value.timetuple())) - - def _timestamp_rfc822(self, value): - """Return RFC822 timestamp (always second precision - RFC doesn't support sub-second).""" - # RFC 2822 doesn't support sub-second precision, so always use second precision format - if isinstance(value, datetime.datetime): - value = int(calendar.timegm(value.timetuple())) - return formatdate(value, usegmt=True) - - def _convert_timestamp_to_str(self, value, timestamp_format=None): - if timestamp_format is None: - timestamp_format = self.TIMESTAMP_FORMAT - timestamp_format = timestamp_format.lower() - datetime_obj = parse_to_aware_datetime(value) - converter = getattr(self, f'_timestamp_{timestamp_format}') - final_value = converter(datetime_obj) - return final_value - - def _get_serialized_name(self, shape, default_name): - # Returns the serialized name for the shape if it exists. - # Otherwise it will return the passed in default_name. - return shape.serialization.get('name', default_name) - - def _get_base64(self, value): - # Returns the base64-encoded version of value, handling - # both strings and bytes. The returned value is a string - # via the default encoding. - if isinstance(value, str): - value = value.encode(self.DEFAULT_ENCODING) - return base64.b64encode(value).strip().decode(self.DEFAULT_ENCODING) - - def _expand_host_prefix(self, parameters, operation_model): - operation_endpoint = operation_model.endpoint - if ( - operation_endpoint is None - or 'hostPrefix' not in operation_endpoint - ): - return None - - host_prefix_expression = operation_endpoint['hostPrefix'] - if operation_model.input_shape is None: - return host_prefix_expression - input_members = operation_model.input_shape.members - host_labels = [ - member - for member, shape in input_members.items() - if shape.serialization.get('hostLabel') - ] - format_kwargs = {} - bad_labels = [] - for name in host_labels: - param = parameters[name] - if not HOST_PREFIX_RE.match(param): - bad_labels.append(name) - format_kwargs[name] = param - if bad_labels: - raise ParamValidationError( - report=( - f"Invalid value for parameter(s): {', '.join(bad_labels)}. " - "Must contain only alphanumeric characters, hyphen, " - "or period." 
- ) - ) - return host_prefix_expression.format(**format_kwargs) - - def _is_shape_flattened(self, shape): - return shape.serialization.get('flattened') - - def _handle_float(self, value): - if value == float("Infinity"): - value = "Infinity" - elif value == float("-Infinity"): - value = "-Infinity" - elif math.isnan(value): - value = "NaN" - return value - - def _handle_query_compatible_trait(self, operation_model, serialized): - if operation_model.service_model.is_query_compatible: - serialized['headers']['x-amzn-query-mode'] = 'true' - - -class QuerySerializer(Serializer): - TIMESTAMP_FORMAT = 'iso8601' - - def serialize_to_request(self, parameters, operation_model): - shape = operation_model.input_shape - serialized = self._create_default_request() - serialized['method'] = operation_model.http.get( - 'method', self.DEFAULT_METHOD - ) - serialized['headers'] = { - 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8' - } - # The query serializer only deals with body params so - # that's what we hand off the _serialize_* methods. - body_params = self.MAP_TYPE() - body_params['Action'] = operation_model.name - body_params['Version'] = operation_model.metadata['apiVersion'] - if shape is not None: - self._serialize(body_params, parameters, shape) - serialized['body'] = body_params - - host_prefix = self._expand_host_prefix(parameters, operation_model) - if host_prefix is not None: - serialized['host_prefix'] = host_prefix - - return serialized - - def _serialize(self, serialized, value, shape, prefix=''): - # serialized: The dict that is incrementally added to with the - # final serialized parameters. - # value: The current user input value. - # shape: The shape object that describes the structure of the - # input. - # prefix: The incrementally built up prefix for the serialized - # key (i.e Foo.bar.members.1). - method = getattr( - self, - f'_serialize_type_{shape.type_name}', - self._default_serialize, - ) - method(serialized, value, shape, prefix=prefix) - - def _serialize_type_structure(self, serialized, value, shape, prefix=''): - members = shape.members - for key, value in value.items(): - member_shape = members[key] - member_prefix = self._get_serialized_name(member_shape, key) - if prefix: - member_prefix = f'{prefix}.{member_prefix}' - self._serialize(serialized, value, member_shape, member_prefix) - - def _serialize_type_list(self, serialized, value, shape, prefix=''): - if not value: - # The query protocol serializes empty lists. - serialized[prefix] = '' - return - if self._is_shape_flattened(shape): - list_prefix = prefix - if shape.member.serialization.get('name'): - name = self._get_serialized_name(shape.member, default_name='') - # Replace '.Original' with '.{name}'. 
- list_prefix = '.'.join(prefix.split('.')[:-1] + [name]) - else: - list_name = shape.member.serialization.get('name', 'member') - list_prefix = f'{prefix}.{list_name}' - for i, element in enumerate(value, 1): - element_prefix = f'{list_prefix}.{i}' - element_shape = shape.member - self._serialize(serialized, element, element_shape, element_prefix) - - def _serialize_type_map(self, serialized, value, shape, prefix=''): - if self._is_shape_flattened(shape): - full_prefix = prefix - else: - full_prefix = f'{prefix}.entry' - template = full_prefix + '.{i}.{suffix}' - key_shape = shape.key - value_shape = shape.value - key_suffix = self._get_serialized_name(key_shape, default_name='key') - value_suffix = self._get_serialized_name(value_shape, 'value') - for i, key in enumerate(value, 1): - key_prefix = template.format(i=i, suffix=key_suffix) - value_prefix = template.format(i=i, suffix=value_suffix) - self._serialize(serialized, key, key_shape, key_prefix) - self._serialize(serialized, value[key], value_shape, value_prefix) - - def _serialize_type_blob(self, serialized, value, shape, prefix=''): - # Blob args must be base64 encoded. - serialized[prefix] = self._get_base64(value) - - def _serialize_type_timestamp(self, serialized, value, shape, prefix=''): - serialized[prefix] = self._convert_timestamp_to_str( - value, shape.serialization.get('timestampFormat') - ) - - def _serialize_type_boolean(self, serialized, value, shape, prefix=''): - if value: - serialized[prefix] = 'true' - else: - serialized[prefix] = 'false' - - def _default_serialize(self, serialized, value, shape, prefix=''): - serialized[prefix] = value - - def _serialize_type_float(self, serialized, value, shape, prefix=''): - serialized[prefix] = self._handle_float(value) - - def _serialize_type_double(self, serialized, value, shape, prefix=''): - self._serialize_type_float(serialized, value, shape, prefix) - - -class EC2Serializer(QuerySerializer): - """EC2 specific customizations to the query protocol serializers. - - The EC2 model is almost, but not exactly, similar to the query protocol - serializer. This class encapsulates those differences. The model - will have be marked with a ``protocol`` of ``ec2``, so you don't need - to worry about wiring this class up correctly. - - """ - - def _get_serialized_name(self, shape, default_name): - # Returns the serialized name for the shape if it exists. - # Otherwise it will return the passed in capitalized default_name. - if 'queryName' in shape.serialization: - return shape.serialization['queryName'] - elif 'name' in shape.serialization: - # A locationName is always capitalized - # on input for the ec2 protocol. 
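To make the query-protocol prefix scheme above concrete: a non-flattened list member named Values and a non-flattened map named Tags end up as flat body keys with 1-based indices (shape and member names invented for illustration):

# {'Values': ['a', 'b'], 'Tags': {'k': 'v'}} would serialize to:
body_params = {
    'Action': 'OperationName',
    'Version': '2014-01-01',
    'Values.member.1': 'a',
    'Values.member.2': 'b',
    'Tags.entry.1.key': 'k',
    'Tags.entry.1.value': 'v',
}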
- name = shape.serialization['name'] - return name[0].upper() + name[1:] - else: - return default_name - - def _serialize_type_list(self, serialized, value, shape, prefix=''): - for i, element in enumerate(value, 1): - element_prefix = f'{prefix}.{i}' - element_shape = shape.member - self._serialize(serialized, element, element_shape, element_prefix) - - -class JSONSerializer(Serializer): - TIMESTAMP_FORMAT = 'unixtimestamp' - - def serialize_to_request(self, parameters, operation_model): - target = '{}.{}'.format( - operation_model.metadata['targetPrefix'], - operation_model.name, - ) - json_version = operation_model.metadata['jsonVersion'] - serialized = self._create_default_request() - serialized['method'] = operation_model.http.get( - 'method', self.DEFAULT_METHOD - ) - serialized['headers'] = { - 'X-Amz-Target': target, - 'Content-Type': f'application/x-amz-json-{json_version}', - } - self._handle_query_compatible_trait(operation_model, serialized) - - body = self.MAP_TYPE() - input_shape = operation_model.input_shape - if input_shape is not None: - self._serialize(body, parameters, input_shape) - serialized['body'] = json.dumps(body).encode(self.DEFAULT_ENCODING) - - host_prefix = self._expand_host_prefix(parameters, operation_model) - if host_prefix is not None: - serialized['host_prefix'] = host_prefix - - return serialized - - def _serialize(self, serialized, value, shape, key=None): - method = getattr( - self, - f'_serialize_type_{shape.type_name}', - self._default_serialize, - ) - method(serialized, value, shape, key) - - def _serialize_type_structure(self, serialized, value, shape, key): - if shape.is_document_type: - serialized[key] = value - else: - if key is not None: - # If a key is provided, this is a result of a recursive - # call so we need to add a new child dict as the value - # of the passed in serialized dict. We'll then add - # all the structure members as key/vals in the new serialized - # dictionary we just created. - new_serialized = self.MAP_TYPE() - serialized[key] = new_serialized - serialized = new_serialized - members = shape.members - for member_key, member_value in value.items(): - member_shape = members[member_key] - if 'name' in member_shape.serialization: - member_key = member_shape.serialization['name'] - self._serialize( - serialized, member_value, member_shape, member_key - ) - - def _serialize_type_map(self, serialized, value, shape, key): - map_obj = self.MAP_TYPE() - serialized[key] = map_obj - for sub_key, sub_value in value.items(): - self._serialize(map_obj, sub_value, shape.value, sub_key) - - def _serialize_type_list(self, serialized, value, shape, key): - list_obj = [] - serialized[key] = list_obj - for list_item in value: - wrapper = {} - # The JSON list serialization is the only case where we aren't - # setting a key on a dict. We handle this by using - # a __current__ key on a wrapper dict to serialize each - # list item before appending it to the serialized list. 
- self._serialize(wrapper, list_item, shape.member, "__current__") - list_obj.append(wrapper["__current__"]) - - def _default_serialize(self, serialized, value, shape, key): - serialized[key] = value - - def _serialize_type_timestamp(self, serialized, value, shape, key): - serialized[key] = self._convert_timestamp_to_str( - value, shape.serialization.get('timestampFormat') - ) - - def _serialize_type_blob(self, serialized, value, shape, key): - serialized[key] = self._get_base64(value) - - def _serialize_type_float(self, serialized, value, shape, prefix=''): - if isinstance(value, decimal.Decimal): - value = float(value) - serialized[prefix] = self._handle_float(value) - - def _serialize_type_double(self, serialized, value, shape, prefix=''): - self._serialize_type_float(serialized, value, shape, prefix) - - -class CBORSerializer(Serializer): - UNSIGNED_INT_MAJOR_TYPE = 0 - NEGATIVE_INT_MAJOR_TYPE = 1 - BLOB_MAJOR_TYPE = 2 - STRING_MAJOR_TYPE = 3 - LIST_MAJOR_TYPE = 4 - MAP_MAJOR_TYPE = 5 - TAG_MAJOR_TYPE = 6 - FLOAT_AND_SIMPLE_MAJOR_TYPE = 7 - - def _serialize_data_item(self, serialized, value, shape, key=None): - method = getattr(self, f'_serialize_type_{shape.type_name}') - if method is None: - raise ValueError( - f"Unrecognized C2J type: {shape.type_name}, unable to " - f"serialize request" - ) - method(serialized, value, shape, key) - - def _serialize_type_integer(self, serialized, value, shape, key): - if value >= 0: - major_type = self.UNSIGNED_INT_MAJOR_TYPE - else: - major_type = self.NEGATIVE_INT_MAJOR_TYPE - # The only differences in serializing negative and positive integers is - # that for negative, we set the major type to 1 and set the value to -1 - # minus the value - value = -1 - value - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - value - ) - initial_byte = self._get_initial_byte(major_type, additional_info) - if num_bytes == 0: - serialized.extend(initial_byte) - else: - serialized.extend(initial_byte + value.to_bytes(num_bytes, "big")) - - def _serialize_type_long(self, serialized, value, shape, key): - self._serialize_type_integer(serialized, value, shape, key) - - def _serialize_type_blob(self, serialized, value, shape, key): - if isinstance(value, str): - value = value.encode('utf-8') - elif not isinstance(value, (bytes, bytearray)): - # We support file-like objects for blobs; these already have been - # validated to ensure they have a read method - value = value.read() - length = len(value) - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - length - ) - initial_byte = self._get_initial_byte( - self.BLOB_MAJOR_TYPE, additional_info - ) - if num_bytes == 0: - serialized.extend(initial_byte) - else: - serialized.extend(initial_byte + length.to_bytes(num_bytes, "big")) - serialized.extend(value) - - def _serialize_type_string(self, serialized, value, shape, key): - encoded = value.encode('utf-8') - length = len(encoded) - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - length - ) - initial_byte = self._get_initial_byte( - self.STRING_MAJOR_TYPE, additional_info - ) - if num_bytes == 0: - serialized.extend(initial_byte + encoded) - else: - serialized.extend( - initial_byte + length.to_bytes(num_bytes, "big") + encoded - ) - - def _serialize_type_list(self, serialized, value, shape, key): - length = len(value) - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - length - ) - initial_byte = self._get_initial_byte( - self.LIST_MAJOR_TYPE, additional_info - ) - if num_bytes == 0: - 
serialized.extend(initial_byte) - else: - serialized.extend(initial_byte + length.to_bytes(num_bytes, "big")) - for item in value: - self._serialize_data_item(serialized, item, shape.member) - - def _serialize_type_map(self, serialized, value, shape, key): - length = len(value) - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - length - ) - initial_byte = self._get_initial_byte( - self.MAP_MAJOR_TYPE, additional_info - ) - if num_bytes == 0: - serialized.extend(initial_byte) - else: - serialized.extend(initial_byte + length.to_bytes(num_bytes, "big")) - for key_item, item in value.items(): - self._serialize_data_item(serialized, key_item, shape.key) - self._serialize_data_item(serialized, item, shape.value) - - def _serialize_type_structure(self, serialized, value, shape, key): - if key is not None: - # For nested structures, we need to serialize the key first - self._serialize_data_item(serialized, key, shape.key_shape) - - # Remove `None` values from the dictionary - value = {k: v for k, v in value.items() if v is not None} - - map_length = len(value) - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - map_length - ) - initial_byte = self._get_initial_byte( - self.MAP_MAJOR_TYPE, additional_info - ) - if num_bytes == 0: - serialized.extend(initial_byte) - else: - serialized.extend( - initial_byte + map_length.to_bytes(num_bytes, "big") - ) - - members = shape.members - for member_key, member_value in value.items(): - member_shape = members[member_key] - if 'name' in member_shape.serialization: - member_key = member_shape.serialization['name'] - if member_value is not None: - self._serialize_type_string(serialized, member_key, None, None) - self._serialize_data_item( - serialized, member_value, member_shape - ) - - def _serialize_type_timestamp(self, serialized, value, shape, key): - timestamp = self._convert_timestamp_to_str(value) - tag = 1 # Use tag 1 for unix timestamp - initial_byte = self._get_initial_byte(self.TAG_MAJOR_TYPE, tag) - serialized.extend(initial_byte) # Tagging the timestamp - additional_info, num_bytes = self._get_additional_info_and_num_bytes( - timestamp - ) - - if num_bytes == 0: - initial_byte = self._get_initial_byte( - self.UNSIGNED_INT_MAJOR_TYPE, timestamp - ) - serialized.extend(initial_byte) - else: - initial_byte = self._get_initial_byte( - self.UNSIGNED_INT_MAJOR_TYPE, additional_info - ) - serialized.extend( - initial_byte + timestamp.to_bytes(num_bytes, "big") - ) - - def _serialize_type_float(self, serialized, value, shape, key): - if self._is_special_number(value): - serialized.extend( - self._get_bytes_for_special_numbers(value) - ) # Handle special values like NaN or Infinity - else: - initial_byte = self._get_initial_byte( - self.FLOAT_AND_SIMPLE_MAJOR_TYPE, 26 - ) - serialized.extend(initial_byte + struct.pack(">f", value)) - - def _serialize_type_double(self, serialized, value, shape, key): - if self._is_special_number(value): - serialized.extend( - self._get_bytes_for_special_numbers(value) - ) # Handle special values like NaN or Infinity - else: - initial_byte = self._get_initial_byte( - self.FLOAT_AND_SIMPLE_MAJOR_TYPE, 27 - ) - serialized.extend(initial_byte + struct.pack(">d", value)) - - def _serialize_type_boolean(self, serialized, value, shape, key): - additional_info = 21 if value else 20 - serialized.extend( - self._get_initial_byte( - self.FLOAT_AND_SIMPLE_MAJOR_TYPE, additional_info - ) - ) - - def _get_additional_info_and_num_bytes(self, value): - # Values under 24 can be stored in the 
initial byte and don't need further - # encoding - if value < 24: - return value, 0 - # Values between 24 and 255 (inclusive) can be stored in 1 byte and - # correspond to additional info 24 - elif value < 256: - return 24, 1 - # Values up to 65535 can be stored in two bytes and correspond to additional - # info 25 - elif value < 65536: - return 25, 2 - # Values up to 4294967296 can be stored in four bytes and correspond to - # additional info 26 - elif value < 4294967296: - return 26, 4 - # The maximum number of bytes in a definite length data items is 8 which - # to additional info 27 - else: - return 27, 8 - - def _get_initial_byte(self, major_type, additional_info): - # The highest order three bits are the major type, so we need to bitshift the - # major type by 5 - major_type_bytes = major_type << 5 - return (major_type_bytes | additional_info).to_bytes(1, "big") - - def _is_special_number(self, value): - return any( - [ - value == float('inf'), - value == float('-inf'), - math.isnan(value), - ] - ) - - def _get_bytes_for_special_numbers(self, value): - additional_info = 25 - initial_byte = self._get_initial_byte( - self.FLOAT_AND_SIMPLE_MAJOR_TYPE, additional_info - ) - if value == float('inf'): - return initial_byte + struct.pack(">H", 0x7C00) - elif value == float('-inf'): - return initial_byte + struct.pack(">H", 0xFC00) - elif math.isnan(value): - return initial_byte + struct.pack(">H", 0x7E00) - - -class BaseRestSerializer(Serializer): - """Base class for rest protocols. - - The only variance between the various rest protocols is the - way that the body is serialized. All other aspects (headers, uri, etc.) - are the same and logic for serializing those aspects lives here. - - Subclasses must implement the ``_serialize_body_params`` method. - - """ - - QUERY_STRING_TIMESTAMP_FORMAT = 'iso8601' - HEADER_TIMESTAMP_FORMAT = 'rfc822' - # This is a list of known values for the "location" key in the - # serialization dict. The location key tells us where on the request - # to put the serialized value. - KNOWN_LOCATIONS = ['uri', 'querystring', 'header', 'headers'] - - def serialize_to_request(self, parameters, operation_model): - serialized = self._create_default_request() - serialized['method'] = operation_model.http.get( - 'method', self.DEFAULT_METHOD - ) - shape = operation_model.input_shape - - host_prefix = self._expand_host_prefix(parameters, operation_model) - if host_prefix is not None: - serialized['host_prefix'] = host_prefix - - if shape is None: - serialized['url_path'] = operation_model.http['requestUri'] - return serialized - shape_members = shape.members - # While the ``serialized`` key holds the final serialized request - # data, we need interim dicts for the various locations of the - # request. We need this for the uri_path_kwargs and the - # query_string_kwargs because they are templated, so we need - # to gather all the needed data for the string template, - # then we render the template. The body_kwargs is needed - # because once we've collected them all, we run them through - # _serialize_body_params, which for rest-json, creates JSON, - # and for rest-xml, will create XML. This is what the - # ``partitioned`` dict below is for. - partitioned = { - 'uri_path_kwargs': self.MAP_TYPE(), - 'query_string_kwargs': self.MAP_TYPE(), - 'body_kwargs': self.MAP_TYPE(), - 'headers': self.MAP_TYPE(), - } - for param_name, param_value in parameters.items(): - if param_value is None: - # Don't serialize any parameter with a None value. 
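The initial-byte packing used throughout the CBOR serializer above (major type in the top three bits, additional info in the low five, then 1/2/4/8 optional length bytes) can be checked with a small standalone sketch (a simplified illustration, not the serializer itself):

def cbor_head(major_type, value):
    # Values below 24 fit directly in the additional-info bits.
    if value < 24:
        return bytes([(major_type << 5) | value])
    # Otherwise pick the smallest length field that holds the value.
    for info, num_bytes in ((24, 1), (25, 2), (26, 4), (27, 8)):
        if value < 2 ** (8 * num_bytes):
            return bytes([(major_type << 5) | info]) + value.to_bytes(num_bytes, 'big')

# Unsigned int 10 -> '0a'; unsigned int 500 -> '1901f4'.
print(cbor_head(0, 10).hex(), cbor_head(0, 500).hex())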
- continue - self._partition_parameters( - partitioned, param_name, param_value, shape_members - ) - serialized['url_path'] = self._render_uri_template( - operation_model.http['requestUri'], partitioned['uri_path_kwargs'] - ) - - if 'authPath' in operation_model.http: - serialized['auth_path'] = self._render_uri_template( - operation_model.http['authPath'], - partitioned['uri_path_kwargs'], - ) - # Note that we lean on the http implementation to handle the case - # where the requestUri path already has query parameters. - # The bundled http client, requests, already supports this. - serialized['query_string'] = partitioned['query_string_kwargs'] - if partitioned['headers']: - serialized['headers'] = partitioned['headers'] - self._serialize_payload( - partitioned, parameters, serialized, shape, shape_members - ) - self._serialize_content_type(serialized, shape, shape_members) - - return serialized - - def _render_uri_template(self, uri_template, params): - # We need to handle two cases:: - # - # /{Bucket}/foo - # /{Key+}/bar - # A label ending with '+' is greedy. There can only - # be one greedy key. - encoded_params = {} - for template_param in re.findall(r'{(.*?)}', uri_template): - if template_param.endswith('+'): - encoded_params[template_param] = percent_encode( - params[template_param[:-1]], safe='/~' - ) - else: - encoded_params[template_param] = percent_encode( - params[template_param] - ) - return uri_template.format(**encoded_params) - - def _serialize_payload( - self, partitioned, parameters, serialized, shape, shape_members - ): - # partitioned - The user input params partitioned by location. - # parameters - The user input params. - # serialized - The final serialized request dict. - # shape - Describes the expected input shape - # shape_members - The members of the input struct shape - payload_member = shape.serialization.get('payload') - if self._has_streaming_payload(payload_member, shape_members): - # If it's streaming, then the body is just the - # value of the payload. - body_payload = parameters.get(payload_member, b'') - body_payload = self._encode_payload(body_payload) - serialized['body'] = body_payload - elif payload_member is not None: - # If there's a payload member, we serialized that - # member to they body. - body_params = parameters.get(payload_member) - if body_params is not None: - serialized['body'] = self._serialize_body_params( - body_params, shape_members[payload_member] - ) - else: - serialized['body'] = self._serialize_empty_body() - elif partitioned['body_kwargs']: - serialized['body'] = self._serialize_body_params( - partitioned['body_kwargs'], shape - ) - elif self._requires_empty_body(shape): - serialized['body'] = self._serialize_empty_body() - - def _serialize_empty_body(self): - return b'' - - def _serialize_content_type(self, serialized, shape, shape_members): - """ - Some protocols require varied Content-Type headers - depending on user input. This allows subclasses to apply - this conditionally. - """ - pass - - def _requires_empty_body(self, shape): - """ - Some protocols require a specific body to represent an empty - payload. This allows subclasses to apply this conditionally. 
- """ - return False - - def _has_streaming_payload(self, payload, shape_members): - """Determine if payload is streaming (a blob or string).""" - return payload is not None and shape_members[payload].type_name in ( - 'blob', - 'string', - ) - - def _encode_payload(self, body): - if isinstance(body, str): - return body.encode(self.DEFAULT_ENCODING) - return body - - def _partition_parameters( - self, partitioned, param_name, param_value, shape_members - ): - # This takes the user provided input parameter (``param``) - # and figures out where they go in the request dict. - # Some params are HTTP headers, some are used in the URI, some - # are in the request body. This method deals with this. - member = shape_members[param_name] - location = member.serialization.get('location') - key_name = member.serialization.get('name', param_name) - if location == 'uri': - uri_path_value = self._get_uri_and_query_string_value( - param_value, member - ) - partitioned['uri_path_kwargs'][key_name] = uri_path_value - elif location == 'querystring': - if isinstance(param_value, dict): - partitioned['query_string_kwargs'].update(param_value) - elif member.type_name == 'list': - new_param = [ - self._get_uri_and_query_string_value(value, member.member) - for value in param_value - ] - partitioned['query_string_kwargs'][key_name] = new_param - else: - new_param = self._get_uri_and_query_string_value( - param_value, member - ) - partitioned['query_string_kwargs'][key_name] = new_param - elif location == 'header': - shape = shape_members[param_name] - if not param_value and shape.type_name == 'list': - # Empty lists should not be set on the headers - return - partitioned['headers'][key_name] = self._convert_header_value( - shape, param_value - ) - elif location == 'headers': - # 'headers' is a bit of an oddball. The ``key_name`` - # is actually really a prefix for the header names: - header_prefix = key_name - # The value provided by the user is a dict so we'll be - # creating multiple header key/val pairs. The key - # name to use for each header is the header_prefix (``key_name``) - # plus the key provided by the user. 
- self._do_serialize_header_map( - header_prefix, partitioned['headers'], param_value - ) - else: - partitioned['body_kwargs'][param_name] = param_value - - def _get_uri_and_query_string_value(self, param_value, member): - if member.type_name == 'boolean': - return str(param_value).lower() - elif member.type_name == 'timestamp': - timestamp_format = member.serialization.get( - 'timestampFormat', self.QUERY_STRING_TIMESTAMP_FORMAT - ) - return self._convert_timestamp_to_str( - param_value, timestamp_format - ) - elif member.type_name in ['float', 'double']: - return str(self._handle_float(param_value)) - return param_value - - def _do_serialize_header_map(self, header_prefix, headers, user_input): - for key, val in user_input.items(): - full_key = header_prefix + key - headers[full_key] = val - - def _serialize_body_params(self, params, shape): - raise NotImplementedError('_serialize_body_params') - - def _convert_header_value(self, shape, value): - if shape.type_name == 'timestamp': - datetime_obj = parse_to_aware_datetime(value) - timestamp = calendar.timegm(datetime_obj.utctimetuple()) - timestamp_format = shape.serialization.get( - 'timestampFormat', self.HEADER_TIMESTAMP_FORMAT - ) - return str( - self._convert_timestamp_to_str(timestamp, timestamp_format) - ) - elif shape.type_name == 'list': - if shape.member.type_name == "string": - converted_value = [ - self._escape_header_list_string(v) - for v in value - if v is not None - ] - else: - converted_value = [ - self._convert_header_value(shape.member, v) - for v in value - if v is not None - ] - return ",".join(converted_value) - elif is_json_value_header(shape): - # Serialize with no spaces after separators to save space in - # the header. - return self._get_base64(json.dumps(value, separators=(',', ':'))) - elif shape.type_name == 'boolean': - return str(value).lower() - elif shape.type_name in ['float', 'double']: - return str(self._handle_float(value)) - else: - return str(value) - - def _escape_header_list_string(self, value): - # Escapes a header list string by wrapping it in double quotes if it contains - # a comma or a double quote, and escapes any internal double quotes. - if '"' in value or ',' in value: - return '"' + value.replace('"', '\\"') + '"' - else: - return value - - -class BaseRpcV2Serializer(Serializer): - """Base class for RPCv2 protocols. - - The only variance between the various RPCv2 protocols is the - way that the body is serialized. All other aspects (headers, uri, etc.) - are the same and logic for serializing those aspects lives here. - - Subclasses must implement the ``_serialize_body_params`` and - ``_serialize_headers`` methods. 
- - """ - - def serialize_to_request(self, parameters, operation_model): - serialized = self._create_default_request() - service_name = operation_model.service_model.metadata['targetPrefix'] - operation_name = operation_model.name - serialized['url_path'] = ( - f'/service/{service_name}/operation/{operation_name}' - ) - - input_shape = operation_model.input_shape - if input_shape is not None: - self._serialize_payload(parameters, serialized, input_shape) - - host_prefix = self._expand_host_prefix(parameters, operation_model) - if host_prefix is not None: - serialized['host_prefix'] = host_prefix - - self._serialize_headers(serialized, operation_model) - - return serialized - - def _serialize_payload(self, parameters, serialized, shape): - body_payload = self._serialize_body_params(parameters, shape) - serialized['body'] = body_payload - - def _serialize_headers(self, serialized, operation_model): - raise NotImplementedError("_serialize_headers") - - def _serialize_body_params(self, parameters, shape): - raise NotImplementedError("_serialize_body_params") - - -class RestJSONSerializer(BaseRestSerializer, JSONSerializer): - def _serialize_empty_body(self): - return b'{}' - - def _requires_empty_body(self, shape): - """ - Serialize an empty JSON object whenever the shape has - members not targeting a location. - """ - for member, val in shape.members.items(): - if 'location' not in val.serialization: - return True - return False - - def _serialize_content_type(self, serialized, shape, shape_members): - """Set Content-Type to application/json for all structured bodies.""" - payload = shape.serialization.get('payload') - if self._has_streaming_payload(payload, shape_members): - # Don't apply content-type to streaming bodies - return - - has_body = serialized['body'] != b'' - has_content_type = has_header('Content-Type', serialized['headers']) - if has_body and not has_content_type: - serialized['headers']['Content-Type'] = 'application/json' - - def _serialize_body_params(self, params, shape): - serialized_body = self.MAP_TYPE() - self._serialize(serialized_body, params, shape) - return json.dumps(serialized_body).encode(self.DEFAULT_ENCODING) - - -class RestXMLSerializer(BaseRestSerializer): - TIMESTAMP_FORMAT = 'iso8601' - - def _serialize_body_params(self, params, shape): - root_name = shape.serialization['name'] - pseudo_root = ElementTree.Element('') - self._serialize(shape, params, pseudo_root, root_name) - real_root = list(pseudo_root)[0] - return ElementTree.tostring(real_root, encoding=self.DEFAULT_ENCODING) - - def _serialize(self, shape, params, xmlnode, name): - method = getattr( - self, - f'_serialize_type_{shape.type_name}', - self._default_serialize, - ) - method(xmlnode, params, shape, name) - - def _serialize_type_structure(self, xmlnode, params, shape, name): - structure_node = ElementTree.SubElement(xmlnode, name) - - self._add_xml_namespace(shape, structure_node) - for key, value in params.items(): - member_shape = shape.members[key] - member_name = member_shape.serialization.get('name', key) - # We need to special case member shapes that are marked as an - # xmlAttribute. Rather than serializing into an XML child node, - # we instead serialize the shape to an XML attribute of the - # *current* node. - if value is None: - # Don't serialize any param whose value is None. - return - if member_shape.serialization.get('xmlAttribute'): - # xmlAttributes must have a serialization name. 
- xml_attribute_name = member_shape.serialization['name'] - structure_node.attrib[xml_attribute_name] = value - continue - self._serialize(member_shape, value, structure_node, member_name) - - def _serialize_type_list(self, xmlnode, params, shape, name): - member_shape = shape.member - if shape.serialization.get('flattened'): - element_name = name - list_node = xmlnode - else: - element_name = member_shape.serialization.get('name', 'member') - list_node = ElementTree.SubElement(xmlnode, name) - self._add_xml_namespace(shape, list_node) - for item in params: - self._serialize(member_shape, item, list_node, element_name) - - def _serialize_type_map(self, xmlnode, params, shape, name): - # Given the ``name`` of MyMap, and input of {"key1": "val1"} - # we serialize this as: - # - # - # key1 - # val1 - # - # - if not self._is_shape_flattened(shape): - node = ElementTree.SubElement(xmlnode, name) - self._add_xml_namespace(shape, node) - - for key, value in params.items(): - sub_node = ( - ElementTree.SubElement(xmlnode, name) - if self._is_shape_flattened(shape) - else ElementTree.SubElement(node, 'entry') - ) - key_name = self._get_serialized_name(shape.key, default_name='key') - val_name = self._get_serialized_name( - shape.value, default_name='value' - ) - self._serialize(shape.key, key, sub_node, key_name) - self._serialize(shape.value, value, sub_node, val_name) - - def _serialize_type_boolean(self, xmlnode, params, shape, name): - # For scalar types, the 'params' attr is actually just a scalar - # value representing the data we need to serialize as a boolean. - # It will either be 'true' or 'false' - node = ElementTree.SubElement(xmlnode, name) - if params: - str_value = 'true' - else: - str_value = 'false' - node.text = str_value - self._add_xml_namespace(shape, node) - - def _serialize_type_blob(self, xmlnode, params, shape, name): - node = ElementTree.SubElement(xmlnode, name) - node.text = self._get_base64(params) - self._add_xml_namespace(shape, node) - - def _serialize_type_timestamp(self, xmlnode, params, shape, name): - node = ElementTree.SubElement(xmlnode, name) - node.text = str( - self._convert_timestamp_to_str( - params, shape.serialization.get('timestampFormat') - ) - ) - self._add_xml_namespace(shape, node) - - def _serialize_type_float(self, xmlnode, params, shape, name): - node = ElementTree.SubElement(xmlnode, name) - node.text = str(self._handle_float(params)) - self._add_xml_namespace(shape, node) - - def _serialize_type_double(self, xmlnode, params, shape, name): - self._serialize_type_float(xmlnode, params, shape, name) - - def _default_serialize(self, xmlnode, params, shape, name): - node = ElementTree.SubElement(xmlnode, name) - node.text = str(params) - self._add_xml_namespace(shape, node) - - def _add_xml_namespace(self, shape, structure_node): - if 'xmlNamespace' in shape.serialization: - namespace_metadata = shape.serialization['xmlNamespace'] - attribute_name = 'xmlns' - if isinstance(namespace_metadata, dict): - if namespace_metadata.get('prefix'): - attribute_name += f":{namespace_metadata['prefix']}" - structure_node.attrib[attribute_name] = namespace_metadata[ - 'uri' - ] - elif isinstance(namespace_metadata, str): - structure_node.attrib[attribute_name] = namespace_metadata - - -class RpcV2CBORSerializer(BaseRpcV2Serializer, CBORSerializer): - TIMESTAMP_FORMAT = 'unixtimestamp' - - def serialize_to_request(self, parameters, operation_model): - register_feature_id('PROTOCOL_RPC_V2_CBOR') - return super().serialize_to_request(parameters, operation_model) - - 
def _serialize_body_params(self, parameters, input_shape): - body = bytearray() - self._serialize_data_item(body, parameters, input_shape) - return bytes(body) - - def _serialize_headers(self, serialized, operation_model): - serialized['headers']['smithy-protocol'] = 'rpc-v2-cbor' - - if operation_model.has_event_stream_output: - header_val = 'application/vnd.amazon.eventstream' - else: - header_val = 'application/cbor' - self._handle_query_compatible_trait(operation_model, serialized) - - has_body = serialized['body'] != b'' - has_content_type = has_header('Content-Type', serialized['headers']) - - serialized['headers']['Accept'] = header_val - if not has_content_type and has_body: - serialized['headers']['Content-Type'] = header_val - - -SERIALIZERS = { - 'ec2': EC2Serializer, - 'query': QuerySerializer, - 'json': JSONSerializer, - 'rest-json': RestJSONSerializer, - 'rest-xml': RestXMLSerializer, - 'smithy-rpc-v2-cbor': RpcV2CBORSerializer, -} diff --git a/venv/Lib/site-packages/botocore/session.py b/venv/Lib/site-packages/botocore/session.py deleted file mode 100644 index c5a61b2..0000000 --- a/venv/Lib/site-packages/botocore/session.py +++ /dev/null @@ -1,1330 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -""" -This module contains the main interface to the botocore package, the -Session object. 
-""" - -import copy -import logging -import os -import platform -import socket -import warnings - -import botocore.client -import botocore.configloader -import botocore.credentials -import botocore.tokens -from botocore import ( - UNSIGNED, - __version__, - handlers, - invoke_initializers, - monitoring, - paginate, - retryhandler, - translate, - waiter, -) -from botocore.compat import ( - HAS_CRT, # noqa: F401 - MutableMapping, -) -from botocore.configprovider import ( - BOTOCORE_DEFAUT_SESSION_VARIABLES, - ConfigChainFactory, - ConfiguredEndpointProvider, - ConfigValueStore, - DefaultConfigResolver, - SmartDefaultsConfigStoreFactory, - create_botocore_default_config_mapping, -) -from botocore.context import get_context, with_current_context -from botocore.errorfactory import ClientExceptionsFactory -from botocore.exceptions import ( - ConfigNotFound, - InvalidDefaultsMode, - PartialCredentialsError, - ProfileNotFound, - UnknownServiceError, -) -from botocore.hooks import ( - EventAliaser, - HierarchicalEmitter, - first_non_none_response, -) -from botocore.loaders import create_loader -from botocore.model import ServiceModel -from botocore.parsers import ResponseParserFactory -from botocore.plugin import get_botocore_plugins, load_client_plugins -from botocore.regions import EndpointResolver -from botocore.useragent import UserAgentString, register_feature_id -from botocore.utils import ( - EVENT_ALIASES, - IMDSRegionProvider, - validate_region_name, -) - -logger = logging.getLogger(__name__) - - -class Session: - """ - The Session object collects together useful functionality - from `botocore` as well as important data such as configuration - information and credentials into a single, easy-to-use object. - - :ivar available_profiles: A list of profiles defined in the config - file associated with this session. - :ivar profile: The current profile. - """ - - SESSION_VARIABLES = copy.copy(BOTOCORE_DEFAUT_SESSION_VARIABLES) - - #: The default format string to use when configuring the botocore logger. - LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - - def __init__( - self, - session_vars=None, - event_hooks=None, - include_builtin_handlers=True, - profile=None, - ): - """ - Create a new Session object. - - :type session_vars: dict - :param session_vars: A dictionary that is used to override some or all - of the environment variables associated with this session. The - key/value pairs defined in this dictionary will override the - corresponding variables defined in ``SESSION_VARIABLES``. - - :type event_hooks: BaseEventHooks - :param event_hooks: The event hooks object to use. If one is not - provided, an event hooks object will be automatically created - for you. - - :type include_builtin_handlers: bool - :param include_builtin_handlers: Indicates whether or not to - automatically register builtin handlers. - - :type profile: str - :param profile: The name of the profile to use for this - session. Note that the profile can only be set when - the session is created. 
- - """ - if event_hooks is None: - self._original_handler = HierarchicalEmitter() - else: - self._original_handler = event_hooks - self._events = EventAliaser(self._original_handler) - if include_builtin_handlers: - self._register_builtin_handlers(self._events) - self.user_agent_name = 'Botocore' - self.user_agent_version = __version__ - self.user_agent_extra = '' - # The _profile attribute is just used to cache the value - # of the current profile to avoid going through the normal - # config lookup process each access time. - self._profile = None - self._config = None - self._credentials = None - self._auth_token = None - self._profile_map = None - # This is a dict that stores per session specific config variable - # overrides via set_config_variable(). - self._session_instance_vars = {} - if profile is not None: - self._session_instance_vars['profile'] = profile - self._client_config = None - self._last_client_region_used = None - self._components = ComponentLocator() - self._internal_components = ComponentLocator() - self._register_components() - self.session_var_map = SessionVarDict(self, self.SESSION_VARIABLES) - if session_vars is not None: - self.session_var_map.update(session_vars) - invoke_initializers(self) - - def _register_components(self): - self._register_credential_provider() - self._register_token_provider() - self._register_data_loader() - self._register_endpoint_resolver() - self._register_event_emitter() - self._register_response_parser_factory() - self._register_exceptions_factory() - self._register_config_store() - self._register_monitor() - self._register_default_config_resolver() - self._register_smart_defaults_factory() - self._register_user_agent_creator() - - def _register_event_emitter(self): - self._components.register_component('event_emitter', self._events) - - def _register_token_provider(self): - self._components.lazy_register_component( - 'token_provider', self._create_token_resolver - ) - - def _create_token_resolver(self): - return botocore.tokens.create_token_resolver(self) - - def _register_credential_provider(self): - self._components.lazy_register_component( - 'credential_provider', self._create_credential_resolver - ) - - def _create_credential_resolver(self): - return botocore.credentials.create_credential_resolver( - self, region_name=self._last_client_region_used - ) - - def _register_data_loader(self): - self._components.lazy_register_component( - 'data_loader', - lambda: create_loader(self.get_config_variable('data_path')), - ) - - def _register_endpoint_resolver(self): - def create_default_resolver(): - loader = self.get_component('data_loader') - endpoints, path = loader.load_data_with_path('endpoints') - uses_builtin = loader.is_builtin_path(path) - return EndpointResolver(endpoints, uses_builtin_data=uses_builtin) - - self._internal_components.lazy_register_component( - 'endpoint_resolver', create_default_resolver - ) - - def _register_default_config_resolver(self): - def create_default_config_resolver(): - loader = self.get_component('data_loader') - defaults = loader.load_data('sdk-default-configuration') - return DefaultConfigResolver(defaults) - - self._internal_components.lazy_register_component( - 'default_config_resolver', create_default_config_resolver - ) - - def _register_smart_defaults_factory(self): - def create_smart_defaults_factory(): - default_config_resolver = self._get_internal_component( - 'default_config_resolver' - ) - imds_region_provider = IMDSRegionProvider(session=self) - return SmartDefaultsConfigStoreFactory( - 
default_config_resolver, imds_region_provider - ) - - self._internal_components.lazy_register_component( - 'smart_defaults_factory', create_smart_defaults_factory - ) - - def _register_response_parser_factory(self): - self._components.register_component( - 'response_parser_factory', ResponseParserFactory() - ) - - def _register_exceptions_factory(self): - self._internal_components.register_component( - 'exceptions_factory', ClientExceptionsFactory() - ) - - def _register_builtin_handlers(self, events): - for spec in handlers.BUILTIN_HANDLERS: - if len(spec) == 2: - event_name, handler = spec - self.register(event_name, handler) - else: - event_name, handler, register_type = spec - if register_type is handlers.REGISTER_FIRST: - self._events.register_first(event_name, handler) - elif register_type is handlers.REGISTER_LAST: - self._events.register_last(event_name, handler) - - def _register_config_store(self): - config_store_component = ConfigValueStore( - mapping=create_botocore_default_config_mapping(self) - ) - self._components.register_component( - 'config_store', config_store_component - ) - - def _register_monitor(self): - self._internal_components.lazy_register_component( - 'monitor', self._create_csm_monitor - ) - - def _register_user_agent_creator(self): - uas = UserAgentString.from_environment() - self._components.register_component('user_agent_creator', uas) - - def _create_csm_monitor(self): - if self.get_config_variable('csm_enabled'): - client_id = self.get_config_variable('csm_client_id') - host = self.get_config_variable('csm_host') - port = self.get_config_variable('csm_port') - handler = monitoring.Monitor( - adapter=monitoring.MonitorEventAdapter(), - publisher=monitoring.SocketPublisher( - socket=socket.socket(socket.AF_INET, socket.SOCK_DGRAM), - host=host, - port=port, - serializer=monitoring.CSMSerializer( - csm_client_id=client_id - ), - ), - ) - return handler - return None - - def _get_crt_version(self): - user_agent_creator = self.get_component('user_agent_creator') - return user_agent_creator._crt_version or 'Unknown' - - @property - def available_profiles(self): - return list(self._build_profile_map().keys()) - - def _build_profile_map(self): - # This will build the profile map if it has not been created, - # otherwise it will return the cached value. The profile map - # is a list of profile names, to the config values for the profile. - if self._profile_map is None: - self._profile_map = self.full_config['profiles'] - return self._profile_map - - @property - def profile(self): - if self._profile is None: - profile = self.get_config_variable('profile') - self._profile = profile - return self._profile - - def get_config_variable(self, logical_name, methods=None): - if methods is not None: - return self._get_config_variable_with_custom_methods( - logical_name, methods - ) - return self.get_component('config_store').get_config_variable( - logical_name - ) - - def _get_config_variable_with_custom_methods(self, logical_name, methods): - # If a custom list of methods was supplied we need to perserve the - # behavior with the new system. To do so a new chain that is a copy of - # the old one will be constructed, but only with the supplied methods - # being added to the chain. This chain will be consulted for a value - # and then thrown out. This is not efficient, nor is the methods arg - # used in botocore, this is just for backwards compatibility. 
- chain_builder = SubsetChainConfigFactory(session=self, methods=methods) - mapping = create_botocore_default_config_mapping(self) - for name, config_options in self.session_var_map.items(): - config_name, env_vars, default, typecast = config_options - build_chain_config_args = { - 'conversion_func': typecast, - 'default': default, - } - if 'instance' in methods: - build_chain_config_args['instance_name'] = name - if 'env' in methods: - build_chain_config_args['env_var_names'] = env_vars - if 'config' in methods: - build_chain_config_args['config_property_name'] = config_name - mapping[name] = chain_builder.create_config_chain( - **build_chain_config_args - ) - config_store_component = ConfigValueStore(mapping=mapping) - value = config_store_component.get_config_variable(logical_name) - return value - - def set_config_variable(self, logical_name, value): - """Set a configuration variable to a specific value. - - By using this method, you can override the normal lookup - process used in ``get_config_variable`` by explicitly setting - a value. Subsequent calls to ``get_config_variable`` will - use the ``value``. This gives you per-session specific - configuration values. - - :: - >>> # Assume logical name 'foo' maps to env var 'FOO' - >>> os.environ['FOO'] = 'myvalue' - >>> s.get_config_variable('foo') - 'myvalue' - >>> s.set_config_variable('foo', 'othervalue') - >>> s.get_config_variable('foo') - 'othervalue' - - :type logical_name: str - :param logical_name: The logical name of the session variable - you want to set. These are the keys in ``SESSION_VARIABLES``. - :param value: The value to associate with the config variable. - - """ - logger.debug( - "Setting config variable for %s to %r", - logical_name, - value, - ) - self._session_instance_vars[logical_name] = value - - def instance_variables(self): - return copy.copy(self._session_instance_vars) - - def get_scoped_config(self): - """ - Returns the config values from the config file scoped to the current - profile. - - The configuration data is loaded **only** from the config file. - It does not resolve variables based on different locations - (e.g. first from the session instance, then from environment - variables, then from the config file). If you want this lookup - behavior, use the ``get_config_variable`` method instead. - - Note that this configuration is specific to a single profile (the - ``profile`` session variable). - - If the ``profile`` session variable is set and the profile does - not exist in the config file, a ``ProfileNotFound`` exception - will be raised. - - :raises: ConfigNotFound, ConfigParseError, ProfileNotFound - :rtype: dict - - """ - profile_name = self.get_config_variable('profile') - profile_map = self._build_profile_map() - # If a profile is not explicitly set return the default - # profile config or an empty config dict if we don't have - # a default profile. - if profile_name is None: - return profile_map.get('default', {}) - elif profile_name not in profile_map: - # Otherwise if they specified a profile, it has to - # exist (even if it's the default profile) otherwise - # we complain. - raise ProfileNotFound(profile=profile_name) - else: - return profile_map[profile_name] - - @property - def full_config(self): - """Return the parsed config file. - - The ``get_config`` method returns the config associated with the - specified profile. This property returns the contents of the - **entire** config file. 
- - :rtype: dict - """ - if self._config is None: - try: - config_file = self.get_config_variable('config_file') - self._config = botocore.configloader.load_config(config_file) - except ConfigNotFound: - self._config = {'profiles': {}} - try: - # Now we need to inject the profiles from the - # credentials file. We don't actually need the values - # in the creds file, only the profile names so that we - # can validate the user is not referring to a nonexistent - # profile. - cred_file = self.get_config_variable('credentials_file') - cred_profiles = botocore.configloader.raw_config_parse( - cred_file - ) - for profile in cred_profiles: - cred_vars = cred_profiles[profile] - if profile not in self._config['profiles']: - self._config['profiles'][profile] = cred_vars - else: - self._config['profiles'][profile].update(cred_vars) - except ConfigNotFound: - pass - return self._config - - def get_default_client_config(self): - """Retrieves the default config for creating clients - - :rtype: botocore.client.Config - :returns: The default client config object when creating clients. If - the value is ``None`` then there is no default config object - attached to the session. - """ - return self._client_config - - def set_default_client_config(self, client_config): - """Sets the default config for creating clients - - :type client_config: botocore.client.Config - :param client_config: The default client config object when creating - clients. If the value is ``None`` then there is no default config - object attached to the session. - """ - self._client_config = client_config - - def set_credentials( - self, access_key, secret_key, token=None, account_id=None - ): - """ - Manually create credentials for this session. If you would - prefer to use botocore without a config file, environment variables, - or IAM roles, you can pass explicit credentials into this - method to establish credentials for this session. - - :type access_key: str - :param access_key: The access key part of the credentials. - - :type secret_key: str - :param secret_key: The secret key part of the credentials. - - :type token: str - :param token: An option session token used by STS session - credentials. - - :type account_id: str - :param account_id: An optional account ID part of the credentials. - """ - self._credentials = botocore.credentials.Credentials( - access_key, secret_key, token, account_id=account_id - ) - - def get_credentials(self): - """ - Return the :class:`botocore.credential.Credential` object - associated with this session. If the credentials have not - yet been loaded, this will attempt to load them. If they - have already been loaded, this will return the cached - credentials. - - """ - if self._credentials is None: - self._credentials = self._components.get_component( - 'credential_provider' - ).load_credentials() - return self._credentials - - def get_auth_token(self, **kwargs): - """ - Return the :class:`botocore.tokens.AuthToken` object associated with - this session. If the authorization token has not yet been loaded, this - will attempt to load it. If it has already been loaded, this will - return the cached authorization token. 
- - """ - provider = self._components.get_component('token_provider') - - signing_name = kwargs.get('signing_name') - if signing_name is not None: - auth_token = provider.load_token(signing_name=signing_name) - if auth_token is not None: - return auth_token - - if self._auth_token is None: - self._auth_token = provider.load_token() - return self._auth_token - - def user_agent(self): - """ - Return a string suitable for use as a User-Agent header. - The string will be of the form: - - / Python/ / - - Where: - - - agent_name is the value of the `user_agent_name` attribute - of the session object (`Botocore` by default). - - agent_version is the value of the `user_agent_version` - attribute of the session object (the botocore version by default). - by default. - - py_ver is the version of the Python interpreter beng used. - - plat_name is the name of the platform (e.g. Darwin) - - plat_ver is the version of the platform - - exec_env is exec-env/$AWS_EXECUTION_ENV - - If ``user_agent_extra`` is not empty, then this value will be - appended to the end of the user agent string. - - """ - base = ( - f'{self.user_agent_name}/{self.user_agent_version} ' - f'Python/{platform.python_version()} ' - f'{platform.system()}/{platform.release()}' - ) - if HAS_CRT: - base += f' awscrt/{self._get_crt_version()}' - if os.environ.get('AWS_EXECUTION_ENV') is not None: - base += ' exec-env/{}'.format(os.environ.get('AWS_EXECUTION_ENV')) - if self.user_agent_extra: - base += f' {self.user_agent_extra}' - - return base - - def get_data(self, data_path): - """ - Retrieve the data associated with `data_path`. - - :type data_path: str - :param data_path: The path to the data you wish to retrieve. - """ - return self.get_component('data_loader').load_data(data_path) - - def get_service_model(self, service_name, api_version=None): - """Get the service model object. - - :type service_name: string - :param service_name: The service name - - :type api_version: string - :param api_version: The API version of the service. If none is - provided, then the latest API version will be used. - - :rtype: L{botocore.model.ServiceModel} - :return: The botocore service model for the service. - - """ - service_description = self.get_service_data(service_name, api_version) - return ServiceModel(service_description, service_name=service_name) - - def get_waiter_model(self, service_name, api_version=None): - loader = self.get_component('data_loader') - waiter_config = loader.load_service_model( - service_name, 'waiters-2', api_version - ) - return waiter.WaiterModel(waiter_config) - - def get_paginator_model(self, service_name, api_version=None): - loader = self.get_component('data_loader') - paginator_config = loader.load_service_model( - service_name, 'paginators-1', api_version - ) - return paginate.PaginatorModel(paginator_config) - - def get_service_data(self, service_name, api_version=None): - """ - Retrieve the fully merged data associated with a service. - """ - data_path = service_name - service_data = self.get_component('data_loader').load_service_model( - data_path, type_name='service-2', api_version=api_version - ) - service_id = EVENT_ALIASES.get(service_name, service_name) - self._events.emit( - f'service-data-loaded.{service_id}', - service_data=service_data, - service_name=service_name, - session=self, - ) - return service_data - - def get_available_services(self): - """ - Return a list of names of available services. 
- """ - return self.get_component('data_loader').list_available_services( - type_name='service-2' - ) - - def set_debug_logger(self, logger_name='botocore'): - """ - Convenience function to quickly configure full debug output - to go to the console. - """ - self.set_stream_logger(logger_name, logging.DEBUG) - - def set_stream_logger( - self, logger_name, log_level, stream=None, format_string=None - ): - """ - Convenience method to configure a stream logger. - - :type logger_name: str - :param logger_name: The name of the logger to configure - - :type log_level: str - :param log_level: The log level to set for the logger. This - is any param supported by the ``.setLevel()`` method of - a ``Log`` object. - - :type stream: file - :param stream: A file like object to log to. If none is provided - then sys.stderr will be used. - - :type format_string: str - :param format_string: The format string to use for the log - formatter. If none is provided this will default to - ``self.LOG_FORMAT``. - - """ - log = logging.getLogger(logger_name) - log.setLevel(logging.DEBUG) - - ch = logging.StreamHandler(stream) - ch.setLevel(log_level) - - # create formatter - if format_string is None: - format_string = self.LOG_FORMAT - formatter = logging.Formatter(format_string) - - # add formatter to ch - ch.setFormatter(formatter) - - # add ch to logger - log.addHandler(ch) - - def set_file_logger(self, log_level, path, logger_name='botocore'): - """ - Convenience function to quickly configure any level of logging - to a file. - - :type log_level: int - :param log_level: A log level as specified in the `logging` module - - :type path: string - :param path: Path to the log file. The file will be created - if it doesn't already exist. - """ - log = logging.getLogger(logger_name) - log.setLevel(logging.DEBUG) - - # create console handler and set level to debug - ch = logging.FileHandler(path) - ch.setLevel(log_level) - - # create formatter - formatter = logging.Formatter(self.LOG_FORMAT) - - # add formatter to ch - ch.setFormatter(formatter) - - # add ch to logger - log.addHandler(ch) - - def register( - self, event_name, handler, unique_id=None, unique_id_uses_count=False - ): - """Register a handler with an event. - - :type event_name: str - :param event_name: The name of the event. - - :type handler: callable - :param handler: The callback to invoke when the event - is emitted. This object must be callable, and must - accept ``**kwargs``. If either of these preconditions are - not met, a ``ValueError`` will be raised. - - :type unique_id: str - :param unique_id: An optional identifier to associate with the - registration. A unique_id can only be used once for - the entire session registration (unless it is unregistered). - This can be used to prevent an event handler from being - registered twice. - - :param unique_id_uses_count: boolean - :param unique_id_uses_count: Specifies if the event should maintain - a count when a ``unique_id`` is registered and unregisted. The - event can only be completely unregistered once every register call - using the unique id has been matched by an ``unregister`` call. - If ``unique_id`` is specified, subsequent ``register`` - calls must use the same value for ``unique_id_uses_count`` - as the ``register`` call that first registered the event. - - :raises ValueError: If the call to ``register`` uses ``unique_id`` - but the value for ``unique_id_uses_count`` differs from the - ``unique_id_uses_count`` value declared by the very first - ``register`` call for that ``unique_id``. 
- """ - self._events.register( - event_name, - handler, - unique_id, - unique_id_uses_count=unique_id_uses_count, - ) - - def unregister( - self, - event_name, - handler=None, - unique_id=None, - unique_id_uses_count=False, - ): - """Unregister a handler with an event. - - :type event_name: str - :param event_name: The name of the event. - - :type handler: callable - :param handler: The callback to unregister. - - :type unique_id: str - :param unique_id: A unique identifier identifying the callback - to unregister. You can provide either the handler or the - unique_id, you do not have to provide both. - - :param unique_id_uses_count: boolean - :param unique_id_uses_count: Specifies if the event should maintain - a count when a ``unique_id`` is registered and unregisted. The - event can only be completely unregistered once every ``register`` - call using the ``unique_id`` has been matched by an ``unregister`` - call. If the ``unique_id`` is specified, subsequent - ``unregister`` calls must use the same value for - ``unique_id_uses_count`` as the ``register`` call that first - registered the event. - - :raises ValueError: If the call to ``unregister`` uses ``unique_id`` - but the value for ``unique_id_uses_count`` differs from the - ``unique_id_uses_count`` value declared by the very first - ``register`` call for that ``unique_id``. - """ - self._events.unregister( - event_name, - handler=handler, - unique_id=unique_id, - unique_id_uses_count=unique_id_uses_count, - ) - - def emit(self, event_name, **kwargs): - return self._events.emit(event_name, **kwargs) - - def emit_first_non_none_response(self, event_name, **kwargs): - responses = self._events.emit(event_name, **kwargs) - return first_non_none_response(responses) - - def get_component(self, name): - try: - return self._components.get_component(name) - except ValueError: - if name in ['endpoint_resolver', 'exceptions_factory']: - warnings.warn( - f'Fetching the {name} component with the get_component() ' - 'method is deprecated as the component has always been ' - 'considered an internal interface of botocore', - DeprecationWarning, - ) - return self._internal_components.get_component(name) - raise - - def _get_internal_component(self, name): - # While this method may be called by botocore classes outside of the - # Session, this method should **never** be used by a class that lives - # outside of botocore. - return self._internal_components.get_component(name) - - def _register_internal_component(self, name, component): - # While this method may be called by botocore classes outside of the - # Session, this method should **never** be used by a class that lives - # outside of botocore. - return self._internal_components.register_component(name, component) - - def register_component(self, name, component): - self._components.register_component(name, component) - - def lazy_register_component(self, name, component): - self._components.lazy_register_component(name, component) - - @with_current_context() - def create_client( - self, - service_name, - region_name=None, - api_version=None, - use_ssl=True, - verify=None, - endpoint_url=None, - aws_access_key_id=None, - aws_secret_access_key=None, - aws_session_token=None, - config=None, - aws_account_id=None, - ): - """Create a botocore client. - - :type service_name: string - :param service_name: The name of the service for which a client will - be created. You can use the ``Session.get_available_services()`` - method to get a list of all available service names. 
- - :type region_name: string - :param region_name: The name of the region associated with the client. - A client is associated with a single region. - - :type api_version: string - :param api_version: The API version to use. By default, botocore will - use the latest API version when creating a client. You only need - to specify this parameter if you want to use a previous API version - of the client. - - :type use_ssl: boolean - :param use_ssl: Whether or not to use SSL. By default, SSL is used. - Note that not all services support non-ssl connections. - - :type verify: boolean/string - :param verify: Whether or not to verify SSL certificates. - By default SSL certificates are verified. You can provide the - following values: - - * False - do not validate SSL certificates. SSL will still be - used (unless use_ssl is False), but SSL certificates - will not be verified. - * path/to/cert/bundle.pem - A filename of the CA cert bundle to - uses. You can specify this argument if you want to use a - different CA cert bundle than the one used by botocore. - - :type endpoint_url: string - :param endpoint_url: The complete URL to use for the constructed - client. Normally, botocore will automatically construct the - appropriate URL to use when communicating with a service. You can - specify a complete URL (including the "http/https" scheme) to - override this behavior. If this value is provided, then - ``use_ssl`` is ignored. - - :type aws_access_key_id: string - :param aws_access_key_id: The access key to use when creating - the client. This is entirely optional, and if not provided, - the credentials configured for the session will automatically - be used. You only need to provide this argument if you want - to override the credentials used for this specific client. - - :type aws_secret_access_key: string - :param aws_secret_access_key: The secret key to use when creating - the client. Same semantics as aws_access_key_id above. - - :type aws_session_token: string - :param aws_session_token: The session token to use when creating - the client. Same semantics as aws_access_key_id above. - - :type config: botocore.client.Config - :param config: Advanced client configuration options. If a value - is specified in the client config, its value will take precedence - over environment variables and configuration values, but not over - a value passed explicitly to the method. If a default config - object is set on the session, the config object used when creating - the client will be the result of calling ``merge()`` on the - default config with the config provided to this call. - - :type aws_account_id: string - :param aws_account_id: The account id to use when creating - the client. Same semantics as aws_access_key_id above. - - :rtype: botocore.client.BaseClient - :return: A botocore client instance - - """ - default_client_config = self.get_default_client_config() - # If a config is provided and a default config is set, then - # use the config resulting from merging the two. - if config is not None and default_client_config is not None: - config = default_client_config.merge(config) - # If a config was not provided then use the default - # client config from the session - elif default_client_config is not None: - config = default_client_config - - region_name = self._resolve_region_name(region_name, config) - - # Figure out the verify value base on the various - # configuration options. 
- if verify is None: - verify = self.get_config_variable('ca_bundle') - - if api_version is None: - api_version = self.get_config_variable('api_versions').get( - service_name, None - ) - - loader = self.get_component('data_loader') - event_emitter = self.get_component('event_emitter') - response_parser_factory = self.get_component('response_parser_factory') - if config is not None and config.signature_version is UNSIGNED: - credentials = None - elif ( - aws_access_key_id is not None and aws_secret_access_key is not None - ): - credentials = botocore.credentials.Credentials( - access_key=aws_access_key_id, - secret_key=aws_secret_access_key, - token=aws_session_token, - account_id=aws_account_id, - ) - elif self._missing_cred_vars(aws_access_key_id, aws_secret_access_key): - raise PartialCredentialsError( - provider='explicit', - cred_var=self._missing_cred_vars( - aws_access_key_id, aws_secret_access_key - ), - ) - else: - if ignored_credentials := self._get_ignored_credentials( - aws_session_token, aws_account_id - ): - logger.debug( - "Ignoring the following credential-related values which were set without " - "an access key id and secret key on the session or client: %s", - ignored_credentials, - ) - credentials = self.get_credentials() - if getattr(credentials, 'method', None) == 'explicit': - register_feature_id('CREDENTIALS_CODE') - auth_token = self.get_auth_token() - endpoint_resolver = self._get_internal_component('endpoint_resolver') - exceptions_factory = self._get_internal_component('exceptions_factory') - config_store = copy.copy(self.get_component('config_store')) - user_agent_creator = self.get_component('user_agent_creator') - # Session configuration values for the user agent string are applied - # just before each client creation because they may have been modified - # at any time between session creation and client creation. - user_agent_creator.set_session_config( - session_user_agent_name=self.user_agent_name, - session_user_agent_version=self.user_agent_version, - session_user_agent_extra=self.user_agent_extra, - ) - defaults_mode = self._resolve_defaults_mode(config, config_store) - if defaults_mode != 'legacy': - smart_defaults_factory = self._get_internal_component( - 'smart_defaults_factory' - ) - smart_defaults_factory.merge_smart_defaults( - config_store, defaults_mode, region_name - ) - - self._add_configured_endpoint_provider( - client_name=service_name, - config_store=config_store, - ) - - user_agent_creator.set_client_features(get_context().features) - - client_creator = botocore.client.ClientCreator( - loader, - endpoint_resolver, - self.user_agent(), - event_emitter, - retryhandler, - translate, - response_parser_factory, - exceptions_factory, - config_store, - user_agent_creator=user_agent_creator, - auth_token_resolver=self.get_auth_token, - ) - client = client_creator.create_client( - service_name=service_name, - region_name=region_name, - is_secure=use_ssl, - endpoint_url=endpoint_url, - verify=verify, - credentials=credentials, - scoped_config=self.get_scoped_config(), - client_config=config, - api_version=api_version, - auth_token=auth_token, - ) - monitor = self._get_internal_component('monitor') - if monitor is not None: - monitor.register(client.meta.events) - self._register_client_plugins(client) - return client - - def _resolve_region_name(self, region_name, config): - # Figure out the user-provided region based on the various - # configuration options. 
- if region_name is None: - if config and config.region_name is not None: - region_name = config.region_name - else: - region_name = self.get_config_variable('region') - - validate_region_name(region_name) - # For any client that we create in retrieving credentials - # we want to create it using the same region as specified in - # creating this client. It is important to note though that the - # credentials client is only created once per session. So if a new - # client is created with a different region, its credential resolver - # will use the region of the first client. However, that is not an - # issue as of now because the credential resolver uses only STS and - # the credentials returned at regional endpoints are valid across - # all regions in the partition. - self._last_client_region_used = region_name - return region_name - - def _resolve_defaults_mode(self, client_config, config_store): - mode = config_store.get_config_variable('defaults_mode') - - if client_config and client_config.defaults_mode: - mode = client_config.defaults_mode - - default_config_resolver = self._get_internal_component( - 'default_config_resolver' - ) - default_modes = default_config_resolver.get_default_modes() - lmode = mode.lower() - if lmode not in default_modes: - raise InvalidDefaultsMode( - mode=mode, valid_modes=', '.join(default_modes) - ) - - return lmode - - def _add_configured_endpoint_provider(self, client_name, config_store): - chain = ConfiguredEndpointProvider( - full_config=self.full_config, - scoped_config=self.get_scoped_config(), - client_name=client_name, - ) - config_store.set_config_provider( - logical_name='endpoint_url', - provider=chain, - ) - - def _missing_cred_vars(self, access_key, secret_key): - if access_key is not None and secret_key is None: - return 'aws_secret_access_key' - if secret_key is not None and access_key is None: - return 'aws_access_key_id' - return None - - def get_available_partitions(self): - """Lists the available partitions found on disk - - :rtype: list - :return: Returns a list of partition names (e.g., ["aws", "aws-cn"]) - """ - resolver = self._get_internal_component('endpoint_resolver') - return resolver.get_available_partitions() - - def get_partition_for_region(self, region_name): - """Lists the partition name of a particular region. - - :type region_name: string - :param region_name: Name of the region to list partition for (e.g., - us-east-1). - - :rtype: string - :return: Returns the respective partition name (e.g., aws). - """ - resolver = self._get_internal_component('endpoint_resolver') - return resolver.get_partition_for_region(region_name) - - def get_available_regions( - self, service_name, partition_name='aws', allow_non_regional=False - ): - """Lists the region and endpoint names of a particular partition. - - :type service_name: string - :param service_name: Name of a service to list endpoint for (e.g., s3). - This parameter accepts a service name (e.g., "elb") or endpoint - prefix (e.g., "elasticloadbalancing"). - - :type partition_name: string - :param partition_name: Name of the partition to limit endpoints to. - (e.g., aws for the public AWS endpoints, aws-cn for AWS China - endpoints, aws-us-gov for AWS GovCloud (US) Endpoints, etc. - - :type allow_non_regional: bool - :param allow_non_regional: Set to True to include endpoints that are - not regional endpoints (e.g., s3-external-1, - fips-us-gov-west-1, etc). - :return: Returns a list of endpoint names (e.g., ["us-east-1"]). 
- """ - resolver = self._get_internal_component('endpoint_resolver') - results = [] - try: - service_data = self.get_service_data(service_name) - endpoint_prefix = service_data['metadata'].get( - 'endpointPrefix', service_name - ) - results = resolver.get_available_endpoints( - endpoint_prefix, partition_name, allow_non_regional - ) - except UnknownServiceError: - pass - return results - - def _get_ignored_credentials(self, aws_session_token, aws_account_id): - credential_inputs = [] - if aws_session_token: - credential_inputs.append('aws_session_token') - if aws_account_id: - credential_inputs.append('aws_account_id') - return ', '.join(credential_inputs) if credential_inputs else None - - def _register_client_plugins(self, client): - plugins_list = get_botocore_plugins() - if plugins_list == "DISABLED" or not plugins_list: - return - - client_plugins = {} - for plugin in plugins_list.split(','): - try: - name, module = [part.strip() for part in plugin.split('=')] - client_plugins[name] = module - except ValueError: - logger.warning( - "Invalid plugin format: %s. Expected 'name=module'", plugin - ) - - if client_plugins: - load_client_plugins(client, client_plugins) - - -class ComponentLocator: - """Service locator for session components.""" - - def __init__(self): - self._components = {} - self._deferred = {} - - def get_component(self, name): - if name in self._deferred: - factory = self._deferred[name] - self._components[name] = factory() - # Only delete the component from the deferred dict after - # successfully creating the object from the factory as well as - # injecting the instantiated value into the _components dict. - try: - del self._deferred[name] - except KeyError: - # If we get here, it's likely that get_component was called - # concurrently from multiple threads, and another thread - # already deleted the entry. This means the factory was - # probably called twice, but cleaning up the deferred entry - # should not crash outright. - pass - try: - return self._components[name] - except KeyError: - raise ValueError(f"Unknown component: {name}") - - def register_component(self, name, component): - self._components[name] = component - try: - del self._deferred[name] - except KeyError: - pass - - def lazy_register_component(self, name, no_arg_factory): - self._deferred[name] = no_arg_factory - try: - del self._components[name] - except KeyError: - pass - - -class SessionVarDict(MutableMapping): - def __init__(self, session, session_vars): - self._session = session - self._store = copy.copy(session_vars) - - def __getitem__(self, key): - return self._store[key] - - def __setitem__(self, key, value): - self._store[key] = value - self._update_config_store_from_session_vars(key, value) - - def __delitem__(self, key): - del self._store[key] - - def __iter__(self): - return iter(self._store) - - def __len__(self): - return len(self._store) - - def _update_config_store_from_session_vars( - self, logical_name, config_options - ): - # This is for backwards compatibility. The new preferred way to - # modify configuration logic is to use the component system to get - # the config_store component from the session, and then update - # a key with a custom config provider(s). - # This backwards compatibility method takes the old session_vars - # list of tuples and and transforms that into a set of updates to - # the config_store component. 
- config_chain_builder = ConfigChainFactory(session=self._session) - config_name, env_vars, default, typecast = config_options - config_store = self._session.get_component('config_store') - config_store.set_config_provider( - logical_name, - config_chain_builder.create_config_chain( - instance_name=logical_name, - env_var_names=env_vars, - config_property_names=config_name, - default=default, - conversion_func=typecast, - ), - ) - - -class SubsetChainConfigFactory: - """A class for creating backwards compatible configuration chains. - - This class can be used instead of - :class:`botocore.configprovider.ConfigChainFactory` to make it honor the - methods argument to get_config_variable. This class can be used to filter - out providers that are not in the methods tuple when creating a new config - chain. - """ - - def __init__(self, session, methods, environ=None): - self._factory = ConfigChainFactory(session, environ) - self._supported_methods = methods - - def create_config_chain( - self, - instance_name=None, - env_var_names=None, - config_property_name=None, - default=None, - conversion_func=None, - ): - """Build a config chain following the standard botocore pattern. - - This config chain factory will omit any providers not in the methods - tuple provided at initialization. For example if given the tuple - ('instance', 'config',) it will not inject the environment provider - into the standard config chain. This lets the botocore session support - the custom ``methods`` argument for all the default botocore config - variables when calling ``get_config_variable``. - """ - if 'instance' not in self._supported_methods: - instance_name = None - if 'env' not in self._supported_methods: - env_var_names = None - if 'config' not in self._supported_methods: - config_property_name = None - return self._factory.create_config_chain( - instance_name=instance_name, - env_var_names=env_var_names, - config_property_names=config_property_name, - default=default, - conversion_func=conversion_func, - ) - - -def get_session(env_vars=None): - """ - Return a new session object. - """ - return Session(env_vars) diff --git a/venv/Lib/site-packages/botocore/signers.py b/venv/Lib/site-packages/botocore/signers.py deleted file mode 100644 index 1002d47..0000000 --- a/venv/Lib/site-packages/botocore/signers.py +++ /dev/null @@ -1,996 +0,0 @@ -# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
-import base64 -import datetime -import json -import weakref - -import botocore -import botocore.auth -from botocore.awsrequest import create_request_object, prepare_request_dict -from botocore.compat import OrderedDict, get_current_datetime -from botocore.exceptions import ( - ParamValidationError, - UnknownClientMethodError, - UnknownSignatureVersionError, - UnsupportedSignatureVersionError, -) -from botocore.tokens import FrozenAuthToken -from botocore.utils import ( - ArnParser, - datetime2timestamp, - fix_s3_host, # noqa: F401 -) - - -class RequestSigner: - """ - An object to sign requests before they go out over the wire using - one of the authentication mechanisms defined in ``auth.py``. This - class fires two events scoped to a service and operation name: - - * choose-signer: Allows overriding the auth signer name. - * before-sign: Allows mutating the request before signing. - - Together these events allow for customization of the request - signing pipeline, including overrides, request path manipulation, - and disabling signing per operation. - - - :type service_id: botocore.model.ServiceId - :param service_id: The service id for the service, e.g. ``S3`` - - :type region_name: string - :param region_name: Name of the service region, e.g. ``us-east-1`` - - :type signing_name: string - :param signing_name: Service signing name. This is usually the - same as the service name, but can differ. E.g. - ``emr`` vs. ``elasticmapreduce``. - - :type signature_version: string - :param signature_version: Signature name like ``v4``. - - :type credentials: :py:class:`~botocore.credentials.Credentials` - :param credentials: User credentials with which to sign requests. - - :type event_emitter: :py:class:`~botocore.hooks.BaseEventHooks` - :param event_emitter: Extension mechanism to fire events. - """ - - def __init__( - self, - service_id, - region_name, - signing_name, - signature_version, - credentials, - event_emitter, - auth_token=None, - ): - self._region_name = region_name - self._signing_name = signing_name - self._signature_version = signature_version - self._credentials = credentials - self._auth_token = auth_token - self._service_id = service_id - - # We need weakref to prevent leaking memory in Python 2.6 on Linux 2.6 - self._event_emitter = weakref.proxy(event_emitter) - - @property - def region_name(self): - return self._region_name - - @property - def signature_version(self): - return self._signature_version - - @property - def signing_name(self): - return self._signing_name - - def handler(self, operation_name=None, request=None, **kwargs): - # This is typically hooked up to the "request-created" event - # from a client's event emitter. When a new request is created - # this method is invoked to sign the request. - # Don't call this method directly. - return self.sign(operation_name, request) - - def sign( - self, - operation_name, - request, - region_name=None, - signing_type='standard', - expires_in=None, - signing_name=None, - ): - """Sign a request before it goes out over the wire. - - :type operation_name: string - :param operation_name: The name of the current operation, e.g. - ``ListBuckets``. - :type request: AWSRequest - :param request: The request object to be sent over the wire. - - :type region_name: str - :param region_name: The region to sign the request for. - - :type signing_type: str - :param signing_type: The type of signing to perform. This can be one of - three possible values: - - * 'standard' - This should be used for most requests. 
- * 'presign-url' - This should be used when pre-signing a request. - * 'presign-post' - This should be used when pre-signing an S3 post. - - :type expires_in: int - :param expires_in: The number of seconds the presigned url is valid - for. This parameter is only valid for signing type 'presign-url'. - - :type signing_name: str - :param signing_name: The name to use for the service when signing. - """ - explicit_region_name = region_name - if region_name is None: - region_name = self._region_name - - if signing_name is None: - signing_name = self._signing_name - - signature_version = self._choose_signer( - operation_name, signing_type, request.context - ) - - # Allow mutating request before signing - self._event_emitter.emit( - f'before-sign.{self._service_id.hyphenize()}.{operation_name}', - request=request, - signing_name=signing_name, - region_name=self._region_name, - signature_version=signature_version, - request_signer=self, - operation_name=operation_name, - ) - - if signature_version != botocore.UNSIGNED: - kwargs = { - 'signing_name': signing_name, - 'region_name': region_name, - 'signature_version': signature_version, - } - if expires_in is not None: - kwargs['expires'] = expires_in - signing_context = request.context.get('signing', {}) - if not explicit_region_name and signing_context.get('region'): - kwargs['region_name'] = signing_context['region'] - if signing_context.get('signing_name'): - kwargs['signing_name'] = signing_context['signing_name'] - if signing_context.get('request_credentials'): - kwargs['request_credentials'] = signing_context[ - 'request_credentials' - ] - if signing_context.get('identity_cache') is not None: - self._resolve_identity_cache( - kwargs, - signing_context['identity_cache'], - signing_context['cache_key'], - ) - try: - auth = self.get_auth_instance(**kwargs) - except UnknownSignatureVersionError as e: - if signing_type != 'standard': - raise UnsupportedSignatureVersionError( - signature_version=signature_version - ) - else: - raise e - - auth.add_auth(request) - - def _resolve_identity_cache(self, kwargs, cache, cache_key): - kwargs['identity_cache'] = cache - kwargs['cache_key'] = cache_key - - def _choose_signer(self, operation_name, signing_type, context): - """ - Allow setting the signature version via the choose-signer event. - A value of `botocore.UNSIGNED` means no signing will be performed. - - :param operation_name: The operation to sign. - :param signing_type: The type of signing that the signer is to be used - for. - :return: The signature version to sign with. 
- """ - signing_type_suffix_map = { - 'presign-post': '-presign-post', - 'presign-url': '-query', - } - suffix = signing_type_suffix_map.get(signing_type, '') - - # operation specific signing context takes precedent over client-level - # defaults - signature_version = context.get('auth_type') or self._signature_version - signing = context.get('signing', {}) - signing_name = signing.get('signing_name', self._signing_name) - region_name = signing.get('region', self._region_name) - if ( - signature_version is not botocore.UNSIGNED - and not signature_version.endswith(suffix) - ): - signature_version += suffix - - handler, response = self._event_emitter.emit_until_response( - f'choose-signer.{self._service_id.hyphenize()}.{operation_name}', - signing_name=signing_name, - region_name=region_name, - signature_version=signature_version, - context=context, - ) - - if response is not None: - signature_version = response - # The suffix needs to be checked again in case we get an improper - # signature version from choose-signer. - if ( - signature_version is not botocore.UNSIGNED - and not signature_version.endswith(suffix) - ): - signature_version += suffix - - return signature_version - - def get_auth_instance( - self, - signing_name, - region_name, - signature_version=None, - request_credentials=None, - **kwargs, - ): - """ - Get an auth instance which can be used to sign a request - using the given signature version. - - :type signing_name: string - :param signing_name: Service signing name. This is usually the - same as the service name, but can differ. E.g. - ``emr`` vs. ``elasticmapreduce``. - - :type region_name: string - :param region_name: Name of the service region, e.g. ``us-east-1`` - - :type signature_version: string - :param signature_version: Signature name like ``v4``. - - :rtype: :py:class:`~botocore.auth.BaseSigner` - :return: Auth instance to sign a request. - """ - if signature_version is None: - signature_version = self._signature_version - - cls = botocore.auth.AUTH_TYPE_MAPS.get(signature_version) - if cls is None: - raise UnknownSignatureVersionError( - signature_version=signature_version - ) - - if cls.REQUIRES_TOKEN is True: - if self._auth_token and not isinstance( - self._auth_token, FrozenAuthToken - ): - frozen_token = self._auth_token.get_frozen_token() - else: - frozen_token = self._auth_token - auth = cls(frozen_token) - return auth - - credentials = request_credentials or self._credentials - if getattr(cls, "REQUIRES_IDENTITY_CACHE", None) is True: - cache = kwargs["identity_cache"] - key = kwargs["cache_key"] - credentials = cache.get_credentials(key) - del kwargs["cache_key"] - - # If there's no credentials provided (i.e credentials is None), - # then we'll pass a value of "None" over to the auth classes, - # which already handle the cases where no credentials have - # been provided. - frozen_credentials = None - if credentials is not None: - frozen_credentials = credentials.get_frozen_credentials() - kwargs['credentials'] = frozen_credentials - if cls.REQUIRES_REGION: - if self._region_name is None: - raise botocore.exceptions.NoRegionError() - kwargs['region_name'] = region_name - kwargs['service_name'] = signing_name - auth = cls(**kwargs) - return auth - - # Alias get_auth for backwards compatibility. 
- get_auth = get_auth_instance - - def generate_presigned_url( - self, - request_dict, - operation_name, - expires_in=3600, - region_name=None, - signing_name=None, - ): - """Generates a presigned url - - :type request_dict: dict - :param request_dict: The prepared request dictionary returned by - ``botocore.awsrequest.prepare_request_dict()`` - - :type operation_name: str - :param operation_name: The operation being signed. - - :type expires_in: int - :param expires_in: The number of seconds the presigned url is valid - for. By default it expires in an hour (3600 seconds) - - :type region_name: string - :param region_name: The region name to sign the presigned url. - - :type signing_name: str - :param signing_name: The name to use for the service when signing. - - :returns: The presigned url - """ - request = create_request_object(request_dict) - self.sign( - operation_name, - request, - region_name, - 'presign-url', - expires_in, - signing_name, - ) - - request.prepare() - return request.url - - -class CloudFrontSigner: - '''A signer to create a signed CloudFront URL. - - First you create a cloudfront signer based on a normalized RSA signer:: - - import rsa - def rsa_signer(message): - private_key = open('private_key.pem', 'r').read() - return rsa.sign( - message, - rsa.PrivateKey.load_pkcs1(private_key.encode('utf8')), - 'SHA-1') # CloudFront requires SHA-1 hash - cf_signer = CloudFrontSigner(key_id, rsa_signer) - - To sign with a canned policy:: - - signed_url = cf_signer.generate_signed_url( - url, date_less_than=datetime(2015, 12, 1)) - - To sign with a custom policy:: - - signed_url = cf_signer.generate_signed_url(url, policy=my_policy) - ''' - - def __init__(self, key_id, rsa_signer): - """Create a CloudFrontSigner. - - :type key_id: str - :param key_id: The CloudFront Key Pair ID - - :type rsa_signer: callable - :param rsa_signer: An RSA signer. - Its only input parameter will be the message to be signed, - and its output will be the signed content as a binary string. - The hash algorithm needed by CloudFront is SHA-1. - """ - self.key_id = key_id - self.rsa_signer = rsa_signer - - def generate_presigned_url(self, url, date_less_than=None, policy=None): - """Creates a signed CloudFront URL based on given parameters. - - :type url: str - :param url: The URL of the protected object - - :type date_less_than: datetime - :param date_less_than: The URL will expire after that date and time - - :type policy: str - :param policy: The custom policy, possibly built by self.build_policy() - - :rtype: str - :return: The signed URL. - """ - both_args_supplied = date_less_than is not None and policy is not None - neither_arg_supplied = date_less_than is None and policy is None - if both_args_supplied or neither_arg_supplied: - e = 'Need to provide either date_less_than or policy, but not both' - raise ValueError(e) - if date_less_than is not None: - # We still need to build a canned policy for signing purpose - policy = self.build_policy(url, date_less_than) - if isinstance(policy, str): - policy = policy.encode('utf8') - if date_less_than is not None: - params = [f'Expires={int(datetime2timestamp(date_less_than))}'] - else: - params = [f"Policy={self._url_b64encode(policy).decode('utf8')}"] - signature = self.rsa_signer(policy) - params.extend( - [ - f"Signature={self._url_b64encode(signature).decode('utf8')}", - f"Key-Pair-Id={self.key_id}", - ] - ) - return self._build_url(url, params) - - def _build_url(self, base_url, extra_params): - separator = '&' if '?' in base_url else '?' 
- return base_url + separator + '&'.join(extra_params) - - def build_policy( - self, resource, date_less_than, date_greater_than=None, ip_address=None - ): - """A helper to build policy. - - :type resource: str - :param resource: The URL or the stream filename of the protected object - - :type date_less_than: datetime - :param date_less_than: The URL will expire after the time has passed - - :type date_greater_than: datetime - :param date_greater_than: The URL will not be valid until this time - - :type ip_address: str - :param ip_address: Use 'x.x.x.x' for an IP, or 'x.x.x.x/x' for a subnet - - :rtype: str - :return: The policy in a compact string. - """ - # Note: - # 1. Order in canned policy is significant. Special care has been taken - # to ensure the output will match the order defined by the document. - # There is also a test case to ensure that order. - # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-canned-policy.html#private-content-canned-policy-creating-policy-statement - # 2. Albeit the order in custom policy is not required by CloudFront, - # we still use OrderedDict internally to ensure the result is stable - # and also matches canned policy requirement. - # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-custom-policy.html - moment = int(datetime2timestamp(date_less_than)) - condition = OrderedDict({"DateLessThan": {"AWS:EpochTime": moment}}) - if ip_address: - if '/' not in ip_address: - ip_address += '/32' - condition["IpAddress"] = {"AWS:SourceIp": ip_address} - if date_greater_than: - moment = int(datetime2timestamp(date_greater_than)) - condition["DateGreaterThan"] = {"AWS:EpochTime": moment} - ordered_payload = [('Resource', resource), ('Condition', condition)] - custom_policy = {"Statement": [OrderedDict(ordered_payload)]} - return json.dumps(custom_policy, separators=(',', ':')) - - def _url_b64encode(self, data): - # Required by CloudFront. See also: - # http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-linux-openssl.html - return ( - base64.b64encode(data) - .replace(b'+', b'-') - .replace(b'=', b'_') - .replace(b'/', b'~') - ) - - -def add_generate_db_auth_token(class_attributes, **kwargs): - class_attributes['generate_db_auth_token'] = generate_db_auth_token - - -def add_dsql_generate_db_auth_token_methods(class_attributes, **kwargs): - class_attributes['generate_db_connect_auth_token'] = ( - dsql_generate_db_connect_auth_token - ) - class_attributes['generate_db_connect_admin_auth_token'] = ( - dsql_generate_db_connect_admin_auth_token - ) - - -def generate_db_auth_token(self, DBHostname, Port, DBUsername, Region=None): - """Generates an auth token used to connect to a db with IAM credentials. - - :type DBHostname: str - :param DBHostname: The hostname of the database to connect to. - - :type Port: int - :param Port: The port number the database is listening on. - - :type DBUsername: str - :param DBUsername: The username to log in as. - - :type Region: str - :param Region: The region the database is in. If None, the client - region will be used. - - :return: A presigned url which can be used as an auth token. 
- """ - region = Region - if region is None: - region = self.meta.region_name - - params = { - 'Action': 'connect', - 'DBUser': DBUsername, - } - - request_dict = { - 'url_path': '/', - 'query_string': '', - 'headers': {}, - 'body': params, - 'method': 'GET', - } - - # RDS requires that the scheme not be set when sent over. This can cause - # issues when signing because the Python url parsing libraries follow - # RFC 1808 closely, which states that a netloc must be introduced by `//`. - # Otherwise the url is presumed to be relative, and thus the whole - # netloc would be treated as a path component. To work around this we - # introduce https here and remove it once we're done processing it. - scheme = 'https://' - endpoint_url = f'{scheme}{DBHostname}:{Port}' - prepare_request_dict(request_dict, endpoint_url) - presigned_url = self._request_signer.generate_presigned_url( - operation_name='connect', - request_dict=request_dict, - region_name=region, - expires_in=900, - signing_name='rds-db', - ) - return presigned_url[len(scheme) :] - - -def _dsql_generate_db_auth_token( - self, Hostname, Action, Region=None, ExpiresIn=900 -): - """Generate a DSQL database token for an arbitrary action. - - :type Hostname: str - :param Hostname: The DSQL endpoint host name. - - :type Action: str - :param Action: Action to perform on the cluster (DbConnectAdmin or DbConnect). - - :type Region: str - :param Region: The AWS region where the DSQL Cluster is hosted. If None, the client region will be used. - - :type ExpiresIn: int - :param ExpiresIn: The token expiry duration in seconds (default is 900 seconds). - - :return: A presigned url which can be used as an auth token. - """ - possible_actions = ("DbConnect", "DbConnectAdmin") - - if Action not in possible_actions: - raise ParamValidationError( - report=f"Received {Action} for action but expected one of: {', '.join(possible_actions)}" - ) - - if Region is None: - Region = self.meta.region_name - - request_dict = { - 'url_path': '/', - 'query_string': '', - 'headers': {}, - 'body': { - 'Action': Action, - }, - 'method': 'GET', - } - scheme = 'https://' - endpoint_url = f'{scheme}{Hostname}' - prepare_request_dict(request_dict, endpoint_url) - presigned_url = self._request_signer.generate_presigned_url( - operation_name=Action, - request_dict=request_dict, - region_name=Region, - expires_in=ExpiresIn, - signing_name='dsql', - ) - return presigned_url[len(scheme) :] - - -def dsql_generate_db_connect_auth_token( - self, Hostname, Region=None, ExpiresIn=900 -): - """Generate a DSQL database token for the "DbConnect" action. - - :type Hostname: str - :param Hostname: The DSQL endpoint host name. - - :type Region: str - :param Region: The AWS region where the DSQL Cluster is hosted. If None, the client region will be used. - - :type ExpiresIn: int - :param ExpiresIn: The token expiry duration in seconds (default is 900 seconds). - - :return: A presigned url which can be used as an auth token. - """ - return _dsql_generate_db_auth_token( - self, Hostname, "DbConnect", Region, ExpiresIn - ) - - -def dsql_generate_db_connect_admin_auth_token( - self, Hostname, Region=None, ExpiresIn=900 -): - """Generate a DSQL database token for the "DbConnectAdmin" action. - - :type Hostname: str - :param Hostname: The DSQL endpoint host name. - - :type Region: str - :param Region: The AWS region where the DSQL Cluster is hosted. If None, the client region will be used. - - :type ExpiresIn: int - :param ExpiresIn: The token expiry duration in seconds (default is 900 seconds). 
- - :return: A presigned url which can be used as an auth token. - """ - return _dsql_generate_db_auth_token( - self, Hostname, "DbConnectAdmin", Region, ExpiresIn - ) - - -class S3PostPresigner: - def __init__(self, request_signer): - self._request_signer = request_signer - - def generate_presigned_post( - self, - request_dict, - fields=None, - conditions=None, - expires_in=3600, - region_name=None, - ): - """Generates the url and the form fields used for a presigned s3 post - - :type request_dict: dict - :param request_dict: The prepared request dictionary returned by - ``botocore.awsrequest.prepare_request_dict()`` - - :type fields: dict - :param fields: A dictionary of prefilled form fields to build on top - of. - - :type conditions: list - :param conditions: A list of conditions to include in the policy. Each - element can be either a list or a structure. For example: - - .. code:: python - - [ - {"acl": "public-read"}, - {"bucket": "amzn-s3-demo-bucket"}, - ["starts-with", "$key", "mykey"] - ] - - :type expires_in: int - :param expires_in: The number of seconds the presigned post is valid - for. - - :type region_name: string - :param region_name: The region name to sign the presigned post to. - - :rtype: dict - :returns: A dictionary with two elements: ``url`` and ``fields``. - Url is the url to post to. Fields is a dictionary filled with - the form fields and respective values to use when submitting the - post. For example: - - .. code:: python - - { - 'url': 'https://amzn-s3-demo-bucket.s3.amazonaws.com', - 'fields': { - 'acl': 'public-read', - 'key': 'mykey', - 'signature': 'mysignature', - 'policy': 'mybase64 encoded policy' - } - } - """ - if fields is None: - fields = {} - - if conditions is None: - conditions = [] - - # Create the policy for the post. - policy = {} - - # Create an expiration date for the policy - datetime_now = get_current_datetime() - expire_date = datetime_now + datetime.timedelta(seconds=expires_in) - policy['expiration'] = expire_date.strftime(botocore.auth.ISO8601) - - # Append all of the conditions that the user supplied. - policy['conditions'] = [] - for condition in conditions: - policy['conditions'].append(condition) - - # Store the policy and the fields in the request for signing - request = create_request_object(request_dict) - request.context['s3-presign-post-fields'] = fields - request.context['s3-presign-post-policy'] = policy - - self._request_signer.sign( - 'PutObject', request, region_name, 'presign-post' - ) - # Return the url and the fields for th form to post. - return {'url': request.url, 'fields': fields} - - -def add_generate_presigned_url(class_attributes, **kwargs): - class_attributes['generate_presigned_url'] = generate_presigned_url - - -def generate_presigned_url( - self, ClientMethod, Params=None, ExpiresIn=3600, HttpMethod=None -): - """Generate a presigned url given a client, its method, and arguments - - :type ClientMethod: string - :param ClientMethod: The client method to presign for - - :type Params: dict - :param Params: The parameters normally passed to - ``ClientMethod``. - - :type ExpiresIn: int - :param ExpiresIn: The number of seconds the presigned url is valid - for. By default it expires in an hour (3600 seconds) - - :type HttpMethod: string - :param HttpMethod: The http method to use on the generated url. By - default, the http method is whatever is used in the method's model. 
- - :returns: The presigned url - """ - client_method = ClientMethod - params = Params - if params is None: - params = {} - expires_in = ExpiresIn - http_method = HttpMethod - context = { - 'is_presign_request': True, - 'use_global_endpoint': _should_use_global_endpoint(self), - } - - request_signer = self._request_signer - - try: - operation_name = self._PY_TO_OP_NAME[client_method] - except KeyError: - raise UnknownClientMethodError(method_name=client_method) - - operation_model = self.meta.service_model.operation_model(operation_name) - params = self._emit_api_params( - api_params=params, - operation_model=operation_model, - context=context, - ) - bucket_is_arn = ArnParser.is_arn(params.get('Bucket', '')) - ( - endpoint_url, - additional_headers, - properties, - ) = self._resolve_endpoint_ruleset( - operation_model, - params, - context, - ignore_signing_region=(not bucket_is_arn), - ) - - request_dict = self._convert_to_request_dict( - api_params=params, - operation_model=operation_model, - endpoint_url=endpoint_url, - context=context, - headers=additional_headers, - set_user_agent_header=False, - ) - - # Switch out the http method if user specified it. - if http_method is not None: - request_dict['method'] = http_method - - # Generate the presigned url. - return request_signer.generate_presigned_url( - request_dict=request_dict, - expires_in=expires_in, - operation_name=operation_name, - ) - - -def add_generate_presigned_post(class_attributes, **kwargs): - class_attributes['generate_presigned_post'] = generate_presigned_post - - -def generate_presigned_post( - self, Bucket, Key, Fields=None, Conditions=None, ExpiresIn=3600 -): - """Builds the url and the form fields used for a presigned s3 post - - :type Bucket: string - :param Bucket: The name of the bucket to presign the post to. Note that - bucket related conditions should not be included in the - ``conditions`` parameter. - - :type Key: string - :param Key: Key name, optionally add ${filename} to the end to - attach the submitted filename. Note that key related conditions and - fields are filled out for you and should not be included in the - ``Fields`` or ``Conditions`` parameter. - - :type Fields: dict - :param Fields: A dictionary of prefilled form fields to build on top - of. Elements that may be included are acl, Cache-Control, - Content-Type, Content-Disposition, Content-Encoding, Expires, - success_action_redirect, redirect, success_action_status, - and x-amz-meta-. - - Note that if a particular element is included in the fields - dictionary it will not be automatically added to the conditions - list. You must specify a condition for the element as well. - - :type Conditions: list - :param Conditions: A list of conditions to include in the policy. Each - element can be either a list or a structure. For example: - - .. code:: python - - [ - {"acl": "public-read"}, - ["content-length-range", 2, 5], - ["starts-with", "$success_action_redirect", ""] - ] - - Conditions that are included may pertain to acl, - content-length-range, Cache-Control, Content-Type, - Content-Disposition, Content-Encoding, Expires, - success_action_redirect, redirect, success_action_status, - and/or x-amz-meta-. - - Note that if you include a condition, you must specify - a valid value in the fields dictionary as well. A value will - not be added automatically to the fields dictionary based on the - conditions. - - :type ExpiresIn: int - :param ExpiresIn: The number of seconds the presigned post - is valid for. 
- - :rtype: dict - :returns: A dictionary with two elements: ``url`` and ``fields``. - Url is the url to post to. Fields is a dictionary filled with - the form fields and respective values to use when submitting the - post. For example: - - .. code:: python - - { - 'url': 'https://amzn-s3-demo-bucket.s3.amazonaws.com', - 'fields': { - 'acl': 'public-read', - 'key': 'mykey', - 'signature': 'mysignature', - 'policy': 'mybase64 encoded policy' - } - } - """ - bucket = Bucket - key = Key - fields = Fields - conditions = Conditions - expires_in = ExpiresIn - - if fields is None: - fields = {} - else: - fields = fields.copy() - - if conditions is None: - conditions = [] - - context = { - 'is_presign_request': True, - 'use_global_endpoint': _should_use_global_endpoint(self), - } - - post_presigner = S3PostPresigner(self._request_signer) - - # We choose the CreateBucket operation model because its url gets - # serialized to what a presign post requires. - operation_model = self.meta.service_model.operation_model('CreateBucket') - params = self._emit_api_params( - api_params={'Bucket': bucket}, - operation_model=operation_model, - context=context, - ) - bucket_is_arn = ArnParser.is_arn(params.get('Bucket', '')) - ( - endpoint_url, - additional_headers, - properties, - ) = self._resolve_endpoint_ruleset( - operation_model, - params, - context, - ignore_signing_region=(not bucket_is_arn), - ) - - request_dict = self._convert_to_request_dict( - api_params=params, - operation_model=operation_model, - endpoint_url=endpoint_url, - context=context, - headers=additional_headers, - set_user_agent_header=False, - ) - - # Append that the bucket name to the list of conditions. - conditions.append({'bucket': bucket}) - - # If the key ends with filename, the only constraint that can be - # imposed is if it starts with the specified prefix. - if key.endswith('${filename}'): - conditions.append(["starts-with", '$key', key[: -len('${filename}')]]) - else: - conditions.append({'key': key}) - - # Add the key to the fields. - fields['key'] = key - - return post_presigner.generate_presigned_post( - request_dict=request_dict, - fields=fields, - conditions=conditions, - expires_in=expires_in, - ) - - -def _should_use_global_endpoint(client): - if client.meta.partition != 'aws': - return False - s3_config = client.meta.config.s3 - if s3_config: - if s3_config.get('use_dualstack_endpoint', False): - return False - if ( - s3_config.get('us_east_1_regional_endpoint') == 'regional' - and client.meta.config.region_name == 'us-east-1' - ): - return False - if s3_config.get('addressing_style') == 'virtual': - return False - return True diff --git a/venv/Lib/site-packages/botocore/stub.py b/venv/Lib/site-packages/botocore/stub.py deleted file mode 100644 index d85294a..0000000 --- a/venv/Lib/site-packages/botocore/stub.py +++ /dev/null @@ -1,452 +0,0 @@ -# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
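For reference, the two client-level helpers shown above in the removed signers.py (``generate_presigned_url`` and ``generate_presigned_post``) are normally invoked through an S3 client rather than called directly. A minimal sketch, assuming boto3 is installed, credentials and a region are configured, and the bucket/key names are placeholders::

    import boto3

    s3 = boto3.client("s3")

    # Presigned GET URL, valid for 10 minutes.
    url = s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": "amzn-s3-demo-bucket", "Key": "mykey"},
        ExpiresIn=600,
    )

    # Presigned POST with a content-length condition; the caller submits the
    # returned fields as a multipart form to post["url"].
    post = s3.generate_presigned_post(
        Bucket="amzn-s3-demo-bucket",
        Key="uploads/${filename}",
        Conditions=[["content-length-range", 1, 10 * 1024 * 1024]],
        ExpiresIn=600,
    )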
-import copy -from collections import deque -from pprint import pformat - -from botocore.awsrequest import AWSResponse -from botocore.exceptions import ( - ParamValidationError, - StubAssertionError, - StubResponseError, - UnStubbedResponseError, -) -from botocore.validate import validate_parameters - - -class _ANY: - """ - A helper object that compares equal to everything. Copied from - unittest.mock - """ - - def __eq__(self, other): - return True - - def __ne__(self, other): - return False - - def __repr__(self): - return '' - - -ANY = _ANY() - - -class Stubber: - """ - This class will allow you to stub out requests so you don't have to hit - an endpoint to write tests. Responses are returned first in, first out. - If operations are called out of order, or are called with no remaining - queued responses, an error will be raised. - - **Example:** - :: - import datetime - import botocore.session - from botocore.stub import Stubber - - - s3 = botocore.session.get_session().create_client('s3') - stubber = Stubber(s3) - - response = { - 'IsTruncated': False, - 'Name': 'test-bucket', - 'MaxKeys': 1000, 'Prefix': '', - 'Contents': [{ - 'Key': 'test.txt', - 'ETag': '"abc123"', - 'StorageClass': 'STANDARD', - 'LastModified': datetime.datetime(2016, 1, 20, 22, 9), - 'Owner': {'ID': 'abc123', 'DisplayName': 'myname'}, - 'Size': 14814 - }], - 'EncodingType': 'url', - 'ResponseMetadata': { - 'RequestId': 'abc123', - 'HTTPStatusCode': 200, - 'HostId': 'abc123' - }, - 'Marker': '' - } - - expected_params = {'Bucket': 'test-bucket'} - - stubber.add_response('list_objects', response, expected_params) - stubber.activate() - - service_response = s3.list_objects(Bucket='test-bucket') - assert service_response == response - - - This class can also be called as a context manager, which will handle - activation / deactivation for you. - - **Example:** - :: - import datetime - import botocore.session - from botocore.stub import Stubber - - - s3 = botocore.session.get_session().create_client('s3') - - response = { - "Owner": { - "ID": "foo", - "DisplayName": "bar" - }, - "Buckets": [{ - "CreationDate": datetime.datetime(2016, 1, 20, 22, 9), - "Name": "baz" - }] - } - - - with Stubber(s3) as stubber: - stubber.add_response('list_buckets', response, {}) - service_response = s3.list_buckets() - - assert service_response == response - - - If you have an input parameter that is a randomly generated value, or you - otherwise don't care about its value, you can use ``stub.ANY`` to ignore - it in validation. - - **Example:** - :: - import datetime - import botocore.session - from botocore.stub import Stubber, ANY - - - s3 = botocore.session.get_session().create_client('s3') - stubber = Stubber(s3) - - response = { - 'IsTruncated': False, - 'Name': 'test-bucket', - 'MaxKeys': 1000, 'Prefix': '', - 'Contents': [{ - 'Key': 'test.txt', - 'ETag': '"abc123"', - 'StorageClass': 'STANDARD', - 'LastModified': datetime.datetime(2016, 1, 20, 22, 9), - 'Owner': {'ID': 'abc123', 'DisplayName': 'myname'}, - 'Size': 14814 - }], - 'EncodingType': 'url', - 'ResponseMetadata': { - 'RequestId': 'abc123', - 'HTTPStatusCode': 200, - 'HostId': 'abc123' - }, - 'Marker': '' - } - - expected_params = {'Bucket': ANY} - stubber.add_response('list_objects', response, expected_params) - - with stubber: - service_response = s3.list_objects(Bucket='test-bucket') - - assert service_response == response - """ - - def __init__(self, client): - """ - :param client: The client to add your stubs to. 
- """ - self.client = client - self._event_id = 'boto_stubber' - self._expected_params_event_id = 'boto_stubber_expected_params' - self._stub_account_id_event_id = 'boto_stubber_stub_account_id' - self._queue = deque() - - def __enter__(self): - self.activate() - return self - - def __exit__(self, exception_type, exception_value, traceback): - self.deactivate() - - def activate(self): - """ - Activates the stubber on the client - """ - self.client.meta.events.register_first( - 'before-parameter-build.*.*', - self._assert_expected_params, - unique_id=self._expected_params_event_id, - ) - self.client.meta.events.register( - 'before-call.*.*', - self._get_response_handler, - unique_id=self._event_id, - ) - self.client.meta.events.register( - 'before-endpoint-resolution.*', - self._set_account_id_for_endpoint_resolution, - unique_id=self._stub_account_id_event_id, - ) - - def deactivate(self): - """ - Deactivates the stubber on the client - """ - self.client.meta.events.unregister( - 'before-parameter-build.*.*', - self._assert_expected_params, - unique_id=self._expected_params_event_id, - ) - self.client.meta.events.unregister( - 'before-call.*.*', - self._get_response_handler, - unique_id=self._event_id, - ) - self.client.meta.events.unregister( - 'before-endpoint-resolution.*', - self._stub_account_id_event_id, - unique_id=self._stub_account_id_event_id, - ) - - def add_response(self, method, service_response, expected_params=None): - """ - Adds a service response to the response queue. This will be validated - against the service model to ensure correctness. It should be noted, - however, that while missing attributes are often considered correct, - your code may not function properly if you leave them out. Therefore - you should always fill in every value you see in a typical response for - your particular request. - - :param method: The name of the client method to stub. - :type method: str - - :param service_response: A dict response stub. Provided parameters will - be validated against the service model. - :type service_response: dict - - :param expected_params: A dictionary of the expected parameters to - be called for the provided service response. The parameters match - the names of keyword arguments passed to that client call. If - any of the parameters differ a ``StubResponseError`` is thrown. - You can use stub.ANY to indicate a particular parameter to ignore - in validation. stub.ANY is only valid for top level params. - """ - self._add_response(method, service_response, expected_params) - - def _add_response(self, method, service_response, expected_params): - if not hasattr(self.client, method): - raise ValueError( - f"Client {self.client.meta.service_model.service_name} " - f"does not have method: {method}" - ) - - # Create a successful http response - http_response = AWSResponse(None, 200, {}, None) - - operation_name = self.client.meta.method_to_api_mapping.get(method) - self._validate_operation_response(operation_name, service_response) - - # Add the service_response to the queue for returning responses - response = { - 'operation_name': operation_name, - 'response': (http_response, service_response), - 'expected_params': expected_params, - } - self._queue.append(response) - - def add_client_error( - self, - method, - service_error_code='', - service_message='', - http_status_code=400, - service_error_meta=None, - expected_params=None, - response_meta=None, - modeled_fields=None, - ): - """ - Adds a ``ClientError`` to the response queue. 
- - :param method: The name of the service method to return the error on. - :type method: str - - :param service_error_code: The service error code to return, - e.g. ``NoSuchBucket`` - :type service_error_code: str - - :param service_message: The service message to return, e.g. - 'The specified bucket does not exist.' - :type service_message: str - - :param http_status_code: The HTTP status code to return, e.g. 404, etc - :type http_status_code: int - - :param service_error_meta: Additional keys to be added to the - service Error - :type service_error_meta: dict - - :param expected_params: A dictionary of the expected parameters to - be called for the provided service response. The parameters match - the names of keyword arguments passed to that client call. If - any of the parameters differ a ``StubResponseError`` is thrown. - You can use stub.ANY to indicate a particular parameter to ignore - in validation. - - :param response_meta: Additional keys to be added to the - response's ResponseMetadata - :type response_meta: dict - - :param modeled_fields: Additional keys to be added to the response - based on fields that are modeled for the particular error code. - These keys will be validated against the particular error shape - designated by the error code. - :type modeled_fields: dict - - """ - http_response = AWSResponse(None, http_status_code, {}, None) - - # We don't look to the model to build this because the caller would - # need to know the details of what the HTTP body would need to - # look like. - parsed_response = { - 'ResponseMetadata': {'HTTPStatusCode': http_status_code}, - 'Error': {'Message': service_message, 'Code': service_error_code}, - } - - if service_error_meta is not None: - parsed_response['Error'].update(service_error_meta) - - if response_meta is not None: - parsed_response['ResponseMetadata'].update(response_meta) - - if modeled_fields is not None: - service_model = self.client.meta.service_model - shape = service_model.shape_for_error_code(service_error_code) - self._validate_response(shape, modeled_fields) - parsed_response.update(modeled_fields) - - operation_name = self.client.meta.method_to_api_mapping.get(method) - # Note that we do not allow for expected_params while - # adding errors into the queue yet. - response = { - 'operation_name': operation_name, - 'response': (http_response, parsed_response), - 'expected_params': expected_params, - } - self._queue.append(response) - - def assert_no_pending_responses(self): - """ - Asserts that all expected calls were made. - """ - remaining = len(self._queue) - if remaining != 0: - raise AssertionError(f"{remaining} responses remaining in queue.") - - def _assert_expected_call_order(self, model, params): - if not self._queue: - raise UnStubbedResponseError( - operation_name=model.name, - reason=( - 'Unexpected API Call: A call was made but no additional ' - 'calls expected. Either the API Call was not stubbed or ' - 'it was called multiple times.' - ), - ) - - name = self._queue[0]['operation_name'] - if name != model.name: - raise StubResponseError( - operation_name=model.name, - reason=f'Operation mismatch: found response for {name}.', - ) - - def _set_account_id_for_endpoint_resolution(self, builtins, **kwargs): - # Account ID comes from credentials and will try to resolve on endpoint resolution - # when it's a builtin. This breaks any stubber in environments where credentials - # are not available. We mock it to be a None value so that we don't attempt to - # resolve credentials. 
- if 'AWS::Auth::AccountId' in builtins: - builtins['AWS::Auth::AccountId'] = None - - def _get_response_handler(self, model, params, context, **kwargs): - self._assert_expected_call_order(model, params) - # Pop off the entire response once everything has been validated - return self._queue.popleft()['response'] - - def _assert_expected_params(self, model, params, context, **kwargs): - if self._should_not_stub(context): - return - self._assert_expected_call_order(model, params) - expected_params = self._queue[0]['expected_params'] - if expected_params is None: - return - - # Validate the parameters are equal - for param, value in expected_params.items(): - if param not in params or expected_params[param] != params[param]: - raise StubAssertionError( - operation_name=model.name, - reason=( - f'Expected parameters:\n{pformat(expected_params)},\n' - f'but received:\n{pformat(params)}' - ), - ) - - # Ensure there are no extra params hanging around - if sorted(expected_params.keys()) != sorted(params.keys()): - raise StubAssertionError( - operation_name=model.name, - reason=( - f'Expected parameters:\n{pformat(expected_params)},\n' - f'but received:\n{pformat(params)}' - ), - ) - - def _should_not_stub(self, context): - # Do not include presign requests when processing stubbed client calls - # as a presign request will never have an HTTP request sent over the - # wire for it and therefore not receive a response back. - if context and context.get('is_presign_request'): - return True - - def _validate_operation_response(self, operation_name, service_response): - service_model = self.client.meta.service_model - operation_model = service_model.operation_model(operation_name) - output_shape = operation_model.output_shape - - # Remove ResponseMetadata so that the validator doesn't attempt to - # perform validation on it. - response = service_response - if 'ResponseMetadata' in response: - response = copy.copy(service_response) - del response['ResponseMetadata'] - - self._validate_response(output_shape, response) - - def _validate_response(self, shape, response): - if shape is not None: - validate_parameters(response, shape) - elif response: - # If the output shape is None, that means the response should be - # empty apart from ResponseMetadata - raise ParamValidationError( - report=( - "Service response should only contain ResponseMetadata." - ) - ) diff --git a/venv/Lib/site-packages/botocore/tokens.py b/venv/Lib/site-packages/botocore/tokens.py deleted file mode 100644 index d908618..0000000 --- a/venv/Lib/site-packages/botocore/tokens.py +++ /dev/null @@ -1,363 +0,0 @@ -# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
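Unlike ``add_response``, the ``add_client_error`` method documented above carries no usage example in its docstring. A minimal sketch, assuming standard botocore imports and placeholder bucket/key names::

    import botocore.session
    from botocore.exceptions import ClientError
    from botocore.stub import Stubber

    s3 = botocore.session.get_session().create_client("s3", region_name="us-east-1")
    stubber = Stubber(s3)
    stubber.add_client_error(
        "head_object",
        service_error_code="404",
        service_message="Not Found",
        http_status_code=404,
        expected_params={"Bucket": "test-bucket", "Key": "missing.txt"},
    )

    with stubber:
        try:
            s3.head_object(Bucket="test-bucket", Key="missing.txt")
        except ClientError as err:
            print(err.response["Error"]["Code"])  # "404"
    stubber.assert_no_pending_responses()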
-import json -import logging -import os -import threading -from datetime import datetime, timedelta -from typing import NamedTuple, Optional - -import dateutil.parser -from dateutil.tz import tzutc - -from botocore import UNSIGNED -from botocore.compat import total_seconds -from botocore.config import Config -from botocore.exceptions import ( - ClientError, - InvalidConfigError, - TokenRetrievalError, -) -from botocore.utils import ( - CachedProperty, - JSONFileCache, - SSOTokenLoader, - create_nested_client, - get_token_from_environment, -) - -logger = logging.getLogger(__name__) - - -def _utc_now(): - return datetime.now(tzutc()) - - -def create_token_resolver(session): - providers = [ - ScopedEnvTokenProvider(session), - SSOTokenProvider(session), - ] - return TokenProviderChain(providers=providers) - - -def _serialize_utc_timestamp(obj): - if isinstance(obj, datetime): - return obj.strftime("%Y-%m-%dT%H:%M:%SZ") - return obj - - -def _sso_json_dumps(obj): - return json.dumps(obj, default=_serialize_utc_timestamp) - - -class FrozenAuthToken(NamedTuple): - token: str - expiration: Optional[datetime] = None - - -class DeferredRefreshableToken: - # The time at which we'll attempt to refresh, but not block if someone else - # is refreshing. - _advisory_refresh_timeout = 15 * 60 - # The time at which all threads will block waiting for a refreshed token - _mandatory_refresh_timeout = 10 * 60 - # Refresh at most once every minute to avoid blocking every request - _attempt_timeout = 60 - - def __init__(self, method, refresh_using, time_fetcher=_utc_now): - self._time_fetcher = time_fetcher - self._refresh_using = refresh_using - self.method = method - - # The frozen token is protected by this lock - self._refresh_lock = threading.Lock() - self._frozen_token = None - self._next_refresh = None - - def get_frozen_token(self): - self._refresh() - return self._frozen_token - - def _refresh(self): - # If we don't need to refresh just return - refresh_type = self._should_refresh() - if not refresh_type: - return None - - # Block for refresh if we're in the mandatory refresh window - block_for_refresh = refresh_type == "mandatory" - if self._refresh_lock.acquire(block_for_refresh): - try: - self._protected_refresh() - finally: - self._refresh_lock.release() - - def _protected_refresh(self): - # This should only be called after acquiring the refresh lock - # Another thread may have already refreshed, double check refresh - refresh_type = self._should_refresh() - if not refresh_type: - return None - - try: - now = self._time_fetcher() - self._next_refresh = now + timedelta(seconds=self._attempt_timeout) - self._frozen_token = self._refresh_using() - except Exception: - logger.warning( - "Refreshing token failed during the %s refresh period.", - refresh_type, - exc_info=True, - ) - if refresh_type == "mandatory": - # This refresh was mandatory, error must be propagated back - raise - - if self._is_expired(): - # Fresh credentials should never be expired - raise TokenRetrievalError( - provider=self.method, - error_msg="Token has expired and refresh failed", - ) - - def _is_expired(self): - if self._frozen_token is None: - return False - - expiration = self._frozen_token.expiration - remaining = total_seconds(expiration - self._time_fetcher()) - return remaining <= 0 - - def _should_refresh(self): - if self._frozen_token is None: - # We don't have a token yet, mandatory refresh - return "mandatory" - - expiration = self._frozen_token.expiration - if expiration is None: - # No expiration, so assume we don't 
need to refresh. - return None - - now = self._time_fetcher() - if now < self._next_refresh: - return None - - remaining = total_seconds(expiration - now) - - if remaining < self._mandatory_refresh_timeout: - return "mandatory" - elif remaining < self._advisory_refresh_timeout: - return "advisory" - - return None - - -class TokenProviderChain: - def __init__(self, providers=None): - if providers is None: - providers = [] - self._providers = providers - - def load_token(self, **kwargs): - for provider in self._providers: - token = provider.load_token(**kwargs) - if token is not None: - return token - return None - - -class SSOTokenProvider: - METHOD = "sso" - _REFRESH_WINDOW = 15 * 60 - _SSO_TOKEN_CACHE_DIR = os.path.expanduser( - os.path.join("~", ".aws", "sso", "cache") - ) - _SSO_CONFIG_VARS = [ - "sso_start_url", - "sso_region", - ] - _GRANT_TYPE = "refresh_token" - DEFAULT_CACHE_CLS = JSONFileCache - - def __init__( - self, session, cache=None, time_fetcher=_utc_now, profile_name=None - ): - self._session = session - if cache is None: - cache = self.DEFAULT_CACHE_CLS( - self._SSO_TOKEN_CACHE_DIR, - dumps_func=_sso_json_dumps, - ) - self._now = time_fetcher - self._cache = cache - self._token_loader = SSOTokenLoader(cache=self._cache) - self._profile_name = ( - profile_name - or self._session.get_config_variable("profile") - or 'default' - ) - - def _load_sso_config(self): - loaded_config = self._session.full_config - profiles = loaded_config.get("profiles", {}) - sso_sessions = loaded_config.get("sso_sessions", {}) - profile_config = profiles.get(self._profile_name, {}) - - if "sso_session" not in profile_config: - return - - sso_session_name = profile_config["sso_session"] - sso_config = sso_sessions.get(sso_session_name, None) - - if not sso_config: - error_msg = ( - f'The profile "{self._profile_name}" is configured to use the SSO ' - f'token provider but the "{sso_session_name}" sso_session ' - f"configuration does not exist." - ) - raise InvalidConfigError(error_msg=error_msg) - - missing_configs = [] - for var in self._SSO_CONFIG_VARS: - if var not in sso_config: - missing_configs.append(var) - - if missing_configs: - error_msg = ( - f'The profile "{self._profile_name}" is configured to use the SSO ' - f"token provider but is missing the following configuration: " - f"{missing_configs}." 
- ) - raise InvalidConfigError(error_msg=error_msg) - - return { - "session_name": sso_session_name, - "sso_region": sso_config["sso_region"], - "sso_start_url": sso_config["sso_start_url"], - } - - @CachedProperty - def _sso_config(self): - return self._load_sso_config() - - @CachedProperty - def _client(self): - config = Config( - region_name=self._sso_config["sso_region"], - signature_version=UNSIGNED, - ) - return create_nested_client(self._session, "sso-oidc", config=config) - - def _attempt_create_token(self, token): - response = self._client.create_token( - grantType=self._GRANT_TYPE, - clientId=token["clientId"], - clientSecret=token["clientSecret"], - refreshToken=token["refreshToken"], - ) - expires_in = timedelta(seconds=response["expiresIn"]) - new_token = { - "startUrl": self._sso_config["sso_start_url"], - "region": self._sso_config["sso_region"], - "accessToken": response["accessToken"], - "expiresAt": self._now() + expires_in, - # Cache the registration alongside the token - "clientId": token["clientId"], - "clientSecret": token["clientSecret"], - "registrationExpiresAt": token["registrationExpiresAt"], - } - if "refreshToken" in response: - new_token["refreshToken"] = response["refreshToken"] - logger.info("SSO Token refresh succeeded") - return new_token - - def _refresh_access_token(self, token): - keys = ( - "refreshToken", - "clientId", - "clientSecret", - "registrationExpiresAt", - ) - missing_keys = [k for k in keys if k not in token] - if missing_keys: - msg = f"Unable to refresh SSO token: missing keys: {missing_keys}" - logger.info(msg) - return None - - expiry = dateutil.parser.parse(token["registrationExpiresAt"]) - if total_seconds(expiry - self._now()) <= 0: - logger.info("SSO token registration expired at %s", expiry) - return None - - try: - return self._attempt_create_token(token) - except ClientError: - logger.warning("SSO token refresh attempt failed", exc_info=True) - return None - - def _refresher(self): - start_url = self._sso_config["sso_start_url"] - session_name = self._sso_config["session_name"] - logger.info("Loading cached SSO token for %s", session_name) - token_dict = self._token_loader(start_url, session_name=session_name) - expiration = dateutil.parser.parse(token_dict["expiresAt"]) - logger.debug("Cached SSO token expires at %s", expiration) - - remaining = total_seconds(expiration - self._now()) - if remaining < self._REFRESH_WINDOW: - new_token_dict = self._refresh_access_token(token_dict) - if new_token_dict is not None: - token_dict = new_token_dict - expiration = token_dict["expiresAt"] - self._token_loader.save_token( - start_url, token_dict, session_name=session_name - ) - - return FrozenAuthToken( - token_dict["accessToken"], expiration=expiration - ) - - def load_token(self, **kwargs): - if self._sso_config is None: - return None - - return DeferredRefreshableToken( - self.METHOD, self._refresher, time_fetcher=self._now - ) - - -class ScopedEnvTokenProvider: - """ - Token provider that loads tokens from environment variables scoped to - a specific `signing_name`. 
- """ - - METHOD = 'env' - - def __init__(self, session, environ=None): - self._session = session - if environ is None: - environ = os.environ - self.environ = environ - - def load_token(self, **kwargs): - signing_name = kwargs.get("signing_name") - if signing_name is None: - return None - - token = get_token_from_environment(signing_name, self.environ) - - if token is not None: - logger.info("Found token in environment variables.") - return FrozenAuthToken(token) diff --git a/venv/Lib/site-packages/botocore/translate.py b/venv/Lib/site-packages/botocore/translate.py deleted file mode 100644 index ecfe3bc..0000000 --- a/venv/Lib/site-packages/botocore/translate.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ -# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import copy - -from botocore.utils import merge_dicts - - -def build_retry_config( - endpoint_prefix, retry_model, definitions, client_retry_config=None -): - service_config = retry_model.get(endpoint_prefix, {}) - resolve_references(service_config, definitions) - # We want to merge the global defaults with the service specific - # defaults, with the service specific defaults taking precedence. - # So we use the global defaults as the base. - # - # A deepcopy is done on the retry defaults because it ensures the - # retry model has no chance of getting mutated when the service specific - # configuration or client retry config is merged in. - final_retry_config = { - '__default__': copy.deepcopy(retry_model.get('__default__', {})) - } - resolve_references(final_retry_config, definitions) - # The merge the service specific config on top. - merge_dicts(final_retry_config, service_config) - if client_retry_config is not None: - _merge_client_retry_config(final_retry_config, client_retry_config) - return final_retry_config - - -def _merge_client_retry_config(retry_config, client_retry_config): - max_retry_attempts_override = client_retry_config.get('max_attempts') - if max_retry_attempts_override is not None: - # In the retry config, the max_attempts refers to the maximum number - # of requests in general will be made. However, for the client's - # retry config it refers to how many retry attempts will be made at - # most. So to translate this number from the client config, one is - # added to convert it to the maximum number request that will be made - # by including the initial request. - # - # It is also important to note that if we ever support per operation - # configuration in the retry model via the client, we will need to - # revisit this logic to make sure max_attempts gets applied - # per operation. - retry_config['__default__']['max_attempts'] = ( - max_retry_attempts_override + 1 - ) - - -def resolve_references(config, definitions): - """Recursively replace $ref keys. 
- - To cut down on duplication, common definitions can be declared - (and passed in via the ``definitions`` attribute) and then - references as {"$ref": "name"}, when this happens the reference - dict is placed with the value from the ``definition`` dict. - - This is recursively done. - - """ - for key, value in config.items(): - if isinstance(value, dict): - if len(value) == 1 and list(value.keys())[0] == '$ref': - # Then we need to resolve this reference. - config[key] = definitions[list(value.values())[0]] - else: - resolve_references(value, definitions) diff --git a/venv/Lib/site-packages/botocore/useragent.py b/venv/Lib/site-packages/botocore/useragent.py deleted file mode 100644 index 9054728..0000000 --- a/venv/Lib/site-packages/botocore/useragent.py +++ /dev/null @@ -1,672 +0,0 @@ -# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -""" -NOTE: All classes and functions in this module are considered private and are -subject to abrupt breaking changes. Please do not use them directly. - -To modify the User-Agent header sent by botocore, use one of these -configuration options: -* The ``AWS_SDK_UA_APP_ID`` environment variable. -* The ``sdk_ua_app_id`` setting in the shared AWS config file. -* The ``user_agent_appid`` field in the :py:class:`botocore.config.Config`. -* The ``user_agent_extra`` field in the :py:class:`botocore.config.Config`. - -""" - -import logging -import os -import platform -from copy import copy -from string import ascii_letters, digits -from typing import NamedTuple, Optional - -from botocore import __version__ as botocore_version -from botocore.compat import HAS_CRT -from botocore.context import get_context - -logger = logging.getLogger(__name__) - - -_USERAGENT_ALLOWED_CHARACTERS = ascii_letters + digits + "!$%&'*+-.^_`|~," -_USERAGENT_ALLOWED_OS_NAMES = ( - 'windows', - 'linux', - 'macos', - 'android', - 'ios', - 'watchos', - 'tvos', - 'other', -) -_USERAGENT_PLATFORM_NAME_MAPPINGS = {'darwin': 'macos'} -# The name by which botocore is identified in the User-Agent header. While most -# AWS SDKs follow a naming pattern of "aws-sdk-*", botocore and boto3 continue -# using their existing values. Uses uppercase "B" with all other characters -# lowercase. 
-_USERAGENT_SDK_NAME = 'Botocore' -_USERAGENT_FEATURE_MAPPINGS = { - 'WAITER': 'B', - 'PAGINATOR': 'C', - "RETRY_MODE_LEGACY": "D", - "RETRY_MODE_STANDARD": "E", - "RETRY_MODE_ADAPTIVE": "F", - 'S3_TRANSFER': 'G', - 'GZIP_REQUEST_COMPRESSION': 'L', - 'PROTOCOL_RPC_V2_CBOR': 'M', - 'ENDPOINT_OVERRIDE': 'N', - 'ACCOUNT_ID_MODE_PREFERRED': 'P', - 'ACCOUNT_ID_MODE_DISABLED': 'Q', - 'ACCOUNT_ID_MODE_REQUIRED': 'R', - 'SIGV4A_SIGNING': 'S', - 'RESOLVED_ACCOUNT_ID': 'T', - 'FLEXIBLE_CHECKSUMS_REQ_CRC32': 'U', - 'FLEXIBLE_CHECKSUMS_REQ_CRC32C': 'V', - 'FLEXIBLE_CHECKSUMS_REQ_CRC64': 'W', - 'FLEXIBLE_CHECKSUMS_REQ_SHA1': 'X', - 'FLEXIBLE_CHECKSUMS_REQ_SHA256': 'Y', - 'FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED': 'Z', - 'FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED': 'a', - 'FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED': 'b', - 'FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED': 'c', - 'CREDENTIALS_CODE': 'e', - 'CREDENTIALS_ENV_VARS': 'g', - 'CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN': 'h', - 'CREDENTIALS_STS_ASSUME_ROLE': 'i', - 'CREDENTIALS_STS_ASSUME_ROLE_WEB_ID': 'k', - 'CREDENTIALS_PROFILE': 'n', - 'CREDENTIALS_PROFILE_SOURCE_PROFILE': 'o', - 'CREDENTIALS_PROFILE_NAMED_PROVIDER': 'p', - 'CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN': 'q', - 'CREDENTIALS_PROFILE_SSO': 'r', - 'CREDENTIALS_SSO': 's', - 'CREDENTIALS_PROFILE_SSO_LEGACY': 't', - 'CREDENTIALS_SSO_LEGACY': 'u', - 'CREDENTIALS_PROFILE_PROCESS': 'v', - 'CREDENTIALS_PROCESS': 'w', - 'CREDENTIALS_BOTO2_CONFIG_FILE': 'x', - 'CREDENTIALS_HTTP': 'z', - 'CREDENTIALS_IMDS': '0', - 'BEARER_SERVICE_ENV_VARS': '3', - 'CLI_V1_TO_V2_MIGRATION_DEBUG_MODE': '-', - 'CREDENTIALS_PROFILE_LOGIN': 'AC', - 'CREDENTIALS_LOGIN': 'AD', -} - - -def register_feature_id(feature_id): - """Adds metric value to the current context object's ``features`` set. - - :type feature_id: str - :param feature_id: The name of the feature to register. Value must be a key - in the ``_USERAGENT_FEATURE_MAPPINGS`` dict. - """ - ctx = get_context() - if ctx is None: - # Never register features outside the scope of a - # ``botocore.context.start_as_current_context`` context manager. - # Otherwise, the context variable won't be reset and features will - # bleed into all subsequent requests. Return instead of raising an - # exception since this function could be invoked in a public interface. - return - if val := _USERAGENT_FEATURE_MAPPINGS.get(feature_id): - ctx.features.add(val) - - -def register_feature_ids(feature_ids): - """Adds multiple feature IDs to the current context object's ``features`` set. - - :type feature_ids: iterable of str - :param feature_ids: An iterable of feature ID strings to register. Each - value must be a key in the ``_USERAGENT_FEATURE_MAPPINGS`` dict. - """ - for feature_id in feature_ids: - register_feature_id(feature_id) - - -def sanitize_user_agent_string_component(raw_str, allow_hash): - """Replaces all not allowed characters in the string with a dash ("-"). - - Allowed characters are ASCII alphanumerics and ``!$%&'*+-.^_`|~,``. If - ``allow_hash`` is ``True``, "#"``" is also allowed. - - :type raw_str: str - :param raw_str: The input string to be sanitized. - - :type allow_hash: bool - :param allow_hash: Whether "#" is considered an allowed character. - """ - return ''.join( - c - if c in _USERAGENT_ALLOWED_CHARACTERS or (allow_hash and c == '#') - else '-' - for c in raw_str - ) - - -class UserAgentComponentSizeConfig: - """ - Configures the max size of a built user agent string component and the - delimiter used to truncate the string if the size is above the max. 
- """ - - def __init__(self, max_size_in_bytes: int, delimiter: str): - self.max_size_in_bytes = max_size_in_bytes - self.delimiter = delimiter - self._validate_input() - - def _validate_input(self): - if self.max_size_in_bytes < 1: - raise ValueError( - f'Invalid `max_size_in_bytes`: {self.max_size_in_bytes}. ' - 'Value must be a positive integer.' - ) - - -class UserAgentComponent(NamedTuple): - """ - Component of a Botocore User-Agent header string in the standard format. - - Each component consists of a prefix, a name, a value, and a size_config. - In the string representation these are combined in the format - ``prefix/name#value``. - - ``size_config`` configures the max size and truncation strategy for the - built user agent string component. - - This class is considered private and is subject to abrupt breaking changes. - """ - - prefix: str - name: str - value: Optional[str] = None - size_config: Optional[UserAgentComponentSizeConfig] = None - - def to_string(self): - """Create string like 'prefix/name#value' from a UserAgentComponent.""" - clean_prefix = sanitize_user_agent_string_component( - self.prefix, allow_hash=True - ) - clean_name = sanitize_user_agent_string_component( - self.name, allow_hash=False - ) - if self.value is None or self.value == '': - clean_string = f'{clean_prefix}/{clean_name}' - else: - clean_value = sanitize_user_agent_string_component( - self.value, allow_hash=True - ) - clean_string = f'{clean_prefix}/{clean_name}#{clean_value}' - if self.size_config is not None: - clean_string = self._truncate_string( - clean_string, - self.size_config.max_size_in_bytes, - self.size_config.delimiter, - ) - return clean_string - - def _truncate_string(self, string, max_size, delimiter): - """ - Pop ``delimiter``-separated values until encoded string is less than or - equal to ``max_size``. - """ - orig = string - while len(string.encode('utf-8')) > max_size: - parts = string.split(delimiter) - parts.pop() - string = delimiter.join(parts) - - if string == '': - logger.debug( - "User agent component `%s` could not be truncated to " - "`%s` bytes with delimiter " - "`%s` without losing all contents. " - "Value will be omitted from user agent string.", - orig, - max_size, - delimiter, - ) - return string - - -class RawStringUserAgentComponent: - """ - UserAgentComponent interface wrapper around ``str``. - - Use for User-Agent header components that are not constructed from - prefix+name+value but instead are provided as strings. No sanitization is - performed. - """ - - def __init__(self, value): - self._value = value - - def to_string(self): - return self._value - - -# This is not a public interface and is subject to abrupt breaking changes. -# Any usage is not advised or supported in external code bases. -try: - from botocore.customizations.useragent import modify_components -except ImportError: - # Default implementation that returns unmodified User-Agent components. - def modify_components(components): - return components - - -class UserAgentString: - """ - Generator for AWS SDK User-Agent header strings. - - The User-Agent header format contains information from session, client, and - request context. ``UserAgentString`` provides methods for collecting the - information and ``to_string`` for assembling it into the standardized - string format. - - Example usage: - - ua_session = UserAgentString.from_environment() - ua_session.set_session_config(...) 
- ua_client = ua_session.with_client_config(Config(...)) - ua_string = ua_request.to_string() - - For testing or when information from all sources is available at the same - time, the methods can be chained: - - ua_string = ( - UserAgentString - .from_environment() - .set_session_config(...) - .with_client_config(Config(...)) - .to_string() - ) - - """ - - def __init__( - self, - platform_name, - platform_version, - platform_machine, - python_version, - python_implementation, - execution_env, - crt_version=None, - ): - """ - :type platform_name: str - :param platform_name: Name of the operating system or equivalent - platform name. Should be sourced from :py:meth:`platform.system`. - :type platform_version: str - :param platform_version: Version of the operating system or equivalent - platform name. Should be sourced from :py:meth:`platform.version`. - :type platform_machine: str - :param platform_version: Processor architecture or machine type. For - example "x86_64". Should be sourced from :py:meth:`platform.machine`. - :type python_version: str - :param python_version: Version of the python implementation as str. - Should be sourced from :py:meth:`platform.python_version`. - :type python_implementation: str - :param python_implementation: Name of the python implementation. - Should be sourced from :py:meth:`platform.python_implementation`. - :type execution_env: str - :param execution_env: The value of the AWS execution environment. - Should be sourced from the ``AWS_EXECUTION_ENV` environment - variable. - :type crt_version: str - :param crt_version: Version string of awscrt package, if installed. - """ - self._platform_name = platform_name - self._platform_version = platform_version - self._platform_machine = platform_machine - self._python_version = python_version - self._python_implementation = python_implementation - self._execution_env = execution_env - self._crt_version = crt_version - - # Components that can be added with ``set_session_config()`` - self._session_user_agent_name = None - self._session_user_agent_version = None - self._session_user_agent_extra = None - - self._client_config = None - - # Component that can be set with ``set_client_features()`` - self._client_features = None - - @classmethod - def from_environment(cls): - crt_version = None - if HAS_CRT: - crt_version = _get_crt_version() or 'Unknown' - return cls( - platform_name=platform.system(), - platform_version=platform.release(), - platform_machine=platform.machine(), - python_version=platform.python_version(), - python_implementation=platform.python_implementation(), - execution_env=os.environ.get('AWS_EXECUTION_ENV'), - crt_version=crt_version, - ) - - def set_session_config( - self, - session_user_agent_name, - session_user_agent_version, - session_user_agent_extra, - ): - """ - Set the user agent configuration values that apply at session level. - - :param user_agent_name: The user agent name configured in the - :py:class:`botocore.session.Session` object. For backwards - compatibility, this will always be at the beginning of the - User-Agent string, together with ``user_agent_version``. - :param user_agent_version: The user agent version configured in the - :py:class:`botocore.session.Session` object. - :param user_agent_extra: The user agent "extra" configured in the - :py:class:`botocore.session.Session` object. 
- """ - self._session_user_agent_name = session_user_agent_name - self._session_user_agent_version = session_user_agent_version - self._session_user_agent_extra = session_user_agent_extra - return self - - def set_client_features(self, features): - """ - Persist client-specific features registered before or during client - creation. - - :type features: Set[str] - :param features: A set of client-specific features. - """ - self._client_features = features - - def with_client_config(self, client_config): - """ - Create a copy with all original values and client-specific values. - - :type client_config: botocore.config.Config - :param client_config: The client configuration object. - """ - cp = copy(self) - cp._client_config = client_config - return cp - - def to_string(self): - """ - Build User-Agent header string from the object's properties. - """ - config_ua_override = None - if self._client_config: - if hasattr(self._client_config, '_supplied_user_agent'): - config_ua_override = self._client_config._supplied_user_agent - else: - config_ua_override = self._client_config.user_agent - - if config_ua_override is not None: - return self._build_legacy_ua_string(config_ua_override) - - components = [ - *self._build_sdk_metadata(), - RawStringUserAgentComponent('ua/2.1'), - *self._build_os_metadata(), - *self._build_architecture_metadata(), - *self._build_language_metadata(), - *self._build_execution_env_metadata(), - *self._build_feature_metadata(), - *self._build_config_metadata(), - *self._build_app_id(), - *self._build_extra(), - ] - - components = modify_components(components) - - return ' '.join( - [comp.to_string() for comp in components if comp.to_string()] - ) - - def _build_sdk_metadata(self): - """ - Build the SDK name and version component of the User-Agent header. - - For backwards-compatibility both session-level and client-level config - of custom tool names are honored. If this removes the Botocore - information from the start of the string, Botocore's name and version - are included as a separate field with "md" prefix. - """ - sdk_md = [] - if ( - self._session_user_agent_name - and self._session_user_agent_version - and ( - self._session_user_agent_name != _USERAGENT_SDK_NAME - or self._session_user_agent_version != botocore_version - ) - ): - sdk_md.extend( - [ - UserAgentComponent( - self._session_user_agent_name, - self._session_user_agent_version, - ), - UserAgentComponent( - 'md', _USERAGENT_SDK_NAME, botocore_version - ), - ] - ) - else: - sdk_md.append( - UserAgentComponent(_USERAGENT_SDK_NAME, botocore_version) - ) - - if self._crt_version is not None: - sdk_md.append( - UserAgentComponent('md', 'awscrt', self._crt_version) - ) - - return sdk_md - - def _build_os_metadata(self): - """ - Build the OS/platform components of the User-Agent header string. - - For recognized platform names that match or map to an entry in the list - of standardized OS names, a single component with prefix "os" is - returned. Otherwise, one component "os/other" is returned and a second - with prefix "md" and the raw platform name. 
- - String representations of example return values: - * ``os/macos#10.13.6`` - * ``os/linux`` - * ``os/other`` - * ``os/other md/foobar#1.2.3`` - """ - if self._platform_name is None: - return [UserAgentComponent('os', 'other')] - - plt_name_lower = self._platform_name.lower() - if plt_name_lower in _USERAGENT_ALLOWED_OS_NAMES: - os_family = plt_name_lower - elif plt_name_lower in _USERAGENT_PLATFORM_NAME_MAPPINGS: - os_family = _USERAGENT_PLATFORM_NAME_MAPPINGS[plt_name_lower] - else: - os_family = None - - if os_family is not None: - return [ - UserAgentComponent('os', os_family, self._platform_version) - ] - else: - return [ - UserAgentComponent('os', 'other'), - UserAgentComponent( - 'md', self._platform_name, self._platform_version - ), - ] - - def _build_architecture_metadata(self): - """ - Build architecture component of the User-Agent header string. - - Returns the machine type with prefix "md" and name "arch", if one is - available. Common values include "x86_64", "arm64", "i386". - """ - if self._platform_machine: - return [ - UserAgentComponent( - 'md', 'arch', self._platform_machine.lower() - ) - ] - return [] - - def _build_language_metadata(self): - """ - Build the language components of the User-Agent header string. - - Returns the Python version in a component with prefix "lang" and name - "python". The Python implementation (e.g. CPython, PyPy) is returned as - separate metadata component with prefix "md" and name "pyimpl". - - String representation of an example return value: - ``lang/python#3.10.4 md/pyimpl#CPython`` - """ - lang_md = [ - UserAgentComponent('lang', 'python', self._python_version), - ] - if self._python_implementation: - lang_md.append( - UserAgentComponent('md', 'pyimpl', self._python_implementation) - ) - return lang_md - - def _build_execution_env_metadata(self): - """ - Build the execution environment component of the User-Agent header. - - Returns a single component prefixed with "exec-env", usually sourced - from the environment variable AWS_EXECUTION_ENV. - """ - if self._execution_env: - return [UserAgentComponent('exec-env', self._execution_env)] - else: - return [] - - def _build_feature_metadata(self): - """ - Build the features component of the User-Agent header string. - - Returns a single component with prefix "m" followed by a list of - comma-separated metric values. - """ - ctx = get_context() - context_features = set() if ctx is None else ctx.features - client_features = self._client_features or set() - features = client_features.union(context_features) - if not features: - return [] - size_config = UserAgentComponentSizeConfig(1024, ',') - return [ - UserAgentComponent( - 'm', ','.join(features), size_config=size_config - ) - ] - - def _build_config_metadata(self): - """ - Build the configuration components of the User-Agent header string. - - Returns a list of components with prefix "cfg" followed by the config - setting name and its value. Tracked configuration settings may be - added or removed in future versions. - """ - if not self._client_config or not self._client_config.retries: - return [] - retry_mode = self._client_config.retries.get('mode') - cfg_md = [UserAgentComponent('cfg', 'retry-mode', retry_mode)] - if self._client_config.endpoint_discovery_enabled: - cfg_md.append(UserAgentComponent('cfg', 'endpoint-discovery')) - return cfg_md - - def _build_app_id(self): - """ - Build app component of the User-Agent header string. 
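A standalone sketch of the platform-name mapping logic above (the allowed-name set and mapping table here are illustrative stand-ins for _USERAGENT_ALLOWED_OS_NAMES and _USERAGENT_PLATFORM_NAME_MAPPINGS):

    ALLOWED_OS_NAMES = {'windows', 'linux', 'macos', 'android', 'ios', 'other'}
    PLATFORM_NAME_MAPPINGS = {'darwin': 'macos'}   # illustrative subset

    def os_component(platform_name, platform_version):
        if platform_name is None:
            return 'os/other'
        lowered = platform_name.lower()
        if lowered in ALLOWED_OS_NAMES:
            family = lowered
        else:
            family = PLATFORM_NAME_MAPPINGS.get(lowered)
        if family is not None:
            return f'os/{family}#{platform_version}'
        # Unrecognized platform: generic "os/other" plus the raw name as "md".
        return f'os/other md/{platform_name}#{platform_version}'

    print(os_component('Darwin', '21.6.0'))   # os/macos#21.6.0
    print(os_component('Haiku', '1.0'))       # os/other md/Haiku#1.0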
- - Returns a single component with prefix "app" and value sourced from the - ``user_agent_appid`` field in :py:class:`botocore.config.Config` or - the ``sdk_ua_app_id`` setting in the shared configuration file, or the - ``AWS_SDK_UA_APP_ID`` environment variable. These are the recommended - ways for apps built with Botocore to insert their identifer into the - User-Agent header. - """ - if self._client_config and self._client_config.user_agent_appid: - appid = sanitize_user_agent_string_component( - raw_str=self._client_config.user_agent_appid, allow_hash=True - ) - return [RawStringUserAgentComponent(f'app/{appid}')] - else: - return [] - - def _build_extra(self): - """User agent string components based on legacy "extra" settings. - - Creates components from the session-level and client-level - ``user_agent_extra`` setting, if present. Both are passed through - verbatim and should be appended at the end of the string. - - Preferred ways to inject application-specific information into - botocore's User-Agent header string are the ``user_agent_appid` field - in :py:class:`botocore.config.Config`. The ``AWS_SDK_UA_APP_ID`` - environment variable and the ``sdk_ua_app_id`` configuration file - setting are alternative ways to set the ``user_agent_appid`` config. - """ - extra = [] - if self._session_user_agent_extra: - extra.append( - RawStringUserAgentComponent(self._session_user_agent_extra) - ) - if self._client_config and self._client_config.user_agent_extra: - extra.append( - RawStringUserAgentComponent( - self._client_config.user_agent_extra - ) - ) - return extra - - def _build_legacy_ua_string(self, config_ua_override): - components = [config_ua_override] - if self._session_user_agent_extra: - components.append(self._session_user_agent_extra) - if self._client_config.user_agent_extra: - components.append(self._client_config.user_agent_extra) - return ' '.join(components) - - def rebuild_and_replace_user_agent_handler( - self, operation_name, request, **kwargs - ): - ua_string = self.to_string() - if request.headers.get('User-Agent'): - request.headers.replace_header('User-Agent', ua_string) - - -def _get_crt_version(): - """ - This function is considered private and is subject to abrupt breaking - changes. - """ - try: - import awscrt - - return awscrt.__version__ - except AttributeError: - return None diff --git a/venv/Lib/site-packages/botocore/utils.py b/venv/Lib/site-packages/botocore/utils.py deleted file mode 100644 index 79f9337..0000000 --- a/venv/Lib/site-packages/botocore/utils.py +++ /dev/null @@ -1,3730 +0,0 @@ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. 
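The app id described above can be supplied in any of the three ways its docstring lists; a short sketch (client creation assumes a configured region and credentials, and 'my-app' is a placeholder):

    # 1) In code, via the client config:
    import botocore.session
    from botocore.config import Config

    session = botocore.session.get_session()
    s3 = session.create_client('s3', config=Config(user_agent_appid='my-app'))

    # 2) Via the environment:
    #      export AWS_SDK_UA_APP_ID=my-app
    # 3) Via the shared config file (~/.aws/config):
    #      [default]
    #      sdk_ua_app_id = my-app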
-import base64 -import binascii -import datetime -import email.message -import functools -import hashlib -import io -import logging -import os -import random -import re -import socket -import tempfile -import time -import warnings -import weakref -from datetime import datetime as _DatetimeClass -from ipaddress import ip_address -from pathlib import Path -from urllib.request import getproxies, proxy_bypass - -import dateutil.parser -from dateutil.tz import tzutc -from urllib3.exceptions import LocationParseError - -import botocore -import botocore.awsrequest -import botocore.httpsession - -# IP Regexes retained for backwards compatibility -from botocore.compat import ( - HAS_CRT, - HEX_PAT, # noqa: F401 - IPV4_PAT, # noqa: F401 - IPV4_RE, - IPV6_ADDRZ_PAT, # noqa: F401 - IPV6_ADDRZ_RE, - IPV6_PAT, # noqa: F401 - LS32_PAT, # noqa: F401 - MD5_AVAILABLE, - UNRESERVED_PAT, # noqa: F401 - UNSAFE_URL_CHARS, - ZONE_ID_PAT, # noqa: F401 - OrderedDict, - get_current_datetime, - get_md5, - get_tzinfo_options, - json, - quote, - urlparse, - urlsplit, - urlunsplit, - zip_longest, -) -from botocore.exceptions import ( - ClientError, - ConfigNotFound, - ConnectionClosedError, - ConnectTimeoutError, - EndpointConnectionError, - HTTPClientError, - InvalidDNSNameError, - InvalidEndpointConfigurationError, - InvalidExpressionError, - InvalidHostLabelError, - InvalidIMDSEndpointError, - InvalidIMDSEndpointModeError, - InvalidRegionError, - MetadataRetrievalError, - MissingDependencyException, - ReadTimeoutError, - SSOTokenLoadError, - UnsupportedOutpostResourceError, - UnsupportedS3AccesspointConfigurationError, - UnsupportedS3ArnError, - UnsupportedS3ConfigurationError, - UnsupportedS3ControlArnError, - UnsupportedS3ControlConfigurationError, -) -from botocore.plugin import ( - PluginContext, - reset_plugin_context, - set_plugin_context, -) - -logger = logging.getLogger(__name__) -DEFAULT_METADATA_SERVICE_TIMEOUT = 1 -METADATA_BASE_URL = 'http://169.254.169.254/' -METADATA_BASE_URL_IPv6 = 'http://[fd00:ec2::254]/' -METADATA_ENDPOINT_MODES = ('ipv4', 'ipv6') - -# These are chars that do not need to be urlencoded. -# Based on rfc2986, section 2.3 -SAFE_CHARS = '-._~' -LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]') -RETRYABLE_HTTP_ERRORS = ( - ReadTimeoutError, - EndpointConnectionError, - ConnectionClosedError, - ConnectTimeoutError, -) -S3_ACCELERATE_WHITELIST = ['dualstack'] -# In switching events from using service name / endpoint prefix to service -# id, we have to preserve compatibility. This maps the instances where either -# is different than the transformed service id. 
-EVENT_ALIASES = { - "api.mediatailor": "mediatailor", - "api.pricing": "pricing", - "api.sagemaker": "sagemaker", - "apigateway": "api-gateway", - "application-autoscaling": "application-auto-scaling", - "appstream2": "appstream", - "autoscaling": "auto-scaling", - "autoscaling-plans": "auto-scaling-plans", - "ce": "cost-explorer", - "cloudhsmv2": "cloudhsm-v2", - "cloudsearchdomain": "cloudsearch-domain", - "cognito-idp": "cognito-identity-provider", - "config": "config-service", - "cur": "cost-and-usage-report-service", - "data.iot": "iot-data-plane", - "data.jobs.iot": "iot-jobs-data-plane", - "data.mediastore": "mediastore-data", - "datapipeline": "data-pipeline", - "devicefarm": "device-farm", - "directconnect": "direct-connect", - "discovery": "application-discovery-service", - "dms": "database-migration-service", - "ds": "directory-service", - "dynamodbstreams": "dynamodb-streams", - "elasticbeanstalk": "elastic-beanstalk", - "elasticfilesystem": "efs", - "elasticloadbalancing": "elastic-load-balancing", - "elasticmapreduce": "emr", - "elb": "elastic-load-balancing", - "elbv2": "elastic-load-balancing-v2", - "email": "ses", - "entitlement.marketplace": "marketplace-entitlement-service", - "es": "elasticsearch-service", - "events": "eventbridge", - "cloudwatch-events": "eventbridge", - "iot-data": "iot-data-plane", - "iot-jobs-data": "iot-jobs-data-plane", - "kinesisanalytics": "kinesis-analytics", - "kinesisvideo": "kinesis-video", - "lex-models": "lex-model-building-service", - "lex-runtime": "lex-runtime-service", - "logs": "cloudwatch-logs", - "machinelearning": "machine-learning", - "marketplace-entitlement": "marketplace-entitlement-service", - "marketplacecommerceanalytics": "marketplace-commerce-analytics", - "metering.marketplace": "marketplace-metering", - "meteringmarketplace": "marketplace-metering", - "mgh": "migration-hub", - "models.lex": "lex-model-building-service", - "monitoring": "cloudwatch", - "mturk-requester": "mturk", - "resourcegroupstaggingapi": "resource-groups-tagging-api", - "route53": "route-53", - "route53domains": "route-53-domains", - "runtime.lex": "lex-runtime-service", - "runtime.sagemaker": "sagemaker-runtime", - "sdb": "simpledb", - "secretsmanager": "secrets-manager", - "serverlessrepo": "serverlessapplicationrepository", - "servicecatalog": "service-catalog", - "states": "sfn", - "stepfunctions": "sfn", - "storagegateway": "storage-gateway", - "streams.dynamodb": "dynamodb-streams", - "tagging": "resource-groups-tagging-api", -} - - -# This pattern can be used to detect if a header is a flexible checksum header -CHECKSUM_HEADER_PATTERN = re.compile( - r'^X-Amz-Checksum-([a-z0-9]*)$', - flags=re.IGNORECASE, -) - -PRIORITY_ORDERED_SUPPORTED_PROTOCOLS = ( - 'json', - 'rest-json', - 'rest-xml', - 'smithy-rpc-v2-cbor', - 'query', - 'ec2', -) - - -def ensure_boolean(val): - """Ensures a boolean value if a string or boolean is provided - - For strings, the value for True/False is case insensitive - """ - if isinstance(val, bool): - return val - elif isinstance(val, str): - return val.lower() == 'true' - else: - return False - - -def resolve_imds_endpoint_mode(session): - """Resolving IMDS endpoint mode to either IPv6 or IPv4. - - ec2_metadata_service_endpoint_mode takes precedence over imds_use_ipv6. 
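For the boolean coercion helper above, string comparison is case-insensitive and anything that is not a bool or str falls back to False (assuming botocore.utils is importable):

    from botocore.utils import ensure_boolean

    print(ensure_boolean(True))      # True
    print(ensure_boolean('TRUE'))    # True
    print(ensure_boolean('false'))   # False
    print(ensure_boolean(None))      # False
    print(ensure_boolean('yes'))     # False -- only the literal "true" counts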
- """ - endpoint_mode = session.get_config_variable( - 'ec2_metadata_service_endpoint_mode' - ) - if endpoint_mode is not None: - lendpoint_mode = endpoint_mode.lower() - if lendpoint_mode not in METADATA_ENDPOINT_MODES: - error_msg_kwargs = { - 'mode': endpoint_mode, - 'valid_modes': METADATA_ENDPOINT_MODES, - } - raise InvalidIMDSEndpointModeError(**error_msg_kwargs) - return lendpoint_mode - elif session.get_config_variable('imds_use_ipv6'): - return 'ipv6' - return 'ipv4' - - -def is_json_value_header(shape): - """Determines if the provided shape is the special header type jsonvalue. - - :type shape: botocore.shape - :param shape: Shape to be inspected for the jsonvalue trait. - - :return: True if this type is a jsonvalue, False otherwise - :rtype: Bool - """ - return ( - hasattr(shape, 'serialization') - and shape.serialization.get('jsonvalue', False) - and shape.serialization.get('location') == 'header' - and shape.type_name == 'string' - ) - - -def has_header(header_name, headers): - """Case-insensitive check for header key.""" - if header_name is None: - return False - elif isinstance(headers, botocore.awsrequest.HeadersDict): - return header_name in headers - else: - return header_name.lower() in [key.lower() for key in headers.keys()] - - -def get_service_module_name(service_model): - """Returns the module name for a service - - This is the value used in both the documentation and client class name - """ - name = service_model.metadata.get( - 'serviceAbbreviation', - service_model.metadata.get( - 'serviceFullName', service_model.service_name - ), - ) - name = name.replace('Amazon', '') - name = name.replace('AWS', '') - name = re.sub(r'\W+', '', name) - return name - - -def normalize_url_path(path): - if not path: - return '/' - return remove_dot_segments(path) - - -def normalize_boolean(val): - """Returns None if val is None, otherwise ensure value - converted to boolean""" - if val is None: - return val - else: - return ensure_boolean(val) - - -def remove_dot_segments(url): - # RFC 3986, section 5.2.4 "Remove Dot Segments" - # Also, AWS services require consecutive slashes to be removed, - # so that's done here as well - if not url: - return '' - input_url = url.split('/') - output_list = [] - for x in input_url: - if x and x != '.': - if x == '..': - if output_list: - output_list.pop() - else: - output_list.append(x) - - if url[0] == '/': - first = '/' - else: - first = '' - if url[-1] == '/' and output_list: - last = '/' - else: - last = '' - return first + '/'.join(output_list) + last - - -def validate_jmespath_for_set(expression): - # Validates a limited jmespath expression to determine if we can set a - # value based on it. Only works with dotted paths. - if not expression or expression == '.': - raise InvalidExpressionError(expression=expression) - - for invalid in ['[', ']', '*']: - if invalid in expression: - raise InvalidExpressionError(expression=expression) - - -def set_value_from_jmespath(source, expression, value, is_first=True): - # This takes a (limited) jmespath-like expression & can set a value based - # on it. - # Limitations: - # * Only handles dotted lookups - # * No offsets/wildcards/slices/etc. 
- if is_first: - validate_jmespath_for_set(expression) - - bits = expression.split('.', 1) - current_key, remainder = bits[0], bits[1] if len(bits) > 1 else '' - - if not current_key: - raise InvalidExpressionError(expression=expression) - - if remainder: - if current_key not in source: - # We've got something in the expression that's not present in the - # source (new key). If there's any more bits, we'll set the key - # with an empty dictionary. - source[current_key] = {} - - return set_value_from_jmespath( - source[current_key], remainder, value, is_first=False - ) - - # If we're down to a single key, set it. - source[current_key] = value - - -def is_global_accesspoint(context): - """Determine if request is intended for an MRAP accesspoint.""" - s3_accesspoint = context.get('s3_accesspoint', {}) - is_global = s3_accesspoint.get('region') == '' - return is_global - - -def create_nested_client(session, service_name, **kwargs): - # If a client is created from within a plugin based on the environment variable, - # an infinite loop could arise. Any clients created from within another client - # must use this method to prevent infinite loops. - ctx = PluginContext(plugins="DISABLED") - token = set_plugin_context(ctx) - try: - return session.create_client(service_name, **kwargs) - finally: - reset_plugin_context(token) - - -class _RetriesExceededError(Exception): - """Internal exception used when the number of retries are exceeded.""" - - pass - - -class BadIMDSRequestError(Exception): - def __init__(self, request): - self.request = request - - -class IMDSFetcher: - _RETRIES_EXCEEDED_ERROR_CLS = _RetriesExceededError - _TOKEN_PATH = 'latest/api/token' - _TOKEN_TTL = '21600' - - def __init__( - self, - timeout=DEFAULT_METADATA_SERVICE_TIMEOUT, - num_attempts=1, - base_url=METADATA_BASE_URL, - env=None, - user_agent=None, - config=None, - ): - self._timeout = timeout - self._num_attempts = num_attempts - if config is None: - config = {} - self._base_url = self._select_base_url(base_url, config) - self._config = config - - if env is None: - env = os.environ.copy() - self._disabled = ( - env.get('AWS_EC2_METADATA_DISABLED', 'false').lower() == 'true' - ) - self._imds_v1_disabled = config.get('ec2_metadata_v1_disabled') - self._user_agent = user_agent - self._session = botocore.httpsession.URLLib3Session( - timeout=self._timeout, - proxies=get_environ_proxies(self._base_url), - ) - - def get_base_url(self): - return self._base_url - - def _select_base_url(self, base_url, config): - if config is None: - config = {} - - requires_ipv6 = ( - config.get('ec2_metadata_service_endpoint_mode') == 'ipv6' - ) - custom_metadata_endpoint = config.get('ec2_metadata_service_endpoint') - - if requires_ipv6 and custom_metadata_endpoint: - logger.warning( - "Custom endpoint and IMDS_USE_IPV6 are both set. Using custom endpoint." 
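Usage sketch for the dotted-path setter above (assuming botocore.utils is importable); intermediate keys that do not exist yet are created as empty dicts:

    from botocore.utils import set_value_from_jmespath

    data = {}
    set_value_from_jmespath(data, 'retries.mode', 'standard')
    set_value_from_jmespath(data, 'retries.max_attempts', 5)
    print(data)   # {'retries': {'mode': 'standard', 'max_attempts': 5}}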
- ) - - chosen_base_url = None - - if base_url != METADATA_BASE_URL: - chosen_base_url = base_url - elif custom_metadata_endpoint: - chosen_base_url = custom_metadata_endpoint - elif requires_ipv6: - chosen_base_url = METADATA_BASE_URL_IPv6 - else: - chosen_base_url = METADATA_BASE_URL - - logger.debug("IMDS ENDPOINT: %s", chosen_base_url) - if not is_valid_uri(chosen_base_url): - raise InvalidIMDSEndpointError(endpoint=chosen_base_url) - - return chosen_base_url - - def _construct_url(self, path): - sep = '' - if self._base_url and not self._base_url.endswith('/'): - sep = '/' - return f'{self._base_url}{sep}{path}' - - def _fetch_metadata_token(self): - self._assert_enabled() - url = self._construct_url(self._TOKEN_PATH) - headers = { - 'x-aws-ec2-metadata-token-ttl-seconds': self._TOKEN_TTL, - } - self._add_user_agent(headers) - request = botocore.awsrequest.AWSRequest( - method='PUT', url=url, headers=headers - ) - for i in range(self._num_attempts): - try: - response = self._session.send(request.prepare()) - if response.status_code == 200: - return response.text - elif response.status_code in (404, 403, 405): - return None - elif response.status_code in (400,): - raise BadIMDSRequestError(request) - except ReadTimeoutError: - return None - except RETRYABLE_HTTP_ERRORS as e: - logger.debug( - "Caught retryable HTTP exception while making metadata " - "service request to %s: %s", - url, - e, - exc_info=True, - ) - except HTTPClientError as e: - if isinstance(e.kwargs.get('error'), LocationParseError): - raise InvalidIMDSEndpointError(endpoint=url, error=e) - else: - raise - return None - - def _get_request(self, url_path, retry_func, token=None): - """Make a get request to the Instance Metadata Service. - - :type url_path: str - :param url_path: The path component of the URL to make a get request. - This arg is appended to the base_url that was provided in the - initializer. - - :type retry_func: callable - :param retry_func: A function that takes the response as an argument - and determines if it needs to retry. By default empty and non - 200 OK responses are retried. - - :type token: str - :param token: Metadata token to send along with GET requests to IMDS. 
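Restating the endpoint-selection precedence implemented in _select_base_url as a tiny standalone sketch (constants mirror the module-level defaults above):

    METADATA_BASE_URL = 'http://169.254.169.254/'
    METADATA_BASE_URL_IPv6 = 'http://[fd00:ec2::254]/'

    def select_base_url(base_url, config):
        if base_url != METADATA_BASE_URL:
            return base_url                        # explicit override wins
        if config.get('ec2_metadata_service_endpoint'):
            return config['ec2_metadata_service_endpoint']   # then configured endpoint
        if config.get('ec2_metadata_service_endpoint_mode') == 'ipv6':
            return METADATA_BASE_URL_IPv6          # then the IPv6 default
        return METADATA_BASE_URL                   # otherwise the IPv4 default

    print(select_base_url(METADATA_BASE_URL,
                          {'ec2_metadata_service_endpoint_mode': 'ipv6'}))
    # http://[fd00:ec2::254]/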
- """ - self._assert_enabled() - if not token: - self._assert_v1_enabled() - if retry_func is None: - retry_func = self._default_retry - url = self._construct_url(url_path) - headers = {} - if token is not None: - headers['x-aws-ec2-metadata-token'] = token - self._add_user_agent(headers) - for i in range(self._num_attempts): - try: - request = botocore.awsrequest.AWSRequest( - method='GET', url=url, headers=headers - ) - response = self._session.send(request.prepare()) - if not retry_func(response): - return response - except RETRYABLE_HTTP_ERRORS as e: - logger.debug( - "Caught retryable HTTP exception while making metadata " - "service request to %s: %s", - url, - e, - exc_info=True, - ) - raise self._RETRIES_EXCEEDED_ERROR_CLS() - - def _add_user_agent(self, headers): - if self._user_agent is not None: - headers['User-Agent'] = self._user_agent - - def _assert_enabled(self): - if self._disabled: - logger.debug("Access to EC2 metadata has been disabled.") - raise self._RETRIES_EXCEEDED_ERROR_CLS() - - def _assert_v1_enabled(self): - if self._imds_v1_disabled: - raise MetadataRetrievalError( - error_msg="Unable to retrieve token for use in IMDSv2 call and IMDSv1 has been disabled" - ) - - def _default_retry(self, response): - return self._is_non_ok_response(response) or self._is_empty(response) - - def _is_non_ok_response(self, response): - if response.status_code != 200: - self._log_imds_response(response, 'non-200', log_body=True) - return True - return False - - def _is_empty(self, response): - if not response.content: - self._log_imds_response(response, 'no body', log_body=True) - return True - return False - - def _log_imds_response(self, response, reason_to_log, log_body=False): - statement = ( - "Metadata service returned %s response " - "with status code of %s for url: %s" - ) - logger_args = [reason_to_log, response.status_code, response.url] - if log_body: - statement += ", content body: %s" - logger_args.append(response.content) - logger.debug(statement, *logger_args) - - -class InstanceMetadataFetcher(IMDSFetcher): - _URL_PATH = 'latest/meta-data/iam/security-credentials/' - _REQUIRED_CREDENTIAL_FIELDS = [ - 'AccessKeyId', - 'SecretAccessKey', - 'Token', - 'Expiration', - ] - - def retrieve_iam_role_credentials(self): - try: - token = self._fetch_metadata_token() - role_name = self._get_iam_role(token) - credentials = self._get_credentials(role_name, token) - if self._contains_all_credential_fields(credentials): - credentials = { - 'role_name': role_name, - 'access_key': credentials['AccessKeyId'], - 'secret_key': credentials['SecretAccessKey'], - 'token': credentials['Token'], - 'expiry_time': credentials['Expiration'], - } - self._evaluate_expiration(credentials) - return credentials - else: - # IMDS can return a 200 response that has a JSON formatted - # error message (i.e. if ec2 is not trusted entity for the - # attached role). We do not necessarily want to retry for - # these and we also do not necessarily want to raise a key - # error. So at least log the problematic response and return - # an empty dictionary to signal that it was not able to - # retrieve credentials. These error will contain both a - # Code and Message key. 
- if 'Code' in credentials and 'Message' in credentials: - logger.debug( - 'Error response received when retrieving' - 'credentials: %s.', - credentials, - ) - return {} - except self._RETRIES_EXCEEDED_ERROR_CLS: - logger.debug( - "Max number of attempts exceeded (%s) when " - "attempting to retrieve data from metadata service.", - self._num_attempts, - ) - except BadIMDSRequestError as e: - logger.debug("Bad IMDS request: %s", e.request) - return {} - - def _get_iam_role(self, token=None): - return self._get_request( - url_path=self._URL_PATH, - retry_func=self._needs_retry_for_role_name, - token=token, - ).text - - def _get_credentials(self, role_name, token=None): - r = self._get_request( - url_path=self._URL_PATH + role_name, - retry_func=self._needs_retry_for_credentials, - token=token, - ) - return json.loads(r.text) - - def _is_invalid_json(self, response): - try: - json.loads(response.text) - return False - except ValueError: - self._log_imds_response(response, 'invalid json') - return True - - def _needs_retry_for_role_name(self, response): - return self._is_non_ok_response(response) or self._is_empty(response) - - def _needs_retry_for_credentials(self, response): - return ( - self._is_non_ok_response(response) - or self._is_empty(response) - or self._is_invalid_json(response) - ) - - def _contains_all_credential_fields(self, credentials): - for field in self._REQUIRED_CREDENTIAL_FIELDS: - if field not in credentials: - logger.debug( - 'Retrieved credentials is missing required field: %s', - field, - ) - return False - return True - - def _evaluate_expiration(self, credentials): - expiration = credentials.get("expiry_time") - if expiration is None: - return - try: - expiration = datetime.datetime.strptime( - expiration, "%Y-%m-%dT%H:%M:%SZ" - ) - refresh_interval = self._config.get( - "ec2_credential_refresh_window", 60 * 10 - ) - jitter = random.randint(120, 600) # Between 2 to 10 minutes - refresh_interval_with_jitter = refresh_interval + jitter - current_time = get_current_datetime() - refresh_offset = datetime.timedelta( - seconds=refresh_interval_with_jitter - ) - extension_time = expiration - refresh_offset - if current_time >= extension_time: - new_time = current_time + refresh_offset - credentials["expiry_time"] = new_time.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - logger.info( - "Attempting credential expiration extension due to a " - "credential service availability issue. A refresh of " - "these credentials will be attempted again within " - "the next %.0f minutes.", - refresh_interval_with_jitter / 60, - ) - except ValueError: - logger.debug( - "Unable to parse expiry_time in %s", credentials['expiry_time'] - ) - - -class IMDSRegionProvider: - def __init__(self, session, environ=None, fetcher=None): - """Initialize IMDSRegionProvider. - :type session: :class:`botocore.session.Session` - :param session: The session is needed to look up configuration for - how to contact the instance metadata service. Specifically the - whether or not it should use the IMDS region at all, and if so how - to configure the timeout and number of attempts to reach the - service. - :type environ: None or dict - :param environ: A dictionary of environment variables to use. If - ``None`` is the argument then ``os.environ`` will be used by - default. - :type fecther: :class:`botocore.utils.InstanceMetadataRegionFetcher` - :param fetcher: The class to actually handle the fetching of the region - from the IMDS. If not provided a default one will be created. 
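A standalone sketch of the expiration-extension arithmetic above (the refresh window defaults to ec2_credential_refresh_window, 10 minutes, plus 2-10 minutes of random jitter):

    import datetime
    import random

    def maybe_extend_expiry(expiration, now, refresh_interval=600):
        jitter = random.randint(120, 600)                  # 2 to 10 minutes
        offset = datetime.timedelta(seconds=refresh_interval + jitter)
        if now >= expiration - offset:
            # Close to (or past) expiry: report a later expiry so the next
            # refresh attempt has time to succeed despite IMDS flakiness.
            return now + offset
        return expiration

    now = datetime.datetime(2024, 1, 1, 12, 0, 0)
    exp = datetime.datetime(2024, 1, 1, 12, 5, 0)          # only 5 minutes away
    print(maybe_extend_expiry(exp, now))                   # pushed past 12:12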
- """ - self._session = session - if environ is None: - environ = os.environ - self._environ = environ - self._fetcher = fetcher - - def provide(self): - """Provide the region value from IMDS.""" - instance_region = self._get_instance_metadata_region() - return instance_region - - def _get_instance_metadata_region(self): - fetcher = self._get_fetcher() - region = fetcher.retrieve_region() - return region - - def _get_fetcher(self): - if self._fetcher is None: - self._fetcher = self._create_fetcher() - return self._fetcher - - def _create_fetcher(self): - metadata_timeout = self._session.get_config_variable( - 'metadata_service_timeout' - ) - metadata_num_attempts = self._session.get_config_variable( - 'metadata_service_num_attempts' - ) - imds_config = { - 'ec2_metadata_service_endpoint': self._session.get_config_variable( - 'ec2_metadata_service_endpoint' - ), - 'ec2_metadata_service_endpoint_mode': resolve_imds_endpoint_mode( - self._session - ), - 'ec2_metadata_v1_disabled': self._session.get_config_variable( - 'ec2_metadata_v1_disabled' - ), - } - fetcher = InstanceMetadataRegionFetcher( - timeout=metadata_timeout, - num_attempts=metadata_num_attempts, - env=self._environ, - user_agent=self._session.user_agent(), - config=imds_config, - ) - return fetcher - - -class InstanceMetadataRegionFetcher(IMDSFetcher): - _URL_PATH = 'latest/meta-data/placement/availability-zone/' - - def retrieve_region(self): - """Get the current region from the instance metadata service. - :rvalue: str - :returns: The region the current instance is running in or None - if the instance metadata service cannot be contacted or does not - give a valid response. - :rtype: None or str - :returns: Returns the region as a string if it is configured to use - IMDS as a region source. Otherwise returns ``None``. It will also - return ``None`` if it fails to get the region from IMDS due to - exhausting its retries or not being able to connect. - """ - try: - region = self._get_region() - return region - except self._RETRIES_EXCEEDED_ERROR_CLS: - logger.debug( - "Max number of attempts exceeded (%s) when " - "attempting to retrieve data from metadata service.", - self._num_attempts, - ) - return None - - def _get_region(self): - token = self._fetch_metadata_token() - response = self._get_request( - url_path=self._URL_PATH, - retry_func=self._default_retry, - token=token, - ) - availability_zone = response.text - region = availability_zone[:-1] - return region - - -def merge_dicts(dict1, dict2, append_lists=False): - """Given two dict, merge the second dict into the first. - - The dicts can have arbitrary nesting. - - :param append_lists: If true, instead of clobbering a list with the new - value, append all of the new values onto the original list. - """ - for key in dict2: - if isinstance(dict2[key], dict): - if key in dict1 and key in dict2: - merge_dicts(dict1[key], dict2[key]) - else: - dict1[key] = dict2[key] - # If the value is a list and the ``append_lists`` flag is set, - # append the new values onto the original list - elif isinstance(dict2[key], list) and append_lists: - # The value in dict1 must be a list in order to append new - # values onto it. - if key in dict1 and isinstance(dict1[key], list): - dict1[key].extend(dict2[key]) - else: - dict1[key] = dict2[key] - else: - # At scalar types, we iterate and merge the - # current dict that we're on. 
- dict1[key] = dict2[key] - - -def lowercase_dict(original): - """Copies the given dictionary ensuring all keys are lowercase strings.""" - copy = {} - for key in original: - copy[key.lower()] = original[key] - return copy - - -def parse_key_val_file(filename, _open=open): - try: - with _open(filename) as f: - contents = f.read() - return parse_key_val_file_contents(contents) - except OSError: - raise ConfigNotFound(path=filename) - - -def parse_key_val_file_contents(contents): - # This was originally extracted from the EC2 credential provider, which was - # fairly lenient in its parsing. We only try to parse key/val pairs if - # there's a '=' in the line. - final = {} - for line in contents.splitlines(): - if '=' not in line: - continue - key, val = line.split('=', 1) - key = key.strip() - val = val.strip() - final[key] = val - return final - - -def percent_encode_sequence(mapping, safe=SAFE_CHARS): - """Urlencode a dict or list into a string. - - This is similar to urllib.urlencode except that: - - * It uses quote, and not quote_plus - * It has a default list of safe chars that don't need - to be encoded, which matches what AWS services expect. - - If any value in the input ``mapping`` is a list type, - then each list element wil be serialized. This is the equivalent - to ``urlencode``'s ``doseq=True`` argument. - - This function should be preferred over the stdlib - ``urlencode()`` function. - - :param mapping: Either a dict to urlencode or a list of - ``(key, value)`` pairs. - - """ - encoded_pairs = [] - if hasattr(mapping, 'items'): - pairs = mapping.items() - else: - pairs = mapping - for key, value in pairs: - if isinstance(value, list): - for element in value: - encoded_pairs.append( - f'{percent_encode(key)}={percent_encode(element)}' - ) - else: - encoded_pairs.append( - f'{percent_encode(key)}={percent_encode(value)}' - ) - return '&'.join(encoded_pairs) - - -def percent_encode(input_str, safe=SAFE_CHARS): - """Urlencodes a string. - - Whereas percent_encode_sequence handles taking a dict/sequence and - producing a percent encoded string, this function deals only with - taking a string (not a dict/sequence) and percent encoding it. - - If given the binary type, will simply URL encode it. If given the - text type, will produce the binary type by UTF-8 encoding the - text. If given something else, will convert it to the text type - first. - """ - # If its not a binary or text string, make it a text string. - if not isinstance(input_str, (bytes, str)): - input_str = str(input_str) - # If it's not bytes, make it bytes by UTF-8 encoding it. - if not isinstance(input_str, bytes): - input_str = input_str.encode('utf-8') - return quote(input_str, safe=safe) - - -def _epoch_seconds_to_datetime(value, tzinfo): - """Parse numerical epoch timestamps (seconds since 1970) into a - ``datetime.datetime`` in UTC using ``datetime.timedelta``. This is intended - as fallback when ``fromtimestamp`` raises ``OverflowError`` or ``OSError``. - - :type value: float or int - :param value: The Unix timestamps as number. - - :type tzinfo: callable - :param tzinfo: A ``datetime.tzinfo`` class or compatible callable. - """ - epoch_zero = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc()) - epoch_zero_localized = epoch_zero.astimezone(tzinfo()) - return epoch_zero_localized + datetime.timedelta(seconds=value) - - -def _parse_timestamp_with_tzinfo(value, tzinfo): - """Parse timestamp with pluggable tzinfo options.""" - if isinstance(value, (int, float)): - # Possibly an epoch time. 
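Usage sketch for the encoder described above (assuming botocore.utils is importable); list values are expanded the way urlencode's doseq=True would, and quote rather than quote_plus is used, so spaces become %20:

    from botocore.utils import percent_encode_sequence

    print(percent_encode_sequence({'k1': 'with space', 'k2': ['a', 'b']}))
    # k1=with%20space&k2=a&k2=b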
- return datetime.datetime.fromtimestamp(value, tzinfo()) - else: - try: - return datetime.datetime.fromtimestamp(float(value), tzinfo()) - except (TypeError, ValueError): - pass - try: - # In certain cases, a timestamp marked with GMT can be parsed into a - # different time zone, so here we provide a context which will - # enforce that GMT == UTC. - return dateutil.parser.parse(value, tzinfos={'GMT': tzutc()}) - except (TypeError, ValueError) as e: - raise ValueError(f'Invalid timestamp "{value}": {e}') - - -def parse_timestamp(value): - """Parse a timestamp into a datetime object. - - Supported formats: - - * iso8601 - * rfc822 - * epoch (value is an integer) - - This will return a ``datetime.datetime`` object. - - """ - tzinfo_options = get_tzinfo_options() - for tzinfo in tzinfo_options: - try: - return _parse_timestamp_with_tzinfo(value, tzinfo) - except (OSError, OverflowError) as e: - logger.debug( - 'Unable to parse timestamp with "%s" timezone info.', - tzinfo.__name__, - exc_info=e, - ) - # For numeric values attempt fallback to using fromtimestamp-free method. - # From Python's ``datetime.datetime.fromtimestamp`` documentation: "This - # may raise ``OverflowError``, if the timestamp is out of the range of - # values supported by the platform C localtime() function, and ``OSError`` - # on localtime() failure. It's common for this to be restricted to years - # from 1970 through 2038." - try: - numeric_value = float(value) - except (TypeError, ValueError): - pass - else: - try: - for tzinfo in tzinfo_options: - return _epoch_seconds_to_datetime(numeric_value, tzinfo=tzinfo) - except (OSError, OverflowError) as e: - logger.debug( - 'Unable to parse timestamp using fallback method with "%s" ' - 'timezone info.', - tzinfo.__name__, - exc_info=e, - ) - raise RuntimeError( - f'Unable to calculate correct timezone offset for "{value}"' - ) - - -def parse_to_aware_datetime(value): - """Converted the passed in value to a datetime object with tzinfo. - - This function can be used to normalize all timestamp inputs. This - function accepts a number of different types of inputs, but - will always return a datetime.datetime object with time zone - information. - - The input param ``value`` can be one of several types: - - * A datetime object (both naive and aware) - * An integer representing the epoch time (can also be a string - of the integer, i.e '0', instead of 0). The epoch time is - considered to be UTC. - * An iso8601 formatted timestamp. This does not need to be - a complete timestamp, it can contain just the date portion - without the time component. - - The returned value will be a datetime object that will have tzinfo. - If no timezone info was provided in the input value, then UTC is - assumed, not local time. - - """ - # This is a general purpose method that handles several cases of - # converting the provided value to a string timestamp suitable to be - # serialized to an http request. It can handle: - # 1) A datetime.datetime object. - if isinstance(value, _DatetimeClass): - datetime_obj = value - else: - # 2) A string object that's formatted as a timestamp. - # We document this as being an iso8601 timestamp, although - # parse_timestamp is a bit more flexible. - datetime_obj = parse_timestamp(value) - if datetime_obj.tzinfo is None: - # I think a case would be made that if no time zone is provided, - # we should use the local time. However, to restore backwards - # compat, the previous behavior was to assume UTC, which is - # what we're going to do here. 
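A few representative inputs for the timestamp parsers above (assuming botocore.utils is importable); every result is timezone-aware, with naive inputs interpreted as UTC:

    from botocore.utils import parse_timestamp, parse_to_aware_datetime

    print(parse_timestamp(0))                        # 1970-01-01 00:00:00+00:00
    print(parse_timestamp('2021-07-14T12:00:00Z'))   # 2021-07-14 12:00:00+00:00
    print(parse_to_aware_datetime('2021-07-14'))     # 2021-07-14 00:00:00+00:00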
- datetime_obj = datetime_obj.replace(tzinfo=tzutc()) - else: - datetime_obj = datetime_obj.astimezone(tzutc()) - return datetime_obj - - -def datetime2timestamp(dt, default_timezone=None): - """Calculate the timestamp based on the given datetime instance. - - :type dt: datetime - :param dt: A datetime object to be converted into timestamp - :type default_timezone: tzinfo - :param default_timezone: If it is provided as None, we treat it as tzutc(). - But it is only used when dt is a naive datetime. - :returns: The timestamp - """ - epoch = datetime.datetime(1970, 1, 1) - if dt.tzinfo is None: - if default_timezone is None: - default_timezone = tzutc() - dt = dt.replace(tzinfo=default_timezone) - d = dt.replace(tzinfo=None) - dt.utcoffset() - epoch - return d.total_seconds() - - -def calculate_sha256(body, as_hex=False): - """Calculate a sha256 checksum. - - This method will calculate the sha256 checksum of a file like - object. Note that this method will iterate through the entire - file contents. The caller is responsible for ensuring the proper - starting position of the file and ``seek()``'ing the file back - to its starting location if other consumers need to read from - the file like object. - - :param body: Any file like object. The file must be opened - in binary mode such that a ``.read()`` call returns bytes. - :param as_hex: If True, then the hex digest is returned. - If False, then the digest (as binary bytes) is returned. - - :returns: The sha256 checksum - - """ - checksum = hashlib.sha256() - for chunk in iter(lambda: body.read(1024 * 1024), b''): - checksum.update(chunk) - if as_hex: - return checksum.hexdigest() - else: - return checksum.digest() - - -def calculate_tree_hash(body): - """Calculate a tree hash checksum. - - For more information see: - - http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html - - :param body: Any file like object. This has the same constraints as - the ``body`` param in calculate_sha256 - - :rtype: str - :returns: The hex version of the calculated tree hash - - """ - chunks = [] - required_chunk_size = 1024 * 1024 - sha256 = hashlib.sha256 - for chunk in iter(lambda: body.read(required_chunk_size), b''): - chunks.append(sha256(chunk).digest()) - if not chunks: - return sha256(b'').hexdigest() - while len(chunks) > 1: - new_chunks = [] - for first, second in _in_pairs(chunks): - if second is not None: - new_chunks.append(sha256(first + second).digest()) - else: - # We're at the end of the list and there's no pair left. - new_chunks.append(first) - chunks = new_chunks - return binascii.hexlify(chunks[0]).decode('ascii') - - -def _in_pairs(iterable): - # Creates iterator that iterates over the list in pairs: - # for a, b in _in_pairs([0, 1, 2, 3, 4]): - # print(a, b) - # - # will print: - # 0, 1 - # 2, 3 - # 4, None - shared_iter = iter(iterable) - # Note that zip_longest is a compat import that uses - # the itertools izip_longest. This creates an iterator, - # this call below does _not_ immediately create the list - # of pairs. - return zip_longest(shared_iter, shared_iter) - - -class CachedProperty: - """A read only property that caches the initially computed value. - - This descriptor will only call the provided ``fget`` function once. - Subsequent access to this property will return the cached value. 
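Usage sketch for the checksum helpers above (assuming botocore.utils is importable); the body must be a binary-mode file-like object, and the caller is responsible for seeking it back between uses:

    import io
    from botocore.utils import calculate_sha256, calculate_tree_hash

    body = io.BytesIO(b'hello world')
    print(calculate_sha256(body, as_hex=True))
    # b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9

    body.seek(0)
    print(calculate_tree_hash(body))
    # same digest here, since the payload fits in a single 1 MB chunk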
- - """ - - def __init__(self, fget): - self._fget = fget - - def __get__(self, obj, cls): - if obj is None: - return self - else: - computed_value = self._fget(obj) - obj.__dict__[self._fget.__name__] = computed_value - return computed_value - - -class ArgumentGenerator: - """Generate sample input based on a shape model. - - This class contains a ``generate_skeleton`` method that will take - an input/output shape (created from ``botocore.model``) and generate - a sample dictionary corresponding to the input/output shape. - - The specific values used are place holder values. For strings either an - empty string or the member name can be used, for numbers 0 or 0.0 is used. - The intended usage of this class is to generate the *shape* of the input - structure. - - This can be useful for operations that have complex input shapes. - This allows a user to just fill in the necessary data instead of - worrying about the specific structure of the input arguments. - - Example usage:: - - s = botocore.session.get_session() - ddb = s.get_service_model('dynamodb') - arg_gen = ArgumentGenerator() - sample_input = arg_gen.generate_skeleton( - ddb.operation_model('CreateTable').input_shape) - print("Sample input for dynamodb.CreateTable: %s" % sample_input) - - """ - - def __init__(self, use_member_names=False): - self._use_member_names = use_member_names - - def generate_skeleton(self, shape): - """Generate a sample input. - - :type shape: ``botocore.model.Shape`` - :param shape: The input shape. - - :return: The generated skeleton input corresponding to the - provided input shape. - - """ - stack = [] - return self._generate_skeleton(shape, stack) - - def _generate_skeleton(self, shape, stack, name=''): - stack.append(shape.name) - try: - if shape.type_name == 'structure': - return self._generate_type_structure(shape, stack) - elif shape.type_name == 'list': - return self._generate_type_list(shape, stack) - elif shape.type_name == 'map': - return self._generate_type_map(shape, stack) - elif shape.type_name == 'string': - if self._use_member_names: - return name - if shape.enum: - return random.choice(shape.enum) - return '' - elif shape.type_name in ['integer', 'long']: - return 0 - elif shape.type_name in ['float', 'double']: - return 0.0 - elif shape.type_name == 'boolean': - return True - elif shape.type_name == 'timestamp': - return datetime.datetime(1970, 1, 1, 0, 0, 0) - finally: - stack.pop() - - def _generate_type_structure(self, shape, stack): - if stack.count(shape.name) > 1: - return {} - skeleton = OrderedDict() - for member_name, member_shape in shape.members.items(): - skeleton[member_name] = self._generate_skeleton( - member_shape, stack, name=member_name - ) - return skeleton - - def _generate_type_list(self, shape, stack): - # For list elements we've arbitrarily decided to - # return two elements for the skeleton list. 
- name = '' - if self._use_member_names: - name = shape.member.name - return [ - self._generate_skeleton(shape.member, stack, name), - ] - - def _generate_type_map(self, shape, stack): - key_shape = shape.key - value_shape = shape.value - assert key_shape.type_name == 'string' - return OrderedDict( - [ - ('KeyName', self._generate_skeleton(value_shape, stack)), - ] - ) - - -def is_valid_ipv6_endpoint_url(endpoint_url): - if UNSAFE_URL_CHARS.intersection(endpoint_url): - return False - hostname = f'[{urlparse(endpoint_url).hostname}]' - return IPV6_ADDRZ_RE.match(hostname) is not None - - -def is_valid_ipv4_endpoint_url(endpoint_url): - hostname = urlparse(endpoint_url).hostname - return IPV4_RE.match(hostname) is not None - - -def is_valid_endpoint_url(endpoint_url): - """Verify the endpoint_url is valid. - - :type endpoint_url: string - :param endpoint_url: An endpoint_url. Must have at least a scheme - and a hostname. - - :return: True if the endpoint url is valid. False otherwise. - - """ - # post-bpo-43882 urlsplit() strips unsafe characters from URL, causing - # it to pass hostname validation below. Detect them early to fix that. - if UNSAFE_URL_CHARS.intersection(endpoint_url): - return False - parts = urlsplit(endpoint_url) - hostname = parts.hostname - if hostname is None: - return False - if len(hostname) > 255: - return False - if hostname[-1] == ".": - hostname = hostname[:-1] - allowed = re.compile( - r"^((?!-)[A-Z\d-]{1,63}(? 63: - # Wrong length - return False - match = LABEL_RE.match(bucket_name) - if match is None or match.end() != len(bucket_name): - return False - return True - - -def fix_s3_host( - request, - signature_version, - region_name, - default_endpoint_url=None, - **kwargs, -): - """ - This handler looks at S3 requests just before they are signed. - If there is a bucket name on the path (true for everything except - ListAllBuckets) it checks to see if that bucket name conforms to - the DNS naming conventions. If it does, it alters the request to - use ``virtual hosting`` style addressing rather than ``path-style`` - addressing. - - """ - if request.context.get('use_global_endpoint', False): - default_endpoint_url = 's3.amazonaws.com' - try: - switch_to_virtual_host_style( - request, signature_version, default_endpoint_url - ) - except InvalidDNSNameError as e: - bucket_name = e.kwargs['bucket_name'] - logger.debug( - 'Not changing URI, bucket is not DNS compatible: %s', bucket_name - ) - - -def switch_to_virtual_host_style( - request, signature_version, default_endpoint_url=None, **kwargs -): - """ - This is a handler to force virtual host style s3 addressing no matter - the signature version (which is taken in consideration for the default - case). If the bucket is not DNS compatible an InvalidDNSName is thrown. - - :param request: A AWSRequest object that is about to be sent. - :param signature_version: The signature version to sign with - :param default_endpoint_url: The endpoint to use when switching to a - virtual style. If None is supplied, the virtual host will be - constructed from the url of the request. - """ - if request.auth_path is not None: - # The auth_path has already been applied (this may be a - # retried request). We don't need to perform this - # customization again. - return - elif _is_get_bucket_location_request(request): - # For the GetBucketLocation response, we should not be using - # the virtual host style addressing so we can avoid any sigv4 - # issues. 
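To make the addressing switch concrete, the URL rewrite performed for a DNS-compatible bucket amounts to the following standalone sketch (the real handler additionally sets auth_path, skips GetBucketLocation, and raises InvalidDNSNameError for incompatible bucket names):

    from urllib.parse import urlsplit, urlunsplit

    def to_virtual_host_style(url):
        parts = urlsplit(url)
        bucket, _, key = parts.path.lstrip('/').partition('/')
        # Move the bucket from the path into the hostname.
        return urlunsplit(
            (parts.scheme, f'{bucket}.{parts.netloc}', '/' + key, parts.query, '')
        )

    print(to_virtual_host_style('https://s3.us-west-2.amazonaws.com/mybucket/mykey'))
    # https://mybucket.s3.us-west-2.amazonaws.com/mykey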
- logger.debug( - "Request is GetBucketLocation operation, not checking " - "for DNS compatibility." - ) - return - parts = urlsplit(request.url) - request.auth_path = parts.path - path_parts = parts.path.split('/') - - # Retrieve what the endpoint we will be prepending the bucket name to. - if default_endpoint_url is None: - default_endpoint_url = parts.netloc - - if len(path_parts) > 1: - bucket_name = path_parts[1] - if not bucket_name: - # If the bucket name is empty we should not be checking for - # dns compatibility. - return - logger.debug('Checking for DNS compatible bucket for: %s', request.url) - if check_dns_name(bucket_name): - # If the operation is on a bucket, the auth_path must be - # terminated with a '/' character. - if len(path_parts) == 2: - if request.auth_path[-1] != '/': - request.auth_path += '/' - path_parts.remove(bucket_name) - # At the very least the path must be a '/', such as with the - # CreateBucket operation when DNS style is being used. If this - # is not used you will get an empty path which is incorrect. - path = '/'.join(path_parts) or '/' - global_endpoint = default_endpoint_url - host = bucket_name + '.' + global_endpoint - new_tuple = (parts.scheme, host, path, parts.query, '') - new_uri = urlunsplit(new_tuple) - request.url = new_uri - logger.debug('URI updated to: %s', new_uri) - else: - raise InvalidDNSNameError(bucket_name=bucket_name) - - -def _is_get_bucket_location_request(request): - return request.url.endswith('?location') - - -def instance_cache(func): - """Method decorator for caching method calls to a single instance. - - **This is not a general purpose caching decorator.** - - In order to use this, you *must* provide an ``_instance_cache`` - attribute on the instance. - - This decorator is used to cache method calls. The cache is only - scoped to a single instance though such that multiple instances - will maintain their own cache. In order to keep things simple, - this decorator requires that you provide an ``_instance_cache`` - attribute on your instance. - - """ - func_name = func.__name__ - - @functools.wraps(func) - def _cache_guard(self, *args, **kwargs): - cache_key = (func_name, args) - if kwargs: - kwarg_items = tuple(sorted(kwargs.items())) - cache_key = (func_name, args, kwarg_items) - result = self._instance_cache.get(cache_key) - if result is not None: - return result - result = func(self, *args, **kwargs) - self._instance_cache[cache_key] = result - return result - - return _cache_guard - - -def lru_cache_weakref(*cache_args, **cache_kwargs): - """ - Version of functools.lru_cache that stores a weak reference to ``self``. - - Serves the same purpose as :py:func:`instance_cache` but uses Python's - functools implementation which offers ``max_size`` and ``typed`` properties. - - lru_cache is a global cache even when used on a method. The cache's - reference to ``self`` will prevent garbage collection of the object. This - wrapper around functools.lru_cache replaces the reference to ``self`` with - a weak reference to not interfere with garbage collection. 
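Usage sketch for the method-cache decorator above (assuming botocore.utils is importable); the instance must provide its own _instance_cache dict, and results are keyed per instance by method name and arguments:

    from botocore.utils import instance_cache

    class PricingClient:
        def __init__(self):
            self._instance_cache = {}

        @instance_cache
        def describe(self, region):
            print(f'fetching {region}...')
            return {'region': region}

    client = PricingClient()
    client.describe('us-east-1')   # fetches and caches
    client.describe('us-east-1')   # returned from self._instance_cache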
- """ - - def wrapper(func): - @functools.lru_cache(*cache_args, **cache_kwargs) - def func_with_weakref(weakref_to_self, *args, **kwargs): - return func(weakref_to_self(), *args, **kwargs) - - @functools.wraps(func) - def inner(self, *args, **kwargs): - for kwarg_key, kwarg_value in kwargs.items(): - if isinstance(kwarg_value, list): - kwargs[kwarg_key] = tuple(kwarg_value) - return func_with_weakref(weakref.ref(self), *args, **kwargs) - - inner.cache_info = func_with_weakref.cache_info - return inner - - return wrapper - - -def switch_host_s3_accelerate(request, operation_name, **kwargs): - """Switches the current s3 endpoint with an S3 Accelerate endpoint""" - - # Note that when registered the switching of the s3 host happens - # before it gets changed to virtual. So we are not concerned with ensuring - # that the bucket name is translated to the virtual style here and we - # can hard code the Accelerate endpoint. - parts = urlsplit(request.url).netloc.split('.') - parts = [p for p in parts if p in S3_ACCELERATE_WHITELIST] - endpoint = 'https://s3-accelerate.' - if len(parts) > 0: - endpoint += '.'.join(parts) + '.' - endpoint += 'amazonaws.com' - - if operation_name in ['ListBuckets', 'CreateBucket', 'DeleteBucket']: - return - _switch_hosts(request, endpoint, use_new_scheme=False) - - -def switch_host_with_param(request, param_name): - """Switches the host using a parameter value from a JSON request body""" - request_json = json.loads(request.data.decode('utf-8')) - if request_json.get(param_name): - new_endpoint = request_json[param_name] - _switch_hosts(request, new_endpoint) - - -def _switch_hosts(request, new_endpoint, use_new_scheme=True): - final_endpoint = _get_new_endpoint( - request.url, new_endpoint, use_new_scheme - ) - request.url = final_endpoint - - -def _get_new_endpoint(original_endpoint, new_endpoint, use_new_scheme=True): - new_endpoint_components = urlsplit(new_endpoint) - original_endpoint_components = urlsplit(original_endpoint) - scheme = original_endpoint_components.scheme - if use_new_scheme: - scheme = new_endpoint_components.scheme - final_endpoint_components = ( - scheme, - new_endpoint_components.netloc, - original_endpoint_components.path, - original_endpoint_components.query, - '', - ) - final_endpoint = urlunsplit(final_endpoint_components) - logger.debug( - 'Updating URI from %s to %s', original_endpoint, final_endpoint - ) - return final_endpoint - - -def deep_merge(base, extra): - """Deeply two dictionaries, overriding existing keys in the base. - - :param base: The base dictionary which will be merged into. - :param extra: The dictionary to merge into the base. Keys from this - dictionary will take precedence. - """ - for key in extra: - # If the key represents a dict on both given dicts, merge the sub-dicts - if ( - key in base - and isinstance(base[key], dict) - and isinstance(extra[key], dict) - ): - deep_merge(base[key], extra[key]) - continue - - # Otherwise, set the key on the base to be the value of the extra. - base[key] = extra[key] - - -def hyphenize_service_id(service_id): - """Translate the form used for event emitters. - - :param service_id: The service_id to convert. - """ - return service_id.replace(' ', '-').lower() - - -class IdentityCache: - """Base IdentityCache implementation for storing and retrieving - highly accessed credentials. - - This class is not intended to be instantiated in user code. 
- """ - - METHOD = "base_identity_cache" - - def __init__(self, client, credential_cls): - self._client = client - self._credential_cls = credential_cls - - def get_credentials(self, **kwargs): - callback = self.build_refresh_callback(**kwargs) - metadata = callback() - credential_entry = self._credential_cls.create_from_metadata( - metadata=metadata, - refresh_using=callback, - method=self.METHOD, - advisory_timeout=45, - mandatory_timeout=10, - ) - return credential_entry - - def build_refresh_callback(**kwargs): - """Callback to be implemented by subclasses. - - Returns a set of metadata to be converted into a new - credential instance. - """ - raise NotImplementedError() - - -class S3ExpressIdentityCache(IdentityCache): - """S3Express IdentityCache for retrieving and storing - credentials from CreateSession calls. - - This class is not intended to be instantiated in user code. - """ - - METHOD = "s3express" - - def __init__(self, client, credential_cls): - self._client = client - self._credential_cls = credential_cls - - @functools.lru_cache(maxsize=100) - def get_credentials(self, bucket): - return super().get_credentials(bucket=bucket) - - def build_refresh_callback(self, bucket): - def refresher(): - response = self._client.create_session(Bucket=bucket) - creds = response['Credentials'] - expiration = self._serialize_if_needed( - creds['Expiration'], iso=True - ) - return { - "access_key": creds['AccessKeyId'], - "secret_key": creds['SecretAccessKey'], - "token": creds['SessionToken'], - "expiry_time": expiration, - } - - return refresher - - def _serialize_if_needed(self, value, iso=False): - if isinstance(value, _DatetimeClass): - if iso: - return value.isoformat() - return value.strftime('%Y-%m-%dT%H:%M:%S%Z') - return value - - -class S3ExpressIdentityResolver: - def __init__(self, client, credential_cls, cache=None): - self._client = weakref.proxy(client) - - if cache is None: - cache = S3ExpressIdentityCache(self._client, credential_cls) - self._cache = cache - - def register(self, event_emitter=None): - logger.debug('Registering S3Express Identity Resolver') - emitter = event_emitter or self._client.meta.events - emitter.register('before-call.s3', self.apply_signing_cache_key) - emitter.register('before-sign.s3', self.resolve_s3express_identity) - - def apply_signing_cache_key(self, params, context, **kwargs): - endpoint_properties = context.get('endpoint_properties', {}) - backend = endpoint_properties.get('backend', None) - - # Add cache key if Bucket supplied for s3express request - bucket_name = context.get('input_params', {}).get('Bucket') - if backend == 'S3Express' and bucket_name is not None: - context.setdefault('signing', {}) - context['signing']['cache_key'] = bucket_name - - def resolve_s3express_identity( - self, - request, - signing_name, - region_name, - signature_version, - request_signer, - operation_name, - **kwargs, - ): - signing_context = request.context.get('signing', {}) - signing_name = signing_context.get('signing_name') - if signing_name == 's3express' and signature_version.startswith( - 'v4-s3express' - ): - signing_context['identity_cache'] = self._cache - if 'cache_key' not in signing_context: - signing_context['cache_key'] = ( - request.context.get('s3_redirect', {}) - .get('params', {}) - .get('Bucket') - ) - - -class S3RegionRedirectorv2: - """Updated version of S3RegionRedirector for use when - EndpointRulesetResolver is in use for endpoint resolution. 
- - This class is considered private and subject to abrupt breaking changes or - removal without prior announcement. Please do not use it directly. - """ - - def __init__(self, endpoint_bridge, client, cache=None): - self._cache = cache or {} - self._client = weakref.proxy(client) - - def register(self, event_emitter=None): - logger.debug('Registering S3 region redirector handler') - emitter = event_emitter or self._client.meta.events - emitter.register('needs-retry.s3', self.redirect_from_error) - emitter.register( - 'before-parameter-build.s3', self.annotate_request_context - ) - emitter.register( - 'before-endpoint-resolution.s3', self.redirect_from_cache - ) - - def redirect_from_error(self, request_dict, response, operation, **kwargs): - """ - An S3 request sent to the wrong region will return an error that - contains the endpoint the request should be sent to. This handler - will add the redirect information to the signing context and then - redirect the request. - """ - if response is None: - # This could be none if there was a ConnectionError or other - # transport error. - return - - redirect_ctx = request_dict.get('context', {}).get('s3_redirect', {}) - if ArnParser.is_arn(redirect_ctx.get('bucket')): - logger.debug( - 'S3 request was previously for an Accesspoint ARN, not ' - 'redirecting.' - ) - return - - if redirect_ctx.get('redirected'): - logger.debug( - 'S3 request was previously redirected, not redirecting.' - ) - return - - error = response[1].get('Error', {}) - error_code = error.get('Code') - response_metadata = response[1].get('ResponseMetadata', {}) - - # We have to account for 400 responses because - # if we sign a Head* request with the wrong region, - # we'll get a 400 Bad Request but we won't get a - # body saying it's an "AuthorizationHeaderMalformed". - is_special_head_object = ( - error_code in ('301', '400') and operation.name == 'HeadObject' - ) - is_special_head_bucket = ( - error_code in ('301', '400') - and operation.name == 'HeadBucket' - and 'x-amz-bucket-region' - in response_metadata.get('HTTPHeaders', {}) - ) - is_wrong_signing_region = ( - error_code == 'AuthorizationHeaderMalformed' and 'Region' in error - ) - is_redirect_status = response[0] is not None and response[ - 0 - ].status_code in (301, 302, 307) - is_permanent_redirect = error_code == 'PermanentRedirect' - is_opt_in_region_redirect = ( - error_code == 'IllegalLocationConstraintException' - and operation.name != 'CreateBucket' - ) - if not any( - [ - is_special_head_object, - is_wrong_signing_region, - is_permanent_redirect, - is_special_head_bucket, - is_redirect_status, - is_opt_in_region_redirect, - ] - ): - return - - bucket = request_dict['context']['s3_redirect']['bucket'] - client_region = request_dict['context'].get('client_region') - new_region = self.get_bucket_region(bucket, response) - - if new_region is None: - logger.debug( - "S3 client configured for region %s but the " - "bucket %s is not in that region and the proper region " - "could not be automatically determined.", - client_region, - bucket, - ) - return - - logger.debug( - "S3 client configured for region %s but the bucket %s " - "is in region %s; Please configure the proper region to " - "avoid multiple unnecessary redirects and signing attempts.", - client_region, - bucket, - new_region, - ) - # Adding the new region to _cache will make construct_endpoint() to - # use the new region as value for the AWS::Region builtin parameter. 
- self._cache[bucket] = new_region - - # Re-resolve endpoint with new region and modify request_dict with - # the new URL, auth scheme, and signing context. - ep_resolver = self._client._ruleset_resolver - ep_info = ep_resolver.construct_endpoint( - operation_model=operation, - call_args=request_dict['context']['s3_redirect']['params'], - request_context=request_dict['context'], - ) - request_dict['url'] = self.set_request_url( - request_dict['url'], ep_info.url - ) - request_dict['context']['s3_redirect']['redirected'] = True - auth_schemes = ep_info.properties.get('authSchemes') - if auth_schemes is not None: - auth_info = ep_resolver.auth_schemes_to_signing_ctx(auth_schemes) - auth_type, signing_context = auth_info - request_dict['context']['auth_type'] = auth_type - request_dict['context']['signing'] = { - **request_dict['context'].get('signing', {}), - **signing_context, - } - - # Return 0 so it doesn't wait to retry - return 0 - - def get_bucket_region(self, bucket, response): - """ - There are multiple potential sources for the new region to redirect to, - but they aren't all universally available for use. This will try to - find region from response elements, but will fall back to calling - HEAD on the bucket if all else fails. - - :param bucket: The bucket to find the region for. This is necessary if - the region is not available in the error response. - :param response: A response representing a service request that failed - due to incorrect region configuration. - """ - # First try to source the region from the headers. - service_response = response[1] - response_headers = service_response['ResponseMetadata']['HTTPHeaders'] - if 'x-amz-bucket-region' in response_headers: - return response_headers['x-amz-bucket-region'] - - # Next, check the error body - region = service_response.get('Error', {}).get('Region', None) - if region is not None: - return region - - # Finally, HEAD the bucket. No other choice sadly. - try: - response = self._client.head_bucket(Bucket=bucket) - headers = response['ResponseMetadata']['HTTPHeaders'] - except ClientError as e: - headers = e.response['ResponseMetadata']['HTTPHeaders'] - - region = headers.get('x-amz-bucket-region', None) - return region - - def set_request_url(self, old_url, new_endpoint, **kwargs): - """ - Splice a new endpoint into an existing URL. Note that some endpoints - from the the endpoint provider have a path component which will be - discarded by this function. - """ - return _get_new_endpoint(old_url, new_endpoint, False) - - def redirect_from_cache(self, builtins, params, **kwargs): - """ - If a bucket name has been redirected before, it is in the cache. This - handler will update the AWS::Region endpoint resolver builtin param - to use the region from cache instead of the client region to avoid the - redirect. - """ - bucket = params.get('Bucket') - if bucket is not None and bucket in self._cache: - new_region = self._cache.get(bucket) - builtins['AWS::Region'] = new_region - - def annotate_request_context(self, params, context, **kwargs): - """Store the bucket name in context for later use when redirecting. - The bucket name may be an access point ARN or alias. - """ - bucket = params.get('Bucket') - context['s3_redirect'] = { - 'redirected': False, - 'bucket': bucket, - 'params': params, - } - - -class S3RegionRedirector: - """This handler has been replaced by S3RegionRedirectorv2. The original - version remains in place for any third-party libraries that import it. 
- """ - - def __init__(self, endpoint_bridge, client, cache=None): - self._endpoint_resolver = endpoint_bridge - self._cache = cache - if self._cache is None: - self._cache = {} - - # This needs to be a weak ref in order to prevent memory leaks on - # python 2.6 - self._client = weakref.proxy(client) - - warnings.warn( - 'The S3RegionRedirector class has been deprecated for a new ' - 'internal replacement. A future version of botocore may remove ' - 'this class.', - category=FutureWarning, - ) - - def register(self, event_emitter=None): - emitter = event_emitter or self._client.meta.events - emitter.register('needs-retry.s3', self.redirect_from_error) - emitter.register('before-call.s3', self.set_request_url) - emitter.register('before-parameter-build.s3', self.redirect_from_cache) - - def redirect_from_error(self, request_dict, response, operation, **kwargs): - """ - An S3 request sent to the wrong region will return an error that - contains the endpoint the request should be sent to. This handler - will add the redirect information to the signing context and then - redirect the request. - """ - if response is None: - # This could be none if there was a ConnectionError or other - # transport error. - return - - if self._is_s3_accesspoint(request_dict.get('context', {})): - logger.debug( - 'S3 request was previously to an accesspoint, not redirecting.' - ) - return - - if request_dict.get('context', {}).get('s3_redirected'): - logger.debug( - 'S3 request was previously redirected, not redirecting.' - ) - return - - error = response[1].get('Error', {}) - error_code = error.get('Code') - response_metadata = response[1].get('ResponseMetadata', {}) - - # We have to account for 400 responses because - # if we sign a Head* request with the wrong region, - # we'll get a 400 Bad Request but we won't get a - # body saying it's an "AuthorizationHeaderMalformed". 
- is_special_head_object = ( - error_code in ('301', '400') and operation.name == 'HeadObject' - ) - is_special_head_bucket = ( - error_code in ('301', '400') - and operation.name == 'HeadBucket' - and 'x-amz-bucket-region' - in response_metadata.get('HTTPHeaders', {}) - ) - is_wrong_signing_region = ( - error_code == 'AuthorizationHeaderMalformed' and 'Region' in error - ) - is_redirect_status = response[0] is not None and response[ - 0 - ].status_code in (301, 302, 307) - is_permanent_redirect = error_code == 'PermanentRedirect' - if not any( - [ - is_special_head_object, - is_wrong_signing_region, - is_permanent_redirect, - is_special_head_bucket, - is_redirect_status, - ] - ): - return - - bucket = request_dict['context']['signing']['bucket'] - client_region = request_dict['context'].get('client_region') - new_region = self.get_bucket_region(bucket, response) - - if new_region is None: - logger.debug( - "S3 client configured for region %s but the bucket %s is not " - "in that region and the proper region could not be " - "automatically determined.", - client_region, - bucket, - ) - return - - logger.debug( - "S3 client configured for region %s but the bucket %s is in region" - " %s; Please configure the proper region to avoid multiple " - "unnecessary redirects and signing attempts.", - client_region, - bucket, - new_region, - ) - endpoint = self._endpoint_resolver.resolve('s3', new_region) - endpoint = endpoint['endpoint_url'] - - signing_context = { - 'region': new_region, - 'bucket': bucket, - 'endpoint': endpoint, - } - request_dict['context']['signing'] = signing_context - - self._cache[bucket] = signing_context - self.set_request_url(request_dict, request_dict['context']) - - request_dict['context']['s3_redirected'] = True - - # Return 0 so it doesn't wait to retry - return 0 - - def get_bucket_region(self, bucket, response): - """ - There are multiple potential sources for the new region to redirect to, - but they aren't all universally available for use. This will try to - find region from response elements, but will fall back to calling - HEAD on the bucket if all else fails. - - :param bucket: The bucket to find the region for. This is necessary if - the region is not available in the error response. - :param response: A response representing a service request that failed - due to incorrect region configuration. - """ - # First try to source the region from the headers. - service_response = response[1] - response_headers = service_response['ResponseMetadata']['HTTPHeaders'] - if 'x-amz-bucket-region' in response_headers: - return response_headers['x-amz-bucket-region'] - - # Next, check the error body - region = service_response.get('Error', {}).get('Region', None) - if region is not None: - return region - - # Finally, HEAD the bucket. No other choice sadly. - try: - response = self._client.head_bucket(Bucket=bucket) - headers = response['ResponseMetadata']['HTTPHeaders'] - except ClientError as e: - headers = e.response['ResponseMetadata']['HTTPHeaders'] - - region = headers.get('x-amz-bucket-region', None) - return region - - def set_request_url(self, params, context, **kwargs): - endpoint = context.get('signing', {}).get('endpoint', None) - if endpoint is not None: - params['url'] = _get_new_endpoint(params['url'], endpoint, False) - - def redirect_from_cache(self, params, context, **kwargs): - """ - This handler retrieves a given bucket's signing context from the cache - and adds it into the request context. 
- """ - if self._is_s3_accesspoint(context): - return - bucket = params.get('Bucket') - signing_context = self._cache.get(bucket) - if signing_context is not None: - context['signing'] = signing_context - else: - context['signing'] = {'bucket': bucket} - - def _is_s3_accesspoint(self, context): - return 's3_accesspoint' in context - - -class InvalidArnException(ValueError): - pass - - -class ArnParser: - def parse_arn(self, arn): - arn_parts = arn.split(':', 5) - if len(arn_parts) < 6: - raise InvalidArnException( - f'Provided ARN: {arn} must be of the format: ' - 'arn:partition:service:region:account:resource' - ) - return { - 'partition': arn_parts[1], - 'service': arn_parts[2], - 'region': arn_parts[3], - 'account': arn_parts[4], - 'resource': arn_parts[5], - } - - @staticmethod - def is_arn(value): - if not isinstance(value, str) or not value.startswith('arn:'): - return False - arn_parser = ArnParser() - try: - arn_parser.parse_arn(value) - return True - except InvalidArnException: - return False - - -class S3ArnParamHandler: - _RESOURCE_REGEX = re.compile( - r'^(?P<resource_type>accesspoint|outpost)[/:](?P<resource_name>.+)$' - ) - _OUTPOST_RESOURCE_REGEX = re.compile( - r'^(?P<outpost_name>[a-zA-Z0-9\-]{1,63})[/:]accesspoint[/:]' - r'(?P<accesspoint_name>[a-zA-Z0-9\-]{1,63}$)' - ) - _BLACKLISTED_OPERATIONS = ['CreateBucket'] - - def __init__(self, arn_parser=None): - self._arn_parser = arn_parser - if arn_parser is None: - self._arn_parser = ArnParser() - - def register(self, event_emitter): - event_emitter.register('before-parameter-build.s3', self.handle_arn) - - def handle_arn(self, params, model, context, **kwargs): - if model.name in self._BLACKLISTED_OPERATIONS: - return - arn_details = self._get_arn_details_from_bucket_param(params) - if arn_details is None: - return - if arn_details['resource_type'] == 'accesspoint': - self._store_accesspoint(params, context, arn_details) - elif arn_details['resource_type'] == 'outpost': - self._store_outpost(params, context, arn_details) - - def _get_arn_details_from_bucket_param(self, params): - if 'Bucket' in params: - try: - arn = params['Bucket'] - arn_details = self._arn_parser.parse_arn(arn) - self._add_resource_type_and_name(arn, arn_details) - return arn_details - except InvalidArnException: - pass - return None - - def _add_resource_type_and_name(self, arn, arn_details): - match = self._RESOURCE_REGEX.match(arn_details['resource']) - if match: - arn_details['resource_type'] = match.group('resource_type') - arn_details['resource_name'] = match.group('resource_name') - else: - raise UnsupportedS3ArnError(arn=arn) - - def _store_accesspoint(self, params, context, arn_details): - # Ideally the access-point would be stored as a parameter in the - # request where the serializer would then know how to serialize it, - # but access-points are not modeled in S3 operations so it would fail - # validation. Instead, we set the access-point to the bucket parameter - # to have some value set when serializing the request and additional - # information on the context from the arn to use in forming the - # access-point endpoint. 
- params['Bucket'] = arn_details['resource_name'] - context['s3_accesspoint'] = { - 'name': arn_details['resource_name'], - 'account': arn_details['account'], - 'partition': arn_details['partition'], - 'region': arn_details['region'], - 'service': arn_details['service'], - } - - def _store_outpost(self, params, context, arn_details): - resource_name = arn_details['resource_name'] - match = self._OUTPOST_RESOURCE_REGEX.match(resource_name) - if not match: - raise UnsupportedOutpostResourceError(resource_name=resource_name) - # Because we need to set the bucket name to something to pass - # validation we're going to use the access point name to be consistent - # with normal access point arns. - accesspoint_name = match.group('accesspoint_name') - params['Bucket'] = accesspoint_name - context['s3_accesspoint'] = { - 'outpost_name': match.group('outpost_name'), - 'name': accesspoint_name, - 'account': arn_details['account'], - 'partition': arn_details['partition'], - 'region': arn_details['region'], - 'service': arn_details['service'], - } - - -class S3EndpointSetter: - _DEFAULT_PARTITION = 'aws' - _DEFAULT_DNS_SUFFIX = 'amazonaws.com' - - def __init__( - self, - endpoint_resolver, - region=None, - s3_config=None, - endpoint_url=None, - partition=None, - use_fips_endpoint=False, - ): - # This is calling the endpoint_resolver in regions.py - self._endpoint_resolver = endpoint_resolver - self._region = region - self._s3_config = s3_config - self._use_fips_endpoint = use_fips_endpoint - if s3_config is None: - self._s3_config = {} - self._endpoint_url = endpoint_url - self._partition = partition - if partition is None: - self._partition = self._DEFAULT_PARTITION - - def register(self, event_emitter): - event_emitter.register('before-sign.s3', self.set_endpoint) - event_emitter.register('choose-signer.s3', self.set_signer) - event_emitter.register( - 'before-call.s3.WriteGetObjectResponse', - self.update_endpoint_to_s3_object_lambda, - ) - - def update_endpoint_to_s3_object_lambda(self, params, context, **kwargs): - if self._use_accelerate_endpoint: - raise UnsupportedS3ConfigurationError( - msg='S3 client does not support accelerate endpoints for S3 Object Lambda operations', - ) - - self._override_signing_name(context, 's3-object-lambda') - if self._endpoint_url: - # Only update the url if an explicit url was not provided - return - - resolver = self._endpoint_resolver - # Constructing endpoints as s3-object-lambda as region - resolved = resolver.construct_endpoint( - 's3-object-lambda', self._region - ) - - # Ideally we would be able to replace the endpoint before - # serialization but there's no event to do that currently - # host_prefix is all the arn/bucket specs - new_endpoint = 'https://{host_prefix}{hostname}'.format( - host_prefix=params['host_prefix'], - hostname=resolved['hostname'], - ) - - params['url'] = _get_new_endpoint(params['url'], new_endpoint, False) - - def set_endpoint(self, request, **kwargs): - if self._use_accesspoint_endpoint(request): - self._validate_accesspoint_supported(request) - self._validate_fips_supported(request) - self._validate_global_regions(request) - region_name = self._resolve_region_for_accesspoint_endpoint( - request - ) - self._resolve_signing_name_for_accesspoint_endpoint(request) - self._switch_to_accesspoint_endpoint(request, region_name) - return - if self._use_accelerate_endpoint: - if self._use_fips_endpoint: - raise UnsupportedS3ConfigurationError( - msg=( - 'Client is configured to use the FIPS psuedo region ' - f'for "{self._region}", but S3 
Accelerate does not have any FIPS ' - 'compatible endpoints.' - ) - ) - switch_host_s3_accelerate(request=request, **kwargs) - if self._s3_addressing_handler: - self._s3_addressing_handler(request=request, **kwargs) - - def _use_accesspoint_endpoint(self, request): - return 's3_accesspoint' in request.context - - def _validate_fips_supported(self, request): - if not self._use_fips_endpoint: - return - if 'fips' in request.context['s3_accesspoint']['region']: - raise UnsupportedS3AccesspointConfigurationError( - msg={'Invalid ARN, FIPS region not allowed in ARN.'} - ) - if 'outpost_name' in request.context['s3_accesspoint']: - raise UnsupportedS3AccesspointConfigurationError( - msg=( - f'Client is configured to use the FIPS psuedo-region "{self._region}", ' - 'but outpost ARNs do not support FIPS endpoints.' - ) - ) - # Transforming psuedo region to actual region - accesspoint_region = request.context['s3_accesspoint']['region'] - if accesspoint_region != self._region: - if not self._s3_config.get('use_arn_region', True): - # TODO: Update message to reflect use_arn_region - # is not set - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Client is configured to use the FIPS psuedo-region ' - f'for "{self._region}", but the access-point ARN provided is for ' - f'the "{accesspoint_region}" region. For clients using a FIPS ' - 'psuedo-region calls to access-point ARNs in another ' - 'region are not allowed.' - ) - ) - - def _validate_global_regions(self, request): - if self._s3_config.get('use_arn_region', True): - return - if self._region in ['aws-global', 's3-external-1']: - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Client is configured to use the global psuedo-region ' - f'"{self._region}". When providing access-point ARNs a regional ' - 'endpoint must be specified.' - ) - ) - - def _validate_accesspoint_supported(self, request): - if self._use_accelerate_endpoint: - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Client does not support s3 accelerate configuration ' - 'when an access-point ARN is specified.' - ) - ) - request_partition = request.context['s3_accesspoint']['partition'] - if request_partition != self._partition: - raise UnsupportedS3AccesspointConfigurationError( - msg=( - f'Client is configured for "{self._partition}" partition, but access-point' - f' ARN provided is for "{request_partition}" partition. The client and ' - ' access-point partition must be the same.' - ) - ) - s3_service = request.context['s3_accesspoint'].get('service') - if s3_service == 's3-object-lambda' and self._s3_config.get( - 'use_dualstack_endpoint' - ): - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Client does not support s3 dualstack configuration ' - 'when an S3 Object Lambda access point ARN is specified.' - ) - ) - outpost_name = request.context['s3_accesspoint'].get('outpost_name') - if outpost_name and self._s3_config.get('use_dualstack_endpoint'): - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Client does not support s3 dualstack configuration ' - 'when an outpost ARN is specified.' - ) - ) - self._validate_mrap_s3_config(request) - - def _validate_mrap_s3_config(self, request): - if not is_global_accesspoint(request.context): - return - if self._s3_config.get('s3_disable_multiregion_access_points'): - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Invalid configuration, Multi-Region Access Point ' - 'ARNs are disabled.' 
- ) - ) - elif self._s3_config.get('use_dualstack_endpoint'): - raise UnsupportedS3AccesspointConfigurationError( - msg=( - 'Client does not support s3 dualstack configuration ' - 'when a Multi-Region Access Point ARN is specified.' - ) - ) - - def _resolve_region_for_accesspoint_endpoint(self, request): - if is_global_accesspoint(request.context): - # Requests going to MRAP endpoints MUST be set to any (*) region. - self._override_signing_region(request, '*') - elif self._s3_config.get('use_arn_region', True): - accesspoint_region = request.context['s3_accesspoint']['region'] - # If we are using the region from the access point, - # we will also want to make sure that we set it as the - # signing region as well - self._override_signing_region(request, accesspoint_region) - return accesspoint_region - return self._region - - def set_signer(self, context, **kwargs): - if is_global_accesspoint(context): - if HAS_CRT: - return 's3v4a' - else: - raise MissingDependencyException( - msg="Using S3 with an MRAP arn requires an additional " - "dependency. You will need to pip install " - "botocore[crt] before proceeding." - ) - - def _resolve_signing_name_for_accesspoint_endpoint(self, request): - accesspoint_service = request.context['s3_accesspoint']['service'] - self._override_signing_name(request.context, accesspoint_service) - - def _switch_to_accesspoint_endpoint(self, request, region_name): - original_components = urlsplit(request.url) - accesspoint_endpoint = urlunsplit( - ( - original_components.scheme, - self._get_netloc(request.context, region_name), - self._get_accesspoint_path( - original_components.path, request.context - ), - original_components.query, - '', - ) - ) - logger.debug( - 'Updating URI from %s to %s', request.url, accesspoint_endpoint - ) - request.url = accesspoint_endpoint - - def _get_netloc(self, request_context, region_name): - if is_global_accesspoint(request_context): - return self._get_mrap_netloc(request_context) - else: - return self._get_accesspoint_netloc(request_context, region_name) - - def _get_mrap_netloc(self, request_context): - s3_accesspoint = request_context['s3_accesspoint'] - region_name = 's3-global' - mrap_netloc_components = [s3_accesspoint['name']] - if self._endpoint_url: - endpoint_url_netloc = urlsplit(self._endpoint_url).netloc - mrap_netloc_components.append(endpoint_url_netloc) - else: - partition = s3_accesspoint['partition'] - mrap_netloc_components.extend( - [ - 'accesspoint', - region_name, - self._get_partition_dns_suffix(partition), - ] - ) - return '.'.join(mrap_netloc_components) - - def _get_accesspoint_netloc(self, request_context, region_name): - s3_accesspoint = request_context['s3_accesspoint'] - accesspoint_netloc_components = [ - '{}-{}'.format(s3_accesspoint['name'], s3_accesspoint['account']), - ] - outpost_name = s3_accesspoint.get('outpost_name') - if self._endpoint_url: - if outpost_name: - accesspoint_netloc_components.append(outpost_name) - endpoint_url_netloc = urlsplit(self._endpoint_url).netloc - accesspoint_netloc_components.append(endpoint_url_netloc) - else: - if outpost_name: - outpost_host = [outpost_name, 's3-outposts'] - accesspoint_netloc_components.extend(outpost_host) - elif s3_accesspoint['service'] == 's3-object-lambda': - component = self._inject_fips_if_needed( - 's3-object-lambda', request_context - ) - accesspoint_netloc_components.append(component) - else: - component = self._inject_fips_if_needed( - 's3-accesspoint', request_context - ) - accesspoint_netloc_components.append(component) - if 
self._s3_config.get('use_dualstack_endpoint'): - accesspoint_netloc_components.append('dualstack') - accesspoint_netloc_components.extend( - [region_name, self._get_dns_suffix(region_name)] - ) - return '.'.join(accesspoint_netloc_components) - - def _inject_fips_if_needed(self, component, request_context): - if self._use_fips_endpoint: - return f'{component}-fips' - return component - - def _get_accesspoint_path(self, original_path, request_context): - # The Bucket parameter was substituted with the access-point name as - # some value was required in serializing the bucket name. Now that - # we are making the request directly to the access point, we will - # want to remove that access-point name from the path. - name = request_context['s3_accesspoint']['name'] - # All S3 operations require at least a / in their path. - return original_path.replace('/' + name, '', 1) or '/' - - def _get_partition_dns_suffix(self, partition_name): - dns_suffix = self._endpoint_resolver.get_partition_dns_suffix( - partition_name - ) - if dns_suffix is None: - dns_suffix = self._DEFAULT_DNS_SUFFIX - return dns_suffix - - def _get_dns_suffix(self, region_name): - resolved = self._endpoint_resolver.construct_endpoint( - 's3', region_name - ) - dns_suffix = self._DEFAULT_DNS_SUFFIX - if resolved and 'dnsSuffix' in resolved: - dns_suffix = resolved['dnsSuffix'] - return dns_suffix - - def _override_signing_region(self, request, region_name): - signing_context = request.context.get('signing', {}) - # S3SigV4Auth will use the context['signing']['region'] value to - # sign with if present. This is used by the Bucket redirector - # as well but we should be fine because the redirector is never - # used in combination with the accesspoint setting logic. - signing_context['region'] = region_name - request.context['signing'] = signing_context - - def _override_signing_name(self, context, signing_name): - signing_context = context.get('signing', {}) - # S3SigV4Auth will use the context['signing']['signing_name'] value to - # sign with if present. This is used by the Bucket redirector - # as well but we should be fine because the redirector is never - # used in combination with the accesspoint setting logic. - signing_context['signing_name'] = signing_name - context['signing'] = signing_context - - @CachedProperty - def _use_accelerate_endpoint(self): - # Enable accelerate if the configuration is set to to true or the - # endpoint being used matches one of the accelerate endpoints. - - # Accelerate has been explicitly configured. - if self._s3_config.get('use_accelerate_endpoint'): - return True - - # Accelerate mode is turned on automatically if an endpoint url is - # provided that matches the accelerate scheme. - if self._endpoint_url is None: - return False - - # Accelerate is only valid for Amazon endpoints. - netloc = urlsplit(self._endpoint_url).netloc - if not netloc.endswith('amazonaws.com'): - return False - - # The first part of the url should always be s3-accelerate. - parts = netloc.split('.') - if parts[0] != 's3-accelerate': - return False - - # Url parts between 's3-accelerate' and 'amazonaws.com' which - # represent different url features. - feature_parts = parts[1:-2] - - # There should be no duplicate url parts. - if len(feature_parts) != len(set(feature_parts)): - return False - - # Remaining parts must all be in the whitelist. 
- return all(p in S3_ACCELERATE_WHITELIST for p in feature_parts) - - @CachedProperty - def _addressing_style(self): - # Use virtual host style addressing if accelerate is enabled or if - # the given endpoint url is an accelerate endpoint. - if self._use_accelerate_endpoint: - return 'virtual' - - # If a particular addressing style is configured, use it. - configured_addressing_style = self._s3_config.get('addressing_style') - if configured_addressing_style: - return configured_addressing_style - - @CachedProperty - def _s3_addressing_handler(self): - # If virtual host style was configured, use it regardless of whether - # or not the bucket looks dns compatible. - if self._addressing_style == 'virtual': - logger.debug("Using S3 virtual host style addressing.") - return switch_to_virtual_host_style - - # If path style is configured, no additional steps are needed. If - # endpoint_url was specified, don't default to virtual. We could - # potentially default provided endpoint urls to virtual hosted - # style, but for now it is avoided. - if self._addressing_style == 'path' or self._endpoint_url is not None: - logger.debug("Using S3 path style addressing.") - return None - - logger.debug( - "Defaulting to S3 virtual host style addressing with " - "path style addressing fallback." - ) - - # By default, try to use virtual style with path fallback. - return fix_s3_host - - -class S3ControlEndpointSetter: - _DEFAULT_PARTITION = 'aws' - _DEFAULT_DNS_SUFFIX = 'amazonaws.com' - _HOST_LABEL_REGEX = re.compile(r'^[a-zA-Z0-9\-]{1,63}$') - - def __init__( - self, - endpoint_resolver, - region=None, - s3_config=None, - endpoint_url=None, - partition=None, - use_fips_endpoint=False, - ): - self._endpoint_resolver = endpoint_resolver - self._region = region - self._s3_config = s3_config - self._use_fips_endpoint = use_fips_endpoint - if s3_config is None: - self._s3_config = {} - self._endpoint_url = endpoint_url - self._partition = partition - if partition is None: - self._partition = self._DEFAULT_PARTITION - - def register(self, event_emitter): - event_emitter.register('before-sign.s3-control', self.set_endpoint) - - def set_endpoint(self, request, **kwargs): - if self._use_endpoint_from_arn_details(request): - self._validate_endpoint_from_arn_details_supported(request) - region_name = self._resolve_region_from_arn_details(request) - self._resolve_signing_name_from_arn_details(request) - self._resolve_endpoint_from_arn_details(request, region_name) - self._add_headers_from_arn_details(request) - elif self._use_endpoint_from_outpost_id(request): - self._validate_outpost_redirection_valid(request) - self._override_signing_name(request, 's3-outposts') - new_netloc = self._construct_outpost_endpoint(self._region) - self._update_request_netloc(request, new_netloc) - - def _use_endpoint_from_arn_details(self, request): - return 'arn_details' in request.context - - def _use_endpoint_from_outpost_id(self, request): - return 'outpost_id' in request.context - - def _validate_endpoint_from_arn_details_supported(self, request): - if 'fips' in request.context['arn_details']['region']: - raise UnsupportedS3ControlArnError( - arn=request.context['arn_details']['original'], - msg='Invalid ARN, FIPS region not allowed in ARN.', - ) - if not self._s3_config.get('use_arn_region', False): - arn_region = request.context['arn_details']['region'] - if arn_region != self._region: - error_msg = ( - 'The use_arn_region configuration is disabled but ' - f'received arn for "{arn_region}" when the client is configured ' - f'to use 
"{self._region}"' - ) - raise UnsupportedS3ControlConfigurationError(msg=error_msg) - request_partion = request.context['arn_details']['partition'] - if request_partion != self._partition: - raise UnsupportedS3ControlConfigurationError( - msg=( - f'Client is configured for "{self._partition}" partition, but arn ' - f'provided is for "{request_partion}" partition. The client and ' - 'arn partition must be the same.' - ) - ) - if self._s3_config.get('use_accelerate_endpoint'): - raise UnsupportedS3ControlConfigurationError( - msg='S3 control client does not support accelerate endpoints', - ) - if 'outpost_name' in request.context['arn_details']: - self._validate_outpost_redirection_valid(request) - - def _validate_outpost_redirection_valid(self, request): - if self._s3_config.get('use_dualstack_endpoint'): - raise UnsupportedS3ControlConfigurationError( - msg=( - 'Client does not support s3 dualstack configuration ' - 'when an outpost is specified.' - ) - ) - - def _resolve_region_from_arn_details(self, request): - if self._s3_config.get('use_arn_region', False): - arn_region = request.context['arn_details']['region'] - # If we are using the region from the expanded arn, we will also - # want to make sure that we set it as the signing region as well - self._override_signing_region(request, arn_region) - return arn_region - return self._region - - def _resolve_signing_name_from_arn_details(self, request): - arn_service = request.context['arn_details']['service'] - self._override_signing_name(request, arn_service) - return arn_service - - def _resolve_endpoint_from_arn_details(self, request, region_name): - new_netloc = self._resolve_netloc_from_arn_details( - request, region_name - ) - self._update_request_netloc(request, new_netloc) - - def _update_request_netloc(self, request, new_netloc): - original_components = urlsplit(request.url) - arn_details_endpoint = urlunsplit( - ( - original_components.scheme, - new_netloc, - original_components.path, - original_components.query, - '', - ) - ) - logger.debug( - 'Updating URI from %s to %s', request.url, arn_details_endpoint - ) - request.url = arn_details_endpoint - - def _resolve_netloc_from_arn_details(self, request, region_name): - arn_details = request.context['arn_details'] - if 'outpost_name' in arn_details: - return self._construct_outpost_endpoint(region_name) - account = arn_details['account'] - return self._construct_s3_control_endpoint(region_name, account) - - def _is_valid_host_label(self, label): - return self._HOST_LABEL_REGEX.match(label) - - def _validate_host_labels(self, *labels): - for label in labels: - if not self._is_valid_host_label(label): - raise InvalidHostLabelError(label=label) - - def _construct_s3_control_endpoint(self, region_name, account): - self._validate_host_labels(region_name, account) - if self._endpoint_url: - endpoint_url_netloc = urlsplit(self._endpoint_url).netloc - netloc = [account, endpoint_url_netloc] - else: - netloc = [ - account, - 's3-control', - ] - self._add_dualstack(netloc) - dns_suffix = self._get_dns_suffix(region_name) - netloc.extend([region_name, dns_suffix]) - return self._construct_netloc(netloc) - - def _construct_outpost_endpoint(self, region_name): - self._validate_host_labels(region_name) - if self._endpoint_url: - return urlsplit(self._endpoint_url).netloc - else: - netloc = [ - 's3-outposts', - region_name, - self._get_dns_suffix(region_name), - ] - self._add_fips(netloc) - return self._construct_netloc(netloc) - - def _construct_netloc(self, netloc): - return '.'.join(netloc) - 
- def _add_fips(self, netloc): - if self._use_fips_endpoint: - netloc[0] = netloc[0] + '-fips' - - def _add_dualstack(self, netloc): - if self._s3_config.get('use_dualstack_endpoint'): - netloc.append('dualstack') - - def _get_dns_suffix(self, region_name): - resolved = self._endpoint_resolver.construct_endpoint( - 's3', region_name - ) - dns_suffix = self._DEFAULT_DNS_SUFFIX - if resolved and 'dnsSuffix' in resolved: - dns_suffix = resolved['dnsSuffix'] - return dns_suffix - - def _override_signing_region(self, request, region_name): - signing_context = request.context.get('signing', {}) - # S3SigV4Auth will use the context['signing']['region'] value to - # sign with if present. This is used by the Bucket redirector - # as well but we should be fine because the redirector is never - # used in combination with the accesspoint setting logic. - signing_context['region'] = region_name - request.context['signing'] = signing_context - - def _override_signing_name(self, request, signing_name): - signing_context = request.context.get('signing', {}) - # S3SigV4Auth will use the context['signing']['signing_name'] value to - # sign with if present. This is used by the Bucket redirector - # as well but we should be fine because the redirector is never - # used in combination with the accesspoint setting logic. - signing_context['signing_name'] = signing_name - request.context['signing'] = signing_context - - def _add_headers_from_arn_details(self, request): - arn_details = request.context['arn_details'] - outpost_name = arn_details.get('outpost_name') - if outpost_name: - self._add_outpost_id_header(request, outpost_name) - - def _add_outpost_id_header(self, request, outpost_name): - request.headers['x-amz-outpost-id'] = outpost_name - - -class S3ControlArnParamHandler: - """This handler has been replaced by S3ControlArnParamHandlerv2. The - original version remains in place for any third-party importers. - """ - - _RESOURCE_SPLIT_REGEX = re.compile(r'[/:]') - - def __init__(self, arn_parser=None): - self._arn_parser = arn_parser - if arn_parser is None: - self._arn_parser = ArnParser() - warnings.warn( - 'The S3ControlArnParamHandler class has been deprecated for a new ' - 'internal replacement. 
A future version of botocore may remove ' - 'this class.', - category=FutureWarning, - ) - - def register(self, event_emitter): - event_emitter.register( - 'before-parameter-build.s3-control', - self.handle_arn, - ) - - def handle_arn(self, params, model, context, **kwargs): - if model.name in ('CreateBucket', 'ListRegionalBuckets'): - # CreateBucket and ListRegionalBuckets are special cases that do - # not obey ARN based redirection but will redirect based off of the - # presence of the OutpostId parameter - self._handle_outpost_id_param(params, model, context) - else: - self._handle_name_param(params, model, context) - self._handle_bucket_param(params, model, context) - - def _get_arn_details_from_param(self, params, param_name): - if param_name not in params: - return None - try: - arn = params[param_name] - arn_details = self._arn_parser.parse_arn(arn) - arn_details['original'] = arn - arn_details['resources'] = self._split_resource(arn_details) - return arn_details - except InvalidArnException: - return None - - def _split_resource(self, arn_details): - return self._RESOURCE_SPLIT_REGEX.split(arn_details['resource']) - - def _override_account_id_param(self, params, arn_details): - account_id = arn_details['account'] - if 'AccountId' in params and params['AccountId'] != account_id: - error_msg = ( - 'Account ID in arn does not match the AccountId parameter ' - 'provided: "{}"' - ).format(params['AccountId']) - raise UnsupportedS3ControlArnError( - arn=arn_details['original'], - msg=error_msg, - ) - params['AccountId'] = account_id - - def _handle_outpost_id_param(self, params, model, context): - if 'OutpostId' not in params: - return - context['outpost_id'] = params['OutpostId'] - - def _handle_name_param(self, params, model, context): - # CreateAccessPoint is a special case that does not expand Name - if model.name == 'CreateAccessPoint': - return - arn_details = self._get_arn_details_from_param(params, 'Name') - if arn_details is None: - return - if self._is_outpost_accesspoint(arn_details): - self._store_outpost_accesspoint(params, context, arn_details) - else: - error_msg = 'The Name parameter does not support the provided ARN' - raise UnsupportedS3ControlArnError( - arn=arn_details['original'], - msg=error_msg, - ) - - def _is_outpost_accesspoint(self, arn_details): - if arn_details['service'] != 's3-outposts': - return False - resources = arn_details['resources'] - if len(resources) != 4: - return False - # Resource must be of the form outpost/op-123/accesspoint/name - return resources[0] == 'outpost' and resources[2] == 'accesspoint' - - def _store_outpost_accesspoint(self, params, context, arn_details): - self._override_account_id_param(params, arn_details) - accesspoint_name = arn_details['resources'][3] - params['Name'] = accesspoint_name - arn_details['accesspoint_name'] = accesspoint_name - arn_details['outpost_name'] = arn_details['resources'][1] - context['arn_details'] = arn_details - - def _handle_bucket_param(self, params, model, context): - arn_details = self._get_arn_details_from_param(params, 'Bucket') - if arn_details is None: - return - if self._is_outpost_bucket(arn_details): - self._store_outpost_bucket(params, context, arn_details) - else: - error_msg = ( - 'The Bucket parameter does not support the provided ARN' - ) - raise UnsupportedS3ControlArnError( - arn=arn_details['original'], - msg=error_msg, - ) - - def _is_outpost_bucket(self, arn_details): - if arn_details['service'] != 's3-outposts': - return False - resources = arn_details['resources'] - if 
len(resources) != 4: - return False - # Resource must be of the form outpost/op-123/bucket/name - return resources[0] == 'outpost' and resources[2] == 'bucket' - - def _store_outpost_bucket(self, params, context, arn_details): - self._override_account_id_param(params, arn_details) - bucket_name = arn_details['resources'][3] - params['Bucket'] = bucket_name - arn_details['bucket_name'] = bucket_name - arn_details['outpost_name'] = arn_details['resources'][1] - context['arn_details'] = arn_details - - -class S3ControlArnParamHandlerv2(S3ControlArnParamHandler): - """Updated version of S3ControlArnParamHandler for use when - EndpointRulesetResolver is in use for endpoint resolution. - - This class is considered private and subject to abrupt breaking changes or - removal without prior announcement. Please do not use it directly. - """ - - def __init__(self, arn_parser=None): - self._arn_parser = arn_parser - if arn_parser is None: - self._arn_parser = ArnParser() - - def register(self, event_emitter): - event_emitter.register( - 'before-endpoint-resolution.s3-control', - self.handle_arn, - ) - - def _handle_name_param(self, params, model, context): - # CreateAccessPoint is a special case that does not expand Name - if model.name == 'CreateAccessPoint': - return - arn_details = self._get_arn_details_from_param(params, 'Name') - if arn_details is None: - return - self._raise_for_fips_pseudo_region(arn_details) - self._raise_for_accelerate_endpoint(context) - if self._is_outpost_accesspoint(arn_details): - self._store_outpost_accesspoint(params, context, arn_details) - else: - error_msg = 'The Name parameter does not support the provided ARN' - raise UnsupportedS3ControlArnError( - arn=arn_details['original'], - msg=error_msg, - ) - - def _store_outpost_accesspoint(self, params, context, arn_details): - self._override_account_id_param(params, arn_details) - - def _handle_bucket_param(self, params, model, context): - arn_details = self._get_arn_details_from_param(params, 'Bucket') - if arn_details is None: - return - self._raise_for_fips_pseudo_region(arn_details) - self._raise_for_accelerate_endpoint(context) - if self._is_outpost_bucket(arn_details): - self._store_outpost_bucket(params, context, arn_details) - else: - error_msg = ( - 'The Bucket parameter does not support the provided ARN' - ) - raise UnsupportedS3ControlArnError( - arn=arn_details['original'], - msg=error_msg, - ) - - def _store_outpost_bucket(self, params, context, arn_details): - self._override_account_id_param(params, arn_details) - - def _raise_for_fips_pseudo_region(self, arn_details): - # FIPS pseudo region names cannot be used in ARNs - arn_region = arn_details['region'] - if arn_region.startswith('fips-') or arn_region.endswith('-fips'): - raise UnsupportedS3ControlArnError( - arn=arn_details['original'], - msg='Invalid ARN, FIPS region not allowed in ARN.', - ) - - def _raise_for_accelerate_endpoint(self, context): - s3_config = context['client_config'].s3 or {} - if s3_config.get('use_accelerate_endpoint'): - raise UnsupportedS3ControlConfigurationError( - msg='S3 control client does not support accelerate endpoints', - ) - - -class ContainerMetadataFetcher: - TIMEOUT_SECONDS = 2 - RETRY_ATTEMPTS = 3 - SLEEP_TIME = 1 - IP_ADDRESS = '169.254.170.2' - _ALLOWED_HOSTS = [ - IP_ADDRESS, - '169.254.170.23', - 'fd00:ec2::23', - 'localhost', - ] - - def __init__(self, session=None, sleep=time.sleep): - if session is None: - session = botocore.httpsession.URLLib3Session( - timeout=self.TIMEOUT_SECONDS - ) - self._session = 
session - self._sleep = sleep - - def retrieve_full_uri(self, full_url, headers=None): - """Retrieve JSON metadata from container metadata. - - :type full_url: str - :param full_url: The full URL of the metadata service. - This should include the scheme as well, e.g - "http://localhost:123/foo" - - """ - self._validate_allowed_url(full_url) - return self._retrieve_credentials(full_url, headers) - - def _validate_allowed_url(self, full_url): - parsed = botocore.compat.urlparse(full_url) - - if parsed.scheme == 'https': - return - if self._is_loopback_address(parsed.hostname): - return - is_whitelisted_host = self._check_if_whitelisted_host(parsed.hostname) - if not is_whitelisted_host: - raise ValueError( - f"Unsupported host '{parsed.hostname}'. Can only retrieve metadata " - f"from a loopback address or one of these hosts: {', '.join(self._ALLOWED_HOSTS)}" - ) - - def _is_loopback_address(self, hostname): - try: - ip = ip_address(hostname) - return ip.is_loopback - except ValueError: - return False - - def _check_if_whitelisted_host(self, host): - if host in self._ALLOWED_HOSTS: - return True - return False - - def retrieve_uri(self, relative_uri): - """Retrieve JSON metadata from container metadata. - - :type relative_uri: str - :param relative_uri: A relative URI, e.g "/foo/bar?id=123" - - :return: The parsed JSON response. - - """ - full_url = self.full_url(relative_uri) - return self._retrieve_credentials(full_url) - - def _retrieve_credentials(self, full_url, extra_headers=None): - headers = {'Accept': 'application/json'} - if extra_headers is not None: - headers.update(extra_headers) - attempts = 0 - while True: - try: - return self._get_response( - full_url, headers, self.TIMEOUT_SECONDS - ) - except MetadataRetrievalError as e: - logger.debug( - "Received error when attempting to retrieve " - "container metadata: %s", - e, - exc_info=True, - ) - self._sleep(self.SLEEP_TIME) - attempts += 1 - if attempts >= self.RETRY_ATTEMPTS: - raise - - def _get_response(self, full_url, headers, timeout): - try: - AWSRequest = botocore.awsrequest.AWSRequest - request = AWSRequest(method='GET', url=full_url, headers=headers) - response = self._session.send(request.prepare()) - response_text = response.content.decode('utf-8') - if response.status_code != 200: - raise MetadataRetrievalError( - error_msg=( - f"Received non 200 response {response.status_code} " - f"from container metadata: {response_text}" - ) - ) - try: - return json.loads(response_text) - except ValueError: - error_msg = "Unable to parse JSON returned from container metadata services" - logger.debug('%s:%s', error_msg, response_text) - raise MetadataRetrievalError(error_msg=error_msg) - except RETRYABLE_HTTP_ERRORS as e: - error_msg = ( - "Received error when attempting to retrieve " - f"container metadata: {e}" - ) - raise MetadataRetrievalError(error_msg=error_msg) - - def full_url(self, relative_uri): - return f'http://{self.IP_ADDRESS}{relative_uri}' - - -def get_environ_proxies(url): - if should_bypass_proxies(url): - return {} - else: - return getproxies() - - -def should_bypass_proxies(url): - """ - Returns whether we should bypass proxies or not. - """ - # NOTE: requests allowed for ip/cidr entries in no_proxy env that we don't - # support current as urllib only checks DNS suffix - # If the system proxy settings indicate that this URL should be bypassed, - # don't proxy. - # The proxy_bypass function is incredibly buggy on OS X in early versions - # of Python 2.6, so allow this call to fail. 
Only catch the specific - # exceptions we've seen, though: this call failing in other ways can reveal - # legitimate problems. - try: - if proxy_bypass(urlparse(url).netloc): - return True - except (TypeError, socket.gaierror): - pass - - return False - - -def determine_content_length(body): - # No body, content length of 0 - if not body: - return 0 - - # Try asking the body for it's length - try: - return len(body) - except (AttributeError, TypeError): - pass - - # Try getting the length from a seekable stream - if hasattr(body, 'seek') and hasattr(body, 'tell'): - try: - orig_pos = body.tell() - body.seek(0, 2) - end_file_pos = body.tell() - body.seek(orig_pos) - return end_file_pos - orig_pos - except io.UnsupportedOperation: - # in case when body is, for example, io.BufferedIOBase object - # it has "seek" method which throws "UnsupportedOperation" - # exception in such case we want to fall back to "chunked" - # encoding - pass - # Failed to determine the length - return None - - -def get_encoding_from_headers(headers, default='ISO-8859-1'): - """Returns encodings from given HTTP Header Dict. - - :param headers: dictionary to extract encoding from. - :param default: default encoding if the content-type is text - """ - - content_type = headers.get('content-type') - - if not content_type: - return None - - message = email.message.Message() - message['content-type'] = content_type - charset = message.get_param("charset") - - if charset is not None: - return charset - - if 'text' in content_type: - return default - - -def calculate_md5(body, **kwargs): - """This function has been deprecated, but is kept for backwards compatibility.""" - if isinstance(body, (bytes, bytearray)): - binary_md5 = _calculate_md5_from_bytes(body) - else: - binary_md5 = _calculate_md5_from_file(body) - return base64.b64encode(binary_md5).decode('ascii') - - -def _calculate_md5_from_bytes(body_bytes): - """This function has been deprecated, but is kept for backwards compatibility.""" - md5 = get_md5(body_bytes, usedforsecurity=False) - return md5.digest() - - -def _calculate_md5_from_file(fileobj): - """This function has been deprecated, but is kept for backwards compatibility.""" - start_position = fileobj.tell() - md5 = get_md5(usedforsecurity=False) - for chunk in iter(lambda: fileobj.read(1024 * 1024), b''): - md5.update(chunk) - fileobj.seek(start_position) - return md5.digest() - - -def _is_s3express_request(params): - endpoint_properties = params.get('context', {}).get( - 'endpoint_properties', {} - ) - return endpoint_properties.get('backend') == 'S3Express' - - -def has_checksum_header(params): - """ - Checks if a header starting with "x-amz-checksum-" is provided in a request. - - This function is considered private and subject to abrupt breaking changes or - removal without prior announcement. Please do not use it directly. - """ - headers = params['headers'] - - # If a header matching the x-amz-checksum-* pattern is present, we - # assume a checksum has already been provided by the user. 
- for header in headers: - if CHECKSUM_HEADER_PATTERN.match(header): - return True - - return False - - -def conditionally_calculate_checksum(params, **kwargs): - """This function has been deprecated, but is kept for backwards compatibility.""" - if not has_checksum_header(params): - conditionally_calculate_md5(params, **kwargs) - conditionally_enable_crc32(params, **kwargs) - - -def conditionally_enable_crc32(params, **kwargs): - """This function has been deprecated, but is kept for backwards compatibility.""" - checksum_context = params.get('context', {}).get('checksum', {}) - checksum_algorithm = checksum_context.get('request_algorithm') - if ( - _is_s3express_request(params) - and params['body'] is not None - and checksum_algorithm in (None, "conditional-md5") - ): - params['context']['checksum'] = { - 'request_algorithm': { - 'algorithm': 'crc32', - 'in': 'header', - 'name': 'x-amz-checksum-crc32', - } - } - - -def conditionally_calculate_md5(params, **kwargs): - """Only add a Content-MD5 if the system supports it. - - This function has been deprecated, but is kept for backwards compatibility. - """ - body = params['body'] - checksum_context = params.get('context', {}).get('checksum', {}) - checksum_algorithm = checksum_context.get('request_algorithm') - if checksum_algorithm and checksum_algorithm != 'conditional-md5': - # Skip for requests that will have a flexible checksum applied - return - - if has_checksum_header(params): - # Don't add a new header if one is already available. - return - - if _is_s3express_request(params): - # S3Express doesn't support MD5 - return - - if MD5_AVAILABLE and body is not None: - md5_digest = calculate_md5(body, **kwargs) - params['headers']['Content-MD5'] = md5_digest - - -class FileWebIdentityTokenLoader: - def __init__(self, web_identity_token_path, _open=open): - self._web_identity_token_path = web_identity_token_path - self._open = _open - - def __call__(self): - with self._open(self._web_identity_token_path) as token_file: - return token_file.read() - - -class SSOTokenLoader: - def __init__(self, cache=None): - if cache is None: - cache = {} - self._cache = cache - - def _generate_cache_key(self, start_url, session_name): - input_str = start_url - if session_name is not None: - input_str = session_name - return hashlib.sha1(input_str.encode('utf-8')).hexdigest() - - def save_token(self, start_url, token, session_name=None): - cache_key = self._generate_cache_key(start_url, session_name) - self._cache[cache_key] = token - - def __call__(self, start_url, session_name=None): - cache_key = self._generate_cache_key(start_url, session_name) - logger.debug('Checking for cached token at: %s', cache_key) - if cache_key not in self._cache: - name = start_url - if session_name is not None: - name = session_name - error_msg = f'Token for {name} does not exist' - raise SSOTokenLoadError(error_msg=error_msg) - - token = self._cache[cache_key] - if 'accessToken' not in token or 'expiresAt' not in token: - error_msg = f'Token for {start_url} is invalid' - raise SSOTokenLoadError(error_msg=error_msg) - return token - - -class EventbridgeSignerSetter: - _DEFAULT_PARTITION = 'aws' - _DEFAULT_DNS_SUFFIX = 'amazonaws.com' - - def __init__(self, endpoint_resolver, region=None, endpoint_url=None): - self._endpoint_resolver = endpoint_resolver - self._region = region - self._endpoint_url = endpoint_url - - def register(self, event_emitter): - event_emitter.register( - 'before-parameter-build.events.PutEvents', - self.check_for_global_endpoint, - ) - 
event_emitter.register( - 'before-call.events.PutEvents', self.set_endpoint_url - ) - - def set_endpoint_url(self, params, context, **kwargs): - if 'eventbridge_endpoint' in context: - endpoint = context['eventbridge_endpoint'] - logger.debug( - "Rewriting URL from %s to %s", params['url'], endpoint - ) - params['url'] = endpoint - - def check_for_global_endpoint(self, params, context, **kwargs): - endpoint = params.get('EndpointId') - if endpoint is None: - return - - if len(endpoint) == 0: - raise InvalidEndpointConfigurationError( - msg='EndpointId must not be a zero length string' - ) - - if not HAS_CRT: - raise MissingDependencyException( - msg="Using EndpointId requires an additional " - "dependency. You will need to pip install " - "botocore[crt] before proceeding." - ) - - config = context.get('client_config') - endpoint_variant_tags = None - if config is not None: - if config.use_fips_endpoint: - raise InvalidEndpointConfigurationError( - msg="FIPS is not supported with EventBridge " - "multi-region endpoints." - ) - if config.use_dualstack_endpoint: - endpoint_variant_tags = ['dualstack'] - - if self._endpoint_url is None: - # Validate endpoint is a valid hostname component - parts = urlparse(f'https://{endpoint}') - if parts.hostname != endpoint: - raise InvalidEndpointConfigurationError( - msg='EndpointId is not a valid hostname component.' - ) - resolved_endpoint = self._get_global_endpoint( - endpoint, endpoint_variant_tags=endpoint_variant_tags - ) - else: - resolved_endpoint = self._endpoint_url - - context['eventbridge_endpoint'] = resolved_endpoint - context['auth_type'] = 'v4a' - - def _get_global_endpoint(self, endpoint, endpoint_variant_tags=None): - resolver = self._endpoint_resolver - - partition = resolver.get_partition_for_region(self._region) - if partition is None: - partition = self._DEFAULT_PARTITION - dns_suffix = resolver.get_partition_dns_suffix( - partition, endpoint_variant_tags=endpoint_variant_tags - ) - if dns_suffix is None: - dns_suffix = self._DEFAULT_DNS_SUFFIX - - return f"https://{endpoint}.endpoint.events.{dns_suffix}/" - - -def is_s3_accelerate_url(url): - """Does the URL match the S3 Accelerate endpoint scheme? - - Virtual host naming style with bucket names in the netloc part of the URL - are not allowed by this function. - """ - if url is None: - return False - - # Accelerate is only valid for Amazon endpoints. - url_parts = urlsplit(url) - if not url_parts.netloc.endswith( - 'amazonaws.com' - ) or url_parts.scheme not in ['https', 'http']: - return False - - # The first part of the URL must be s3-accelerate. - parts = url_parts.netloc.split('.') - if parts[0] != 's3-accelerate': - return False - - # Url parts between 's3-accelerate' and 'amazonaws.com' which - # represent different url features. - feature_parts = parts[1:-2] - - # There should be no duplicate URL parts. - if len(feature_parts) != len(set(feature_parts)): - return False - - # Remaining parts must all be in the whitelist. - return all(p in S3_ACCELERATE_WHITELIST for p in feature_parts) - - -class JSONFileCache: - """JSON file cache. - This provides a dict like interface that stores JSON serializable - objects. - The objects are serialized to JSON and stored in a file. These - values can be retrieved at a later time. 
- """ - - CACHE_DIR = os.path.expanduser(os.path.join('~', '.aws', 'boto', 'cache')) - - def __init__(self, working_dir=CACHE_DIR, dumps_func=None): - self._working_dir = working_dir - if dumps_func is None: - dumps_func = self._default_dumps - self._dumps = dumps_func - - def _default_dumps(self, obj): - return json.dumps(obj, default=self._serialize_if_needed) - - def __contains__(self, cache_key): - actual_key = self._convert_cache_key(cache_key) - return os.path.isfile(actual_key) - - def __getitem__(self, cache_key): - """Retrieve value from a cache key.""" - actual_key = self._convert_cache_key(cache_key) - try: - with open(actual_key) as f: - return json.load(f) - except (OSError, ValueError): - raise KeyError(cache_key) - - def __delitem__(self, cache_key): - actual_key = self._convert_cache_key(cache_key) - try: - key_path = Path(actual_key) - key_path.unlink() - except FileNotFoundError: - raise KeyError(cache_key) - - def __setitem__(self, cache_key, value): - full_key = self._convert_cache_key(cache_key) - try: - file_content = self._dumps(value) - except (TypeError, ValueError): - raise ValueError( - f"Value cannot be cached, must be JSON serializable: {value}" - ) - if not os.path.isdir(self._working_dir): - os.makedirs(self._working_dir, exist_ok=True) - - temp_fd, temp_path = tempfile.mkstemp( - dir=self._working_dir, suffix='.tmp' - ) - with os.fdopen(temp_fd, 'w') as f: - f.write(file_content) - f.flush() - os.fsync(f.fileno()) - - os.replace(temp_path, full_key) - - def _convert_cache_key(self, cache_key): - full_path = os.path.join(self._working_dir, cache_key + '.json') - return full_path - - def _serialize_if_needed(self, value, iso=False): - if isinstance(value, _DatetimeClass): - if iso: - return value.isoformat() - return value.strftime('%Y-%m-%dT%H:%M:%S%Z') - return value - - -def generate_login_cache_key(sign_in_session_name): - return hashlib.sha256(sign_in_session_name.encode('utf-8')).hexdigest() - - -def is_s3express_bucket(bucket): - if bucket is None: - return False - return bucket.endswith('--x-s3') - - -def get_token_from_environment(signing_name, environ=None): - if not isinstance(signing_name, str) or not signing_name.strip(): - return None - - if environ is None: - environ = os.environ - env_var = _get_bearer_env_var_name(signing_name) - return environ.get(env_var) - - -def _get_bearer_env_var_name(signing_name): - bearer_name = signing_name.replace('-', '_').replace(' ', '_').upper() - return f"AWS_BEARER_TOKEN_{bearer_name}" - - -# This parameter is not part of the public interface and is subject to abrupt -# breaking changes or removal without prior announcement. -# Mapping of services that have been renamed for backwards compatibility reasons. -# Keys are the previous name that should be allowed, values are the documented -# and preferred client name. -SERVICE_NAME_ALIASES = {'runtime.sagemaker': 'sagemaker-runtime'} - - -# This parameter is not part of the public interface and is subject to abrupt -# breaking changes or removal without prior announcement. -# Mapping to determine the service ID for services that do not use it as the -# model data directory name. The keys are the data directory name and the -# values are the transformed service IDs (lower case and hyphenated). -CLIENT_NAME_TO_HYPHENIZED_SERVICE_ID_OVERRIDES = { - # Actual service name we use -> Allowed computed service name. 
- 'apigateway': 'api-gateway', - 'application-autoscaling': 'application-auto-scaling', - 'appmesh': 'app-mesh', - 'autoscaling': 'auto-scaling', - 'autoscaling-plans': 'auto-scaling-plans', - 'ce': 'cost-explorer', - 'cloudhsmv2': 'cloudhsm-v2', - 'cloudsearchdomain': 'cloudsearch-domain', - 'cognito-idp': 'cognito-identity-provider', - 'config': 'config-service', - 'cur': 'cost-and-usage-report-service', - 'datapipeline': 'data-pipeline', - 'directconnect': 'direct-connect', - 'devicefarm': 'device-farm', - 'discovery': 'application-discovery-service', - 'dms': 'database-migration-service', - 'ds': 'directory-service', - 'ds-data': 'directory-service-data', - 'dynamodbstreams': 'dynamodb-streams', - 'elasticbeanstalk': 'elastic-beanstalk', - 'elb': 'elastic-load-balancing', - 'elbv2': 'elastic-load-balancing-v2', - 'es': 'elasticsearch-service', - 'events': 'eventbridge', - 'globalaccelerator': 'global-accelerator', - 'iot-data': 'iot-data-plane', - 'iot-jobs-data': 'iot-jobs-data-plane', - 'iotevents-data': 'iot-events-data', - 'iotevents': 'iot-events', - 'iotwireless': 'iot-wireless', - 'kinesisanalytics': 'kinesis-analytics', - 'kinesisanalyticsv2': 'kinesis-analytics-v2', - 'kinesisvideo': 'kinesis-video', - 'lex-models': 'lex-model-building-service', - 'lexv2-models': 'lex-models-v2', - 'lex-runtime': 'lex-runtime-service', - 'lexv2-runtime': 'lex-runtime-v2', - 'logs': 'cloudwatch-logs', - 'machinelearning': 'machine-learning', - 'marketplacecommerceanalytics': 'marketplace-commerce-analytics', - 'marketplace-entitlement': 'marketplace-entitlement-service', - 'meteringmarketplace': 'marketplace-metering', - 'mgh': 'migration-hub', - 'sms-voice': 'pinpoint-sms-voice', - 'resourcegroupstaggingapi': 'resource-groups-tagging-api', - 'route53': 'route-53', - 'route53domains': 'route-53-domains', - 's3control': 's3-control', - 'sdb': 'simpledb', - 'secretsmanager': 'secrets-manager', - 'serverlessrepo': 'serverlessapplicationrepository', - 'servicecatalog': 'service-catalog', - 'servicecatalog-appregistry': 'service-catalog-appregistry', - 'stepfunctions': 'sfn', - 'storagegateway': 'storage-gateway', -} - - -def get_login_token_cache_directory(): - """Returns which directory contains the login_session token files""" - if 'AWS_LOGIN_CACHE_DIRECTORY' in os.environ: - path = os.path.expandvars(os.environ['AWS_LOGIN_CACHE_DIRECTORY']) - path = os.path.expanduser(path) - return path - else: - return os.path.expanduser(os.path.join('~', '.aws', 'login', 'cache')) - - -class LoginTokenLoader: - """Loads and saves login access tokens to disk""" - - def __init__(self, cache=None): - if cache is None: - cache = {} - self._cache = cache - - def save_token(self, session_name, token): - cache_key = generate_login_cache_key(session_name) - self._cache[cache_key] = token - - def load_token(self, session_name): - cache_key = generate_login_cache_key(session_name) - if cache_key not in self._cache: - return None - return self._cache[cache_key] diff --git a/venv/Lib/site-packages/botocore/validate.py b/venv/Lib/site-packages/botocore/validate.py deleted file mode 100644 index ba9f62f..0000000 --- a/venv/Lib/site-packages/botocore/validate.py +++ /dev/null @@ -1,384 +0,0 @@ -"""User input parameter validation. - -This module handles user input parameter validation -against a provided input model. - -Note that the objects in this module do *not* mutate any -arguments. No type version happens here. It is up to another -layer to properly convert arguments to any required types. 
- -Validation Errors ------------------ - - -""" - -import decimal -import json -from datetime import datetime - -from botocore.exceptions import ParamValidationError -from botocore.utils import is_json_value_header, parse_to_aware_datetime - - -def validate_parameters(params, shape): - """Validates input parameters against a schema. - - This is a convenience function that validates parameters against a schema. - You can also instantiate and use the ParamValidator class directly if you - want more control. - - If there are any validation errors then a ParamValidationError - will be raised. If there are no validation errors than no exception - is raised and a value of None is returned. - - :param params: The user provided input parameters. - - :type shape: botocore.model.Shape - :param shape: The schema which the input parameters should - adhere to. - - :raise: ParamValidationError - - """ - validator = ParamValidator() - report = validator.validate(params, shape) - if report.has_errors(): - raise ParamValidationError(report=report.generate_report()) - - -def type_check(valid_types): - def _create_type_check_guard(func): - def _on_passes_type_check(self, param, shape, errors, name): - if _type_check(param, errors, name): - return func(self, param, shape, errors, name) - - def _type_check(param, errors, name): - if not isinstance(param, valid_types): - valid_type_names = [str(t) for t in valid_types] - errors.report( - name, - 'invalid type', - param=param, - valid_types=valid_type_names, - ) - return False - return True - - return _on_passes_type_check - - return _create_type_check_guard - - -def range_check(name, value, shape, error_type, errors): - failed = False - min_allowed = float('-inf') - if 'min' in shape.metadata: - min_allowed = shape.metadata['min'] - if value < min_allowed: - failed = True - elif hasattr(shape, 'serialization'): - # Members that can be bound to the host have an implicit min of 1 - if shape.serialization.get('hostLabel'): - min_allowed = 1 - if value < min_allowed: - failed = True - if failed: - errors.report(name, error_type, param=value, min_allowed=min_allowed) - - -class ValidationErrors: - def __init__(self): - self._errors = [] - - def has_errors(self): - if self._errors: - return True - return False - - def generate_report(self): - error_messages = [] - for error in self._errors: - error_messages.append(self._format_error(error)) - return '\n'.join(error_messages) - - def _format_error(self, error): - error_type, name, additional = error - name = self._get_name(name) - if error_type == 'missing required field': - return ( - f"Missing required parameter in {name}: " - f"\"{additional['required_name']}\"" - ) - elif error_type == 'unknown field': - unknown_param = additional['unknown_param'] - valid_names = ', '.join(additional['valid_names']) - return ( - f'Unknown parameter in {name}: "{unknown_param}", ' - f'must be one of: {valid_names}' - ) - elif error_type == 'invalid type': - param = additional['param'] - param_type = type(param) - valid_types = ', '.join(additional['valid_types']) - return ( - f'Invalid type for parameter {name}, value: {param}, ' - f'type: {param_type}, valid types: {valid_types}' - ) - elif error_type == 'invalid range': - param = additional['param'] - min_allowed = additional['min_allowed'] - return ( - f'Invalid value for parameter {name}, value: {param}, ' - f'valid min value: {min_allowed}' - ) - elif error_type == 'invalid length': - param = additional['param'] - min_allowed = additional['min_allowed'] - return ( - f'Invalid 
length for parameter {name}, value: {param}, ' - f'valid min length: {min_allowed}' - ) - elif error_type == 'unable to encode to json': - return 'Invalid parameter {} must be json serializable: {}'.format( - name, - additional['type_error'], - ) - elif error_type == 'invalid type for document': - param = additional['param'] - param_type = type(param) - valid_types = ', '.join(additional['valid_types']) - return ( - f'Invalid type for document parameter {name}, value: {param}, ' - f'type: {param_type}, valid types: {valid_types}' - ) - elif error_type == 'more than one input': - members = ', '.join(additional['members']) - return ( - f'Invalid number of parameters set for tagged union structure ' - f'{name}. Can only set one of the following keys: ' - f'{members}.' - ) - elif error_type == 'empty input': - members = ', '.join(additional['members']) - return ( - f'Must set one of the following keys for tagged union' - f'structure {name}: {members}.' - ) - - def _get_name(self, name): - if not name: - return 'input' - elif name.startswith('.'): - return name[1:] - else: - return name - - def report(self, name, reason, **kwargs): - self._errors.append((reason, name, kwargs)) - - -class ParamValidator: - """Validates parameters against a shape model.""" - - def validate(self, params, shape): - """Validate parameters against a shape model. - - This method will validate the parameters against a provided shape model. - All errors will be collected before returning to the caller. This means - that this method will not stop at the first error, it will return all - possible errors. - - :param params: User provided dict of parameters - :param shape: A shape model describing the expected input. - - :return: A list of errors. - - """ - errors = ValidationErrors() - self._validate(params, shape, errors, name='') - return errors - - def _check_special_validation_cases(self, shape): - if is_json_value_header(shape): - return self._validate_jsonvalue_string - if shape.type_name == 'structure' and shape.is_document_type: - return self._validate_document - - def _validate(self, params, shape, errors, name): - special_validator = self._check_special_validation_cases(shape) - if special_validator: - special_validator(params, shape, errors, name) - else: - getattr(self, f'_validate_{shape.type_name}')( - params, shape, errors, name - ) - - def _validate_jsonvalue_string(self, params, shape, errors, name): - # Check to see if a value marked as a jsonvalue can be dumped to - # a json string. 
- try: - json.dumps(params) - except (ValueError, TypeError) as e: - errors.report(name, 'unable to encode to json', type_error=e) - - def _validate_document(self, params, shape, errors, name): - if params is None: - return - - if isinstance(params, dict): - for key in params: - self._validate_document(params[key], shape, errors, key) - elif isinstance(params, list): - for index, entity in enumerate(params): - self._validate_document( - entity, shape, errors, f'{name}[{index}]' - ) - elif not isinstance(params, ((str,), int, bool, float)): - valid_types = (str, int, bool, float, list, dict) - valid_type_names = [str(t) for t in valid_types] - errors.report( - name, - 'invalid type for document', - param=params, - param_type=type(params), - valid_types=valid_type_names, - ) - - @type_check(valid_types=(dict,)) - def _validate_structure(self, params, shape, errors, name): - if shape.is_tagged_union: - if len(params) == 0: - errors.report(name, 'empty input', members=shape.members) - elif len(params) > 1: - errors.report( - name, 'more than one input', members=shape.members - ) - - # Validate required fields. - for required_member in shape.metadata.get('required', []): - if required_member not in params: - errors.report( - name, - 'missing required field', - required_name=required_member, - user_params=params, - ) - members = shape.members - known_params = [] - # Validate known params. - for param in params: - if param not in members: - errors.report( - name, - 'unknown field', - unknown_param=param, - valid_names=list(members), - ) - else: - known_params.append(param) - # Validate structure members. - for param in known_params: - self._validate( - params[param], - shape.members[param], - errors, - f'{name}.{param}', - ) - - @type_check(valid_types=(str,)) - def _validate_string(self, param, shape, errors, name): - # Validate range. For a string, the min/max constraints - # are of the string length. - # Looks like: - # "WorkflowId":{ - # "type":"string", - # "min":1, - # "max":256 - # } - range_check(name, len(param), shape, 'invalid length', errors) - - @type_check(valid_types=(list, tuple)) - def _validate_list(self, param, shape, errors, name): - member_shape = shape.member - range_check(name, len(param), shape, 'invalid length', errors) - for i, item in enumerate(param): - self._validate(item, member_shape, errors, f'{name}[{i}]') - - @type_check(valid_types=(dict,)) - def _validate_map(self, param, shape, errors, name): - key_shape = shape.key - value_shape = shape.value - for key, value in param.items(): - self._validate(key, key_shape, errors, f"{name} (key: {key})") - self._validate(value, value_shape, errors, f'{name}.{key}') - - @type_check(valid_types=(int,)) - def _validate_integer(self, param, shape, errors, name): - range_check(name, param, shape, 'invalid range', errors) - - def _validate_blob(self, param, shape, errors, name): - if isinstance(param, (bytes, bytearray, str)): - return - elif hasattr(param, 'read'): - # File like objects are also allowed for blob types. 
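The validate_parameters convenience function removed here is still importable from the installed botocore package; a small sketch of how a validation report surfaces, assuming botocore is available and using the S3 PutObject input shape purely as an example:

import botocore.session
from botocore.exceptions import ParamValidationError
from botocore.validate import validate_parameters

session = botocore.session.get_session()
service_model = session.get_service_model('s3')
input_shape = service_model.operation_model('PutObject').input_shape

try:
    # Key has the wrong type and Bucket is missing, so both errors are reported.
    validate_parameters({'Key': 123}, input_shape)
except ParamValidationError as exc:
    print(exc)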
- return - else: - errors.report( - name, - 'invalid type', - param=param, - valid_types=[str(bytes), str(bytearray), 'file-like object'], - ) - - @type_check(valid_types=(bool,)) - def _validate_boolean(self, param, shape, errors, name): - pass - - @type_check(valid_types=(float, decimal.Decimal) + (int,)) - def _validate_double(self, param, shape, errors, name): - range_check(name, param, shape, 'invalid range', errors) - - _validate_float = _validate_double - - @type_check(valid_types=(int,)) - def _validate_long(self, param, shape, errors, name): - range_check(name, param, shape, 'invalid range', errors) - - def _validate_timestamp(self, param, shape, errors, name): - # We don't use @type_check because datetimes are a bit - # more flexible. You can either provide a datetime - # object, or a string that parses to a datetime. - is_valid_type = self._type_check_datetime(param) - if not is_valid_type: - valid_type_names = [str(datetime), 'timestamp-string'] - errors.report( - name, 'invalid type', param=param, valid_types=valid_type_names - ) - - def _type_check_datetime(self, value): - try: - parse_to_aware_datetime(value) - return True - except (TypeError, ValueError, AttributeError): - # Yes, dateutil can sometimes raise an AttributeError - # when parsing timestamps. - return False - - -class ParamValidationDecorator: - def __init__(self, param_validator, serializer): - self._param_validator = param_validator - self._serializer = serializer - - def serialize_to_request(self, parameters, operation_model): - input_shape = operation_model.input_shape - if input_shape is not None: - report = self._param_validator.validate( - parameters, operation_model.input_shape - ) - if report.has_errors(): - raise ParamValidationError(report=report.generate_report()) - return self._serializer.serialize_to_request( - parameters, operation_model - ) diff --git a/venv/Lib/site-packages/botocore/vendored/__init__.py b/venv/Lib/site-packages/botocore/vendored/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/botocore/vendored/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 3ee2a24..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/vendored/__pycache__/six.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/__pycache__/six.cpython-312.pyc deleted file mode 100644 index 297a1e1..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/__pycache__/six.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/vendored/requests/__init__.py b/venv/Lib/site-packages/botocore/vendored/requests/__init__.py deleted file mode 100644 index 0ada6e0..0000000 --- a/venv/Lib/site-packages/botocore/vendored/requests/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- - -# __ -# /__) _ _ _ _ _/ _ -# / ( (- (/ (/ (- _) / _) -# / -from .exceptions import ( - RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError -) diff --git a/venv/Lib/site-packages/botocore/vendored/requests/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/requests/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 143517b..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/requests/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/botocore/vendored/requests/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/requests/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index fb8d22c..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/requests/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/vendored/requests/exceptions.py b/venv/Lib/site-packages/botocore/vendored/requests/exceptions.py deleted file mode 100644 index 89135a8..0000000 --- a/venv/Lib/site-packages/botocore/vendored/requests/exceptions.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.exceptions -~~~~~~~~~~~~~~~~~~~ - -This module contains the set of Requests' exceptions. - -""" -from .packages.urllib3.exceptions import HTTPError as BaseHTTPError - - -class RequestException(IOError): - """There was an ambiguous exception that occurred while handling your - request.""" - - def __init__(self, *args, **kwargs): - """ - Initialize RequestException with `request` and `response` objects. - """ - response = kwargs.pop('response', None) - self.response = response - self.request = kwargs.pop('request', None) - if (response is not None and not self.request and - hasattr(response, 'request')): - self.request = self.response.request - super(RequestException, self).__init__(*args, **kwargs) - - -class HTTPError(RequestException): - """An HTTP error occurred.""" - - -class ConnectionError(RequestException): - """A Connection error occurred.""" - - -class ProxyError(ConnectionError): - """A proxy error occurred.""" - - -class SSLError(ConnectionError): - """An SSL error occurred.""" - - -class Timeout(RequestException): - """The request timed out. - - Catching this error will catch both - :exc:`~requests.exceptions.ConnectTimeout` and - :exc:`~requests.exceptions.ReadTimeout` errors. - """ - - -class ConnectTimeout(ConnectionError, Timeout): - """The request timed out while trying to connect to the remote server. - - Requests that produced this error are safe to retry. - """ - - -class ReadTimeout(Timeout): - """The server did not send any data in the allotted amount of time.""" - - -class URLRequired(RequestException): - """A valid URL is required to make a request.""" - - -class TooManyRedirects(RequestException): - """Too many redirects.""" - - -class MissingSchema(RequestException, ValueError): - """The URL schema (e.g. http or https) is missing.""" - - -class InvalidSchema(RequestException, ValueError): - """See defaults.py for valid schemas.""" - - -class InvalidURL(RequestException, ValueError): - """ The URL provided was somehow invalid. """ - - -class ChunkedEncodingError(RequestException): - """The server declared chunked encoding but sent an invalid chunk.""" - - -class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content""" - - -class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed""" - - -class RetryError(RequestException): - """Custom retries logic failed""" diff --git a/venv/Lib/site-packages/botocore/vendored/requests/packages/__init__.py b/venv/Lib/site-packages/botocore/vendored/requests/packages/__init__.py deleted file mode 100644 index d62c4b7..0000000 --- a/venv/Lib/site-packages/botocore/vendored/requests/packages/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from __future__ import absolute_import - -from . 
import urllib3 diff --git a/venv/Lib/site-packages/botocore/vendored/requests/packages/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/requests/packages/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 91a5865..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/requests/packages/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__init__.py b/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__init__.py deleted file mode 100644 index 8869701..0000000 --- a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -urllib3 - Thread-safe connection pooling and re-using. -""" - -__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' -__license__ = 'MIT' -__version__ = '' - - -from . import exceptions diff --git a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index def83e4..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index effd9fc..0000000 Binary files a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/exceptions.py b/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/exceptions.py deleted file mode 100644 index 31bda1c..0000000 --- a/venv/Lib/site-packages/botocore/vendored/requests/packages/urllib3/exceptions.py +++ /dev/null @@ -1,169 +0,0 @@ - -## Base Exceptions - -class HTTPError(Exception): - "Base exception used by this module." - pass - -class HTTPWarning(Warning): - "Base warning used by this module." - pass - - - -class PoolError(HTTPError): - "Base exception for errors caused within a pool." - def __init__(self, pool, message): - self.pool = pool - HTTPError.__init__(self, "%s: %s" % (pool, message)) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, None) - - -class RequestError(PoolError): - "Base exception for PoolErrors that have associated URLs." - def __init__(self, pool, url, message): - self.url = url - PoolError.__init__(self, pool, message) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, self.url, None) - - -class SSLError(HTTPError): - "Raised when SSL certificate fails in an HTTPS connection." - pass - - -class ProxyError(HTTPError): - "Raised when the connection to a proxy fails." - pass - - -class DecodeError(HTTPError): - "Raised when automatic decoding based on Content-Type fails." - pass - - -class ProtocolError(HTTPError): - "Raised when something unexpected happens mid-request/response." - pass - - -#: Renamed to ProtocolError but aliased for backwards compatibility. -ConnectionError = ProtocolError - - -## Leaf Exceptions - -class MaxRetryError(RequestError): - """Raised when the maximum number of retries is exceeded. 
- - :param pool: The connection pool - :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` - :param string url: The requested Url - :param exceptions.Exception reason: The underlying error - - """ - - def __init__(self, pool, url, reason=None): - self.reason = reason - - message = "Max retries exceeded with url: %s (Caused by %r)" % ( - url, reason) - - RequestError.__init__(self, pool, url, message) - - -class HostChangedError(RequestError): - "Raised when an existing pool gets a request for a foreign host." - - def __init__(self, pool, url, retries=3): - message = "Tried to open a foreign host with url: %s" % url - RequestError.__init__(self, pool, url, message) - self.retries = retries - - -class TimeoutStateError(HTTPError): - """ Raised when passing an invalid state to a timeout """ - pass - - -class TimeoutError(HTTPError): - """ Raised when a socket timeout error occurs. - - Catching this error will catch both :exc:`ReadTimeoutErrors - ` and :exc:`ConnectTimeoutErrors `. - """ - pass - - -class ReadTimeoutError(TimeoutError, RequestError): - "Raised when a socket timeout occurs while receiving data from a server" - pass - - -# This timeout error does not have a URL attached and needs to inherit from the -# base HTTPError -class ConnectTimeoutError(TimeoutError): - "Raised when a socket timeout occurs while connecting to a server" - pass - - -class EmptyPoolError(PoolError): - "Raised when a pool runs out of connections and no more are allowed." - pass - - -class ClosedPoolError(PoolError): - "Raised when a request enters a pool after the pool has been closed." - pass - - -class LocationValueError(ValueError, HTTPError): - "Raised when there is something wrong with a given URL input." - pass - - -class LocationParseError(LocationValueError): - "Raised when get_host or similar fails to parse the URL input." - - def __init__(self, location): - message = "Failed to parse: %s" % location - HTTPError.__init__(self, message) - - self.location = location - - -class ResponseError(HTTPError): - "Used as a container for an error reason supplied in a MaxRetryError." - GENERIC_ERROR = 'too many error responses' - SPECIFIC_ERROR = 'too many {status_code} error responses' - - -class SecurityWarning(HTTPWarning): - "Warned when perfoming security reducing actions" - pass - - -class InsecureRequestWarning(SecurityWarning): - "Warned when making an unverified HTTPS request." - pass - - -class SystemTimeWarning(SecurityWarning): - "Warned when system time is suspected to be wrong" - pass - - -class InsecurePlatformWarning(SecurityWarning): - "Warned when certain SSL configuration is not available on a platform." - pass - - -class ResponseNotChunked(ProtocolError, ValueError): - "Response needs to be chunked in order to read it as chunks." 
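The hierarchy above mirrors the standalone urllib3 package, where MaxRetryError wraps the underlying failure in its .reason attribute. A minimal sketch against the real urllib3 distribution (the host is deliberately unresolvable, so the request fails fast):

import urllib3
from urllib3.exceptions import MaxRetryError

http = urllib3.PoolManager()
try:
    http.request('GET', 'http://example.invalid/', retries=1, timeout=1.0)
except MaxRetryError as exc:
    # exc.reason carries the underlying error, e.g. a name-resolution failure.
    print('gave up after retries:', exc.reason)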
- pass diff --git a/venv/Lib/site-packages/botocore/vendored/six.py b/venv/Lib/site-packages/botocore/vendored/six.py deleted file mode 100644 index 4e15675..0000000 --- a/venv/Lib/site-packages/botocore/vendored/six.py +++ /dev/null @@ -1,998 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.16.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - -if PY34: - from importlib.util import spec_from_loader -else: - spec_from_loader = None - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def find_spec(self, fullname, path, target=None): - if fullname in self.known_modules: - return spec_from_loader(fullname, self) - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. 
- - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - - def create_module(self, spec): - return self.load_module(spec.name) - - def exec_module(self, module): - pass - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - 
MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." + attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", 
"urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: 
- setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def 
get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" - _assertNotRegex = "assertNotRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -def assertNotRegex(self, *args, **kwargs): - return getattr(self, _assertNotRegex)(*args, **kwargs) - - -if 
PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] > (3,): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - # This does exactly the same what the :func:`py3:functools.update_wrapper` - # function does on Python versions after 3.2. It sets the ``__wrapped__`` - # attribute on ``wrapper`` object and it doesn't raise an error if any of - # the attributes mentioned in ``assigned`` and ``updated`` are missing on - # ``wrapped`` object. 
- def _update_wrapper(wrapper, wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - for attr in assigned: - try: - value = getattr(wrapped, attr) - except AttributeError: - continue - else: - setattr(wrapper, attr, value) - for attr in updated: - getattr(wrapper, attr).update(getattr(wrapped, attr, {})) - wrapper.__wrapped__ = wrapped - return wrapper - _update_wrapper.__doc__ = functools.update_wrapper.__doc__ - - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - return functools.partial(_update_wrapper, wrapped=wrapped, - assigned=assigned, updated=updated) - wraps.__doc__ = functools.wraps.__doc__ - -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). - resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, binary_type): - return s - if isinstance(s, text_type): - return s.encode(encoding, errors) - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - # Optimization: Fast return for the common case. - if type(s) is str: - return s - if PY2 and isinstance(s, text_type): - return s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - return s.decode(encoding, errors) - elif not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. 
- - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def python_2_unicode_compatible(klass): - """ - A class decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/venv/Lib/site-packages/botocore/waiter.py b/venv/Lib/site-packages/botocore/waiter.py deleted file mode 100644 index 86110dd..0000000 --- a/venv/Lib/site-packages/botocore/waiter.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. -import logging -import time -from functools import partial - -import jmespath - -from botocore.context import with_current_context -from botocore.docs.docstring import WaiterDocstring -from botocore.useragent import register_feature_id -from botocore.utils import get_service_module_name - -from . import xform_name -from .exceptions import ClientError, WaiterConfigError, WaiterError - -logger = logging.getLogger(__name__) - - -def create_waiter_with_client(waiter_name, waiter_model, client): - """ - - :type waiter_name: str - :param waiter_name: The name of the waiter. 
The name should match - the name (including the casing) of the key name in the waiter - model file (typically this is CamelCasing). - - :type waiter_model: botocore.waiter.WaiterModel - :param waiter_model: The model for the waiter configuration. - - :type client: botocore.client.BaseClient - :param client: The botocore client associated with the service. - - :rtype: botocore.waiter.Waiter - :return: The waiter object. - - """ - single_waiter_config = waiter_model.get_waiter(waiter_name) - operation_name = xform_name(single_waiter_config.operation) - operation_method = NormalizedOperationMethod( - getattr(client, operation_name) - ) - - # Create a new wait method that will serve as a proxy to the underlying - # Waiter.wait method. This is needed to attach a docstring to the - # method. - def wait(self, **kwargs): - Waiter.wait(self, **kwargs) - - wait.__doc__ = WaiterDocstring( - waiter_name=waiter_name, - event_emitter=client.meta.events, - service_model=client.meta.service_model, - service_waiter_model=waiter_model, - include_signature=False, - ) - - # Rename the waiter class based on the type of waiter. - waiter_class_name = str( - f'{get_service_module_name(client.meta.service_model)}.Waiter.{waiter_name}' - ) - - # Create the new waiter class - documented_waiter_cls = type(waiter_class_name, (Waiter,), {'wait': wait}) - - # Return an instance of the new waiter class. - return documented_waiter_cls( - waiter_name, single_waiter_config, operation_method - ) - - -def is_valid_waiter_error(response): - error = response.get('Error') - if isinstance(error, dict) and 'Code' in error: - return True - return False - - -class NormalizedOperationMethod: - def __init__(self, client_method): - self._client_method = client_method - - def __call__(self, **kwargs): - try: - return self._client_method(**kwargs) - except ClientError as e: - return e.response - - -class WaiterModel: - SUPPORTED_VERSION = 2 - - def __init__(self, waiter_config): - """ - - Note that the WaiterModel takes ownership of the waiter_config. - It may or may not mutate the waiter_config. If this is a concern, - it is best to make a copy of the waiter config before passing it to - the WaiterModel. - - :type waiter_config: dict - :param waiter_config: The loaded waiter config - from the *.waiters.json file. This can be - obtained from a botocore Loader object as well. - - """ - self._waiter_config = waiter_config['waiters'] - - # These are part of the public API. Changing these - # will result in having to update the consuming code, - # so don't change unless you really need to. - version = waiter_config.get('version', 'unknown') - self._verify_supported_version(version) - self.version = version - self.waiter_names = list(sorted(waiter_config['waiters'].keys())) - - def _verify_supported_version(self, version): - if version != self.SUPPORTED_VERSION: - raise WaiterConfigError( - error_msg=( - "Unsupported waiter version, supported version " - f"must be: {self.SUPPORTED_VERSION}, but version " - f"of waiter config is: {version}" - ) - ) - - def get_waiter(self, waiter_name): - try: - single_waiter_config = self._waiter_config[waiter_name] - except KeyError: - raise ValueError(f"Waiter does not exist: {waiter_name}") - return SingleWaiterConfig(single_waiter_config) - - -class SingleWaiterConfig: - """Represents the waiter configuration for a single waiter. - - A single waiter is considered the configuration for a single - value associated with a named waiter (i.e TableExists). 
- - """ - - def __init__(self, single_waiter_config): - self._config = single_waiter_config - - # These attributes are part of the public API. - self.description = single_waiter_config.get('description', '') - # Per the spec, these three fields are required. - self.operation = single_waiter_config['operation'] - self.delay = single_waiter_config['delay'] - self.max_attempts = single_waiter_config['maxAttempts'] - - @property - def acceptors(self): - acceptors = [] - for acceptor_config in self._config['acceptors']: - acceptor = AcceptorConfig(acceptor_config) - acceptors.append(acceptor) - return acceptors - - -class AcceptorConfig: - def __init__(self, config): - self.state = config['state'] - self.matcher = config['matcher'] - self.expected = config['expected'] - self.argument = config.get('argument') - self.matcher_func = self._create_matcher_func() - - @property - def explanation(self): - if self.matcher == 'path': - return f'For expression "{self.argument}" we matched expected path: "{self.expected}"' - elif self.matcher == 'pathAll': - return ( - f'For expression "{self.argument}" all members matched ' - f'expected path: "{self.expected}"' - ) - elif self.matcher == 'pathAny': - return ( - f'For expression "{self.argument}" we matched expected ' - f'path: "{self.expected}" at least once' - ) - elif self.matcher == 'status': - return f'Matched expected HTTP status code: {self.expected}' - elif self.matcher == 'error': - return f'Matched expected service error code: {self.expected}' - else: - return f'No explanation for unknown waiter type: "{self.matcher}"' - - def _create_matcher_func(self): - # An acceptor function is a callable that takes a single value. The - # parsed AWS response. Note that the parsed error response is also - # provided in the case of errors, so it's entirely possible to - # handle all the available matcher capabilities in the future. - # There's only three supported matchers, so for now, this is all - # contained to a single method. If this grows, we can expand this - # out to separate methods or even objects. - - if self.matcher == 'path': - return self._create_path_matcher() - elif self.matcher == 'pathAll': - return self._create_path_all_matcher() - elif self.matcher == 'pathAny': - return self._create_path_any_matcher() - elif self.matcher == 'status': - return self._create_status_matcher() - elif self.matcher == 'error': - return self._create_error_matcher() - else: - raise WaiterConfigError( - error_msg=f"Unknown acceptor: {self.matcher}" - ) - - def _create_path_matcher(self): - expression = jmespath.compile(self.argument) - expected = self.expected - - def acceptor_matches(response): - if is_valid_waiter_error(response): - return - return expression.search(response) == expected - - return acceptor_matches - - def _create_path_all_matcher(self): - expression = jmespath.compile(self.argument) - expected = self.expected - - def acceptor_matches(response): - if is_valid_waiter_error(response): - return - result = expression.search(response) - if not isinstance(result, list) or not result: - # pathAll matcher must result in a list. - # Also we require at least one element in the list, - # that is, an empty list should not result in this - # acceptor match. 
- return False - for element in result: - if element != expected: - return False - return True - - return acceptor_matches - - def _create_path_any_matcher(self): - expression = jmespath.compile(self.argument) - expected = self.expected - - def acceptor_matches(response): - if is_valid_waiter_error(response): - return - result = expression.search(response) - if not isinstance(result, list) or not result: - # pathAny matcher must result in a list. - # Also we require at least one element in the list, - # that is, an empty list should not result in this - # acceptor match. - return False - for element in result: - if element == expected: - return True - return False - - return acceptor_matches - - def _create_status_matcher(self): - expected = self.expected - - def acceptor_matches(response): - # We don't have any requirements on the expected incoming data - # other than it is a dict, so we don't assume there's - # a ResponseMetadata.HTTPStatusCode. - status_code = response.get('ResponseMetadata', {}).get( - 'HTTPStatusCode' - ) - return status_code == expected - - return acceptor_matches - - def _create_error_matcher(self): - expected = self.expected - - def acceptor_matches(response): - # When the client encounters an error, it will normally raise - # an exception. However, the waiter implementation will catch - # this exception, and instead send us the parsed error - # response. So response is still a dictionary, and in the case - # of an error response will contain the "Error" and - # "ResponseMetadata" key. - # When expected is True, accept any error code. - # When expected is False, check if any errors were encountered. - # Otherwise, check for a specific AWS error code. - if expected is True: - return "Error" in response and "Code" in response["Error"] - elif expected is False: - return "Error" not in response - else: - return response.get("Error", {}).get("Code", "") == expected - - return acceptor_matches - - -class Waiter: - def __init__(self, name, config, operation_method): - """ - - :type name: string - :param name: The name of the waiter - - :type config: botocore.waiter.SingleWaiterConfig - :param config: The configuration for the waiter. - - :type operation_method: callable - :param operation_method: A callable that accepts **kwargs - and returns a response. For example, this can be - a method from a botocore client. - - """ - self._operation_method = operation_method - # The two attributes are exposed to allow for introspection - # and documentation. - self.name = name - self.config = config - - @with_current_context(partial(register_feature_id, 'WAITER')) - def wait(self, **kwargs): - acceptors = list(self.config.acceptors) - current_state = 'waiting' - # pop the invocation specific config - config = kwargs.pop('WaiterConfig', {}) - sleep_amount = config.get('Delay', self.config.delay) - max_attempts = config.get('MaxAttempts', self.config.max_attempts) - last_matched_acceptor = None - num_attempts = 0 - - while True: - response = self._operation_method(**kwargs) - num_attempts += 1 - for acceptor in acceptors: - if acceptor.matcher_func(response): - last_matched_acceptor = acceptor - current_state = acceptor.state - break - else: - # If none of the acceptors matched, we should - # transition to the failure state if an error - # response was received. - if is_valid_waiter_error(response): - # Transition to a failure state, which we - # can just handle here by raising an exception. 
- raise WaiterError( - name=self.name, - reason='An error occurred ({}): {}'.format( - response['Error'].get('Code', 'Unknown'), - response['Error'].get('Message', 'Unknown'), - ), - last_response=response, - ) - if current_state == 'success': - logger.debug( - "Waiting complete, waiter matched the success state." - ) - return - if current_state == 'failure': - reason = f'Waiter encountered a terminal failure state: {acceptor.explanation}' - raise WaiterError( - name=self.name, - reason=reason, - last_response=response, - ) - if num_attempts >= max_attempts: - if last_matched_acceptor is None: - reason = 'Max attempts exceeded' - else: - reason = ( - f'Max attempts exceeded. Previously accepted state: ' - f'{acceptor.explanation}' - ) - raise WaiterError( - name=self.name, - reason=reason, - last_response=response, - ) - time.sleep(sleep_amount) diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA deleted file mode 100644 index d1bc526..0000000 --- a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA +++ /dev/null @@ -1,78 +0,0 @@ -Metadata-Version: 2.4 -Name: certifi -Version: 2026.1.4 -Summary: Python package for providing Mozilla's CA Bundle. -Home-page: https://github.com/certifi/python-certifi -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Project-URL: Source, https://github.com/certifi/python-certifi -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Requires-Python: >=3.7 -License-File: LICENSE -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: license -Dynamic: license-file -Dynamic: project-url -Dynamic: requires-python -Dynamic: summary - -Certifi: Python SSL Certificates -================================ - -Certifi provides Mozilla's carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python3.7/site-packages/certifi/cacert.pem - -Enjoy! - -.. 
_`Requests`: https://requests.readthedocs.io/en/master/ - -Addition/Removal of Certificates --------------------------------- - -Certifi does not support any addition/removal or other modification of the -CA trust store content. This project is intended to provide a reliable and -highly portable root of trust to python deployments. Look to upstream projects -for methods to use alternate trust. diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD deleted file mode 100644 index 87a9de0..0000000 --- a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -certifi-2026.1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2026.1.4.dist-info/METADATA,sha256=FSfJEfKuMo6bJlofUrtRpn4PFTYtbYyXpHN_A3ZFpIY,2473 -certifi-2026.1.4.dist-info/RECORD,, -certifi-2026.1.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -certifi-2026.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -certifi-2026.1.4.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 -certifi-2026.1.4.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=969deMMS7Uchipr0oO4dbRBUvRi0uNYCn07VmG1aTrg,94 -certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 -certifi/__pycache__/__init__.cpython-312.pyc,, -certifi/__pycache__/__main__.cpython-312.pyc,, -certifi/__pycache__/core.cpython-312.pyc,, -certifi/cacert.pem,sha256=Tzl1_zCrvzVEO0hgZK6Ly0Hf9wf_31dsdtKS-0WKoKk,270954 -certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394 -certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/REQUESTED b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE deleted file mode 100644 index 62b076c..0000000 --- a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -This package contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). These were automatically extracted from Mozilla's root certificates -file (certdata.txt). This file can be found in the mozilla source tree: -https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile.# - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. 
- -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt b/venv/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- a/venv/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/venv/Lib/site-packages/certifi/__init__.py b/venv/Lib/site-packages/certifi/__init__.py deleted file mode 100644 index 090fd58..0000000 --- a/venv/Lib/site-packages/certifi/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import contents, where - -__all__ = ["contents", "where"] -__version__ = "2026.01.04" diff --git a/venv/Lib/site-packages/certifi/__main__.py b/venv/Lib/site-packages/certifi/__main__.py deleted file mode 100644 index 8945b5d..0000000 --- a/venv/Lib/site-packages/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from certifi import contents, where - -parser = argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 9027f50..0000000 Binary files a/venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-312.pyc b/venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index bd7962a..0000000 Binary files a/venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/certifi/__pycache__/core.cpython-312.pyc b/venv/Lib/site-packages/certifi/__pycache__/core.cpython-312.pyc deleted file mode 100644 index e794e25..0000000 Binary files a/venv/Lib/site-packages/certifi/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/certifi/cacert.pem b/venv/Lib/site-packages/certifi/cacert.pem deleted file mode 100644 index 132db0d..0000000 --- a/venv/Lib/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4468 +0,0 @@ - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i 
-Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd 
-+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 
-nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC 
-CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# 
Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv 
-IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ 
-OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
-# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - 
-# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt 
-ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: 
ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 
-A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR 
-p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 
93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx 
-EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl 
-dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ 
-SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 
09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl 
-sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw 
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF 
-K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- 
-MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT 
-79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT -Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD 
-QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc -8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN 
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN -MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd 
-BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy -P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG -EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm -+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq -M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE 
[PEM data omitted: removed-only diff hunks deleting the following trusted root CA entries (issuer/subject comments, serials, MD5/SHA1/SHA256 fingerprints, and certificate bodies) from a bundled CA certificate file:
WISeKey Global Root GC CA; UCA Global G2 Root; UCA Extended Validation Root; Certigna Root CA; emSign Root CA - G1; emSign ECC Root CA - G3; emSign Root CA - C1; emSign ECC Root CA - C3; Hongkong Post Root CA 3; Microsoft ECC Root Certificate Authority 2017; Microsoft RSA Root Certificate Authority 2017; e-Szigno Root CA 2017; certSIGN Root CA G2; Trustwave Global Certification Authority; Trustwave Global ECC P256 Certification Authority; Trustwave Global ECC P384 Certification Authority; NAVER Global Root Certification Authority; AC RAIZ FNMT-RCM SERVIDORES SEGUROS; GlobalSign Root R46; GlobalSign Root E46; ANF Secure Server Root CA; Certum EC-384 CA; Certum Trusted Root CA; TunTrust Root CA; HARICA TLS RSA Root CA 2021; HARICA TLS ECC Root CA 2021; Autoridad de Certificacion Firmaprofesional CIF A62634068; vTrus ECC Root CA; vTrus Root CA; ISRG Root X2; HiPKI Root CA - G1; GlobalSign ECC Root CA - R4; GTS Root R1; GTS Root R2; GTS Root R3; GTS Root R4; Telia Root CA v2]
-A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS -SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K -TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF -6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er -3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt -Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT -VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW -ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA -rBPuUBQemMc= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 1 2020" -# Serial: 165870826978392376648679885835942448534 -# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed -# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 -# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 -NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS -zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 -QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ -VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW -wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV -dWNbFJWcHwHP2NVypw87 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 1 2020" -# Serial: 126288379621884218666039612629459926992 -# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e -# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 -# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 -NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC -/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD -wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 -OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
-y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb -gfM0agPnIjhQW+0ZT0MW ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS ECC P384 Root G5" -# Serial: 13129116028163249804115411775095713523 -# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed -# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee -# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 ------BEGIN CERTIFICATE----- -MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp -Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 -MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS -7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp -0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS -B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 -BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ -LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 -DXZDjC5Ty3zfDBeWUA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS RSA4096 Root G5" -# Serial: 11930366277458970227240571539258396554 -# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 -# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 -# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN -MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT -HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN -NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ -ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 -2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp -wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM -pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD -nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po -sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx -Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd -Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX -KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe -XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL -tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv -TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN -AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw -GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H -PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF -O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ -REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik -AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
-/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ -p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw -MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF -qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK -ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root R1 O=Certainly -# Subject: CN=Certainly Root R1 O=Certainly -# Label: "Certainly Root R1" -# Serial: 188833316161142517227353805653483829216 -# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 -# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af -# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw -PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy -dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 -YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 -1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT -vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed -aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 -1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 -r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 -cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ -wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ -6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA -2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH -Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR -eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB -/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u -d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr -PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d -8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi -1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd -rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di -taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 -lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj -yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn -Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy -yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n -wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 -OV+KmalBWQewLK8= ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root E1 O=Certainly -# Subject: CN=Certainly Root E1 O=Certainly -# Label: "Certainly Root E1" -# Serial: 8168531406727139161245376702891150584 -# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 -# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b -# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 ------BEGIN CERTIFICATE----- -MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw -CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu -bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ -BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s -eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
-+IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 -QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 -hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm -ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG -BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR ------END CERTIFICATE----- - -# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication ECC RootCA1" -# Serial: 15446673492073852651 -# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 -# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 -# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 ------BEGIN CERTIFICATE----- -MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT -AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD -VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx -NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT -HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 -IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl -dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK -ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu -9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O -be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA1" -# Serial: 113562791157148395269083148143378328608 -# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 -# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a -# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU -MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI -T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz -MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF -SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh -bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z -xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ -spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 -58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR -at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll -5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq -nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK -V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ -pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO -z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn -jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ -WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF -7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
-YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli -awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u -+2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 -X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN -SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo -P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI -+pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz -znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 -eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 -YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy -r/6zcCwupvI= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA2" -# Serial: 58605626836079930195615843123109055211 -# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c -# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 -# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 ------BEGIN CERTIFICATE----- -MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw -CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ -VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy -MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ -TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS -b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B -IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ -+kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK -sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA -94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B -43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root E46" -# Serial: 88989738453351742415770396670917916916 -# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 -# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a -# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 ------BEGIN CERTIFICATE----- -MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw -CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T -ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN -MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG -A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT -ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC -WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ -6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B -Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa -qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q -4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# 
Label: "Sectigo Public Server Authentication Root R46" -# Serial: 156256931880233212765902055439220583700 -# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 -# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 -# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 ------BEGIN CERTIFICATE----- -MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD -Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw -HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY -MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp -YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa -ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz -SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf -iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X -ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 -IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS -VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE -SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu -+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt -8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L -HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt -zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P -AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c -mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ -YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 -gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA -Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB -JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX -DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui -TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 -dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 -LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp -0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY -QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS RSA Root CA 2022" -# Serial: 148535279242832292258835760425842727825 -# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da -# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca -# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed ------BEGIN CERTIFICATE----- -MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO -MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD -DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX -DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw -b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC -AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP -L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY -t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins -S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
-PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO -L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 -R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w -dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS -+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS -d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG -AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f -gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j -BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z -NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt -hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM -QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf -R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ -DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW -P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy -lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq -bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w -AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q -r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji -Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU -98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS ECC Root CA 2022" -# Serial: 26605119622390491762507526719404364228 -# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 -# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 -# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 ------BEGIN CERTIFICATE----- -MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT -U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 -MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh -dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm -acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN -SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME -GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW -uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp -15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN -b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA ECC TLS 2021" -# Serial: 81873346711060652204712539181482831616 -# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 -# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd -# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 ------BEGIN CERTIFICATE----- -MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w -LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w -CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 -MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
-Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI -zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X -tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 -AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 -KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD -aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu -CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo -9H1/IISpQuQo ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA RSA TLS 2021" -# Serial: 111436099570196163832749341232207667876 -# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 -# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 -# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f ------BEGIN CERTIFICATE----- -MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM -MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx -MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 -MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD -QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z -4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv -Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ -kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs -GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln -nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh -3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD -0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy -geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 -ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB -c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI -pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS -4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs -o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ -qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw -xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM -rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 -AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR -0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY -o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 -dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE -oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G3" -# Serial: 576386314500428537169965010905813481816650257167 -# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 -# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 -# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 ------BEGIN CERTIFICATE----- -MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM -BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe -Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw -IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU -cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS -T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK -AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 -nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep -qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA -yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs -hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX -zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv -kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT -f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA -uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB -o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih -MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E -BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 -wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 -XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 -JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j -ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV -VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx -xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on -AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d -7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj -gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV -+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo -FGWsJwt0ivKH ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G4" -# Serial: 451799571007117016466790293371524403291602933463 -# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb -# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a -# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c ------BEGIN CERTIFICATE----- -MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw -WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y -MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD -VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz -dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx -s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw -LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD -pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE -AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR -UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj -/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS ECC Root 2020" -# Serial: 72082518505882327255703894282316633856 -# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd -# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec -# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 ------BEGIN CERTIFICATE----- -MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw -CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH -bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw -MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx -JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE -AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O -tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP -f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA -MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di -z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn -27iQ7t0l ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS RSA Root 2023" -# Serial: 44676229530606711399881795178081572759 -# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 -# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 -# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj -MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 -eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy -MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC -REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG 
-A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 -cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV -cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA -U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 -Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug -BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy -8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J -co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg -8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 -rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 -mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg -+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX -gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 -p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ -pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm -9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw -M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd -GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ -CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t -xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ -w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK -L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj -X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q -ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm -dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= ------END CERTIFICATE----- - -# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" -# Serial: 65916896770016886708751106294915943533 -# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 -# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 -# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a ------BEGIN CERTIFICATE----- -MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf -e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C -cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O -BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO -PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw -hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG -XSaQpYXFuXqUPoeovQA= ------END CERTIFICATE----- - -# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA CYBER Root CA" -# Serial: 85076849864375384482682434040119489222 -# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 -# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 -# SHA256 Fingerprint: 
3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ -MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 -IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 -WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO -LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg -Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P -40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF -avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ -34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i -JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu -j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf -Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP -2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA -S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA -oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC -kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW -5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd -BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB -AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t -tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn -68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn -TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t -RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx -f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI -Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz -8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 -NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX -xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 -t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
-# Label: "SecureSign Root CA12" -# Serial: 587887345431707215246142177076162061960426065942 -# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 -# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 -# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw -NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF -KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt -p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd -J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur -FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J -hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K -h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF -AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld -mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ -mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA -8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV -55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ -yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA14" -# Serial: 575790784512929437950770173562378038616896959179 -# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 -# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f -# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw -NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ -FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg -vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy -6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo -/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J -kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ -0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib -y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac -18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs -0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB -SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL -ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk -86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E -rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib -ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
-zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS -DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 -2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo -FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy -K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 -dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl -Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB -365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c -JRNItX+S ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA15" -# Serial: 126083514594751269499665114766174399806381178503 -# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 -# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d -# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a ------BEGIN CERTIFICATE----- -MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw -UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM -dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy -NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl -cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 -IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 -wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR -ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT -9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp -4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 -bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 2 2023" -# Serial: 153168538924886464690566649552453098598 -# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 -# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 -# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 ------BEGIN CERTIFICATE----- -MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI -MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE -LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw -OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi -MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr -i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE -gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 -k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT -Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl -2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U -cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP -/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS -uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ -0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N -DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ -XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
-GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG -OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y -XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI -FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n -riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR -VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc -LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn -4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD -hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG -koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 -ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS -Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 -knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ -hJ65bvspmZDogNOfJA== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. -# Label: "TrustAsia TLS ECC Root CA" -# Serial: 310892014698942880364840003424242768478804666567 -# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c -# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 -# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 ------BEGIN CERTIFICATE----- -MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw -WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw -NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE -ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB -c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ -AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp -guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw -DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 -L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR -OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia TLS RSA Root CA" -# Serial: 160405846464868906657516898462547310235378010780 -# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 -# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa -# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 ------BEGIN CERTIFICATE----- -MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM -BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN -MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG -A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 -c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC -AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ -NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ -Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 -HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 -ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb -xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX -i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ -UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j -TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT -bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 -S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 -Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 -iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt -7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp -2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ -g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj -pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M -pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP -XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe -SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 -ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy -323imttUQ/hHWKNddBWcwauwxzQ= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 2 2023" -# Serial: 139766439402180512324132425437959641711 -# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 -# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b -# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce ------BEGIN CERTIFICATE----- -MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI -MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE -LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw -OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi -MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK -F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE -7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe -EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 -lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
-RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV -jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc -jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx -TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ -ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk -hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF -NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH -kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG -OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y -XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 -QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 -pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q -3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU -t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX -cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 -ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT -2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs -7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP -gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst -Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh -XBxvWHZks/wCuPWdCg== ------END CERTIFICATE----- - -# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG -# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG -# Label: "SwissSign RSA TLS Root CA 2022 - 1" -# Serial: 388078645722908516278762308316089881486363258315 -# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 -# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce -# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 ------BEGIN CERTIFICATE----- -MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE -AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx -MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT -d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg -MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX -vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 -LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX -5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE -EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt -/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x -0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 -KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM -0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd -OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta -clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK -wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 -DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL -BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 -10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz -Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ -iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc -gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM -ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
-LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp -zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td -Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 -rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO -gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ ------END CERTIFICATE----- - -# Issuer: CN=OISTE Server Root ECC G1 O=OISTE Foundation -# Subject: CN=OISTE Server Root ECC G1 O=OISTE Foundation -# Label: "OISTE Server Root ECC G1" -# Serial: 47819833811561661340092227008453318557 -# MD5 Fingerprint: 42:a7:d2:35:ae:02:92:db:19:76:08:de:2f:05:b4:d4 -# SHA1 Fingerprint: 3b:f6:8b:09:ae:2a:92:7b:ba:e3:8d:3f:11:95:d9:e6:44:0c:45:e2 -# SHA256 Fingerprint: ee:c9:97:c0:c3:0f:21:6f:7e:3b:8b:30:7d:2b:ae:42:41:2d:75:3f:c8:21:9d:af:d1:52:0b:25:72:85:0f:49 ------BEGIN CERTIFICATE----- -MIICNTCCAbqgAwIBAgIQI/nD1jWvjyhLH/BU6n6XnTAKBggqhkjOPQQDAzBLMQsw -CQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UEAwwY -T0lTVEUgU2VydmVyIFJvb3QgRUNDIEcxMB4XDTIzMDUzMTE0NDIyOFoXDTQ4MDUy -NDE0NDIyN1owSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5kYXRp -b24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IEVDQyBHMTB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABBcv+hK8rBjzCvRE1nZCnrPoH7d5qVi2+GXROiFPqOujvqQy -cvO2Ackr/XeFblPdreqqLiWStukhEaivtUwL85Zgmjvn6hp4LrQ95SjeHIC6XG4N -2xml4z+cKrhAS93mT6NjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBQ3 -TYhlz/w9itWj8UnATgwQb0K0nDAdBgNVHQ4EFgQUN02IZc/8PYrVo/FJwE4MEG9C -tJwwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCpKjAd0MKfkFFR -QD6VVCHNFmb3U2wIFjnQEnx/Yxvf4zgAOdktUyBFCxxgZzFDJe0CMQCSia7pXGKD -YmH5LVerVrkR3SW+ak5KGoJr3M/TvEqzPNcum9v4KGm8ay3sMaE641c= ------END CERTIFICATE----- - -# Issuer: CN=OISTE Server Root RSA G1 O=OISTE Foundation -# Subject: CN=OISTE Server Root RSA G1 O=OISTE Foundation -# Label: " OISTE Server Root RSA G1" -# Serial: 113845518112613905024960613408179309848 -# MD5 Fingerprint: 23:a7:9e:d4:70:b8:b9:14:57:41:8a:7e:44:59:e2:68 -# SHA1 Fingerprint: f7:00:34:25:94:88:68:31:e4:34:87:3f:70:fe:86:b3:86:9f:f0:6e -# SHA256 Fingerprint: 9a:e3:62:32:a5:18:9f:fd:db:35:3d:fd:26:52:0c:01:53:95:d2:27:77:da:c5:9d:b5:7b:98:c0:89:a6:51:e6 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIQVaXZZ5Qoxu0M+ifdWwFNGDANBgkqhkiG9w0BAQwFADBL -MQswCQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UE -AwwYT0lTVEUgU2VydmVyIFJvb3QgUlNBIEcxMB4XDTIzMDUzMTE0MzcxNloXDTQ4 -MDUyNDE0MzcxNVowSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5k -YXRpb24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IFJTQSBHMTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAKqu9KuCz/vlNwvn1ZatkOhLKdxVYOPM -vLO8LZK55KN68YG0nnJyQ98/qwsmtO57Gmn7KNByXEptaZnwYx4M0rH/1ow00O7b -rEi56rAUjtgHqSSY3ekJvqgiG1k50SeH3BzN+Puz6+mTeO0Pzjd8JnduodgsIUzk -ik/HEzxux9UTl7Ko2yRpg1bTacuCErudG/L4NPKYKyqOBGf244ehHa1uzjZ0Dl4z -O8vbUZeUapU8zhhabkvG/AePLhq5SvdkNCncpo1Q4Y2LS+VIG24ugBA/5J8bZT8R -tOpXaZ+0AOuFJJkk9SGdl6r7NH8CaxWQrbueWhl/pIzY+m0o/DjH40ytas7ZTpOS -jswMZ78LS5bOZmdTaMsXEY5Z96ycG7mOaES3GK/m5Q9l3JUJsJMStR8+lKXHiHUh -sd4JJCpM4rzsTGdHwimIuQq6+cF0zowYJmXa92/GjHtoXAvuY8BeS/FOzJ8vD+Ho -mnqT8eDI278n5mUpezbgMxVz8p1rhAhoKzYHKyfMeNhqhw5HdPSqoBNdZH702xSu -+zrkL8Fl47l6QGzwBrd7KJvX4V84c5Ss2XCTLdyEr0YconosP4EmQufU2MVshGYR -i3drVByjtdgQ8K4p92cIiBdcuJd5z+orKu5YM+Vt6SmqZQENghPsJQtdLEByFSnT -kCz3GkPVavBpAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU -8snBDw1jALvsRQ5KH7WxszbNDo0wHQYDVR0OBBYEFPLJwQ8NYwC77EUOSh+1sbM2 -zQ6NMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQwFAAOCAgEANGd5sjrG5T33 -I3K5Ce+SrScfoE4KsvXaFwyihdJ+klH9FWXXXGtkFu6KRcoMQzZENdl//nk6HOjG -5D1rd9QhEOP28yBOqb6J8xycqd+8MDoX0TJD0KqKchxRKEzdNsjkLWd9kYccnbz8 
-qyiWXmFcuCIzGEgWUOrKL+mlSdx/PKQZvDatkuK59EvV6wit53j+F8Bdh3foZ3dP -AGav9LEDOr4SfEE15fSmG0eLy3n31r8Xbk5l8PjaV8GUgeV6Vg27Rn9vkf195hfk -gSe7BYhW3SCl95gtkRlpMV+bMPKZrXJAlszYd2abtNUOshD+FKrDgHGdPY3ofRRs -YWSGRqbXVMW215AWRqWFyp464+YTFrYVI8ypKVL9AMb2kI5Wj4kI3Zaq5tNqqYY1 -9tVFeEJKRvwDyF7YZvZFZSS0vod7VSCd9521Kvy5YhnLbDuv0204bKt7ph6N/Ome -/msVuduCmsuY33OhkKCgxeDoAaijFJzIwZqsFVAzje18KotzlUBDJvyBpCpfOZC3 -J8tRd/iWkx7P8nd9H0aTolkelUTFLXVksNb54Dxp6gS1HAviRkRNQzuXSXERvSS2 -wq1yVAb+axj5d9spLFKebXd7Yv0PTY6YMjAwcRLWJTXjn/hvnLXrahut6hDTlhZy -BiElxky8j3C7DOReIoMt0r7+hVu05L0= ------END CERTIFICATE----- diff --git a/venv/Lib/site-packages/certifi/core.py b/venv/Lib/site-packages/certifi/core.py deleted file mode 100644 index 1c9661c..0000000 --- a/venv/Lib/site-packages/certifi/core.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import sys -import atexit - -def exit_cacert_ctx() -> None: - _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] - - -if sys.version_info >= (3, 11): - - from importlib.resources import as_file, files - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") - -else: - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the - # file in cases where we're inside of a zipimport situation until - # someone actually calls where(), but we don't want to re-extract - # the file on every call of where(), so we'll do it once then store - # it in a global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you - # to manage the cleanup of this file, so it doesn't actually - # return a path, it returns a context manager that will give - # you the path when you enter it and will do any cleanup when - # you leave it. In the common case of not needing a temporary - # file, it will just return the file system location and the - # __exit__() is a no-op. 
- # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = get_path("certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/Lib/site-packages/certifi/py.typed b/venv/Lib/site-packages/certifi/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/INSTALLER b/venv/Lib/site-packages/cffi-2.0.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/METADATA b/venv/Lib/site-packages/cffi-2.0.0.dist-info/METADATA deleted file mode 100644 index 67508e5..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/METADATA +++ /dev/null @@ -1,68 +0,0 @@ -Metadata-Version: 2.4 -Name: cffi -Version: 2.0.0 -Summary: Foreign Function Interface for Python calling C code. -Author: Armin Rigo, Maciej Fijalkowski -Maintainer: Matt Davis, Matt Clay, Matti Picus -License-Expression: MIT -Project-URL: Documentation, https://cffi.readthedocs.io/ -Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html -Project-URL: Downloads, https://github.com/python-cffi/cffi/releases -Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi -Project-URL: Source Code, https://github.com/python-cffi/cffi -Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta -Classifier: Programming Language :: Python :: Implementation :: CPython -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -License-File: LICENSE -License-File: AUTHORS -Requires-Dist: pycparser; implementation_name != "PyPy" -Dynamic: license-file - -[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++) -[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi) -[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation] - - -CFFI -==== - -Foreign Function Interface for Python calling C code. - -Please see the [Documentation] or uncompiled in the `doc/` subdirectory. - -Download --------- - -[Download page](https://github.com/python-cffi/cffi/releases) - -Source Code ------------ - -Source code is publicly available on -[GitHub](https://github.com/python-cffi/cffi). - -Contact -------- - -[Mailing list](https://groups.google.com/forum/#!forum/python-cffi) - -Testing/development tips ------------------------- - -After `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`. we call it `repo-directory`. 
To run tests under CPython, run the following in the `repo-directory`: - - pip install pytest - pip install -e . # editable install of CFFI for local development - pytest src/c/ testing/ - -[Documentation]: http://cffi.readthedocs.org/ diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/RECORD b/venv/Lib/site-packages/cffi-2.0.0.dist-info/RECORD deleted file mode 100644 index 6e04ab3..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/RECORD +++ /dev/null @@ -1,50 +0,0 @@ -_cffi_backend.cp312-win_amd64.pyd,sha256=U940r8OTnYG_JJi3dCXFLiw_MWT4z-JUPTrMjyQTRIg,181248 -cffi-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -cffi-2.0.0.dist-info/METADATA,sha256=ZET6EC-RAp729tW1o4YJl61x9srq0O0A_tZ4N3Mi3uc,2627 -cffi-2.0.0.dist-info/RECORD,, -cffi-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -cffi-2.0.0.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101 -cffi-2.0.0.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 -cffi-2.0.0.dist-info/licenses/AUTHORS,sha256=KmemC7-zN1nWfWRf8TG45ta8TK_CMtdR_Kw-2k0xTMg,208 -cffi-2.0.0.dist-info/licenses/LICENSE,sha256=W6JN3FcGf5JJrdZEw6_EGl1tw34jQz73Wdld83Cwr2M,1123 -cffi-2.0.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 -cffi/__init__.py,sha256=-ksBQ7MfDzVvbBlV_ftYBWAmEqfA86ljIzMxzaZeAlI,511 -cffi/__pycache__/__init__.cpython-312.pyc,, -cffi/__pycache__/_imp_emulation.cpython-312.pyc,, -cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc,, -cffi/__pycache__/api.cpython-312.pyc,, -cffi/__pycache__/backend_ctypes.cpython-312.pyc,, -cffi/__pycache__/cffi_opcode.cpython-312.pyc,, -cffi/__pycache__/commontypes.cpython-312.pyc,, -cffi/__pycache__/cparser.cpython-312.pyc,, -cffi/__pycache__/error.cpython-312.pyc,, -cffi/__pycache__/ffiplatform.cpython-312.pyc,, -cffi/__pycache__/lock.cpython-312.pyc,, -cffi/__pycache__/model.cpython-312.pyc,, -cffi/__pycache__/pkgconfig.cpython-312.pyc,, -cffi/__pycache__/recompiler.cpython-312.pyc,, -cffi/__pycache__/setuptools_ext.cpython-312.pyc,, -cffi/__pycache__/vengine_cpy.cpython-312.pyc,, -cffi/__pycache__/vengine_gen.cpython-312.pyc,, -cffi/__pycache__/verifier.cpython-312.pyc,, -cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 -cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055 -cffi/_embedding.h,sha256=Ai33FHblE7XSpHOCp8kPcWwN5_9BV14OvN0JVa6ITpw,18786 -cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960 -cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230 -cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169 -cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 -cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731 -cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805 -cffi/cparser.py,sha256=QUTfmlL-aO-MYR8bFGlvAUHc36OQr7XYLe0WLkGFjRo,44790 -cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 -cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 -cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 -cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797 -cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 -cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 -cffi/recompiler.py,sha256=78J6lMEEOygXNmjN9-fOFFO3j7eW-iFxSrxfvQb54bY,65509 
-cffi/setuptools_ext.py,sha256=0rCwBJ1W7FHWtiMKfNXsSST88V8UXrui5oeXFlDNLG8,9411 -cffi/vengine_cpy.py,sha256=oyQKD23kpE0aChUKA8Jg0e723foPiYzLYEdb-J0MiNs,43881 -cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939 -cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/REQUESTED b/venv/Lib/site-packages/cffi-2.0.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/WHEEL b/venv/Lib/site-packages/cffi-2.0.0.dist-info/WHEEL deleted file mode 100644 index 10ac2c2..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/entry_points.txt b/venv/Lib/site-packages/cffi-2.0.0.dist-info/entry_points.txt deleted file mode 100644 index 4b0274f..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[distutils.setup_keywords] -cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS b/venv/Lib/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS deleted file mode 100644 index 370a25d..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS +++ /dev/null @@ -1,8 +0,0 @@ -This package has been mostly done by Armin Rigo with help from -Maciej Fijałkowski. The idea is heavily based (although not directly -copied) from LuaJIT ffi by Mike Pall. - - -Other contributors: - - Google Inc. diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE b/venv/Lib/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE deleted file mode 100644 index 0a1dbfb..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ - -Except when otherwise stated (look for LICENSE files in directories or -information at the beginning of each file) all software and -documentation is licensed as follows: - - MIT No Attribution - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or - sell copies of the Software, and to permit persons to whom the - Software is furnished to do so. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. 
- diff --git a/venv/Lib/site-packages/cffi-2.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/cffi-2.0.0.dist-info/top_level.txt deleted file mode 100644 index f645779..0000000 --- a/venv/Lib/site-packages/cffi-2.0.0.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -_cffi_backend -cffi diff --git a/venv/Lib/site-packages/cffi/__init__.py b/venv/Lib/site-packages/cffi/__init__.py deleted file mode 100644 index c99ec3d..0000000 --- a/venv/Lib/site-packages/cffi/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', - 'FFIError'] - -from .api import FFI -from .error import CDefError, FFIError, VerificationError, VerificationMissing -from .error import PkgConfigError - -__version__ = "2.0.0" -__version_info__ = (2, 0, 0) - -# The verifier module file names are based on the CRC32 of a string that -# contains the following version number. It may be older than __version__ -# if nothing is clearly incompatible. -__version_verifier_modules__ = "0.8.6" diff --git a/venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index abd928b..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc deleted file mode 100644 index d4b7065..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc deleted file mode 100644 index 1d7d360..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/api.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/api.cpython-312.pyc deleted file mode 100644 index 89b88b2..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/api.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc deleted file mode 100644 index d510a74..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc deleted file mode 100644 index 7382d70..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc deleted file mode 100644 index bda7aa5..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-312.pyc deleted file mode 100644 index 8f32eed..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/error.cpython-312.pyc 
b/venv/Lib/site-packages/cffi/__pycache__/error.cpython-312.pyc deleted file mode 100644 index fc9bae1..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/error.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc deleted file mode 100644 index db94674..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/lock.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/lock.cpython-312.pyc deleted file mode 100644 index 1269622..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/lock.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/model.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/model.cpython-312.pyc deleted file mode 100644 index cffdb21..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/model.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc deleted file mode 100644 index 68a84cf..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc deleted file mode 100644 index 8f3450f..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc deleted file mode 100644 index ba0d66e..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc deleted file mode 100644 index 11bdf9f..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc deleted file mode 100644 index 1c323b4..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-312.pyc b/venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-312.pyc deleted file mode 100644 index 76902ca..0000000 Binary files a/venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cffi/_cffi_errors.h b/venv/Lib/site-packages/cffi/_cffi_errors.h deleted file mode 100644 index 158e059..0000000 --- a/venv/Lib/site-packages/cffi/_cffi_errors.h +++ /dev/null @@ -1,149 +0,0 @@ -#ifndef CFFI_MESSAGEBOX -# ifdef _MSC_VER -# define CFFI_MESSAGEBOX 1 -# else -# define CFFI_MESSAGEBOX 0 -# endif -#endif - - -#if CFFI_MESSAGEBOX -/* Windows only: logic to take the Python-CFFI embedding logic - initialization errors and display them in a background thread - with MessageBox. 
The idea is that if the whole program closes - as a result of this problem, then likely it is already a console - program and you can read the stderr output in the console too. - If it is not a console program, then it will likely show its own - dialog to complain, or generally not abruptly close, and for this - case the background thread should stay alive. -*/ -static void *volatile _cffi_bootstrap_text; - -static PyObject *_cffi_start_error_capture(void) -{ - PyObject *result = NULL; - PyObject *x, *m, *bi; - - if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, - (void *)1, NULL) != NULL) - return (PyObject *)1; - - m = PyImport_AddModule("_cffi_error_capture"); - if (m == NULL) - goto error; - - result = PyModule_GetDict(m); - if (result == NULL) - goto error; - -#if PY_MAJOR_VERSION >= 3 - bi = PyImport_ImportModule("builtins"); -#else - bi = PyImport_ImportModule("__builtin__"); -#endif - if (bi == NULL) - goto error; - PyDict_SetItemString(result, "__builtins__", bi); - Py_DECREF(bi); - - x = PyRun_String( - "import sys\n" - "class FileLike:\n" - " def write(self, x):\n" - " try:\n" - " of.write(x)\n" - " except: pass\n" - " self.buf += x\n" - " def flush(self):\n" - " pass\n" - "fl = FileLike()\n" - "fl.buf = ''\n" - "of = sys.stderr\n" - "sys.stderr = fl\n" - "def done():\n" - " sys.stderr = of\n" - " return fl.buf\n", /* make sure the returned value stays alive */ - Py_file_input, - result, result); - Py_XDECREF(x); - - error: - if (PyErr_Occurred()) - { - PyErr_WriteUnraisable(Py_None); - PyErr_Clear(); - } - return result; -} - -#pragma comment(lib, "user32.lib") - -static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) -{ - Sleep(666); /* may be interrupted if the whole process is closing */ -#if PY_MAJOR_VERSION >= 3 - MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, - L"Python-CFFI error", - MB_OK | MB_ICONERROR); -#else - MessageBoxA(NULL, (char *)_cffi_bootstrap_text, - "Python-CFFI error", - MB_OK | MB_ICONERROR); -#endif - _cffi_bootstrap_text = NULL; - return 0; -} - -static void _cffi_stop_error_capture(PyObject *ecap) -{ - PyObject *s; - void *text; - - if (ecap == (PyObject *)1) - return; - - if (ecap == NULL) - goto error; - - s = PyRun_String("done()", Py_eval_input, ecap, ecap); - if (s == NULL) - goto error; - - /* Show a dialog box, but in a background thread, and - never show multiple dialog boxes at once. */ -#if PY_MAJOR_VERSION >= 3 - text = PyUnicode_AsWideCharString(s, NULL); -#else - text = PyString_AsString(s); -#endif - - _cffi_bootstrap_text = text; - - if (text != NULL) - { - HANDLE h; - h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, - NULL, 0, NULL); - if (h != NULL) - CloseHandle(h); - } - /* decref the string, but it should stay alive as 'fl.buf' - in the small module above. It will really be freed only if - we later get another similar error. So it's a leak of at - most one copy of the small module. That's fine for this - situation which is usually a "fatal error" anyway. */ - Py_DECREF(s); - PyErr_Clear(); - return; - - error: - _cffi_bootstrap_text = NULL; - PyErr_Clear(); -} - -#else - -static PyObject *_cffi_start_error_capture(void) { return NULL; } -static void _cffi_stop_error_capture(PyObject *ecap) { } - -#endif diff --git a/venv/Lib/site-packages/cffi/_cffi_include.h b/venv/Lib/site-packages/cffi/_cffi_include.h deleted file mode 100644 index 908a1d7..0000000 --- a/venv/Lib/site-packages/cffi/_cffi_include.h +++ /dev/null @@ -1,389 +0,0 @@ -#define _CFFI_ - -/* We try to define Py_LIMITED_API before including Python.h. 
- - Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and - Py_REF_DEBUG are not defined. This is a best-effort approximation: - we can learn about Py_DEBUG from pyconfig.h, but it is unclear if - the same works for the other two macros. Py_DEBUG implies them, - but not the other way around. - - The implementation is messy (issue #350): on Windows, with _MSC_VER, - we have to define Py_LIMITED_API even before including pyconfig.h. - In that case, we guess what pyconfig.h will do to the macros above, - and check our guess after the #include. - - Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv - version >= 16.0.0. With older versions of either, you don't get a - copy of PYTHON3.DLL in the virtualenv. We can't check the version of - CPython *before* we even include pyconfig.h. ffi.set_source() puts - a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is - running on Windows < 3.5, as an attempt at fixing it, but that's - arguably wrong because it may not be the target version of Python. - Still better than nothing I guess. As another workaround, you can - remove the definition of Py_LIMITED_API here. - - See also 'py_limited_api' in cffi/setuptools_ext.py. -*/ -#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) -# ifdef _MSC_VER -# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) -# define Py_LIMITED_API -# endif -# include - /* sanity-check: Py_LIMITED_API will cause crashes if any of these - are also defined. Normally, the Python file PC/pyconfig.h does not - cause any of these to be defined, with the exception that _DEBUG - causes Py_DEBUG. Double-check that. */ -# ifdef Py_LIMITED_API -# if defined(Py_DEBUG) -# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" -# endif -# if defined(Py_TRACE_REFS) -# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" -# endif -# if defined(Py_REF_DEBUG) -# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" -# endif -# endif -# else -# include -# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) -# define Py_LIMITED_API -# endif -# endif -#endif - -#include -#ifdef __cplusplus -extern "C" { -#endif -#include -#include "parse_c_type.h" - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else 
-# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif - -#ifdef __GNUC__ -# define _CFFI_UNUSED_FN __attribute__((unused)) -#else -# define _CFFI_UNUSED_FN /* nothing */ -#endif - -#ifdef __cplusplus -# ifndef _Bool - typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ -# endif -#endif - -/********** CPython-specific section **********/ -#ifndef PYPY_VERSION - - -#if PY_MAJOR_VERSION >= 3 -# define PyInt_FromLong PyLong_FromLong -#endif - -#define _cffi_from_c_double PyFloat_FromDouble -#define _cffi_from_c_float PyFloat_FromDouble -#define _cffi_from_c_long PyInt_FromLong -#define _cffi_from_c_ulong PyLong_FromUnsignedLong -#define _cffi_from_c_longlong PyLong_FromLongLong -#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong -#define _cffi_from_c__Bool PyBool_FromLong - -#define _cffi_to_c_double PyFloat_AsDouble -#define _cffi_to_c_float PyFloat_AsDouble - -#define _cffi_from_c_int(x, type) \ - (((type)-1) > 0 ? /* unsigned */ \ - (sizeof(type) < sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - sizeof(type) == sizeof(long) ? \ - PyLong_FromUnsignedLong((unsigned long)x) : \ - PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ - (sizeof(type) <= sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - PyLong_FromLongLong((long long)x))) - -#define _cffi_to_c_int(o, type) \ - ((type)( \ - sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ - : (type)_cffi_to_c_i8(o)) : \ - sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ - : (type)_cffi_to_c_i16(o)) : \ - sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ - : (type)_cffi_to_c_i32(o)) : \ - sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ - : (type)_cffi_to_c_i64(o)) : \ - (Py_FatalError("unsupported size for type " #type), (type)0))) - -#define _cffi_to_c_i8 \ - ((int(*)(PyObject *))_cffi_exports[1]) -#define _cffi_to_c_u8 \ - ((int(*)(PyObject *))_cffi_exports[2]) -#define _cffi_to_c_i16 \ - ((int(*)(PyObject *))_cffi_exports[3]) -#define _cffi_to_c_u16 \ - ((int(*)(PyObject *))_cffi_exports[4]) -#define _cffi_to_c_i32 \ - ((int(*)(PyObject *))_cffi_exports[5]) -#define _cffi_to_c_u32 \ - ((unsigned int(*)(PyObject *))_cffi_exports[6]) -#define _cffi_to_c_i64 \ - ((long long(*)(PyObject *))_cffi_exports[7]) -#define _cffi_to_c_u64 \ - ((unsigned long long(*)(PyObject *))_cffi_exports[8]) -#define _cffi_to_c_char \ - ((int(*)(PyObject *))_cffi_exports[9]) -#define _cffi_from_c_pointer \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) -#define _cffi_to_c_pointer \ - ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) -#define _cffi_get_struct_layout \ - not used any more -#define _cffi_restore_errno \ - ((void(*)(void))_cffi_exports[13]) -#define _cffi_save_errno \ - ((void(*)(void))_cffi_exports[14]) -#define _cffi_from_c_char \ - ((PyObject *(*)(char))_cffi_exports[15]) -#define _cffi_from_c_deref \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) -#define _cffi_to_c \ - ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) -#define _cffi_from_c_struct \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) -#define _cffi_to_c_wchar_t \ - ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) -#define _cffi_from_c_wchar_t \ - ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) -#define _cffi_to_c_long_double \ - ((long double(*)(PyObject *))_cffi_exports[21]) -#define _cffi_to_c__Bool \ - ((_Bool(*)(PyObject *))_cffi_exports[22]) -#define _cffi_prepare_pointer_call_argument \ - ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ - PyObject *, char **))_cffi_exports[23]) -#define _cffi_convert_array_from_object \ - ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) -#define _CFFI_CPIDX 25 -#define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) -#define _cffi_to_c_wchar3216_t \ - ((int(*)(PyObject *))_cffi_exports[26]) -#define _cffi_from_c_wchar3216_t \ - ((PyObject *(*)(int))_cffi_exports[27]) -#define _CFFI_NUM_EXPORTS 28 - -struct _cffi_ctypedescr; - -static void *_cffi_exports[_CFFI_NUM_EXPORTS]; - -#define _cffi_type(index) ( \ - assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ - (struct _cffi_ctypedescr *)_cffi_types[index]) - -static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, - const struct _cffi_type_context_s *ctx) -{ - PyObject *module, *o_arg, *new_module; - void *raw[] = { - (void *)module_name, - (void *)version, - (void *)_cffi_exports, - (void *)ctx, - }; - - module = PyImport_ImportModule("_cffi_backend"); - if (module == NULL) - goto failure; - - o_arg = PyLong_FromVoidPtr((void *)raw); - if (o_arg == NULL) - goto failure; - - new_module = PyObject_CallMethod( - module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); - - Py_DECREF(o_arg); - Py_DECREF(module); - return new_module; - - failure: - Py_XDECREF(module); - return NULL; -} - - -#ifdef HAVE_WCHAR_H -typedef wchar_t _cffi_wchar_t; -#else -typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ -#endif - -_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) -{ - if (sizeof(_cffi_wchar_t) == 2) 
- return (uint16_t)_cffi_to_c_wchar_t(o); - else - return (uint16_t)_cffi_to_c_wchar3216_t(o); -} - -_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) -{ - if (sizeof(_cffi_wchar_t) == 2) - return _cffi_from_c_wchar_t((_cffi_wchar_t)x); - else - return _cffi_from_c_wchar3216_t((int)x); -} - -_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) -{ - if (sizeof(_cffi_wchar_t) == 4) - return (int)_cffi_to_c_wchar_t(o); - else - return (int)_cffi_to_c_wchar3216_t(o); -} - -_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) -{ - if (sizeof(_cffi_wchar_t) == 4) - return _cffi_from_c_wchar_t((_cffi_wchar_t)x); - else - return _cffi_from_c_wchar3216_t((int)x); -} - -union _cffi_union_alignment_u { - unsigned char m_char; - unsigned short m_short; - unsigned int m_int; - unsigned long m_long; - unsigned long long m_longlong; - float m_float; - double m_double; - long double m_longdouble; -}; - -struct _cffi_freeme_s { - struct _cffi_freeme_s *next; - union _cffi_union_alignment_u alignment; -}; - -_CFFI_UNUSED_FN static int -_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, - char **output_data, Py_ssize_t datasize, - struct _cffi_freeme_s **freeme) -{ - char *p; - if (datasize < 0) - return -1; - - p = *output_data; - if (p == NULL) { - struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( - offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); - if (fp == NULL) - return -1; - fp->next = *freeme; - *freeme = fp; - p = *output_data = (char *)&fp->alignment; - } - memset((void *)p, 0, (size_t)datasize); - return _cffi_convert_array_from_object(p, ctptr, arg); -} - -_CFFI_UNUSED_FN static void -_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) -{ - do { - void *p = (void *)freeme; - freeme = freeme->next; - PyObject_Free(p); - } while (freeme != NULL); -} - -/********** end CPython-specific section **********/ -#else -_CFFI_UNUSED_FN -static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); -# define _cffi_call_python _cffi_call_python_org -#endif - - -#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) - -#define _cffi_prim_int(size, sign) \ - ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ - (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ - (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ - (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ - _CFFI__UNKNOWN_PRIM) - -#define _cffi_prim_float(size) \ - ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ - (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ - (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \ - _CFFI__UNKNOWN_FLOAT_PRIM) - -#define _cffi_check_int(got, got_nonpos, expected) \ - ((got_nonpos) == (expected <= 0) && \ - (got) == (unsigned long long)expected) - -#ifdef MS_WIN32 -# define _cffi_stdcall __stdcall -#else -# define _cffi_stdcall /* nothing */ -#endif - -#ifdef __cplusplus -} -#endif diff --git a/venv/Lib/site-packages/cffi/_embedding.h b/venv/Lib/site-packages/cffi/_embedding.h deleted file mode 100644 index 64c04f6..0000000 --- a/venv/Lib/site-packages/cffi/_embedding.h +++ /dev/null @@ -1,550 +0,0 @@ - -/***** Support code for embedding *****/ - -#ifdef __cplusplus -extern "C" { -#endif - - -#if defined(_WIN32) -# define CFFI_DLLEXPORT __declspec(dllexport) -#elif defined(__GNUC__) -# define CFFI_DLLEXPORT __attribute__((visibility("default"))) -#else -# define CFFI_DLLEXPORT /* nothing */ -#endif - - -/* There are two global variables of type _cffi_call_python_fnptr: - - * _cffi_call_python, which we declare just below, is the one called - by ``extern "Python"`` implementations. - - * _cffi_call_python_org, which on CPython is actually part of the - _cffi_exports[] array, is the function pointer copied from - _cffi_backend. If _cffi_start_python() fails, then this is set - to NULL; otherwise, it should never be NULL. - - After initialization is complete, both are equal. However, the - first one remains equal to &_cffi_start_and_call_python until the - very end of initialization, when we are (or should be) sure that - concurrent threads also see a completely initialized world, and - only then is it changed. -*/ -#undef _cffi_call_python -typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); -static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); -static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; - - -#ifndef _MSC_VER - /* --- Assuming a GCC not infinitely old --- */ -# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) -# define cffi_write_barrier() __sync_synchronize() -# if !defined(__amd64__) && !defined(__x86_64__) && \ - !defined(__i386__) && !defined(__i386) -# define cffi_read_barrier() __sync_synchronize() -# else -# define cffi_read_barrier() (void)0 -# endif -#else - /* --- Windows threads version --- */ -# include -# define cffi_compare_and_swap(l,o,n) \ - (InterlockedCompareExchangePointer(l,n,o) == (o)) -# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) -# define cffi_read_barrier() (void)0 -static volatile LONG _cffi_dummy; -#endif - -#ifdef WITH_THREAD -# ifndef _MSC_VER -# include - static pthread_mutex_t _cffi_embed_startup_lock; -# else - static CRITICAL_SECTION _cffi_embed_startup_lock; -# endif - static char _cffi_embed_startup_lock_ready = 0; -#endif - -static void _cffi_acquire_reentrant_mutex(void) -{ - static void *volatile lock = NULL; - - while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: pthread_mutex_init() should be very fast, and - this is only run at start-up anyway. 
*/ - } - -#ifdef WITH_THREAD - if (!_cffi_embed_startup_lock_ready) { -# ifndef _MSC_VER - pthread_mutexattr_t attr; - pthread_mutexattr_init(&attr); - pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); - pthread_mutex_init(&_cffi_embed_startup_lock, &attr); -# else - InitializeCriticalSection(&_cffi_embed_startup_lock); -# endif - _cffi_embed_startup_lock_ready = 1; - } -#endif - - while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) - ; - -#ifndef _MSC_VER - pthread_mutex_lock(&_cffi_embed_startup_lock); -#else - EnterCriticalSection(&_cffi_embed_startup_lock); -#endif -} - -static void _cffi_release_reentrant_mutex(void) -{ -#ifndef _MSC_VER - pthread_mutex_unlock(&_cffi_embed_startup_lock); -#else - LeaveCriticalSection(&_cffi_embed_startup_lock); -#endif -} - - -/********** CPython-specific section **********/ -#ifndef PYPY_VERSION - -#include "_cffi_errors.h" - - -#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] - -PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ - -static void _cffi_py_initialize(void) -{ - /* XXX use initsigs=0, which "skips initialization registration of - signal handlers, which might be useful when Python is - embedded" according to the Python docs. But review and think - if it should be a user-controllable setting. - - XXX we should also give a way to write errors to a buffer - instead of to stderr. - - XXX if importing 'site' fails, CPython (any version) calls - exit(). Should we try to work around this behavior here? - */ - Py_InitializeEx(0); -} - -static int _cffi_initialize_python(void) -{ - /* This initializes Python, imports _cffi_backend, and then the - present .dll/.so is set up as a CPython C extension module. - */ - int result; - PyGILState_STATE state; - PyObject *pycode=NULL, *global_dict=NULL, *x; - PyObject *builtins; - - state = PyGILState_Ensure(); - - /* Call the initxxx() function from the present module. It will - create and initialize us as a CPython extension module, instead - of letting the startup Python code do it---it might reimport - the same .dll/.so and get maybe confused on some platforms. - It might also have troubles locating the .dll/.so again for all - I know. - */ - (void)_CFFI_PYTHON_STARTUP_FUNC(); - if (PyErr_Occurred()) - goto error; - - /* Now run the Python code provided to ffi.embedding_init_code(). - */ - pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, - "", - Py_file_input); - if (pycode == NULL) - goto error; - global_dict = PyDict_New(); - if (global_dict == NULL) - goto error; - builtins = PyEval_GetBuiltins(); - if (builtins == NULL) - goto error; - if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) - goto error; - x = PyEval_EvalCode( -#if PY_MAJOR_VERSION < 3 - (PyCodeObject *) -#endif - pycode, global_dict, global_dict); - if (x == NULL) - goto error; - Py_DECREF(x); - - /* Done! Now if we've been called from - _cffi_start_and_call_python() in an ``extern "Python"``, we can - only hope that the Python code did correctly set up the - corresponding @ffi.def_extern() function. Otherwise, the - general logic of ``extern "Python"`` functions (inside the - _cffi_backend module) will find that the reference is still - missing and print an error. - */ - result = 0; - done: - Py_XDECREF(pycode); - Py_XDECREF(global_dict); - PyGILState_Release(state); - return result; - - error:; - { - /* Print as much information as potentially useful. 
- Debugging load-time failures with embedding is not fun - */ - PyObject *ecap; - PyObject *exception, *v, *tb, *f, *modules, *mod; - PyErr_Fetch(&exception, &v, &tb); - ecap = _cffi_start_error_capture(); - f = PySys_GetObject((char *)"stderr"); - if (f != NULL && f != Py_None) { - PyFile_WriteString( - "Failed to initialize the Python-CFFI embedding logic:\n\n", f); - } - - if (exception != NULL) { - PyErr_NormalizeException(&exception, &v, &tb); - PyErr_Display(exception, v, tb); - } - Py_XDECREF(exception); - Py_XDECREF(v); - Py_XDECREF(tb); - - if (f != NULL && f != Py_None) { - PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 2.0.0" - "\n_cffi_backend module: ", f); - modules = PyImport_GetModuleDict(); - mod = PyDict_GetItemString(modules, "_cffi_backend"); - if (mod == NULL) { - PyFile_WriteString("not loaded", f); - } - else { - v = PyObject_GetAttrString(mod, "__file__"); - PyFile_WriteObject(v, f, 0); - Py_XDECREF(v); - } - PyFile_WriteString("\nsys.path: ", f); - PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); - PyFile_WriteString("\n\n", f); - } - _cffi_stop_error_capture(ecap); - } - result = -1; - goto done; -} - -#if PY_VERSION_HEX < 0x03080000 -PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ -#endif - -static int _cffi_carefully_make_gil(void) -{ - /* This does the basic initialization of Python. It can be called - completely concurrently from unrelated threads. It assumes - that we don't hold the GIL before (if it exists), and we don't - hold it afterwards. - - (What it really does used to be completely different in Python 2 - and Python 3, with the Python 2 solution avoiding the spin-lock - around the Py_InitializeEx() call. However, after recent changes - to CPython 2.7 (issue #358) it no longer works. So we use the - Python 3 solution everywhere.) - - This initializes Python by calling Py_InitializeEx(). - Important: this must not be called concurrently at all. - So we use a global variable as a simple spin lock. This global - variable must be from 'libpythonX.Y.so', not from this - cffi-based extension module, because it must be shared from - different cffi-based extension modules. - - In Python < 3.8, we choose - _PyParser_TokenNames[0] as a completely arbitrary pointer value - that is never written to. The default is to point to the - string "ENDMARKER". We change it temporarily to point to the - next character in that string. (Yes, I know it's REALLY - obscure.) - - In Python >= 3.8, this string array is no longer writable, so - instead we pick PyCapsuleType.tp_version_tag. We can't change - Python < 3.8 because someone might use a mixture of cffi - embedded modules, some of which were compiled before this file - changed. - - In Python >= 3.12, this stopped working because that particular - tp_version_tag gets modified during interpreter startup. It's - arguably a bad idea before 3.12 too, but again we can't change - that because someone might use a mixture of cffi embedded - modules, and no-one reported a bug so far. In Python >= 3.12 - we go instead for PyCapsuleType.tp_as_buffer, which is supposed - to always be NULL. We write to it temporarily a pointer to - a struct full of NULLs, which is semantically the same. 
- */ - -#ifdef WITH_THREAD -# if PY_VERSION_HEX < 0x03080000 - char *volatile *lock = (char *volatile *)_PyParser_TokenNames; - char *old_value, *locked_value; - - while (1) { /* spin loop */ - old_value = *lock; - locked_value = old_value + 1; - if (old_value[0] == 'E') { - assert(old_value[1] == 'N'); - if (cffi_compare_and_swap(lock, old_value, locked_value)) - break; - } - else { - assert(old_value[0] == 'N'); - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: PyEval_InitThreads() should be very fast, and - this is only run at start-up anyway. */ - } - } -# else -# if PY_VERSION_HEX < 0x030C0000 - int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; - int old_value, locked_value = -42; - assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); -# else - static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; - empty_buffer_procs.mark = -42; - PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) - &PyCapsule_Type.tp_as_buffer; - PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; -# endif - - while (1) { /* spin loop */ - old_value = *lock; - if (old_value == 0) { - if (cffi_compare_and_swap(lock, old_value, locked_value)) - break; - } - else { -# if PY_VERSION_HEX < 0x030C0000 - assert(old_value == locked_value); -# else - /* The pointer should point to a possibly different - empty_buffer_procs from another C extension module */ - assert(((struct ebp_s *)old_value)->mark == -42); -# endif - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: PyEval_InitThreads() should be very fast, and - this is only run at start-up anyway. */ - } - } -# endif -#endif - - /* call Py_InitializeEx() */ - if (!Py_IsInitialized()) { - _cffi_py_initialize(); -#if PY_VERSION_HEX < 0x03070000 - PyEval_InitThreads(); -#endif - PyEval_SaveThread(); /* release the GIL */ - /* the returned tstate must be the one that has been stored into the - autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ - } - else { -#if PY_VERSION_HEX < 0x03070000 - /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ - PyGILState_STATE state = PyGILState_Ensure(); - PyEval_InitThreads(); - PyGILState_Release(state); -#endif - } - -#ifdef WITH_THREAD - /* release the lock */ - while (!cffi_compare_and_swap(lock, locked_value, old_value)) - ; -#endif - - return 0; -} - -/********** end CPython-specific section **********/ - - -#else - - -/********** PyPy-specific section **********/ - -PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ - -static struct _cffi_pypy_init_s { - const char *name; - void *func; /* function pointer */ - const char *code; -} _cffi_pypy_init = { - _CFFI_MODULE_NAME, - _CFFI_PYTHON_STARTUP_FUNC, - _CFFI_PYTHON_STARTUP_CODE, -}; - -extern int pypy_carefully_make_gil(const char *); -extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); - -static int _cffi_carefully_make_gil(void) -{ - return pypy_carefully_make_gil(_CFFI_MODULE_NAME); -} - -static int _cffi_initialize_python(void) -{ - return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); -} - -/********** end PyPy-specific section **********/ - - -#endif - - -#ifdef __GNUC__ -__attribute__((noinline)) -#endif -static _cffi_call_python_fnptr _cffi_start_python(void) -{ - /* Delicate logic to initialize Python. This function can be - called multiple times concurrently, e.g. 
when the process calls - its first ``extern "Python"`` functions in multiple threads at - once. It can also be called recursively, in which case we must - ignore it. We also have to consider what occurs if several - different cffi-based extensions reach this code in parallel - threads---it is a different copy of the code, then, and we - can't have any shared global variable unless it comes from - 'libpythonX.Y.so'. - - Idea: - - * _cffi_carefully_make_gil(): "carefully" call - PyEval_InitThreads() (possibly with Py_InitializeEx() first). - - * then we use a (local) custom lock to make sure that a call to this - cffi-based extension will wait if another call to the *same* - extension is running the initialization in another thread. - It is reentrant, so that a recursive call will not block, but - only one from a different thread. - - * then we grab the GIL and (Python 2) we call Py_InitializeEx(). - At this point, concurrent calls to Py_InitializeEx() are not - possible: we have the GIL. - - * do the rest of the specific initialization, which may - temporarily release the GIL but not the custom lock. - Only release the custom lock when we are done. - */ - static char called = 0; - - if (_cffi_carefully_make_gil() != 0) - return NULL; - - _cffi_acquire_reentrant_mutex(); - - /* Here the GIL exists, but we don't have it. We're only protected - from concurrency by the reentrant mutex. */ - - /* This file only initializes the embedded module once, the first - time this is called, even if there are subinterpreters. */ - if (!called) { - called = 1; /* invoke _cffi_initialize_python() only once, - but don't set '_cffi_call_python' right now, - otherwise concurrent threads won't call - this function at all (we need them to wait) */ - if (_cffi_initialize_python() == 0) { - /* now initialization is finished. Switch to the fast-path. */ - - /* We would like nobody to see the new value of - '_cffi_call_python' without also seeing the rest of the - data initialized. However, this is not possible. But - the new value of '_cffi_call_python' is the function - 'cffi_call_python()' from _cffi_backend. So: */ - cffi_write_barrier(); - /* ^^^ we put a write barrier here, and a corresponding - read barrier at the start of cffi_call_python(). This - ensures that after that read barrier, we see everything - done here before the write barrier. - */ - - assert(_cffi_call_python_org != NULL); - _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; - } - else { - /* initialization failed. Reset this to NULL, even if it was - already set to some other value. Future calls to - _cffi_start_python() are still forced to occur, and will - always return NULL from now on. */ - _cffi_call_python_org = NULL; - } - } - - _cffi_release_reentrant_mutex(); - - return (_cffi_call_python_fnptr)_cffi_call_python_org; -} - -static -void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) -{ - _cffi_call_python_fnptr fnptr; - int current_err = errno; -#ifdef _MSC_VER - int current_lasterr = GetLastError(); -#endif - fnptr = _cffi_start_python(); - if (fnptr == NULL) { - fprintf(stderr, "function %s() called, but initialization code " - "failed. Returning 0.\n", externpy->name); - memset(args, 0, externpy->size_of_result); - } -#ifdef _MSC_VER - SetLastError(current_lasterr); -#endif - errno = current_err; - - if (fnptr != NULL) - fnptr(externpy, args); -} - - -/* The cffi_start_python() function makes sure Python is initialized - and our cffi module is set up. 
It can be called manually from the - user C code. The same effect is obtained automatically from any - dll-exported ``extern "Python"`` function. This function returns - -1 if initialization failed, 0 if all is OK. */ -_CFFI_UNUSED_FN -static int cffi_start_python(void) -{ - if (_cffi_call_python == &_cffi_start_and_call_python) { - if (_cffi_start_python() == NULL) - return -1; - } - cffi_read_barrier(); - return 0; -} - -#undef cffi_compare_and_swap -#undef cffi_write_barrier -#undef cffi_read_barrier - -#ifdef __cplusplus -} -#endif diff --git a/venv/Lib/site-packages/cffi/_imp_emulation.py b/venv/Lib/site-packages/cffi/_imp_emulation.py deleted file mode 100644 index 136abdd..0000000 --- a/venv/Lib/site-packages/cffi/_imp_emulation.py +++ /dev/null @@ -1,83 +0,0 @@ - -try: - # this works on Python < 3.12 - from imp import * - -except ImportError: - # this is a limited emulation for Python >= 3.12. - # Note that this is used only for tests or for the old ffi.verify(). - # This is copied from the source code of Python 3.11. - - from _imp import (acquire_lock, release_lock, - is_builtin, is_frozen) - - from importlib._bootstrap import _load - - from importlib import machinery - import os - import sys - import tokenize - - SEARCH_ERROR = 0 - PY_SOURCE = 1 - PY_COMPILED = 2 - C_EXTENSION = 3 - PY_RESOURCE = 4 - PKG_DIRECTORY = 5 - C_BUILTIN = 6 - PY_FROZEN = 7 - PY_CODERESOURCE = 8 - IMP_HOOK = 9 - - def get_suffixes(): - extensions = [(s, 'rb', C_EXTENSION) - for s in machinery.EXTENSION_SUFFIXES] - source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] - bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] - return extensions + source + bytecode - - def find_module(name, path=None): - if not isinstance(name, str): - raise TypeError("'name' must be a str, not {}".format(type(name))) - elif not isinstance(path, (type(None), list)): - # Backwards-compatibility - raise RuntimeError("'path' must be None or a list, " - "not {}".format(type(path))) - - if path is None: - if is_builtin(name): - return None, None, ('', '', C_BUILTIN) - elif is_frozen(name): - return None, None, ('', '', PY_FROZEN) - else: - path = sys.path - - for entry in path: - package_directory = os.path.join(entry, name) - for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: - package_file_name = '__init__' + suffix - file_path = os.path.join(package_directory, package_file_name) - if os.path.isfile(file_path): - return None, package_directory, ('', '', PKG_DIRECTORY) - for suffix, mode, type_ in get_suffixes(): - file_name = name + suffix - file_path = os.path.join(entry, file_name) - if os.path.isfile(file_path): - break - else: - continue - break # Break out of outer loop when breaking out of inner loop. 
- else: - raise ImportError(name, name=name) - - encoding = None - if 'b' not in mode: - with open(file_path, 'rb') as file: - encoding = tokenize.detect_encoding(file.readline)[0] - file = open(file_path, mode, encoding=encoding) - return file, file_path, (suffix, mode, type_) - - def load_dynamic(name, path, file=None): - loader = machinery.ExtensionFileLoader(name, path) - spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) - return _load(spec) diff --git a/venv/Lib/site-packages/cffi/_shimmed_dist_utils.py b/venv/Lib/site-packages/cffi/_shimmed_dist_utils.py deleted file mode 100644 index c3d2312..0000000 --- a/venv/Lib/site-packages/cffi/_shimmed_dist_utils.py +++ /dev/null @@ -1,45 +0,0 @@ -""" -Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful -error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. - -This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. -""" -import sys - -try: - # import setuptools first; this is the most robust way to ensure its embedded distutils is available - # (the .pth shim should usually work, but this is even more robust) - import setuptools -except Exception as ex: - if sys.version_info >= (3, 12): - # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal - raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex - - # silently ignore on older Pythons (support fallback to stdlib distutils where available) -else: - del setuptools - -try: - # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils - from distutils import log, sysconfig - from distutils.ccompiler import CCompiler - from distutils.command.build_ext import build_ext - from distutils.core import Distribution, Extension - from distutils.dir_util import mkpath - from distutils.errors import DistutilsSetupError, CompileError, LinkError - from distutils.log import set_threshold, set_verbosity - - if sys.platform == 'win32': - try: - # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` - from distutils.msvc9compiler import MSVCCompiler - except ImportError: - MSVCCompiler = None -except Exception as ex: - if sys.version_info >= (3, 12): - raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex - - # anything older, just let the underlying distutils import error fly - raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex - -del sys diff --git a/venv/Lib/site-packages/cffi/api.py b/venv/Lib/site-packages/cffi/api.py deleted file mode 100644 index 5a474f3..0000000 --- a/venv/Lib/site-packages/cffi/api.py +++ /dev/null @@ -1,967 +0,0 @@ -import sys, types -from .lock import allocate_lock -from .error import CDefError -from . import model - -try: - callable -except NameError: - # Python 3.1 - from collections import Callable - callable = lambda x: isinstance(x, Callable) - -try: - basestring -except NameError: - # Python 3.x - basestring = str - -_unspecified = object() - - - -class FFI(object): - r''' - The main top-level class that you instantiate once, or once per module. 
- - Example usage: - - ffi = FFI() - ffi.cdef(""" - int printf(const char *, ...); - """) - - C = ffi.dlopen(None) # standard library - -or- - C = ffi.verify() # use a C compiler: verify the decl above is right - - C.printf("hello, %s!\n", ffi.new("char[]", "world")) - ''' - - def __init__(self, backend=None): - """Create an FFI instance. The 'backend' argument is used to - select a non-default backend, mostly for tests. - """ - if backend is None: - # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with - # _cffi_backend.so compiled. - import _cffi_backend as backend - from . import __version__ - if backend.__version__ != __version__: - # bad version! Try to be as explicit as possible. - if hasattr(backend, '__file__'): - # CPython - raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( - __version__, __file__, - backend.__version__, backend.__file__)) - else: - # PyPy - raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( - __version__, __file__, backend.__version__)) - # (If you insist you can also try to pass the option - # 'backend=backend_ctypes.CTypesBackend()', but don't - # rely on it! It's probably not going to work well.) - - from . import cparser - self._backend = backend - self._lock = allocate_lock() - self._parser = cparser.Parser() - self._cached_btypes = {} - self._parsed_types = types.ModuleType('parsed_types').__dict__ - self._new_types = types.ModuleType('new_types').__dict__ - self._function_caches = [] - self._libraries = [] - self._cdefsources = [] - self._included_ffis = [] - self._windows_unicode = None - self._init_once_cache = {} - self._cdef_version = None - self._embedding = None - self._typecache = model.get_typecache(backend) - if hasattr(backend, 'set_ffi'): - backend.set_ffi(self) - for name in list(backend.__dict__): - if name.startswith('RTLD_'): - setattr(self, name, getattr(backend, name)) - # - with self._lock: - self.BVoidP = self._get_cached_btype(model.voidp_type) - self.BCharA = self._get_cached_btype(model.char_array_type) - if isinstance(backend, types.ModuleType): - # _cffi_backend: attach these constants to the class - if not hasattr(FFI, 'NULL'): - FFI.NULL = self.cast(self.BVoidP, 0) - FFI.CData, FFI.CType = backend._get_types() - else: - # ctypes backend: attach these constants to the instance - self.NULL = self.cast(self.BVoidP, 0) - self.CData, self.CType = backend._get_types() - self.buffer = backend.buffer - - def cdef(self, csource, override=False, packed=False, pack=None): - """Parse the given C source. This registers all declared functions, - types, and global variables. The functions and global variables can - then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. - The types can be used in 'ffi.new()' and other functions. - If 'packed' is specified as True, all structs declared inside this - cdef are packed, i.e. laid out without any field alignment at all. - Alternatively, 'pack' can be a small integer, and requests for - alignment greater than that are ignored (pack=1 is equivalent to - packed=True). 
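As a concrete illustration of the cdef()/dlopen() workflow and of the packed option described above (ABI mode; a minimal sketch, where strlen is just a convenient libc symbol and dlopen(None) only works on POSIX):

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("size_t strlen(const char *s);")
    libc = ffi.dlopen(None)                 # the standard C library (POSIX)
    assert libc.strlen(b"hello") == 5

    # packed=True removes field padding for structs declared in that cdef()
    ffi.cdef("struct header { char tag; uint32_t length; };", packed=True)
    assert ffi.sizeof("struct header") == 5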
- """ - self._cdef(csource, override=override, packed=packed, pack=pack) - - def embedding_api(self, csource, packed=False, pack=None): - self._cdef(csource, packed=packed, pack=pack, dllexport=True) - if self._embedding is None: - self._embedding = '' - - def _cdef(self, csource, override=False, **options): - if not isinstance(csource, str): # unicode, on Python 2 - if not isinstance(csource, basestring): - raise TypeError("cdef() argument must be a string") - csource = csource.encode('ascii') - with self._lock: - self._cdef_version = object() - self._parser.parse(csource, override=override, **options) - self._cdefsources.append(csource) - if override: - for cache in self._function_caches: - cache.clear() - finishlist = self._parser._recomplete - if finishlist: - self._parser._recomplete = [] - for tp in finishlist: - tp.finish_backend_type(self, finishlist) - - def dlopen(self, name, flags=0): - """Load and return a dynamic library identified by 'name'. - The standard C library can be loaded by passing None. - Note that functions and types declared by 'ffi.cdef()' are not - linked to a particular library, just like C headers; in the - library we only look for the actual (untyped) symbols. - """ - if not (isinstance(name, basestring) or - name is None or - isinstance(name, self.CData)): - raise TypeError("dlopen(name): name must be a file name, None, " - "or an already-opened 'void *' handle") - with self._lock: - lib, function_cache = _make_ffi_library(self, name, flags) - self._function_caches.append(function_cache) - self._libraries.append(lib) - return lib - - def dlclose(self, lib): - """Close a library obtained with ffi.dlopen(). After this call, - access to functions or variables from the library will fail - (possibly with a segmentation fault). - """ - type(lib).__cffi_close__(lib) - - def _typeof_locked(self, cdecl): - # call me with the lock! - key = cdecl - if key in self._parsed_types: - return self._parsed_types[key] - # - if not isinstance(cdecl, str): # unicode, on Python 2 - cdecl = cdecl.encode('ascii') - # - type = self._parser.parse_type(cdecl) - really_a_function_type = type.is_raw_function - if really_a_function_type: - type = type.as_function_pointer() - btype = self._get_cached_btype(type) - result = btype, really_a_function_type - self._parsed_types[key] = result - return result - - def _typeof(self, cdecl, consider_function_as_funcptr=False): - # string -> ctype object - try: - result = self._parsed_types[cdecl] - except KeyError: - with self._lock: - result = self._typeof_locked(cdecl) - # - btype, really_a_function_type = result - if really_a_function_type and not consider_function_as_funcptr: - raise CDefError("the type %r is a function type, not a " - "pointer-to-function type" % (cdecl,)) - return btype - - def typeof(self, cdecl): - """Parse the C type given as a string and return the - corresponding object. - It can also be used on 'cdata' instance to get its C type. - """ - if isinstance(cdecl, basestring): - return self._typeof(cdecl) - if isinstance(cdecl, self.CData): - return self._backend.typeof(cdecl) - if isinstance(cdecl, types.BuiltinFunctionType): - res = _builtin_function_type(cdecl) - if res is not None: - return res - if (isinstance(cdecl, types.FunctionType) - and hasattr(cdecl, '_cffi_base_type')): - with self._lock: - return self._get_cached_btype(cdecl._cffi_base_type) - raise TypeError(type(cdecl)) - - def sizeof(self, cdecl): - """Return the size in bytes of the argument. It can be a - string naming a C type, or a 'cdata' instance. 
- """ - if isinstance(cdecl, basestring): - BType = self._typeof(cdecl) - return self._backend.sizeof(BType) - else: - return self._backend.sizeof(cdecl) - - def alignof(self, cdecl): - """Return the natural alignment size in bytes of the C type - given as a string. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.alignof(cdecl) - - def offsetof(self, cdecl, *fields_or_indexes): - """Return the offset of the named field inside the given - structure or array, which must be given as a C type name. - You can give several field names in case of nested structures. - You can also give numeric values which correspond to array - items, in case of an array type. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._typeoffsetof(cdecl, *fields_or_indexes)[1] - - def new(self, cdecl, init=None): - """Allocate an instance according to the specified C type and - return a pointer to it. The specified C type must be either a - pointer or an array: ``new('X *')`` allocates an X and returns - a pointer to it, whereas ``new('X[n]')`` allocates an array of - n X'es and returns an array referencing it (which works - mostly like a pointer, like in C). You can also use - ``new('X[]', n)`` to allocate an array of a non-constant - length n. - - The memory is initialized following the rules of declaring a - global variable in C: by default it is zero-initialized, but - an explicit initializer can be given which can be used to - fill all or part of the memory. - - When the returned object goes out of scope, the memory - is freed. In other words the returned object has - ownership of the value of type 'cdecl' that it points to. This - means that the raw data can be used as long as this object is - kept alive, but must not be used for a longer time. Be careful - about that when copying the pointer to the memory somewhere - else, e.g. into another structure. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.newp(cdecl, init) - - def new_allocator(self, alloc=None, free=None, - should_clear_after_alloc=True): - """Return a new allocator, i.e. a function that behaves like ffi.new() - but uses the provided low-level 'alloc' and 'free' functions. - - 'alloc' is called with the size as argument. If it returns NULL, a - MemoryError is raised. 'free' is called with the result of 'alloc' - as argument. Both can be either Python function or directly C - functions. If 'free' is None, then no free function is called. - If both 'alloc' and 'free' are None, the default is used. - - If 'should_clear_after_alloc' is set to False, then the memory - returned by 'alloc' is assumed to be already cleared (or you are - fine with garbage); otherwise CFFI will clear it. - """ - compiled_ffi = self._backend.FFI() - allocator = compiled_ffi.new_allocator(alloc, free, - should_clear_after_alloc) - def allocate(cdecl, init=None): - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return allocator(cdecl, init) - return allocate - - def cast(self, cdecl, source): - """Similar to a C cast: returns an instance of the named C - type initialized with the given 'source'. The source is - casted between integers or pointers of any type. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.cast(cdecl, source) - - def string(self, cdata, maxlen=-1): - """Return a Python string (or unicode string) from the 'cdata'. 
- If 'cdata' is a pointer or array of characters or bytes, returns - the null-terminated string. The returned string extends until - the first null character, or at most 'maxlen' characters. If - 'cdata' is an array then 'maxlen' defaults to its length. - - If 'cdata' is a pointer or array of wchar_t, returns a unicode - string following the same rules. - - If 'cdata' is a single character or byte or a wchar_t, returns - it as a string or unicode string. - - If 'cdata' is an enum, returns the value of the enumerator as a - string, or 'NUMBER' if the value is out of range. - """ - return self._backend.string(cdata, maxlen) - - def unpack(self, cdata, length): - """Unpack an array of C data of the given length, - returning a Python string/unicode/list. - - If 'cdata' is a pointer to 'char', returns a byte string. - It does not stop at the first null. This is equivalent to: - ffi.buffer(cdata, length)[:] - - If 'cdata' is a pointer to 'wchar_t', returns a unicode string. - 'length' is measured in wchar_t's; it is not the size in bytes. - - If 'cdata' is a pointer to anything else, returns a list of - 'length' items. This is a faster equivalent to: - [cdata[i] for i in range(length)] - """ - return self._backend.unpack(cdata, length) - - #def buffer(self, cdata, size=-1): - # """Return a read-write buffer object that references the raw C data - # pointed to by the given 'cdata'. The 'cdata' must be a pointer or - # an array. Can be passed to functions expecting a buffer, or directly - # manipulated with: - # - # buf[:] get a copy of it in a regular string, or - # buf[idx] as a single character - # buf[:] = ... - # buf[idx] = ... change the content - # """ - # note that 'buffer' is a type, set on this instance by __init__ - - def from_buffer(self, cdecl, python_buffer=_unspecified, - require_writable=False): - """Return a cdata of the given type pointing to the data of the - given Python object, which must support the buffer interface. - Note that this is not meant to be used on the built-in types - str or unicode (you can build 'char[]' arrays explicitly) - but only on objects containing large quantities of raw data - in some other format, like 'array.array' or numpy arrays. - - The first argument is optional and default to 'char[]'. - """ - if python_buffer is _unspecified: - cdecl, python_buffer = self.BCharA, cdecl - elif isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.from_buffer(cdecl, python_buffer, - require_writable) - - def memmove(self, dest, src, n): - """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. - - Like the C function memmove(), the memory areas may overlap; - apart from that it behaves like the C function memcpy(). - - 'src' can be any cdata ptr or array, or any Python buffer object. - 'dest' can be any cdata ptr or array, or a writable Python buffer - object. The size to copy, 'n', is always measured in bytes. - - Unlike other methods, this one supports all Python buffer including - byte strings and bytearrays---but it still does not support - non-contiguous buffers. - """ - return self._backend.memmove(dest, src, n) - - def callback(self, cdecl, python_callable=None, error=None, onerror=None): - """Return a callback object or a decorator making such a - callback object. 'cdecl' must name a C function pointer type. - The callback invokes the specified 'python_callable' (which may - be provided either directly or via a decorator). 
Important: the - callback object must be manually kept alive for as long as the - callback may be invoked from the C level. - """ - def callback_decorator_wrap(python_callable): - if not callable(python_callable): - raise TypeError("the 'python_callable' argument " - "is not callable") - return self._backend.callback(cdecl, python_callable, - error, onerror) - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) - if python_callable is None: - return callback_decorator_wrap # decorator mode - else: - return callback_decorator_wrap(python_callable) # direct mode - - def getctype(self, cdecl, replace_with=''): - """Return a string giving the C type 'cdecl', which may be itself - a string or a object. If 'replace_with' is given, it gives - extra text to append (or insert for more complicated C types), like - a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - replace_with = replace_with.strip() - if (replace_with.startswith('*') - and '&[' in self._backend.getcname(cdecl, '&')): - replace_with = '(%s)' % replace_with - elif replace_with and not replace_with[0] in '[(': - replace_with = ' ' + replace_with - return self._backend.getcname(cdecl, replace_with) - - def gc(self, cdata, destructor, size=0): - """Return a new cdata object that points to the same - data. Later, when this new cdata object is garbage-collected, - 'destructor(old_cdata_object)' will be called. - - The optional 'size' gives an estimate of the size, used to - trigger the garbage collection more eagerly. So far only used - on PyPy. It tells the GC that the returned object keeps alive - roughly 'size' bytes of external memory. - """ - return self._backend.gcp(cdata, destructor, size) - - def _get_cached_btype(self, type): - assert self._lock.acquire(False) is False - # call me with the lock! - try: - BType = self._cached_btypes[type] - except KeyError: - finishlist = [] - BType = type.get_cached_btype(self, finishlist) - for type in finishlist: - type.finish_backend_type(self, finishlist) - return BType - - def verify(self, source='', tmpdir=None, **kwargs): - """Verify that the current ffi signatures compile on this - machine, and return a dynamic library object. The dynamic - library can be used to call functions and access global - variables declared in this 'ffi'. The library is compiled - by the C compiler: it gives you C-level API compatibility - (including calling macros). This is unlike 'ffi.dlopen()', - which requires binary compatibility in the signatures. - """ - from .verifier import Verifier, _caller_dir_pycache - # - # If set_unicode(True) was called, insert the UNICODE and - # _UNICODE macro declarations - if self._windows_unicode: - self._apply_windows_unicode(kwargs) - # - # Set the tmpdir here, and not in Verifier.__init__: it picks - # up the caller's directory, which we want to be the caller of - # ffi.verify(), as opposed to the caller of Veritier(). - tmpdir = tmpdir or _caller_dir_pycache() - # - # Make a Verifier() and use it to load the library. - self.verifier = Verifier(self, source, tmpdir, **kwargs) - lib = self.verifier.load_library() - # - # Save the loaded library for keep-alive purposes, even - # if the caller doesn't keep it alive itself (it should). 
- self._libraries.append(lib) - return lib - - def _get_errno(self): - return self._backend.get_errno() - def _set_errno(self, errno): - self._backend.set_errno(errno) - errno = property(_get_errno, _set_errno, None, - "the value of 'errno' from/to the C calls") - - def getwinerror(self, code=-1): - return self._backend.getwinerror(code) - - def _pointer_to(self, ctype): - with self._lock: - return model.pointer_cache(self, ctype) - - def addressof(self, cdata, *fields_or_indexes): - """Return the address of a . - If 'fields_or_indexes' are given, returns the address of that - field or array item in the structure or array, recursively in - case of nested structures. - """ - try: - ctype = self._backend.typeof(cdata) - except TypeError: - if '__addressof__' in type(cdata).__dict__: - return type(cdata).__addressof__(cdata, *fields_or_indexes) - raise - if fields_or_indexes: - ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) - else: - if ctype.kind == "pointer": - raise TypeError("addressof(pointer)") - offset = 0 - ctypeptr = self._pointer_to(ctype) - return self._backend.rawaddressof(ctypeptr, cdata, offset) - - def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): - ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) - for field1 in fields_or_indexes: - ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) - offset += offset1 - return ctype, offset - - def include(self, ffi_to_include): - """Includes the typedefs, structs, unions and enums defined - in another FFI instance. Usage is similar to a #include in C, - where a part of the program might include types defined in - another part for its own usage. Note that the include() - method has no effect on functions, constants and global - variables, which must anyway be accessed directly from the - lib object returned by the original FFI instance. - """ - if not isinstance(ffi_to_include, FFI): - raise TypeError("ffi.include() expects an argument that is also of" - " type cffi.FFI, not %r" % ( - type(ffi_to_include).__name__,)) - if ffi_to_include is self: - raise ValueError("self.include(self)") - with ffi_to_include._lock: - with self._lock: - self._parser.include(ffi_to_include._parser) - self._cdefsources.append('[') - self._cdefsources.extend(ffi_to_include._cdefsources) - self._cdefsources.append(']') - self._included_ffis.append(ffi_to_include) - - def new_handle(self, x): - return self._backend.newp_handle(self.BVoidP, x) - - def from_handle(self, x): - return self._backend.from_handle(x) - - def release(self, x): - self._backend.release(x) - - def set_unicode(self, enabled_flag): - """Windows: if 'enabled_flag' is True, enable the UNICODE and - _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR - to be (pointers to) wchar_t. If 'enabled_flag' is False, - declare these types to be (pointers to) plain 8-bit characters. - This is mostly for backward compatibility; you usually want True. 
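The keep-alive requirement from the callback() docstring above, as a minimal sketch (the callback name is illustrative; assumes an ffi = FFI() instance):

    @ffi.callback("int(int, int)")
    def my_add(x, y):
        return x + y

    # my_add can now be passed wherever C expects an 'int (*)(int, int)'.
    # Keep a Python reference to it for as long as C may still call it,
    # for example in a module-level container:
    _keep_alive = [my_add]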
- """ - if self._windows_unicode is not None: - raise ValueError("set_unicode() can only be called once") - enabled_flag = bool(enabled_flag) - if enabled_flag: - self.cdef("typedef wchar_t TBYTE;" - "typedef wchar_t TCHAR;" - "typedef const wchar_t *LPCTSTR;" - "typedef const wchar_t *PCTSTR;" - "typedef wchar_t *LPTSTR;" - "typedef wchar_t *PTSTR;" - "typedef TBYTE *PTBYTE;" - "typedef TCHAR *PTCHAR;") - else: - self.cdef("typedef char TBYTE;" - "typedef char TCHAR;" - "typedef const char *LPCTSTR;" - "typedef const char *PCTSTR;" - "typedef char *LPTSTR;" - "typedef char *PTSTR;" - "typedef TBYTE *PTBYTE;" - "typedef TCHAR *PTCHAR;") - self._windows_unicode = enabled_flag - - def _apply_windows_unicode(self, kwds): - defmacros = kwds.get('define_macros', ()) - if not isinstance(defmacros, (list, tuple)): - raise TypeError("'define_macros' must be a list or tuple") - defmacros = list(defmacros) + [('UNICODE', '1'), - ('_UNICODE', '1')] - kwds['define_macros'] = defmacros - - def _apply_embedding_fix(self, kwds): - # must include an argument like "-lpython2.7" for the compiler - def ensure(key, value): - lst = kwds.setdefault(key, []) - if value not in lst: - lst.append(value) - # - if '__pypy__' in sys.builtin_module_names: - import os - if sys.platform == "win32": - # we need 'libpypy-c.lib'. Current distributions of - # pypy (>= 4.1) contain it as 'libs/python27.lib'. - pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'libs')) - else: - # we need 'libpypy-c.{so,dylib}', which should be by - # default located in 'sys.prefix/bin' for installed - # systems. - if sys.version_info < (3,): - pythonlib = "pypy-c" - else: - pythonlib = "pypy3-c" - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - # On uninstalled pypy's, the libpypy-c is typically found in - # .../pypy/goal/. - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) - else: - if sys.platform == "win32": - template = "python%d%d" - if hasattr(sys, 'gettotalrefcount'): - template += '_d' - else: - try: - import sysconfig - except ImportError: # 2.6 - from cffi._shimmed_dist_utils import sysconfig - template = "python%d.%d" - if sysconfig.get_config_var('DEBUG_EXT'): - template += sysconfig.get_config_var('DEBUG_EXT') - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - if hasattr(sys, 'abiflags'): - pythonlib += sys.abiflags - ensure('libraries', pythonlib) - if sys.platform == "win32": - ensure('extra_link_args', '/MANIFEST') - - def set_source(self, module_name, source, source_extension='.c', **kwds): - import os - if hasattr(self, '_assigned_source'): - raise ValueError("set_source() cannot be called several times " - "per ffi object") - if not isinstance(module_name, basestring): - raise TypeError("'module_name' must be a string") - if os.sep in module_name or (os.altsep and os.altsep in module_name): - raise ValueError("'module_name' must not contain '/': use a dotted " - "name to make a 'package.module' location") - self._assigned_source = (str(module_name), source, - source_extension, kwds) - - def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, - source_extension='.c', **kwds): - from . 
import pkgconfig - if not isinstance(pkgconfig_libs, list): - raise TypeError("the pkgconfig_libs argument must be a list " - "of package names") - kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) - pkgconfig.merge_flags(kwds, kwds2) - self.set_source(module_name, source, source_extension, **kwds) - - def distutils_extension(self, tmpdir='build', verbose=True): - from cffi._shimmed_dist_utils import mkpath - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored - return self.verifier.get_extension() - raise ValueError("set_source() must be called before" - " distutils_extension()") - module_name, source, source_extension, kwds = self._assigned_source - if source is None: - raise TypeError("distutils_extension() is only for C extension " - "modules, not for dlopen()-style pure Python " - "modules") - mkpath(tmpdir) - ext, updated = recompile(self, module_name, - source, tmpdir=tmpdir, extradir=tmpdir, - source_extension=source_extension, - call_c_compiler=False, **kwds) - if verbose: - if updated: - sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) - else: - sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) - return ext - - def emit_c_code(self, filename): - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before emit_c_code()") - module_name, source, source_extension, kwds = self._assigned_source - if source is None: - raise TypeError("emit_c_code() is only for C extension modules, " - "not for dlopen()-style pure Python modules") - recompile(self, module_name, source, - c_file=filename, call_c_compiler=False, - uses_ffiplatform=False, **kwds) - - def emit_python_code(self, filename): - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before emit_c_code()") - module_name, source, source_extension, kwds = self._assigned_source - if source is not None: - raise TypeError("emit_python_code() is only for dlopen()-style " - "pure Python modules, not for C extension modules") - recompile(self, module_name, source, - c_file=filename, call_c_compiler=False, - uses_ffiplatform=False, **kwds) - - def compile(self, tmpdir='.', verbose=0, target=None, debug=None): - """The 'target' argument gives the final file name of the - compiled DLL. Use '*' to force distutils' choice, suitable for - regular CPython C API modules. Use a file name ending in '.*' - to ask for the system's default extension for dynamic libraries - (.so/.dll/.dylib). - - The default is '*' when building a non-embedded C API extension, - and (module_name + '.*') when building an embedded library. - """ - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before compile()") - module_name, source, source_extension, kwds = self._assigned_source - return recompile(self, module_name, source, tmpdir=tmpdir, - target=target, source_extension=source_extension, - compiler_verbose=verbose, debug=debug, **kwds) - - def init_once(self, func, tag): - # Read _init_once_cache[tag], which is either (False, lock) if - # we're calling the function now in some thread, or (True, result). - # Don't call setdefault() in most cases, to avoid allocating and - # immediately freeing a lock; but still use setdefaut() to avoid - # races. 
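The (False, lock) / (True, result) cache protocol described in the comment above, seen from the caller's side (a sketch; _open_database is a placeholder and an ffi = FFI() instance is assumed):

    def _open_database():
        # stands in for expensive set-up that must run at most once,
        # even if several threads race into init_once() at the same time
        return {"connected": True}

    db1 = ffi.init_once(_open_database, "db")
    db2 = ffi.init_once(_open_database, "db")   # same tag: the cached result is returned
    assert db1 is db2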
- try: - x = self._init_once_cache[tag] - except KeyError: - x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) - # Common case: we got (True, result), so we return the result. - if x[0]: - return x[1] - # Else, it's a lock. Acquire it to serialize the following tests. - with x[1]: - # Read again from _init_once_cache the current status. - x = self._init_once_cache[tag] - if x[0]: - return x[1] - # Call the function and store the result back. - result = func() - self._init_once_cache[tag] = (True, result) - return result - - def embedding_init_code(self, pysource): - if self._embedding: - raise ValueError("embedding_init_code() can only be called once") - # fix 'pysource' before it gets dumped into the C file: - # - remove empty lines at the beginning, so it starts at "line 1" - # - dedent, if all non-empty lines are indented - # - check for SyntaxErrors - import re - match = re.match(r'\s*\n', pysource) - if match: - pysource = pysource[match.end():] - lines = pysource.splitlines() or [''] - prefix = re.match(r'\s*', lines[0]).group() - for i in range(1, len(lines)): - line = lines[i] - if line.rstrip(): - while not line.startswith(prefix): - prefix = prefix[:-1] - i = len(prefix) - lines = [line[i:]+'\n' for line in lines] - pysource = ''.join(lines) - # - compile(pysource, "cffi_init", "exec") - # - self._embedding = pysource - - def def_extern(self, *args, **kwds): - raise ValueError("ffi.def_extern() is only available on API-mode FFI " - "objects") - - def list_types(self): - """Returns the user type names known to this FFI instance. - This returns a tuple containing three lists of names: - (typedef_names, names_of_structs, names_of_unions) - """ - typedefs = [] - structs = [] - unions = [] - for key in self._parser._declarations: - if key.startswith('typedef '): - typedefs.append(key[8:]) - elif key.startswith('struct '): - structs.append(key[7:]) - elif key.startswith('union '): - unions.append(key[6:]) - typedefs.sort() - structs.sort() - unions.sort() - return (typedefs, structs, unions) - - -def _load_backend_lib(backend, name, flags): - import os - if not isinstance(name, basestring): - if sys.platform != "win32" or name is not None: - return backend.load_library(name, flags) - name = "c" # Windows: load_library(None) fails, but this works - # on Python 2 (backward compatibility hack only) - first_error = None - if '.' in name or '/' in name or os.sep in name: - try: - return backend.load_library(name, flags) - except OSError as e: - first_error = e - import ctypes.util - path = ctypes.util.find_library(name) - if path is None: - if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): - raise OSError("dlopen(None) cannot work on Windows for Python 3 " - "(see http://bugs.python.org/issue23606)") - msg = ("ctypes.util.find_library() did not manage " - "to locate a library called %r" % (name,)) - if first_error is not None: - msg = "%s. 
Additionally, %s" % (first_error, msg) - raise OSError(msg) - return backend.load_library(path, flags) - -def _make_ffi_library(ffi, libname, flags): - backend = ffi._backend - backendlib = _load_backend_lib(backend, libname, flags) - # - def accessor_function(name): - key = 'function ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - value = backendlib.load_function(BType, name) - library.__dict__[name] = value - # - def accessor_variable(name): - key = 'variable ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - read_variable = backendlib.read_variable - write_variable = backendlib.write_variable - setattr(FFILibrary, name, property( - lambda self: read_variable(BType, name), - lambda self, value: write_variable(BType, name, value))) - # - def addressof_var(name): - try: - return addr_variables[name] - except KeyError: - with ffi._lock: - if name not in addr_variables: - key = 'variable ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - if BType.kind != 'array': - BType = model.pointer_cache(ffi, BType) - p = backendlib.load_function(BType, name) - addr_variables[name] = p - return addr_variables[name] - # - def accessor_constant(name): - raise NotImplementedError("non-integer constant '%s' cannot be " - "accessed from a dlopen() library" % (name,)) - # - def accessor_int_constant(name): - library.__dict__[name] = ffi._parser._int_constants[name] - # - accessors = {} - accessors_version = [False] - addr_variables = {} - # - def update_accessors(): - if accessors_version[0] is ffi._cdef_version: - return - # - for key, (tp, _) in ffi._parser._declarations.items(): - if not isinstance(tp, model.EnumType): - tag, name = key.split(' ', 1) - if tag == 'function': - accessors[name] = accessor_function - elif tag == 'variable': - accessors[name] = accessor_variable - elif tag == 'constant': - accessors[name] = accessor_constant - else: - for i, enumname in enumerate(tp.enumerators): - def accessor_enum(name, tp=tp, i=i): - tp.check_not_partial() - library.__dict__[name] = tp.enumvalues[i] - accessors[enumname] = accessor_enum - for name in ffi._parser._int_constants: - accessors.setdefault(name, accessor_int_constant) - accessors_version[0] = ffi._cdef_version - # - def make_accessor(name): - with ffi._lock: - if name in library.__dict__ or name in FFILibrary.__dict__: - return # added by another thread while waiting for the lock - if name not in accessors: - update_accessors() - if name not in accessors: - raise AttributeError(name) - accessors[name](name) - # - class FFILibrary(object): - def __getattr__(self, name): - make_accessor(name) - return getattr(self, name) - def __setattr__(self, name, value): - try: - property = getattr(self.__class__, name) - except AttributeError: - make_accessor(name) - setattr(self, name, value) - else: - property.__set__(self, value) - def __dir__(self): - with ffi._lock: - update_accessors() - return accessors.keys() - def __addressof__(self, name): - if name in library.__dict__: - return library.__dict__[name] - if name in FFILibrary.__dict__: - return addressof_var(name) - make_accessor(name) - if name in library.__dict__: - return library.__dict__[name] - if name in FFILibrary.__dict__: - return addressof_var(name) - raise AttributeError("cffi library has no function or " - "global variable named '%s'" % (name,)) - def __cffi_close__(self): - backendlib.close_lib() - self.__dict__.clear() - # - if isinstance(libname, basestring): - try: - if not 
isinstance(libname, str): # unicode, on Python 2 - libname = libname.encode('utf-8') - FFILibrary.__name__ = 'FFILibrary_%s' % libname - except UnicodeError: - pass - library = FFILibrary() - return library, library.__dict__ - -def _builtin_function_type(func): - # a hack to make at least ffi.typeof(builtin_function) work, - # if the builtin function was obtained by 'vengine_cpy'. - import sys - try: - module = sys.modules[func.__module__] - ffi = module._cffi_original_ffi - types_of_builtin_funcs = module._cffi_types_of_builtin_funcs - tp = types_of_builtin_funcs[func] - except (KeyError, AttributeError, TypeError): - return None - else: - with ffi._lock: - return ffi._get_cached_btype(tp) diff --git a/venv/Lib/site-packages/cffi/backend_ctypes.py b/venv/Lib/site-packages/cffi/backend_ctypes.py deleted file mode 100644 index e7956a7..0000000 --- a/venv/Lib/site-packages/cffi/backend_ctypes.py +++ /dev/null @@ -1,1121 +0,0 @@ -import ctypes, ctypes.util, operator, sys -from . import model - -if sys.version_info < (3,): - bytechr = chr -else: - unicode = str - long = int - xrange = range - bytechr = lambda num: bytes([num]) - -class CTypesType(type): - pass - -class CTypesData(object): - __metaclass__ = CTypesType - __slots__ = ['__weakref__'] - __name__ = '' - - def __init__(self, *args): - raise TypeError("cannot instantiate %r" % (self.__class__,)) - - @classmethod - def _newp(cls, init): - raise TypeError("expected a pointer or array ctype, got '%s'" - % (cls._get_c_name(),)) - - @staticmethod - def _to_ctypes(value): - raise TypeError - - @classmethod - def _arg_to_ctypes(cls, *value): - try: - ctype = cls._ctype - except AttributeError: - raise TypeError("cannot create an instance of %r" % (cls,)) - if value: - res = cls._to_ctypes(*value) - if not isinstance(res, ctype): - res = cls._ctype(res) - else: - res = cls._ctype() - return res - - @classmethod - def _create_ctype_obj(cls, init): - if init is None: - return cls._arg_to_ctypes() - else: - return cls._arg_to_ctypes(init) - - @staticmethod - def _from_ctypes(ctypes_value): - raise TypeError - - @classmethod - def _get_c_name(cls, replace_with=''): - return cls._reftypename.replace(' &', replace_with) - - @classmethod - def _fix_class(cls): - cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) - cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) - cls.__module__ = 'ffi' - - def _get_own_repr(self): - raise NotImplementedError - - def _addr_repr(self, address): - if address == 0: - return 'NULL' - else: - if address < 0: - address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) - return '0x%x' % address - - def __repr__(self, c_name=None): - own = self._get_own_repr() - return '' % (c_name or self._get_c_name(), own) - - def _convert_to_address(self, BClass): - if BClass is None: - raise TypeError("cannot convert %r to an address" % ( - self._get_c_name(),)) - else: - raise TypeError("cannot convert %r to %r" % ( - self._get_c_name(), BClass._get_c_name())) - - @classmethod - def _get_size(cls): - return ctypes.sizeof(cls._ctype) - - def _get_size_of_instance(self): - return ctypes.sizeof(self._ctype) - - @classmethod - def _cast_from(cls, source): - raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) - - def _cast_to_integer(self): - return self._convert_to_address(None) - - @classmethod - def _alignment(cls): - return ctypes.alignment(cls._ctype) - - def __iter__(self): - raise TypeError("cdata %r does not support iteration" % ( - self._get_c_name()),) - - def _make_cmp(name): - cmpfunc = getattr(operator, name) - def 
cmp(self, other): - v_is_ptr = not isinstance(self, CTypesGenericPrimitive) - w_is_ptr = (isinstance(other, CTypesData) and - not isinstance(other, CTypesGenericPrimitive)) - if v_is_ptr and w_is_ptr: - return cmpfunc(self._convert_to_address(None), - other._convert_to_address(None)) - elif v_is_ptr or w_is_ptr: - return NotImplemented - else: - if isinstance(self, CTypesGenericPrimitive): - self = self._value - if isinstance(other, CTypesGenericPrimitive): - other = other._value - return cmpfunc(self, other) - cmp.func_name = name - return cmp - - __eq__ = _make_cmp('__eq__') - __ne__ = _make_cmp('__ne__') - __lt__ = _make_cmp('__lt__') - __le__ = _make_cmp('__le__') - __gt__ = _make_cmp('__gt__') - __ge__ = _make_cmp('__ge__') - - def __hash__(self): - return hash(self._convert_to_address(None)) - - def _to_string(self, maxlen): - raise TypeError("string(): %r" % (self,)) - - -class CTypesGenericPrimitive(CTypesData): - __slots__ = [] - - def __hash__(self): - return hash(self._value) - - def _get_own_repr(self): - return repr(self._from_ctypes(self._value)) - - -class CTypesGenericArray(CTypesData): - __slots__ = [] - - @classmethod - def _newp(cls, init): - return cls(init) - - def __iter__(self): - for i in xrange(len(self)): - yield self[i] - - def _get_own_repr(self): - return self._addr_repr(ctypes.addressof(self._blob)) - - -class CTypesGenericPtr(CTypesData): - __slots__ = ['_address', '_as_ctype_ptr'] - _automatic_casts = False - kind = "pointer" - - @classmethod - def _newp(cls, init): - return cls(init) - - @classmethod - def _cast_from(cls, source): - if source is None: - address = 0 - elif isinstance(source, CTypesData): - address = source._cast_to_integer() - elif isinstance(source, (int, long)): - address = source - else: - raise TypeError("bad type for cast to %r: %r" % - (cls, type(source).__name__)) - return cls._new_pointer_at(address) - - @classmethod - def _new_pointer_at(cls, address): - self = cls.__new__(cls) - self._address = address - self._as_ctype_ptr = ctypes.cast(address, cls._ctype) - return self - - def _get_own_repr(self): - try: - return self._addr_repr(self._address) - except AttributeError: - return '???' 
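backend_ctypes.py, the file being deleted here, is the pure-Python fallback backend built on ctypes; per the FFI() docstring above it exists mostly for tests and supports only a subset of cffi. Selecting it explicitly looks like this (a sketch; regular installations use the compiled _cffi_backend module instead):

    from cffi import FFI
    from cffi.backend_ctypes import CTypesBackend

    # limited, pure-Python backend; not intended for production use
    ffi = FFI(backend=CTypesBackend())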
- - def _cast_to_integer(self): - return self._address - - def __nonzero__(self): - return bool(self._address) - __bool__ = __nonzero__ - - @classmethod - def _to_ctypes(cls, value): - if not isinstance(value, CTypesData): - raise TypeError("unexpected %s object" % type(value).__name__) - address = value._convert_to_address(cls) - return ctypes.cast(address, cls._ctype) - - @classmethod - def _from_ctypes(cls, ctypes_ptr): - address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 - return cls._new_pointer_at(address) - - @classmethod - def _initialize(cls, ctypes_ptr, value): - if value: - ctypes_ptr.contents = cls._to_ctypes(value).contents - - def _convert_to_address(self, BClass): - if (BClass in (self.__class__, None) or BClass._automatic_casts - or self._automatic_casts): - return self._address - else: - return CTypesData._convert_to_address(self, BClass) - - -class CTypesBaseStructOrUnion(CTypesData): - __slots__ = ['_blob'] - - @classmethod - def _create_ctype_obj(cls, init): - # may be overridden - raise TypeError("cannot instantiate opaque type %s" % (cls,)) - - def _get_own_repr(self): - return self._addr_repr(ctypes.addressof(self._blob)) - - @classmethod - def _offsetof(cls, fieldname): - return getattr(cls._ctype, fieldname).offset - - def _convert_to_address(self, BClass): - if getattr(BClass, '_BItem', None) is self.__class__: - return ctypes.addressof(self._blob) - else: - return CTypesData._convert_to_address(self, BClass) - - @classmethod - def _from_ctypes(cls, ctypes_struct_or_union): - self = cls.__new__(cls) - self._blob = ctypes_struct_or_union - return self - - @classmethod - def _to_ctypes(cls, value): - return value._blob - - def __repr__(self, c_name=None): - return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) - - -class CTypesBackend(object): - - PRIMITIVE_TYPES = { - 'char': ctypes.c_char, - 'short': ctypes.c_short, - 'int': ctypes.c_int, - 'long': ctypes.c_long, - 'long long': ctypes.c_longlong, - 'signed char': ctypes.c_byte, - 'unsigned char': ctypes.c_ubyte, - 'unsigned short': ctypes.c_ushort, - 'unsigned int': ctypes.c_uint, - 'unsigned long': ctypes.c_ulong, - 'unsigned long long': ctypes.c_ulonglong, - 'float': ctypes.c_float, - 'double': ctypes.c_double, - '_Bool': ctypes.c_bool, - } - - for _name in ['unsigned long long', 'unsigned long', - 'unsigned int', 'unsigned short', 'unsigned char']: - _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) - PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_void_p): - PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_size_t): - PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] - - for _name in ['long long', 'long', 'int', 'short', 'signed char']: - _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) - PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_void_p): - PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] - PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_size_t): - PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] - - - def __init__(self): - self.RTLD_LAZY = 0 # not supported anyway by ctypes - self.RTLD_NOW = 0 - self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL - self.RTLD_LOCAL = ctypes.RTLD_LOCAL - - def set_ffi(self, ffi): - self.ffi = ffi - - def _get_types(self): - return CTypesData, CTypesType - - def load_library(self, path, flags=0): - cdll = ctypes.CDLL(path, flags) - return CTypesLibrary(self, cdll) - - def 
new_void_type(self): - class CTypesVoid(CTypesData): - __slots__ = [] - _reftypename = 'void &' - @staticmethod - def _from_ctypes(novalue): - return None - @staticmethod - def _to_ctypes(novalue): - if novalue is not None: - raise TypeError("None expected, got %s object" % - (type(novalue).__name__,)) - return None - CTypesVoid._fix_class() - return CTypesVoid - - def new_primitive_type(self, name): - if name == 'wchar_t': - raise NotImplementedError(name) - ctype = self.PRIMITIVE_TYPES[name] - if name == 'char': - kind = 'char' - elif name in ('float', 'double'): - kind = 'float' - else: - if name in ('signed char', 'unsigned char'): - kind = 'byte' - elif name == '_Bool': - kind = 'bool' - else: - kind = 'int' - is_signed = (ctype(-1).value == -1) - # - def _cast_source_to_int(source): - if isinstance(source, (int, long, float)): - source = int(source) - elif isinstance(source, CTypesData): - source = source._cast_to_integer() - elif isinstance(source, bytes): - source = ord(source) - elif source is None: - source = 0 - else: - raise TypeError("bad type for cast to %r: %r" % - (CTypesPrimitive, type(source).__name__)) - return source - # - kind1 = kind - class CTypesPrimitive(CTypesGenericPrimitive): - __slots__ = ['_value'] - _ctype = ctype - _reftypename = '%s &' % name - kind = kind1 - - def __init__(self, value): - self._value = value - - @staticmethod - def _create_ctype_obj(init): - if init is None: - return ctype() - return ctype(CTypesPrimitive._to_ctypes(init)) - - if kind == 'int' or kind == 'byte': - @classmethod - def _cast_from(cls, source): - source = _cast_source_to_int(source) - source = ctype(source).value # cast within range - return cls(source) - def __int__(self): - return self._value - - if kind == 'bool': - @classmethod - def _cast_from(cls, source): - if not isinstance(source, (int, long, float)): - source = _cast_source_to_int(source) - return cls(bool(source)) - def __int__(self): - return int(self._value) - - if kind == 'char': - @classmethod - def _cast_from(cls, source): - source = _cast_source_to_int(source) - source = bytechr(source & 0xFF) - return cls(source) - def __int__(self): - return ord(self._value) - - if kind == 'float': - @classmethod - def _cast_from(cls, source): - if isinstance(source, float): - pass - elif isinstance(source, CTypesGenericPrimitive): - if hasattr(source, '__float__'): - source = float(source) - else: - source = int(source) - else: - source = _cast_source_to_int(source) - source = ctype(source).value # fix precision - return cls(source) - def __int__(self): - return int(self._value) - def __float__(self): - return self._value - - _cast_to_integer = __int__ - - if kind == 'int' or kind == 'byte' or kind == 'bool': - @staticmethod - def _to_ctypes(x): - if not isinstance(x, (int, long)): - if isinstance(x, CTypesData): - x = int(x) - else: - raise TypeError("integer expected, got %s" % - type(x).__name__) - if ctype(x).value != x: - if not is_signed and x < 0: - raise OverflowError("%s: negative integer" % name) - else: - raise OverflowError("%s: integer out of bounds" - % name) - return x - - if kind == 'char': - @staticmethod - def _to_ctypes(x): - if isinstance(x, bytes) and len(x) == 1: - return x - if isinstance(x, CTypesPrimitive): # > - return x._value - raise TypeError("character expected, got %s" % - type(x).__name__) - def __nonzero__(self): - return ord(self._value) != 0 - else: - def __nonzero__(self): - return self._value != 0 - __bool__ = __nonzero__ - - if kind == 'float': - @staticmethod - def _to_ctypes(x): - if 
_attrs_ = ('name',) - - def __init__(self, name): - self.name = name - self.c_name_with_marker = name + '&' - - def is_integer_type(self): - return True - - def build_backend_type(self, ffi, finishlist): - raise NotImplementedError("integer type '%s' can only be used after " - "compilation" % self.name) - -class UnknownFloatType(BasePrimitiveType): - _attrs_ = ('name', ) - - def __init__(self, name): - self.name = name - self.c_name_with_marker = name + '&' - - def build_backend_type(self, ffi, finishlist): - raise NotImplementedError("float type '%s' can only be used after " - "compilation" % self.name) - - -class BaseFunctionType(BaseType): - _attrs_ = ('args', 'result', 'ellipsis', 'abi') - - def __init__(self, args, result, ellipsis, abi=None): - self.args = args - self.result = result - self.ellipsis = ellipsis - self.abi = abi - # - reprargs = [arg._get_c_name() for arg in self.args] - if self.ellipsis: - reprargs.append('...') - reprargs = reprargs or ['void'] - replace_with = self._base_pattern % (', '.join(reprargs),) - if abi is not None: - replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] - self.c_name_with_marker = ( - self.result.c_name_with_marker.replace('&', replace_with)) - - -class RawFunctionType(BaseFunctionType): - # Corresponds to a C type like 'int(int)', which is the C type of - # a function, but not a pointer-to-function. The backend has no - # notion of such a type; it's used temporarily by parsing. - _base_pattern = '(&)(%s)' - is_raw_function = True - - def build_backend_type(self, ffi, finishlist): - raise CDefError("cannot render the type %r: it is a function " - "type, not a pointer-to-function type" % (self,)) - - def as_function_pointer(self): - return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) - - -class FunctionPtrType(BaseFunctionType): - _base_pattern = '(*&)(%s)' - - def build_backend_type(self, ffi, finishlist): - result = self.result.get_cached_btype(ffi, finishlist) - args = [] - for tp in self.args: - args.append(tp.get_cached_btype(ffi, finishlist)) - abi_args = () - if self.abi == "__stdcall": - if not self.ellipsis: # __stdcall ignored for variadic funcs - try: - abi_args = (ffi._backend.FFI_STDCALL,) - except AttributeError: - pass - return global_cache(self, ffi, 'new_function_type', - tuple(args), result, self.ellipsis, *abi_args) - - def as_raw_function(self): - return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) - - -class PointerType(BaseType): - _attrs_ = ('totype', 'quals') - - def __init__(self, totype, quals=0): - self.totype = totype - self.quals = quals - extra = " *&" - if totype.is_array_type: - extra = "(%s)" % (extra.lstrip(),) - extra = qualify(quals, extra) - self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) - - def build_backend_type(self, ffi, finishlist): - BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) - return global_cache(self, ffi, 'new_pointer_type', BItem) - -voidp_type = PointerType(void_type) - -def ConstPointerType(totype): - return PointerType(totype, Q_CONST) - -const_voidp_type = ConstPointerType(void_type) - - -class NamedPointerType(PointerType): - _attrs_ = ('totype', 'name') - - def __init__(self, totype, name, quals=0): - PointerType.__init__(self, totype, quals) - self.name = name - self.c_name_with_marker = name + '&' - - -class ArrayType(BaseType): - _attrs_ = ('item', 'length') - is_array_type = True - - def __init__(self, item, length): - self.item = item - self.length = length - # - if length is None: - 
brackets = '&[]' - elif length == '...': - brackets = '&[/*...*/]' - else: - brackets = '&[%s]' % length - self.c_name_with_marker = ( - self.item.c_name_with_marker.replace('&', brackets)) - - def length_is_unknown(self): - return isinstance(self.length, str) - - def resolve_length(self, newlength): - return ArrayType(self.item, newlength) - - def build_backend_type(self, ffi, finishlist): - if self.length_is_unknown(): - raise CDefError("cannot render the type %r: unknown length" % - (self,)) - self.item.get_cached_btype(ffi, finishlist) # force the item BType - BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) - return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) - -char_array_type = ArrayType(PrimitiveType('char'), None) - - -class StructOrUnionOrEnum(BaseTypeByIdentity): - _attrs_ = ('name',) - forcename = None - - def build_c_name_with_marker(self): - name = self.forcename or '%s %s' % (self.kind, self.name) - self.c_name_with_marker = name + '&' - - def force_the_name(self, forcename): - self.forcename = forcename - self.build_c_name_with_marker() - - def get_official_name(self): - assert self.c_name_with_marker.endswith('&') - return self.c_name_with_marker[:-1] - - -class StructOrUnion(StructOrUnionOrEnum): - fixedlayout = None - completed = 0 - partial = False - packed = 0 - - def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): - self.name = name - self.fldnames = fldnames - self.fldtypes = fldtypes - self.fldbitsize = fldbitsize - self.fldquals = fldquals - self.build_c_name_with_marker() - - def anonymous_struct_fields(self): - if self.fldtypes is not None: - for name, type in zip(self.fldnames, self.fldtypes): - if name == '' and isinstance(type, StructOrUnion): - yield type - - def enumfields(self, expand_anonymous_struct_union=True): - fldquals = self.fldquals - if fldquals is None: - fldquals = (0,) * len(self.fldnames) - for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, - self.fldbitsize, fldquals): - if (name == '' and isinstance(type, StructOrUnion) - and expand_anonymous_struct_union): - # nested anonymous struct/union - for result in type.enumfields(): - yield result - else: - yield (name, type, bitsize, quals) - - def force_flatten(self): - # force the struct or union to have a declaration that lists - # directly all fields returned by enumfields(), flattening - # nested anonymous structs/unions. 
- names = [] - types = [] - bitsizes = [] - fldquals = [] - for name, type, bitsize, quals in self.enumfields(): - names.append(name) - types.append(type) - bitsizes.append(bitsize) - fldquals.append(quals) - self.fldnames = tuple(names) - self.fldtypes = tuple(types) - self.fldbitsize = tuple(bitsizes) - self.fldquals = tuple(fldquals) - - def get_cached_btype(self, ffi, finishlist, can_delay=False): - BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, - can_delay) - if not can_delay: - self.finish_backend_type(ffi, finishlist) - return BType - - def finish_backend_type(self, ffi, finishlist): - if self.completed: - if self.completed != 2: - raise NotImplementedError("recursive structure declaration " - "for '%s'" % (self.name,)) - return - BType = ffi._cached_btypes[self] - # - self.completed = 1 - # - if self.fldtypes is None: - pass # not completing it: it's an opaque struct - # - elif self.fixedlayout is None: - fldtypes = [tp.get_cached_btype(ffi, finishlist) - for tp in self.fldtypes] - lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) - extra_flags = () - if self.packed: - if self.packed == 1: - extra_flags = (8,) # SF_PACKED - else: - extra_flags = (0, self.packed) - ffi._backend.complete_struct_or_union(BType, lst, self, - -1, -1, *extra_flags) - # - else: - fldtypes = [] - fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout - for i in range(len(self.fldnames)): - fsize = fieldsize[i] - ftype = self.fldtypes[i] - # - if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): - # fix the length to match the total size - BItemType = ftype.item.get_cached_btype(ffi, finishlist) - nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) - if nrest != 0: - self._verification_error( - "field '%s.%s' has a bogus size?" 
% ( - self.name, self.fldnames[i] or '{}')) - ftype = ftype.resolve_length(nlen) - self.fldtypes = (self.fldtypes[:i] + (ftype,) + - self.fldtypes[i+1:]) - # - BFieldType = ftype.get_cached_btype(ffi, finishlist) - if isinstance(ftype, ArrayType) and ftype.length is None: - assert fsize == 0 - else: - bitemsize = ffi.sizeof(BFieldType) - if bitemsize != fsize: - self._verification_error( - "field '%s.%s' is declared as %d bytes, but is " - "really %d bytes" % (self.name, - self.fldnames[i] or '{}', - bitemsize, fsize)) - fldtypes.append(BFieldType) - # - lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) - ffi._backend.complete_struct_or_union(BType, lst, self, - totalsize, totalalignment) - self.completed = 2 - - def _verification_error(self, msg): - raise VerificationError(msg) - - def check_not_partial(self): - if self.partial and self.fixedlayout is None: - raise VerificationMissing(self._get_c_name()) - - def build_backend_type(self, ffi, finishlist): - self.check_not_partial() - finishlist.append(self) - # - return global_cache(self, ffi, 'new_%s_type' % self.kind, - self.get_official_name(), key=self) - - -class StructType(StructOrUnion): - kind = 'struct' - - -class UnionType(StructOrUnion): - kind = 'union' - - -class EnumType(StructOrUnionOrEnum): - kind = 'enum' - partial = False - partial_resolved = False - - def __init__(self, name, enumerators, enumvalues, baseinttype=None): - self.name = name - self.enumerators = enumerators - self.enumvalues = enumvalues - self.baseinttype = baseinttype - self.build_c_name_with_marker() - - def force_the_name(self, forcename): - StructOrUnionOrEnum.force_the_name(self, forcename) - if self.forcename is None: - name = self.get_official_name() - self.forcename = '$' + name.replace(' ', '_') - - def check_not_partial(self): - if self.partial and not self.partial_resolved: - raise VerificationMissing(self._get_c_name()) - - def build_backend_type(self, ffi, finishlist): - self.check_not_partial() - base_btype = self.build_baseinttype(ffi, finishlist) - return global_cache(self, ffi, 'new_enum_type', - self.get_official_name(), - self.enumerators, self.enumvalues, - base_btype, key=self) - - def build_baseinttype(self, ffi, finishlist): - if self.baseinttype is not None: - return self.baseinttype.get_cached_btype(ffi, finishlist) - # - if self.enumvalues: - smallest_value = min(self.enumvalues) - largest_value = max(self.enumvalues) - else: - import warnings - try: - # XXX! The goal is to ensure that the warnings.warn() - # will not suppress the warning. We want to get it - # several times if we reach this point several times. 
- __warningregistry__.clear() - except NameError: - pass - warnings.warn("%r has no values explicitly defined; " - "guessing that it is equivalent to 'unsigned int'" - % self._get_c_name()) - smallest_value = largest_value = 0 - if smallest_value < 0: # needs a signed type - sign = 1 - candidate1 = PrimitiveType("int") - candidate2 = PrimitiveType("long") - else: - sign = 0 - candidate1 = PrimitiveType("unsigned int") - candidate2 = PrimitiveType("unsigned long") - btype1 = candidate1.get_cached_btype(ffi, finishlist) - btype2 = candidate2.get_cached_btype(ffi, finishlist) - size1 = ffi.sizeof(btype1) - size2 = ffi.sizeof(btype2) - if (smallest_value >= ((-1) << (8*size1-1)) and - largest_value < (1 << (8*size1-sign))): - return btype1 - if (smallest_value >= ((-1) << (8*size2-1)) and - largest_value < (1 << (8*size2-sign))): - return btype2 - raise CDefError("%s values don't all fit into either 'long' " - "or 'unsigned long'" % self._get_c_name()) - -def unknown_type(name, structname=None): - if structname is None: - structname = '$%s' % name - tp = StructType(structname, None, None, None) - tp.force_the_name(name) - tp.origin = "unknown_type" - return tp - -def unknown_ptr_type(name, structname=None): - if structname is None: - structname = '$$%s' % name - tp = StructType(structname, None, None, None) - return NamedPointerType(tp, name) - - -global_lock = allocate_lock() -_typecache_cffi_backend = weakref.WeakValueDictionary() - -def get_typecache(backend): - # returns _typecache_cffi_backend if backend is the _cffi_backend - # module, or type(backend).__typecache if backend is an instance of - # CTypesBackend (or some FakeBackend class during tests) - if isinstance(backend, types.ModuleType): - return _typecache_cffi_backend - with global_lock: - if not hasattr(type(backend), '__typecache'): - type(backend).__typecache = weakref.WeakValueDictionary() - return type(backend).__typecache - -def global_cache(srctype, ffi, funcname, *args, **kwds): - key = kwds.pop('key', (funcname, args)) - assert not kwds - try: - return ffi._typecache[key] - except KeyError: - pass - try: - res = getattr(ffi._backend, funcname)(*args) - except NotImplementedError as e: - raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) - # note that setdefault() on WeakValueDictionary is not atomic - # and contains a rare bug (http://bugs.python.org/issue19542); - # we have to use a lock and do it ourselves - cache = ffi._typecache - with global_lock: - res1 = cache.get(key) - if res1 is None: - cache[key] = res - return res - else: - return res1 - -def pointer_cache(ffi, BType): - return global_cache('?', ffi, 'new_pointer_type', BType) - -def attach_exception_info(e, name): - if e.args and type(e.args[0]) is str: - e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/venv/Lib/site-packages/cffi/parse_c_type.h b/venv/Lib/site-packages/cffi/parse_c_type.h deleted file mode 100644 index 84e4ef8..0000000 --- a/venv/Lib/site-packages/cffi/parse_c_type.h +++ /dev/null @@ -1,181 +0,0 @@ - -/* This part is from file 'cffi/parse_c_type.h'. It is copied at the - beginning of C sources generated by CFFI's ffi.set_source(). 
*/ - -typedef void *_cffi_opcode_t; - -#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) -#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) -#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) - -#define _CFFI_OP_PRIMITIVE 1 -#define _CFFI_OP_POINTER 3 -#define _CFFI_OP_ARRAY 5 -#define _CFFI_OP_OPEN_ARRAY 7 -#define _CFFI_OP_STRUCT_UNION 9 -#define _CFFI_OP_ENUM 11 -#define _CFFI_OP_FUNCTION 13 -#define _CFFI_OP_FUNCTION_END 15 -#define _CFFI_OP_NOOP 17 -#define _CFFI_OP_BITFIELD 19 -#define _CFFI_OP_TYPENAME 21 -#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs -#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs -#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) -#define _CFFI_OP_CONSTANT 29 -#define _CFFI_OP_CONSTANT_INT 31 -#define _CFFI_OP_GLOBAL_VAR 33 -#define _CFFI_OP_DLOPEN_FUNC 35 -#define _CFFI_OP_DLOPEN_CONST 37 -#define _CFFI_OP_GLOBAL_VAR_F 39 -#define _CFFI_OP_EXTERN_PYTHON 41 - -#define _CFFI_PRIM_VOID 0 -#define _CFFI_PRIM_BOOL 1 -#define _CFFI_PRIM_CHAR 2 -#define _CFFI_PRIM_SCHAR 3 -#define _CFFI_PRIM_UCHAR 4 -#define _CFFI_PRIM_SHORT 5 -#define _CFFI_PRIM_USHORT 6 -#define _CFFI_PRIM_INT 7 -#define _CFFI_PRIM_UINT 8 -#define _CFFI_PRIM_LONG 9 -#define _CFFI_PRIM_ULONG 10 -#define _CFFI_PRIM_LONGLONG 11 -#define _CFFI_PRIM_ULONGLONG 12 -#define _CFFI_PRIM_FLOAT 13 -#define _CFFI_PRIM_DOUBLE 14 -#define _CFFI_PRIM_LONGDOUBLE 15 - -#define _CFFI_PRIM_WCHAR 16 -#define _CFFI_PRIM_INT8 17 -#define _CFFI_PRIM_UINT8 18 -#define _CFFI_PRIM_INT16 19 -#define _CFFI_PRIM_UINT16 20 -#define _CFFI_PRIM_INT32 21 -#define _CFFI_PRIM_UINT32 22 -#define _CFFI_PRIM_INT64 23 -#define _CFFI_PRIM_UINT64 24 -#define _CFFI_PRIM_INTPTR 25 -#define _CFFI_PRIM_UINTPTR 26 -#define _CFFI_PRIM_PTRDIFF 27 -#define _CFFI_PRIM_SIZE 28 -#define _CFFI_PRIM_SSIZE 29 -#define _CFFI_PRIM_INT_LEAST8 30 -#define _CFFI_PRIM_UINT_LEAST8 31 -#define _CFFI_PRIM_INT_LEAST16 32 -#define _CFFI_PRIM_UINT_LEAST16 33 -#define _CFFI_PRIM_INT_LEAST32 34 -#define _CFFI_PRIM_UINT_LEAST32 35 -#define _CFFI_PRIM_INT_LEAST64 36 -#define _CFFI_PRIM_UINT_LEAST64 37 -#define _CFFI_PRIM_INT_FAST8 38 -#define _CFFI_PRIM_UINT_FAST8 39 -#define _CFFI_PRIM_INT_FAST16 40 -#define _CFFI_PRIM_UINT_FAST16 41 -#define _CFFI_PRIM_INT_FAST32 42 -#define _CFFI_PRIM_UINT_FAST32 43 -#define _CFFI_PRIM_INT_FAST64 44 -#define _CFFI_PRIM_UINT_FAST64 45 -#define _CFFI_PRIM_INTMAX 46 -#define _CFFI_PRIM_UINTMAX 47 -#define _CFFI_PRIM_FLOATCOMPLEX 48 -#define _CFFI_PRIM_DOUBLECOMPLEX 49 -#define _CFFI_PRIM_CHAR16 50 -#define _CFFI_PRIM_CHAR32 51 - -#define _CFFI__NUM_PRIM 52 -#define _CFFI__UNKNOWN_PRIM (-1) -#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) -#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) - -#define _CFFI__IO_FILE_STRUCT (-1) - - -struct _cffi_global_s { - const char *name; - void *address; - _cffi_opcode_t type_op; - void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown - // OP_CPYTHON_BLTN_*: addr of direct function -}; - -struct _cffi_getconst_s { - unsigned long long value; - const struct _cffi_type_context_s *ctx; - int gindex; -}; - -struct _cffi_struct_union_s { - const char *name; - int type_index; // -> _cffi_types, on a OP_STRUCT_UNION - int flags; // _CFFI_F_* flags below - size_t size; - int alignment; - int first_field_index; // -> _cffi_fields array - int num_fields; -}; -#define _CFFI_F_UNION 0x01 // is a union, not a struct -#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the - // "standard layout" or if some are missing -#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct -#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() -#define _CFFI_F_OPAQUE 0x10 // opaque - -struct _cffi_field_s { - const char *name; - size_t field_offset; - size_t field_size; - _cffi_opcode_t field_type_op; -}; - -struct _cffi_enum_s { - const char *name; - int type_index; // -> _cffi_types, on a OP_ENUM - int type_prim; // _CFFI_PRIM_xxx - const char *enumerators; // comma-delimited string -}; - -struct _cffi_typename_s { - const char *name; - int type_index; /* if opaque, points to a possibly artificial - OP_STRUCT which is itself opaque */ -}; - -struct _cffi_type_context_s { - _cffi_opcode_t *types; - const struct _cffi_global_s *globals; - const struct _cffi_field_s *fields; - const struct _cffi_struct_union_s *struct_unions; - const struct _cffi_enum_s *enums; - const struct _cffi_typename_s *typenames; - int num_globals; - int num_struct_unions; - int num_enums; - int num_typenames; - const char *const *includes; - int num_types; - int flags; /* future extension */ -}; - -struct _cffi_parse_info_s { - const struct _cffi_type_context_s *ctx; - _cffi_opcode_t *output; - unsigned int output_size; - size_t error_location; - const char *error_message; -}; - -struct _cffi_externpy_s { - const char *name; - size_t size_of_result; - void *reserved1, *reserved2; -}; - -#ifdef _CFFI_INTERNAL -static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); -static int search_in_globals(const struct _cffi_type_context_s *ctx, - const char *search, size_t search_len); -static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, - const char *search, size_t search_len); -#endif diff --git a/venv/Lib/site-packages/cffi/pkgconfig.py b/venv/Lib/site-packages/cffi/pkgconfig.py deleted file mode 100644 index 5c93f15..0000000 --- a/venv/Lib/site-packages/cffi/pkgconfig.py +++ /dev/null @@ -1,121 +0,0 @@ -# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi -import sys, os, subprocess - -from .error import PkgConfigError - - -def merge_flags(cfg1, cfg2): - """Merge values from cffi config flags cfg2 to cf1 - - Example: - merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) - {"libraries": ["one", "two"]} - """ - for key, value in cfg2.items(): - if key not in cfg1: - cfg1[key] = value - else: - if not isinstance(cfg1[key], list): - raise TypeError("cfg1[%r] should be a list of strings" % (key,)) - if not isinstance(value, list): - raise TypeError("cfg2[%r] should be a list of strings" % (key,)) - cfg1[key].extend(value) - return cfg1 - - -def call(libname, flag, encoding=sys.getfilesystemencoding()): - """Calls pkg-config and returns the output if found - """ - a = ["pkg-config", "--print-errors"] - a.append(flag) - a.append(libname) - try: - pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except EnvironmentError as e: - raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) - - bout, berr = pc.communicate() - if pc.returncode != 0: - try: - berr = berr.decode(encoding) - except Exception: - pass - raise PkgConfigError(berr.strip()) - - if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x - try: - bout = bout.decode(encoding) - except UnicodeDecodeError: - raise PkgConfigError("pkg-config %s %s returned bytes that cannot " - "be decoded with encoding %r:\n%r" % - (flag, libname, encoding, bout)) - - if os.altsep != '\\' and '\\' in bout: - raise PkgConfigError("pkg-config %s %s returned an 
unsupported " - "backslash-escaped output:\n%r" % - (flag, libname, bout)) - return bout - - -def flags_from_pkgconfig(libs): - r"""Return compiler line flags for FFI.set_source based on pkg-config output - - Usage - ... - ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) - - If pkg-config is installed on build machine, then arguments include_dirs, - library_dirs, libraries, define_macros, extra_compile_args and - extra_link_args are extended with an output of pkg-config for libfoo and - libbar. - - Raises PkgConfigError in case the pkg-config call fails. - """ - - def get_include_dirs(string): - return [x[2:] for x in string.split() if x.startswith("-I")] - - def get_library_dirs(string): - return [x[2:] for x in string.split() if x.startswith("-L")] - - def get_libraries(string): - return [x[2:] for x in string.split() if x.startswith("-l")] - - # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils - def get_macros(string): - def _macro(x): - x = x[2:] # drop "-D" - if '=' in x: - return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") - else: - return (x, None) # "-Dfoo" => ("foo", None) - return [_macro(x) for x in string.split() if x.startswith("-D")] - - def get_other_cflags(string): - return [x for x in string.split() if not x.startswith("-I") and - not x.startswith("-D")] - - def get_other_libs(string): - return [x for x in string.split() if not x.startswith("-L") and - not x.startswith("-l")] - - # return kwargs for given libname - def kwargs(libname): - fse = sys.getfilesystemencoding() - all_cflags = call(libname, "--cflags") - all_libs = call(libname, "--libs") - return { - "include_dirs": get_include_dirs(all_cflags), - "library_dirs": get_library_dirs(all_libs), - "libraries": get_libraries(all_libs), - "define_macros": get_macros(all_cflags), - "extra_compile_args": get_other_cflags(all_cflags), - "extra_link_args": get_other_libs(all_libs), - } - - # merge all arguments together - ret = {} - for libname in libs: - lib_flags = kwargs(libname) - merge_flags(ret, lib_flags) - return ret diff --git a/venv/Lib/site-packages/cffi/recompiler.py b/venv/Lib/site-packages/cffi/recompiler.py deleted file mode 100644 index 7734a34..0000000 --- a/venv/Lib/site-packages/cffi/recompiler.py +++ /dev/null @@ -1,1598 +0,0 @@ -import io, os, sys, sysconfig -from . 
import ffiplatform, model -from .error import VerificationError -from .cffi_opcode import * - -VERSION_BASE = 0x2601 -VERSION_EMBEDDED = 0x2701 -VERSION_CHAR16CHAR32 = 0x2801 - -USE_LIMITED_API = ((sys.platform != 'win32' or sys.version_info < (3, 0) or - sys.version_info >= (3, 5)) and - not sysconfig.get_config_var("Py_GIL_DISABLED")) # free-threaded doesn't yet support limited API - -class GlobalExpr: - def __init__(self, name, address, type_op, size=0, check_value=0): - self.name = name - self.address = address - self.type_op = type_op - self.size = size - self.check_value = check_value - - def as_c_expr(self): - return ' { "%s", (void *)%s, %s, (void *)%s },' % ( - self.name, self.address, self.type_op.as_c_expr(), self.size) - - def as_python_expr(self): - return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, - self.check_value) - -class FieldExpr: - def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): - self.name = name - self.field_offset = field_offset - self.field_size = field_size - self.fbitsize = fbitsize - self.field_type_op = field_type_op - - def as_c_expr(self): - spaces = " " * len(self.name) - return (' { "%s", %s,\n' % (self.name, self.field_offset) + - ' %s %s,\n' % (spaces, self.field_size) + - ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) - - def as_python_expr(self): - raise NotImplementedError - - def as_field_python_expr(self): - if self.field_type_op.op == OP_NOOP: - size_expr = '' - elif self.field_type_op.op == OP_BITFIELD: - size_expr = format_four_bytes(self.fbitsize) - else: - raise NotImplementedError - return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), - size_expr, - self.name) - -class StructUnionExpr: - def __init__(self, name, type_index, flags, size, alignment, comment, - first_field_index, c_fields): - self.name = name - self.type_index = type_index - self.flags = flags - self.size = size - self.alignment = alignment - self.comment = comment - self.first_field_index = first_field_index - self.c_fields = c_fields - - def as_c_expr(self): - return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) - + '\n %s, %s, ' % (self.size, self.alignment) - + '%d, %d ' % (self.first_field_index, len(self.c_fields)) - + ('/* %s */ ' % self.comment if self.comment else '') - + '},') - - def as_python_expr(self): - flags = eval(self.flags, G_FLAGS) - fields_expr = [c_field.as_field_python_expr() - for c_field in self.c_fields] - return "(b'%s%s%s',%s)" % ( - format_four_bytes(self.type_index), - format_four_bytes(flags), - self.name, - ','.join(fields_expr)) - -class EnumExpr: - def __init__(self, name, type_index, size, signed, allenums): - self.name = name - self.type_index = type_index - self.size = size - self.signed = signed - self.allenums = allenums - - def as_c_expr(self): - return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' - ' "%s" },' % (self.name, self.type_index, - self.size, self.signed, self.allenums)) - - def as_python_expr(self): - prim_index = { - (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, - (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, - (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, - (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, - }[self.size, self.signed] - return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), - format_four_bytes(prim_index), - self.name, self.allenums) - -class TypenameExpr: - def __init__(self, name, type_index): - self.name = name - self.type_index = type_index - - def as_c_expr(self): - return ' { "%s", %d },' % (self.name, self.type_index) - - def as_python_expr(self): 
- return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) - - -# ____________________________________________________________ - - -class Recompiler: - _num_externpy = 0 - - def __init__(self, ffi, module_name, target_is_python=False): - self.ffi = ffi - self.module_name = module_name - self.target_is_python = target_is_python - self._version = VERSION_BASE - - def needs_version(self, ver): - self._version = max(self._version, ver) - - def collect_type_table(self): - self._typesdict = {} - self._generate("collecttype") - # - all_decls = sorted(self._typesdict, key=str) - # - # prepare all FUNCTION bytecode sequences first - self.cffi_types = [] - for tp in all_decls: - if tp.is_raw_function: - assert self._typesdict[tp] is None - self._typesdict[tp] = len(self.cffi_types) - self.cffi_types.append(tp) # placeholder - for tp1 in tp.args: - assert isinstance(tp1, (model.VoidType, - model.BasePrimitiveType, - model.PointerType, - model.StructOrUnionOrEnum, - model.FunctionPtrType)) - if self._typesdict[tp1] is None: - self._typesdict[tp1] = len(self.cffi_types) - self.cffi_types.append(tp1) # placeholder - self.cffi_types.append('END') # placeholder - # - # prepare all OTHER bytecode sequences - for tp in all_decls: - if not tp.is_raw_function and self._typesdict[tp] is None: - self._typesdict[tp] = len(self.cffi_types) - self.cffi_types.append(tp) # placeholder - if tp.is_array_type and tp.length is not None: - self.cffi_types.append('LEN') # placeholder - assert None not in self._typesdict.values() - # - # collect all structs and unions and enums - self._struct_unions = {} - self._enums = {} - for tp in all_decls: - if isinstance(tp, model.StructOrUnion): - self._struct_unions[tp] = None - elif isinstance(tp, model.EnumType): - self._enums[tp] = None - for i, tp in enumerate(sorted(self._struct_unions, - key=lambda tp: tp.name)): - self._struct_unions[tp] = i - for i, tp in enumerate(sorted(self._enums, - key=lambda tp: tp.name)): - self._enums[tp] = i - # - # emit all bytecode sequences now - for tp in all_decls: - method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) - method(tp, self._typesdict[tp]) - # - # consistency check - for op in self.cffi_types: - assert isinstance(op, CffiOp) - self.cffi_types = tuple(self.cffi_types) # don't change any more - - def _enum_fields(self, tp): - # When producing C, expand all anonymous struct/union fields. - # That's necessary to have C code checking the offsets of the - # individual fields contained in them. When producing Python, - # don't do it and instead write it like it is, with the - # corresponding fields having an empty name. Empty names are - # recognized at runtime when we import the generated Python - # file. 
- expand_anonymous_struct_union = not self.target_is_python - return tp.enumfields(expand_anonymous_struct_union) - - def _do_collect_type(self, tp): - if not isinstance(tp, model.BaseTypeByIdentity): - if isinstance(tp, tuple): - for x in tp: - self._do_collect_type(x) - return - if tp not in self._typesdict: - self._typesdict[tp] = None - if isinstance(tp, model.FunctionPtrType): - self._do_collect_type(tp.as_raw_function()) - elif isinstance(tp, model.StructOrUnion): - if tp.fldtypes is not None and ( - tp not in self.ffi._parser._included_declarations): - for name1, tp1, _, _ in self._enum_fields(tp): - self._do_collect_type(self._field_type(tp, name1, tp1)) - else: - for _, x in tp._get_items(): - self._do_collect_type(x) - - def _generate(self, step_name): - lst = self.ffi._parser._declarations.items() - for name, (tp, quals) in sorted(lst): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_cpy_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in recompile(): %r" % name) - try: - self._current_quals = quals - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - # ---------- - - ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] - - def collect_step_tables(self): - # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. - self._lsts = {} - for step_name in self.ALL_STEPS: - self._lsts[step_name] = [] - self._seen_struct_unions = set() - self._generate("ctx") - self._add_missing_struct_unions() - # - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - if step_name != "field": - lst.sort(key=lambda entry: entry.name) - self._lsts[step_name] = tuple(lst) # don't change any more - # - # check for a possible internal inconsistency: _cffi_struct_unions - # should have been generated with exactly self._struct_unions - lst = self._lsts["struct_union"] - for tp, i in self._struct_unions.items(): - assert i < len(lst) - assert lst[i].name == tp.name - assert len(lst) == len(self._struct_unions) - # same with enums - lst = self._lsts["enum"] - for tp, i in self._enums.items(): - assert i < len(lst) - assert lst[i].name == tp.name - assert len(lst) == len(self._enums) - - # ---------- - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def write_source_to_f(self, f, preamble): - if self.target_is_python: - assert preamble is None - self.write_py_source_to_f(f) - else: - assert preamble is not None - self.write_c_source_to_f(f, preamble) - - def _rel_readlines(self, filename): - g = open(os.path.join(os.path.dirname(__file__), filename), 'r') - lines = g.readlines() - g.close() - return lines - - def write_c_source_to_f(self, f, preamble): - self._f = f - prnt = self._prnt - if self.ffi._embedding is not None: - prnt('#define _CFFI_USE_EMBEDDING') - if not USE_LIMITED_API: - prnt('#define _CFFI_NO_LIMITED_API') - # - # first the '#include' (actually done by inlining the file's content) - lines = self._rel_readlines('_cffi_include.h') - i = lines.index('#include "parse_c_type.h"\n') - lines[i:i+1] = self._rel_readlines('parse_c_type.h') - prnt(''.join(lines)) - # - # if we have ffi._embedding != None, we give it here as a macro - # and include an extra file - base_module_name = self.module_name.split('.')[-1] - if self.ffi._embedding is not None: - prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) - prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') - 
self._print_string_literal_in_array(self.ffi._embedding) - prnt('0 };') - prnt('#ifdef PYPY_VERSION') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( - base_module_name,)) - prnt('#elif PY_MAJOR_VERSION >= 3') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( - base_module_name,)) - prnt('#else') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( - base_module_name,)) - prnt('#endif') - lines = self._rel_readlines('_embedding.h') - i = lines.index('#include "_cffi_errors.h"\n') - lines[i:i+1] = self._rel_readlines('_cffi_errors.h') - prnt(''.join(lines)) - self.needs_version(VERSION_EMBEDDED) - # - # then paste the C source given by the user, verbatim. - prnt('/************************************************************/') - prnt() - prnt(preamble) - prnt() - prnt('/************************************************************/') - prnt() - # - # the declaration of '_cffi_types' - prnt('static void *_cffi_types[] = {') - typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) - for i, op in enumerate(self.cffi_types): - comment = '' - if i in typeindex2type: - comment = ' // ' + typeindex2type[i]._get_c_name() - prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) - if not self.cffi_types: - prnt(' 0') - prnt('};') - prnt() - # - # call generate_cpy_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. - self._seen_constants = set() - self._generate("decl") - # - # the declaration of '_cffi_globals' and '_cffi_typenames' - nums = {} - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - nums[step_name] = len(lst) - if nums[step_name] > 0: - prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( - step_name, step_name)) - for entry in lst: - prnt(entry.as_c_expr()) - prnt('};') - prnt() - # - # the declaration of '_cffi_includes' - if self.ffi._included_ffis: - prnt('static const char * const _cffi_includes[] = {') - for ffi_to_include in self.ffi._included_ffis: - try: - included_module_name, included_source = ( - ffi_to_include._assigned_source[:2]) - except AttributeError: - raise VerificationError( - "ffi object %r includes %r, but the latter has not " - "been prepared with set_source()" % ( - self.ffi, ffi_to_include,)) - if included_source is None: - raise VerificationError( - "not implemented yet: ffi.include() of a Python-based " - "ffi inside a C-based ffi") - prnt(' "%s",' % (included_module_name,)) - prnt(' NULL') - prnt('};') - prnt() - # - # the declaration of '_cffi_type_context' - prnt('static const struct _cffi_type_context_s _cffi_type_context = {') - prnt(' _cffi_types,') - for step_name in self.ALL_STEPS: - if nums[step_name] > 0: - prnt(' _cffi_%ss,' % step_name) - else: - prnt(' NULL, /* no %ss */' % step_name) - for step_name in self.ALL_STEPS: - if step_name != "field": - prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) - if self.ffi._included_ffis: - prnt(' _cffi_includes,') - else: - prnt(' NULL, /* no includes */') - prnt(' %d, /* num_types */' % (len(self.cffi_types),)) - flags = 0 - if self._num_externpy > 0 or self.ffi._embedding is not None: - flags |= 1 # set to mean that we use extern "Python" - prnt(' %d, /* flags */' % flags) - prnt('};') - prnt() - # - # the init function - prnt('#ifdef __GNUC__') - prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') - prnt('#endif') - prnt() - prnt('#ifdef PYPY_VERSION') - prnt('PyMODINIT_FUNC') - prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) - prnt('{') - if flags & 
1: - prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') - prnt(' _cffi_call_python_org = ' - '(void(*)(struct _cffi_externpy_s *, char *))p[1];') - prnt(' }') - prnt(' p[0] = (const void *)0x%x;' % self._version) - prnt(' p[1] = &_cffi_type_context;') - prnt('#if PY_MAJOR_VERSION >= 3') - prnt(' return NULL;') - prnt('#endif') - prnt('}') - # on Windows, distutils insists on putting init_cffi_xyz in - # 'export_symbols', so instead of fighting it, just give up and - # give it one - prnt('# ifdef _MSC_VER') - prnt(' PyMODINIT_FUNC') - prnt('# if PY_MAJOR_VERSION >= 3') - prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) - prnt('# else') - prnt(' init%s(void) { }' % (base_module_name,)) - prnt('# endif') - prnt('# endif') - prnt('#elif PY_MAJOR_VERSION >= 3') - prnt('PyMODINIT_FUNC') - prnt('PyInit_%s(void)' % (base_module_name,)) - prnt('{') - prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( - self.module_name, self._version)) - prnt('}') - prnt('#else') - prnt('PyMODINIT_FUNC') - prnt('init%s(void)' % (base_module_name,)) - prnt('{') - prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( - self.module_name, self._version)) - prnt('}') - prnt('#endif') - prnt() - prnt('#ifdef __GNUC__') - prnt('# pragma GCC visibility pop') - prnt('#endif') - self._version = None - - def _to_py(self, x): - if isinstance(x, str): - return "b'%s'" % (x,) - if isinstance(x, (list, tuple)): - rep = [self._to_py(item) for item in x] - if len(rep) == 1: - rep.append('') - return "(%s)" % (','.join(rep),) - return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. - - def write_py_source_to_f(self, f): - self._f = f - prnt = self._prnt - # - # header - prnt("# auto-generated file") - prnt("import _cffi_backend") - # - # the 'import' of the included ffis - num_includes = len(self.ffi._included_ffis or ()) - for i in range(num_includes): - ffi_to_include = self.ffi._included_ffis[i] - try: - included_module_name, included_source = ( - ffi_to_include._assigned_source[:2]) - except AttributeError: - raise VerificationError( - "ffi object %r includes %r, but the latter has not " - "been prepared with set_source()" % ( - self.ffi, ffi_to_include,)) - if included_source is not None: - raise VerificationError( - "not implemented yet: ffi.include() of a C-based " - "ffi inside a Python-based ffi") - prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) - prnt() - prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) - prnt(" _version = 0x%x," % (self._version,)) - self._version = None - # - # the '_types' keyword argument - self.cffi_types = tuple(self.cffi_types) # don't change any more - types_lst = [op.as_python_bytes() for op in self.cffi_types] - prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) - typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) - # - # the keyword arguments from ALL_STEPS - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - if len(lst) > 0 and step_name != "field": - prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) - # - # the '_includes' keyword argument - if num_includes > 0: - prnt(' _includes = (%s,),' % ( - ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) - # - # the footer - prnt(')') - - # ---------- - - def _gettypenum(self, type): - # a KeyError here is a bug. please report it! 
:-) - return self._typesdict[type] - - def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): - extraarg = '' - if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): - if tp.is_integer_type() and tp.name != '_Bool': - converter = '_cffi_to_c_int' - extraarg = ', %s' % tp.name - elif isinstance(tp, model.UnknownFloatType): - # don't check with is_float_type(): it may be a 'long - # double' here, and _cffi_to_c_double would loose precision - converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) - else: - cname = tp.get_c_name('') - converter = '(%s)_cffi_to_c_%s' % (cname, - tp.name.replace(' ', '_')) - if cname in ('char16_t', 'char32_t'): - self.needs_version(VERSION_CHAR16CHAR32) - errvalue = '-1' - # - elif isinstance(tp, model.PointerType): - self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, - tovar, errcode) - return - # - elif (isinstance(tp, model.StructOrUnionOrEnum) or - isinstance(tp, model.BasePrimitiveType)): - # a struct (not a struct pointer) as a function argument; - # or, a complex (the same code works) - self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' - % (tovar, self._gettypenum(tp), fromvar)) - self._prnt(' %s;' % errcode) - return - # - elif isinstance(tp, model.FunctionPtrType): - converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') - extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) - errvalue = 'NULL' - # - else: - raise NotImplementedError(tp) - # - self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) - self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( - tovar, tp.get_c_name(''), errvalue)) - self._prnt(' %s;' % errcode) - - def _extra_local_variables(self, tp, localvars, freelines): - if isinstance(tp, model.PointerType): - localvars.add('Py_ssize_t datasize') - localvars.add('struct _cffi_freeme_s *large_args_free = NULL') - freelines.add('if (large_args_free != NULL)' - ' _cffi_free_array_arguments(large_args_free);') - - def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): - self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') - self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( - self._gettypenum(tp), fromvar, tovar)) - self._prnt(' if (datasize != 0) {') - self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' - '(%s)alloca((size_t)datasize) : NULL;' % ( - tovar, tp.get_c_name(''))) - self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' - '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) - self._prnt(' datasize, &large_args_free) < 0)') - self._prnt(' %s;' % errcode) - self._prnt(' }') - - def _convert_expr_from_c(self, tp, var, context): - if isinstance(tp, model.BasePrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - return '_cffi_from_c_int(%s, %s)' % (var, tp.name) - elif isinstance(tp, model.UnknownFloatType): - return '_cffi_from_c_double(%s)' % (var,) - elif tp.name != 'long double' and not tp.is_complex_type(): - cname = tp.name.replace(' ', '_') - if cname in ('char16_t', 'char32_t'): - self.needs_version(VERSION_CHAR16CHAR32) - return '_cffi_from_c_%s(%s)' % (cname, var) - else: - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.ArrayType): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(model.PointerType(tp.item))) - elif isinstance(tp, model.StructOrUnion): - if tp.fldnames is None: - raise TypeError("'%s' is used as %s, but is opaque" % ( - tp._get_c_name(), context)) - return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.EnumType): - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - else: - raise NotImplementedError(tp) - - # ---------- - # typedefs - - def _typedef_type(self, tp, name): - return self._global_type(tp, "(*(%s *)0)" % (name,)) - - def _generate_cpy_typedef_collecttype(self, tp, name): - self._do_collect_type(self._typedef_type(tp, name)) - - def _generate_cpy_typedef_decl(self, tp, name): - pass - - def _typedef_ctx(self, tp, name): - type_index = self._typesdict[tp] - self._lsts["typename"].append(TypenameExpr(name, type_index)) - - def _generate_cpy_typedef_ctx(self, tp, name): - tp = self._typedef_type(tp, name) - self._typedef_ctx(tp, name) - if getattr(tp, "origin", None) == "unknown_type": - self._struct_ctx(tp, tp.name, approxname=None) - elif isinstance(tp, model.NamedPointerType): - self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, - named_ptr=tp) - - # ---------- - # function declarations - - def _generate_cpy_function_collecttype(self, tp, name): - self._do_collect_type(tp.as_raw_function()) - if tp.ellipsis and not self.target_is_python: - self._do_collect_type(tp) - - def _generate_cpy_function_decl(self, tp, name): - assert not self.target_is_python - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no CPython wrapper) - self._generate_cpy_constant_decl(tp, name) - return - prnt = self._prnt - numargs = len(tp.args) - if numargs == 0: - argname = 'noarg' - elif numargs == 1: - argname = 'arg0' - else: - argname = 'args' - # - # ------------------------------ - # the 'd' version of the function, only for addressof(lib, 'func') - arguments = [] - call_arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arguments.append(type.get_c_name(' x%d' % i, context)) - call_arguments.append('x%d' % i) - repr_arguments = ', 
 [... remainder of the deleted vendored cffi recompiler module under venv/Lib/site-packages/ omitted: part of the bulk removal of the committed venv/ tree, no project code changes ...]
diff --git a/venv/Lib/site-packages/cffi/setuptools_ext.py b/venv/Lib/site-packages/cffi/setuptools_ext.py
deleted file mode 100644
index 5cdd246..0000000
--- a/venv/Lib/site-packages/cffi/setuptools_ext.py
+++ /dev/null
@@ -1,229 +0,0 @@
 [... 229 deleted lines of vendored cffi/setuptools_ext.py omitted ...]
diff --git a/venv/Lib/site-packages/cffi/vengine_cpy.py b/venv/Lib/site-packages/cffi/vengine_cpy.py
deleted file mode 100644
index 02e6a47..0000000
--- a/venv/Lib/site-packages/cffi/vengine_cpy.py
+++ /dev/null
@@ -1,1087 +0,0 @@
 [... deleted lines of vendored cffi/vengine_cpy.py (the deprecated ffi.verify() engine) omitted; the deletion hunk continues below ...]
__attribute__((unused)) -#endif -static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, - char **output_data, Py_ssize_t datasize, - struct _cffi_freeme_s **freeme) -{ - char *p; - if (datasize < 0) - return -1; - - p = *output_data; - if (p == NULL) { - struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( - offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); - if (fp == NULL) - return -1; - fp->next = *freeme; - *freeme = fp; - p = *output_data = (char *)&fp->alignment; - } - memset((void *)p, 0, (size_t)datasize); - return _cffi_convert_array_from_object(p, ctptr, arg); -} - -#ifdef __GNUC__ - __attribute__((unused)) -#endif -static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) -{ - do { - void *p = (void *)freeme; - freeme = freeme->next; - PyObject_Free(p); - } while (freeme != NULL); -} - -static int _cffi_init(void) -{ - PyObject *module, *c_api_object = NULL; - - module = PyImport_ImportModule("_cffi_backend"); - if (module == NULL) - goto failure; - - c_api_object = PyObject_GetAttrString(module, "_C_API"); - if (c_api_object == NULL) - goto failure; - if (!PyCapsule_CheckExact(c_api_object)) { - PyErr_SetNone(PyExc_ImportError); - goto failure; - } - memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), - _CFFI_NUM_EXPORTS * sizeof(void *)); - - Py_DECREF(module); - Py_DECREF(c_api_object); - return 0; - - failure: - Py_XDECREF(module); - Py_XDECREF(c_api_object); - return -1; -} - -#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) - -/**********/ -''' diff --git a/venv/Lib/site-packages/cffi/vengine_gen.py b/venv/Lib/site-packages/cffi/vengine_gen.py deleted file mode 100644 index bffc821..0000000 --- a/venv/Lib/site-packages/cffi/vengine_gen.py +++ /dev/null @@ -1,679 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys, os -import types - -from . import model -from .error import VerificationError - - -class VGenericEngine(object): - _class_key = 'g' - _gen_python_module = False - - def __init__(self, verifier): - self.verifier = verifier - self.ffi = verifier.ffi - self.export_symbols = [] - self._struct_pending_verification = {} - - def patch_extension_kwds(self, kwds): - # add 'export_symbols' to the dictionary. Note that we add the - # list before filling it. When we fill it, it will thus also show - # up in kwds['export_symbols']. - kwds.setdefault('export_symbols', self.export_symbols) - - def find_module(self, module_name, path, so_suffixes): - for so_suffix in so_suffixes: - basename = module_name + so_suffix - if path is None: - path = sys.path - for dirname in path: - filename = os.path.join(dirname, basename) - if os.path.isfile(filename): - return filename - - def collect_types(self): - pass # not needed in the generic engine - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def write_source_to_f(self): - prnt = self._prnt - # first paste some standard set of lines that are mostly '#include' - prnt(cffimod_header) - # then paste the C source given by the user, verbatim. - prnt(self.verifier.preamble) - # - # call generate_gen_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. 
- self._generate('decl') - # - # on Windows, distutils insists on putting init_cffi_xyz in - # 'export_symbols', so instead of fighting it, just give up and - # give it one - if sys.platform == 'win32': - if sys.version_info >= (3,): - prefix = 'PyInit_' - else: - prefix = 'init' - modname = self.verifier.get_module_name() - prnt("void %s%s(void) { }\n" % (prefix, modname)) - - def load_library(self, flags=0): - # import it with the CFFI backend - backend = self.ffi._backend - # needs to make a path that contains '/', on Posix - filename = os.path.join(os.curdir, self.verifier.modulefilename) - module = backend.load_library(filename, flags) - # - # call loading_gen_struct() to get the struct layout inferred by - # the C compiler - self._load(module, 'loading') - - # build the FFILibrary class and instance, this is a module subclass - # because modules are expected to have usually-constant-attributes and - # in PyPy this means the JIT is able to treat attributes as constant, - # which we want. - class FFILibrary(types.ModuleType): - _cffi_generic_module = module - _cffi_ffi = self.ffi - _cffi_dir = [] - def __dir__(self): - return FFILibrary._cffi_dir - library = FFILibrary("") - # - # finally, call the loaded_gen_xxx() functions. This will set - # up the 'library' object. - self._load(module, 'loaded', library=library) - return library - - def _get_declarations(self): - lst = [(key, tp) for (key, (tp, qual)) in - self.ffi._parser._declarations.items()] - lst.sort() - return lst - - def _generate(self, step_name): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_gen_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in verify(): %r" % name) - try: - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _load(self, module, step_name, **kwds): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - method = getattr(self, '_%s_gen_%s' % (step_name, kind)) - try: - method(tp, realname, module, **kwds) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _generate_nothing(self, tp, name): - pass - - def _loaded_noop(self, tp, name, module, **kwds): - pass - - # ---------- - # typedefs: generates no code so far - - _generate_gen_typedef_decl = _generate_nothing - _loading_gen_typedef = _loaded_noop - _loaded_gen_typedef = _loaded_noop - - # ---------- - # function declarations - - def _generate_gen_function_decl(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no _cffi_f_%s wrapper) - self._generate_gen_const(False, name, tp) - return - prnt = self._prnt - numargs = len(tp.args) - argnames = [] - for i, type in enumerate(tp.args): - indirection = '' - if isinstance(type, model.StructOrUnion): - indirection = '*' - argnames.append('%sx%d' % (indirection, i)) - context = 'argument of %s' % name - arglist = [type.get_c_name(' %s' % arg, context) - for type, arg in zip(tp.args, argnames)] - tpresult = tp.result - if isinstance(tpresult, model.StructOrUnion): - arglist.insert(0, tpresult.get_c_name(' *r', context)) - tpresult = model.void_type - arglist = ', '.join(arglist) or 'void' - wrappername = '_cffi_f_%s' % name - self.export_symbols.append(wrappername) - if tp.abi: - abi = 
tp.abi + ' ' - else: - abi = '' - funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) - context = 'result of %s' % name - prnt(tpresult.get_c_name(funcdecl, context)) - prnt('{') - # - if isinstance(tp.result, model.StructOrUnion): - result_code = '*r = ' - elif not isinstance(tp.result, model.VoidType): - result_code = 'return ' - else: - result_code = '' - prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) - prnt('}') - prnt() - - _loading_gen_function = _loaded_noop - - def _loaded_gen_function(self, tp, name, module, library): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - newfunction = self._load_constant(False, tp, name, module) - else: - indirections = [] - base_tp = tp - if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) - or isinstance(tp.result, model.StructOrUnion)): - indirect_args = [] - for i, typ in enumerate(tp.args): - if isinstance(typ, model.StructOrUnion): - typ = model.PointerType(typ) - indirections.append((i, typ)) - indirect_args.append(typ) - indirect_result = tp.result - if isinstance(indirect_result, model.StructOrUnion): - if indirect_result.fldtypes is None: - raise TypeError("'%s' is used as result type, " - "but is opaque" % ( - indirect_result._get_c_name(),)) - indirect_result = model.PointerType(indirect_result) - indirect_args.insert(0, indirect_result) - indirections.insert(0, ("result", indirect_result)) - indirect_result = model.void_type - tp = model.FunctionPtrType(tuple(indirect_args), - indirect_result, tp.ellipsis) - BFunc = self.ffi._get_cached_btype(tp) - wrappername = '_cffi_f_%s' % name - newfunction = module.load_function(BFunc, wrappername) - for i, typ in indirections: - newfunction = self._make_struct_wrapper(newfunction, i, typ, - base_tp) - setattr(library, name, newfunction) - type(library)._cffi_dir.append(name) - - def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): - backend = self.ffi._backend - BType = self.ffi._get_cached_btype(tp) - if i == "result": - ffi = self.ffi - def newfunc(*args): - res = ffi.new(BType) - oldfunc(res, *args) - return res[0] - else: - def newfunc(*args): - args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] - return oldfunc(*args) - newfunc._cffi_base_type = base_tp - return newfunc - - # ---------- - # named structs - - def _generate_gen_struct_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'struct', name) - - def _loading_gen_struct(self, tp, name, module): - self._loading_struct_or_union(tp, 'struct', name, module) - - def _loaded_gen_struct(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_gen_union_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'union', name) - - def _loading_gen_union(self, tp, name, module): - self._loading_struct_or_union(tp, 'union', name, module) - - def _loaded_gen_union(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_struct_or_union_decl(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - checkfuncname = '_cffi_check_%s_%s' % (prefix, name) - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - cname = ('%s %s' % (prefix, name)).strip() - # - prnt = self._prnt - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if (isinstance(ftype, model.PrimitiveType) - 
and ftype.is_integer_type()) or fbitsize >= 0: - # accept all integers, but complain on float or double - prnt(' (void)((p->%s) << 1);' % fname) - else: - # only accept exactly the type declared. - try: - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - self.export_symbols.append(layoutfuncname) - prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) - prnt('{') - prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) - prnt(' static intptr_t nums[] = {') - prnt(' sizeof(%s),' % cname) - prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - prnt(' offsetof(%s, %s),' % (cname, fname)) - if isinstance(ftype, model.ArrayType) and ftype.length is None: - prnt(' 0, /* %s */' % ftype._get_c_name()) - else: - prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) - prnt(' -1') - prnt(' };') - prnt(' return nums[i];') - prnt(' /* the next line is not executed, but compiled */') - prnt(' %s(0);' % (checkfuncname,)) - prnt('}') - prnt() - - def _loading_struct_or_union(self, tp, prefix, name, module): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - # - BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] - function = module.load_function(BFunc, layoutfuncname) - layout = [] - num = 0 - while True: - x = function(num) - if x < 0: break - layout.append(x) - num += 1 - if isinstance(tp, model.StructOrUnion) and tp.partial: - # use the function()'s sizes and offsets to guide the - # layout of the struct - totalsize = layout[0] - totalalignment = layout[1] - fieldofs = layout[2::2] - fieldsize = layout[3::2] - tp.force_flatten() - assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) - tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment - else: - cname = ('%s %s' % (prefix, name)).strip() - self._struct_pending_verification[tp] = layout, cname - - def _loaded_struct_or_union(self, tp): - if tp.fldnames is None: - return # nothing to do with opaque structs - self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered - - if tp in self._struct_pending_verification: - # check that the layout sizes and offsets match the real ones - def check(realvalue, expectedvalue, msg): - if realvalue != expectedvalue: - raise VerificationError( - "%s (we have %d, but C compiler says %d)" - % (msg, expectedvalue, realvalue)) - ffi = self.ffi - BStruct = ffi._get_cached_btype(tp) - layout, cname = self._struct_pending_verification.pop(tp) - check(layout[0], ffi.sizeof(BStruct), "wrong total size") - check(layout[1], ffi.alignof(BStruct), "wrong total alignment") - i = 2 - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - check(layout[i], ffi.offsetof(BStruct, fname), - "wrong offset for field %r" % (fname,)) - if layout[i+1] != 0: - BField = ffi._get_cached_btype(ftype) - check(layout[i+1], ffi.sizeof(BField), - "wrong size for field %r" % (fname,)) - i += 2 - assert i == len(layout) - - # ---------- - # 'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. 
- - def _generate_gen_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_gen_enum_decl(tp, name, '') - else: - self._generate_struct_or_union_decl(tp, '', name) - - def _loading_gen_anonymous(self, tp, name, module): - if isinstance(tp, model.EnumType): - self._loading_gen_enum(tp, name, module, '') - else: - self._loading_struct_or_union(tp, '', name, module) - - def _loaded_gen_anonymous(self, tp, name, module, **kwds): - if isinstance(tp, model.EnumType): - self._loaded_gen_enum(tp, name, module, **kwds) - else: - self._loaded_struct_or_union(tp) - - # ---------- - # constants, likely declared with '#define' - - def _generate_gen_const(self, is_int, name, tp=None, category='const', - check_value=None): - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - self.export_symbols.append(funcname) - if check_value is not None: - assert is_int - assert category == 'const' - prnt('int %s(char *out_error)' % funcname) - prnt('{') - self._check_int_constant_value(name, check_value) - prnt(' return 0;') - prnt('}') - elif is_int: - assert category == 'const' - prnt('int %s(long long *out_value)' % funcname) - prnt('{') - prnt(' *out_value = (long long)(%s);' % (name,)) - prnt(' return (%s) <= 0;' % (name,)) - prnt('}') - else: - assert tp is not None - assert check_value is None - if category == 'var': - ampersand = '&' - else: - ampersand = '' - extra = '' - if category == 'const' and isinstance(tp, model.StructOrUnion): - extra = 'const *' - ampersand = '&' - prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) - prnt('{') - prnt(' return (%s%s);' % (ampersand, name)) - prnt('}') - prnt() - - def _generate_gen_constant_decl(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - self._generate_gen_const(is_int, name, tp) - - _loading_gen_constant = _loaded_noop - - def _load_constant(self, is_int, tp, name, module, check_value=None): - funcname = '_cffi_const_%s' % name - if check_value is not None: - assert is_int - self._load_known_int_constant(module, funcname) - value = check_value - elif is_int: - BType = self.ffi._typeof_locked("long long*")[0] - BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType) - negative = function(p) - value = int(p[0]) - if value < 0 and not negative: - BLongLong = self.ffi._typeof_locked("long long")[0] - value += (1 << (8*self.ffi.sizeof(BLongLong))) - else: - assert check_value is None - fntypeextra = '(*)(void)' - if isinstance(tp, model.StructOrUnion): - fntypeextra = '*' + fntypeextra - BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] - function = module.load_function(BFunc, funcname) - value = function() - if isinstance(tp, model.StructOrUnion): - value = value[0] - return value - - def _loaded_gen_constant(self, tp, name, module, library): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - value = self._load_constant(is_int, tp, name, module) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - - # ---------- - # enums - - def _check_int_constant_value(self, name, value): - prnt = self._prnt - if value <= 0: - prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( - name, name, value)) - else: - prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( - name, name, value)) - prnt(' char buf[64];') - prnt(' if ((%s) <= 0)' % name) - prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) - prnt(' else') - prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % - name) - prnt(' sprintf(out_error, "%s has the real value %s, not %s",') - prnt(' "%s", buf, "%d");' % (name[:100], value)) - prnt(' return -1;') - prnt(' }') - - def _load_known_int_constant(self, module, funcname): - BType = self.ffi._typeof_locked("char[]")[0] - BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType, 256) - if function(p) < 0: - error = self.ffi.string(p) - if sys.version_info >= (3,): - error = str(error, 'utf-8') - raise VerificationError(error) - - def _enum_funcname(self, prefix, name): - # "$enum_$1" => "___D_enum____D_1" - name = name.replace('$', '___D_') - return '_cffi_e_%s_%s' % (prefix, name) - - def _generate_gen_enum_decl(self, tp, name, prefix='enum'): - if tp.partial: - for enumerator in tp.enumerators: - self._generate_gen_const(True, enumerator) - return - # - funcname = self._enum_funcname(prefix, name) - self.export_symbols.append(funcname) - prnt = self._prnt - prnt('int %s(char *out_error)' % funcname) - prnt('{') - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._check_int_constant_value(enumerator, enumvalue) - prnt(' return 0;') - prnt('}') - prnt() - - def _loading_gen_enum(self, tp, name, module, prefix='enum'): - if tp.partial: - enumvalues = [self._load_constant(True, tp, enumerator, module) - for enumerator in tp.enumerators] - tp.enumvalues = tuple(enumvalues) - tp.partial_resolved = True - else: - funcname = self._enum_funcname(prefix, name) - self._load_known_int_constant(module, funcname) - - def _loaded_gen_enum(self, tp, name, module, library): - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - setattr(library, enumerator, enumvalue) - type(library)._cffi_dir.append(enumerator) - - # ---------- - # macros: for now only for integers - - def _generate_gen_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_gen_const(True, name, check_value=check_value) - - _loading_gen_macro = _loaded_noop - - def _loaded_gen_macro(self, tp, name, module, library): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - value = self._load_constant(True, tp, name, module, - check_value=check_value) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - - # ---------- - # global variables - - def _generate_gen_variable_decl(self, tp, name): - if isinstance(tp, model.ArrayType): - if tp.length_is_unknown(): - prnt = self._prnt - funcname = '_cffi_sizeof_%s' % (name,) - self.export_symbols.append(funcname) - prnt("size_t %s(void)" % funcname) - prnt("{") - prnt(" return sizeof(%s);" % (name,)) - prnt("}") - tp_ptr = model.PointerType(tp.item) - self._generate_gen_const(False, name, tp_ptr) - else: - tp_ptr = model.PointerType(tp) - self._generate_gen_const(False, name, tp_ptr, category='var') - - _loading_gen_variable = _loaded_noop - - def _loaded_gen_variable(self, tp, name, module, library): - if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the - # sense that "a=..." 
is forbidden - if tp.length_is_unknown(): - funcname = '_cffi_sizeof_%s' % (name,) - BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] - function = module.load_function(BFunc, funcname) - size = function() - BItemType = self.ffi._get_cached_btype(tp.item) - length, rest = divmod(size, self.ffi.sizeof(BItemType)) - if rest != 0: - raise VerificationError( - "bad size: %r does not seem to be an array of %s" % - (name, tp.item)) - tp = tp.resolve_length(length) - tp_ptr = model.PointerType(tp.item) - value = self._load_constant(False, tp_ptr, name, module) - # 'value' is a which we have to replace with - # a if the N is actually known - if tp.length is not None: - BArray = self.ffi._get_cached_btype(tp) - value = self.ffi.cast(BArray, value) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - return - # remove ptr= from the library instance, and replace - # it by a property on the class, which reads/writes into ptr[0]. - funcname = '_cffi_var_%s' % name - BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] - function = module.load_function(BFunc, funcname) - ptr = function() - def getter(library): - return ptr[0] - def setter(library, value): - ptr[0] = value - setattr(type(library), name, property(getter, setter)) - type(library)._cffi_dir.append(name) - -cffimod_header = r''' -#include -#include -#include -#include -#include /* XXX for ssize_t on some platforms */ - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif -''' diff --git a/venv/Lib/site-packages/cffi/verifier.py b/venv/Lib/site-packages/cffi/verifier.py deleted file mode 100644 index e392a2b..0000000 --- a/venv/Lib/site-packages/cffi/verifier.py +++ /dev/null @@ -1,306 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys, os, binascii, shutil, io -from . import __version_verifier_modules__ -from . 
import ffiplatform -from .error import VerificationError - -if sys.version_info >= (3, 3): - import importlib.machinery - def _extension_suffixes(): - return importlib.machinery.EXTENSION_SUFFIXES[:] -else: - import imp - def _extension_suffixes(): - return [suffix for suffix, _, type in imp.get_suffixes() - if type == imp.C_EXTENSION] - - -if sys.version_info >= (3,): - NativeIO = io.StringIO -else: - class NativeIO(io.BytesIO): - def write(self, s): - if isinstance(s, unicode): - s = s.encode('ascii') - super(NativeIO, self).write(s) - - -class Verifier(object): - - def __init__(self, ffi, preamble, tmpdir=None, modulename=None, - ext_package=None, tag='', force_generic_engine=False, - source_extension='.c', flags=None, relative_to=None, **kwds): - if ffi._parser._uses_new_feature: - raise VerificationError( - "feature not supported with ffi.verify(), but only " - "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) - self.ffi = ffi - self.preamble = preamble - if not modulename: - flattened_kwds = ffiplatform.flatten(kwds) - vengine_class = _locate_engine_class(ffi, force_generic_engine) - self._vengine = vengine_class(self) - self._vengine.patch_extension_kwds(kwds) - self.flags = flags - self.kwds = self.make_relative_to(kwds, relative_to) - # - if modulename: - if tag: - raise TypeError("can't specify both 'modulename' and 'tag'") - else: - key = '\x00'.join(['%d.%d' % sys.version_info[:2], - __version_verifier_modules__, - preamble, flattened_kwds] + - ffi._cdefsources) - if sys.version_info >= (3,): - key = key.encode('utf-8') - k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) - k1 = k1.lstrip('0x').rstrip('L') - k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) - k2 = k2.lstrip('0').rstrip('L') - modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, - k1, k2) - suffix = _get_so_suffixes()[0] - self.tmpdir = tmpdir or _caller_dir_pycache() - self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) - self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) - self.ext_package = ext_package - self._has_source = False - self._has_module = False - - def write_source(self, file=None): - """Write the C source code. It is produced in 'self.sourcefilename', - which can be tweaked beforehand.""" - with self.ffi._lock: - if self._has_source and file is None: - raise VerificationError( - "source code already written") - self._write_source(file) - - def compile_module(self): - """Write the C source code (if not done already) and compile it. - This produces a dynamic link library in 'self.modulefilename'.""" - with self.ffi._lock: - if self._has_module: - raise VerificationError("module already compiled") - if not self._has_source: - self._write_source() - self._compile_module() - - def load_library(self): - """Get a C module from this Verifier instance. - Returns an instance of a FFILibrary class that behaves like the - objects returned by ffi.dlopen(), but that delegates all - operations to the C module. If necessary, the C code is written - and compiled first. 
- """ - with self.ffi._lock: - if not self._has_module: - self._locate_module() - if not self._has_module: - if not self._has_source: - self._write_source() - self._compile_module() - return self._load_library() - - def get_module_name(self): - basename = os.path.basename(self.modulefilename) - # kill both the .so extension and the other .'s, as introduced - # by Python 3: 'basename.cpython-33m.so' - basename = basename.split('.', 1)[0] - # and the _d added in Python 2 debug builds --- but try to be - # conservative and not kill a legitimate _d - if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): - basename = basename[:-2] - return basename - - def get_extension(self): - if not self._has_source: - with self.ffi._lock: - if not self._has_source: - self._write_source() - sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) - modname = self.get_module_name() - return ffiplatform.get_extension(sourcename, modname, **self.kwds) - - def generates_python_module(self): - return self._vengine._gen_python_module - - def make_relative_to(self, kwds, relative_to): - if relative_to and os.path.dirname(relative_to): - dirname = os.path.dirname(relative_to) - kwds = kwds.copy() - for key in ffiplatform.LIST_OF_FILE_NAMES: - if key in kwds: - lst = kwds[key] - if not isinstance(lst, (list, tuple)): - raise TypeError("keyword '%s' should be a list or tuple" - % (key,)) - lst = [os.path.join(dirname, fn) for fn in lst] - kwds[key] = lst - return kwds - - # ---------- - - def _locate_module(self): - if not os.path.isfile(self.modulefilename): - if self.ext_package: - try: - pkg = __import__(self.ext_package, None, None, ['__doc__']) - except ImportError: - return # cannot import the package itself, give up - # (e.g. it might be called differently before installation) - path = pkg.__path__ - else: - path = None - filename = self._vengine.find_module(self.get_module_name(), path, - _get_so_suffixes()) - if filename is None: - return - self.modulefilename = filename - self._vengine.collect_types() - self._has_module = True - - def _write_source_to(self, file): - self._vengine._f = file - try: - self._vengine.write_source_to_f() - finally: - del self._vengine._f - - def _write_source(self, file=None): - if file is not None: - self._write_source_to(file) - else: - # Write our source file to an in memory file. 
- f = NativeIO() - self._write_source_to(f) - source_data = f.getvalue() - - # Determine if this matches the current file - if os.path.exists(self.sourcefilename): - with open(self.sourcefilename, "r") as fp: - needs_written = not (fp.read() == source_data) - else: - needs_written = True - - # Actually write the file out if it doesn't match - if needs_written: - _ensure_dir(self.sourcefilename) - with open(self.sourcefilename, "w") as fp: - fp.write(source_data) - - # Set this flag - self._has_source = True - - def _compile_module(self): - # compile this C source - tmpdir = os.path.dirname(self.sourcefilename) - outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) - try: - same = ffiplatform.samefile(outputfilename, self.modulefilename) - except OSError: - same = False - if not same: - _ensure_dir(self.modulefilename) - shutil.move(outputfilename, self.modulefilename) - self._has_module = True - - def _load_library(self): - assert self._has_module - if self.flags is not None: - return self._vengine.load_library(self.flags) - else: - return self._vengine.load_library() - -# ____________________________________________________________ - -_FORCE_GENERIC_ENGINE = False # for tests - -def _locate_engine_class(ffi, force_generic_engine): - if _FORCE_GENERIC_ENGINE: - force_generic_engine = True - if not force_generic_engine: - if '__pypy__' in sys.builtin_module_names: - force_generic_engine = True - else: - try: - import _cffi_backend - except ImportError: - _cffi_backend = '?' - if ffi._backend is not _cffi_backend: - force_generic_engine = True - if force_generic_engine: - from . import vengine_gen - return vengine_gen.VGenericEngine - else: - from . import vengine_cpy - return vengine_cpy.VCPythonEngine - -# ____________________________________________________________ - -_TMPDIR = None - -def _caller_dir_pycache(): - if _TMPDIR: - return _TMPDIR - result = os.environ.get('CFFI_TMPDIR') - if result: - return result - filename = sys._getframe(2).f_code.co_filename - return os.path.abspath(os.path.join(os.path.dirname(filename), - '__pycache__')) - -def set_tmpdir(dirname): - """Set the temporary directory to use instead of __pycache__.""" - global _TMPDIR - _TMPDIR = dirname - -def cleanup_tmpdir(tmpdir=None, keep_so=False): - """Clean up the temporary directory by removing all files in it - called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" - tmpdir = tmpdir or _caller_dir_pycache() - try: - filelist = os.listdir(tmpdir) - except OSError: - return - if keep_so: - suffix = '.c' # only remove .c files - else: - suffix = _get_so_suffixes()[0].lower() - for fn in filelist: - if fn.lower().startswith('_cffi_') and ( - fn.lower().endswith(suffix) or fn.lower().endswith('.c')): - try: - os.unlink(os.path.join(tmpdir, fn)) - except OSError: - pass - clean_dir = [os.path.join(tmpdir, 'build')] - for dir in clean_dir: - try: - for fn in os.listdir(dir): - fn = os.path.join(dir, fn) - if os.path.isdir(fn): - clean_dir.append(fn) - else: - os.unlink(fn) - except OSError: - pass - -def _get_so_suffixes(): - suffixes = _extension_suffixes() - if not suffixes: - # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext - if sys.platform == 'win32': - suffixes = [".pyd"] - else: - suffixes = [".so"] - - return suffixes - -def _ensure_dir(filename): - dirname = os.path.dirname(filename) - if dirname and not os.path.isdir(dirname): - os.makedirs(dirname) diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA deleted file mode 100644 index 8d32edc..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA +++ /dev/null @@ -1,764 +0,0 @@ -Metadata-Version: 2.4 -Name: charset-normalizer -Version: 3.4.4 -Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. -Author-email: "Ahmed R. TAHRI" -Maintainer-email: "Ahmed R. TAHRI" -License: MIT -Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md -Project-URL: Documentation, https://charset-normalizer.readthedocs.io/ -Project-URL: Code, https://github.com/jawah/charset_normalizer -Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues -Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Text Processing :: Linguistic -Classifier: Topic :: Utilities -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -License-File: LICENSE -Provides-Extra: unicode-backport -Dynamic: license-file - -

    Charset Detection, for Everyone 👋
    The Real First Universal Charset Detector

    [project logo, download-count badge, "Featured Packages" badges, and links to unofficial community ports]
    - -> A library that helps you read text from an unknown charset encoding.
    Motivated by `chardet`, -> I'm trying to resolve the issue by taking a new approach. -> All IANA character set names for which the Python core library provides codecs are supported. - -
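The gist of that pitch is a single call; below is a minimal sketch using the public `from_bytes` helper (the French sample string and the printed encoding are illustrative only, not part of the original README):

```python
from charset_normalizer import from_bytes

# A short text saved with a legacy single-byte encoding (cp1252 here).
payload = "Comme c'est étrange, il a été déçu par son café préféré.".encode("cp1252")

best_guess = from_bytes(payload).best()   # CharsetMatch or None
if best_guess is not None:
    print(best_guess.encoding)            # an encoding able to decode the payload, e.g. "cp1252"
    print(str(best_guess))                # the decoded, human-readable text
```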

    - >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< -

-
-This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
-
-| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
-|--------------------------------------------------|:---------:|:------------------:|:---------:|
-| `Fast` | ❌ | ✅ | ✅ |
-| `Universal**` | ❌ | ✅ | ❌ |
-| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
-| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
-| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
-| `Native Python` | ✅ | ✅ | ❌ |
-| `Detect spoken language` | ❌ | ✅ | N/A |
-| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
-| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
-| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
-

    - [demo images: Reading Normalized Text · Cat Reading Text]

    - -*\*\* : They rely on encoding-specific code for each supported charset, even if that covers most of the encodings in common use*
    - -## ⚡ Performance - -This package offer better performance than its counterpart Chardet. Here are some numbers. - -| Package | Accuracy | Mean per file (ms) | File per sec (est) | -|-----------------------------------------------|:--------:|:------------------:|:------------------:| -| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec | -| charset-normalizer | **98 %** | **10 ms** | 100 file/sec | - -| Package | 99th percentile | 95th percentile | 50th percentile | -|-----------------------------------------------|:---------------:|:---------------:|:---------------:| -| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms | -| charset-normalizer | 100 ms | 50 ms | 5 ms | - -_updated as of december 2024 using CPython 3.12_ - -Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload. - -> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows. -> And yes, these results might change at any time. The dataset can be updated to include more files. -> The actual delays heavily depends on your CPU capabilities. The factors should remain the same. -> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability -> (e.g. Supported Encoding) Challenge-them if you want. - -## ✨ Installation - -Using pip: - -```sh -pip install charset-normalizer -U -``` - -## 🚀 Basic Usage - -### CLI -This package comes with a CLI. - -``` -usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD] - file [file ...] - -The Real First Universal Charset Detector. Discover originating encoding used -on text file. Normalize text to unicode. - -positional arguments: - files File(s) to be analysed - -optional arguments: - -h, --help show this help message and exit - -v, --verbose Display complementary information about file if any. - Stdout will contain logs about the detection process. - -a, --with-alternative - Output complementary possibilities if any. Top-level - JSON WILL be a list. - -n, --normalize Permit to normalize input file. If not set, program - does not write anything. - -m, --minimal Only output the charset detected to STDOUT. Disabling - JSON output. - -r, --replace Replace file when trying to normalize it instead of - creating a new one. - -f, --force Replace file without asking if you are sure, use this - flag with caution. - -t THRESHOLD, --threshold THRESHOLD - Define a custom maximum amount of chaos allowed in - decoded content. 0. <= chaos <= 1. - --version Show version information and exit. -``` - -```bash -normalizer ./data/sample.1.fr.srt -``` - -or - -```bash -python -m charset_normalizer ./data/sample.1.fr.srt -``` - -🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format. 
- -```json -{ - "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", - "encoding": "cp1252", - "encoding_aliases": [ - "1252", - "windows_1252" - ], - "alternative_encodings": [ - "cp1254", - "cp1256", - "cp1258", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - "mbcs" - ], - "language": "French", - "alphabets": [ - "Basic Latin", - "Latin-1 Supplement" - ], - "has_sig_or_bom": false, - "chaos": 0.149, - "coherence": 97.152, - "unicode_path": null, - "is_preferred": true -} -``` - -### Python -*Just print out normalized text* -```python -from charset_normalizer import from_path - -results = from_path('./my_subtitle.srt') - -print(str(results.best())) -``` - -*Upgrade your code without effort* -```python -from charset_normalizer import detect -``` - -The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. - -See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) - -## 😇 Why - -When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a -reliable alternative using a completely different method. Also! I never back down on a good challenge! - -I **don't care** about the **originating charset** encoding, because **two different tables** can -produce **two identical rendered string.** -What I want is to get readable text, the best I can. - -In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 - -Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. - -## 🍰 How - - - Discard all charset encoding table that could not fit the binary content. - - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding. - - Extract matches with the lowest mess detected. - - Additionally, we measure coherence / probe for a language. - -**Wait a minute**, what is noise/mess and coherence according to **YOU ?** - -*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then -**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text). - I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to - improve or rewrite it. - -*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought -that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. - -## ⚡ Known limitations - - - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) - - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. - -## ⚠️ About Python EOLs - -**If you are running:** - -- Python >=2.7,<3.5: Unsupported -- Python 3.5: charset-normalizer < 2.1 -- Python 3.6: charset-normalizer < 3.1 -- Python 3.7: charset-normalizer < 4.0 - -Upgrade your Python interpreter as soon as possible. - -## 👤 Contributing - -Contributions, issues and feature requests are very much welcome.
    -Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. - -## 📝 License - -Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
    -This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. - -Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) - -## 💼 For Enterprise - -Professional support for charset-normalizer is available as part of the [Tidelift -Subscription][1]. Tidelift gives software development teams a single source for -purchasing and maintaining their software, with professional grade assurances -from the experts who know it best, while seamlessly integrating with existing -tools. - -[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme - -[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297) - -# Changelog -All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [3.4.4](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.4) (2025-10-13) - -### Changed -- Bound `setuptools` to a specific constraint `setuptools>=68,<=81`. -- Raised upper bound of mypyc for the optional pre-built extension to v1.18.2 - -### Removed -- `setuptools-scm` as a build dependency. - -### Misc -- Enforced hashes in `dev-requirements.txt` and created `ci-requirements.txt` for security purposes. -- Additional pre-built wheels for riscv64, s390x, and armv7l architectures. -- Restore ` multiple.intoto.jsonl` in GitHub releases in addition to individual attestation file per wheel. - -## [3.4.3](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.3) (2025-08-09) - -### Changed -- mypy(c) is no longer a required dependency at build time if `CHARSET_NORMALIZER_USE_MYPYC` isn't set to `1`. (#595) (#583) -- automatically lower confidence on small bytes samples that are not Unicode in `detect` output legacy function. (#391) - -### Added -- Custom build backend to overcome inability to mark mypy as an optional dependency in the build phase. -- Support for Python 3.14 - -### Fixed -- sdist archive contained useless directories. -- automatically fallback on valid UTF-16 or UTF-32 even if the md says it's noisy. (#633) - -### Misc -- SBOM are automatically published to the relevant GitHub release to comply with regulatory changes. - Each published wheel comes with its SBOM. We choose CycloneDX as the format. -- Prebuilt optimized wheel are no longer distributed by default for CPython 3.7 due to a change in cibuildwheel. - -## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02) - -### Fixed -- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591) -- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587) - -### Changed -- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8 - -## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24) - -### Changed -- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend. -- Enforce annotation delayed loading for a simpler and consistent types in the project. -- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8 - -### Added -- pre-commit configuration. -- noxfile. 
- -### Removed -- `build-requirements.txt` as per using `pyproject.toml` native build configuration. -- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile). -- `setup.cfg` in favor of `pyproject.toml` metadata configuration. -- Unused `utils.range_scan` function. - -### Fixed -- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572) -- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+ - -## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08) - -### Added -- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints. -- Support for Python 3.13 (#512) - -### Fixed -- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch. -- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537) -- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381) - -## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) - -### Fixed -- Unintentional memory usage regression when using large payload that match several encoding (#376) -- Regression on some detection case showcased in the documentation (#371) - -### Added -- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) - -## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) - -### Changed -- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 -- Improved the general detection reliability based on reports from the community - -## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) - -### Added -- Allow to execute the CLI (e.g. 
normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` -- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) - -### Removed -- (internal) Redundant utils.is_ascii function and unused function is_private_use_only -- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant - -### Changed -- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection -- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 - -### Fixed -- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) - -## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) - -### Changed -- Typehint for function `from_path` no longer enforce `PathLike` as its first argument -- Minor improvement over the global detection reliability - -### Added -- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries -- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) -- Explicit support for Python 3.12 - -### Fixed -- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) - -## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) - -### Added -- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) - -### Removed -- Support for Python 3.6 (PR #260) - -### Changed -- Optional speedup provided by mypy/c 1.0.1 - -## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) - -### Fixed -- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) - -### Changed -- Speedup provided by mypy/c 0.990 on Python >= 3.7 - -## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it -- Sphinx warnings when generating the documentation - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- 
Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' - -## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) - -### Added -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Removed -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) - -### Fixed -- Sphinx warnings when generating the documentation - -## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) - -### Changed -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Removed -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) - -### Deprecated -- Function `normalize` scheduled for removal in 3.0 - -### Changed -- Removed useless call to decode in fn is_unprintable (#206) - -### Fixed -- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) - -## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) - -### Added -- Output the Unicode table version when running the CLI with `--version` (PR #194) - -### Changed -- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) -- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) - -### Fixed -- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) -- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) - -### Removed -- Support for Python 3.5 (PR #192) - -### Deprecated -- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) - -## 
[2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) - -### Fixed -- ASCII miss-detection on rare cases (PR #170) - -## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) - -### Added -- Explicit support for Python 3.11 (PR #164) - -### Changed -- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) - -## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) - -### Fixed -- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) - -### Changed -- Skipping the language-detection (CD) on ASCII (PR #155) - -## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) - -### Changed -- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) - -### Fixed -- Wrong logging level applied when setting kwarg `explain` to True (PR #146) - -## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) -### Changed -- Improvement over Vietnamese detection (PR #126) -- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) -- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) -- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) -- Code style as refactored by Sourcery-AI (PR #131) -- Minor adjustment on the MD around european words (PR #133) -- Remove and replace SRTs from assets / tests (PR #139) -- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) - -### Fixed -- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) -- Avoid using too insignificant chunk (PR #137) - -### Added -- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) - -## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) -### Added -- Add support for Kazakh (Cyrillic) language detection (PR #109) - -### Changed -- Further, improve inferring the language from a given single-byte code page (PR #112) -- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) -- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) -- Various detection improvement (MD+CD) (PR #117) - -### Removed -- Remove redundant logging entry about detected language(s) (PR #115) - -### Fixed -- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) - -## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) -### Fixed -- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) -- Fix CLI crash when using --minimal output in certain cases (PR #103) - -### Changed -- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) - -## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) -### Changed -- The project now comply with: flake8, mypy, 
isort and black to ensure a better overall quality (PR #81) -- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) -- The Unicode detection is slightly improved (PR #93) -- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) - -### Removed -- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) - -### Fixed -- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) -- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) -- The MANIFEST.in was not exhaustive (PR #78) - -## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) -### Fixed -- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) -- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) -- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) -- Submatch factoring could be wrong in rare edge cases (PR #72) -- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) -- Fix line endings from CRLF to LF for certain project files (PR #67) - -### Changed -- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) -- Allow fallback on specified encoding if any (PR #71) - -## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) -### Changed -- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) -- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) - -## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) -### Fixed -- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) - -### Changed -- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) - -## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) -### Fixed -- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) -- Using explain=False permanently disable the verbose output in the current runtime (PR #47) -- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) -- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) - -### Changed -- Public function normalize default args values were not aligned with from_bytes (PR #53) - -### Added -- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) - -## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) -### Changed -- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. -- Accent has been made on UTF-8 detection, should perform rather instantaneous. -- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. 
-- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) -- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ -- utf_7 detection has been reinstated. - -### Removed -- This package no longer require anything when used with Python 3.5 (Dropped cached_property) -- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. -- The exception hook on UnicodeDecodeError has been removed. - -### Deprecated -- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 - -### Fixed -- The CLI output used the relative path of the file(s). Should be absolute. - -## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) -### Fixed -- Logger configuration/usage no longer conflict with others (PR #44) - -## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) -### Removed -- Using standard logging instead of using the package loguru. -- Dropping nose test framework in favor of the maintained pytest. -- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. -- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. -- Stop support for UTF-7 that does not contain a SIG. -- Dropping PrettyTable, replaced with pure JSON output in CLI. - -### Fixed -- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. -- Not searching properly for the BOM when trying utf32/16 parent codec. - -### Changed -- Improving the package final size by compressing frequencies.json. -- Huge improvement over the larges payload. - -### Added -- CLI now produces JSON consumable output. -- Return ASCII if given sequences fit. Given reasonable confidence. - -## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) - -### Fixed -- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) - -## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) - -### Fixed -- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39) - -## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) - -### Fixed -- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) - -## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) - -### Changed -- Amend the previous release to allow prettytable 2.0 (PR #35) - -## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) - -### Fixed -- Fix error while using the package with a python pre-release interpreter (PR #33) - -### Changed -- Dependencies refactoring, constraints revised. - -### Added -- Add python 3.9 and 3.10 to the supported interpreters - -MIT License - -Copyright (c) 2025 TAHRI Ahmed R. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD deleted file mode 100644 index f8d68fc..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD +++ /dev/null @@ -1,36 +0,0 @@ -../../Scripts/normalizer.exe,sha256=cmHBk2yUw25KOJDRjGLUO2K4ezV8239Po6fXAiI0WY8,108402 -charset_normalizer-3.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -charset_normalizer-3.4.4.dist-info/METADATA,sha256=Mg5oc0yfpVMtDcprHt_pPbbV0qUSHEeaEz4NG53pmyY,38067 -charset_normalizer-3.4.4.dist-info/RECORD,, -charset_normalizer-3.4.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer-3.4.4.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101 -charset_normalizer-3.4.4.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 -charset_normalizer-3.4.4.dist-info/licenses/LICENSE,sha256=GFd0hdNwTxpHne2OVzwJds_tMV_S_ReYP6mI2kwvcNE,1092 -charset_normalizer-3.4.4.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 -charset_normalizer/__init__.py,sha256=0NT8MHi7SKq3juMqYfOdrkzjisK0L73lneNHH4qaUAs,1638 -charset_normalizer/__main__.py,sha256=2sj_BS6H0sU25C1bMqz9DVwa6kOK9lchSEbSU-_iu7M,115 -charset_normalizer/__pycache__/__init__.cpython-312.pyc,, -charset_normalizer/__pycache__/__main__.cpython-312.pyc,, -charset_normalizer/__pycache__/api.cpython-312.pyc,, -charset_normalizer/__pycache__/cd.cpython-312.pyc,, -charset_normalizer/__pycache__/constant.cpython-312.pyc,, -charset_normalizer/__pycache__/legacy.cpython-312.pyc,, -charset_normalizer/__pycache__/md.cpython-312.pyc,, -charset_normalizer/__pycache__/models.cpython-312.pyc,, -charset_normalizer/__pycache__/utils.cpython-312.pyc,, -charset_normalizer/__pycache__/version.cpython-312.pyc,, -charset_normalizer/api.py,sha256=ODy4hX78b3ldTl5sViYPU1yzQ5qkclfgSIFE8BtNrTI,23337 -charset_normalizer/cd.py,sha256=uq8nVxRpR6Guc16ACvOWtL8KO3w7vYaCh8hHisuOyTg,12917 -charset_normalizer/cli/__init__.py,sha256=d9MUx-1V_qD3x9igIy4JT4oC5CU0yjulk7QyZWeRFhg,144 -charset_normalizer/cli/__main__.py,sha256=-pdJCyPywouPyFsC8_eTSgTmvh1YEvgjsvy1WZ0XjaA,13027 -charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc,, -charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc,, -charset_normalizer/constant.py,sha256=mCJmYzpBU27Ut9kiNWWoBbhhxQ-aRVw3K7LSwoFwBGI,44728 
-charset_normalizer/legacy.py,sha256=ui08NlKqAXU3Y7smK-NFJjEgRRQz9ruM7aNCbT0OOrE,2811 -charset_normalizer/md.cp312-win_amd64.pyd,sha256=dqU14JU7SKI0i4dyNqV5nPHQHLIUIsfxeULzU2fLXI8,10752 -charset_normalizer/md.py,sha256=LSuW2hNgXSgF7JGdRapLAHLuj6pABHiP85LTNAYmu7c,20780 -charset_normalizer/md__mypyc.cp312-win_amd64.pyd,sha256=CDDD_25vg5Sn3xcPlfwQ3mWrnyKzD50jg_DMKZuN8QE,126976 -charset_normalizer/models.py,sha256=ZR2PE-fqf6dASZfqdE5Uhkmr0o1MciSdXOjuNqwkmvg,12754 -charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer/utils.py,sha256=XtWIQeOuz7cnGebMzyi4Vvi1JtA84QBSIeR9PDzF7pw,12584 -charset_normalizer/version.py,sha256=MhW8dOLls4GbbxBUqeS1huc7Rth1ArKi4nS90qTFwz8,123 diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/REQUESTED b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL deleted file mode 100644 index 10ac2c2..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt deleted file mode 100644 index 65619e7..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -normalizer = charset_normalizer.cli:cli_detect diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE deleted file mode 100644 index 9725772..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2025 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
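For reviewers scanning this removal: the files above belong to the vendored charset_normalizer package whose changelog and license appear earlier in this diff. The following is a minimal usage sketch, assuming only the public API named in the deleted files (`from_bytes`, `is_binary`, and the Chardet-compatible `detect` helper); the sample payload and the values shown in comments are illustrative, not taken from this repository.

```python
# Minimal sketch of the public API exposed by the charset_normalizer package
# removed from the vendored venv above. Sample bytes are illustrative only.
from charset_normalizer import from_bytes, is_binary, detect

payload = "Bсеки човек има право на образование.".encode("utf_8")

# Primary API: rank plausible encodings and keep the best candidate.
results = from_bytes(payload)
best_guess = results.best()
if best_guess is not None:
    print(best_guess.encoding)  # e.g. "utf_8"
    print(str(best_guess))      # payload decoded with the detected encoding

# Binary-vs-text check built on the same heuristics (fallback matches disabled).
print(is_binary(b"\x00\x01\x02\xff"))  # expected: True for a non-text payload

# Legacy, Chardet-compatible entry point returning a plain dict.
print(detect(payload))  # {'encoding': ..., 'language': ..., 'confidence': ...}
```

The `normalizer` console script declared in the deleted entry_points.txt maps to `charset_normalizer.cli:cli_detect`, so the same detection is also reachable as `normalizer <file>` or `python -m charset_normalizer <file>`.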
diff --git a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt b/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt deleted file mode 100644 index 66958f0..0000000 --- a/venv/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -charset_normalizer diff --git a/venv/Lib/site-packages/charset_normalizer/__init__.py b/venv/Lib/site-packages/charset_normalizer/__init__.py deleted file mode 100644 index 0d3a379..0000000 --- a/venv/Lib/site-packages/charset_normalizer/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Charset-Normalizer -~~~~~~~~~~~~~~ -The Real First Universal Charset Detector. -A library that helps you read text from an unknown charset encoding. -Motivated by chardet, This package is trying to resolve the issue by taking a new approach. -All IANA character set names for which the Python core library provides codecs are supported. - -Basic usage: - >>> from charset_normalizer import from_bytes - >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) - >>> best_guess = results.best() - >>> str(best_guess) - 'Bсеки човек има право на образование. Oбразованието!' - -Others methods and usages are available - see the full documentation -at . -:copyright: (c) 2021 by Ahmed TAHRI -:license: MIT, see LICENSE for more details. -""" - -from __future__ import annotations - -import logging - -from .api import from_bytes, from_fp, from_path, is_binary -from .legacy import detect -from .models import CharsetMatch, CharsetMatches -from .utils import set_logging_handler -from .version import VERSION, __version__ - -__all__ = ( - "from_fp", - "from_path", - "from_bytes", - "is_binary", - "detect", - "CharsetMatch", - "CharsetMatches", - "__version__", - "VERSION", - "set_logging_handler", -) - -# Attach a NullHandler to the top level logger by default -# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library - -logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/Lib/site-packages/charset_normalizer/__main__.py b/venv/Lib/site-packages/charset_normalizer/__main__.py deleted file mode 100644 index e0e76f7..0000000 --- a/venv/Lib/site-packages/charset_normalizer/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -from __future__ import annotations - -from .cli import cli_detect - -if __name__ == "__main__": - cli_detect() diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index af3681b..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/__main__.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index f174834..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-312.pyc deleted file mode 100644 index 1235a95..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-312.pyc 
b/venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-312.pyc deleted file mode 100644 index ea831e9..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-312.pyc deleted file mode 100644 index c550086..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-312.pyc deleted file mode 100644 index c7e381b..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/md.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/md.cpython-312.pyc deleted file mode 100644 index 339e032..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/md.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 8bf261a..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index ab34031..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-312.pyc deleted file mode 100644 index 5c30288..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/api.py b/venv/Lib/site-packages/charset_normalizer/api.py deleted file mode 100644 index ebd9639..0000000 --- a/venv/Lib/site-packages/charset_normalizer/api.py +++ /dev/null @@ -1,669 +0,0 @@ -from __future__ import annotations - -import logging -from os import PathLike -from typing import BinaryIO - -from .cd import ( - coherence_ratio, - encoding_languages, - mb_encoding_languages, - merge_coherence_ratios, -) -from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE -from .md import mess_ratio -from .models import CharsetMatch, CharsetMatches -from .utils import ( - any_specified_encoding, - cut_sequence_chunks, - iana_name, - identify_sig_or_bom, - is_cp_similar, - is_multi_byte_encoding, - should_strip_sig_or_bom, -) - -logger = logging.getLogger("charset_normalizer") -explain_handler = logging.StreamHandler() -explain_handler.setFormatter( - logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") -) - - -def from_bytes( - sequences: bytes | bytearray, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.2, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> 
CharsetMatches: - """ - Given a raw bytes sequence, return the best possibles charset usable to render str objects. - If there is no results, it is a strong indicator that the source is binary/not text. - By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. - And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. - - The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page - but never take it for granted. Can improve the performance. - - You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that - purpose. - - This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. - By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' - toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. - Custom logging format and handler can be set manually. - """ - - if not isinstance(sequences, (bytearray, bytes)): - raise TypeError( - "Expected object of type bytes or bytearray, got: {}".format( - type(sequences) - ) - ) - - if explain: - previous_logger_level: int = logger.level - logger.addHandler(explain_handler) - logger.setLevel(TRACE) - - length: int = len(sequences) - - if length == 0: - logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level or logging.WARNING) - return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) - - if cp_isolation is not None: - logger.log( - TRACE, - "cp_isolation is set. use this flag for debugging purpose. " - "limited list of encoding allowed : %s.", - ", ".join(cp_isolation), - ) - cp_isolation = [iana_name(cp, False) for cp in cp_isolation] - else: - cp_isolation = [] - - if cp_exclusion is not None: - logger.log( - TRACE, - "cp_exclusion is set. use this flag for debugging purpose. " - "limited list of encoding excluded : %s.", - ", ".join(cp_exclusion), - ) - cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] - else: - cp_exclusion = [] - - if length <= (chunk_size * steps): - logger.log( - TRACE, - "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", - steps, - chunk_size, - length, - ) - steps = 1 - chunk_size = length - - if steps > 1 and length / steps < chunk_size: - chunk_size = int(length / steps) - - is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE - is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE - - if is_too_small_sequence: - logger.log( - TRACE, - "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( - length - ), - ) - elif is_too_large_sequence: - logger.log( - TRACE, - "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( - length - ), - ) - - prioritized_encodings: list[str] = [] - - specified_encoding: str | None = ( - any_specified_encoding(sequences) if preemptive_behaviour else None - ) - - if specified_encoding is not None: - prioritized_encodings.append(specified_encoding) - logger.log( - TRACE, - "Detected declarative mark in sequence. 
Priority +1 given for %s.", - specified_encoding, - ) - - tested: set[str] = set() - tested_but_hard_failure: list[str] = [] - tested_but_soft_failure: list[str] = [] - - fallback_ascii: CharsetMatch | None = None - fallback_u8: CharsetMatch | None = None - fallback_specified: CharsetMatch | None = None - - results: CharsetMatches = CharsetMatches() - - early_stop_results: CharsetMatches = CharsetMatches() - - sig_encoding, sig_payload = identify_sig_or_bom(sequences) - - if sig_encoding is not None: - prioritized_encodings.append(sig_encoding) - logger.log( - TRACE, - "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.", - len(sig_payload), - sig_encoding, - ) - - prioritized_encodings.append("ascii") - - if "utf_8" not in prioritized_encodings: - prioritized_encodings.append("utf_8") - - for encoding_iana in prioritized_encodings + IANA_SUPPORTED: - if cp_isolation and encoding_iana not in cp_isolation: - continue - - if cp_exclusion and encoding_iana in cp_exclusion: - continue - - if encoding_iana in tested: - continue - - tested.add(encoding_iana) - - decoded_payload: str | None = None - bom_or_sig_available: bool = sig_encoding == encoding_iana - strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( - encoding_iana - ) - - if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", - encoding_iana, - ) - continue - if encoding_iana in {"utf_7"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", - encoding_iana, - ) - continue - - try: - is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) - except (ModuleNotFoundError, ImportError): - logger.log( - TRACE, - "Encoding %s does not provide an IncrementalDecoder", - encoding_iana, - ) - continue - - try: - if is_too_large_sequence and is_multi_byte_decoder is False: - str( - ( - sequences[: int(50e4)] - if strip_sig_or_bom is False - else sequences[len(sig_payload) : int(50e4)] - ), - encoding=encoding_iana, - ) - else: - decoded_payload = str( - ( - sequences - if strip_sig_or_bom is False - else sequences[len(sig_payload) :] - ), - encoding=encoding_iana, - ) - except (UnicodeDecodeError, LookupError) as e: - if not isinstance(e, LookupError): - logger.log( - TRACE, - "Code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - similar_soft_failure_test: bool = False - - for encoding_soft_failed in tested_but_soft_failure: - if is_cp_similar(encoding_iana, encoding_soft_failed): - similar_soft_failure_test = True - break - - if similar_soft_failure_test: - logger.log( - TRACE, - "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", - encoding_iana, - encoding_soft_failed, - ) - continue - - r_ = range( - 0 if not bom_or_sig_available else len(sig_payload), - length, - int(length / steps), - ) - - multi_byte_bonus: bool = ( - is_multi_byte_decoder - and decoded_payload is not None - and len(decoded_payload) < length - ) - - if multi_byte_bonus: - logger.log( - TRACE, - "Code page %s is a multi byte encoding table and it appear that at least one character " - "was encoded using n-bytes.", - encoding_iana, - ) - - max_chunk_gave_up: int = int(len(r_) / 4) - - max_chunk_gave_up = max(max_chunk_gave_up, 2) - early_stop_count: int = 0 - lazy_str_hard_failure = False - - md_chunks: list[str] = [] - md_ratios = [] - - try: - for chunk in cut_sequence_chunks( - sequences, - encoding_iana, - r_, - chunk_size, - bom_or_sig_available, - strip_sig_or_bom, - sig_payload, - is_multi_byte_decoder, - decoded_payload, - ): - md_chunks.append(chunk) - - md_ratios.append( - mess_ratio( - chunk, - threshold, - explain is True and 1 <= len(cp_isolation) <= 2, - ) - ) - - if md_ratios[-1] >= threshold: - early_stop_count += 1 - - if (early_stop_count >= max_chunk_gave_up) or ( - bom_or_sig_available and strip_sig_or_bom is False - ): - break - except ( - UnicodeDecodeError - ) as e: # Lazy str loading may have missed something there - logger.log( - TRACE, - "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - early_stop_count = max_chunk_gave_up - lazy_str_hard_failure = True - - # We might want to check the sequence again with the whole content - # Only if initial MD tests passes - if ( - not lazy_str_hard_failure - and is_too_large_sequence - and not is_multi_byte_decoder - ): - try: - sequences[int(50e3) :].decode(encoding_iana, errors="strict") - except UnicodeDecodeError as e: - logger.log( - TRACE, - "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 - if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: - tested_but_soft_failure.append(encoding_iana) - logger.log( - TRACE, - "%s was excluded because of initial chaos probing. Gave up %i time(s). " - "Computed mean chaos is %f %%.", - encoding_iana, - early_stop_count, - round(mean_mess_ratio * 100, ndigits=3), - ) - # Preparing those fallbacks in case we got nothing. - if ( - enable_fallback - and encoding_iana - in ["ascii", "utf_8", specified_encoding, "utf_16", "utf_32"] - and not lazy_str_hard_failure - ): - fallback_entry = CharsetMatch( - sequences, - encoding_iana, - threshold, - bom_or_sig_available, - [], - decoded_payload, - preemptive_declaration=specified_encoding, - ) - if encoding_iana == specified_encoding: - fallback_specified = fallback_entry - elif encoding_iana == "ascii": - fallback_ascii = fallback_entry - else: - fallback_u8 = fallback_entry - continue - - logger.log( - TRACE, - "%s passed initial chaos probing. 
Mean measured chaos is %f %%", - encoding_iana, - round(mean_mess_ratio * 100, ndigits=3), - ) - - if not is_multi_byte_decoder: - target_languages: list[str] = encoding_languages(encoding_iana) - else: - target_languages = mb_encoding_languages(encoding_iana) - - if target_languages: - logger.log( - TRACE, - "{} should target any language(s) of {}".format( - encoding_iana, str(target_languages) - ), - ) - - cd_ratios = [] - - # We shall skip the CD when its about ASCII - # Most of the time its not relevant to run "language-detection" on it. - if encoding_iana != "ascii": - for chunk in md_chunks: - chunk_languages = coherence_ratio( - chunk, - language_threshold, - ",".join(target_languages) if target_languages else None, - ) - - cd_ratios.append(chunk_languages) - - cd_ratios_merged = merge_coherence_ratios(cd_ratios) - - if cd_ratios_merged: - logger.log( - TRACE, - "We detected language {} using {}".format( - cd_ratios_merged, encoding_iana - ), - ) - - current_match = CharsetMatch( - sequences, - encoding_iana, - mean_mess_ratio, - bom_or_sig_available, - cd_ratios_merged, - ( - decoded_payload - if ( - is_too_large_sequence is False - or encoding_iana in [specified_encoding, "ascii", "utf_8"] - ) - else None - ), - preemptive_declaration=specified_encoding, - ) - - results.append(current_match) - - if ( - encoding_iana in [specified_encoding, "ascii", "utf_8"] - and mean_mess_ratio < 0.1 - ): - # If md says nothing to worry about, then... stop immediately! - if mean_mess_ratio == 0.0: - logger.debug( - "Encoding detection: %s is most likely the one.", - current_match.encoding, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([current_match]) - - early_stop_results.append(current_match) - - if ( - len(early_stop_results) - and (specified_encoding is None or specified_encoding in tested) - and "ascii" in tested - and "utf_8" in tested - ): - probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] - logger.debug( - "Encoding detection: %s is most likely the one.", - probable_result.encoding, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return CharsetMatches([probable_result]) - - if encoding_iana == sig_encoding: - logger.debug( - "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " - "the beginning of the sequence.", - encoding_iana, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([results[encoding_iana]]) - - if len(results) == 0: - if fallback_u8 or fallback_ascii or fallback_specified: - logger.log( - TRACE, - "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.", - ) - - if fallback_specified: - logger.debug( - "Encoding detection: %s will be used as a fallback match", - fallback_specified.encoding, - ) - results.append(fallback_specified) - elif ( - (fallback_u8 and fallback_ascii is None) - or ( - fallback_u8 - and fallback_ascii - and fallback_u8.fingerprint != fallback_ascii.fingerprint - ) - or (fallback_u8 is not None) - ): - logger.debug("Encoding detection: utf_8 will be used as a fallback match") - results.append(fallback_u8) - elif fallback_ascii: - logger.debug("Encoding detection: ascii will be used as a fallback match") - results.append(fallback_ascii) - - if results: - logger.debug( - "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", - results.best().encoding, # type: ignore - len(results) - 1, - ) - else: - logger.debug("Encoding detection: Unable to determine any suitable charset.") - - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return results - - -def from_fp( - fp: BinaryIO, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but using a file pointer that is already ready. - Will not close the file pointer. - """ - return from_bytes( - fp.read(), - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def from_path( - path: str | bytes | PathLike, # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. - Can raise IOError. - """ - with open(path, "rb") as fp: - return from_fp( - fp, - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def is_binary( - fp_or_path_or_payload: PathLike | str | BinaryIO | bytes, # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = False, -) -> bool: - """ - Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. - Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match - are disabled to be stricter around ASCII-compatible but unlikely to be a string. 
- """ - if isinstance(fp_or_path_or_payload, (str, PathLike)): - guesses = from_path( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - elif isinstance( - fp_or_path_or_payload, - ( - bytes, - bytearray, - ), - ): - guesses = from_bytes( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - else: - guesses = from_fp( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - - return not guesses diff --git a/venv/Lib/site-packages/charset_normalizer/cd.py b/venv/Lib/site-packages/charset_normalizer/cd.py deleted file mode 100644 index 71a3ed5..0000000 --- a/venv/Lib/site-packages/charset_normalizer/cd.py +++ /dev/null @@ -1,395 +0,0 @@ -from __future__ import annotations - -import importlib -from codecs import IncrementalDecoder -from collections import Counter -from functools import lru_cache -from typing import Counter as TypeCounter - -from .constant import ( - FREQUENCIES, - KO_NAMES, - LANGUAGE_SUPPORTED_COUNT, - TOO_SMALL_SEQUENCE, - ZH_NAMES, -) -from .md import is_suspiciously_successive_range -from .models import CoherenceMatches -from .utils import ( - is_accentuated, - is_latin, - is_multi_byte_encoding, - is_unicode_range_secondary, - unicode_range, -) - - -def encoding_unicode_range(iana_name: str) -> list[str]: - """ - Return associated unicode ranges in a single byte code page. - """ - if is_multi_byte_encoding(iana_name): - raise OSError("Function not supported on multi-byte code page") - - decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder - - p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: dict[str, int] = {} - character_count: int = 0 - - for i in range(0x40, 0xFF): - chunk: str = p.decode(bytes([i])) - - if chunk: - character_range: str | None = unicode_range(chunk) - - if character_range is None: - continue - - if is_unicode_range_secondary(character_range) is False: - if character_range not in seen_ranges: - seen_ranges[character_range] = 0 - seen_ranges[character_range] += 1 - character_count += 1 - - return sorted( - [ - character_range - for character_range in seen_ranges - if seen_ranges[character_range] / character_count >= 0.15 - ] - ) - - -def unicode_range_languages(primary_range: str) -> list[str]: - """ - Return inferred languages used with a unicode range. - """ - languages: list[str] = [] - - for language, characters in FREQUENCIES.items(): - for character in characters: - if unicode_range(character) == primary_range: - languages.append(language) - break - - return languages - - -@lru_cache() -def encoding_languages(iana_name: str) -> list[str]: - """ - Single-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. 
- """ - unicode_ranges: list[str] = encoding_unicode_range(iana_name) - primary_range: str | None = None - - for specified_range in unicode_ranges: - if "Latin" not in specified_range: - primary_range = specified_range - break - - if primary_range is None: - return ["Latin Based"] - - return unicode_range_languages(primary_range) - - -@lru_cache() -def mb_encoding_languages(iana_name: str) -> list[str]: - """ - Multi-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. - """ - if ( - iana_name.startswith("shift_") - or iana_name.startswith("iso2022_jp") - or iana_name.startswith("euc_j") - or iana_name == "cp932" - ): - return ["Japanese"] - if iana_name.startswith("gb") or iana_name in ZH_NAMES: - return ["Chinese"] - if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: - return ["Korean"] - - return [] - - -@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> tuple[bool, bool]: - """ - Determine main aspects from a supported language if it contains accents and if is pure Latin. - """ - target_have_accents: bool = False - target_pure_latin: bool = True - - for character in FREQUENCIES[language]: - if not target_have_accents and is_accentuated(character): - target_have_accents = True - if target_pure_latin and is_latin(character) is False: - target_pure_latin = False - - return target_have_accents, target_pure_latin - - -def alphabet_languages( - characters: list[str], ignore_non_latin: bool = False -) -> list[str]: - """ - Return associated languages associated to given characters. - """ - languages: list[tuple[str, float]] = [] - - source_have_accents = any(is_accentuated(character) for character in characters) - - for language, language_characters in FREQUENCIES.items(): - target_have_accents, target_pure_latin = get_target_features(language) - - if ignore_non_latin and target_pure_latin is False: - continue - - if target_have_accents is False and source_have_accents: - continue - - character_count: int = len(language_characters) - - character_match_count: int = len( - [c for c in language_characters if c in characters] - ) - - ratio: float = character_match_count / character_count - - if ratio >= 0.2: - languages.append((language, ratio)) - - languages = sorted(languages, key=lambda x: x[1], reverse=True) - - return [compatible_language[0] for compatible_language in languages] - - -def characters_popularity_compare( - language: str, ordered_characters: list[str] -) -> float: - """ - Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. - The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). - Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
- """ - if language not in FREQUENCIES: - raise ValueError(f"{language} not available") - - character_approved_count: int = 0 - FREQUENCIES_language_set = set(FREQUENCIES[language]) - - ordered_characters_count: int = len(ordered_characters) - target_language_characters_count: int = len(FREQUENCIES[language]) - - large_alphabet: bool = target_language_characters_count > 26 - - for character, character_rank in zip( - ordered_characters, range(0, ordered_characters_count) - ): - if character not in FREQUENCIES_language_set: - continue - - character_rank_in_language: int = FREQUENCIES[language].index(character) - expected_projection_ratio: float = ( - target_language_characters_count / ordered_characters_count - ) - character_rank_projection: int = int(character_rank * expected_projection_ratio) - - if ( - large_alphabet is False - and abs(character_rank_projection - character_rank_in_language) > 4 - ): - continue - - if ( - large_alphabet is True - and abs(character_rank_projection - character_rank_in_language) - < target_language_characters_count / 3 - ): - character_approved_count += 1 - continue - - characters_before_source: list[str] = FREQUENCIES[language][ - 0:character_rank_in_language - ] - characters_after_source: list[str] = FREQUENCIES[language][ - character_rank_in_language: - ] - characters_before: list[str] = ordered_characters[0:character_rank] - characters_after: list[str] = ordered_characters[character_rank:] - - before_match_count: int = len( - set(characters_before) & set(characters_before_source) - ) - - after_match_count: int = len( - set(characters_after) & set(characters_after_source) - ) - - if len(characters_before_source) == 0 and before_match_count <= 4: - character_approved_count += 1 - continue - - if len(characters_after_source) == 0 and after_match_count <= 4: - character_approved_count += 1 - continue - - if ( - before_match_count / len(characters_before_source) >= 0.4 - or after_match_count / len(characters_after_source) >= 0.4 - ): - character_approved_count += 1 - continue - - return character_approved_count / len(ordered_characters) - - -def alpha_unicode_split(decoded_sequence: str) -> list[str]: - """ - Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. - Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; - One containing the latin letters and the other hebrew. - """ - layers: dict[str, str] = {} - - for character in decoded_sequence: - if character.isalpha() is False: - continue - - character_range: str | None = unicode_range(character) - - if character_range is None: - continue - - layer_target_range: str | None = None - - for discovered_range in layers: - if ( - is_suspiciously_successive_range(discovered_range, character_range) - is False - ): - layer_target_range = discovered_range - break - - if layer_target_range is None: - layer_target_range = character_range - - if layer_target_range not in layers: - layers[layer_target_range] = character.lower() - continue - - layers[layer_target_range] += character.lower() - - return list(layers.values()) - - -def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches: - """ - This function merge results previously given by the function coherence_ratio. - The return type is the same as coherence_ratio. 
- """ - per_language_ratios: dict[str, list[float]] = {} - for result in results: - for sub_result in result: - language, ratio = sub_result - if language not in per_language_ratios: - per_language_ratios[language] = [ratio] - continue - per_language_ratios[language].append(ratio) - - merge = [ - ( - language, - round( - sum(per_language_ratios[language]) / len(per_language_ratios[language]), - 4, - ), - ) - for language in per_language_ratios - ] - - return sorted(merge, key=lambda x: x[1], reverse=True) - - -def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: - """ - We shall NOT return "English—" in CoherenceMatches because it is an alternative - of "English". This function only keeps the best match and remove the em-dash in it. - """ - index_results: dict[str, list[float]] = dict() - - for result in results: - language, ratio = result - no_em_name: str = language.replace("—", "") - - if no_em_name not in index_results: - index_results[no_em_name] = [] - - index_results[no_em_name].append(ratio) - - if any(len(index_results[e]) > 1 for e in index_results): - filtered_results: CoherenceMatches = [] - - for language in index_results: - filtered_results.append((language, max(index_results[language]))) - - return filtered_results - - return results - - -@lru_cache(maxsize=2048) -def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None -) -> CoherenceMatches: - """ - Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. - A layer = Character extraction by alphabets/ranges. - """ - - results: list[tuple[str, float]] = [] - ignore_non_latin: bool = False - - sufficient_match_count: int = 0 - - lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] - if "Latin Based" in lg_inclusion_list: - ignore_non_latin = True - lg_inclusion_list.remove("Latin Based") - - for layer in alpha_unicode_split(decoded_sequence): - sequence_frequencies: TypeCounter[str] = Counter(layer) - most_common = sequence_frequencies.most_common() - - character_count: int = sum(o for c, o in most_common) - - if character_count <= TOO_SMALL_SEQUENCE: - continue - - popular_character_ordered: list[str] = [c for c, o in most_common] - - for language in lg_inclusion_list or alphabet_languages( - popular_character_ordered, ignore_non_latin - ): - ratio: float = characters_popularity_compare( - language, popular_character_ordered - ) - - if ratio < threshold: - continue - elif ratio >= 0.8: - sufficient_match_count += 1 - - results.append((language, round(ratio, 4))) - - if sufficient_match_count >= 3: - break - - return sorted( - filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True - ) diff --git a/venv/Lib/site-packages/charset_normalizer/cli/__init__.py b/venv/Lib/site-packages/charset_normalizer/cli/__init__.py deleted file mode 100644 index 543a5a4..0000000 --- a/venv/Lib/site-packages/charset_normalizer/cli/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import annotations - -from .__main__ import cli_detect, query_yes_no - -__all__ = ( - "cli_detect", - "query_yes_no", -) diff --git a/venv/Lib/site-packages/charset_normalizer/cli/__main__.py b/venv/Lib/site-packages/charset_normalizer/cli/__main__.py deleted file mode 100644 index cb64156..0000000 --- a/venv/Lib/site-packages/charset_normalizer/cli/__main__.py +++ /dev/null @@ -1,381 +0,0 @@ -from __future__ import annotations - -import argparse -import sys -import typing -from json import dumps 
-from os.path import abspath, basename, dirname, join, realpath -from platform import python_version -from unicodedata import unidata_version - -import charset_normalizer.md as md_module -from charset_normalizer import from_fp -from charset_normalizer.models import CliDetectionResult -from charset_normalizer.version import __version__ - - -def query_yes_no(question: str, default: str = "yes") -> bool: - """Ask a yes/no question via input() and return their answer. - - "question" is a string that is presented to the user. - "default" is the presumed answer if the user just hits . - It must be "yes" (the default), "no" or None (meaning - an answer is required of the user). - - The "answer" return value is True for "yes" or False for "no". - - Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input - """ - valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} - if default is None: - prompt = " [y/n] " - elif default == "yes": - prompt = " [Y/n] " - elif default == "no": - prompt = " [y/N] " - else: - raise ValueError("invalid default answer: '%s'" % default) - - while True: - sys.stdout.write(question + prompt) - choice = input().lower() - if default is not None and choice == "": - return valid[default] - elif choice in valid: - return valid[choice] - else: - sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") - - -class FileType: - """Factory for creating file object types - - Instances of FileType are typically passed as type= arguments to the - ArgumentParser add_argument() method. - - Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. - - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. 
- - Backported from CPython 3.12 - """ - - def __init__( - self, - mode: str = "r", - bufsize: int = -1, - encoding: str | None = None, - errors: str | None = None, - ): - self._mode = mode - self._bufsize = bufsize - self._encoding = encoding - self._errors = errors - - def __call__(self, string: str) -> typing.IO: # type: ignore[type-arg] - # the special argument "-" means sys.std{in,out} - if string == "-": - if "r" in self._mode: - return sys.stdin.buffer if "b" in self._mode else sys.stdin - elif any(c in self._mode for c in "wax"): - return sys.stdout.buffer if "b" in self._mode else sys.stdout - else: - msg = f'argument "-" with mode {self._mode}' - raise ValueError(msg) - - # all other arguments are used as file names - try: - return open(string, self._mode, self._bufsize, self._encoding, self._errors) - except OSError as e: - message = f"can't open '{string}': {e}" - raise argparse.ArgumentTypeError(message) - - def __repr__(self) -> str: - args = self._mode, self._bufsize - kwargs = [("encoding", self._encoding), ("errors", self._errors)] - args_str = ", ".join( - [repr(arg) for arg in args if arg != -1] - + [f"{kw}={arg!r}" for kw, arg in kwargs if arg is not None] - ) - return f"{type(self).__name__}({args_str})" - - -def cli_detect(argv: list[str] | None = None) -> int: - """ - CLI assistant using ARGV and ArgumentParser - :param argv: - :return: 0 if everything is fine, anything else equal trouble - """ - parser = argparse.ArgumentParser( - description="The Real First Universal Charset Detector. " - "Discover originating encoding used on text file. " - "Normalize text to unicode." - ) - - parser.add_argument( - "files", type=FileType("rb"), nargs="+", help="File(s) to be analysed" - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - default=False, - dest="verbose", - help="Display complementary information about file if any. " - "Stdout will contain logs about the detection process.", - ) - parser.add_argument( - "-a", - "--with-alternative", - action="store_true", - default=False, - dest="alternatives", - help="Output complementary possibilities if any. Top-level JSON WILL be a list.", - ) - parser.add_argument( - "-n", - "--normalize", - action="store_true", - default=False, - dest="normalize", - help="Permit to normalize input file. If not set, program does not write anything.", - ) - parser.add_argument( - "-m", - "--minimal", - action="store_true", - default=False, - dest="minimal", - help="Only output the charset detected to STDOUT. Disabling JSON output.", - ) - parser.add_argument( - "-r", - "--replace", - action="store_true", - default=False, - dest="replace", - help="Replace file when trying to normalize it instead of creating a new one.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - dest="force", - help="Replace file without asking if you are sure, use this flag with caution.", - ) - parser.add_argument( - "-i", - "--no-preemptive", - action="store_true", - default=False, - dest="no_preemptive", - help="Disable looking at a charset declaration to hint the detector.", - ) - parser.add_argument( - "-t", - "--threshold", - action="store", - default=0.2, - type=float, - dest="threshold", - help="Define a custom maximum amount of noise allowed in decoded content. 0. 
<= noise <= 1.", - ) - parser.add_argument( - "--version", - action="version", - version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( - __version__, - python_version(), - unidata_version, - "OFF" if md_module.__file__.lower().endswith(".py") else "ON", - ), - help="Show version information and exit.", - ) - - args = parser.parse_args(argv) - - if args.replace is True and args.normalize is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --replace in addition of --normalize only.", file=sys.stderr) - return 1 - - if args.force is True and args.replace is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --force in addition of --replace only.", file=sys.stderr) - return 1 - - if args.threshold < 0.0 or args.threshold > 1.0: - if args.files: - for my_file in args.files: - my_file.close() - print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) - return 1 - - x_ = [] - - for my_file in args.files: - matches = from_fp( - my_file, - threshold=args.threshold, - explain=args.verbose, - preemptive_behaviour=args.no_preemptive is False, - ) - - best_guess = matches.best() - - if best_guess is None: - print( - 'Unable to identify originating encoding for "{}". {}'.format( - my_file.name, - ( - "Maybe try increasing maximum amount of chaos." - if args.threshold < 1.0 - else "" - ), - ), - file=sys.stderr, - ) - x_.append( - CliDetectionResult( - abspath(my_file.name), - None, - [], - [], - "Unknown", - [], - False, - 1.0, - 0.0, - None, - True, - ) - ) - else: - x_.append( - CliDetectionResult( - abspath(my_file.name), - best_guess.encoding, - best_guess.encoding_aliases, - [ - cp - for cp in best_guess.could_be_from_charset - if cp != best_guess.encoding - ], - best_guess.language, - best_guess.alphabets, - best_guess.bom, - best_guess.percent_chaos, - best_guess.percent_coherence, - None, - True, - ) - ) - - if len(matches) > 1 and args.alternatives: - for el in matches: - if el != best_guess: - x_.append( - CliDetectionResult( - abspath(my_file.name), - el.encoding, - el.encoding_aliases, - [ - cp - for cp in el.could_be_from_charset - if cp != el.encoding - ], - el.language, - el.alphabets, - el.bom, - el.percent_chaos, - el.percent_coherence, - None, - False, - ) - ) - - if args.normalize is True: - if best_guess.encoding.startswith("utf") is True: - print( - '"{}" file does not need to be normalized, as it already came from unicode.'.format( - my_file.name - ), - file=sys.stderr, - ) - if my_file.closed is False: - my_file.close() - continue - - dir_path = dirname(realpath(my_file.name)) - file_name = basename(realpath(my_file.name)) - - o_: list[str] = file_name.split(".") - - if args.replace is False: - o_.insert(-1, best_guess.encoding) - if my_file.closed is False: - my_file.close() - elif ( - args.force is False - and query_yes_no( - 'Are you sure to normalize "{}" by replacing it ?'.format( - my_file.name - ), - "no", - ) - is False - ): - if my_file.closed is False: - my_file.close() - continue - - try: - x_[0].unicode_path = join(dir_path, ".".join(o_)) - - with open(x_[0].unicode_path, "wb") as fp: - fp.write(best_guess.output()) - except OSError as e: - print(str(e), file=sys.stderr) - if my_file.closed is False: - my_file.close() - return 2 - - if my_file.closed is False: - my_file.close() - - if args.minimal is False: - print( - dumps( - [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, - ensure_ascii=True, - indent=4, - ) - ) - else: - for my_file in 
args.files: - print( - ", ".join( - [ - el.encoding or "undefined" - for el in x_ - if el.path == abspath(my_file.name) - ] - ) - ) - - return 0 - - -if __name__ == "__main__": - cli_detect() diff --git a/venv/Lib/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 3396607..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc b/venv/Lib/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index 5612093..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/constant.py b/venv/Lib/site-packages/charset_normalizer/constant.py deleted file mode 100644 index cc71a01..0000000 --- a/venv/Lib/site-packages/charset_normalizer/constant.py +++ /dev/null @@ -1,2015 +0,0 @@ -from __future__ import annotations - -from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE -from encodings.aliases import aliases -from re import IGNORECASE -from re import compile as re_compile - -# Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: dict[str, bytes | list[bytes]] = { - "utf_8": BOM_UTF8, - "utf_7": [ - b"\x2b\x2f\x76\x38", - b"\x2b\x2f\x76\x39", - b"\x2b\x2f\x76\x2b", - b"\x2b\x2f\x76\x2f", - b"\x2b\x2f\x76\x38\x2d", - ], - "gb18030": b"\x84\x31\x95\x33", - "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], - "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], -} - -TOO_SMALL_SEQUENCE: int = 32 -TOO_BIG_SEQUENCE: int = int(10e6) - -UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 - -# Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: dict[str, range] = { - "Control character": range(32), - "Basic Latin": range(32, 128), - "Latin-1 Supplement": range(128, 256), - "Latin Extended-A": range(256, 384), - "Latin Extended-B": range(384, 592), - "IPA Extensions": range(592, 688), - "Spacing Modifier Letters": range(688, 768), - "Combining Diacritical Marks": range(768, 880), - "Greek and Coptic": range(880, 1024), - "Cyrillic": range(1024, 1280), - "Cyrillic Supplement": range(1280, 1328), - "Armenian": range(1328, 1424), - "Hebrew": range(1424, 1536), - "Arabic": range(1536, 1792), - "Syriac": range(1792, 1872), - "Arabic Supplement": range(1872, 1920), - "Thaana": range(1920, 1984), - "NKo": range(1984, 2048), - "Samaritan": range(2048, 2112), - "Mandaic": range(2112, 2144), - "Syriac Supplement": range(2144, 2160), - "Arabic Extended-B": range(2160, 2208), - "Arabic Extended-A": range(2208, 2304), - "Devanagari": range(2304, 2432), - "Bengali": range(2432, 2560), - "Gurmukhi": range(2560, 2688), - "Gujarati": range(2688, 2816), - "Oriya": range(2816, 2944), - "Tamil": range(2944, 3072), - "Telugu": range(3072, 3200), - "Kannada": range(3200, 3328), - "Malayalam": range(3328, 3456), - "Sinhala": range(3456, 3584), - "Thai": range(3584, 3712), - "Lao": range(3712, 3840), - "Tibetan": range(3840, 4096), - "Myanmar": range(4096, 4256), - "Georgian": range(4256, 4352), - "Hangul Jamo": range(4352, 4608), - "Ethiopic": range(4608, 4992), - "Ethiopic Supplement": range(4992, 5024), - "Cherokee": range(5024, 5120), - "Unified Canadian Aboriginal Syllabics": range(5120, 5760), - "Ogham": range(5760, 5792), - "Runic": 
range(5792, 5888), - "Tagalog": range(5888, 5920), - "Hanunoo": range(5920, 5952), - "Buhid": range(5952, 5984), - "Tagbanwa": range(5984, 6016), - "Khmer": range(6016, 6144), - "Mongolian": range(6144, 6320), - "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), - "Limbu": range(6400, 6480), - "Tai Le": range(6480, 6528), - "New Tai Lue": range(6528, 6624), - "Khmer Symbols": range(6624, 6656), - "Buginese": range(6656, 6688), - "Tai Tham": range(6688, 6832), - "Combining Diacritical Marks Extended": range(6832, 6912), - "Balinese": range(6912, 7040), - "Sundanese": range(7040, 7104), - "Batak": range(7104, 7168), - "Lepcha": range(7168, 7248), - "Ol Chiki": range(7248, 7296), - "Cyrillic Extended-C": range(7296, 7312), - "Georgian Extended": range(7312, 7360), - "Sundanese Supplement": range(7360, 7376), - "Vedic Extensions": range(7376, 7424), - "Phonetic Extensions": range(7424, 7552), - "Phonetic Extensions Supplement": range(7552, 7616), - "Combining Diacritical Marks Supplement": range(7616, 7680), - "Latin Extended Additional": range(7680, 7936), - "Greek Extended": range(7936, 8192), - "General Punctuation": range(8192, 8304), - "Superscripts and Subscripts": range(8304, 8352), - "Currency Symbols": range(8352, 8400), - "Combining Diacritical Marks for Symbols": range(8400, 8448), - "Letterlike Symbols": range(8448, 8528), - "Number Forms": range(8528, 8592), - "Arrows": range(8592, 8704), - "Mathematical Operators": range(8704, 8960), - "Miscellaneous Technical": range(8960, 9216), - "Control Pictures": range(9216, 9280), - "Optical Character Recognition": range(9280, 9312), - "Enclosed Alphanumerics": range(9312, 9472), - "Box Drawing": range(9472, 9600), - "Block Elements": range(9600, 9632), - "Geometric Shapes": range(9632, 9728), - "Miscellaneous Symbols": range(9728, 9984), - "Dingbats": range(9984, 10176), - "Miscellaneous Mathematical Symbols-A": range(10176, 10224), - "Supplemental Arrows-A": range(10224, 10240), - "Braille Patterns": range(10240, 10496), - "Supplemental Arrows-B": range(10496, 10624), - "Miscellaneous Mathematical Symbols-B": range(10624, 10752), - "Supplemental Mathematical Operators": range(10752, 11008), - "Miscellaneous Symbols and Arrows": range(11008, 11264), - "Glagolitic": range(11264, 11360), - "Latin Extended-C": range(11360, 11392), - "Coptic": range(11392, 11520), - "Georgian Supplement": range(11520, 11568), - "Tifinagh": range(11568, 11648), - "Ethiopic Extended": range(11648, 11744), - "Cyrillic Extended-A": range(11744, 11776), - "Supplemental Punctuation": range(11776, 11904), - "CJK Radicals Supplement": range(11904, 12032), - "Kangxi Radicals": range(12032, 12256), - "Ideographic Description Characters": range(12272, 12288), - "CJK Symbols and Punctuation": range(12288, 12352), - "Hiragana": range(12352, 12448), - "Katakana": range(12448, 12544), - "Bopomofo": range(12544, 12592), - "Hangul Compatibility Jamo": range(12592, 12688), - "Kanbun": range(12688, 12704), - "Bopomofo Extended": range(12704, 12736), - "CJK Strokes": range(12736, 12784), - "Katakana Phonetic Extensions": range(12784, 12800), - "Enclosed CJK Letters and Months": range(12800, 13056), - "CJK Compatibility": range(13056, 13312), - "CJK Unified Ideographs Extension A": range(13312, 19904), - "Yijing Hexagram Symbols": range(19904, 19968), - "CJK Unified Ideographs": range(19968, 40960), - "Yi Syllables": range(40960, 42128), - "Yi Radicals": range(42128, 42192), - "Lisu": range(42192, 42240), - "Vai": range(42240, 42560), - "Cyrillic Extended-B": 
range(42560, 42656), - "Bamum": range(42656, 42752), - "Modifier Tone Letters": range(42752, 42784), - "Latin Extended-D": range(42784, 43008), - "Syloti Nagri": range(43008, 43056), - "Common Indic Number Forms": range(43056, 43072), - "Phags-pa": range(43072, 43136), - "Saurashtra": range(43136, 43232), - "Devanagari Extended": range(43232, 43264), - "Kayah Li": range(43264, 43312), - "Rejang": range(43312, 43360), - "Hangul Jamo Extended-A": range(43360, 43392), - "Javanese": range(43392, 43488), - "Myanmar Extended-B": range(43488, 43520), - "Cham": range(43520, 43616), - "Myanmar Extended-A": range(43616, 43648), - "Tai Viet": range(43648, 43744), - "Meetei Mayek Extensions": range(43744, 43776), - "Ethiopic Extended-A": range(43776, 43824), - "Latin Extended-E": range(43824, 43888), - "Cherokee Supplement": range(43888, 43968), - "Meetei Mayek": range(43968, 44032), - "Hangul Syllables": range(44032, 55216), - "Hangul Jamo Extended-B": range(55216, 55296), - "High Surrogates": range(55296, 56192), - "High Private Use Surrogates": range(56192, 56320), - "Low Surrogates": range(56320, 57344), - "Private Use Area": range(57344, 63744), - "CJK Compatibility Ideographs": range(63744, 64256), - "Alphabetic Presentation Forms": range(64256, 64336), - "Arabic Presentation Forms-A": range(64336, 65024), - "Variation Selectors": range(65024, 65040), - "Vertical Forms": range(65040, 65056), - "Combining Half Marks": range(65056, 65072), - "CJK Compatibility Forms": range(65072, 65104), - "Small Form Variants": range(65104, 65136), - "Arabic Presentation Forms-B": range(65136, 65280), - "Halfwidth and Fullwidth Forms": range(65280, 65520), - "Specials": range(65520, 65536), - "Linear B Syllabary": range(65536, 65664), - "Linear B Ideograms": range(65664, 65792), - "Aegean Numbers": range(65792, 65856), - "Ancient Greek Numbers": range(65856, 65936), - "Ancient Symbols": range(65936, 66000), - "Phaistos Disc": range(66000, 66048), - "Lycian": range(66176, 66208), - "Carian": range(66208, 66272), - "Coptic Epact Numbers": range(66272, 66304), - "Old Italic": range(66304, 66352), - "Gothic": range(66352, 66384), - "Old Permic": range(66384, 66432), - "Ugaritic": range(66432, 66464), - "Old Persian": range(66464, 66528), - "Deseret": range(66560, 66640), - "Shavian": range(66640, 66688), - "Osmanya": range(66688, 66736), - "Osage": range(66736, 66816), - "Elbasan": range(66816, 66864), - "Caucasian Albanian": range(66864, 66928), - "Vithkuqi": range(66928, 67008), - "Linear A": range(67072, 67456), - "Latin Extended-F": range(67456, 67520), - "Cypriot Syllabary": range(67584, 67648), - "Imperial Aramaic": range(67648, 67680), - "Palmyrene": range(67680, 67712), - "Nabataean": range(67712, 67760), - "Hatran": range(67808, 67840), - "Phoenician": range(67840, 67872), - "Lydian": range(67872, 67904), - "Meroitic Hieroglyphs": range(67968, 68000), - "Meroitic Cursive": range(68000, 68096), - "Kharoshthi": range(68096, 68192), - "Old South Arabian": range(68192, 68224), - "Old North Arabian": range(68224, 68256), - "Manichaean": range(68288, 68352), - "Avestan": range(68352, 68416), - "Inscriptional Parthian": range(68416, 68448), - "Inscriptional Pahlavi": range(68448, 68480), - "Psalter Pahlavi": range(68480, 68528), - "Old Turkic": range(68608, 68688), - "Old Hungarian": range(68736, 68864), - "Hanifi Rohingya": range(68864, 68928), - "Rumi Numeral Symbols": range(69216, 69248), - "Yezidi": range(69248, 69312), - "Arabic Extended-C": range(69312, 69376), - "Old Sogdian": range(69376, 69424), - 
"Sogdian": range(69424, 69488), - "Old Uyghur": range(69488, 69552), - "Chorasmian": range(69552, 69600), - "Elymaic": range(69600, 69632), - "Brahmi": range(69632, 69760), - "Kaithi": range(69760, 69840), - "Sora Sompeng": range(69840, 69888), - "Chakma": range(69888, 69968), - "Mahajani": range(69968, 70016), - "Sharada": range(70016, 70112), - "Sinhala Archaic Numbers": range(70112, 70144), - "Khojki": range(70144, 70224), - "Multani": range(70272, 70320), - "Khudawadi": range(70320, 70400), - "Grantha": range(70400, 70528), - "Newa": range(70656, 70784), - "Tirhuta": range(70784, 70880), - "Siddham": range(71040, 71168), - "Modi": range(71168, 71264), - "Mongolian Supplement": range(71264, 71296), - "Takri": range(71296, 71376), - "Ahom": range(71424, 71504), - "Dogra": range(71680, 71760), - "Warang Citi": range(71840, 71936), - "Dives Akuru": range(71936, 72032), - "Nandinagari": range(72096, 72192), - "Zanabazar Square": range(72192, 72272), - "Soyombo": range(72272, 72368), - "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), - "Pau Cin Hau": range(72384, 72448), - "Devanagari Extended-A": range(72448, 72544), - "Bhaiksuki": range(72704, 72816), - "Marchen": range(72816, 72896), - "Masaram Gondi": range(72960, 73056), - "Gunjala Gondi": range(73056, 73136), - "Makasar": range(73440, 73472), - "Kawi": range(73472, 73568), - "Lisu Supplement": range(73648, 73664), - "Tamil Supplement": range(73664, 73728), - "Cuneiform": range(73728, 74752), - "Cuneiform Numbers and Punctuation": range(74752, 74880), - "Early Dynastic Cuneiform": range(74880, 75088), - "Cypro-Minoan": range(77712, 77824), - "Egyptian Hieroglyphs": range(77824, 78896), - "Egyptian Hieroglyph Format Controls": range(78896, 78944), - "Anatolian Hieroglyphs": range(82944, 83584), - "Bamum Supplement": range(92160, 92736), - "Mro": range(92736, 92784), - "Tangsa": range(92784, 92880), - "Bassa Vah": range(92880, 92928), - "Pahawh Hmong": range(92928, 93072), - "Medefaidrin": range(93760, 93856), - "Miao": range(93952, 94112), - "Ideographic Symbols and Punctuation": range(94176, 94208), - "Tangut": range(94208, 100352), - "Tangut Components": range(100352, 101120), - "Khitan Small Script": range(101120, 101632), - "Tangut Supplement": range(101632, 101760), - "Kana Extended-B": range(110576, 110592), - "Kana Supplement": range(110592, 110848), - "Kana Extended-A": range(110848, 110896), - "Small Kana Extension": range(110896, 110960), - "Nushu": range(110960, 111360), - "Duployan": range(113664, 113824), - "Shorthand Format Controls": range(113824, 113840), - "Znamenny Musical Notation": range(118528, 118736), - "Byzantine Musical Symbols": range(118784, 119040), - "Musical Symbols": range(119040, 119296), - "Ancient Greek Musical Notation": range(119296, 119376), - "Kaktovik Numerals": range(119488, 119520), - "Mayan Numerals": range(119520, 119552), - "Tai Xuan Jing Symbols": range(119552, 119648), - "Counting Rod Numerals": range(119648, 119680), - "Mathematical Alphanumeric Symbols": range(119808, 120832), - "Sutton SignWriting": range(120832, 121520), - "Latin Extended-G": range(122624, 122880), - "Glagolitic Supplement": range(122880, 122928), - "Cyrillic Extended-D": range(122928, 123024), - "Nyiakeng Puachue Hmong": range(123136, 123216), - "Toto": range(123536, 123584), - "Wancho": range(123584, 123648), - "Nag Mundari": range(124112, 124160), - "Ethiopic Extended-B": range(124896, 124928), - "Mende Kikakui": range(124928, 125152), - "Adlam": range(125184, 125280), - "Indic Siyaq Numbers": 
range(126064, 126144), - "Ottoman Siyaq Numbers": range(126208, 126288), - "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), - "Mahjong Tiles": range(126976, 127024), - "Domino Tiles": range(127024, 127136), - "Playing Cards": range(127136, 127232), - "Enclosed Alphanumeric Supplement": range(127232, 127488), - "Enclosed Ideographic Supplement": range(127488, 127744), - "Miscellaneous Symbols and Pictographs": range(127744, 128512), - "Emoticons range(Emoji)": range(128512, 128592), - "Ornamental Dingbats": range(128592, 128640), - "Transport and Map Symbols": range(128640, 128768), - "Alchemical Symbols": range(128768, 128896), - "Geometric Shapes Extended": range(128896, 129024), - "Supplemental Arrows-C": range(129024, 129280), - "Supplemental Symbols and Pictographs": range(129280, 129536), - "Chess Symbols": range(129536, 129648), - "Symbols and Pictographs Extended-A": range(129648, 129792), - "Symbols for Legacy Computing": range(129792, 130048), - "CJK Unified Ideographs Extension B": range(131072, 173792), - "CJK Unified Ideographs Extension C": range(173824, 177984), - "CJK Unified Ideographs Extension D": range(177984, 178208), - "CJK Unified Ideographs Extension E": range(178208, 183984), - "CJK Unified Ideographs Extension F": range(183984, 191472), - "CJK Compatibility Ideographs Supplement": range(194560, 195104), - "CJK Unified Ideographs Extension G": range(196608, 201552), - "CJK Unified Ideographs Extension H": range(201552, 205744), - "Tags": range(917504, 917632), - "Variation Selectors Supplement": range(917760, 918000), - "Supplementary Private Use Area-A": range(983040, 1048576), - "Supplementary Private Use Area-B": range(1048576, 1114112), -} - - -UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ - "Supplement", - "Extended", - "Extensions", - "Modifier", - "Marks", - "Punctuation", - "Symbols", - "Forms", - "Operators", - "Miscellaneous", - "Drawing", - "Block", - "Shapes", - "Supplemental", - "Tags", -] - -RE_POSSIBLE_ENCODING_INDICATION = re_compile( - r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", - IGNORECASE, -) - -IANA_NO_ALIASES = [ - "cp720", - "cp737", - "cp856", - "cp874", - "cp875", - "cp1006", - "koi8_r", - "koi8_t", - "koi8_u", -] - -IANA_SUPPORTED: list[str] = sorted( - filter( - lambda x: x.endswith("_codec") is False - and x not in {"rot_13", "tactis", "mbcs"}, - list(set(aliases.values())) + IANA_NO_ALIASES, - ) -) - -IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) - -# pre-computed code page that are similar using the function cp_similarity. 
-IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { - "cp037": ["cp1026", "cp1140", "cp273", "cp500"], - "cp1026": ["cp037", "cp1140", "cp273", "cp500"], - "cp1125": ["cp866"], - "cp1140": ["cp037", "cp1026", "cp273", "cp500"], - "cp1250": ["iso8859_2"], - "cp1251": ["kz1048", "ptcp154"], - "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1253": ["iso8859_7"], - "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1257": ["iso8859_13"], - "cp273": ["cp037", "cp1026", "cp1140", "cp500"], - "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], - "cp500": ["cp037", "cp1026", "cp1140", "cp273"], - "cp850": ["cp437", "cp857", "cp858", "cp865"], - "cp857": ["cp850", "cp858", "cp865"], - "cp858": ["cp437", "cp850", "cp857", "cp865"], - "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], - "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], - "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], - "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], - "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], - "cp866": ["cp1125"], - "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], - "iso8859_11": ["tis_620"], - "iso8859_13": ["cp1257"], - "iso8859_14": [ - "iso8859_10", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_15": [ - "cp1252", - "cp1254", - "iso8859_10", - "iso8859_14", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_16": [ - "iso8859_14", - "iso8859_15", - "iso8859_2", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], - "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], - "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], - "iso8859_7": ["cp1253"], - "iso8859_9": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "latin_1", - ], - "kz1048": ["cp1251", "ptcp154"], - "latin_1": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "iso8859_9", - ], - "mac_iceland": ["mac_roman", "mac_turkish"], - "mac_roman": ["mac_iceland", "mac_turkish"], - "mac_turkish": ["mac_iceland", "mac_roman"], - "ptcp154": ["cp1251", "kz1048"], - "tis_620": ["iso8859_11"], -} - - -CHARDET_CORRESPONDENCE: dict[str, str] = { - "iso2022_kr": "ISO-2022-KR", - "iso2022_jp": "ISO-2022-JP", - "euc_kr": "EUC-KR", - "tis_620": "TIS-620", - "utf_32": "UTF-32", - "euc_jp": "EUC-JP", - "koi8_r": "KOI8-R", - "iso8859_1": "ISO-8859-1", - "iso8859_2": "ISO-8859-2", - "iso8859_5": "ISO-8859-5", - "iso8859_6": "ISO-8859-6", - "iso8859_7": "ISO-8859-7", - "iso8859_8": "ISO-8859-8", - "utf_16": "UTF-16", - "cp855": "IBM855", - "mac_cyrillic": "MacCyrillic", - "gb2312": "GB2312", - "gb18030": "GB18030", - "cp932": "CP932", - "cp866": "IBM866", - "utf_8": "utf-8", - "utf_8_sig": "UTF-8-SIG", - "shift_jis": "SHIFT_JIS", - "big5": "Big5", - "cp1250": "windows-1250", - "cp1251": "windows-1251", - "cp1252": "Windows-1252", - "cp1253": "windows-1253", - "cp1255": "windows-1255", - "cp1256": "windows-1256", - "cp1254": "Windows-1254", - "cp949": "CP949", -} - - -COMMON_SAFE_ASCII_CHARACTERS: set[str] = { - "<", - ">", - "=", - ":", - "/", - "&", - ";", - "{", - "}", - "[", - "]", - ",", - "|", - '"', - "-", - "(", - ")", -} - -# Sample character sets — replace with full lists if needed -COMMON_CHINESE_CHARACTERS = 
"的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞" - -COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨" - -COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生" - -# Combine all into a set -COMMON_CJK_CHARACTERS = set( - "".join( - [ - COMMON_CHINESE_CHARACTERS, - COMMON_JAPANESE_CHARACTERS, - COMMON_KOREAN_CHARACTERS, - ] - ) -) - -KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"} - -# Logging LEVEL below DEBUG -TRACE: int = 5 - - -# Language label that contain the em dash "—" -# character are to be considered alternative seq to origin -FREQUENCIES: dict[str, list[str]] = { - "English": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "u", - "m", - "f", - "p", - "g", - "w", - "y", - "b", - "v", - "k", - "x", - "j", - "z", - "q", - ], - "English—": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "m", - "u", - "f", - "p", - "g", - "w", - "b", - "y", - "v", - "k", - "j", - "x", - "z", - "q", - ], - "German": [ - "e", - "n", - "i", - "r", - "s", - "t", - "a", - "d", - "h", - "u", - "l", - "g", - "o", - "c", - "m", - "b", - "f", - "k", - "w", - "z", - "p", - "v", - "ü", - "ä", - "ö", - "j", - ], - "French": [ - "e", - "a", - "s", - "n", - "i", - "t", - "r", - "l", - "u", - "o", - "d", - "c", - "p", - "m", - "é", - "v", - "g", - "f", - "b", - "h", - "q", - "à", - "x", - "è", - "y", - "j", - ], - "Dutch": [ - "e", - "n", - "a", - "i", - "r", - "t", - "o", - "d", - "s", - "l", - "g", - "h", - "v", - "m", - "u", - "k", - "c", - "p", - "b", - "w", - "j", - "z", - "f", - "y", - "x", - "ë", - ], - "Italian": [ - "e", - "i", - "a", - "o", - "n", - "l", - "t", - "r", - "s", - "c", - "d", - "u", - "p", - "m", - "g", - "v", - "f", - "b", - "z", - "h", - "q", - "è", - "à", - "k", - "y", - "ò", - ], - "Polish": [ - "a", - "i", - "o", - "e", - "n", - "r", - "z", - "w", - "s", - "c", - "t", - "k", - "y", - "d", - "p", - "m", - "u", - "l", - "j", - "ł", - "g", - "b", - "h", - "ą", - "ę", - "ó", - ], - "Spanish": [ - "e", - "a", - "o", - "n", - "s", - "r", - "i", - "l", - "d", - "t", - "c", - "u", - "m", - "p", - "b", - "g", - "v", - "f", - "y", - "ó", - "h", - "q", - "í", - "j", - "z", - "á", - ], - "Russian": [ - "о", - "а", - "е", - "и", - "н", - "с", - "т", - "р", - "в", - "л", - "к", - "м", - "д", - "п", - "у", - "г", - "я", - "ы", - "з", - "б", - "й", - "ь", - "ч", - "х", - "ж", - "ц", - ], - # Jap-Kanji - "Japanese": [ - "人", - "一", - "大", - "亅", - "丁", - "丨", - "竹", - "笑", - "口", - "日", - "今", - "二", - "彳", - "行", - "十", - "土", - "丶", - "寸", - "寺", - "時", - "乙", - "丿", - "乂", - "气", - "気", - "冂", - "巾", - "亠", - "市", - "目", - "儿", - "見", - "八", - "小", - "凵", - "県", - "月", - "彐", - "門", - "間", - "木", - "東", - "山", - "出", - "本", - "中", - "刀", - "分", - "耳", - "又", - "取", - "最", - "言", - "田", - "心", - "思", - "刂", - "前", - "京", - "尹", - "事", - "生", - "厶", - "云", - "会", - "未", - "来", - "白", - "冫", - "楽", - "灬", - "馬", - "尸", - "尺", - "駅", - "明", - 
"耂", - "者", - "了", - "阝", - "都", - "高", - "卜", - "占", - "厂", - "广", - "店", - "子", - "申", - "奄", - "亻", - "俺", - "上", - "方", - "冖", - "学", - "衣", - "艮", - "食", - "自", - ], - # Jap-Katakana - "Japanese—": [ - "ー", - "ン", - "ス", - "・", - "ル", - "ト", - "リ", - "イ", - "ア", - "ラ", - "ッ", - "ク", - "ド", - "シ", - "レ", - "ジ", - "タ", - "フ", - "ロ", - "カ", - "テ", - "マ", - "ィ", - "グ", - "バ", - "ム", - "プ", - "オ", - "コ", - "デ", - "ニ", - "ウ", - "メ", - "サ", - "ビ", - "ナ", - "ブ", - "ャ", - "エ", - "ュ", - "チ", - "キ", - "ズ", - "ダ", - "パ", - "ミ", - "ェ", - "ョ", - "ハ", - "セ", - "ベ", - "ガ", - "モ", - "ツ", - "ネ", - "ボ", - "ソ", - "ノ", - "ァ", - "ヴ", - "ワ", - "ポ", - "ペ", - "ピ", - "ケ", - "ゴ", - "ギ", - "ザ", - "ホ", - "ゲ", - "ォ", - "ヤ", - "ヒ", - "ユ", - "ヨ", - "ヘ", - "ゼ", - "ヌ", - "ゥ", - "ゾ", - "ヶ", - "ヂ", - "ヲ", - "ヅ", - "ヵ", - "ヱ", - "ヰ", - "ヮ", - "ヽ", - "゠", - "ヾ", - "ヷ", - "ヿ", - "ヸ", - "ヹ", - "ヺ", - ], - # Jap-Hiragana - "Japanese——": [ - "の", - "に", - "る", - "た", - "と", - "は", - "し", - "い", - "を", - "で", - "て", - "が", - "な", - "れ", - "か", - "ら", - "さ", - "っ", - "り", - "す", - "あ", - "も", - "こ", - "ま", - "う", - "く", - "よ", - "き", - "ん", - "め", - "お", - "け", - "そ", - "つ", - "だ", - "や", - "え", - "ど", - "わ", - "ち", - "み", - "せ", - "じ", - "ば", - "へ", - "び", - "ず", - "ろ", - "ほ", - "げ", - "む", - "べ", - "ひ", - "ょ", - "ゆ", - "ぶ", - "ご", - "ゃ", - "ね", - "ふ", - "ぐ", - "ぎ", - "ぼ", - "ゅ", - "づ", - "ざ", - "ぞ", - "ぬ", - "ぜ", - "ぱ", - "ぽ", - "ぷ", - "ぴ", - "ぃ", - "ぁ", - "ぇ", - "ぺ", - "ゞ", - "ぢ", - "ぉ", - "ぅ", - "ゐ", - "ゝ", - "ゑ", - "゛", - "゜", - "ゎ", - "ゔ", - "゚", - "ゟ", - "゙", - "ゕ", - "ゖ", - ], - "Portuguese": [ - "a", - "e", - "o", - "s", - "i", - "r", - "d", - "n", - "t", - "m", - "u", - "c", - "l", - "p", - "g", - "v", - "b", - "f", - "h", - "ã", - "q", - "é", - "ç", - "á", - "z", - "í", - ], - "Swedish": [ - "e", - "a", - "n", - "r", - "t", - "s", - "i", - "l", - "d", - "o", - "m", - "k", - "g", - "v", - "h", - "f", - "u", - "p", - "ä", - "c", - "b", - "ö", - "å", - "y", - "j", - "x", - ], - "Chinese": [ - "的", - "一", - "是", - "不", - "了", - "在", - "人", - "有", - "我", - "他", - "这", - "个", - "们", - "中", - "来", - "上", - "大", - "为", - "和", - "国", - "地", - "到", - "以", - "说", - "时", - "要", - "就", - "出", - "会", - "可", - "也", - "你", - "对", - "生", - "能", - "而", - "子", - "那", - "得", - "于", - "着", - "下", - "自", - "之", - "年", - "过", - "发", - "后", - "作", - "里", - "用", - "道", - "行", - "所", - "然", - "家", - "种", - "事", - "成", - "方", - "多", - "经", - "么", - "去", - "法", - "学", - "如", - "都", - "同", - "现", - "当", - "没", - "动", - "面", - "起", - "看", - "定", - "天", - "分", - "还", - "进", - "好", - "小", - "部", - "其", - "些", - "主", - "样", - "理", - "心", - "她", - "本", - "前", - "开", - "但", - "因", - "只", - "从", - "想", - "实", - ], - "Ukrainian": [ - "о", - "а", - "н", - "і", - "и", - "р", - "в", - "т", - "е", - "с", - "к", - "л", - "у", - "д", - "м", - "п", - "з", - "я", - "ь", - "б", - "г", - "й", - "ч", - "х", - "ц", - "ї", - ], - "Norwegian": [ - "e", - "r", - "n", - "t", - "a", - "s", - "i", - "o", - "l", - "d", - "g", - "k", - "m", - "v", - "f", - "p", - "u", - "b", - "h", - "å", - "y", - "j", - "ø", - "c", - "æ", - "w", - ], - "Finnish": [ - "a", - "i", - "n", - "t", - "e", - "s", - "l", - "o", - "u", - "k", - "ä", - "m", - "r", - "v", - "j", - "h", - "p", - "y", - "d", - "ö", - "g", - "c", - "b", - "f", - "w", - "z", - ], - "Vietnamese": [ - "n", - "h", - "t", - "i", - "c", - "g", - "a", - "o", - "u", - "m", - "l", - "r", - "à", - "đ", - "s", - "e", - "v", - "p", - "b", - "y", - "ư", - "d", - "á", - "k", - "ộ", - "ế", - ], - "Czech": [ - "o", - "e", - "a", - 
"n", - "t", - "s", - "i", - "l", - "v", - "r", - "k", - "d", - "u", - "m", - "p", - "í", - "c", - "h", - "z", - "á", - "y", - "j", - "b", - "ě", - "é", - "ř", - ], - "Hungarian": [ - "e", - "a", - "t", - "l", - "s", - "n", - "k", - "r", - "i", - "o", - "z", - "á", - "é", - "g", - "m", - "b", - "y", - "v", - "d", - "h", - "u", - "p", - "j", - "ö", - "f", - "c", - ], - "Korean": [ - "이", - "다", - "에", - "의", - "는", - "로", - "하", - "을", - "가", - "고", - "지", - "서", - "한", - "은", - "기", - "으", - "년", - "대", - "사", - "시", - "를", - "리", - "도", - "인", - "스", - "일", - ], - "Indonesian": [ - "a", - "n", - "e", - "i", - "r", - "t", - "u", - "s", - "d", - "k", - "m", - "l", - "g", - "p", - "b", - "o", - "h", - "y", - "j", - "c", - "w", - "f", - "v", - "z", - "x", - "q", - ], - "Turkish": [ - "a", - "e", - "i", - "n", - "r", - "l", - "ı", - "k", - "d", - "t", - "s", - "m", - "y", - "u", - "o", - "b", - "ü", - "ş", - "v", - "g", - "z", - "h", - "c", - "p", - "ç", - "ğ", - ], - "Romanian": [ - "e", - "i", - "a", - "r", - "n", - "t", - "u", - "l", - "o", - "c", - "s", - "d", - "p", - "m", - "ă", - "f", - "v", - "î", - "g", - "b", - "ș", - "ț", - "z", - "h", - "â", - "j", - ], - "Farsi": [ - "ا", - "ی", - "ر", - "د", - "ن", - "ه", - "و", - "م", - "ت", - "ب", - "س", - "ل", - "ک", - "ش", - "ز", - "ف", - "گ", - "ع", - "خ", - "ق", - "ج", - "آ", - "پ", - "ح", - "ط", - "ص", - ], - "Arabic": [ - "ا", - "ل", - "ي", - "م", - "و", - "ن", - "ر", - "ت", - "ب", - "ة", - "ع", - "د", - "س", - "ف", - "ه", - "ك", - "ق", - "أ", - "ح", - "ج", - "ش", - "ط", - "ص", - "ى", - "خ", - "إ", - ], - "Danish": [ - "e", - "r", - "n", - "t", - "a", - "i", - "s", - "d", - "l", - "o", - "g", - "m", - "k", - "f", - "v", - "u", - "b", - "h", - "p", - "å", - "y", - "ø", - "æ", - "c", - "j", - "w", - ], - "Serbian": [ - "а", - "и", - "о", - "е", - "н", - "р", - "с", - "у", - "т", - "к", - "ј", - "в", - "д", - "м", - "п", - "л", - "г", - "з", - "б", - "a", - "i", - "e", - "o", - "n", - "ц", - "ш", - ], - "Lithuanian": [ - "i", - "a", - "s", - "o", - "r", - "e", - "t", - "n", - "u", - "k", - "m", - "l", - "p", - "v", - "d", - "j", - "g", - "ė", - "b", - "y", - "ų", - "š", - "ž", - "c", - "ą", - "į", - ], - "Slovene": [ - "e", - "a", - "i", - "o", - "n", - "r", - "s", - "l", - "t", - "j", - "v", - "k", - "d", - "p", - "m", - "u", - "z", - "b", - "g", - "h", - "č", - "c", - "š", - "ž", - "f", - "y", - ], - "Slovak": [ - "o", - "a", - "e", - "n", - "i", - "r", - "v", - "t", - "s", - "l", - "k", - "d", - "m", - "p", - "u", - "c", - "h", - "j", - "b", - "z", - "á", - "y", - "ý", - "í", - "č", - "é", - ], - "Hebrew": [ - "י", - "ו", - "ה", - "ל", - "ר", - "ב", - "ת", - "מ", - "א", - "ש", - "נ", - "ע", - "ם", - "ד", - "ק", - "ח", - "פ", - "ס", - "כ", - "ג", - "ט", - "צ", - "ן", - "ז", - "ך", - ], - "Bulgarian": [ - "а", - "и", - "о", - "е", - "н", - "т", - "р", - "с", - "в", - "л", - "к", - "д", - "п", - "м", - "з", - "г", - "я", - "ъ", - "у", - "б", - "ч", - "ц", - "й", - "ж", - "щ", - "х", - ], - "Croatian": [ - "a", - "i", - "o", - "e", - "n", - "r", - "j", - "s", - "t", - "u", - "k", - "l", - "v", - "d", - "m", - "p", - "g", - "z", - "b", - "c", - "č", - "h", - "š", - "ž", - "ć", - "f", - ], - "Hindi": [ - "क", - "र", - "स", - "न", - "त", - "म", - "ह", - "प", - "य", - "ल", - "व", - "ज", - "द", - "ग", - "ब", - "श", - "ट", - "अ", - "ए", - "थ", - "भ", - "ड", - "च", - "ध", - "ष", - "इ", - ], - "Estonian": [ - "a", - "i", - "e", - "s", - "t", - "l", - "u", - "n", - "o", - "k", - "r", - "d", - "m", - "v", - "g", - "p", - "j", - "h", - "ä", - "b", - "õ", 
- "ü", - "f", - "c", - "ö", - "y", - ], - "Thai": [ - "า", - "น", - "ร", - "อ", - "ก", - "เ", - "ง", - "ม", - "ย", - "ล", - "ว", - "ด", - "ท", - "ส", - "ต", - "ะ", - "ป", - "บ", - "ค", - "ห", - "แ", - "จ", - "พ", - "ช", - "ข", - "ใ", - ], - "Greek": [ - "α", - "τ", - "ο", - "ι", - "ε", - "ν", - "ρ", - "σ", - "κ", - "η", - "π", - "ς", - "υ", - "μ", - "λ", - "ί", - "ό", - "ά", - "γ", - "έ", - "δ", - "ή", - "ω", - "χ", - "θ", - "ύ", - ], - "Tamil": [ - "க", - "த", - "ப", - "ட", - "ர", - "ம", - "ல", - "ன", - "வ", - "ற", - "ய", - "ள", - "ச", - "ந", - "இ", - "ண", - "அ", - "ஆ", - "ழ", - "ங", - "எ", - "உ", - "ஒ", - "ஸ", - ], - "Kazakh": [ - "а", - "ы", - "е", - "н", - "т", - "р", - "л", - "і", - "д", - "с", - "м", - "қ", - "к", - "о", - "б", - "и", - "у", - "ғ", - "ж", - "ң", - "з", - "ш", - "й", - "п", - "г", - "ө", - ], -} - -LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/venv/Lib/site-packages/charset_normalizer/legacy.py b/venv/Lib/site-packages/charset_normalizer/legacy.py deleted file mode 100644 index 360a310..0000000 --- a/venv/Lib/site-packages/charset_normalizer/legacy.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any -from warnings import warn - -from .api import from_bytes -from .constant import CHARDET_CORRESPONDENCE, TOO_SMALL_SEQUENCE - -# TODO: remove this check when dropping Python 3.7 support -if TYPE_CHECKING: - from typing_extensions import TypedDict - - class ResultDict(TypedDict): - encoding: str | None - language: str - confidence: float | None - - -def detect( - byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any -) -> ResultDict: - """ - chardet legacy method - Detect the encoding of the given byte string. It should be mostly backward-compatible. - Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) - This function is deprecated and should be used to migrate your project easily, consult the documentation for - further information. Not planned for removal. - - :param byte_str: The byte sequence to examine. - :param should_rename_legacy: Should we rename legacy encodings - to their more modern equivalents? - """ - if len(kwargs): - warn( - f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" - ) - - if not isinstance(byte_str, (bytearray, bytes)): - raise TypeError( # pragma: nocover - f"Expected object of type bytes or bytearray, got: {type(byte_str)}" - ) - - if isinstance(byte_str, bytearray): - byte_str = bytes(byte_str) - - r = from_bytes(byte_str).best() - - encoding = r.encoding if r is not None else None - language = r.language if r is not None and r.language != "Unknown" else "" - confidence = 1.0 - r.chaos if r is not None else None - - # automatically lower confidence - # on small bytes samples. - # https://github.com/jawah/charset_normalizer/issues/391 - if ( - confidence is not None - and confidence >= 0.9 - and encoding - not in { - "utf_8", - "ascii", - } - and r.bom is False # type: ignore[union-attr] - and len(byte_str) < TOO_SMALL_SEQUENCE - ): - confidence -= 0.2 - - # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process - # but chardet does return 'utf-8-sig' and it is a valid codec name. 
- if r is not None and encoding == "utf_8" and r.bom: - encoding += "_sig" - - if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: - encoding = CHARDET_CORRESPONDENCE[encoding] - - return { - "encoding": encoding, - "language": language, - "confidence": confidence, - } diff --git a/venv/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd b/venv/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd deleted file mode 100644 index 047d0bc..0000000 Binary files a/venv/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/charset_normalizer/md.py b/venv/Lib/site-packages/charset_normalizer/md.py deleted file mode 100644 index 12ce024..0000000 --- a/venv/Lib/site-packages/charset_normalizer/md.py +++ /dev/null @@ -1,635 +0,0 @@ -from __future__ import annotations - -from functools import lru_cache -from logging import getLogger - -from .constant import ( - COMMON_SAFE_ASCII_CHARACTERS, - TRACE, - UNICODE_SECONDARY_RANGE_KEYWORD, -) -from .utils import ( - is_accentuated, - is_arabic, - is_arabic_isolated_form, - is_case_variable, - is_cjk, - is_emoticon, - is_hangul, - is_hiragana, - is_katakana, - is_latin, - is_punctuation, - is_separator, - is_symbol, - is_thai, - is_unprintable, - remove_accent, - unicode_range, - is_cjk_uncommon, -) - - -class MessDetectorPlugin: - """ - Base abstract class used for mess detection plugins. - All detectors MUST extend and implement given methods. - """ - - def eligible(self, character: str) -> bool: - """ - Determine if given character should be fed in. - """ - raise NotImplementedError # pragma: nocover - - def feed(self, character: str) -> None: - """ - The main routine to be executed upon character. - Insert the logic in witch the text would be considered chaotic. - """ - raise NotImplementedError # pragma: nocover - - def reset(self) -> None: # pragma: no cover - """ - Permit to reset the plugin to the initial state. - """ - raise NotImplementedError - - @property - def ratio(self) -> float: - """ - Compute the chaos ratio based on what your feed() has seen. - Must NOT be lower than 0.; No restriction gt 0. 
- """ - raise NotImplementedError # pragma: nocover - - -class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._punctuation_count: int = 0 - self._symbol_count: int = 0 - self._character_count: int = 0 - - self._last_printable_char: str | None = None - self._frenzy_symbol_in_word: bool = False - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character != self._last_printable_char - and character not in COMMON_SAFE_ASCII_CHARACTERS - ): - if is_punctuation(character): - self._punctuation_count += 1 - elif ( - character.isdigit() is False - and is_symbol(character) - and is_emoticon(character) is False - ): - self._symbol_count += 2 - - self._last_printable_char = character - - def reset(self) -> None: # Abstract - self._punctuation_count = 0 - self._character_count = 0 - self._symbol_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - ratio_of_punctuation: float = ( - self._punctuation_count + self._symbol_count - ) / self._character_count - - return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 - - -class TooManyAccentuatedPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._accentuated_count: int = 0 - - def eligible(self, character: str) -> bool: - return character.isalpha() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_accentuated(character): - self._accentuated_count += 1 - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._accentuated_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - ratio_of_accentuation: float = self._accentuated_count / self._character_count - return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 - - -class UnprintablePlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._unprintable_count: int = 0 - self._character_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if is_unprintable(character): - self._unprintable_count += 1 - self._character_count += 1 - - def reset(self) -> None: # Abstract - self._unprintable_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._unprintable_count * 8) / self._character_count - - -class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._successive_count: int = 0 - self._character_count: int = 0 - - self._last_latin_character: str | None = None - - def eligible(self, character: str) -> bool: - return character.isalpha() and is_latin(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - if ( - self._last_latin_character is not None - and is_accentuated(character) - and is_accentuated(self._last_latin_character) - ): - if character.isupper() and self._last_latin_character.isupper(): - self._successive_count += 1 - # Worse if its the same char duplicated with different accent. 
- if remove_accent(character) == remove_accent(self._last_latin_character): - self._successive_count += 1 - self._last_latin_character = character - - def reset(self) -> None: # Abstract - self._successive_count = 0 - self._character_count = 0 - self._last_latin_character = None - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._successive_count * 2) / self._character_count - - -class SuspiciousRange(MessDetectorPlugin): - def __init__(self) -> None: - self._suspicious_successive_range_count: int = 0 - self._character_count: int = 0 - self._last_printable_seen: str | None = None - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character.isspace() - or is_punctuation(character) - or character in COMMON_SAFE_ASCII_CHARACTERS - ): - self._last_printable_seen = None - return - - if self._last_printable_seen is None: - self._last_printable_seen = character - return - - unicode_range_a: str | None = unicode_range(self._last_printable_seen) - unicode_range_b: str | None = unicode_range(character) - - if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): - self._suspicious_successive_range_count += 1 - - self._last_printable_seen = character - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._suspicious_successive_range_count = 0 - self._last_printable_seen = None - - @property - def ratio(self) -> float: - if self._character_count <= 13: - return 0.0 - - ratio_of_suspicious_range_usage: float = ( - self._suspicious_successive_range_count * 2 - ) / self._character_count - - return ratio_of_suspicious_range_usage - - -class SuperWeirdWordPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._word_count: int = 0 - self._bad_word_count: int = 0 - self._foreign_long_count: int = 0 - - self._is_current_word_bad: bool = False - self._foreign_long_watch: bool = False - - self._character_count: int = 0 - self._bad_character_count: int = 0 - - self._buffer: str = "" - self._buffer_accent_count: int = 0 - self._buffer_glyph_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if character.isalpha(): - self._buffer += character - if is_accentuated(character): - self._buffer_accent_count += 1 - if ( - self._foreign_long_watch is False - and (is_latin(character) is False or is_accentuated(character)) - and is_cjk(character) is False - and is_hangul(character) is False - and is_katakana(character) is False - and is_hiragana(character) is False - and is_thai(character) is False - ): - self._foreign_long_watch = True - if ( - is_cjk(character) - or is_hangul(character) - or is_katakana(character) - or is_hiragana(character) - or is_thai(character) - ): - self._buffer_glyph_count += 1 - return - if not self._buffer: - return - if ( - character.isspace() or is_punctuation(character) or is_separator(character) - ) and self._buffer: - self._word_count += 1 - buffer_length: int = len(self._buffer) - - self._character_count += buffer_length - - if buffer_length >= 4: - if self._buffer_accent_count / buffer_length >= 0.5: - self._is_current_word_bad = True - # Word/Buffer ending with an upper case accentuated letter are so rare, - # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
- elif ( - is_accentuated(self._buffer[-1]) - and self._buffer[-1].isupper() - and all(_.isupper() for _ in self._buffer) is False - ): - self._foreign_long_count += 1 - self._is_current_word_bad = True - elif self._buffer_glyph_count == 1: - self._is_current_word_bad = True - self._foreign_long_count += 1 - if buffer_length >= 24 and self._foreign_long_watch: - camel_case_dst = [ - i - for c, i in zip(self._buffer, range(0, buffer_length)) - if c.isupper() - ] - probable_camel_cased: bool = False - - if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): - probable_camel_cased = True - - if not probable_camel_cased: - self._foreign_long_count += 1 - self._is_current_word_bad = True - - if self._is_current_word_bad: - self._bad_word_count += 1 - self._bad_character_count += len(self._buffer) - self._is_current_word_bad = False - - self._foreign_long_watch = False - self._buffer = "" - self._buffer_accent_count = 0 - self._buffer_glyph_count = 0 - elif ( - character not in {"<", ">", "-", "=", "~", "|", "_"} - and character.isdigit() is False - and is_symbol(character) - ): - self._is_current_word_bad = True - self._buffer += character - - def reset(self) -> None: # Abstract - self._buffer = "" - self._is_current_word_bad = False - self._foreign_long_watch = False - self._bad_word_count = 0 - self._word_count = 0 - self._character_count = 0 - self._bad_character_count = 0 - self._foreign_long_count = 0 - - @property - def ratio(self) -> float: - if self._word_count <= 10 and self._foreign_long_count == 0: - return 0.0 - - return self._bad_character_count / self._character_count - - -class CjkUncommonPlugin(MessDetectorPlugin): - """ - Detect messy CJK text that probably means nothing. - """ - - def __init__(self) -> None: - self._character_count: int = 0 - self._uncommon_count: int = 0 - - def eligible(self, character: str) -> bool: - return is_cjk(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_cjk_uncommon(character): - self._uncommon_count += 1 - return - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._uncommon_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - uncommon_form_usage: float = self._uncommon_count / self._character_count - - # we can be pretty sure it's garbage when uncommon characters are widely - # used. otherwise it could just be traditional chinese for example. 
[This part of the diff contains no application changes; it only removes the committed virtual environment from version control. Deleted here: the rest of venv/Lib/site-packages/charset_normalizer/ (the mess-detector module, md__mypyc.cp312-win_amd64.pyd, models.py, py.typed, utils.py, version.py), the click-8.3.1.dist-info metadata (INSTALLER, METADATA, RECORD, REQUESTED, WHEEL, licenses/LICENSE.txt), and the click package itself (__init__.py, the compiled __pycache__/*.pyc files, _compat.py, _termui_impl.py). The deletion hunks reproduce unmodified upstream library source verbatim and are omitted.]
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: V | None = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> cabc.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if not self._is_atty: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. - if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if stdout is None: - stdout = StringIO() - - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - - # Split and normalize the pager command into parts. 
- pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False) - if pager_cmd_parts: - if WIN: - if _tempfilepager(generator, pager_cmd_parts, color): - return - elif _pipepager(generator, pager_cmd_parts, color): - return - - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if (WIN or sys.platform.startswith("os2")) and _tempfilepager( - generator, ["more"], color - ): - return - if _pipepager(generator, ["less"], color): - return - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if _pipepager(generator, ["more"], color): - return - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - - import shutil - - cmd = cmd_parts[0] - cmd_params = cmd_parts[1:] - - cmd_filepath = shutil.which(cmd) - if not cmd_filepath: - return False - - # Produces a normalized absolute path string. - # multi-call binaries such as busybox derive their identity from the symlink - # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) - cmd_path = Path(cmd_filepath).absolute() - cmd_name = cmd_path.name - - import subprocess - - # Make a local copy of the environment to not affect the global one. - env = dict(os.environ) - - # If we're piping to less and the user hasn't decided on colors, we enable - # them by default we find the -R flag in the command line arguments. - if color is None and cmd_name == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen( - [str(cmd_path)] + cmd_params, - shell=False, - stdin=subprocess.PIPE, - env=env, - errors="replace", - text=True, - ) - assert c.stdin is not None - try: - for text in generator: - if not color: - text = strip_ansi(text) - - c.stdin.write(text) - except BrokenPipeError: - # In case the pager exited unexpectedly, ignore the broken pipe error. - pass - except Exception as e: - # In case there is an exception we want to close the pager immediately - # and let the caller handle it. - # Otherwise the pager will keep running, and the user may not notice - # the error message, or worse yet it may leave the terminal in a broken state. - c.terminate() - raise e - finally: - # We must close stdin and wait for the pager to exit before we continue - try: - c.stdin.close() - # Close implies flush, so it might throw a BrokenPipeError if the pager - # process exited already. - except BrokenPipeError: - pass - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. 
- while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - return True - - -def _tempfilepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by invoking a program on a temporary file. - - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - - import shutil - - cmd = cmd_parts[0] - - cmd_filepath = shutil.which(cmd) - if not cmd_filepath: - return False - # Produces a normalized absolute path string. - # multi-call binaries such as busybox derive their identity from the symlink - # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) - cmd_path = Path(cmd_filepath).absolute() - - import subprocess - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - subprocess.call([str(cmd_path), filename]) - except OSError: - # Command not found - pass - finally: - os.close(fd) - os.unlink(filename) - - return True - - -def _nullpager( - stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None -) -> None: - """Simply print unformatted text. This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - - from shutil import which - - for editor in "sensible-editor", "vim", "nano": - if which(editor) is not None: - return editor - return "vi" - - def edit_files(self, filenames: cabc.Iterable[str]) -> None: - import subprocess - - editor = self.get_editor() - environ: dict[str, str] | None = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - exc_filename = " ".join(f'"{filename}"' for filename in filenames) - - try: - c = subprocess.Popen( - args=f"{editor} {exc_filename}", env=environ, shell=True - ) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - @t.overload - def edit(self, text: bytes | bytearray) -> bytes | None: ... - - # We cannot know whether or not the type expected is str or bytes when None - # is passed, so str is returned as that was what was done before. - @t.overload - def edit(self, text: str | None) -> str | None: ... 
- - def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None: - import tempfile - - if text is None: - data: bytes | bytearray = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. - os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. - timestamp = os.path.getmtime(name) - - self.edit_files((name,)) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url) - args = ["explorer", f"/select,{url}"] - else: - args = ["start"] - if wait: - args.append("/WAIT") - args.append("") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - elif CYGWIN: - if locate: - url = _unquote_file(url) - args = ["cygstart", os.path.dirname(url)] - else: - args = ["cygstart"] - if wait: - args.append("-w") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> None: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if sys.platform == "win32": - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. 
- # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - - if echo: - func = t.cast(t.Callable[[], str], msvcrt.getwche) - else: - func = t.cast(t.Callable[[], str], msvcrt.getwch) - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import termios - import tty - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - f: t.TextIO | None - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/venv/Lib/site-packages/click/_textwrap.py b/venv/Lib/site-packages/click/_textwrap.py deleted file mode 100644 index 97fbee3..0000000 --- a/venv/Lib/site-packages/click/_textwrap.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import textwrap -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: list[str], - cur_line: list[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> cabc.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in 
enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/venv/Lib/site-packages/click/_utils.py b/venv/Lib/site-packages/click/_utils.py deleted file mode 100644 index 09fb008..0000000 --- a/venv/Lib/site-packages/click/_utils.py +++ /dev/null @@ -1,36 +0,0 @@ -from __future__ import annotations - -import enum -import typing as t - - -class Sentinel(enum.Enum): - """Enum used to define sentinel values. - - .. seealso:: - - `PEP 661 - Sentinel Values `_. - """ - - UNSET = object() - FLAG_NEEDS_VALUE = object() - - def __repr__(self) -> str: - return f"{self.__class__.__name__}.{self.name}" - - -UNSET = Sentinel.UNSET -"""Sentinel used to indicate that a value is not set.""" - -FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE -"""Sentinel used to indicate an option was passed as a flag without a -value but is not a flag option. - -``Option.consume_value`` uses this to prompt or use the ``flag_value``. -""" - -T_UNSET = t.Literal[UNSET] # type: ignore[valid-type] -"""Type hint for the :data:`UNSET` sentinel value.""" - -T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type] -"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value.""" diff --git a/venv/Lib/site-packages/click/_winconsole.py b/venv/Lib/site-packages/click/_winconsole.py deleted file mode 100644 index e56c7c6..0000000 --- a/venv/Lib/site-packages/click/_winconsole.py +++ /dev/null @@ -1,296 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. 
-from __future__ import annotations - -import collections.abc as cabc -import io -import sys -import time -import typing as t -from ctypes import Array -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -if t.TYPE_CHECKING: - try: - # Using `typing_extensions.Buffer` instead of `collections.abc` - # on Windows for some reason does not have `Sized` implemented. - from collections.abc import Buffer # type: ignore - except ImportError: - from typing_extensions import Buffer - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. 
- get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ # noqa: RUF012 - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]: - buf = Py_buffer() - flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - out: Array[c_char] = buffer_type.from_address(buf.buf) - return out - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle: int | None) -> None: - self.handle = handle - - def isatty(self) -> t.Literal[True]: - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self) -> t.Literal[True]: - return True - - def readinto(self, b: Buffer) -> int: - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self) -> t.Literal[True]: - return True - - @staticmethod - def _get_error_message(errno: int) -> str: - if errno == ERROR_SUCCESS: - return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b: Buffer) -> int: - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self) -> str: - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - 
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None -) -> t.TextIO | None: - if ( - get_buffer is None - or encoding not in {"utf-16-le", None} - or errors not in {"strict", None} - or not _is_console(f) - ): - return None - - func = _stream_factories.get(f.fileno()) - if func is None: - return None - - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/venv/Lib/site-packages/click/core.py b/venv/Lib/site-packages/click/core.py deleted file mode 100644 index 57f549c..0000000 --- a/venv/Lib/site-packages/click/core.py +++ /dev/null @@ -1,3415 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from collections import Counter -from contextlib import AbstractContextManager -from contextlib import contextmanager -from contextlib import ExitStack -from functools import update_wrapper -from gettext import gettext as _ -from gettext import ngettext -from itertools import repeat -from types import TracebackType - -from . import types -from ._utils import FLAG_NEEDS_VALUE -from ._utils import UNSET -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import NoArgsIsHelpError -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _OptionParser -from .parser import _split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound="t.Callable[..., t.Any]") -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: Context, incomplete: str -) -> cabc.Iterator[tuple[str, Command]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. 
- - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(Group, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_nested_chain( - base_command: Group, cmd_name: str, cmd: Command, register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, Group): - return - - if register: - message = ( - f"It is not possible to add the group {cmd_name!r} to another" - f" group {base_command.name!r} that is in chain mode." - ) - else: - message = ( - f"Found the group {cmd_name!r} as subcommand to another group " - f" {base_command.name!r} that is in chain mode. This is not supported." - ) - - raise RuntimeError(message) - - -def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size), strict=False)) - - -@contextmanager -def augment_usage_errors( - ctx: Context, param: Parameter | None = None -) -> cabc.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: cabc.Sequence[Parameter], - declaration_order: cabc.Sequence[Parameter], -) -> list[Parameter]: - """Returns all declared parameters in the order they should be processed. - - The declared parameters are re-shuffled depending on the order in which - they were invoked, as well as the eagerness of each parameters. - - The invocation order takes precedence over the declaration order. I.e. the - order in which the user provided them to the CLI is respected. - - This behavior and its effect on callback evaluation is detailed at: - https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order - """ - - def sort_key(item: Parameter) -> tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. 
- - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. - :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.2 - The ``protected_args`` attribute is deprecated and will be removed in - Click 9.0. ``args`` will contain remaining unparsed tokens. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. 
versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: type[HelpFormatter] = HelpFormatter - - def __init__( - self, - command: Command, - parent: Context | None = None, - info_name: str | None = None, - obj: t.Any | None = None, - auto_envvar_prefix: str | None = None, - default_map: cabc.MutableMapping[str, t.Any] | None = None, - terminal_width: int | None = None, - max_content_width: int | None = None, - resilient_parsing: bool = False, - allow_extra_args: bool | None = None, - allow_interspersed_args: bool | None = None, - ignore_unknown_options: bool | None = None, - help_option_names: list[str] | None = None, - token_normalize_func: t.Callable[[str], str] | None = None, - color: bool | None = None, - show_default: bool | None = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: dict[str, t.Any] = {} - #: the leftover arguments. - self.args: list[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self._protected_args: list[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. - self.obj: t.Any = obj - self._meta: dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. - if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: str | None = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: int | None = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). 
- self.max_content_width: int | None = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: list[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. - if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: str | None = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: bool | None = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: bool | None = show_default - - self._close_callbacks: list[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - @property - def protected_args(self) -> list[str]: - import warnings - - warnings.warn( - "'protected_args' is deprecated and will be removed in Click 9.0." - " 'args' will contain remaining unparsed tokens.", - DeprecationWarning, - stacklevel=2, - ) - return self._protected_args - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. 
code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> Context: - self._depth += 1 - push_context(self) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> bool | None: - self._depth -= 1 - exit_result: bool | None = None - if self._depth == 0: - exit_result = self._close_with_exception_info(exc_type, exc_value, tb) - pop_context() - - return exit_result - - @contextmanager - def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: AbstractContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. 
It calls the - resource's ``__enter__()`` method and returns the result. When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._close_with_exception_info(None, None, None) - - def _close_with_exception_info( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> bool | None: - """Unwind the exit stack by calling its :meth:`__exit__` providing the exception - information to allow for exception handling by the various resources registered - using :meth;`with_resource` - - :return: Whatever ``exit_stack.__exit__()`` returns. - """ - exit_result = self._exit_stack.__exit__(exc_type, exc_value, tb) - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - return exit_result - - @property - def command_path(self) -> str: - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. - """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> Context: - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: type[V]) -> V | None: - """Finds the closest object of a given type.""" - node: Context | None = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[True] = True - ) -> t.Any | None: ... 
- - @t.overload - def lookup_default( - self, name: str, call: t.Literal[False] = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def lookup_default(self, name: str, call: bool = True) -> t.Any | None: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - if self.default_map is not None: - value = self.default_map.get(name, UNSET) - - if call and callable(value): - return value() - - return value - - return UNSET - - def fail(self, message: str) -> t.NoReturn: - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> t.NoReturn: - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> t.NoReturn: - """Exits the application with a given exit code. - - .. versionchanged:: 8.2 - Callbacks and context managers registered with :meth:`call_on_close` - and :meth:`with_resource` are closed before exiting. - """ - self.close() - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: Command) -> Context: - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - @t.overload - def invoke( - self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> V: ... - - @t.overload - def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ... - - def invoke( - self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> t.Any | V: - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - - .. versionchanged:: 3.2 - A new context is created, and missing arguments use default values. - """ - if isinstance(callback, Command): - other_cmd = callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - callback = t.cast("t.Callable[..., V]", other_cmd.callback) - - ctx = self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - default_value = param.get_default(ctx) - # We explicitly hide the :attr:`UNSET` value to the user, as we - # choose to make it an implementation detail. And because ``invoke`` - # has been designed as part of Click public API, we return ``None`` - # instead. 
Refs: - # https://github.com/pallets/click/issues/3066 - # https://github.com/pallets/click/issues/3065 - # https://github.com/pallets/click/pull/3068 - if default_value is UNSET: - default_value = None - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, default_value - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = self - - with augment_usage_errors(self): - with ctx: - return callback(*args, **kwargs) - - def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. - if not isinstance(cmd, Command): - raise TypeError("Callback is not a command.") - - for param in self.params: - if param not in kwargs: - kwargs[param] = self.params[param] - - return self.invoke(cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> ParameterSource | None: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. - """ - return self._parameter_source.get(name) - - -class Command: - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. 
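
As a usage note on the two call styles above, a short sketch with two sibling commands (names are illustrative): ``invoke`` fills in the other command's own defaults for anything not passed explicitly, while ``forward`` re-uses the current context's matching parameters.

    import click

    @click.group()
    def cli():
        pass

    @cli.command()
    @click.option("--count", default=1)
    def hello(count):
        click.echo(f"hello x{count}")

    @cli.command()
    @click.option("--count", default=1)
    @click.pass_context
    def repeat(ctx, count):
        ctx.invoke(hello, count=42)  # explicit kwargs; missing ones use hello's defaults
        ctx.forward(hello)           # passes this context's --count along by name
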
- :param deprecated: If ``True`` or non-empty string, issues a message - indicating that the command is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. - - .. versionchanged:: 8.2 - This is the base class for all commands, not ``BaseCommand``. - ``deprecated`` can be set to a string as well to customize the - deprecation message. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: type[Context] = Context - - #: the default for the :attr:`Context.allow_extra_args` flag. - allow_extra_args = False - - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: str | None, - context_settings: cabc.MutableMapping[str, t.Any] | None = None, - callback: t.Callable[..., t.Any] | None = None, - params: list[Parameter] | None = None, - help: str | None = None, - epilog: str | None = None, - short_help: str | None = None, - options_metavar: str | None = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool | str = False, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. - self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings - - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. - self.params: list[Parameter] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self._help_option = None - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - return { - "name": self.name, - "params": [param.to_info_dict() for param in self.get_params(ctx)], - "help": self.help, - "epilog": self.epilog, - "short_help": self.short_help, - "hidden": self.hidden, - "deprecated": self.deprecated, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. 
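
The constructor parameters listed above can also be used directly, without the decorator API. A small sketch building an equivalent command by hand (names are illustrative):

    import click

    def greet(name, shout):
        msg = f"Hello, {name}!"
        click.echo(msg.upper() if shout else msg)

    cmd = click.Command(
        name="greet",
        callback=greet,
        params=[
            click.Argument(["name"]),
            click.Option(["--shout"], is_flag=True, help="Uppercase the greeting."),
        ],
        help="Greet NAME on stdout.",
        epilog="Shown at the end of --help.",
    )

    if __name__ == "__main__":
        cmd()  # __call__ is an alias for main()
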
- """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> list[Parameter]: - params = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - params = [*params, help_option] - - if __debug__: - import warnings - - opts = [opt for param in params for opt in param.opts] - opts_counter = Counter(opts) - duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1) - - for duplicate_opt in duplicate_opts: - warnings.warn( - ( - f"The parameter {duplicate_opt} is used more than once. " - "Remove its duplicate as parameters should be unique." - ), - stacklevel=3, - ) - - return params - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. - """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. - """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> list[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> Option | None: - """Returns the help option object. - - Skipped if :attr:`add_help_option` is ``False``. - - .. versionchanged:: 8.1.8 - The help option is now cached to avoid creating it multiple times. - """ - help_option_names = self.get_help_option_names(ctx) - - if not help_option_names or not self.add_help_option: - return None - - # Cache the help option object in private _help_option attribute to - # avoid creating it multiple times. Not doing this will break the - # callback odering by iter_params_for_processing(), which relies on - # object comparison. - if self._help_option is None: - # Avoid circular import. - from .decorators import help_option - - # Apply help_option decorator and pop resulting option - help_option(*help_option_names)(self) - self._help_option = self.params.pop() # type: ignore[assignment] - - return self._help_option - - def make_parser(self, ctx: Context) -> _OptionParser: - """Creates the underlying option parser for this command.""" - parser = _OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. 
- """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. - - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - if self.help is not None: - # truncate the help text to the first form feed - text = inspect.cleandoc(self.help).partition("\f")[0] - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - if text: - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: Context | None = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. 
- """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class(self, info_name=info_name, parent=parent, **extra) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - _, args = param.handle_parse_result(ctx, opts, args) - - # We now have all parameters' values into `ctx.params`, but the data may contain - # the `UNSET` sentinel. - # Convert `UNSET` to `None` to ensure that the user doesn't see `UNSET`. - # - # Waiting until after the initial parse to convert allows us to treat `UNSET` - # more like a missing value when multiple params use the same name. - # Refs: - # https://github.com/pallets/click/issues/3071 - # https://github.com/pallets/click/pull/3079 - for name, value in ctx.params.items(): - if value is UNSET: - ctx.params[name] = None - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The command {name!r} is deprecated.{extra_message}" - ).format(name=self.name, extra_message=extra_message) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: list[CompletionItem] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, Group) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx._protected_args - ) - - return results - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: t.Literal[True] = True, - **extra: t.Any, - ) -> t.NoReturn: ... - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: ... - - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. 
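
The ``standalone_mode`` behaviour described above is easiest to see from the caller's side; a minimal sketch (useful in tests or when embedding a CLI):

    import click

    @click.command()
    @click.option("--count", default=3, type=int)
    def total(count):
        return sum(range(count))

    # With standalone_mode disabled, main() returns whatever invoke() returned
    # instead of calling sys.exit(), and ClickException/UsageError propagate.
    result = total.main(["--count", "5"], standalone_mode=False)
    assert result == 10
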
- self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! - # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt) as e: - echo(file=sys.stderr) - raise Abort() from e - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str | None = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - - .. versionchanged:: 8.2.0 - Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). - """ - if complete_var is None: - complete_name = prog_name.replace("-", "_").replace(".", "_") - complete_var = f"_{complete_name}_COMPLETE".upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class _FakeSubclassCheck(type): - def __subclasscheck__(cls, subclass: type) -> bool: - return issubclass(subclass, cls.__bases__[0]) - - def __instancecheck__(cls, instance: t.Any) -> bool: - return isinstance(instance, cls.__bases__[0]) - - -class _BaseCommand(Command, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Command`` instead. - """ - - -class Group(Command): - """A group is a command that nests other commands (or more groups). - - :param name: The name of the group command. - :param commands: Map names to :class:`Command` objects. Can be a list, which - will use :attr:`Command.name` as the keys. - :param invoke_without_command: Invoke the group's callback even if a - subcommand is not given. - :param no_args_is_help: If no arguments are given, show the group's help and - exit. 
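
A short sketch of the completion hand-off above: the trigger variable is derived from ``prog_name`` unless ``complete_var`` is passed explicitly (the program name ``tool`` is illustrative).

    import click

    @click.group()
    def tool():
        pass

    if __name__ == "__main__":
        # The default variable for a program called "tool" would be
        # _TOOL_COMPLETE (prog name upper-cased, "-" and "." replaced by "_").
        # It can be renamed by passing complete_var:
        tool.main(complete_var="TOOL_COMPLETE")
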
Defaults to the opposite of ``invoke_without_command``. - :param subcommand_metavar: How to represent the subcommand argument in help. - The default will represent whether ``chain`` is set or not. - :param chain: Allow passing more than one subcommand argument. After parsing - a command's arguments, if any arguments remain another command will be - matched, and so on. - :param result_callback: A function to call after the group's and - subcommand's callbacks. The value returned by the subcommand is passed. - If ``chain`` is enabled, the value will be a list of values returned by - all the commands. If ``invoke_without_command`` is enabled, the value - will be the value returned by the group's callback, or an empty list if - ``chain`` is enabled. - :param kwargs: Other arguments passed to :class:`Command`. - - .. versionchanged:: 8.0 - The ``commands`` argument can be a list of command objects. - - .. versionchanged:: 8.2 - Merged with and replaces the ``MultiCommand`` base class. - """ - - allow_extra_args = True - allow_interspersed_args = False - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: type[Command] | None = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. versionadded:: 8.0 - group_class: type[Group] | type[type] | None = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: str | None = None, - commands: cabc.MutableMapping[str, Command] - | cabc.Sequence[Command] - | None = None, - invoke_without_command: bool = False, - no_args_is_help: bool | None = None, - subcommand_metavar: str | None = None, - chain: bool = False, - result_callback: t.Callable[..., t.Any] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: cabc.MutableMapping[str, Command] = commands - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "A group in chain mode cannot have optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def add_command(self, cmd: Command, name: str | None = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_nested_chain(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'command(**kwargs)(callable)' to provide arguments." - ) - (func,) = args - args = () - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> Group: ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'group(**kwargs)(callable)' to provide arguments." 
- ) - (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> Group: - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - inner = old_callback(value, *args, **kwargs) - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv # type: ignore[return-value] - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - """Given a context and a command name, this returns a :class:`Command` - object if it exists or returns ``None``. - """ - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> list[str]: - """Returns a list of subcommand names in the order they should appear.""" - return sorted(self.commands) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx._protected_args = rest - ctx.args = [] - elif rest: - ctx._protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx._protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx._protected_args, *ctx.args] - ctx.args = [] - ctx._protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: list[str] - ) -> tuple[str | None, Command | None, list[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
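
The chain-mode flow above pairs naturally with ``result_callback``; a sketch where each subcommand's return value ends up in the list handed to the callback:

    import click

    @click.group(chain=True)
    def pipeline():
        pass

    @pipeline.result_callback()
    def run_pipeline(results):
        # With chain=True the callback receives the list of subcommand returns.
        click.echo(" -> ".join(results))

    @pipeline.command()
    def extract():
        return "extract"

    @pipeline.command()
    def load():
        return "load"

    # e.g.  pipeline extract load   prints "extract -> load"
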
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if _split_opt(cmd_name)[0]: - self.parse_args(ctx, args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class _MultiCommand(Group, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Group`` instead. - """ - - -class CommandCollection(Group): - """A :class:`Group` that looks up subcommands on other groups. If a command - is not found on this group, each registered source is checked in order. - Parameters on a source are not added to this group, and a source's callback - is not invoked when invoking its commands. In other words, this "flattens" - commands in many groups into this one group. - - :param name: The name of the group command. - :param sources: A list of :class:`Group` objects to look up commands from. - :param kwargs: Other arguments passed to :class:`Group`. - - .. versionchanged:: 8.2 - This is a subclass of ``Group``. Commands are looked up first on this - group, then each of its sources. - """ - - def __init__( - self, - name: str | None = None, - sources: list[Group] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - #: The list of registered groups. - self.sources: list[Group] = sources or [] - - def add_source(self, group: Group) -> None: - """Add a group as a source of commands.""" - self.sources.append(group) - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - rv = super().get_command(ctx, cmd_name) - - if rv is not None: - return rv - - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_nested_chain(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> list[str]: - rv: set[str] = set(super().list_commands(ctx)) - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. 
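
A sketch of ``CommandCollection`` flattening two groups into one CLI (group and command names are illustrative):

    import click

    @click.group()
    def base():
        pass

    @base.command()
    def ping():
        click.echo("pong")

    @click.group()
    def extras():
        pass

    @extras.command()
    def version():
        click.echo("0.0.0")

    cli = click.CommandCollection(sources=[base, extras])

    if __name__ == "__main__":
        cli()  # both `ping` and `version` resolve through the sources
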
Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The latter is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: A function to further process or validate the value - after type conversion. It is called as ``f(ctx, param, value)`` - and must return the value. It is called for all sources, - including prompts. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). If ``nargs=-1``, all remaining - parameters are collected. - :param metavar: how the value is represented in the help page. - :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will inverse the - order of processing. - :param envvar: environment variable(s) that are used to provide a default value for - this parameter. This can be a string or a sequence of strings. If a sequence is - given, only the first non-empty environment variable is used for the parameter. - :param shell_complete: A function that returns custom shell - completions. Used instead of the param's type completion if - given. Takes ``ctx, param, incomplete`` and must return a list - of :class:`~click.shell_completion.CompletionItem` or a list of - strings. - :param deprecated: If ``True`` or non-empty string, issues a message - indicating that the argument is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. A deprecated parameter - cannot be required, a ValueError will be raised otherwise. - - .. versionchanged:: 8.2.0 - Introduction of ``deprecated``. - - .. versionchanged:: 8.2 - Adding duplicate parameter names to a :class:`~click.core.Command` will - result in a ``UserWarning`` being shown. - - .. versionchanged:: 8.2 - Adding duplicate parameter names to a :class:`~click.core.Command` will - result in a ``UserWarning`` being shown. - - .. versionchanged:: 8.0 - ``process_value`` validates required parameters and bounded - ``nargs``, and invokes the parameter callback before returning - the value. This allows the callback to validate prompts. - ``full_process_value`` is removed. - - .. versionchanged:: 8.0 - ``autocompletion`` is renamed to ``shell_complete`` and has new - semantics described above. The old name is deprecated and will - be removed in 8.1, until then it will be wrapped to match the - new requirements. - - .. versionchanged:: 8.0 - For ``multiple=True, nargs>1``, the default must be a list of - tuples. - - .. versionchanged:: 8.0 - Setting a default is no longer required for ``nargs>1``, it will - default to ``None``. ``multiple=True`` or ``nargs=-1`` will - default to ``()``. - - .. 
versionchanged:: 7.1 - Empty environment variables are ignored rather than taking the - empty string value. This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - type: types.ParamType | t.Any | None = None, - required: bool = False, - # XXX The default historically embed two concepts: - # - the declaration of a Parameter object carrying the default (handy to - # arbitrage the default value of coupled Parameters sharing the same - # self.name, like flag options), - # - and the actual value of the default. - # It is confusing and is the source of many issues discussed in: - # https://github.com/pallets/click/pull/3030 - # In the future, we might think of splitting it in two, not unlike - # Option.is_flag and Option.flag_value: we could have something like - # Parameter.is_default and Parameter.default_value. - default: t.Any | t.Callable[[], t.Any] | None = UNSET, - callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None, - nargs: int | None = None, - multiple: bool = False, - metavar: str | None = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: str | cabc.Sequence[str] | None = None, - shell_complete: t.Callable[ - [Context, Parameter, str], list[CompletionItem] | list[str] - ] - | None = None, - deprecated: bool | str = False, - ) -> None: - self.name: str | None - self.opts: list[str] - self.secondary_opts: list[str] - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type: types.ParamType = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default: t.Any | t.Callable[[], t.Any] | None = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - self.deprecated = deprecated - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." - ) - - if required and deprecated: - raise ValueError( - f"The {self.param_type_name} '{self.human_readable_name}' " - "is deprecated and still required. A deprecated " - f"{self.param_type_name} cannot be required." - ) - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionchanged:: 8.3.0 - Returns ``None`` for the :attr:`default` if it was not set. - - .. 
versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - # We explicitly hide the :attr:`UNSET` value to the user, as we choose to - # make it an implementation detail. And because ``to_info_dict`` has been - # designed for documentation purposes, we return ``None`` instead. - "default": self.default if self.default is not UNSET else None, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(param=self, ctx=ctx) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Any | t.Callable[[], t.Any] | None: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is UNSET: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, t.Any] - ) -> tuple[t.Any, ParameterSource]: - """Returns the parameter value produced by the parser. - - If the parser did not produce a value from user input, the value is either - sourced from the environment variable, the default map, or the parameter's - default value. In that order of precedence. - - If no value is found, an internal sentinel value is returned. - - :meta private: - """ - # Collect from the parse the value passed by the user to the CLI. - value = opts.get(self.name, UNSET) # type: ignore - # If the value is set, it means it was sourced from the command line by the - # parser, otherwise it left unset by default. 
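
The precedence implemented by ``consume_value`` (command line, then environment, then ``default_map``, then the declared default) can be observed through ``get_parameter_source``; a small sketch:

    import click

    @click.command()
    @click.option("--retries", envvar="RETRIES", default=3, type=int, show_default=True)
    @click.pass_context
    def sync(ctx, retries):
        source = ctx.get_parameter_source("retries")
        click.echo(f"retries={retries} (from {source.name})")

    # sync                 -> retries=3 (from DEFAULT)
    # RETRIES=5 sync       -> retries=5 (from ENVIRONMENT)
    # sync --retries 9     -> retries=9 (from COMMANDLINE)
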
- source = ( - ParameterSource.COMMANDLINE - if value is not UNSET - else ParameterSource.DEFAULT - ) - - if value is UNSET: - envvar_value = self.value_from_envvar(ctx) - if envvar_value is not None: - value = envvar_value - source = ParameterSource.ENVIRONMENT - - if value is UNSET: - default_map_value = ctx.lookup_default(self.name) # type: ignore - if default_map_value is not UNSET: - value = default_map_value - source = ParameterSource.DEFAULT_MAP - - if value is UNSET: - default_value = self.get_default(ctx) - if default_value is not UNSET: - value = default_value - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the parameter's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - if self.multiple or self.nargs == -1: - return () - else: - return value - - def check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. - raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - # Define the conversion function based on nargs and type. - - if self.nargs == 1 or self.type.is_composite: - - def convert(value: t.Any) -> t.Any: - return self.type(value, param=self, ctx=ctx) - - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - """A value is considered missing if: - - - it is :attr:`UNSET`, - - or if it is an empty sequence while the parameter is suppose to have - non-single value (i.e. :attr:`nargs` is not ``1`` or :attr:`multiple` is - set). - - :meta private: - """ - if value is UNSET: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - """Process the value of this parameter: - - 1. Type cast the value using :meth:`type_cast_value`. - 2. Check if the value is missing (see: :meth:`value_is_missing`), and raise - :exc:`MissingParameter` if it is required. - 3. If a :attr:`callback` is set, call it to have the value replaced by the - result of the callback. If the value was not set, the callback receive - ``None``. This keep the legacy behavior as it was before the introduction of - the :attr:`UNSET` sentinel. 
- - :meta private: - """ - # shelter `type_cast_value` from ever seeing an `UNSET` value by handling the - # cases in which `UNSET` gets special treatment explicitly at this layer - # - # Refs: - # https://github.com/pallets/click/issues/3069 - if value is UNSET: - if self.multiple or self.nargs == -1: - value = () - else: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - # Legacy case: UNSET is not exposed directly to the callback, but converted - # to None. - if value is UNSET: - value = None - - # Search for parameters with UNSET values in the context. - unset_keys = {k: None for k, v in ctx.params.items() if v is UNSET} - # No UNSET values, call the callback as usual. - if not unset_keys: - value = self.callback(ctx, self, value) - - # Legacy case: provide a temporarily manipulated context to the callback - # to hide UNSET values as None. - # - # Refs: - # https://github.com/pallets/click/issues/3136 - # https://github.com/pallets/click/pull/3137 - else: - # Add another layer to the context stack to clearly hint that the - # context is temporarily modified. - with ctx: - # Update the context parameters to replace UNSET with None. - ctx.params.update(unset_keys) - # Feed these fake context parameters to the callback. - value = self.callback(ctx, self, value) - # Restore the UNSET values in the context parameters. - ctx.params.update( - { - k: UNSET - for k in unset_keys - # Only restore keys that are present and still None, in case - # the callback modified other parameters. - if k in ctx.params and ctx.params[k] is None - } - ) - - return value - - def resolve_envvar_value(self, ctx: Context) -> str | None: - """Returns the value found in the environment variable(s) attached to this - parameter. - - Environment variables values are `always returned as strings - `_. - - This method returns ``None`` if: - - - the :attr:`envvar` property is not set on the :class:`Parameter`, - - the environment variable is not found in the environment, - - the variable is found in the environment but its value is empty (i.e. the - environment variable is present but has an empty string). - - If :attr:`envvar` is setup with multiple environment variables, - then only the first non-empty value is returned. - - .. caution:: - - The raw value extracted from the environment is not normalized and is - returned as-is. Any normalization or reconciliation is performed later by - the :class:`Parameter`'s :attr:`type`. - - :meta private: - """ - if not self.envvar: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - # Return the first non-empty value of the list of environment variables. - if rv: - return rv - # Else, absence of value is interpreted as an environment variable that - # is not set, so proceed to the next one. - - return None - - def value_from_envvar(self, ctx: Context) -> str | cabc.Sequence[str] | None: - """Process the raw environment variable string for this parameter. - - Returns the string as-is or splits it into a sequence of strings if the - parameter is expecting multiple values (i.e. its :attr:`nargs` property is set - to a value other than ``1``). 
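
For the multi-value case above, the raw environment string is split by the parameter's type (whitespace for the default types); a sketch with ``multiple=True``:

    import click

    @click.command()
    @click.option("--plugin", "plugins", multiple=True, envvar="APP_PLUGINS")
    def run(plugins):
        # APP_PLUGINS="auth metrics"  ->  plugins == ("auth", "metrics")
        # when no --plugin is given on the command line.
        click.echo(", ".join(plugins))
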
- - :meta private: - """ - rv = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - return self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str] - ) -> tuple[t.Any, list[str]]: - """Process the value produced by the parser from user input. - - Always process the value through the Parameter's :attr:`type`, wherever it - comes from. - - If the parameter is deprecated, this method warn the user about it. But only if - the value has been explicitly set by the user (and as such, is not coming from - a default). - - :meta private: - """ - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - - ctx.set_parameter_source(self.name, source) # type: ignore - - # Display a deprecation warning if necessary. - if ( - self.deprecated - and value is not UNSET - and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - ): - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The {param_type} {name!r} is deprecated." - "{extra_message}" - ).format( - param_type=self.param_type_name, - name=self.human_readable_name, - extra_message=extra_message, - ) - echo(style(message, fg="red"), err=True) - - # Process the value through the parameter's type. - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - # In resilient parsing mode, we do not want to fail the command if the - # value is incompatible with the parameter type, so we reset the value - # to UNSET, which will be interpreted as a missing value. - value = UNSET - - # Add parameter's value to the context. - if ( - self.expose_value - # We skip adding the value if it was previously set by another parameter - # targeting the same variable name. This prevents parameters competing for - # the same name to override each other. - and (self.name not in ctx.params or ctx.params[self.name] is UNSET) - ): - # Click is logically enforcing that the name is None if the parameter is - # not to be exposed. We still assert it here to please the type checker. - assert self.name is not None, ( - f"{self!r} parameter's name should not be None when exposing value." - ) - ctx.params[self.name] = value - - return value, args - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - pass - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. - Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast("list[CompletionItem]", results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page and error messages. - Normally, environment variables are not shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. A deprecated option cannot be - prompted. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - :param attrs: Other command arguments described in :class:`Parameter`. - - .. versionchanged:: 8.2 - ``envvar`` used with ``flag_value`` will always use the ``flag_value``, - previously it would use the value of the environment variable. - - .. versionchanged:: 8.1 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. 
- """ - - param_type_name = "option" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - show_default: bool | str | None = None, - prompt: bool | str = False, - confirmation_prompt: bool | str = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: bool | None = None, - flag_value: t.Any = UNSET, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: types.ParamType | t.Any | None = None, - help: str | None = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - deprecated: bool | str = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - super().__init__( - param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs - ) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: str | None = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - if deprecated: - deprecated_message = ( - f"(DEPRECATED: {deprecated})" - if isinstance(deprecated, str) - else "(DEPRECATED)" - ) - help = help + deprecated_message if help is not None else deprecated_message - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # The _flag_needs_value property tells the parser that this option is a flag - # that cannot be used standalone and needs a value. With this information, the - # parser can determine whether to consider the next user-provided argument in - # the CLI as a value for this flag or as a new option. - # If prompt is enabled but not required, then it opens the possibility for the - # option to gets its value from the user. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - # Auto-detect if this is a flag or not. - if is_flag is None: - # Implicitly a flag because flag_value was set. - if flag_value is not UNSET: - is_flag = True - # Not a flag, but when used as a flag it shows a prompt. - elif self._flag_needs_value: - is_flag = False - # Implicitly a flag because secondary options names were given. - elif self.secondary_opts: - is_flag = True - # The option is explicitly not a flag. But we do not know yet if it needs a - # value or not. So we look at the default value to determine it. - elif is_flag is False and not self._flag_needs_value: - self._flag_needs_value = self.default is UNSET - - if is_flag: - # Set missing default for flags if not explicitly required or prompted. - if self.default is UNSET and not self.required and not self.prompt: - if multiple: - self.default = () - - # Auto-detect the type of the flag based on the flag_value. - if type is None: - # A flag without a flag_value is a boolean flag. - if flag_value is UNSET: - self.type: types.ParamType = types.BoolParamType() - # If the flag value is a boolean, use BoolParamType. - elif isinstance(flag_value, bool): - self.type = types.BoolParamType() - # Otherwise, guess the type from the flag value. - else: - self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = bool(is_flag) - self.is_bool_flag: bool = bool( - is_flag and isinstance(self.type, types.BoolParamType) - ) - self.flag_value: t.Any = flag_value - - # Set boolean flag default to False if unset and not required. 
- if self.is_bool_flag: - if self.default is UNSET and not self.required: - self.default = False - - # Support the special case of aligning the default value with the flag_value - # for flags whose default is explicitly set to True. Note that as long as we - # have this condition, there is no way a flag can have a default set to True, - # and a flag_value set to something else. Refs: - # https://github.com/pallets/click/issues/3024#issuecomment-3146199461 - # https://github.com/pallets/click/pull/3030/commits/06847da - if self.default is True and self.flag_value is not UNSET: - self.default = self.flag_value - - # Set the default flag_value if it is not set. - if self.flag_value is UNSET: - if self.is_flag: - self.flag_value = True - else: - self.flag_value = None - - # Counting. - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if self.default is UNSET: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if deprecated and prompt: - raise ValueError("`deprecated` options cannot use `prompt`.") - - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." - ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> dict[str, t.Any]: - """ - .. versionchanged:: 8.3.0 - Returns ``None`` for the :attr:`flag_value` if it was not set. - """ - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - # We explicitly hide the :attr:`UNSET` value to the user, as we choose to - # make it an implementation detail. And because ``to_info_dict`` has been - # designed for documentation purposes, we return ``None`` instead. - flag_value=self.flag_value if self.flag_value is not UNSET else None, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def get_error_hint(self, ctx: Context) -> str: - result = super().get_error_hint(ctx) - if self.show_envvar and self.envvar is not None: - result += f" (env var: '{self.envvar}')" - return result - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(_split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." 
- ) - else: - possible_names.append(_split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError( - f"Could not determine name for option with declarations {decls!r}" - ) - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" - ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: cabc.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar(ctx=ctx)}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - - extra = self.get_help_extra(ctx) - extra_items = [] - if "envvars" in extra: - extra_items.append( - _("env var: {var}").format(var=", ".join(extra["envvars"])) - ) - if "default" in extra: - extra_items.append(_("default: {default}").format(default=extra["default"])) - if "range" in extra: - extra_items.append(extra["range"]) - if "required" in extra: - extra_items.append(_(extra["required"])) - - if extra_items: - extra_str = "; ".join(extra_items) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra: - extra: types.OptionHelpExtra = {} - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - if isinstance(envvar, str): - extra["envvars"] = (envvar,) - else: - extra["envvars"] = tuple(str(d) for d in envvar) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. 
- resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or ( - show_default and (default_value not in (None, UNSET)) - ): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif isinstance(default_value, enum.Enum): - default_string = default_value.name - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. - default_string = _split_opt( - (self.opts if default_value else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - elif default_value == "": - default_string = '""' - else: - default_string = str(default_value) - - if default_string: - extra["default"] = default_string - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra["range"] = range_str - - if self.required: - extra["required"] = "required" - - return extra - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to lock in the value before - # attempting any user interaction. - default = self.get_default(ctx) - - # A boolean flag can use a simplified [y/n] confirmation prompt. - if self.is_bool_flag: - # If we have no boolean default, we force the user to explicitly provide - # one. - if default in (UNSET, None): - default = None - # Nothing prevent you to declare an option that is simultaneously: - # 1) auto-detected as a boolean flag, - # 2) allowed to prompt, and - # 3) still declare a non-boolean default. - # This forced casting into a boolean is necessary to align any non-boolean - # default to the prompt, which is going to be a [y/n]-style confirmation - # because the option is still a boolean flag. That way, instead of [y/n], - # we get [Y/n] or [y/N] depending on the truthy value of the default. - # Refs: https://github.com/pallets/click/pull/3030#discussion_r2289180249 - else: - default = bool(default) - return confirm(self.prompt, default) - - # If show_default is set to True/False, provide this to `prompt` as well. For - # non-bool values of `show_default`, we use `prompt`'s default behavior - prompt_kwargs: t.Any = {} - if isinstance(self.show_default, bool): - prompt_kwargs["show_default"] = self.show_default - - return prompt( - self.prompt, - # Use ``None`` to inform the prompt() function to reiterate until a valid - # value is provided by the user if we have no default. 
- default=None if default is UNSET else default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - **prompt_kwargs, - ) - - def resolve_envvar_value(self, ctx: Context) -> str | None: - """:class:`Option` resolves its environment variable the same way as - :func:`Parameter.resolve_envvar_value`, but it also supports - :attr:`Context.auto_envvar_prefix`. If we could not find an environment from - the :attr:`envvar` property, we fallback on :attr:`Context.auto_envvar_prefix` - to build dynamiccaly the environment variable name using the - :python:`{ctx.auto_envvar_prefix}_{self.name.upper()}` template. - - :meta private: - """ - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Any: - """For :class:`Option`, this method processes the raw environment variable - string the same way as :func:`Parameter.value_from_envvar` does. - - But in the case of non-boolean flags, the value is analyzed to determine if the - flag is activated or not, and returns a boolean of its activation, or the - :attr:`flag_value` if the latter is set. - - This method also takes care of repeated options (i.e. options with - :attr:`multiple` set to ``True``). - - :meta private: - """ - rv = self.resolve_envvar_value(ctx) - - # Absent environment variable or an empty string is interpreted as unset. - if rv is None: - return None - - # Non-boolean flags are more liberal in what they accept. But a flag being a - # flag, its envvar value still needs to be analyzed to determine if the flag is - # activated or not. - if self.is_flag and not self.is_bool_flag: - # If the flag_value is set and match the envvar value, return it - # directly. - if self.flag_value is not UNSET and rv == self.flag_value: - return self.flag_value - # Analyze the envvar value as a boolean to know if the flag is - # activated or not. - return types.BoolParamType.str_to_bool(rv) - - # Split the envvar value if it is allowed to be repeated. - value_depth = (self.nargs != 1) + bool(self.multiple) - if value_depth > 0: - multi_rv = self.type.split_envvar_value(rv) - if self.multiple and self.nargs != 1: - multi_rv = batch(multi_rv, self.nargs) # type: ignore[assignment] - - return multi_rv - - return rv - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, Parameter] - ) -> tuple[t.Any, ParameterSource]: - """For :class:`Option`, the value can be collected from an interactive prompt - if the option is a flag that needs a value (and the :attr:`prompt` property is - set). - - Additionally, this method handles flag option that are activated without a - value, in which case the :attr:`flag_value` is returned. - - :meta private: - """ - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option is allowed to as a flag - # without a value. - if value is FLAG_NEEDS_VALUE: - # If the option allows for a prompt, we start an interaction with the user. - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - # Else the flag takes its flag_value as value. 
- else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - # A flag which is activated always returns the flag value, unless the value - # comes from the explicitly sets default. - elif ( - self.is_flag - and value is True - and not self.is_bool_flag - and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - ): - value = self.flag_value - - # Re-interpret a multiple option which has been sent as-is by the parser. - # Here we replace each occurrence of value-less flags (marked by the - # FLAG_NEEDS_VALUE sentinel) with the flag_value. - elif ( - self.multiple - and value is not UNSET - and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - and any(v is FLAG_NEEDS_VALUE for v in value) - ): - value = [self.flag_value if v is FLAG_NEEDS_VALUE else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt for one to the user - # if prompting is enabled. - elif ( - ( - value is UNSET - or source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - ) - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - # process_value has to be overridden on Options in order to capture - # `value == UNSET` cases before `type_cast_value()` gets called. - # - # Refs: - # https://github.com/pallets/click/issues/3069 - if self.is_flag and not self.required and self.is_bool_flag and value is UNSET: - value = False - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - # in the normal case, rely on Parameter.process_value - return super().process_value(ctx, value) - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the constructor of :class:`Parameter`. - """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: cabc.Sequence[str], - required: bool | None = None, - **attrs: t.Any, - ) -> None: - # Auto-detect the requirement status of the argument if not explicitly set. - if required is None: - # The argument gets automatically required if it has no explicit default - # value set and is setup to match at least one value. - if attrs.get("default", UNSET) is UNSET: - required = attrs.get("nargs", 1) > 0 - # If the argument has a default value, it is not required. - else: - required = False - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(param=self, ctx=ctx) - if not var: - var = self.name.upper() # type: ignore - if self.deprecated: - var += "!" - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." 
- return var - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Argument is marked as exposed, but does not have a name.") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}: {decls}." - ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [self.make_metavar(ctx)] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar(ctx)}'" - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - raise AttributeError(name) diff --git a/venv/Lib/site-packages/click/decorators.py b/venv/Lib/site-packages/click/decorators.py deleted file mode 100644 index 21f4c34..0000000 --- a/venv/Lib/site-packages/click/decorators.py +++ /dev/null @@ -1,551 +0,0 @@ -from __future__ import annotations - -import inspect -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") -T = t.TypeVar("T") -_AnyCallable = t.Callable[..., t.Any] -FC = t.TypeVar("FC", bound="_AnyCallable | Command") - - -def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]: - """Marks a callback as wanting to receive the current context - object as first argument. - """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(new_func, f) - - -def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - -def make_pass_decorator( - object_type: type[T], ensure: bool = False -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. 
- - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - - obj: T | None - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." - ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - return decorator - - -def pass_meta_key( - key: str, *, doc_description: str | None = None -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -# variant: no call, directly as decorator for a function. -@t.overload -def command(name: _AnyCallable) -> Command: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) -@t.overload -def command( - name: str | None, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... - - -# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) -@t.overload -def command( - name: None = None, - *, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def command( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Command]: ... - - -def command( - name: str | _AnyCallable | None = None, - cls: type[CmdType] | None = None, - **attrs: t.Any, -) -> Command | t.Callable[[_AnyCallable], Command | CmdType]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. 
- - The name of the command defaults to the name of the function, converted to - lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes - ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example, - ``init_data_command`` becomes ``init-data``. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: The name of the command. Defaults to modifying the function's - name as described above. - :param cls: The command class to create. Defaults to :class:`Command`. - - .. versionchanged:: 8.2 - The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are - removed when generating the name. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. - """ - - func: t.Callable[[_AnyCallable], t.Any] | None = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = t.cast("type[CmdType]", Command) - - def decorator(f: _AnyCallable) -> CmdType: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - if t.TYPE_CHECKING: - assert cls is not None - assert not callable(name) - - if name is not None: - cmd_name = name - else: - cmd_name = f.__name__.lower().replace("_", "-") - cmd_left, sep, suffix = cmd_name.rpartition("-") - - if sep and suffix in {"command", "cmd", "group", "grp"}: - cmd_name = cmd_left - - cmd = cls(name=cmd_name, callback=f, params=params, **attrs) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -GrpType = t.TypeVar("GrpType", bound=Group) - - -# variant: no call, directly as decorator for a function. -@t.overload -def group(name: _AnyCallable) -> Group: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) -@t.overload -def group( - name: str | None, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) -@t.overload -def group( - name: None = None, - *, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def group( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Group]: ... 
- - -def group( - name: str | _AnyCallable | None = None, - cls: type[GrpType] | None = None, - **attrs: t.Any, -) -> Group | t.Callable[[_AnyCallable], Group | GrpType]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if cls is None: - cls = t.cast("type[GrpType]", Group) - - if callable(name): - return command(cls=cls, **attrs)(name) - - return command(name, cls, **attrs) - - -def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument( - *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default argument class, refer to :class:`Argument` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Argument - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def option( - *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default option class, refer to :class:`Option` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Option - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: str | None = None, - *param_decls: str, - package_name: str | None = None, - prog_name: str | None = None, - message: str | None = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. 
- """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - import importlib.metadata - - try: - version = importlib.metadata.version(package_name) - except importlib.metadata.PackageNotFoundError: - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - message % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Pre-configured ``--help`` option which immediately prints the help page - and exits the program. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def show_help(ctx: Context, param: Parameter, value: bool) -> None: - """Callback that print the help page on ```` and exits.""" - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs.setdefault("callback", show_help) - - return option(*param_decls, **kwargs) diff --git a/venv/Lib/site-packages/click/exceptions.py b/venv/Lib/site-packages/click/exceptions.py deleted file mode 100644 index 4d782ee..0000000 --- a/venv/Lib/site-packages/click/exceptions.py +++ /dev/null @@ -1,308 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .globals import resolve_color_default -from .utils import echo -from .utils import format_filename - -if t.TYPE_CHECKING: - from .core import Command - from .core import Context - from .core import Parameter - - -def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - # The context will be removed by the time we print the message, so cache - # the color settings here to be used later on (in `show`) - self.show_color: bool | None = resolve_color_default() - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=self.show_color, - ) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: Context | None = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd: Command | None = self.ctx.command if self.ctx else None - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. 
This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: cabc.Sequence[str] | str | None = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: str | None = None, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: cabc.Sequence[str] | str | None = None, - param_type: str | None = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: cabc.Sequence[str] | str | None = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message( - param=self.param, ctx=self.ctx - ) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: str | None = None, - possibilities: cabc.Sequence[str] | None = None, - ctx: Context | None = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: Context | None = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class NoArgsIsHelpError(UsageError): - def __init__(self, ctx: Context) -> None: - self.ctx: Context - super().__init__(ctx.get_help(), ctx=ctx) - - def show(self, file: t.IO[t.Any] | None = None) -> None: - echo(self.format_message(), file=file, err=True, color=self.ctx.color) - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: str | None = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename: str = format_filename(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code: int = code diff --git a/venv/Lib/site-packages/click/formatting.py b/venv/Lib/site-packages/click/formatting.py deleted file mode 100644 index 0b64f83..0000000 --- a/venv/Lib/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import _split_opt - -# Can force a width. 
This is used by the test system -FORCED_WIDTH: int | None = None - - -def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]: - widths: dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: cabc.Iterable[tuple[str, str]], col_count: int -) -> cabc.Iterator[tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: list[tuple[int, bool, str]] = [] - buf: list[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. 
- """ - - def __init__( - self, - indent_increment: int = 2, - width: int | None = None, - max_width: int | None = None, - ) -> None: - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - import shutil - - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent: int = 0 - self.buffer: list[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. - """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: cabc.Sequence[tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. 
- """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> cabc.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. - """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> cabc.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = _split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/Lib/site-packages/click/globals.py b/venv/Lib/site-packages/click/globals.py deleted file mode 100644 index a2f9172..0000000 --- a/venv/Lib/site-packages/click/globals.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import annotations - -import typing as t -from threading import local - -if t.TYPE_CHECKING: - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: t.Literal[False] = False) -> Context: ... - - -@t.overload -def get_current_context(silent: bool = ...) -> Context | None: ... - - -def get_current_context(silent: bool = False) -> Context | None: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. 
- """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: Context) -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: bool | None = None) -> bool | None: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. - """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/venv/Lib/site-packages/click/parser.py b/venv/Lib/site-packages/click/parser.py deleted file mode 100644 index 1ea1f71..0000000 --- a/venv/Lib/site-packages/click/parser.py +++ /dev/null @@ -1,532 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" - -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from ._utils import FLAG_NEEDS_VALUE -from ._utils import UNSET -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - from ._utils import T_FLAG_NEEDS_VALUE - from ._utils import T_UNSET - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - - -def _unpack_args( - args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int] -) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with ``UNSET``. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: list[str | tuple[str | T_UNSET, ...] 
| T_UNSET] = [] - spos: int | None = None - - def _fetch(c: deque[V]) -> V | T_UNSET: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return UNSET - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) # type: ignore[arg-type] - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(UNSET) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def _split_opt(opt: str) -> tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def _normalize_opt(opt: str, ctx: Context | None) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = _split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -class _Option: - def __init__( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes: set[str] = set() - - for opt in opts: - prefix, value = _split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: t.Any, state: _ParsingState) -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class _Argument: - def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: str | cabc.Sequence[str | None] | None | T_UNSET, - state: _ParsingState, - ) -> None: - if self.nargs > 1: - assert isinstance(value, cabc.Sequence) - holes = sum(1 for x in value if x is UNSET) - if holes == len(value): - value = UNSET - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - # We failed to collect any argument value so we consider the argument as unset. 
- if value == (): - value = UNSET - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class _ParsingState: - def __init__(self, rargs: list[str]) -> None: - self.opts: dict[str, t.Any] = {} - self.largs: list[str] = [] - self.rargs = rargs - self.order: list[CoreParameter] = [] - - -class _OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - - .. deprecated:: 8.2 - Will be removed in Click 9.0. - """ - - def __init__(self, ctx: Context | None = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args: bool = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options: bool = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: dict[str, _Option] = {} - self._long_opt: dict[str, _Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: list[_Argument] = [] - - def add_option( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [_normalize_opt(opt, self.ctx) for opt in opts] - option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - self._args.append(_Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: list[str] - ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. 
If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = _ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: _ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: _ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: str | None, state: _ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = UNSET - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: _ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = _normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. 
- if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = UNSET - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we recombine the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: _Option, state: _ParsingState - ) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE: - nargs = option.nargs - - value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = FLAG_NEEDS_VALUE - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = FLAG_NEEDS_VALUE - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: _ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = _normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. - if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) - - -def __getattr__(name: str) -> object: - import warnings - - if name in { - "OptionParser", - "Argument", - "Option", - "split_opt", - "normalize_opt", - "ParsingState", - }: - warnings.warn( - f"'parser.{name}' is deprecated and will be removed in Click 9.0." 
- " The old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return globals()[f"_{name}"] - - if name == "split_arg_string": - from .shell_completion import split_arg_string - - warnings.warn( - "Importing 'parser.split_arg_string' is deprecated, it will only be" - " available in 'shell_completion' in Click 9.0.", - DeprecationWarning, - stacklevel=2, - ) - return split_arg_string - - raise AttributeError(name) diff --git a/venv/Lib/site-packages/click/py.typed b/venv/Lib/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/click/shell_completion.py b/venv/Lib/site-packages/click/shell_completion.py deleted file mode 100644 index 8f1564c..0000000 --- a/venv/Lib/site-packages/click/shell_completion.py +++ /dev/null @@ -1,667 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .utils import echo - - -def shell_complete( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: str | None = None, - **kwargs: t.Any, - ) -> None: - self.value: t.Any = value - self.type: str = type - self.help: str | None = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -# See ZshComplete.format_completion below, and issue #2703, before -# changing this script. -# -# (TL;DR: _describe is picky about the format, but this Zsh script snippet -# is already widely deployed. So freeze this script, and use clever-ish -# handling of colons in ZshComplet.format_completion.) -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -if [[ $zsh_eval_context[-1] == loadautofunc ]]; then - # autoload from fpath, call function directly - %(complete_func)s "$@" -else - # eval/source/. command, register function for later - compdef %(complete_func)s %(prog_name)s -fi -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. 
- """ - - def __init__( - self, - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. - """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> tuple[list[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - @staticmethod - def _check_version() -> None: - import shutil - import subprocess - - bash_exe = shutil.which("bash") - - if bash_exe is None: - match = None - else: - output = subprocess.run( - [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], - stdout=subprocess.PIPE, - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - echo( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ), - err=True, - ) - else: - echo( - _("Couldn't detect Bash version, shell completion is not supported."), - err=True, - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - help_ = item.help or "_" - # The zsh completion script uses `_describe` on items with help - # texts (which splits the item help from the item value at the - # first unescaped colon) and `compadd` on items without help - # text (which uses the item value as-is and does not support - # colon escaping). So escape colons in the item value if and - # only if the item help is not the sentinel "_" value, as used - # by the completion script. - # - # (The zsh completion script is potentially widely deployed, and - # thus harder to fix than this method.) - # - # See issue #1812 and issue #2703 for further context. - value = item.value.replace(":", r"\:") if help_ != "_" else item.value - return f"{item.type}\n{value}\n{help_}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - if incomplete: - incomplete = split_arg_string(incomplete)[0] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]") - - -_available_shells: dict[str, type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: ShellCompleteType, name: str | None = None -) -> ShellCompleteType: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - return cls - - -def get_completion_class(shell: str) -> type[ShellComplete] | None: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. 
- - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def split_arg_string(string: str) -> list[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - - .. versionchanged:: 8.2 - Moved to ``shell_completion`` from ``parser``. - """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. - """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - # Will be None if expose_value is False. - value = ctx.params.get(param.name) - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - break - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - args: list[str], -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. 
- """ - ctx_args["resilient_parsing"] = True - with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx: - args = ctx._protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, Group): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, args, parent=ctx, resilient_parsing=True - ) as sub_ctx: - ctx = sub_ctx - args = ctx._protected_args + ctx.args - else: - sub_ctx = ctx - - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) as sub_sub_ctx: - sub_ctx = sub_sub_ctx - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx._protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: list[str], incomplete: str -) -> tuple[Command | Parameter, str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. - - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. 
- return ctx.command, incomplete diff --git a/venv/Lib/site-packages/click/termui.py b/venv/Lib/site-packages/click/termui.py deleted file mode 100644 index 2e98a07..0000000 --- a/venv/Lib/site-packages/click/termui.py +++ /dev/null @@ -1,883 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import inspect -import io -import itertools -import sys -import typing as t -from contextlib import AbstractContextManager -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Any | None = None, - show_choices: bool = True, - type: ParamType | None = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name - - return default - - -def prompt( - text: str, - default: t.Any | None = None, - hide_input: bool = False, - confirmation_prompt: bool | str = False, - type: ParamType | t.Any | None = None, - value_proc: t.Callable[[str], t.Any] | None = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. 
- :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionchanged:: 8.3.1 - A space is no longer appended to the prompt. - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text[:-1], nl=False, err=err) - # Echo the last character to stdout to work around an issue where - # readline causes backspace to clear the whole line. - return f(text[-1:]) - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: bool | None = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.3.1 - A space is no longer appended to the prompt. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. 
- """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt[:-1], nl=False, err=err) - # Echo the last character to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(prompt[-1:]).lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str, - color: bool | None = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast("cabc.Iterable[str]", text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -@t.overload -def progressbar( - *, - length: int, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[int]: ... - - -@t.overload -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: ... 
- - -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param hidden: hide the progressbar. Defaults to ``False``. When no tty is - detected, it will only print the progressbar label. Setting this to - ``False`` also disables that. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. 
- :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionadded:: 8.2 - The ``hidden`` argument. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - The ``update_min_steps`` parameter. - - .. versionadded:: 4.0 - The ``color`` parameter and ``update`` method. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - hidden=hidden, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. 
versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - - # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor - echo("\033[2J\033[1;1H", nl=False) - - -def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: int | tuple[int, int, int] | str | None = None, - bg: int | tuple[int, int, int] | str | None = None, - bold: bool | None = None, - dim: bool | None = None, - underline: bool | None = None, - overline: bool | None = None, - italic: bool | None = None, - blink: bool | None = None, - reverse: bool | None = None, - strikethrough: bool | None = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. - - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. 
versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Any | None = None, - file: t.IO[t.AnyStr] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -@t.overload -def edit( - text: bytes | bytearray, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = False, - extension: str = ".txt", -) -> bytes | None: ... - - -@t.overload -def edit( - text: str, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", -) -> str | None: ... - - -@t.overload -def edit( - text: None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> None: ... - - -def edit( - text: str | bytes | bytearray | None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> str | bytes | bytearray | None: - r"""Edits the given text in the defined editor. 
If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. If the editor supports - editing multiple files at once, a sequence of files may be - passed as well. Invoke `click.file` once per file instead - if multiple files cannot be managed at once or editing the - files serially is desired. - - .. versionchanged:: 8.2.0 - ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str`` - if the ``editor`` supports editing multiple files at once. - - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - if isinstance(filename, str): - filename = (filename,) - - ed.edit_files(filenames=filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. -_getchar: t.Callable[[bool], str] | None = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. 
The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> AbstractContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: str | None = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/venv/Lib/site-packages/click/testing.py b/venv/Lib/site-packages/click/testing.py deleted file mode 100644 index f6f60b8..0000000 --- a/venv/Lib/site-packages/click/testing.py +++ /dev/null @@ -1,577 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import io -import os -import shlex -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import _compat -from . import formatting -from . import termui -from . 
import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from _typeshed import ReadableBuffer - - from .core import Command - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> list[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> cabc.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class BytesIOCopy(io.BytesIO): - """Patch ``io.BytesIO`` to let the written stream be copied to another. - - .. versionadded:: 8.2 - """ - - def __init__(self, copy_to: io.BytesIO) -> None: - super().__init__() - self.copy_to = copy_to - - def flush(self) -> None: - super().flush() - self.copy_to.flush() - - def write(self, b: ReadableBuffer) -> int: - self.copy_to.write(b) - return super().write(b) - - -class StreamMixer: - """Mixes `<stdout>` and `<stderr>` streams. - - The result is available in the ``output`` attribute. - - .. versionadded:: 8.2 - """ - - def __init__(self) -> None: - self.output: io.BytesIO = io.BytesIO() - self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output) - self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output) - - def __del__(self) -> None: - """ - Guarantee that embedded file-like objects are closed in a - predictable order, protecting against races between - self.output being closed and other streams being flushed on close - - .. versionadded:: 8.2.2 - """ - self.stderr.close() - self.stdout.close() - self.output.close() - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - -def make_input_stream( - input: str | bytes | t.IO[t.Any] | None, charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast("t.IO[t.Any]", input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(input) - - -class Result: - """Holds the captured result of an invoked CLI script. - - :param runner: The runner that created the result - :param stdout_bytes: The standard output as bytes. - :param stderr_bytes: The standard error as bytes. - :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the - user would see it in its terminal. - :param return_value: The value returned from the invoked command. - :param exit_code: The exit code as integer.
- :param exception: The exception that happened if one did. - :param exc_info: Exception information (exception type, exception instance, - traceback type). - - .. versionchanged:: 8.2 - ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and - ``mix_stderr`` has been removed. - - .. versionadded:: 8.0 - Added ``return_value``. - """ - - def __init__( - self, - runner: CliRunner, - stdout_bytes: bytes, - stderr_bytes: bytes, - output_bytes: bytes, - return_value: t.Any, - exit_code: int, - exception: BaseException | None, - exc_info: tuple[type[BaseException], BaseException, TracebackType] - | None = None, - ): - self.runner = runner - self.stdout_bytes = stdout_bytes - self.stderr_bytes = stderr_bytes - self.output_bytes = output_bytes - self.return_value = return_value - self.exit_code = exit_code - self.exception = exception - self.exc_info = exc_info - - @property - def output(self) -> str: - """The terminal output as unicode string, as the user would see it. - - .. versionchanged:: 8.2 - No longer a proxy for ``self.stdout``. Now has its own independent stream - that is mixing `<stdout>` and `<stderr>`, in the order they were written. - """ - return self.output_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string. - - .. versionchanged:: 8.2 - No longer raise an exception, always returns the `<stderr>` string. - """ - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from `<stdin>` writes - to `<stdout>`. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param catch_exceptions: Whether to catch any exceptions other than - ``SystemExit`` when running :meth:`~CliRunner.invoke`. - - .. versionchanged:: 8.2 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 8.2 - ``mix_stderr`` parameter has been removed. - """ - - def __init__( - self, - charset: str = "utf-8", - env: cabc.Mapping[str, str | None] | None = None, - echo_stdin: bool = False, - catch_exceptions: bool = True, - ) -> None: - self.charset = charset - self.env: cabc.Mapping[str, str | None] = env or {} - self.echo_stdin = echo_stdin - self.catch_exceptions = catch_exceptions - - def get_default_prog_name(self, cli: Command) -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set.
- """ - return cli.name or "root" - - def make_env( - self, overrides: cabc.Mapping[str, str | None] | None = None - ) -> cabc.Mapping[str, str | None]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - color: bool = False, - ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up `<stdin>` with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into `sys.stdin`. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - An additional output stream is returned, which is a mix of - `<stdout>` and `<stderr>` streams. - - .. versionchanged:: 8.2 - Always returns the `<stderr>` stream. - - .. versionchanged:: 8.0 - `<stderr>` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - stream_mixer = StreamMixer() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="<stdin>", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early.
- text_input._CHUNK_SIZE = 1 # type: ignore - - sys.stdout = _NamedTextIOWrapper( - stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w" - ) - - sys.stderr = _NamedTextIOWrapper( - stream_mixer.stderr, - encoding=self.charset, - name="<stderr>", - mode="w", - errors="backslashreplace", - ) - - @_pause_echo(echo_input) # type: ignore - def visible_input(prompt: str | None = None) -> str: - sys.stdout.write(prompt or "") - try: - val = next(text_input).rstrip("\r\n") - except StopIteration as e: - raise EOFError() from e - sys.stdout.write(f"{val}\n") - sys.stdout.flush() - return val - - @_pause_echo(echo_input) # type: ignore - def hidden_input(prompt: str | None = None) -> str: - sys.stdout.write(f"{prompt or ''}\n") - sys.stdout.flush() - try: - return next(text_input).rstrip("\r\n") - except StopIteration as e: - raise EOFError() from e - - @_pause_echo(echo_input) # type: ignore - def _getchar(echo: bool) -> str: - char = sys.stdin.read(1) - - if echo: - sys.stdout.write(char) - - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi( - stream: t.IO[t.Any] | None = None, color: bool | None = None - ) -> bool: - if color is None: - return not default_color - return not color - - old_visible_prompt_func = termui.visible_prompt_func - old_hidden_prompt_func = termui.hidden_prompt_func - old__getchar_func = termui._getchar - old_should_strip_ansi = utils.should_strip_ansi # type: ignore - old__compat_should_strip_ansi = _compat.should_strip_ansi - termui.visible_prompt_func = visible_input - termui.hidden_prompt_func = hidden_input - termui._getchar = _getchar - utils.should_strip_ansi = should_strip_ansi # type: ignore - _compat.should_strip_ansi = should_strip_ansi - - old_env = {} - try: - for key, value in env.items(): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output) - finally: - for key, value in old_env.items(): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - termui.visible_prompt_func = old_visible_prompt_func - termui.hidden_prompt_func = old_hidden_prompt_func - termui._getchar = old__getchar_func - utils.should_strip_ansi = old_should_strip_ansi # type: ignore - _compat.should_strip_ansi = old__compat_should_strip_ansi - formatting.FORCED_WIDTH = old_forced_width - - def invoke( - self, - cli: Command, - args: str | cabc.Sequence[str] | None = None, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - catch_exceptions: bool | None = None, - color: bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. - - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``.
If :data:`None`, the value - from :class:`CliRunner` is used. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - The result object has the ``output_bytes`` attribute with - the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would - see it in its terminal. - - .. versionchanged:: 8.2 - The result object always returns the ``stderr_bytes`` stream. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - if catch_exceptions is None: - catch_exceptions = self.catch_exceptions - - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: BaseException | None = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast("int | t.Any | None", e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - sys.stderr.flush() - stdout = outstreams[0].getvalue() - stderr = outstreams[1].getvalue() - output = outstreams[2].getvalue() - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - output_bytes=output, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: str | os.PathLike[str] | None = None - ) -> cabc.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) - os.chdir(dt) - - try: - yield dt - finally: - os.chdir(cwd) - - if temp_dir is None: - import shutil - - try: - shutil.rmtree(dt) - except OSError: - pass diff --git a/venv/Lib/site-packages/click/types.py b/venv/Lib/site-packages/click/types.py deleted file mode 100644 index e71c1c2..0000000 --- a/venv/Lib/site-packages/click/types.py +++ /dev/null @@ -1,1209 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import os -import stat -import sys -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import format_filename -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - -ParamTypeValue = t.TypeVar("ParamTypeValue") - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[str | None] = None - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None: - """Optionally might return extra information about a missing - parameter. - - .. 
versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). - - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> cabc.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.NoReturn: - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name: str = func.__name__ - self.func = func - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = sys.getfilesystemencoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType, t.Generic[ParamTypeValue]): - """The choice type allows a value to be checked against a fixed set - of supported values. - - You may pass any iterable value which will be converted to a tuple - and thus will only be iterated once. - - The resulting value will always be one of the originally passed choices. - See :meth:`normalize_choice` for more info on the mapping of strings - to choices. See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - - .. versionchanged:: 8.2.0 - Non-``str`` ``choices`` are now supported. It can additionally be any - iterable. Before you were not recommended to pass anything but a list or - tuple. - - .. versionadded:: 8.2.0 - Choice normalization can be overridden via :meth:`normalize_choice`. - """ - - name = "choice" - - def __init__( - self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True - ) -> None: - self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices) - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def _normalized_mapping( - self, ctx: Context | None = None - ) -> cabc.Mapping[ParamTypeValue, str]: - """ - Returns mapping where keys are the original choices and the values are - the normalized values that are accepted via the command line. - - This is a simple wrapper around :meth:`normalize_choice`, use that - instead which is supported. - """ - return { - choice: self.normalize_choice( - choice=choice, - ctx=ctx, - ) - for choice in self.choices - } - - def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str: - """ - Normalize a choice value, used to map a passed string to a choice. - Each choice must have a unique normalized value. 
- - By default uses :meth:`Context.token_normalize_func` and if not case - sensitive, convert it to a casefolded value. - - .. versionadded:: 8.2.0 - """ - normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice) - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(normed_value) - - if not self.case_sensitive: - normed_value = normed_value.casefold() - - return normed_value - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - if param.param_type_name == "option" and not param.show_choices: # type: ignore - choice_metavars = [ - convert_type(type(choice)).name.upper() for choice in self.choices - ] - choices_str = "|".join([*dict.fromkeys(choice_metavars)]) - else: - choices_str = "|".join( - [str(i) for i in self._normalized_mapping(ctx=ctx).values()] - ) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. - return f"[{choices_str}]" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str: - """ - Message shown when no choice is passed. - - .. versionchanged:: 8.2.0 Added ``ctx`` argument. - """ - return _("Choose from:\n\t{choices}").format( - choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values()) - ) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> ParamTypeValue: - """ - For a given value from the parser, normalize it and find its - matching normalized value in the list of choices. Then return the - matched "original" choice. - """ - normed_value = self.normalize_choice(choice=value, ctx=ctx) - normalized_mapping = self._normalized_mapping(ctx=ctx) - - try: - return next( - original - for original, normalized in normalized_mapping.items() - if normalized == normed_value - ) - except StopIteration: - self.fail( - self.get_invalid_choice_message(value=value, ctx=ctx), - param=param, - ctx=ctx, - ) - - def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str: - """Get the error message when the given choice is invalid. - - :param value: The invalid value. - - .. versionadded:: 8.2 - """ - choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values())) - return ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. 
- - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. - """ - - name = "datetime" - - def __init__(self, formats: cabc.Sequence[str] | None = None): - self.formats: cabc.Sequence[str] = formats or [ - "%Y-%m-%d", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%d %H:%M:%S", - ] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[type[t.Any]] - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, 
range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. - """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: t.Literal[1, -1], open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - if not open: - return bound - - # Could use math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - bool_states: dict[str, bool] = { - "1": True, - "0": False, - "yes": True, - "no": False, - "true": True, - "false": False, - "on": True, - "off": False, - "t": True, - "f": False, - "y": True, - "n": False, - # Absence of value is considered False. - "": False, - } - """A mapping of string values to boolean states. - - Mapping is inspired by :py:attr:`configparser.ConfigParser.BOOLEAN_STATES` - and extends it. - - .. caution:: - String values are lower-cased, as the ``str_to_bool`` comparison function - below is case-insensitive. - - .. warning:: - The mapping is not exhaustive, and does not cover all possible boolean strings - representations. It will remains as it is to avoid endless bikeshedding. - - Future work my be considered to make this mapping user-configurable from public - API. - """ - - @staticmethod - def str_to_bool(value: str | bool) -> bool | None: - """Convert a string to a boolean value. - - If the value is already a boolean, it is returned as-is. If the value is a - string, it is stripped of whitespaces and lower-cased, then checked against - the known boolean states pre-defined in the `BoolParamType.bool_states` mapping - above. - - Returns `None` if the value does not match any known boolean state. - """ - if isinstance(value, bool): - return value - return BoolParamType.bool_states.get(value.strip().lower()) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> bool: - normalized = self.str_to_bool(value) - if normalized is None: - self.fail( - _( - "{value!r} is not a valid boolean. Recognized values: {states}" - ).format(value=value, states=", ".join(sorted(self.bool_states))), - param, - ctx, - ) - return normalized - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Files can also be opened atomically in which case all writes go into a - separate file in the same folder and upon completion the file will - be moved over to the original location. This is useful if a file - regularly read by other users is modified. 
- - See :ref:`file-args` for more information. - - .. versionchanged:: 2.0 - Added the ``atomic`` parameter. - """ - - name = "filename" - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool | None = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool: - if self.lazy is not None: - return self.lazy - if os.fspath(value) == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, - value: str | os.PathLike[str] | t.IO[t.Any], - param: Parameter | None, - ctx: Context | None, - ) -> t.IO[t.Any]: - if _is_file_like(value): - return value - - value = t.cast("str | os.PathLike[str]", value) - - try: - lazy = self.resolve_lazy_flag(value) - - if lazy: - lf = LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - if ctx is not None: - ctx.call_on_close(lf.close_intelligently) - - return t.cast("t.IO[t.Any]", lf) - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: - self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]: - return hasattr(value, "read") or hasattr(value, "write") - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. 
- :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. - :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``path_type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: type[t.Any] | None = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name: str = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result( - self, value: str | os.PathLike[str] - ) -> str | bytes | os.PathLike[str]: - if self.type is not None and not isinstance(value, self.type): - if self.type is str: - return os.fsdecode(value) - elif self.type is bytes: - return os.fsencode(value) - else: - return t.cast("os.PathLike[str]", self.type(value)) - - return value - - def convert( - self, - value: str | os.PathLike[str], - param: Parameter | None, - ctx: Context | None, - ) -> str | bytes | os.PathLike[str]: - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - rv = os.path.realpath(rv) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} {filename!r} is a directory.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - return 
self.coerce_path_result(rv) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None: - self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple( - ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False) - ) - - -def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. 
- pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. -#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. -UUID = UUIDParameterType() - - -class OptionHelpExtra(t.TypedDict, total=False): - envvars: tuple[str, ...] - default: str - range: str - required: str diff --git a/venv/Lib/site-packages/click/utils.py b/venv/Lib/site-packages/click/utils.py deleted file mode 100644 index beae26f..0000000 --- a/venv/Lib/site-packages/click/utils.py +++ /dev/null @@ -1,627 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType -from types import TracebackType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]: - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None: - try: - return func(*args, **kwargs) - except Exception: - pass - return None - - return update_wrapper(wrapper, func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(sys.getfilesystemencoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. 
- if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. - total_length += len("...") - - # remove words until the length is short enough - while i > 0: - total_length -= len(words[i]) + (i > 0) - - if total_length <= max_length: - break - - i -= 1 - - return " ".join(words[:i]) + "..." - - -class LazyFile: - """A lazy file works like a regular file but it does not fully open - the file but it does perform some basic checks early to see if the - filename parameter does make sense. This is useful for safely opening - files for writing. - """ - - def __init__( - self, - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - atomic: bool = False, - ): - self.name: str = os.fspath(filename) - self.mode = mode - self.encoding = encoding - self.errors = errors - self.atomic = atomic - self._f: t.IO[t.Any] | None - self.should_close: bool - - if self.name == "-": - self._f, self.should_close = open_stream(filename, mode, encoding, errors) - else: - if "r" in mode: - # Open and close the file in case we're opening it for - # reading so that we can catch at least some errors in - # some cases early. - open(filename, mode).close() - self._f = None - self.should_close = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self.open(), name) - - def __repr__(self) -> str: - if self._f is not None: - return repr(self._f) - return f"" - - def open(self) -> t.IO[t.Any]: - """Opens the file if it's not yet open. This call might fail with - a :exc:`FileError`. Not handling this error will produce an error - that Click shows. - """ - if self._f is not None: - return self._f - try: - rv, self.should_close = open_stream( - self.name, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - except OSError as e: - from .exceptions import FileError - - raise FileError(self.name, hint=e.strerror) from e - self._f = rv - return rv - - def close(self) -> None: - """Closes the underlying file, no matter what.""" - if self._f is not None: - self._f.close() - - def close_intelligently(self) -> None: - """This function only closes the file if it was opened by the lazy - file wrapper. For instance this will never close stdin. 
- """ - if self.should_close: - self.close() - - def __enter__(self) -> LazyFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close_intelligently() - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - self.open() - return iter(self._f) # type: ignore - - -class KeepOpenFile: - def __init__(self, file: t.IO[t.Any]) -> None: - self._file: t.IO[t.Any] = file - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._file, name) - - def __enter__(self) -> KeepOpenFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - pass - - def __repr__(self) -> str: - return repr(self._file) - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - return iter(self._file) - - -def echo( - message: t.Any | None = None, - file: t.IO[t.Any] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, -) -> None: - """Print a message and newline to stdout or a file. This should be - used instead of :func:`print` because it provides better support - for different data, files, and environments. - - Compared to :func:`print`, this does the following: - - - Ensures that the output encoding is not misconfigured on Linux. - - Supports Unicode in the Windows console. - - Supports writing to binary outputs, and supports writing bytes - to text outputs. - - Supports colors and styles on Windows. - - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. - :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - return - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: str | bytes | bytearray | None = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. 
- if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file, color) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: t.Literal["stdin", "stdout", "stderr"], - encoding: str | None = None, - errors: str | None = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. - """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO[t.Any]: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name or Path of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast( - "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic) - ) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast("t.IO[t.Any]", KeepOpenFile(f)) - - return f - - -def format_filename( - filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], - shorten: bool = False, -) -> str: - """Format a filename as a string for display. 
Ensures the filename can be - displayed by replacing any invalid bytes or surrogate escapes in the name - with the replacement character ``�``. - - Invalid bytes or surrogate escapes will raise an error when written to a - stream with ``errors="strict"``. This will typically happen with ``stdout`` - when the locale is something like ``en_GB.UTF-8``. - - Many scenarios *are* safe to write surrogates though, due to PEP 538 and - PEP 540, including: - - - Writing to ``stderr``, which uses ``errors="backslashreplace"``. - - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens - stdout and stderr with ``errors="surrogateescape"``. - - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. - - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. - Python opens stdout and stderr with ``errors="surrogateescape"``. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - else: - filename = os.fspath(filename) - - if isinstance(filename, bytes): - filename = filename.decode(sys.getfilesystemencoding(), "replace") - else: - filename = filename.encode("utf-8", "surrogateescape").decode( - "utf-8", "replace" - ) - - return filename - - -def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. - - To give you an idea, for an app called ``"Foo Bar"``, something like - the following folders could be returned: - - Mac OS X: - ``~/Library/Application Support/Foo Bar`` - Mac OS X (POSIX): - ``~/.foo-bar`` - Unix: - ``~/.config/foo-bar`` - Unix (POSIX): - ``~/.foo-bar`` - Windows (roaming): - ``C:\Users\\AppData\Roaming\Foo Bar`` - Windows (not roaming): - ``C:\Users\\AppData\Local\Foo Bar`` - - .. versionadded:: 2.0 - - :param app_name: the application name. This should be properly capitalized - and can contain whitespace. - :param roaming: controls if the folder should be roaming or not on Windows. - Has no effect otherwise. - :param force_posix: if this is set to `True` then on any POSIX system the - folder will be stored in the home folder with a leading - dot instead of the XDG config home or darwin's - application support folder. - """ - if WIN: - key = "APPDATA" if roaming else "LOCALAPPDATA" - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser("~") - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) - if sys.platform == "darwin": - return os.path.join( - os.path.expanduser("~/Library/Application Support"), app_name - ) - return os.path.join( - os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), - _posixify(app_name), - ) - - -class PacifyFlushWrapper: - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. 
- """ - - def __init__(self, wrapped: t.IO[t.Any]) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: str | None = None, _main: ModuleType | None = None -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if _main is None: - _main = sys.modules["__main__"] - - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - # It is set to "" inside a Shiv or PEX zipapp. - if getattr(_main, "__package__", None) in {None, ""} or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: cabc.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> list[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. 
versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/venv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER b/venv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA b/venv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA deleted file mode 100644 index a1b5c57..0000000 --- a/venv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA +++ /dev/null @@ -1,441 +0,0 @@ -Metadata-Version: 2.1 -Name: colorama -Version: 0.4.6 -Summary: Cross-platform colored terminal text. -Project-URL: Homepage, https://github.com/tartley/colorama -Author-email: Jonathan Hartley -License-File: LICENSE.txt -Keywords: ansi,color,colour,crossplatform,terminal,text,windows,xplatform -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Terminals -Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 -Description-Content-Type: text/x-rst - -.. image:: https://img.shields.io/pypi/v/colorama.svg - :target: https://pypi.org/project/colorama/ - :alt: Latest Version - -.. image:: https://img.shields.io/pypi/pyversions/colorama.svg - :target: https://pypi.org/project/colorama/ - :alt: Supported Python versions - -.. image:: https://github.com/tartley/colorama/actions/workflows/test.yml/badge.svg - :target: https://github.com/tartley/colorama/actions/workflows/test.yml - :alt: Build Status - -Colorama -======== - -Makes ANSI escape character sequences (for producing colored terminal text and -cursor positioning) work under MS Windows. - -.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif - :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama¤cy_code=USD - :alt: Donate with Paypal - -`PyPI for releases `_ | -`Github for source `_ | -`Colorama for enterprise on Tidelift `_ - -If you find Colorama useful, please |donate| to the authors. Thank you! - -Installation ------------- - -Tested on CPython 2.7, 3.7, 3.8, 3.9 and 3.10 and Pypy 2.7 and 3.8. - -No requirements other than the standard library. - -.. code-block:: bash - - pip install colorama - # or - conda install -c anaconda colorama - -Description ------------ - -ANSI escape character sequences have long been used to produce colored terminal -text and cursor positioning on Unix and Macs. 
Colorama makes this work on -Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which -would appear as gobbledygook in the output), and converting them into the -appropriate win32 calls to modify the state of the terminal. On other platforms, -Colorama does nothing. - -This has the upshot of providing a simple cross-platform API for printing -colored terminal text from Python, and has the happy side-effect that existing -applications or libraries which use ANSI sequences to produce colored output on -Linux or Macs can now also work on Windows, simply by calling -``colorama.just_fix_windows_console()`` (since v0.4.6) or ``colorama.init()`` -(all versions, but may have other side-effects – see below). - -An alternative approach is to install ``ansi.sys`` on Windows machines, which -provides the same behaviour for all applications running in terminals. Colorama -is intended for situations where that isn't easy (e.g., maybe your app doesn't -have an installer.) - -Demo scripts in the source code repository print some colored text using -ANSI sequences. Compare their output under Gnome-terminal's built in ANSI -handling, versus on Windows Command-Prompt using Colorama: - -.. image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png - :width: 661 - :height: 357 - :alt: ANSI sequences on Ubuntu under gnome-terminal. - -.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png - :width: 668 - :height: 325 - :alt: Same ANSI sequences on Windows, using Colorama. - -These screenshots show that, on Windows, Colorama does not support ANSI 'dim -text'; it looks the same as 'normal text'. - -Usage ------ - -Initialisation -.............. - -If the only thing you want from Colorama is to get ANSI escapes to work on -Windows, then run: - -.. code-block:: python - - from colorama import just_fix_windows_console - just_fix_windows_console() - -If you're on a recent version of Windows 10 or better, and your stdout/stderr -are pointing to a Windows console, then this will flip the magic configuration -switch to enable Windows' built-in ANSI support. - -If you're on an older version of Windows, and your stdout/stderr are pointing to -a Windows console, then this will wrap ``sys.stdout`` and/or ``sys.stderr`` in a -magic file object that intercepts ANSI escape sequences and issues the -appropriate Win32 calls to emulate them. - -In all other circumstances, it does nothing whatsoever. Basically the idea is -that this makes Windows act like Unix with respect to ANSI escape handling. - -It's safe to call this function multiple times. It's safe to call this function -on non-Windows platforms, but it won't do anything. It's safe to call this -function when one or both of your stdout/stderr are redirected to a file – it -won't do anything to those streams. - -Alternatively, you can use the older interface with more features (but also more -potential footguns): - -.. code-block:: python - - from colorama import init - init() - -This does the same thing as ``just_fix_windows_console``, except for the -following differences: - -- It's not safe to call ``init`` multiple times; you can end up with multiple - layers of wrapping and broken ANSI support. - -- Colorama will apply a heuristic to guess whether stdout/stderr support ANSI, - and if it thinks they don't, then it will wrap ``sys.stdout`` and - ``sys.stderr`` in a magic file object that strips out ANSI escape sequences - before printing them. 
This happens on all platforms, and can be convenient if - you want to write your code to emit ANSI escape sequences unconditionally, and - let Colorama decide whether they should actually be output. But note that - Colorama's heuristic is not particularly clever. - -- ``init`` also accepts explicit keyword args to enable/disable various - functionality – see below. - -To stop using Colorama before your program exits, simply call ``deinit()``. -This will restore ``stdout`` and ``stderr`` to their original values, so that -Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is -cheaper than calling ``init()`` again (but does the same thing). - -Most users should depend on ``colorama >= 0.4.6``, and use -``just_fix_windows_console``. The old ``init`` interface will be supported -indefinitely for backwards compatibility, but we don't plan to fix any issues -with it, also for backwards compatibility. - -Colored Output -.............. - -Cross-platform printing of colored text can then be done using Colorama's -constant shorthand for ANSI escape sequences. These are deliberately -rudimentary, see below. - -.. code-block:: python - - from colorama import Fore, Back, Style - print(Fore.RED + 'some red text') - print(Back.GREEN + 'and with a green background') - print(Style.DIM + 'and in dim text') - print(Style.RESET_ALL) - print('back to normal now') - -...or simply by manually printing ANSI sequences from your own code: - -.. code-block:: python - - print('\033[31m' + 'some red text') - print('\033[39m') # and reset to default color - -...or, Colorama can be used in conjunction with existing ANSI libraries -such as the venerable `Termcolor `_ -the fabulous `Blessings `_, -or the incredible `_Rich `_. - -If you wish Colorama's Fore, Back and Style constants were more capable, -then consider using one of the above highly capable libraries to generate -colors, etc, and use Colorama just for its primary purpose: to convert -those ANSI sequences to also work on Windows: - -SIMILARLY, do not send PRs adding the generation of new ANSI types to Colorama. -We are only interested in converting ANSI codes to win32 API calls, not -shortcuts like the above to generate ANSI characters. - -.. code-block:: python - - from colorama import just_fix_windows_console - from termcolor import colored - - # use Colorama to make Termcolor work on Windows too - just_fix_windows_console() - - # then use Termcolor for all colored text output - print(colored('Hello, World!', 'green', 'on_red')) - -Available formatting constants are:: - - Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET. - Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET. - Style: DIM, NORMAL, BRIGHT, RESET_ALL - -``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will -perform this reset automatically on program exit. - -These are fairly well supported, but not part of the standard:: - - Fore: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX - Back: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX - -Cursor Positioning -.................. - -ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for -an example of how to generate them. - -Init Keyword Args -................. - -``init()`` accepts some ``**kwargs`` to override default behaviour. 
- -init(autoreset=False): - If you find yourself repeatedly sending reset sequences to turn off color - changes at the end of every print, then ``init(autoreset=True)`` will - automate that: - - .. code-block:: python - - from colorama import init - init(autoreset=True) - print(Fore.RED + 'some red text') - print('automatically back to default color again') - -init(strip=None): - Pass ``True`` or ``False`` to override whether ANSI codes should be - stripped from the output. The default behaviour is to strip if on Windows - or if output is redirected (not a tty). - -init(convert=None): - Pass ``True`` or ``False`` to override whether to convert ANSI codes in the - output into win32 calls. The default behaviour is to convert if on Windows - and output is to a tty (terminal). - -init(wrap=True): - On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr`` - with proxy objects, which override the ``.write()`` method to do their work. - If this wrapping causes you problems, then this can be disabled by passing - ``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or - ``strip`` or ``convert`` are True. - - When wrapping is disabled, colored printing on non-Windows platforms will - continue to work as normal. To do cross-platform colored output, you can - use Colorama's ``AnsiToWin32`` proxy directly: - - .. code-block:: python - - import sys - from colorama import init, AnsiToWin32 - init(wrap=False) - stream = AnsiToWin32(sys.stderr).stream - - # Python 2 - print >>stream, Fore.BLUE + 'blue text on stderr' - - # Python 3 - print(Fore.BLUE + 'blue text on stderr', file=stream) - -Recognised ANSI Sequences -......................... - -ANSI sequences generally take the form:: - - ESC [ ; ... - -Where ```` is an integer, and ```` is a single letter. Zero or -more params are passed to a ````. If no params are passed, it is -generally synonymous with passing a single zero. No spaces exist in the -sequence; they have been inserted here simply to read more easily. - -The only ANSI sequences that Colorama converts into win32 calls are:: - - ESC [ 0 m # reset all (colors and brightness) - ESC [ 1 m # bright - ESC [ 2 m # dim (looks same as normal brightness) - ESC [ 22 m # normal brightness - - # FOREGROUND: - ESC [ 30 m # black - ESC [ 31 m # red - ESC [ 32 m # green - ESC [ 33 m # yellow - ESC [ 34 m # blue - ESC [ 35 m # magenta - ESC [ 36 m # cyan - ESC [ 37 m # white - ESC [ 39 m # reset - - # BACKGROUND - ESC [ 40 m # black - ESC [ 41 m # red - ESC [ 42 m # green - ESC [ 43 m # yellow - ESC [ 44 m # blue - ESC [ 45 m # magenta - ESC [ 46 m # cyan - ESC [ 47 m # white - ESC [ 49 m # reset - - # cursor positioning - ESC [ y;x H # position cursor at x across, y down - ESC [ y;x f # position cursor at x across, y down - ESC [ n A # move cursor n lines up - ESC [ n B # move cursor n lines down - ESC [ n C # move cursor n characters forward - ESC [ n D # move cursor n characters backward - - # clear the screen - ESC [ mode J # clear the screen - - # clear the line - ESC [ mode K # clear the line - -Multiple numeric params to the ``'m'`` command can be combined into a single -sequence:: - - ESC [ 36 ; 45 ; 1 m # bright cyan text on magenta background - -All other ANSI sequences of the form ``ESC [ ; ... `` -are silently stripped from the output on Windows. - -Any other form of ANSI sequence, such as single-character codes or alternative -initial characters, are not recognised or stripped. It would be cool to add -them though. 
Let me know if it would be useful for you, via the Issues on -GitHub. - -Status & Known Problems ------------------------ - -I've personally only tested it on Windows XP (CMD, Console2), Ubuntu -(gnome-terminal, xterm), and OS X. - -Some valid ANSI sequences aren't recognised. - -If you're hacking on the code, see `README-hacking.md`_. ESPECIALLY, see the -explanation there of why we do not want PRs that allow Colorama to generate new -types of ANSI codes. - -See outstanding issues and wish-list: -https://github.com/tartley/colorama/issues - -If anything doesn't work for you, or doesn't do what you expected or hoped for, -I'd love to hear about it on that issues list, would be delighted by patches, -and would be happy to grant commit access to anyone who submits a working patch -or two. - -.. _README-hacking.md: README-hacking.md - -License -------- - -Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see -LICENSE file. - -Professional support --------------------- - -.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png - :alt: Tidelift - :target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme - -.. list-table:: - :widths: 10 100 - - * - |tideliftlogo| - - Professional support for colorama is available as part of the - `Tidelift Subscription`_. - Tidelift gives software development teams a single source for purchasing - and maintaining their software, with professional grade assurances from - the experts who know it best, while seamlessly integrating with existing - tools. - -.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme - -Thanks ------- - -See the CHANGELOG for more thanks! - -* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5. -* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``, - providing a solution to issue #7's setuptools/distutils debate, - and other fixes. -* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``. -* Matthew McCormick for politely pointing out a longstanding crash on non-Win. -* Ben Hoyt, for a magnificent fix under 64-bit Windows. -* Jesse at Empty Square for submitting a fix for examples in the README. -* User 'jamessp', an observant documentation fix for cursor positioning. -* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7 - fix. -* Julien Stuyck, for wisely suggesting Python3 compatible updates to README. -* Daniel Griffith for multiple fabulous patches. -* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty - output. -* Roger Binns, for many suggestions, valuable feedback, & bug reports. -* Tim Golden for thought and much appreciated feedback on the initial idea. -* User 'Zearin' for updates to the README file. -* John Szakmeister for adding support for light colors -* Charles Merriam for adding documentation to demos -* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes -* Florian Bruhin for a fix when stdout or stderr are None -* Thomas Weininger for fixing ValueError on Windows -* Remi Rampin for better Github integration and fixes to the README file -* Simeon Visser for closing a file handle using 'with' and updating classifiers - to include Python 3.3 and 3.4 -* Andy Neff for fixing RESET of LIGHT_EX colors. -* Jonathan Hartley for the initial idea and implementation. 
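For quick reference, the usage pattern documented in the colorama README above reduces to a few lines. This is a minimal sketch of that documented pattern only; the constants and ``just_fix_windows_console`` come straight from the README, and the printed strings are purely illustrative.

.. code-block:: python

    # Minimal colorama usage per the README above: enable ANSI handling on
    # Windows (a no-op elsewhere), then print with the documented constants.
    from colorama import just_fix_windows_console, Fore, Back, Style

    just_fix_windows_console()

    print(Fore.RED + "some red text")
    print(Back.GREEN + "and with a green background")
    print(Style.RESET_ALL + "back to normal now")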
diff --git a/venv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD b/venv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD deleted file mode 100644 index d1eaa50..0000000 --- a/venv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD +++ /dev/null @@ -1,32 +0,0 @@ -colorama-0.4.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -colorama-0.4.6.dist-info/METADATA,sha256=e67SnrUMOym9sz_4TjF3vxvAV4T3aF7NyqRHHH3YEMw,17158 -colorama-0.4.6.dist-info/RECORD,, -colorama-0.4.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105 -colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491 -colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266 -colorama/__pycache__/__init__.cpython-312.pyc,, -colorama/__pycache__/ansi.cpython-312.pyc,, -colorama/__pycache__/ansitowin32.cpython-312.pyc,, -colorama/__pycache__/initialise.cpython-312.pyc,, -colorama/__pycache__/win32.cpython-312.pyc,, -colorama/__pycache__/winterm.cpython-312.pyc,, -colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 -colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128 -colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325 -colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75 -colorama/tests/__pycache__/__init__.cpython-312.pyc,, -colorama/tests/__pycache__/ansi_test.cpython-312.pyc,, -colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc,, -colorama/tests/__pycache__/initialise_test.cpython-312.pyc,, -colorama/tests/__pycache__/isatty_test.cpython-312.pyc,, -colorama/tests/__pycache__/utils.cpython-312.pyc,, -colorama/tests/__pycache__/winterm_test.cpython-312.pyc,, -colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839 -colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678 -colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741 -colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866 -colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079 -colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709 -colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181 -colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134 diff --git a/venv/Lib/site-packages/colorama-0.4.6.dist-info/REQUESTED b/venv/Lib/site-packages/colorama-0.4.6.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL b/venv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL deleted file mode 100644 index d79189f..0000000 --- a/venv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.11.1 -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any diff --git a/venv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt b/venv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 3105888..0000000 --- a/venv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2010 Jonathan Hartley -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holders, nor those of its contributors - may be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/colorama/__init__.py b/venv/Lib/site-packages/colorama/__init__.py deleted file mode 100644 index 383101c..0000000 --- a/venv/Lib/site-packages/colorama/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console -from .ansi import Fore, Back, Style, Cursor -from .ansitowin32 import AnsiToWin32 - -__version__ = '0.4.6' - diff --git a/venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 86aa408..0000000 Binary files a/venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-312.pyc b/venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-312.pyc deleted file mode 100644 index 037aa42..0000000 Binary files a/venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-312.pyc b/venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-312.pyc deleted file mode 100644 index 0770305..0000000 Binary files a/venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-312.pyc b/venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-312.pyc deleted file mode 100644 index 02eec1a..0000000 Binary files a/venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/__pycache__/win32.cpython-312.pyc b/venv/Lib/site-packages/colorama/__pycache__/win32.cpython-312.pyc deleted file mode 100644 index 9474ea4..0000000 Binary files a/venv/Lib/site-packages/colorama/__pycache__/win32.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-312.pyc b/venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-312.pyc deleted file mode 100644 index 2ce8379..0000000 Binary files a/venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/ansi.py b/venv/Lib/site-packages/colorama/ansi.py deleted file mode 100644 index 11ec695..0000000 --- a/venv/Lib/site-packages/colorama/ansi.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -''' -This module generates ANSI character codes to printing colors to terminals. -See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' - -CSI = '\033[' -OSC = '\033]' -BEL = '\a' - - -def code_to_chars(code): - return CSI + str(code) + 'm' - -def set_title(title): - return OSC + '2;' + title + BEL - -def clear_screen(mode=2): - return CSI + str(mode) + 'J' - -def clear_line(mode=2): - return CSI + str(mode) + 'K' - - -class AnsiCodes(object): - def __init__(self): - # the subclasses declare class attributes which are numbers. - # Upon instantiation we define instance attributes, which are the same - # as the class attributes but wrapped with the ANSI escape sequence - for name in dir(self): - if not name.startswith('_'): - value = getattr(self, name) - setattr(self, name, code_to_chars(value)) - - -class AnsiCursor(object): - def UP(self, n=1): - return CSI + str(n) + 'A' - def DOWN(self, n=1): - return CSI + str(n) + 'B' - def FORWARD(self, n=1): - return CSI + str(n) + 'C' - def BACK(self, n=1): - return CSI + str(n) + 'D' - def POS(self, x=1, y=1): - return CSI + str(y) + ';' + str(x) + 'H' - - -class AnsiFore(AnsiCodes): - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 - MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 90 - LIGHTRED_EX = 91 - LIGHTGREEN_EX = 92 - LIGHTYELLOW_EX = 93 - LIGHTBLUE_EX = 94 - LIGHTMAGENTA_EX = 95 - LIGHTCYAN_EX = 96 - LIGHTWHITE_EX = 97 - - -class AnsiBack(AnsiCodes): - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 - MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 100 - LIGHTRED_EX = 101 - LIGHTGREEN_EX = 102 - LIGHTYELLOW_EX = 103 - LIGHTBLUE_EX = 104 - LIGHTMAGENTA_EX = 105 - LIGHTCYAN_EX = 106 - LIGHTWHITE_EX = 107 - - -class AnsiStyle(AnsiCodes): - BRIGHT = 1 - DIM = 2 - NORMAL = 22 - RESET_ALL = 0 - -Fore = AnsiFore() -Back = AnsiBack() -Style = AnsiStyle() -Cursor = AnsiCursor() diff --git a/venv/Lib/site-packages/colorama/ansitowin32.py b/venv/Lib/site-packages/colorama/ansitowin32.py deleted file mode 100644 index abf209e..0000000 --- a/venv/Lib/site-packages/colorama/ansitowin32.py +++ /dev/null @@ -1,277 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import re -import sys -import os - -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL -from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle -from .win32 import windll, winapi_test - - -winterm = None -if windll is not None: - winterm = WinTerm() - - -class StreamWrapper(object): - ''' - Wraps a stream (such as stdout), acting as a transparent proxy for all - attribute access apart from method 'write()', which is delegated to our - Converter instance. 
- ''' - def __init__(self, wrapped, converter): - # double-underscore everything to prevent clashes with names of - # attributes on the wrapped stream object. - self.__wrapped = wrapped - self.__convertor = converter - - def __getattr__(self, name): - return getattr(self.__wrapped, name) - - def __enter__(self, *args, **kwargs): - # special method lookup bypasses __getattr__/__getattribute__, see - # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit - # thus, contextlib magic methods are not proxied via __getattr__ - return self.__wrapped.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.__wrapped.__exit__(*args, **kwargs) - - def __setstate__(self, state): - self.__dict__ = state - - def __getstate__(self): - return self.__dict__ - - def write(self, text): - self.__convertor.write(text) - - def isatty(self): - stream = self.__wrapped - if 'PYCHARM_HOSTED' in os.environ: - if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): - return True - try: - stream_isatty = stream.isatty - except AttributeError: - return False - else: - return stream_isatty() - - @property - def closed(self): - stream = self.__wrapped - try: - return stream.closed - # AttributeError in the case that the stream doesn't support being closed - # ValueError for the case that the stream has already been detached when atexit runs - except (AttributeError, ValueError): - return True - - -class AnsiToWin32(object): - ''' - Implements a 'write()' method which, on Windows, will strip ANSI character - sequences from the text, and if outputting to a tty, will convert them into - win32 function calls. - ''' - ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command - - def __init__(self, wrapped, convert=None, strip=None, autoreset=False): - # The wrapped stream (normally sys.stdout or sys.stderr) - self.wrapped = wrapped - - # should we reset colors to defaults after every .write() - self.autoreset = autoreset - - # create the proxy wrapping our output stream - self.stream = StreamWrapper(wrapped, self) - - on_windows = os.name == 'nt' - # We test if the WinAPI works, because even if we are on Windows - # we may be using a terminal that doesn't support the WinAPI - # (e.g. Cygwin Terminal). In this case it's up to the terminal - # to support the ANSI codes. - conversion_supported = on_windows and winapi_test() - try: - fd = wrapped.fileno() - except Exception: - fd = -1 - system_has_native_ansi = not on_windows or enable_vt_processing(fd) - have_tty = not self.stream.closed and self.stream.isatty() - need_conversion = conversion_supported and not system_has_native_ansi - - # should we strip ANSI sequences from our output? - if strip is None: - strip = need_conversion or not have_tty - self.strip = strip - - # should we should convert ANSI sequences into win32 calls? - if convert is None: - convert = need_conversion and have_tty - self.convert = convert - - # dict of ansi codes to win32 functions and parameters - self.win32_calls = self.get_win32_calls() - - # are we wrapping stderr? - self.on_stderr = self.wrapped is sys.stderr - - def should_wrap(self): - ''' - True if this class is actually needed. If false, then the output - stream will not be affected, nor will win32 calls be issued, so - wrapping stdout is not actually required. 
This will generally be - False on non-Windows platforms, unless optional functionality like - autoreset has been requested using kwargs to init() - ''' - return self.convert or self.strip or self.autoreset - - def get_win32_calls(self): - if self.convert and winterm: - return { - AnsiStyle.RESET_ALL: (winterm.reset_all, ), - AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), - AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), - AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), - AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), - AnsiFore.RED: (winterm.fore, WinColor.RED), - AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), - AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), - AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), - AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), - AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), - AnsiFore.WHITE: (winterm.fore, WinColor.GREY), - AnsiFore.RESET: (winterm.fore, ), - AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), - AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), - AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), - AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), - AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), - AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), - AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), - AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), - AnsiBack.BLACK: (winterm.back, WinColor.BLACK), - AnsiBack.RED: (winterm.back, WinColor.RED), - AnsiBack.GREEN: (winterm.back, WinColor.GREEN), - AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), - AnsiBack.BLUE: (winterm.back, WinColor.BLUE), - AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), - AnsiBack.CYAN: (winterm.back, WinColor.CYAN), - AnsiBack.WHITE: (winterm.back, WinColor.GREY), - AnsiBack.RESET: (winterm.back, ), - AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), - AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), - AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), - AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), - AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), - AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), - AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), - AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), - } - return dict() - - def write(self, text): - if self.strip or self.convert: - self.write_and_convert(text) - else: - self.wrapped.write(text) - self.wrapped.flush() - if self.autoreset: - self.reset_all() - - - def reset_all(self): - if self.convert: - self.call_win32('m', (0,)) - elif not self.strip and not self.stream.closed: - self.wrapped.write(Style.RESET_ALL) - - - def write_and_convert(self, text): - ''' - Write the given text to our wrapped stream, stripping any ANSI - sequences from the text, and optionally converting them into win32 - calls. 
- ''' - cursor = 0 - text = self.convert_osc(text) - for match in self.ANSI_CSI_RE.finditer(text): - start, end = match.span() - self.write_plain_text(text, cursor, start) - self.convert_ansi(*match.groups()) - cursor = end - self.write_plain_text(text, cursor, len(text)) - - - def write_plain_text(self, text, start, end): - if start < end: - self.wrapped.write(text[start:end]) - self.wrapped.flush() - - - def convert_ansi(self, paramstring, command): - if self.convert: - params = self.extract_params(command, paramstring) - self.call_win32(command, params) - - - def extract_params(self, command, paramstring): - if command in 'Hf': - params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) - while len(params) < 2: - # defaults: - params = params + (1,) - else: - params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) - if len(params) == 0: - # defaults: - if command in 'JKm': - params = (0,) - elif command in 'ABCD': - params = (1,) - - return params - - - def call_win32(self, command, params): - if command == 'm': - for param in params: - if param in self.win32_calls: - func_args = self.win32_calls[param] - func = func_args[0] - args = func_args[1:] - kwargs = dict(on_stderr=self.on_stderr) - func(*args, **kwargs) - elif command in 'J': - winterm.erase_screen(params[0], on_stderr=self.on_stderr) - elif command in 'K': - winterm.erase_line(params[0], on_stderr=self.on_stderr) - elif command in 'Hf': # cursor position - absolute - winterm.set_cursor_position(params, on_stderr=self.on_stderr) - elif command in 'ABCD': # cursor position - relative - n = params[0] - # A - up, B - down, C - forward, D - back - x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] - winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) - - - def convert_osc(self, text): - for match in self.ANSI_OSC_RE.finditer(text): - start, end = match.span() - text = text[:start] + text[end:] - paramstring, command = match.groups() - if command == BEL: - if paramstring.count(";") == 1: - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) - return text - - - def flush(self): - self.wrapped.flush() diff --git a/venv/Lib/site-packages/colorama/initialise.py b/venv/Lib/site-packages/colorama/initialise.py deleted file mode 100644 index d5fd4b7..0000000 --- a/venv/Lib/site-packages/colorama/initialise.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import atexit -import contextlib -import sys - -from .ansitowin32 import AnsiToWin32 - - -def _wipe_internal_state_for_tests(): - global orig_stdout, orig_stderr - orig_stdout = None - orig_stderr = None - - global wrapped_stdout, wrapped_stderr - wrapped_stdout = None - wrapped_stderr = None - - global atexit_done - atexit_done = False - - global fixed_windows_console - fixed_windows_console = False - - try: - # no-op if it wasn't registered - atexit.unregister(reset_all) - except AttributeError: - # python 2: no atexit.unregister. Oh well, we did our best. 
- pass - - -def reset_all(): - if AnsiToWin32 is not None: # Issue #74: objects might become None at exit - AnsiToWin32(orig_stdout).reset_all() - - -def init(autoreset=False, convert=None, strip=None, wrap=True): - - if not wrap and any([autoreset, convert, strip]): - raise ValueError('wrap=False conflicts with any other arg=True') - - global wrapped_stdout, wrapped_stderr - global orig_stdout, orig_stderr - - orig_stdout = sys.stdout - orig_stderr = sys.stderr - - if sys.stdout is None: - wrapped_stdout = None - else: - sys.stdout = wrapped_stdout = \ - wrap_stream(orig_stdout, convert, strip, autoreset, wrap) - if sys.stderr is None: - wrapped_stderr = None - else: - sys.stderr = wrapped_stderr = \ - wrap_stream(orig_stderr, convert, strip, autoreset, wrap) - - global atexit_done - if not atexit_done: - atexit.register(reset_all) - atexit_done = True - - -def deinit(): - if orig_stdout is not None: - sys.stdout = orig_stdout - if orig_stderr is not None: - sys.stderr = orig_stderr - - -def just_fix_windows_console(): - global fixed_windows_console - - if sys.platform != "win32": - return - if fixed_windows_console: - return - if wrapped_stdout is not None or wrapped_stderr is not None: - # Someone already ran init() and it did stuff, so we won't second-guess them - return - - # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the - # native ANSI support in the console as a side-effect. We only need to actually - # replace sys.stdout/stderr if we're in the old-style conversion mode. - new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) - if new_stdout.convert: - sys.stdout = new_stdout - new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) - if new_stderr.convert: - sys.stderr = new_stderr - - fixed_windows_console = True - -@contextlib.contextmanager -def colorama_text(*args, **kwargs): - init(*args, **kwargs) - try: - yield - finally: - deinit() - - -def reinit(): - if wrapped_stdout is not None: - sys.stdout = wrapped_stdout - if wrapped_stderr is not None: - sys.stderr = wrapped_stderr - - -def wrap_stream(stream, convert, strip, autoreset, wrap): - if wrap: - wrapper = AnsiToWin32(stream, - convert=convert, strip=strip, autoreset=autoreset) - if wrapper.should_wrap(): - stream = wrapper.stream - return stream - - -# Use this for initial setup as well, to reduce code duplication -_wipe_internal_state_for_tests() diff --git a/venv/Lib/site-packages/colorama/tests/__init__.py b/venv/Lib/site-packages/colorama/tests/__init__.py deleted file mode 100644 index 8c5661e..0000000 --- a/venv/Lib/site-packages/colorama/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d23a7ae..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/ansi_test.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/ansi_test.cpython-312.pyc deleted file mode 100644 index 59bbfab..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/ansi_test.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc deleted file mode 100644 index 690e57f..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/initialise_test.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/initialise_test.cpython-312.pyc deleted file mode 100644 index 1a4677b..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/initialise_test.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/isatty_test.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/isatty_test.cpython-312.pyc deleted file mode 100644 index e363a79..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/isatty_test.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index cd9a176..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/__pycache__/winterm_test.cpython-312.pyc b/venv/Lib/site-packages/colorama/tests/__pycache__/winterm_test.cpython-312.pyc deleted file mode 100644 index 0c85261..0000000 Binary files a/venv/Lib/site-packages/colorama/tests/__pycache__/winterm_test.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/colorama/tests/ansi_test.py b/venv/Lib/site-packages/colorama/tests/ansi_test.py deleted file mode 100644 index 0a20c80..0000000 --- a/venv/Lib/site-packages/colorama/tests/ansi_test.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import sys -from unittest import TestCase, main - -from ..ansi import Back, Fore, Style -from ..ansitowin32 import AnsiToWin32 - -stdout_orig = sys.stdout -stderr_orig = sys.stderr - - -class AnsiTest(TestCase): - - def setUp(self): - # sanity check: stdout should be a file or StringIO object. 
- # It will only be AnsiToWin32 if init() has previously wrapped it - self.assertNotEqual(type(sys.stdout), AnsiToWin32) - self.assertNotEqual(type(sys.stderr), AnsiToWin32) - - def tearDown(self): - sys.stdout = stdout_orig - sys.stderr = stderr_orig - - - def testForeAttributes(self): - self.assertEqual(Fore.BLACK, '\033[30m') - self.assertEqual(Fore.RED, '\033[31m') - self.assertEqual(Fore.GREEN, '\033[32m') - self.assertEqual(Fore.YELLOW, '\033[33m') - self.assertEqual(Fore.BLUE, '\033[34m') - self.assertEqual(Fore.MAGENTA, '\033[35m') - self.assertEqual(Fore.CYAN, '\033[36m') - self.assertEqual(Fore.WHITE, '\033[37m') - self.assertEqual(Fore.RESET, '\033[39m') - - # Check the light, extended versions. - self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m') - self.assertEqual(Fore.LIGHTRED_EX, '\033[91m') - self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m') - self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m') - self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m') - self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m') - self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m') - self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m') - - - def testBackAttributes(self): - self.assertEqual(Back.BLACK, '\033[40m') - self.assertEqual(Back.RED, '\033[41m') - self.assertEqual(Back.GREEN, '\033[42m') - self.assertEqual(Back.YELLOW, '\033[43m') - self.assertEqual(Back.BLUE, '\033[44m') - self.assertEqual(Back.MAGENTA, '\033[45m') - self.assertEqual(Back.CYAN, '\033[46m') - self.assertEqual(Back.WHITE, '\033[47m') - self.assertEqual(Back.RESET, '\033[49m') - - # Check the light, extended versions. - self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m') - self.assertEqual(Back.LIGHTRED_EX, '\033[101m') - self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m') - self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m') - self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m') - self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m') - self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m') - self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m') - - - def testStyleAttributes(self): - self.assertEqual(Style.DIM, '\033[2m') - self.assertEqual(Style.NORMAL, '\033[22m') - self.assertEqual(Style.BRIGHT, '\033[1m') - - -if __name__ == '__main__': - main() diff --git a/venv/Lib/site-packages/colorama/tests/ansitowin32_test.py b/venv/Lib/site-packages/colorama/tests/ansitowin32_test.py deleted file mode 100644 index 91ca551..0000000 --- a/venv/Lib/site-packages/colorama/tests/ansitowin32_test.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-from io import StringIO, TextIOWrapper -from unittest import TestCase, main -try: - from contextlib import ExitStack -except ImportError: - # python 2 - from contextlib2 import ExitStack - -try: - from unittest.mock import MagicMock, Mock, patch -except ImportError: - from mock import MagicMock, Mock, patch - -from ..ansitowin32 import AnsiToWin32, StreamWrapper -from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING -from .utils import osname - - -class StreamWrapperTest(TestCase): - - def testIsAProxy(self): - mockStream = Mock() - wrapper = StreamWrapper(mockStream, None) - self.assertTrue( wrapper.random_attr is mockStream.random_attr ) - - def testDelegatesWrite(self): - mockStream = Mock() - mockConverter = Mock() - wrapper = StreamWrapper(mockStream, mockConverter) - wrapper.write('hello') - self.assertTrue(mockConverter.write.call_args, (('hello',), {})) - - def testDelegatesContext(self): - mockConverter = Mock() - s = StringIO() - with StreamWrapper(s, mockConverter) as fp: - fp.write(u'hello') - self.assertTrue(s.closed) - - def testProxyNoContextManager(self): - mockStream = MagicMock() - mockStream.__enter__.side_effect = AttributeError() - mockConverter = Mock() - with self.assertRaises(AttributeError) as excinfo: - with StreamWrapper(mockStream, mockConverter) as wrapper: - wrapper.write('hello') - - def test_closed_shouldnt_raise_on_closed_stream(self): - stream = StringIO() - stream.close() - wrapper = StreamWrapper(stream, None) - self.assertEqual(wrapper.closed, True) - - def test_closed_shouldnt_raise_on_detached_stream(self): - stream = TextIOWrapper(StringIO()) - stream.detach() - wrapper = StreamWrapper(stream, None) - self.assertEqual(wrapper.closed, True) - -class AnsiToWin32Test(TestCase): - - def testInit(self): - mockStdout = Mock() - auto = Mock() - stream = AnsiToWin32(mockStdout, autoreset=auto) - self.assertEqual(stream.wrapped, mockStdout) - self.assertEqual(stream.autoreset, auto) - - @patch('colorama.ansitowin32.winterm', None) - @patch('colorama.ansitowin32.winapi_test', lambda *_: True) - def testStripIsTrueOnWindows(self): - with osname('nt'): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - self.assertTrue(stream.strip) - - def testStripIsFalseOffWindows(self): - with osname('posix'): - mockStdout = Mock(closed=False) - stream = AnsiToWin32(mockStdout) - self.assertFalse(stream.strip) - - def testWriteStripsAnsi(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - stream.wrapped = Mock() - stream.write_and_convert = Mock() - stream.strip = True - - stream.write('abc') - - self.assertFalse(stream.wrapped.write.called) - self.assertEqual(stream.write_and_convert.call_args, (('abc',), {})) - - def testWriteDoesNotStripAnsi(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - stream.wrapped = Mock() - stream.write_and_convert = Mock() - stream.strip = False - stream.convert = False - - stream.write('abc') - - self.assertFalse(stream.write_and_convert.called) - self.assertEqual(stream.wrapped.write.call_args, (('abc',), {})) - - def assert_autoresets(self, convert, autoreset=True): - stream = AnsiToWin32(Mock()) - stream.convert = convert - stream.reset_all = Mock() - stream.autoreset = autoreset - stream.winterm = Mock() - - stream.write('abc') - - self.assertEqual(stream.reset_all.called, autoreset) - - def testWriteAutoresets(self): - self.assert_autoresets(convert=True) - self.assert_autoresets(convert=False) - self.assert_autoresets(convert=True, autoreset=False) - self.assert_autoresets(convert=False, 
autoreset=False) - - def testWriteAndConvertWritesPlainText(self): - stream = AnsiToWin32(Mock()) - stream.write_and_convert( 'abc' ) - self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) ) - - def testWriteAndConvertStripsAllValidAnsi(self): - stream = AnsiToWin32(Mock()) - stream.call_win32 = Mock() - data = [ - 'abc\033[mdef', - 'abc\033[0mdef', - 'abc\033[2mdef', - 'abc\033[02mdef', - 'abc\033[002mdef', - 'abc\033[40mdef', - 'abc\033[040mdef', - 'abc\033[0;1mdef', - 'abc\033[40;50mdef', - 'abc\033[50;30;40mdef', - 'abc\033[Adef', - 'abc\033[0Gdef', - 'abc\033[1;20;128Hdef', - ] - for datum in data: - stream.wrapped.write.reset_mock() - stream.write_and_convert( datum ) - self.assertEqual( - [args[0] for args in stream.wrapped.write.call_args_list], - [ ('abc',), ('def',) ] - ) - - def testWriteAndConvertSkipsEmptySnippets(self): - stream = AnsiToWin32(Mock()) - stream.call_win32 = Mock() - stream.write_and_convert( '\033[40m\033[41m' ) - self.assertFalse( stream.wrapped.write.called ) - - def testWriteAndConvertCallsWin32WithParamsAndCommand(self): - stream = AnsiToWin32(Mock()) - stream.convert = True - stream.call_win32 = Mock() - stream.extract_params = Mock(return_value='params') - data = { - 'abc\033[adef': ('a', 'params'), - 'abc\033[;;bdef': ('b', 'params'), - 'abc\033[0cdef': ('c', 'params'), - 'abc\033[;;0;;Gdef': ('G', 'params'), - 'abc\033[1;20;128Hdef': ('H', 'params'), - } - for datum, expected in data.items(): - stream.call_win32.reset_mock() - stream.write_and_convert( datum ) - self.assertEqual( stream.call_win32.call_args[0], expected ) - - def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self): - stream = StringIO() - converter = AnsiToWin32(stream) - stream.close() - - converter.reset_all() - - def test_wrap_shouldnt_raise_on_closed_orig_stdout(self): - stream = StringIO() - stream.close() - with \ - patch("colorama.ansitowin32.os.name", "nt"), \ - patch("colorama.ansitowin32.winapi_test", lambda: True): - converter = AnsiToWin32(stream) - self.assertTrue(converter.strip) - self.assertFalse(converter.convert) - - def test_wrap_shouldnt_raise_on_missing_closed_attr(self): - with \ - patch("colorama.ansitowin32.os.name", "nt"), \ - patch("colorama.ansitowin32.winapi_test", lambda: True): - converter = AnsiToWin32(object()) - self.assertTrue(converter.strip) - self.assertFalse(converter.convert) - - def testExtractParams(self): - stream = AnsiToWin32(Mock()) - data = { - '': (0,), - ';;': (0,), - '2': (2,), - ';;002;;': (2,), - '0;1': (0, 1), - ';;003;;456;;': (3, 456), - '11;22;33;44;55': (11, 22, 33, 44, 55), - } - for datum, expected in data.items(): - self.assertEqual(stream.extract_params('m', datum), expected) - - def testCallWin32UsesLookup(self): - listener = Mock() - stream = AnsiToWin32(listener) - stream.win32_calls = { - 1: (lambda *_, **__: listener(11),), - 2: (lambda *_, **__: listener(22),), - 3: (lambda *_, **__: listener(33),), - } - stream.call_win32('m', (3, 1, 99, 2)) - self.assertEqual( - [a[0][0] for a in listener.call_args_list], - [33, 11, 22] ) - - def test_osc_codes(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout, convert=True) - with patch('colorama.ansitowin32.winterm') as winterm: - data = [ - '\033]0\x07', # missing arguments - '\033]0;foo\x08', # wrong OSC command - '\033]0;colorama_test_title\x07', # should work - '\033]1;colorama_test_title\x07', # wrong set command - '\033]2;colorama_test_title\x07', # should work - '\033]' + ';' * 64 + '\x08', # see issue #247 - ] - for code in data: - 
stream.write(code) - self.assertEqual(winterm.set_title.call_count, 2) - - def test_native_windows_ansi(self): - with ExitStack() as stack: - def p(a, b): - stack.enter_context(patch(a, b, create=True)) - # Pretend to be on Windows - p("colorama.ansitowin32.os.name", "nt") - p("colorama.ansitowin32.winapi_test", lambda: True) - p("colorama.win32.winapi_test", lambda: True) - p("colorama.winterm.win32.windll", "non-None") - p("colorama.winterm.get_osfhandle", lambda _: 1234) - - # Pretend that our mock stream has native ANSI support - p( - "colorama.winterm.win32.GetConsoleMode", - lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) - SetConsoleMode = Mock() - p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) - - stdout = Mock() - stdout.closed = False - stdout.isatty.return_value = True - stdout.fileno.return_value = 1 - - # Our fake console says it has native vt support, so AnsiToWin32 should - # enable that support and do nothing else. - stream = AnsiToWin32(stdout) - SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) - self.assertFalse(stream.strip) - self.assertFalse(stream.convert) - self.assertFalse(stream.should_wrap()) - - # Now let's pretend we're on an old Windows console, that doesn't have - # native ANSI support. - p("colorama.winterm.win32.GetConsoleMode", lambda _: 0) - SetConsoleMode = Mock() - p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) - - stream = AnsiToWin32(stdout) - SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) - self.assertTrue(stream.strip) - self.assertTrue(stream.convert) - self.assertTrue(stream.should_wrap()) - - -if __name__ == '__main__': - main() diff --git a/venv/Lib/site-packages/colorama/tests/initialise_test.py b/venv/Lib/site-packages/colorama/tests/initialise_test.py deleted file mode 100644 index 89f9b07..0000000 --- a/venv/Lib/site-packages/colorama/tests/initialise_test.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import sys -from unittest import TestCase, main, skipUnless - -try: - from unittest.mock import patch, Mock -except ImportError: - from mock import patch, Mock - -from ..ansitowin32 import StreamWrapper -from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests -from .utils import osname, replace_by - -orig_stdout = sys.stdout -orig_stderr = sys.stderr - - -class InitTest(TestCase): - - @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty") - def setUp(self): - # sanity check - self.assertNotWrapped() - - def tearDown(self): - _wipe_internal_state_for_tests() - sys.stdout = orig_stdout - sys.stderr = orig_stderr - - def assertWrapped(self): - self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped') - self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped') - self.assertTrue(isinstance(sys.stdout, StreamWrapper), - 'bad stdout wrapper') - self.assertTrue(isinstance(sys.stderr, StreamWrapper), - 'bad stderr wrapper') - - def assertNotWrapped(self): - self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped') - self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped') - - @patch('colorama.initialise.reset_all') - @patch('colorama.ansitowin32.winapi_test', lambda *_: True) - @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False) - def testInitWrapsOnWindows(self, _): - with osname("nt"): - init() - self.assertWrapped() - - @patch('colorama.initialise.reset_all') - @patch('colorama.ansitowin32.winapi_test', lambda *_: False) - def testInitDoesntWrapOnEmulatedWindows(self, _): - with osname("nt"): - init() - self.assertNotWrapped() - - def testInitDoesntWrapOnNonWindows(self): - with osname("posix"): - init() - self.assertNotWrapped() - - def testInitDoesntWrapIfNone(self): - with replace_by(None): - init() - # We can't use assertNotWrapped here because replace_by(None) - # changes stdout/stderr already. 
- self.assertIsNone(sys.stdout) - self.assertIsNone(sys.stderr) - - def testInitAutoresetOnWrapsOnAllPlatforms(self): - with osname("posix"): - init(autoreset=True) - self.assertWrapped() - - def testInitWrapOffDoesntWrapOnWindows(self): - with osname("nt"): - init(wrap=False) - self.assertNotWrapped() - - def testInitWrapOffIncompatibleWithAutoresetOn(self): - self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False)) - - @patch('colorama.win32.SetConsoleTextAttribute') - @patch('colorama.initialise.AnsiToWin32') - def testAutoResetPassedOn(self, mockATW32, _): - with osname("nt"): - init(autoreset=True) - self.assertEqual(len(mockATW32.call_args_list), 2) - self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True) - self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True) - - @patch('colorama.initialise.AnsiToWin32') - def testAutoResetChangeable(self, mockATW32): - with osname("nt"): - init() - - init(autoreset=True) - self.assertEqual(len(mockATW32.call_args_list), 4) - self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True) - self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True) - - init() - self.assertEqual(len(mockATW32.call_args_list), 6) - self.assertEqual( - mockATW32.call_args_list[4][1]['autoreset'], False) - self.assertEqual( - mockATW32.call_args_list[5][1]['autoreset'], False) - - - @patch('colorama.initialise.atexit.register') - def testAtexitRegisteredOnlyOnce(self, mockRegister): - init() - self.assertTrue(mockRegister.called) - mockRegister.reset_mock() - init() - self.assertFalse(mockRegister.called) - - -class JustFixWindowsConsoleTest(TestCase): - def _reset(self): - _wipe_internal_state_for_tests() - sys.stdout = orig_stdout - sys.stderr = orig_stderr - - def tearDown(self): - self._reset() - - @patch("colorama.ansitowin32.winapi_test", lambda: True) - def testJustFixWindowsConsole(self): - if sys.platform != "win32": - # just_fix_windows_console should be a no-op - just_fix_windows_console() - self.assertIs(sys.stdout, orig_stdout) - self.assertIs(sys.stderr, orig_stderr) - else: - def fake_std(): - # Emulate stdout=not a tty, stderr=tty - # to check that we handle both cases correctly - stdout = Mock() - stdout.closed = False - stdout.isatty.return_value = False - stdout.fileno.return_value = 1 - sys.stdout = stdout - - stderr = Mock() - stderr.closed = False - stderr.isatty.return_value = True - stderr.fileno.return_value = 2 - sys.stderr = stderr - - for native_ansi in [False, True]: - with patch( - 'colorama.ansitowin32.enable_vt_processing', - lambda *_: native_ansi - ): - self._reset() - fake_std() - - # Regular single-call test - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(sys.stdout, prev_stdout) - if native_ansi: - self.assertIs(sys.stderr, prev_stderr) - else: - self.assertIsNot(sys.stderr, prev_stderr) - - # second call without resetting is always a no-op - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(sys.stdout, prev_stdout) - self.assertIs(sys.stderr, prev_stderr) - - self._reset() - fake_std() - - # If init() runs first, just_fix_windows_console should be a no-op - init() - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(prev_stdout, sys.stdout) - self.assertIs(prev_stderr, sys.stderr) - - -if __name__ == '__main__': - main() diff --git a/venv/Lib/site-packages/colorama/tests/isatty_test.py 
b/venv/Lib/site-packages/colorama/tests/isatty_test.py deleted file mode 100644 index 0f84e4b..0000000 --- a/venv/Lib/site-packages/colorama/tests/isatty_test.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import sys -from unittest import TestCase, main - -from ..ansitowin32 import StreamWrapper, AnsiToWin32 -from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY - - -def is_a_tty(stream): - return StreamWrapper(stream, None).isatty() - -class IsattyTest(TestCase): - - def test_TTY(self): - tty = StreamTTY() - self.assertTrue(is_a_tty(tty)) - with pycharm(): - self.assertTrue(is_a_tty(tty)) - - def test_nonTTY(self): - non_tty = StreamNonTTY() - self.assertFalse(is_a_tty(non_tty)) - with pycharm(): - self.assertFalse(is_a_tty(non_tty)) - - def test_withPycharm(self): - with pycharm(): - self.assertTrue(is_a_tty(sys.stderr)) - self.assertTrue(is_a_tty(sys.stdout)) - - def test_withPycharmTTYOverride(self): - tty = StreamTTY() - with pycharm(), replace_by(tty): - self.assertTrue(is_a_tty(tty)) - - def test_withPycharmNonTTYOverride(self): - non_tty = StreamNonTTY() - with pycharm(), replace_by(non_tty): - self.assertFalse(is_a_tty(non_tty)) - - def test_withPycharmNoneOverride(self): - with pycharm(): - with replace_by(None), replace_original_by(None): - self.assertFalse(is_a_tty(None)) - self.assertFalse(is_a_tty(StreamNonTTY())) - self.assertTrue(is_a_tty(StreamTTY())) - - def test_withPycharmStreamWrapped(self): - with pycharm(): - self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty()) - self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty()) - self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty()) - self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty()) - - -if __name__ == '__main__': - main() diff --git a/venv/Lib/site-packages/colorama/tests/utils.py b/venv/Lib/site-packages/colorama/tests/utils.py deleted file mode 100644 index 472fafb..0000000 --- a/venv/Lib/site-packages/colorama/tests/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from contextlib import contextmanager -from io import StringIO -import sys -import os - - -class StreamTTY(StringIO): - def isatty(self): - return True - -class StreamNonTTY(StringIO): - def isatty(self): - return False - -@contextmanager -def osname(name): - orig = os.name - os.name = name - yield - os.name = orig - -@contextmanager -def replace_by(stream): - orig_stdout = sys.stdout - orig_stderr = sys.stderr - sys.stdout = stream - sys.stderr = stream - yield - sys.stdout = orig_stdout - sys.stderr = orig_stderr - -@contextmanager -def replace_original_by(stream): - orig_stdout = sys.__stdout__ - orig_stderr = sys.__stderr__ - sys.__stdout__ = stream - sys.__stderr__ = stream - yield - sys.__stdout__ = orig_stdout - sys.__stderr__ = orig_stderr - -@contextmanager -def pycharm(): - os.environ["PYCHARM_HOSTED"] = "1" - non_tty = StreamNonTTY() - with replace_by(non_tty), replace_original_by(non_tty): - yield - del os.environ["PYCHARM_HOSTED"] diff --git a/venv/Lib/site-packages/colorama/tests/winterm_test.py b/venv/Lib/site-packages/colorama/tests/winterm_test.py deleted file mode 100644 index d0955f9..0000000 --- a/venv/Lib/site-packages/colorama/tests/winterm_test.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import sys -from unittest import TestCase, main, skipUnless - -try: - from unittest.mock import Mock, patch -except ImportError: - from mock import Mock, patch - -from ..winterm import WinColor, WinStyle, WinTerm - - -class WinTermTest(TestCase): - - @patch('colorama.winterm.win32') - def testInit(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 7 + 6 * 16 + 8 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - self.assertEqual(term._fore, 7) - self.assertEqual(term._back, 6) - self.assertEqual(term._style, 8) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testGetAttrs(self): - term = WinTerm() - - term._fore = 0 - term._back = 0 - term._style = 0 - self.assertEqual(term.get_attrs(), 0) - - term._fore = WinColor.YELLOW - self.assertEqual(term.get_attrs(), WinColor.YELLOW) - - term._back = WinColor.MAGENTA - self.assertEqual( - term.get_attrs(), - WinColor.YELLOW + WinColor.MAGENTA * 16) - - term._style = WinStyle.BRIGHT - self.assertEqual( - term.get_attrs(), - WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT) - - @patch('colorama.winterm.win32') - def testResetAll(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 1 + 2 * 16 + 8 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - - term.set_console = Mock() - term._fore = -1 - term._back = -1 - term._style = -1 - - term.reset_all() - - self.assertEqual(term._fore, 1) - self.assertEqual(term._back, 2) - self.assertEqual(term._style, 8) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testFore(self): - term = WinTerm() - term.set_console = Mock() - term._fore = 0 - - term.fore(5) - - self.assertEqual(term._fore, 5) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testBack(self): - term = WinTerm() - term.set_console = Mock() - term._back = 0 - - term.back(5) - - self.assertEqual(term._back, 5) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testStyle(self): - term = WinTerm() - term.set_console = Mock() - term._style = 0 - - term.style(22) - - self.assertEqual(term._style, 22) - self.assertEqual(term.set_console.called, True) - - @patch('colorama.winterm.win32') - def testSetConsole(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 0 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - term.windll = Mock() - - term.set_console() - - self.assertEqual( - mockWin32.SetConsoleTextAttribute.call_args, - ((mockWin32.STDOUT, term.get_attrs()), {}) - ) - - @patch('colorama.winterm.win32') - def testSetConsoleOnStderr(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 0 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - term.windll = Mock() - - term.set_console(on_stderr=True) - - self.assertEqual( - mockWin32.SetConsoleTextAttribute.call_args, - ((mockWin32.STDERR, term.get_attrs()), {}) - ) - - -if __name__ == '__main__': - main() diff --git a/venv/Lib/site-packages/colorama/win32.py b/venv/Lib/site-packages/colorama/win32.py deleted file mode 100644 index 841b0e2..0000000 --- a/venv/Lib/site-packages/colorama/win32.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
- -# from winbase.h -STDOUT = -11 -STDERR = -12 - -ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 - -try: - import ctypes - from ctypes import LibraryLoader - windll = LibraryLoader(ctypes.WinDLL) - from ctypes import wintypes -except (AttributeError, ImportError): - windll = None - SetConsoleTextAttribute = lambda *_: None - winapi_test = lambda *_: None -else: - from ctypes import byref, Structure, c_char, POINTER - - COORD = wintypes._COORD - - class CONSOLE_SCREEN_BUFFER_INFO(Structure): - """struct in wincon.h.""" - _fields_ = [ - ("dwSize", COORD), - ("dwCursorPosition", COORD), - ("wAttributes", wintypes.WORD), - ("srWindow", wintypes.SMALL_RECT), - ("dwMaximumWindowSize", COORD), - ] - def __str__(self): - return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( - self.dwSize.Y, self.dwSize.X - , self.dwCursorPosition.Y, self.dwCursorPosition.X - , self.wAttributes - , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right - , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X - ) - - _GetStdHandle = windll.kernel32.GetStdHandle - _GetStdHandle.argtypes = [ - wintypes.DWORD, - ] - _GetStdHandle.restype = wintypes.HANDLE - - _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo - _GetConsoleScreenBufferInfo.argtypes = [ - wintypes.HANDLE, - POINTER(CONSOLE_SCREEN_BUFFER_INFO), - ] - _GetConsoleScreenBufferInfo.restype = wintypes.BOOL - - _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute - _SetConsoleTextAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - ] - _SetConsoleTextAttribute.restype = wintypes.BOOL - - _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition - _SetConsoleCursorPosition.argtypes = [ - wintypes.HANDLE, - COORD, - ] - _SetConsoleCursorPosition.restype = wintypes.BOOL - - _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA - _FillConsoleOutputCharacterA.argtypes = [ - wintypes.HANDLE, - c_char, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputCharacterA.restype = wintypes.BOOL - - _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute - _FillConsoleOutputAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputAttribute.restype = wintypes.BOOL - - _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW - _SetConsoleTitleW.argtypes = [ - wintypes.LPCWSTR - ] - _SetConsoleTitleW.restype = wintypes.BOOL - - _GetConsoleMode = windll.kernel32.GetConsoleMode - _GetConsoleMode.argtypes = [ - wintypes.HANDLE, - POINTER(wintypes.DWORD) - ] - _GetConsoleMode.restype = wintypes.BOOL - - _SetConsoleMode = windll.kernel32.SetConsoleMode - _SetConsoleMode.argtypes = [ - wintypes.HANDLE, - wintypes.DWORD - ] - _SetConsoleMode.restype = wintypes.BOOL - - def _winapi_test(handle): - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return bool(success) - - def winapi_test(): - return any(_winapi_test(h) for h in - (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) - - def GetConsoleScreenBufferInfo(stream_id=STDOUT): - handle = _GetStdHandle(stream_id) - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return csbi - - def SetConsoleTextAttribute(stream_id, attrs): - handle = _GetStdHandle(stream_id) - return _SetConsoleTextAttribute(handle, attrs) - - def SetConsoleCursorPosition(stream_id, position, adjust=True): - position = COORD(*position) - # If the 
position is out of range, do nothing. - if position.Y <= 0 or position.X <= 0: - return - # Adjust for Windows' SetConsoleCursorPosition: - # 1. being 0-based, while ANSI is 1-based. - # 2. expecting (x,y), while ANSI uses (y,x). - adjusted_position = COORD(position.Y - 1, position.X - 1) - if adjust: - # Adjust for viewport's scroll position - sr = GetConsoleScreenBufferInfo(STDOUT).srWindow - adjusted_position.Y += sr.Top - adjusted_position.X += sr.Left - # Resume normal processing - handle = _GetStdHandle(stream_id) - return _SetConsoleCursorPosition(handle, adjusted_position) - - def FillConsoleOutputCharacter(stream_id, char, length, start): - handle = _GetStdHandle(stream_id) - char = c_char(char.encode()) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - success = _FillConsoleOutputCharacterA( - handle, char, length, start, byref(num_written)) - return num_written.value - - def FillConsoleOutputAttribute(stream_id, attr, length, start): - ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' - handle = _GetStdHandle(stream_id) - attribute = wintypes.WORD(attr) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - return _FillConsoleOutputAttribute( - handle, attribute, length, start, byref(num_written)) - - def SetConsoleTitle(title): - return _SetConsoleTitleW(title) - - def GetConsoleMode(handle): - mode = wintypes.DWORD() - success = _GetConsoleMode(handle, byref(mode)) - if not success: - raise ctypes.WinError() - return mode.value - - def SetConsoleMode(handle, mode): - success = _SetConsoleMode(handle, mode) - if not success: - raise ctypes.WinError() diff --git a/venv/Lib/site-packages/colorama/winterm.py b/venv/Lib/site-packages/colorama/winterm.py deleted file mode 100644 index aad867e..0000000 --- a/venv/Lib/site-packages/colorama/winterm.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -try: - from msvcrt import get_osfhandle -except ImportError: - def get_osfhandle(_): - raise OSError("This isn't windows!") - - -from . import win32 - -# from wincon.h -class WinColor(object): - BLACK = 0 - BLUE = 1 - GREEN = 2 - CYAN = 3 - RED = 4 - MAGENTA = 5 - YELLOW = 6 - GREY = 7 - -# from wincon.h -class WinStyle(object): - NORMAL = 0x00 # dim text, dim background - BRIGHT = 0x08 # bright text, dim background - BRIGHT_BACKGROUND = 0x80 # dim text, bright background - -class WinTerm(object): - - def __init__(self): - self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes - self.set_attrs(self._default) - self._default_fore = self._fore - self._default_back = self._back - self._default_style = self._style - # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. - # So that LIGHT_EX colors and BRIGHT style do not clobber each other, - # we track them separately, since LIGHT_EX is overwritten by Fore/Back - # and BRIGHT is overwritten by Style codes. 
- self._light = 0 - - def get_attrs(self): - return self._fore + self._back * 16 + (self._style | self._light) - - def set_attrs(self, value): - self._fore = value & 7 - self._back = (value >> 4) & 7 - self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) - - def reset_all(self, on_stderr=None): - self.set_attrs(self._default) - self.set_console(attrs=self._default) - self._light = 0 - - def fore(self, fore=None, light=False, on_stderr=False): - if fore is None: - fore = self._default_fore - self._fore = fore - # Emulate LIGHT_EX with BRIGHT Style - if light: - self._light |= WinStyle.BRIGHT - else: - self._light &= ~WinStyle.BRIGHT - self.set_console(on_stderr=on_stderr) - - def back(self, back=None, light=False, on_stderr=False): - if back is None: - back = self._default_back - self._back = back - # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style - if light: - self._light |= WinStyle.BRIGHT_BACKGROUND - else: - self._light &= ~WinStyle.BRIGHT_BACKGROUND - self.set_console(on_stderr=on_stderr) - - def style(self, style=None, on_stderr=False): - if style is None: - style = self._default_style - self._style = style - self.set_console(on_stderr=on_stderr) - - def set_console(self, attrs=None, on_stderr=False): - if attrs is None: - attrs = self.get_attrs() - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleTextAttribute(handle, attrs) - - def get_position(self, handle): - position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition - # Because Windows coordinates are 0-based, - # and win32.SetConsoleCursorPosition expects 1-based. - position.X += 1 - position.Y += 1 - return position - - def set_cursor_position(self, position=None, on_stderr=False): - if position is None: - # I'm not currently tracking the position, so there is no default. - # position = self.get_position() - return - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleCursorPosition(handle, position) - - def cursor_adjust(self, x, y, on_stderr=False): - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - position = self.get_position(handle) - adjusted_position = (position.Y + y, position.X + x) - win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) - - def erase_screen(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the screen. - # 1 should clear from the cursor to the beginning of the screen. 
- # 2 should clear the entire screen, and move cursor to (1,1) - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - # get the number of character cells in the current buffer - cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y - # get number of character cells before current cursor position - cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = cells_in_screen - cells_before_cursor - elif mode == 1: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_before_cursor - elif mode == 2: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_in_screen - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - if mode == 2: - # put the cursor where needed - win32.SetConsoleCursorPosition(handle, (1, 1)) - - def erase_line(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the line. - # 1 should clear from the cursor to the beginning of the line. - # 2 should clear the entire line. - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X - elif mode == 1: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwCursorPosition.X - elif mode == 2: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwSize.X - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - - def set_title(self, title): - win32.SetConsoleTitle(title) - - -def enable_vt_processing(fd): - if win32.windll is None or not win32.winapi_test(): - return False - - try: - handle = get_osfhandle(fd) - mode = win32.GetConsoleMode(handle) - win32.SetConsoleMode( - handle, - mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) - - mode = win32.GetConsoleMode(handle) - if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING: - return True - # Can get TypeError in testsuite where 'fd' is a Mock() - except (OSError, TypeError): - return False diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/INSTALLER b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/METADATA b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/METADATA deleted file mode 100644 index 7b07ee7..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/METADATA +++ /dev/null @@ -1,139 +0,0 @@ -Metadata-Version: 2.4 -Name: cryptography -Version: 46.0.3 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: POSIX -Classifier: Operating System :: POSIX :: BSD -Classifier: Operating System :: POSIX 
:: Linux -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable -Classifier: Topic :: Security :: Cryptography -Requires-Dist: cffi>=1.14 ; python_full_version == '3.8.*' and platform_python_implementation != 'PyPy' -Requires-Dist: cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy' -Requires-Dist: typing-extensions>=4.13.2 ; python_full_version < '3.11' -Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh' -Requires-Dist: nox[uv]>=2024.4.15 ; extra == 'nox' -Requires-Dist: cryptography-vectors==46.0.3 ; extra == 'test' -Requires-Dist: pytest>=7.4.0 ; extra == 'test' -Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test' -Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test' -Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test' -Requires-Dist: pretend>=0.7 ; extra == 'test' -Requires-Dist: certifi>=2024 ; extra == 'test' -Requires-Dist: pytest-randomly ; extra == 'test-randomorder' -Requires-Dist: sphinx>=5.3.0 ; extra == 'docs' -Requires-Dist: sphinx-rtd-theme>=3.0.0 ; extra == 'docs' -Requires-Dist: sphinx-inline-tabs ; extra == 'docs' -Requires-Dist: pyenchant>=3 ; extra == 'docstest' -Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest' -Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest' -Requires-Dist: build>=1.0.0 ; extra == 'sdist' -Requires-Dist: ruff>=0.11.11 ; extra == 'pep8test' -Requires-Dist: mypy>=1.14 ; extra == 'pep8test' -Requires-Dist: check-sdist ; extra == 'pep8test' -Requires-Dist: click>=8.0.1 ; extra == 'pep8test' -Provides-Extra: ssh -Provides-Extra: nox -Provides-Extra: test -Provides-Extra: test-randomorder -Provides-Extra: docs -Provides-Extra: docstest -Provides-Extra: sdist -Provides-Extra: pep8test -License-File: LICENSE -License-File: LICENSE.APACHE -License-File: LICENSE.BSD -Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. -Author-email: The Python Cryptographic Authority and individual contributors -License-Expression: Apache-2.0 OR BSD-3-Clause -Requires-Python: >=3.8, !=3.9.0, !=3.9.1 -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: homepage, https://github.com/pyca/cryptography -Project-URL: documentation, https://cryptography.io/ -Project-URL: source, https://github.com/pyca/cryptography/ -Project-URL: issues, https://github.com/pyca/cryptography/issues -Project-URL: changelog, https://cryptography.io/en/latest/changelog/ - -pyca/cryptography -================= - -.. image:: https://img.shields.io/pypi/v/cryptography.svg - :target: https://pypi.org/project/cryptography/ - :alt: Latest Version - -.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest - :target: https://cryptography.io - :alt: Latest Docs - -.. 
image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg - :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain - -``cryptography`` is a package which provides cryptographic recipes and -primitives to Python developers. Our goal is for it to be your "cryptographic -standard library". It supports Python 3.8+ and PyPy3 7.3.11+. - -``cryptography`` includes both high level recipes and low level interfaces to -common cryptographic algorithms such as symmetric ciphers, message digests, and -key derivation functions. For example, to encrypt something with -``cryptography``'s high level symmetric encryption recipe: - -.. code-block:: pycon - - >>> from cryptography.fernet import Fernet - >>> # Put this somewhere safe! - >>> key = Fernet.generate_key() - >>> f = Fernet(key) - >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") - >>> token - b'...' - >>> f.decrypt(token) - b'A really secret message. Not for prying eyes.' - -You can find more information in the `documentation`_. - -You can install ``cryptography`` with: - -.. code-block:: console - - $ pip install cryptography - -For full details see `the installation documentation`_. - -Discussion -~~~~~~~~~~ - -If you run into bugs, you can file them in our `issue tracker`_. - -We maintain a `cryptography-dev`_ mailing list for development discussion. - -You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get -involved. - -Security -~~~~~~~~ - -Need to report a security issue? Please consult our `security reporting`_ -documentation. - - -.. _`documentation`: https://cryptography.io/ -.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ -.. _`issue tracker`: https://github.com/pyca/cryptography/issues -.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev -.. 
_`security reporting`: https://cryptography.io/en/latest/security/ - diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/RECORD b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/RECORD deleted file mode 100644 index d3e3233..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/RECORD +++ /dev/null @@ -1,180 +0,0 @@ -cryptography-46.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -cryptography-46.0.3.dist-info/METADATA,sha256=bx2LyCEmOVUC8FH5hsGEZewWPiZoIIYTq0hM9mu9r4s,5748 -cryptography-46.0.3.dist-info/RECORD,, -cryptography-46.0.3.dist-info/WHEEL,sha256=8hEf8NzM1FnmM77AjVt5h8nDuYkN3UqZ79LoIAHXeRE,95 -cryptography-46.0.3.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 -cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 -cryptography-46.0.3.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 -cryptography/__about__.py,sha256=QCLxNH_Abbygdc9RQGpUmrK14Wp3Cl_SEiB2byLwyxo,445 -cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364 -cryptography/__pycache__/__about__.cpython-312.pyc,, -cryptography/__pycache__/__init__.cpython-312.pyc,, -cryptography/__pycache__/exceptions.cpython-312.pyc,, -cryptography/__pycache__/fernet.cpython-312.pyc,, -cryptography/__pycache__/utils.cpython-312.pyc,, -cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 -cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963 -cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 -cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,, -cryptography/hazmat/_oid.py,sha256=p8ThjwJB56Ci_rAIrjyJ1f8VjgD6e39es2dh8JIUBOw,17240 -cryptography/hazmat/asn1/__init__.py,sha256=hS_EWx3wVvZzfbCcNV8hzcDnyMM8H-BhIoS1TipUosk,293 -cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc,, -cryptography/hazmat/asn1/asn1.py,sha256=eMEThEXa19LQjcyVofgHsW6tsZnjp3ddH7bWkkcxfLM,3860 -cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 -cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 -cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,, -cryptography/hazmat/backends/openssl/backend.py,sha256=tV5AxBoFJ2GfA0DMWSY-0TxQJrpQoexzI9R4Kybb--4,10215 -cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/bindings/_rust.pyd,sha256=kvWLtPAadaDvTdlCXcKpbd_iX8k_2dwR6o8NBbek8IU,9245696 -cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257 -cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 -cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 -cryptography/hazmat/bindings/_rust/declarative_asn1.pyi,sha256=2ECFmYue1EPkHEE2Bm7aLwkjB0mSUTpr23v9MN4pri4,892 -cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 
-cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020 -cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522 -cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688 -cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315 -cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 -cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 -cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 -cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 -cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532 -cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514 -cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984 -cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702 -cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=xXfFBb9QehHfDtEaxV_65Z0YK7NquOVIChpTLkgAs_k,2029 -cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912 -cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585 -cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 -cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523 -cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505 -cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605 -cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601 -cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757 -cryptography/hazmat/bindings/_rust/x509.pyi,sha256=n9X0IQ6ICbdIi-ExdCFZoBgeY6njm3QOVAVZwDQdnbk,9784 -cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,, -cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,, -cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DMOpA_XN4l70zTc5_J9DpwlbQeUBRTWpfIJ4yRIn1-U,5791 -cryptography/hazmat/bindings/openssl/binding.py,sha256=x8eocEmukO4cm7cHqfVmOoYY7CCXdoF1v1WhZQt9neo,4610 -cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 -cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 -cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc,, -cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595 
-cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,, -cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,, -cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 -cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522 -cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123 -cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,, -cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645 -cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213 -cryptography/hazmat/primitives/asymmetric/ec.py,sha256=Vf5ig2PcS3PVnsb5N49Kx1uIkFBJyhg4BWXThDz5cug,12999 -cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700 -cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729 -cryptography/hazmat/primitives/asymmetric/padding.py,sha256=vQ6l6gOg9HqcbOsvHrSiJRVLdEj9L4m4HkRGYziTyFA,2854 -cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=ZnKOo2f34MCCOupC03Y1uR-_jiSG5IrelHEmxaME3D4,8303 -cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 -cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 -cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613 -cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642 -cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 
-cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,, -cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 -cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Q7ZJwcsx83Mgxv5y7r6CyJKSdsOwC-my-5A67-ma2vw,3407 -cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253 -cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318 -cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 -cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 -cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184 -cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 -cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 -cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,, -cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460 -cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737 -cryptography/hazmat/primitives/kdf/hkdf.py,sha256=M0lAEfRoc4kpp4-nwDj9yB-vNZukIOYEQrUlWsBNn9o,543 -cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=oZepvo4evhKkkJQWRDwaPoIbyTaFmDc5NPimxg6lfKg,9165 -cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957 -cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590 -cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999 -cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 -cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865 -cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 -cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705 -cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,, -cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 
-cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104 -cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943 -cryptography/hazmat/primitives/serialization/ssh.py,sha256=hPV5obFznz0QhFfXFPOeQ8y6MsurA0xVMQiLnLESEs8,53700 -cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 -cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,, -cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,, -cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,, -cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256 -cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652 -cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -cryptography/utils.py,sha256=bZAjFC5KVpfmF29qS_18vvpW3mKxmdiRALcusHhTTkg,4301 -cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257 -cryptography/x509/__pycache__/__init__.cpython-312.pyc,, -cryptography/x509/__pycache__/base.cpython-312.pyc,, -cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,, -cryptography/x509/__pycache__/extensions.cpython-312.pyc,, -cryptography/x509/__pycache__/general_name.cpython-312.pyc,, -cryptography/x509/__pycache__/name.cpython-312.pyc,, -cryptography/x509/__pycache__/ocsp.cpython-312.pyc,, -cryptography/x509/__pycache__/oid.cpython-312.pyc,, -cryptography/x509/__pycache__/verification.cpython-312.pyc,, -cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997 -cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797 -cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724 -cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 -cryptography/x509/name.py,sha256=ty0_xf0LnHwZAdEf-d8FLO1K4hGqx_7DsD3CHwoLJiY,15101 -cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699 -cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931 -cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958 diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/WHEEL b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/WHEEL deleted file mode 100644 index dcfb13d..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: maturin (1.9.4) -Root-Is-Purelib: false -Tag: cp311-abi3-win_amd64 diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE deleted file mode 100644 index b11f379..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made -under the terms of *both* these licenses. 
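The hunks above and below remove the vendored cryptography 46.0.3 distribution (its RECORD listing, WHEEL metadata, and license files) from the committed virtual environment. The following is an illustrative sketch, not part of this change set: after dropping the venv/ copy from version control, one can confirm that the dependency still resolves from the installed environment. importlib.metadata is part of the Python standard library; the printed version string is only an example based on the dist-info directory named in this diff.

# Illustrative check (not part of this diff): verify the cryptography
# dependency resolves from the active environment rather than the
# removed venv/ copy.
from importlib.metadata import PackageNotFoundError, version

try:
    # Expected to print something like: cryptography 46.0.3
    print("cryptography", version("cryptography"))
except PackageNotFoundError:
    # If this triggers, the package must be installed via the project's
    # declared dependencies (assumed here, e.g. a requirements file).
    print("cryptography is not installed in this environment")
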
diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE deleted file mode 100644 index 62589ed..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. 
We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD b/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD deleted file mode 100644 index ec1a29d..0000000 --- a/venv/Lib/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) Individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - 3. Neither the name of PyCA Cryptography nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/cryptography/__about__.py b/venv/Lib/site-packages/cryptography/__about__.py deleted file mode 100644 index a811628..0000000 --- a/venv/Lib/site-packages/cryptography/__about__.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -__all__ = [ - "__author__", - "__copyright__", - "__version__", -] - -__version__ = "46.0.3" - - -__author__ = "The Python Cryptographic Authority and individual contributors" -__copyright__ = f"Copyright 2013-2025 {__author__}" diff --git a/venv/Lib/site-packages/cryptography/__init__.py b/venv/Lib/site-packages/cryptography/__init__.py deleted file mode 100644 index d374f75..0000000 --- a/venv/Lib/site-packages/cryptography/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.__about__ import __author__, __copyright__, __version__ - -__all__ = [ - "__author__", - "__copyright__", - "__version__", -] diff --git a/venv/Lib/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc deleted file mode 100644 index 221a8b8..0000000 Binary files a/venv/Lib/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 9a5aab1..0000000 Binary files a/venv/Lib/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index c809fa5..0000000 Binary files a/venv/Lib/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc b/venv/Lib/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc deleted file mode 100644 index cb9a59a..0000000 Binary files a/venv/Lib/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/cryptography/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 8eeff62..0000000 Binary files a/venv/Lib/site-packages/cryptography/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/exceptions.py b/venv/Lib/site-packages/cryptography/exceptions.py deleted file mode 100644 index fe125ea..0000000 --- a/venv/Lib/site-packages/cryptography/exceptions.py +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions - -if typing.TYPE_CHECKING: - from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -_Reasons = rust_exceptions._Reasons - - -class UnsupportedAlgorithm(Exception): - def __init__(self, message: str, reason: _Reasons | None = None) -> None: - super().__init__(message) - self._reason = reason - - -class AlreadyFinalized(Exception): - pass - - -class AlreadyUpdated(Exception): - pass - - -class NotYetFinalized(Exception): - pass - - -class InvalidTag(Exception): - pass - - -class InvalidSignature(Exception): - pass - - -class InternalError(Exception): - def __init__( - self, msg: str, err_code: list[rust_openssl.OpenSSLError] - ) -> None: - super().__init__(msg) - self.err_code = err_code - - -class InvalidKey(Exception): - pass diff --git a/venv/Lib/site-packages/cryptography/fernet.py b/venv/Lib/site-packages/cryptography/fernet.py deleted file mode 100644 index c6744ae..0000000 --- a/venv/Lib/site-packages/cryptography/fernet.py +++ /dev/null @@ -1,224 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import base64 -import binascii -import os -import time -import typing -from collections.abc import Iterable - -from cryptography import utils -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.primitives import hashes, padding -from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes -from cryptography.hazmat.primitives.hmac import HMAC - - -class InvalidToken(Exception): - pass - - -_MAX_CLOCK_SKEW = 60 - - -class Fernet: - def __init__( - self, - key: bytes | str, - backend: typing.Any = None, - ) -> None: - try: - key = base64.urlsafe_b64decode(key) - except binascii.Error as exc: - raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes." - ) from exc - if len(key) != 32: - raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes." 
- ) - - self._signing_key = key[:16] - self._encryption_key = key[16:] - - @classmethod - def generate_key(cls) -> bytes: - return base64.urlsafe_b64encode(os.urandom(32)) - - def encrypt(self, data: bytes) -> bytes: - return self.encrypt_at_time(data, int(time.time())) - - def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: - iv = os.urandom(16) - return self._encrypt_from_parts(data, current_time, iv) - - def _encrypt_from_parts( - self, data: bytes, current_time: int, iv: bytes - ) -> bytes: - utils._check_bytes("data", data) - - padder = padding.PKCS7(algorithms.AES.block_size).padder() - padded_data = padder.update(data) + padder.finalize() - encryptor = Cipher( - algorithms.AES(self._encryption_key), - modes.CBC(iv), - ).encryptor() - ciphertext = encryptor.update(padded_data) + encryptor.finalize() - - basic_parts = ( - b"\x80" - + current_time.to_bytes(length=8, byteorder="big") - + iv - + ciphertext - ) - - h = HMAC(self._signing_key, hashes.SHA256()) - h.update(basic_parts) - hmac = h.finalize() - return base64.urlsafe_b64encode(basic_parts + hmac) - - def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: - timestamp, data = Fernet._get_unverified_token_data(token) - if ttl is None: - time_info = None - else: - time_info = (ttl, int(time.time())) - return self._decrypt_data(data, timestamp, time_info) - - def decrypt_at_time( - self, token: bytes | str, ttl: int, current_time: int - ) -> bytes: - if ttl is None: - raise ValueError( - "decrypt_at_time() can only be used with a non-None ttl" - ) - timestamp, data = Fernet._get_unverified_token_data(token) - return self._decrypt_data(data, timestamp, (ttl, current_time)) - - def extract_timestamp(self, token: bytes | str) -> int: - timestamp, data = Fernet._get_unverified_token_data(token) - # Verify the token was not tampered with. 
- self._verify_signature(data) - return timestamp - - @staticmethod - def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: - if not isinstance(token, (str, bytes)): - raise TypeError("token must be bytes or str") - - try: - data = base64.urlsafe_b64decode(token) - except (TypeError, binascii.Error): - raise InvalidToken - - if not data or data[0] != 0x80: - raise InvalidToken - - if len(data) < 9: - raise InvalidToken - - timestamp = int.from_bytes(data[1:9], byteorder="big") - return timestamp, data - - def _verify_signature(self, data: bytes) -> None: - h = HMAC(self._signing_key, hashes.SHA256()) - h.update(data[:-32]) - try: - h.verify(data[-32:]) - except InvalidSignature: - raise InvalidToken - - def _decrypt_data( - self, - data: bytes, - timestamp: int, - time_info: tuple[int, int] | None, - ) -> bytes: - if time_info is not None: - ttl, current_time = time_info - if timestamp + ttl < current_time: - raise InvalidToken - - if current_time + _MAX_CLOCK_SKEW < timestamp: - raise InvalidToken - - self._verify_signature(data) - - iv = data[9:25] - ciphertext = data[25:-32] - decryptor = Cipher( - algorithms.AES(self._encryption_key), modes.CBC(iv) - ).decryptor() - plaintext_padded = decryptor.update(ciphertext) - try: - plaintext_padded += decryptor.finalize() - except ValueError: - raise InvalidToken - unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() - - unpadded = unpadder.update(plaintext_padded) - try: - unpadded += unpadder.finalize() - except ValueError: - raise InvalidToken - return unpadded - - -class MultiFernet: - def __init__(self, fernets: Iterable[Fernet]): - fernets = list(fernets) - if not fernets: - raise ValueError( - "MultiFernet requires at least one Fernet instance" - ) - self._fernets = fernets - - def encrypt(self, msg: bytes) -> bytes: - return self.encrypt_at_time(msg, int(time.time())) - - def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: - return self._fernets[0].encrypt_at_time(msg, current_time) - - def rotate(self, msg: bytes | str) -> bytes: - timestamp, data = Fernet._get_unverified_token_data(msg) - for f in self._fernets: - try: - p = f._decrypt_data(data, timestamp, None) - break - except InvalidToken: - pass - else: - raise InvalidToken - - iv = os.urandom(16) - return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) - - def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: - for f in self._fernets: - try: - return f.decrypt(msg, ttl) - except InvalidToken: - pass - raise InvalidToken - - def decrypt_at_time( - self, msg: bytes | str, ttl: int, current_time: int - ) -> bytes: - for f in self._fernets: - try: - return f.decrypt_at_time(msg, ttl, current_time) - except InvalidToken: - pass - raise InvalidToken - - def extract_timestamp(self, msg: bytes | str) -> int: - for f in self._fernets: - try: - return f.extract_timestamp(msg) - except InvalidToken: - pass - raise InvalidToken diff --git a/venv/Lib/site-packages/cryptography/hazmat/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/__init__.py deleted file mode 100644 index b9f1187..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -""" -Hazardous Materials - -This is a "Hazardous Materials" module. 
You should ONLY use it if you're -100% absolutely sure that you know what you're doing because this module -is full of land mines, dragons, and dinosaurs with laser guns. -""" diff --git a/venv/Lib/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4e7e156..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc deleted file mode 100644 index f598656..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/_oid.py b/venv/Lib/site-packages/cryptography/hazmat/_oid.py deleted file mode 100644 index 4bf138d..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/_oid.py +++ /dev/null @@ -1,356 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import ( - ObjectIdentifier as ObjectIdentifier, -) -from cryptography.hazmat.primitives import hashes - - -class ExtensionOID: - SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") - SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") - KEY_USAGE = ObjectIdentifier("2.5.29.15") - PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16") - SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") - ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") - BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") - NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") - CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") - CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") - POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") - AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") - POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") - EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") - FRESHEST_CRL = ObjectIdentifier("2.5.29.46") - INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") - ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") - AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") - SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") - OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") - TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") - CRL_NUMBER = ObjectIdentifier("2.5.29.20") - DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") - PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( - "1.3.6.1.4.1.11129.2.4.2" - ) - PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") - SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") - MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") - ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3") - - -class OCSPExtensionOID: - NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") - ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") - - -class CRLEntryExtensionOID: - CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") - CRL_REASON = ObjectIdentifier("2.5.29.21") - INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") - - -class NameOID: - COMMON_NAME = ObjectIdentifier("2.5.4.3") - COUNTRY_NAME = 
ObjectIdentifier("2.5.4.6") - LOCALITY_NAME = ObjectIdentifier("2.5.4.7") - STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") - STREET_ADDRESS = ObjectIdentifier("2.5.4.9") - ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97") - ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") - ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") - SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") - SURNAME = ObjectIdentifier("2.5.4.4") - GIVEN_NAME = ObjectIdentifier("2.5.4.42") - TITLE = ObjectIdentifier("2.5.4.12") - INITIALS = ObjectIdentifier("2.5.4.43") - GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") - X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") - DN_QUALIFIER = ObjectIdentifier("2.5.4.46") - PSEUDONYM = ObjectIdentifier("2.5.4.65") - USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") - DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") - EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") - JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") - JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") - JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( - "1.3.6.1.4.1.311.60.2.1.2" - ) - BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") - POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") - POSTAL_CODE = ObjectIdentifier("2.5.4.17") - INN = ObjectIdentifier("1.2.643.3.131.1.1") - OGRN = ObjectIdentifier("1.2.643.100.1") - SNILS = ObjectIdentifier("1.2.643.100.3") - UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") - - -class SignatureAlgorithmOID: - RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") - RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") - # This is an alternate OID for RSA with SHA1 that is occasionally seen - _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") - RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") - RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") - RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") - RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") - RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") - RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") - RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") - RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") - RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") - ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") - ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") - ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") - ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") - ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") - ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") - ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") - ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") - ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") - DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") - DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") - DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") - DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") - DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") - ED25519 = ObjectIdentifier("1.3.101.112") - ED448 = ObjectIdentifier("1.3.101.113") - GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") - GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") - GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") 
- - -_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { - SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), - SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), - SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), - SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), - SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), - SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), - SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), - SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), - SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), - SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.ED25519: None, - SignatureAlgorithmOID.ED448: None, - SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, -} - - -class HashAlgorithmOID: - SHA1 = ObjectIdentifier("1.3.14.3.2.26") - SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4") - SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1") - SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2") - SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3") - SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224") - SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256") - SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384") - SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512") - SHA3_224_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.7") - SHA3_256_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.8") - SHA3_384_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.9") - SHA3_512_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.10") - - -class PublicKeyAlgorithmOID: - DSA = ObjectIdentifier("1.2.840.10040.4.1") - EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") - RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") - RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") - X25519 = ObjectIdentifier("1.3.101.110") - X448 = ObjectIdentifier("1.3.101.111") - ED25519 = ObjectIdentifier("1.3.101.112") - ED448 = ObjectIdentifier("1.3.101.113") - - -class ExtendedKeyUsageOID: - SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") - CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") - CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") - EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") - TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") - OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") - ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") - SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") - KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") - IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") - BUNDLE_SECURITY = 
ObjectIdentifier("1.3.6.1.5.5.7.3.35") - CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") - - -class OtherNameFormOID: - PERMANENT_IDENTIFIER = ObjectIdentifier("1.3.6.1.5.5.7.8.3") - HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4") - DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7") - NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8") - SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9") - ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10") - BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11") - - -class AuthorityInformationAccessOID: - CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") - OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") - - -class SubjectInformationAccessOID: - CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") - - -class CertificatePoliciesOID: - CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") - CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") - ANY_POLICY = ObjectIdentifier("2.5.29.32.0") - - -class AttributeOID: - CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") - UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") - - -_OID_NAMES = { - NameOID.COMMON_NAME: "commonName", - NameOID.COUNTRY_NAME: "countryName", - NameOID.LOCALITY_NAME: "localityName", - NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", - NameOID.STREET_ADDRESS: "streetAddress", - NameOID.ORGANIZATION_NAME: "organizationName", - NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", - NameOID.SERIAL_NUMBER: "serialNumber", - NameOID.SURNAME: "surname", - NameOID.GIVEN_NAME: "givenName", - NameOID.TITLE: "title", - NameOID.GENERATION_QUALIFIER: "generationQualifier", - NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", - NameOID.DN_QUALIFIER: "dnQualifier", - NameOID.PSEUDONYM: "pseudonym", - NameOID.USER_ID: "userID", - NameOID.DOMAIN_COMPONENT: "domainComponent", - NameOID.EMAIL_ADDRESS: "emailAddress", - NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", - NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", - NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( - "jurisdictionStateOrProvinceName" - ), - NameOID.BUSINESS_CATEGORY: "businessCategory", - NameOID.POSTAL_ADDRESS: "postalAddress", - NameOID.POSTAL_CODE: "postalCode", - NameOID.INN: "INN", - NameOID.OGRN: "OGRN", - NameOID.SNILS: "SNILS", - NameOID.UNSTRUCTURED_NAME: "unstructuredName", - SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", - SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss", - SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", - SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", - SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", - SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", - SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", - SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", - SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", - SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", - SignatureAlgorithmOID.ED25519: "ed25519", - SignatureAlgorithmOID.ED448: "ed448", - SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( - "GOST R 34.11-94 with GOST R 34.10-2001" - ), - 
SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( - "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" - ), - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( - "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" - ), - HashAlgorithmOID.SHA1: "sha1", - HashAlgorithmOID.SHA224: "sha224", - HashAlgorithmOID.SHA256: "sha256", - HashAlgorithmOID.SHA384: "sha384", - HashAlgorithmOID.SHA512: "sha512", - HashAlgorithmOID.SHA3_224: "sha3_224", - HashAlgorithmOID.SHA3_256: "sha3_256", - HashAlgorithmOID.SHA3_384: "sha3_384", - HashAlgorithmOID.SHA3_512: "sha3_512", - HashAlgorithmOID.SHA3_224_NIST: "sha3_224", - HashAlgorithmOID.SHA3_256_NIST: "sha3_256", - HashAlgorithmOID.SHA3_384_NIST: "sha3_384", - HashAlgorithmOID.SHA3_512_NIST: "sha3_512", - PublicKeyAlgorithmOID.DSA: "dsaEncryption", - PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", - PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", - PublicKeyAlgorithmOID.X25519: "X25519", - PublicKeyAlgorithmOID.X448: "X448", - ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", - ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", - ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", - ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", - ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", - ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", - ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", - ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", - ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", - ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", - ExtensionOID.KEY_USAGE: "keyUsage", - ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod", - ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", - ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", - ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", - ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( - "signedCertificateTimestampList" - ), - ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( - "signedCertificateTimestampList" - ), - ExtensionOID.PRECERT_POISON: "ctPoison", - ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", - ExtensionOID.ADMISSIONS: "Admissions", - CRLEntryExtensionOID.CRL_REASON: "cRLReason", - CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", - CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", - ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", - ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", - ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", - ExtensionOID.POLICY_MAPPINGS: "policyMappings", - ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", - ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", - ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", - ExtensionOID.FRESHEST_CRL: "freshestCRL", - ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", - ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", - ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", - ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", - ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", - ExtensionOID.CRL_NUMBER: "cRLNumber", - ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", - ExtensionOID.TLS_FEATURE: "TLSFeature", - AuthorityInformationAccessOID.OCSP: "OCSP", - AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", - SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", - CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", - CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", - OCSPExtensionOID.NONCE: 
"OCSPNonce", - AttributeOID.CHALLENGE_PASSWORD: "challengePassword", -} diff --git a/venv/Lib/site-packages/cryptography/hazmat/asn1/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/asn1/__init__.py deleted file mode 100644 index be68373..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/asn1/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.asn1.asn1 import encode_der, sequence - -__all__ = [ - "encode_der", - "sequence", -] diff --git a/venv/Lib/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 43c39c2..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc deleted file mode 100644 index 06c0de1..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/asn1/asn1.py b/venv/Lib/site-packages/cryptography/hazmat/asn1/asn1.py deleted file mode 100644 index dedad6f..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/asn1/asn1.py +++ /dev/null @@ -1,116 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import dataclasses -import sys -import typing - -if sys.version_info < (3, 11): - import typing_extensions - - # We use the `include_extras` parameter of `get_type_hints`, which was - # added in Python 3.9. This can be replaced by the `typing` version - # once the min version is >= 3.9 - if sys.version_info < (3, 9): - get_type_hints = typing_extensions.get_type_hints - else: - get_type_hints = typing.get_type_hints -else: - get_type_hints = typing.get_type_hints - -from cryptography.hazmat.bindings._rust import declarative_asn1 - -T = typing.TypeVar("T", covariant=True) -U = typing.TypeVar("U") - - -encode_der = declarative_asn1.encode_der - - -def _normalize_field_type( - field_type: typing.Any, field_name: str -) -> declarative_asn1.AnnotatedType: - annotation = declarative_asn1.Annotation() - - if hasattr(field_type, "__asn1_root__"): - annotated_root = field_type.__asn1_root__ - if not isinstance(annotated_root, declarative_asn1.AnnotatedType): - raise TypeError(f"unsupported root type: {annotated_root}") - return annotated_root - else: - rust_field_type = declarative_asn1.non_root_python_to_rust(field_type) - - return declarative_asn1.AnnotatedType(rust_field_type, annotation) - - -def _annotate_fields( - raw_fields: dict[str, type], -) -> dict[str, declarative_asn1.AnnotatedType]: - fields = {} - for field_name, field_type in raw_fields.items(): - # Recursively normalize the field type into something that the - # Rust code can understand. 
- annotated_field_type = _normalize_field_type(field_type, field_name) - fields[field_name] = annotated_field_type - - return fields - - -def _register_asn1_sequence(cls: type[U]) -> None: - raw_fields = get_type_hints(cls, include_extras=True) - root = declarative_asn1.AnnotatedType( - declarative_asn1.Type.Sequence(cls, _annotate_fields(raw_fields)), - declarative_asn1.Annotation(), - ) - - setattr(cls, "__asn1_root__", root) - - -# Due to https://github.com/python/mypy/issues/19731, we can't define an alias -# for `dataclass_transform` that conditionally points to `typing` or -# `typing_extensions` depending on the Python version (like we do for -# `get_type_hints`). -# We work around it by making the whole decorated class conditional on the -# Python version. -if sys.version_info < (3, 11): - - @typing_extensions.dataclass_transform(kw_only_default=True) - def sequence(cls: type[U]) -> type[U]: - # We use `dataclasses.dataclass` to add an __init__ method - # to the class with keyword-only parameters. - if sys.version_info >= (3, 10): - dataclass_cls = dataclasses.dataclass( - repr=False, - eq=False, - # `match_args` was added in Python 3.10 and defaults - # to True - match_args=False, - # `kw_only` was added in Python 3.10 and defaults to - # False - kw_only=True, - )(cls) - else: - dataclass_cls = dataclasses.dataclass( - repr=False, - eq=False, - )(cls) - _register_asn1_sequence(dataclass_cls) - return dataclass_cls - -else: - - @typing.dataclass_transform(kw_only_default=True) - def sequence(cls: type[U]) -> type[U]: - # Only add an __init__ method, with keyword-only - # parameters. - dataclass_cls = dataclasses.dataclass( - repr=False, - eq=False, - match_args=False, - kw_only=True, - )(cls) - _register_asn1_sequence(dataclass_cls) - return dataclass_cls diff --git a/venv/Lib/site-packages/cryptography/hazmat/backends/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/backends/__init__.py deleted file mode 100644 index b4400aa..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/backends/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from typing import Any - - -def default_backend() -> Any: - from cryptography.hazmat.backends.openssl.backend import backend - - return backend diff --git a/venv/Lib/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a6355d0..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py deleted file mode 100644 index 51b0447..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.backends.openssl.backend import backend - -__all__ = ["backend"] diff --git a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b94f428..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc deleted file mode 100644 index d58c775..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py b/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py deleted file mode 100644 index 248b8c5..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py +++ /dev/null @@ -1,302 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.bindings.openssl import binding -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding -from cryptography.hazmat.primitives.asymmetric import ec -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.hazmat.primitives.asymmetric.padding import ( - MGF1, - OAEP, - PSS, - PKCS1v15, -) -from cryptography.hazmat.primitives.ciphers import ( - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers.algorithms import ( - AES, -) -from cryptography.hazmat.primitives.ciphers.modes import ( - CBC, - Mode, -) - - -class Backend: - """ - OpenSSL API binding interfaces. - """ - - name = "openssl" - - # TripleDES encryption is disallowed/deprecated throughout 2023 in - # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). - _fips_ciphers = (AES,) - # Sometimes SHA1 is still permissible. That logic is contained - # within the various *_supported methods. - _fips_hashes = ( - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA512_224, - hashes.SHA512_256, - hashes.SHA3_224, - hashes.SHA3_256, - hashes.SHA3_384, - hashes.SHA3_512, - hashes.SHAKE128, - hashes.SHAKE256, - ) - _fips_ecdh_curves = ( - ec.SECP224R1, - ec.SECP256R1, - ec.SECP384R1, - ec.SECP521R1, - ) - _fips_rsa_min_key_size = 2048 - _fips_rsa_min_public_exponent = 65537 - _fips_dsa_min_modulus = 1 << 2048 - _fips_dh_min_key_size = 2048 - _fips_dh_min_modulus = 1 << _fips_dh_min_key_size - - def __init__(self) -> None: - self._binding = binding.Binding() - self._ffi = self._binding.ffi - self._lib = self._binding.lib - self._fips_enabled = rust_openssl.is_fips_enabled() - - def __repr__(self) -> str: - return ( - f"" - ) - - def openssl_assert(self, ok: bool) -> None: - return binding._openssl_assert(ok) - - def _enable_fips(self) -> None: - # This function enables FIPS mode for OpenSSL 3.0.0 on installs that - # have the FIPS provider installed properly. 
- rust_openssl.enable_fips(rust_openssl._providers) - assert rust_openssl.is_fips_enabled() - self._fips_enabled = rust_openssl.is_fips_enabled() - - def openssl_version_text(self) -> str: - """ - Friendly string name of the loaded OpenSSL library. This is not - necessarily the same version as it was compiled against. - - Example: OpenSSL 3.2.1 30 Jan 2024 - """ - return rust_openssl.openssl_version_text() - - def openssl_version_number(self) -> int: - return rust_openssl.openssl_version() - - def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if self._fips_enabled and not isinstance(algorithm, self._fips_hashes): - return False - - return rust_openssl.hashes.hash_supported(algorithm) - - def signature_hash_supported( - self, algorithm: hashes.HashAlgorithm - ) -> bool: - # Dedicated check for hashing algorithm use in message digest for - # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption). - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return False - return self.hash_supported(algorithm) - - def scrypt_supported(self) -> bool: - if self._fips_enabled: - return False - else: - return hasattr(rust_openssl.kdf.Scrypt, "derive") - - def argon2_supported(self) -> bool: - if self._fips_enabled: - return False - else: - return hasattr(rust_openssl.kdf.Argon2id, "derive") - - def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - # FIPS mode still allows SHA1 for HMAC - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return True - if rust_openssl.CRYPTOGRAPHY_IS_AWSLC: - return isinstance( - algorithm, - ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA512_224, - hashes.SHA512_256, - ), - ) - return self.hash_supported(algorithm) - - def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool: - if self._fips_enabled: - # FIPS mode requires AES. TripleDES is disallowed/deprecated in - # FIPS 140-3. - if not isinstance(cipher, self._fips_ciphers): - return False - - return rust_openssl.ciphers.cipher_supported(cipher, mode) - - def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - return self.hmac_supported(algorithm) - - def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: - return rust_openssl.capture_error_stack() - - def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return False - - return isinstance( - algorithm, - ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - ), - ) - - def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: - if isinstance(padding, PKCS1v15): - return True - elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): - # FIPS 186-4 only allows salt length == digest length for PSS - # It is technically acceptable to set an explicit salt length - # equal to the digest length and this will incorrectly fail, but - # since we don't do that in the tests and this method is - # private, we'll ignore that until we need to do otherwise. 
- if ( - self._fips_enabled - and padding._salt_length != PSS.DIGEST_LENGTH - ): - return False - return self.hash_supported(padding._mgf._algorithm) - elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): - return self._oaep_hash_supported( - padding._mgf._algorithm - ) and self._oaep_hash_supported(padding._algorithm) - else: - return False - - def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: - if self._fips_enabled and isinstance(padding, PKCS1v15): - return False - else: - return self.rsa_padding_supported(padding) - - def dsa_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not self._fips_enabled - ) - - def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if not self.dsa_supported(): - return False - return self.signature_hash_supported(algorithm) - - def cmac_algorithm_supported(self, algorithm) -> bool: - return self.cipher_supported( - algorithm, CBC(b"\x00" * algorithm.block_size) - ) - - def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: - if self._fips_enabled and not isinstance( - curve, self._fips_ecdh_curves - ): - return False - - return rust_openssl.ec.curve_supported(curve) - - def elliptic_curve_signature_algorithm_supported( - self, - signature_algorithm: ec.EllipticCurveSignatureAlgorithm, - curve: ec.EllipticCurve, - ) -> bool: - # We only support ECDSA right now. - if not isinstance(signature_algorithm, ec.ECDSA): - return False - - return self.elliptic_curve_supported(curve) and ( - isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) - or self.hash_supported(signature_algorithm.algorithm) - ) - - def elliptic_curve_exchange_algorithm_supported( - self, algorithm: ec.ECDH, curve: ec.EllipticCurve - ) -> bool: - return self.elliptic_curve_supported(curve) and isinstance( - algorithm, ec.ECDH - ) - - def dh_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - def dh_x942_serialization_supported(self) -> bool: - return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 - - def x25519_supported(self) -> bool: - return not self._fips_enabled - - def x448_supported(self) -> bool: - if self._fips_enabled: - return False - return ( - not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - def ed25519_supported(self) -> bool: - return not self._fips_enabled - - def ed448_supported(self) -> bool: - if self._fips_enabled: - return False - return ( - not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - def ecdsa_deterministic_supported(self) -> bool: - return ( - rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER - and not self._fips_enabled - ) - - def poly1305_supported(self) -> bool: - return not self._fips_enabled - - def pkcs7_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - -backend = Backend() diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/bindings/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 84eec86..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust.pyd b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust.pyd deleted file mode 100644 index 5c93835..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi deleted file mode 100644 index 2f4eef4..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import padding -from cryptography.utils import Buffer - -class PKCS7PaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class ANSIX923PaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class PKCS7UnpaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class ANSIX923UnpaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class ObjectIdentifier: - def __init__(self, value: str) -> None: ... - @property - def dotted_string(self) -> str: ... - @property - def _name(self) -> str: ... - -T = typing.TypeVar("T") diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi deleted file mode 100644 index 8010008..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi +++ /dev/null @@ -1,8 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -lib = typing.Any -ffi = typing.Any diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi deleted file mode 100644 index 3b5f208..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi +++ /dev/null @@ -1,7 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... -def encode_dss_signature(r: int, s: int) -> bytes: ... -def parse_spki_for_data(data: bytes) -> bytes: ... 
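The asn1.pyi stub deleted above only declares the low-level DSS-signature helpers; their public entry points are cryptography.hazmat.primitives.asymmetric.utils.encode_dss_signature and decode_dss_signature. A minimal usage sketch, assuming the cryptography package stays installed from PyPI rather than vendored inside venv/ (the (r, s) values here are made-up illustration numbers):

    # Round-trip an (r, s) pair through the DER DSS-signature helpers whose
    # low-level declarations lived in the removed asn1.pyi stub.
    from cryptography.hazmat.primitives.asymmetric.utils import (
        decode_dss_signature,
        encode_dss_signature,
    )

    der_sig = encode_dss_signature(r=12345, s=67890)  # DER SEQUENCE of two INTEGERs
    r, s = decode_dss_signature(der_sig)
    assert (r, s) == (12345, 67890)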
diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi deleted file mode 100644 index 8563c11..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -import typing - -def encode_der(value: typing.Any) -> bytes: ... -def non_root_python_to_rust(cls: type) -> Type: ... - -# Type is a Rust enum with tuple variants. For now, we express the type -# annotations like this: -class Type: - Sequence: typing.ClassVar[type] - PyInt: typing.ClassVar[type] - -class Annotation: - def __new__( - cls, - ) -> Annotation: ... - -class AnnotatedType: - inner: Type - annotation: Annotation - - def __new__(cls, inner: Type, annotation: Annotation) -> AnnotatedType: ... - -class AnnotatedTypeObject: - annotated_type: AnnotatedType - value: typing.Any - - def __new__( - cls, annotated_type: AnnotatedType, value: typing.Any - ) -> AnnotatedTypeObject: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi deleted file mode 100644 index 09f46b1..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -class _Reasons: - BACKEND_MISSING_INTERFACE: _Reasons - UNSUPPORTED_HASH: _Reasons - UNSUPPORTED_CIPHER: _Reasons - UNSUPPORTED_PADDING: _Reasons - UNSUPPORTED_MGF: _Reasons - UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons - UNSUPPORTED_ELLIPTIC_CURVE: _Reasons - UNSUPPORTED_SERIALIZATION: _Reasons - UNSUPPORTED_X509: _Reasons - UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons - UNSUPPORTED_DIFFIE_HELLMAN: _Reasons - UNSUPPORTED_MAC: _Reasons diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi deleted file mode 100644 index 103e96c..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi +++ /dev/null @@ -1,117 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import datetime -from collections.abc import Iterator - -from cryptography import x509 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes -from cryptography.x509 import ocsp - -class OCSPRequest: - @property - def issuer_key_hash(self) -> bytes: ... - @property - def issuer_name_hash(self) -> bytes: ... - @property - def hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def serial_number(self) -> int: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - @property - def extensions(self) -> x509.Extensions: ... - -class OCSPResponse: - @property - def responses(self) -> Iterator[OCSPSingleResponse]: ... - @property - def response_status(self) -> ocsp.OCSPResponseStatus: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... 
- @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_response_bytes(self) -> bytes: ... - @property - def certificates(self) -> list[x509.Certificate]: ... - @property - def responder_key_hash(self) -> bytes | None: ... - @property - def responder_name(self) -> x509.Name | None: ... - @property - def produced_at(self) -> datetime.datetime: ... - @property - def produced_at_utc(self) -> datetime.datetime: ... - @property - def certificate_status(self) -> ocsp.OCSPCertStatus: ... - @property - def revocation_time(self) -> datetime.datetime | None: ... - @property - def revocation_time_utc(self) -> datetime.datetime | None: ... - @property - def revocation_reason(self) -> x509.ReasonFlags | None: ... - @property - def this_update(self) -> datetime.datetime: ... - @property - def this_update_utc(self) -> datetime.datetime: ... - @property - def next_update(self) -> datetime.datetime | None: ... - @property - def next_update_utc(self) -> datetime.datetime | None: ... - @property - def issuer_key_hash(self) -> bytes: ... - @property - def issuer_name_hash(self) -> bytes: ... - @property - def hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def serial_number(self) -> int: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def single_extensions(self) -> x509.Extensions: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - -class OCSPSingleResponse: - @property - def certificate_status(self) -> ocsp.OCSPCertStatus: ... - @property - def revocation_time(self) -> datetime.datetime | None: ... - @property - def revocation_time_utc(self) -> datetime.datetime | None: ... - @property - def revocation_reason(self) -> x509.ReasonFlags | None: ... - @property - def this_update(self) -> datetime.datetime: ... - @property - def this_update_utc(self) -> datetime.datetime: ... - @property - def next_update(self) -> datetime.datetime | None: ... - @property - def next_update_utc(self) -> datetime.datetime | None: ... - @property - def issuer_key_hash(self) -> bytes: ... - @property - def issuer_name_hash(self) -> bytes: ... - @property - def hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def serial_number(self) -> int: ... - -def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... -def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... -def create_ocsp_request( - builder: ocsp.OCSPRequestBuilder, -) -> ocsp.OCSPRequest: ... -def create_ocsp_response( - status: ocsp.OCSPResponseStatus, - builder: ocsp.OCSPResponseBuilder | None, - private_key: PrivateKeyTypes | None, - hash_algorithm: hashes.HashAlgorithm | None, -) -> ocsp.OCSPResponse: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi deleted file mode 100644 index 5fb3cb2..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi +++ /dev/null @@ -1,75 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -import typing - -from cryptography.hazmat.bindings._rust.openssl import ( - aead, - ciphers, - cmac, - dh, - dsa, - ec, - ed448, - ed25519, - hashes, - hmac, - kdf, - keys, - poly1305, - rsa, - x448, - x25519, -) - -__all__ = [ - "aead", - "ciphers", - "cmac", - "dh", - "dsa", - "ec", - "ed448", - "ed25519", - "hashes", - "hmac", - "kdf", - "keys", - "openssl_version", - "openssl_version_text", - "poly1305", - "raise_openssl_error", - "rsa", - "x448", - "x25519", -] - -CRYPTOGRAPHY_IS_LIBRESSL: bool -CRYPTOGRAPHY_IS_BORINGSSL: bool -CRYPTOGRAPHY_IS_AWSLC: bool -CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_309_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_330_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_350_OR_GREATER: bool - -class Providers: ... - -_legacy_provider_loaded: bool -_providers: Providers - -def openssl_version() -> int: ... -def openssl_version_text() -> str: ... -def raise_openssl_error() -> typing.NoReturn: ... -def capture_error_stack() -> list[OpenSSLError]: ... -def is_fips_enabled() -> bool: ... -def enable_fips(providers: Providers) -> None: ... - -class OpenSSLError: - @property - def lib(self) -> int: ... - @property - def reason(self) -> int: ... - @property - def reason_text(self) -> bytes: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi deleted file mode 100644 index 831fcd1..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi +++ /dev/null @@ -1,107 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from collections.abc import Sequence - -from cryptography.utils import Buffer - -class AESGCM: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class ChaCha20Poly1305: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key() -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class AESCCM: - def __init__(self, key: Buffer, tag_length: int = 16) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class AESSIV: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - data: Buffer, - associated_data: Sequence[Buffer] | None, - ) -> bytes: ... - def decrypt( - self, - data: Buffer, - associated_data: Sequence[Buffer] | None, - ) -> bytes: ... - -class AESOCB3: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... 
- def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class AESGCMSIV: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi deleted file mode 100644 index a48fb01..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import ciphers -from cryptography.hazmat.primitives.ciphers import modes - -@typing.overload -def create_encryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag -) -> ciphers.AEADEncryptionContext: ... -@typing.overload -def create_encryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None -) -> ciphers.CipherContext: ... -@typing.overload -def create_decryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag -) -> ciphers.AEADDecryptionContext: ... -@typing.overload -def create_decryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None -) -> ciphers.CipherContext: ... -def cipher_supported( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode -) -> bool: ... -def _advance( - ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int -) -> None: ... -def _advance_aad( - ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int -) -> None: ... - -class CipherContext: ... -class AEADEncryptionContext: ... -class AEADDecryptionContext: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi deleted file mode 100644 index 9c03508..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import ciphers - -class CMAC: - def __init__( - self, - algorithm: ciphers.BlockCipherAlgorithm, - backend: typing.Any = None, - ) -> None: ... - def update(self, data: bytes) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, signature: bytes) -> None: ... - def copy(self) -> CMAC: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi deleted file mode 100644 index 08733d7..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -import typing - -from cryptography.hazmat.primitives.asymmetric import dh - -MIN_MODULUS_SIZE: int - -class DHPrivateKey: ... -class DHPublicKey: ... -class DHParameters: ... - -class DHPrivateNumbers: - def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... - def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... - @property - def x(self) -> int: ... - @property - def public_numbers(self) -> DHPublicNumbers: ... - -class DHPublicNumbers: - def __init__( - self, y: int, parameter_numbers: DHParameterNumbers - ) -> None: ... - def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... - @property - def y(self) -> int: ... - @property - def parameter_numbers(self) -> DHParameterNumbers: ... - -class DHParameterNumbers: - def __init__(self, p: int, g: int, q: int | None = None) -> None: ... - def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... - @property - def p(self) -> int: ... - @property - def g(self) -> int: ... - @property - def q(self) -> int | None: ... - -def generate_parameters( - generator: int, key_size: int, backend: typing.Any = None -) -> dh.DHParameters: ... -def from_pem_parameters( - data: bytes, backend: typing.Any = None -) -> dh.DHParameters: ... -def from_der_parameters( - data: bytes, backend: typing.Any = None -) -> dh.DHParameters: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi deleted file mode 100644 index 0922a4c..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import dsa - -class DSAPrivateKey: ... -class DSAPublicKey: ... -class DSAParameters: ... - -class DSAPrivateNumbers: - def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... - @property - def x(self) -> int: ... - @property - def public_numbers(self) -> DSAPublicNumbers: ... - def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... - -class DSAPublicNumbers: - def __init__( - self, y: int, parameter_numbers: DSAParameterNumbers - ) -> None: ... - @property - def y(self) -> int: ... - @property - def parameter_numbers(self) -> DSAParameterNumbers: ... - def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... - -class DSAParameterNumbers: - def __init__(self, p: int, q: int, g: int) -> None: ... - @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def g(self) -> int: ... - def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... - -def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi deleted file mode 100644 index 5c3b7bf..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import ec - -class ECPrivateKey: ... 
-class ECPublicKey: ... - -class EllipticCurvePrivateNumbers: - def __init__( - self, private_value: int, public_numbers: EllipticCurvePublicNumbers - ) -> None: ... - def private_key( - self, backend: typing.Any = None - ) -> ec.EllipticCurvePrivateKey: ... - @property - def private_value(self) -> int: ... - @property - def public_numbers(self) -> EllipticCurvePublicNumbers: ... - -class EllipticCurvePublicNumbers: - def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... - def public_key( - self, backend: typing.Any = None - ) -> ec.EllipticCurvePublicKey: ... - @property - def x(self) -> int: ... - @property - def y(self) -> int: ... - @property - def curve(self) -> ec.EllipticCurve: ... - def __eq__(self, other: object) -> bool: ... - -def curve_supported(curve: ec.EllipticCurve) -> bool: ... -def generate_private_key( - curve: ec.EllipticCurve, backend: typing.Any = None -) -> ec.EllipticCurvePrivateKey: ... -def from_private_numbers( - numbers: ec.EllipticCurvePrivateNumbers, -) -> ec.EllipticCurvePrivateKey: ... -def from_public_numbers( - numbers: ec.EllipticCurvePublicNumbers, -) -> ec.EllipticCurvePublicKey: ... -def from_public_bytes( - curve: ec.EllipticCurve, data: bytes -) -> ec.EllipticCurvePublicKey: ... -def derive_private_key( - private_value: int, curve: ec.EllipticCurve -) -> ec.EllipticCurvePrivateKey: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi deleted file mode 100644 index f85b3d1..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import ed25519 -from cryptography.utils import Buffer - -class Ed25519PrivateKey: ... -class Ed25519PublicKey: ... - -def generate_key() -> ed25519.Ed25519PrivateKey: ... -def from_private_bytes(data: Buffer) -> ed25519.Ed25519PrivateKey: ... -def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi deleted file mode 100644 index c8ca0ec..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import ed448 -from cryptography.utils import Buffer - -class Ed448PrivateKey: ... -class Ed448PublicKey: ... - -def generate_key() -> ed448.Ed448PrivateKey: ... -def from_private_bytes(data: Buffer) -> ed448.Ed448PrivateKey: ... -def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi deleted file mode 100644 index 6bfd295..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi +++ /dev/null @@ -1,28 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import hashes -from cryptography.utils import Buffer - -class Hash(hashes.HashContext): - def __init__( - self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None - ) -> None: ... - @property - def algorithm(self) -> hashes.HashAlgorithm: ... - def update(self, data: Buffer) -> None: ... - def finalize(self) -> bytes: ... - def copy(self) -> Hash: ... - -def hash_supported(algorithm: hashes.HashAlgorithm) -> bool: ... - -class XOFHash: - def __init__(self, algorithm: hashes.ExtendableOutputFunction) -> None: ... - @property - def algorithm(self) -> hashes.ExtendableOutputFunction: ... - def update(self, data: Buffer) -> None: ... - def squeeze(self, length: int) -> bytes: ... - def copy(self) -> XOFHash: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi deleted file mode 100644 index 3883d1b..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi +++ /dev/null @@ -1,22 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import hashes -from cryptography.utils import Buffer - -class HMAC(hashes.HashContext): - def __init__( - self, - key: Buffer, - algorithm: hashes.HashAlgorithm, - backend: typing.Any = None, - ) -> None: ... - @property - def algorithm(self) -> hashes.HashAlgorithm: ... - def update(self, data: Buffer) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, signature: bytes) -> None: ... - def copy(self) -> HMAC: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi deleted file mode 100644 index 9e2d8d9..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi +++ /dev/null @@ -1,72 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.hashes import HashAlgorithm -from cryptography.utils import Buffer - -def derive_pbkdf2_hmac( - key_material: Buffer, - algorithm: HashAlgorithm, - salt: bytes, - iterations: int, - length: int, -) -> bytes: ... - -class Scrypt: - def __init__( - self, - salt: bytes, - length: int, - n: int, - r: int, - p: int, - backend: typing.Any = None, - ) -> None: ... - def derive(self, key_material: Buffer) -> bytes: ... - def verify(self, key_material: bytes, expected_key: bytes) -> None: ... - -class Argon2id: - def __init__( - self, - *, - salt: bytes, - length: int, - iterations: int, - lanes: int, - memory_cost: int, - ad: bytes | None = None, - secret: bytes | None = None, - ) -> None: ... - def derive(self, key_material: bytes) -> bytes: ... - def verify(self, key_material: bytes, expected_key: bytes) -> None: ... - def derive_phc_encoded(self, key_material: bytes) -> str: ... - @classmethod - def verify_phc_encoded( - cls, key_material: bytes, phc_encoded: str, secret: bytes | None = None - ) -> None: ... 
- -class HKDF: - def __init__( - self, - algorithm: HashAlgorithm, - length: int, - salt: bytes | None, - info: bytes | None, - backend: typing.Any = None, - ): ... - def derive(self, key_material: Buffer) -> bytes: ... - def verify(self, key_material: bytes, expected_key: bytes) -> None: ... - -class HKDFExpand: - def __init__( - self, - algorithm: HashAlgorithm, - length: int, - info: bytes | None, - backend: typing.Any = None, - ): ... - def derive(self, key_material: Buffer) -> bytes: ... - def verify(self, key_material: bytes, expected_key: bytes) -> None: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi deleted file mode 100644 index 404057e..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi +++ /dev/null @@ -1,34 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric.types import ( - PrivateKeyTypes, - PublicKeyTypes, -) -from cryptography.utils import Buffer - -def load_der_private_key( - data: Buffer, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> PrivateKeyTypes: ... -def load_pem_private_key( - data: Buffer, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> PrivateKeyTypes: ... -def load_der_public_key( - data: bytes, - backend: typing.Any = None, -) -> PublicKeyTypes: ... -def load_pem_public_key( - data: bytes, - backend: typing.Any = None, -) -> PublicKeyTypes: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi deleted file mode 100644 index 45a2a39..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi +++ /dev/null @@ -1,15 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.utils import Buffer - -class Poly1305: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_tag(key: Buffer, data: Buffer) -> bytes: ... - @staticmethod - def verify_tag(key: Buffer, data: Buffer, tag: bytes) -> None: ... - def update(self, data: Buffer) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, tag: bytes) -> None: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi deleted file mode 100644 index ef7752d..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi +++ /dev/null @@ -1,55 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import rsa - -class RSAPrivateKey: ... -class RSAPublicKey: ... - -class RSAPrivateNumbers: - def __init__( - self, - p: int, - q: int, - d: int, - dmp1: int, - dmq1: int, - iqmp: int, - public_numbers: RSAPublicNumbers, - ) -> None: ... 
- @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def d(self) -> int: ... - @property - def dmp1(self) -> int: ... - @property - def dmq1(self) -> int: ... - @property - def iqmp(self) -> int: ... - @property - def public_numbers(self) -> RSAPublicNumbers: ... - def private_key( - self, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, - ) -> rsa.RSAPrivateKey: ... - -class RSAPublicNumbers: - def __init__(self, e: int, n: int) -> None: ... - @property - def n(self) -> int: ... - @property - def e(self) -> int: ... - def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... - -def generate_private_key( - public_exponent: int, - key_size: int, -) -> rsa.RSAPrivateKey: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi deleted file mode 100644 index 38d2add..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import x25519 -from cryptography.utils import Buffer - -class X25519PrivateKey: ... -class X25519PublicKey: ... - -def generate_key() -> x25519.X25519PrivateKey: ... -def from_private_bytes(data: Buffer) -> x25519.X25519PrivateKey: ... -def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi deleted file mode 100644 index 3ac0980..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import x448 -from cryptography.utils import Buffer - -class X448PrivateKey: ... -class X448PublicKey: ... - -def generate_key() -> x448.X448PrivateKey: ... -def from_private_bytes(data: Buffer) -> x448.X448PrivateKey: ... -def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi deleted file mode 100644 index b25becb..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing -from collections.abc import Iterable - -from cryptography import x509 -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes -from cryptography.hazmat.primitives.serialization import ( - KeySerializationEncryption, -) -from cryptography.hazmat.primitives.serialization.pkcs12 import ( - PKCS12KeyAndCertificates, - PKCS12PrivateKeyTypes, -) -from cryptography.utils import Buffer - -class PKCS12Certificate: - def __init__( - self, cert: x509.Certificate, friendly_name: bytes | None - ) -> None: ... 
- @property - def friendly_name(self) -> bytes | None: ... - @property - def certificate(self) -> x509.Certificate: ... - -def load_key_and_certificates( - data: Buffer, - password: Buffer | None, - backend: typing.Any = None, -) -> tuple[ - PrivateKeyTypes | None, - x509.Certificate | None, - list[x509.Certificate], -]: ... -def load_pkcs12( - data: bytes, - password: bytes | None, - backend: typing.Any = None, -) -> PKCS12KeyAndCertificates: ... -def serialize_java_truststore( - certs: Iterable[PKCS12Certificate], - encryption_algorithm: KeySerializationEncryption, -) -> bytes: ... -def serialize_key_and_certificates( - name: bytes | None, - key: PKCS12PrivateKeyTypes | None, - cert: x509.Certificate | None, - cas: Iterable[x509.Certificate | PKCS12Certificate] | None, - encryption_algorithm: KeySerializationEncryption, -) -> bytes: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi deleted file mode 100644 index 358b135..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from collections.abc import Iterable - -from cryptography import x509 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives.serialization import pkcs7 - -def serialize_certificates( - certs: list[x509.Certificate], - encoding: serialization.Encoding, -) -> bytes: ... -def encrypt_and_serialize( - builder: pkcs7.PKCS7EnvelopeBuilder, - content_encryption_algorithm: pkcs7.ContentEncryptionAlgorithm, - encoding: serialization.Encoding, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def sign_and_serialize( - builder: pkcs7.PKCS7SignatureBuilder, - encoding: serialization.Encoding, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def decrypt_der( - data: bytes, - certificate: x509.Certificate, - private_key: rsa.RSAPrivateKey, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def decrypt_pem( - data: bytes, - certificate: x509.Certificate, - private_key: rsa.RSAPrivateKey, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def decrypt_smime( - data: bytes, - certificate: x509.Certificate, - private_key: rsa.RSAPrivateKey, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def load_pem_pkcs7_certificates( - data: bytes, -) -> list[x509.Certificate]: ... -def load_der_pkcs7_certificates( - data: bytes, -) -> list[x509.Certificate]: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi deleted file mode 100644 index c6c6d0b..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from cryptography import x509 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.serialization import pkcs7 -from cryptography.utils import Buffer - -class TestCertificate: - not_after_tag: int - not_before_tag: int - issuer_value_tags: list[int] - subject_value_tags: list[int] - -def test_parse_certificate(data: bytes) -> TestCertificate: ... -def pkcs7_verify( - encoding: serialization.Encoding, - sig: bytes, - msg: Buffer | None, - certs: list[x509.Certificate], - options: list[pkcs7.PKCS7Options], -) -> None: ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi deleted file mode 100644 index 83c3441..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi +++ /dev/null @@ -1,301 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import datetime -import typing -from collections.abc import Iterator - -from cryptography import x509 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric.ec import ECDSA -from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPublicKeyTypes, - CertificatePublicKeyTypes, - PrivateKeyTypes, -) -from cryptography.x509 import certificate_transparency - -def load_pem_x509_certificate( - data: bytes, backend: typing.Any = None -) -> x509.Certificate: ... -def load_der_x509_certificate( - data: bytes, backend: typing.Any = None -) -> x509.Certificate: ... -def load_pem_x509_certificates( - data: bytes, -) -> list[x509.Certificate]: ... -def load_pem_x509_crl( - data: bytes, backend: typing.Any = None -) -> x509.CertificateRevocationList: ... -def load_der_x509_crl( - data: bytes, backend: typing.Any = None -) -> x509.CertificateRevocationList: ... -def load_pem_x509_csr( - data: bytes, backend: typing.Any = None -) -> x509.CertificateSigningRequest: ... -def load_der_x509_csr( - data: bytes, backend: typing.Any = None -) -> x509.CertificateSigningRequest: ... -def encode_name_bytes(name: x509.Name) -> bytes: ... -def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... -def create_x509_certificate( - builder: x509.CertificateBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, - ecdsa_deterministic: bool | None, -) -> x509.Certificate: ... -def create_x509_csr( - builder: x509.CertificateSigningRequestBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, - ecdsa_deterministic: bool | None, -) -> x509.CertificateSigningRequest: ... -def create_x509_crl( - builder: x509.CertificateRevocationListBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, - ecdsa_deterministic: bool | None, -) -> x509.CertificateRevocationList: ... - -class Sct: - @property - def version(self) -> certificate_transparency.Version: ... - @property - def log_id(self) -> bytes: ... - @property - def timestamp(self) -> datetime.datetime: ... - @property - def entry_type(self) -> certificate_transparency.LogEntryType: ... - @property - def signature_hash_algorithm(self) -> hashes.HashAlgorithm: ... 
- @property - def signature_algorithm( - self, - ) -> certificate_transparency.SignatureAlgorithm: ... - @property - def signature(self) -> bytes: ... - @property - def extension_bytes(self) -> bytes: ... - -class Certificate: - def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... - @property - def serial_number(self) -> int: ... - @property - def version(self) -> x509.Version: ... - def public_key(self) -> CertificatePublicKeyTypes: ... - @property - def public_key_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def not_valid_before(self) -> datetime.datetime: ... - @property - def not_valid_before_utc(self) -> datetime.datetime: ... - @property - def not_valid_after(self) -> datetime.datetime: ... - @property - def not_valid_after_utc(self) -> datetime.datetime: ... - @property - def issuer(self) -> x509.Name: ... - @property - def subject(self) -> x509.Name: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def signature_algorithm_parameters( - self, - ) -> PSS | PKCS1v15 | ECDSA | None: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_certificate_bytes(self) -> bytes: ... - @property - def tbs_precertificate_bytes(self) -> bytes: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - def verify_directly_issued_by(self, issuer: Certificate) -> None: ... - -class RevokedCertificate: ... - -class CertificateRevocationList: - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... - def get_revoked_certificate_by_serial_number( - self, serial_number: int - ) -> x509.RevokedCertificate | None: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def signature_algorithm_parameters( - self, - ) -> PSS | PKCS1v15 | ECDSA | None: ... - @property - def issuer(self) -> x509.Name: ... - @property - def next_update(self) -> datetime.datetime | None: ... - @property - def next_update_utc(self) -> datetime.datetime | None: ... - @property - def last_update(self) -> datetime.datetime: ... - @property - def last_update_utc(self) -> datetime.datetime: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_certlist_bytes(self) -> bytes: ... - def __eq__(self, other: object) -> bool: ... - def __len__(self) -> int: ... - @typing.overload - def __getitem__(self, idx: int) -> x509.RevokedCertificate: ... - @typing.overload - def __getitem__(self, idx: slice) -> list[x509.RevokedCertificate]: ... - def __iter__(self) -> Iterator[x509.RevokedCertificate]: ... - def is_signature_valid( - self, public_key: CertificateIssuerPublicKeyTypes - ) -> bool: ... - -class CertificateSigningRequest: - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def public_key(self) -> CertificatePublicKeyTypes: ... - @property - def subject(self) -> x509.Name: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... 
- @property - def signature_algorithm_parameters( - self, - ) -> PSS | PKCS1v15 | ECDSA | None: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def attributes(self) -> x509.Attributes: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_certrequest_bytes(self) -> bytes: ... - @property - def is_signature_valid(self) -> bool: ... - -class PolicyBuilder: - def time(self, time: datetime.datetime) -> PolicyBuilder: ... - def store(self, store: Store) -> PolicyBuilder: ... - def max_chain_depth(self, max_chain_depth: int) -> PolicyBuilder: ... - def extension_policies( - self, *, ca_policy: ExtensionPolicy, ee_policy: ExtensionPolicy - ) -> PolicyBuilder: ... - def build_client_verifier(self) -> ClientVerifier: ... - def build_server_verifier( - self, subject: x509.verification.Subject - ) -> ServerVerifier: ... - -class Policy: - @property - def max_chain_depth(self) -> int: ... - @property - def subject(self) -> x509.verification.Subject | None: ... - @property - def validation_time(self) -> datetime.datetime: ... - @property - def extended_key_usage(self) -> x509.ObjectIdentifier: ... - @property - def minimum_rsa_modulus(self) -> int: ... - -class Criticality: - CRITICAL: Criticality - AGNOSTIC: Criticality - NON_CRITICAL: Criticality - -T = typing.TypeVar("T", contravariant=True, bound=x509.ExtensionType) - -MaybeExtensionValidatorCallback = typing.Callable[ - [ - Policy, - x509.Certificate, - T | None, - ], - None, -] - -PresentExtensionValidatorCallback = typing.Callable[ - [Policy, x509.Certificate, T], - None, -] - -class ExtensionPolicy: - @staticmethod - def permit_all() -> ExtensionPolicy: ... - @staticmethod - def webpki_defaults_ca() -> ExtensionPolicy: ... - @staticmethod - def webpki_defaults_ee() -> ExtensionPolicy: ... - def require_not_present( - self, extension_type: type[x509.ExtensionType] - ) -> ExtensionPolicy: ... - def may_be_present( - self, - extension_type: type[T], - criticality: Criticality, - validator: MaybeExtensionValidatorCallback[T] | None, - ) -> ExtensionPolicy: ... - def require_present( - self, - extension_type: type[T], - criticality: Criticality, - validator: PresentExtensionValidatorCallback[T] | None, - ) -> ExtensionPolicy: ... - -class VerifiedClient: - @property - def subjects(self) -> list[x509.GeneralName] | None: ... - @property - def chain(self) -> list[x509.Certificate]: ... - -class ClientVerifier: - @property - def policy(self) -> Policy: ... - @property - def store(self) -> Store: ... - def verify( - self, - leaf: x509.Certificate, - intermediates: list[x509.Certificate], - ) -> VerifiedClient: ... - -class ServerVerifier: - @property - def policy(self) -> Policy: ... - @property - def store(self) -> Store: ... - def verify( - self, - leaf: x509.Certificate, - intermediates: list[x509.Certificate], - ) -> list[x509.Certificate]: ... - -class Store: - def __init__(self, certs: list[x509.Certificate]) -> None: ... - -class VerificationError(Exception): ... diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index fa59cc7..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc deleted file mode 100644 index 20c491e..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc deleted file mode 100644 index 5542a32..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py deleted file mode 100644 index 063bcf5..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py +++ /dev/null @@ -1,207 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - - -def cryptography_has_set_cert_cb() -> list[str]: - return [ - "SSL_CTX_set_cert_cb", - "SSL_set_cert_cb", - ] - - -def cryptography_has_ssl_st() -> list[str]: - return [ - "SSL_ST_BEFORE", - "SSL_ST_OK", - "SSL_ST_INIT", - "SSL_ST_RENEGOTIATE", - ] - - -def cryptography_has_tls_st() -> list[str]: - return [ - "TLS_ST_BEFORE", - "TLS_ST_OK", - ] - - -def cryptography_has_ssl_sigalgs() -> list[str]: - return [ - "SSL_CTX_set1_sigalgs_list", - ] - - -def cryptography_has_psk() -> list[str]: - return [ - "SSL_CTX_use_psk_identity_hint", - "SSL_CTX_set_psk_server_callback", - "SSL_CTX_set_psk_client_callback", - ] - - -def cryptography_has_psk_tlsv13() -> list[str]: - return [ - "SSL_CTX_set_psk_find_session_callback", - "SSL_CTX_set_psk_use_session_callback", - "Cryptography_SSL_SESSION_new", - "SSL_CIPHER_find", - "SSL_SESSION_set1_master_key", - "SSL_SESSION_set_cipher", - "SSL_SESSION_set_protocol_version", - ] - - -def cryptography_has_custom_ext() -> list[str]: - return [ - "SSL_CTX_add_client_custom_ext", - "SSL_CTX_add_server_custom_ext", - "SSL_extension_supported", - ] - - -def cryptography_has_tlsv13_functions() -> list[str]: - return [ - "SSL_CTX_set_ciphersuites", - ] - - -def cryptography_has_tlsv13_hs_functions() -> list[str]: - return [ - "SSL_VERIFY_POST_HANDSHAKE", - "SSL_verify_client_post_handshake", - "SSL_CTX_set_post_handshake_auth", - "SSL_set_post_handshake_auth", - "SSL_SESSION_get_max_early_data", - "SSL_write_early_data", - "SSL_read_early_data", - "SSL_CTX_set_max_early_data", - ] - - -def cryptography_has_ssl_verify_client_post_handshake() -> list[str]: - return [ - "SSL_verify_client_post_handshake", - ] - - -def cryptography_has_engine() -> list[str]: - return [ - "ENGINE_by_id", - 
"ENGINE_init", - "ENGINE_finish", - "ENGINE_get_default_RAND", - "ENGINE_set_default_RAND", - "ENGINE_unregister_RAND", - "ENGINE_ctrl_cmd", - "ENGINE_free", - "ENGINE_get_name", - "ENGINE_ctrl_cmd_string", - "ENGINE_load_builtin_engines", - "ENGINE_load_private_key", - "ENGINE_load_public_key", - "SSL_CTX_set_client_cert_engine", - ] - - -def cryptography_has_verified_chain() -> list[str]: - return [ - "SSL_get0_verified_chain", - ] - - -def cryptography_has_srtp() -> list[str]: - return [ - "SSL_CTX_set_tlsext_use_srtp", - "SSL_set_tlsext_use_srtp", - "SSL_get_selected_srtp_profile", - ] - - -def cryptography_has_op_no_renegotiation() -> list[str]: - return [ - "SSL_OP_NO_RENEGOTIATION", - ] - - -def cryptography_has_dtls_get_data_mtu() -> list[str]: - return [ - "DTLS_get_data_mtu", - ] - - -def cryptography_has_ssl_cookie() -> list[str]: - return [ - "SSL_OP_COOKIE_EXCHANGE", - "DTLSv1_listen", - "SSL_CTX_set_cookie_generate_cb", - "SSL_CTX_set_cookie_verify_cb", - ] - - -def cryptography_has_prime_checks() -> list[str]: - return [ - "BN_prime_checks_for_size", - ] - - -def cryptography_has_unexpected_eof_while_reading() -> list[str]: - return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] - - -def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: - return [ - "SSL_OP_IGNORE_UNEXPECTED_EOF", - ] - - -def cryptography_has_get_extms_support() -> list[str]: - return ["SSL_get_extms_support"] - - -def cryptography_has_ssl_get0_group_name() -> list[str]: - return ["SSL_get0_group_name"] - - -# This is a mapping of -# {condition: function-returning-names-dependent-on-that-condition} so we can -# loop over them and delete unsupported names at runtime. It will be removed -# when cffi supports #if in cdef. We use functions instead of just a dict of -# lists so we can use coverage to measure which are used. 
-CONDITIONAL_NAMES = { - "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, - "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, - "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, - "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, - "Cryptography_HAS_PSK": cryptography_has_psk, - "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, - "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, - "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, - "Cryptography_HAS_TLSv1_3_HS_FUNCTIONS": ( - cryptography_has_tlsv13_hs_functions - ), - "Cryptography_HAS_SSL_VERIFY_CLIENT_POST_HANDSHAKE": ( - cryptography_has_ssl_verify_client_post_handshake - ), - "Cryptography_HAS_ENGINE": cryptography_has_engine, - "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, - "Cryptography_HAS_SRTP": cryptography_has_srtp, - "Cryptography_HAS_OP_NO_RENEGOTIATION": ( - cryptography_has_op_no_renegotiation - ), - "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, - "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, - "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, - "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( - cryptography_has_unexpected_eof_while_reading - ), - "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( - cryptography_has_ssl_op_ignore_unexpected_eof - ), - "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, - "Cryptography_HAS_SSL_GET0_GROUP_NAME": ( - cryptography_has_ssl_get0_group_name - ), -} diff --git a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py deleted file mode 100644 index 4494c71..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py +++ /dev/null @@ -1,137 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import os -import sys -import threading -import types -import typing -import warnings -from collections.abc import Callable - -import cryptography -from cryptography.exceptions import InternalError -from cryptography.hazmat.bindings._rust import _openssl, openssl -from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES -from cryptography.utils import CryptographyDeprecationWarning - - -def _openssl_assert(ok: bool) -> None: - if not ok: - errors = openssl.capture_error_stack() - - raise InternalError( - "Unknown OpenSSL error. This error is commonly encountered when " - "another library is not cleaning up the OpenSSL error stack. If " - "you are using cryptography with another library that uses " - "OpenSSL try disabling it before reporting a bug. Otherwise " - "please file an issue at https://github.com/pyca/cryptography/" - "issues with information on how to reproduce " - f"this. 
({errors!r})", - errors, - ) - - -def build_conditional_library( - lib: typing.Any, - conditional_names: dict[str, Callable[[], list[str]]], -) -> typing.Any: - conditional_lib = types.ModuleType("lib") - conditional_lib._original_lib = lib # type: ignore[attr-defined] - excluded_names = set() - for condition, names_cb in conditional_names.items(): - if not getattr(lib, condition): - excluded_names.update(names_cb()) - - for attr in dir(lib): - if attr not in excluded_names: - setattr(conditional_lib, attr, getattr(lib, attr)) - - return conditional_lib - - -class Binding: - """ - OpenSSL API wrapper. - """ - - lib: typing.ClassVar[typing.Any] = None - ffi = _openssl.ffi - _lib_loaded = False - _init_lock = threading.Lock() - - def __init__(self) -> None: - self._ensure_ffi_initialized() - - @classmethod - def _ensure_ffi_initialized(cls) -> None: - with cls._init_lock: - if not cls._lib_loaded: - cls.lib = build_conditional_library( - _openssl.lib, CONDITIONAL_NAMES - ) - cls._lib_loaded = True - - @classmethod - def init_static_locks(cls) -> None: - cls._ensure_ffi_initialized() - - -def _verify_package_version(version: str) -> None: - # Occasionally we run into situations where the version of the Python - # package does not match the version of the shared object that is loaded. - # This may occur in environments where multiple versions of cryptography - # are installed and available in the python path. To avoid errors cropping - # up later this code checks that the currently imported package and the - # shared object that were loaded have the same version and raise an - # ImportError if they do not - so_package_version = _openssl.ffi.string( - _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION - ) - if version.encode("ascii") != so_package_version: - raise ImportError( - "The version of cryptography does not match the loaded " - "shared object. This can happen if you have multiple copies of " - "cryptography installed in your Python path. Please try creating " - "a new virtual environment to resolve this issue. " - f"Loaded python version: {version}, " - f"shared object version: {so_package_version}" - ) - - _openssl_assert( - _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), - ) - - -_verify_package_version(cryptography.__version__) - -Binding.init_static_locks() - -if ( - sys.platform == "win32" - and os.environ.get("PROCESSOR_ARCHITEW6432") is not None -): - warnings.warn( - "You are using cryptography on a 32-bit Python on a 64-bit Windows " - "Operating System. Cryptography will be significantly faster if you " - "switch to using a 64-bit Python.", - UserWarning, - stacklevel=2, - ) - -if ( - not openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not openssl.CRYPTOGRAPHY_IS_AWSLC - and not openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER -): - warnings.warn( - "You are using OpenSSL < 3.0. Support for OpenSSL < 3.0 is deprecated " - "and will be removed in the next release. Please upgrade to OpenSSL " - "3.0 or later.", - CryptographyDeprecationWarning, - stacklevel=2, - ) diff --git a/venv/Lib/site-packages/cryptography/hazmat/decrepit/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/decrepit/__init__.py deleted file mode 100644 index 41d7318..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/decrepit/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations diff --git a/venv/Lib/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a6133c8..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py deleted file mode 100644 index 41d7318..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations diff --git a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index cdcfb4c..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc deleted file mode 100644 index cee1f27..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py deleted file mode 100644 index 072a991..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py +++ /dev/null @@ -1,112 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, - _verify_key_size, -) - - -class ARC4(CipherAlgorithm): - name = "RC4" - key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class TripleDES(BlockCipherAlgorithm): - name = "3DES" - block_size = 64 - key_sizes = frozenset([64, 128, 192]) - - def __init__(self, key: bytes): - if len(key) == 8: - key += key + key - elif len(key) == 16: - key += key[:8] - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -# Not actually supported, marker for tests -class _DES: - key_size = 64 - - -class Blowfish(BlockCipherAlgorithm): - name = "Blowfish" - block_size = 64 - key_sizes = frozenset(range(32, 449, 8)) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class CAST5(BlockCipherAlgorithm): - name = "CAST5" - block_size = 64 - key_sizes = frozenset(range(40, 129, 8)) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class SEED(BlockCipherAlgorithm): - name = "SEED" - block_size = 128 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class IDEA(BlockCipherAlgorithm): - name = "IDEA" - block_size = 64 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -# This class only allows RC2 with a 128-bit key. No support for -# effective key bits or other key sizes is provided. -class RC2(BlockCipherAlgorithm): - name = "RC2" - block_size = 64 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
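
[Editor's aside, not part of the diff: the hunk above removes cryptography's "decrepit" cipher algorithms (ARC4, TripleDES, Blowfish, CAST5, SEED, IDEA, RC2). These classes only describe the algorithm and validate key sizes via `_verify_key_size`; they are used through the generic `Cipher` interface. A minimal sketch, assuming a cryptography release that ships the `hazmat.decrepit` package, as this venv evidently did:]

```python
# Illustrative sketch only (not part of the diff). Shows how a legacy
# algorithm class from hazmat.decrepit plugs into the generic Cipher API.
import os

from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES
from cryptography.hazmat.primitives.ciphers import Cipher, modes

key = os.urandom(24)   # 192-bit key; 64/128-bit keys are expanded internally
iv = os.urandom(8)     # 3DES block size is 64 bits

cipher = Cipher(TripleDES(key), modes.CBC(iv))
encryptor = cipher.encryptor()
# Plaintext length is a multiple of the 64-bit block size, so no padding here.
ciphertext = encryptor.update(b"sixteen byte msg") + encryptor.finalize()

decryptor = cipher.decryptor()
assert decryptor.update(ciphertext) + decryptor.finalize() == b"sixteen byte msg"
```

[A wrong key length raises `ValueError` from `_verify_key_size`, e.g. a 10-byte key for TripleDES.]
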
diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 277f903..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc deleted file mode 100644 index 5633505..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc deleted file mode 100644 index 11dc2c4..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc deleted file mode 100644 index efaf465..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc deleted file mode 100644 index e342eb4..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc deleted file mode 100644 index 7177289..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc deleted file mode 100644 index 2f8b991..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc deleted file mode 100644 index e83e0dd..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc deleted file mode 100644 index 7a8b133..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc 
b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc deleted file mode 100644 index 01fecc4..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc deleted file mode 100644 index 0b97348..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/_asymmetric.py deleted file mode 100644 index ea55ffd..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/_asymmetric.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -# This exists to break an import cycle. It is normally accessible from the -# asymmetric padding module. - - -class AsymmetricPadding(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this padding (e.g. "PSS", "PKCS1"). - """ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py deleted file mode 100644 index 305a9fd..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py +++ /dev/null @@ -1,60 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils - -# This exists to break an import cycle. It is normally accessible from the -# ciphers module. - - -class CipherAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this mode (e.g. "AES", "Camellia"). - """ - - @property - @abc.abstractmethod - def key_sizes(self) -> frozenset[int]: - """ - Valid key sizes for this algorithm in bits - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The size of the key being used as an integer in bits (e.g. 128, 256). - """ - - -class BlockCipherAlgorithm(CipherAlgorithm): - key: utils.Buffer - - @property - @abc.abstractmethod - def block_size(self) -> int: - """ - The size of a block as an integer in bits (e.g. 64, 128). - """ - - -def _verify_key_size( - algorithm: CipherAlgorithm, key: utils.Buffer -) -> utils.Buffer: - # Verify that the key is instance of bytes - utils._check_byteslike("key", key) - - # Verify that the key size matches the expected key size - if len(key) * 8 not in algorithm.key_sizes: - raise ValueError( - f"Invalid key size ({len(key) * 8}) for {algorithm.name}." 
- ) - return key diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/_serialization.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/_serialization.py deleted file mode 100644 index e998865..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/_serialization.py +++ /dev/null @@ -1,168 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.hazmat.primitives.hashes import HashAlgorithm - -# This exists to break an import cycle. These classes are normally accessible -# from the serialization module. - - -class PBES(utils.Enum): - PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" - PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" - - -class Encoding(utils.Enum): - PEM = "PEM" - DER = "DER" - OpenSSH = "OpenSSH" - Raw = "Raw" - X962 = "ANSI X9.62" - SMIME = "S/MIME" - - -class PrivateFormat(utils.Enum): - PKCS8 = "PKCS8" - TraditionalOpenSSL = "TraditionalOpenSSL" - Raw = "Raw" - OpenSSH = "OpenSSH" - PKCS12 = "PKCS12" - - def encryption_builder(self) -> KeySerializationEncryptionBuilder: - if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): - raise ValueError( - "encryption_builder only supported with PrivateFormat.OpenSSH" - " and PrivateFormat.PKCS12" - ) - return KeySerializationEncryptionBuilder(self) - - -class PublicFormat(utils.Enum): - SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" - PKCS1 = "Raw PKCS#1" - OpenSSH = "OpenSSH" - Raw = "Raw" - CompressedPoint = "X9.62 Compressed Point" - UncompressedPoint = "X9.62 Uncompressed Point" - - -class ParameterFormat(utils.Enum): - PKCS3 = "PKCS3" - - -class KeySerializationEncryption(metaclass=abc.ABCMeta): - pass - - -class BestAvailableEncryption(KeySerializationEncryption): - def __init__(self, password: bytes): - if not isinstance(password, bytes) or len(password) == 0: - raise ValueError("Password must be 1 or more bytes.") - - self.password = password - - -class NoEncryption(KeySerializationEncryption): - pass - - -class KeySerializationEncryptionBuilder: - def __init__( - self, - format: PrivateFormat, - *, - _kdf_rounds: int | None = None, - _hmac_hash: HashAlgorithm | None = None, - _key_cert_algorithm: PBES | None = None, - ) -> None: - self._format = format - - self._kdf_rounds = _kdf_rounds - self._hmac_hash = _hmac_hash - self._key_cert_algorithm = _key_cert_algorithm - - def kdf_rounds(self, rounds: int) -> KeySerializationEncryptionBuilder: - if self._kdf_rounds is not None: - raise ValueError("kdf_rounds already set") - - if not isinstance(rounds, int): - raise TypeError("kdf_rounds must be an integer") - - if rounds < 1: - raise ValueError("kdf_rounds must be a positive integer") - - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=rounds, - _hmac_hash=self._hmac_hash, - _key_cert_algorithm=self._key_cert_algorithm, - ) - - def hmac_hash( - self, algorithm: HashAlgorithm - ) -> KeySerializationEncryptionBuilder: - if self._format is not PrivateFormat.PKCS12: - raise TypeError( - "hmac_hash only supported with PrivateFormat.PKCS12" - ) - - if self._hmac_hash is not None: - raise ValueError("hmac_hash already set") - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=self._kdf_rounds, - _hmac_hash=algorithm, - 
_key_cert_algorithm=self._key_cert_algorithm, - ) - - def key_cert_algorithm( - self, algorithm: PBES - ) -> KeySerializationEncryptionBuilder: - if self._format is not PrivateFormat.PKCS12: - raise TypeError( - "key_cert_algorithm only supported with PrivateFormat.PKCS12" - ) - if self._key_cert_algorithm is not None: - raise ValueError("key_cert_algorithm already set") - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=self._kdf_rounds, - _hmac_hash=self._hmac_hash, - _key_cert_algorithm=algorithm, - ) - - def build(self, password: bytes) -> KeySerializationEncryption: - if not isinstance(password, bytes) or len(password) == 0: - raise ValueError("Password must be 1 or more bytes.") - - return _KeySerializationEncryption( - self._format, - password, - kdf_rounds=self._kdf_rounds, - hmac_hash=self._hmac_hash, - key_cert_algorithm=self._key_cert_algorithm, - ) - - -class _KeySerializationEncryption(KeySerializationEncryption): - def __init__( - self, - format: PrivateFormat, - password: bytes, - *, - kdf_rounds: int | None, - hmac_hash: HashAlgorithm | None, - key_cert_algorithm: PBES | None, - ): - self._format = format - self.password = password - - self._kdf_rounds = kdf_rounds - self._hmac_hash = hmac_hash - self._key_cert_algorithm = key_cert_algorithm diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
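
[Editor's aside, not part of the diff: the `_serialization.py` hunk above removes `KeySerializationEncryptionBuilder`, which is normally reached through `PrivateFormat.encryption_builder()` and consumed when serializing keys. A minimal sketch of the builder pattern for a PKCS#12 bundle, assuming the cryptography package is installed; the friendly name and password are placeholders:]

```python
# Illustrative sketch only (not part of the diff). Each builder call returns a
# new builder; build() yields the KeySerializationEncryption object.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import PrivateFormat, pkcs12

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

encryption = (
    PrivateFormat.PKCS12.encryption_builder()
    .kdf_rounds(50_000)
    .key_cert_algorithm(pkcs12.PBES.PBESv2SHA256AndAES256CBC)
    .hmac_hash(hashes.SHA256())
    .build(b"correct horse battery staple")
)

# Serialize just the key into an encrypted PKCS#12 blob (cert/cas omitted).
p12_bytes = pkcs12.serialize_key_and_certificates(
    b"friendly-name", key, cert=None, cas=None, encryption_algorithm=encryption
)
```

[Note the constraints visible in the deleted code: `hmac_hash` and `key_cert_algorithm` are only accepted for `PrivateFormat.PKCS12`, and each setter may be called at most once.]
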
diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 5ea274d..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc deleted file mode 100644 index 2bc5520..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc deleted file mode 100644 index 606fe9b..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc deleted file mode 100644 index 4250305..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc deleted file mode 100644 index 43877c9..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc deleted file mode 100644 index 86a3780..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc deleted file mode 100644 index fa7ddcf..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc deleted file mode 100644 index 86d785c..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc deleted file mode 100644 index 8cf1fb5..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc and /dev/null differ 
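
[Editor's aside, not part of the diff: the hunks that follow remove the asymmetric-key modules themselves, starting with `dh.py`, which defines the `DHParameters`/`DHPrivateKey`/`DHPublicKey` interfaces. A minimal key-exchange sketch using that API, assuming the cryptography package is installed; parameter sizes and the HKDF `info` label are illustrative choices:]

```python
# Illustrative sketch only (not part of the diff). Both peers must share the
# same DH parameters; exchange() returns the raw shared secret, which is then
# fed through a KDF before use as a symmetric key.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

parameters = dh.generate_parameters(generator=2, key_size=2048)  # slow; do once

server_key = parameters.generate_private_key()
client_key = parameters.generate_private_key()

shared = client_key.exchange(server_key.public_key())
derived = HKDF(
    algorithm=hashes.SHA256(), length=32, salt=None, info=b"handshake data"
).derive(shared)
```
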
diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index c5deb33..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc deleted file mode 100644 index 2052bc8..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc deleted file mode 100644 index 7f87c0e..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py deleted file mode 100644 index 1822e99..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py +++ /dev/null @@ -1,147 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - -generate_parameters = rust_openssl.dh.generate_parameters - - -DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers -DHPublicNumbers = rust_openssl.dh.DHPublicNumbers -DHParameterNumbers = rust_openssl.dh.DHParameterNumbers - - -class DHParameters(metaclass=abc.ABCMeta): - @abc.abstractmethod - def generate_private_key(self) -> DHPrivateKey: - """ - Generates and returns a DHPrivateKey. - """ - - @abc.abstractmethod - def parameter_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.ParameterFormat, - ) -> bytes: - """ - Returns the parameters serialized as bytes. - """ - - @abc.abstractmethod - def parameter_numbers(self) -> DHParameterNumbers: - """ - Returns a DHParameterNumbers. - """ - - -DHParametersWithSerialization = DHParameters -DHParameters.register(rust_openssl.dh.DHParameters) - - -class DHPublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self) -> DHParameters: - """ - The DHParameters object associated with this public key. - """ - - @abc.abstractmethod - def public_numbers(self) -> DHPublicNumbers: - """ - Returns a DHPublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> DHPublicKey: - """ - Returns a copy. 
- """ - - -DHPublicKeyWithSerialization = DHPublicKey -DHPublicKey.register(rust_openssl.dh.DHPublicKey) - - -class DHPrivateKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self) -> DHPublicKey: - """ - The DHPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self) -> DHParameters: - """ - The DHParameters object associated with this private key. - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: DHPublicKey) -> bytes: - """ - Given peer's DHPublicKey, carry out the key exchange and - return shared key as bytes. - """ - - @abc.abstractmethod - def private_numbers(self) -> DHPrivateNumbers: - """ - Returns a DHPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> DHPrivateKey: - """ - Returns a copy. - """ - - -DHPrivateKeyWithSerialization = DHPrivateKey -DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py deleted file mode 100644 index 21d78ba..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py +++ /dev/null @@ -1,167 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.utils import Buffer - - -class DSAParameters(metaclass=abc.ABCMeta): - @abc.abstractmethod - def generate_private_key(self) -> DSAPrivateKey: - """ - Generates and returns a DSAPrivateKey. - """ - - @abc.abstractmethod - def parameter_numbers(self) -> DSAParameterNumbers: - """ - Returns a DSAParameterNumbers. - """ - - -DSAParametersWithNumbers = DSAParameters -DSAParameters.register(rust_openssl.dsa.DSAParameters) - - -class DSAPrivateKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self) -> DSAPublicKey: - """ - The DSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self) -> DSAParameters: - """ - The DSAParameters object associated with this private key. - """ - - @abc.abstractmethod - def sign( - self, - data: Buffer, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> bytes: - """ - Signs the data - """ - - @abc.abstractmethod - def private_numbers(self) -> DSAPrivateNumbers: - """ - Returns a DSAPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. 
- """ - - @abc.abstractmethod - def __copy__(self) -> DSAPrivateKey: - """ - Returns a copy. - """ - - -DSAPrivateKeyWithSerialization = DSAPrivateKey -DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) - - -class DSAPublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self) -> DSAParameters: - """ - The DSAParameters object associated with this public key. - """ - - @abc.abstractmethod - def public_numbers(self) -> DSAPublicNumbers: - """ - Returns a DSAPublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: Buffer, - data: Buffer, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> DSAPublicKey: - """ - Returns a copy. - """ - - -DSAPublicKeyWithSerialization = DSAPublicKey -DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) - -DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers -DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers -DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers - - -def generate_parameters( - key_size: int, backend: typing.Any = None -) -> DSAParameters: - if key_size not in (1024, 2048, 3072, 4096): - raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") - - return rust_openssl.dsa.generate_parameters(key_size) - - -def generate_private_key( - key_size: int, backend: typing.Any = None -) -> DSAPrivateKey: - parameters = generate_parameters(key_size) - return parameters.generate_private_key() diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py deleted file mode 100644 index a13d982..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py +++ /dev/null @@ -1,447 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc -import typing - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat._oid import ObjectIdentifier -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class EllipticCurveOID: - SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") - SECP224R1 = ObjectIdentifier("1.3.132.0.33") - SECP256K1 = ObjectIdentifier("1.3.132.0.10") - SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") - SECP384R1 = ObjectIdentifier("1.3.132.0.34") - SECP521R1 = ObjectIdentifier("1.3.132.0.35") - BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") - BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") - BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") - SECT163K1 = ObjectIdentifier("1.3.132.0.1") - SECT163R2 = ObjectIdentifier("1.3.132.0.15") - SECT233K1 = ObjectIdentifier("1.3.132.0.26") - SECT233R1 = ObjectIdentifier("1.3.132.0.27") - SECT283K1 = ObjectIdentifier("1.3.132.0.16") - SECT283R1 = ObjectIdentifier("1.3.132.0.17") - SECT409K1 = ObjectIdentifier("1.3.132.0.36") - SECT409R1 = ObjectIdentifier("1.3.132.0.37") - SECT571K1 = ObjectIdentifier("1.3.132.0.38") - SECT571R1 = ObjectIdentifier("1.3.132.0.39") - - -class EllipticCurve(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - The name of the curve. e.g. secp256r1. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @property - @abc.abstractmethod - def group_order(self) -> int: - """ - The order of the curve's group. - """ - - -class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def algorithm( - self, - ) -> asym_utils.Prehashed | hashes.HashAlgorithm: - """ - The digest algorithm used with this signature. - """ - - -class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def exchange( - self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey - ) -> bytes: - """ - Performs a key exchange operation using the provided algorithm with the - provided peer's public key. - """ - - @abc.abstractmethod - def public_key(self) -> EllipticCurvePublicKey: - """ - The EllipticCurvePublicKey for this private key. - """ - - @property - @abc.abstractmethod - def curve(self) -> EllipticCurve: - """ - The EllipticCurve that this key is on. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @abc.abstractmethod - def sign( - self, - data: utils.Buffer, - signature_algorithm: EllipticCurveSignatureAlgorithm, - ) -> bytes: - """ - Signs the data - """ - - @abc.abstractmethod - def private_numbers(self) -> EllipticCurvePrivateNumbers: - """ - Returns an EllipticCurvePrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> EllipticCurvePrivateKey: - """ - Returns a copy. 
- """ - - -EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey -EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) - - -class EllipticCurvePublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def curve(self) -> EllipticCurve: - """ - The EllipticCurve that this key is on. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @abc.abstractmethod - def public_numbers(self) -> EllipticCurvePublicNumbers: - """ - Returns an EllipticCurvePublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: utils.Buffer, - data: utils.Buffer, - signature_algorithm: EllipticCurveSignatureAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @classmethod - def from_encoded_point( - cls, curve: EllipticCurve, data: bytes - ) -> EllipticCurvePublicKey: - utils._check_bytes("data", data) - - if len(data) == 0: - raise ValueError("data must not be an empty byte string") - - if data[0] not in [0x02, 0x03, 0x04]: - raise ValueError("Unsupported elliptic curve point type") - - return rust_openssl.ec.from_public_bytes(curve, data) - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> EllipticCurvePublicKey: - """ - Returns a copy. - """ - - -EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey -EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) - -EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers -EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers - - -class SECT571R1(EllipticCurve): - name = "sect571r1" - key_size = 570 - group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E8382E9BB2FE84E47 # noqa: E501 - - -class SECT409R1(EllipticCurve): - name = "sect409r1" - key_size = 409 - group_order = 0x10000000000000000000000000000000000000000000000000001E2AAD6A612F33307BE5FA47C3C9E052F838164CD37D9A21173 # noqa: E501 - - -class SECT283R1(EllipticCurve): - name = "sect283r1" - key_size = 283 - group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7CEFADB307 # noqa: E501 - - -class SECT233R1(EllipticCurve): - name = "sect233r1" - key_size = 233 - group_order = 0x1000000000000000000000000000013E974E72F8A6922031D2603CFE0D7 - - -class SECT163R2(EllipticCurve): - name = "sect163r2" - key_size = 163 - group_order = 0x40000000000000000000292FE77E70C12A4234C33 - - -class SECT571K1(EllipticCurve): - name = "sect571k1" - key_size = 571 - group_order = 0x20000000000000000000000000000000000000000000000000000000000000000000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB45CFE778F637C1001 # noqa: E501 - - -class SECT409K1(EllipticCurve): - name = "sect409k1" - key_size = 409 - group_order = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF # noqa: E501 - - -class SECT283K1(EllipticCurve): - name = "sect283k1" - key_size = 283 - group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E061E163C61 # noqa: E501 - - -class SECT233K1(EllipticCurve): - name = "sect233k1" - key_size = 233 - group_order = 
0x8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF - - -class SECT163K1(EllipticCurve): - name = "sect163k1" - key_size = 163 - group_order = 0x4000000000000000000020108A2E0CC0D99F8A5EF - - -class SECP521R1(EllipticCurve): - name = "secp521r1" - key_size = 521 - group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 # noqa: E501 - - -class SECP384R1(EllipticCurve): - name = "secp384r1" - key_size = 384 - group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 # noqa: E501 - - -class SECP256R1(EllipticCurve): - name = "secp256r1" - key_size = 256 - group_order = ( - 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 - ) - - -class SECP256K1(EllipticCurve): - name = "secp256k1" - key_size = 256 - group_order = ( - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 - ) - - -class SECP224R1(EllipticCurve): - name = "secp224r1" - key_size = 224 - group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D - - -class SECP192R1(EllipticCurve): - name = "secp192r1" - key_size = 192 - group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831 - - -class BrainpoolP256R1(EllipticCurve): - name = "brainpoolP256r1" - key_size = 256 - group_order = ( - 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 - ) - - -class BrainpoolP384R1(EllipticCurve): - name = "brainpoolP384r1" - key_size = 384 - group_order = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7CF3AB6AF6B7FC3103B883202E9046565 # noqa: E501 - - -class BrainpoolP512R1(EllipticCurve): - name = "brainpoolP512r1" - key_size = 512 - group_order = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069 # noqa: E501 - - -_CURVE_TYPES: dict[str, EllipticCurve] = { - "prime192v1": SECP192R1(), - "prime256v1": SECP256R1(), - "secp192r1": SECP192R1(), - "secp224r1": SECP224R1(), - "secp256r1": SECP256R1(), - "secp384r1": SECP384R1(), - "secp521r1": SECP521R1(), - "secp256k1": SECP256K1(), - "sect163k1": SECT163K1(), - "sect233k1": SECT233K1(), - "sect283k1": SECT283K1(), - "sect409k1": SECT409K1(), - "sect571k1": SECT571K1(), - "sect163r2": SECT163R2(), - "sect233r1": SECT233R1(), - "sect283r1": SECT283R1(), - "sect409r1": SECT409R1(), - "sect571r1": SECT571R1(), - "brainpoolP256r1": BrainpoolP256R1(), - "brainpoolP384r1": BrainpoolP384R1(), - "brainpoolP512r1": BrainpoolP512R1(), -} - - -class ECDSA(EllipticCurveSignatureAlgorithm): - def __init__( - self, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - deterministic_signing: bool = False, - ): - from cryptography.hazmat.backends.openssl.backend import backend - - if ( - deterministic_signing - and not backend.ecdsa_deterministic_supported() - ): - raise UnsupportedAlgorithm( - "ECDSA with deterministic signature (RFC 6979) is not " - "supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - self._algorithm = algorithm - self._deterministic_signing = deterministic_signing - - @property - def algorithm( - self, - ) -> asym_utils.Prehashed | hashes.HashAlgorithm: - return self._algorithm - - @property - def deterministic_signing( - self, - ) -> bool: - return self._deterministic_signing - - -generate_private_key = rust_openssl.ec.generate_private_key - - -def derive_private_key( - private_value: int, - curve: EllipticCurve, - backend: typing.Any = 
None, -) -> EllipticCurvePrivateKey: - if not isinstance(private_value, int): - raise TypeError("private_value must be an integer type.") - - if private_value <= 0: - raise ValueError("private_value must be a positive integer.") - - return rust_openssl.ec.derive_private_key(private_value, curve) - - -class ECDH: - pass - - -_OID_TO_CURVE = { - EllipticCurveOID.SECP192R1: SECP192R1, - EllipticCurveOID.SECP224R1: SECP224R1, - EllipticCurveOID.SECP256K1: SECP256K1, - EllipticCurveOID.SECP256R1: SECP256R1, - EllipticCurveOID.SECP384R1: SECP384R1, - EllipticCurveOID.SECP521R1: SECP521R1, - EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, - EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, - EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, - EllipticCurveOID.SECT163K1: SECT163K1, - EllipticCurveOID.SECT163R2: SECT163R2, - EllipticCurveOID.SECT233K1: SECT233K1, - EllipticCurveOID.SECT233R1: SECT233R1, - EllipticCurveOID.SECT283K1: SECT283K1, - EllipticCurveOID.SECT283R1: SECT283R1, - EllipticCurveOID.SECT409K1: SECT409K1, - EllipticCurveOID.SECT409R1: SECT409R1, - EllipticCurveOID.SECT571K1: SECT571K1, - EllipticCurveOID.SECT571R1: SECT571R1, -} - - -def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: - try: - return _OID_TO_CURVE[oid] - except KeyError: - raise LookupError( - "The provided object identifier has no matching elliptic " - "curve class" - ) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py deleted file mode 100644 index e576dc9..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py +++ /dev/null @@ -1,129 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class Ed25519PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def verify(self, signature: Buffer, data: Buffer) -> None: - """ - Verify the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> Ed25519PublicKey: - """ - Returns a copy. 
- """ - - -Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) - - -class Ed25519PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> Ed25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> Ed25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> Ed25519PublicKey: - """ - The Ed25519PublicKey derived from the private key. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def sign(self, data: Buffer) -> bytes: - """ - Signs the data. - """ - - @abc.abstractmethod - def __copy__(self) -> Ed25519PrivateKey: - """ - Returns a copy. - """ - - -Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py deleted file mode 100644 index 89db209..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py +++ /dev/null @@ -1,131 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class Ed448PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def verify(self, signature: Buffer, data: Buffer) -> None: - """ - Verify the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. 
- """ - - @abc.abstractmethod - def __copy__(self) -> Ed448PublicKey: - """ - Returns a copy. - """ - - -if hasattr(rust_openssl, "ed448"): - Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) - - -class Ed448PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> Ed448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> Ed448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> Ed448PublicKey: - """ - The Ed448PublicKey derived from the private key. - """ - - @abc.abstractmethod - def sign(self, data: Buffer) -> bytes: - """ - Signs the data. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def __copy__(self) -> Ed448PrivateKey: - """ - Returns a copy. - """ - - -if hasattr(rust_openssl, "x448"): - Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py deleted file mode 100644 index 5121a28..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py +++ /dev/null @@ -1,111 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives._asymmetric import ( - AsymmetricPadding as AsymmetricPadding, -) -from cryptography.hazmat.primitives.asymmetric import rsa - - -class PKCS1v15(AsymmetricPadding): - name = "EMSA-PKCS1-v1_5" - - -class _MaxLength: - "Sentinel value for `MAX_LENGTH`." - - -class _Auto: - "Sentinel value for `AUTO`." - - -class _DigestLength: - "Sentinel value for `DIGEST_LENGTH`." 
- - -class PSS(AsymmetricPadding): - MAX_LENGTH = _MaxLength() - AUTO = _Auto() - DIGEST_LENGTH = _DigestLength() - name = "EMSA-PSS" - _salt_length: int | _MaxLength | _Auto | _DigestLength - - def __init__( - self, - mgf: MGF, - salt_length: int | _MaxLength | _Auto | _DigestLength, - ) -> None: - self._mgf = mgf - - if not isinstance( - salt_length, (int, _MaxLength, _Auto, _DigestLength) - ): - raise TypeError( - "salt_length must be an integer, MAX_LENGTH, " - "DIGEST_LENGTH, or AUTO" - ) - - if isinstance(salt_length, int) and salt_length < 0: - raise ValueError("salt_length must be zero or greater.") - - self._salt_length = salt_length - - @property - def mgf(self) -> MGF: - return self._mgf - - -class OAEP(AsymmetricPadding): - name = "EME-OAEP" - - def __init__( - self, - mgf: MGF, - algorithm: hashes.HashAlgorithm, - label: bytes | None, - ): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of hashes.HashAlgorithm.") - - self._mgf = mgf - self._algorithm = algorithm - self._label = label - - @property - def algorithm(self) -> hashes.HashAlgorithm: - return self._algorithm - - @property - def mgf(self) -> MGF: - return self._mgf - - -class MGF(metaclass=abc.ABCMeta): - _algorithm: hashes.HashAlgorithm - - -class MGF1(MGF): - def __init__(self, algorithm: hashes.HashAlgorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of hashes.HashAlgorithm.") - - self._algorithm = algorithm - - -def calculate_max_pss_salt_length( - key: rsa.RSAPrivateKey | rsa.RSAPublicKey, - hash_algorithm: hashes.HashAlgorithm, -) -> int: - if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): - raise TypeError("key must be an RSA public or private key") - # bit length - 1 per RFC 3447 - emlen = (key.key_size + 6) // 8 - salt_length = emlen - hash_algorithm.digest_size - 2 - assert salt_length >= 0 - return salt_length diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py deleted file mode 100644 index f94812e..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py +++ /dev/null @@ -1,285 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import random -import typing -from math import gcd - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class RSAPrivateKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: - """ - Decrypts the provided ciphertext. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_key(self) -> RSAPublicKey: - """ - The RSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def sign( - self, - data: bytes, - padding: AsymmetricPadding, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> bytes: - """ - Signs the data. 
- """ - - @abc.abstractmethod - def private_numbers(self) -> RSAPrivateNumbers: - """ - Returns an RSAPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> RSAPrivateKey: - """ - Returns a copy. - """ - - -RSAPrivateKeyWithSerialization = RSAPrivateKey -RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) - - -class RSAPublicKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: - """ - Encrypts the given plaintext. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_numbers(self) -> RSAPublicNumbers: - """ - Returns an RSAPublicNumbers - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: bytes, - data: bytes, - padding: AsymmetricPadding, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @abc.abstractmethod - def recover_data_from_signature( - self, - signature: bytes, - padding: AsymmetricPadding, - algorithm: hashes.HashAlgorithm | None, - ) -> bytes: - """ - Recovers the original data from the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> RSAPublicKey: - """ - Returns a copy. - """ - - -RSAPublicKeyWithSerialization = RSAPublicKey -RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) - -RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers -RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers - - -def generate_private_key( - public_exponent: int, - key_size: int, - backend: typing.Any = None, -) -> RSAPrivateKey: - _verify_rsa_parameters(public_exponent, key_size) - return rust_openssl.rsa.generate_private_key(public_exponent, key_size) - - -def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: - if public_exponent not in (3, 65537): - raise ValueError( - "public_exponent must be either 3 (for legacy compatibility) or " - "65537. Almost everyone should choose 65537 here!" - ) - - if key_size < 1024: - raise ValueError("key_size must be at least 1024-bits.") - - -def _modinv(e: int, m: int) -> int: - """ - Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 - """ - x1, x2 = 1, 0 - a, b = e, m - while b > 0: - q, r = divmod(a, b) - xn = x1 - q * x2 - a, b, x1, x2 = b, r, x2, xn - return x1 % m - - -def rsa_crt_iqmp(p: int, q: int) -> int: - """ - Compute the CRT (q ** -1) % p value from RSA primes p and q. - """ - if p <= 1 or q <= 1: - raise ValueError("Values can't be <= 1") - return _modinv(q, p) - - -def rsa_crt_dmp1(private_exponent: int, p: int) -> int: - """ - Compute the CRT private_exponent % (p - 1) value from the RSA - private_exponent (d) and p. 
- """ - if private_exponent <= 1 or p <= 1: - raise ValueError("Values can't be <= 1") - return private_exponent % (p - 1) - - -def rsa_crt_dmq1(private_exponent: int, q: int) -> int: - """ - Compute the CRT private_exponent % (q - 1) value from the RSA - private_exponent (d) and q. - """ - if private_exponent <= 1 or q <= 1: - raise ValueError("Values can't be <= 1") - return private_exponent % (q - 1) - - -def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: - """ - Compute the RSA private_exponent (d) given the public exponent (e) - and the RSA primes p and q. - - This uses the Carmichael totient function to generate the - smallest possible working value of the private exponent. - """ - # This lambda_n is the Carmichael totient function. - # The original RSA paper uses the Euler totient function - # here: phi_n = (p - 1) * (q - 1) - # Either version of the private exponent will work, but the - # one generated by the older formulation may be larger - # than necessary. (lambda_n always divides phi_n) - # - # TODO: Replace with lcm(p - 1, q - 1) once the minimum - # supported Python version is >= 3.9. - if e <= 1 or p <= 1 or q <= 1: - raise ValueError("Values can't be <= 1") - lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) - return _modinv(e, lambda_n) - - -# Controls the number of iterations rsa_recover_prime_factors will perform -# to obtain the prime factors. -_MAX_RECOVERY_ATTEMPTS = 500 - - -def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: - """ - Compute factors p and q from the private exponent d. We assume that n has - no more than two factors. This function is adapted from code in PyCrypto. - """ - # reject invalid values early - if d <= 1 or e <= 1: - raise ValueError("d, e can't be <= 1") - if 17 != pow(17, e * d, n): - raise ValueError("n, d, e don't match") - # See 8.2.2(i) in Handbook of Applied Cryptography. - ktot = d * e - 1 - # The quantity d*e-1 is a multiple of phi(n), even, - # and can be represented as t*2^s. - t = ktot - while t % 2 == 0: - t = t // 2 - # Cycle through all multiplicative inverses in Zn. - # The algorithm is non-deterministic, but there is a 50% chance - # any candidate a leads to successful factoring. - # See "Digitalized Signatures and Public Key Functions as Intractable - # as Factorization", M. Rabin, 1979 - spotted = False - tries = 0 - while not spotted and tries < _MAX_RECOVERY_ATTEMPTS: - a = random.randint(2, n - 1) - tries += 1 - k = t - # Cycle through all values a^{t*2^i}=a^k - while k < ktot: - cand = pow(a, k, n) - # Check if a^k is a non-trivial root of unity (mod n) - if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: - # We have found a number such that (cand-1)(cand+1)=0 (mod n). - # Either of the terms divides n. - p = gcd(cand + 1, n) - spotted = True - break - k *= 2 - if not spotted: - raise ValueError("Unable to compute factors p and q from exponent d.") - # Found ! - q, r = divmod(n, p) - assert r == 0 - p, q = sorted((p, q), reverse=True) - return (p, q) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/types.py deleted file mode 100644 index 1fe4eaf..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/types.py +++ /dev/null @@ -1,111 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.hazmat.primitives.asymmetric import ( - dh, - dsa, - ec, - ed448, - ed25519, - rsa, - x448, - x25519, -) - -# Every asymmetric key type -PublicKeyTypes = typing.Union[ - dh.DHPublicKey, - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, -] -PUBLIC_KEY_TYPES = PublicKeyTypes -utils.deprecated( - PUBLIC_KEY_TYPES, - __name__, - "Use PublicKeyTypes instead", - utils.DeprecatedIn40, - name="PUBLIC_KEY_TYPES", -) -# Every asymmetric key type -PrivateKeyTypes = typing.Union[ - dh.DHPrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - x25519.X25519PrivateKey, - x448.X448PrivateKey, -] -PRIVATE_KEY_TYPES = PrivateKeyTypes -utils.deprecated( - PRIVATE_KEY_TYPES, - __name__, - "Use PrivateKeyTypes instead", - utils.DeprecatedIn40, - name="PRIVATE_KEY_TYPES", -) -# Just the key types we allow to be used for x509 signing. This mirrors -# the certificate public key types -CertificateIssuerPrivateKeyTypes = typing.Union[ - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, -] -CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes -utils.deprecated( - CERTIFICATE_PRIVATE_KEY_TYPES, - __name__, - "Use CertificateIssuerPrivateKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_PRIVATE_KEY_TYPES", -) -# Just the key types we allow to be used for x509 signing. This mirrors -# the certificate private key types -CertificateIssuerPublicKeyTypes = typing.Union[ - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, -] -CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes -utils.deprecated( - CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, - __name__, - "Use CertificateIssuerPublicKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", -) -# This type removes DHPublicKey. x448/x25519 can be a public key -# but cannot be used in signing so they are allowed here. -CertificatePublicKeyTypes = typing.Union[ - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, -] -CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes -utils.deprecated( - CERTIFICATE_PUBLIC_KEY_TYPES, - __name__, - "Use CertificatePublicKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_PUBLIC_KEY_TYPES", -) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py deleted file mode 100644 index 826b956..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import asn1 -from cryptography.hazmat.primitives import hashes - -decode_dss_signature = asn1.decode_dss_signature -encode_dss_signature = asn1.encode_dss_signature - - -class Prehashed: - def __init__(self, algorithm: hashes.HashAlgorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of HashAlgorithm.") - - self._algorithm = algorithm - self._digest_size = algorithm.digest_size - - @property - def digest_size(self) -> int: - return self._digest_size diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py deleted file mode 100644 index a499376..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py +++ /dev/null @@ -1,122 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class X25519PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> X25519PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x25519.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> X25519PublicKey: - """ - Returns a copy. 
- """ - - -X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) - - -class X25519PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> X25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - return rust_openssl.x25519.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> X25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x25519.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> X25519PublicKey: - """ - Returns the public key associated with this private key - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: X25519PublicKey) -> bytes: - """ - Performs a key exchange operation using the provided peer's public key. - """ - - @abc.abstractmethod - def __copy__(self) -> X25519PrivateKey: - """ - Returns a copy. - """ - - -X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py deleted file mode 100644 index c6fd71b..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py +++ /dev/null @@ -1,125 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class X448PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> X448PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> X448PublicKey: - """ - Returns a copy. 
- """ - - -if hasattr(rust_openssl, "x448"): - X448PublicKey.register(rust_openssl.x448.X448PublicKey) - - -class X448PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> X448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> X448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> X448PublicKey: - """ - Returns the public key associated with this private key - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: X448PublicKey) -> bytes: - """ - Performs a key exchange operation using the provided peer's public key. - """ - - @abc.abstractmethod - def __copy__(self) -> X448PrivateKey: - """ - Returns a copy. - """ - - -if hasattr(rust_openssl, "x448"): - X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py deleted file mode 100644 index 10c15d0..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers.base import ( - AEADCipherContext, - AEADDecryptionContext, - AEADEncryptionContext, - Cipher, - CipherContext, -) - -__all__ = [ - "AEADCipherContext", - "AEADDecryptionContext", - "AEADEncryptionContext", - "BlockCipherAlgorithm", - "Cipher", - "CipherAlgorithm", - "CipherContext", -] diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 3b8faae..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc deleted file mode 100644 index 130c3f7..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc deleted file mode 100644 index 8a810b0..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc deleted file mode 100644 index f2b5cf1..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc deleted file mode 100644 index a1a3a28..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/aead.py deleted file mode 100644 index c8a582d..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/aead.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = [ - "AESCCM", - "AESGCM", - "AESGCMSIV", - "AESOCB3", - "AESSIV", - "ChaCha20Poly1305", -] - -AESGCM = rust_openssl.aead.AESGCM -ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 -AESCCM = rust_openssl.aead.AESCCM -AESSIV = rust_openssl.aead.AESSIV -AESOCB3 = rust_openssl.aead.AESOCB3 -AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py deleted file mode 100644 index 1e402c7..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py +++ /dev/null @@ -1,136 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography import utils -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - ARC4 as ARC4, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - CAST5 as CAST5, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - IDEA as IDEA, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - SEED as SEED, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - Blowfish as Blowfish, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - TripleDES as TripleDES, -) -from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size -from cryptography.hazmat.primitives.ciphers import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) - - -class AES(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - # 512 added to support AES-256-XTS, which uses 512-bit keys - key_sizes = frozenset([128, 192, 256, 512]) - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class AES128(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - key_sizes = frozenset([128]) - key_size = 128 - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - -class AES256(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - key_sizes = frozenset([256]) - key_size = 256 - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - -class Camellia(BlockCipherAlgorithm): - name = "camellia" - block_size = 128 - key_sizes = frozenset([128, 192, 256]) - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -utils.deprecated( - ARC4, - __name__, - "ARC4 has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", - utils.DeprecatedIn43, - name="ARC4", -) - - -utils.deprecated( - TripleDES, - __name__, - "TripleDES has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", - utils.DeprecatedIn43, - name="TripleDES", -) - - -class ChaCha20(CipherAlgorithm): - name = "ChaCha20" - key_sizes = frozenset([256]) - - def __init__(self, key: utils.Buffer, nonce: utils.Buffer): - self.key = _verify_key_size(self, key) - utils._check_byteslike("nonce", nonce) - - if 
len(nonce) != 16: - raise ValueError("nonce must be 128-bits (16 bytes)") - - self._nonce = nonce - - @property - def nonce(self) -> utils.Buffer: - return self._nonce - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class SM4(BlockCipherAlgorithm): - name = "SM4" - block_size = 128 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py deleted file mode 100644 index 24fceea..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py +++ /dev/null @@ -1,146 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm -from cryptography.hazmat.primitives.ciphers import modes -from cryptography.utils import Buffer - - -class CipherContext(metaclass=abc.ABCMeta): - @abc.abstractmethod - def update(self, data: Buffer) -> bytes: - """ - Processes the provided bytes through the cipher and returns the results - as bytes. - """ - - @abc.abstractmethod - def update_into(self, data: Buffer, buf: Buffer) -> int: - """ - Processes the provided bytes and writes the resulting data into the - provided buffer. Returns the number of bytes written. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Returns the results of processing the final block as bytes. - """ - - @abc.abstractmethod - def reset_nonce(self, nonce: bytes) -> None: - """ - Resets the nonce for the cipher context to the provided value. - Raises an exception if it does not support reset or if the - provided nonce does not have a valid length. - """ - - -class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): - @abc.abstractmethod - def authenticate_additional_data(self, data: Buffer) -> None: - """ - Authenticates the provided bytes. - """ - - -class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): - @abc.abstractmethod - def finalize_with_tag(self, tag: bytes) -> bytes: - """ - Returns the results of processing the final block as bytes and allows - delayed passing of the authentication tag. - """ - - -class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tag(self) -> bytes: - """ - Returns tag bytes. This is only available after encryption is - finalized. - """ - - -Mode = typing.TypeVar( - "Mode", bound=typing.Optional[modes.Mode], covariant=True -) - - -class Cipher(typing.Generic[Mode]): - def __init__( - self, - algorithm: CipherAlgorithm, - mode: Mode, - backend: typing.Any = None, - ) -> None: - if not isinstance(algorithm, CipherAlgorithm): - raise TypeError("Expected interface of CipherAlgorithm.") - - if mode is not None: - # mypy needs this assert to narrow the type from our generic - # type. Maybe it won't some time in the future. 
- assert isinstance(mode, modes.Mode) - mode.validate_for_algorithm(algorithm) - - self.algorithm = algorithm - self.mode = mode - - @typing.overload - def encryptor( - self: Cipher[modes.ModeWithAuthenticationTag], - ) -> AEADEncryptionContext: ... - - @typing.overload - def encryptor( - self: _CIPHER_TYPE, - ) -> CipherContext: ... - - def encryptor(self): - if isinstance(self.mode, modes.ModeWithAuthenticationTag): - if self.mode.tag is not None: - raise ValueError( - "Authentication tag must be None when encrypting." - ) - - return rust_openssl.ciphers.create_encryption_ctx( - self.algorithm, self.mode - ) - - @typing.overload - def decryptor( - self: Cipher[modes.ModeWithAuthenticationTag], - ) -> AEADDecryptionContext: ... - - @typing.overload - def decryptor( - self: _CIPHER_TYPE, - ) -> CipherContext: ... - - def decryptor(self): - return rust_openssl.ciphers.create_decryption_ctx( - self.algorithm, self.mode - ) - - -_CIPHER_TYPE = Cipher[ - typing.Union[ - modes.ModeWithNonce, - modes.ModeWithTweak, - modes.ECB, - modes.ModeWithInitializationVector, - None, - ] -] - -CipherContext.register(rust_openssl.ciphers.CipherContext) -AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) -AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py deleted file mode 100644 index 36c555c..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py +++ /dev/null @@ -1,268 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers import algorithms - - -class Mode(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this mode (e.g. "ECB", "CBC"). - """ - - @abc.abstractmethod - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - """ - Checks that all the necessary invariants of this (mode, algorithm) - combination are met. - """ - - -class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def initialization_vector(self) -> utils.Buffer: - """ - The value of the initialization vector for this mode as bytes. - """ - - -class ModeWithTweak(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tweak(self) -> utils.Buffer: - """ - The value of the tweak for this mode as bytes. - """ - - -class ModeWithNonce(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def nonce(self) -> utils.Buffer: - """ - The value of the nonce for this mode as bytes. - """ - - -class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tag(self) -> bytes | None: - """ - The value of the tag supplied to the constructor of this mode. 
- """ - - -def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: - if algorithm.key_size > 256 and algorithm.name == "AES": - raise ValueError( - "Only 128, 192, and 256 bit keys are allowed for this AES mode" - ) - - -def _check_iv_length( - self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm -) -> None: - iv_len = len(self.initialization_vector) - if iv_len * 8 != algorithm.block_size: - raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") - - -def _check_nonce_length( - nonce: utils.Buffer, name: str, algorithm: CipherAlgorithm -) -> None: - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - f"{name} requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - if len(nonce) * 8 != algorithm.block_size: - raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") - - -def _check_iv_and_key_length( - self: ModeWithInitializationVector, algorithm: CipherAlgorithm -) -> None: - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - f"{self} requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - _check_aes_key_length(self, algorithm) - _check_iv_length(self, algorithm) - - -class CBC(ModeWithInitializationVector): - name = "CBC" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class XTS(ModeWithTweak): - name = "XTS" - - def __init__(self, tweak: utils.Buffer): - utils._check_byteslike("tweak", tweak) - - if len(tweak) != 16: - raise ValueError("tweak must be 128-bits (16 bytes)") - - self._tweak = tweak - - @property - def tweak(self) -> utils.Buffer: - return self._tweak - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): - raise TypeError( - "The AES128 and AES256 classes do not support XTS, please use " - "the standard AES class instead." 
- ) - - if algorithm.key_size not in (256, 512): - raise ValueError( - "The XTS specification requires a 256-bit key for AES-128-XTS" - " and 512-bit key for AES-256-XTS" - ) - - -class ECB(Mode): - name = "ECB" - - validate_for_algorithm = _check_aes_key_length - - -class OFB(ModeWithInitializationVector): - name = "OFB" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CFB(ModeWithInitializationVector): - name = "CFB" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CFB8(ModeWithInitializationVector): - name = "CFB8" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CTR(ModeWithNonce): - name = "CTR" - - def __init__(self, nonce: utils.Buffer): - utils._check_byteslike("nonce", nonce) - self._nonce = nonce - - @property - def nonce(self) -> utils.Buffer: - return self._nonce - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - _check_aes_key_length(self, algorithm) - _check_nonce_length(self.nonce, self.name, algorithm) - - -class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): - name = "GCM" - _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 - _MAX_AAD_BYTES = (2**64) // 8 - - def __init__( - self, - initialization_vector: utils.Buffer, - tag: bytes | None = None, - min_tag_length: int = 16, - ): - # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive - # This is a sane limit anyway so we'll enforce it here. - utils._check_byteslike("initialization_vector", initialization_vector) - if len(initialization_vector) < 8 or len(initialization_vector) > 128: - raise ValueError( - "initialization_vector must be between 8 and 128 bytes (64 " - "and 1024 bits)." - ) - self._initialization_vector = initialization_vector - if tag is not None: - utils._check_bytes("tag", tag) - if min_tag_length < 4: - raise ValueError("min_tag_length must be >= 4") - if len(tag) < min_tag_length: - raise ValueError( - f"Authentication tag must be {min_tag_length} bytes or " - "longer." 
- ) - self._tag = tag - self._min_tag_length = min_tag_length - - @property - def tag(self) -> bytes | None: - return self._tag - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - _check_aes_key_length(self, algorithm) - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - "GCM requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - block_size_bytes = algorithm.block_size // 8 - if self._tag is not None and len(self._tag) > block_size_bytes: - raise ValueError( - f"Authentication tag cannot be more than {block_size_bytes} " - "bytes." - ) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/cmac.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/cmac.py deleted file mode 100644 index 2c67ce2..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/cmac.py +++ /dev/null @@ -1,10 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = ["CMAC"] -CMAC = rust_openssl.cmac.CMAC diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/constant_time.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/constant_time.py deleted file mode 100644 index 3975c71..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/constant_time.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import hmac - - -def bytes_eq(a: bytes, b: bytes) -> bool: - if not isinstance(a, bytes) or not isinstance(b, bytes): - raise TypeError("a and b must be bytes.") - - return hmac.compare_digest(a, b) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/hashes.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/hashes.py deleted file mode 100644 index 4b55ec3..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/hashes.py +++ /dev/null @@ -1,246 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.utils import Buffer - -__all__ = [ - "MD5", - "SHA1", - "SHA3_224", - "SHA3_256", - "SHA3_384", - "SHA3_512", - "SHA224", - "SHA256", - "SHA384", - "SHA512", - "SHA512_224", - "SHA512_256", - "SHAKE128", - "SHAKE256", - "SM3", - "BLAKE2b", - "BLAKE2s", - "ExtendableOutputFunction", - "Hash", - "HashAlgorithm", - "HashContext", - "XOFHash", -] - - -class HashAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this algorithm (e.g. "sha256", "md5"). - """ - - @property - @abc.abstractmethod - def digest_size(self) -> int: - """ - The size of the resulting digest in bytes. - """ - - @property - @abc.abstractmethod - def block_size(self) -> int | None: - """ - The internal block size of the hash function, or None if the hash - function does not use blocks internally (e.g. 
SHA3). - """ - - -class HashContext(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def algorithm(self) -> HashAlgorithm: - """ - A HashAlgorithm that will be used by this context. - """ - - @abc.abstractmethod - def update(self, data: Buffer) -> None: - """ - Processes the provided bytes through the hash. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Finalizes the hash context and returns the hash digest as bytes. - """ - - @abc.abstractmethod - def copy(self) -> HashContext: - """ - Return a HashContext that is a copy of the current context. - """ - - -Hash = rust_openssl.hashes.Hash -HashContext.register(Hash) - -XOFHash = rust_openssl.hashes.XOFHash - - -class ExtendableOutputFunction(metaclass=abc.ABCMeta): - """ - An interface for extendable output functions. - """ - - -class SHA1(HashAlgorithm): - name = "sha1" - digest_size = 20 - block_size = 64 - - -class SHA512_224(HashAlgorithm): # noqa: N801 - name = "sha512-224" - digest_size = 28 - block_size = 128 - - -class SHA512_256(HashAlgorithm): # noqa: N801 - name = "sha512-256" - digest_size = 32 - block_size = 128 - - -class SHA224(HashAlgorithm): - name = "sha224" - digest_size = 28 - block_size = 64 - - -class SHA256(HashAlgorithm): - name = "sha256" - digest_size = 32 - block_size = 64 - - -class SHA384(HashAlgorithm): - name = "sha384" - digest_size = 48 - block_size = 128 - - -class SHA512(HashAlgorithm): - name = "sha512" - digest_size = 64 - block_size = 128 - - -class SHA3_224(HashAlgorithm): # noqa: N801 - name = "sha3-224" - digest_size = 28 - block_size = None - - -class SHA3_256(HashAlgorithm): # noqa: N801 - name = "sha3-256" - digest_size = 32 - block_size = None - - -class SHA3_384(HashAlgorithm): # noqa: N801 - name = "sha3-384" - digest_size = 48 - block_size = None - - -class SHA3_512(HashAlgorithm): # noqa: N801 - name = "sha3-512" - digest_size = 64 - block_size = None - - -class SHAKE128(HashAlgorithm, ExtendableOutputFunction): - name = "shake128" - block_size = None - - def __init__(self, digest_size: int): - if not isinstance(digest_size, int): - raise TypeError("digest_size must be an integer") - - if digest_size < 1: - raise ValueError("digest_size must be a positive integer") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class SHAKE256(HashAlgorithm, ExtendableOutputFunction): - name = "shake256" - block_size = None - - def __init__(self, digest_size: int): - if not isinstance(digest_size, int): - raise TypeError("digest_size must be an integer") - - if digest_size < 1: - raise ValueError("digest_size must be a positive integer") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class MD5(HashAlgorithm): - name = "md5" - digest_size = 16 - block_size = 64 - - -class BLAKE2b(HashAlgorithm): - name = "blake2b" - _max_digest_size = 64 - _min_digest_size = 1 - block_size = 128 - - def __init__(self, digest_size: int): - if digest_size != 64: - raise ValueError("Digest size must be 64") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class BLAKE2s(HashAlgorithm): - name = "blake2s" - block_size = 64 - _max_digest_size = 32 - _min_digest_size = 1 - - def __init__(self, digest_size: int): - if digest_size != 32: - raise ValueError("Digest size must be 32") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class 
SM3(HashAlgorithm): - name = "sm3" - digest_size = 32 - block_size = 64 diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/hmac.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/hmac.py deleted file mode 100644 index a9442d5..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/hmac.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import hashes - -__all__ = ["HMAC"] - -HMAC = rust_openssl.hmac.HMAC -hashes.HashContext.register(HMAC) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py deleted file mode 100644 index 79bb459..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - - -class KeyDerivationFunction(metaclass=abc.ABCMeta): - @abc.abstractmethod - def derive(self, key_material: bytes) -> bytes: - """ - Deterministically generates and returns a new key based on the existing - key material. - """ - - @abc.abstractmethod - def verify(self, key_material: bytes, expected_key: bytes) -> None: - """ - Checks whether the key generated by the key material matches the - expected derived key. Raises an exception if they do not match. 
- """ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 06ae219..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc deleted file mode 100644 index b777e3f..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc deleted file mode 100644 index da722a6..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc deleted file mode 100644 index 5365f9a..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc deleted file mode 100644 index c9e886a..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc deleted file mode 100644 index d9e04f8..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc deleted file mode 100644 index f5834e4..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc deleted file mode 100644 index fe3f000..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/argon2.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/argon2.py deleted file mode 100644 index 405fc8d..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/argon2.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - -Argon2id = rust_openssl.kdf.Argon2id -KeyDerivationFunction.register(Argon2id) - -__all__ = ["Argon2id"] diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py deleted file mode 100644 index 1b92841..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py +++ /dev/null @@ -1,125 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing -from collections.abc import Callable - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes, hmac -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -def _int_to_u32be(n: int) -> bytes: - return n.to_bytes(length=4, byteorder="big") - - -def _common_args_checks( - algorithm: hashes.HashAlgorithm, - length: int, - otherinfo: bytes | None, -) -> None: - max_length = algorithm.digest_size * (2**32 - 1) - if length > max_length: - raise ValueError(f"Cannot derive keys larger than {max_length} bits.") - if otherinfo is not None: - utils._check_bytes("otherinfo", otherinfo) - - -def _concatkdf_derive( - key_material: utils.Buffer, - length: int, - auxfn: Callable[[], hashes.HashContext], - otherinfo: bytes, -) -> bytes: - utils._check_byteslike("key_material", key_material) - output = [b""] - outlen = 0 - counter = 1 - - while length > outlen: - h = auxfn() - h.update(_int_to_u32be(counter)) - h.update(key_material) - h.update(otherinfo) - output.append(h.finalize()) - outlen += len(output[-1]) - counter += 1 - - return b"".join(output)[:length] - - -class ConcatKDFHash(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - otherinfo: bytes | None, - backend: typing.Any = None, - ): - _common_args_checks(algorithm, length, otherinfo) - self._algorithm = algorithm - self._length = length - self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" - - self._used = False - - def _hash(self) -> hashes.Hash: - return hashes.Hash(self._algorithm) - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - return _concatkdf_derive( - key_material, self._length, self._hash, self._otherinfo - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class ConcatKDFHMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes | None, - otherinfo: bytes | None, - backend: typing.Any = None, - ): - _common_args_checks(algorithm, length, otherinfo) - self._algorithm = algorithm - self._length = length - self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" - - if algorithm.block_size is None: - raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") - - if salt is None: - salt = b"\x00" * algorithm.block_size - else: - utils._check_bytes("salt", salt) - - self._salt = salt - - self._used = False - - def _hmac(self) -> hmac.HMAC: - return 
hmac.HMAC(self._salt, self._algorithm) - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - return _concatkdf_derive( - key_material, self._length, self._hmac, self._otherinfo - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py deleted file mode 100644 index 1e162d9..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - -HKDF = rust_openssl.kdf.HKDF -HKDFExpand = rust_openssl.kdf.HKDFExpand - -KeyDerivationFunction.register(HKDF) -KeyDerivationFunction.register(HKDFExpand) - -__all__ = ["HKDF", "HKDFExpand"] diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py deleted file mode 100644 index 5b47137..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py +++ /dev/null @@ -1,303 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing -from collections.abc import Callable - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, - _Reasons, -) -from cryptography.hazmat.primitives import ( - ciphers, - cmac, - constant_time, - hashes, - hmac, -) -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class Mode(utils.Enum): - CounterMode = "ctr" - - -class CounterLocation(utils.Enum): - BeforeFixed = "before_fixed" - AfterFixed = "after_fixed" - MiddleFixed = "middle_fixed" - - -class _KBKDFDeriver: - def __init__( - self, - prf: Callable, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - break_location: int | None, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - ): - assert callable(prf) - - if not isinstance(mode, Mode): - raise TypeError("mode must be of type Mode") - - if not isinstance(location, CounterLocation): - raise TypeError("location must be of type CounterLocation") - - if break_location is None and location is CounterLocation.MiddleFixed: - raise ValueError("Please specify a break_location") - - if ( - break_location is not None - and location != CounterLocation.MiddleFixed - ): - raise ValueError( - "break_location is ignored when location is not" - " CounterLocation.MiddleFixed" - ) - - if break_location is not None and not isinstance(break_location, int): - raise TypeError("break_location must be an integer") - - if break_location is not None and break_location < 0: - raise ValueError("break_location must be a positive integer") - - if (label or context) and fixed: - raise ValueError( - "When supplying fixed data, label and context are ignored." 
- ) - - if rlen is None or not self._valid_byte_length(rlen): - raise ValueError("rlen must be between 1 and 4") - - if llen is None and fixed is None: - raise ValueError("Please specify an llen") - - if llen is not None and not isinstance(llen, int): - raise TypeError("llen must be an integer") - - if llen == 0: - raise ValueError("llen must be non-zero") - - if label is None: - label = b"" - - if context is None: - context = b"" - - utils._check_bytes("label", label) - utils._check_bytes("context", context) - self._prf = prf - self._mode = mode - self._length = length - self._rlen = rlen - self._llen = llen - self._location = location - self._break_location = break_location - self._label = label - self._context = context - self._used = False - self._fixed_data = fixed - - @staticmethod - def _valid_byte_length(value: int) -> bool: - if not isinstance(value, int): - raise TypeError("value must be of type int") - - value_bin = utils.int_to_bytes(1, value) - return 1 <= len(value_bin) <= 4 - - def derive( - self, key_material: utils.Buffer, prf_output_size: int - ) -> bytes: - if self._used: - raise AlreadyFinalized - - utils._check_byteslike("key_material", key_material) - self._used = True - - # inverse floor division (equivalent to ceiling) - rounds = -(-self._length // prf_output_size) - - output = [b""] - - # For counter mode, the number of iterations shall not be - # larger than 2^r-1, where r <= 32 is the binary length of the counter - # This ensures that the counter values used as an input to the - # PRF will not repeat during a particular call to the KDF function. - r_bin = utils.int_to_bytes(1, self._rlen) - if rounds > pow(2, len(r_bin) * 8) - 1: - raise ValueError("There are too many iterations.") - - fixed = self._generate_fixed_input() - - if self._location == CounterLocation.BeforeFixed: - data_before_ctr = b"" - data_after_ctr = fixed - elif self._location == CounterLocation.AfterFixed: - data_before_ctr = fixed - data_after_ctr = b"" - else: - if isinstance( - self._break_location, int - ) and self._break_location > len(fixed): - raise ValueError("break_location offset > len(fixed)") - data_before_ctr = fixed[: self._break_location] - data_after_ctr = fixed[self._break_location :] - - for i in range(1, rounds + 1): - h = self._prf(key_material) - - counter = utils.int_to_bytes(i, self._rlen) - input_data = data_before_ctr + counter + data_after_ctr - - h.update(input_data) - - output.append(h.finalize()) - - return b"".join(output)[: self._length] - - def _generate_fixed_input(self) -> bytes: - if self._fixed_data and isinstance(self._fixed_data, bytes): - return self._fixed_data - - l_val = utils.int_to_bytes(self._length * 8, self._llen) - - return b"".join([self._label, b"\x00", self._context, l_val]) - - -class KBKDFHMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - backend: typing.Any = None, - *, - break_location: int | None = None, - ): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported hash algorithm.", - _Reasons.UNSUPPORTED_HASH, - ) - - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.hmac_supported(algorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported hmac algorithm.", - _Reasons.UNSUPPORTED_HASH, - ) - - 
self._algorithm = algorithm - - self._deriver = _KBKDFDeriver( - self._prf, - mode, - length, - rlen, - llen, - location, - break_location, - label, - context, - fixed, - ) - - def _prf(self, key_material: bytes) -> hmac.HMAC: - return hmac.HMAC(key_material, self._algorithm) - - def derive(self, key_material: utils.Buffer) -> bytes: - return self._deriver.derive(key_material, self._algorithm.digest_size) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class KBKDFCMAC(KeyDerivationFunction): - def __init__( - self, - algorithm, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - backend: typing.Any = None, - *, - break_location: int | None = None, - ): - if not issubclass( - algorithm, ciphers.BlockCipherAlgorithm - ) or not issubclass(algorithm, ciphers.CipherAlgorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported cipher algorithm.", - _Reasons.UNSUPPORTED_CIPHER, - ) - - self._algorithm = algorithm - self._cipher: ciphers.BlockCipherAlgorithm | None = None - - self._deriver = _KBKDFDeriver( - self._prf, - mode, - length, - rlen, - llen, - location, - break_location, - label, - context, - fixed, - ) - - def _prf(self, _: bytes) -> cmac.CMAC: - assert self._cipher is not None - - return cmac.CMAC(self._cipher) - - def derive(self, key_material: utils.Buffer) -> bytes: - self._cipher = self._algorithm(key_material) - - assert self._cipher is not None - - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.cmac_algorithm_supported(self._cipher): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported cipher algorithm.", - _Reasons.UNSUPPORTED_CIPHER, - ) - - return self._deriver.derive(key_material, self._cipher.block_size // 8) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py deleted file mode 100644 index d539f13..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py +++ /dev/null @@ -1,62 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, - _Reasons, -) -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import constant_time, hashes -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class PBKDF2HMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes, - iterations: int, - backend: typing.Any = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.pbkdf2_hmac_supported(algorithm): - raise UnsupportedAlgorithm( - f"{algorithm.name} is not supported for PBKDF2.", - _Reasons.UNSUPPORTED_HASH, - ) - self._used = False - self._algorithm = algorithm - self._length = length - utils._check_bytes("salt", salt) - self._salt = salt - self._iterations = iterations - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized("PBKDF2 instances can only be used once.") - self._used = True - - return rust_openssl.kdf.derive_pbkdf2_hmac( - key_material, - self._algorithm, - self._salt, - self._iterations, - self._length, - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - derived_key = self.derive(key_material) - if not constant_time.bytes_eq(derived_key, expected_key): - raise InvalidKey("Keys do not match.") diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py deleted file mode 100644 index f791cee..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import sys - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - -# This is used by the scrypt tests to skip tests that require more memory -# than the MEM_LIMIT -_MEM_LIMIT = sys.maxsize // 2 - -Scrypt = rust_openssl.kdf.Scrypt -KeyDerivationFunction.register(Scrypt) - -__all__ = ["Scrypt"] diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py deleted file mode 100644 index 63870cd..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -def _int_to_u32be(n: int) -> bytes: - return n.to_bytes(length=4, byteorder="big") - - -class X963KDF(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - sharedinfo: bytes | None, - backend: typing.Any = None, - ): - max_len = algorithm.digest_size * (2**32 - 1) - if length > max_len: - raise ValueError(f"Cannot derive keys larger than {max_len} bits.") - if sharedinfo is not None: - utils._check_bytes("sharedinfo", sharedinfo) - - self._algorithm = algorithm - self._length = length - self._sharedinfo = sharedinfo - self._used = False - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - utils._check_byteslike("key_material", key_material) - output = [b""] - outlen = 0 - counter = 1 - - while self._length > outlen: - h = hashes.Hash(self._algorithm) - h.update(key_material) - h.update(_int_to_u32be(counter)) - if self._sharedinfo is not None: - h.update(self._sharedinfo) - output.append(h.finalize()) - outlen += len(output[-1]) - counter += 1 - - return b"".join(output)[: self._length] - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/keywrap.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/keywrap.py deleted file mode 100644 index b93d87d..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/keywrap.py +++ /dev/null @@ -1,177 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details.
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import ECB -from cryptography.hazmat.primitives.constant_time import bytes_eq - - -def _wrap_core( - wrapping_key: bytes, - a: bytes, - r: list[bytes], -) -> bytes: - # RFC 3394 Key Wrap - 2.2.1 (index method) - encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() - n = len(r) - for j in range(6): - for i in range(n): - # every encryption operation is a discrete 16 byte chunk (because - # AES has a 128-bit block size) and since we're using ECB it is - # safe to reuse the encryptor for the entire operation - b = encryptor.update(a + r[i]) - a = ( - int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) - ).to_bytes(length=8, byteorder="big") - r[i] = b[-8:] - - assert encryptor.finalize() == b"" - - return a + b"".join(r) - - -def aes_key_wrap( - wrapping_key: bytes, - key_to_wrap: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - if len(key_to_wrap) < 16: - raise ValueError("The key to wrap must be at least 16 bytes") - - if len(key_to_wrap) % 8 != 0: - raise ValueError("The key to wrap must be a multiple of 8 bytes") - - a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" - r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] - return _wrap_core(wrapping_key, a, r) - - -def _unwrap_core( - wrapping_key: bytes, - a: bytes, - r: list[bytes], -) -> tuple[bytes, list[bytes]]: - # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) - decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() - n = len(r) - for j in reversed(range(6)): - for i in reversed(range(n)): - atr = ( - int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) - ).to_bytes(length=8, byteorder="big") + r[i] - # every decryption operation is a discrete 16 byte chunk so - # it is safe to reuse the decryptor for the entire operation - b = decryptor.update(atr) - a = b[:8] - r[i] = b[-8:] - - assert decryptor.finalize() == b"" - return a, r - - -def aes_key_wrap_with_padding( - wrapping_key: bytes, - key_to_wrap: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( - length=4, byteorder="big" - ) - # pad the key to wrap if necessary - pad = (8 - (len(key_to_wrap) % 8)) % 8 - key_to_wrap = key_to_wrap + b"\x00" * pad - if len(key_to_wrap) == 8: - # RFC 5649 - 4.1 - exactly 8 octets after padding - encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() - b = encryptor.update(aiv + key_to_wrap) - assert encryptor.finalize() == b"" - return b - else: - r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] - return _wrap_core(wrapping_key, aiv, r) - - -def aes_key_unwrap_with_padding( - wrapping_key: bytes, - wrapped_key: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapped_key) < 16: - raise InvalidUnwrap("Must be at least 16 bytes") - - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - if len(wrapped_key) == 16: - # RFC 5649 - 4.2 - exactly two 64-bit blocks - decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() - out = decryptor.update(wrapped_key) - assert decryptor.finalize() == b"" - a = out[:8] - data = 
out[8:] - n = 1 - else: - r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] - encrypted_aiv = r.pop(0) - n = len(r) - a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) - data = b"".join(r) - - # 1) Check that MSB(32,A) = A65959A6. - # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let - # MLI = LSB(32,A). - # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of - # the output data are zero. - mli = int.from_bytes(a[4:], byteorder="big") - b = (8 * n) - mli - if ( - not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") - or not 8 * (n - 1) < mli <= 8 * n - or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) - ): - raise InvalidUnwrap() - - if b == 0: - return data - else: - return data[:-b] - - -def aes_key_unwrap( - wrapping_key: bytes, - wrapped_key: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapped_key) < 24: - raise InvalidUnwrap("Must be at least 24 bytes") - - if len(wrapped_key) % 8 != 0: - raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") - - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" - r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] - a = r.pop(0) - a, r = _unwrap_core(wrapping_key, a, r) - if not bytes_eq(a, aiv): - raise InvalidUnwrap() - - return b"".join(r) - - -class InvalidUnwrap(Exception): - pass diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/padding.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/padding.py deleted file mode 100644 index f9cd1f1..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/padding.py +++ /dev/null @@ -1,69 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.hazmat.bindings._rust import ( - ANSIX923PaddingContext, - ANSIX923UnpaddingContext, - PKCS7PaddingContext, - PKCS7UnpaddingContext, -) - - -class PaddingContext(metaclass=abc.ABCMeta): - @abc.abstractmethod - def update(self, data: utils.Buffer) -> bytes: - """ - Pads the provided bytes and returns any available data as bytes. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Finalize the padding, returns bytes. 
- """ - - -def _byte_padding_check(block_size: int) -> None: - if not (0 <= block_size <= 2040): - raise ValueError("block_size must be in range(0, 2041).") - - if block_size % 8 != 0: - raise ValueError("block_size must be a multiple of 8.") - - -class PKCS7: - def __init__(self, block_size: int): - _byte_padding_check(block_size) - self.block_size = block_size - - def padder(self) -> PaddingContext: - return PKCS7PaddingContext(self.block_size) - - def unpadder(self) -> PaddingContext: - return PKCS7UnpaddingContext(self.block_size) - - -PaddingContext.register(PKCS7PaddingContext) -PaddingContext.register(PKCS7UnpaddingContext) - - -class ANSIX923: - def __init__(self, block_size: int): - _byte_padding_check(block_size) - self.block_size = block_size - - def padder(self) -> PaddingContext: - return ANSIX923PaddingContext(self.block_size) - - def unpadder(self) -> PaddingContext: - return ANSIX923UnpaddingContext(self.block_size) - - -PaddingContext.register(ANSIX923PaddingContext) -PaddingContext.register(ANSIX923UnpaddingContext) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/poly1305.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/poly1305.py deleted file mode 100644 index 7f5a77a..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/poly1305.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = ["Poly1305"] - -Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__init__.py deleted file mode 100644 index 62283cc..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._serialization import ( - BestAvailableEncryption, - Encoding, - KeySerializationEncryption, - NoEncryption, - ParameterFormat, - PrivateFormat, - PublicFormat, - _KeySerializationEncryption, -) -from cryptography.hazmat.primitives.serialization.base import ( - load_der_parameters, - load_der_private_key, - load_der_public_key, - load_pem_parameters, - load_pem_private_key, - load_pem_public_key, -) -from cryptography.hazmat.primitives.serialization.ssh import ( - SSHCertificate, - SSHCertificateBuilder, - SSHCertificateType, - SSHCertPrivateKeyTypes, - SSHCertPublicKeyTypes, - SSHPrivateKeyTypes, - SSHPublicKeyTypes, - load_ssh_private_key, - load_ssh_public_identity, - load_ssh_public_key, - ssh_key_fingerprint, -) - -__all__ = [ - "BestAvailableEncryption", - "Encoding", - "KeySerializationEncryption", - "NoEncryption", - "ParameterFormat", - "PrivateFormat", - "PublicFormat", - "SSHCertPrivateKeyTypes", - "SSHCertPublicKeyTypes", - "SSHCertificate", - "SSHCertificateBuilder", - "SSHCertificateType", - "SSHPrivateKeyTypes", - "SSHPublicKeyTypes", - "_KeySerializationEncryption", - "load_der_parameters", - "load_der_private_key", - "load_der_public_key", - "load_pem_parameters", - "load_pem_private_key", - "load_pem_public_key", - "load_ssh_private_key", - "load_ssh_public_identity", - "load_ssh_public_key", - "ssh_key_fingerprint", -] diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b97c7b2..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 20b95e8..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc deleted file mode 100644 index 69837d2..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc deleted file mode 100644 index cd5063e..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc deleted file mode 100644 index aa15405..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/base.py 
b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/base.py deleted file mode 100644 index e7c998b..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/base.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -load_pem_private_key = rust_openssl.keys.load_pem_private_key -load_der_private_key = rust_openssl.keys.load_der_private_key - -load_pem_public_key = rust_openssl.keys.load_pem_public_key -load_der_public_key = rust_openssl.keys.load_der_public_key - -load_pem_parameters = rust_openssl.dh.from_pem_parameters -load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py deleted file mode 100644 index 58884ff..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py +++ /dev/null @@ -1,176 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing -from collections.abc import Iterable - -from cryptography import x509 -from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives._serialization import PBES as PBES -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - rsa, -) -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes - -__all__ = [ - "PBES", - "PKCS12Certificate", - "PKCS12KeyAndCertificates", - "PKCS12PrivateKeyTypes", - "load_key_and_certificates", - "load_pkcs12", - "serialize_java_truststore", - "serialize_key_and_certificates", -] - -PKCS12PrivateKeyTypes = typing.Union[ - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, -] - - -PKCS12Certificate = rust_pkcs12.PKCS12Certificate - - -class PKCS12KeyAndCertificates: - def __init__( - self, - key: PrivateKeyTypes | None, - cert: PKCS12Certificate | None, - additional_certs: list[PKCS12Certificate], - ): - if key is not None and not isinstance( - key, - ( - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - ), - ): - raise TypeError( - "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" - " private key, or None." 
- ) - if cert is not None and not isinstance(cert, PKCS12Certificate): - raise TypeError("cert must be a PKCS12Certificate object or None") - if not all( - isinstance(add_cert, PKCS12Certificate) - for add_cert in additional_certs - ): - raise TypeError( - "all values in additional_certs must be PKCS12Certificate" - " objects" - ) - self._key = key - self._cert = cert - self._additional_certs = additional_certs - - @property - def key(self) -> PrivateKeyTypes | None: - return self._key - - @property - def cert(self) -> PKCS12Certificate | None: - return self._cert - - @property - def additional_certs(self) -> list[PKCS12Certificate]: - return self._additional_certs - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PKCS12KeyAndCertificates): - return NotImplemented - - return ( - self.key == other.key - and self.cert == other.cert - and self.additional_certs == other.additional_certs - ) - - def __hash__(self) -> int: - return hash((self.key, self.cert, tuple(self.additional_certs))) - - def __repr__(self) -> str: - fmt = ( - "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>" - ) - return fmt.format(self.key, self.cert, self.additional_certs) - - -load_key_and_certificates = rust_pkcs12.load_key_and_certificates -load_pkcs12 = rust_pkcs12.load_pkcs12 - - -_PKCS12CATypes = typing.Union[ - x509.Certificate, - PKCS12Certificate, -] - - -def serialize_java_truststore( - certs: Iterable[PKCS12Certificate], - encryption_algorithm: serialization.KeySerializationEncryption, -) -> bytes: - if not certs: - raise ValueError("You must supply at least one cert") - - if not isinstance( - encryption_algorithm, serialization.KeySerializationEncryption - ): - raise TypeError( - "Key encryption algorithm must be a " - "KeySerializationEncryption instance" - ) - - return rust_pkcs12.serialize_java_truststore(certs, encryption_algorithm) - - -def serialize_key_and_certificates( - name: bytes | None, - key: PKCS12PrivateKeyTypes | None, - cert: x509.Certificate | None, - cas: Iterable[_PKCS12CATypes] | None, - encryption_algorithm: serialization.KeySerializationEncryption, -) -> bytes: - if key is not None and not isinstance( - key, - ( - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - ), - ): - raise TypeError( - "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" - " private key, or None." - ) - - if not isinstance( - encryption_algorithm, serialization.KeySerializationEncryption - ): - raise TypeError( - "Key encryption algorithm must be a " - "KeySerializationEncryption instance" - ) - - if key is None and cert is None and not cas: - raise ValueError("You must supply at least one of key, cert, or cas") - - return rust_pkcs12.serialize_key_and_certificates( - name, key, cert, cas, encryption_algorithm - ) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py deleted file mode 100644 index 456dc5b..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py +++ /dev/null @@ -1,411 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details.
- -from __future__ import annotations - -import email.base64mime -import email.generator -import email.message -import email.policy -import io -import typing -from collections.abc import Iterable - -from cryptography import utils, x509 -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa -from cryptography.hazmat.primitives.ciphers import ( - algorithms, -) -from cryptography.utils import _check_byteslike - -load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates - -load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates - -serialize_certificates = rust_pkcs7.serialize_certificates - -PKCS7HashTypes = typing.Union[ - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, -] - -PKCS7PrivateKeyTypes = typing.Union[ - rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey -] - -ContentEncryptionAlgorithm = typing.Union[ - typing.Type[algorithms.AES128], typing.Type[algorithms.AES256] -] - - -class PKCS7Options(utils.Enum): - Text = "Add text/plain MIME type" - Binary = "Don't translate input data into canonical MIME format" - DetachedSignature = "Don't embed data in the PKCS7 structure" - NoCapabilities = "Don't embed SMIME capabilities" - NoAttributes = "Don't embed authenticatedAttributes" - NoCerts = "Don't embed signer certificate" - - -class PKCS7SignatureBuilder: - def __init__( - self, - data: utils.Buffer | None = None, - signers: list[ - tuple[ - x509.Certificate, - PKCS7PrivateKeyTypes, - PKCS7HashTypes, - padding.PSS | padding.PKCS1v15 | None, - ] - ] = [], - additional_certs: list[x509.Certificate] = [], - ): - self._data = data - self._signers = signers - self._additional_certs = additional_certs - - def set_data(self, data: utils.Buffer) -> PKCS7SignatureBuilder: - _check_byteslike("data", data) - if self._data is not None: - raise ValueError("data may only be set once") - - return PKCS7SignatureBuilder(data, self._signers) - - def add_signer( - self, - certificate: x509.Certificate, - private_key: PKCS7PrivateKeyTypes, - hash_algorithm: PKCS7HashTypes, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ) -> PKCS7SignatureBuilder: - if not isinstance( - hash_algorithm, - ( - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - ), - ): - raise TypeError( - "hash_algorithm must be one of hashes.SHA224, " - "SHA256, SHA384, or SHA512" - ) - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - if not isinstance( - private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) - ): - raise TypeError("Only RSA & EC keys are supported at this time.") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - return PKCS7SignatureBuilder( - self._data, - [ - *self._signers, - (certificate, private_key, hash_algorithm, rsa_padding), - ], - ) - - def add_certificate( - self, certificate: x509.Certificate - ) -> PKCS7SignatureBuilder: - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - return PKCS7SignatureBuilder( - self._data, self._signers, [*self._additional_certs, certificate] - ) - 
- def sign( - self, - encoding: serialization.Encoding, - options: Iterable[PKCS7Options], - backend: typing.Any = None, - ) -> bytes: - if len(self._signers) == 0: - raise ValueError("Must have at least one signer") - if self._data is None: - raise ValueError("You must add data to sign") - options = list(options) - if not all(isinstance(x, PKCS7Options) for x in options): - raise ValueError("options must be from the PKCS7Options enum") - if encoding not in ( - serialization.Encoding.PEM, - serialization.Encoding.DER, - serialization.Encoding.SMIME, - ): - raise ValueError( - "Must be PEM, DER, or SMIME from the Encoding enum" - ) - - # Text is a meaningless option unless it is accompanied by - # DetachedSignature - if ( - PKCS7Options.Text in options - and PKCS7Options.DetachedSignature not in options - ): - raise ValueError( - "When passing the Text option you must also pass " - "DetachedSignature" - ) - - if PKCS7Options.Text in options and encoding in ( - serialization.Encoding.DER, - serialization.Encoding.PEM, - ): - raise ValueError( - "The Text option is only available for SMIME serialization" - ) - - # No attributes implies no capabilities so we'll error if you try to - # pass both. - if ( - PKCS7Options.NoAttributes in options - and PKCS7Options.NoCapabilities in options - ): - raise ValueError( - "NoAttributes is a superset of NoCapabilities. Do not pass " - "both values." - ) - - return rust_pkcs7.sign_and_serialize(self, encoding, options) - - -class PKCS7EnvelopeBuilder: - def __init__( - self, - *, - _data: bytes | None = None, - _recipients: list[x509.Certificate] | None = None, - _content_encryption_algorithm: ContentEncryptionAlgorithm - | None = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): - raise UnsupportedAlgorithm( - "RSA with PKCS1 v1.5 padding is not supported by this version" - " of OpenSSL.", - _Reasons.UNSUPPORTED_PADDING, - ) - self._data = _data - self._recipients = _recipients if _recipients is not None else [] - self._content_encryption_algorithm = _content_encryption_algorithm - - def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: - _check_byteslike("data", data) - if self._data is not None: - raise ValueError("data may only be set once") - - return PKCS7EnvelopeBuilder( - _data=data, - _recipients=self._recipients, - _content_encryption_algorithm=self._content_encryption_algorithm, - ) - - def add_recipient( - self, - certificate: x509.Certificate, - ) -> PKCS7EnvelopeBuilder: - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - if not isinstance(certificate.public_key(), rsa.RSAPublicKey): - raise TypeError("Only RSA keys are supported at this time.") - - return PKCS7EnvelopeBuilder( - _data=self._data, - _recipients=[ - *self._recipients, - certificate, - ], - _content_encryption_algorithm=self._content_encryption_algorithm, - ) - - def set_content_encryption_algorithm( - self, content_encryption_algorithm: ContentEncryptionAlgorithm - ) -> PKCS7EnvelopeBuilder: - if self._content_encryption_algorithm is not None: - raise ValueError("Content encryption algo may only be set once") - if content_encryption_algorithm not in { - algorithms.AES128, - algorithms.AES256, - }: - raise TypeError("Only AES128 and AES256 are supported") - - return PKCS7EnvelopeBuilder( - _data=self._data, - _recipients=self._recipients, - _content_encryption_algorithm=content_encryption_algorithm, 
- ) - - def encrypt( - self, - encoding: serialization.Encoding, - options: Iterable[PKCS7Options], - ) -> bytes: - if len(self._recipients) == 0: - raise ValueError("Must have at least one recipient") - if self._data is None: - raise ValueError("You must add data to encrypt") - - # The default content encryption algorithm is AES-128, which the S/MIME - # v3.2 RFC specifies as MUST support (https://datatracker.ietf.org/doc/html/rfc5751#section-2.7) - content_encryption_algorithm = ( - self._content_encryption_algorithm or algorithms.AES128 - ) - - options = list(options) - if not all(isinstance(x, PKCS7Options) for x in options): - raise ValueError("options must be from the PKCS7Options enum") - if encoding not in ( - serialization.Encoding.PEM, - serialization.Encoding.DER, - serialization.Encoding.SMIME, - ): - raise ValueError( - "Must be PEM, DER, or SMIME from the Encoding enum" - ) - - # Only allow options that make sense for encryption - if any( - opt not in [PKCS7Options.Text, PKCS7Options.Binary] - for opt in options - ): - raise ValueError( - "Only the following options are supported for encryption: " - "Text, Binary" - ) - elif PKCS7Options.Text in options and PKCS7Options.Binary in options: - # OpenSSL accepts both options at the same time, but ignores Text. - # We fail defensively to avoid unexpected outputs. - raise ValueError( - "Cannot use Binary and Text options at the same time" - ) - - return rust_pkcs7.encrypt_and_serialize( - self, content_encryption_algorithm, encoding, options - ) - - -pkcs7_decrypt_der = rust_pkcs7.decrypt_der -pkcs7_decrypt_pem = rust_pkcs7.decrypt_pem -pkcs7_decrypt_smime = rust_pkcs7.decrypt_smime - - -def _smime_signed_encode( - data: bytes, signature: bytes, micalg: str, text_mode: bool -) -> bytes: - # This function works pretty hard to replicate what OpenSSL does - # precisely. For good and for ill. 
- - m = email.message.Message() - m.add_header("MIME-Version", "1.0") - m.add_header( - "Content-Type", - "multipart/signed", - protocol="application/x-pkcs7-signature", - micalg=micalg, - ) - - m.preamble = "This is an S/MIME signed message\n" - - msg_part = OpenSSLMimePart() - msg_part.set_payload(data) - if text_mode: - msg_part.add_header("Content-Type", "text/plain") - m.attach(msg_part) - - sig_part = email.message.MIMEPart() - sig_part.add_header( - "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" - ) - sig_part.add_header("Content-Transfer-Encoding", "base64") - sig_part.add_header( - "Content-Disposition", "attachment", filename="smime.p7s" - ) - sig_part.set_payload( - email.base64mime.body_encode(signature, maxlinelen=65) - ) - del sig_part["MIME-Version"] - m.attach(sig_part) - - fp = io.BytesIO() - g = email.generator.BytesGenerator( - fp, - maxheaderlen=0, - mangle_from_=False, - policy=m.policy.clone(linesep="\r\n"), - ) - g.flatten(m) - return fp.getvalue() - - -def _smime_enveloped_encode(data: bytes) -> bytes: - m = email.message.Message() - m.add_header("MIME-Version", "1.0") - m.add_header("Content-Disposition", "attachment", filename="smime.p7m") - m.add_header( - "Content-Type", - "application/pkcs7-mime", - smime_type="enveloped-data", - name="smime.p7m", - ) - m.add_header("Content-Transfer-Encoding", "base64") - - m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) - - return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) - - -def _smime_enveloped_decode(data: bytes) -> bytes: - m = email.message_from_bytes(data) - if m.get_content_type() not in { - "application/x-pkcs7-mime", - "application/pkcs7-mime", - }: - raise ValueError("Not an S/MIME enveloped message") - return bytes(m.get_payload(decode=True)) - - -def _smime_remove_text_headers(data: bytes) -> bytes: - m = email.message_from_bytes(data) - # Using get() instead of get_content_type() since it has None as default, - # where the latter has "text/plain". Both methods are case-insensitive. - content_type = m.get("content-type") - if content_type is None: - raise ValueError( - "Decrypted MIME data has no 'Content-Type' header. " - "Please remove the 'Text' option to parse it manually." - ) - if "text/plain" not in content_type: - raise ValueError( - f"Decrypted MIME data content type is '{content_type}', not " - "'text/plain'. Remove the 'Text' option to parse it manually." - ) - return bytes(m.get_payload(decode=True)) - - -class OpenSSLMimePart(email.message.MIMEPart): - # A MIMEPart subclass that replicates OpenSSL's behavior of not including - # a newline if there are no headers. - def _write_headers(self, generator) -> None: - if list(self.raw_items()): - generator._write_headers(self) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/ssh.py deleted file mode 100644 index cb10cf8..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/serialization/ssh.py +++ /dev/null @@ -1,1619 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import binascii -import enum -import os -import re -import typing -import warnings -from base64 import encodebytes as _base64_encode -from dataclasses import dataclass - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed25519, - padding, - rsa, -) -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.hazmat.primitives.ciphers import ( - AEADDecryptionContext, - Cipher, - algorithms, - modes, -) -from cryptography.hazmat.primitives.serialization import ( - Encoding, - KeySerializationEncryption, - NoEncryption, - PrivateFormat, - PublicFormat, - _KeySerializationEncryption, -) - -try: - from bcrypt import kdf as _bcrypt_kdf - - _bcrypt_supported = True -except ImportError: - _bcrypt_supported = False - - def _bcrypt_kdf( - password: bytes, - salt: bytes, - desired_key_bytes: int, - rounds: int, - ignore_few_rounds: bool = False, - ) -> bytes: - raise UnsupportedAlgorithm("Need bcrypt module") - - -_SSH_ED25519 = b"ssh-ed25519" -_SSH_RSA = b"ssh-rsa" -_SSH_DSA = b"ssh-dss" -_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" -_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" -_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" -_CERT_SUFFIX = b"-cert-v01@openssh.com" - -# U2F application string suffixed pubkey -_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" -_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" - -# These are not key types, only algorithms, so they cannot appear -# as a public key type -_SSH_RSA_SHA256 = b"rsa-sha2-256" -_SSH_RSA_SHA512 = b"rsa-sha2-512" - -_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") -_SK_MAGIC = b"openssh-key-v1\0" -_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" -_SK_END = b"-----END OPENSSH PRIVATE KEY-----" -_BCRYPT = b"bcrypt" -_NONE = b"none" -_DEFAULT_CIPHER = b"aes256-ctr" -_DEFAULT_ROUNDS = 16 - -# re is only way to work on bytes-like data -_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) - -# padding for max blocksize -_PADDING = memoryview(bytearray(range(1, 1 + 16))) - - -@dataclass -class _SSHCipher: - alg: type[algorithms.AES] - key_len: int - mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] - block_len: int - iv_len: int - tag_len: int | None - is_aead: bool - - -# ciphers that are actually used in key wrapping -_SSH_CIPHERS: dict[bytes, _SSHCipher] = { - b"aes256-ctr": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.CTR, - block_len=16, - iv_len=16, - tag_len=None, - is_aead=False, - ), - b"aes256-cbc": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.CBC, - block_len=16, - iv_len=16, - tag_len=None, - is_aead=False, - ), - b"aes256-gcm@openssh.com": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.GCM, - block_len=16, - iv_len=12, - tag_len=16, - is_aead=True, - ), -} - -# map local curve name to key type -_ECDSA_KEY_TYPE = { - "secp256r1": _ECDSA_NISTP256, - "secp384r1": _ECDSA_NISTP384, - "secp521r1": _ECDSA_NISTP521, -} - - -def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: - if isinstance(key, ec.EllipticCurvePrivateKey): - key_type = _ecdsa_key_type(key.public_key()) - elif isinstance(key, ec.EllipticCurvePublicKey): - key_type = _ecdsa_key_type(key) - elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): - key_type = _SSH_RSA - elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): - key_type = _SSH_DSA - 
elif isinstance( - key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) - ): - key_type = _SSH_ED25519 - else: - raise ValueError("Unsupported key type") - - return key_type - - -def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: - """Return SSH key_type and curve_name for private key.""" - curve = public_key.curve - if curve.name not in _ECDSA_KEY_TYPE: - raise ValueError( - f"Unsupported curve for ssh private key: {curve.name!r}" - ) - return _ECDSA_KEY_TYPE[curve.name] - - -def _ssh_pem_encode( - data: utils.Buffer, - prefix: bytes = _SK_START + b"\n", - suffix: bytes = _SK_END + b"\n", -) -> bytes: - return b"".join([prefix, _base64_encode(data), suffix]) - - -def _check_block_size(data: utils.Buffer, block_len: int) -> None: - """Require data to be full blocks""" - if not data or len(data) % block_len != 0: - raise ValueError("Corrupt data: missing padding") - - -def _check_empty(data: utils.Buffer) -> None: - """All data should have been parsed.""" - if data: - raise ValueError("Corrupt data: unparsed data") - - -def _init_cipher( - ciphername: bytes, - password: bytes | None, - salt: bytes, - rounds: int, -) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: - """Generate key + iv and return cipher.""" - if not password: - raise TypeError( - "Key is password-protected, but password was not provided." - ) - - ciph = _SSH_CIPHERS[ciphername] - seed = _bcrypt_kdf( - password, salt, ciph.key_len + ciph.iv_len, rounds, True - ) - return Cipher( - ciph.alg(seed[: ciph.key_len]), - ciph.mode(seed[ciph.key_len :]), - ) - - -def _get_u32(data: memoryview) -> tuple[int, memoryview]: - """Uint32""" - if len(data) < 4: - raise ValueError("Invalid data") - return int.from_bytes(data[:4], byteorder="big"), data[4:] - - -def _get_u64(data: memoryview) -> tuple[int, memoryview]: - """Uint64""" - if len(data) < 8: - raise ValueError("Invalid data") - return int.from_bytes(data[:8], byteorder="big"), data[8:] - - -def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: - """Bytes with u32 length prefix""" - n, data = _get_u32(data) - if n > len(data): - raise ValueError("Invalid data") - return data[:n], data[n:] - - -def _get_mpint(data: memoryview) -> tuple[int, memoryview]: - """Big integer.""" - val, data = _get_sshstr(data) - if val and val[0] > 0x7F: - raise ValueError("Invalid data") - return int.from_bytes(val, "big"), data - - -def _to_mpint(val: int) -> bytes: - """Storage format for signed bigint.""" - if val < 0: - raise ValueError("negative mpint not allowed") - if not val: - return b"" - nbytes = (val.bit_length() + 8) // 8 - return utils.int_to_bytes(val, nbytes) - - -class _FragList: - """Build recursive structure without data copy.""" - - flist: list[utils.Buffer] - - def __init__(self, init: list[utils.Buffer] | None = None) -> None: - self.flist = [] - if init: - self.flist.extend(init) - - def put_raw(self, val: utils.Buffer) -> None: - """Add plain bytes""" - self.flist.append(val) - - def put_u32(self, val: int) -> None: - """Big-endian uint32""" - self.flist.append(val.to_bytes(length=4, byteorder="big")) - - def put_u64(self, val: int) -> None: - """Big-endian uint64""" - self.flist.append(val.to_bytes(length=8, byteorder="big")) - - def put_sshstr(self, val: bytes | _FragList) -> None: - """Bytes prefixed with u32 length""" - if isinstance(val, (bytes, memoryview, bytearray)): - self.put_u32(len(val)) - self.flist.append(val) - else: - self.put_u32(val.size()) - self.flist.extend(val.flist) - - def put_mpint(self, val: int) -> None: - 
"""Big-endian bigint prefixed with u32 length""" - self.put_sshstr(_to_mpint(val)) - - def size(self) -> int: - """Current number of bytes""" - return sum(map(len, self.flist)) - - def render(self, dstbuf: memoryview, pos: int = 0) -> int: - """Write into bytearray""" - for frag in self.flist: - flen = len(frag) - start, pos = pos, pos + flen - dstbuf[start:pos] = frag - return pos - - def tobytes(self) -> bytes: - """Return as bytes""" - buf = memoryview(bytearray(self.size())) - self.render(buf) - return buf.tobytes() - - -class _SSHFormatRSA: - """Format for RSA keys. - - Public: - mpint e, n - Private: - mpint n, e, d, iqmp, p, q - """ - - def get_public( - self, data: memoryview - ) -> tuple[tuple[int, int], memoryview]: - """RSA public fields""" - e, data = _get_mpint(data) - n, data = _get_mpint(data) - return (e, n), data - - def load_public( - self, data: memoryview - ) -> tuple[rsa.RSAPublicKey, memoryview]: - """Make RSA public key from data.""" - (e, n), data = self.get_public(data) - public_numbers = rsa.RSAPublicNumbers(e, n) - public_key = public_numbers.public_key() - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[rsa.RSAPrivateKey, memoryview]: - """Make RSA private key from data.""" - n, data = _get_mpint(data) - e, data = _get_mpint(data) - d, data = _get_mpint(data) - iqmp, data = _get_mpint(data) - p, data = _get_mpint(data) - q, data = _get_mpint(data) - - if (e, n) != pubfields: - raise ValueError("Corrupt data: rsa field mismatch") - dmp1 = rsa.rsa_crt_dmp1(d, p) - dmq1 = rsa.rsa_crt_dmq1(d, q) - public_numbers = rsa.RSAPublicNumbers(e, n) - private_numbers = rsa.RSAPrivateNumbers( - p, q, d, dmp1, dmq1, iqmp, public_numbers - ) - private_key = private_numbers.private_key( - unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation - ) - return private_key, data - - def encode_public( - self, public_key: rsa.RSAPublicKey, f_pub: _FragList - ) -> None: - """Write RSA public key""" - pubn = public_key.public_numbers() - f_pub.put_mpint(pubn.e) - f_pub.put_mpint(pubn.n) - - def encode_private( - self, private_key: rsa.RSAPrivateKey, f_priv: _FragList - ) -> None: - """Write RSA private key""" - private_numbers = private_key.private_numbers() - public_numbers = private_numbers.public_numbers - - f_priv.put_mpint(public_numbers.n) - f_priv.put_mpint(public_numbers.e) - - f_priv.put_mpint(private_numbers.d) - f_priv.put_mpint(private_numbers.iqmp) - f_priv.put_mpint(private_numbers.p) - f_priv.put_mpint(private_numbers.q) - - -class _SSHFormatDSA: - """Format for DSA keys. 
- - Public: - mpint p, q, g, y - Private: - mpint p, q, g, y, x - """ - - def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: - """DSA public fields""" - p, data = _get_mpint(data) - q, data = _get_mpint(data) - g, data = _get_mpint(data) - y, data = _get_mpint(data) - return (p, q, g, y), data - - def load_public( - self, data: memoryview - ) -> tuple[dsa.DSAPublicKey, memoryview]: - """Make DSA public key from data.""" - (p, q, g, y), data = self.get_public(data) - parameter_numbers = dsa.DSAParameterNumbers(p, q, g) - public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) - self._validate(public_numbers) - public_key = public_numbers.public_key() - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[dsa.DSAPrivateKey, memoryview]: - """Make DSA private key from data.""" - (p, q, g, y), data = self.get_public(data) - x, data = _get_mpint(data) - - if (p, q, g, y) != pubfields: - raise ValueError("Corrupt data: dsa field mismatch") - parameter_numbers = dsa.DSAParameterNumbers(p, q, g) - public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) - self._validate(public_numbers) - private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) - private_key = private_numbers.private_key() - return private_key, data - - def encode_public( - self, public_key: dsa.DSAPublicKey, f_pub: _FragList - ) -> None: - """Write DSA public key""" - public_numbers = public_key.public_numbers() - parameter_numbers = public_numbers.parameter_numbers - self._validate(public_numbers) - - f_pub.put_mpint(parameter_numbers.p) - f_pub.put_mpint(parameter_numbers.q) - f_pub.put_mpint(parameter_numbers.g) - f_pub.put_mpint(public_numbers.y) - - def encode_private( - self, private_key: dsa.DSAPrivateKey, f_priv: _FragList - ) -> None: - """Write DSA private key""" - self.encode_public(private_key.public_key(), f_priv) - f_priv.put_mpint(private_key.private_numbers().x) - - def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: - parameter_numbers = public_numbers.parameter_numbers - if parameter_numbers.p.bit_length() != 1024: - raise ValueError("SSH supports only 1024 bit DSA keys") - - -class _SSHFormatECDSA: - """Format for ECDSA keys. 
- - Public: - str curve - bytes point - Private: - str curve - bytes point - mpint secret - """ - - def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): - self.ssh_curve_name = ssh_curve_name - self.curve = curve - - def get_public( - self, data: memoryview - ) -> tuple[tuple[memoryview, memoryview], memoryview]: - """ECDSA public fields""" - curve, data = _get_sshstr(data) - point, data = _get_sshstr(data) - if curve != self.ssh_curve_name: - raise ValueError("Curve name mismatch") - if point[0] != 4: - raise NotImplementedError("Need uncompressed point") - return (curve, point), data - - def load_public( - self, data: memoryview - ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: - """Make ECDSA public key from data.""" - (_, point), data = self.get_public(data) - public_key = ec.EllipticCurvePublicKey.from_encoded_point( - self.curve, point.tobytes() - ) - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: - """Make ECDSA private key from data.""" - (curve_name, point), data = self.get_public(data) - secret, data = _get_mpint(data) - - if (curve_name, point) != pubfields: - raise ValueError("Corrupt data: ecdsa field mismatch") - private_key = ec.derive_private_key(secret, self.curve) - return private_key, data - - def encode_public( - self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList - ) -> None: - """Write ECDSA public key""" - point = public_key.public_bytes( - Encoding.X962, PublicFormat.UncompressedPoint - ) - f_pub.put_sshstr(self.ssh_curve_name) - f_pub.put_sshstr(point) - - def encode_private( - self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList - ) -> None: - """Write ECDSA private key""" - public_key = private_key.public_key() - private_numbers = private_key.private_numbers() - - self.encode_public(public_key, f_priv) - f_priv.put_mpint(private_numbers.private_value) - - -class _SSHFormatEd25519: - """Format for Ed25519 keys. 
- - Public: - bytes point - Private: - bytes point - bytes secret_and_point - """ - - def get_public( - self, data: memoryview - ) -> tuple[tuple[memoryview], memoryview]: - """Ed25519 public fields""" - point, data = _get_sshstr(data) - return (point,), data - - def load_public( - self, data: memoryview - ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: - """Make Ed25519 public key from data.""" - (point,), data = self.get_public(data) - public_key = ed25519.Ed25519PublicKey.from_public_bytes( - point.tobytes() - ) - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: - """Make Ed25519 private key from data.""" - (point,), data = self.get_public(data) - keypair, data = _get_sshstr(data) - - secret = keypair[:32] - point2 = keypair[32:] - if point != point2 or (point,) != pubfields: - raise ValueError("Corrupt data: ed25519 field mismatch") - private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) - return private_key, data - - def encode_public( - self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList - ) -> None: - """Write Ed25519 public key""" - raw_public_key = public_key.public_bytes( - Encoding.Raw, PublicFormat.Raw - ) - f_pub.put_sshstr(raw_public_key) - - def encode_private( - self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList - ) -> None: - """Write Ed25519 private key""" - public_key = private_key.public_key() - raw_private_key = private_key.private_bytes( - Encoding.Raw, PrivateFormat.Raw, NoEncryption() - ) - raw_public_key = public_key.public_bytes( - Encoding.Raw, PublicFormat.Raw - ) - f_keypair = _FragList([raw_private_key, raw_public_key]) - - self.encode_public(public_key, f_priv) - f_priv.put_sshstr(f_keypair) - - -def load_application(data) -> tuple[memoryview, memoryview]: - """ - U2F application strings - """ - application, data = _get_sshstr(data) - if not application.tobytes().startswith(b"ssh:"): - raise ValueError( - "U2F application string does not start with b'ssh:' " - f"({application})" - ) - return application, data - - -class _SSHFormatSKEd25519: - """ - The format of a sk-ssh-ed25519@openssh.com public key is: - - string "sk-ssh-ed25519@openssh.com" - string public key - string application (user-specified, but typically "ssh:") - """ - - def load_public( - self, data: memoryview - ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: - """Make Ed25519 public key from data.""" - public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) - _, data = load_application(data) - return public_key, data - - def get_public(self, data: memoryview) -> typing.NoReturn: - # Confusingly `get_public` is an entry point used by private key - # loading. - raise UnsupportedAlgorithm( - "sk-ssh-ed25519 private keys cannot be loaded" - ) - - -class _SSHFormatSKECDSA: - """ - The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: - - string "sk-ecdsa-sha2-nistp256@openssh.com" - string curve name - ec_point Q - string application (user-specified, but typically "ssh:") - """ - - def load_public( - self, data: memoryview - ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: - """Make ECDSA public key from data.""" - public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) - _, data = load_application(data) - return public_key, data - - def get_public(self, data: memoryview) -> typing.NoReturn: - # Confusingly `get_public` is an entry point used by private key - # loading. 
- raise UnsupportedAlgorithm( - "sk-ecdsa-sha2-nistp256 private keys cannot be loaded" - ) - - -_KEY_FORMATS = { - _SSH_RSA: _SSHFormatRSA(), - _SSH_DSA: _SSHFormatDSA(), - _SSH_ED25519: _SSHFormatEd25519(), - _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), - _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), - _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), - _SK_SSH_ED25519: _SSHFormatSKEd25519(), - _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), -} - - -def _lookup_kformat(key_type: utils.Buffer): - """Return valid format or throw error""" - if not isinstance(key_type, bytes): - key_type = memoryview(key_type).tobytes() - if key_type in _KEY_FORMATS: - return _KEY_FORMATS[key_type] - raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") - - -SSHPrivateKeyTypes = typing.Union[ - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ed25519.Ed25519PrivateKey, -] - - -def load_ssh_private_key( - data: utils.Buffer, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> SSHPrivateKeyTypes: - """Load private key from OpenSSH custom encoding.""" - utils._check_byteslike("data", data) - if password is not None: - utils._check_bytes("password", password) - - m = _PEM_RC.search(data) - if not m: - raise ValueError("Not OpenSSH private key format") - p1 = m.start(1) - p2 = m.end(1) - data = binascii.a2b_base64(memoryview(data)[p1:p2]) - if not data.startswith(_SK_MAGIC): - raise ValueError("Not OpenSSH private key format") - data = memoryview(data)[len(_SK_MAGIC) :] - - # parse header - ciphername, data = _get_sshstr(data) - kdfname, data = _get_sshstr(data) - kdfoptions, data = _get_sshstr(data) - nkeys, data = _get_u32(data) - if nkeys != 1: - raise ValueError("Only one key supported") - - # load public key data - pubdata, data = _get_sshstr(data) - pub_key_type, pubdata = _get_sshstr(pubdata) - kformat = _lookup_kformat(pub_key_type) - pubfields, pubdata = kformat.get_public(pubdata) - _check_empty(pubdata) - - if ciphername != _NONE or kdfname != _NONE: - ciphername_bytes = ciphername.tobytes() - if ciphername_bytes not in _SSH_CIPHERS: - raise UnsupportedAlgorithm( - f"Unsupported cipher: {ciphername_bytes!r}" - ) - if kdfname != _BCRYPT: - raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") - blklen = _SSH_CIPHERS[ciphername_bytes].block_len - tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len - # load secret data - edata, data = _get_sshstr(data) - # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for - # information about how OpenSSH handles AEAD tags - if _SSH_CIPHERS[ciphername_bytes].is_aead: - tag = bytes(data) - if len(tag) != tag_len: - raise ValueError("Corrupt data: invalid tag length for cipher") - else: - _check_empty(data) - _check_block_size(edata, blklen) - salt, kbuf = _get_sshstr(kdfoptions) - rounds, kbuf = _get_u32(kbuf) - _check_empty(kbuf) - ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) - dec = ciph.decryptor() - edata = memoryview(dec.update(edata)) - if _SSH_CIPHERS[ciphername_bytes].is_aead: - assert isinstance(dec, AEADDecryptionContext) - _check_empty(dec.finalize_with_tag(tag)) - else: - # _check_block_size requires data to be a full block so there - # should be no output from finalize - _check_empty(dec.finalize()) - else: - if password: - raise TypeError( - "Password was given but private key is not encrypted." 
- ) - # load secret data - edata, data = _get_sshstr(data) - _check_empty(data) - blklen = 8 - _check_block_size(edata, blklen) - ck1, edata = _get_u32(edata) - ck2, edata = _get_u32(edata) - if ck1 != ck2: - raise ValueError("Corrupt data: broken checksum") - - # load per-key struct - key_type, edata = _get_sshstr(edata) - if key_type != pub_key_type: - raise ValueError("Corrupt data: key type mismatch") - private_key, edata = kformat.load_private( - edata, - pubfields, - unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation, - ) - # We don't use the comment - _, edata = _get_sshstr(edata) - - # yes, SSH does padding check *after* all other parsing is done. - # need to follow as it writes zero-byte padding too. - if edata != _PADDING[: len(edata)]: - raise ValueError("Corrupt data: invalid padding") - - if isinstance(private_key, dsa.DSAPrivateKey): - warnings.warn( - "SSH DSA keys are deprecated and will be removed in a future " - "release.", - utils.DeprecatedIn40, - stacklevel=2, - ) - - return private_key - - -def _serialize_ssh_private_key( - private_key: SSHPrivateKeyTypes, - password: bytes, - encryption_algorithm: KeySerializationEncryption, -) -> bytes: - """Serialize private key with OpenSSH custom encoding.""" - utils._check_bytes("password", password) - if isinstance(private_key, dsa.DSAPrivateKey): - warnings.warn( - "SSH DSA key support is deprecated and will be " - "removed in a future release", - utils.DeprecatedIn40, - stacklevel=4, - ) - - key_type = _get_ssh_key_type(private_key) - kformat = _lookup_kformat(key_type) - - # setup parameters - f_kdfoptions = _FragList() - if password: - ciphername = _DEFAULT_CIPHER - blklen = _SSH_CIPHERS[ciphername].block_len - kdfname = _BCRYPT - rounds = _DEFAULT_ROUNDS - if ( - isinstance(encryption_algorithm, _KeySerializationEncryption) - and encryption_algorithm._kdf_rounds is not None - ): - rounds = encryption_algorithm._kdf_rounds - salt = os.urandom(16) - f_kdfoptions.put_sshstr(salt) - f_kdfoptions.put_u32(rounds) - ciph = _init_cipher(ciphername, password, salt, rounds) - else: - ciphername = kdfname = _NONE - blklen = 8 - ciph = None - nkeys = 1 - checkval = os.urandom(4) - comment = b"" - - # encode public and private parts together - f_public_key = _FragList() - f_public_key.put_sshstr(key_type) - kformat.encode_public(private_key.public_key(), f_public_key) - - f_secrets = _FragList([checkval, checkval]) - f_secrets.put_sshstr(key_type) - kformat.encode_private(private_key, f_secrets) - f_secrets.put_sshstr(comment) - f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) - - # top-level structure - f_main = _FragList() - f_main.put_raw(_SK_MAGIC) - f_main.put_sshstr(ciphername) - f_main.put_sshstr(kdfname) - f_main.put_sshstr(f_kdfoptions) - f_main.put_u32(nkeys) - f_main.put_sshstr(f_public_key) - f_main.put_sshstr(f_secrets) - - # copy result info bytearray - slen = f_secrets.size() - mlen = f_main.size() - buf = memoryview(bytearray(mlen + blklen)) - f_main.render(buf) - ofs = mlen - slen - - # encrypt in-place - if ciph is not None: - ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) - - return _ssh_pem_encode(buf[:mlen]) - - -SSHPublicKeyTypes = typing.Union[ - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - dsa.DSAPublicKey, - ed25519.Ed25519PublicKey, -] - -SSHCertPublicKeyTypes = typing.Union[ - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - ed25519.Ed25519PublicKey, -] - - -class SSHCertificateType(enum.Enum): - USER = 1 - HOST = 2 - - -class SSHCertificate: - def __init__( - self, - 
_nonce: memoryview, - _public_key: SSHPublicKeyTypes, - _serial: int, - _cctype: int, - _key_id: memoryview, - _valid_principals: list[bytes], - _valid_after: int, - _valid_before: int, - _critical_options: dict[bytes, bytes], - _extensions: dict[bytes, bytes], - _sig_type: memoryview, - _sig_key: memoryview, - _inner_sig_type: memoryview, - _signature: memoryview, - _tbs_cert_body: memoryview, - _cert_key_type: bytes, - _cert_body: memoryview, - ): - self._nonce = _nonce - self._public_key = _public_key - self._serial = _serial - try: - self._type = SSHCertificateType(_cctype) - except ValueError: - raise ValueError("Invalid certificate type") - self._key_id = _key_id - self._valid_principals = _valid_principals - self._valid_after = _valid_after - self._valid_before = _valid_before - self._critical_options = _critical_options - self._extensions = _extensions - self._sig_type = _sig_type - self._sig_key = _sig_key - self._inner_sig_type = _inner_sig_type - self._signature = _signature - self._cert_key_type = _cert_key_type - self._cert_body = _cert_body - self._tbs_cert_body = _tbs_cert_body - - @property - def nonce(self) -> bytes: - return bytes(self._nonce) - - def public_key(self) -> SSHCertPublicKeyTypes: - # make mypy happy until we remove DSA support entirely and - # the underlying union won't have a disallowed type - return typing.cast(SSHCertPublicKeyTypes, self._public_key) - - @property - def serial(self) -> int: - return self._serial - - @property - def type(self) -> SSHCertificateType: - return self._type - - @property - def key_id(self) -> bytes: - return bytes(self._key_id) - - @property - def valid_principals(self) -> list[bytes]: - return self._valid_principals - - @property - def valid_before(self) -> int: - return self._valid_before - - @property - def valid_after(self) -> int: - return self._valid_after - - @property - def critical_options(self) -> dict[bytes, bytes]: - return self._critical_options - - @property - def extensions(self) -> dict[bytes, bytes]: - return self._extensions - - def signature_key(self) -> SSHCertPublicKeyTypes: - sigformat = _lookup_kformat(self._sig_type) - signature_key, sigkey_rest = sigformat.load_public(self._sig_key) - _check_empty(sigkey_rest) - return signature_key - - def public_bytes(self) -> bytes: - return ( - bytes(self._cert_key_type) - + b" " - + binascii.b2a_base64(bytes(self._cert_body), newline=False) - ) - - def verify_cert_signature(self) -> None: - signature_key = self.signature_key() - if isinstance(signature_key, ed25519.Ed25519PublicKey): - signature_key.verify( - bytes(self._signature), bytes(self._tbs_cert_body) - ) - elif isinstance(signature_key, ec.EllipticCurvePublicKey): - # The signature is encoded as a pair of big-endian integers - r, data = _get_mpint(self._signature) - s, data = _get_mpint(data) - _check_empty(data) - computed_sig = asym_utils.encode_dss_signature(r, s) - hash_alg = _get_ec_hash_alg(signature_key.curve) - signature_key.verify( - computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) - ) - else: - assert isinstance(signature_key, rsa.RSAPublicKey) - if self._inner_sig_type == _SSH_RSA: - hash_alg = hashes.SHA1() - elif self._inner_sig_type == _SSH_RSA_SHA256: - hash_alg = hashes.SHA256() - else: - assert self._inner_sig_type == _SSH_RSA_SHA512 - hash_alg = hashes.SHA512() - signature_key.verify( - bytes(self._signature), - bytes(self._tbs_cert_body), - padding.PKCS1v15(), - hash_alg, - ) - - -def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: - if isinstance(curve, 
ec.SECP256R1): - return hashes.SHA256() - elif isinstance(curve, ec.SECP384R1): - return hashes.SHA384() - else: - assert isinstance(curve, ec.SECP521R1) - return hashes.SHA512() - - -def _load_ssh_public_identity( - data: utils.Buffer, - _legacy_dsa_allowed=False, -) -> SSHCertificate | SSHPublicKeyTypes: - utils._check_byteslike("data", data) - - m = _SSH_PUBKEY_RC.match(data) - if not m: - raise ValueError("Invalid line format") - key_type = orig_key_type = m.group(1) - key_body = m.group(2) - with_cert = False - if key_type.endswith(_CERT_SUFFIX): - with_cert = True - key_type = key_type[: -len(_CERT_SUFFIX)] - if key_type == _SSH_DSA and not _legacy_dsa_allowed: - raise UnsupportedAlgorithm( - "DSA keys aren't supported in SSH certificates" - ) - kformat = _lookup_kformat(key_type) - - try: - rest = memoryview(binascii.a2b_base64(key_body)) - except (TypeError, binascii.Error): - raise ValueError("Invalid format") - - if with_cert: - cert_body = rest - inner_key_type, rest = _get_sshstr(rest) - if inner_key_type != orig_key_type: - raise ValueError("Invalid key format") - if with_cert: - nonce, rest = _get_sshstr(rest) - public_key, rest = kformat.load_public(rest) - if with_cert: - serial, rest = _get_u64(rest) - cctype, rest = _get_u32(rest) - key_id, rest = _get_sshstr(rest) - principals, rest = _get_sshstr(rest) - valid_principals = [] - while principals: - principal, principals = _get_sshstr(principals) - valid_principals.append(bytes(principal)) - valid_after, rest = _get_u64(rest) - valid_before, rest = _get_u64(rest) - crit_options, rest = _get_sshstr(rest) - critical_options = _parse_exts_opts(crit_options) - exts, rest = _get_sshstr(rest) - extensions = _parse_exts_opts(exts) - # Get the reserved field, which is unused. - _, rest = _get_sshstr(rest) - sig_key_raw, rest = _get_sshstr(rest) - sig_type, sig_key = _get_sshstr(sig_key_raw) - if sig_type == _SSH_DSA and not _legacy_dsa_allowed: - raise UnsupportedAlgorithm( - "DSA signatures aren't supported in SSH certificates" - ) - # Get the entire cert body and subtract the signature - tbs_cert_body = cert_body[: -len(rest)] - signature_raw, rest = _get_sshstr(rest) - _check_empty(rest) - inner_sig_type, sig_rest = _get_sshstr(signature_raw) - # RSA certs can have multiple algorithm types - if ( - sig_type == _SSH_RSA - and inner_sig_type - not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] - ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): - raise ValueError("Signature key type does not match") - signature, sig_rest = _get_sshstr(sig_rest) - _check_empty(sig_rest) - return SSHCertificate( - nonce, - public_key, - serial, - cctype, - key_id, - valid_principals, - valid_after, - valid_before, - critical_options, - extensions, - sig_type, - sig_key, - inner_sig_type, - signature, - tbs_cert_body, - orig_key_type, - cert_body, - ) - else: - _check_empty(rest) - return public_key - - -def load_ssh_public_identity( - data: utils.Buffer, -) -> SSHCertificate | SSHPublicKeyTypes: - return _load_ssh_public_identity(data) - - -def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: - result: dict[bytes, bytes] = {} - last_name = None - while exts_opts: - name, exts_opts = _get_sshstr(exts_opts) - bname: bytes = bytes(name) - if bname in result: - raise ValueError("Duplicate name") - if last_name is not None and bname < last_name: - raise ValueError("Fields not lexically sorted") - value, exts_opts = _get_sshstr(exts_opts) - if len(value) > 0: - value, extra = _get_sshstr(value) - if len(extra) > 0: - raise 
ValueError("Unexpected extra data after value") - result[bname] = bytes(value) - last_name = bname - return result - - -def ssh_key_fingerprint( - key: SSHPublicKeyTypes, - hash_algorithm: hashes.MD5 | hashes.SHA256, -) -> bytes: - if not isinstance(hash_algorithm, (hashes.MD5, hashes.SHA256)): - raise TypeError("hash_algorithm must be either MD5 or SHA256") - - key_type = _get_ssh_key_type(key) - kformat = _lookup_kformat(key_type) - - f_pub = _FragList() - f_pub.put_sshstr(key_type) - kformat.encode_public(key, f_pub) - - ssh_binary_data = f_pub.tobytes() - - # Hash the binary data - hash_obj = hashes.Hash(hash_algorithm) - hash_obj.update(ssh_binary_data) - return hash_obj.finalize() - - -def load_ssh_public_key( - data: utils.Buffer, backend: typing.Any = None -) -> SSHPublicKeyTypes: - cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) - public_key: SSHPublicKeyTypes - if isinstance(cert_or_key, SSHCertificate): - public_key = cert_or_key.public_key() - else: - public_key = cert_or_key - - if isinstance(public_key, dsa.DSAPublicKey): - warnings.warn( - "SSH DSA keys are deprecated and will be removed in a future " - "release.", - utils.DeprecatedIn40, - stacklevel=2, - ) - return public_key - - -def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: - """One-line public key format for OpenSSH""" - if isinstance(public_key, dsa.DSAPublicKey): - warnings.warn( - "SSH DSA key support is deprecated and will be " - "removed in a future release", - utils.DeprecatedIn40, - stacklevel=4, - ) - key_type = _get_ssh_key_type(public_key) - kformat = _lookup_kformat(key_type) - - f_pub = _FragList() - f_pub.put_sshstr(key_type) - kformat.encode_public(public_key, f_pub) - - pub = binascii.b2a_base64(f_pub.tobytes()).strip() - return b"".join([key_type, b" ", pub]) - - -SSHCertPrivateKeyTypes = typing.Union[ - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - ed25519.Ed25519PrivateKey, -] - - -# This is an undocumented limit enforced in the openssh codebase for sshd and -# ssh-keygen, but it is undefined in the ssh certificates spec. 
-_SSHKEY_CERT_MAX_PRINCIPALS = 256 - - -class SSHCertificateBuilder: - def __init__( - self, - _public_key: SSHCertPublicKeyTypes | None = None, - _serial: int | None = None, - _type: SSHCertificateType | None = None, - _key_id: bytes | None = None, - _valid_principals: list[bytes] = [], - _valid_for_all_principals: bool = False, - _valid_before: int | None = None, - _valid_after: int | None = None, - _critical_options: list[tuple[bytes, bytes]] = [], - _extensions: list[tuple[bytes, bytes]] = [], - ): - self._public_key = _public_key - self._serial = _serial - self._type = _type - self._key_id = _key_id - self._valid_principals = _valid_principals - self._valid_for_all_principals = _valid_for_all_principals - self._valid_before = _valid_before - self._valid_after = _valid_after - self._critical_options = _critical_options - self._extensions = _extensions - - def public_key( - self, public_key: SSHCertPublicKeyTypes - ) -> SSHCertificateBuilder: - if not isinstance( - public_key, - ( - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - ed25519.Ed25519PublicKey, - ), - ): - raise TypeError("Unsupported key type") - if self._public_key is not None: - raise ValueError("public_key already set") - - return SSHCertificateBuilder( - _public_key=public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def serial(self, serial: int) -> SSHCertificateBuilder: - if not isinstance(serial, int): - raise TypeError("serial must be an integer") - if not 0 <= serial < 2**64: - raise ValueError("serial must be between 0 and 2**64") - if self._serial is not None: - raise ValueError("serial already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: - if not isinstance(type, SSHCertificateType): - raise TypeError("type must be an SSHCertificateType") - if self._type is not None: - raise ValueError("type already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def key_id(self, key_id: bytes) -> SSHCertificateBuilder: - if not isinstance(key_id, bytes): - raise TypeError("key_id must be bytes") - if self._key_id is not None: - raise ValueError("key_id already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_principals( - self, valid_principals: list[bytes] 
- ) -> SSHCertificateBuilder: - if self._valid_for_all_principals: - raise ValueError( - "Principals can't be set because the cert is valid " - "for all principals" - ) - if ( - not all(isinstance(x, bytes) for x in valid_principals) - or not valid_principals - ): - raise TypeError( - "principals must be a list of bytes and can't be empty" - ) - if self._valid_principals: - raise ValueError("valid_principals already set") - - if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS: - raise ValueError( - "Reached or exceeded the maximum number of valid_principals" - ) - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_for_all_principals(self): - if self._valid_principals: - raise ValueError( - "valid_principals already set, can't set " - "valid_for_all_principals" - ) - if self._valid_for_all_principals: - raise ValueError("valid_for_all_principals already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=True, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder: - if not isinstance(valid_before, (int, float)): - raise TypeError("valid_before must be an int or float") - valid_before = int(valid_before) - if valid_before < 0 or valid_before >= 2**64: - raise ValueError("valid_before must [0, 2**64)") - if self._valid_before is not None: - raise ValueError("valid_before already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder: - if not isinstance(valid_after, (int, float)): - raise TypeError("valid_after must be an int or float") - valid_after = int(valid_after) - if valid_after < 0 or valid_after >= 2**64: - raise ValueError("valid_after must [0, 2**64)") - if self._valid_after is not None: - raise ValueError("valid_after already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def add_critical_option( - self, name: bytes, value: bytes - ) -> SSHCertificateBuilder: - if not isinstance(name, bytes) or not isinstance(value, bytes): - raise TypeError("name and value must be bytes") - # This is O(n**2) - if name in [name for name, _ in self._critical_options]: - raise ValueError("Duplicate critical option name") - - return SSHCertificateBuilder( - _public_key=self._public_key, - 
_serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=[*self._critical_options, (name, value)], - _extensions=self._extensions, - ) - - def add_extension( - self, name: bytes, value: bytes - ) -> SSHCertificateBuilder: - if not isinstance(name, bytes) or not isinstance(value, bytes): - raise TypeError("name and value must be bytes") - # This is O(n**2) - if name in [name for name, _ in self._extensions]: - raise ValueError("Duplicate extension name") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=[*self._extensions, (name, value)], - ) - - def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: - if not isinstance( - private_key, - ( - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - ed25519.Ed25519PrivateKey, - ), - ): - raise TypeError("Unsupported private key type") - - if self._public_key is None: - raise ValueError("public_key must be set") - - # Not required - serial = 0 if self._serial is None else self._serial - - if self._type is None: - raise ValueError("type must be set") - - # Not required - key_id = b"" if self._key_id is None else self._key_id - - # A zero length list is valid, but means the certificate - # is valid for any principal of the specified type. We require - # the user to explicitly set valid_for_all_principals to get - # that behavior. 
- if not self._valid_principals and not self._valid_for_all_principals: - raise ValueError( - "valid_principals must be set if valid_for_all_principals " - "is False" - ) - - if self._valid_before is None: - raise ValueError("valid_before must be set") - - if self._valid_after is None: - raise ValueError("valid_after must be set") - - if self._valid_after > self._valid_before: - raise ValueError("valid_after must be earlier than valid_before") - - # lexically sort our byte strings - self._critical_options.sort(key=lambda x: x[0]) - self._extensions.sort(key=lambda x: x[0]) - - key_type = _get_ssh_key_type(self._public_key) - cert_prefix = key_type + _CERT_SUFFIX - - # Marshal the bytes to be signed - nonce = os.urandom(32) - kformat = _lookup_kformat(key_type) - f = _FragList() - f.put_sshstr(cert_prefix) - f.put_sshstr(nonce) - kformat.encode_public(self._public_key, f) - f.put_u64(serial) - f.put_u32(self._type.value) - f.put_sshstr(key_id) - fprincipals = _FragList() - for p in self._valid_principals: - fprincipals.put_sshstr(p) - f.put_sshstr(fprincipals.tobytes()) - f.put_u64(self._valid_after) - f.put_u64(self._valid_before) - fcrit = _FragList() - for name, value in self._critical_options: - fcrit.put_sshstr(name) - if len(value) > 0: - foptval = _FragList() - foptval.put_sshstr(value) - fcrit.put_sshstr(foptval.tobytes()) - else: - fcrit.put_sshstr(value) - f.put_sshstr(fcrit.tobytes()) - fext = _FragList() - for name, value in self._extensions: - fext.put_sshstr(name) - if len(value) > 0: - fextval = _FragList() - fextval.put_sshstr(value) - fext.put_sshstr(fextval.tobytes()) - else: - fext.put_sshstr(value) - f.put_sshstr(fext.tobytes()) - f.put_sshstr(b"") # RESERVED FIELD - # encode CA public key - ca_type = _get_ssh_key_type(private_key) - caformat = _lookup_kformat(ca_type) - caf = _FragList() - caf.put_sshstr(ca_type) - caformat.encode_public(private_key.public_key(), caf) - f.put_sshstr(caf.tobytes()) - # Sigs according to the rules defined for the CA's public key - # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, - # and RFC8032 for Ed25519). - if isinstance(private_key, ed25519.Ed25519PrivateKey): - signature = private_key.sign(f.tobytes()) - fsig = _FragList() - fsig.put_sshstr(ca_type) - fsig.put_sshstr(signature) - f.put_sshstr(fsig.tobytes()) - elif isinstance(private_key, ec.EllipticCurvePrivateKey): - hash_alg = _get_ec_hash_alg(private_key.curve) - signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) - r, s = asym_utils.decode_dss_signature(signature) - fsig = _FragList() - fsig.put_sshstr(ca_type) - fsigblob = _FragList() - fsigblob.put_mpint(r) - fsigblob.put_mpint(s) - fsig.put_sshstr(fsigblob.tobytes()) - f.put_sshstr(fsig.tobytes()) - - else: - assert isinstance(private_key, rsa.RSAPrivateKey) - # Just like Golang, we're going to use SHA512 for RSA - # https://cs.opensource.google/go/x/crypto/+/refs/tags/ - # v0.4.0:ssh/certs.go;l=445 - # RFC 8332 defines SHA256 and 512 as options - fsig = _FragList() - fsig.put_sshstr(_SSH_RSA_SHA512) - signature = private_key.sign( - f.tobytes(), padding.PKCS1v15(), hashes.SHA512() - ) - fsig.put_sshstr(signature) - f.put_sshstr(fsig.tobytes()) - - cert_data = binascii.b2a_base64(f.tobytes()).strip() - # load_ssh_public_identity returns a union, but this is - # guaranteed to be an SSHCertificate, so we cast to make - # mypy happy. 
- return typing.cast( - SSHCertificate, - load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), - ) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py deleted file mode 100644 index c1af423..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - - -class InvalidToken(Exception): - pass diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 1e4d05d..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc deleted file mode 100644 index cd700b7..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc b/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc deleted file mode 100644 index df51313..0000000 Binary files a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py deleted file mode 100644 index 21fb000..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py +++ /dev/null @@ -1,101 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import base64 -import typing -from urllib.parse import quote, urlencode - -from cryptography.hazmat.primitives import constant_time, hmac -from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 -from cryptography.hazmat.primitives.twofactor import InvalidToken -from cryptography.utils import Buffer - -HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] - - -def _generate_uri( - hotp: HOTP, - type_name: str, - account_name: str, - issuer: str | None, - extra_parameters: list[tuple[str, int]], -) -> str: - parameters = [ - ("digits", hotp._length), - ("secret", base64.b32encode(hotp._key)), - ("algorithm", hotp._algorithm.name.upper()), - ] - - if issuer is not None: - parameters.append(("issuer", issuer)) - - parameters.extend(extra_parameters) - - label = ( - f"{quote(issuer)}:{quote(account_name)}" - if issuer - else quote(account_name) - ) - return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" - - -class HOTP: - def __init__( - self, - key: Buffer, - length: int, - algorithm: HOTPHashTypes, - backend: typing.Any = None, - enforce_key_length: bool = True, - ) -> None: - if len(key) < 16 and enforce_key_length is True: - raise ValueError("Key length has to be at least 128 bits.") - - if not isinstance(length, int): - raise TypeError("Length parameter must be an integer type.") - - if length < 6 or length > 8: - raise ValueError("Length of HOTP has to be between 6 and 8.") - - if not isinstance(algorithm, (SHA1, SHA256, SHA512)): - raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") - - self._key = key - self._length = length - self._algorithm = algorithm - - def generate(self, counter: int) -> bytes: - if not isinstance(counter, int): - raise TypeError("Counter parameter must be an integer type.") - - truncated_value = self._dynamic_truncate(counter) - hotp = truncated_value % (10**self._length) - return "{0:0{1}}".format(hotp, self._length).encode() - - def verify(self, hotp: bytes, counter: int) -> None: - if not constant_time.bytes_eq(self.generate(counter), hotp): - raise InvalidToken("Supplied HOTP value does not match.") - - def _dynamic_truncate(self, counter: int) -> int: - ctx = hmac.HMAC(self._key, self._algorithm) - - try: - ctx.update(counter.to_bytes(length=8, byteorder="big")) - except OverflowError: - raise ValueError(f"Counter must be between 0 and {2**64 - 1}.") - - hmac_value = ctx.finalize() - - offset = hmac_value[len(hmac_value) - 1] & 0b1111 - p = hmac_value[offset : offset + 4] - return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF - - def get_provisioning_uri( - self, account_name: str, counter: int, issuer: str | None - ) -> str: - return _generate_uri( - self, "hotp", account_name, issuer, [("counter", int(counter))] - ) diff --git a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py deleted file mode 100644 index 10c725c..0000000 --- a/venv/Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py +++ /dev/null @@ -1,56 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.primitives import constant_time -from cryptography.hazmat.primitives.twofactor import InvalidToken -from cryptography.hazmat.primitives.twofactor.hotp import ( - HOTP, - HOTPHashTypes, - _generate_uri, -) -from cryptography.utils import Buffer - - -class TOTP: - def __init__( - self, - key: Buffer, - length: int, - algorithm: HOTPHashTypes, - time_step: int, - backend: typing.Any = None, - enforce_key_length: bool = True, - ): - self._time_step = time_step - self._hotp = HOTP( - key, length, algorithm, enforce_key_length=enforce_key_length - ) - - def generate(self, time: int | float) -> bytes: - if not isinstance(time, (int, float)): - raise TypeError( - "Time parameter must be an integer type or float type." - ) - - counter = int(time / self._time_step) - return self._hotp.generate(counter) - - def verify(self, totp: bytes, time: int) -> None: - if not constant_time.bytes_eq(self.generate(time), totp): - raise InvalidToken("Supplied TOTP value does not match.") - - def get_provisioning_uri( - self, account_name: str, issuer: str | None - ) -> str: - return _generate_uri( - self._hotp, - "totp", - account_name, - issuer, - [("period", int(self._time_step))], - ) diff --git a/venv/Lib/site-packages/cryptography/py.typed b/venv/Lib/site-packages/cryptography/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/cryptography/utils.py b/venv/Lib/site-packages/cryptography/utils.py deleted file mode 100644 index a0fc3b1..0000000 --- a/venv/Lib/site-packages/cryptography/utils.py +++ /dev/null @@ -1,137 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import enum -import sys -import types -import typing -import warnings -from collections.abc import Callable, Sequence - - -# We use a UserWarning subclass, instead of DeprecationWarning, because CPython -# decided deprecation warnings should be invisible by default. -class CryptographyDeprecationWarning(UserWarning): - pass - - -# Several APIs were deprecated with no specific end-of-life date because of the -# ubiquity of their use. They should not be removed until we agree on when that -# cycle ends. -DeprecatedIn36 = CryptographyDeprecationWarning -DeprecatedIn40 = CryptographyDeprecationWarning -DeprecatedIn41 = CryptographyDeprecationWarning -DeprecatedIn42 = CryptographyDeprecationWarning -DeprecatedIn43 = CryptographyDeprecationWarning - - -# If you're wondering why we don't use `Buffer`, it's because `Buffer` would -# be more accurately named: Bufferable. It means something which has an -# `__buffer__`. Which means you can't actually treat the result as a buffer -# (and do things like take a `len()`). 
-if sys.version_info >= (3, 9): - Buffer = typing.Union[bytes, bytearray, memoryview] -else: - Buffer = typing.ByteString - - -def _check_bytes(name: str, value: bytes) -> None: - if not isinstance(value, bytes): - raise TypeError(f"{name} must be bytes") - - -def _check_byteslike(name: str, value: Buffer) -> None: - try: - memoryview(value) - except TypeError: - raise TypeError(f"{name} must be bytes-like") - - -def int_to_bytes(integer: int, length: int | None = None) -> bytes: - if length == 0: - raise ValueError("length argument can't be 0") - return integer.to_bytes( - length or (integer.bit_length() + 7) // 8 or 1, "big" - ) - - -class InterfaceNotImplemented(Exception): - pass - - -class _DeprecatedValue: - def __init__(self, value: object, message: str, warning_class): - self.value = value - self.message = message - self.warning_class = warning_class - - -class _ModuleWithDeprecations(types.ModuleType): - def __init__(self, module: types.ModuleType): - super().__init__(module.__name__) - self.__dict__["_module"] = module - - def __getattr__(self, attr: str) -> object: - obj = getattr(self._module, attr) - if isinstance(obj, _DeprecatedValue): - warnings.warn(obj.message, obj.warning_class, stacklevel=2) - obj = obj.value - return obj - - def __setattr__(self, attr: str, value: object) -> None: - setattr(self._module, attr, value) - - def __delattr__(self, attr: str) -> None: - obj = getattr(self._module, attr) - if isinstance(obj, _DeprecatedValue): - warnings.warn(obj.message, obj.warning_class, stacklevel=2) - - delattr(self._module, attr) - - def __dir__(self) -> Sequence[str]: - return ["_module", *dir(self._module)] - - -def deprecated( - value: object, - module_name: str, - message: str, - warning_class: type[Warning], - name: str | None = None, -) -> _DeprecatedValue: - module = sys.modules[module_name] - if not isinstance(module, _ModuleWithDeprecations): - sys.modules[module_name] = module = _ModuleWithDeprecations(module) - dv = _DeprecatedValue(value, message, warning_class) - # Maintain backwards compatibility with `name is None` for pyOpenSSL. - if name is not None: - setattr(module, name, dv) - return dv - - -def cached_property(func: Callable) -> property: - cached_name = f"_cached_{func}" - sentinel = object() - - def inner(instance: object): - cache = getattr(instance, cached_name, sentinel) - if cache is not sentinel: - return cache - result = func(instance) - setattr(instance, cached_name, result) - return result - - return property(inner) - - -# Python 3.10 changed representation of enums. We use well-defined object -# representation and string representation from Python 3.9. -class Enum(enum.Enum): - def __repr__(self) -> str: - return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" - - def __str__(self) -> str: - return f"{self.__class__.__name__}.{self._name_}" diff --git a/venv/Lib/site-packages/cryptography/x509/__init__.py b/venv/Lib/site-packages/cryptography/x509/__init__.py deleted file mode 100644 index 318eecc..0000000 --- a/venv/Lib/site-packages/cryptography/x509/__init__.py +++ /dev/null @@ -1,270 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.x509 import certificate_transparency, verification -from cryptography.x509.base import ( - Attribute, - AttributeNotFound, - Attributes, - Certificate, - CertificateBuilder, - CertificateRevocationList, - CertificateRevocationListBuilder, - CertificateSigningRequest, - CertificateSigningRequestBuilder, - InvalidVersion, - RevokedCertificate, - RevokedCertificateBuilder, - Version, - load_der_x509_certificate, - load_der_x509_crl, - load_der_x509_csr, - load_pem_x509_certificate, - load_pem_x509_certificates, - load_pem_x509_crl, - load_pem_x509_csr, - random_serial_number, -) -from cryptography.x509.extensions import ( - AccessDescription, - Admission, - Admissions, - AuthorityInformationAccess, - AuthorityKeyIdentifier, - BasicConstraints, - CertificateIssuer, - CertificatePolicies, - CRLDistributionPoints, - CRLNumber, - CRLReason, - DeltaCRLIndicator, - DistributionPoint, - DuplicateExtension, - ExtendedKeyUsage, - Extension, - ExtensionNotFound, - Extensions, - ExtensionType, - FreshestCRL, - GeneralNames, - InhibitAnyPolicy, - InvalidityDate, - IssuerAlternativeName, - IssuingDistributionPoint, - KeyUsage, - MSCertificateTemplate, - NameConstraints, - NamingAuthority, - NoticeReference, - OCSPAcceptableResponses, - OCSPNoCheck, - OCSPNonce, - PolicyConstraints, - PolicyInformation, - PrecertificateSignedCertificateTimestamps, - PrecertPoison, - PrivateKeyUsagePeriod, - ProfessionInfo, - ReasonFlags, - SignedCertificateTimestamps, - SubjectAlternativeName, - SubjectInformationAccess, - SubjectKeyIdentifier, - TLSFeature, - TLSFeatureType, - UnrecognizedExtension, - UserNotice, -) -from cryptography.x509.general_name import ( - DirectoryName, - DNSName, - GeneralName, - IPAddress, - OtherName, - RegisteredID, - RFC822Name, - UniformResourceIdentifier, - UnsupportedGeneralNameType, -) -from cryptography.x509.name import ( - Name, - NameAttribute, - RelativeDistinguishedName, -) -from cryptography.x509.oid import ( - AuthorityInformationAccessOID, - CertificatePoliciesOID, - CRLEntryExtensionOID, - ExtendedKeyUsageOID, - ExtensionOID, - NameOID, - ObjectIdentifier, - PublicKeyAlgorithmOID, - SignatureAlgorithmOID, -) - -OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS -OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER -OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS -OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES -OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS -OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE -OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL -OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY -OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME -OID_KEY_USAGE = ExtensionOID.KEY_USAGE -OID_PRIVATE_KEY_USAGE_PERIOD = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD -OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS -OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK -OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS -OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS -OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME -OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES -OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS -OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER - -OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 -OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 -OID_DSA_WITH_SHA256 = 
SignatureAlgorithmOID.DSA_WITH_SHA256 -OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 -OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 -OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 -OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 -OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 -OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 -OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 -OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 -OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 -OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 -OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 -OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS - -OID_COMMON_NAME = NameOID.COMMON_NAME -OID_COUNTRY_NAME = NameOID.COUNTRY_NAME -OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT -OID_DN_QUALIFIER = NameOID.DN_QUALIFIER -OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS -OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER -OID_GIVEN_NAME = NameOID.GIVEN_NAME -OID_LOCALITY_NAME = NameOID.LOCALITY_NAME -OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME -OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME -OID_PSEUDONYM = NameOID.PSEUDONYM -OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER -OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME -OID_SURNAME = NameOID.SURNAME -OID_TITLE = NameOID.TITLE - -OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH -OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING -OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION -OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING -OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH -OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING - -OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY -OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER -OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE - -OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER -OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON -OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE - -OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS -OID_OCSP = AuthorityInformationAccessOID.OCSP - -__all__ = [ - "OID_CA_ISSUERS", - "OID_OCSP", - "AccessDescription", - "Admission", - "Admissions", - "Attribute", - "AttributeNotFound", - "Attributes", - "AuthorityInformationAccess", - "AuthorityKeyIdentifier", - "BasicConstraints", - "CRLDistributionPoints", - "CRLNumber", - "CRLReason", - "Certificate", - "CertificateBuilder", - "CertificateIssuer", - "CertificatePolicies", - "CertificateRevocationList", - "CertificateRevocationListBuilder", - "CertificateSigningRequest", - "CertificateSigningRequestBuilder", - "DNSName", - "DeltaCRLIndicator", - "DirectoryName", - "DistributionPoint", - "DuplicateExtension", - "ExtendedKeyUsage", - "Extension", - "ExtensionNotFound", - "ExtensionType", - "Extensions", - "FreshestCRL", - "GeneralName", - "GeneralNames", - "IPAddress", - "InhibitAnyPolicy", - "InvalidVersion", - "InvalidityDate", - "IssuerAlternativeName", - "IssuingDistributionPoint", - "KeyUsage", - "MSCertificateTemplate", - "Name", - "NameAttribute", - "NameConstraints", - "NameOID", - "NamingAuthority", - "NoticeReference", - "OCSPAcceptableResponses", - "OCSPNoCheck", - "OCSPNonce", - "ObjectIdentifier", - "OtherName", - "PolicyConstraints", - "PolicyInformation", - "PrecertPoison", - "PrecertificateSignedCertificateTimestamps", - "PrivateKeyUsagePeriod", - "ProfessionInfo", - "PublicKeyAlgorithmOID", - 
"RFC822Name", - "ReasonFlags", - "RegisteredID", - "RelativeDistinguishedName", - "RevokedCertificate", - "RevokedCertificateBuilder", - "SignatureAlgorithmOID", - "SignedCertificateTimestamps", - "SubjectAlternativeName", - "SubjectInformationAccess", - "SubjectKeyIdentifier", - "TLSFeature", - "TLSFeatureType", - "UniformResourceIdentifier", - "UnrecognizedExtension", - "UnsupportedGeneralNameType", - "UserNotice", - "Version", - "certificate_transparency", - "load_der_x509_certificate", - "load_der_x509_crl", - "load_der_x509_csr", - "load_pem_x509_certificate", - "load_pem_x509_certificates", - "load_pem_x509_crl", - "load_pem_x509_csr", - "random_serial_number", - "verification", - "verification", -] diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 0e62c8c..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 94e76dd..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc deleted file mode 100644 index af02323..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc deleted file mode 100644 index e83dc15..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc deleted file mode 100644 index 4d43ed7..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc deleted file mode 100644 index 045820e..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc deleted file mode 100644 index a22dc13..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc b/venv/Lib/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc deleted file mode 100644 index f2a3281..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc 
b/venv/Lib/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc deleted file mode 100644 index 3d7ec71..0000000 Binary files a/venv/Lib/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/cryptography/x509/base.py b/venv/Lib/site-packages/cryptography/x509/base.py deleted file mode 100644 index 1be612b..0000000 --- a/venv/Lib/site-packages/cryptography/x509/base.py +++ /dev/null @@ -1,848 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import datetime -import os -import typing -import warnings -from collections.abc import Iterable - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - padding, - rsa, - x448, - x25519, -) -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPrivateKeyTypes, - CertificatePublicKeyTypes, -) -from cryptography.x509.extensions import ( - Extension, - Extensions, - ExtensionType, - _make_sequence_methods, -) -from cryptography.x509.name import Name, _ASN1Type -from cryptography.x509.oid import ObjectIdentifier - -_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) - -# This must be kept in sync with sign.rs's list of allowable types in -# identify_hash_type -_AllowedHashTypes = typing.Union[ - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA3_224, - hashes.SHA3_256, - hashes.SHA3_384, - hashes.SHA3_512, -] - - -class AttributeNotFound(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -def _reject_duplicate_extension( - extension: Extension[ExtensionType], - extensions: list[Extension[ExtensionType]], -) -> None: - # This is quadratic in the number of extensions - for e in extensions: - if e.oid == extension.oid: - raise ValueError("This extension has already been set.") - - -def _reject_duplicate_attribute( - oid: ObjectIdentifier, - attributes: list[tuple[ObjectIdentifier, bytes, int | None]], -) -> None: - # This is quadratic in the number of attributes - for attr_oid, _, _ in attributes: - if attr_oid == oid: - raise ValueError("This attribute has already been set.") - - -def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: - """Normalizes a datetime to a naive datetime in UTC. - - time -- datetime to normalize. Assumed to be in UTC if not timezone - aware. 
- """ - if time.tzinfo is not None: - offset = time.utcoffset() - offset = offset if offset else datetime.timedelta() - return time.replace(tzinfo=None) - offset - else: - return time - - -class Attribute: - def __init__( - self, - oid: ObjectIdentifier, - value: bytes, - _type: int = _ASN1Type.UTF8String.value, - ) -> None: - self._oid = oid - self._value = value - self._type = _type - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Attribute): - return NotImplemented - - return ( - self.oid == other.oid - and self.value == other.value - and self._type == other._type - ) - - def __hash__(self) -> int: - return hash((self.oid, self.value, self._type)) - - -class Attributes: - def __init__( - self, - attributes: Iterable[Attribute], - ) -> None: - self._attributes = list(attributes) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") - - def __repr__(self) -> str: - return f"" - - def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: - for attr in self: - if attr.oid == oid: - return attr - - raise AttributeNotFound(f"No {oid} attribute was found", oid) - - -class Version(utils.Enum): - v1 = 0 - v3 = 2 - - -class InvalidVersion(Exception): - def __init__(self, msg: str, parsed_version: int) -> None: - super().__init__(msg) - self.parsed_version = parsed_version - - -Certificate = rust_x509.Certificate - - -class RevokedCertificate(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def serial_number(self) -> int: - """ - Returns the serial number of the revoked certificate. - """ - - @property - @abc.abstractmethod - def revocation_date(self) -> datetime.datetime: - """ - Returns the date of when this certificate was revoked. - """ - - @property - @abc.abstractmethod - def revocation_date_utc(self) -> datetime.datetime: - """ - Returns the date of when this certificate was revoked as a non-naive - UTC datetime. - """ - - @property - @abc.abstractmethod - def extensions(self) -> Extensions: - """ - Returns an Extensions object containing a list of Revoked extensions. - """ - - -# Runtime isinstance checks need this since the rust class is not a subclass. -RevokedCertificate.register(rust_x509.RevokedCertificate) - - -class _RawRevokedCertificate(RevokedCertificate): - def __init__( - self, - serial_number: int, - revocation_date: datetime.datetime, - extensions: Extensions, - ): - self._serial_number = serial_number - self._revocation_date = revocation_date - self._extensions = extensions - - @property - def serial_number(self) -> int: - return self._serial_number - - @property - def revocation_date(self) -> datetime.datetime: - warnings.warn( - "Properties that return a naïve datetime object have been " - "deprecated. 
Please switch to revocation_date_utc.", - utils.DeprecatedIn42, - stacklevel=2, - ) - return self._revocation_date - - @property - def revocation_date_utc(self) -> datetime.datetime: - return self._revocation_date.replace(tzinfo=datetime.timezone.utc) - - @property - def extensions(self) -> Extensions: - return self._extensions - - -CertificateRevocationList = rust_x509.CertificateRevocationList -CertificateSigningRequest = rust_x509.CertificateSigningRequest - - -load_pem_x509_certificate = rust_x509.load_pem_x509_certificate -load_der_x509_certificate = rust_x509.load_der_x509_certificate - -load_pem_x509_certificates = rust_x509.load_pem_x509_certificates - -load_pem_x509_csr = rust_x509.load_pem_x509_csr -load_der_x509_csr = rust_x509.load_der_x509_csr - -load_pem_x509_crl = rust_x509.load_pem_x509_crl -load_der_x509_crl = rust_x509.load_der_x509_crl - - -class CertificateSigningRequestBuilder: - def __init__( - self, - subject_name: Name | None = None, - extensions: list[Extension[ExtensionType]] = [], - attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], - ): - """ - Creates an empty X.509 certificate request (v1). - """ - self._subject_name = subject_name - self._extensions = extensions - self._attributes = attributes - - def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: - """ - Sets the certificate requestor's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._subject_name is not None: - raise ValueError("The subject name may only be set once.") - return CertificateSigningRequestBuilder( - name, self._extensions, self._attributes - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateSigningRequestBuilder: - """ - Adds an X.509 extension to the certificate request. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return CertificateSigningRequestBuilder( - self._subject_name, - [*self._extensions, extension], - self._attributes, - ) - - def add_attribute( - self, - oid: ObjectIdentifier, - value: bytes, - *, - _tag: _ASN1Type | None = None, - ) -> CertificateSigningRequestBuilder: - """ - Adds an X.509 attribute with an OID and associated value. - """ - if not isinstance(oid, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - - if not isinstance(value, bytes): - raise TypeError("value must be bytes") - - if _tag is not None and not isinstance(_tag, _ASN1Type): - raise TypeError("tag must be _ASN1Type") - - _reject_duplicate_attribute(oid, self._attributes) - - if _tag is not None: - tag = _tag.value - else: - tag = None - - return CertificateSigningRequestBuilder( - self._subject_name, - self._extensions, - [*self._attributes, (oid, value, tag)], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ecdsa_deterministic: bool | None = None, - ) -> CertificateSigningRequest: - """ - Signs the request using the requestor's private key. 
- """ - if self._subject_name is None: - raise ValueError("A CertificateSigningRequest must have a subject") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - if ecdsa_deterministic is not None: - if not isinstance(private_key, ec.EllipticCurvePrivateKey): - raise TypeError( - "Deterministic ECDSA is only supported for EC keys" - ) - - return rust_x509.create_x509_csr( - self, - private_key, - algorithm, - rsa_padding, - ecdsa_deterministic, - ) - - -class CertificateBuilder: - _extensions: list[Extension[ExtensionType]] - - def __init__( - self, - issuer_name: Name | None = None, - subject_name: Name | None = None, - public_key: CertificatePublicKeyTypes | None = None, - serial_number: int | None = None, - not_valid_before: datetime.datetime | None = None, - not_valid_after: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - ) -> None: - self._version = Version.v3 - self._issuer_name = issuer_name - self._subject_name = subject_name - self._public_key = public_key - self._serial_number = serial_number - self._not_valid_before = not_valid_before - self._not_valid_after = not_valid_after - self._extensions = extensions - - def issuer_name(self, name: Name) -> CertificateBuilder: - """ - Sets the CA's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._issuer_name is not None: - raise ValueError("The issuer name may only be set once.") - return CertificateBuilder( - name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def subject_name(self, name: Name) -> CertificateBuilder: - """ - Sets the requestor's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._subject_name is not None: - raise ValueError("The subject name may only be set once.") - return CertificateBuilder( - self._issuer_name, - name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def public_key( - self, - key: CertificatePublicKeyTypes, - ) -> CertificateBuilder: - """ - Sets the requestor's public key (as found in the signing request). - """ - if not isinstance( - key, - ( - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, - ), - ): - raise TypeError( - "Expecting one of DSAPublicKey, RSAPublicKey," - " EllipticCurvePublicKey, Ed25519PublicKey," - " Ed448PublicKey, X25519PublicKey, or " - "X448PublicKey." - ) - if self._public_key is not None: - raise ValueError("The public key may only be set once.") - return CertificateBuilder( - self._issuer_name, - self._subject_name, - key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def serial_number(self, number: int) -> CertificateBuilder: - """ - Sets the certificate serial number. 
- """ - if not isinstance(number, int): - raise TypeError("Serial number must be of integral type.") - if self._serial_number is not None: - raise ValueError("The serial number may only be set once.") - if number <= 0: - raise ValueError("The serial number should be positive.") - - # ASN.1 integers are always signed, so most significant bit must be - # zero. - if number.bit_length() >= 160: # As defined in RFC 5280 - raise ValueError( - "The serial number should not be more than 159 bits." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: - """ - Sets the certificate activation time. - """ - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._not_valid_before is not None: - raise ValueError("The not valid before may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The not valid before date must be on or after" - " 1950 January 1)." - ) - if self._not_valid_after is not None and time > self._not_valid_after: - raise ValueError( - "The not valid before date must be before the not valid after " - "date." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - time, - self._not_valid_after, - self._extensions, - ) - - def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: - """ - Sets the certificate expiration time. - """ - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._not_valid_after is not None: - raise ValueError("The not valid after may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The not valid after date must be on or after 1950 January 1." - ) - if ( - self._not_valid_before is not None - and time < self._not_valid_before - ): - raise ValueError( - "The not valid after date must be after the not valid before " - "date." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - time, - self._extensions, - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateBuilder: - """ - Adds an X.509 extension to the certificate. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - [*self._extensions, extension], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ecdsa_deterministic: bool | None = None, - ) -> Certificate: - """ - Signs the certificate using the CA's private key. 
- """ - if self._subject_name is None: - raise ValueError("A certificate must have a subject name") - - if self._issuer_name is None: - raise ValueError("A certificate must have an issuer name") - - if self._serial_number is None: - raise ValueError("A certificate must have a serial number") - - if self._not_valid_before is None: - raise ValueError("A certificate must have a not valid before time") - - if self._not_valid_after is None: - raise ValueError("A certificate must have a not valid after time") - - if self._public_key is None: - raise ValueError("A certificate must have a public key") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - if ecdsa_deterministic is not None: - if not isinstance(private_key, ec.EllipticCurvePrivateKey): - raise TypeError( - "Deterministic ECDSA is only supported for EC keys" - ) - - return rust_x509.create_x509_certificate( - self, - private_key, - algorithm, - rsa_padding, - ecdsa_deterministic, - ) - - -class CertificateRevocationListBuilder: - _extensions: list[Extension[ExtensionType]] - _revoked_certificates: list[RevokedCertificate] - - def __init__( - self, - issuer_name: Name | None = None, - last_update: datetime.datetime | None = None, - next_update: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - revoked_certificates: list[RevokedCertificate] = [], - ): - self._issuer_name = issuer_name - self._last_update = last_update - self._next_update = next_update - self._extensions = extensions - self._revoked_certificates = revoked_certificates - - def issuer_name( - self, issuer_name: Name - ) -> CertificateRevocationListBuilder: - if not isinstance(issuer_name, Name): - raise TypeError("Expecting x509.Name object.") - if self._issuer_name is not None: - raise ValueError("The issuer name may only be set once.") - return CertificateRevocationListBuilder( - issuer_name, - self._last_update, - self._next_update, - self._extensions, - self._revoked_certificates, - ) - - def last_update( - self, last_update: datetime.datetime - ) -> CertificateRevocationListBuilder: - if not isinstance(last_update, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._last_update is not None: - raise ValueError("Last update may only be set once.") - last_update = _convert_to_naive_utc_time(last_update) - if last_update < _EARLIEST_UTC_TIME: - raise ValueError( - "The last update date must be on or after 1950 January 1." - ) - if self._next_update is not None and last_update > self._next_update: - raise ValueError( - "The last update date must be before the next update date." - ) - return CertificateRevocationListBuilder( - self._issuer_name, - last_update, - self._next_update, - self._extensions, - self._revoked_certificates, - ) - - def next_update( - self, next_update: datetime.datetime - ) -> CertificateRevocationListBuilder: - if not isinstance(next_update, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._next_update is not None: - raise ValueError("Last update may only be set once.") - next_update = _convert_to_naive_utc_time(next_update) - if next_update < _EARLIEST_UTC_TIME: - raise ValueError( - "The last update date must be on or after 1950 January 1." 
- ) - if self._last_update is not None and next_update < self._last_update: - raise ValueError( - "The next update date must be after the last update date." - ) - return CertificateRevocationListBuilder( - self._issuer_name, - self._last_update, - next_update, - self._extensions, - self._revoked_certificates, - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateRevocationListBuilder: - """ - Adds an X.509 extension to the certificate revocation list. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - return CertificateRevocationListBuilder( - self._issuer_name, - self._last_update, - self._next_update, - [*self._extensions, extension], - self._revoked_certificates, - ) - - def add_revoked_certificate( - self, revoked_certificate: RevokedCertificate - ) -> CertificateRevocationListBuilder: - """ - Adds a revoked certificate to the CRL. - """ - if not isinstance(revoked_certificate, RevokedCertificate): - raise TypeError("Must be an instance of RevokedCertificate") - - return CertificateRevocationListBuilder( - self._issuer_name, - self._last_update, - self._next_update, - self._extensions, - [*self._revoked_certificates, revoked_certificate], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ecdsa_deterministic: bool | None = None, - ) -> CertificateRevocationList: - if self._issuer_name is None: - raise ValueError("A CRL must have an issuer name") - - if self._last_update is None: - raise ValueError("A CRL must have a last update time") - - if self._next_update is None: - raise ValueError("A CRL must have a next update time") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - if ecdsa_deterministic is not None: - if not isinstance(private_key, ec.EllipticCurvePrivateKey): - raise TypeError( - "Deterministic ECDSA is only supported for EC keys" - ) - - return rust_x509.create_x509_crl( - self, - private_key, - algorithm, - rsa_padding, - ecdsa_deterministic, - ) - - -class RevokedCertificateBuilder: - def __init__( - self, - serial_number: int | None = None, - revocation_date: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - ): - self._serial_number = serial_number - self._revocation_date = revocation_date - self._extensions = extensions - - def serial_number(self, number: int) -> RevokedCertificateBuilder: - if not isinstance(number, int): - raise TypeError("Serial number must be of integral type.") - if self._serial_number is not None: - raise ValueError("The serial number may only be set once.") - if number <= 0: - raise ValueError("The serial number should be positive") - - # ASN.1 integers are always signed, so most significant bit must be - # zero. - if number.bit_length() >= 160: # As defined in RFC 5280 - raise ValueError( - "The serial number should not be more than 159 bits." 
-            )
-        return RevokedCertificateBuilder(
-            number, self._revocation_date, self._extensions
-        )
-
-    def revocation_date(
-        self, time: datetime.datetime
-    ) -> RevokedCertificateBuilder:
-        if not isinstance(time, datetime.datetime):
-            raise TypeError("Expecting datetime object.")
-        if self._revocation_date is not None:
-            raise ValueError("The revocation date may only be set once.")
-        time = _convert_to_naive_utc_time(time)
-        if time < _EARLIEST_UTC_TIME:
-            raise ValueError(
-                "The revocation date must be on or after 1950 January 1."
-            )
-        return RevokedCertificateBuilder(
-            self._serial_number, time, self._extensions
-        )
-
-    def add_extension(
-        self, extval: ExtensionType, critical: bool
-    ) -> RevokedCertificateBuilder:
-        if not isinstance(extval, ExtensionType):
-            raise TypeError("extension must be an ExtensionType")
-
-        extension = Extension(extval.oid, critical, extval)
-        _reject_duplicate_extension(extension, self._extensions)
-        return RevokedCertificateBuilder(
-            self._serial_number,
-            self._revocation_date,
-            [*self._extensions, extension],
-        )
-
-    def build(self, backend: typing.Any = None) -> RevokedCertificate:
-        if self._serial_number is None:
-            raise ValueError("A revoked certificate must have a serial number")
-        if self._revocation_date is None:
-            raise ValueError(
-                "A revoked certificate must have a revocation date"
-            )
-        return _RawRevokedCertificate(
-            self._serial_number,
-            self._revocation_date,
-            Extensions(self._extensions),
-        )
-
-
-def random_serial_number() -> int:
-    return int.from_bytes(os.urandom(20), "big") >> 1
diff --git a/venv/Lib/site-packages/cryptography/x509/certificate_transparency.py b/venv/Lib/site-packages/cryptography/x509/certificate_transparency.py
deleted file mode 100644
index fb66cc6..0000000
--- a/venv/Lib/site-packages/cryptography/x509/certificate_transparency.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-from __future__ import annotations
-
-from cryptography import utils
-from cryptography.hazmat.bindings._rust import x509 as rust_x509
-
-
-class LogEntryType(utils.Enum):
-    X509_CERTIFICATE = 0
-    PRE_CERTIFICATE = 1
-
-
-class Version(utils.Enum):
-    v1 = 0
-
-
-class SignatureAlgorithm(utils.Enum):
-    """
-    Signature algorithms that are valid for SCTs.
-
-    These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2).
-
-    See: 
-    """
-
-    ANONYMOUS = 0
-    RSA = 1
-    DSA = 2
-    ECDSA = 3
-
-
-SignedCertificateTimestamp = rust_x509.Sct
diff --git a/venv/Lib/site-packages/cryptography/x509/extensions.py b/venv/Lib/site-packages/cryptography/x509/extensions.py
deleted file mode 100644
index dfa472d..0000000
--- a/venv/Lib/site-packages/cryptography/x509/extensions.py
+++ /dev/null
@@ -1,2528 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
- -from __future__ import annotations - -import abc -import datetime -import hashlib -import ipaddress -import typing -from collections.abc import Iterable, Iterator - -from cryptography import utils -from cryptography.hazmat.bindings._rust import asn1 -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.hazmat.primitives import constant_time, serialization -from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey -from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPublicKeyTypes, - CertificatePublicKeyTypes, -) -from cryptography.x509.certificate_transparency import ( - SignedCertificateTimestamp, -) -from cryptography.x509.general_name import ( - DirectoryName, - DNSName, - GeneralName, - IPAddress, - OtherName, - RegisteredID, - RFC822Name, - UniformResourceIdentifier, - _IPAddressTypes, -) -from cryptography.x509.name import Name, RelativeDistinguishedName -from cryptography.x509.oid import ( - CRLEntryExtensionOID, - ExtensionOID, - ObjectIdentifier, - OCSPExtensionOID, -) - -ExtensionTypeVar = typing.TypeVar( - "ExtensionTypeVar", bound="ExtensionType", covariant=True -) - - -def _key_identifier_from_public_key( - public_key: CertificatePublicKeyTypes, -) -> bytes: - if isinstance(public_key, RSAPublicKey): - data = public_key.public_bytes( - serialization.Encoding.DER, - serialization.PublicFormat.PKCS1, - ) - elif isinstance(public_key, EllipticCurvePublicKey): - data = public_key.public_bytes( - serialization.Encoding.X962, - serialization.PublicFormat.UncompressedPoint, - ) - else: - # This is a very slow way to do this. - serialized = public_key.public_bytes( - serialization.Encoding.DER, - serialization.PublicFormat.SubjectPublicKeyInfo, - ) - data = asn1.parse_spki_for_data(serialized) - - return hashlib.sha1(data).digest() - - -def _make_sequence_methods(field_name: str): - def len_method(self) -> int: - return len(getattr(self, field_name)) - - def iter_method(self): - return iter(getattr(self, field_name)) - - def getitem_method(self, idx): - return getattr(self, field_name)[idx] - - return len_method, iter_method, getitem_method - - -class DuplicateExtension(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -class ExtensionNotFound(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -class ExtensionType(metaclass=abc.ABCMeta): - oid: typing.ClassVar[ObjectIdentifier] - - def public_bytes(self) -> bytes: - """ - Serializes the extension type to DER. - """ - raise NotImplementedError( - f"public_bytes is not implemented for extension type {self!r}" - ) - - -class Extensions: - def __init__(self, extensions: Iterable[Extension[ExtensionType]]) -> None: - self._extensions = list(extensions) - - def get_extension_for_oid( - self, oid: ObjectIdentifier - ) -> Extension[ExtensionType]: - for ext in self: - if ext.oid == oid: - return ext - - raise ExtensionNotFound(f"No {oid} extension was found", oid) - - def get_extension_for_class( - self, extclass: type[ExtensionTypeVar] - ) -> Extension[ExtensionTypeVar]: - if extclass is UnrecognizedExtension: - raise TypeError( - "UnrecognizedExtension can't be used with " - "get_extension_for_class because more than one instance of the" - " class may be present." 
- ) - - for ext in self: - if isinstance(ext.value, extclass): - return ext - - raise ExtensionNotFound( - f"No {extclass} extension was found", extclass.oid - ) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") - - def __repr__(self) -> str: - return f"" - - -class CRLNumber(ExtensionType): - oid = ExtensionOID.CRL_NUMBER - - def __init__(self, crl_number: int) -> None: - if not isinstance(crl_number, int): - raise TypeError("crl_number must be an integer") - - self._crl_number = crl_number - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CRLNumber): - return NotImplemented - - return self.crl_number == other.crl_number - - def __hash__(self) -> int: - return hash(self.crl_number) - - def __repr__(self) -> str: - return f"" - - @property - def crl_number(self) -> int: - return self._crl_number - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class AuthorityKeyIdentifier(ExtensionType): - oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER - - def __init__( - self, - key_identifier: bytes | None, - authority_cert_issuer: Iterable[GeneralName] | None, - authority_cert_serial_number: int | None, - ) -> None: - if (authority_cert_issuer is None) != ( - authority_cert_serial_number is None - ): - raise ValueError( - "authority_cert_issuer and authority_cert_serial_number " - "must both be present or both None" - ) - - if authority_cert_issuer is not None: - authority_cert_issuer = list(authority_cert_issuer) - if not all( - isinstance(x, GeneralName) for x in authority_cert_issuer - ): - raise TypeError( - "authority_cert_issuer must be a list of GeneralName " - "objects" - ) - - if authority_cert_serial_number is not None and not isinstance( - authority_cert_serial_number, int - ): - raise TypeError("authority_cert_serial_number must be an integer") - - self._key_identifier = key_identifier - self._authority_cert_issuer = authority_cert_issuer - self._authority_cert_serial_number = authority_cert_serial_number - - # This takes a subset of CertificatePublicKeyTypes because an issuer - # cannot have an X25519/X448 key. This introduces some unfortunate - # asymmetry that requires typing users to explicitly - # narrow their type, but we should make this accurate and not just - # convenient. 
- @classmethod - def from_issuer_public_key( - cls, public_key: CertificateIssuerPublicKeyTypes - ) -> AuthorityKeyIdentifier: - digest = _key_identifier_from_public_key(public_key) - return cls( - key_identifier=digest, - authority_cert_issuer=None, - authority_cert_serial_number=None, - ) - - @classmethod - def from_issuer_subject_key_identifier( - cls, ski: SubjectKeyIdentifier - ) -> AuthorityKeyIdentifier: - return cls( - key_identifier=ski.digest, - authority_cert_issuer=None, - authority_cert_serial_number=None, - ) - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AuthorityKeyIdentifier): - return NotImplemented - - return ( - self.key_identifier == other.key_identifier - and self.authority_cert_issuer == other.authority_cert_issuer - and self.authority_cert_serial_number - == other.authority_cert_serial_number - ) - - def __hash__(self) -> int: - if self.authority_cert_issuer is None: - aci = None - else: - aci = tuple(self.authority_cert_issuer) - return hash( - (self.key_identifier, aci, self.authority_cert_serial_number) - ) - - @property - def key_identifier(self) -> bytes | None: - return self._key_identifier - - @property - def authority_cert_issuer( - self, - ) -> list[GeneralName] | None: - return self._authority_cert_issuer - - @property - def authority_cert_serial_number(self) -> int | None: - return self._authority_cert_serial_number - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class SubjectKeyIdentifier(ExtensionType): - oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER - - def __init__(self, digest: bytes) -> None: - self._digest = digest - - @classmethod - def from_public_key( - cls, public_key: CertificatePublicKeyTypes - ) -> SubjectKeyIdentifier: - return cls(_key_identifier_from_public_key(public_key)) - - @property - def digest(self) -> bytes: - return self._digest - - @property - def key_identifier(self) -> bytes: - return self._digest - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SubjectKeyIdentifier): - return NotImplemented - - return constant_time.bytes_eq(self.digest, other.digest) - - def __hash__(self) -> int: - return hash(self.digest) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class AuthorityInformationAccess(ExtensionType): - oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS - - def __init__(self, descriptions: Iterable[AccessDescription]) -> None: - descriptions = list(descriptions) - if not all(isinstance(x, AccessDescription) for x in descriptions): - raise TypeError( - "Every item in the descriptions list must be an " - "AccessDescription" - ) - - self._descriptions = descriptions - - __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AuthorityInformationAccess): - return NotImplemented - - return self._descriptions == other._descriptions - - def __hash__(self) -> int: - return hash(tuple(self._descriptions)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class SubjectInformationAccess(ExtensionType): - oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS - - def __init__(self, descriptions: Iterable[AccessDescription]) -> None: - descriptions = list(descriptions) - if not all(isinstance(x, AccessDescription) for x in descriptions): - raise TypeError( - 
"Every item in the descriptions list must be an " - "AccessDescription" - ) - - self._descriptions = descriptions - - __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SubjectInformationAccess): - return NotImplemented - - return self._descriptions == other._descriptions - - def __hash__(self) -> int: - return hash(tuple(self._descriptions)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class AccessDescription: - def __init__( - self, access_method: ObjectIdentifier, access_location: GeneralName - ) -> None: - if not isinstance(access_method, ObjectIdentifier): - raise TypeError("access_method must be an ObjectIdentifier") - - if not isinstance(access_location, GeneralName): - raise TypeError("access_location must be a GeneralName") - - self._access_method = access_method - self._access_location = access_location - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AccessDescription): - return NotImplemented - - return ( - self.access_method == other.access_method - and self.access_location == other.access_location - ) - - def __hash__(self) -> int: - return hash((self.access_method, self.access_location)) - - @property - def access_method(self) -> ObjectIdentifier: - return self._access_method - - @property - def access_location(self) -> GeneralName: - return self._access_location - - -class BasicConstraints(ExtensionType): - oid = ExtensionOID.BASIC_CONSTRAINTS - - def __init__(self, ca: bool, path_length: int | None) -> None: - if not isinstance(ca, bool): - raise TypeError("ca must be a boolean value") - - if path_length is not None and not ca: - raise ValueError("path_length must be None when ca is False") - - if path_length is not None and ( - not isinstance(path_length, int) or path_length < 0 - ): - raise TypeError( - "path_length must be a non-negative integer or None" - ) - - self._ca = ca - self._path_length = path_length - - @property - def ca(self) -> bool: - return self._ca - - @property - def path_length(self) -> int | None: - return self._path_length - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, BasicConstraints): - return NotImplemented - - return self.ca == other.ca and self.path_length == other.path_length - - def __hash__(self) -> int: - return hash((self.ca, self.path_length)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class DeltaCRLIndicator(ExtensionType): - oid = ExtensionOID.DELTA_CRL_INDICATOR - - def __init__(self, crl_number: int) -> None: - if not isinstance(crl_number, int): - raise TypeError("crl_number must be an integer") - - self._crl_number = crl_number - - @property - def crl_number(self) -> int: - return self._crl_number - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DeltaCRLIndicator): - return NotImplemented - - return self.crl_number == other.crl_number - - def __hash__(self) -> int: - return hash(self.crl_number) - - def __repr__(self) -> str: - return f"" - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CRLDistributionPoints(ExtensionType): - oid = ExtensionOID.CRL_DISTRIBUTION_POINTS - - def __init__( - self, distribution_points: Iterable[DistributionPoint] - ) -> None: - distribution_points = 
list(distribution_points) - if not all( - isinstance(x, DistributionPoint) for x in distribution_points - ): - raise TypeError( - "distribution_points must be a list of DistributionPoint " - "objects" - ) - - self._distribution_points = distribution_points - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_distribution_points" - ) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CRLDistributionPoints): - return NotImplemented - - return self._distribution_points == other._distribution_points - - def __hash__(self) -> int: - return hash(tuple(self._distribution_points)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class FreshestCRL(ExtensionType): - oid = ExtensionOID.FRESHEST_CRL - - def __init__( - self, distribution_points: Iterable[DistributionPoint] - ) -> None: - distribution_points = list(distribution_points) - if not all( - isinstance(x, DistributionPoint) for x in distribution_points - ): - raise TypeError( - "distribution_points must be a list of DistributionPoint " - "objects" - ) - - self._distribution_points = distribution_points - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_distribution_points" - ) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, FreshestCRL): - return NotImplemented - - return self._distribution_points == other._distribution_points - - def __hash__(self) -> int: - return hash(tuple(self._distribution_points)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class DistributionPoint: - def __init__( - self, - full_name: Iterable[GeneralName] | None, - relative_name: RelativeDistinguishedName | None, - reasons: frozenset[ReasonFlags] | None, - crl_issuer: Iterable[GeneralName] | None, - ) -> None: - if full_name and relative_name: - raise ValueError( - "You cannot provide both full_name and relative_name, at " - "least one must be None." - ) - if not full_name and not relative_name and not crl_issuer: - raise ValueError( - "Either full_name, relative_name or crl_issuer must be " - "provided." 
- ) - - if full_name is not None: - full_name = list(full_name) - if not all(isinstance(x, GeneralName) for x in full_name): - raise TypeError( - "full_name must be a list of GeneralName objects" - ) - - if relative_name: - if not isinstance(relative_name, RelativeDistinguishedName): - raise TypeError( - "relative_name must be a RelativeDistinguishedName" - ) - - if crl_issuer is not None: - crl_issuer = list(crl_issuer) - if not all(isinstance(x, GeneralName) for x in crl_issuer): - raise TypeError( - "crl_issuer must be None or a list of general names" - ) - - if reasons and ( - not isinstance(reasons, frozenset) - or not all(isinstance(x, ReasonFlags) for x in reasons) - ): - raise TypeError("reasons must be None or frozenset of ReasonFlags") - - if reasons and ( - ReasonFlags.unspecified in reasons - or ReasonFlags.remove_from_crl in reasons - ): - raise ValueError( - "unspecified and remove_from_crl are not valid reasons in a " - "DistributionPoint" - ) - - self._full_name = full_name - self._relative_name = relative_name - self._reasons = reasons - self._crl_issuer = crl_issuer - - def __repr__(self) -> str: - return ( - "".format(self) - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DistributionPoint): - return NotImplemented - - return ( - self.full_name == other.full_name - and self.relative_name == other.relative_name - and self.reasons == other.reasons - and self.crl_issuer == other.crl_issuer - ) - - def __hash__(self) -> int: - if self.full_name is not None: - fn: tuple[GeneralName, ...] | None = tuple(self.full_name) - else: - fn = None - - if self.crl_issuer is not None: - crl_issuer: tuple[GeneralName, ...] | None = tuple(self.crl_issuer) - else: - crl_issuer = None - - return hash((fn, self.relative_name, self.reasons, crl_issuer)) - - @property - def full_name(self) -> list[GeneralName] | None: - return self._full_name - - @property - def relative_name(self) -> RelativeDistinguishedName | None: - return self._relative_name - - @property - def reasons(self) -> frozenset[ReasonFlags] | None: - return self._reasons - - @property - def crl_issuer(self) -> list[GeneralName] | None: - return self._crl_issuer - - -class ReasonFlags(utils.Enum): - unspecified = "unspecified" - key_compromise = "keyCompromise" - ca_compromise = "cACompromise" - affiliation_changed = "affiliationChanged" - superseded = "superseded" - cessation_of_operation = "cessationOfOperation" - certificate_hold = "certificateHold" - privilege_withdrawn = "privilegeWithdrawn" - aa_compromise = "aACompromise" - remove_from_crl = "removeFromCRL" - - -# These are distribution point bit string mappings. Not to be confused with -# CRLReason reason flags bit string mappings. 
-# ReasonFlags ::= BIT STRING { -# unused (0), -# keyCompromise (1), -# cACompromise (2), -# affiliationChanged (3), -# superseded (4), -# cessationOfOperation (5), -# certificateHold (6), -# privilegeWithdrawn (7), -# aACompromise (8) } -_REASON_BIT_MAPPING = { - 1: ReasonFlags.key_compromise, - 2: ReasonFlags.ca_compromise, - 3: ReasonFlags.affiliation_changed, - 4: ReasonFlags.superseded, - 5: ReasonFlags.cessation_of_operation, - 6: ReasonFlags.certificate_hold, - 7: ReasonFlags.privilege_withdrawn, - 8: ReasonFlags.aa_compromise, -} - -_CRLREASONFLAGS = { - ReasonFlags.key_compromise: 1, - ReasonFlags.ca_compromise: 2, - ReasonFlags.affiliation_changed: 3, - ReasonFlags.superseded: 4, - ReasonFlags.cessation_of_operation: 5, - ReasonFlags.certificate_hold: 6, - ReasonFlags.privilege_withdrawn: 7, - ReasonFlags.aa_compromise: 8, -} - -# CRLReason ::= ENUMERATED { -# unspecified (0), -# keyCompromise (1), -# cACompromise (2), -# affiliationChanged (3), -# superseded (4), -# cessationOfOperation (5), -# certificateHold (6), -# -- value 7 is not used -# removeFromCRL (8), -# privilegeWithdrawn (9), -# aACompromise (10) } -_CRL_ENTRY_REASON_ENUM_TO_CODE = { - ReasonFlags.unspecified: 0, - ReasonFlags.key_compromise: 1, - ReasonFlags.ca_compromise: 2, - ReasonFlags.affiliation_changed: 3, - ReasonFlags.superseded: 4, - ReasonFlags.cessation_of_operation: 5, - ReasonFlags.certificate_hold: 6, - ReasonFlags.remove_from_crl: 8, - ReasonFlags.privilege_withdrawn: 9, - ReasonFlags.aa_compromise: 10, -} - - -class PolicyConstraints(ExtensionType): - oid = ExtensionOID.POLICY_CONSTRAINTS - - def __init__( - self, - require_explicit_policy: int | None, - inhibit_policy_mapping: int | None, - ) -> None: - if require_explicit_policy is not None and not isinstance( - require_explicit_policy, int - ): - raise TypeError( - "require_explicit_policy must be a non-negative integer or " - "None" - ) - - if inhibit_policy_mapping is not None and not isinstance( - inhibit_policy_mapping, int - ): - raise TypeError( - "inhibit_policy_mapping must be a non-negative integer or None" - ) - - if inhibit_policy_mapping is None and require_explicit_policy is None: - raise ValueError( - "At least one of require_explicit_policy and " - "inhibit_policy_mapping must not be None" - ) - - self._require_explicit_policy = require_explicit_policy - self._inhibit_policy_mapping = inhibit_policy_mapping - - def __repr__(self) -> str: - return ( - "".format(self) - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PolicyConstraints): - return NotImplemented - - return ( - self.require_explicit_policy == other.require_explicit_policy - and self.inhibit_policy_mapping == other.inhibit_policy_mapping - ) - - def __hash__(self) -> int: - return hash( - (self.require_explicit_policy, self.inhibit_policy_mapping) - ) - - @property - def require_explicit_policy(self) -> int | None: - return self._require_explicit_policy - - @property - def inhibit_policy_mapping(self) -> int | None: - return self._inhibit_policy_mapping - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CertificatePolicies(ExtensionType): - oid = ExtensionOID.CERTIFICATE_POLICIES - - def __init__(self, policies: Iterable[PolicyInformation]) -> None: - policies = list(policies) - if not all(isinstance(x, PolicyInformation) for x in policies): - raise TypeError( - "Every item in the policies list must be a PolicyInformation" - ) - - self._policies = policies - - __len__, __iter__, __getitem__ = 
_make_sequence_methods("_policies") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CertificatePolicies): - return NotImplemented - - return self._policies == other._policies - - def __hash__(self) -> int: - return hash(tuple(self._policies)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PolicyInformation: - def __init__( - self, - policy_identifier: ObjectIdentifier, - policy_qualifiers: Iterable[str | UserNotice] | None, - ) -> None: - if not isinstance(policy_identifier, ObjectIdentifier): - raise TypeError("policy_identifier must be an ObjectIdentifier") - - self._policy_identifier = policy_identifier - - if policy_qualifiers is not None: - policy_qualifiers = list(policy_qualifiers) - if not all( - isinstance(x, (str, UserNotice)) for x in policy_qualifiers - ): - raise TypeError( - "policy_qualifiers must be a list of strings and/or " - "UserNotice objects or None" - ) - - self._policy_qualifiers = policy_qualifiers - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PolicyInformation): - return NotImplemented - - return ( - self.policy_identifier == other.policy_identifier - and self.policy_qualifiers == other.policy_qualifiers - ) - - def __hash__(self) -> int: - if self.policy_qualifiers is not None: - pq = tuple(self.policy_qualifiers) - else: - pq = None - - return hash((self.policy_identifier, pq)) - - @property - def policy_identifier(self) -> ObjectIdentifier: - return self._policy_identifier - - @property - def policy_qualifiers( - self, - ) -> list[str | UserNotice] | None: - return self._policy_qualifiers - - -class UserNotice: - def __init__( - self, - notice_reference: NoticeReference | None, - explicit_text: str | None, - ) -> None: - if notice_reference and not isinstance( - notice_reference, NoticeReference - ): - raise TypeError( - "notice_reference must be None or a NoticeReference" - ) - - self._notice_reference = notice_reference - self._explicit_text = explicit_text - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, UserNotice): - return NotImplemented - - return ( - self.notice_reference == other.notice_reference - and self.explicit_text == other.explicit_text - ) - - def __hash__(self) -> int: - return hash((self.notice_reference, self.explicit_text)) - - @property - def notice_reference(self) -> NoticeReference | None: - return self._notice_reference - - @property - def explicit_text(self) -> str | None: - return self._explicit_text - - -class NoticeReference: - def __init__( - self, - organization: str | None, - notice_numbers: Iterable[int], - ) -> None: - self._organization = organization - notice_numbers = list(notice_numbers) - if not all(isinstance(x, int) for x in notice_numbers): - raise TypeError("notice_numbers must be a list of integers") - - self._notice_numbers = notice_numbers - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NoticeReference): - return NotImplemented - - return ( - self.organization == other.organization - and self.notice_numbers == other.notice_numbers - ) - - def __hash__(self) -> int: - return hash((self.organization, tuple(self.notice_numbers))) - - @property - def organization(self) -> str | None: - return self._organization - - @property - def notice_numbers(self) -> list[int]: - return 
self._notice_numbers - - -class ExtendedKeyUsage(ExtensionType): - oid = ExtensionOID.EXTENDED_KEY_USAGE - - def __init__(self, usages: Iterable[ObjectIdentifier]) -> None: - usages = list(usages) - if not all(isinstance(x, ObjectIdentifier) for x in usages): - raise TypeError( - "Every item in the usages list must be an ObjectIdentifier" - ) - - self._usages = usages - - __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, ExtendedKeyUsage): - return NotImplemented - - return self._usages == other._usages - - def __hash__(self) -> int: - return hash(tuple(self._usages)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class OCSPNoCheck(ExtensionType): - oid = ExtensionOID.OCSP_NO_CHECK - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OCSPNoCheck): - return NotImplemented - - return True - - def __hash__(self) -> int: - return hash(OCSPNoCheck) - - def __repr__(self) -> str: - return "" - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PrecertPoison(ExtensionType): - oid = ExtensionOID.PRECERT_POISON - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PrecertPoison): - return NotImplemented - - return True - - def __hash__(self) -> int: - return hash(PrecertPoison) - - def __repr__(self) -> str: - return "" - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class TLSFeature(ExtensionType): - oid = ExtensionOID.TLS_FEATURE - - def __init__(self, features: Iterable[TLSFeatureType]) -> None: - features = list(features) - if ( - not all(isinstance(x, TLSFeatureType) for x in features) - or len(features) == 0 - ): - raise TypeError( - "features must be a list of elements from the TLSFeatureType " - "enum" - ) - - self._features = features - - __len__, __iter__, __getitem__ = _make_sequence_methods("_features") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, TLSFeature): - return NotImplemented - - return self._features == other._features - - def __hash__(self) -> int: - return hash(tuple(self._features)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class TLSFeatureType(utils.Enum): - # status_request is defined in RFC 6066 and is used for what is commonly - # called OCSP Must-Staple when present in the TLS Feature extension in an - # X.509 certificate. - status_request = 5 - # status_request_v2 is defined in RFC 6961 and allows multiple OCSP - # responses to be provided. It is not currently in use by clients or - # servers. 
- status_request_v2 = 17 - - -_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType} - - -class InhibitAnyPolicy(ExtensionType): - oid = ExtensionOID.INHIBIT_ANY_POLICY - - def __init__(self, skip_certs: int) -> None: - if not isinstance(skip_certs, int): - raise TypeError("skip_certs must be an integer") - - if skip_certs < 0: - raise ValueError("skip_certs must be a non-negative integer") - - self._skip_certs = skip_certs - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, InhibitAnyPolicy): - return NotImplemented - - return self.skip_certs == other.skip_certs - - def __hash__(self) -> int: - return hash(self.skip_certs) - - @property - def skip_certs(self) -> int: - return self._skip_certs - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class KeyUsage(ExtensionType): - oid = ExtensionOID.KEY_USAGE - - def __init__( - self, - digital_signature: bool, - content_commitment: bool, - key_encipherment: bool, - data_encipherment: bool, - key_agreement: bool, - key_cert_sign: bool, - crl_sign: bool, - encipher_only: bool, - decipher_only: bool, - ) -> None: - if not key_agreement and (encipher_only or decipher_only): - raise ValueError( - "encipher_only and decipher_only can only be true when " - "key_agreement is true" - ) - - self._digital_signature = digital_signature - self._content_commitment = content_commitment - self._key_encipherment = key_encipherment - self._data_encipherment = data_encipherment - self._key_agreement = key_agreement - self._key_cert_sign = key_cert_sign - self._crl_sign = crl_sign - self._encipher_only = encipher_only - self._decipher_only = decipher_only - - @property - def digital_signature(self) -> bool: - return self._digital_signature - - @property - def content_commitment(self) -> bool: - return self._content_commitment - - @property - def key_encipherment(self) -> bool: - return self._key_encipherment - - @property - def data_encipherment(self) -> bool: - return self._data_encipherment - - @property - def key_agreement(self) -> bool: - return self._key_agreement - - @property - def key_cert_sign(self) -> bool: - return self._key_cert_sign - - @property - def crl_sign(self) -> bool: - return self._crl_sign - - @property - def encipher_only(self) -> bool: - if not self.key_agreement: - raise ValueError( - "encipher_only is undefined unless key_agreement is true" - ) - else: - return self._encipher_only - - @property - def decipher_only(self) -> bool: - if not self.key_agreement: - raise ValueError( - "decipher_only is undefined unless key_agreement is true" - ) - else: - return self._decipher_only - - def __repr__(self) -> str: - try: - encipher_only = self.encipher_only - decipher_only = self.decipher_only - except ValueError: - # Users found None confusing because even though encipher/decipher - # have no meaning unless key_agreement is true, to construct an - # instance of the class you still need to pass False. 
- encipher_only = False - decipher_only = False - - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, KeyUsage): - return NotImplemented - - return ( - self.digital_signature == other.digital_signature - and self.content_commitment == other.content_commitment - and self.key_encipherment == other.key_encipherment - and self.data_encipherment == other.data_encipherment - and self.key_agreement == other.key_agreement - and self.key_cert_sign == other.key_cert_sign - and self.crl_sign == other.crl_sign - and self._encipher_only == other._encipher_only - and self._decipher_only == other._decipher_only - ) - - def __hash__(self) -> int: - return hash( - ( - self.digital_signature, - self.content_commitment, - self.key_encipherment, - self.data_encipherment, - self.key_agreement, - self.key_cert_sign, - self.crl_sign, - self._encipher_only, - self._decipher_only, - ) - ) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PrivateKeyUsagePeriod(ExtensionType): - oid = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD - - def __init__( - self, - not_before: datetime.datetime | None, - not_after: datetime.datetime | None, - ) -> None: - if ( - not isinstance(not_before, datetime.datetime) - and not_before is not None - ): - raise TypeError("not_before must be a datetime.datetime or None") - - if ( - not isinstance(not_after, datetime.datetime) - and not_after is not None - ): - raise TypeError("not_after must be a datetime.datetime or None") - - if not_before is None and not_after is None: - raise ValueError( - "At least one of not_before and not_after must not be None" - ) - - if ( - not_before is not None - and not_after is not None - and not_before > not_after - ): - raise ValueError("not_before must be before not_after") - - self._not_before = not_before - self._not_after = not_after - - @property - def not_before(self) -> datetime.datetime | None: - return self._not_before - - @property - def not_after(self) -> datetime.datetime | None: - return self._not_after - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PrivateKeyUsagePeriod): - return NotImplemented - - return ( - self.not_before == other.not_before - and self.not_after == other.not_after - ) - - def __hash__(self) -> int: - return hash((self.not_before, self.not_after)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class NameConstraints(ExtensionType): - oid = ExtensionOID.NAME_CONSTRAINTS - - def __init__( - self, - permitted_subtrees: Iterable[GeneralName] | None, - excluded_subtrees: Iterable[GeneralName] | None, - ) -> None: - if permitted_subtrees is not None: - permitted_subtrees = list(permitted_subtrees) - if not permitted_subtrees: - raise ValueError( - "permitted_subtrees must be a non-empty list or None" - ) - if not all(isinstance(x, GeneralName) for x in permitted_subtrees): - raise TypeError( - "permitted_subtrees must be a list of GeneralName objects " - "or None" - ) - - self._validate_tree(permitted_subtrees) - - if excluded_subtrees is not None: - excluded_subtrees = list(excluded_subtrees) - if not excluded_subtrees: - raise ValueError( - "excluded_subtrees must be a non-empty list or None" - ) - if not all(isinstance(x, GeneralName) for x in excluded_subtrees): - raise TypeError( - "excluded_subtrees must be a list of GeneralName objects " - "or None" - ) - - self._validate_tree(excluded_subtrees) - - if permitted_subtrees is 
None and excluded_subtrees is None: - raise ValueError( - "At least one of permitted_subtrees and excluded_subtrees " - "must not be None" - ) - - self._permitted_subtrees = permitted_subtrees - self._excluded_subtrees = excluded_subtrees - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NameConstraints): - return NotImplemented - - return ( - self.excluded_subtrees == other.excluded_subtrees - and self.permitted_subtrees == other.permitted_subtrees - ) - - def _validate_tree(self, tree: Iterable[GeneralName]) -> None: - self._validate_ip_name(tree) - self._validate_dns_name(tree) - - def _validate_ip_name(self, tree: Iterable[GeneralName]) -> None: - if any( - isinstance(name, IPAddress) - and not isinstance( - name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) - ) - for name in tree - ): - raise TypeError( - "IPAddress name constraints must be an IPv4Network or" - " IPv6Network object" - ) - - def _validate_dns_name(self, tree: Iterable[GeneralName]) -> None: - if any( - isinstance(name, DNSName) and "*" in name.value for name in tree - ): - raise ValueError( - "DNSName name constraints must not contain the '*' wildcard" - " character" - ) - - def __repr__(self) -> str: - return ( - f"" - ) - - def __hash__(self) -> int: - if self.permitted_subtrees is not None: - ps: tuple[GeneralName, ...] | None = tuple(self.permitted_subtrees) - else: - ps = None - - if self.excluded_subtrees is not None: - es: tuple[GeneralName, ...] | None = tuple(self.excluded_subtrees) - else: - es = None - - return hash((ps, es)) - - @property - def permitted_subtrees( - self, - ) -> list[GeneralName] | None: - return self._permitted_subtrees - - @property - def excluded_subtrees( - self, - ) -> list[GeneralName] | None: - return self._excluded_subtrees - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class Extension(typing.Generic[ExtensionTypeVar]): - def __init__( - self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar - ) -> None: - if not isinstance(oid, ObjectIdentifier): - raise TypeError( - "oid argument must be an ObjectIdentifier instance." - ) - - if not isinstance(critical, bool): - raise TypeError("critical must be a boolean value") - - self._oid = oid - self._critical = critical - self._value = value - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def critical(self) -> bool: - return self._critical - - @property - def value(self) -> ExtensionTypeVar: - return self._value - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Extension): - return NotImplemented - - return ( - self.oid == other.oid - and self.critical == other.critical - and self.value == other.value - ) - - def __hash__(self) -> int: - return hash((self.oid, self.critical, self.value)) - - -class GeneralNames: - def __init__(self, general_names: Iterable[GeneralName]) -> None: - general_names = list(general_names) - if not all(isinstance(x, GeneralName) for x in general_names): - raise TypeError( - "Every item in the general_names list must be an " - "object conforming to the GeneralName interface" - ) - - self._general_names = general_names - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... 
- - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... - - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - # Return the value of each GeneralName, except for OtherName instances - # which we return directly because it has two important properties not - # just one value. - objs = (i for i in self if isinstance(i, type)) - if type != OtherName: - return [i.value for i in objs] - return list(objs) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, GeneralNames): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(tuple(self._general_names)) - - -class SubjectAlternativeName(ExtensionType): - oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME - - def __init__(self, general_names: Iterable[GeneralName]) -> None: - self._general_names = GeneralNames(general_names) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... - - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - return self._general_names.get_values_for_type(type) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SubjectAlternativeName): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(self._general_names) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class IssuerAlternativeName(ExtensionType): - oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME - - def __init__(self, general_names: Iterable[GeneralName]) -> None: - self._general_names = GeneralNames(general_names) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... 
- - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... - - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - return self._general_names.get_values_for_type(type) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, IssuerAlternativeName): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(self._general_names) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CertificateIssuer(ExtensionType): - oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER - - def __init__(self, general_names: Iterable[GeneralName]) -> None: - self._general_names = GeneralNames(general_names) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... 
- - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - return self._general_names.get_values_for_type(type) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CertificateIssuer): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(self._general_names) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CRLReason(ExtensionType): - oid = CRLEntryExtensionOID.CRL_REASON - - def __init__(self, reason: ReasonFlags) -> None: - if not isinstance(reason, ReasonFlags): - raise TypeError("reason must be an element from ReasonFlags") - - self._reason = reason - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CRLReason): - return NotImplemented - - return self.reason == other.reason - - def __hash__(self) -> int: - return hash(self.reason) - - @property - def reason(self) -> ReasonFlags: - return self._reason - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class InvalidityDate(ExtensionType): - oid = CRLEntryExtensionOID.INVALIDITY_DATE - - def __init__(self, invalidity_date: datetime.datetime) -> None: - if not isinstance(invalidity_date, datetime.datetime): - raise TypeError("invalidity_date must be a datetime.datetime") - - self._invalidity_date = invalidity_date - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, InvalidityDate): - return NotImplemented - - return self.invalidity_date == other.invalidity_date - - def __hash__(self) -> int: - return hash(self.invalidity_date) - - @property - def invalidity_date(self) -> datetime.datetime: - return self._invalidity_date - - @property - def invalidity_date_utc(self) -> datetime.datetime: - if self._invalidity_date.tzinfo is None: - return self._invalidity_date.replace(tzinfo=datetime.timezone.utc) - else: - return self._invalidity_date.astimezone(tz=datetime.timezone.utc) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PrecertificateSignedCertificateTimestamps(ExtensionType): - oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS - - def __init__( - self, - signed_certificate_timestamps: Iterable[SignedCertificateTimestamp], - ) -> None: - signed_certificate_timestamps = list(signed_certificate_timestamps) - if not all( - isinstance(sct, SignedCertificateTimestamp) - for sct in signed_certificate_timestamps - ): - raise TypeError( - "Every item in the signed_certificate_timestamps list must be " - "a SignedCertificateTimestamp" - ) - self._signed_certificate_timestamps = signed_certificate_timestamps - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_signed_certificate_timestamps" - ) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash(tuple(self._signed_certificate_timestamps)) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PrecertificateSignedCertificateTimestamps): - return NotImplemented - - return ( - self._signed_certificate_timestamps - == other._signed_certificate_timestamps - ) - - def public_bytes(self) -> 
bytes: - return rust_x509.encode_extension_value(self) - - -class SignedCertificateTimestamps(ExtensionType): - oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS - - def __init__( - self, - signed_certificate_timestamps: Iterable[SignedCertificateTimestamp], - ) -> None: - signed_certificate_timestamps = list(signed_certificate_timestamps) - if not all( - isinstance(sct, SignedCertificateTimestamp) - for sct in signed_certificate_timestamps - ): - raise TypeError( - "Every item in the signed_certificate_timestamps list must be " - "a SignedCertificateTimestamp" - ) - self._signed_certificate_timestamps = signed_certificate_timestamps - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_signed_certificate_timestamps" - ) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash(tuple(self._signed_certificate_timestamps)) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SignedCertificateTimestamps): - return NotImplemented - - return ( - self._signed_certificate_timestamps - == other._signed_certificate_timestamps - ) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class OCSPNonce(ExtensionType): - oid = OCSPExtensionOID.NONCE - - def __init__(self, nonce: bytes) -> None: - if not isinstance(nonce, bytes): - raise TypeError("nonce must be bytes") - - self._nonce = nonce - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OCSPNonce): - return NotImplemented - - return self.nonce == other.nonce - - def __hash__(self) -> int: - return hash(self.nonce) - - def __repr__(self) -> str: - return f"" - - @property - def nonce(self) -> bytes: - return self._nonce - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class OCSPAcceptableResponses(ExtensionType): - oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES - - def __init__(self, responses: Iterable[ObjectIdentifier]) -> None: - responses = list(responses) - if any(not isinstance(r, ObjectIdentifier) for r in responses): - raise TypeError("All responses must be ObjectIdentifiers") - - self._responses = responses - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OCSPAcceptableResponses): - return NotImplemented - - return self._responses == other._responses - - def __hash__(self) -> int: - return hash(tuple(self._responses)) - - def __repr__(self) -> str: - return f"" - - def __iter__(self) -> Iterator[ObjectIdentifier]: - return iter(self._responses) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class IssuingDistributionPoint(ExtensionType): - oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT - - def __init__( - self, - full_name: Iterable[GeneralName] | None, - relative_name: RelativeDistinguishedName | None, - only_contains_user_certs: bool, - only_contains_ca_certs: bool, - only_some_reasons: frozenset[ReasonFlags] | None, - indirect_crl: bool, - only_contains_attribute_certs: bool, - ) -> None: - if full_name is not None: - full_name = list(full_name) - - if only_some_reasons and ( - not isinstance(only_some_reasons, frozenset) - or not all(isinstance(x, ReasonFlags) for x in only_some_reasons) - ): - raise TypeError( - "only_some_reasons must be None or frozenset of ReasonFlags" - ) - - if only_some_reasons and ( - ReasonFlags.unspecified in only_some_reasons - or ReasonFlags.remove_from_crl in only_some_reasons - ): - raise ValueError( - "unspecified and remove_from_crl are not valid reasons in an " - 
"IssuingDistributionPoint" - ) - - if not ( - isinstance(only_contains_user_certs, bool) - and isinstance(only_contains_ca_certs, bool) - and isinstance(indirect_crl, bool) - and isinstance(only_contains_attribute_certs, bool) - ): - raise TypeError( - "only_contains_user_certs, only_contains_ca_certs, " - "indirect_crl and only_contains_attribute_certs " - "must all be boolean." - ) - - # Per RFC5280 Section 5.2.5, the Issuing Distribution Point extension - # in a CRL can have only one of onlyContainsUserCerts, - # onlyContainsCACerts, onlyContainsAttributeCerts set to TRUE. - crl_constraints = [ - only_contains_user_certs, - only_contains_ca_certs, - only_contains_attribute_certs, - ] - - if len([x for x in crl_constraints if x]) > 1: - raise ValueError( - "Only one of the following can be set to True: " - "only_contains_user_certs, only_contains_ca_certs, " - "only_contains_attribute_certs" - ) - - if not any( - [ - only_contains_user_certs, - only_contains_ca_certs, - indirect_crl, - only_contains_attribute_certs, - full_name, - relative_name, - only_some_reasons, - ] - ): - raise ValueError( - "Cannot create empty extension: " - "if only_contains_user_certs, only_contains_ca_certs, " - "indirect_crl, and only_contains_attribute_certs are all False" - ", then either full_name, relative_name, or only_some_reasons " - "must have a value." - ) - - self._only_contains_user_certs = only_contains_user_certs - self._only_contains_ca_certs = only_contains_ca_certs - self._indirect_crl = indirect_crl - self._only_contains_attribute_certs = only_contains_attribute_certs - self._only_some_reasons = only_some_reasons - self._full_name = full_name - self._relative_name = relative_name - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, IssuingDistributionPoint): - return NotImplemented - - return ( - self.full_name == other.full_name - and self.relative_name == other.relative_name - and self.only_contains_user_certs == other.only_contains_user_certs - and self.only_contains_ca_certs == other.only_contains_ca_certs - and self.only_some_reasons == other.only_some_reasons - and self.indirect_crl == other.indirect_crl - and self.only_contains_attribute_certs - == other.only_contains_attribute_certs - ) - - def __hash__(self) -> int: - return hash( - ( - self.full_name, - self.relative_name, - self.only_contains_user_certs, - self.only_contains_ca_certs, - self.only_some_reasons, - self.indirect_crl, - self.only_contains_attribute_certs, - ) - ) - - @property - def full_name(self) -> list[GeneralName] | None: - return self._full_name - - @property - def relative_name(self) -> RelativeDistinguishedName | None: - return self._relative_name - - @property - def only_contains_user_certs(self) -> bool: - return self._only_contains_user_certs - - @property - def only_contains_ca_certs(self) -> bool: - return self._only_contains_ca_certs - - @property - def only_some_reasons( - self, - ) -> frozenset[ReasonFlags] | None: - return self._only_some_reasons - - @property - def indirect_crl(self) -> bool: - return self._indirect_crl - - @property - def only_contains_attribute_certs(self) -> bool: - return self._only_contains_attribute_certs - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class MSCertificateTemplate(ExtensionType): - oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE - - def __init__( - self, - template_id: ObjectIdentifier, - major_version: int | None, - minor_version: int | None, - ) -> None: - 
if not isinstance(template_id, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - self._template_id = template_id - if ( - major_version is not None and not isinstance(major_version, int) - ) or ( - minor_version is not None and not isinstance(minor_version, int) - ): - raise TypeError( - "major_version and minor_version must be integers or None" - ) - self._major_version = major_version - self._minor_version = minor_version - - @property - def template_id(self) -> ObjectIdentifier: - return self._template_id - - @property - def major_version(self) -> int | None: - return self._major_version - - @property - def minor_version(self) -> int | None: - return self._minor_version - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, MSCertificateTemplate): - return NotImplemented - - return ( - self.template_id == other.template_id - and self.major_version == other.major_version - and self.minor_version == other.minor_version - ) - - def __hash__(self) -> int: - return hash((self.template_id, self.major_version, self.minor_version)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class NamingAuthority: - def __init__( - self, - id: ObjectIdentifier | None, - url: str | None, - text: str | None, - ) -> None: - if id is not None and not isinstance(id, ObjectIdentifier): - raise TypeError("id must be an ObjectIdentifier") - - if url is not None and not isinstance(url, str): - raise TypeError("url must be a str") - - if text is not None and not isinstance(text, str): - raise TypeError("text must be a str") - - self._id = id - self._url = url - self._text = text - - @property - def id(self) -> ObjectIdentifier | None: - return self._id - - @property - def url(self) -> str | None: - return self._url - - @property - def text(self) -> str | None: - return self._text - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NamingAuthority): - return NotImplemented - - return ( - self.id == other.id - and self.url == other.url - and self.text == other.text - ) - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.url, - self.text, - ) - ) - - -class ProfessionInfo: - def __init__( - self, - naming_authority: NamingAuthority | None, - profession_items: Iterable[str], - profession_oids: Iterable[ObjectIdentifier] | None, - registration_number: str | None, - add_profession_info: bytes | None, - ) -> None: - if naming_authority is not None and not isinstance( - naming_authority, NamingAuthority - ): - raise TypeError("naming_authority must be a NamingAuthority") - - profession_items = list(profession_items) - if not all(isinstance(item, str) for item in profession_items): - raise TypeError( - "Every item in the profession_items list must be a str" - ) - - if profession_oids is not None: - profession_oids = list(profession_oids) - if not all( - isinstance(oid, ObjectIdentifier) for oid in profession_oids - ): - raise TypeError( - "Every item in the profession_oids list must be an " - "ObjectIdentifier" - ) - - if registration_number is not None and not isinstance( - registration_number, str - ): - raise TypeError("registration_number must be a str") - - if add_profession_info is not None and not isinstance( - add_profession_info, bytes - ): - raise TypeError("add_profession_info must be bytes") - - self._naming_authority = naming_authority - self._profession_items = profession_items - 
self._profession_oids = profession_oids - self._registration_number = registration_number - self._add_profession_info = add_profession_info - - @property - def naming_authority(self) -> NamingAuthority | None: - return self._naming_authority - - @property - def profession_items(self) -> list[str]: - return self._profession_items - - @property - def profession_oids(self) -> list[ObjectIdentifier] | None: - return self._profession_oids - - @property - def registration_number(self) -> str | None: - return self._registration_number - - @property - def add_profession_info(self) -> bytes | None: - return self._add_profession_info - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, ProfessionInfo): - return NotImplemented - - return ( - self.naming_authority == other.naming_authority - and self.profession_items == other.profession_items - and self.profession_oids == other.profession_oids - and self.registration_number == other.registration_number - and self.add_profession_info == other.add_profession_info - ) - - def __hash__(self) -> int: - if self.profession_oids is not None: - profession_oids = tuple(self.profession_oids) - else: - profession_oids = None - return hash( - ( - self.naming_authority, - tuple(self.profession_items), - profession_oids, - self.registration_number, - self.add_profession_info, - ) - ) - - -class Admission: - def __init__( - self, - admission_authority: GeneralName | None, - naming_authority: NamingAuthority | None, - profession_infos: Iterable[ProfessionInfo], - ) -> None: - if admission_authority is not None and not isinstance( - admission_authority, GeneralName - ): - raise TypeError("admission_authority must be a GeneralName") - - if naming_authority is not None and not isinstance( - naming_authority, NamingAuthority - ): - raise TypeError("naming_authority must be a NamingAuthority") - - profession_infos = list(profession_infos) - if not all( - isinstance(info, ProfessionInfo) for info in profession_infos - ): - raise TypeError( - "Every item in the profession_infos list must be a " - "ProfessionInfo" - ) - - self._admission_authority = admission_authority - self._naming_authority = naming_authority - self._profession_infos = profession_infos - - @property - def admission_authority(self) -> GeneralName | None: - return self._admission_authority - - @property - def naming_authority(self) -> NamingAuthority | None: - return self._naming_authority - - @property - def profession_infos(self) -> list[ProfessionInfo]: - return self._profession_infos - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Admission): - return NotImplemented - - return ( - self.admission_authority == other.admission_authority - and self.naming_authority == other.naming_authority - and self.profession_infos == other.profession_infos - ) - - def __hash__(self) -> int: - return hash( - ( - self.admission_authority, - self.naming_authority, - tuple(self.profession_infos), - ) - ) - - -class Admissions(ExtensionType): - oid = ExtensionOID.ADMISSIONS - - def __init__( - self, - authority: GeneralName | None, - admissions: Iterable[Admission], - ) -> None: - if authority is not None and not isinstance(authority, GeneralName): - raise TypeError("authority must be a GeneralName") - - admissions = list(admissions) - if not all( - isinstance(admission, Admission) for admission in admissions - ): - raise TypeError( - "Every item in the contents_of_admissions list 
must be an " - "Admission" - ) - - self._authority = authority - self._admissions = admissions - - __len__, __iter__, __getitem__ = _make_sequence_methods("_admissions") - - @property - def authority(self) -> GeneralName | None: - return self._authority - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Admissions): - return NotImplemented - - return ( - self.authority == other.authority - and self._admissions == other._admissions - ) - - def __hash__(self) -> int: - return hash((self.authority, tuple(self._admissions))) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class UnrecognizedExtension(ExtensionType): - def __init__(self, oid: ObjectIdentifier, value: bytes) -> None: - if not isinstance(oid, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - self._oid = oid - self._value = value - - @property - def oid(self) -> ObjectIdentifier: # type: ignore[override] - return self._oid - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, UnrecognizedExtension): - return NotImplemented - - return self.oid == other.oid and self.value == other.value - - def __hash__(self) -> int: - return hash((self.oid, self.value)) - - def public_bytes(self) -> bytes: - return self.value diff --git a/venv/Lib/site-packages/cryptography/x509/general_name.py b/venv/Lib/site-packages/cryptography/x509/general_name.py deleted file mode 100644 index 672f287..0000000 --- a/venv/Lib/site-packages/cryptography/x509/general_name.py +++ /dev/null @@ -1,281 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import ipaddress -import typing -from email.utils import parseaddr - -from cryptography.x509.name import Name -from cryptography.x509.oid import ObjectIdentifier - -_IPAddressTypes = typing.Union[ - ipaddress.IPv4Address, - ipaddress.IPv6Address, - ipaddress.IPv4Network, - ipaddress.IPv6Network, -] - - -class UnsupportedGeneralNameType(Exception): - pass - - -class GeneralName(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def value(self) -> typing.Any: - """ - Return the value of the object - """ - - -class RFC822Name(GeneralName): - def __init__(self, value: str) -> None: - if isinstance(value, str): - try: - value.encode("ascii") - except UnicodeEncodeError: - raise ValueError( - "RFC822Name values should be passed as an A-label string. " - "This means unicode characters should be encoded via " - "a library like idna." - ) - else: - raise TypeError("value must be string") - - name, address = parseaddr(value) - if name or not address: - # parseaddr has found a name (e.g. Name ) or the entire - # value is an empty string. 
- raise ValueError("Invalid rfc822name value") - - self._value = value - - @property - def value(self) -> str: - return self._value - - @classmethod - def _init_without_validation(cls, value: str) -> RFC822Name: - instance = cls.__new__(cls) - instance._value = value - return instance - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, RFC822Name): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class DNSName(GeneralName): - def __init__(self, value: str) -> None: - if isinstance(value, str): - try: - value.encode("ascii") - except UnicodeEncodeError: - raise ValueError( - "DNSName values should be passed as an A-label string. " - "This means unicode characters should be encoded via " - "a library like idna." - ) - else: - raise TypeError("value must be string") - - self._value = value - - @property - def value(self) -> str: - return self._value - - @classmethod - def _init_without_validation(cls, value: str) -> DNSName: - instance = cls.__new__(cls) - instance._value = value - return instance - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DNSName): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class UniformResourceIdentifier(GeneralName): - def __init__(self, value: str) -> None: - if isinstance(value, str): - try: - value.encode("ascii") - except UnicodeEncodeError: - raise ValueError( - "URI values should be passed as an A-label string. " - "This means unicode characters should be encoded via " - "a library like idna." - ) - else: - raise TypeError("value must be string") - - self._value = value - - @property - def value(self) -> str: - return self._value - - @classmethod - def _init_without_validation(cls, value: str) -> UniformResourceIdentifier: - instance = cls.__new__(cls) - instance._value = value - return instance - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, UniformResourceIdentifier): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class DirectoryName(GeneralName): - def __init__(self, value: Name) -> None: - if not isinstance(value, Name): - raise TypeError("value must be a Name") - - self._value = value - - @property - def value(self) -> Name: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DirectoryName): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class RegisteredID(GeneralName): - def __init__(self, value: ObjectIdentifier) -> None: - if not isinstance(value, ObjectIdentifier): - raise TypeError("value must be an ObjectIdentifier") - - self._value = value - - @property - def value(self) -> ObjectIdentifier: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, RegisteredID): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class IPAddress(GeneralName): - def __init__(self, value: _IPAddressTypes) -> None: - if not isinstance( - value, - ( - ipaddress.IPv4Address, - ipaddress.IPv6Address, - ipaddress.IPv4Network, - 
ipaddress.IPv6Network, - ), - ): - raise TypeError( - "value must be an instance of ipaddress.IPv4Address, " - "ipaddress.IPv6Address, ipaddress.IPv4Network, or " - "ipaddress.IPv6Network" - ) - - self._value = value - - @property - def value(self) -> _IPAddressTypes: - return self._value - - def _packed(self) -> bytes: - if isinstance( - self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address) - ): - return self.value.packed - else: - return ( - self.value.network_address.packed + self.value.netmask.packed - ) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, IPAddress): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class OtherName(GeneralName): - def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None: - if not isinstance(type_id, ObjectIdentifier): - raise TypeError("type_id must be an ObjectIdentifier") - if not isinstance(value, bytes): - raise TypeError("value must be a binary string") - - self._type_id = type_id - self._value = value - - @property - def type_id(self) -> ObjectIdentifier: - return self._type_id - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OtherName): - return NotImplemented - - return self.type_id == other.type_id and self.value == other.value - - def __hash__(self) -> int: - return hash((self.type_id, self.value)) diff --git a/venv/Lib/site-packages/cryptography/x509/name.py b/venv/Lib/site-packages/cryptography/x509/name.py deleted file mode 100644 index 685f921..0000000 --- a/venv/Lib/site-packages/cryptography/x509/name.py +++ /dev/null @@ -1,476 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
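The hunk above removes the vendored general_name.py, which defines the GeneralName hierarchy (DNSName, IPAddress, UniformResourceIdentifier and related classes) used by the x509 extension types deleted earlier in this patch. A minimal sketch of how those classes are typically used, assuming the upstream cryptography package is installed as a normal dependency rather than vendored; the host names and address below are illustrative only:

# Illustrative sketch only; assumes cryptography is installed from PyPI.
import ipaddress
from cryptography import x509

san = x509.SubjectAlternativeName([
    x509.DNSName("example.com"),
    x509.DNSName("www.example.com"),
    x509.IPAddress(ipaddress.IPv4Address("203.0.113.10")),
])

# get_values_for_type returns the .value of each matching GeneralName,
# so DNSName entries come back as plain strings.
print(san.get_values_for_type(x509.DNSName))
# ['example.com', 'www.example.com']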
- -from __future__ import annotations - -import binascii -import re -import sys -import typing -import warnings -from collections.abc import Iterable, Iterator - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.x509.oid import NameOID, ObjectIdentifier - - -class _ASN1Type(utils.Enum): - BitString = 3 - OctetString = 4 - UTF8String = 12 - NumericString = 18 - PrintableString = 19 - T61String = 20 - IA5String = 22 - UTCTime = 23 - GeneralizedTime = 24 - VisibleString = 26 - UniversalString = 28 - BMPString = 30 - - -_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} -_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = { - NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, - NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, - NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, - NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, - NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, - NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, -} - -# Type alias -_OidNameMap = typing.Mapping[ObjectIdentifier, str] -_NameOidMap = typing.Mapping[str, ObjectIdentifier] - -#: Short attribute names from RFC 4514: -#: https://tools.ietf.org/html/rfc4514#page-7 -_NAMEOID_TO_NAME: _OidNameMap = { - NameOID.COMMON_NAME: "CN", - NameOID.LOCALITY_NAME: "L", - NameOID.STATE_OR_PROVINCE_NAME: "ST", - NameOID.ORGANIZATION_NAME: "O", - NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", - NameOID.COUNTRY_NAME: "C", - NameOID.STREET_ADDRESS: "STREET", - NameOID.DOMAIN_COMPONENT: "DC", - NameOID.USER_ID: "UID", -} -_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} - -_NAMEOID_LENGTH_LIMIT = { - NameOID.COUNTRY_NAME: (2, 2), - NameOID.JURISDICTION_COUNTRY_NAME: (2, 2), - NameOID.COMMON_NAME: (1, 64), -} - - -def _escape_dn_value(val: str | bytes) -> str: - """Escape special characters in RFC4514 Distinguished Name value.""" - - if not val: - return "" - - # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character - # followed by the hexadecimal encoding of the octets. - if isinstance(val, bytes): - return "#" + binascii.hexlify(val).decode("utf8") - - # See https://tools.ietf.org/html/rfc4514#section-2.4 - val = val.replace("\\", "\\\\") - val = val.replace('"', '\\"') - val = val.replace("+", "\\+") - val = val.replace(",", "\\,") - val = val.replace(";", "\\;") - val = val.replace("<", "\\<") - val = val.replace(">", "\\>") - val = val.replace("\0", "\\00") - - if val[0] in ("#", " "): - val = "\\" + val - if val[-1] == " ": - val = val[:-1] + "\\ " - - return val - - -def _unescape_dn_value(val: str) -> str: - if not val: - return "" - - # See https://tools.ietf.org/html/rfc4514#section-3 - - # special = escaped / SPACE / SHARP / EQUALS - # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE - def sub(m): - val = m.group(1) - # Regular escape - if len(val) == 1: - return val - # Hex-value scape - return chr(int(val, 16)) - - return _RFC4514NameParser._PAIR_RE.sub(sub, val) - - -NameAttributeValueType = typing.TypeVar( - "NameAttributeValueType", - typing.Union[str, bytes], - str, - bytes, - covariant=True, -) - - -class NameAttribute(typing.Generic[NameAttributeValueType]): - def __init__( - self, - oid: ObjectIdentifier, - value: NameAttributeValueType, - _type: _ASN1Type | None = None, - *, - _validate: bool = True, - ) -> None: - if not isinstance(oid, ObjectIdentifier): - raise TypeError( - "oid argument must be an ObjectIdentifier instance." 
- ) - if _type == _ASN1Type.BitString: - if oid != NameOID.X500_UNIQUE_IDENTIFIER: - raise TypeError( - "oid must be X500_UNIQUE_IDENTIFIER for BitString type." - ) - if not isinstance(value, bytes): - raise TypeError("value must be bytes for BitString") - elif not isinstance(value, str): - raise TypeError("value argument must be a str") - - length_limits = _NAMEOID_LENGTH_LIMIT.get(oid) - if length_limits is not None: - min_length, max_length = length_limits - assert isinstance(value, str) - c_len = len(value.encode("utf8")) - if c_len < min_length or c_len > max_length: - msg = ( - f"Attribute's length must be >= {min_length} and " - f"<= {max_length}, but it was {c_len}" - ) - if _validate is True: - raise ValueError(msg) - else: - warnings.warn(msg, stacklevel=2) - - # The appropriate ASN1 string type varies by OID and is defined across - # multiple RFCs including 2459, 3280, and 5280. In general UTF8String - # is preferred (2459), but 3280 and 5280 specify several OIDs with - # alternate types. This means when we see the sentinel value we need - # to look up whether the OID has a non-UTF8 type. If it does, set it - # to that. Otherwise, UTF8! - if _type is None: - _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String) - - if not isinstance(_type, _ASN1Type): - raise TypeError("_type must be from the _ASN1Type enum") - - self._oid = oid - self._value: NameAttributeValueType = value - self._type: _ASN1Type = _type - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def value(self) -> NameAttributeValueType: - return self._value - - @property - def rfc4514_attribute_name(self) -> str: - """ - The short attribute name (for example "CN") if available, - otherwise the OID dotted string. - """ - return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) - - def rfc4514_string( - self, attr_name_overrides: _OidNameMap | None = None - ) -> str: - """ - Format as RFC4514 Distinguished Name string. - - Use short attribute name if available, otherwise fall back to OID - dotted string. - """ - attr_name = ( - attr_name_overrides.get(self.oid) if attr_name_overrides else None - ) - if attr_name is None: - attr_name = self.rfc4514_attribute_name - - return f"{attr_name}={_escape_dn_value(self.value)}" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NameAttribute): - return NotImplemented - - return self.oid == other.oid and self.value == other.value - - def __hash__(self) -> int: - return hash((self.oid, self.value)) - - def __repr__(self) -> str: - return f"" - - -class RelativeDistinguishedName: - def __init__(self, attributes: Iterable[NameAttribute]): - attributes = list(attributes) - if not attributes: - raise ValueError("a relative distinguished name cannot be empty") - if not all(isinstance(x, NameAttribute) for x in attributes): - raise TypeError("attributes must be an iterable of NameAttribute") - - # Keep list and frozenset to preserve attribute order where it matters - self._attributes = attributes - self._attribute_set = frozenset(attributes) - - if len(self._attribute_set) != len(attributes): - raise ValueError("duplicate attributes are not allowed") - - def get_attributes_for_oid( - self, - oid: ObjectIdentifier, - ) -> list[NameAttribute[str | bytes]]: - return [i for i in self if i.oid == oid] - - def rfc4514_string( - self, attr_name_overrides: _OidNameMap | None = None - ) -> str: - """ - Format as RFC4514 Distinguished Name string. 
- - Within each RDN, attributes are joined by '+', although that is rarely - used in certificates. - """ - return "+".join( - attr.rfc4514_string(attr_name_overrides) - for attr in self._attributes - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, RelativeDistinguishedName): - return NotImplemented - - return self._attribute_set == other._attribute_set - - def __hash__(self) -> int: - return hash(self._attribute_set) - - def __iter__(self) -> Iterator[NameAttribute]: - return iter(self._attributes) - - def __len__(self) -> int: - return len(self._attributes) - - def __repr__(self) -> str: - return f"" - - -class Name: - @typing.overload - def __init__(self, attributes: Iterable[NameAttribute]) -> None: ... - - @typing.overload - def __init__( - self, attributes: Iterable[RelativeDistinguishedName] - ) -> None: ... - - def __init__( - self, - attributes: Iterable[NameAttribute | RelativeDistinguishedName], - ) -> None: - attributes = list(attributes) - if all(isinstance(x, NameAttribute) for x in attributes): - self._attributes = [ - RelativeDistinguishedName([typing.cast(NameAttribute, x)]) - for x in attributes - ] - elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): - self._attributes = typing.cast( - typing.List[RelativeDistinguishedName], attributes - ) - else: - raise TypeError( - "attributes must be a list of NameAttribute" - " or a list RelativeDistinguishedName" - ) - - @classmethod - def from_rfc4514_string( - cls, - data: str, - attr_name_overrides: _NameOidMap | None = None, - ) -> Name: - return _RFC4514NameParser(data, attr_name_overrides or {}).parse() - - def rfc4514_string( - self, attr_name_overrides: _OidNameMap | None = None - ) -> str: - """ - Format as RFC4514 Distinguished Name string. - For example 'CN=foobar.com,O=Foo Corp,C=US' - - An X.509 name is a two-level structure: a list of sets of attributes. - Each list element is separated by ',' and within each list element, set - elements are separated by '+'. The latter is almost never used in - real world certificates. According to RFC4514 section 2.1 the - RDNSequence must be reversed when converting to string representation. - """ - return ",".join( - attr.rfc4514_string(attr_name_overrides) - for attr in reversed(self._attributes) - ) - - def get_attributes_for_oid( - self, - oid: ObjectIdentifier, - ) -> list[NameAttribute[str | bytes]]: - return [i for i in self if i.oid == oid] - - @property - def rdns(self) -> list[RelativeDistinguishedName]: - return self._attributes - - def public_bytes(self, backend: typing.Any = None) -> bytes: - return rust_x509.encode_name_bytes(self) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Name): - return NotImplemented - - return self._attributes == other._attributes - - def __hash__(self) -> int: - # TODO: this is relatively expensive, if this looks like a bottleneck - # for you, consider optimizing! 
- return hash(tuple(self._attributes)) - - def __iter__(self) -> Iterator[NameAttribute]: - for rdn in self._attributes: - yield from rdn - - def __len__(self) -> int: - return sum(len(rdn) for rdn in self._attributes) - - def __repr__(self) -> str: - rdns = ",".join(attr.rfc4514_string() for attr in self._attributes) - return f"" - - -class _RFC4514NameParser: - _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+") - _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*") - - _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})" - _PAIR_RE = re.compile(_PAIR) - _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" - _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" - _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" - _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]" - _LEADCHAR = rf"{_LUTF1}|{_UTFMB}" - _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}" - _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}" - _STRING_RE = re.compile( - rf""" - ( - ({_LEADCHAR}|{_PAIR}) - ( - ({_STRINGCHAR}|{_PAIR})* - ({_TRAILCHAR}|{_PAIR}) - )? - )? - """, - re.VERBOSE, - ) - _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+") - - def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None: - self._data = data - self._idx = 0 - - self._attr_name_overrides = attr_name_overrides - - def _has_data(self) -> bool: - return self._idx < len(self._data) - - def _peek(self) -> str | None: - if self._has_data(): - return self._data[self._idx] - return None - - def _read_char(self, ch: str) -> None: - if self._peek() != ch: - raise ValueError - self._idx += 1 - - def _read_re(self, pat) -> str: - match = pat.match(self._data, pos=self._idx) - if match is None: - raise ValueError - val = match.group() - self._idx += len(val) - return val - - def parse(self) -> Name: - """ - Parses the `data` string and converts it to a Name. - - According to RFC4514 section 2.1 the RDNSequence must be - reversed when converting to string representation. So, when - we parse it, we need to reverse again to get the RDNs on the - correct order. - """ - - if not self._has_data(): - return Name([]) - - rdns = [self._parse_rdn()] - - while self._has_data(): - self._read_char(",") - rdns.append(self._parse_rdn()) - - return Name(reversed(rdns)) - - def _parse_rdn(self) -> RelativeDistinguishedName: - nas = [self._parse_na()] - while self._peek() == "+": - self._read_char("+") - nas.append(self._parse_na()) - - return RelativeDistinguishedName(nas) - - def _parse_na(self) -> NameAttribute: - try: - oid_value = self._read_re(self._OID_RE) - except ValueError: - name = self._read_re(self._DESCR_RE) - oid = self._attr_name_overrides.get( - name, _NAME_TO_NAMEOID.get(name) - ) - if oid is None: - raise ValueError - else: - oid = ObjectIdentifier(oid_value) - - self._read_char("=") - if self._peek() == "#": - value = self._read_re(self._HEXSTRING_RE) - value = binascii.unhexlify(value[1:]).decode() - else: - raw_value = self._read_re(self._STRING_RE) - value = _unescape_dn_value(raw_value) - - return NameAttribute(oid, value) diff --git a/venv/Lib/site-packages/cryptography/x509/ocsp.py b/venv/Lib/site-packages/cryptography/x509/ocsp.py deleted file mode 100644 index f61ed80..0000000 --- a/venv/Lib/site-packages/cryptography/x509/ocsp.py +++ /dev/null @@ -1,379 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
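The name.py deleted above implements RFC 4514 serialisation and parsing for X.509 names (Name.rfc4514_string and Name.from_rfc4514_string). A minimal round-trip sketch, assuming a recent upstream cryptography release and purely illustrative attribute values:

# Illustrative sketch only; assumes cryptography >= 37 installed from PyPI.
from cryptography import x509
from cryptography.x509.oid import NameOID

name = x509.Name([
    x509.NameAttribute(NameOID.COUNTRY_NAME, "US"),
    x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Foo Corp"),
    x509.NameAttribute(NameOID.COMMON_NAME, "foobar.com"),
])

# Per RFC 4514 section 2.1 the RDN sequence is reversed when rendered,
# so the common name comes first in the string form.
s = name.rfc4514_string()   # 'CN=foobar.com,O=Foo Corp,C=US'
assert x509.Name.from_rfc4514_string(s) == name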
- -from __future__ import annotations - -import datetime -from collections.abc import Iterable - -from cryptography import utils, x509 -from cryptography.hazmat.bindings._rust import ocsp -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPrivateKeyTypes, -) -from cryptography.x509.base import _reject_duplicate_extension - - -class OCSPResponderEncoding(utils.Enum): - HASH = "By Hash" - NAME = "By Name" - - -class OCSPResponseStatus(utils.Enum): - SUCCESSFUL = 0 - MALFORMED_REQUEST = 1 - INTERNAL_ERROR = 2 - TRY_LATER = 3 - SIG_REQUIRED = 5 - UNAUTHORIZED = 6 - - -_ALLOWED_HASHES = ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, -) - - -def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: - if not isinstance(algorithm, _ALLOWED_HASHES): - raise ValueError( - "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" - ) - - -class OCSPCertStatus(utils.Enum): - GOOD = 0 - REVOKED = 1 - UNKNOWN = 2 - - -class _SingleResponse: - def __init__( - self, - resp: tuple[x509.Certificate, x509.Certificate] | None, - resp_hash: tuple[bytes, bytes, int] | None, - algorithm: hashes.HashAlgorithm, - cert_status: OCSPCertStatus, - this_update: datetime.datetime, - next_update: datetime.datetime | None, - revocation_time: datetime.datetime | None, - revocation_reason: x509.ReasonFlags | None, - ): - _verify_algorithm(algorithm) - if not isinstance(this_update, datetime.datetime): - raise TypeError("this_update must be a datetime object") - if next_update is not None and not isinstance( - next_update, datetime.datetime - ): - raise TypeError("next_update must be a datetime object or None") - - self._resp = resp - self._resp_hash = resp_hash - self._algorithm = algorithm - self._this_update = this_update - self._next_update = next_update - - if not isinstance(cert_status, OCSPCertStatus): - raise TypeError( - "cert_status must be an item from the OCSPCertStatus enum" - ) - if cert_status is not OCSPCertStatus.REVOKED: - if revocation_time is not None: - raise ValueError( - "revocation_time can only be provided if the certificate " - "is revoked" - ) - if revocation_reason is not None: - raise ValueError( - "revocation_reason can only be provided if the certificate" - " is revoked" - ) - else: - if not isinstance(revocation_time, datetime.datetime): - raise TypeError("revocation_time must be a datetime object") - - if revocation_reason is not None and not isinstance( - revocation_reason, x509.ReasonFlags - ): - raise TypeError( - "revocation_reason must be an item from the ReasonFlags " - "enum or None" - ) - - self._cert_status = cert_status - self._revocation_time = revocation_time - self._revocation_reason = revocation_reason - - -OCSPRequest = ocsp.OCSPRequest -OCSPResponse = ocsp.OCSPResponse -OCSPSingleResponse = ocsp.OCSPSingleResponse - - -class OCSPRequestBuilder: - def __init__( - self, - request: tuple[ - x509.Certificate, x509.Certificate, hashes.HashAlgorithm - ] - | None = None, - request_hash: tuple[bytes, bytes, int, hashes.HashAlgorithm] - | None = None, - extensions: list[x509.Extension[x509.ExtensionType]] = [], - ) -> None: - self._request = request - self._request_hash = request_hash - self._extensions = extensions - - def add_certificate( - self, - cert: x509.Certificate, - issuer: x509.Certificate, - algorithm: hashes.HashAlgorithm, - ) -> OCSPRequestBuilder: - if self._request is not None or self._request_hash is not None: - raise ValueError("Only one 
certificate can be added to a request") - - _verify_algorithm(algorithm) - if not isinstance(cert, x509.Certificate) or not isinstance( - issuer, x509.Certificate - ): - raise TypeError("cert and issuer must be a Certificate") - - return OCSPRequestBuilder( - (cert, issuer, algorithm), self._request_hash, self._extensions - ) - - def add_certificate_by_hash( - self, - issuer_name_hash: bytes, - issuer_key_hash: bytes, - serial_number: int, - algorithm: hashes.HashAlgorithm, - ) -> OCSPRequestBuilder: - if self._request is not None or self._request_hash is not None: - raise ValueError("Only one certificate can be added to a request") - - if not isinstance(serial_number, int): - raise TypeError("serial_number must be an integer") - - _verify_algorithm(algorithm) - utils._check_bytes("issuer_name_hash", issuer_name_hash) - utils._check_bytes("issuer_key_hash", issuer_key_hash) - if algorithm.digest_size != len( - issuer_name_hash - ) or algorithm.digest_size != len(issuer_key_hash): - raise ValueError( - "issuer_name_hash and issuer_key_hash must be the same length " - "as the digest size of the algorithm" - ) - - return OCSPRequestBuilder( - self._request, - (issuer_name_hash, issuer_key_hash, serial_number, algorithm), - self._extensions, - ) - - def add_extension( - self, extval: x509.ExtensionType, critical: bool - ) -> OCSPRequestBuilder: - if not isinstance(extval, x509.ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = x509.Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return OCSPRequestBuilder( - self._request, self._request_hash, [*self._extensions, extension] - ) - - def build(self) -> OCSPRequest: - if self._request is None and self._request_hash is None: - raise ValueError("You must add a certificate before building") - - return ocsp.create_ocsp_request(self) - - -class OCSPResponseBuilder: - def __init__( - self, - response: _SingleResponse | None = None, - responder_id: tuple[x509.Certificate, OCSPResponderEncoding] - | None = None, - certs: list[x509.Certificate] | None = None, - extensions: list[x509.Extension[x509.ExtensionType]] = [], - ): - self._response = response - self._responder_id = responder_id - self._certs = certs - self._extensions = extensions - - def add_response( - self, - cert: x509.Certificate, - issuer: x509.Certificate, - algorithm: hashes.HashAlgorithm, - cert_status: OCSPCertStatus, - this_update: datetime.datetime, - next_update: datetime.datetime | None, - revocation_time: datetime.datetime | None, - revocation_reason: x509.ReasonFlags | None, - ) -> OCSPResponseBuilder: - if self._response is not None: - raise ValueError("Only one response per OCSPResponse.") - - if not isinstance(cert, x509.Certificate) or not isinstance( - issuer, x509.Certificate - ): - raise TypeError("cert and issuer must be a Certificate") - - singleresp = _SingleResponse( - (cert, issuer), - None, - algorithm, - cert_status, - this_update, - next_update, - revocation_time, - revocation_reason, - ) - return OCSPResponseBuilder( - singleresp, - self._responder_id, - self._certs, - self._extensions, - ) - - def add_response_by_hash( - self, - issuer_name_hash: bytes, - issuer_key_hash: bytes, - serial_number: int, - algorithm: hashes.HashAlgorithm, - cert_status: OCSPCertStatus, - this_update: datetime.datetime, - next_update: datetime.datetime | None, - revocation_time: datetime.datetime | None, - revocation_reason: x509.ReasonFlags | None, - ) -> OCSPResponseBuilder: - if self._response 
is not None: - raise ValueError("Only one response per OCSPResponse.") - - if not isinstance(serial_number, int): - raise TypeError("serial_number must be an integer") - - utils._check_bytes("issuer_name_hash", issuer_name_hash) - utils._check_bytes("issuer_key_hash", issuer_key_hash) - _verify_algorithm(algorithm) - if algorithm.digest_size != len( - issuer_name_hash - ) or algorithm.digest_size != len(issuer_key_hash): - raise ValueError( - "issuer_name_hash and issuer_key_hash must be the same length " - "as the digest size of the algorithm" - ) - - singleresp = _SingleResponse( - None, - (issuer_name_hash, issuer_key_hash, serial_number), - algorithm, - cert_status, - this_update, - next_update, - revocation_time, - revocation_reason, - ) - return OCSPResponseBuilder( - singleresp, - self._responder_id, - self._certs, - self._extensions, - ) - - def responder_id( - self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate - ) -> OCSPResponseBuilder: - if self._responder_id is not None: - raise ValueError("responder_id can only be set once") - if not isinstance(responder_cert, x509.Certificate): - raise TypeError("responder_cert must be a Certificate") - if not isinstance(encoding, OCSPResponderEncoding): - raise TypeError( - "encoding must be an element from OCSPResponderEncoding" - ) - - return OCSPResponseBuilder( - self._response, - (responder_cert, encoding), - self._certs, - self._extensions, - ) - - def certificates( - self, certs: Iterable[x509.Certificate] - ) -> OCSPResponseBuilder: - if self._certs is not None: - raise ValueError("certificates may only be set once") - certs = list(certs) - if len(certs) == 0: - raise ValueError("certs must not be an empty list") - if not all(isinstance(x, x509.Certificate) for x in certs): - raise TypeError("certs must be a list of Certificates") - return OCSPResponseBuilder( - self._response, - self._responder_id, - certs, - self._extensions, - ) - - def add_extension( - self, extval: x509.ExtensionType, critical: bool - ) -> OCSPResponseBuilder: - if not isinstance(extval, x509.ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = x509.Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return OCSPResponseBuilder( - self._response, - self._responder_id, - self._certs, - [*self._extensions, extension], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: hashes.HashAlgorithm | None, - ) -> OCSPResponse: - if self._response is None: - raise ValueError("You must add a response before signing") - if self._responder_id is None: - raise ValueError("You must add a responder_id before signing") - - return ocsp.create_ocsp_response( - OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm - ) - - @classmethod - def build_unsuccessful( - cls, response_status: OCSPResponseStatus - ) -> OCSPResponse: - if not isinstance(response_status, OCSPResponseStatus): - raise TypeError( - "response_status must be an item from OCSPResponseStatus" - ) - if response_status is OCSPResponseStatus.SUCCESSFUL: - raise ValueError("response_status cannot be SUCCESSFUL") - - return ocsp.create_ocsp_response(response_status, None, None, None) - - -load_der_ocsp_request = ocsp.load_der_ocsp_request -load_der_ocsp_response = ocsp.load_der_ocsp_response diff --git a/venv/Lib/site-packages/cryptography/x509/oid.py b/venv/Lib/site-packages/cryptography/x509/oid.py deleted file mode 100644 index 520fc7a..0000000 --- 
a/venv/Lib/site-packages/cryptography/x509/oid.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat._oid import ( - AttributeOID, - AuthorityInformationAccessOID, - CertificatePoliciesOID, - CRLEntryExtensionOID, - ExtendedKeyUsageOID, - ExtensionOID, - NameOID, - ObjectIdentifier, - OCSPExtensionOID, - OtherNameFormOID, - PublicKeyAlgorithmOID, - SignatureAlgorithmOID, - SubjectInformationAccessOID, -) - -__all__ = [ - "AttributeOID", - "AuthorityInformationAccessOID", - "CRLEntryExtensionOID", - "CertificatePoliciesOID", - "ExtendedKeyUsageOID", - "ExtensionOID", - "NameOID", - "OCSPExtensionOID", - "ObjectIdentifier", - "OtherNameFormOID", - "PublicKeyAlgorithmOID", - "SignatureAlgorithmOID", - "SubjectInformationAccessOID", -] diff --git a/venv/Lib/site-packages/cryptography/x509/verification.py b/venv/Lib/site-packages/cryptography/x509/verification.py deleted file mode 100644 index 2db4324..0000000 --- a/venv/Lib/site-packages/cryptography/x509/verification.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.x509.general_name import DNSName, IPAddress - -__all__ = [ - "ClientVerifier", - "Criticality", - "ExtensionPolicy", - "Policy", - "PolicyBuilder", - "ServerVerifier", - "Store", - "Subject", - "VerificationError", - "VerifiedClient", -] - -Store = rust_x509.Store -Subject = typing.Union[DNSName, IPAddress] -VerifiedClient = rust_x509.VerifiedClient -ClientVerifier = rust_x509.ClientVerifier -ServerVerifier = rust_x509.ServerVerifier -PolicyBuilder = rust_x509.PolicyBuilder -Policy = rust_x509.Policy -ExtensionPolicy = rust_x509.ExtensionPolicy -Criticality = rust_x509.Criticality -VerificationError = rust_x509.VerificationError diff --git a/venv/Lib/site-packages/darabonba/__init__.py b/venv/Lib/site-packages/darabonba/__init__.py deleted file mode 100644 index 222a4c1..0000000 --- a/venv/Lib/site-packages/darabonba/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "1.0.5" \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 6741a45..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/core.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/core.cpython-312.pyc deleted file mode 100644 index 41067f0..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/date.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/date.cpython-312.pyc deleted file mode 100644 index 4f5bd0b..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/date.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/decorators.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/decorators.cpython-312.pyc 
deleted file mode 100644 index 0b2570e..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/decorators.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/event.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/event.cpython-312.pyc deleted file mode 100644 index 3eece2a..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/event.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 81ca003..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/file.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/file.cpython-312.pyc deleted file mode 100644 index 8711c1e..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/file.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/model.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/model.cpython-312.pyc deleted file mode 100644 index fc6d5c5..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/model.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/number.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/number.cpython-312.pyc deleted file mode 100644 index 5e7ea3c..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/number.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/request.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/request.cpython-312.pyc deleted file mode 100644 index eb6b6e2..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/request.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/response.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/response.cpython-312.pyc deleted file mode 100644 index 07ff61a..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/response.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/runtime.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/runtime.cpython-312.pyc deleted file mode 100644 index 0fdd475..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/runtime.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/__pycache__/url.cpython-312.pyc b/venv/Lib/site-packages/darabonba/__pycache__/url.cpython-312.pyc deleted file mode 100644 index 9c1fc43..0000000 Binary files a/venv/Lib/site-packages/darabonba/__pycache__/url.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/core.py b/venv/Lib/site-packages/darabonba/core.py deleted file mode 100644 index 22245ab..0000000 --- a/venv/Lib/site-packages/darabonba/core.py +++ /dev/null @@ -1,667 +0,0 @@ -import asyncio -import aiohttp -import logging -import io -import os -import ssl -import time -import re -import certifi -import json -from requests import status_codes, adapters, PreparedRequest -from typing import Any, Dict, Optional, Union -from enum import Enum -from urllib.parse import urlencode, urlparse -from requests import status_codes, adapters, PreparedRequest, Session -from darabonba.exceptions import RequiredArgumentException, 
RetryError -from darabonba.model import DaraModel -from darabonba.request import DaraRequest -from darabonba.response import DaraResponse -from darabonba.utils.stream import BaseStream, SSEResponseWrapper, SyncSSEResponseWrapper -from darabonba.policy.retry import RetryOptions, RetryPolicyContext - - -DEFAULT_CONNECT_TIMEOUT = 5000 -DEFAULT_READ_TIMEOUT = 10000 -DEFAULT_POOL_SIZE = 10 -MAX_DELAY_TIME = 120 * 1000 -MIN_DELAY_TIME = 100 - -logger = logging.getLogger('darabonba-core') -logger.setLevel(logging.DEBUG) -ch = logging.StreamHandler() -logger.addHandler(ch) - -class _ModelEncoder(json.JSONEncoder): - def default(self, o: Any) -> Any: - if isinstance(o, DaraModel): - return o.to_map() - elif isinstance(o, bytes): - return o.decode('utf-8') - super().default(o) - -class TLSVersion(Enum): - TLSv1 = 'TLSv1' - TLSv1_1 = 'TLSv1.1' - TLSv1_2 = 'TLSv1.2' - TLSv1_3 = 'TLSv1.3' - -class _TLSAdapter(adapters.HTTPAdapter): - """A HTTPAdapter that uses an arbitrary TLS version.""" - - def __init__(self, ssl_context=None, **kwargs): - self.ssl_context = ssl_context - super().__init__(**kwargs) - - def init_poolmanager(self, *args, **kwargs): - """Override the init_poolmanager method to set the SSL.""" - kwargs['ssl_context'] = self.ssl_context - super().init_poolmanager(*args, **kwargs) - - -class DaraCore: - _sessions = {} - http_adapter = adapters.HTTPAdapter(pool_connections=DEFAULT_POOL_SIZE, pool_maxsize=DEFAULT_POOL_SIZE * 4) - https_adapter = adapters.HTTPAdapter(pool_connections=DEFAULT_POOL_SIZE, pool_maxsize=DEFAULT_POOL_SIZE * 4) - - @staticmethod - def to_json_string( - val: Any, - ) -> str: - """ - Stringify a value by JSON format - @return: the JSON format string - """ - if isinstance(val, str): - return str(val) - return json.dumps( - val, cls=_ModelEncoder, ensure_ascii=False, separators=(",", ":") - ) - - @staticmethod - def _set_tls_minimum_version(sls_context, tls_min_version): - context = sls_context - if tls_min_version is not None: - if tls_min_version == 'TLSv1': - context.minimum_version = ssl.TLSVersion.TLSv1 - elif tls_min_version == 'TLSv1.1': - context.minimum_version = ssl.TLSVersion.TLSv1_1 - elif tls_min_version == 'TLSv1.2': - context.minimum_version = ssl.TLSVersion.TLSv1_2 - elif tls_min_version == 'TLSv1.3': - context.minimum_version = ssl.TLSVersion.TLSv1_3 - return context - - @staticmethod - def get_adapter(prefix, tls_min_version: str = None): - ca_cert = certifi.where() - context = ssl.create_default_context() - if ca_cert and prefix.upper() == 'HTTPS': - context = DaraCore._set_tls_minimum_version(context, tls_min_version) - context.load_verify_locations(ca_cert) - adapter = _TLSAdapter(ssl_context=context, pool_connections=DEFAULT_POOL_SIZE, - pool_maxsize=DEFAULT_POOL_SIZE * 4) - return adapter - - @staticmethod - def _prepare_http_debug(request, symbol): - base = '' - for key, value in request.headers.items(): - base += f'\n{symbol} {key} : {value}' - return base - - @staticmethod - def _do_http_debug(request, response): - # logger the request - url = urlparse(request.url) - request_base = f'\n> {request.method.upper()} {url.path + url.query} HTTP/1.1' - logger.debug(request_base + DaraCore._prepare_http_debug(request, '>')) - - # logger the response - response_base = f'\n< HTTP/1.1 {response.status_code}' \ - f' {status_codes._codes.get(response.status_code)[0].upper()}' - logger.debug(response_base + DaraCore._prepare_http_debug(response, '<')) - - @staticmethod - def compose_url(request): - host = request.headers.get('host') - if not host: - raise 
RequiredArgumentException('endpoint') - else: - host = host.rstrip('/') - protocol = f'{request.protocol.lower()}://' - pathname = request.pathname - - if host.startswith(('http://', 'https://')): - protocol = '' - - if request.port == 80: - port = '' - else: - port = f':{request.port}' - - url = protocol + host + port + pathname - - if request.query: - if "?" in url: - if not url.endswith("&"): - url += "&" - else: - url += "?" - - encode_query = {} - for key in request.query: - value = request.query[key] - if value is not None: - encode_query[key] = str(value) - url += urlencode(encode_query) - return url.rstrip("?&") - - @staticmethod - async def async_do_action( - request: DaraRequest, - runtime_option=None - ) -> DaraResponse: - runtime_option = runtime_option or {} - - url = DaraCore.compose_url(request) - ignore_ssl = runtime_option.get('ignoreSSL', False) - verify: Union[bool, str] = not ignore_ssl - tls_min_version = runtime_option.get('tlsMinVersion') - if isinstance(tls_min_version, Enum): - tls_min_version = tls_min_version.value - - if verify: - ca = runtime_option.get('ca') - if ca is not None: - verify = ca - - cert = runtime_option.get('cert', None) - - timeout = runtime_option.get('timeout') - connect_timeout = runtime_option.get('connectTimeout') or timeout or DEFAULT_CONNECT_TIMEOUT - read_timeout = runtime_option.get('readTimeout') or timeout or DEFAULT_READ_TIMEOUT - - connect_timeout, read_timeout = (int(connect_timeout) / 1000, int(read_timeout) / 1000) - - proxy = None - if request.protocol.upper() == 'HTTP': - proxy = runtime_option.get('httpProxy') - if not proxy: - proxy = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy') - elif request.protocol.upper() == 'HTTPS': - proxy = runtime_option.get('httpsProxy') - if not proxy: - proxy = os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy') - - connector = None - ca_cert = certifi.where() - ssl_context = None - if isinstance(verify, str) and request.protocol.upper() == 'HTTPS': - ssl_context = ssl.create_default_context() - ssl_context = DaraCore._set_tls_minimum_version(ssl_context, tls_min_version) - ssl_context.load_verify_locations(verify) - # Handle cert if provided - if cert is not None: - if isinstance(cert, (list, tuple)): - ssl_context.load_cert_chain(certfile=cert[0], keyfile=cert[1] if len(cert) > 1 else None) - else: - ssl_context.load_cert_chain(certfile=cert, keyfile=None) - connector = aiohttp.TCPConnector(ssl=ssl_context) - elif ca_cert and request.protocol.upper() == 'HTTPS' and verify: - ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) - ssl_context = DaraCore._set_tls_minimum_version(ssl_context, tls_min_version) - ssl_context.load_verify_locations(ca_cert) - # Handle cert if provided - if cert is not None: - if isinstance(cert, (list, tuple)): - ssl_context.load_cert_chain(certfile=cert[0], keyfile=cert[1] if len(cert) > 1 else None) - else: - ssl_context.load_cert_chain(certfile=cert, keyfile=None) - connector = aiohttp.TCPConnector(ssl=ssl_context) - else: - verify = False - - timeout = aiohttp.ClientTimeout( - sock_read=read_timeout, - sock_connect=connect_timeout - ) - async with aiohttp.ClientSession( - connector=connector - ) as s: - body = b'' - if isinstance(request.body, BaseStream): - for content in request.body: - body += content - elif isinstance(request.body, str): - body = request.body.encode('utf-8') - else: - body = request.body or b'' - try: - ssl_param: Union[bool, ssl.SSLContext] = ssl_context if ssl_context is not None else bool(verify) - 
async with s.request(request.method, url, - data=body, - headers=request.headers, - ssl=ssl_param, - proxy=proxy, - timeout=timeout) as response: - tea_resp: DaraResponse = DaraResponse() - tea_resp.body = await response.read() - tea_resp.headers = dict({k.lower(): v for k, v in response.headers.items()}) - tea_resp.status_code = response.status - tea_resp.status_message = response.reason - tea_resp.response = response - except IOError as e: - raise RetryError(str(e)) - return tea_resp - - @staticmethod - def do_action( - request: DaraRequest, - runtime_option=None - ) -> DaraResponse: - url = DaraCore.compose_url(request) - - runtime_option = runtime_option or {} - - verify = not runtime_option.get('ignoreSSL', False) - tls_min_version = runtime_option.get('tlsMinVersion') - if isinstance(tls_min_version, Enum): - tls_min_version = tls_min_version.value - - if verify: - verify = runtime_option.get('ca', True) if runtime_option.get('ca', True) is not None else True - cert = runtime_option.get('cert', None) - - timeout = runtime_option.get('timeout') - connect_timeout = runtime_option.get('connectTimeout') or timeout or DEFAULT_CONNECT_TIMEOUT - read_timeout = runtime_option.get('readTimeout') or timeout or DEFAULT_READ_TIMEOUT - - timeout = (int(connect_timeout) / 1000, int(read_timeout) / 1000) - - if isinstance(request.body, str): - request.body = request.body.encode('utf-8') - - p = PreparedRequest() - p.prepare( - method=request.method.upper(), - url=url, - data=request.body, - headers=request.headers, - ) - - proxies = {} - http_proxy = runtime_option.get('httpProxy') - https_proxy = runtime_option.get('httpsProxy') - no_proxy = runtime_option.get('noProxy') - - if not http_proxy: - http_proxy = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy') - if not https_proxy: - https_proxy = os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy') - - if http_proxy: - proxies['http'] = http_proxy - if https_proxy: - proxies['https'] = https_proxy - if no_proxy: - proxies['no_proxy'] = no_proxy - - host = request.headers.get('host') - host = host.rstrip('/') - - session_key = f'{request.protocol.lower()}://{host}:{request.port}' - session = DaraCore._get_session(session_key=session_key, protocol=request.protocol, - tls_min_version=tls_min_version, verify=verify) - try: - resp = session.send( - p, - proxies=proxies, - timeout=timeout, - verify=verify, - cert=cert, - ) - except IOError as e: - raise RetryError(str(e)) - - debug = runtime_option.get('debug') or os.getenv('DEBUG') - if debug and debug.lower() == 'sdk': - DaraCore._do_http_debug(p, resp) - - response = DaraResponse() - response.status_message = resp.reason - response.status_code = resp.status_code - response.headers = {k.lower(): v for k, v in resp.headers.items()} - response.body = resp.content - response.response = resp - return response - - - @staticmethod - async def async_do_sse_action( - request: DaraRequest, - runtime_option=None - ) -> DaraResponse: - runtime_option = runtime_option or {} - - url = DaraCore.compose_url(request) - ignore_ssl = runtime_option.get('ignoreSSL', False) - verify: Union[bool, str] = not ignore_ssl - tls_min_version = runtime_option.get('tlsMinVersion') - if isinstance(tls_min_version, Enum): - tls_min_version = tls_min_version.value - - if verify: - ca = runtime_option.get('ca') - if ca is not None: - verify = ca - - cert = runtime_option.get('cert', None) - - timeout = runtime_option.get('timeout') - connect_timeout = runtime_option.get('connectTimeout') or timeout or 
DEFAULT_CONNECT_TIMEOUT - read_timeout = runtime_option.get('readTimeout') or timeout or DEFAULT_READ_TIMEOUT - - connect_timeout, read_timeout = (int(connect_timeout) / 1000, int(read_timeout) / 1000) - - proxy = None - if request.protocol.upper() == 'HTTP': - proxy = runtime_option.get('httpProxy') - if not proxy: - proxy = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy') - elif request.protocol.upper() == 'HTTPS': - proxy = runtime_option.get('httpsProxy') - if not proxy: - proxy = os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy') - - connector = None - ca_cert = certifi.where() - ssl_context = None - if isinstance(verify, str) and request.protocol.upper() == 'HTTPS': - ssl_context = ssl.create_default_context() - ssl_context = DaraCore._set_tls_minimum_version(ssl_context, tls_min_version) - ssl_context.load_verify_locations(verify) - # Handle cert if provided - if cert is not None: - if isinstance(cert, (list, tuple)): - ssl_context.load_cert_chain(certfile=cert[0], keyfile=cert[1] if len(cert) > 1 else None) - else: - ssl_context.load_cert_chain(certfile=cert, keyfile=None) - connector = aiohttp.TCPConnector(ssl=ssl_context) - elif ca_cert and request.protocol.upper() == 'HTTPS' and verify: - ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) - ssl_context = DaraCore._set_tls_minimum_version(ssl_context, tls_min_version) - ssl_context.load_verify_locations(ca_cert) - # Handle cert if provided - if cert is not None: - if isinstance(cert, (list, tuple)): - ssl_context.load_cert_chain(certfile=cert[0], keyfile=cert[1] if len(cert) > 1 else None) - else: - ssl_context.load_cert_chain(certfile=cert, keyfile=None) - connector = aiohttp.TCPConnector(ssl=ssl_context) - else: - verify = False - - timeout = aiohttp.ClientTimeout( - sock_read=read_timeout, - sock_connect=connect_timeout - ) - - session = aiohttp.ClientSession(connector=connector) - - body = b'' - if isinstance(request.body, BaseStream): - for content in request.body: - body += content - elif isinstance(request.body, str): - body = request.body.encode('utf-8') - else: - body = request.body or b'' - - try: - headers = request.headers.copy() - ssl_param: Union[bool, ssl.SSLContext] = ssl_context if ssl_context is not None else bool(verify) - response = await session.request( - request.method, - url, - data=body, - headers=headers, - ssl=ssl_param, - proxy=proxy, - timeout=timeout - ) - tea_resp: DaraResponse = DaraResponse() - tea_resp.status_code = response.status - tea_resp.status_message = response.reason - tea_resp.headers = dict({k.lower(): v for k, v in response.headers.items()}) - tea_resp.body = SSEResponseWrapper(session, response) - return tea_resp - - except IOError as e: - await session.close() - raise RetryError(str(e)) - - @staticmethod - def do_sse_action( - request: DaraRequest, - runtime_option=None - ) -> DaraResponse: - url = DaraCore.compose_url(request) - - runtime_option = runtime_option or {} - - verify = not runtime_option.get('ignoreSSL', False) - tls_min_version = runtime_option.get('tlsMinVersion') - if isinstance(tls_min_version, Enum): - tls_min_version = tls_min_version.value - - if verify: - verify = runtime_option.get('ca', True) if runtime_option.get('ca', True) is not None else True - cert = runtime_option.get('cert', None) - - timeout = runtime_option.get('timeout') - connect_timeout = runtime_option.get('connectTimeout') or timeout or DEFAULT_CONNECT_TIMEOUT - read_timeout = runtime_option.get('readTimeout') or timeout or DEFAULT_READ_TIMEOUT - - timeout = 
(int(connect_timeout) / 1000, int(read_timeout) / 1000) - - if isinstance(request.body, str): - request.body = request.body.encode('utf-8') - - p = PreparedRequest() - p.prepare( - method=request.method.upper(), - url=url, - data=request.body, - headers=request.headers, - ) - - proxies = {} - http_proxy = runtime_option.get('httpProxy') - https_proxy = runtime_option.get('httpsProxy') - no_proxy = runtime_option.get('noProxy') - - if not http_proxy: - http_proxy = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy') - if not https_proxy: - https_proxy = os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy') - - if http_proxy: - proxies['http'] = http_proxy - if https_proxy: - proxies['https'] = https_proxy - if no_proxy: - proxies['no_proxy'] = no_proxy - - host = request.headers.get('host') - host = host.rstrip('/') if host else '' - - session_key = f'{request.protocol.lower()}://{host}:{request.port}' - session = DaraCore._get_session(session_key=session_key, protocol=request.protocol, - tls_min_version=tls_min_version, verify=verify) - try: - resp = session.send( - p, - proxies=proxies, - timeout=timeout, - verify=verify, - cert=cert, - stream=True - ) - except IOError as e: - raise RetryError(str(e)) - - debug = runtime_option.get('debug') or os.getenv('DEBUG') - if debug and debug.lower() == 'sdk': - DaraCore._do_http_debug(p, resp) - - response = DaraResponse() - response.status_message = resp.reason - response.status_code = resp.status_code - response.headers = {k.lower(): v for k, v in resp.headers.items()} - response.body = SyncSSEResponseWrapper(session, resp) - - return response - @staticmethod - def get_response_body(resp) -> str: - return resp.content.decode("utf-8") - - @staticmethod - def allow_retry(dic, retry_times, now=None) -> bool: - if retry_times == 0: - return True - if dic is None or not dic.__contains__("maxAttempts") or \ - dic.get('retryable') is not True and retry_times >= 1: - return False - else: - retry = 0 if dic.get("maxAttempts") is None else int( - dic.get("maxAttempts")) - return retry >= retry_times - - @staticmethod - def should_retry(options: RetryOptions, ctx: RetryPolicyContext) -> bool: - if ctx.retries_attempted == 0: - return True - - if not options or not options.retryable: - return False - - retries_attempted = ctx.retries_attempted - ex = ctx.exception - - for condition in options.no_retry_condition: - if getattr(ex, 'name', None) in condition.exception or getattr(ex, 'code', None) in condition.error_code: - return False - - for condition in options.retry_condition: - if getattr(ex, 'name', None) not in condition.exception and getattr(ex, 'code', None) not in condition.error_code: - continue - - if retries_attempted >= condition.max_attempts: - return False - return True - - return False - - @staticmethod - def get_backoff_time(options: RetryOptions, ctx: RetryPolicyContext) -> int: - ex = ctx.exception - conditions = options.retry_condition - for condition in conditions: - if getattr(ex, 'name', None) not in condition.exception and getattr(ex, 'code', None) not in condition.error_code: - continue - max_delay = condition.max_delay or MAX_DELAY_TIME - retry_after = getattr(ctx.exception, "retry_after", None) - if retry_after is not None: - return min(retry_after, max_delay) - if not condition.backoff: - return MIN_DELAY_TIME - return min(condition.backoff.get_delay_time(ctx), max_delay) - return MIN_DELAY_TIME - - @staticmethod - async def sleep_async(millisecond: int): - await asyncio.sleep(millisecond / 1000) - - @staticmethod 
- def sleep(millisecond: int): - time.sleep(millisecond / 1000) - - @staticmethod - def is_retryable(ex) -> bool: - return isinstance(ex, RetryError) - - @staticmethod - def bytes_readable(body): - return body - - @staticmethod - def merge(*dic_list) -> dict: - dic_result = {} - for item in dic_list: - if isinstance(item, dict): - dic_result.update(item) - elif isinstance(item, DaraModel): - dic_result.update(item.to_map()) - return dic_result - - @staticmethod - def is_null(value) -> bool: - return value is None - - @staticmethod - def to_readable_stream(data): - if isinstance(data, str): - return io.StringIO(data) - elif isinstance(data, bytes): - return io.BytesIO(data) - else: - raise TypeError("Input data must be of type str or bytes") - - @staticmethod - def to_map(model: Optional[DaraModel]) -> Dict[str, Any]: - if isinstance(model, DaraModel): - return model.to_map() - else: - return dict() - - @staticmethod - def to_number(model) -> int: - if isinstance(model, int): - return model - if isinstance(model, str): - if model == "": - return 0 - return int(model) - if isinstance(model, float): - return int(model) - return 0 - - @staticmethod - def from_map( - model: DaraModel, - dic: Dict[str, Any] - ) -> DaraModel: - if isinstance(model, DaraModel): - try: - return model.from_map(dic) - except Exception: - model._map = dic - return model - else: - return model - - @staticmethod - def _get_session(session_key: str, protocol: str, tls_min_version: str = None, verify: bool = True): - if session_key not in DaraCore._sessions: - session = Session() - adapter = DaraCore.get_adapter(protocol, tls_min_version) - if protocol.upper() == 'HTTPS': - if verify: - session.mount('https://', adapter) - else: - session.mount('https://', DaraCore.https_adapter) - else: - session.mount('http://', adapter) - DaraCore._sessions[session_key] = session - return DaraCore._sessions[session_key] \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/date.py b/venv/Lib/site-packages/darabonba/date.py deleted file mode 100644 index b1e36f9..0000000 --- a/venv/Lib/site-packages/darabonba/date.py +++ /dev/null @@ -1,128 +0,0 @@ -from datetime import datetime, timedelta - -class Date: - def __init__(self, date_input): - formats = [ - "%Y-%m-%d %H:%M:%S", - "%Y-%m-%d %H:%M:%S.%f %z %Z", - "%Y-%m-%dT%H:%M:%S%z", - "%Y-%m-%dT%H:%M:%SZ", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%dT%H:%M:%S.%f", - ] - - self.date = None - for format in formats: - try: - self.date = datetime.strptime(date_input, format) - break - except ValueError: - continue - - if self.date is None: - raise ValueError(f"unable to parse date: {date_input}") - - def strftime(self, layout): - layout = layout.replace("yyyy", "%Y") - layout = layout.replace("MM", "%m") - layout = layout.replace("dd", "%d") - layout = layout.replace("hh", "%H") - layout = layout.replace("mm", "%M") - layout = layout.replace("ss", "%S") - layout = layout.replace("a", "%p") - layout = layout.replace("EEEE", "%A") - layout = layout.replace("E", "%a") - return self.date.strftime(layout) - - def timestamp(self): - return int(self.date.timestamp()) - - def sub(self, unit, amount): - if unit in ["second", "seconds"]: - return Date((self.date - timedelta(seconds=amount)).isoformat()) - elif unit in ["minute", "minutes"]: - return Date((self.date - timedelta(minutes=amount)).isoformat()) - elif unit in ["hour", "hours"]: - return Date((self.date - timedelta(hours=amount)).isoformat()) - elif unit in ["day", "days"]: - return Date((self.date - 
timedelta(days=amount)).isoformat()) - elif unit in ["week", "weeks"]: - return Date((self.date - timedelta(weeks=amount)).isoformat()) - elif unit in ["month", "months"]: - return Date((self.date.replace(month=self.date.month - amount)).isoformat()) - elif unit in ["year", "years"]: - return Date((self.date.replace(year=self.date.year - amount)).isoformat()) - - def add(self, unit, amount): - if unit in ["second", "seconds"]: - return Date((self.date + timedelta(seconds=amount)).isoformat()) - elif unit in ["minute", "minutes"]: - return Date((self.date + timedelta(minutes=amount)).isoformat()) - elif unit in ["hour", "hours"]: - return Date((self.date + timedelta(hours=amount)).isoformat()) - elif unit in ["day", "days"]: - return Date((self.date + timedelta(days=amount)).isoformat()) - elif unit in ["week", "weeks"]: - return Date((self.date + timedelta(weeks=amount)).isoformat()) - elif unit in ["month", "months"]: - new_month = self.date.month + amount - new_year = self.date.year + (new_month - 1) // 12 - new_month = (new_month - 1) % 12 + 1 - if self.date.day > 28: - if new_month == 2: - new_day = min(self.date.day, 29) - if new_day == 29 and not (new_year % 4 == 0 and (new_year % 100 != 0 or new_year % 400 == 0)): - new_day = 28 - else: - new_day = min(self.date.day, [31, 30][new_month % 2]) - else: - new_day = self.date.day - - new_date = self.date.replace(year=new_year, month=new_month, day=new_day) - return Date(new_date.isoformat()) - elif unit in ["year", "years"]: - return Date((self.date.replace(year=self.date.year + amount)).isoformat()) - - def diff(self, unit, diff_date): - if unit in ["second", "seconds"]: - return int((self.date - diff_date.date).total_seconds()) - elif unit in ["minute", "minutes"]: - return int((self.date - diff_date.date).total_seconds() / 60) - elif unit in ["hour", "hours"]: - return int((self.date - diff_date.date).total_seconds() / 3600) - elif unit in ["day", "days"]: - return int((self.date - diff_date.date).total_seconds() / (3600 * 24)) - elif unit in ["week", "weeks"]: - return int((self.date - diff_date.date).total_seconds() / (3600 * 24 * 7)) - elif unit in ["month", "months"]: - return (self.date.year - diff_date.date.year) * 12 + (self.date.month - diff_date.date.month) - elif unit in ["year", "years"]: - return self.date.year - diff_date.date.year - - def hour(self): - return self.date.hour - - def minute(self): - return self.date.minute - - def second(self): - return self.date.second - - def month(self): - return self.date.month - - def day_of_month(self): - return self.date.day - - def day_of_week(self): - weekday = self.date.weekday() + 1 # Monday is 0 in python, so - return weekday % 7 or 7 # Convert to Sunday = 7 - - def week_of_year(self): - return self.date.isocalendar()[1] - - def year(self): - return self.date.year - - def UTC(self): - return self.date.strftime("%Y-%m-%d %H:%M:%S.%f %z %Z") \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/decorators.py b/venv/Lib/site-packages/darabonba/decorators.py deleted file mode 100644 index 66bdd1f..0000000 --- a/venv/Lib/site-packages/darabonba/decorators.py +++ /dev/null @@ -1,61 +0,0 @@ -import warnings -import functools - - -def deprecated(reason): - """This is a decorator which can be used to mark functions as deprecated. - It will result in a warning being emitted when the function is used. - - Args: - reason (str): Explanation of why the function is deprecated. 
- """ - - def decorator(func): - original_func = func.__func__ if isinstance(func, staticmethod) or isinstance(func, classmethod) else func - - @functools.wraps(original_func) - def decorated_function(*args, **kwargs): - warnings.warn(f"Call to deprecated function {original_func.__name__}. {reason}", - category=DeprecationWarning, - stacklevel=2) - return original_func(*args, **kwargs) - - if isinstance(func, staticmethod): - return staticmethod(decorated_function) - elif isinstance(func, classmethod): - return classmethod(decorated_function) - else: - return decorated_function - - return decorator - -def type_check(*arg_types, **kwarg_types): - """This decorator is used to check whether the input parameter type meets the definition. - It will result in a warning being emitted when the function is used. - """ - - def decorator(func): - original_func = func.__func__ if isinstance(func, staticmethod) or isinstance(func, classmethod) else func - - @functools.wraps(original_func) - def wrapper(*args, **kwargs): - for i, (a, t) in enumerate(zip(args, arg_types)): - if not isinstance(a, t): - warnings.warn(f"Argument {i} is not of type {t}", - category=UserWarning, - stacklevel=2) - for k, t in kwarg_types.items(): - if k in kwargs and not isinstance(kwargs[k], t): - warnings.warn(f"Argument {k} is not of type {t}", - category=UserWarning, - stacklevel=2) - return original_func(*args, **kwargs) - - if isinstance(func, staticmethod): - return staticmethod(wrapper) - elif isinstance(func, classmethod): - return classmethod(wrapper) - else: - return wrapper - - return decorator \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/event.py b/venv/Lib/site-packages/darabonba/event.py deleted file mode 100644 index 12756b7..0000000 --- a/venv/Lib/site-packages/darabonba/event.py +++ /dev/null @@ -1,48 +0,0 @@ -from darabonba.model import DaraModel - -class Event(DaraModel): - def __init__( - self, - id: str = None, - event: str = None, - data: str = None, - retry: int = None, - ): - self.id = id - self.event = event - self.data = data - self.retry = retry - - def validate(self): - self.validate_required(self.id, 'id') - self.validate_required(self.event, 'event') - self.validate_required(self.data, 'data') - self.validate_required(self.retry, 'retry') - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.id is not None: - result['id'] = self.id - if self.event is not None: - result['event'] = self.event - if self.data is not None: - result['data'] = self.data - if self.retry is not None: - result['retry'] = self.retry - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('id') is not None: - self.id = m.get('id') - if m.get('event') is not None: - self.event = m.get('event') - if m.get('data') is not None: - self.data = m.get('data') - if m.get('retry') is not None: - self.retry = m.get('retry') - return self \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/exceptions.py b/venv/Lib/site-packages/darabonba/exceptions.py deleted file mode 100644 index 8766beb..0000000 --- a/venv/Lib/site-packages/darabonba/exceptions.py +++ /dev/null @@ -1,86 +0,0 @@ -from Tea.exceptions import UnretryableException as TeaUnretryableException -from Tea.exceptions import RequiredArgumentException as TeaRequiredArgumentException -from Tea.exceptions import TeaException -from darabonba.policy.retry import RetryPolicyContext -from typing import Any, Optional - - -class 
DaraException(TeaException): - def __init__(self, dic): - super().__init__(dic) - self.code = dic.get("code") - self.message = dic.get("message") - self.data = dic.get("data") - self.description = dic.get("description") - self.accessDeniedDetail = dic.get("accessDeniedDetail") - if isinstance(dic.get("data"), dict) and dic.get("data").get("statusCode") is not None: - self.statusCode = dic.get("data").get("statusCode") - self.name = 'DaraException' - - def __str__(self): - return f'Error: {self.code} {self.message} Response: {self.data}' - -class ResponseException(DaraException): - def __init__(self, - code: Optional[str] = None, - message: Optional[str] = None, - status_code: Optional[int] = None, - retry_after: Optional[int] = None, - data: Optional[dict] = None, - access_denied_detail: Optional[dict] = None, - description: Optional[str] = None, - stack: Optional[str] = None): - if data and status_code is not None: - data['statusCode'] = status_code - super().__init__({ - 'code': code, - 'message': message, - 'data': data, - 'description': description, - 'accessDeniedDetail': access_denied_detail - }) - - self.name = 'ResponseException' - self.status_code = status_code - self.retry_after = retry_after - self.stack = stack - -class ValidateException(Exception): - pass - - -class RequiredArgumentException(TeaRequiredArgumentException): - def __init__(self, arg): - self.arg = arg - - def __str__(self): - return f'"{self.arg}" is required.' - - -class RetryError(Exception): - def __init__(self, message): - - super().__init__({"message":message}) - - self.message = message - self.data = None - self.name = 'RetryError' - -class UnretryableException(TeaUnretryableException): - def __init__( - self, - _context: RetryPolicyContext - ): - if isinstance(_context.exception, ResponseException): - raise _context.exception - - super().__init__( - request= _context.http_request, - ex= _context.exception, - ) - - self.name = 'UnretryableException' - - - def __str__(self): - return str(self.inner_exception) \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/file.py b/venv/Lib/site-packages/darabonba/file.py deleted file mode 100644 index 00f8048..0000000 --- a/venv/Lib/site-packages/darabonba/file.py +++ /dev/null @@ -1,48 +0,0 @@ -import os -from datetime import datetime -from darabonba.date import Date - -class File: - def __init__(self, path: str): - self._path = path - self._file = None - - @staticmethod - def exists(path: str) -> bool: - return os.path.exists(path) - - def path(self) -> str: - return self._path - - def length(self) -> int: - return os.path.getsize(self._path) - - def create_time(self) -> Date: - ctime = os.path.getctime(self._path) - return Date(datetime.fromtimestamp(ctime).isoformat()) - - def modify_time(self) -> Date: - mtime = os.path.getmtime(self._path) - return Date(datetime.fromtimestamp(mtime).isoformat()) - - def read(self, size: int) -> bytes: - if self._file is None: - self._file = open(self._path, 'rb') - - data = self._file.read(size) - if not data: - self._file.close() - self._file = None - return data - - def write(self, data: bytes) -> None: - with open(self._path, 'ab') as f: - f.write(data) - - @staticmethod - def create_read_stream(path: str): - return open(path, 'rb') - - @staticmethod - def create_write_stream(path: str): - return open(path, 'ab') \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/model.py b/venv/Lib/site-packages/darabonba/model.py deleted file mode 100644 index f704145..0000000 --- 
a/venv/Lib/site-packages/darabonba/model.py +++ /dev/null @@ -1,54 +0,0 @@ -import re -from darabonba.exceptions import RequiredArgumentException, ValidateException -from Tea.model import TeaModel - - -class DaraModel(TeaModel): - _map = None - - def validate(self): - pass - - def to_map(self): - return self._map - - def from_map(self, map=None): - pass - - @staticmethod - def validate_required(prop, prop_name): - if prop is None: - raise RequiredArgumentException(prop_name) - - @staticmethod - def validate_max_length(prop, prop_name, max_length): - if len(prop) > max_length: - raise ValidateException(f'{prop_name} is exceed max-length: {max_length}') - - @staticmethod - def validate_min_length(prop, prop_name, min_length): - if len(prop) < min_length: - raise ValidateException(f'{prop_name} is less than min-length: {min_length}') - - @staticmethod - def validate_pattern(prop, prop_name, pattern): - match_obj = re.search(pattern, str(prop), re.M | re.I) - if not match_obj: - raise ValidateException(f'{prop_name} is not match: {pattern}') - - @staticmethod - def validate_maximum(num, prop_name, maximum): - if num > maximum: - raise ValidateException(f'{prop_name} is greater than the maximum: {maximum}') - - @staticmethod - def validate_minimum(num, prop_name, minimum): - if num < minimum: - raise ValidateException(f'{prop_name} is less than the minimum: {minimum}') - - def __str__(self): - s = self.to_map() - if s: - return str(s) - else: - return object.__str__(self) diff --git a/venv/Lib/site-packages/darabonba/number.py b/venv/Lib/site-packages/darabonba/number.py deleted file mode 100644 index ce7ed13..0000000 --- a/venv/Lib/site-packages/darabonba/number.py +++ /dev/null @@ -1,23 +0,0 @@ -class Number: - def __init__(self, value): - self.value = self.to_number(value) - - def to_number(self, value): - if isinstance(value, bool): - return 1 if value else 0 - elif value is None: - return 0 - elif isinstance(value, (int, float)): - return float(value) - elif isinstance(value, str): - try: - return float(value) - except ValueError: - return float('nan') - elif isinstance(value, (list, dict)): - return float('nan') - else: - return float('nan') - - def __str__(self): - return str(self.value) \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/policy/__init__.py b/venv/Lib/site-packages/darabonba/policy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/darabonba/policy/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/darabonba/policy/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 7d90cc6..0000000 Binary files a/venv/Lib/site-packages/darabonba/policy/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/policy/__pycache__/retry.cpython-312.pyc b/venv/Lib/site-packages/darabonba/policy/__pycache__/retry.cpython-312.pyc deleted file mode 100644 index 8a397a1..0000000 Binary files a/venv/Lib/site-packages/darabonba/policy/__pycache__/retry.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/policy/retry.py b/venv/Lib/site-packages/darabonba/policy/retry.py deleted file mode 100644 index 38842aa..0000000 --- a/venv/Lib/site-packages/darabonba/policy/retry.py +++ /dev/null @@ -1,214 +0,0 @@ -import random -from typing import List, Any, Dict - -MAX_DELAY_TIME = 120 * 1000 -MIN_DELAY_TIME = 100 - -class BackoffPolicy: - def __init__(self, option: Dict[str, Any]): - self.policy = option.get("policy") - - def 
get_delay_time(self, ctx: 'RetryPolicyContext') -> int: - raise NotImplementedError('un-implemented') - - @staticmethod - def new_backoff_policy(option: Dict[str, Any]) -> 'BackoffPolicy': - policy_map = { - 'Fixed': FixedBackoffPolicy, - 'Random': RandomBackoffPolicy, - 'Exponential': ExponentialBackoffPolicy, - 'EqualJitter': EqualJitterBackoffPolicy, - 'ExponentialWithEqualJitter': EqualJitterBackoffPolicy, - 'FullJitter': FullJitterBackoffPolicy, - 'ExponentialWithFullJitter': FullJitterBackoffPolicy, - } - policy_class = policy_map.get(option.get('policy')) - if policy_class: - return policy_class(option) - raise ValueError(f"Unknown policy: {option.get('policy')}") - -class FixedBackoffPolicy(BackoffPolicy): - def __init__(self, option: Dict[str, Any]): - super().__init__(option) - self.period = option.get('period') - - def to_map(self): - return { - 'policy': self.policy, - 'period': self.period, - } - - def get_delay_time(self, ctx: 'RetryPolicyContext') -> int: - return self.period - -class RandomBackoffPolicy(BackoffPolicy): - def __init__(self, option: Dict[str, Any]): - super().__init__(option) - self.period = option.get('period') - self.cap = option.get('cap', 20 * 1000) - - def to_map(self): - return { - 'policy': self.policy, - 'period': self.period, - 'cap': self.cap, - } - - def get_delay_time(self, ctx: 'RetryPolicyContext') -> int: - random_time = random.randint(0, ctx.retries_attempted * self.period) - return min(random_time, self.cap) - -class ExponentialBackoffPolicy(BackoffPolicy): - def __init__(self, option: Dict[str, Any]): - super().__init__(option) - self.period = option.get('period') - self.cap = option.get('cap', 3 * 24 * 60 * 60 * 1000) - - def to_map(self): - return { - 'policy': self.policy, - 'period': self.period, - 'cap': self.cap, - } - - def get_delay_time(self, ctx: 'RetryPolicyContext') -> int: - random_time = min(2 ** (ctx.retries_attempted * self.period), self.cap) - return random_time - -class EqualJitterBackoffPolicy(BackoffPolicy): - def __init__(self, option: Dict[str, Any]): - super().__init__(option) - self.period = option.get('period') - self.cap = option.get('cap', 3 * 24 * 60 * 60 * 1000) - - - def to_map(self): - return { - 'policy': self.policy, - 'period': self.period, - 'cap': self.cap, - } - - def get_delay_time(self, ctx: 'RetryPolicyContext') -> int: - ceil = min(self.cap, 2 ** (ctx.retries_attempted * self.period)) - return ceil // 2 + random.randint(0, ceil // 2) - -class FullJitterBackoffPolicy(BackoffPolicy): - def __init__(self, option: Dict[str, Any]): - super().__init__(option) - self.period = option.get('period') - self.cap = option.get('cap', 3 * 24 * 60 * 60 * 1000) - - def to_map(self): - return { - 'policy': self.policy, - 'period': self.period, - 'cap': self.cap, - } - - def get_delay_time(self, ctx: 'RetryPolicyContext') -> int: - ceil = min(self.cap, 2 ** (ctx.retries_attempted * self.period)) - return random.randint(0, ceil) - -class RetryCondition: - def __init__(self, condition: Dict[str, Any]): - self.max_attempts = condition.get('maxAttempts', None) - self.backoff = self._ensure_backoff_policy(condition.get('backoff', None)) - self.exception = condition.get('exception', []) - self.error_code = condition.get('errorCode', []) - self.max_delay = condition.get('maxDelay', None) - - def _ensure_backoff_policy(self, backoff): - if isinstance(backoff, dict): - return BackoffPolicy.new_backoff_policy(backoff) - elif isinstance(backoff, BackoffPolicy): - return backoff - - def to_map(self): - result = dict() - if 
self.max_attempts: - result['maxAttempts'] = self.max_attempts - if self.backoff: - result['backoff'] = self.backoff.to_map() - if self.exception: - result['exception'] = self.exception - if self.error_code: - result['errorCode'] = self.error_code - if self.max_delay: - result['maxDelay'] = self.max_delay - return result - - @staticmethod - def from_map(data: Dict[str, Any]) -> 'RetryCondition': - return RetryCondition({ - 'maxAttempts': data.get('maxAttempts'), - 'backoff': data.get('backoff'), - 'exception': data.get('exception', []), - 'errorCode': data.get('errorCode', []), - 'maxDelay': data.get('maxDelay') - }) - -class RetryOptions: - def __init__(self, options: Dict[str, Any]): - self.retryable = options.get('retryable', True) - self.retry_condition = [self._ensure_retry_condition(cond) for cond in options.get('retryCondition', [])] - self.no_retry_condition = [self._ensure_retry_condition(cond) for cond in options.get('noRetryCondition', [])] - - def _ensure_retry_condition(self, condition): - if isinstance(condition, dict): - return RetryCondition(condition) - elif isinstance(condition, RetryCondition): - return condition - else: - raise ValueError("Condition must be either a dictionary or a RetryCondition instance") - - def validate(self) -> bool: - if not isinstance(self.retryable, bool): - raise ValueError("retryable must be a boolean.") - if not isinstance(self.retry_condition, list) or not all(isinstance(cond, RetryCondition) for cond in self.retry_condition): - raise ValueError("retryCondition must be a list of RetryCondition.") - if not isinstance(self.no_retry_condition, list) or not all(isinstance(cond, RetryCondition) for cond in self.no_retry_condition): - raise ValueError("noRetryCondition must be a list of RetryCondition.") - return True - - def to_map(self): - result = dict() - if self.retryable: - result['retryable'] = self.retryable - if self.retry_condition: - result['retryCondition'] = [cond.to_map() for cond in self.retry_condition] - if self.no_retry_condition: - result['noRetryCondition'] = [cond.to_map() for cond in self.no_retry_condition] - return result - - @staticmethod - def from_map(data: Dict[str, Any]) -> 'RetryOptions': - options = { - 'retryable': data.get('retryable', True), - 'retryCondition': [cond for cond in data.get('retryCondition', [])], - 'noRetryCondition': [cond for cond in data.get('noRetryCondition', [])] - } - return RetryOptions(options) - -class RetryPolicyContext: - def __init__(self, retries_attempted = None, http_request = None, http_response = None, exception = None): - self.retries_attempted = retries_attempted - self.http_request = http_request - self.http_response = http_response - self.exception = exception - -def get_backoff_delay(options: RetryOptions, ctx: RetryPolicyContext) -> int: - ex = ctx.exception - for condition in options.retry_condition: - if (ex and (ex.name in condition.exception or ex.code in condition.error_code)): - max_delay = condition.max_delay or MAX_DELAY_TIME - retry_after = getattr(ex, 'retryAfter', None) - if retry_after is not None: - return min(retry_after, max_delay) - - if not condition.backoff: - return MIN_DELAY_TIME - - return min(condition.backoff.get_delay_time(ctx), max_delay) - - return MIN_DELAY_TIME \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/request.py b/venv/Lib/site-packages/darabonba/request.py deleted file mode 100644 index d5d2021..0000000 --- a/venv/Lib/site-packages/darabonba/request.py +++ /dev/null @@ -1,29 +0,0 @@ -class DaraRequest: - 
_PROPERTY_DEFAULT_MAP = { - 'query': {}, - 'protocol': 'http', - 'port': 80, - 'method': 'GET', - 'headers': {}, - 'pathname': "", - 'body': None, - } - - def __init__(self): - self.query = {} - self.protocol = "http" - self.port = 80 - self.method = "GET" - self.headers = {} - self.pathname = "" - self.body = None - - def __setattr__(self, key, value): - if key in self._PROPERTY_DEFAULT_MAP: - if not value: - if isinstance(self._PROPERTY_DEFAULT_MAP[key], (list, dict)): - self.__dict__[key] = self._PROPERTY_DEFAULT_MAP[key].copy() - else: - self.__dict__[key] = self._PROPERTY_DEFAULT_MAP[key] - return - self.__dict__[key] = value diff --git a/venv/Lib/site-packages/darabonba/response.py b/venv/Lib/site-packages/darabonba/response.py deleted file mode 100644 index 0269305..0000000 --- a/venv/Lib/site-packages/darabonba/response.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any, Dict, Optional, Union -from aiohttp import ClientResponse - -class DaraResponse: - def __init__(self): - # status - self.status_code: Optional[int] = None - # reason - self.status_message: Optional[str] = None - self.headers: Optional[Dict[str, str]] = None - self.response: Optional[Union[ClientResponse, Any]] = None - self.body: Optional[Union[bytes, Any]] = None \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/runtime.py b/venv/Lib/site-packages/darabonba/runtime.py deleted file mode 100644 index 78210e5..0000000 --- a/venv/Lib/site-packages/darabonba/runtime.py +++ /dev/null @@ -1,202 +0,0 @@ -from darabonba.core import DaraModel -from typing import Dict -from darabonba.policy.retry import RetryOptions - -class ExtendsParameters(DaraModel): - def __init__( - self, - headers: Dict[str, str] = None, - queries: Dict[str, str] = None, - ): - self.headers = headers - self.queries = queries - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.queries is not None: - result['queries'] = self.queries - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('queries') is not None: - self.queries = m.get('queries') - return self - -class RuntimeOptions(DaraModel): - """ - The common runtime options model - """ - def __init__( - self, - retry_options: RetryOptions = None, - autoretry: bool = None, - ignore_ssl: bool = None, - max_attempts: int = None, - backoff_policy: str = None, - backoff_period: int = None, - read_timeout: int = None, - connect_timeout: int = None, - http_proxy: str = None, - https_proxy: str = None, - no_proxy: str = None, - max_idle_conns: int = None, - local_addr: str = None, - socks_5proxy: str = None, - socks_5net_work: str = None, - keep_alive: bool = None, - key: str = None, - cert: str = None, - ca: str = None, - extends_parameters: ExtendsParameters = None, - ): - # retry options - self.retry_options = retry_options - # whether to try again - self.autoretry = autoretry - # ignore SSL validation - self.ignore_ssl = ignore_ssl - # privite key for client certificate - self.key = key - # client certificate - self.cert = cert - # server certificate - self.ca = ca - # maximum number of retries - self.max_attempts = max_attempts - # backoff policy - self.backoff_policy = backoff_policy - # backoff period - self.backoff_period = backoff_period - # read timeout - self.read_timeout = read_timeout - # connect timeout - 
self.connect_timeout = connect_timeout - # http proxy url - self.http_proxy = http_proxy - # https Proxy url - self.https_proxy = https_proxy - # agent blacklist - self.no_proxy = no_proxy - # maximum number of connections - self.max_idle_conns = max_idle_conns - # local addr - self.local_addr = local_addr - # SOCKS5 proxy - self.socks_5proxy = socks_5proxy - # SOCKS5 netWork - self.socks_5net_work = socks_5net_work - # whether to enable keep-alive - self.keep_alive = keep_alive - # Extends Parameters - self.extends_parameters = extends_parameters - - def validate(self): - if self.retry_options: - self.retry_options.validate() - if self.extends_parameters: - self.extends_parameters.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.retry_options is not None: - result['retryOptions'] = self.retry_options.to_map() - if self.autoretry is not None: - result['autoretry'] = self.autoretry - if self.ignore_ssl is not None: - result['ignoreSSL'] = self.ignore_ssl - if self.key is not None: - result['key'] = self.key - if self.cert is not None: - result['cert'] = self.cert - if self.ca is not None: - result['ca'] = self.ca - if self.max_attempts is not None: - result['max_attempts'] = self.max_attempts - if self.backoff_policy is not None: - result['backoff_policy'] = self.backoff_policy - if self.backoff_period is not None: - result['backoff_period'] = self.backoff_period - if self.read_timeout is not None: - result['readTimeout'] = self.read_timeout - if self.connect_timeout is not None: - result['connectTimeout'] = self.connect_timeout - if self.http_proxy is not None: - result['httpProxy'] = self.http_proxy - if self.https_proxy is not None: - result['httpsProxy'] = self.https_proxy - if self.no_proxy is not None: - result['noProxy'] = self.no_proxy - if self.max_idle_conns is not None: - result['maxIdleConns'] = self.max_idle_conns - if self.local_addr is not None: - result['localAddr'] = self.local_addr - if self.socks_5proxy is not None: - result['socks5Proxy'] = self.socks_5proxy - if self.socks_5net_work is not None: - result['socks5NetWork'] = self.socks_5net_work - if self.keep_alive is not None: - result['keepAlive'] = self.keep_alive - if self.extends_parameters is not None: - result['extendsParameters'] = self.extends_parameters.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('retryOptions') is not None: - self.retry_options = RetryOptions.from_map(m.get('retryOptions')) - if m.get('autoretry') is not None: - self.autoretry = m.get('autoretry') - if m.get('ignoreSSL') is not None: - self.ignore_ssl = m.get('ignoreSSL') - if m.get('key') is not None: - self.key = m.get('key') - if m.get('cert') is not None: - self.cert = m.get('cert') - if m.get('ca') is not None: - self.ca = m.get('ca') - if m.get('max_attempts') is not None: - self.max_attempts = m.get('max_attempts') - if m.get('backoff_policy') is not None: - self.backoff_policy = m.get('backoff_policy') - if m.get('backoff_period') is not None: - self.backoff_period = m.get('backoff_period') - if m.get('readTimeout') is not None: - self.read_timeout = m.get('readTimeout') - if m.get('connectTimeout') is not None: - self.connect_timeout = m.get('connectTimeout') - if m.get('httpProxy') is not None: - self.http_proxy = m.get('httpProxy') - if m.get('httpsProxy') is not None: - self.https_proxy = m.get('httpsProxy') - if m.get('noProxy') is not None: - self.no_proxy = m.get('noProxy') - if m.get('maxIdleConns') is 
not None: - self.max_idle_conns = m.get('maxIdleConns') - if m.get('localAddr') is not None: - self.local_addr = m.get('localAddr') - if m.get('socks5Proxy') is not None: - self.socks_5proxy = m.get('socks5Proxy') - if m.get('socks5NetWork') is not None: - self.socks_5net_work = m.get('socks5NetWork') - if m.get('keepAlive') is not None: - self.keep_alive = m.get('keepAlive') - if m.get('extendsParameters') is not None: - temp_model = ExtendsParameters() - self.extends_parameters = temp_model.from_map(m['extendsParameters']) - return self \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/url.py b/venv/Lib/site-packages/darabonba/url.py deleted file mode 100644 index d000f29..0000000 --- a/venv/Lib/site-packages/darabonba/url.py +++ /dev/null @@ -1,92 +0,0 @@ -import re -from urllib.parse import urlparse, urlunparse, quote - -PORT_MAP = { - "ftp": "21", - "gopher": "70", - "http": "80", - "https": "443", - "ws": "80", - "wss": "443" -} - -class Url: - def __init__(self, url_str): - self._url = urlparse(url_str) - - @staticmethod - def new_url(url_str): - try: - return Url(url_str) - except Exception as e: - raise e - - def path(self): - if not self._url.query: - return self._url.path - return f"{self._url.path}?{self._url.query}" - - def pathname(self): - return self._url.path - - def protocol(self): - return self._url.scheme - - def hostname(self): - return self._url.hostname - - def host(self): - if self._url.port: - return f"{self._url.hostname}:{self._url.port}" - return self._url.hostname - - def port(self): - if self._url.port: - return str(self._url.port) - return PORT_MAP.get(self.protocol(), "") - - def hash(self): - return self._url.fragment - - def search(self): - return self._url.query - - def href(self): - return urlunparse(self._url) - - def auth(self): - if self._url.username or self._url.password: - return f"{self._url.username}:{self._url.password or ''}" - return "" - - @staticmethod - def parse(url_str): - return Url.new_url(url_str) - - @staticmethod - def url_encode(url_str): - if not url_str: - return "" - parts = url_str.split('/') - encoded_parts = [quote(part, safe='') for part in parts] - encoded_url = '/'.join(encoded_parts) - encoded_url = encoded_url.replace("+", "%20").replace("*", "%2A").replace("%7E", "~") - return encoded_url - - @staticmethod - def percent_encode(uri): - if not uri: - return "" - encoded_uri = quote(uri, safe='') - encoded_uri = encoded_uri.replace("+", "%20").replace("*", "%2A").replace("%7E", "~") - return encoded_uri - - @staticmethod - def path_encode(path): - if not path or path == "/": - return path - parts = path.split('/') - encoded_parts = [quote(part, safe='') for part in parts] - encoded_path = '/'.join(encoded_parts) - encoded_path = encoded_path.replace("+", "%20").replace("*", "%2A").replace("%7E", "~") - return encoded_path \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/utils/__init__.py b/venv/Lib/site-packages/darabonba/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 37ca02f..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/bytes.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/bytes.cpython-312.pyc deleted 
file mode 100644 index 9fca355..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/bytes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/form.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/form.cpython-312.pyc deleted file mode 100644 index dcc0290..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/form.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/logger.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/logger.cpython-312.pyc deleted file mode 100644 index ff950ce..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/logger.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/map.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/map.cpython-312.pyc deleted file mode 100644 index cb17864..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/map.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/stream.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/stream.cpython-312.pyc deleted file mode 100644 index 9835e07..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/stream.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/validation.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/validation.cpython-312.pyc deleted file mode 100644 index 020c21a..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/validation.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/__pycache__/xml.cpython-312.pyc b/venv/Lib/site-packages/darabonba/utils/__pycache__/xml.cpython-312.pyc deleted file mode 100644 index aa4950b..0000000 Binary files a/venv/Lib/site-packages/darabonba/utils/__pycache__/xml.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/darabonba/utils/bytes.py b/venv/Lib/site-packages/darabonba/utils/bytes.py deleted file mode 100644 index 40fcfe9..0000000 --- a/venv/Lib/site-packages/darabonba/utils/bytes.py +++ /dev/null @@ -1,30 +0,0 @@ -import base64 -from typing import Union - -class Bytes: - - @staticmethod - def from_(data: Union[str, bytes], encoding: str) -> bytes: - if encoding == 'base64': - if isinstance(data, str): - data = data.encode('utf-8') - return base64.b64decode(data) - elif encoding == 'hex': - if isinstance(data, str): - return bytes.fromhex(data) - elif encoding == 'utf-8': - if isinstance(data, bytes): - return data - if isinstance(data, str): - return data.encode('utf-8') - elif encoding == 'utf-16': - if isinstance(data, bytes): - return data.decode('utf-16').encode('utf-16') - elif encoding == 'utf-32': - if isinstance(data, bytes): - return data.decode('utf-32').encode('utf-32') - elif encoding == 'binary': - if isinstance(data, str): - return bytes(int(data[i:i+8], 2) for i in range(0, len(data), 8)) - - raise ValueError(f"Unsupported encoding: {encoding}") diff --git a/venv/Lib/site-packages/darabonba/utils/form.py b/venv/Lib/site-packages/darabonba/utils/form.py deleted file mode 100644 index 2ab0c2d..0000000 --- a/venv/Lib/site-packages/darabonba/utils/form.py +++ /dev/null @@ -1,208 +0,0 @@ -import os -import sys -from _io import BytesIO -import random -from darabonba.utils.stream import BaseStream, READABLE -from darabonba.core 
import DaraModel -from urllib.parse import urlencode - -class Form: - - @staticmethod - def to_form_string( - val: dict, - ) -> str: - """ - Format a map to form string, like a=a%20b%20c - @return: the form string - """ - if not val: - return "" - keys = sorted(list(val)) - dic = {k: val[k] for k in keys if not isinstance(val[k], READABLE)} - return urlencode(dic) - - @staticmethod - def get_boundary(): - result = '%s' % int(random.random() * 100000000000000) - return result.zfill(14) - - @staticmethod - def to_file_form(form, boundary): - return FileFormInputStream(form, boundary) - -def _length(o): - if hasattr(o, 'len'): - return o.len - elif isinstance(o, BytesIO): - return o.getbuffer().nbytes - elif hasattr(o, 'fileno'): - return os.path.getsize(o.name) - return len(o) - - -class FileFormInputStream(BaseStream): - def __init__(self, form, boundary, size=1024): - super().__init__(size) - self.form = form - self.boundary = boundary - self.file_size_left = 0 - - self.forms = {} - self.files = {} - self.files_keys = [] - self._to_map() - - self.form_str = b'' - self._build_str_forms() - self.str_length = len(self.form_str) - - def _to_map(self): - for k, v in self.form.items(): - if isinstance(v, FileField): - self.files[k] = v - self.files_keys.append(k) - else: - self.forms[k] = v - - def _build_str_forms(self): - form_str = '' - str_fmt = '--%s\r\nContent-Disposition: form-data; name="%s"\r\n\r\n%s\r\n' - forms_list = sorted(list(self.forms)) - for key in forms_list: - value = self.forms[key] - form_str += str_fmt % (self.boundary, key, value) - self.form_str = form_str.encode('utf-8') - - def _get_stream_length(self): - file_length = 0 - for k, ff in self.files.items(): - field_length = len(ff.filename.encode('utf-8')) + len(ff.content_type) +\ - len(k.encode('utf-8')) + len(self.boundary) + 78 - - file_length += _length(ff.content) + field_length - - stream_length = self.str_length + file_length + len(self.boundary) + 6 - return stream_length - - def __len__(self): - return self._get_stream_length() - - def __iter__(self): - return self - - def __next__(self): - return self.read(self.size, loop=True) - - def file_str(self, size): - # handle file object - form_str = b'' - start_fmt = '--%s\r\nContent-Disposition: form-data; name="%s";' - content_fmt = b' filename="%s"\r\nContent-Type: %s\r\n\r\n%s' - - if self.file_size_left: - for key in self.files_keys[:]: - if size <= 0: - break - file_field = self.files[key] - file_content = file_field.content.read(size) - if isinstance(file_content, str): - file_content = file_content.encode('utf-8') - - if self.file_size_left <= size: - form_str += b'%s\r\n' % file_content - self.file_size_left = 0 - size -= len(file_content) - self.files_keys.remove(key) - else: - form_str += file_content - self.file_size_left -= size - size -= len(file_content) - else: - for key in self.files_keys[:]: - if size <= 0: - break - file_field = self.files[key] - - file_size = _length(file_field.content) - self.file_size_left = file_size - file_content = file_field.content.read(size) - if isinstance(file_content, str): - file_content = file_content.encode('utf-8') - - # build form_str - start = start_fmt % (self.boundary, key) - content = content_fmt % ( - file_field.filename.encode('utf-8'), - file_field.content_type.encode('utf-8'), - file_content - ) - if self.file_size_left < size: - form_str += b'%s%s\r\n' % (start.encode('utf-8'), content) - self.file_size_left = 0 - size -= len(file_content) - self.files_keys.remove(key) - else: - form_str += b'%s%s' % 
(start.encode('utf-8'), content) - self.file_size_left -= size - size -= len(file_content) - - return form_str - - def read(self, size=None, loop=False): - if not self.files_keys and not self.form_str: - self.refresh() - if loop: - raise StopIteration - else: - return b'' - - if size is None: - size = sys.maxsize - - if self.form_str: - form_str = self.form_str[:size] - self.form_str = self.form_str[size:] - if len(form_str) < size: - form_str += self.file_str(size) - else: - form_str = self.file_str(size) - - if not self.form_str and not self.files_keys: - form_str += b'--%s--\r\n' % self.boundary.encode('utf-8') - return form_str - - def refresh_cursor(self): - for ff in self.files.values(): - if hasattr(ff.content, 'seek'): - ff.content.seek(0, 0) - - def refresh(self): - self.file_size_left = 0 - self._to_map() - self._build_str_forms() - self.refresh_cursor() - -class FileField(DaraModel): - def __init__(self, filename=None, content_type=None, content=None): - self.filename = filename - self.content_type = content_type - self.content = content - - def validate(self): - self.validate_required(self.filename, 'filename') - self.validate_required(self.content_type, 'content_type') - self.validate_required(self.content, 'content') - - def to_map(self): - result = {} - result['filename'] = self.filename - result['contentType'] = self.content_type - result['content'] = self.content - return result - - def from_map(self, map={}): - self.filename = map.get('filename') - self.content_type = map.get('contentType') - self.content = map.get('content') - return self \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/utils/logger.py b/venv/Lib/site-packages/darabonba/utils/logger.py deleted file mode 100644 index 2e3fa30..0000000 --- a/venv/Lib/site-packages/darabonba/utils/logger.py +++ /dev/null @@ -1,47 +0,0 @@ -class Logger: - levels = { - 'DEBUG': 10, - 'INFO': 20, - 'WARNING': 30, - 'ERROR': 40, - 'CRITICAL': 50 - } - - current_level = levels['DEBUG'] - log_format = "{levelname}: {message}" - - @staticmethod - def log(level_name, message): - if Logger.levels[level_name] >= Logger.current_level: - print(Logger.log_format.format(levelname=level_name, message=message)) - - @staticmethod - def info(message): - Logger.log('INFO', message) - - @staticmethod - def debug(message): - Logger.log('DEBUG', message) - - @staticmethod - def warning(message): - Logger.log('WARNING', message) - - @staticmethod - def error(message): - Logger.log('ERROR', message) - - @staticmethod - def critical(message): - Logger.log('CRITICAL', message) - - @staticmethod - def set_level(level_name): - if level_name in Logger.levels: - Logger.current_level = Logger.levels[level_name] - else: - raise ValueError(f"Invalid log level: {level_name}") - - @staticmethod - def format(log_format): - Logger.log_format = log_format \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/utils/map.py b/venv/Lib/site-packages/darabonba/utils/map.py deleted file mode 100644 index bcf9398..0000000 --- a/venv/Lib/site-packages/darabonba/utils/map.py +++ /dev/null @@ -1,14 +0,0 @@ -import json - -class Map: - def __init__(self, data): - self.data = data - - @staticmethod - def to_json(map_instance): - if not isinstance(map_instance, Map): - raise ValueError("Input must be an instance of Map") - try: - return json.dumps(map_instance.data) - except TypeError as e: - raise Exception(f"Serialization error: {e}") \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/utils/stream.py 
b/venv/Lib/site-packages/darabonba/utils/stream.py deleted file mode 100644 index da473c3..0000000 --- a/venv/Lib/site-packages/darabonba/utils/stream.py +++ /dev/null @@ -1,624 +0,0 @@ -import json -import re -import aiohttp -import codecs -from darabonba.event import Event - -from io import BytesIO, StringIO -from typing import Any, BinaryIO, Generator, AsyncGenerator, Dict - -# define WRITEABLE -sse_line_pattern = re.compile('(?P<name>[^:]*):?( ?(?P<value>.*))?') - -class BaseStream: - def __init__(self, size=1024): - self.size = size - - def read(self, size=1024): - raise NotImplementedError('read method must be overridden') - - def __len__(self): - raise NotImplementedError('__len__ method must be overridden') - - def __next__(self): - raise NotImplementedError('__next__ method must be overridden') - - def __iter__(self): - return self - - -class _ReadableMc(type): - def __instancecheck__(self, instance): - if hasattr(instance, 'read') and hasattr(instance, '__iter__'): - return True - - -class READABLE(metaclass=_ReadableMc): - pass - -class SyncSSEResponseWrapper: - def __init__(self, session, response): - self.session = session - self.response = response - self._closed = False - - def close(self): - if not self._closed: - self.response.close() - self.session.close() - self._closed = True - - def __iter__(self): - return self._read_chunks() - - def _read_chunks(self): - try: - for chunk in self.response.iter_content(chunk_size=8192): - yield chunk - finally: - self.close() - - def read(self) -> bytes: - try: - return self.response.content - finally: - self.close() - -class SSEResponseWrapper: - def __init__(self, session: aiohttp.ClientSession, response: aiohttp.ClientResponse): - self.session = session - self.response = response - self._closed = False - self._content_cache = None - - async def close(self): - if not self._closed: - self.response.close() - await self.session.close() - self._closed = True - - def __aiter__(self): - return self._read_chunks() - - async def _read_chunks(self): - try: - async for chunk in self.response.content.iter_chunked(8192): - yield chunk - finally: - await self.close() - - async def read(self) -> bytes: - if self._content_cache is not None: - return self._content_cache - - try: - content = await self.response.read() - self._content_cache = content - return content - finally: - await self.close() - -class _WriteableMc(type): - def __instancecheck__(self, instance): - if hasattr(instance, 'write'): - return True - - -class WRITABLE(metaclass=_WriteableMc): - pass - - -STREAM_CLASS = (READABLE, WRITABLE) - -class Stream: - - def __init__(self, data=None): - self.data = data if data is not None else b'' - self.position = 0 - - @staticmethod - def __read_part(f, size=1024): - while True: - part = f.read(size) - if part: - yield part - else: - return - - @staticmethod - def __to_string( - val: bytes, - ) -> str: - """ - Convert a bytes to string(utf8) - @return: the return string - """ - if isinstance(val, str): - return val - elif isinstance(val, bytes): - return val.decode('utf-8') - else: - return str(val) - - @staticmethod - def __parse_json( - val: str, - ) -> Any: - """ - Parse it by JSON format - @return: the parsed result - """ - try: - return json.loads(val) - except ValueError: - raise RuntimeError(f'Failed to parse the value as json format, Value: "{val}".') - - @staticmethod - def read_as_bytes(stream) -> bytes: - """ - Read data from a readable stream, and compose it to a bytes - @param stream: the readable stream - @return: the bytes result - """ - if
isinstance(stream, SyncSSEResponseWrapper): - return stream.read() - elif isinstance(stream, READABLE): - b = b'' - for part in Stream.__read_part(stream, 1024): - b += part - return b - elif isinstance(stream, bytes): - return stream - else: - return bytes(stream, encoding='utf-8') - - @staticmethod - async def read_as_bytes_async(stream) -> bytes: - """ - Read data from a readable stream, and compose it to a bytes - @param stream: the readable stream - @return: the bytes result - """ - if isinstance(stream, bytes): - return stream - elif isinstance(stream, str): - return bytes(stream, encoding='utf-8') - else: - return await stream.read() - - @staticmethod - def read_as_json(stream) -> Any: - """ - Read data from a readable stream, and parse it by JSON format - @param stream: the readable stream - @return: the parsed result - """ - return Stream.__parse_json(Stream.read_as_string(stream)) - - @staticmethod - async def read_as_json_async(stream) -> Any: - """ - Read data from a readable stream, and parse it by JSON format - @param stream: the readable stream - @return: the parsed result - """ - return Stream.__parse_json( - await Stream.read_as_string_async(stream) - ) - - - @staticmethod - def read_as_string(stream) -> str: - """ - Read data from a readable stream, and compose it to a string - @param stream: the readable stream - @return: the string result - """ - buff = Stream.read_as_bytes(stream) - return Stream.__to_string(buff) - - @staticmethod - async def read_as_string_async(stream) -> str: - """ - Read data from a readable stream, and compose it to a string - @param stream: the readable stream - @return: the string result - """ - buff = await Stream.read_as_bytes_async(stream) - return Stream.__to_string(buff) - - @staticmethod - def read_as_sse(stream) -> Generator[Event, None, None]: - """ - Read events from SSE stream (synchronous version) - """ - if isinstance(stream, SyncSSEResponseWrapper): - for event in Stream._parse_sse_stream_sync(stream): - yield Event( - id=event.get('id'), - data=event.get('data'), - event=event.get('event'), - retry=event.get('retry')) - elif hasattr(stream, 'iter_content'): - # Read directly from the content stream of requests response object - for event in Stream._parse_sse_stream_from_response_sync(stream): - yield Event( - id=event.get('id'), - data=event.get('data'), - event=event.get('event'), - retry=event.get('retry')) - else: - for event in Stream._parse_sse_stream_sync(stream): - yield Event( - id=event.get('id'), - data=event.get('data'), - event=event.get('event'), - retry=event.get('retry')) - - @staticmethod - async def read_as_sse_async(stream) -> AsyncGenerator[Event, None]: - """ - Read events from SSE stream - """ - if isinstance(stream, SSEResponseWrapper): - async for event in Stream._parse_sse_stream(stream): - yield Event( - id = event.get('id'), - data = event.get('data'), - event= event.get('event'), - retry = event.get('retry')) - elif hasattr(stream, 'content'): - # Read directly from the content stream of aiohttp response object - async for event in Stream._parse_sse_stream_from_response(stream): - yield Event( - id = event.get('id'), - data = event.get('data'), - event= event.get('event'), - retry = event.get('retry')) - else: - async for event in Stream._parse_sse_stream(stream): - yield Event( - id = event.get('id'), - data = event.get('data'), - event= event.get('event'), - retry = event.get('retry')) - - def read(self, size=None): - if size is None: - return self.data[self.position:] - - start = self.position - end = 
min(start + size, len(self.data)) - self.position = end - return self.data[start:end] - - def write(self, data): - if isinstance(data, (bytes, str)): - self.data = data - else: - raise TypeError("Data should be bytes or string.") - - def pipe(self, output_stream, buffer_size=1024): - if not isinstance(output_stream, Stream): - raise TypeError("Output stream should be an instance of Stream.") - - while True: - chunk = self.read(buffer_size) - if not chunk: - break - output_stream.write(chunk) - - @staticmethod - def to_readable( - value: Any, - ) -> BinaryIO: - """ - Assert a value, if it is a readable, return it, otherwise throws - @return: the readable value - """ - if isinstance(value, str): - value = value.encode('utf-8') - - if isinstance(value, bytes): - value = BytesIO(value) - elif not isinstance(value, READABLE): - raise ValueError(f'The value is not a readable') - return value - - @staticmethod - def to_writeable( - value: Any, - ) -> WRITABLE: - """ - Assert a value, if it is a writeable, return it, otherwise throws - @return: the writeable value - """ - if isinstance(value, str): - value = StringIO(value) - - elif isinstance(value, bytes): - value = BytesIO(value) - elif not isinstance(value, WRITABLE): - raise ValueError(f'The value is not a writeable') - return value - - @staticmethod - async def _parse_sse_stream(wrapper: SSEResponseWrapper) -> AsyncGenerator[Dict[str, Any], None]: - """ - Analyze SSE stream data - """ - buffer = "" - current_event = Event() - - MAX_BUFFER_SIZE = 1024 * 1024 # 1MB - dec = codecs.getincrementaldecoder('utf-8')() - - async for chunk in wrapper: - try: - chunk_str = dec.decode(chunk) - except UnicodeDecodeError: - chunk_str = chunk.decode('utf-8', errors='replace') - - if len(buffer) + len(chunk_str) > MAX_BUFFER_SIZE: - import logging - logging.warning("SSE stream data too large, skipping chunk") - continue - - buffer += chunk_str - - while '\n' in buffer: - line, buffer = buffer.split('\n', 1) - line = line.rstrip('\r') # Remove \r - - if not line.strip(): - if current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - current_event = Event() - continue - - if line.startswith(':'): - continue - - if ':' in line: - match = sse_line_pattern.match(line) - if match: - name = match.group('name').strip() - value = match.group('value').strip() - - if name == 'event': - current_event.event = value - elif name == 'id': - current_event.id = value - elif name == 'data': - if current_event.data is None: - current_event.data = value - else: - current_event.data += '\n' + value - elif name == 'retry': - try: - current_event.retry = int(value) - except ValueError: - pass - else: - if current_event.data is None: - current_event.data = line - else: - current_event.data += '\n' + line - - if buffer.strip() and current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - - @staticmethod - async def _parse_sse_stream_from_response(response) -> AsyncGenerator[Dict[str, Any], None]: - buffer = "" - current_event = Event() - - async for chunk in response.content.iter_chunked(8192): - try: - chunk_str = chunk.decode('utf-8') - except UnicodeDecodeError: - continue - - buffer += chunk_str - - while '\n' in buffer: - line, buffer = buffer.split('\n', 1) - line = line.rstrip('\r') - - if not line.strip(): - if current_event.data is 
not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - current_event = Event() - continue - - if line.startswith(':'): - continue - - if ':' in line: - match = sse_line_pattern.match(line) - if match: - name = match.group('name').strip() - value = match.group('value').strip() - - if name == 'event': - current_event.event = value - elif name == 'id': - current_event.id = value - elif name == 'data': - if current_event.data is None: - current_event.data = value - else: - current_event.data += '\n' + value - elif name == 'retry': - try: - current_event.retry = int(value) - except ValueError: - pass - else: - if current_event.data is None: - current_event.data = line - else: - current_event.data += '\n' + line - - if buffer.strip() and current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - - @staticmethod - def _parse_sse_stream_sync(wrapper: SyncSSEResponseWrapper) -> Generator[Dict[str, Any], None, None]: - """ - Analyze SSE stream data (synchronous version) - """ - buffer = "" - current_event = Event() - - for chunk in wrapper: - # Decoding byte data into strings - try: - chunk_str = chunk.decode('utf-8') - except UnicodeDecodeError: - # If decoding fails, skip this chunk - continue - - buffer += chunk_str - - # Split processing by row - while '\n' in buffer: - line, buffer = buffer.split('\n', 1) - line = line.rstrip('\r') # Remove \r - - if not line.strip(): - if current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - current_event = Event() - continue - - # Skip comment lines - if line.startswith(':'): - continue - - if ':' in line: - match = sse_line_pattern.match(line) - if match: - name = match.group('name').strip() - value = match.group('value').strip() - - if name == 'event': - current_event.event = value - elif name == 'id': - current_event.id = value - elif name == 'data': - if current_event.data is None: - current_event.data = value - else: - current_event.data += '\n' + value - elif name == 'retry': - try: - current_event.retry = int(value) - except ValueError: - pass - else: - if current_event.data is None: - current_event.data = line - else: - current_event.data += '\n' + line - - if buffer.strip() and current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - - @staticmethod - def _parse_sse_stream_from_response_sync(response) -> Generator[Dict[str, Any], None, None]: - """ - Parse SSE stream from requests response object (synchronous version) - """ - buffer = "" - current_event = Event() - - for chunk in response.iter_content(chunk_size=8192): - try: - chunk_str = chunk.decode('utf-8') - except UnicodeDecodeError: - continue - - buffer += chunk_str - - while '\n' in buffer: - line, buffer = buffer.split('\n', 1) - line = line.rstrip('\r') - - if not line.strip(): - if current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } - current_event = Event() - continue - - if line.startswith(':'): - continue - - if ':' in line: - match = sse_line_pattern.match(line) - if match: - name = 
match.group('name').strip() - value = match.group('value').strip() - - if name == 'event': - current_event.event = value - elif name == 'id': - current_event.id = value - elif name == 'data': - if current_event.data is None: - current_event.data = value - else: - current_event.data += '\n' + value - elif name == 'retry': - try: - current_event.retry = int(value) - except ValueError: - pass - else: - if current_event.data is None: - current_event.data = line - else: - current_event.data += '\n' + line - - if buffer.strip() and current_event.data is not None: - yield { - 'id': current_event.id, - 'event': current_event.event or 'message', - 'data': current_event.data, - 'retry': current_event.retry - } \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/utils/validation.py b/venv/Lib/site-packages/darabonba/utils/validation.py deleted file mode 100644 index b2d342f..0000000 --- a/venv/Lib/site-packages/darabonba/utils/validation.py +++ /dev/null @@ -1,20 +0,0 @@ -import re - -from darabonba.exceptions import ValidateException - - -def assert_integer_positive(integer, name): - if isinstance(integer, int) and integer > 0: - return - raise ValidateException("{0} should be a positive integer.".format(name)) - - -def validate_pattern(prop, prop_name, pattern): - match_obj = re.search(pattern, prop, re.M | re.I) - if not match_obj: - raise ValidateException('The parameter %s not match with %s' % (prop_name, pattern)) - - -def is_null(value, name): - if value is None: - raise ValidateException("The parameter {0} should not be null.".format(name)) \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba/utils/xml.py b/venv/Lib/site-packages/darabonba/utils/xml.py deleted file mode 100644 index 6ae3370..0000000 --- a/venv/Lib/site-packages/darabonba/utils/xml.py +++ /dev/null @@ -1,99 +0,0 @@ -from xml.etree import ElementTree -from darabonba.model import DaraModel -from collections import defaultdict - -class XML: - - _LIST_TYPE = (list, tuple, set) - - @staticmethod - def __get_xml_factory(elem, val, parent_element=None): - if val is None: - return - - if isinstance(val, dict): - XML.__get_xml_by_dict(elem, val) - elif isinstance(val, XML._LIST_TYPE): - if parent_element is None: - raise RuntimeError("Missing root tag") - XML.__get_xml_by_list(elem, val, parent_element) - else: - elem.text = str(val) - - @staticmethod - def __get_xml_by_dict(elem, val): - for k in val: - sub_elem = ElementTree.SubElement(elem, k) - XML.__get_xml_factory(sub_elem, val[k], elem) - - @staticmethod - def __get_xml_by_list(elem, val, parent_element): - i = 0 - tag_name = elem.tag - if val.__len__() > 0: - XML.__get_xml_factory(elem, val[0], parent_element) - - for item in val: - if i > 0: - sub_elem = ElementTree.SubElement(parent_element, tag_name) - XML.__get_xml_factory(sub_elem, item, parent_element) - i = i + 1 - - @staticmethod - def _parse_xml(t): - d = {t.tag: {} if t.attrib else None} - children = list(t) - if children: - dd = defaultdict(list) - for dc in map(XML._parse_xml, children): - for k, v in dc.items(): - dd[k].append(v) - d = {t.tag: {k: v[0] if len(v) == 1 else v for k, v in dd.items()}} - - if t.attrib: - d[t.tag].update(('@' + k, v) for k, v in t.attrib.items()) - - if t.text: - text = t.text.strip() - if children or t.attrib: - if text: - d[t.tag]['#text'] = text - else: - d[t.tag] = text - return d - - @staticmethod - def parse_xml(body, response=None): - """ - Parse body into the response, and put the resposne into a object - @param body: source content - 
@param response: target model - @return the final object - """ - return XML._parse_xml(ElementTree.fromstring(body)) - - @staticmethod - def to_xml(body): - """ - Parse body as a xml string - @param body: source body - @return the xml string - """ - if body is None: - return - - dic = {} - if isinstance(body, DaraModel): - dic = body.to_map() - elif isinstance(body, dict): - dic = body - - if dic.__len__() == 0: - return "" - else: - result_xml = '' - for k in dic: - elem = ElementTree.Element(k) - XML.__get_xml_factory(elem, dic[k]) - result_xml += bytes.decode(ElementTree.tostring(elem), encoding="utf-8") - return result_xml \ No newline at end of file diff --git a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/INSTALLER b/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/METADATA b/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/METADATA deleted file mode 100644 index fcc6283..0000000 --- a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/METADATA +++ /dev/null @@ -1,69 +0,0 @@ -Metadata-Version: 2.1 -Name: darabonba-core -Version: 1.0.5 -Summary: The darabonba module of alibabaCloud Python SDK. -Home-page: https://github.com/aliyun/tea-python -Author: Alibaba Cloud -Author-email: alibaba-cloud-sdk-dev-team@list.alibaba-inc.com -License: Apache License 2.0 -Keywords: alibabacloud,sdk,darabonba -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development -Requires-Python: >=3.7 -Requires-Dist: aiohttp (<4.0.0,>=3.7.0) -Requires-Dist: alibabacloud-tea -Requires-Dist: requests (<3.0.0,>=2.21.0) - -English | [简体中文](README-CN.md) - -![Alibaba Cloud](https://aliyunsdk-pages.alicdn.com/icons/AlibabaCloud.svg) - -## Alibaba Cloud Tea for Python - -[![PyPI version](https://badge.fury.io/py/darabonba-core.svg)](https://badge.fury.io/py/darabonba-core) -[![Python Test](https://github.com/aliyun/tea-python/actions/workflows/ci.yml/badge.svg)](https://github.com/aliyun/tea-python/actions/workflows/ci.yml) -[![codecov](https://codecov.io/gh/aliyun/tea-python/graph/badge.svg?token=FN19OMRTVY)](https://codecov.io/gh/aliyun/tea-python) -[![python](https://img.shields.io/pypi/pyversions/darabonba-core.svg)](https://img.shields.io/pypi/pyversions/darabonba-core.svg) - -## Important Updates - -- Starting from version 1.0.0, the package `darabonba-core` only supports Python 3.7 and above. - -## Installation - -- **Install with pip** - -Python SDK uses a common package management tool named `pip`. If pip is not installed, see the [pip user guide](https://pip.pypa.io/en/stable/installing/ "pip User Guide") to install pip. - -```bash -# Install the darabonba-core -pip install darabonba-core -``` - -## Issues - -[Opening an Issue](https://github.com/aliyun/tea-python/issues/new), Issues not conforming to the guidelines may be closed immediately. 
- -## Changelog - -Detailed changes for each release are documented in the [release notes](./ChangeLog.md). - -## References - -- [Latest Release](https://github.com/aliyun/tea-python/tree/master/python) - -## License - -[Apache-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Copyright (c) 2009-present, Alibaba Cloud All rights reserved. diff --git a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/RECORD b/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/RECORD deleted file mode 100644 index 32aa197..0000000 --- a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/RECORD +++ /dev/null @@ -1,52 +0,0 @@ -darabonba/__init__.py,sha256=ZR1VA9cGs0vIK6cWK4YKLfBTnmUCAcDaaP9ARPPYxEs,21 -darabonba/__pycache__/__init__.cpython-312.pyc,, -darabonba/__pycache__/core.cpython-312.pyc,, -darabonba/__pycache__/date.cpython-312.pyc,, -darabonba/__pycache__/decorators.cpython-312.pyc,, -darabonba/__pycache__/event.cpython-312.pyc,, -darabonba/__pycache__/exceptions.cpython-312.pyc,, -darabonba/__pycache__/file.cpython-312.pyc,, -darabonba/__pycache__/model.cpython-312.pyc,, -darabonba/__pycache__/number.cpython-312.pyc,, -darabonba/__pycache__/request.cpython-312.pyc,, -darabonba/__pycache__/response.cpython-312.pyc,, -darabonba/__pycache__/runtime.cpython-312.pyc,, -darabonba/__pycache__/url.cpython-312.pyc,, -darabonba/core.py,sha256=TUVEy1Dz6Q5jjD60nn4C7UBIcylZbdGj9R3jBKHOMsY,24637 -darabonba/date.py,sha256=XYmT7j-6V_Z7gxqyORSLBL67p_gaZrle2Uyw-TggXcU,5166 -darabonba/decorators.py,sha256=BgTYrPBqIqfVMxtU2eLwFfM09ThPvPHd7FBeKNgavfs,2313 -darabonba/event.py,sha256=gbILkZWiTIxtcX7e7a948ODV3Lw8ByzP9ivMEx_yB2g,1366 -darabonba/exceptions.py,sha256=GFgyAZYVMpSinwgnAf703AWyWDefdMkSK8bcqm9tMzg,2770 -darabonba/file.py,sha256=_uT8zoBl0DaWt2DA21zKDagGsiJPoM8Pe6gP5Fpw3Mw,1228 -darabonba/model.py,sha256=vbb5JAFgGvL1uvXNq7jaEK6fySxVgQHcbZBYZ8lYT40,1593 -darabonba/number.py,sha256=6F--LBdBHycEpjnqKnXZDi1w2WOGGLFK54kHG-GLb7s,657 -darabonba/policy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -darabonba/policy/__pycache__/__init__.cpython-312.pyc,, -darabonba/policy/__pycache__/retry.cpython-312.pyc,, -darabonba/policy/retry.py,sha256=3yhsBcvnMc_18sYyP7UAqxxxge1YJ33qDds_JMqDjFU,8110 -darabonba/request.py,sha256=eWu7nthagicELTFrSQAPAGJUzZTj1mZkZ_1HVxcjitc,850 -darabonba/response.py,sha256=ebxS3uM012lIu7UlsPJ6CGHvRbwoJutsHWXLCFEk5V8,431 -darabonba/runtime.py,sha256=9cZoIP8Cq77KaAqCtRHOi7rBttCaNfA9_i3YowwNsR8,7372 -darabonba/url.py,sha256=qE7JNtH-SJbBYc7ancUGqOstiJqdGnSJbTxHdd19y5o,2451 -darabonba/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -darabonba/utils/__pycache__/__init__.cpython-312.pyc,, -darabonba/utils/__pycache__/bytes.cpython-312.pyc,, -darabonba/utils/__pycache__/form.cpython-312.pyc,, -darabonba/utils/__pycache__/logger.cpython-312.pyc,, -darabonba/utils/__pycache__/map.cpython-312.pyc,, -darabonba/utils/__pycache__/stream.cpython-312.pyc,, -darabonba/utils/__pycache__/validation.cpython-312.pyc,, -darabonba/utils/__pycache__/xml.cpython-312.pyc,, -darabonba/utils/bytes.py,sha256=2qp7nDf8UI-x0W-cYbER6dYX-N1mHgoXZZWuk9NWuB0,1086 -darabonba/utils/form.py,sha256=QQ42xterB91Pp9OmEWRy76OfY6TWyHgdTw90IeotgO4,6656 -darabonba/utils/logger.py,sha256=yiZ13PyvaDXBATbiax3gSrM13gmym4hbVt4u4owUT1U,1159 -darabonba/utils/map.py,sha256=pMtwaQqvIUWUbU9ZyeGeJUeJsioByUKRB9sX7kiMw9I,389 -darabonba/utils/stream.py,sha256=k2Ku0b-jYgTM9_uAo6lKEkhEm4bHBs5hTIeYiRjqa0k,21980 -darabonba/utils/validation.py,sha256=VSf6LEJwYxDduuc4ldX_hN6AHw-qRlltqLqZHpqqNJ4,604 
-darabonba/utils/xml.py,sha256=C52bYFKyI8Z9f8AvwX4Z6oReQSQdi0TthO7IrRsQMFE,2981 -darabonba_core-1.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -darabonba_core-1.0.5.dist-info/METADATA,sha256=5mGgaKCGJrsKZmSfIxIhkjK95cBgJdZdMgQLhR79hck,2582 -darabonba_core-1.0.5.dist-info/RECORD,, -darabonba_core-1.0.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -darabonba_core-1.0.5.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 -darabonba_core-1.0.5.dist-info/top_level.txt,sha256=-8IXmruGn3drNiPfZzTCkCtMzX6bGr10RkqwTLsRMzY,10 diff --git a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/REQUESTED b/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/WHEEL b/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/WHEEL deleted file mode 100644 index 57e3d84..0000000 --- a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/top_level.txt b/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/top_level.txt deleted file mode 100644 index d173572..0000000 --- a/venv/Lib/site-packages/darabonba_core-1.0.5.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -darabonba diff --git a/venv/Lib/site-packages/dateutil/__init__.py b/venv/Lib/site-packages/dateutil/__init__.py deleted file mode 100644 index a2c19c0..0000000 --- a/venv/Lib/site-packages/dateutil/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -import sys - -try: - from ._version import version as __version__ -except ImportError: - __version__ = 'unknown' - -__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', - 'utils', 'zoneinfo'] - -def __getattr__(name): - import importlib - - if name in __all__: - return importlib.import_module("." + name, __name__) - raise AttributeError( - "module {!r} has not attribute {!r}".format(__name__, name) - ) - - -def __dir__(): - # __dir__ should include all the lazy-importable modules as well. 
- return [x for x in globals() if x not in sys.modules] + __all__ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index becffbd..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-312.pyc deleted file mode 100644 index 925290f..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-312.pyc deleted file mode 100644 index b549c13..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-312.pyc deleted file mode 100644 index 1dbace7..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc deleted file mode 100644 index 674db30..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc deleted file mode 100644 index e1a3054..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc deleted file mode 100644 index 97e2204..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/dateutil/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 3721d09..0000000 Binary files a/venv/Lib/site-packages/dateutil/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/_common.py b/venv/Lib/site-packages/dateutil/_common.py deleted file mode 100644 index 4eb2659..0000000 --- a/venv/Lib/site-packages/dateutil/_common.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Common code used in multiple modules. 
-""" - - -class weekday(object): - __slots__ = ["weekday", "n"] - - def __init__(self, weekday, n=None): - self.weekday = weekday - self.n = n - - def __call__(self, n): - if n == self.n: - return self - else: - return self.__class__(self.weekday, n) - - def __eq__(self, other): - try: - if self.weekday != other.weekday or self.n != other.n: - return False - except AttributeError: - return False - return True - - def __hash__(self): - return hash(( - self.weekday, - self.n, - )) - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] - if not self.n: - return s - else: - return "%s(%+d)" % (s, self.n) - -# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/_version.py b/venv/Lib/site-packages/dateutil/_version.py deleted file mode 100644 index ddda980..0000000 --- a/venv/Lib/site-packages/dateutil/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# file generated by setuptools_scm -# don't change, don't track in version control -__version__ = version = '2.9.0.post0' -__version_tuple__ = version_tuple = (2, 9, 0) diff --git a/venv/Lib/site-packages/dateutil/easter.py b/venv/Lib/site-packages/dateutil/easter.py deleted file mode 100644 index f74d1f7..0000000 --- a/venv/Lib/site-packages/dateutil/easter.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a generic Easter computing method for any given year, using -Western, Orthodox or Julian algorithms. -""" - -import datetime - -__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] - -EASTER_JULIAN = 1 -EASTER_ORTHODOX = 2 -EASTER_WESTERN = 3 - - -def easter(year, method=EASTER_WESTERN): - """ - This method was ported from the work done by GM Arts, - on top of the algorithm by Claus Tondering, which was - based in part on the algorithm of Ouding (1940), as - quoted in "Explanatory Supplement to the Astronomical - Almanac", P. Kenneth Seidelmann, editor. - - This algorithm implements three different Easter - calculation methods: - - 1. Original calculation in Julian calendar, valid in - dates after 326 AD - 2. Original method, with date converted to Gregorian - calendar, valid in years 1583 to 4099 - 3. Revised method, in Gregorian calendar, valid in - years 1583 to 4099 as well - - These methods are represented by the constants: - - * ``EASTER_JULIAN = 1`` - * ``EASTER_ORTHODOX = 2`` - * ``EASTER_WESTERN = 3`` - - The default method is method 3. 
- - More about the algorithm may be found at: - - `GM Arts: Easter Algorithms `_ - - and - - `The Calendar FAQ: Easter `_ - - """ - - if not (1 <= method <= 3): - raise ValueError("invalid method") - - # g - Golden year - 1 - # c - Century - # h - (23 - Epact) mod 30 - # i - Number of days from March 21 to Paschal Full Moon - # j - Weekday for PFM (0=Sunday, etc) - # p - Number of days from March 21 to Sunday on or before PFM - # (-6 to 28 methods 1 & 3, to 56 for method 2) - # e - Extra days to add for method 2 (converting Julian - # date to Gregorian date) - - y = year - g = y % 19 - e = 0 - if method < 3: - # Old method - i = (19*g + 15) % 30 - j = (y + y//4 + i) % 7 - if method == 2: - # Extra dates to convert Julian to Gregorian date - e = 10 - if y > 1600: - e = e + y//100 - 16 - (y//100 - 16)//4 - else: - # New method - c = y//100 - h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 - i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) - j = (y + y//4 + i + 2 - c + c//4) % 7 - - # p can be from -6 to 56 corresponding to dates 22 March to 23 May - # (later dates apply to method 2, although 23 May never actually occurs) - p = i - j + e - d = 1 + (p + 27 + (p + 6)//40) % 31 - m = 3 + (p + 26)//30 - return datetime.date(int(y), int(m), int(d)) diff --git a/venv/Lib/site-packages/dateutil/parser/__init__.py b/venv/Lib/site-packages/dateutil/parser/__init__.py deleted file mode 100644 index d174b0e..0000000 --- a/venv/Lib/site-packages/dateutil/parser/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -from ._parser import parse, parser, parserinfo, ParserError -from ._parser import DEFAULTPARSER, DEFAULTTZPARSER -from ._parser import UnknownTimezoneWarning - -from ._parser import __doc__ - -from .isoparser import isoparser, isoparse - -__all__ = ['parse', 'parser', 'parserinfo', - 'isoparse', 'isoparser', - 'ParserError', - 'UnknownTimezoneWarning'] - - -### -# Deprecate portions of the private interface so that downstream code that -# is improperly relying on it is given *some* notice. 
- - -def __deprecated_private_func(f): - from functools import wraps - import warnings - - msg = ('{name} is a private function and may break without warning, ' - 'it will be moved and or renamed in future versions.') - msg = msg.format(name=f.__name__) - - @wraps(f) - def deprecated_func(*args, **kwargs): - warnings.warn(msg, DeprecationWarning) - return f(*args, **kwargs) - - return deprecated_func - -def __deprecate_private_class(c): - import warnings - - msg = ('{name} is a private class and may break without warning, ' - 'it will be moved and or renamed in future versions.') - msg = msg.format(name=c.__name__) - - class private_class(c): - __doc__ = c.__doc__ - - def __init__(self, *args, **kwargs): - warnings.warn(msg, DeprecationWarning) - super(private_class, self).__init__(*args, **kwargs) - - private_class.__name__ = c.__name__ - - return private_class - - -from ._parser import _timelex, _resultbase -from ._parser import _tzparser, _parsetz - -_timelex = __deprecate_private_class(_timelex) -_tzparser = __deprecate_private_class(_tzparser) -_resultbase = __deprecate_private_class(_resultbase) -_parsetz = __deprecated_private_func(_parsetz) diff --git a/venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index f1cf0df..0000000 Binary files a/venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc b/venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc deleted file mode 100644 index da8b9a8..0000000 Binary files a/venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc b/venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc deleted file mode 100644 index b08df8e..0000000 Binary files a/venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/parser/_parser.py b/venv/Lib/site-packages/dateutil/parser/_parser.py deleted file mode 100644 index 37d1663..0000000 --- a/venv/Lib/site-packages/dateutil/parser/_parser.py +++ /dev/null @@ -1,1613 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a generic date/time string parser which is able to parse -most known formats to represent a date and/or time. - -This module attempts to be forgiving with regards to unlikely input formats, -returning a datetime object even for dates which are ambiguous. If an element -of a date/time stamp is omitted, the following rules are applied: - -- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour - on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is - specified. -- If a time zone is omitted, a timezone-naive datetime is returned. - -If any other elements are missing, they are taken from the -:class:`datetime.datetime` object passed to the parameter ``default``. If this -results in a day number exceeding the valid number of days per month, the -value falls back to the end of the month. 
- -Additional resources about date/time string formats can be found below: - -- `A summary of the international standard date and time notation - `_ -- `W3C Date and Time Formats `_ -- `Time Formats (Planetary Rings Node) `_ -- `CPAN ParseDate module - `_ -- `Java SimpleDateFormat Class - `_ -""" -from __future__ import unicode_literals - -import datetime -import re -import string -import time -import warnings - -from calendar import monthrange -from io import StringIO - -import six -from six import integer_types, text_type - -from decimal import Decimal - -from warnings import warn - -from .. import relativedelta -from .. import tz - -__all__ = ["parse", "parserinfo", "ParserError"] - - -# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth -# making public and/or figuring out if there is something we can -# take off their plate. -class _timelex(object): - # Fractional seconds are sometimes split by a comma - _split_decimal = re.compile("([.,])") - - def __init__(self, instream): - if isinstance(instream, (bytes, bytearray)): - instream = instream.decode() - - if isinstance(instream, text_type): - instream = StringIO(instream) - elif getattr(instream, 'read', None) is None: - raise TypeError('Parser must be a string or character stream, not ' - '{itype}'.format(itype=instream.__class__.__name__)) - - self.instream = instream - self.charstack = [] - self.tokenstack = [] - self.eof = False - - def get_token(self): - """ - This function breaks the time string into lexical units (tokens), which - can be parsed by the parser. Lexical units are demarcated by changes in - the character set, so any continuous string of letters is considered - one unit, any continuous string of numbers is considered one unit. - - The main complication arises from the fact that dots ('.') can be used - both as separators (e.g. "Sep.20.2009") or decimal points (e.g. - "4:30:21.447"). As such, it is necessary to read the full context of - any dot-separated strings before breaking it into tokens; as such, this - function maintains a "token stack", for when the ambiguous context - demands that multiple tokens be parsed at once. - """ - if self.tokenstack: - return self.tokenstack.pop(0) - - seenletters = False - token = None - state = None - - while not self.eof: - # We only realize that we've reached the end of a token when we - # find a character that's not part of the current token - since - # that character may be part of the next token, it's stored in the - # charstack. - if self.charstack: - nextchar = self.charstack.pop(0) - else: - nextchar = self.instream.read(1) - while nextchar == '\x00': - nextchar = self.instream.read(1) - - if not nextchar: - self.eof = True - break - elif not state: - # First character of the token - determines if we're starting - # to parse a word, a number or something else. - token = nextchar - if self.isword(nextchar): - state = 'a' - elif self.isnum(nextchar): - state = '0' - elif self.isspace(nextchar): - token = ' ' - break # emit token - else: - break # emit token - elif state == 'a': - # If we've already started reading a word, we keep reading - # letters until we find something that's not part of a word. - seenletters = True - if self.isword(nextchar): - token += nextchar - elif nextchar == '.': - token += nextchar - state = 'a.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == '0': - # If we've already started reading a number, we keep reading - # numbers until we find something that doesn't fit. 
- if self.isnum(nextchar): - token += nextchar - elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): - token += nextchar - state = '0.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == 'a.': - # If we've seen some letters and a dot separator, continue - # parsing, and the tokens will be broken up later. - seenletters = True - if nextchar == '.' or self.isword(nextchar): - token += nextchar - elif self.isnum(nextchar) and token[-1] == '.': - token += nextchar - state = '0.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == '0.': - # If we've seen at least one dot separator, keep going, we'll - # break up the tokens later. - if nextchar == '.' or self.isnum(nextchar): - token += nextchar - elif self.isword(nextchar) and token[-1] == '.': - token += nextchar - state = 'a.' - else: - self.charstack.append(nextchar) - break # emit token - - if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or - token[-1] in '.,')): - l = self._split_decimal.split(token) - token = l[0] - for tok in l[1:]: - if tok: - self.tokenstack.append(tok) - - if state == '0.' and token.count('.') == 0: - token = token.replace(',', '.') - - return token - - def __iter__(self): - return self - - def __next__(self): - token = self.get_token() - if token is None: - raise StopIteration - - return token - - def next(self): - return self.__next__() # Python 2.x support - - @classmethod - def split(cls, s): - return list(cls(s)) - - @classmethod - def isword(cls, nextchar): - """ Whether or not the next character is part of a word """ - return nextchar.isalpha() - - @classmethod - def isnum(cls, nextchar): - """ Whether the next character is part of a number """ - return nextchar.isdigit() - - @classmethod - def isspace(cls, nextchar): - """ Whether the next character is whitespace """ - return nextchar.isspace() - - -class _resultbase(object): - - def __init__(self): - for attr in self.__slots__: - setattr(self, attr, None) - - def _repr(self, classname): - l = [] - for attr in self.__slots__: - value = getattr(self, attr) - if value is not None: - l.append("%s=%s" % (attr, repr(value))) - return "%s(%s)" % (classname, ", ".join(l)) - - def __len__(self): - return (sum(getattr(self, attr) is not None - for attr in self.__slots__)) - - def __repr__(self): - return self._repr(self.__class__.__name__) - - -class parserinfo(object): - """ - Class which handles what inputs are accepted. Subclass this to customize - the language and acceptable values for each parameter. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM - and YMD. Default is ``False``. - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken - to be the year, otherwise the last number is taken to be the year. - Default is ``False``. 
- """ - - # m from a.m/p.m, t from ISO T separator - JUMP = [" ", ".", ",", ";", "-", "/", "'", - "at", "on", "and", "ad", "m", "t", "of", - "st", "nd", "rd", "th"] - - WEEKDAYS = [("Mon", "Monday"), - ("Tue", "Tuesday"), # TODO: "Tues" - ("Wed", "Wednesday"), - ("Thu", "Thursday"), # TODO: "Thurs" - ("Fri", "Friday"), - ("Sat", "Saturday"), - ("Sun", "Sunday")] - MONTHS = [("Jan", "January"), - ("Feb", "February"), # TODO: "Febr" - ("Mar", "March"), - ("Apr", "April"), - ("May", "May"), - ("Jun", "June"), - ("Jul", "July"), - ("Aug", "August"), - ("Sep", "Sept", "September"), - ("Oct", "October"), - ("Nov", "November"), - ("Dec", "December")] - HMS = [("h", "hour", "hours"), - ("m", "minute", "minutes"), - ("s", "second", "seconds")] - AMPM = [("am", "a"), - ("pm", "p")] - UTCZONE = ["UTC", "GMT", "Z", "z"] - PERTAIN = ["of"] - TZOFFSET = {} - # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", - # "Anno Domini", "Year of Our Lord"] - - def __init__(self, dayfirst=False, yearfirst=False): - self._jump = self._convert(self.JUMP) - self._weekdays = self._convert(self.WEEKDAYS) - self._months = self._convert(self.MONTHS) - self._hms = self._convert(self.HMS) - self._ampm = self._convert(self.AMPM) - self._utczone = self._convert(self.UTCZONE) - self._pertain = self._convert(self.PERTAIN) - - self.dayfirst = dayfirst - self.yearfirst = yearfirst - - self._year = time.localtime().tm_year - self._century = self._year // 100 * 100 - - def _convert(self, lst): - dct = {} - for i, v in enumerate(lst): - if isinstance(v, tuple): - for v in v: - dct[v.lower()] = i - else: - dct[v.lower()] = i - return dct - - def jump(self, name): - return name.lower() in self._jump - - def weekday(self, name): - try: - return self._weekdays[name.lower()] - except KeyError: - pass - return None - - def month(self, name): - try: - return self._months[name.lower()] + 1 - except KeyError: - pass - return None - - def hms(self, name): - try: - return self._hms[name.lower()] - except KeyError: - return None - - def ampm(self, name): - try: - return self._ampm[name.lower()] - except KeyError: - return None - - def pertain(self, name): - return name.lower() in self._pertain - - def utczone(self, name): - return name.lower() in self._utczone - - def tzoffset(self, name): - if name in self._utczone: - return 0 - - return self.TZOFFSET.get(name) - - def convertyear(self, year, century_specified=False): - """ - Converts two-digit years to year within [-50, 49] - range of self._year (current local time) - """ - - # Function contract is that the year is always positive - assert year >= 0 - - if year < 100 and not century_specified: - # assume current century to start - year += self._century - - if year >= self._year + 50: # if too far in future - year -= 100 - elif year < self._year - 50: # if too far in past - year += 100 - - return year - - def validate(self, res): - # move to info - if res.year is not None: - res.year = self.convertyear(res.year, res.century_specified) - - if ((res.tzoffset == 0 and not res.tzname) or - (res.tzname == 'Z' or res.tzname == 'z')): - res.tzname = "UTC" - res.tzoffset = 0 - elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): - res.tzoffset = 0 - return True - - -class _ymd(list): - def __init__(self, *args, **kwargs): - super(self.__class__, self).__init__(*args, **kwargs) - self.century_specified = False - self.dstridx = None - self.mstridx = None - self.ystridx = None - - @property - def has_year(self): - return self.ystridx is not None - - @property - def has_month(self): - 
return self.mstridx is not None - - @property - def has_day(self): - return self.dstridx is not None - - def could_be_day(self, value): - if self.has_day: - return False - elif not self.has_month: - return 1 <= value <= 31 - elif not self.has_year: - # Be permissive, assume leap year - month = self[self.mstridx] - return 1 <= value <= monthrange(2000, month)[1] - else: - month = self[self.mstridx] - year = self[self.ystridx] - return 1 <= value <= monthrange(year, month)[1] - - def append(self, val, label=None): - if hasattr(val, '__len__'): - if val.isdigit() and len(val) > 2: - self.century_specified = True - if label not in [None, 'Y']: # pragma: no cover - raise ValueError(label) - label = 'Y' - elif val > 100: - self.century_specified = True - if label not in [None, 'Y']: # pragma: no cover - raise ValueError(label) - label = 'Y' - - super(self.__class__, self).append(int(val)) - - if label == 'M': - if self.has_month: - raise ValueError('Month is already set') - self.mstridx = len(self) - 1 - elif label == 'D': - if self.has_day: - raise ValueError('Day is already set') - self.dstridx = len(self) - 1 - elif label == 'Y': - if self.has_year: - raise ValueError('Year is already set') - self.ystridx = len(self) - 1 - - def _resolve_from_stridxs(self, strids): - """ - Try to resolve the identities of year/month/day elements using - ystridx, mstridx, and dstridx, if enough of these are specified. - """ - if len(self) == 3 and len(strids) == 2: - # we can back out the remaining stridx value - missing = [x for x in range(3) if x not in strids.values()] - key = [x for x in ['y', 'm', 'd'] if x not in strids] - assert len(missing) == len(key) == 1 - key = key[0] - val = missing[0] - strids[key] = val - - assert len(self) == len(strids) # otherwise this should not be called - out = {key: self[strids[key]] for key in strids} - return (out.get('y'), out.get('m'), out.get('d')) - - def resolve_ymd(self, yearfirst, dayfirst): - len_ymd = len(self) - year, month, day = (None, None, None) - - strids = (('y', self.ystridx), - ('m', self.mstridx), - ('d', self.dstridx)) - - strids = {key: val for key, val in strids if val is not None} - if (len(self) == len(strids) > 0 or - (len(self) == 3 and len(strids) == 2)): - return self._resolve_from_stridxs(strids) - - mstridx = self.mstridx - - if len_ymd > 3: - raise ValueError("More than three YMD values") - elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): - # One member, or two members with a month string - if mstridx is not None: - month = self[mstridx] - # since mstridx is 0 or 1, self[mstridx-1] always - # looks up the other element - other = self[mstridx - 1] - else: - other = self[0] - - if len_ymd > 1 or mstridx is None: - if other > 31: - year = other - else: - day = other - - elif len_ymd == 2: - # Two members with numbers - if self[0] > 31: - # 99-01 - year, month = self - elif self[1] > 31: - # 01-99 - month, year = self - elif dayfirst and self[1] <= 12: - # 13-01 - day, month = self - else: - # 01-13 - month, day = self - - elif len_ymd == 3: - # Three members - if mstridx == 0: - if self[1] > 31: - # Apr-2003-25 - month, year, day = self - else: - month, day, year = self - elif mstridx == 1: - if self[0] > 31 or (yearfirst and self[2] <= 31): - # 99-Jan-01 - year, month, day = self - else: - # 01-Jan-01 - # Give precedence to day-first, since - # two-digit years is usually hand-written. - day, month, year = self - - elif mstridx == 2: - # WTF!? 
- if self[1] > 31: - # 01-99-Jan - day, year, month = self - else: - # 99-01-Jan - year, day, month = self - - else: - if (self[0] > 31 or - self.ystridx == 0 or - (yearfirst and self[1] <= 12 and self[2] <= 31)): - # 99-01-01 - if dayfirst and self[2] <= 12: - year, day, month = self - else: - year, month, day = self - elif self[0] > 12 or (dayfirst and self[1] <= 12): - # 13-01-01 - day, month, year = self - else: - # 01-13-01 - month, day, year = self - - return year, month, day - - -class parser(object): - def __init__(self, info=None): - self.info = info or parserinfo() - - def parse(self, timestr, default=None, - ignoretz=False, tzinfos=None, **kwargs): - """ - Parse the date/time string into a :class:`datetime.datetime` object. - - :param timestr: - Any date/time string using the supported formats. - - :param default: - The default datetime object, if this is a datetime object and not - ``None``, elements specified in ``timestr`` replace elements in the - default object. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a - naive :class:`datetime.datetime` object is returned. - - :param tzinfos: - Additional time zone names / aliases which may be present in the - string. This argument maps time zone names (and optionally offsets - from those time zones) to time zones. This parameter can be a - dictionary with timezone aliases mapping time zone names to time - zones or a function taking two parameters (``tzname`` and - ``tzoffset``) and returning a time zone. - - The timezones to which the names are mapped can be an integer - offset from UTC in seconds or a :class:`tzinfo` object. - - .. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from dateutil.parser import parse - >>> from dateutil.tz import gettz - >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} - >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) - >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, - tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) - - This parameter is ignored if ``ignoretz`` is set. - - :param \\*\\*kwargs: - Keyword arguments as passed to ``_parse()``. - - :return: - Returns a :class:`datetime.datetime` object or, if the - ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the - first element being a :class:`datetime.datetime` object, the second - a tuple containing the fuzzy tokens. - - :raises ParserError: - Raised for invalid or unknown string format, if the provided - :class:`tzinfo` is not in a valid format, or if an invalid date - would be created. - - :raises TypeError: - Raised for non-string or character stream input. - - :raises OverflowError: - Raised if the parsed date exceeds the largest valid C integer on - your system. 
- """ - - if default is None: - default = datetime.datetime.now().replace(hour=0, minute=0, - second=0, microsecond=0) - - res, skipped_tokens = self._parse(timestr, **kwargs) - - if res is None: - raise ParserError("Unknown string format: %s", timestr) - - if len(res) == 0: - raise ParserError("String does not contain a date: %s", timestr) - - try: - ret = self._build_naive(res, default) - except ValueError as e: - six.raise_from(ParserError(str(e) + ": %s", timestr), e) - - if not ignoretz: - ret = self._build_tzaware(ret, res, tzinfos) - - if kwargs.get('fuzzy_with_tokens', False): - return ret, skipped_tokens - else: - return ret - - class _result(_resultbase): - __slots__ = ["year", "month", "day", "weekday", - "hour", "minute", "second", "microsecond", - "tzname", "tzoffset", "ampm","any_unused_tokens"] - - def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, - fuzzy_with_tokens=False): - """ - Private method which performs the heavy lifting of parsing, called from - ``parse()``, which passes on its ``kwargs`` to this function. - - :param timestr: - The string to parse. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM - and YMD. If set to ``None``, this value is retrieved from the - current :class:`parserinfo` object (which itself defaults to - ``False``). - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken - to be the year, otherwise the last number is taken to be the year. - If this is set to ``None``, the value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param fuzzy: - Whether to allow fuzzy parsing, allowing for string like "Today is - January 1, 2047 at 8:21:00AM". - - :param fuzzy_with_tokens: - If ``True``, ``fuzzy`` is automatically set to True, and the parser - will return a tuple where the first element is the parsed - :class:`datetime.datetime` datetimestamp and the second element is - a tuple containing the portions of the string which were ignored: - - .. 
doctest:: - - >>> from dateutil.parser import parse - >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) - (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) - - """ - if fuzzy_with_tokens: - fuzzy = True - - info = self.info - - if dayfirst is None: - dayfirst = info.dayfirst - - if yearfirst is None: - yearfirst = info.yearfirst - - res = self._result() - l = _timelex.split(timestr) # Splits the timestr into tokens - - skipped_idxs = [] - - # year/month/day list - ymd = _ymd() - - len_l = len(l) - i = 0 - try: - while i < len_l: - - # Check if it's a number - value_repr = l[i] - try: - value = float(value_repr) - except ValueError: - value = None - - if value is not None: - # Numeric token - i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) - - # Check weekday - elif info.weekday(l[i]) is not None: - value = info.weekday(l[i]) - res.weekday = value - - # Check month name - elif info.month(l[i]) is not None: - value = info.month(l[i]) - ymd.append(value, 'M') - - if i + 1 < len_l: - if l[i + 1] in ('-', '/'): - # Jan-01[-99] - sep = l[i + 1] - ymd.append(l[i + 2]) - - if i + 3 < len_l and l[i + 3] == sep: - # Jan-01-99 - ymd.append(l[i + 4]) - i += 2 - - i += 2 - - elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and - info.pertain(l[i + 2])): - # Jan of 01 - # In this case, 01 is clearly year - if l[i + 4].isdigit(): - # Convert it here to become unambiguous - value = int(l[i + 4]) - year = str(info.convertyear(value)) - ymd.append(year, 'Y') - else: - # Wrong guess - pass - # TODO: not hit in tests - i += 4 - - # Check am/pm - elif info.ampm(l[i]) is not None: - value = info.ampm(l[i]) - val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) - - if val_is_ampm: - res.hour = self._adjust_ampm(res.hour, value) - res.ampm = value - - elif fuzzy: - skipped_idxs.append(i) - - # Check for a timezone name - elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): - res.tzname = l[i] - res.tzoffset = info.tzoffset(res.tzname) - - # Check for something like GMT+3, or BRST+3. Notice - # that it doesn't mean "I am 3 hours after GMT", but - # "my time +3 is GMT". If found, we reverse the - # logic so that timezone parsing code will get it - # right. - if i + 1 < len_l and l[i + 1] in ('+', '-'): - l[i + 1] = ('+', '-')[l[i + 1] == '+'] - res.tzoffset = None - if info.utczone(res.tzname): - # With something like GMT+3, the timezone - # is *not* GMT. - res.tzname = None - - # Check for a numbered timezone - elif res.hour is not None and l[i] in ('+', '-'): - signal = (-1, 1)[l[i] == '+'] - len_li = len(l[i + 1]) - - # TODO: check that l[i + 1] is integer? - if len_li == 4: - # -0300 - hour_offset = int(l[i + 1][:2]) - min_offset = int(l[i + 1][2:]) - elif i + 2 < len_l and l[i + 2] == ':': - # -03:00 - hour_offset = int(l[i + 1]) - min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
- i += 2 - elif len_li <= 2: - # -[0]3 - hour_offset = int(l[i + 1][:2]) - min_offset = 0 - else: - raise ValueError(timestr) - - res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) - - # Look for a timezone name between parenthesis - if (i + 5 < len_l and - info.jump(l[i + 2]) and l[i + 3] == '(' and - l[i + 5] == ')' and - 3 <= len(l[i + 4]) and - self._could_be_tzname(res.hour, res.tzname, - None, l[i + 4])): - # -0300 (BRST) - res.tzname = l[i + 4] - i += 4 - - i += 1 - - # Check jumps - elif not (info.jump(l[i]) or fuzzy): - raise ValueError(timestr) - - else: - skipped_idxs.append(i) - i += 1 - - # Process year/month/day - year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) - - res.century_specified = ymd.century_specified - res.year = year - res.month = month - res.day = day - - except (IndexError, ValueError): - return None, None - - if not info.validate(res): - return None, None - - if fuzzy_with_tokens: - skipped_tokens = self._recombine_skipped(l, skipped_idxs) - return res, tuple(skipped_tokens) - else: - return res, None - - def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): - # Token is a number - value_repr = tokens[idx] - try: - value = self._to_decimal(value_repr) - except Exception as e: - six.raise_from(ValueError('Unknown numeric token'), e) - - len_li = len(value_repr) - - len_l = len(tokens) - - if (len(ymd) == 3 and len_li in (2, 4) and - res.hour is None and - (idx + 1 >= len_l or - (tokens[idx + 1] != ':' and - info.hms(tokens[idx + 1]) is None))): - # 19990101T23[59] - s = tokens[idx] - res.hour = int(s[:2]) - - if len_li == 4: - res.minute = int(s[2:]) - - elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): - # YYMMDD or HHMMSS[.ss] - s = tokens[idx] - - if not ymd and '.' not in tokens[idx]: - ymd.append(s[:2]) - ymd.append(s[2:4]) - ymd.append(s[4:]) - else: - # 19990101T235959[.59] - - # TODO: Check if res attributes already set. - res.hour = int(s[:2]) - res.minute = int(s[2:4]) - res.second, res.microsecond = self._parsems(s[4:]) - - elif len_li in (8, 12, 14): - # YYYYMMDD - s = tokens[idx] - ymd.append(s[:4], 'Y') - ymd.append(s[4:6]) - ymd.append(s[6:8]) - - if len_li > 8: - res.hour = int(s[8:10]) - res.minute = int(s[10:12]) - - if len_li > 12: - res.second = int(s[12:]) - - elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: - # HH[ ]h or MM[ ]m or SS[.ss][ ]s - hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) - (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) - if hms is not None: - # TODO: checking that hour/minute/second are not - # already set? - self._assign_hms(res, value_repr, hms) - - elif idx + 2 < len_l and tokens[idx + 1] == ':': - # HH:MM[:SS[.ss]] - res.hour = int(value) - value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
- (res.minute, res.second) = self._parse_min_sec(value) - - if idx + 4 < len_l and tokens[idx + 3] == ':': - res.second, res.microsecond = self._parsems(tokens[idx + 4]) - - idx += 2 - - idx += 2 - - elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): - sep = tokens[idx + 1] - ymd.append(value_repr) - - if idx + 2 < len_l and not info.jump(tokens[idx + 2]): - if tokens[idx + 2].isdigit(): - # 01-01[-01] - ymd.append(tokens[idx + 2]) - else: - # 01-Jan[-01] - value = info.month(tokens[idx + 2]) - - if value is not None: - ymd.append(value, 'M') - else: - raise ValueError() - - if idx + 3 < len_l and tokens[idx + 3] == sep: - # We have three members - value = info.month(tokens[idx + 4]) - - if value is not None: - ymd.append(value, 'M') - else: - ymd.append(tokens[idx + 4]) - idx += 2 - - idx += 1 - idx += 1 - - elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): - if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: - # 12 am - hour = int(value) - res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) - idx += 1 - else: - # Year, month or day - ymd.append(value) - idx += 1 - - elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): - # 12am - hour = int(value) - res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) - idx += 1 - - elif ymd.could_be_day(value): - ymd.append(value) - - elif not fuzzy: - raise ValueError() - - return idx - - def _find_hms_idx(self, idx, tokens, info, allow_jump): - len_l = len(tokens) - - if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: - # There is an "h", "m", or "s" label following this token. We take - # assign the upcoming label to the current token. - # e.g. the "12" in 12h" - hms_idx = idx + 1 - - elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and - info.hms(tokens[idx+2]) is not None): - # There is a space and then an "h", "m", or "s" label. - # e.g. the "12" in "12 h" - hms_idx = idx + 2 - - elif idx > 0 and info.hms(tokens[idx-1]) is not None: - # There is a "h", "m", or "s" preceding this token. Since neither - # of the previous cases was hit, there is no label following this - # token, so we use the previous label. - # e.g. the "04" in "12h04" - hms_idx = idx-1 - - elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and - info.hms(tokens[idx-2]) is not None): - # If we are looking at the final token, we allow for a - # backward-looking check to skip over a space. - # TODO: Are we sure this is the right condition here? - hms_idx = idx - 2 - - else: - hms_idx = None - - return hms_idx - - def _assign_hms(self, res, value_repr, hms): - # See GH issue #427, fixing float rounding - value = self._to_decimal(value_repr) - - if hms == 0: - # Hour - res.hour = int(value) - if value % 1: - res.minute = int(60*(value % 1)) - - elif hms == 1: - (res.minute, res.second) = self._parse_min_sec(value) - - elif hms == 2: - (res.second, res.microsecond) = self._parsems(value_repr) - - def _could_be_tzname(self, hour, tzname, tzoffset, token): - return (hour is not None and - tzname is None and - tzoffset is None and - len(token) <= 5 and - (all(x in string.ascii_uppercase for x in token) - or token in self.info.UTCZONE)) - - def _ampm_valid(self, hour, ampm, fuzzy): - """ - For fuzzy parsing, 'a' or 'am' (both valid English words) - may erroneously trigger the AM/PM flag. Deal with that - here. - """ - val_is_ampm = True - - # If there's already an AM/PM flag, this one isn't one. 
- if fuzzy and ampm is not None: - val_is_ampm = False - - # If AM/PM is found and hour is not, raise a ValueError - if hour is None: - if fuzzy: - val_is_ampm = False - else: - raise ValueError('No hour specified with AM or PM flag.') - elif not 0 <= hour <= 12: - # If AM/PM is found, it's a 12 hour clock, so raise - # an error for invalid range - if fuzzy: - val_is_ampm = False - else: - raise ValueError('Invalid hour specified for 12-hour clock.') - - return val_is_ampm - - def _adjust_ampm(self, hour, ampm): - if hour < 12 and ampm == 1: - hour += 12 - elif hour == 12 and ampm == 0: - hour = 0 - return hour - - def _parse_min_sec(self, value): - # TODO: Every usage of this function sets res.second to the return - # value. Are there any cases where second will be returned as None and - # we *don't* want to set res.second = None? - minute = int(value) - second = None - - sec_remainder = value % 1 - if sec_remainder: - second = int(60 * sec_remainder) - return (minute, second) - - def _parse_hms(self, idx, tokens, info, hms_idx): - # TODO: Is this going to admit a lot of false-positives for when we - # just happen to have digits and "h", "m" or "s" characters in non-date - # text? I guess hex hashes won't have that problem, but there's plenty - # of random junk out there. - if hms_idx is None: - hms = None - new_idx = idx - elif hms_idx > idx: - hms = info.hms(tokens[hms_idx]) - new_idx = hms_idx - else: - # Looking backwards, increment one. - hms = info.hms(tokens[hms_idx]) + 1 - new_idx = idx - - return (new_idx, hms) - - # ------------------------------------------------------------------ - # Handling for individual tokens. These are kept as methods instead - # of functions for the sake of customizability via subclassing. - - def _parsems(self, value): - """Parse a I[.F] seconds value into (seconds, microseconds).""" - if "." not in value: - return int(value), 0 - else: - i, f = value.split(".") - return int(i), int(f.ljust(6, "0")[:6]) - - def _to_decimal(self, val): - try: - decimal_value = Decimal(val) - # See GH 662, edge case, infinite value should not be converted - # via `_to_decimal` - if not decimal_value.is_finite(): - raise ValueError("Converted decimal value is infinite or NaN") - except Exception as e: - msg = "Could not convert %s to decimal" % val - six.raise_from(ValueError(msg), e) - else: - return decimal_value - - # ------------------------------------------------------------------ - # Post-Parsing construction of datetime output. These are kept as - # methods instead of functions for the sake of customizability via - # subclassing. 
- - def _build_tzinfo(self, tzinfos, tzname, tzoffset): - if callable(tzinfos): - tzdata = tzinfos(tzname, tzoffset) - else: - tzdata = tzinfos.get(tzname) - # handle case where tzinfo is paased an options that returns None - # eg tzinfos = {'BRST' : None} - if isinstance(tzdata, datetime.tzinfo) or tzdata is None: - tzinfo = tzdata - elif isinstance(tzdata, text_type): - tzinfo = tz.tzstr(tzdata) - elif isinstance(tzdata, integer_types): - tzinfo = tz.tzoffset(tzname, tzdata) - else: - raise TypeError("Offset must be tzinfo subclass, tz string, " - "or int offset.") - return tzinfo - - def _build_tzaware(self, naive, res, tzinfos): - if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): - tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) - aware = naive.replace(tzinfo=tzinfo) - aware = self._assign_tzname(aware, res.tzname) - - elif res.tzname and res.tzname in time.tzname: - aware = naive.replace(tzinfo=tz.tzlocal()) - - # Handle ambiguous local datetime - aware = self._assign_tzname(aware, res.tzname) - - # This is mostly relevant for winter GMT zones parsed in the UK - if (aware.tzname() != res.tzname and - res.tzname in self.info.UTCZONE): - aware = aware.replace(tzinfo=tz.UTC) - - elif res.tzoffset == 0: - aware = naive.replace(tzinfo=tz.UTC) - - elif res.tzoffset: - aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) - - elif not res.tzname and not res.tzoffset: - # i.e. no timezone information was found. - aware = naive - - elif res.tzname: - # tz-like string was parsed but we don't know what to do - # with it - warnings.warn("tzname {tzname} identified but not understood. " - "Pass `tzinfos` argument in order to correctly " - "return a timezone-aware datetime. In a future " - "version, this will raise an " - "exception.".format(tzname=res.tzname), - category=UnknownTimezoneWarning) - aware = naive - - return aware - - def _build_naive(self, res, default): - repl = {} - for attr in ("year", "month", "day", "hour", - "minute", "second", "microsecond"): - value = getattr(res, attr) - if value is not None: - repl[attr] = value - - if 'day' not in repl: - # If the default day exceeds the last day of the month, fall back - # to the end of the month. - cyear = default.year if res.year is None else res.year - cmonth = default.month if res.month is None else res.month - cday = default.day if res.day is None else res.day - - if cday > monthrange(cyear, cmonth)[1]: - repl['day'] = monthrange(cyear, cmonth)[1] - - naive = default.replace(**repl) - - if res.weekday is not None and not res.day: - naive = naive + relativedelta.relativedelta(weekday=res.weekday) - - return naive - - def _assign_tzname(self, dt, tzname): - if dt.tzname() != tzname: - new_dt = tz.enfold(dt, fold=1) - if new_dt.tzname() == tzname: - return new_dt - - return dt - - def _recombine_skipped(self, tokens, skipped_idxs): - """ - >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] - >>> skipped_idxs = [0, 1, 2, 5] - >>> _recombine_skipped(tokens, skipped_idxs) - ["foo bar", "baz"] - """ - skipped_tokens = [] - for i, idx in enumerate(sorted(skipped_idxs)): - if i > 0 and idx - 1 == skipped_idxs[i - 1]: - skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] - else: - skipped_tokens.append(tokens[idx]) - - return skipped_tokens - - -DEFAULTPARSER = parser() - - -def parse(timestr, parserinfo=None, **kwargs): - """ - - Parse a string in one of the supported formats, using the - ``parserinfo`` parameters. - - :param timestr: - A string containing a date/time stamp. 
- - :param parserinfo: - A :class:`parserinfo` object containing parameters for the parser. - If ``None``, the default arguments to the :class:`parserinfo` - constructor are used. - - The ``**kwargs`` parameter takes the following keyword arguments: - - :param default: - The default datetime object, if this is a datetime object and not - ``None``, elements specified in ``timestr`` replace elements in the - default object. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a naive - :class:`datetime` object is returned. - - :param tzinfos: - Additional time zone names / aliases which may be present in the - string. This argument maps time zone names (and optionally offsets - from those time zones) to time zones. This parameter can be a - dictionary with timezone aliases mapping time zone names to time - zones or a function taking two parameters (``tzname`` and - ``tzoffset``) and returning a time zone. - - The timezones to which the names are mapped can be an integer - offset from UTC in seconds or a :class:`tzinfo` object. - - .. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from dateutil.parser import parse - >>> from dateutil.tz import gettz - >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} - >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) - >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, - tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) - - This parameter is ignored if ``ignoretz`` is set. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM and - YMD. If set to ``None``, this value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken to - be the year, otherwise the last number is taken to be the year. If - this is set to ``None``, the value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param fuzzy: - Whether to allow fuzzy parsing, allowing for string like "Today is - January 1, 2047 at 8:21:00AM". - - :param fuzzy_with_tokens: - If ``True``, ``fuzzy`` is automatically set to True, and the parser - will return a tuple where the first element is the parsed - :class:`datetime.datetime` datetimestamp and the second element is - a tuple containing the portions of the string which were ignored: - - .. doctest:: - - >>> from dateutil.parser import parse - >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) - (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) - - :return: - Returns a :class:`datetime.datetime` object or, if the - ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the - first element being a :class:`datetime.datetime` object, the second - a tuple containing the fuzzy tokens. - - :raises ParserError: - Raised for invalid or unknown string formats, if the provided - :class:`tzinfo` is not in a valid format, or if an invalid date would - be created. - - :raises OverflowError: - Raised if the parsed date exceeds the largest valid C integer on - your system. 
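# Usage sketch for the module-level parse() documented above (illustrative
# only, not part of the deleted file; assumes python-dateutil >= 2.8.1 for
# ParserError). dayfirst/yearfirst resolve ambiguous all-numeric dates,
# fuzzy skips non-date words, and unparseable input raises ParserError,
# a ValueError subclass.
from dateutil.parser import parse, ParserError

parse("01/05/09")                   # -> datetime.datetime(2009, 1, 5, 0, 0)  (month first by default)
parse("01/05/09", dayfirst=True)    # -> datetime.datetime(2009, 5, 1, 0, 0)
parse("01/05/09", yearfirst=True)   # -> datetime.datetime(2001, 5, 9, 0, 0)

parse("Today is January 1, 2047 at 8:21:00AM", fuzzy=True)
# -> datetime.datetime(2047, 1, 1, 8, 21)

try:
    parse("not a date")
except ParserError as exc:
    print(exc)                      # Unknown string format: not a date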
- """ - if parserinfo: - return parser(parserinfo).parse(timestr, **kwargs) - else: - return DEFAULTPARSER.parse(timestr, **kwargs) - - -class _tzparser(object): - - class _result(_resultbase): - - __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", - "start", "end"] - - class _attr(_resultbase): - __slots__ = ["month", "week", "weekday", - "yday", "jyday", "day", "time"] - - def __repr__(self): - return self._repr("") - - def __init__(self): - _resultbase.__init__(self) - self.start = self._attr() - self.end = self._attr() - - def parse(self, tzstr): - res = self._result() - l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] - used_idxs = list() - try: - - len_l = len(l) - - i = 0 - while i < len_l: - # BRST+3[BRDT[+2]] - j = i - while j < len_l and not [x for x in l[j] - if x in "0123456789:,-+"]: - j += 1 - if j != i: - if not res.stdabbr: - offattr = "stdoffset" - res.stdabbr = "".join(l[i:j]) - else: - offattr = "dstoffset" - res.dstabbr = "".join(l[i:j]) - - for ii in range(j): - used_idxs.append(ii) - i = j - if (i < len_l and (l[i] in ('+', '-') or l[i][0] in - "0123456789")): - if l[i] in ('+', '-'): - # Yes, that's right. See the TZ variable - # documentation. - signal = (1, -1)[l[i] == '+'] - used_idxs.append(i) - i += 1 - else: - signal = -1 - len_li = len(l[i]) - if len_li == 4: - # -0300 - setattr(res, offattr, (int(l[i][:2]) * 3600 + - int(l[i][2:]) * 60) * signal) - elif i + 1 < len_l and l[i + 1] == ':': - # -03:00 - setattr(res, offattr, - (int(l[i]) * 3600 + - int(l[i + 2]) * 60) * signal) - used_idxs.append(i) - i += 2 - elif len_li <= 2: - # -[0]3 - setattr(res, offattr, - int(l[i][:2]) * 3600 * signal) - else: - return None - used_idxs.append(i) - i += 1 - if res.dstabbr: - break - else: - break - - - if i < len_l: - for j in range(i, len_l): - if l[j] == ';': - l[j] = ',' - - assert l[i] == ',' - - i += 1 - - if i >= len_l: - pass - elif (8 <= l.count(',') <= 9 and - not [y for x in l[i:] if x != ',' - for y in x if y not in "0123456789+-"]): - # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] - for x in (res.start, res.end): - x.month = int(l[i]) - used_idxs.append(i) - i += 2 - if l[i] == '-': - value = int(l[i + 1]) * -1 - used_idxs.append(i) - i += 1 - else: - value = int(l[i]) - used_idxs.append(i) - i += 2 - if value: - x.week = value - x.weekday = (int(l[i]) - 1) % 7 - else: - x.day = int(l[i]) - used_idxs.append(i) - i += 2 - x.time = int(l[i]) - used_idxs.append(i) - i += 2 - if i < len_l: - if l[i] in ('-', '+'): - signal = (-1, 1)[l[i] == "+"] - used_idxs.append(i) - i += 1 - else: - signal = 1 - used_idxs.append(i) - res.dstoffset = (res.stdoffset + int(l[i]) * signal) - - # This was a made-up format that is not in normal use - warn(('Parsed time zone "%s"' % tzstr) + - 'is in a non-standard dateutil-specific format, which ' + - 'is now deprecated; support for parsing this format ' + - 'will be removed in future versions. 
It is recommended ' + - 'that you switch to a standard format like the GNU ' + - 'TZ variable format.', tz.DeprecatedTzFormatWarning) - elif (l.count(',') == 2 and l[i:].count('/') <= 2 and - not [y for x in l[i:] if x not in (',', '/', 'J', 'M', - '.', '-', ':') - for y in x if y not in "0123456789"]): - for x in (res.start, res.end): - if l[i] == 'J': - # non-leap year day (1 based) - used_idxs.append(i) - i += 1 - x.jyday = int(l[i]) - elif l[i] == 'M': - # month[-.]week[-.]weekday - used_idxs.append(i) - i += 1 - x.month = int(l[i]) - used_idxs.append(i) - i += 1 - assert l[i] in ('-', '.') - used_idxs.append(i) - i += 1 - x.week = int(l[i]) - if x.week == 5: - x.week = -1 - used_idxs.append(i) - i += 1 - assert l[i] in ('-', '.') - used_idxs.append(i) - i += 1 - x.weekday = (int(l[i]) - 1) % 7 - else: - # year day (zero based) - x.yday = int(l[i]) + 1 - - used_idxs.append(i) - i += 1 - - if i < len_l and l[i] == '/': - used_idxs.append(i) - i += 1 - # start time - len_li = len(l[i]) - if len_li == 4: - # -0300 - x.time = (int(l[i][:2]) * 3600 + - int(l[i][2:]) * 60) - elif i + 1 < len_l and l[i + 1] == ':': - # -03:00 - x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 - used_idxs.append(i) - i += 2 - if i + 1 < len_l and l[i + 1] == ':': - used_idxs.append(i) - i += 2 - x.time += int(l[i]) - elif len_li <= 2: - # -[0]3 - x.time = (int(l[i][:2]) * 3600) - else: - return None - used_idxs.append(i) - i += 1 - - assert i == len_l or l[i] == ',' - - i += 1 - - assert i >= len_l - - except (IndexError, ValueError, AssertionError): - return None - - unused_idxs = set(range(len_l)).difference(used_idxs) - res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) - return res - - -DEFAULTTZPARSER = _tzparser() - - -def _parsetz(tzstr): - return DEFAULTTZPARSER.parse(tzstr) - - -class ParserError(ValueError): - """Exception subclass used for any failure to parse a datetime string. - - This is a subclass of :py:exc:`ValueError`, and should be raised any time - earlier versions of ``dateutil`` would have raised ``ValueError``. - - .. versionadded:: 2.8.1 - """ - def __str__(self): - try: - return self.args[0] % self.args[1:] - except (TypeError, IndexError): - return super(ParserError, self).__str__() - - def __repr__(self): - args = ", ".join("'%s'" % arg for arg in self.args) - return "%s(%s)" % (self.__class__.__name__, args) - - -class UnknownTimezoneWarning(RuntimeWarning): - """Raised when the parser finds a timezone it cannot parse into a tzinfo. - - .. versionadded:: 2.7.0 - """ -# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/parser/isoparser.py b/venv/Lib/site-packages/dateutil/parser/isoparser.py deleted file mode 100644 index 7060087..0000000 --- a/venv/Lib/site-packages/dateutil/parser/isoparser.py +++ /dev/null @@ -1,416 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a parser for ISO-8601 strings - -It is intended to support all valid date, time and datetime formats per the -ISO-8601 specification. 
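# Usage sketch for the isoparse() entry point defined in this module
# (illustrative only, not part of the deleted file; assumes python-dateutil
# is installed and re-exports it as dateutil.parser.isoparse).
from dateutil.parser import isoparse

isoparse("2024-02-29T13:45:30.250+05:30")
# -> datetime.datetime(2024, 2, 29, 13, 45, 30, 250000, tzinfo=tzoffset(None, 19800))
isoparse("2024-W09-4")
# -> datetime.datetime(2024, 2, 29, 0, 0)   (ISO week date: week 9, Thursday)
isoparse("20240229T1345")
# -> datetime.datetime(2024, 2, 29, 13, 45)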
- -..versionadded:: 2.7.0 -""" -from datetime import datetime, timedelta, time, date -import calendar -from dateutil import tz - -from functools import wraps - -import re -import six - -__all__ = ["isoparse", "isoparser"] - - -def _takes_ascii(f): - @wraps(f) - def func(self, str_in, *args, **kwargs): - # If it's a stream, read the whole thing - str_in = getattr(str_in, 'read', lambda: str_in)() - - # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII - if isinstance(str_in, six.text_type): - # ASCII is the same in UTF-8 - try: - str_in = str_in.encode('ascii') - except UnicodeEncodeError as e: - msg = 'ISO-8601 strings should contain only ASCII characters' - six.raise_from(ValueError(msg), e) - - return f(self, str_in, *args, **kwargs) - - return func - - -class isoparser(object): - def __init__(self, sep=None): - """ - :param sep: - A single character that separates date and time portions. If - ``None``, the parser will accept any single character. - For strict ISO-8601 adherence, pass ``'T'``. - """ - if sep is not None: - if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): - raise ValueError('Separator must be a single, non-numeric ' + - 'ASCII character') - - sep = sep.encode('ascii') - - self._sep = sep - - @_takes_ascii - def isoparse(self, dt_str): - """ - Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. - - An ISO-8601 datetime string consists of a date portion, followed - optionally by a time portion - the date and time portions are separated - by a single character separator, which is ``T`` in the official - standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be - combined with a time portion. - - Supported date formats are: - - Common: - - - ``YYYY`` - - ``YYYY-MM`` - - ``YYYY-MM-DD`` or ``YYYYMMDD`` - - Uncommon: - - - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) - - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day - - The ISO week and day numbering follows the same logic as - :func:`datetime.date.isocalendar`. - - Supported time formats are: - - - ``hh`` - - ``hh:mm`` or ``hhmm`` - - ``hh:mm:ss`` or ``hhmmss`` - - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) - - Midnight is a special case for `hh`, as the standard supports both - 00:00 and 24:00 as a representation. The decimal separator can be - either a dot or a comma. - - - .. caution:: - - Support for fractional components other than seconds is part of the - ISO-8601 standard, but is not currently implemented in this parser. - - Supported time zone offset formats are: - - - `Z` (UTC) - - `±HH:MM` - - `±HHMM` - - `±HH` - - Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, - with the exception of UTC, which will be represented as - :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such - as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. - - :param dt_str: - A string or stream containing only an ISO-8601 datetime string - - :return: - Returns a :class:`datetime.datetime` representing the string. - Unspecified components default to their lowest value. - - .. warning:: - - As of version 2.7.0, the strictness of the parser should not be - considered a stable part of the contract. Any valid ISO-8601 string - that parses correctly with the default settings will continue to - parse correctly in future versions, but invalid strings that - currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not - guaranteed to continue failing in future versions if they encode - a valid date. - - .. 
versionadded:: 2.7.0 - """ - components, pos = self._parse_isodate(dt_str) - - if len(dt_str) > pos: - if self._sep is None or dt_str[pos:pos + 1] == self._sep: - components += self._parse_isotime(dt_str[pos + 1:]) - else: - raise ValueError('String contains unknown ISO components') - - if len(components) > 3 and components[3] == 24: - components[3] = 0 - return datetime(*components) + timedelta(days=1) - - return datetime(*components) - - @_takes_ascii - def parse_isodate(self, datestr): - """ - Parse the date portion of an ISO string. - - :param datestr: - The string portion of an ISO string, without a separator - - :return: - Returns a :class:`datetime.date` object - """ - components, pos = self._parse_isodate(datestr) - if pos < len(datestr): - raise ValueError('String contains unknown ISO ' + - 'components: {!r}'.format(datestr.decode('ascii'))) - return date(*components) - - @_takes_ascii - def parse_isotime(self, timestr): - """ - Parse the time portion of an ISO string. - - :param timestr: - The time portion of an ISO string, without a separator - - :return: - Returns a :class:`datetime.time` object - """ - components = self._parse_isotime(timestr) - if components[0] == 24: - components[0] = 0 - return time(*components) - - @_takes_ascii - def parse_tzstr(self, tzstr, zero_as_utc=True): - """ - Parse a valid ISO time zone string. - - See :func:`isoparser.isoparse` for details on supported formats. - - :param tzstr: - A string representing an ISO time zone offset - - :param zero_as_utc: - Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones - - :return: - Returns :class:`dateutil.tz.tzoffset` for offsets and - :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is - specified) offsets equivalent to UTC. - """ - return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) - - # Constants - _DATE_SEP = b'-' - _TIME_SEP = b':' - _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') - - def _parse_isodate(self, dt_str): - try: - return self._parse_isodate_common(dt_str) - except ValueError: - return self._parse_isodate_uncommon(dt_str) - - def _parse_isodate_common(self, dt_str): - len_str = len(dt_str) - components = [1, 1, 1] - - if len_str < 4: - raise ValueError('ISO string too short') - - # Year - components[0] = int(dt_str[0:4]) - pos = 4 - if pos >= len_str: - return components, pos - - has_sep = dt_str[pos:pos + 1] == self._DATE_SEP - if has_sep: - pos += 1 - - # Month - if len_str - pos < 2: - raise ValueError('Invalid common month') - - components[1] = int(dt_str[pos:pos + 2]) - pos += 2 - - if pos >= len_str: - if has_sep: - return components, pos - else: - raise ValueError('Invalid ISO format') - - if has_sep: - if dt_str[pos:pos + 1] != self._DATE_SEP: - raise ValueError('Invalid separator in ISO string') - pos += 1 - - # Day - if len_str - pos < 2: - raise ValueError('Invalid common day') - components[2] = int(dt_str[pos:pos + 2]) - return components, pos + 2 - - def _parse_isodate_uncommon(self, dt_str): - if len(dt_str) < 4: - raise ValueError('ISO string too short') - - # All ISO formats start with the year - year = int(dt_str[0:4]) - - has_sep = dt_str[4:5] == self._DATE_SEP - - pos = 4 + has_sep # Skip '-' if it's there - if dt_str[pos:pos + 1] == b'W': - # YYYY-?Www-?D? 
- pos += 1 - weekno = int(dt_str[pos:pos + 2]) - pos += 2 - - dayno = 1 - if len(dt_str) > pos: - if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: - raise ValueError('Inconsistent use of dash separator') - - pos += has_sep - - dayno = int(dt_str[pos:pos + 1]) - pos += 1 - - base_date = self._calculate_weekdate(year, weekno, dayno) - else: - # YYYYDDD or YYYY-DDD - if len(dt_str) - pos < 3: - raise ValueError('Invalid ordinal day') - - ordinal_day = int(dt_str[pos:pos + 3]) - pos += 3 - - if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): - raise ValueError('Invalid ordinal day' + - ' {} for year {}'.format(ordinal_day, year)) - - base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) - - components = [base_date.year, base_date.month, base_date.day] - return components, pos - - def _calculate_weekdate(self, year, week, day): - """ - Calculate the day of corresponding to the ISO year-week-day calendar. - - This function is effectively the inverse of - :func:`datetime.date.isocalendar`. - - :param year: - The year in the ISO calendar - - :param week: - The week in the ISO calendar - range is [1, 53] - - :param day: - The day in the ISO calendar - range is [1 (MON), 7 (SUN)] - - :return: - Returns a :class:`datetime.date` - """ - if not 0 < week < 54: - raise ValueError('Invalid week: {}'.format(week)) - - if not 0 < day < 8: # Range is 1-7 - raise ValueError('Invalid weekday: {}'.format(day)) - - # Get week 1 for the specific year: - jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it - week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) - - # Now add the specific number of weeks and days to get what we want - week_offset = (week - 1) * 7 + (day - 1) - return week_1 + timedelta(days=week_offset) - - def _parse_isotime(self, timestr): - len_str = len(timestr) - components = [0, 0, 0, 0, None] - pos = 0 - comp = -1 - - if len_str < 2: - raise ValueError('ISO time too short') - - has_sep = False - - while pos < len_str and comp < 5: - comp += 1 - - if timestr[pos:pos + 1] in b'-+Zz': - # Detect time zone boundary - components[-1] = self._parse_tzstr(timestr[pos:]) - pos = len_str - break - - if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP: - has_sep = True - pos += 1 - elif comp == 2 and has_sep: - if timestr[pos:pos+1] != self._TIME_SEP: - raise ValueError('Inconsistent use of colon separator') - pos += 1 - - if comp < 3: - # Hour, minute, second - components[comp] = int(timestr[pos:pos + 2]) - pos += 2 - - if comp == 3: - # Fraction of a second - frac = self._FRACTION_REGEX.match(timestr[pos:]) - if not frac: - continue - - us_str = frac.group(1)[:6] # Truncate to microseconds - components[comp] = int(us_str) * 10**(6 - len(us_str)) - pos += len(frac.group()) - - if pos < len_str: - raise ValueError('Unused components in ISO string') - - if components[0] == 24: - # Standard supports 00:00 and 24:00 as representations of midnight - if any(component != 0 for component in components[1:4]): - raise ValueError('Hour may only be 24 at 24:00:00.000') - - return components - - def _parse_tzstr(self, tzstr, zero_as_utc=True): - if tzstr == b'Z' or tzstr == b'z': - return tz.UTC - - if len(tzstr) not in {3, 5, 6}: - raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') - - if tzstr[0:1] == b'-': - mult = -1 - elif tzstr[0:1] == b'+': - mult = 1 - else: - raise ValueError('Time zone offset requires sign') - - hours = int(tzstr[1:3]) - if len(tzstr) == 3: - minutes = 0 - else: - minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP 
else 3):]) - - if zero_as_utc and hours == 0 and minutes == 0: - return tz.UTC - else: - if minutes > 59: - raise ValueError('Invalid minutes in time zone offset') - - if hours > 23: - raise ValueError('Invalid hours in time zone offset') - - return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) - - -DEFAULT_ISOPARSER = isoparser() -isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/venv/Lib/site-packages/dateutil/relativedelta.py b/venv/Lib/site-packages/dateutil/relativedelta.py deleted file mode 100644 index cd323a5..0000000 --- a/venv/Lib/site-packages/dateutil/relativedelta.py +++ /dev/null @@ -1,599 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import calendar - -import operator -from math import copysign - -from six import integer_types -from warnings import warn - -from ._common import weekday - -MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) - -__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] - - -class relativedelta(object): - """ - The relativedelta type is designed to be applied to an existing datetime and - can replace specific components of that datetime, or represents an interval - of time. - - It is based on the specification of the excellent work done by M.-A. Lemburg - in his - `mx.DateTime `_ extension. - However, notice that this type does *NOT* implement the same algorithm as - his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. - - There are two different ways to build a relativedelta instance. The - first one is passing it two date/datetime classes:: - - relativedelta(datetime1, datetime2) - - The second one is passing it any number of the following keyword arguments:: - - relativedelta(arg1=x,arg2=y,arg3=z...) - - year, month, day, hour, minute, second, microsecond: - Absolute information (argument is singular); adding or subtracting a - relativedelta with absolute information does not perform an arithmetic - operation, but rather REPLACES the corresponding value in the - original datetime with the value(s) in relativedelta. - - years, months, weeks, days, hours, minutes, seconds, microseconds: - Relative information, may be negative (argument is plural); adding - or subtracting a relativedelta with relative information performs - the corresponding arithmetic operation on the original datetime value - with the information in the relativedelta. - - weekday: - One of the weekday instances (MO, TU, etc) available in the - relativedelta module. These instances may receive a parameter N, - specifying the Nth weekday, which could be positive or negative - (like MO(+1) or MO(-2)). Not specifying it is the same as specifying - +1. You can also use an integer, where 0=MO. This argument is always - relative e.g. if the calculated date is already Monday, using MO(1) - or MO(-1) won't change the day. To effectively make it absolute, use - it in combination with the day argument (e.g. day=1, MO(1) for first - Monday of the month). - - leapdays: - Will add given days to the date found, if year is a leap - year, and the date found is post 28 of february. - - yearday, nlyearday: - Set the yearday or the non-leap year day (jump leap days). - These are converted to day/month/leapdays information. - - There are relative and absolute forms of the keyword - arguments. The plural is relative, and the singular is - absolute. For each argument in the order below, the absolute form - is applied first (by setting each attribute to that value) and - then the relative form (by adding the value to the attribute). 
- - The order of attributes considered when this relativedelta is - added to a datetime is: - - 1. Year - 2. Month - 3. Day - 4. Hours - 5. Minutes - 6. Seconds - 7. Microseconds - - Finally, weekday is applied, using the rule described above. - - For example - - >>> from datetime import datetime - >>> from dateutil.relativedelta import relativedelta, MO - >>> dt = datetime(2018, 4, 9, 13, 37, 0) - >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) - >>> dt + delta - datetime.datetime(2018, 4, 2, 14, 37) - - First, the day is set to 1 (the first of the month), then 25 hours - are added, to get to the 2nd day and 14th hour, finally the - weekday is applied, but since the 2nd is already a Monday there is - no effect. - - """ - - def __init__(self, dt1=None, dt2=None, - years=0, months=0, days=0, leapdays=0, weeks=0, - hours=0, minutes=0, seconds=0, microseconds=0, - year=None, month=None, day=None, weekday=None, - yearday=None, nlyearday=None, - hour=None, minute=None, second=None, microsecond=None): - - if dt1 and dt2: - # datetime is a subclass of date. So both must be date - if not (isinstance(dt1, datetime.date) and - isinstance(dt2, datetime.date)): - raise TypeError("relativedelta only diffs datetime/date") - - # We allow two dates, or two datetimes, so we coerce them to be - # of the same type - if (isinstance(dt1, datetime.datetime) != - isinstance(dt2, datetime.datetime)): - if not isinstance(dt1, datetime.datetime): - dt1 = datetime.datetime.fromordinal(dt1.toordinal()) - elif not isinstance(dt2, datetime.datetime): - dt2 = datetime.datetime.fromordinal(dt2.toordinal()) - - self.years = 0 - self.months = 0 - self.days = 0 - self.leapdays = 0 - self.hours = 0 - self.minutes = 0 - self.seconds = 0 - self.microseconds = 0 - self.year = None - self.month = None - self.day = None - self.weekday = None - self.hour = None - self.minute = None - self.second = None - self.microsecond = None - self._has_time = 0 - - # Get year / month delta between the two - months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) - self._set_months(months) - - # Remove the year/month delta so the timedelta is just well-defined - # time units (seconds, days and microseconds) - dtm = self.__radd__(dt2) - - # If we've overshot our target, make an adjustment - if dt1 < dt2: - compare = operator.gt - increment = 1 - else: - compare = operator.lt - increment = -1 - - while compare(dt1, dtm): - months += increment - self._set_months(months) - dtm = self.__radd__(dt2) - - # Get the timedelta between the "months-adjusted" date and dt1 - delta = dt1 - dtm - self.seconds = delta.seconds + delta.days * 86400 - self.microseconds = delta.microseconds - else: - # Check for non-integer values in integer-only quantities - if any(x is not None and x != int(x) for x in (years, months)): - raise ValueError("Non-integer years and months are " - "ambiguous and not currently supported.") - - # Relative information - self.years = int(years) - self.months = int(months) - self.days = days + weeks * 7 - self.leapdays = leapdays - self.hours = hours - self.minutes = minutes - self.seconds = seconds - self.microseconds = microseconds - - # Absolute information - self.year = year - self.month = month - self.day = day - self.hour = hour - self.minute = minute - self.second = second - self.microsecond = microsecond - - if any(x is not None and int(x) != x - for x in (year, month, day, hour, - minute, second, microsecond)): - # For now we'll deprecate floats - later it'll be an error. 
- warn("Non-integer value passed as absolute information. " + - "This is not a well-defined condition and will raise " + - "errors in future versions.", DeprecationWarning) - - if isinstance(weekday, integer_types): - self.weekday = weekdays[weekday] - else: - self.weekday = weekday - - yday = 0 - if nlyearday: - yday = nlyearday - elif yearday: - yday = yearday - if yearday > 59: - self.leapdays = -1 - if yday: - ydayidx = [31, 59, 90, 120, 151, 181, 212, - 243, 273, 304, 334, 366] - for idx, ydays in enumerate(ydayidx): - if yday <= ydays: - self.month = idx+1 - if idx == 0: - self.day = yday - else: - self.day = yday-ydayidx[idx-1] - break - else: - raise ValueError("invalid year day (%d)" % yday) - - self._fix() - - def _fix(self): - if abs(self.microseconds) > 999999: - s = _sign(self.microseconds) - div, mod = divmod(self.microseconds * s, 1000000) - self.microseconds = mod * s - self.seconds += div * s - if abs(self.seconds) > 59: - s = _sign(self.seconds) - div, mod = divmod(self.seconds * s, 60) - self.seconds = mod * s - self.minutes += div * s - if abs(self.minutes) > 59: - s = _sign(self.minutes) - div, mod = divmod(self.minutes * s, 60) - self.minutes = mod * s - self.hours += div * s - if abs(self.hours) > 23: - s = _sign(self.hours) - div, mod = divmod(self.hours * s, 24) - self.hours = mod * s - self.days += div * s - if abs(self.months) > 11: - s = _sign(self.months) - div, mod = divmod(self.months * s, 12) - self.months = mod * s - self.years += div * s - if (self.hours or self.minutes or self.seconds or self.microseconds - or self.hour is not None or self.minute is not None or - self.second is not None or self.microsecond is not None): - self._has_time = 1 - else: - self._has_time = 0 - - @property - def weeks(self): - return int(self.days / 7.0) - - @weeks.setter - def weeks(self, value): - self.days = self.days - (self.weeks * 7) + value * 7 - - def _set_months(self, months): - self.months = months - if abs(self.months) > 11: - s = _sign(self.months) - div, mod = divmod(self.months * s, 12) - self.months = mod * s - self.years = div * s - else: - self.years = 0 - - def normalized(self): - """ - Return a version of this object represented entirely using integer - values for the relative attributes. - - >>> relativedelta(days=1.5, hours=2).normalized() - relativedelta(days=+1, hours=+14) - - :return: - Returns a :class:`dateutil.relativedelta.relativedelta` object. 
- """ - # Cascade remainders down (rounding each to roughly nearest microsecond) - days = int(self.days) - - hours_f = round(self.hours + 24 * (self.days - days), 11) - hours = int(hours_f) - - minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) - minutes = int(minutes_f) - - seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) - seconds = int(seconds_f) - - microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) - - # Constructor carries overflow back up with call to _fix() - return self.__class__(years=self.years, months=self.months, - days=days, hours=hours, minutes=minutes, - seconds=seconds, microseconds=microseconds, - leapdays=self.leapdays, year=self.year, - month=self.month, day=self.day, - weekday=self.weekday, hour=self.hour, - minute=self.minute, second=self.second, - microsecond=self.microsecond) - - def __add__(self, other): - if isinstance(other, relativedelta): - return self.__class__(years=other.years + self.years, - months=other.months + self.months, - days=other.days + self.days, - hours=other.hours + self.hours, - minutes=other.minutes + self.minutes, - seconds=other.seconds + self.seconds, - microseconds=(other.microseconds + - self.microseconds), - leapdays=other.leapdays or self.leapdays, - year=(other.year if other.year is not None - else self.year), - month=(other.month if other.month is not None - else self.month), - day=(other.day if other.day is not None - else self.day), - weekday=(other.weekday if other.weekday is not None - else self.weekday), - hour=(other.hour if other.hour is not None - else self.hour), - minute=(other.minute if other.minute is not None - else self.minute), - second=(other.second if other.second is not None - else self.second), - microsecond=(other.microsecond if other.microsecond - is not None else - self.microsecond)) - if isinstance(other, datetime.timedelta): - return self.__class__(years=self.years, - months=self.months, - days=self.days + other.days, - hours=self.hours, - minutes=self.minutes, - seconds=self.seconds + other.seconds, - microseconds=self.microseconds + other.microseconds, - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - if not isinstance(other, datetime.date): - return NotImplemented - elif self._has_time and not isinstance(other, datetime.datetime): - other = datetime.datetime.fromordinal(other.toordinal()) - year = (self.year or other.year)+self.years - month = self.month or other.month - if self.months: - assert 1 <= abs(self.months) <= 12 - month += self.months - if month > 12: - year += 1 - month -= 12 - elif month < 1: - year -= 1 - month += 12 - day = min(calendar.monthrange(year, month)[1], - self.day or other.day) - repl = {"year": year, "month": month, "day": day} - for attr in ["hour", "minute", "second", "microsecond"]: - value = getattr(self, attr) - if value is not None: - repl[attr] = value - days = self.days - if self.leapdays and month > 2 and calendar.isleap(year): - days += self.leapdays - ret = (other.replace(**repl) - + datetime.timedelta(days=days, - hours=self.hours, - minutes=self.minutes, - seconds=self.seconds, - microseconds=self.microseconds)) - if self.weekday: - weekday, nth = self.weekday.weekday, self.weekday.n or 1 - jumpdays = (abs(nth) - 1) * 7 - if nth > 0: - jumpdays += (7 - ret.weekday() + weekday) % 7 - else: - jumpdays += (ret.weekday() - weekday) % 7 - jumpdays *= -1 - ret += 
datetime.timedelta(days=jumpdays) - return ret - - def __radd__(self, other): - return self.__add__(other) - - def __rsub__(self, other): - return self.__neg__().__radd__(other) - - def __sub__(self, other): - if not isinstance(other, relativedelta): - return NotImplemented # In case the other object defines __rsub__ - return self.__class__(years=self.years - other.years, - months=self.months - other.months, - days=self.days - other.days, - hours=self.hours - other.hours, - minutes=self.minutes - other.minutes, - seconds=self.seconds - other.seconds, - microseconds=self.microseconds - other.microseconds, - leapdays=self.leapdays or other.leapdays, - year=(self.year if self.year is not None - else other.year), - month=(self.month if self.month is not None else - other.month), - day=(self.day if self.day is not None else - other.day), - weekday=(self.weekday if self.weekday is not None else - other.weekday), - hour=(self.hour if self.hour is not None else - other.hour), - minute=(self.minute if self.minute is not None else - other.minute), - second=(self.second if self.second is not None else - other.second), - microsecond=(self.microsecond if self.microsecond - is not None else - other.microsecond)) - - def __abs__(self): - return self.__class__(years=abs(self.years), - months=abs(self.months), - days=abs(self.days), - hours=abs(self.hours), - minutes=abs(self.minutes), - seconds=abs(self.seconds), - microseconds=abs(self.microseconds), - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - def __neg__(self): - return self.__class__(years=-self.years, - months=-self.months, - days=-self.days, - hours=-self.hours, - minutes=-self.minutes, - seconds=-self.seconds, - microseconds=-self.microseconds, - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - def __bool__(self): - return not (not self.years and - not self.months and - not self.days and - not self.hours and - not self.minutes and - not self.seconds and - not self.microseconds and - not self.leapdays and - self.year is None and - self.month is None and - self.day is None and - self.weekday is None and - self.hour is None and - self.minute is None and - self.second is None and - self.microsecond is None) - # Compatibility with Python 2.x - __nonzero__ = __bool__ - - def __mul__(self, other): - try: - f = float(other) - except TypeError: - return NotImplemented - - return self.__class__(years=int(self.years * f), - months=int(self.months * f), - days=int(self.days * f), - hours=int(self.hours * f), - minutes=int(self.minutes * f), - seconds=int(self.seconds * f), - microseconds=int(self.microseconds * f), - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - __rmul__ = __mul__ - - def __eq__(self, other): - if not isinstance(other, relativedelta): - return NotImplemented - if self.weekday or other.weekday: - if not self.weekday or not other.weekday: - return False - if self.weekday.weekday != other.weekday.weekday: - return False - n1, n2 = self.weekday.n, other.weekday.n - if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): - return False - return (self.years == other.years and - 
self.months == other.months and - self.days == other.days and - self.hours == other.hours and - self.minutes == other.minutes and - self.seconds == other.seconds and - self.microseconds == other.microseconds and - self.leapdays == other.leapdays and - self.year == other.year and - self.month == other.month and - self.day == other.day and - self.hour == other.hour and - self.minute == other.minute and - self.second == other.second and - self.microsecond == other.microsecond) - - def __hash__(self): - return hash(( - self.weekday, - self.years, - self.months, - self.days, - self.hours, - self.minutes, - self.seconds, - self.microseconds, - self.leapdays, - self.year, - self.month, - self.day, - self.hour, - self.minute, - self.second, - self.microsecond, - )) - - def __ne__(self, other): - return not self.__eq__(other) - - def __div__(self, other): - try: - reciprocal = 1 / float(other) - except TypeError: - return NotImplemented - - return self.__mul__(reciprocal) - - __truediv__ = __div__ - - def __repr__(self): - l = [] - for attr in ["years", "months", "days", "leapdays", - "hours", "minutes", "seconds", "microseconds"]: - value = getattr(self, attr) - if value: - l.append("{attr}={value:+g}".format(attr=attr, value=value)) - for attr in ["year", "month", "day", "weekday", - "hour", "minute", "second", "microsecond"]: - value = getattr(self, attr) - if value is not None: - l.append("{attr}={value}".format(attr=attr, value=repr(value))) - return "{classname}({attrs})".format(classname=self.__class__.__name__, - attrs=", ".join(l)) - - -def _sign(x): - return int(copysign(1, x)) - -# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/rrule.py b/venv/Lib/site-packages/dateutil/rrule.py deleted file mode 100644 index 571a0d2..0000000 --- a/venv/Lib/site-packages/dateutil/rrule.py +++ /dev/null @@ -1,1737 +0,0 @@ -# -*- coding: utf-8 -*- -""" -The rrule module offers a small, complete, and very fast, implementation of -the recurrence rules documented in the -`iCalendar RFC `_, -including support for caching of results. -""" -import calendar -import datetime -import heapq -import itertools -import re -import sys -from functools import wraps -# For warning about deprecation of until and count -from warnings import warn - -from six import advance_iterator, integer_types - -from six.moves import _thread, range - -from ._common import weekday as weekdaybase - -try: - from math import gcd -except ImportError: - from fractions import gcd - -__all__ = ["rrule", "rruleset", "rrulestr", - "YEARLY", "MONTHLY", "WEEKLY", "DAILY", - "HOURLY", "MINUTELY", "SECONDLY", - "MO", "TU", "WE", "TH", "FR", "SA", "SU"] - -# Every mask is 7 days longer to handle cross-year weekly periods. 
-M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + - [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) -M365MASK = list(M366MASK) -M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) -MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) -MDAY365MASK = list(MDAY366MASK) -M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) -NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) -NMDAY365MASK = list(NMDAY366MASK) -M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) -M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) -WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 -del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] -MDAY365MASK = tuple(MDAY365MASK) -M365MASK = tuple(M365MASK) - -FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] - -(YEARLY, - MONTHLY, - WEEKLY, - DAILY, - HOURLY, - MINUTELY, - SECONDLY) = list(range(7)) - -# Imported on demand. -easter = None -parser = None - - -class weekday(weekdaybase): - """ - This version of weekday does not allow n = 0. - """ - def __init__(self, wkday, n=None): - if n == 0: - raise ValueError("Can't create weekday with n==0") - - super(weekday, self).__init__(wkday, n) - - -MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) - - -def _invalidates_cache(f): - """ - Decorator for rruleset methods which may invalidate the - cached length. - """ - @wraps(f) - def inner_func(self, *args, **kwargs): - rv = f(self, *args, **kwargs) - self._invalidate_cache() - return rv - - return inner_func - - -class rrulebase(object): - def __init__(self, cache=False): - if cache: - self._cache = [] - self._cache_lock = _thread.allocate_lock() - self._invalidate_cache() - else: - self._cache = None - self._cache_complete = False - self._len = None - - def __iter__(self): - if self._cache_complete: - return iter(self._cache) - elif self._cache is None: - return self._iter() - else: - return self._iter_cached() - - def _invalidate_cache(self): - if self._cache is not None: - self._cache = [] - self._cache_complete = False - self._cache_gen = self._iter() - - if self._cache_lock.locked(): - self._cache_lock.release() - - self._len = None - - def _iter_cached(self): - i = 0 - gen = self._cache_gen - cache = self._cache - acquire = self._cache_lock.acquire - release = self._cache_lock.release - while gen: - if i == len(cache): - acquire() - if self._cache_complete: - break - try: - for j in range(10): - cache.append(advance_iterator(gen)) - except StopIteration: - self._cache_gen = gen = None - self._cache_complete = True - break - release() - yield cache[i] - i += 1 - while i < self._len: - yield cache[i] - i += 1 - - def __getitem__(self, item): - if self._cache_complete: - return self._cache[item] - elif isinstance(item, slice): - if item.step and item.step < 0: - return list(iter(self))[item] - else: - return list(itertools.islice(self, - item.start or 0, - item.stop or sys.maxsize, - item.step or 1)) - elif item >= 0: - gen = iter(self) - try: - for i in range(item+1): - res = advance_iterator(gen) - except StopIteration: - raise IndexError - return res - else: - return list(iter(self))[item] - - def __contains__(self, item): - if self._cache_complete: - return item in self._cache - else: - for i in self: - if i == item: - return True - elif i > item: - return False - return False - - # __len__() introduces a large performance penalty. 
- def count(self): - """ Returns the number of recurrences in this set. It will have go - through the whole recurrence, if this hasn't been done before. """ - if self._len is None: - for x in self: - pass - return self._len - - def before(self, dt, inc=False): - """ Returns the last recurrence before the given datetime instance. The - inc keyword defines what happens if dt is an occurrence. With - inc=True, if dt itself is an occurrence, it will be returned. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - last = None - if inc: - for i in gen: - if i > dt: - break - last = i - else: - for i in gen: - if i >= dt: - break - last = i - return last - - def after(self, dt, inc=False): - """ Returns the first recurrence after the given datetime instance. The - inc keyword defines what happens if dt is an occurrence. With - inc=True, if dt itself is an occurrence, it will be returned. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - if inc: - for i in gen: - if i >= dt: - return i - else: - for i in gen: - if i > dt: - return i - return None - - def xafter(self, dt, count=None, inc=False): - """ - Generator which yields up to `count` recurrences after the given - datetime instance, equivalent to `after`. - - :param dt: - The datetime at which to start generating recurrences. - - :param count: - The maximum number of recurrences to generate. If `None` (default), - dates are generated until the recurrence rule is exhausted. - - :param inc: - If `dt` is an instance of the rule and `inc` is `True`, it is - included in the output. - - :yields: Yields a sequence of `datetime` objects. - """ - - if self._cache_complete: - gen = self._cache - else: - gen = self - - # Select the comparison function - if inc: - comp = lambda dc, dtc: dc >= dtc - else: - comp = lambda dc, dtc: dc > dtc - - # Generate dates - n = 0 - for d in gen: - if comp(d, dt): - if count is not None: - n += 1 - if n > count: - break - - yield d - - def between(self, after, before, inc=False, count=1): - """ Returns all the occurrences of the rrule between after and before. - The inc keyword defines what happens if after and/or before are - themselves occurrences. With inc=True, they will be included in the - list, if they are found in the recurrence set. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - started = False - l = [] - if inc: - for i in gen: - if i > before: - break - elif not started: - if i >= after: - started = True - l.append(i) - else: - l.append(i) - else: - for i in gen: - if i >= before: - break - elif not started: - if i > after: - started = True - l.append(i) - else: - l.append(i) - return l - - -class rrule(rrulebase): - """ - That's the base of the rrule operation. It accepts all the keywords - defined in the RFC as its constructor parameters (except byday, - which was renamed to byweekday) and more. The constructor prototype is:: - - rrule(freq) - - Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, - or SECONDLY. - - .. note:: - Per RFC section 3.3.10, recurrence instances falling on invalid dates - and times are ignored rather than coerced: - - Recurrence rules may generate recurrence instances with an invalid - date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM - on a day where the local time is moved forward by an hour at 1:00 - AM). Such recurrence instances MUST be ignored and MUST NOT be - counted as part of the recurrence set. 
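(Editor's note: the `before`/`after`/`xafter`/`between` helpers removed just above are the query methods shared by `rrule` and `rruleset`. A small usage sketch with illustrative dates, assuming standard dateutil behaviour.)

from datetime import datetime
from dateutil.rrule import rrule, DAILY

rule = rrule(DAILY, dtstart=datetime(2024, 1, 1), count=10)
rule.after(datetime(2024, 1, 3))                # 2024-01-04 00:00 (strictly after dt)
rule.after(datetime(2024, 1, 3), inc=True)      # 2024-01-03 00:00 (dt itself counts)
rule.between(datetime(2024, 1, 2), datetime(2024, 1, 5))   # only Jan 3 and Jan 4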
- - This can lead to possibly surprising behavior when, for example, the - start date occurs at the end of the month: - - >>> from dateutil.rrule import rrule, MONTHLY - >>> from datetime import datetime - >>> start_date = datetime(2014, 12, 31) - >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) - ... # doctest: +NORMALIZE_WHITESPACE - [datetime.datetime(2014, 12, 31, 0, 0), - datetime.datetime(2015, 1, 31, 0, 0), - datetime.datetime(2015, 3, 31, 0, 0), - datetime.datetime(2015, 5, 31, 0, 0)] - - Additionally, it supports the following keyword arguments: - - :param dtstart: - The recurrence start. Besides being the base for the recurrence, - missing parameters in the final recurrence instances will also be - extracted from this date. If not given, datetime.now() will be used - instead. - :param interval: - The interval between each freq iteration. For example, when using - YEARLY, an interval of 2 means once every two years, but with HOURLY, - it means once every two hours. The default interval is 1. - :param wkst: - The week start day. Must be one of the MO, TU, WE constants, or an - integer, specifying the first day of the week. This will affect - recurrences based on weekly periods. The default week start is got - from calendar.firstweekday(), and may be modified by - calendar.setfirstweekday(). - :param count: - If given, this determines how many occurrences will be generated. - - .. note:: - As of version 2.5.0, the use of the keyword ``until`` in conjunction - with ``count`` is deprecated, to make sure ``dateutil`` is fully - compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` - **must not** occur in the same call to ``rrule``. - :param until: - If given, this must be a datetime instance specifying the upper-bound - limit of the recurrence. The last recurrence in the rule is the greatest - datetime that is less than or equal to the value specified in the - ``until`` parameter. - - .. note:: - As of version 2.5.0, the use of the keyword ``until`` in conjunction - with ``count`` is deprecated, to make sure ``dateutil`` is fully - compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` - **must not** occur in the same call to ``rrule``. - :param bysetpos: - If given, it must be either an integer, or a sequence of integers, - positive or negative. Each given integer will specify an occurrence - number, corresponding to the nth occurrence of the rule inside the - frequency period. For example, a bysetpos of -1 if combined with a - MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will - result in the last work day of every month. - :param bymonth: - If given, it must be either an integer, or a sequence of integers, - meaning the months to apply the recurrence to. - :param bymonthday: - If given, it must be either an integer, or a sequence of integers, - meaning the month days to apply the recurrence to. - :param byyearday: - If given, it must be either an integer, or a sequence of integers, - meaning the year days to apply the recurrence to. - :param byeaster: - If given, it must be either an integer, or a sequence of integers, - positive or negative. Each integer will define an offset from the - Easter Sunday. Passing the offset 0 to byeaster will yield the Easter - Sunday itself. This is an extension to the RFC specification. - :param byweekno: - If given, it must be either an integer, or a sequence of integers, - meaning the week numbers to apply the recurrence to. 
Week numbers - have the meaning described in ISO8601, that is, the first week of - the year is that containing at least four days of the new year. - :param byweekday: - If given, it must be either an integer (0 == MO), a sequence of - integers, one of the weekday constants (MO, TU, etc), or a sequence - of these constants. When given, these variables will define the - weekdays where the recurrence will be applied. It's also possible to - use an argument n for the weekday instances, which will mean the nth - occurrence of this weekday in the period. For example, with MONTHLY, - or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the - first friday of the month where the recurrence happens. Notice that in - the RFC documentation, this is specified as BYDAY, but was renamed to - avoid the ambiguity of that keyword. - :param byhour: - If given, it must be either an integer, or a sequence of integers, - meaning the hours to apply the recurrence to. - :param byminute: - If given, it must be either an integer, or a sequence of integers, - meaning the minutes to apply the recurrence to. - :param bysecond: - If given, it must be either an integer, or a sequence of integers, - meaning the seconds to apply the recurrence to. - :param cache: - If given, it must be a boolean value specifying to enable or disable - caching of results. If you will use the same rrule instance multiple - times, enabling caching will improve the performance considerably. - """ - def __init__(self, freq, dtstart=None, - interval=1, wkst=None, count=None, until=None, bysetpos=None, - bymonth=None, bymonthday=None, byyearday=None, byeaster=None, - byweekno=None, byweekday=None, - byhour=None, byminute=None, bysecond=None, - cache=False): - super(rrule, self).__init__(cache) - global easter - if not dtstart: - if until and until.tzinfo: - dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) - else: - dtstart = datetime.datetime.now().replace(microsecond=0) - elif not isinstance(dtstart, datetime.datetime): - dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) - else: - dtstart = dtstart.replace(microsecond=0) - self._dtstart = dtstart - self._tzinfo = dtstart.tzinfo - self._freq = freq - self._interval = interval - self._count = count - - # Cache the original byxxx rules, if they are provided, as the _byxxx - # attributes do not necessarily map to the inputs, and this can be - # a problem in generating the strings. Only store things if they've - # been supplied (the string retrieval will just use .get()) - self._original_rule = {} - - if until and not isinstance(until, datetime.datetime): - until = datetime.datetime.fromordinal(until.toordinal()) - self._until = until - - if self._dtstart and self._until: - if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): - # According to RFC5545 Section 3.3.10: - # https://tools.ietf.org/html/rfc5545#section-3.3.10 - # - # > If the "DTSTART" property is specified as a date with UTC - # > time or a date with local time and time zone reference, - # > then the UNTIL rule part MUST be specified as a date with - # > UTC time. - raise ValueError( - 'RRULE UNTIL values must be specified in UTC when DTSTART ' - 'is timezone-aware' - ) - - if count is not None and until: - warn("Using both 'count' and 'until' is inconsistent with RFC 5545" - " and has been deprecated in dateutil. 
Future versions will " - "raise an error.", DeprecationWarning) - - if wkst is None: - self._wkst = calendar.firstweekday() - elif isinstance(wkst, integer_types): - self._wkst = wkst - else: - self._wkst = wkst.weekday - - if bysetpos is None: - self._bysetpos = None - elif isinstance(bysetpos, integer_types): - if bysetpos == 0 or not (-366 <= bysetpos <= 366): - raise ValueError("bysetpos must be between 1 and 366, " - "or between -366 and -1") - self._bysetpos = (bysetpos,) - else: - self._bysetpos = tuple(bysetpos) - for pos in self._bysetpos: - if pos == 0 or not (-366 <= pos <= 366): - raise ValueError("bysetpos must be between 1 and 366, " - "or between -366 and -1") - - if self._bysetpos: - self._original_rule['bysetpos'] = self._bysetpos - - if (byweekno is None and byyearday is None and bymonthday is None and - byweekday is None and byeaster is None): - if freq == YEARLY: - if bymonth is None: - bymonth = dtstart.month - self._original_rule['bymonth'] = None - bymonthday = dtstart.day - self._original_rule['bymonthday'] = None - elif freq == MONTHLY: - bymonthday = dtstart.day - self._original_rule['bymonthday'] = None - elif freq == WEEKLY: - byweekday = dtstart.weekday() - self._original_rule['byweekday'] = None - - # bymonth - if bymonth is None: - self._bymonth = None - else: - if isinstance(bymonth, integer_types): - bymonth = (bymonth,) - - self._bymonth = tuple(sorted(set(bymonth))) - - if 'bymonth' not in self._original_rule: - self._original_rule['bymonth'] = self._bymonth - - # byyearday - if byyearday is None: - self._byyearday = None - else: - if isinstance(byyearday, integer_types): - byyearday = (byyearday,) - - self._byyearday = tuple(sorted(set(byyearday))) - self._original_rule['byyearday'] = self._byyearday - - # byeaster - if byeaster is not None: - if not easter: - from dateutil import easter - if isinstance(byeaster, integer_types): - self._byeaster = (byeaster,) - else: - self._byeaster = tuple(sorted(byeaster)) - - self._original_rule['byeaster'] = self._byeaster - else: - self._byeaster = None - - # bymonthday - if bymonthday is None: - self._bymonthday = () - self._bynmonthday = () - else: - if isinstance(bymonthday, integer_types): - bymonthday = (bymonthday,) - - bymonthday = set(bymonthday) # Ensure it's unique - - self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) - self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) - - # Storing positive numbers first, then negative numbers - if 'bymonthday' not in self._original_rule: - self._original_rule['bymonthday'] = tuple( - itertools.chain(self._bymonthday, self._bynmonthday)) - - # byweekno - if byweekno is None: - self._byweekno = None - else: - if isinstance(byweekno, integer_types): - byweekno = (byweekno,) - - self._byweekno = tuple(sorted(set(byweekno))) - - self._original_rule['byweekno'] = self._byweekno - - # byweekday / bynweekday - if byweekday is None: - self._byweekday = None - self._bynweekday = None - else: - # If it's one of the valid non-sequence types, convert to a - # single-element sequence before the iterator that builds the - # byweekday set. 
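(Editor's note: the defaulting logic above anchors a rule to `dtstart` when no BYxxx parameter is given — YEARLY takes its month and day, MONTHLY its day, WEEKLY its weekday. A quick sketch of that behaviour; dates are illustrative.)

from datetime import datetime
from dateutil.rrule import rrule, YEARLY, WEEKLY

list(rrule(YEARLY, count=3, dtstart=datetime(2022, 5, 17)))
# -> 2022-05-17, 2023-05-17, 2024-05-17 (month/day taken from dtstart)
list(rrule(WEEKLY, count=2, dtstart=datetime(2024, 1, 2)))
# -> 2024-01-02, 2024-01-09 (every Tuesday, dtstart's weekday)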
- if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): - byweekday = (byweekday,) - - self._byweekday = set() - self._bynweekday = set() - for wday in byweekday: - if isinstance(wday, integer_types): - self._byweekday.add(wday) - elif not wday.n or freq > MONTHLY: - self._byweekday.add(wday.weekday) - else: - self._bynweekday.add((wday.weekday, wday.n)) - - if not self._byweekday: - self._byweekday = None - elif not self._bynweekday: - self._bynweekday = None - - if self._byweekday is not None: - self._byweekday = tuple(sorted(self._byweekday)) - orig_byweekday = [weekday(x) for x in self._byweekday] - else: - orig_byweekday = () - - if self._bynweekday is not None: - self._bynweekday = tuple(sorted(self._bynweekday)) - orig_bynweekday = [weekday(*x) for x in self._bynweekday] - else: - orig_bynweekday = () - - if 'byweekday' not in self._original_rule: - self._original_rule['byweekday'] = tuple(itertools.chain( - orig_byweekday, orig_bynweekday)) - - # byhour - if byhour is None: - if freq < HOURLY: - self._byhour = {dtstart.hour} - else: - self._byhour = None - else: - if isinstance(byhour, integer_types): - byhour = (byhour,) - - if freq == HOURLY: - self._byhour = self.__construct_byset(start=dtstart.hour, - byxxx=byhour, - base=24) - else: - self._byhour = set(byhour) - - self._byhour = tuple(sorted(self._byhour)) - self._original_rule['byhour'] = self._byhour - - # byminute - if byminute is None: - if freq < MINUTELY: - self._byminute = {dtstart.minute} - else: - self._byminute = None - else: - if isinstance(byminute, integer_types): - byminute = (byminute,) - - if freq == MINUTELY: - self._byminute = self.__construct_byset(start=dtstart.minute, - byxxx=byminute, - base=60) - else: - self._byminute = set(byminute) - - self._byminute = tuple(sorted(self._byminute)) - self._original_rule['byminute'] = self._byminute - - # bysecond - if bysecond is None: - if freq < SECONDLY: - self._bysecond = ((dtstart.second,)) - else: - self._bysecond = None - else: - if isinstance(bysecond, integer_types): - bysecond = (bysecond,) - - self._bysecond = set(bysecond) - - if freq == SECONDLY: - self._bysecond = self.__construct_byset(start=dtstart.second, - byxxx=bysecond, - base=60) - else: - self._bysecond = set(bysecond) - - self._bysecond = tuple(sorted(self._bysecond)) - self._original_rule['bysecond'] = self._bysecond - - if self._freq >= HOURLY: - self._timeset = None - else: - self._timeset = [] - for hour in self._byhour: - for minute in self._byminute: - for second in self._bysecond: - self._timeset.append( - datetime.time(hour, minute, second, - tzinfo=self._tzinfo)) - self._timeset.sort() - self._timeset = tuple(self._timeset) - - def __str__(self): - """ - Output a string that would generate this RRULE if passed to rrulestr. - This is mostly compatible with RFC5545, except for the - dateutil-specific extension BYEASTER. 
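(Editor's note: `__str__`, whose docstring ends above, serialises a rule back into roughly RFC 5545 text. A sketch of what that looks like for a simple rule; the exact output shown is what stock dateutil is expected to produce.)

from datetime import datetime
from dateutil.rrule import rrule, WEEKLY, rrulestr

rule = rrule(WEEKLY, count=3, dtstart=datetime(2024, 1, 1))
print(str(rule))
# DTSTART:20240101T000000
# RRULE:FREQ=WEEKLY;COUNT=3
list(rrulestr(str(rule))) == list(rule)   # the string round-trips through rrulestr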
- """ - - output = [] - h, m, s = [None] * 3 - if self._dtstart: - output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) - h, m, s = self._dtstart.timetuple()[3:6] - - parts = ['FREQ=' + FREQNAMES[self._freq]] - if self._interval != 1: - parts.append('INTERVAL=' + str(self._interval)) - - if self._wkst: - parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) - - if self._count is not None: - parts.append('COUNT=' + str(self._count)) - - if self._until: - parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) - - if self._original_rule.get('byweekday') is not None: - # The str() method on weekday objects doesn't generate - # RFC5545-compliant strings, so we should modify that. - original_rule = dict(self._original_rule) - wday_strings = [] - for wday in original_rule['byweekday']: - if wday.n: - wday_strings.append('{n:+d}{wday}'.format( - n=wday.n, - wday=repr(wday)[0:2])) - else: - wday_strings.append(repr(wday)) - - original_rule['byweekday'] = wday_strings - else: - original_rule = self._original_rule - - partfmt = '{name}={vals}' - for name, key in [('BYSETPOS', 'bysetpos'), - ('BYMONTH', 'bymonth'), - ('BYMONTHDAY', 'bymonthday'), - ('BYYEARDAY', 'byyearday'), - ('BYWEEKNO', 'byweekno'), - ('BYDAY', 'byweekday'), - ('BYHOUR', 'byhour'), - ('BYMINUTE', 'byminute'), - ('BYSECOND', 'bysecond'), - ('BYEASTER', 'byeaster')]: - value = original_rule.get(key) - if value: - parts.append(partfmt.format(name=name, vals=(','.join(str(v) - for v in value)))) - - output.append('RRULE:' + ';'.join(parts)) - return '\n'.join(output) - - def replace(self, **kwargs): - """Return new rrule with same attributes except for those attributes given new - values by whichever keyword arguments are specified.""" - new_kwargs = {"interval": self._interval, - "count": self._count, - "dtstart": self._dtstart, - "freq": self._freq, - "until": self._until, - "wkst": self._wkst, - "cache": False if self._cache is None else True } - new_kwargs.update(self._original_rule) - new_kwargs.update(kwargs) - return rrule(**new_kwargs) - - def _iter(self): - year, month, day, hour, minute, second, weekday, yearday, _ = \ - self._dtstart.timetuple() - - # Some local variables to speed things up a bit - freq = self._freq - interval = self._interval - wkst = self._wkst - until = self._until - bymonth = self._bymonth - byweekno = self._byweekno - byyearday = self._byyearday - byweekday = self._byweekday - byeaster = self._byeaster - bymonthday = self._bymonthday - bynmonthday = self._bynmonthday - bysetpos = self._bysetpos - byhour = self._byhour - byminute = self._byminute - bysecond = self._bysecond - - ii = _iterinfo(self) - ii.rebuild(year, month) - - getdayset = {YEARLY: ii.ydayset, - MONTHLY: ii.mdayset, - WEEKLY: ii.wdayset, - DAILY: ii.ddayset, - HOURLY: ii.ddayset, - MINUTELY: ii.ddayset, - SECONDLY: ii.ddayset}[freq] - - if freq < HOURLY: - timeset = self._timeset - else: - gettimeset = {HOURLY: ii.htimeset, - MINUTELY: ii.mtimeset, - SECONDLY: ii.stimeset}[freq] - if ((freq >= HOURLY and - self._byhour and hour not in self._byhour) or - (freq >= MINUTELY and - self._byminute and minute not in self._byminute) or - (freq >= SECONDLY and - self._bysecond and second not in self._bysecond)): - timeset = () - else: - timeset = gettimeset(hour, minute, second) - - total = 0 - count = self._count - while True: - # Get dayset with the right frequency - dayset, start, end = getdayset(year, month, day) - - # Do the "hard" work ;-) - filtered = False - for i in dayset[start:end]: - if ((bymonth and ii.mmask[i] not in 
bymonth) or - (byweekno and not ii.wnomask[i]) or - (byweekday and ii.wdaymask[i] not in byweekday) or - (ii.nwdaymask and not ii.nwdaymask[i]) or - (byeaster and not ii.eastermask[i]) or - ((bymonthday or bynmonthday) and - ii.mdaymask[i] not in bymonthday and - ii.nmdaymask[i] not in bynmonthday) or - (byyearday and - ((i < ii.yearlen and i+1 not in byyearday and - -ii.yearlen+i not in byyearday) or - (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and - -ii.nextyearlen+i-ii.yearlen not in byyearday)))): - dayset[i] = None - filtered = True - - # Output results - if bysetpos and timeset: - poslist = [] - for pos in bysetpos: - if pos < 0: - daypos, timepos = divmod(pos, len(timeset)) - else: - daypos, timepos = divmod(pos-1, len(timeset)) - try: - i = [x for x in dayset[start:end] - if x is not None][daypos] - time = timeset[timepos] - except IndexError: - pass - else: - date = datetime.date.fromordinal(ii.yearordinal+i) - res = datetime.datetime.combine(date, time) - if res not in poslist: - poslist.append(res) - poslist.sort() - for res in poslist: - if until and res > until: - self._len = total - return - elif res >= self._dtstart: - if count is not None: - count -= 1 - if count < 0: - self._len = total - return - total += 1 - yield res - else: - for i in dayset[start:end]: - if i is not None: - date = datetime.date.fromordinal(ii.yearordinal + i) - for time in timeset: - res = datetime.datetime.combine(date, time) - if until and res > until: - self._len = total - return - elif res >= self._dtstart: - if count is not None: - count -= 1 - if count < 0: - self._len = total - return - - total += 1 - yield res - - # Handle frequency and interval - fixday = False - if freq == YEARLY: - year += interval - if year > datetime.MAXYEAR: - self._len = total - return - ii.rebuild(year, month) - elif freq == MONTHLY: - month += interval - if month > 12: - div, mod = divmod(month, 12) - month = mod - year += div - if month == 0: - month = 12 - year -= 1 - if year > datetime.MAXYEAR: - self._len = total - return - ii.rebuild(year, month) - elif freq == WEEKLY: - if wkst > weekday: - day += -(weekday+1+(6-wkst))+self._interval*7 - else: - day += -(weekday-wkst)+self._interval*7 - weekday = wkst - fixday = True - elif freq == DAILY: - day += interval - fixday = True - elif freq == HOURLY: - if filtered: - # Jump to one iteration before next day - hour += ((23-hour)//interval)*interval - - if byhour: - ndays, hour = self.__mod_distance(value=hour, - byxxx=self._byhour, - base=24) - else: - ndays, hour = divmod(hour+interval, 24) - - if ndays: - day += ndays - fixday = True - - timeset = gettimeset(hour, minute, second) - elif freq == MINUTELY: - if filtered: - # Jump to one iteration before next day - minute += ((1439-(hour*60+minute))//interval)*interval - - valid = False - rep_rate = (24*60) - for j in range(rep_rate // gcd(interval, rep_rate)): - if byminute: - nhours, minute = \ - self.__mod_distance(value=minute, - byxxx=self._byminute, - base=60) - else: - nhours, minute = divmod(minute+interval, 60) - - div, hour = divmod(hour+nhours, 24) - if div: - day += div - fixday = True - filtered = False - - if not byhour or hour in byhour: - valid = True - break - - if not valid: - raise ValueError('Invalid combination of interval and ' + - 'byhour resulting in empty rule.') - - timeset = gettimeset(hour, minute, second) - elif freq == SECONDLY: - if filtered: - # Jump to one iteration before next day - second += (((86399 - (hour * 3600 + minute * 60 + second)) - // interval) * interval) - - 
rep_rate = (24 * 3600) - valid = False - for j in range(0, rep_rate // gcd(interval, rep_rate)): - if bysecond: - nminutes, second = \ - self.__mod_distance(value=second, - byxxx=self._bysecond, - base=60) - else: - nminutes, second = divmod(second+interval, 60) - - div, minute = divmod(minute+nminutes, 60) - if div: - hour += div - div, hour = divmod(hour, 24) - if div: - day += div - fixday = True - - if ((not byhour or hour in byhour) and - (not byminute or minute in byminute) and - (not bysecond or second in bysecond)): - valid = True - break - - if not valid: - raise ValueError('Invalid combination of interval, ' + - 'byhour and byminute resulting in empty' + - ' rule.') - - timeset = gettimeset(hour, minute, second) - - if fixday and day > 28: - daysinmonth = calendar.monthrange(year, month)[1] - if day > daysinmonth: - while day > daysinmonth: - day -= daysinmonth - month += 1 - if month == 13: - month = 1 - year += 1 - if year > datetime.MAXYEAR: - self._len = total - return - daysinmonth = calendar.monthrange(year, month)[1] - ii.rebuild(year, month) - - def __construct_byset(self, start, byxxx, base): - """ - If a `BYXXX` sequence is passed to the constructor at the same level as - `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some - specifications which cannot be reached given some starting conditions. - - This occurs whenever the interval is not coprime with the base of a - given unit and the difference between the starting position and the - ending position is not coprime with the greatest common denominator - between the interval and the base. For example, with a FREQ of hourly - starting at 17:00 and an interval of 4, the only valid values for - BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not - coprime. - - :param start: - Specifies the starting position. - :param byxxx: - An iterable containing the list of allowed values. - :param base: - The largest allowable value for the specified frequency (e.g. - 24 hours, 60 minutes). - - This does not preserve the type of the iterable, returning a set, since - the values should be unique and the order is irrelevant, this will - speed up later lookups. - - In the event of an empty set, raises a :exception:`ValueError`, as this - results in an empty rrule. - """ - - cset = set() - - # Support a single byxxx value. - if isinstance(byxxx, integer_types): - byxxx = (byxxx, ) - - for num in byxxx: - i_gcd = gcd(self._interval, base) - # Use divmod rather than % because we need to wrap negative nums. - if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: - cset.add(num) - - if len(cset) == 0: - raise ValueError("Invalid rrule byxxx generates an empty set.") - - return cset - - def __mod_distance(self, value, byxxx, base): - """ - Calculates the next value in a sequence where the `FREQ` parameter is - specified along with a `BYXXX` parameter at the same "level" - (e.g. `HOURLY` specified with `BYHOUR`). - - :param value: - The old value of the component. - :param byxxx: - The `BYXXX` set, which should have been generated by - `rrule._construct_byset`, or something else which checks that a - valid rule is present. - :param base: - The largest allowable value for the specified frequency (e.g. - 24 hours, 60 minutes). - - If a valid value is not found after `base` iterations (the maximum - number before the sequence would start to repeat), this raises a - :exception:`ValueError`, as no valid values were found. 
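(Editor's note: the coprimality check described in the `__construct_byset` docstring above is what rejects BYHOUR values that can never be reached from `dtstart` with the given INTERVAL. A sketch of the 17:00 / interval-4 case the docstring itself uses.)

from datetime import datetime
from dateutil.rrule import rrule, HOURLY

start = datetime(2024, 1, 1, 17, 0)
list(rrule(HOURLY, interval=4, byhour=21, dtstart=start, count=2))
# -> occurrences at 21:00 (21 is reachable: 17 -> 21 -> 1 -> 5 -> 9 -> 13 -> ...)
rrule(HOURLY, interval=4, byhour=18, dtstart=start)
# -> ValueError: 18 is never hit from 17:00 in 4-hour steps, so the rule would be empty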
- - This returns a tuple of `divmod(n*interval, base)`, where `n` is the - smallest number of `interval` repetitions until the next specified - value in `byxxx` is found. - """ - accumulator = 0 - for ii in range(1, base + 1): - # Using divmod() over % to account for negative intervals - div, value = divmod(value + self._interval, base) - accumulator += div - if value in byxxx: - return (accumulator, value) - - -class _iterinfo(object): - __slots__ = ["rrule", "lastyear", "lastmonth", - "yearlen", "nextyearlen", "yearordinal", "yearweekday", - "mmask", "mrange", "mdaymask", "nmdaymask", - "wdaymask", "wnomask", "nwdaymask", "eastermask"] - - def __init__(self, rrule): - for attr in self.__slots__: - setattr(self, attr, None) - self.rrule = rrule - - def rebuild(self, year, month): - # Every mask is 7 days longer to handle cross-year weekly periods. - rr = self.rrule - if year != self.lastyear: - self.yearlen = 365 + calendar.isleap(year) - self.nextyearlen = 365 + calendar.isleap(year + 1) - firstyday = datetime.date(year, 1, 1) - self.yearordinal = firstyday.toordinal() - self.yearweekday = firstyday.weekday() - - wday = datetime.date(year, 1, 1).weekday() - if self.yearlen == 365: - self.mmask = M365MASK - self.mdaymask = MDAY365MASK - self.nmdaymask = NMDAY365MASK - self.wdaymask = WDAYMASK[wday:] - self.mrange = M365RANGE - else: - self.mmask = M366MASK - self.mdaymask = MDAY366MASK - self.nmdaymask = NMDAY366MASK - self.wdaymask = WDAYMASK[wday:] - self.mrange = M366RANGE - - if not rr._byweekno: - self.wnomask = None - else: - self.wnomask = [0]*(self.yearlen+7) - # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) - no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 - if no1wkst >= 4: - no1wkst = 0 - # Number of days in the year, plus the days we got - # from last year. - wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 - else: - # Number of days in the year, minus the days we - # left in last year. - wyearlen = self.yearlen-no1wkst - div, mod = divmod(wyearlen, 7) - numweeks = div+mod//4 - for n in rr._byweekno: - if n < 0: - n += numweeks+1 - if not (0 < n <= numweeks): - continue - if n > 1: - i = no1wkst+(n-1)*7 - if no1wkst != firstwkst: - i -= 7-firstwkst - else: - i = no1wkst - for j in range(7): - self.wnomask[i] = 1 - i += 1 - if self.wdaymask[i] == rr._wkst: - break - if 1 in rr._byweekno: - # Check week number 1 of next year as well - # TODO: Check -numweeks for next year. - i = no1wkst+numweeks*7 - if no1wkst != firstwkst: - i -= 7-firstwkst - if i < self.yearlen: - # If week starts in next year, we - # don't care about it. - for j in range(7): - self.wnomask[i] = 1 - i += 1 - if self.wdaymask[i] == rr._wkst: - break - if no1wkst: - # Check last week number of last year as - # well. If no1wkst is 0, either the year - # started on week start, or week number 1 - # got days from last year, so there are no - # days from last year's last week number in - # this year. 
- if -1 not in rr._byweekno: - lyearweekday = datetime.date(year-1, 1, 1).weekday() - lno1wkst = (7-lyearweekday+rr._wkst) % 7 - lyearlen = 365+calendar.isleap(year-1) - if lno1wkst >= 4: - lno1wkst = 0 - lnumweeks = 52+(lyearlen + - (lyearweekday-rr._wkst) % 7) % 7//4 - else: - lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 - else: - lnumweeks = -1 - if lnumweeks in rr._byweekno: - for i in range(no1wkst): - self.wnomask[i] = 1 - - if (rr._bynweekday and (month != self.lastmonth or - year != self.lastyear)): - ranges = [] - if rr._freq == YEARLY: - if rr._bymonth: - for month in rr._bymonth: - ranges.append(self.mrange[month-1:month+1]) - else: - ranges = [(0, self.yearlen)] - elif rr._freq == MONTHLY: - ranges = [self.mrange[month-1:month+1]] - if ranges: - # Weekly frequency won't get here, so we may not - # care about cross-year weekly periods. - self.nwdaymask = [0]*self.yearlen - for first, last in ranges: - last -= 1 - for wday, n in rr._bynweekday: - if n < 0: - i = last+(n+1)*7 - i -= (self.wdaymask[i]-wday) % 7 - else: - i = first+(n-1)*7 - i += (7-self.wdaymask[i]+wday) % 7 - if first <= i <= last: - self.nwdaymask[i] = 1 - - if rr._byeaster: - self.eastermask = [0]*(self.yearlen+7) - eyday = easter.easter(year).toordinal()-self.yearordinal - for offset in rr._byeaster: - self.eastermask[eyday+offset] = 1 - - self.lastyear = year - self.lastmonth = month - - def ydayset(self, year, month, day): - return list(range(self.yearlen)), 0, self.yearlen - - def mdayset(self, year, month, day): - dset = [None]*self.yearlen - start, end = self.mrange[month-1:month+1] - for i in range(start, end): - dset[i] = i - return dset, start, end - - def wdayset(self, year, month, day): - # We need to handle cross-year weeks here. - dset = [None]*(self.yearlen+7) - i = datetime.date(year, month, day).toordinal()-self.yearordinal - start = i - for j in range(7): - dset[i] = i - i += 1 - # if (not (0 <= i < self.yearlen) or - # self.wdaymask[i] == self.rrule._wkst): - # This will cross the year boundary, if necessary. - if self.wdaymask[i] == self.rrule._wkst: - break - return dset, start, i - - def ddayset(self, year, month, day): - dset = [None] * self.yearlen - i = datetime.date(year, month, day).toordinal() - self.yearordinal - dset[i] = i - return dset, i, i + 1 - - def htimeset(self, hour, minute, second): - tset = [] - rr = self.rrule - for minute in rr._byminute: - for second in rr._bysecond: - tset.append(datetime.time(hour, minute, second, - tzinfo=rr._tzinfo)) - tset.sort() - return tset - - def mtimeset(self, hour, minute, second): - tset = [] - rr = self.rrule - for second in rr._bysecond: - tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) - tset.sort() - return tset - - def stimeset(self, hour, minute, second): - return (datetime.time(hour, minute, second, - tzinfo=self.rrule._tzinfo),) - - -class rruleset(rrulebase): - """ The rruleset type allows more complex recurrence setups, mixing - multiple rules, dates, exclusion rules, and exclusion dates. The type - constructor takes the following keyword arguments: - - :param cache: If True, caching of results will be enabled, improving - performance of multiple queries considerably. 
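(Editor's note: `rruleset`, whose docstring is quoted above, combines inclusion rules/dates with exclusion rules/dates. A minimal sketch with illustrative dates.)

from datetime import datetime
from dateutil.rrule import rruleset, rrule, DAILY

rset = rruleset()
rset.rrule(rrule(DAILY, count=5, dtstart=datetime(2024, 1, 1)))   # include Jan 1-5
rset.exdate(datetime(2024, 1, 3))                                 # but drop Jan 3
list(rset)   # Jan 1, 2, 4, 5 - the excluded date is removed from the set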
""" - - class _genitem(object): - def __init__(self, genlist, gen): - try: - self.dt = advance_iterator(gen) - genlist.append(self) - except StopIteration: - pass - self.genlist = genlist - self.gen = gen - - def __next__(self): - try: - self.dt = advance_iterator(self.gen) - except StopIteration: - if self.genlist[0] is self: - heapq.heappop(self.genlist) - else: - self.genlist.remove(self) - heapq.heapify(self.genlist) - - next = __next__ - - def __lt__(self, other): - return self.dt < other.dt - - def __gt__(self, other): - return self.dt > other.dt - - def __eq__(self, other): - return self.dt == other.dt - - def __ne__(self, other): - return self.dt != other.dt - - def __init__(self, cache=False): - super(rruleset, self).__init__(cache) - self._rrule = [] - self._rdate = [] - self._exrule = [] - self._exdate = [] - - @_invalidates_cache - def rrule(self, rrule): - """ Include the given :py:class:`rrule` instance in the recurrence set - generation. """ - self._rrule.append(rrule) - - @_invalidates_cache - def rdate(self, rdate): - """ Include the given :py:class:`datetime` instance in the recurrence - set generation. """ - self._rdate.append(rdate) - - @_invalidates_cache - def exrule(self, exrule): - """ Include the given rrule instance in the recurrence set exclusion - list. Dates which are part of the given recurrence rules will not - be generated, even if some inclusive rrule or rdate matches them. - """ - self._exrule.append(exrule) - - @_invalidates_cache - def exdate(self, exdate): - """ Include the given datetime instance in the recurrence set - exclusion list. Dates included that way will not be generated, - even if some inclusive rrule or rdate matches them. """ - self._exdate.append(exdate) - - def _iter(self): - rlist = [] - self._rdate.sort() - self._genitem(rlist, iter(self._rdate)) - for gen in [iter(x) for x in self._rrule]: - self._genitem(rlist, gen) - exlist = [] - self._exdate.sort() - self._genitem(exlist, iter(self._exdate)) - for gen in [iter(x) for x in self._exrule]: - self._genitem(exlist, gen) - lastdt = None - total = 0 - heapq.heapify(rlist) - heapq.heapify(exlist) - while rlist: - ritem = rlist[0] - if not lastdt or lastdt != ritem.dt: - while exlist and exlist[0] < ritem: - exitem = exlist[0] - advance_iterator(exitem) - if exlist and exlist[0] is exitem: - heapq.heapreplace(exlist, exitem) - if not exlist or ritem != exlist[0]: - total += 1 - yield ritem.dt - lastdt = ritem.dt - advance_iterator(ritem) - if rlist and rlist[0] is ritem: - heapq.heapreplace(rlist, ritem) - self._len = total - - - - -class _rrulestr(object): - """ Parses a string representation of a recurrence rule or set of - recurrence rules. - - :param s: - Required, a string defining one or more recurrence rules. - - :param dtstart: - If given, used as the default recurrence start if not specified in the - rule string. - - :param cache: - If set ``True`` caching of results will be enabled, improving - performance of multiple queries considerably. - - :param unfold: - If set ``True`` indicates that a rule string is split over more - than one line and should be joined before processing. - - :param forceset: - If set ``True`` forces a :class:`dateutil.rrule.rruleset` to - be returned. - - :param compatible: - If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a naive - :class:`datetime.datetime` object is returned. 
- - :param tzids: - If given, a callable or mapping used to retrieve a - :class:`datetime.tzinfo` from a string representation. - Defaults to :func:`dateutil.tz.gettz`. - - :param tzinfos: - Additional time zone names / aliases which may be present in a string - representation. See :func:`dateutil.parser.parse` for more - information. - - :return: - Returns a :class:`dateutil.rrule.rruleset` or - :class:`dateutil.rrule.rrule` - """ - - _freq_map = {"YEARLY": YEARLY, - "MONTHLY": MONTHLY, - "WEEKLY": WEEKLY, - "DAILY": DAILY, - "HOURLY": HOURLY, - "MINUTELY": MINUTELY, - "SECONDLY": SECONDLY} - - _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, - "FR": 4, "SA": 5, "SU": 6} - - def _handle_int(self, rrkwargs, name, value, **kwargs): - rrkwargs[name.lower()] = int(value) - - def _handle_int_list(self, rrkwargs, name, value, **kwargs): - rrkwargs[name.lower()] = [int(x) for x in value.split(',')] - - _handle_INTERVAL = _handle_int - _handle_COUNT = _handle_int - _handle_BYSETPOS = _handle_int_list - _handle_BYMONTH = _handle_int_list - _handle_BYMONTHDAY = _handle_int_list - _handle_BYYEARDAY = _handle_int_list - _handle_BYEASTER = _handle_int_list - _handle_BYWEEKNO = _handle_int_list - _handle_BYHOUR = _handle_int_list - _handle_BYMINUTE = _handle_int_list - _handle_BYSECOND = _handle_int_list - - def _handle_FREQ(self, rrkwargs, name, value, **kwargs): - rrkwargs["freq"] = self._freq_map[value] - - def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): - global parser - if not parser: - from dateutil import parser - try: - rrkwargs["until"] = parser.parse(value, - ignoretz=kwargs.get("ignoretz"), - tzinfos=kwargs.get("tzinfos")) - except ValueError: - raise ValueError("invalid until date") - - def _handle_WKST(self, rrkwargs, name, value, **kwargs): - rrkwargs["wkst"] = self._weekday_map[value] - - def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): - """ - Two ways to specify this: +1MO or MO(+1) - """ - l = [] - for wday in value.split(','): - if '(' in wday: - # If it's of the form TH(+1), etc. - splt = wday.split('(') - w = splt[0] - n = int(splt[1][:-1]) - elif len(wday): - # If it's of the form +1MO - for i in range(len(wday)): - if wday[i] not in '+-0123456789': - break - n = wday[:i] or None - w = wday[i:] - if n: - n = int(n) - else: - raise ValueError("Invalid (empty) BYDAY specification.") - - l.append(weekdays[self._weekday_map[w]](n)) - rrkwargs["byweekday"] = l - - _handle_BYDAY = _handle_BYWEEKDAY - - def _parse_rfc_rrule(self, line, - dtstart=None, - cache=False, - ignoretz=False, - tzinfos=None): - if line.find(':') != -1: - name, value = line.split(':') - if name != "RRULE": - raise ValueError("unknown parameter name") - else: - value = line - rrkwargs = {} - for pair in value.split(';'): - name, value = pair.split('=') - name = name.upper() - value = value.upper() - try: - getattr(self, "_handle_"+name)(rrkwargs, name, value, - ignoretz=ignoretz, - tzinfos=tzinfos) - except AttributeError: - raise ValueError("unknown parameter '%s'" % name) - except (KeyError, ValueError): - raise ValueError("invalid '%s': %s" % (name, value)) - return rrule(dtstart=dtstart, cache=cache, **rrkwargs) - - def _parse_date_value(self, date_value, parms, rule_tzids, - ignoretz, tzids, tzinfos): - global parser - if not parser: - from dateutil import parser - - datevals = [] - value_found = False - TZID = None - - for parm in parms: - if parm.startswith("TZID="): - try: - tzkey = rule_tzids[parm.split('TZID=')[-1]] - except KeyError: - continue - if tzids is None: - from . 
import tz - tzlookup = tz.gettz - elif callable(tzids): - tzlookup = tzids - else: - tzlookup = getattr(tzids, 'get', None) - if tzlookup is None: - msg = ('tzids must be a callable, mapping, or None, ' - 'not %s' % tzids) - raise ValueError(msg) - - TZID = tzlookup(tzkey) - continue - - # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found - # only once. - if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: - raise ValueError("unsupported parm: " + parm) - else: - if value_found: - msg = ("Duplicate value parameter found in: " + parm) - raise ValueError(msg) - value_found = True - - for datestr in date_value.split(','): - date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) - if TZID is not None: - if date.tzinfo is None: - date = date.replace(tzinfo=TZID) - else: - raise ValueError('DTSTART/EXDATE specifies multiple timezone') - datevals.append(date) - - return datevals - - def _parse_rfc(self, s, - dtstart=None, - cache=False, - unfold=False, - forceset=False, - compatible=False, - ignoretz=False, - tzids=None, - tzinfos=None): - global parser - if compatible: - forceset = True - unfold = True - - TZID_NAMES = dict(map( - lambda x: (x.upper(), x), - re.findall('TZID=(?P[^:]+):', s) - )) - s = s.upper() - if not s.strip(): - raise ValueError("empty string") - if unfold: - lines = s.splitlines() - i = 0 - while i < len(lines): - line = lines[i].rstrip() - if not line: - del lines[i] - elif i > 0 and line[0] == " ": - lines[i-1] += line[1:] - del lines[i] - else: - i += 1 - else: - lines = s.split() - if (not forceset and len(lines) == 1 and (s.find(':') == -1 or - s.startswith('RRULE:'))): - return self._parse_rfc_rrule(lines[0], cache=cache, - dtstart=dtstart, ignoretz=ignoretz, - tzinfos=tzinfos) - else: - rrulevals = [] - rdatevals = [] - exrulevals = [] - exdatevals = [] - for line in lines: - if not line: - continue - if line.find(':') == -1: - name = "RRULE" - value = line - else: - name, value = line.split(':', 1) - parms = name.split(';') - if not parms: - raise ValueError("empty property name") - name = parms[0] - parms = parms[1:] - if name == "RRULE": - for parm in parms: - raise ValueError("unsupported RRULE parm: "+parm) - rrulevals.append(value) - elif name == "RDATE": - for parm in parms: - if parm != "VALUE=DATE-TIME": - raise ValueError("unsupported RDATE parm: "+parm) - rdatevals.append(value) - elif name == "EXRULE": - for parm in parms: - raise ValueError("unsupported EXRULE parm: "+parm) - exrulevals.append(value) - elif name == "EXDATE": - exdatevals.extend( - self._parse_date_value(value, parms, - TZID_NAMES, ignoretz, - tzids, tzinfos) - ) - elif name == "DTSTART": - dtvals = self._parse_date_value(value, parms, TZID_NAMES, - ignoretz, tzids, tzinfos) - if len(dtvals) != 1: - raise ValueError("Multiple DTSTART values specified:" + - value) - dtstart = dtvals[0] - else: - raise ValueError("unsupported property: "+name) - if (forceset or len(rrulevals) > 1 or rdatevals - or exrulevals or exdatevals): - if not parser and (rdatevals or exdatevals): - from dateutil import parser - rset = rruleset(cache=cache) - for value in rrulevals: - rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in rdatevals: - for datestr in value.split(','): - rset.rdate(parser.parse(datestr, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exrulevals: - rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exdatevals: - rset.exdate(value) - if 
compatible and dtstart: - rset.rdate(dtstart) - return rset - else: - return self._parse_rfc_rrule(rrulevals[0], - dtstart=dtstart, - cache=cache, - ignoretz=ignoretz, - tzinfos=tzinfos) - - def __call__(self, s, **kwargs): - return self._parse_rfc(s, **kwargs) - - -rrulestr = _rrulestr() - -# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/tz/__init__.py b/venv/Lib/site-packages/dateutil/tz/__init__.py deleted file mode 100644 index af1352c..0000000 --- a/venv/Lib/site-packages/dateutil/tz/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -from .tz import * -from .tz import __doc__ - -__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", - "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", - "enfold", "datetime_ambiguous", "datetime_exists", - "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] - - -class DeprecatedTzFormatWarning(Warning): - """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a80bb07..0000000 Binary files a/venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc deleted file mode 100644 index b15484f..0000000 Binary files a/venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc deleted file mode 100644 index 2305d08..0000000 Binary files a/venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc deleted file mode 100644 index 54bfdf3..0000000 Binary files a/venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc deleted file mode 100644 index 1ec6e9b..0000000 Binary files a/venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/tz/_common.py b/venv/Lib/site-packages/dateutil/tz/_common.py deleted file mode 100644 index e6ac118..0000000 --- a/venv/Lib/site-packages/dateutil/tz/_common.py +++ /dev/null @@ -1,419 +0,0 @@ -from six import PY2 - -from functools import wraps - -from datetime import datetime, timedelta, tzinfo - - -ZERO = timedelta(0) - -__all__ = ['tzname_in_python2', 'enfold'] - - -def tzname_in_python2(namefunc): - """Change unicode output into bytestrings in Python 2 - - tzname() API changed in Python 3. 
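(Editor's note: the `_rrulestr` parser removed above is exposed by dateutil as the module-level `rrulestr` callable. A sketch of typical use — standard dateutil API, with an illustrative RRULE string.)

from datetime import datetime
from dateutil.rrule import rrulestr

rule = rrulestr("FREQ=MONTHLY;BYDAY=+1FR;COUNT=3",
                dtstart=datetime(2024, 1, 1))
list(rule)   # first Friday of January, February and March 2024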
It used to return bytes, but was changed - to unicode strings - """ - if PY2: - @wraps(namefunc) - def adjust_encoding(*args, **kwargs): - name = namefunc(*args, **kwargs) - if name is not None: - name = name.encode() - - return name - - return adjust_encoding - else: - return namefunc - - -# The following is adapted from Alexander Belopolsky's tz library -# https://github.com/abalkin/tz -if hasattr(datetime, 'fold'): - # This is the pre-python 3.6 fold situation - def enfold(dt, fold=1): - """ - Provides a unified interface for assigning the ``fold`` attribute to - datetimes both before and after the implementation of PEP-495. - - :param fold: - The value for the ``fold`` attribute in the returned datetime. This - should be either 0 or 1. - - :return: - Returns an object for which ``getattr(dt, 'fold', 0)`` returns - ``fold`` for all versions of Python. In versions prior to - Python 3.6, this is a ``_DatetimeWithFold`` object, which is a - subclass of :py:class:`datetime.datetime` with the ``fold`` - attribute added, if ``fold`` is 1. - - .. versionadded:: 2.6.0 - """ - return dt.replace(fold=fold) - -else: - class _DatetimeWithFold(datetime): - """ - This is a class designed to provide a PEP 495-compliant interface for - Python versions before 3.6. It is used only for dates in a fold, so - the ``fold`` attribute is fixed at ``1``. - - .. versionadded:: 2.6.0 - """ - __slots__ = () - - def replace(self, *args, **kwargs): - """ - Return a datetime with the same attributes, except for those - attributes given new values by whichever keyword arguments are - specified. Note that tzinfo=None can be specified to create a naive - datetime from an aware datetime with no conversion of date and time - data. - - This is reimplemented in ``_DatetimeWithFold`` because pypy3 will - return a ``datetime.datetime`` even if ``fold`` is unchanged. - """ - argnames = ( - 'year', 'month', 'day', 'hour', 'minute', 'second', - 'microsecond', 'tzinfo' - ) - - for arg, argname in zip(args, argnames): - if argname in kwargs: - raise TypeError('Duplicate argument: {}'.format(argname)) - - kwargs[argname] = arg - - for argname in argnames: - if argname not in kwargs: - kwargs[argname] = getattr(self, argname) - - dt_class = self.__class__ if kwargs.get('fold', 1) else datetime - - return dt_class(**kwargs) - - @property - def fold(self): - return 1 - - def enfold(dt, fold=1): - """ - Provides a unified interface for assigning the ``fold`` attribute to - datetimes both before and after the implementation of PEP-495. - - :param fold: - The value for the ``fold`` attribute in the returned datetime. This - should be either 0 or 1. - - :return: - Returns an object for which ``getattr(dt, 'fold', 0)`` returns - ``fold`` for all versions of Python. In versions prior to - Python 3.6, this is a ``_DatetimeWithFold`` object, which is a - subclass of :py:class:`datetime.datetime` with the ``fold`` - attribute added, if ``fold`` is 1. - - .. versionadded:: 2.6.0 - """ - if getattr(dt, 'fold', 0) == fold: - return dt - - args = dt.timetuple()[:6] - args += (dt.microsecond, dt.tzinfo) - - if fold: - return _DatetimeWithFold(*args) - else: - return datetime(*args) - - -def _validate_fromutc_inputs(f): - """ - The CPython version of ``fromutc`` checks that the input is a ``datetime`` - object and that ``self`` is attached as its ``tzinfo``. 
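(Editor's note: `enfold`, defined in the block above, gives a uniform way to set PEP 495's `fold` attribute. A sketch of what it is for; the America/New_York zone and the 2024-11-03 fall-back date are illustrative.)

from datetime import datetime
from dateutil import tz

nyc = tz.gettz("America/New_York")
dt = datetime(2024, 11, 3, 1, 30, tzinfo=nyc)   # 01:30 occurs twice on the fall-back day
tz.enfold(dt, fold=0).utcoffset()               # -4:00, first (DST) occurrence
tz.enfold(dt, fold=1).utcoffset()               # -5:00, second (standard-time) occurrence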
- """ - @wraps(f) - def fromutc(self, dt): - if not isinstance(dt, datetime): - raise TypeError("fromutc() requires a datetime argument") - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - return f(self, dt) - - return fromutc - - -class _tzinfo(tzinfo): - """ - Base class for all ``dateutil`` ``tzinfo`` objects. - """ - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - - dt = dt.replace(tzinfo=self) - - wall_0 = enfold(dt, fold=0) - wall_1 = enfold(dt, fold=1) - - same_offset = wall_0.utcoffset() == wall_1.utcoffset() - same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) - - return same_dt and not same_offset - - def _fold_status(self, dt_utc, dt_wall): - """ - Determine the fold status of a "wall" datetime, given a representation - of the same datetime as a (naive) UTC datetime. This is calculated based - on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all - datetimes, and that this offset is the actual number of hours separating - ``dt_utc`` and ``dt_wall``. - - :param dt_utc: - Representation of the datetime as UTC - - :param dt_wall: - Representation of the datetime as "wall time". This parameter must - either have a `fold` attribute or have a fold-naive - :class:`datetime.tzinfo` attached, otherwise the calculation may - fail. - """ - if self.is_ambiguous(dt_wall): - delta_wall = dt_wall - dt_utc - _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) - else: - _fold = 0 - - return _fold - - def _fold(self, dt): - return getattr(dt, 'fold', 0) - - def _fromutc(self, dt): - """ - Given a timezone-aware datetime in a given timezone, calculates a - timezone-aware datetime in a new timezone. - - Since this is the one time that we *know* we have an unambiguous - datetime object, we take this opportunity to determine whether the - datetime is ambiguous and in a "fold" state (e.g. if it's the first - occurrence, chronologically, of the ambiguous datetime). - - :param dt: - A timezone-aware :class:`datetime.datetime` object. - """ - - # Re-implement the algorithm from Python's datetime.py - dtoff = dt.utcoffset() - if dtoff is None: - raise ValueError("fromutc() requires a non-None utcoffset() " - "result") - - # The original datetime.py code assumes that `dst()` defaults to - # zero during ambiguous times. PEP 495 inverts this presumption, so - # for pre-PEP 495 versions of python, we need to tweak the algorithm. - dtdst = dt.dst() - if dtdst is None: - raise ValueError("fromutc() requires a non-None dst() result") - delta = dtoff - dtdst - - dt += delta - # Set fold=1 so we can default to being in the fold for - # ambiguous dates. - dtdst = enfold(dt, fold=1).dst() - if dtdst is None: - raise ValueError("fromutc(): dt.dst gave inconsistent " - "results; cannot convert") - return dt + dtdst - - @_validate_fromutc_inputs - def fromutc(self, dt): - """ - Given a timezone-aware datetime in a given timezone, calculates a - timezone-aware datetime in a new timezone. - - Since this is the one time that we *know* we have an unambiguous - datetime object, we take this opportunity to determine whether the - datetime is ambiguous and in a "fold" state (e.g. if it's the first - occurrence, chronologically, of the ambiguous datetime). 
- - :param dt: - A timezone-aware :class:`datetime.datetime` object. - """ - dt_wall = self._fromutc(dt) - - # Calculate the fold status given the two datetimes. - _fold = self._fold_status(dt, dt_wall) - - # Set the default fold value for ambiguous dates - return enfold(dt_wall, fold=_fold) - - -class tzrangebase(_tzinfo): - """ - This is an abstract base class for time zones represented by an annual - transition into and out of DST. Child classes should implement the following - methods: - - * ``__init__(self, *args, **kwargs)`` - * ``transitions(self, year)`` - this is expected to return a tuple of - datetimes representing the DST on and off transitions in standard - time. - - A fully initialized ``tzrangebase`` subclass should also provide the - following attributes: - * ``hasdst``: Boolean whether or not the zone uses DST. - * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects - representing the respective UTC offsets. - * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short - abbreviations in DST and STD, respectively. - * ``_hasdst``: Whether or not the zone has DST. - - .. versionadded:: 2.6.0 - """ - def __init__(self): - raise NotImplementedError('tzrangebase is an abstract base class') - - def utcoffset(self, dt): - isdst = self._isdst(dt) - - if isdst is None: - return None - elif isdst: - return self._dst_offset - else: - return self._std_offset - - def dst(self, dt): - isdst = self._isdst(dt) - - if isdst is None: - return None - elif isdst: - return self._dst_base_offset - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - if self._isdst(dt): - return self._dst_abbr - else: - return self._std_abbr - - def fromutc(self, dt): - """ Given a datetime in UTC, return local time """ - if not isinstance(dt, datetime): - raise TypeError("fromutc() requires a datetime argument") - - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - # Get transitions - if there are none, fixed offset - transitions = self.transitions(dt.year) - if transitions is None: - return dt + self.utcoffset(dt) - - # Get the transition times in UTC - dston, dstoff = transitions - - dston -= self._std_offset - dstoff -= self._std_offset - - utc_transitions = (dston, dstoff) - dt_utc = dt.replace(tzinfo=None) - - isdst = self._naive_isdst(dt_utc, utc_transitions) - - if isdst: - dt_wall = dt + self._dst_offset - else: - dt_wall = dt + self._std_offset - - _fold = int(not isdst and self.is_ambiguous(dt_wall)) - - return enfold(dt_wall, fold=_fold) - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. 
versionadded:: 2.6.0 - """ - if not self.hasdst: - return False - - start, end = self.transitions(dt.year) - - dt = dt.replace(tzinfo=None) - return (end <= dt < end + self._dst_base_offset) - - def _isdst(self, dt): - if not self.hasdst: - return False - elif dt is None: - return None - - transitions = self.transitions(dt.year) - - if transitions is None: - return False - - dt = dt.replace(tzinfo=None) - - isdst = self._naive_isdst(dt, transitions) - - # Handle ambiguous dates - if not isdst and self.is_ambiguous(dt): - return not self._fold(dt) - else: - return isdst - - def _naive_isdst(self, dt, transitions): - dston, dstoff = transitions - - dt = dt.replace(tzinfo=None) - - if dston < dstoff: - isdst = dston <= dt < dstoff - else: - isdst = not dstoff <= dt < dston - - return isdst - - @property - def _dst_base_offset(self): - return self._dst_offset - self._std_offset - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(...)" % self.__class__.__name__ - - __reduce__ = object.__reduce__ diff --git a/venv/Lib/site-packages/dateutil/tz/_factories.py b/venv/Lib/site-packages/dateutil/tz/_factories.py deleted file mode 100644 index f8a6589..0000000 --- a/venv/Lib/site-packages/dateutil/tz/_factories.py +++ /dev/null @@ -1,80 +0,0 @@ -from datetime import timedelta -import weakref -from collections import OrderedDict - -from six.moves import _thread - - -class _TzSingleton(type): - def __init__(cls, *args, **kwargs): - cls.__instance = None - super(_TzSingleton, cls).__init__(*args, **kwargs) - - def __call__(cls): - if cls.__instance is None: - cls.__instance = super(_TzSingleton, cls).__call__() - return cls.__instance - - -class _TzFactory(type): - def instance(cls, *args, **kwargs): - """Alternate constructor that returns a fresh instance""" - return type.__call__(cls, *args, **kwargs) - - -class _TzOffsetFactory(_TzFactory): - def __init__(cls, *args, **kwargs): - cls.__instances = weakref.WeakValueDictionary() - cls.__strong_cache = OrderedDict() - cls.__strong_cache_size = 8 - - cls._cache_lock = _thread.allocate_lock() - - def __call__(cls, name, offset): - if isinstance(offset, timedelta): - key = (name, offset.total_seconds()) - else: - key = (name, offset) - - instance = cls.__instances.get(key, None) - if instance is None: - instance = cls.__instances.setdefault(key, - cls.instance(name, offset)) - - # This lock may not be necessary in Python 3. See GH issue #901 - with cls._cache_lock: - cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) - - # Remove an item if the strong cache is overpopulated - if len(cls.__strong_cache) > cls.__strong_cache_size: - cls.__strong_cache.popitem(last=False) - - return instance - - -class _TzStrFactory(_TzFactory): - def __init__(cls, *args, **kwargs): - cls.__instances = weakref.WeakValueDictionary() - cls.__strong_cache = OrderedDict() - cls.__strong_cache_size = 8 - - cls.__cache_lock = _thread.allocate_lock() - - def __call__(cls, s, posix_offset=False): - key = (s, posix_offset) - instance = cls.__instances.get(key, None) - - if instance is None: - instance = cls.__instances.setdefault(key, - cls.instance(s, posix_offset)) - - # This lock may not be necessary in Python 3. 
diff --git a/venv/Lib/site-packages/dateutil/tz/tz.py b/venv/Lib/site-packages/dateutil/tz/tz.py
deleted file mode 100644
index 6175914..0000000
--- a/venv/Lib/site-packages/dateutil/tz/tz.py
+++ /dev/null
@@ -1,1849 +0,0 @@
[1849 lines removed: the upstream dateutil.tz implementation, i.e. the tzutc singleton and UTC constant, tzoffset (fixed offsets), tzlocal (system time zone), tzfile (binary tzfile(5) parser with transition lists and fold handling), tzrange/tzstr (POSIX TZ-style rules), tzical (VTIMEZONE parsing), the cached gettz() factory with its TZFILES/TZPATHS search, the datetime_exists, datetime_ambiguous and resolve_imaginary helpers, plus internal utilities (_datetime_to_timestamp, _get_supported_offset, a _nullcontext shim)]
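The deleted tz.py is simply the upstream dateutil.tz module, so the project keeps all of this behaviour as long as python-dateutil stays an installed dependency instead of a vendored copy. A short sanity check of the documented API (the zone names below are examples, not project configuration):

from datetime import datetime
from dateutil import tz

NYC = tz.gettz("America/New_York")            # IANA key; cached per name since 2.7.0
print(datetime(2016, 7, 7, tzinfo=NYC))       # 2016-07-07 00:00:00-04:00 (EDT)

# Fold/gap helpers from the removed module:
print(tz.datetime_ambiguous(datetime(2017, 11, 5, 1, 30), tz=NYC))   # True: fall-back repeat
print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC)))
# 2017-03-12 03:30:00-04:00, the spring-forward gap is pushed forward

# POSIX TZ strings still parse the same way:
print(datetime(2016, 1, 3, tzinfo=tz.tzstr("EST5EDT")).utcoffset())  # -1 day, 19:00:00 (UTC-5)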
diff --git a/venv/Lib/site-packages/dateutil/tz/win.py b/venv/Lib/site-packages/dateutil/tz/win.py
deleted file mode 100644
index cde07ba..0000000
--- a/venv/Lib/site-packages/dateutil/tz/win.py
+++ /dev/null
@@ -1,370 +0,0 @@
[370 lines removed: the Windows-only backend, i.e. tzres (localized zone names from tzres.dll), tzwinbase/tzwin (zone rules read from the registry's Time Zones key), tzwinlocal (the machine's current zone from TimeZoneInformation) and the picknthweekday/valuestodict helpers]
diff --git a/venv/Lib/site-packages/dateutil/tzwin.py b/venv/Lib/site-packages/dateutil/tzwin.py
deleted file mode 100644
index cebc673..0000000
--- a/venv/Lib/site-packages/dateutil/tzwin.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# tzwin has moved to dateutil.tz.win
-from .tz.win import *
diff --git a/venv/Lib/site-packages/dateutil/utils.py b/venv/Lib/site-packages/dateutil/utils.py
deleted file mode 100644
index dd2d245..0000000
--- a/venv/Lib/site-packages/dateutil/utils.py
+++ /dev/null
@@ -1,71 +0,0 @@
[71 lines removed: the today(), default_tzinfo() and within_delta() convenience helpers]
versionadded:: 2.7.0 -""" -from __future__ import unicode_literals - -from datetime import datetime, time - - -def today(tzinfo=None): - """ - Returns a :py:class:`datetime` representing the current day at midnight - - :param tzinfo: - The time zone to attach (also used to determine the current day). - - :return: - A :py:class:`datetime.datetime` object representing the current day - at midnight. - """ - - dt = datetime.now(tzinfo) - return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) - - -def default_tzinfo(dt, tzinfo): - """ - Sets the ``tzinfo`` parameter on naive datetimes only - - This is useful for example when you are provided a datetime that may have - either an implicit or explicit time zone, such as when parsing a time zone - string. - - .. doctest:: - - >>> from dateutil.tz import tzoffset - >>> from dateutil.parser import parse - >>> from dateutil.utils import default_tzinfo - >>> dflt_tz = tzoffset("EST", -18000) - >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) - 2014-01-01 12:30:00+00:00 - >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) - 2014-01-01 12:30:00-05:00 - - :param dt: - The datetime on which to replace the time zone - - :param tzinfo: - The :py:class:`datetime.tzinfo` subclass instance to assign to - ``dt`` if (and only if) it is naive. - - :return: - Returns an aware :py:class:`datetime.datetime`. - """ - if dt.tzinfo is not None: - return dt - else: - return dt.replace(tzinfo=tzinfo) - - -def within_delta(dt1, dt2, delta): - """ - Useful for comparing two datetimes that may have a negligible difference - to be considered equal. - """ - delta = abs(delta) - difference = dt1 - dt2 - return -delta <= difference <= delta diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/__init__.py b/venv/Lib/site-packages/dateutil/zoneinfo/__init__.py deleted file mode 100644 index 34f11ad..0000000 --- a/venv/Lib/site-packages/dateutil/zoneinfo/__init__.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -import warnings -import json - -from tarfile import TarFile -from pkgutil import get_data -from io import BytesIO - -from dateutil.tz import tzfile as _tzfile - -__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] - -ZONEFILENAME = "dateutil-zoneinfo.tar.gz" -METADATA_FN = 'METADATA' - - -class tzfile(_tzfile): - def __reduce__(self): - return (gettz, (self._filename,)) - - -def getzoneinfofile_stream(): - try: - return BytesIO(get_data(__name__, ZONEFILENAME)) - except IOError as e: # TODO switch to FileNotFoundError? - warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) - return None - - -class ZoneInfoFile(object): - def __init__(self, zonefile_stream=None): - if zonefile_stream is not None: - with TarFile.open(fileobj=zonefile_stream) as tf: - self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) - for zf in tf.getmembers() - if zf.isfile() and zf.name != METADATA_FN} - # deal with links: They'll point to their parent object. Less - # waste of memory - links = {zl.name: self.zones[zl.linkname] - for zl in tf.getmembers() if - zl.islnk() or zl.issym()} - self.zones.update(links) - try: - metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) - metadata_str = metadata_json.read().decode('UTF-8') - self.metadata = json.loads(metadata_str) - except KeyError: - # no metadata in tar file - self.metadata = None - else: - self.zones = {} - self.metadata = None - - def get(self, name, default=None): - """ - Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method - for retrieving zones from the zone dictionary. - - :param name: - The name of the zone to retrieve. (Generally IANA zone names) - - :param default: - The value to return in the event of a missing key. - - .. versionadded:: 2.6.0 - - """ - return self.zones.get(name, default) - - -# The current API has gettz as a module function, although in fact it taps into -# a stateful class. So as a workaround for now, without changing the API, we -# will create a new "global" class instance the first time a user requests a -# timezone. Ugly, but adheres to the api. -# -# TODO: Remove after deprecation period. -_CLASS_ZONE_INSTANCE = [] - - -def get_zonefile_instance(new_instance=False): - """ - This is a convenience function which provides a :class:`ZoneInfoFile` - instance using the data provided by the ``dateutil`` package. By default, it - caches a single instance of the ZoneInfoFile object and returns that. - - :param new_instance: - If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and - used as the cached instance for the next call. Otherwise, new instances - are created only as necessary. - - :return: - Returns a :class:`ZoneInfoFile` object. - - .. versionadded:: 2.6 - """ - if new_instance: - zif = None - else: - zif = getattr(get_zonefile_instance, '_cached_instance', None) - - if zif is None: - zif = ZoneInfoFile(getzoneinfofile_stream()) - - get_zonefile_instance._cached_instance = zif - - return zif - - -def gettz(name): - """ - This retrieves a time zone from the local zoneinfo tarball that is packaged - with dateutil. - - :param name: - An IANA-style time zone name, as found in the zoneinfo file. - - :return: - Returns a :class:`dateutil.tz.tzfile` time zone object. - - .. warning:: - It is generally inadvisable to use this function, and it is only - provided for API compatibility with earlier versions. This is *not* - equivalent to ``dateutil.tz.gettz()``, which selects an appropriate - time zone based on the inputs, favoring system zoneinfo. This is ONLY - for accessing the dateutil-specific zoneinfo (which may be out of - date compared to the system zoneinfo). - - .. deprecated:: 2.6 - If you need to use a specific zoneinfofile over the system zoneinfo, - instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call - :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. - - Use :func:`get_zonefile_instance` to retrieve an instance of the - dateutil-provided zoneinfo. - """ - warnings.warn("zoneinfo.gettz() will be removed in future versions, " - "to use the dateutil-provided zoneinfo files, instantiate a " - "ZoneInfoFile object and use ZoneInfoFile.zones.get() " - "instead. See the documentation for details.", - DeprecationWarning) - - if len(_CLASS_ZONE_INSTANCE) == 0: - _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) - return _CLASS_ZONE_INSTANCE[0].zones.get(name) - - -def gettz_db_metadata(): - """ Get the zonefile metadata - - See `zonefile_metadata`_ - - :returns: - A dictionary with the database metadata - - .. deprecated:: 2.6 - See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, - query the attribute ``zoneinfo.ZoneInfoFile.metadata``. - """ - warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " - "versions, to use the dateutil-provided zoneinfo files, " - "ZoneInfoFile object and query the 'metadata' attribute " - "instead. 
See the documentation for details.", - DeprecationWarning) - - if len(_CLASS_ZONE_INSTANCE) == 0: - _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) - return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 063d31a..0000000 Binary files a/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc b/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc deleted file mode 100644 index e147063..0000000 Binary files a/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/venv/Lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz deleted file mode 100644 index 1461f8c..0000000 Binary files a/venv/Lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz and /dev/null differ diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/rebuild.py b/venv/Lib/site-packages/dateutil/zoneinfo/rebuild.py deleted file mode 100644 index 684c658..0000000 --- a/venv/Lib/site-packages/dateutil/zoneinfo/rebuild.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging -import os -import tempfile -import shutil -import json -from subprocess import check_call, check_output -from tarfile import TarFile - -from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME - - -def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): - """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* - - filename is the timezone tarball from ``ftp.iana.org/tz``. - - """ - tmpdir = tempfile.mkdtemp() - zonedir = os.path.join(tmpdir, "zoneinfo") - moduledir = os.path.dirname(__file__) - try: - with TarFile.open(filename) as tf: - for name in zonegroups: - tf.extract(name, tmpdir) - filepaths = [os.path.join(tmpdir, n) for n in zonegroups] - - _run_zic(zonedir, filepaths) - - # write metadata file - with open(os.path.join(zonedir, METADATA_FN), 'w') as f: - json.dump(metadata, f, indent=4, sort_keys=True) - target = os.path.join(moduledir, ZONEFILENAME) - with TarFile.open(target, "w:%s" % format) as tf: - for entry in os.listdir(zonedir): - entrypath = os.path.join(zonedir, entry) - tf.add(entrypath, entry) - finally: - shutil.rmtree(tmpdir) - - -def _run_zic(zonedir, filepaths): - """Calls the ``zic`` compiler in a compatible way to get a "fat" binary. - - Recent versions of ``zic`` default to ``-b slim``, while older versions - don't even have the ``-b`` option (but default to "fat" binaries). The - current version of dateutil does not support Version 2+ TZif files, which - causes problems when used in conjunction with "slim" binaries, so this - function is used to ensure that we always get a "fat" binary. - """ - - try: - help_text = check_output(["zic", "--help"]) - except OSError as e: - _print_on_nosuchfile(e) - raise - - if b"-b " in help_text: - bloat_args = ["-b", "fat"] - else: - bloat_args = [] - - check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths) - - -def _print_on_nosuchfile(e): - """Print helpful troubleshooting message - - e is an exception raised by subprocess.check_call() - - """ - if e.errno == 2: - logging.error( - "Could not find zic. 
Perhaps you need to install " - "libc-bin or some other package that provides it, " - "or it's not in your PATH?") diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/INSTALLER b/venv/Lib/site-packages/flask-3.1.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/flask-3.1.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/METADATA b/venv/Lib/site-packages/flask-3.1.2.dist-info/METADATA deleted file mode 100644 index 46028fb..0000000 --- a/venv/Lib/site-packages/flask-3.1.2.dist-info/METADATA +++ /dev/null @@ -1,91 +0,0 @@ -Metadata-Version: 2.4 -Name: Flask -Version: 3.1.2 -Summary: A simple framework for building complex web applications. -Maintainer-email: Pallets -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: blinker>=1.9.0 -Requires-Dist: click>=8.1.3 -Requires-Dist: importlib-metadata>=3.6.0; python_version < '3.10' -Requires-Dist: itsdangerous>=2.2.0 -Requires-Dist: jinja2>=3.1.2 -Requires-Dist: markupsafe>=2.1.1 -Requires-Dist: werkzeug>=3.1.0 -Requires-Dist: asgiref>=3.2 ; extra == "async" -Requires-Dist: python-dotenv ; extra == "dotenv" -Project-URL: Changes, https://flask.palletsprojects.com/page/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/flask/ -Provides-Extra: async -Provides-Extra: dotenv - -
    - -# Flask - -Flask is a lightweight [WSGI] web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around [Werkzeug] -and [Jinja], and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. - -[WSGI]: https://wsgi.readthedocs.io/ -[Werkzeug]: https://werkzeug.palletsprojects.com/ -[Jinja]: https://jinja.palletsprojects.com/ - -## A Simple Example - -```python -# save this as app.py -from flask import Flask - -app = Flask(__name__) - -@app.route("/") -def hello(): - return "Hello, World!" -``` - -``` -$ flask run - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) -``` - -## Donate - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/RECORD b/venv/Lib/site-packages/flask-3.1.2.dist-info/RECORD deleted file mode 100644 index 44a89bf..0000000 --- a/venv/Lib/site-packages/flask-3.1.2.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -../../Scripts/flask.exe,sha256=bl6ElShgvgn105GNrL8RWXvilQzkYpRu6NIp-lFVmbg,108377 -flask-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask-3.1.2.dist-info/METADATA,sha256=oRg63DAAIcoLAr7kzTgIEKfm8_4HMTRpmWmIptdY_js,3167 -flask-3.1.2.dist-info/RECORD,, -flask-3.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask-3.1.2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -flask-3.1.2.dist-info/entry_points.txt,sha256=bBP7hTOS5fz9zLtC7sPofBZAlMkEvBxu7KqS6l5lvc4,40 -flask-3.1.2.dist-info/licenses/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -flask/__init__.py,sha256=mHvJN9Swtl1RDtjCqCIYyIniK_SZ_l_hqUynOzgpJ9o,2701 -flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 -flask/__pycache__/__init__.cpython-312.pyc,, -flask/__pycache__/__main__.cpython-312.pyc,, -flask/__pycache__/app.cpython-312.pyc,, -flask/__pycache__/blueprints.cpython-312.pyc,, -flask/__pycache__/cli.cpython-312.pyc,, -flask/__pycache__/config.cpython-312.pyc,, -flask/__pycache__/ctx.cpython-312.pyc,, -flask/__pycache__/debughelpers.cpython-312.pyc,, -flask/__pycache__/globals.cpython-312.pyc,, -flask/__pycache__/helpers.cpython-312.pyc,, -flask/__pycache__/logging.cpython-312.pyc,, -flask/__pycache__/sessions.cpython-312.pyc,, -flask/__pycache__/signals.cpython-312.pyc,, -flask/__pycache__/templating.cpython-312.pyc,, -flask/__pycache__/testing.cpython-312.pyc,, -flask/__pycache__/typing.cpython-312.pyc,, -flask/__pycache__/views.cpython-312.pyc,, -flask/__pycache__/wrappers.cpython-312.pyc,, -flask/app.py,sha256=XGqgFRsLgBhzIoB2HSftoMTIM3hjDiH6rdV7c3g3IKc,61744 -flask/blueprints.py,sha256=p5QE2lY18GItbdr_RKRpZ8Do17g0PvQGIgZkSUDhX2k,4541 
-flask/cli.py,sha256=Pfh72-BxlvoH0QHCDOc1HvXG7Kq5Xetf3zzNz2kNSHk,37184 -flask/config.py,sha256=PiqF0DPam6HW0FH4CH1hpXTBe30NSzjPEOwrz1b6kt0,13219 -flask/ctx.py,sha256=sPKzahqtgxaS7O0y9E_NzUJNUDyTD6M4GkDrVu2fU3Y,15064 -flask/debughelpers.py,sha256=PGIDhStW_efRjpaa3zHIpo-htStJOR41Ip3OJWPYBwo,6080 -flask/globals.py,sha256=XdQZmStBmPIs8t93tjx6pO7Bm3gobAaONWkFcUHaGas,1713 -flask/helpers.py,sha256=rJZge7_J288J1UQv5-kNf4oEaw332PP8NTW0QRIBbXE,23517 -flask/json/__init__.py,sha256=hLNR898paqoefdeAhraa5wyJy-bmRB2k2dV4EgVy2Z8,5602 -flask/json/__pycache__/__init__.cpython-312.pyc,, -flask/json/__pycache__/provider.cpython-312.pyc,, -flask/json/__pycache__/tag.cpython-312.pyc,, -flask/json/provider.py,sha256=5imEzY5HjV2HoUVrQbJLqXCzMNpZXfD0Y1XqdLV2XBA,7672 -flask/json/tag.py,sha256=DhaNwuIOhdt2R74oOC9Y4Z8ZprxFYiRb5dUP5byyINw,9281 -flask/logging.py,sha256=8sM3WMTubi1cBb2c_lPkWpN0J8dMAqrgKRYLLi1dCVI,2377 -flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask/sansio/README.md,sha256=-0X1tECnilmz1cogx-YhNw5d7guK7GKrq_DEV2OzlU0,228 -flask/sansio/__pycache__/app.cpython-312.pyc,, -flask/sansio/__pycache__/blueprints.cpython-312.pyc,, -flask/sansio/__pycache__/scaffold.cpython-312.pyc,, -flask/sansio/app.py,sha256=5EbxwHOchgcpZqQyalA9vyDBopknOvDg6BVwXFyFD2s,38099 -flask/sansio/blueprints.py,sha256=Tqe-7EkZ-tbWchm8iDoCfD848f0_3nLv6NNjeIPvHwM,24637 -flask/sansio/scaffold.py,sha256=wSASXYdFRWJmqcL0Xq-T7N-PDVUSiFGvjO9kPZg58bk,30371 -flask/sessions.py,sha256=duvYGmCGh_H3cgMuy2oeSjrCsCvLylF4CBKOXpN0Qms,15480 -flask/signals.py,sha256=V7lMUww7CqgJ2ThUBn1PiatZtQanOyt7OZpu2GZI-34,750 -flask/templating.py,sha256=IHsdsF-eBJPCJE0AJLCi1VhhnytOGdzHCn3yThz87c4,7536 -flask/testing.py,sha256=zzC7XxhBWOP9H697IV_4SG7Lg3Lzb5PWiyEP93_KQXE,10117 -flask/typing.py,sha256=L-L5t2jKgS0aOmVhioQ_ylqcgiVFnA6yxO-RLNhq-GU,3293 -flask/views.py,sha256=xzJx6oJqGElThtEghZN7ZQGMw5TDFyuRxUkecwRuAoA,6962 -flask/wrappers.py,sha256=jUkv4mVek2Iq4hwxd4RvqrIMb69Bv0PElDgWLmd5ORo,9406 diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/REQUESTED b/venv/Lib/site-packages/flask-3.1.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/WHEEL b/venv/Lib/site-packages/flask-3.1.2.dist-info/WHEEL deleted file mode 100644 index d8b9936..0000000 --- a/venv/Lib/site-packages/flask-3.1.2.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/entry_points.txt b/venv/Lib/site-packages/flask-3.1.2.dist-info/entry_points.txt deleted file mode 100644 index eec6733..0000000 --- a/venv/Lib/site-packages/flask-3.1.2.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -flask=flask.cli:main - diff --git a/venv/Lib/site-packages/flask-3.1.2.dist-info/licenses/LICENSE.txt b/venv/Lib/site-packages/flask-3.1.2.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 9d227a0..0000000 --- a/venv/Lib/site-packages/flask-3.1.2.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/flask/__init__.py b/venv/Lib/site-packages/flask/__init__.py deleted file mode 100644 index 1fdc50c..0000000 --- a/venv/Lib/site-packages/flask/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import json as json -from .app import Flask as Flask -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import after_this_request as after_this_request -from .ctx import copy_current_request_context as copy_current_request_context -from .ctx import has_app_context as has_app_context -from .ctx import has_request_context as has_request_context -from .globals import current_app as current_app -from .globals import g as g -from .globals import request as request -from .globals import session as session -from .helpers import abort as abort -from .helpers import flash as flash -from .helpers import get_flashed_messages as get_flashed_messages -from .helpers import get_template_attribute as get_template_attribute -from .helpers import make_response as make_response -from .helpers import redirect as redirect -from .helpers import send_file as send_file -from .helpers import send_from_directory as send_from_directory -from .helpers import stream_with_context as stream_with_context -from .helpers import url_for as url_for -from .json import jsonify as jsonify -from .signals import appcontext_popped as appcontext_popped -from .signals import appcontext_pushed as appcontext_pushed -from .signals import appcontext_tearing_down as appcontext_tearing_down -from .signals import before_render_template as before_render_template -from .signals import got_request_exception as got_request_exception -from .signals import message_flashed as message_flashed -from .signals import request_finished as request_finished -from .signals import request_started as request_started -from .signals import request_tearing_down as request_tearing_down -from .signals import template_rendered as template_rendered -from .templating import render_template as render_template -from .templating import render_template_string as render_template_string -from .templating import stream_template as stream_template -from .templating import stream_template_string as stream_template_string -from .wrappers import Request as Request -from .wrappers import 
Response as Response - -if not t.TYPE_CHECKING: - - def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Flask 3.2. Use feature detection or" - " 'importlib.metadata.version(\"flask\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("flask") - - raise AttributeError(name) diff --git a/venv/Lib/site-packages/flask/__main__.py b/venv/Lib/site-packages/flask/__main__.py deleted file mode 100644 index 4e28416..0000000 --- a/venv/Lib/site-packages/flask/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - -main() diff --git a/venv/Lib/site-packages/flask/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index eba0b43..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/__main__.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index de64eea..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/app.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/app.cpython-312.pyc deleted file mode 100644 index a5cdbf5..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/app.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/blueprints.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/blueprints.cpython-312.pyc deleted file mode 100644 index be16b67..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/blueprints.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/cli.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/cli.cpython-312.pyc deleted file mode 100644 index 9db2b91..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/cli.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/config.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/config.cpython-312.pyc deleted file mode 100644 index 81d7f52..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/config.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/ctx.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/ctx.cpython-312.pyc deleted file mode 100644 index e898b71..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/ctx.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc deleted file mode 100644 index 8eda525..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/globals.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/globals.cpython-312.pyc deleted file mode 100644 index 6e5efdc..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/globals.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/helpers.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/helpers.cpython-312.pyc deleted file mode 100644 index 8176897..0000000 
Binary files a/venv/Lib/site-packages/flask/__pycache__/helpers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/logging.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/logging.cpython-312.pyc deleted file mode 100644 index 2a4fa6e..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/logging.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/sessions.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/sessions.cpython-312.pyc deleted file mode 100644 index 339afbc..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/sessions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/signals.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/signals.cpython-312.pyc deleted file mode 100644 index 83c5373..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/signals.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/templating.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/templating.cpython-312.pyc deleted file mode 100644 index 92cf938..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/templating.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/testing.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/testing.cpython-312.pyc deleted file mode 100644 index 01e3f1a..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/testing.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/typing.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/typing.cpython-312.pyc deleted file mode 100644 index 86db35c..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/typing.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/views.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/views.cpython-312.pyc deleted file mode 100644 index b082f93..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/views.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/__pycache__/wrappers.cpython-312.pyc b/venv/Lib/site-packages/flask/__pycache__/wrappers.cpython-312.pyc deleted file mode 100644 index 6a94156..0000000 Binary files a/venv/Lib/site-packages/flask/__pycache__/wrappers.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/app.py b/venv/Lib/site-packages/flask/app.py deleted file mode 100644 index 1232b03..0000000 --- a/venv/Lib/site-packages/flask/app.py +++ /dev/null @@ -1,1536 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import sys -import typing as t -import weakref -from datetime import timedelta -from inspect import iscoroutinefunction -from itertools import chain -from types import TracebackType -from urllib.parse import quote as _url_quote - -import click -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.routing import BuildError -from werkzeug.routing import MapAdapter -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.serving import is_running_from_reloader -from werkzeug.wrappers import Response as 
BaseResponse -from werkzeug.wsgi import get_host - -from . import cli -from . import typing as ft -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import g -from .globals import request -from .globals import request_ctx -from .globals import session -from .helpers import get_debug_flag -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .helpers import send_from_directory -from .sansio.app import App -from .sansio.scaffold import _sentinel -from .sessions import SecureCookieSessionInterface -from .sessions import SessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import Environment -from .wrappers import Request -from .wrappers import Response - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIEnvironment - - from .testing import FlaskClient - from .testing import FlaskCliRunner - from .typing import HeadersValue - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class Flask(App): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? 
The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. 
- """ - - default_config = ImmutableDict( - { - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "SECRET_KEY": None, - "SECRET_KEY_FALLBACKS": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "TRUSTED_HOSTS": None, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_PARTITIONED": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "MAX_FORM_MEMORY_SIZE": 500_000, - "MAX_FORM_PARTS": 1_000, - "SEND_FILE_MAX_AGE_DEFAULT": None, - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - "PROVIDE_AUTOMATIC_OPTIONS": True, - } - ) - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class: type[Request] = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class: type[Response] = Response - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface: SessionInterface = SecureCookieSessionInterface() - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ): - super().__init__( - import_name=import_name, - static_url_path=static_url_path, - static_folder=static_folder, - static_host=static_host, - host_matching=host_matching, - subdomain_matching=subdomain_matching, - template_folder=template_folder, - instance_path=instance_path, - instance_relative_config=instance_relative_config, - root_path=root_path, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = cli.AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert bool(static_host) == host_matching, ( - "Invalid static_host/host_matching combination" - ) - # Use a weakref to avoid creating a reference cycle between the app - # and the view function (see #3761). 
- self_ref = weakref.ref(self) - self.add_url_rule( - f"{self.static_url_path}/", - endpoint="static", - host=static_host, - view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 - ) - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource( - self, resource: str, mode: str = "rb", encoding: str | None = None - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for reading. - - For example, if the file ``schema.sql`` is next to the file - ``app.py`` where the ``Flask`` app is defined, it can be opened - with: - - .. code-block:: python - - with app.open_resource("schema.sql") as f: - conn.executescript(f.read()) - - :param resource: Path to the resource relative to :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is supported, - valid values are ``"r"`` (or ``"rt"``) and ``"rb"``. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - path = os.path.join(self.root_path, resource) - - if mode == "rb": - return open(path, mode) # pyright: ignore - - return open(path, mode, encoding=encoding) - - def open_instance_resource( - self, resource: str, mode: str = "rb", encoding: str | None = "utf-8" - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to the application's instance folder - :attr:`instance_path`. Unlike :meth:`open_resource`, files in the - instance folder can be opened for writing. - - :param resource: Path to the resource relative to :attr:`instance_path`. - :param mode: Open the file in this mode. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. 
- """ - path = os.path.join(self.instance_path, resource) - - if "b" in mode: - return open(path, mode) - - return open(path, mode, encoding=encoding) - - def create_jinja_environment(self) -> Environment: - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] - - if auto_reload is None: - auto_reload = self.debug - - options["auto_reload"] = auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=self.url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.policies["json.dumps_function"] = self.json.dumps - return rv - - def create_url_adapter(self, request: Request | None) -> MapAdapter | None: - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionchanged:: 3.1 - If :data:`SERVER_NAME` is set, it does not restrict requests to - only that domain, for both ``subdomain_matching`` and - ``host_matching``. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - - .. versionchanged:: 0.9 - This can be called outside a request when the URL adapter is created - for an application context. - - .. versionadded:: 0.6 - """ - if request is not None: - if (trusted_hosts := self.config["TRUSTED_HOSTS"]) is not None: - request.trusted_hosts = trusted_hosts - - # Check trusted_hosts here until bind_to_environ does. - request.host = get_host(request.environ, request.trusted_hosts) # pyright: ignore - subdomain = None - server_name = self.config["SERVER_NAME"] - - if self.url_map.host_matching: - # Don't pass SERVER_NAME, otherwise it's used and the actual - # host is ignored, which breaks host matching. - server_name = None - elif not self.subdomain_matching: - # Werkzeug doesn't implement subdomain matching yet. Until then, - # disable it by forcing the current subdomain to the default, or - # the empty string. - subdomain = self.url_map.default_subdomain or "" - - return self.url_map.bind_to_environ( - request.environ, server_name=server_name, subdomain=subdomain - ) - - # Need at least SERVER_NAME to match/build outside a request. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - return None - - def raise_routing_exception(self, request: Request) -> t.NoReturn: - """Intercept routing exceptions and possibly do something else. - - In debug mode, intercept a routing redirect and replace it with - an error if the body will be discarded. 
- - With modern Werkzeug this shouldn't occur, since it now uses a - 308 status which tells the browser to resend the method and - body. - - .. versionchanged:: 2.1 - Don't intercept 307 and 308 redirects. - - :meta private: - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.routing_exception.code in {307, 308} - or request.method in {"GET", "HEAD", "OPTIONS"} - ): - raise request.routing_exception # type: ignore[misc] - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def update_template_context(self, context: dict[str, t.Any]) -> None: - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - names: t.Iterable[str | None] = (None,) - - # A template may be rendered outside a request context. - if request: - names = chain(names, reversed(request.blueprints)) - - # The values passed to render_template take precedence. Keep a - # copy to re-apply after all context functions. - orig_ctx = context.copy() - - for name in names: - if name in self.template_context_processors: - for func in self.template_context_processors[name]: - context.update(self.ensure_sync(func)()) - - context.update(orig_ctx) - - def make_shell_context(self) -> dict[str, t.Any]: - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - def run( - self, - host: str | None = None, - port: int | None = None, - debug: bool | None = None, - load_dotenv: bool = True, - **options: t.Any, - ) -> None: - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :doc:`/deploying/index` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. 
Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Ignore this call so that it doesn't start another server if - # the 'flask run' command is used. - if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - if not is_running_from_reloader(): - click.secho( - " * Ignoring a call to 'app.run()' that would block" - " the current 'flask' CLI command.\n" - " Only call 'app.run()' in an 'if __name__ ==" - ' "__main__"\' guard.', - fg="red", - ) - - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, env var overrides existing value - if "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - server_name = self.config.get("SERVER_NAME") - sn_host = sn_port = None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - if not host: - if sn_host: - host = sn_host - else: - host = "127.0.0.1" - - if port or port == 0: - port = int(port) - elif sn_port: - port = int(sn_port) - else: - port = 5000 - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.debug, self.name) - - from werkzeug.serving import run_simple - - try: - run_simple(t.cast(str, host), port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> FlaskClient: - """Creates a test client for this application. For information - about unit testing head over to :doc:`/testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. 
This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. - For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. - """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls - return cls( # type: ignore - self, self.response_class, use_cookies=use_cookies, **kwargs - ) - - def test_cli_runner(self, **kwargs: t.Any) -> FlaskCliRunner: - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls - - return cls(self, **kwargs) # type: ignore - - def handle_http_exception( - self, e: HTTPException - ) -> HTTPException | ft.ResponseReturnValue: - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPException`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e, request.blueprints) - if handler is None: - return e - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_user_exception( - self, e: Exception - ) -> HTTPException | ft.ResponseReturnValue: - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. 
versionadded:: 0.7 - """ - if isinstance(e, BadRequestKeyError) and ( - self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] - ): - e.show_exception = True - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e, request.blueprints) - - if handler is None: - raise - - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_exception(self, e: Exception) -> Response: - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :data:`PROPAGATE_EXCEPTIONS` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_info = sys.exc_info() - got_request_exception.send(self, _async_wrapper=self.ensure_sync, exception=e) - propagate = self.config["PROPAGATE_EXCEPTIONS"] - - if propagate is None: - propagate = self.testing or self.debug - - if propagate: - # Re-raise if called with an active exception, otherwise - # raise the passed in exception. - if exc_info[1] is e: - raise - - raise e - - self.log_exception(exc_info) - server_error: InternalServerError | ft.ResponseReturnValue - server_error = InternalServerError(original_exception=e) - handler = self._find_error_handler(server_error, request.blueprints) - - if handler is not None: - server_error = self.ensure_sync(handler)(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception( - self, - exc_info: (tuple[type, BaseException, TracebackType] | tuple[None, None, None]), - ) -> None: - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - f"Exception on {request.path} [{request.method}]", exc_info=exc_info - ) - - def dispatch_request(self) -> ft.ResponseReturnValue: - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. 
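The handler lookup performed by ``handle_http_exception`` and ``handle_user_exception`` resolves functions registered with ``errorhandler``; a minimal sketch, assuming an existing ``app`` (the JSON bodies are illustrative)::

    from flask import jsonify
    from werkzeug.exceptions import HTTPException, NotFound

    @app.errorhandler(NotFound)
    def handle_not_found(e):
        return jsonify(error="not found"), 404

    @app.errorhandler(HTTPException)
    def handle_http_fallback(e):
        # catch-all; handlers are matched by code and MRO, so the more
        # specific NotFound handler above still wins for 404s
        return jsonify(error=e.description), e.code
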
- """ - req = request_ctx.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule: Rule = req.url_rule # type: ignore[assignment] - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - view_args: dict[str, t.Any] = req.view_args # type: ignore[assignment] - return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) # type: ignore[no-any-return] - - def full_dispatch_request(self) -> Response: - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self._got_first_request = True - - try: - request_started.send(self, _async_wrapper=self.ensure_sync) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request( - self, - rv: ft.ResponseReturnValue | HTTPException, - from_error_handler: bool = False, - ) -> Response: - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send( - self, _async_wrapper=self.ensure_sync, response=response - ) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def make_default_options_response(self) -> Response: - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. versionadded:: 0.7 - """ - adapter = request_ctx.url_adapter - methods = adapter.allowed_methods() # type: ignore[union-attr] - rv = self.response_class() - rv.allow.update(methods) - return rv - - def ensure_sync(self, func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Ensure that the function is synchronous for WSGI workers. - Plain ``def`` functions are returned as-is. ``async def`` - functions are wrapped to run and wait for the response. - - Override this method to change how the app runs async views. - - .. versionadded:: 2.0 - """ - if iscoroutinefunction(func): - return self.async_to_sync(func) - - return func - - def async_to_sync( - self, func: t.Callable[..., t.Coroutine[t.Any, t.Any, t.Any]] - ) -> t.Callable[..., t.Any]: - """Return a sync function that will run the coroutine function. - - .. code-block:: python - - result = app.async_to_sync(func)(*args, **kwargs) - - Override this method to change how the app converts async code - to be synchronously callable. - - .. 
versionadded:: 2.0 - """ - try: - from asgiref.sync import async_to_sync as asgiref_async_to_sync - except ImportError: - raise RuntimeError( - "Install Flask with the 'async' extra in order to use async views." - ) from None - - return asgiref_async_to_sync(func) - - def url_for( - self, - /, - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, - ) -> str: - """Generate a URL to the given endpoint with the given values. - - This is called by :func:`flask.url_for`, and can be called - directly as well. - - An *endpoint* is the name of a URL rule, usually added with - :meth:`@app.route() `, and usually the same name as the - view function. A route defined in a :class:`~flask.Blueprint` - will prepend the blueprint's name separated by a ``.`` to the - endpoint. - - In some cases, such as email messages, you want URLs to include - the scheme and domain, like ``https://example.com/hello``. When - not in an active request, URLs will be external by default, but - this requires setting :data:`SERVER_NAME` so Flask knows what - domain to use. :data:`APPLICATION_ROOT` and - :data:`PREFERRED_URL_SCHEME` should also be configured as - needed. This config is only used when not in an active request. - - Functions can be decorated with :meth:`url_defaults` to modify - keyword arguments before the URL is built. - - If building fails for some reason, such as an unknown endpoint - or incorrect values, the app's :meth:`handle_url_build_error` - method is called. If that returns a string, that is returned, - otherwise a :exc:`~werkzeug.routing.BuildError` is raised. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it - is external. - :param _external: If given, prefer the URL to be internal - (False) or require it to be external (True). External URLs - include the scheme and domain. When not in an active - request, URLs are external by default. - :param values: Values to use for the variable parts of the URL - rule. Unknown keys are appended as query string arguments, - like ``?a=b&c=d``. - - .. versionadded:: 2.2 - Moved from ``flask.url_for``, which calls this method. - """ - req_ctx = _cv_request.get(None) - - if req_ctx is not None: - url_adapter = req_ctx.url_adapter - blueprint_name = req_ctx.request.blueprint - - # If the endpoint starts with "." and the request matches a - # blueprint, the endpoint is relative to the blueprint. - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = f"{blueprint_name}{endpoint}" - else: - endpoint = endpoint[1:] - - # When in a request, generate a URL without scheme and - # domain by default, unless a scheme is given. - if _external is None: - _external = _scheme is not None - else: - app_ctx = _cv_app.get(None) - - # If called by helpers.url_for, an app context is active, - # use its url_adapter. Otherwise, app.url_for was called - # directly, build an adapter. - if app_ctx is not None: - url_adapter = app_ctx.url_adapter - else: - url_adapter = self.create_url_adapter(None) - - if url_adapter is None: - raise RuntimeError( - "Unable to build URLs outside an active request" - " without 'SERVER_NAME' configured. 
Also configure" - " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" - " needed." - ) - - # When outside a request, generate a URL with scheme and - # domain by default. - if _external is None: - _external = True - - # It is an error to set _scheme when _external=False, in order - # to avoid accidental insecure URLs. - if _scheme is not None and not _external: - raise ValueError("When specifying '_scheme', '_external' must be True.") - - self.inject_url_defaults(endpoint, values) - - try: - rv = url_adapter.build( # type: ignore[union-attr] - endpoint, - values, - method=_method, - url_scheme=_scheme, - force_external=_external, - ) - except BuildError as error: - values.update( - _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external - ) - return self.handle_url_build_error(error, endpoint, values) - - if _anchor is not None: - _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@") - rv = f"{rv}#{_anchor}" - - return rv - - def make_response(self, rv: ft.ResponseReturnValue) -> Response: - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``list`` - A list that will be jsonify'd before being returned. - - ``generator`` or ``iterator`` - A generator that returns ``str`` or ``bytes`` to be - streamed as the response. - - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 2.2 - A generator will be converted to a streaming response. - A list will be converted to a JSON response. - - .. versionchanged:: 1.1 - A dict will be converted to a JSON response. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status: int | None = None - headers: HeadersValue | None = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv # type: ignore[misc] - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv # pyright: ignore - else: - rv, status = rv # type: ignore[assignment,misc] - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." 
- ) - - # the body must not be None - if rv is None: - raise TypeError( - f"The view function for {request.endpoint!r} did not" - " return a valid response. The function either returned" - " None or ended without a return statement." - ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, cabc.Iterator): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class( - rv, # pyright: ignore - status=status, - headers=headers, # type: ignore[arg-type] - ) - status = headers = None - elif isinstance(rv, (dict, list)): - rv = self.json.response(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type( - rv, # type: ignore[arg-type] - request.environ, - ) - except TypeError as e: - raise TypeError( - f"{e}\nThe view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it" - f" was a {type(rv).__name__}." - ).with_traceback(sys.exc_info()[2]) from None - else: - raise TypeError( - "The view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it was a" - f" {type(rv).__name__}." - ) - - rv = t.cast(Response, rv) - # prefer the status if it was provided - if status is not None: - if isinstance(status, (str, bytes, bytearray)): - rv.status = status - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.update(headers) - - return rv - - def preprocess_request(self) -> ft.ResponseReturnValue | None: - """Called before the request is dispatched. Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. - """ - names = (None, *reversed(request.blueprints)) - - for name in names: - if name in self.url_value_preprocessors: - for url_func in self.url_value_preprocessors[name]: - url_func(request.endpoint, request.view_args) - - for name in names: - if name in self.before_request_funcs: - for before_func in self.before_request_funcs[name]: - rv = self.ensure_sync(before_func)() - - if rv is not None: - return rv # type: ignore[no-any-return] - - return None - - def process_response(self, response: Response) -> Response: - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. 
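``make_response`` and ``process_response`` together mean a view can return plain Python values and still have response-wide hooks applied; a minimal sketch, assuming an existing ``app``::

    @app.route("/ping")
    def ping():
        # a dict return value is converted to a JSON response
        return {"ok": True}

    @app.after_request
    def set_frame_options(response):
        # runs inside process_response(), most recently registered first
        response.headers["X-Frame-Options"] = "DENY"
        return response
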
- """ - ctx = request_ctx._get_current_object() # type: ignore[attr-defined] - - for func in ctx._after_request_functions: - response = self.ensure_sync(func)(response) - - for name in chain(request.blueprints, (None,)): - if name in self.after_request_funcs: - for func in reversed(self.after_request_funcs[name]): - response = self.ensure_sync(func)(response) - - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - - return response - - def do_teardown_request( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for name in chain(request.blueprints, (None,)): - if name in self.teardown_request_funcs: - for func in reversed(self.teardown_request_funcs[name]): - self.ensure_sync(func)(exc) - - request_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def do_teardown_appcontext( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for func in reversed(self.teardown_appcontext_funcs): - self.ensure_sync(func)(exc) - - appcontext_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def app_context(self) -> AppContext: - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. - - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ: WSGIEnvironment) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. 
- - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with app.test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The actual WSGI application. This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. - See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. 
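``test_request_context`` makes it possible to exercise request-dependent code without building a full test client; a minimal sketch, assuming an existing ``app``::

    from flask import request

    with app.test_request_context("/hello?name=flask"):
        # request points at the synthetic request inside the block
        assert request.path == "/hello"
        assert request.args["name"] == "flask"
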
- """ - ctx = self.request_context(environ) - error: BaseException | None = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if "werkzeug.debug.preserve_context" in environ: - environ["werkzeug.debug.preserve_context"](_cv_app.get()) - environ["werkzeug.debug.preserve_context"](_cv_request.get()) - - if error is not None and self.should_ignore_error(error): - error = None - - ctx.pop(error) - - def __call__( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app`, which can be - wrapped to apply middleware. - """ - return self.wsgi_app(environ, start_response) diff --git a/venv/Lib/site-packages/flask/blueprints.py b/venv/Lib/site-packages/flask/blueprints.py deleted file mode 100644 index b6d4e43..0000000 --- a/venv/Lib/site-packages/flask/blueprints.py +++ /dev/null @@ -1,128 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from datetime import timedelta - -from .cli import AppGroup -from .globals import current_app -from .helpers import send_from_directory -from .sansio.blueprints import Blueprint as SansioBlueprint -from .sansio.blueprints import BlueprintSetupState as BlueprintSetupState # noqa -from .sansio.scaffold import _sentinel - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -class Blueprint(SansioBlueprint): - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore - ) -> None: - super().__init__( - name, - import_name, - static_folder, - static_url_path, - template_folder, - url_prefix, - subdomain, - url_defaults, - root_path, - cli_group, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. 
versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource( - self, resource: str, mode: str = "rb", encoding: str | None = "utf-8" - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for reading. The - blueprint-relative equivalent of the app's :meth:`~.Flask.open_resource` - method. - - :param resource: Path to the resource relative to :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is supported, - valid values are ``"r"`` (or ``"rt"``) and ``"rb"``. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - path = os.path.join(self.root_path, resource) - - if mode == "rb": - return open(path, mode) # pyright: ignore - - return open(path, mode, encoding=encoding) diff --git a/venv/Lib/site-packages/flask/cli.py b/venv/Lib/site-packages/flask/cli.py deleted file mode 100644 index ed11f25..0000000 --- a/venv/Lib/site-packages/flask/cli.py +++ /dev/null @@ -1,1135 +0,0 @@ -from __future__ import annotations - -import ast -import collections.abc as cabc -import importlib.metadata -import inspect -import os -import platform -import re -import sys -import traceback -import typing as t -from functools import update_wrapper -from operator import itemgetter -from types import ModuleType - -import click -from click.core import ParameterSource -from werkzeug import run_simple -from werkzeug.serving import is_running_from_reloader -from werkzeug.utils import import_string - -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_load_dotenv - -if t.TYPE_CHECKING: - import ssl - - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(module: ModuleType) -> Flask: - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . import Flask - - # Search for the most common names first. - for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. 
- matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - "Detected multiple Flask applications in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify the correct one." - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = app_factory() - - if isinstance(app, Flask): - return app - except TypeError as e: - if not _called_with_wrong_args(app_factory): - raise - - raise NoAppException( - f"Detected factory '{attr_name}' in module '{module.__name__}'," - " but could not call it without arguments. Use" - f" '{module.__name__}:{attr_name}(args)'" - " to specify arguments." - ) from e - - raise NoAppException( - "Failed to find Flask application or factory in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify one." - ) - - -def _called_with_wrong_args(f: t.Callable[..., Flask]) -> bool: - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(module: ModuleType, app_name: str) -> Flask: - """Check if the given string is a variable name or a function. Call - a function to get the app instance, or return the variable directly. - """ - from . import Flask - - # Parse app_name as a single expression to determine if it's a valid - # attribute name or function call. - try: - expr = ast.parse(app_name.strip(), mode="eval").body - except SyntaxError: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) from None - - if isinstance(expr, ast.Name): - name = expr.id - args = [] - kwargs = {} - elif isinstance(expr, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expr.func, ast.Name): - raise NoAppException( - f"Function reference must be a simple name: {app_name!r}." - ) - - name = expr.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expr.args] - kwargs = { - kw.arg: ast.literal_eval(kw.value) - for kw in expr.keywords - if kw.arg is not None - } - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise NoAppException( - f"Failed to parse arguments as literal values: {app_name!r}." - ) from None - else: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException( - f"Failed to find attribute {name!r} in {module.__name__!r}." - ) from e - - # If the attribute is a function, call it with any args and kwargs - # to get the real application. 
- if inspect.isfunction(attr): - try: - app = attr(*args, **kwargs) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - f"The factory {app_name!r} in module" - f" {module.__name__!r} could not be called with the" - " specified arguments." - ) from e - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from" - f" '{module.__name__}:{app_name}'." - ) - - -def prepare_import(path: str) -> str: - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[True] = True -) -> Flask: ... - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[False] = ... -) -> Flask | None: ... - - -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: bool = True -) -> Flask | None: - try: - __import__(module_name) - except ImportError: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. - if sys.exc_info()[2].tb_next: # type: ignore[union-attr] - raise NoAppException( - f"While importing {module_name!r}, an ImportError was" - f" raised:\n\n{traceback.format_exc()}" - ) from None - elif raise_if_not_found: - raise NoAppException(f"Could not import {module_name!r}.") from None - else: - return None - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(module) - else: - return find_app_by_string(module, app_name) - - -def get_version(ctx: click.Context, param: click.Parameter, value: t.Any) -> None: - if not value or ctx.resilient_parsing: - return - - flask_version = importlib.metadata.version("flask") - werkzeug_version = importlib.metadata.version("werkzeug") - - click.echo( - f"Python {platform.python_version()}\n" - f"Flask {flask_version}\n" - f"Werkzeug {werkzeug_version}", - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the Flask version.", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class ScriptInfo: - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - - .. versionchanged:: 3.1 - Added the ``load_dotenv_defaults`` parameter and attribute. 
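The discovery logic above accepts either a module-level ``app``/``application`` object or a ``create_app``/``make_app`` factory, optionally with literal arguments, e.g. ``flask --app "hello:create_app('production')" run``; a minimal factory it could load (module and config names illustrative)::

    # hello.py
    from flask import Flask

    def create_app(config_name="default"):
        app = Flask(__name__)
        app.config["CONFIG_NAME"] = config_name  # illustrative setting
        return app
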
- """ - - def __init__( - self, - app_import_path: str | None = None, - create_app: t.Callable[..., Flask] | None = None, - set_debug_flag: bool = True, - load_dotenv_defaults: bool = True, - ) -> None: - #: Optionally the import path for the Flask application. - self.app_import_path = app_import_path - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. - self.data: dict[t.Any, t.Any] = {} - self.set_debug_flag = set_debug_flag - - self.load_dotenv_defaults = get_load_dotenv(load_dotenv_defaults) - """Whether default ``.flaskenv`` and ``.env`` files should be loaded. - - ``ScriptInfo`` doesn't load anything, this is for reference when doing - the load elsewhere during processing. - - .. versionadded:: 3.1 - """ - - self._loaded_app: Flask | None = None - - def load_app(self) -> Flask: - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. - """ - if self._loaded_app is not None: - return self._loaded_app - app: Flask | None = None - if self.create_app is not None: - app = self.create_app() - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, maxsplit=1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(import_name, None, raise_if_not_found=False) - - if app is not None: - break - - if app is None: - raise NoAppException( - "Could not locate a Flask application. Use the" - " 'flask --app' option, 'FLASK_APP' environment" - " variable, or a 'wsgi.py' or 'app.py' file in the" - " current directory." - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def with_appcontext(f: F) -> F: - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. - - Custom commands (and their options) registered under ``app.cli`` or - ``blueprint.cli`` will always have an app context available, this - decorator is not required in that case. - - .. versionchanged:: 2.2 - The app context is active for subcommands as well as the - decorated callback. The app context is always available to - ``app.cli`` command and parameter callbacks. - """ - - @click.pass_context - def decorator(ctx: click.Context, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - if not current_app: - app = ctx.ensure_object(ScriptInfo).load_app() - ctx.with_resource(app.app_context()) - - return ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) # type: ignore[return-value] - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. 
- """ - - def command( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Command]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. - """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f: t.Callable[..., t.Any]) -> click.Command: - if wrap_for_ctx: - f = with_appcontext(f) - return super(AppGroup, self).command(*args, **kwargs)(f) # type: ignore[no-any-return] - - return decorator - - def group( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Group]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return super().group(*args, **kwargs) # type: ignore[no-any-return] - - -def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: - if value is None: - return None - - info = ctx.ensure_object(ScriptInfo) - info.app_import_path = value - return value - - -# This option is eager so the app will be available if --help is given. -# --help is also eager, so --app must be before it in the param list. -# no_args_is_help bypasses eager processing, so this option must be -# processed manually in that case to ensure FLASK_APP gets picked up. -_app_option = click.Option( - ["-A", "--app"], - metavar="IMPORT", - help=( - "The Flask application or factory function to load, in the form 'module:name'." - " Module can be a dotted import or file path. Name is not required if it is" - " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" - " pass arguments." - ), - is_eager=True, - expose_value=False, - callback=_set_app, -) - - -def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: - # If the flag isn't provided, it will default to False. Don't use - # that, let debug be set by env in that case. - source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] - - if source is not None and source in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ): - return None - - # Set with env var instead of ScriptInfo.load so that it can be - # accessed early during a factory function. - os.environ["FLASK_DEBUG"] = "1" if value else "0" - return value - - -_debug_option = click.Option( - ["--debug/--no-debug"], - help="Set debug mode.", - expose_value=False, - callback=_set_debug, -) - - -def _env_file_callback( - ctx: click.Context, param: click.Option, value: str | None -) -> str | None: - try: - import dotenv # noqa: F401 - except ImportError: - # Only show an error if a value was passed, otherwise we still want to - # call load_dotenv and show a message without exiting. - if value is not None: - raise click.BadParameter( - "python-dotenv must be installed to load an env file.", - ctx=ctx, - param=param, - ) from None - - # Load if a value was passed, or we want to load default files, or both. - if value is not None or ctx.obj.load_dotenv_defaults: - load_dotenv(value, load_defaults=ctx.obj.load_dotenv_defaults) - - return value - - -# This option is eager so env vars are loaded as early as possible to be -# used by other options. 
-_env_file_option = click.Option( - ["-e", "--env-file"], - type=click.Path(exists=True, dir_okay=False), - help=( - "Load environment variables from this file, taking precedence over" - " those set by '.env' and '.flaskenv'. Variables set directly in the" - " environment take highest precedence. python-dotenv must be installed." - ), - is_eager=True, - expose_value=False, - callback=_env_file_callback, -) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. see :ref:`custom-scripts`. - - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag. - - .. versionchanged:: 3.1 - ``-e path`` takes precedence over default ``.env`` and ``.flaskenv`` files. - - .. versionchanged:: 2.2 - Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. - - .. versionchanged:: 2.2 - An app context is pushed when running ``app.cli`` commands, so - ``@with_appcontext`` is no longer required for those commands. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands: bool = True, - create_app: t.Callable[..., Flask] | None = None, - add_version_option: bool = True, - load_dotenv: bool = True, - set_debug_flag: bool = True, - **extra: t.Any, - ) -> None: - params: list[click.Parameter] = list(extra.pop("params", None) or ()) - # Processing is done with option callbacks instead of a group - # callback. This allows users to make a custom group callback - # without losing the behavior. --env-file must come first so - # that it is eagerly evaluated before --app. - params.extend((_env_file_option, _app_option, _debug_option)) - - if add_version_option: - params.append(version_option) - - if "context_settings" not in extra: - extra["context_settings"] = {} - - extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") - - super().__init__(params=params, **extra) - - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self) -> None: - if self._loaded_plugin_commands: - return - - if sys.version_info >= (3, 10): - from importlib import metadata - else: - # Use a backport on Python < 3.10. We technically have - # importlib.metadata on 3.8+, but the API changed in 3.10, - # so use the backport for consistency. 
- import importlib_metadata as metadata # pyright: ignore - - for ep in metadata.entry_points(group="flask.commands"): - self.add_command(ep.load(), ep.name) - - self._loaded_plugin_commands = True - - def get_command(self, ctx: click.Context, name: str) -> click.Command | None: - self._load_plugin_commands() - # Look up built-in and plugin commands, which should be - # available even if the app fails to load. - rv = super().get_command(ctx, name) - - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - - # Look up commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - app = info.load_app() - except NoAppException as e: - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - return None - - # Push an app context for the loaded app unless it is already - # active somehow. This makes the context available to parameter - # and command callbacks without needing @with_appcontext. - if not current_app or current_app._get_current_object() is not app: # type: ignore[attr-defined] - ctx.with_resource(app.app_context()) - - return app.cli.get_command(ctx, name) - - def list_commands(self, ctx: click.Context) -> list[str]: - self._load_plugin_commands() - # Start with the built-in and plugin commands. - rv = set(super().list_commands(ctx)) - info = ctx.ensure_object(ScriptInfo) - - # Add commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except NoAppException as e: - # When an app couldn't be loaded, show the error message - # without the traceback. - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - except Exception: - # When any other errors occurred during loading, show the - # full traceback. - click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") - - return sorted(rv) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: click.Context | None = None, - **extra: t.Any, - ) -> click.Context: - # Set a flag to tell app.run to become a no-op. If app.run was - # not in a __name__ == __main__ guard, it would start the server - # when importing, blocking whatever command is being called. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - if "obj" not in extra and "obj" not in self.context_settings: - extra["obj"] = ScriptInfo( - create_app=self.create_app, - set_debug_flag=self.set_debug_flag, - load_dotenv_defaults=self.load_dotenv, - ) - - return super().make_context(info_name, args, parent=parent, **extra) - - def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: - if (not args and self.no_args_is_help) or ( - len(args) == 1 and args[0] in self.get_help_option_names(ctx) - ): - # Attempt to load --env-file and --app early in case they - # were given as env vars. Otherwise no_args_is_help will not - # see commands from app.cli. - _env_file_option.handle_parse_result(ctx, {}, []) - _app_option.handle_parse_result(ctx, {}, []) - - return super().parse_args(ctx, args) - - -def _path_is_ancestor(path: str, other: str) -> bool: - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv( - path: str | os.PathLike[str] | None = None, load_defaults: bool = True -) -> bool: - """Load "dotenv" files to set environment variables. 
A given path takes - precedence over ``.env``, which takes precedence over ``.flaskenv``. After - loading and combining these files, values are only set if the key is not - already set in ``os.environ``. - - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location. - :param load_defaults: Search for and load the default ``.flaskenv`` and - ``.env`` files. - :return: ``True`` if at least one env var was loaded. - - .. versionchanged:: 3.1 - Added the ``load_defaults`` parameter. A given path takes precedence - over default files. - - .. versionchanged:: 2.0 - The current directory is not changed to the location of the - loaded file. - - .. versionchanged:: 2.0 - When loading the env files, set the default encoding to UTF-8. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionadded:: 1.0 - """ - try: - import dotenv - except ImportError: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env files present. Install python-dotenv" - " to use them.", - fg="yellow", - err=True, - ) - - return False - - data: dict[str, str | None] = {} - - if load_defaults: - for default_name in (".flaskenv", ".env"): - if not (default_path := dotenv.find_dotenv(default_name, usecwd=True)): - continue - - data |= dotenv.dotenv_values(default_path, encoding="utf-8") - - if path is not None and os.path.isfile(path): - data |= dotenv.dotenv_values(path, encoding="utf-8") - - for key, value in data.items(): - if key in os.environ or value is None: - continue - - os.environ[key] = value - - return bool(data) # True if at least one env var was loaded. - - -def show_server_banner(debug: bool, app_import_path: str | None) -> None: - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if is_running_from_reloader(): - return - - if app_import_path is not None: - click.echo(f" * Serving Flask app '{app_import_path}'") - - if debug is not None: - click.echo(f" * Debug mode: {'on' if debug else 'off'}") - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. - """ - - name = "path" - - def __init__(self) -> None: - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - try: - import ssl - except ImportError: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) from None - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import cryptography # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires the cryptography library.", - ctx, - param, - ) from None - - return value - - obj = import_string(value, silent=True) - - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx: click.Context, param: click.Parameter, value: t.Any) -> t.Any: - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. 
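``load_dotenv`` can also be called directly when the app is started outside the ``flask`` CLI; a minimal sketch (the default file names are the ones it searches for)::

    from flask.cli import load_dotenv

    # merges .flaskenv then .env (an explicit path would take precedence
    # over both); keys already present in os.environ are left untouched
    loaded = load_dotenv()  # True if at least one variable was loaded
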
- """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - - try: - import ssl - except ImportError: - is_context = False - else: - is_context = isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key" is not used.', - ctx, - param, - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. - """ - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - items = self.split_envvar_value(value) - # can't call no-arg super() inside list comprehension until Python 3.12 - super_convert = super().convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", - type=CertParamType(), - help="Specify a certificate file to use HTTPS.", - is_eager=True, -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. Multiple paths" - f" are separated by {os.path.pathsep!r}." - ), -) -@click.option( - "--exclude-patterns", - default=None, - type=SeparatedPathType(), - help=( - "Files matching these fnmatch patterns will not trigger a reload" - " on change. Multiple patterns are separated by" - f" {os.path.pathsep!r}." - ), -) -@pass_script_info -def run_command( - info: ScriptInfo, - host: str, - port: int, - reload: bool, - debugger: bool, - with_threads: bool, - cert: ssl.SSLContext | tuple[str, str | None] | t.Literal["adhoc"] | None, - extra_files: list[str] | None, - exclude_patterns: list[str] | None, -) -> None: - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default with the '--debug' - option. - """ - try: - app: WSGIApplication = info.load_app() # pyright: ignore - except Exception as e: - if is_running_from_reloader(): - # When reloading, print out the error immediately, but raise - # it later so the debugger or server can handle it. 
- traceback.print_exc() - err = e - - def app( - environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - raise err from None - - else: - # When not reloading, raise the error immediately so the - # command fails. - raise e from None - - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - show_server_banner(debug, info.app_import_path) - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - exclude_patterns=exclude_patterns, - ) - - -run_command.params.insert(0, _debug_option) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command() -> None: - """Run an interactive Python shell in the context of a given - Flask application. The application will populate the default - namespace of this shell according to its configuration. - - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - - banner = ( - f"Python {sys.version} on {sys.platform}\n" - f"App: {current_app.import_name}\n" - f"Instance: {current_app.instance_path}" - ) - ctx: dict[str, t.Any] = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup) as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(current_app.make_shell_context()) - - # Site, customize, or startup script can set a hook to call when - # entering interactive mode. The default one sets up readline with - # tab and history completion. - interactive_hook = getattr(sys, "__interactivehook__", None) - - if interactive_hook is not None: - try: - import readline - from rlcompleter import Completer - except ImportError: - pass - else: - # rlcompleter uses __main__.__dict__ by default, which is - # flask.__main__. Use the shell context instead. - readline.set_completer(Completer(ctx).complete) - - interactive_hook() - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "domain", "rule", "match")), - default="endpoint", - help=( - "Method to sort routes by. 'match' is the order that Flask will match routes" - " when dispatching a request." 
- ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort: str, all_methods: bool) -> None: - """Show all registered routes with endpoints and methods.""" - rules = list(current_app.url_map.iter_rules()) - - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set() if all_methods else {"HEAD", "OPTIONS"} - host_matching = current_app.url_map.host_matching - has_domain = any(rule.host if host_matching else rule.subdomain for rule in rules) - rows = [] - - for rule in rules: - row = [ - rule.endpoint, - ", ".join(sorted((rule.methods or set()) - ignored_methods)), - ] - - if has_domain: - row.append((rule.host if host_matching else rule.subdomain) or "") - - row.append(rule.rule) - rows.append(row) - - headers = ["Endpoint", "Methods"] - sorts = ["endpoint", "methods"] - - if has_domain: - headers.append("Host" if host_matching else "Subdomain") - sorts.append("domain") - - headers.append("Rule") - sorts.append("rule") - - try: - rows.sort(key=itemgetter(sorts.index(sort))) - except ValueError: - pass - - rows.insert(0, headers) - widths = [max(len(row[i]) for row in rows) for i in range(len(headers))] - rows.insert(1, ["-" * w for w in widths]) - template = " ".join(f"{{{i}:<{w}}}" for i, w in enumerate(widths)) - - for row in rows: - click.echo(template.format(*row)) - - -cli = FlaskGroup( - name="flask", - help="""\ -A general utility script for Flask applications. - -An application to load must be given with the '--app' option, -'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file -in the current directory. -""", -) - - -def main() -> None: - cli.main() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/flask/config.py b/venv/Lib/site-packages/flask/config.py deleted file mode 100644 index 34ef1a5..0000000 --- a/venv/Lib/site-packages/flask/config.py +++ /dev/null @@ -1,367 +0,0 @@ -from __future__ import annotations - -import errno -import json -import os -import types -import typing as t - -from werkzeug.utils import import_string - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .sansio.app import App - - -T = t.TypeVar("T") - - -class ConfigAttribute(t.Generic[T]): - """Makes an attribute forward to the config""" - - def __init__( - self, name: str, get_converter: t.Callable[[t.Any], T] | None = None - ) -> None: - self.__name__ = name - self.get_converter = get_converter - - @t.overload - def __get__(self, obj: None, owner: None) -> te.Self: ... - - @t.overload - def __get__(self, obj: App, owner: type[App]) -> T: ... - - def __get__(self, obj: App | None, owner: type[App] | None = None) -> T | te.Self: - if obj is None: - return self - - rv = obj.config[self.__name__] - - if self.get_converter is not None: - rv = self.get_converter(rv) - - return rv # type: ignore[no-any-return] - - def __set__(self, obj: App, value: t.Any) -> None: - obj.config[self.__name__] = value - - -class Config(dict): # type: ignore[type-arg] - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. 
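For reference, a small sketch of the from_object / from_pyfile pattern this docstring describes; the class name and file name are hypothetical:

    from flask import Flask

    class DefaultConfig:
        DEBUG = False
        SECRET_KEY = "change-me"
        _internal = "ignored"          # not uppercase, so never copied

    app = Flask(__name__)
    app.config.from_object(DefaultConfig)                   # defaults
    app.config.from_pyfile("production.cfg", silent=True)   # optional overrides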
It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__( - self, - root_path: str | os.PathLike[str], - defaults: dict[str, t.Any] | None = None, - ) -> None: - super().__init__(defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name: str, silent: bool = False) -> bool: - """Loads a configuration from an environment variable pointing to - a configuration file. This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - f"The environment variable {variable_name!r} is not set" - " and as such configuration could not be loaded. Set" - " this variable and make it point to a configuration" - " file" - ) - return self.from_pyfile(rv, silent=silent) - - def from_prefixed_env( - self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads - ) -> bool: - """Load any environment variables that start with ``FLASK_``, - dropping the prefix from the env key for the config key. Values - are passed through a loading function to attempt to convert them - to more specific types than strings. - - Keys are loaded in :func:`sorted` order. - - The default loading function attempts to parse values as any - valid JSON type, including dicts and lists. - - Specific items in nested dicts can be set by separating the - keys with double underscores (``__``). If an intermediate key - doesn't exist, it will be initialized to an empty dict. - - :param prefix: Load env vars that start with this prefix, - separated with an underscore (``_``). - :param loads: Pass each string value to this function and use - the returned value as the config value. If any error is - raised it is ignored and the value remains a string. The - default is :func:`json.loads`. - - .. 
versionadded:: 2.1 - """ - prefix = f"{prefix}_" - - for key in sorted(os.environ): - if not key.startswith(prefix): - continue - - value = os.environ[key] - key = key.removeprefix(prefix) - - try: - value = loads(value) - except Exception: - # Keep the value as a string if loading failed. - pass - - if "__" not in key: - # A non-nested key, set directly. - self[key] = value - continue - - # Traverse nested dictionaries with keys separated by "__". - current = self - *parts, tail = key.split("__") - - for part in parts: - # If an intermediate dict does not exist, create it. - if part not in current: - current[part] = {} - - current = current[part] - - current[tail] = value - - return True - - def from_pyfile( - self, filename: str | os.PathLike[str], silent: bool = False - ) -> bool: - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - self.from_object(d) - return True - - def from_object(self, obj: object | str) -> None: - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, str): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_file( - self, - filename: str | os.PathLike[str], - load: t.Callable[[t.IO[t.Any]], t.Mapping[str, t.Any]], - silent: bool = False, - text: bool = True, - ) -> bool: - """Update the values in the config from a file that is loaded - using the ``load`` parameter. The loaded data is passed to the - :meth:`from_mapping` method. - - .. 
code-block:: python - - import json - app.config.from_file("config.json", load=json.load) - - import tomllib - app.config.from_file("config.toml", load=tomllib.load, text=False) - - :param filename: The path to the data file. This can be an - absolute path or relative to the config root path. - :param load: A callable that takes a file handle and returns a - mapping of loaded data from the file. - :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` - implements a ``read`` method. - :param silent: Ignore the file if it doesn't exist. - :param text: Open the file in text or binary mode. - :return: ``True`` if the file was loaded successfully. - - .. versionchanged:: 2.3 - The ``text`` parameter was added. - - .. versionadded:: 2.0 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename, "r" if text else "rb") as f: - obj = load(f) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - - return self.from_mapping(obj) - - def from_mapping( - self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any - ) -> bool: - """Updates the config like :meth:`update` ignoring items with - non-upper keys. - - :return: Always returns ``True``. - - .. versionadded:: 0.11 - """ - mappings: dict[str, t.Any] = {} - if mapping is not None: - mappings.update(mapping) - mappings.update(kwargs) - for key, value in mappings.items(): - if key.isupper(): - self[key] = value - return True - - def get_namespace( - self, namespace: str, lowercase: bool = True, trim_namespace: bool = True - ) -> dict[str, t.Any]: - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. versionadded:: 0.11 - """ - rv = {} - for k, v in self.items(): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self) -> str: - return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/venv/Lib/site-packages/flask/ctx.py b/venv/Lib/site-packages/flask/ctx.py deleted file mode 100644 index 222e818..0000000 --- a/venv/Lib/site-packages/flask/ctx.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations - -import contextvars -import sys -import typing as t -from functools import update_wrapper -from types import TracebackType - -from werkzeug.exceptions import HTTPException - -from . 
import typing as ft -from .globals import _cv_app -from .globals import _cv_request -from .signals import appcontext_popped -from .signals import appcontext_pushed - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - from .sessions import SessionMixin - from .wrappers import Request - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals: - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - # Define attr methods to let mypy know this is a namespace object - # that has arbitrary attributes. - - def __getattr__(self, name: str) -> t.Any: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - self.__dict__[name] = value - - def __delattr__(self, name: str) -> None: - try: - del self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def get(self, name: str, default: t.Any | None = None) -> t.Any: - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. - - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raising a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name: str, default: t.Any = None) -> t.Any: - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param default: Value to set and return if the attribute is not - present. - - .. versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item: str) -> bool: - return item in self.__dict__ - - def __iter__(self) -> t.Iterator[str]: - return iter(self.__dict__) - - def __repr__(self) -> str: - ctx = _cv_app.get(None) - if ctx is not None: - return f"" - return object.__repr__(self) - - -def after_this_request( - f: ft.AfterRequestCallable[t.Any], -) -> ft.AfterRequestCallable[t.Any]: - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. 
versionadded:: 0.9 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'after_this_request' can only be used when a request" - " context is active, such as in a view function." - ) - - ctx._after_request_functions.append(f) - return f - - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def copy_current_request_context(f: F) -> F: - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'copy_current_request_context' can only be used when a" - " request context is active, such as in a view function." - ) - - ctx = ctx.copy() - - def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any: - with ctx: - return ctx.app.ensure_sync(f)(*args, **kwargs) - - return update_wrapper(wrapper, f) # type: ignore[return-value] - - -def has_request_context() -> bool: - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _cv_request.get(None) is not None - - -def has_app_context() -> bool: - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _cv_app.get(None) is not None - - -class AppContext: - """The app context contains application-specific information. An app - context is created and pushed at the beginning of each request if - one is not already active. An app context is also pushed when - running CLI commands. 
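A short illustration of pushing an application context by hand, for example from a one-off script, so that current_app and g resolve outside a request:

    from flask import Flask, current_app, g

    app = Flask(__name__)

    with app.app_context():
        g.job_id = "demo"                    # stored on this context's g
        print(current_app.name, g.job_id)
    # Leaving the block pops the context and runs teardown_appcontext functions.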
- """ - - def __init__(self, app: Flask) -> None: - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g: _AppCtxGlobals = app.app_ctx_globals_class() - self._cv_tokens: list[contextvars.Token[AppContext]] = [] - - def push(self) -> None: - """Binds the app context to the current context.""" - self._cv_tokens.append(_cv_app.set(self)) - appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync) - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the app context.""" - try: - if len(self._cv_tokens) == 1: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - ctx = _cv_app.get() - _cv_app.reset(self._cv_tokens.pop()) - - if ctx is not self: - raise AssertionError( - f"Popped wrong app context. ({ctx!r} instead of {self!r})" - ) - - appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync) - - def __enter__(self) -> AppContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - -class RequestContext: - """The request context contains per-request information. The Flask - app creates and pushes it at the beginning of the request, then pops - it at the end of the request. It will create the URL adapter and - request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the - request. When using the interactive debugger, the context will be - restored so ``request`` is still accessible. Similarly, the test - client can preserve the context after the request ends. However, - teardown functions may already have closed some resources such as - database connections. - """ - - def __init__( - self, - app: Flask, - environ: WSGIEnvironment, - request: Request | None = None, - session: SessionMixin | None = None, - ) -> None: - self.app = app - if request is None: - request = app.request_class(environ) - request.json_module = app.json - self.request: Request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes: list[tuple[str, str]] | None = None - self.session: SessionMixin | None = session - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions: list[ft.AfterRequestCallable[t.Any]] = [] - - self._cv_tokens: list[ - tuple[contextvars.Token[RequestContext], AppContext | None] - ] = [] - - def copy(self) -> RequestContext: - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. 
This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self) -> None: - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) # type: ignore - self.request.url_rule, self.request.view_args = result # type: ignore - except HTTPException as e: - self.request.routing_exception = e - - def push(self) -> None: - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _cv_app.get(None) - - if app_ctx is None or app_ctx.app is not self.app: - app_ctx = self.app.app_context() - app_ctx.push() - else: - app_ctx = None - - self._cv_tokens.append((_cv_request.set(self), app_ctx)) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. - if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - # Match the request URL after loading the session, so that the - # session is available in custom URL converters. - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - clear_request = len(self._cv_tokens) == 1 - - try: - if clear_request: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - finally: - ctx = _cv_request.get() - token, app_ctx = self._cv_tokens.pop() - _cv_request.reset(token) - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - ctx.request.environ["werkzeug.request"] = None - - if app_ctx is not None: - app_ctx.pop(exc) - - if ctx is not self: - raise AssertionError( - f"Popped wrong request context. 
({ctx!r} instead of {self!r})" - ) - - def __enter__(self) -> RequestContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - def __repr__(self) -> str: - return ( - f"<{type(self).__name__} {self.request.url!r}" - f" [{self.request.method}] of {self.app.name}>" - ) diff --git a/venv/Lib/site-packages/flask/debughelpers.py b/venv/Lib/site-packages/flask/debughelpers.py deleted file mode 100644 index 2c8c4c4..0000000 --- a/venv/Lib/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2.loaders import BaseLoader -from werkzeug.routing import RequestRedirect - -from .blueprints import Blueprint -from .globals import request_ctx -from .sansio.app import App - -if t.TYPE_CHECKING: - from .sansio.scaffold import Scaffold - from .wrappers import Request - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request: Request, key: str) -> None: - form_matches = request.form.getlist(key) - buf = [ - f"You tried to access the file {key!r} in the request.files" - " dictionary but it does not exist. The mimetype for the" - f" request is {request.mimetype!r} instead of" - " 'multipart/form-data' which means that no file contents" - " were transmitted. To fix this error you should provide" - ' enctype="multipart/form-data" in your form.' - ] - if form_matches: - names = ", ".join(repr(x) for x in form_matches) - buf.append( - "\n\nThe browser instead transmitted some file names. " - f"This was submitted: {names}" - ) - self.msg = "".join(buf) - - def __str__(self) -> str: - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised in debug mode if a routing redirect - would cause the browser to drop the method or body. This happens - when method is not GET, HEAD or OPTIONS and the status code is not - 307 or 308. - """ - - def __init__(self, request: Request) -> None: - exc = request.routing_exception - assert isinstance(exc, RequestRedirect) - buf = [ - f"A request was sent to '{request.url}', but routing issued" - f" a redirect to the canonical URL '{exc.new_url}'." - ] - - if f"{request.base_url}/" == exc.new_url.partition("?")[0]: - buf.append( - " The URL was defined with a trailing slash. Flask" - " will redirect to the URL with a trailing slash if it" - " was accessed without one." - ) - - buf.append( - " Send requests to the canonical URL, or use 307 or 308 for" - " routing redirects. Otherwise, browsers will drop form" - " data.\n\n" - "This exception is only raised in debug mode." - ) - super().__init__("".join(buf)) - - -def attach_enctype_error_multidict(request: Request) -> None: - """Patch ``request.files.__getitem__`` to raise a descriptive error - about ``enctype=multipart/form-data``. - - :param request: The request to patch. 
- :meta private: - """ - oldcls = request.files.__class__ - - class newcls(oldcls): # type: ignore[valid-type, misc] - def __getitem__(self, key: str) -> t.Any: - try: - return super().__getitem__(key) - except KeyError as e: - if key not in request.form: - raise - - raise DebugFilesKeyError(request, key).with_traceback( - e.__traceback__ - ) from None - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader: BaseLoader) -> t.Iterator[str]: - yield f"class: {type(loader).__module__}.{type(loader).__name__}" - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, str) for x in value): - continue - yield f"{key}:" - for item in value: - yield f" - {item}" - continue - elif not isinstance(value, (str, int, float, bool)): - continue - yield f"{key}: {value!r}" - - -def explain_template_loading_attempts( - app: App, - template: str, - attempts: list[ - tuple[ - BaseLoader, - Scaffold, - tuple[str, str | None, t.Callable[[], bool] | None] | None, - ] - ], -) -> None: - """This should help developers understand what failed""" - info = [f"Locating template {template!r}:"] - total_found = 0 - blueprint = None - if request_ctx and request_ctx.request.blueprint is not None: - blueprint = request_ctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, App): - src_info = f"application {srcobj.import_name!r}" - elif isinstance(srcobj, Blueprint): - src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" - else: - src_info = repr(srcobj) - - info.append(f"{idx + 1:5}: trying loader of {src_info}") - - for line in _dump_loader_info(loader): - info.append(f" {line}") - - if triple is None: - detail = "no match" - else: - detail = f"found ({triple[1] or ''!r})" - total_found += 1 - info.append(f" -> {detail}") - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not None and seems_fishy: - info.append( - " The template was looked up from an endpoint that belongs" - f" to the blueprint {blueprint!r}." - ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See https://flask.palletsprojects.com/blueprints/#templates") - - app.logger.info("\n".join(info)) diff --git a/venv/Lib/site-packages/flask/globals.py b/venv/Lib/site-packages/flask/globals.py deleted file mode 100644 index e2c410c..0000000 --- a/venv/Lib/site-packages/flask/globals.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import typing as t -from contextvars import ContextVar - -from werkzeug.local import LocalProxy - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .ctx import _AppCtxGlobals - from .ctx import AppContext - from .ctx import RequestContext - from .sessions import SessionMixin - from .wrappers import Request - - -_no_app_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -the current application. To solve this, set up an application context -with app.app_context(). 
See the documentation for more information.\ -""" -_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx") -app_ctx: AppContext = LocalProxy( # type: ignore[assignment] - _cv_app, unbound_message=_no_app_msg -) -current_app: Flask = LocalProxy( # type: ignore[assignment] - _cv_app, "app", unbound_message=_no_app_msg -) -g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment] - _cv_app, "g", unbound_message=_no_app_msg -) - -_no_req_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx") -request_ctx: RequestContext = LocalProxy( # type: ignore[assignment] - _cv_request, unbound_message=_no_req_msg -) -request: Request = LocalProxy( # type: ignore[assignment] - _cv_request, "request", unbound_message=_no_req_msg -) -session: SessionMixin = LocalProxy( # type: ignore[assignment] - _cv_request, "session", unbound_message=_no_req_msg -) diff --git a/venv/Lib/site-packages/flask/helpers.py b/venv/Lib/site-packages/flask/helpers.py deleted file mode 100644 index 5d412c9..0000000 --- a/venv/Lib/site-packages/flask/helpers.py +++ /dev/null @@ -1,641 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import sys -import typing as t -from datetime import datetime -from functools import cache -from functools import update_wrapper - -import werkzeug.utils -from werkzeug.exceptions import abort as _wz_abort -from werkzeug.utils import redirect as _wz_redirect -from werkzeug.wrappers import Response as BaseResponse - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .globals import request_ctx -from .globals import session -from .signals import message_flashed - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -def get_debug_flag() -> bool: - """Get whether debug mode should be enabled for the app, indicated by the - :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. - """ - val = os.environ.get("FLASK_DEBUG") - return bool(val and val.lower() not in {"0", "false", "no"}) - - -def get_load_dotenv(default: bool = True) -> bool: - """Get whether the user has disabled loading default dotenv files by - setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load - the files. - - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -@t.overload -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr], -) -> t.Iterator[t.AnyStr]: ... - - -@t.overload -def stream_with_context( - generator_or_function: t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Callable[[t.Iterator[t.AnyStr]], t.Iterator[t.AnyStr]]: ... - - -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Iterator[t.AnyStr] | t.Callable[[t.Iterator[t.AnyStr]], t.Iterator[t.AnyStr]]: - """Wrap a response generator function so that it runs inside the current - request context. This keeps :data:`request`, :data:`session`, and :data:`g` - available, even though at the point the generator runs the request context - will typically have ended. - - Use it as a decorator on a generator function: - - .. 
code-block:: python - - from flask import stream_with_context, request, Response - - @app.get("/stream") - def streamed_response(): - @stream_with_context - def generate(): - yield "Hello " - yield request.args["name"] - yield "!" - - return Response(generate()) - - Or use it as a wrapper around a created generator: - - .. code-block:: python - - from flask import stream_with_context, request, Response - - @app.get("/stream") - def streamed_response(): - def generate(): - yield "Hello " - yield request.args["name"] - yield "!" - - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) # type: ignore[arg-type] - except TypeError: - - def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: - gen = generator_or_function(*args, **kwargs) # type: ignore[operator] - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) # type: ignore[arg-type] - - def generator() -> t.Iterator[t.AnyStr]: - if (req_ctx := _cv_request.get(None)) is None: - raise RuntimeError( - "'stream_with_context' can only be used when a request" - " context is active, such as in a view function." - ) - - app_ctx = _cv_app.get() - # Setup code below will run the generator to this point, so that the - # current contexts are recorded. The contexts must be pushed after, - # otherwise their ContextVar will record the wrong event loop during - # async view functions. - yield None # type: ignore[misc] - - # Push the app context first, so that the request context does not - # automatically create and push a different app context. - with app_ctx, req_ctx: - try: - yield from gen - finally: - # Clean up in case the user wrapped a WSGI iterator. - if hasattr(gen, "close"): - gen.close() - - # Execute the generator to the sentinel value. This ensures the context is - # preserved in the generator's state. Further iteration will push the - # context and yield from the original iterator. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g - - -def make_response(*args: t.Any) -> Response: - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. 
- - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for( - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, -) -> str: - """Generate a URL to the given endpoint with the given values. - - This requires an active request or application context, and calls - :meth:`current_app.url_for() `. See that method - for full documentation. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it is - external. - :param _external: If given, prefer the URL to be internal (False) or - require it to be external (True). External URLs include the - scheme and domain. When not in an active request, URLs are - external by default. - :param values: Values to use for the variable parts of the URL rule. - Unknown keys are appended as query string arguments, like - ``?a=b&c=d``. - - .. versionchanged:: 2.2 - Calls ``current_app.url_for``, allowing an app to override the - behavior. - - .. versionchanged:: 0.10 - The ``_scheme`` parameter was added. - - .. versionchanged:: 0.9 - The ``_anchor`` and ``_method`` parameters were added. - - .. versionchanged:: 0.9 - Calls ``app.handle_url_build_error`` on build errors. - """ - return current_app.url_for( - endpoint, - _anchor=_anchor, - _method=_method, - _scheme=_scheme, - _external=_external, - **values, - ) - - -def redirect( - location: str, code: int = 302, Response: type[BaseResponse] | None = None -) -> BaseResponse: - """Create a redirect response object. - - If :data:`~flask.current_app` is available, it will use its - :meth:`~flask.Flask.redirect` method, otherwise it will use - :func:`werkzeug.utils.redirect`. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - :param Response: The response class to use. Not used when - ``current_app`` is active, which uses ``app.response_class``. - - .. versionadded:: 2.2 - Calls ``current_app.redirect`` if available instead of always - using Werkzeug's default ``redirect``. - """ - if current_app: - return current_app.redirect(location, code=code) - - return _wz_redirect(location, code=code, Response=Response) - - -def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given - status code. - - If :data:`~flask.current_app` is available, it will call its - :attr:`~flask.Flask.aborter` object, otherwise it will use - :func:`werkzeug.exceptions.abort`. - - :param code: The status code for the exception, which must be - registered in ``app.aborter``. - :param args: Passed to the exception. - :param kwargs: Passed to the exception. - - .. versionadded:: 2.2 - Calls ``current_app.aborter`` if available instead of always - using Werkzeug's default ``abort``. 
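A minimal usage sketch of abort() inside a view; the route and the permission check are placeholders:

    from flask import Flask, abort

    app = Flask(__name__)

    @app.get("/admin")
    def admin():
        user_is_admin = False                # placeholder check
        if not user_is_admin:
            abort(403)                       # raises werkzeug.exceptions.Forbidden
        return "admin area"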
- """ - if current_app: - current_app.aborter(code, *args, **kwargs) - - _wz_abort(code, *args, **kwargs) - - -def get_template_attribute(template_name: str, attribute: str) -> t.Any: - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message: str, category: str = "message") -> None: - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - app = current_app._get_current_object() # type: ignore - message_flashed.send( - app, - _async_wrapper=app.ensure_sync, - message=message, - category=category, - ) - - -def get_flashed_messages( - with_categories: bool = False, category_filter: t.Iterable[str] = () -) -> list[str] | list[tuple[str, str]]: - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. - - See :doc:`/patterns/flashing` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: filter of categories to limit return values. Only - categories in the list will be returned. 
- """ - flashes = request_ctx.flashes - if flashes is None: - flashes = session.pop("_flashes") if "_flashes" in session else [] - request_ctx.flashes = flashes - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]: - if kwargs.get("max_age") is None: - kwargs["max_age"] = current_app.get_send_file_max_age - - kwargs.update( - environ=request.environ, - use_x_sendfile=current_app.config["USE_X_SENDFILE"], - response_class=current_app.response_class, - _root_path=current_app.root_path, - ) - return kwargs - - -def send_file( - path_or_file: os.PathLike[t.AnyStr] | str | t.IO[bytes], - mimetype: str | None = None, - as_attachment: bool = False, - download_name: str | None = None, - conditional: bool = True, - etag: bool | str = True, - last_modified: datetime | int | float | None = None, - max_age: None | (int | t.Callable[[str | None], int | None]) = None, -) -> Response: - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. Use :func:`send_from_directory` to safely serve - user-requested paths from within a directory. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, configuring Flask with - ``USE_X_SENDFILE = True`` will tell the server to send the given - path, which is much more efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. Make - sure the file pointer is seeked to the start of the data. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. - - .. versionchanged:: 2.0 - ``download_name`` replaces the ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces the ``cache_timeout`` parameter. 
- ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces the ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - Passing a file-like object that inherits from - :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather - than sending an empty file. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionchanged:: 1.1 - ``filename`` may be a :class:`~os.PathLike` object. - - .. versionchanged:: 1.1 - Passing a :class:`~io.BytesIO` object supports range requests. - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.0 - UTF-8 filenames as specified in :rfc:`2231` are supported. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file - objects. If you want to use automatic MIME and etag support, - pass a filename via ``filename_or_fp`` or - ``attachment_filename``. - - .. versionchanged:: 0.12 - ``attachment_filename`` is preferred over ``filename`` for MIME - detection. - - .. versionchanged:: 0.9 - ``cache_timeout`` defaults to - :meth:`Flask.get_send_file_max_age`. - - .. versionchanged:: 0.7 - MIME guessing and etag support for file-like objects was - removed because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. - - .. versionchanged:: 0.5 - The ``add_etags``, ``cache_timeout`` and ``conditional`` - parameters were added. The default behavior is to add etags. - - .. versionadded:: 0.2 - """ - return werkzeug.utils.send_file( # type: ignore[return-value] - **_prepare_send_file_kwargs( - path_or_file=path_or_file, - environ=request.environ, - mimetype=mimetype, - as_attachment=as_attachment, - download_name=download_name, - conditional=conditional, - etag=etag, - last_modified=last_modified, - max_age=max_age, - ) - ) - - -def send_from_directory( - directory: os.PathLike[str] | str, - path: os.PathLike[str] | str, - **kwargs: t.Any, -) -> Response: - """Send a file from within a directory using :func:`send_file`. - - .. code-block:: python - - @app.route("/uploads/") - def download_file(name): - return send_from_directory( - app.config['UPLOAD_FOLDER'], name, as_attachment=True - ) - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under, - relative to the current application's root path. This *must not* - be a value provided by the client, otherwise it becomes insecure. - :param path: The path to the file to send, relative to - ``directory``. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. versionchanged:: 2.0 - ``path`` replaces the ``filename`` parameter. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. 
versionadded:: 0.5 - """ - return werkzeug.utils.send_from_directory( # type: ignore[return-value] - directory, path, **_prepare_send_file_kwargs(**kwargs) - ) - - -def get_root_path(import_name: str) -> str: - """Find the root path of a package, or the path that contains a - module. If it cannot be found, returns the current working - directory. - - Not to be confused with the value returned by :func:`find_package`. - - :meta private: - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - - if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - try: - spec = importlib.util.find_spec(import_name) - - if spec is None: - raise ValueError - except (ImportError, ValueError): - loader = None - else: - loader = spec.loader - - # Loader does not exist or we're referring to an unloaded main - # module or a main module without path (interactive sessions), go - # with the current working directory. - if loader is None: - return os.getcwd() - - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) # pyright: ignore - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a file path it might be because it is a - # namespace package. In this case pick the root path from the - # first module that is contained in the package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided module" - f" {import_name!r}. This can happen because the module" - " came from an import hook that does not provide file" - " name information or because it's a namespace package." - " In this case the root path needs to be explicitly" - " provided." - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) # type: ignore[no-any-return] - - -@cache -def _split_blueprint_path(name: str) -> list[str]: - out: list[str] = [name] - - if "." in name: - out.extend(_split_blueprint_path(name.rpartition(".")[0])) - - return out diff --git a/venv/Lib/site-packages/flask/json/__init__.py b/venv/Lib/site-packages/flask/json/__init__.py deleted file mode 100644 index c0941d0..0000000 --- a/venv/Lib/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - -from ..globals import current_app -from .provider import _default - -if t.TYPE_CHECKING: # pragma: no cover - from ..wrappers import Response - - -def dumps(obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dumps() ` - method, otherwise it will use :func:`json.dumps`. - - :param obj: The data to serialize. - :param kwargs: Arguments passed to the ``dumps`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dumps``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. 
- """ - if current_app: - return current_app.json.dumps(obj, **kwargs) - - kwargs.setdefault("default", _default) - return _json.dumps(obj, **kwargs) - - -def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dump() ` - method, otherwise it will use :func:`json.dump`. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: Arguments passed to the ``dump`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dump``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - Writing to a binary file, and the ``encoding`` argument, will be - removed in Flask 2.1. - """ - if current_app: - current_app.json.dump(obj, fp, **kwargs) - else: - kwargs.setdefault("default", _default) - _json.dump(obj, fp, **kwargs) - - -def loads(s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.loads() ` - method, otherwise it will use :func:`json.loads`. - - :param s: Text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``loads`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.loads``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The data must be a - string or UTF-8 bytes. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.loads(s, **kwargs) - - return _json.loads(s, **kwargs) - - -def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.load() ` - method, otherwise it will use :func:`json.load`. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``load`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.load``, allowing an app to override - the behavior. - - .. versionchanged:: 2.2 - The ``app`` parameter will be removed in Flask 2.3. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The file must be text - mode, or binary mode with UTF-8 bytes. - """ - if current_app: - return current_app.json.load(fp, **kwargs) - - return _json.load(fp, **kwargs) - - -def jsonify(*args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. A dict or list returned from a view will be converted to a - JSON response automatically without needing to call this. - - This requires an active request or application context, and calls - :meth:`app.json.response() `. - - In debug mode, the output is formatted with indentation to make it - easier to read. This may also be controlled by the provider. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. 
- :param kwargs: Treat as a dict to serialize. - - .. versionchanged:: 2.2 - Calls ``current_app.json.response``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 0.11 - Added support for serializing top-level arrays. This was a - security risk in ancient browsers. See :ref:`security-json`. - - .. versionadded:: 0.2 - """ - return current_app.json.response(*args, **kwargs) # type: ignore[return-value] diff --git a/venv/Lib/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index f6d654d..0000000 Binary files a/venv/Lib/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/json/__pycache__/provider.cpython-312.pyc b/venv/Lib/site-packages/flask/json/__pycache__/provider.cpython-312.pyc deleted file mode 100644 index 3aabc4e..0000000 Binary files a/venv/Lib/site-packages/flask/json/__pycache__/provider.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/json/__pycache__/tag.cpython-312.pyc b/venv/Lib/site-packages/flask/json/__pycache__/tag.cpython-312.pyc deleted file mode 100644 index b027af0..0000000 Binary files a/venv/Lib/site-packages/flask/json/__pycache__/tag.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/json/provider.py b/venv/Lib/site-packages/flask/json/provider.py deleted file mode 100644 index ea7e475..0000000 --- a/venv/Lib/site-packages/flask/json/provider.py +++ /dev/null @@ -1,215 +0,0 @@ -from __future__ import annotations - -import dataclasses -import decimal -import json -import typing as t -import uuid -import weakref -from datetime import date - -from werkzeug.http import http_date - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.sansio.response import Response - - from ..sansio.app import App - - -class JSONProvider: - """A standard set of JSON operations for an application. Subclasses - of this can be used to customize JSON behavior or use different - JSON libraries. - - To implement a provider for a specific library, subclass this base - class and implement at least :meth:`dumps` and :meth:`loads`. All - other methods have default implementations. - - To use a different provider, either subclass ``Flask`` and set - :attr:`~flask.Flask.json_provider_class` to a provider class, or set - :attr:`app.json ` to an instance of the class. - - :param app: An application instance. This will be stored as a - :class:`weakref.proxy` on the :attr:`_app` attribute. - - .. versionadded:: 2.2 - """ - - def __init__(self, app: App) -> None: - self._app: App = weakref.proxy(app) - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - :param obj: The data to serialize. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: May be passed to the underlying JSON library. - """ - fp.write(self.dumps(obj, **kwargs)) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - :param s: Text or UTF-8 bytes. 
- :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - return self.loads(fp.read(), **kwargs) - - def _prepare_response_obj( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> t.Any: - if args and kwargs: - raise TypeError("app.json.response() takes either args or kwargs, not both") - - if not args and not kwargs: - return None - - if len(args) == 1: - return args[0] - - return args or kwargs - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. - - The :func:`~flask.json.jsonify` function calls this method for - the current application. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - return self._app.response_class(self.dumps(obj), mimetype="application/json") - - -def _default(o: t.Any) -> t.Any: - if isinstance(o, date): - return http_date(o) - - if isinstance(o, (decimal.Decimal, uuid.UUID)): - return str(o) - - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) # type: ignore[arg-type] - - if hasattr(o, "__html__"): - return str(o.__html__()) - - raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable") - - -class DefaultJSONProvider(JSONProvider): - """Provide JSON operations using Python's built-in :mod:`json` - library. Serializes the following additional data types: - - - :class:`datetime.datetime` and :class:`datetime.date` are - serialized to :rfc:`822` strings. This is the same as the HTTP - date format. - - :class:`uuid.UUID` is serialized to a string. - - :class:`dataclasses.dataclass` is passed to - :func:`dataclasses.asdict`. - - :class:`~markupsafe.Markup` (or any object with a ``__html__`` - method) will call the ``__html__`` method to get a string. - """ - - default: t.Callable[[t.Any], t.Any] = staticmethod(_default) # type: ignore[assignment] - """Apply this function to any object that :meth:`json.dumps` does - not know how to serialize. It should return a valid JSON type or - raise a ``TypeError``. - """ - - ensure_ascii = True - """Replace non-ASCII characters with escape sequences. This may be - more compatible with some clients, but can be disabled for better - performance and size. - """ - - sort_keys = True - """Sort the keys in any serialized dicts. This may be useful for - some caching situations, but can be disabled for better performance. - When enabled, keys must all be strings, they are not converted - before sorting. - """ - - compact: bool | None = None - """If ``True``, or ``None`` out of debug mode, the :meth:`response` - output will not add indentation, newlines, or spaces. If ``False``, - or ``None`` in debug mode, it will use a non-compact representation. - """ - - mimetype = "application/json" - """The mimetype set in :meth:`response`.""" - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON to a string. - - Keyword arguments are passed to :func:`json.dumps`. 
diff --git a/venv/Lib/site-packages/flask/json/tag.py b/venv/Lib/site-packages/flask/json/tag.py
deleted file mode 100644
index 8dc3629..0000000
--- a/venv/Lib/site-packages/flask/json/tag.py
+++ /dev/null
@@ -1,327 +0,0 @@
[327 lines of deleted vendored Flask source omitted]
- """ - tag = tag_class(self) - key = tag.key - - if key: - if not force and key in self.tags: - raise KeyError(f"Tag '{key}' is already registered.") - - self.tags[key] = tag - - if index is None: - self.order.append(tag) - else: - self.order.insert(index, tag) - - def tag(self, value: t.Any) -> t.Any: - """Convert a value to a tagged representation if necessary.""" - for tag in self.order: - if tag.check(value): - return tag.tag(value) - - return value - - def untag(self, value: dict[str, t.Any]) -> t.Any: - """Convert a tagged representation back to the original type.""" - if len(value) != 1: - return value - - key = next(iter(value)) - - if key not in self.tags: - return value - - return self.tags[key].to_python(value[key]) - - def _untag_scan(self, value: t.Any) -> t.Any: - if isinstance(value, dict): - # untag each item recursively - value = {k: self._untag_scan(v) for k, v in value.items()} - # untag the dict itself - value = self.untag(value) - elif isinstance(value, list): - # untag each item recursively - value = [self._untag_scan(item) for item in value] - - return value - - def dumps(self, value: t.Any) -> str: - """Tag the value and dump it to a compact JSON string.""" - return dumps(self.tag(value), separators=(",", ":")) - - def loads(self, value: str) -> t.Any: - """Load data from a JSON string and deserialized any tagged objects.""" - return self._untag_scan(loads(value)) diff --git a/venv/Lib/site-packages/flask/logging.py b/venv/Lib/site-packages/flask/logging.py deleted file mode 100644 index 0cb8f43..0000000 --- a/venv/Lib/site-packages/flask/logging.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -import logging -import sys -import typing as t - -from werkzeug.local import LocalProxy - -from .globals import request - -if t.TYPE_CHECKING: # pragma: no cover - from .sansio.app import App - - -@LocalProxy -def wsgi_errors_stream() -> t.TextIO: - """Find the most appropriate error stream for the application. If a request - is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. - - If you configure your own :class:`logging.StreamHandler`, you may want to - use this for the stream. If you are using file or dict configuration and - can't import this directly, you can refer to it as - ``ext://flask.logging.wsgi_errors_stream``. - """ - if request: - return request.environ["wsgi.errors"] # type: ignore[no-any-return] - - return sys.stderr - - -def has_level_handler(logger: logging.Logger) -> bool: - """Check if there is a handler in the logging chain that will handle the - given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. - """ - level = logger.getEffectiveLevel() - current = logger - - while current: - if any(handler.level <= level for handler in current.handlers): - return True - - if not current.propagate: - break - - current = current.parent # type: ignore - - return False - - -#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format -#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. -default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore -default_handler.setFormatter( - logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s") -) - - -def create_logger(app: App) -> logging.Logger: - """Get the Flask app's logger and configure it if needed. - - The logger name will be the same as - :attr:`app.import_name `. - - When :attr:`~flask.Flask.debug` is enabled, set the logger level to - :data:`logging.DEBUG` if it is not set. 
- - If there is no handler for the logger's effective level, add a - :class:`~logging.StreamHandler` for - :func:`~flask.logging.wsgi_errors_stream` with a basic format. - """ - logger = logging.getLogger(app.name) - - if app.debug and not logger.level: - logger.setLevel(logging.DEBUG) - - if not has_level_handler(logger): - logger.addHandler(default_handler) - - return logger diff --git a/venv/Lib/site-packages/flask/py.typed b/venv/Lib/site-packages/flask/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/flask/sansio/README.md b/venv/Lib/site-packages/flask/sansio/README.md deleted file mode 100644 index 623ac19..0000000 --- a/venv/Lib/site-packages/flask/sansio/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Sansio - -This folder contains code that can be used by alternative Flask -implementations, for example Quart. The code therefore cannot do any -IO, nor be part of a likely IO path. Finally this code cannot use the -Flask globals. diff --git a/venv/Lib/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc b/venv/Lib/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc deleted file mode 100644 index dfe6571..0000000 Binary files a/venv/Lib/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc b/venv/Lib/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc deleted file mode 100644 index 2763161..0000000 Binary files a/venv/Lib/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc b/venv/Lib/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc deleted file mode 100644 index c478ec5..0000000 Binary files a/venv/Lib/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask/sansio/app.py b/venv/Lib/site-packages/flask/sansio/app.py deleted file mode 100644 index a2592fe..0000000 --- a/venv/Lib/site-packages/flask/sansio/app.py +++ /dev/null @@ -1,964 +0,0 @@ -from __future__ import annotations - -import logging -import os -import sys -import typing as t -from datetime import timedelta -from itertools import chain - -from werkzeug.exceptions import Aborter -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import Rule -from werkzeug.sansio.response import Response -from werkzeug.utils import cached_property -from werkzeug.utils import redirect as _wz_redirect - -from .. 
import typing as ft -from ..config import Config -from ..config import ConfigAttribute -from ..ctx import _AppCtxGlobals -from ..helpers import _split_blueprint_path -from ..helpers import get_debug_flag -from ..json.provider import DefaultJSONProvider -from ..json.provider import JSONProvider -from ..logging import create_logger -from ..templating import DispatchingJinjaLoader -from ..templating import Environment -from .scaffold import _endpoint_from_view_func -from .scaffold import find_package -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.wrappers import Response as BaseResponse - - from ..testing import FlaskClient - from ..testing import FlaskCliRunner - from .blueprints import Blueprint - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class App(Scaffold): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. 
(For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - #: The class of the object assigned to :attr:`aborter`, created by - #: :meth:`create_aborter`. That object is called by - #: :func:`flask.abort` to raise HTTP errors, and can be - #: called directly as well. - #: - #: Defaults to :class:`werkzeug.exceptions.Aborter`. - #: - #: .. versionadded:: 2.2 - aborter_class = Aborter - - #: The class that is used for the Jinja environment. - #: - #: .. versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. 
Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute[bool]("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute[t.Union[str, bytes, None]]("SECRET_KEY") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute[timedelta]( - "PERMANENT_SESSION_LIFETIME", - get_converter=_make_timedelta, # type: ignore[arg-type] - ) - - json_provider_class: type[JSONProvider] = DefaultJSONProvider - """A subclass of :class:`~flask.json.provider.JSONProvider`. An - instance is created and assigned to :attr:`app.json` when creating - the app. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses - Python's built-in :mod:`json` library. A different provider can use - a different JSON library. - - .. versionadded:: 2.2 - """ - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options: dict[str, t.Any] = {} - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: The :meth:`test_client` method creates an instance of this test - #: client class. Defaults to :class:`~flask.testing.FlaskClient`. - #: - #: .. versionadded:: 0.7 - test_client_class: type[FlaskClient] | None = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. 
versionadded:: 1.0 - test_cli_runner_class: type[FlaskCliRunner] | None = None - - default_config: dict[str, t.Any] - response_class: type[Response] - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ) -> None: - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: An instance of :attr:`aborter_class` created by - #: :meth:`make_aborter`. This is called by :func:`flask.abort` - #: to raise HTTP errors, and can be called directly as well. - #: - #: .. versionadded:: 2.2 - #: Moved from ``flask.abort``, which calls this object. - self.aborter = self.make_aborter() - - self.json: JSONProvider = self.json_provider_class(self) - """Provides access to JSON methods. Functions in ``flask.json`` - will call methods on this provider when the application context - is active. Used for handling JSON requests and responses. - - An instance of :attr:`json_provider_class`. Can be customized by - changing that attribute on a subclass, or by assigning to this - attribute afterwards. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, - uses Python's built-in :mod:`json` library. A different provider - can use a different JSON library. - - .. versionadded:: 2.2 - """ - - #: A list of functions that are called by - #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function is called - #: with ``error``, ``endpoint`` and ``values``. If a function - #: returns ``None`` or raises a ``BuildError``, it is skipped. - #: Otherwise, its return value is returned by ``url_for``. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers: list[ - t.Callable[[Exception, str, dict[str, t.Any]], str] - ] = [] - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs: list[ft.TeardownCallable] = [] - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors: list[ft.ShellContextProcessorCallable] = [] - - #: Maps registered blueprint names to blueprint objects. The - #: dict retains the order the blueprints were registered in. 
- #: Blueprints can be registered multiple times, this dict does - #: not track how often they were attached. - #: - #: .. versionadded:: 0.7 - self.blueprints: dict[str, Blueprint] = {} - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions: dict[str, t.Any] = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class(host_matching=host_matching) - - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_first_request: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called" - " on the application. It has already handled its first" - " request, any changes will not be applied" - " consistently.\n" - "Make sure all imports, decorators, functions, etc." - " needed to set up the application are done before" - " running it." - ) - - @cached_property - def name(self) -> str: - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn: str | None = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @cached_property - def logger(self) -> logging.Logger: - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. versionadded:: 0.3 - """ - return create_logger(self) - - @cached_property - def jinja_env(self) -> Environment: - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. 
Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - def create_jinja_environment(self) -> Environment: - raise NotImplementedError() - - def make_config(self, instance_relative: bool = False) -> Config: - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def make_aborter(self) -> Aborter: - """Create the object to assign to :attr:`aborter`. That object - is called by :func:`flask.abort` to raise HTTP errors, and can - be called directly as well. - - By default, this creates an instance of :attr:`aborter_class`, - which defaults to :class:`werkzeug.exceptions.Aborter`. - - .. versionadded:: 2.2 - """ - return self.aborter_class() - - def auto_find_instance_path(self) -> str: - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, "instance") - return os.path.join(prefix, "var", f"{self.name}-instance") - - def create_global_jinja_loader(self) -> DispatchingJinjaLoader: - """Creates the loader for the Jinja environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def select_jinja_autoescape(self, filename: str) -> bool: - """Returns ``True`` if autoescaping should be active for the given - template name. If no template name is given, returns `True`. - - .. versionchanged:: 2.2 - Autoescaping is now enabled by default for ``.svg`` files. - - .. versionadded:: 0.5 - """ - if filename is None: - return True - return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg")) - - @property - def debug(self) -> bool: - """Whether debug mode is enabled. When using ``flask run`` to start the - development server, an interactive debugger will be shown for unhandled - exceptions, and the server will be reloaded when code changes. This maps to the - :data:`DEBUG` config key. It may not behave as expected if set late. - - **Do not enable debug mode when deploying in production.** - - Default: ``False`` - """ - return self.config["DEBUG"] # type: ignore[no-any-return] - - @debug.setter - def debug(self, value: bool) -> None: - self.config["DEBUG"] = value - - if self.config["TEMPLATES_AUTO_RELOAD"] is None: - self.jinja_env.auto_reload = value - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on the application. Keyword - arguments passed to this method will override the defaults set on the - blueprint. 
- - Calls the blueprint's :meth:`~flask.Blueprint.register` method after - recording the blueprint in the application's :attr:`blueprints`. - - :param blueprint: The blueprint to register. - :param url_prefix: Blueprint routes will be prefixed with this. - :param subdomain: Blueprint routes will match on this subdomain. - :param url_defaults: Blueprint routes will use these default values for - view arguments. - :param options: Additional keyword arguments are passed to - :class:`~flask.blueprints.BlueprintSetupState`. They can be - accessed in :meth:`~flask.Blueprint.record` callbacks. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 0.7 - """ - blueprint.register(self, options) - - def iter_blueprints(self) -> t.ValuesView[Blueprint]: - """Iterates over all blueprints by the order they were registered. - - .. versionadded:: 0.11 - """ - return self.blueprints.values() - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - options["endpoint"] = endpoint - methods = options.pop("methods", None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only ``GET`` as default. - if methods is None: - methods = getattr(view_func, "methods", None) or ("GET",) - if isinstance(methods, str): - raise TypeError( - "Allowed methods must be a list of strings, for" - ' example: @app.route(..., methods=["POST"])' - ) - methods = {item.upper() for item in methods} - - # Methods that should always be added - required_methods: set[str] = set(getattr(view_func, "required_methods", ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. - if provide_automatic_options is None: - provide_automatic_options = getattr( - view_func, "provide_automatic_options", None - ) - - if provide_automatic_options is None: - if "OPTIONS" not in methods and self.config["PROVIDE_AUTOMATIC_OPTIONS"]: - provide_automatic_options = True - required_methods.add("OPTIONS") - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - rule_obj = self.url_rule_class(rule, methods=methods, **options) - rule_obj.provide_automatic_options = provide_automatic_options # type: ignore[attr-defined] - - self.url_map.add(rule_obj) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError( - "View function mapping is overwriting an existing" - f" endpoint function: {endpoint}" - ) - self.view_functions[endpoint] = view_func - - @setupmethod - def template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. 
- """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def teardown_appcontext(self, f: T_teardown) -> T_teardown: - """Registers a function to be called when the application - context is popped. The application context is typically popped - after the request context for each request, at the end of CLI - commands, or after a manually pushed context ends. - - .. code-block:: python - - with app.app_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the app context is - made inactive. Since a request context typically also manages an - application context it would also be called when you pop a - request context. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. 
- - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def shell_context_processor( - self, f: T_shell_context_processor - ) -> T_shell_context_processor: - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - def _find_error_handler( - self, e: Exception, blueprints: list[str] - ) -> ft.ErrorHandlerCallable | None: - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. - """ - exc_class, code = self._get_exc_class_and_code(type(e)) - names = (*blueprints, None) - - for c in (code, None) if code is not None else (None,): - for name in names: - handler_map = self.error_handler_spec[name][c] - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - return None - - def trap_http_exception(self, e: Exception) -> bool: - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def should_ignore_error(self, error: BaseException | None) -> bool: - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def redirect(self, location: str, code: int = 302) -> BaseResponse: - """Create a redirect response object. - - This is called by :func:`flask.redirect`, and can be called - directly as well. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - - .. versionadded:: 2.2 - Moved from ``flask.redirect``, which calls this method. - """ - return _wz_redirect( - location, - code=code, - Response=self.response_class, # type: ignore[arg-type] - ) - - def inject_url_defaults(self, endpoint: str, values: dict[str, t.Any]) -> None: - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. 
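# Hedged sketch of the teardown_appcontext and shell_context_processor hooks
# described above; the handler bodies are placeholders, not taken from this
# repository.
from flask import Flask

app = Flask(__name__)

@app.teardown_appcontext
def cleanup(exception=None):
    # Called when the app context is popped; must not raise, and its return
    # value is ignored. A real handler might release a database session here.
    pass

@app.shell_context_processor
def make_shell_context():
    # Names returned here are pre-imported into the `flask shell` session.
    return {"app": app}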
versionadded:: 0.7 - """ - names: t.Iterable[str | None] = (None,) - - # url_for may be called outside a request context, parse the - # passed endpoint instead of using request.blueprints. - if "." in endpoint: - names = chain( - names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) - ) - - for name in names: - if name in self.url_default_functions: - for func in self.url_default_functions[name]: - func(endpoint, values) - - def handle_url_build_error( - self, error: BuildError, endpoint: str, values: dict[str, t.Any] - ) -> str: - """Called by :meth:`.url_for` if a - :exc:`~werkzeug.routing.BuildError` was raised. If this returns - a value, it will be returned by ``url_for``, otherwise the error - will be re-raised. - - Each function in :attr:`url_build_error_handlers` is called with - ``error``, ``endpoint`` and ``values``. If a function returns - ``None`` or raises a ``BuildError``, it is skipped. Otherwise, - its return value is returned by ``url_for``. - - :param error: The active ``BuildError`` being handled. - :param endpoint: The endpoint being built. - :param values: The keyword arguments passed to ``url_for``. - """ - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - except BuildError as e: - # make error available outside except block - error = e - else: - if rv is not None: - return rv - - # Re-raise if called with an active exception, otherwise raise - # the passed in exception. - if error is sys.exc_info()[1]: - raise - - raise error diff --git a/venv/Lib/site-packages/flask/sansio/blueprints.py b/venv/Lib/site-packages/flask/sansio/blueprints.py deleted file mode 100644 index 4f912cc..0000000 --- a/venv/Lib/site-packages/flask/sansio/blueprints.py +++ /dev/null @@ -1,632 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from .. import typing as ft -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from .app import App - -DeferredSetupFunction = t.Callable[["BlueprintSetupState"], None] -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) - - -class BlueprintSetupState: - """Temporary holder object for registering a blueprint with the - application. An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. 
- """ - - def __init__( - self, - blueprint: Blueprint, - app: App, - options: t.Any, - first_registration: bool, - ) -> None: - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - self.name = self.options.get("name", blueprint.name) - self.name_prefix = self.options.get("name_prefix", "") - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - **options: t.Any, - ) -> None: - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - - self.app.add_url_rule( - rule, - f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), - view_func, - defaults=defaults, - **options, - ) - - -class Blueprint(Scaffold): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :doc:`/blueprints` for more information. - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. - :param static_folder: A folder with static files that should be - served by the blueprint's static route. The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. 
- :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically set - this based on ``import_name``. In certain situations this - automatic detection can fail, so the path can be specified - manually instead. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. - The ``cli_group`` parameter controls the name of the group under - the ``flask`` command. - - .. versionadded:: 0.7 - """ - - _got_registered_once = False - - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore[assignment] - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if not name: - raise ValueError("'name' may not be empty.") - - if "." in name: - raise ValueError("'name' may not contain a dot '.' character.") - - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.deferred_functions: list[DeferredSetupFunction] = [] - - if url_defaults is None: - url_defaults = {} - - self.url_values_defaults = url_defaults - self.cli_group = cli_group - self._blueprints: list[tuple[Blueprint, dict[str, t.Any]]] = [] - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_registered_once: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called on the blueprint" - f" '{self.name}'. It has already been registered at least once, any" - " changes will not be applied consistently.\n" - "Make sure all imports, decorators, functions, etc. needed to set up" - " the blueprint are done before registering it." - ) - - @setupmethod - def record(self, func: DeferredSetupFunction) -> None: - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - self.deferred_functions.append(func) - - @setupmethod - def record_once(self, func: DeferredSetupFunction) -> None: - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. 
- """ - - def wrapper(state: BlueprintSetupState) -> None: - if state.first_registration: - func(state) - - self.record(update_wrapper(wrapper, func)) - - def make_setup_state( - self, app: App, options: dict[str, t.Any], first_registration: bool = False - ) -> BlueprintSetupState: - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on this blueprint. Keyword - arguments passed to this method will override the defaults set - on the blueprint. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 2.0 - """ - if blueprint is self: - raise ValueError("Cannot register a blueprint on itself") - self._blueprints.append((blueprint, options)) - - def register(self, app: App, options: dict[str, t.Any]) -> None: - """Called by :meth:`Flask.register_blueprint` to register all - views and callbacks registered on the blueprint with the - application. Creates a :class:`.BlueprintSetupState` and calls - each :meth:`record` callback with it. - - :param app: The application this blueprint is being registered - with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - - .. versionchanged:: 2.3 - Nested blueprints now correctly apply subdomains. - - .. versionchanged:: 2.1 - Registering the same blueprint with the same name multiple - times is an error. - - .. versionchanged:: 2.0.1 - Nested blueprints are registered with their dotted name. - This allows different blueprints with the same name to be - nested at different locations. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - """ - name_prefix = options.get("name_prefix", "") - self_name = options.get("name", self.name) - name = f"{name_prefix}.{self_name}".lstrip(".") - - if name in app.blueprints: - bp_desc = "this" if app.blueprints[name] is self else "a different" - existing_at = f" '{name}'" if self_name != name else "" - - raise ValueError( - f"The name '{self_name}' is already registered for" - f" {bp_desc} blueprint{existing_at}. Use 'name=' to" - f" provide a unique name." - ) - - first_bp_registration = not any(bp is self for bp in app.blueprints.values()) - first_name_registration = name not in app.blueprints - - app.blueprints[name] = self - self._got_registered_once = True - state = self.make_setup_state(app, options, first_bp_registration) - - if self.has_static_folder: - state.add_url_rule( - f"{self.static_url_path}/", - view_func=self.send_static_file, # type: ignore[attr-defined] - endpoint="static", - ) - - # Merge blueprint data into parent. 
- if first_bp_registration or first_name_registration: - self._merge_blueprint_funcs(app, name) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if self.cli.commands: - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - for blueprint, bp_options in self._blueprints: - bp_options = bp_options.copy() - bp_url_prefix = bp_options.get("url_prefix") - bp_subdomain = bp_options.get("subdomain") - - if bp_subdomain is None: - bp_subdomain = blueprint.subdomain - - if state.subdomain is not None and bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain + "." + state.subdomain - elif bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain - elif state.subdomain is not None: - bp_options["subdomain"] = state.subdomain - - if bp_url_prefix is None: - bp_url_prefix = blueprint.url_prefix - - if state.url_prefix is not None and bp_url_prefix is not None: - bp_options["url_prefix"] = ( - state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") - ) - elif bp_url_prefix is not None: - bp_options["url_prefix"] = bp_url_prefix - elif state.url_prefix is not None: - bp_options["url_prefix"] = state.url_prefix - - bp_options["name_prefix"] = name - blueprint.register(app, bp_options) - - def _merge_blueprint_funcs(self, app: App, name: str) -> None: - def extend( - bp_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - parent_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - ) -> None: - for key, values in bp_dict.items(): - key = name if key is None else f"{name}.{key}" - parent_dict[key].extend(values) - - for key, value in self.error_handler_spec.items(): - key = name if key is None else f"{name}.{key}" - value = defaultdict( - dict, - { - code: {exc_class: func for exc_class, func in code_values.items()} - for code, code_values in value.items() - }, - ) - app.error_handler_spec[key] = value - - for endpoint, func in self.view_functions.items(): - app.view_functions[endpoint] = func - - extend(self.before_request_funcs, app.before_request_funcs) - extend(self.after_request_funcs, app.after_request_funcs) - extend( - self.teardown_request_funcs, - app.teardown_request_funcs, - ) - extend(self.url_default_functions, app.url_default_functions) - extend(self.url_value_preprocessors, app.url_value_preprocessors) - extend(self.template_context_processors, app.template_context_processors) - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for - full documentation. - - The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, - used with :func:`url_for`, is prefixed with the blueprint's name. - """ - if endpoint and "." in endpoint: - raise ValueError("'endpoint' may not contain a dot '.' character.") - - if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: - raise ValueError("'view_func' name may not contain a dot '.' 
character.") - - self.record( - lambda s: s.add_url_rule( - rule, - endpoint, - view_func, - provide_automatic_options=provide_automatic_options, - **options, - ) - ) - - @setupmethod - def app_template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """Register a template filter, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_app_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a template filter, available in any template rendered by the - application. Works like the :meth:`app_template_filter` decorator. Equivalent to - :meth:`.Flask.add_template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """Register a template test, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_app_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a template test, available in any template rendered by the - application. Works like the :meth:`app_template_test` decorator. Equivalent to - :meth:`.Flask.add_template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """Register a template global, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_app_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a template global, available in any template rendered by the - application. Works like the :meth:`app_template_global` decorator. Equivalent to - :meth:`.Flask.add_template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. 
- """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def before_app_request(self, f: T_before_request) -> T_before_request: - """Like :meth:`before_request`, but before every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.before_request`. - """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def after_app_request(self, f: T_after_request) -> T_after_request: - """Like :meth:`after_request`, but after every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.after_request`. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def teardown_app_request(self, f: T_teardown) -> T_teardown: - """Like :meth:`teardown_request`, but after every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_context_processor( - self, f: T_template_context_processor - ) -> T_template_context_processor: - """Like :meth:`context_processor`, but for templates rendered by every view, not - only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_errorhandler( - self, code: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Like :meth:`errorhandler`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.errorhandler`. - """ - - def decorator(f: T_error_handler) -> T_error_handler: - def from_blueprint(state: BlueprintSetupState) -> None: - state.app.errorhandler(code)(f) - - self.record_once(from_blueprint) - return f - - return decorator - - @setupmethod - def app_url_value_preprocessor( - self, f: T_url_value_preprocessor - ) -> T_url_value_preprocessor: - """Like :meth:`url_value_preprocessor`, but for every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Like :meth:`url_defaults`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.url_defaults`. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f diff --git a/venv/Lib/site-packages/flask/sansio/scaffold.py b/venv/Lib/site-packages/flask/sansio/scaffold.py deleted file mode 100644 index 0e96f15..0000000 --- a/venv/Lib/site-packages/flask/sansio/scaffold.py +++ /dev/null @@ -1,792 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import pathlib -import sys -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from jinja2 import BaseLoader -from jinja2 import FileSystemLoader -from werkzeug.exceptions import default_exceptions -from werkzeug.exceptions import HTTPException -from werkzeug.utils import cached_property - -from .. 
import typing as ft -from ..helpers import get_root_path -from ..templating import _default_template_ctx_processor - -if t.TYPE_CHECKING: # pragma: no cover - from click import Group - -# a singleton sentinel value for parameter defaults -_sentinel = object() - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) -T_route = t.TypeVar("T_route", bound=ft.RouteCallable) - - -def setupmethod(f: F) -> F: - f_name = f.__name__ - - def wrapper_func(self: Scaffold, *args: t.Any, **kwargs: t.Any) -> t.Any: - self._check_setup_finished(f_name) - return f(self, *args, **kwargs) - - return t.cast(F, update_wrapper(wrapper_func, f)) - - -class Scaffold: - """Common behavior shared between :class:`~flask.Flask` and - :class:`~flask.blueprints.Blueprint`. - - :param import_name: The import name of the module where this object - is defined. Usually :attr:`__name__` should be used. - :param static_folder: Path to a folder of static files to serve. - If this is set, a static route will be added. - :param static_url_path: URL prefix for the static route. - :param template_folder: Path to a folder containing template files. - for rendering. If this is set, a Jinja loader will be added. - :param root_path: The path that static, template, and resource files - are relative to. Typically not set, it is discovered based on - the ``import_name``. - - .. versionadded:: 2.0 - """ - - cli: Group - name: str - _static_folder: str | None = None - _static_url_path: str | None = None - - def __init__( - self, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - root_path: str | None = None, - ): - #: The name of the package or module that this object belongs - #: to. Do not change this once it is set by the constructor. - self.import_name = import_name - - self.static_folder = static_folder - self.static_url_path = static_url_path - - #: The path to the templates folder, relative to - #: :attr:`root_path`, to add to the template loader. ``None`` if - #: templates should not be added. - self.template_folder = template_folder - - if root_path is None: - root_path = get_root_path(self.import_name) - - #: Absolute path to the package on the filesystem. Used to look - #: up resources contained in the package. - self.root_path = root_path - - #: A dictionary mapping endpoint names to view functions. - #: - #: To register a view function, use the :meth:`route` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.view_functions: dict[str, ft.RouteCallable] = {} - - #: A data structure of registered error handlers, in the format - #: ``{scope: {code: {class: handler}}}``. The ``scope`` key is - #: the name of a blueprint the handlers are active for, or - #: ``None`` for all requests. 
The ``code`` key is the HTTP - #: status code for ``HTTPException``, or ``None`` for - #: other exceptions. The innermost dictionary maps exception - #: classes to handler functions. - #: - #: To register an error handler, use the :meth:`errorhandler` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.error_handler_spec: dict[ - ft.AppOrBlueprintKey, - dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]], - ] = defaultdict(lambda: defaultdict(dict)) - - #: A data structure of functions to call at the beginning of - #: each request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`before_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.before_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`after_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.after_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.AfterRequestCallable[t.Any]] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request even if an exception is raised, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`teardown_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.teardown_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.TeardownCallable] - ] = defaultdict(list) - - #: A data structure of functions to call to pass extra context - #: values when rendering templates, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`context_processor` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.template_context_processors: dict[ - ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable] - ] = defaultdict(list, {None: [_default_template_ctx_processor]}) - - #: A data structure of functions to call to modify the keyword - #: arguments passed to the view function, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the - #: :meth:`url_value_preprocessor` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. 
- self.url_value_preprocessors: dict[ - ft.AppOrBlueprintKey, - list[ft.URLValuePreprocessorCallable], - ] = defaultdict(list) - - #: A data structure of functions to call to modify the keyword - #: arguments when generating URLs, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`url_defaults` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.url_default_functions: dict[ - ft.AppOrBlueprintKey, list[ft.URLDefaultCallable] - ] = defaultdict(list) - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.name!r}>" - - def _check_setup_finished(self, f_name: str) -> None: - raise NotImplementedError - - @property - def static_folder(self) -> str | None: - """The absolute path to the configured static folder. ``None`` - if no static folder is set. - """ - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - else: - return None - - @static_folder.setter - def static_folder(self, value: str | os.PathLike[str] | None) -> None: - if value is not None: - value = os.fspath(value).rstrip(r"\/") - - self._static_folder = value - - @property - def has_static_folder(self) -> bool: - """``True`` if :attr:`static_folder` is set. - - .. versionadded:: 0.5 - """ - return self.static_folder is not None - - @property - def static_url_path(self) -> str | None: - """The URL prefix that the static route will be accessible from. - - If it was not configured during init, it is derived from - :attr:`static_folder`. - """ - if self._static_url_path is not None: - return self._static_url_path - - if self.static_folder is not None: - basename = os.path.basename(self.static_folder) - return f"/{basename}".rstrip("/") - - return None - - @static_url_path.setter - def static_url_path(self, value: str | None) -> None: - if value is not None: - value = value.rstrip("/") - - self._static_url_path = value - - @cached_property - def jinja_loader(self) -> BaseLoader | None: - """The Jinja loader for this object's templates. By default this - is a class :class:`jinja2.loaders.FileSystemLoader` to - :attr:`template_folder` if it is set. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) - else: - return None - - def _method_route( - self, - method: str, - rule: str, - options: dict[str, t.Any], - ) -> t.Callable[[T_route], T_route]: - if "methods" in options: - raise TypeError("Use the 'route' decorator to use the 'methods' argument.") - - return self.route(rule, methods=[method], **options) - - @setupmethod - def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["GET"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("GET", rule, options) - - @setupmethod - def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["POST"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("POST", rule, options) - - @setupmethod - def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PUT"]``. - - .. 
versionadded:: 2.0 - """ - return self._method_route("PUT", rule, options) - - @setupmethod - def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["DELETE"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("DELETE", rule, options) - - @setupmethod - def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PATCH"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("PATCH", rule, options) - - @setupmethod - def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Decorate a view function to register it with the given URL - rule and options. Calls :meth:`add_url_rule`, which has more - details about the implementation. - - .. code-block:: python - - @app.route("/") - def index(): - return "Hello, World!" - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and - ``OPTIONS`` are added automatically. - - :param rule: The URL rule string. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. - """ - - def decorator(f: T_route) -> T_route: - endpoint = options.pop("endpoint", None) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a rule for routing incoming requests and building - URLs. The :meth:`route` decorator is a shortcut to call this - with the ``view_func`` argument. These are equivalent: - - .. code-block:: python - - @app.route("/") - def index(): - ... - - .. code-block:: python - - def index(): - ... - - app.add_url_rule("/", view_func=index) - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. An error - will be raised if a function has already been registered for the - endpoint. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is - always added automatically, and ``OPTIONS`` is added - automatically by default. - - ``view_func`` does not necessarily need to be passed, but if the - rule should participate in routing an endpoint name must be - associated with a view function at some point with the - :meth:`endpoint` decorator. - - .. code-block:: python - - app.add_url_rule("/", endpoint="index") - - @app.endpoint("index") - def index(): - ... - - If ``view_func`` has a ``required_methods`` attribute, those - methods are added to the passed and automatic methods. If it - has a ``provide_automatic_methods`` attribute, it is used as the - default if the parameter is not passed. - - :param rule: The URL rule string. - :param endpoint: The endpoint name to associate with the rule - and view function. Used when routing and building URLs. - Defaults to ``view_func.__name__``. - :param view_func: The view function to associate with the - endpoint name. - :param provide_automatic_options: Add the ``OPTIONS`` method and - respond to ``OPTIONS`` requests automatically. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. 
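# Small sketch of the get/post shortcuts and the route()/add_url_rule()
# equivalence documented above; URLs and view names are assumptions.
from flask import Flask, request

app = Flask(__name__)

@app.get("/items")           # same as @app.route("/items", methods=["GET"])
def list_items():
    return {"items": []}

@app.post("/items")          # same as @app.route("/items", methods=["POST"])
def create_item():
    return request.get_json(), 201

def health():
    return "ok"

# Equivalent to decorating health() with @app.route("/health"); the endpoint
# defaults to the view function name.
app.add_url_rule("/health", view_func=health)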
- """ - raise NotImplementedError - - @setupmethod - def endpoint(self, endpoint: str) -> t.Callable[[F], F]: - """Decorate a view function to register it for the given - endpoint. Used if a rule is added without a ``view_func`` with - :meth:`add_url_rule`. - - .. code-block:: python - - app.add_url_rule("/ex", endpoint="example") - - @app.endpoint("example") - def example(): - ... - - :param endpoint: The endpoint name to associate with the view - function. - """ - - def decorator(f: F) -> F: - self.view_functions[endpoint] = f - return f - - return decorator - - @setupmethod - def before_request(self, f: T_before_request) -> T_before_request: - """Register a function to run before each request. - - For example, this can be used to open a database connection, or - to load the logged in user from the session. - - .. code-block:: python - - @app.before_request - def load_user(): - if "user_id" in session: - g.user = db.session.get(session["user_id"]) - - The function will be called without any arguments. If it returns - a non-``None`` value, the value is handled as if it was the - return value from the view, and further request handling is - stopped. - - This is available on both app and blueprint objects. When used on an app, this - executes before every request. When used on a blueprint, this executes before - every request that the blueprint handles. To register with a blueprint and - execute before every request, use :meth:`.Blueprint.before_app_request`. - """ - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def after_request(self, f: T_after_request) -> T_after_request: - """Register a function to run after each request to this object. - - The function is called with the response object, and must return - a response object. This allows the functions to modify or - replace the response before it is sent. - - If a function raises an exception, any remaining - ``after_request`` functions will not be called. Therefore, this - should not be used for actions that must execute, such as to - close resources. Use :meth:`teardown_request` for that. - - This is available on both app and blueprint objects. When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.after_app_request`. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f: T_teardown) -> T_teardown: - """Register a function to be called when the request context is - popped. Typically this happens at the end of each request, but - contexts may be pushed manually as well during testing. - - .. code-block:: python - - with app.test_request_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the request context is - made inactive. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - This is available on both app and blueprint objects. 
When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.teardown_app_request`. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def context_processor( - self, - f: T_template_context_processor, - ) -> T_template_context_processor: - """Registers a template context processor function. These functions run before - rendering a template. The keys of the returned dict are added as variables - available in the template. - - This is available on both app and blueprint objects. When used on an app, this - is called for every rendered template. When used on a blueprint, this is called - for templates rendered from the blueprint's views. To register with a blueprint - and affect every template, use :meth:`.Blueprint.app_context_processor`. - """ - self.template_context_processors[None].append(f) - return f - - @setupmethod - def url_value_preprocessor( - self, - f: T_url_value_preprocessor, - ) -> T_url_value_preprocessor: - """Register a URL value preprocessor function for all view - functions in the application. These functions will be called before the - :meth:`before_request` functions. - - The function can modify the values captured from the matched url before - they are passed to the view. For example, this can be used to pop a - common language code value and place it in ``g`` rather than pass it to - every view. - - The function is passed the endpoint name and values dict. The return - value is ignored. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_value_preprocessor`. - """ - self.url_value_preprocessors[None].append(f) - return f - - @setupmethod - def url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_defaults`. - """ - self.url_default_functions[None].append(f) - return f - - @setupmethod - def errorhandler( - self, code_or_exception: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Register a function to handle errors by code or exception class. - - A decorator that is used to register a function given an - error code. Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - This is available on both app and blueprint objects. When used on an app, this - can handle errors from every request. When used on a blueprint, this can handle - errors from requests that the blueprint handles. To register with a blueprint - and affect every request, use :meth:`.Blueprint.app_errorhandler`. - - .. 
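# Sketch of the url_value_preprocessor / url_defaults pair described above,
# using the language-code pattern from the docstring; names are assumptions.
from flask import Flask, g

app = Flask(__name__)

@app.url_value_preprocessor
def pull_lang_code(endpoint, values):
    # Pop the captured value so views don't need a lang_code parameter.
    g.lang_code = (values or {}).pop("lang_code", "en")

@app.url_defaults
def add_lang_code(endpoint, values):
    # Re-inject the language code when building URLs with url_for().
    if app.url_map.is_endpoint_expecting(endpoint, "lang_code"):
        values.setdefault("lang_code", g.get("lang_code", "en"))

@app.route("/<lang_code>/about")
def about():
    return f"about page ({g.lang_code})"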
versionadded:: 0.7 - Use :meth:`register_error_handler` instead of modifying - :attr:`error_handler_spec` directly, for application wide error - handlers. - - .. versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code_or_exception: the code as integer for the handler, or - an arbitrary exception - """ - - def decorator(f: T_error_handler) -> T_error_handler: - self.register_error_handler(code_or_exception, f) - return f - - return decorator - - @setupmethod - def register_error_handler( - self, - code_or_exception: type[Exception] | int, - f: ft.ErrorHandlerCallable, - ) -> None: - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - exc_class, code = self._get_exc_class_and_code(code_or_exception) - self.error_handler_spec[None][code][exc_class] = f - - @staticmethod - def _get_exc_class_and_code( - exc_class_or_code: type[Exception] | int, - ) -> tuple[type[Exception], int | None]: - """Get the exception class being handled. For HTTP status codes - or ``HTTPException`` subclasses, return both the exception and - status code. - - :param exc_class_or_code: Any exception class, or an HTTP status - code as an integer. - """ - exc_class: type[Exception] - - if isinstance(exc_class_or_code, int): - try: - exc_class = default_exceptions[exc_class_or_code] - except KeyError: - raise ValueError( - f"'{exc_class_or_code}' is not a recognized HTTP" - " error code. Use a subclass of HTTPException with" - " that code instead." - ) from None - else: - exc_class = exc_class_or_code - - if isinstance(exc_class, Exception): - raise TypeError( - f"{exc_class!r} is an instance, not a class. Handlers" - " can only be registered for Exception classes or HTTP" - " error codes." - ) - - if not issubclass(exc_class, Exception): - raise ValueError( - f"'{exc_class.__name__}' is not a subclass of Exception." - " Handlers can only be registered for Exception classes" - " or HTTP error codes." - ) - - if issubclass(exc_class, HTTPException): - return exc_class, exc_class.code - else: - return exc_class, None - - -def _endpoint_from_view_func(view_func: ft.RouteCallable) -> str: - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, "expected view func if endpoint is not provided." - return view_func.__name__ - - -def _find_package_path(import_name: str) -> str: - """Find the path that contains the package or module.""" - root_mod_name, _, _ = import_name.partition(".") - - try: - root_spec = importlib.util.find_spec(root_mod_name) - - if root_spec is None: - raise ValueError("not found") - except (ImportError, ValueError): - # ImportError: the machinery told us it does not exist - # ValueError: - # - the module name was invalid - # - the module name is __main__ - # - we raised `ValueError` due to `root_spec` being `None` - return os.getcwd() - - if root_spec.submodule_search_locations: - if root_spec.origin is None or root_spec.origin == "namespace": - # namespace package - package_spec = importlib.util.find_spec(import_name) - - if package_spec is not None and package_spec.submodule_search_locations: - # Pick the path in the namespace that contains the submodule. 
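# Non-decorator registration via register_error_handler(), as documented
# above; the exception class choice and response body are assumptions.
from flask import Flask
from werkzeug.exceptions import RequestEntityTooLarge

app = Flask(__name__)

def too_large(error):
    return {"error": "payload too large"}, 413

# Equivalent to decorating too_large() with @app.errorhandler(...).
app.register_error_handler(RequestEntityTooLarge, too_large)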
- package_path = pathlib.Path( - os.path.commonpath(package_spec.submodule_search_locations) - ) - search_location = next( - location - for location in root_spec.submodule_search_locations - if package_path.is_relative_to(location) - ) - else: - # Pick the first path. - search_location = root_spec.submodule_search_locations[0] - - return os.path.dirname(search_location) - else: - # package with __init__.py - return os.path.dirname(os.path.dirname(root_spec.origin)) - else: - # module - return os.path.dirname(root_spec.origin) # type: ignore[type-var, return-value] - - -def find_package(import_name: str) -> tuple[str | None, str]: - """Find the prefix that a package is installed under, and the path - that it would be imported from. - - The prefix is the directory containing the standard directory - hierarchy (lib, bin, etc.). If the package is not installed to the - system (:attr:`sys.prefix`) or a virtualenv (``site-packages``), - ``None`` is returned. - - The path is the entry in :attr:`sys.path` that contains the package - for import. If the package is not installed, it's assumed that the - package was imported from the current working directory. - """ - package_path = _find_package_path(import_name) - py_prefix = os.path.abspath(sys.prefix) - - # installed to the system - if pathlib.PurePath(package_path).is_relative_to(py_prefix): - return py_prefix, package_path - - site_parent, site_folder = os.path.split(package_path) - - # installed to a virtualenv - if site_folder.lower() == "site-packages": - parent, folder = os.path.split(site_parent) - - # Windows (prefix/lib/site-packages) - if folder.lower() == "lib": - return parent, package_path - - # Unix (prefix/lib/pythonX.Y/site-packages) - if os.path.basename(parent).lower() == "lib": - return os.path.dirname(parent), package_path - - # something else (prefix/site-packages) - return site_parent, package_path - - # not installed - return None, package_path diff --git a/venv/Lib/site-packages/flask/sessions.py b/venv/Lib/site-packages/flask/sessions.py deleted file mode 100644 index 0a357d9..0000000 --- a/venv/Lib/site-packages/flask/sessions.py +++ /dev/null @@ -1,399 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import hashlib -import typing as t -from collections.abc import MutableMapping -from datetime import datetime -from datetime import timezone - -from itsdangerous import BadSignature -from itsdangerous import URLSafeTimedSerializer -from werkzeug.datastructures import CallbackDict - -from .json.tag import TaggedJSONSerializer - -if t.TYPE_CHECKING: # pragma: no cover - import typing_extensions as te - - from .app import Flask - from .wrappers import Request - from .wrappers import Response - - -class SessionMixin(MutableMapping[str, t.Any]): - """Expands a basic dictionary with session attributes.""" - - @property - def permanent(self) -> bool: - """This reflects the ``'_permanent'`` key in the dict.""" - return self.get("_permanent", False) - - @permanent.setter - def permanent(self, value: bool) -> None: - self["_permanent"] = bool(value) - - #: Some implementations can detect whether a session is newly - #: created, but that is not guaranteed. Use with caution. The mixin - # default is hard-coded ``False``. - new = False - - #: Some implementations can detect changes to the session and set - #: this when that happens. The mixin default is hard coded to - #: ``True``. - modified = True - - #: Some implementations can detect when session data is read or - #: written and set this when that happens. 
The mixin default is hard - #: coded to ``True``. - accessed = True - - -class SecureCookieSession(CallbackDict[str, t.Any], SessionMixin): - """Base class for sessions based on signed cookies. - - This session backend will set the :attr:`modified` and - :attr:`accessed` attributes. It cannot reliably track whether a - session is new (vs. empty), so :attr:`new` remains hard coded to - ``False``. - """ - - #: When data is changed, this is set to ``True``. Only the session - #: dictionary itself is tracked; if the session contains mutable - #: data (for example a nested dict) then this must be set to - #: ``True`` manually when modifying that data. The session cookie - #: will only be written to the response if this is ``True``. - modified = False - - #: When data is read or written, this is set to ``True``. Used by - # :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` - #: header, which allows caching proxies to cache different pages for - #: different users. - accessed = False - - def __init__( - self, - initial: c.Mapping[str, t.Any] | c.Iterable[tuple[str, t.Any]] | None = None, - ) -> None: - def on_update(self: te.Self) -> None: - self.modified = True - self.accessed = True - - super().__init__(initial, on_update) - - def __getitem__(self, key: str) -> t.Any: - self.accessed = True - return super().__getitem__(key) - - def get(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().get(key, default) - - def setdefault(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().setdefault(key, default) - - -class NullSession(SecureCookieSession): - """Class used to generate nicer error messages if sessions are not - available. Will still allow read-only access to the empty session - but fail on setting. - """ - - def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - raise RuntimeError( - "The session is unavailable because no secret " - "key was set. Set the secret_key on the " - "application to something unique and secret." - ) - - __setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # noqa: B950 - del _fail - - -class SessionInterface: - """The basic interface you have to implement in order to replace the - default session interface which uses werkzeug's securecookie - implementation. The only methods you have to implement are - :meth:`open_session` and :meth:`save_session`, the others have - useful defaults which you don't need to change. - - The session object returned by the :meth:`open_session` method has to - provide a dictionary like interface plus the properties and methods - from the :class:`SessionMixin`. We recommend just subclassing a dict - and adding that mixin:: - - class Session(dict, SessionMixin): - pass - - If :meth:`open_session` returns ``None`` Flask will call into - :meth:`make_null_session` to create a session that acts as replacement - if the session support cannot work because some requirement is not - fulfilled. The default :class:`NullSession` class that is created - will complain that the secret key was not set. - - To replace the session interface on an application all you have to do - is to assign :attr:`flask.Flask.session_interface`:: - - app = Flask(__name__) - app.session_interface = MySessionInterface() - - Multiple requests with the same session may be sent and handled - concurrently. When implementing a new session interface, consider - whether reads or writes to the backing store must be synchronized. 
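# Hedged sketch of a custom session backend following the SessionInterface
# contract described above (open_session/save_session). The in-memory store
# and cookie handling are simplified assumptions, not a production design.
import uuid

from flask import Flask
from flask.sessions import SecureCookieSession, SessionInterface

class MemorySessionInterface(SessionInterface):
    store = {}  # shared process-local store; illustration only

    def open_session(self, app, request):
        sid = request.cookies.get(self.get_cookie_name(app))
        session = SecureCookieSession(self.store.get(sid, {}))
        session.sid = sid or uuid.uuid4().hex
        return session

    def save_session(self, app, session, response):
        self.store[session.sid] = dict(session)
        response.set_cookie(
            self.get_cookie_name(app),
            session.sid,
            path=self.get_cookie_path(app),
            httponly=self.get_cookie_httponly(app),
        )

app = Flask(__name__)
app.session_interface = MemorySessionInterface()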
- There is no guarantee on the order in which the session for each - request is opened or saved, it will occur in the order that requests - begin and end processing. - - .. versionadded:: 0.8 - """ - - #: :meth:`make_null_session` will look here for the class that should - #: be created when a null session is requested. Likewise the - #: :meth:`is_null_session` method will perform a typecheck against - #: this type. - null_session_class = NullSession - - #: A flag that indicates if the session interface is pickle based. - #: This can be used by Flask extensions to make a decision in regards - #: to how to deal with the session object. - #: - #: .. versionadded:: 0.10 - pickle_based = False - - def make_null_session(self, app: Flask) -> NullSession: - """Creates a null session which acts as a replacement object if the - real session support could not be loaded due to a configuration - error. This mainly aids the user experience because the job of the - null session is to still support lookup without complaining but - modifications are answered with a helpful error message of what - failed. - - This creates an instance of :attr:`null_session_class` by default. - """ - return self.null_session_class() - - def is_null_session(self, obj: object) -> bool: - """Checks if a given object is a null session. Null sessions are - not asked to be saved. - - This checks if the object is an instance of :attr:`null_session_class` - by default. - """ - return isinstance(obj, self.null_session_class) - - def get_cookie_name(self, app: Flask) -> str: - """The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``.""" - return app.config["SESSION_COOKIE_NAME"] # type: ignore[no-any-return] - - def get_cookie_domain(self, app: Flask) -> str | None: - """The value of the ``Domain`` parameter on the session cookie. If not set, - browsers will only send the cookie to the exact domain it was set from. - Otherwise, they will send it to any subdomain of the given value as well. - - Uses the :data:`SESSION_COOKIE_DOMAIN` config. - - .. versionchanged:: 2.3 - Not set by default, does not fall back to ``SERVER_NAME``. - """ - return app.config["SESSION_COOKIE_DOMAIN"] # type: ignore[no-any-return] - - def get_cookie_path(self, app: Flask) -> str: - """Returns the path for which the cookie should be valid. The - default implementation uses the value from the ``SESSION_COOKIE_PATH`` - config var if it's set, and falls back to ``APPLICATION_ROOT`` or - uses ``/`` if it's ``None``. - """ - return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] # type: ignore[no-any-return] - - def get_cookie_httponly(self, app: Flask) -> bool: - """Returns True if the session cookie should be httponly. This - currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` - config var. - """ - return app.config["SESSION_COOKIE_HTTPONLY"] # type: ignore[no-any-return] - - def get_cookie_secure(self, app: Flask) -> bool: - """Returns True if the cookie should be secure. This currently - just returns the value of the ``SESSION_COOKIE_SECURE`` setting. - """ - return app.config["SESSION_COOKIE_SECURE"] # type: ignore[no-any-return] - - def get_cookie_samesite(self, app: Flask) -> str | None: - """Return ``'Strict'`` or ``'Lax'`` if the cookie should use the - ``SameSite`` attribute. This currently just returns the value of - the :data:`SESSION_COOKIE_SAMESITE` setting. 
- """ - return app.config["SESSION_COOKIE_SAMESITE"] # type: ignore[no-any-return] - - def get_cookie_partitioned(self, app: Flask) -> bool: - """Returns True if the cookie should be partitioned. By default, uses - the value of :data:`SESSION_COOKIE_PARTITIONED`. - - .. versionadded:: 3.1 - """ - return app.config["SESSION_COOKIE_PARTITIONED"] # type: ignore[no-any-return] - - def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None: - """A helper method that returns an expiration date for the session - or ``None`` if the session is linked to the browser session. The - default implementation returns now + the permanent session - lifetime configured on the application. - """ - if session.permanent: - return datetime.now(timezone.utc) + app.permanent_session_lifetime - return None - - def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool: - """Used by session backends to determine if a ``Set-Cookie`` header - should be set for this session cookie for this response. If the session - has been modified, the cookie is set. If the session is permanent and - the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is - always set. - - This check is usually skipped if the session was deleted. - - .. versionadded:: 0.11 - """ - - return session.modified or ( - session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"] - ) - - def open_session(self, app: Flask, request: Request) -> SessionMixin | None: - """This is called at the beginning of each request, after - pushing the request context, before matching the URL. - - This must return an object which implements a dictionary-like - interface as well as the :class:`SessionMixin` interface. - - This will return ``None`` to indicate that loading failed in - some way that is not immediately an error. The request - context will fall back to using :meth:`make_null_session` - in this case. - """ - raise NotImplementedError() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - """This is called at the end of each request, after generating - a response, before removing the request context. It is skipped - if :meth:`is_null_session` returns ``True``. - """ - raise NotImplementedError() - - -session_json_serializer = TaggedJSONSerializer() - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class SecureCookieSessionInterface(SessionInterface): - """The default session interface that stores sessions in signed cookies - through the :mod:`itsdangerous` module. - """ - - #: the salt that should be applied on top of the secret key for the - #: signing of cookie based sessions. - salt = "cookie-session" - #: the hash function to use for the signature. The default is sha1 - digest_method = staticmethod(_lazy_sha1) - #: the name of the itsdangerous supported key derivation. The default - #: is hmac. - key_derivation = "hmac" - #: A python serializer for the payload. The default is a compact - #: JSON derived serializer with support for some extra Python types - #: such as datetime objects or tuples. 
- serializer = session_json_serializer - session_class = SecureCookieSession - - def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None: - if not app.secret_key: - return None - - keys: list[str | bytes] = [] - - if fallbacks := app.config["SECRET_KEY_FALLBACKS"]: - keys.extend(fallbacks) - - keys.append(app.secret_key) # itsdangerous expects current key at top - return URLSafeTimedSerializer( - keys, # type: ignore[arg-type] - salt=self.salt, - serializer=self.serializer, - signer_kwargs={ - "key_derivation": self.key_derivation, - "digest_method": self.digest_method, - }, - ) - - def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None: - s = self.get_signing_serializer(app) - if s is None: - return None - val = request.cookies.get(self.get_cookie_name(app)) - if not val: - return self.session_class() - max_age = int(app.permanent_session_lifetime.total_seconds()) - try: - data = s.loads(val, max_age=max_age) - return self.session_class(data) - except BadSignature: - return self.session_class() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - name = self.get_cookie_name(app) - domain = self.get_cookie_domain(app) - path = self.get_cookie_path(app) - secure = self.get_cookie_secure(app) - partitioned = self.get_cookie_partitioned(app) - samesite = self.get_cookie_samesite(app) - httponly = self.get_cookie_httponly(app) - - # Add a "Vary: Cookie" header if the session was accessed at all. - if session.accessed: - response.vary.add("Cookie") - - # If the session is modified to be empty, remove the cookie. - # If the session is empty, return without setting the cookie. - if not session: - if session.modified: - response.delete_cookie( - name, - domain=domain, - path=path, - secure=secure, - partitioned=partitioned, - samesite=samesite, - httponly=httponly, - ) - response.vary.add("Cookie") - - return - - if not self.should_set_cookie(app, session): - return - - expires = self.get_expiration_time(app, session) - val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore[union-attr] - response.set_cookie( - name, - val, - expires=expires, - httponly=httponly, - domain=domain, - path=path, - secure=secure, - partitioned=partitioned, - samesite=samesite, - ) - response.vary.add("Cookie") diff --git a/venv/Lib/site-packages/flask/signals.py b/venv/Lib/site-packages/flask/signals.py deleted file mode 100644 index 444fda9..0000000 --- a/venv/Lib/site-packages/flask/signals.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from blinker import Namespace - -# This namespace is only for signals provided by Flask itself. 
-_signals = Namespace() - -template_rendered = _signals.signal("template-rendered") -before_render_template = _signals.signal("before-render-template") -request_started = _signals.signal("request-started") -request_finished = _signals.signal("request-finished") -request_tearing_down = _signals.signal("request-tearing-down") -got_request_exception = _signals.signal("got-request-exception") -appcontext_tearing_down = _signals.signal("appcontext-tearing-down") -appcontext_pushed = _signals.signal("appcontext-pushed") -appcontext_popped = _signals.signal("appcontext-popped") -message_flashed = _signals.signal("message-flashed") diff --git a/venv/Lib/site-packages/flask/templating.py b/venv/Lib/site-packages/flask/templating.py deleted file mode 100644 index 16d480f..0000000 --- a/venv/Lib/site-packages/flask/templating.py +++ /dev/null @@ -1,219 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2 import BaseLoader -from jinja2 import Environment as BaseEnvironment -from jinja2 import Template -from jinja2 import TemplateNotFound - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .helpers import stream_with_context -from .signals import before_render_template -from .signals import template_rendered - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .sansio.app import App - from .sansio.scaffold import Scaffold - - -def _default_template_ctx_processor() -> dict[str, t.Any]: - """Default template context processor. Injects `request`, - `session` and `g`. - """ - appctx = _cv_app.get(None) - reqctx = _cv_request.get(None) - rv: dict[str, t.Any] = {} - if appctx is not None: - rv["g"] = appctx.g - if reqctx is not None: - rv["request"] = reqctx.request - rv["session"] = reqctx.session - return rv - - -class Environment(BaseEnvironment): - """Works like a regular Jinja environment but has some additional - knowledge of how Flask's blueprint works so that it can prepend the - name of the blueprint to referenced templates if necessary. - """ - - def __init__(self, app: App, **options: t.Any) -> None: - if "loader" not in options: - options["loader"] = app.create_global_jinja_loader() - BaseEnvironment.__init__(self, **options) - self.app = app - - -class DispatchingJinjaLoader(BaseLoader): - """A loader that looks for templates in the application and all - the blueprint folders. 
- """ - - def __init__(self, app: App) -> None: - self.app = app - - def get_source( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - if self.app.config["EXPLAIN_TEMPLATE_LOADING"]: - return self._get_source_explained(environment, template) - return self._get_source_fast(environment, template) - - def _get_source_explained( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - attempts = [] - rv: tuple[str, str | None, t.Callable[[], bool] | None] | None - trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None - - for srcobj, loader in self._iter_loaders(template): - try: - rv = loader.get_source(environment, template) - if trv is None: - trv = rv - except TemplateNotFound: - rv = None - attempts.append((loader, srcobj, rv)) - - from .debughelpers import explain_template_loading_attempts - - explain_template_loading_attempts(self.app, template, attempts) - - if trv is not None: - return trv - raise TemplateNotFound(template) - - def _get_source_fast( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - for _srcobj, loader in self._iter_loaders(template): - try: - return loader.get_source(environment, template) - except TemplateNotFound: - continue - raise TemplateNotFound(template) - - def _iter_loaders(self, template: str) -> t.Iterator[tuple[Scaffold, BaseLoader]]: - loader = self.app.jinja_loader - if loader is not None: - yield self.app, loader - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - yield blueprint, loader - - def list_templates(self) -> list[str]: - result = set() - loader = self.app.jinja_loader - if loader is not None: - result.update(loader.list_templates()) - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - for template in loader.list_templates(): - result.add(template) - - return list(result) - - -def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - rv = template.render(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - return rv - - -def render_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> str: - """Render a template by name with the given context. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _render(app, template, context) - - -def render_template_string(source: str, **context: t.Any) -> str: - """Render a template from the given source string with the given - context. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. 
- """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _render(app, template, context) - - -def _stream( - app: Flask, template: Template, context: dict[str, t.Any] -) -> t.Iterator[str]: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - def generate() -> t.Iterator[str]: - yield from template.generate(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - rv = generate() - - # If a request context is active, keep it while generating. - if request: - rv = stream_with_context(rv) - - return rv - - -def stream_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> t.Iterator[str]: - """Render a template by name with the given context as a stream. - This returns an iterator of strings, which can be used as a - streaming response from a view. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _stream(app, template, context) - - -def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]: - """Render a template from the given source string with the given - context as a stream. This returns an iterator of strings, which can - be used as a streaming response from a view. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _stream(app, template, context) diff --git a/venv/Lib/site-packages/flask/testing.py b/venv/Lib/site-packages/flask/testing.py deleted file mode 100644 index 55eb12f..0000000 --- a/venv/Lib/site-packages/flask/testing.py +++ /dev/null @@ -1,298 +0,0 @@ -from __future__ import annotations - -import importlib.metadata -import typing as t -from contextlib import contextmanager -from contextlib import ExitStack -from copy import copy -from types import TracebackType -from urllib.parse import urlsplit - -import werkzeug.test -from click.testing import CliRunner -from click.testing import Result -from werkzeug.test import Client -from werkzeug.wrappers import Request as BaseRequest - -from .cli import ScriptInfo -from .sessions import SessionMixin - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - from werkzeug.test import TestResponse - - from .app import Flask - - -class EnvironBuilder(werkzeug.test.EnvironBuilder): - """An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the - application. - - :param app: The Flask application to configure the environment from. - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. 
- :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - - def __init__( - self, - app: Flask, - path: str = "/", - base_url: str | None = None, - subdomain: str | None = None, - url_scheme: str | None = None, - *args: t.Any, - **kwargs: t.Any, - ) -> None: - assert not (base_url or subdomain or url_scheme) or ( - base_url is not None - ) != bool(subdomain or url_scheme), ( - 'Cannot pass "subdomain" or "url_scheme" with "base_url".' - ) - - if base_url is None: - http_host = app.config.get("SERVER_NAME") or "localhost" - app_root = app.config["APPLICATION_ROOT"] - - if subdomain: - http_host = f"{subdomain}.{http_host}" - - if url_scheme is None: - url_scheme = app.config["PREFERRED_URL_SCHEME"] - - url = urlsplit(path) - base_url = ( - f"{url.scheme or url_scheme}://{url.netloc or http_host}" - f"/{app_root.lstrip('/')}" - ) - path = url.path - - if url.query: - path = f"{path}?{url.query}" - - self.app = app - super().__init__(path, base_url, *args, **kwargs) - - def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize ``obj`` to a JSON-formatted string. - - The serialization will be configured according to the config associated - with this EnvironBuilder's ``app``. - """ - return self.app.json.dumps(obj, **kwargs) - - -_werkzeug_version = "" - - -def _get_werkzeug_version() -> str: - global _werkzeug_version - - if not _werkzeug_version: - _werkzeug_version = importlib.metadata.version("werkzeug") - - return _werkzeug_version - - -class FlaskClient(Client): - """Works like a regular Werkzeug test client but has knowledge about - Flask's contexts to defer the cleanup of the request context until - the end of a ``with`` block. For general information about how to - use this class refer to :class:`werkzeug.test.Client`. - - .. versionchanged:: 0.12 - `app.test_client()` includes preset default environment, which can be - set after instantiation of the `app.test_client()` object in - `client.environ_base`. - - Basic usage is outlined in the :doc:`/testing` chapter. - """ - - application: Flask - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - super().__init__(*args, **kwargs) - self.preserve_context = False - self._new_contexts: list[t.ContextManager[t.Any]] = [] - self._context_stack = ExitStack() - self.environ_base = { - "REMOTE_ADDR": "127.0.0.1", - "HTTP_USER_AGENT": f"Werkzeug/{_get_werkzeug_version()}", - } - - @contextmanager - def session_transaction( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Iterator[SessionMixin]: - """When used in combination with a ``with`` statement this opens a - session transaction. This can be used to modify the session that - the test client uses. Once the ``with`` block is left the session is - stored back. - - :: - - with client.session_transaction() as session: - session['value'] = 42 - - Internally this is implemented by going through a temporary test - request context and since session handling could depend on - request variables this function accepts the same arguments as - :meth:`~flask.Flask.test_request_context` which are directly - passed through. - """ - if self._cookies is None: - raise TypeError( - "Cookies are disabled. Create a client with 'use_cookies=True'." 
- ) - - app = self.application - ctx = app.test_request_context(*args, **kwargs) - self._add_cookies_to_wsgi(ctx.request.environ) - - with ctx: - sess = app.session_interface.open_session(app, ctx.request) - - if sess is None: - raise RuntimeError("Session backend did not open a session.") - - yield sess - resp = app.response_class() - - if app.session_interface.is_null_session(sess): - return - - with ctx: - app.session_interface.save_session(app, sess, resp) - - self._update_cookies_from_response( - ctx.request.host.partition(":")[0], - ctx.request.path, - resp.headers.getlist("Set-Cookie"), - ) - - def _copy_environ(self, other: WSGIEnvironment) -> WSGIEnvironment: - out = {**self.environ_base, **other} - - if self.preserve_context: - out["werkzeug.debug.preserve_context"] = self._new_contexts.append - - return out - - def _request_from_builder_args( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> BaseRequest: - kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {})) - builder = EnvironBuilder(self.application, *args, **kwargs) - - try: - return builder.get_request() - finally: - builder.close() - - def open( - self, - *args: t.Any, - buffered: bool = False, - follow_redirects: bool = False, - **kwargs: t.Any, - ) -> TestResponse: - if args and isinstance( - args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest) - ): - if isinstance(args[0], werkzeug.test.EnvironBuilder): - builder = copy(args[0]) - builder.environ_base = self._copy_environ(builder.environ_base or {}) # type: ignore[arg-type] - request = builder.get_request() - elif isinstance(args[0], dict): - request = EnvironBuilder.from_environ( - args[0], app=self.application, environ_base=self._copy_environ({}) - ).get_request() - else: - # isinstance(args[0], BaseRequest) - request = copy(args[0]) - request.environ = self._copy_environ(request.environ) - else: - # request is None - request = self._request_from_builder_args(args, kwargs) - - # Pop any previously preserved contexts. This prevents contexts - # from being preserved across redirects or multiple requests - # within a single block. - self._context_stack.close() - - response = super().open( - request, - buffered=buffered, - follow_redirects=follow_redirects, - ) - response.json_module = self.application.json # type: ignore[assignment] - - # Re-push contexts that were preserved during the request. - for cm in self._new_contexts: - self._context_stack.enter_context(cm) - - self._new_contexts.clear() - return response - - def __enter__(self) -> FlaskClient: - if self.preserve_context: - raise RuntimeError("Cannot nest client invocations") - self.preserve_context = True - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.preserve_context = False - self._context_stack.close() - - -class FlaskCliRunner(CliRunner): - """A :class:`~click.testing.CliRunner` for testing a Flask app's - CLI commands. Typically created using - :meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. - """ - - def __init__(self, app: Flask, **kwargs: t.Any) -> None: - self.app = app - super().__init__(**kwargs) - - def invoke( # type: ignore - self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any - ) -> Result: - """Invokes a CLI command in an isolated environment. See - :meth:`CliRunner.invoke ` for - full method documentation. See :ref:`testing-cli` for examples. 
- - If the ``obj`` argument is not given, passes an instance of - :class:`~flask.cli.ScriptInfo` that knows how to load the Flask - app being tested. - - :param cli: Command object to invoke. Default is the app's - :attr:`~flask.app.Flask.cli` group. - :param args: List of strings to invoke the command with. - - :return: a :class:`~click.testing.Result` object. - """ - if cli is None: - cli = self.app.cli - - if "obj" not in kwargs: - kwargs["obj"] = ScriptInfo(create_app=lambda: self.app) - - return super().invoke(cli, args, **kwargs) diff --git a/venv/Lib/site-packages/flask/typing.py b/venv/Lib/site-packages/flask/typing.py deleted file mode 100644 index 6b70c40..0000000 --- a/venv/Lib/site-packages/flask/typing.py +++ /dev/null @@ -1,93 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIApplication # noqa: F401 - from werkzeug.datastructures import Headers # noqa: F401 - from werkzeug.sansio.response import Response # noqa: F401 - -# The possible types that are directly convertible or are a Response object. -ResponseValue = t.Union[ - "Response", - str, - bytes, - list[t.Any], - # Only dict is actually accepted, but Mapping allows for TypedDict. - t.Mapping[str, t.Any], - t.Iterator[str], - t.Iterator[bytes], - cabc.AsyncIterable[str], # for Quart, until App is generic. - cabc.AsyncIterable[bytes], -] - -# the possible types for an individual HTTP header -# This should be a Union, but mypy doesn't pass unless it's a TypeVar. -HeaderValue = t.Union[str, list[str], tuple[str, ...]] - -# the possible types for HTTP headers -HeadersValue = t.Union[ - "Headers", - t.Mapping[str, HeaderValue], - t.Sequence[tuple[str, HeaderValue]], -] - -# The possible types returned by a route function. -ResponseReturnValue = t.Union[ - ResponseValue, - tuple[ResponseValue, HeadersValue], - tuple[ResponseValue, int], - tuple[ResponseValue, int, HeadersValue], - "WSGIApplication", -] - -# Allow any subclass of werkzeug.Response, such as the one from Flask, -# as a callback argument. Using werkzeug.Response directly makes a -# callback annotated with flask.Response fail type checking. 
-ResponseClass = t.TypeVar("ResponseClass", bound="Response") - -AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named -AfterRequestCallable = t.Union[ - t.Callable[[ResponseClass], ResponseClass], - t.Callable[[ResponseClass], t.Awaitable[ResponseClass]], -] -BeforeFirstRequestCallable = t.Union[ - t.Callable[[], None], t.Callable[[], t.Awaitable[None]] -] -BeforeRequestCallable = t.Union[ - t.Callable[[], t.Optional[ResponseReturnValue]], - t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]], -] -ShellContextProcessorCallable = t.Callable[[], dict[str, t.Any]] -TeardownCallable = t.Union[ - t.Callable[[t.Optional[BaseException]], None], - t.Callable[[t.Optional[BaseException]], t.Awaitable[None]], -] -TemplateContextProcessorCallable = t.Union[ - t.Callable[[], dict[str, t.Any]], - t.Callable[[], t.Awaitable[dict[str, t.Any]]], -] -TemplateFilterCallable = t.Callable[..., t.Any] -TemplateGlobalCallable = t.Callable[..., t.Any] -TemplateTestCallable = t.Callable[..., bool] -URLDefaultCallable = t.Callable[[str, dict[str, t.Any]], None] -URLValuePreprocessorCallable = t.Callable[ - [t.Optional[str], t.Optional[dict[str, t.Any]]], None -] - -# This should take Exception, but that either breaks typing the argument -# with a specific exception, or decorating multiple times with different -# exceptions (and using a union type on the argument). -# https://github.com/pallets/flask/issues/4095 -# https://github.com/pallets/flask/issues/4295 -# https://github.com/pallets/flask/issues/4297 -ErrorHandlerCallable = t.Union[ - t.Callable[[t.Any], ResponseReturnValue], - t.Callable[[t.Any], t.Awaitable[ResponseReturnValue]], -] - -RouteCallable = t.Union[ - t.Callable[..., ResponseReturnValue], - t.Callable[..., t.Awaitable[ResponseReturnValue]], -] diff --git a/venv/Lib/site-packages/flask/views.py b/venv/Lib/site-packages/flask/views.py deleted file mode 100644 index 53fe976..0000000 --- a/venv/Lib/site-packages/flask/views.py +++ /dev/null @@ -1,191 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import typing as ft -from .globals import current_app -from .globals import request - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -http_method_funcs = frozenset( - ["get", "post", "head", "options", "delete", "put", "trace", "patch"] -) - - -class View: - """Subclass this class and override :meth:`dispatch_request` to - create a generic class-based view. Call :meth:`as_view` to create a - view function that creates an instance of the class with the given - arguments and calls its ``dispatch_request`` method with any URL - variables. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class Hello(View): - init_every_request = False - - def dispatch_request(self, name): - return f"Hello, {name}!" - - app.add_url_rule( - "/hello/", view_func=Hello.as_view("hello") - ) - - Set :attr:`methods` on the class to change what methods the view - accepts. - - Set :attr:`decorators` on the class to apply a list of decorators to - the generated view function. Decorators applied to the class itself - will not be applied to the generated view function! - - Set :attr:`init_every_request` to ``False`` for efficiency, unless - you need to store request-global data on ``self``. - """ - - #: The methods this view is registered for. Uses the same default - #: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and - #: ``add_url_rule`` by default. 
- methods: t.ClassVar[t.Collection[str] | None] = None - - #: Control whether the ``OPTIONS`` method is handled automatically. - #: Uses the same default (``True``) as ``route`` and - #: ``add_url_rule`` by default. - provide_automatic_options: t.ClassVar[bool | None] = None - - #: A list of decorators to apply, in order, to the generated view - #: function. Remember that ``@decorator`` syntax is applied bottom - #: to top, so the first decorator in the list would be the bottom - #: decorator. - #: - #: .. versionadded:: 0.8 - decorators: t.ClassVar[list[t.Callable[..., t.Any]]] = [] - - #: Create a new instance of this view class for every request by - #: default. If a view subclass sets this to ``False``, the same - #: instance is used for every request. - #: - #: A single instance is more efficient, especially if complex setup - #: is done during init. However, storing data on ``self`` is no - #: longer safe across requests, and :data:`~flask.g` should be used - #: instead. - #: - #: .. versionadded:: 2.2 - init_every_request: t.ClassVar[bool] = True - - def dispatch_request(self) -> ft.ResponseReturnValue: - """The actual view function behavior. Subclasses must override - this and return a valid response. Any variables from the URL - rule are passed as keyword arguments. - """ - raise NotImplementedError() - - @classmethod - def as_view( - cls, name: str, *class_args: t.Any, **class_kwargs: t.Any - ) -> ft.RouteCallable: - """Convert the class into a view function that can be registered - for a route. - - By default, the generated view will create a new instance of the - view class for every request and call its - :meth:`dispatch_request` method. If the view class sets - :attr:`init_every_request` to ``False``, the same instance will - be used for every request. - - Except for ``name``, all other arguments passed to this method - are forwarded to the view class ``__init__`` method. - - .. versionchanged:: 2.2 - Added the ``init_every_request`` class attribute. - """ - if cls.init_every_request: - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - self = view.view_class( # type: ignore[attr-defined] - *class_args, **class_kwargs - ) - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - else: - self = cls(*class_args, **class_kwargs) # pyright: ignore - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - if cls.decorators: - view.__name__ = name - view.__module__ = cls.__module__ - for decorator in cls.decorators: - view = decorator(view) - - # We attach the view class to the view function for two reasons: - # first of all it allows us to easily figure out what class-based - # view this thing came from, secondly it's also used for instantiating - # the view class so you can actually replace it with something else - # for testing purposes and debugging. - view.view_class = cls # type: ignore - view.__name__ = name - view.__doc__ = cls.__doc__ - view.__module__ = cls.__module__ - view.methods = cls.methods # type: ignore - view.provide_automatic_options = cls.provide_automatic_options # type: ignore - return view - - -class MethodView(View): - """Dispatches request methods to the corresponding instance methods. - For example, if you implement a ``get`` method, it will be used to - handle ``GET`` requests. - - This can be useful for defining a REST API. - - :attr:`methods` is automatically set based on the methods defined on - the class. 
- - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class CounterAPI(MethodView): - def get(self): - return str(session.get("counter", 0)) - - def post(self): - session["counter"] = session.get("counter", 0) + 1 - return redirect(url_for("counter")) - - app.add_url_rule( - "/counter", view_func=CounterAPI.as_view("counter") - ) - """ - - def __init_subclass__(cls, **kwargs: t.Any) -> None: - super().__init_subclass__(**kwargs) - - if "methods" not in cls.__dict__: - methods = set() - - for base in cls.__bases__: - if getattr(base, "methods", None): - methods.update(base.methods) # type: ignore[attr-defined] - - for key in http_method_funcs: - if hasattr(cls, key): - methods.add(key.upper()) - - if methods: - cls.methods = methods - - def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue: - meth = getattr(self, request.method.lower(), None) - - # If the request method is HEAD and we don't have a handler for it - # retry with GET. - if meth is None and request.method == "HEAD": - meth = getattr(self, "get", None) - - assert meth is not None, f"Unimplemented method {request.method!r}" - return current_app.ensure_sync(meth)(**kwargs) # type: ignore[no-any-return] diff --git a/venv/Lib/site-packages/flask/wrappers.py b/venv/Lib/site-packages/flask/wrappers.py deleted file mode 100644 index bab6102..0000000 --- a/venv/Lib/site-packages/flask/wrappers.py +++ /dev/null @@ -1,257 +0,0 @@ -from __future__ import annotations - -import typing as t - -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import HTTPException -from werkzeug.wrappers import Request as RequestBase -from werkzeug.wrappers import Response as ResponseBase - -from . import json -from .globals import current_app -from .helpers import _split_blueprint_path - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.routing import Rule - - -class Request(RequestBase): - """The request object used by default in Flask. Remembers the - matched endpoint and view arguments. - - It is what ends up as :class:`~flask.request`. If you want to replace - the request object used you can subclass this and set - :attr:`~flask.Flask.request_class` to your subclass. - - The request object is a :class:`~werkzeug.wrappers.Request` subclass and - provides all of the attributes Werkzeug defines plus a few Flask - specific ones. - """ - - json_module: t.Any = json - - #: The internal URL rule that matched the request. This can be - #: useful to inspect which methods are allowed for the URL from - #: a before/after handler (``request.url_rule.methods``) etc. - #: Though if the request's method was invalid for the URL rule, - #: the valid list is available in ``routing_exception.valid_methods`` - #: instead (an attribute of the Werkzeug exception - #: :exc:`~werkzeug.exceptions.MethodNotAllowed`) - #: because the request was never internally bound. - #: - #: .. versionadded:: 0.6 - url_rule: Rule | None = None - - #: A dict of view arguments that matched the request. If an exception - #: happened when matching, this will be ``None``. - view_args: dict[str, t.Any] | None = None - - #: If matching the URL failed, this is the exception that will be - #: raised / was raised as part of the request handling. This is - #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or - #: something similar. 
- routing_exception: HTTPException | None = None - - _max_content_length: int | None = None - _max_form_memory_size: int | None = None - _max_form_parts: int | None = None - - @property - def max_content_length(self) -> int | None: - """The maximum number of bytes that will be read during this request. If - this limit is exceeded, a 413 :exc:`~werkzeug.exceptions.RequestEntityTooLarge` - error is raised. If it is set to ``None``, no limit is enforced at the - Flask application level. However, if it is ``None`` and the request has - no ``Content-Length`` header and the WSGI server does not indicate that - it terminates the stream, then no data is read to avoid an infinite - stream. - - Each request defaults to the :data:`MAX_CONTENT_LENGTH` config, which - defaults to ``None``. It can be set on a specific ``request`` to apply - the limit to that specific view. This should be set appropriately based - on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This can be set per-request. - - .. versionchanged:: 0.6 - This is configurable through Flask config. - """ - if self._max_content_length is not None: - return self._max_content_length - - if not current_app: - return super().max_content_length - - return current_app.config["MAX_CONTENT_LENGTH"] # type: ignore[no-any-return] - - @max_content_length.setter - def max_content_length(self, value: int | None) -> None: - self._max_content_length = value - - @property - def max_form_memory_size(self) -> int | None: - """The maximum size in bytes any non-file form field may be in a - ``multipart/form-data`` body. If this limit is exceeded, a 413 - :exc:`~werkzeug.exceptions.RequestEntityTooLarge` error is raised. If it - is set to ``None``, no limit is enforced at the Flask application level. - - Each request defaults to the :data:`MAX_FORM_MEMORY_SIZE` config, which - defaults to ``500_000``. It can be set on a specific ``request`` to - apply the limit to that specific view. This should be set appropriately - based on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This is configurable through Flask config. - """ - if self._max_form_memory_size is not None: - return self._max_form_memory_size - - if not current_app: - return super().max_form_memory_size - - return current_app.config["MAX_FORM_MEMORY_SIZE"] # type: ignore[no-any-return] - - @max_form_memory_size.setter - def max_form_memory_size(self, value: int | None) -> None: - self._max_form_memory_size = value - - @property # type: ignore[override] - def max_form_parts(self) -> int | None: - """The maximum number of fields that may be present in a - ``multipart/form-data`` body. If this limit is exceeded, a 413 - :exc:`~werkzeug.exceptions.RequestEntityTooLarge` error is raised. If it - is set to ``None``, no limit is enforced at the Flask application level. - - Each request defaults to the :data:`MAX_FORM_PARTS` config, which - defaults to ``1_000``. It can be set on a specific ``request`` to apply - the limit to that specific view. This should be set appropriately based - on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This is configurable through Flask config. 
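
The three `Request` limit properties above all follow the same pattern: a value set on the request itself wins, otherwise the matching Flask config key applies. A hedged sketch of how those knobs might be used (the route and the specific limits are assumptions for illustration):

```python
from flask import Flask, request

app = Flask(__name__)
# Application-wide limits, read by the Request properties shown above.
app.config["MAX_CONTENT_LENGTH"] = 16 * 1024 * 1024  # 16 MiB request bodies
app.config["MAX_FORM_MEMORY_SIZE"] = 500_000          # bytes per non-file form field
app.config["MAX_FORM_PARTS"] = 1_000                  # fields per multipart body


@app.post("/upload")
def upload():
    # Tighten the cap for this view only; the setter stores the value on the
    # request, and the property prefers it over the application config.
    request.max_content_length = 1 * 1024 * 1024  # 1 MiB
    data = request.get_data()
    return {"received_bytes": len(data)}
```
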
- """ - if self._max_form_parts is not None: - return self._max_form_parts - - if not current_app: - return super().max_form_parts - - return current_app.config["MAX_FORM_PARTS"] # type: ignore[no-any-return] - - @max_form_parts.setter - def max_form_parts(self, value: int | None) -> None: - self._max_form_parts = value - - @property - def endpoint(self) -> str | None: - """The endpoint that matched the request URL. - - This will be ``None`` if matching failed or has not been - performed yet. - - This in combination with :attr:`view_args` can be used to - reconstruct the same URL or a modified URL. - """ - if self.url_rule is not None: - return self.url_rule.endpoint # type: ignore[no-any-return] - - return None - - @property - def blueprint(self) -> str | None: - """The registered name of the current blueprint. - - This will be ``None`` if the endpoint is not part of a - blueprint, or if URL matching failed or has not been performed - yet. - - This does not necessarily match the name the blueprint was - created with. It may have been nested, or registered with a - different name. - """ - endpoint = self.endpoint - - if endpoint is not None and "." in endpoint: - return endpoint.rpartition(".")[0] - - return None - - @property - def blueprints(self) -> list[str]: - """The registered names of the current blueprint upwards through - parent blueprints. - - This will be an empty list if there is no current blueprint, or - if URL matching failed. - - .. versionadded:: 2.0.1 - """ - name = self.blueprint - - if name is None: - return [] - - return _split_blueprint_path(name) - - def _load_form_data(self) -> None: - super()._load_form_data() - - # In debug mode we're replacing the files multidict with an ad-hoc - # subclass that raises a different error for key errors. - if ( - current_app - and current_app.debug - and self.mimetype != "multipart/form-data" - and not self.files - ): - from .debughelpers import attach_enctype_error_multidict - - attach_enctype_error_multidict(self) - - def on_json_loading_failed(self, e: ValueError | None) -> t.Any: - try: - return super().on_json_loading_failed(e) - except BadRequest as ebr: - if current_app and current_app.debug: - raise - - raise BadRequest() from ebr - - -class Response(ResponseBase): - """The response object that is used by default in Flask. Works like the - response object from Werkzeug but is set to have an HTML mimetype by - default. Quite often you don't have to create this object yourself because - :meth:`~flask.Flask.make_response` will take care of that for you. - - If you want to replace the response object used you can subclass this and - set :attr:`~flask.Flask.response_class` to your subclass. - - .. versionchanged:: 1.0 - JSON support is added to the response, like the request. This is useful - when testing to get the test client response data as JSON. - - .. versionchanged:: 1.0 - - Added :attr:`max_cookie_size`. - """ - - default_mimetype: str | None = "text/html" - - json_module = json - - autocorrect_location_header = False - - @property - def max_cookie_size(self) -> int: # type: ignore - """Read-only view of the :data:`MAX_COOKIE_SIZE` config key. - - See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in - Werkzeug's docs. 
- """ - if current_app: - return current_app.config["MAX_COOKIE_SIZE"] # type: ignore[no-any-return] - - # return Werkzeug's default when not in an app context - return super().max_cookie_size diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/INSTALLER b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/LICENSE.md b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/LICENSE.md deleted file mode 100644 index c5402b9..0000000 --- a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/LICENSE.md +++ /dev/null @@ -1,55 +0,0 @@ -# Blue Oak Model License - -Version 1.0.0 - -## Purpose - -This license gives everyone as much permission to work with -this software as possible, while protecting contributors -from liability. - -## Acceptance - -In order to receive this license, you must agree to its -rules. The rules of this license are both obligations -under that agreement and conditions to your license. -You must not do anything with this software that triggers -a rule that you cannot or will not follow. - -## Copyright - -Each contributor licenses you to do everything with this -software that would otherwise infringe that contributor's -copyright in it. - -## Notices - -You must ensure that everyone who gets a copy of -any part of this software from you, with or without -changes, also gets the text of this license or a link to -. - -## Excuse - -If anyone notifies you in writing that you have not -complied with [Notices](#notices), you can keep your -license by taking all practical steps to comply within 30 -days after the notice. If you do not do so, your license -ends immediately. - -## Patent - -Each contributor licenses you to do everything with this -software that would otherwise infringe any patent claims -they can license or become able to license. - -## Reliability - -No contributor can revoke this license. 
- -## No Liability - -***As far as the law allows, this software comes as is, -without any warranty or condition, and no contributor -will be liable to anyone for any damages related to this -software or this license, under any kind of legal claim.*** diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/METADATA b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/METADATA deleted file mode 100644 index 1053975..0000000 --- a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/METADATA +++ /dev/null @@ -1,258 +0,0 @@ -Metadata-Version: 2.1 -Name: flask-redis -Version: 0.4.0 -Summary: A nice way to use Redis in your Flask app -Home-page: https://github.com/underyx/flask-redis/ -Author: Bence Nagy -Author-email: bence@underyx.me -Maintainer: Bence Nagy -Maintainer-email: bence@underyx.me -License: UNKNOWN -Download-URL: https://github.com/underyx/flask-redis/releases -Keywords: flask,redis -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* -Description-Content-Type: text/markdown -Requires-Dist: Flask (>=0.8) -Requires-Dist: redis (>=2.7.6) -Provides-Extra: dev -Requires-Dist: coverage ; extra == 'dev' -Requires-Dist: pytest ; extra == 'dev' -Requires-Dist: pytest-mock ; extra == 'dev' -Requires-Dist: pre-commit ; extra == 'dev' -Provides-Extra: tests -Requires-Dist: coverage ; extra == 'tests' -Requires-Dist: pytest ; extra == 'tests' -Requires-Dist: pytest-mock ; extra == 'tests' - -# flask-redis - -[![CircleCI](https://circleci.com/gh/underyx/flask-redis.svg?style=svg)](https://circleci.com/gh/underyx/flask-redis) -[![codecov](https://codecov.io/gh/underyx/flask-redis/branch/master/graph/badge.svg)](https://codecov.io/gh/underyx/flask-redis) -[![Codacy Badge](https://api.codacy.com/project/badge/Grade/8f8297c1a5f542d49429c4837165984f)](https://www.codacy.com/app/bence/flask-redis?utm_source=github.com&utm_medium=referral&utm_content=underyx/flask-redis&utm_campaign=Badge_Grade) -[![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/underyx/flask-redis.svg)](https://github.com/underyx/flask-redis/tags) - -![PyPI - Python Version](https://img.shields.io/pypi/pyversions/flask-redis.svg) -![Flask version support is 0.9+](https://img.shields.io/badge/flask-0.9%2B-blue.svg) -![redis-py version support is 2.6+](https://img.shields.io/badge/redis--py-2.6%2B-blue.svg) -[![Code style: black](https://img.shields.io/badge/code%20style-black-black.svg)](https://github.com/ambv/black) - -A nice way to use Redis in your Flask app. - -## Configuration - -Start by installing the extension with `pip install flask-redis`. -Once that's done, configure it within your Flask config. 
-Set the URL of your Redis instance like this: - -```python -REDIS_URL = "redis://:password@localhost:6379/0" -``` - -If you wanna connect to a Unix socket, -you can specify it like `"unix://:password@/path/to/socket.sock?db=0"`. - -## Usage - -### Setup - -To add a Redis client to your application: - -```python -from flask import Flask -from flask_redis import FlaskRedis - -app = Flask(__name__) -redis_client = FlaskRedis(app) -``` - -or if you prefer, you can do it the other way around: - -```python -redis_client = FlaskRedis(app) -def create_app(): - app = Flask(__name__) - redis_client.init_app(app) - return app -``` - -### Accessing Redis - -The redis client you created above from `FlaskRedis` acts just like a regular `Redis` instance from the [`redis-py`](https://github.com/andymccurdy/redis-py) library: - -```python -from my_app import redis_client - -@app.route('/') -def index(): - return redis_client.get('potato') -``` - -For detailed instructions on what methods you can use on the client, -as well as how you can use advanced features -such as Lua scripting, pipelines, and callbacks, -please check the -[redis-py documentation](https://redis-py.readthedocs.io/en/latest/). - -**Pro-tip:** The [redis-py](https://github.com/andymccurdy/redis-py) -package uses the `redis` namespace, so it's nicer to name your Redis object something like `redis_client` instead of just `redis`. - -## Extra features in flask-redis - -### Custom providers - -Instead of the default `Redis` client from `redis-py`, -you can provide your own. -This can be useful to replace it with [mockredis](https://github.com/locationlabs/mockredis) for testing: - -```python -from flask import Flask -from flask_redis import FlaskRedis -from mockredis import MockRedis - - -def create_app(): - app = Flask(__name__) - if app.testing: - redis_store = FlaskRedis.from_custom_provider(MockRedis) - else: - redis_store = FlaskRedis() - redis_store.init_app(app) - return app -``` - -## Contributing - -1. Check for open issues or open a fresh issue to start a discussion -2. Fork [the repository](https://github.com/underyx/flask-redis) on GitHub. -3. Send a pull request with your code! - -Merging will require a test which shows that the bug was fixed, -or that the feature works as expected. -Feel free to open a draft pull request though without such a test -and ask for help with writing it if you're not sure how to. - -As [Bence](https://underyx.me) (the only maintainer) works full-time, -please allow some time before your issue or pull request is handled. - - -## Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -## 0.4.0 (2019-05-29) - -- Reorganized the module and rewrote everything other than the library code, mainly packaging and CI. There are no user-facing changes in behavior. - -## 0.3.0 (2016-07-18) - -- **Backwards incompatible:** The `FlaskRedis.init_app` method no - longer takes a `strict` parameter. Pass this flag when creating your - `FlaskRedis` instance, instead. -- **Backwards incompatible:** The extension will now be registered - under the (lowercased) config prefix of the instance. The default - config prefix is `'REDIS'`, so unless you change that, you can still - access the extension via `app.extensions['redis']` as before. -- **Backwards incompatible:** The default class has been changed to - `redis.StrictRedis`. 
You can switch back to the old `redis.Redis` - class by specifying `strict=False` in the `FlaskRedis` kwargs. -- You can now pass all supported `Redis` keyword arguments (such as - `decode_responses`) to `FlaskRedis` and they will be correctly - passed over to the `redis-py` instance. Thanks, @giyyapan\! -- Usage like `redis_store['key'] = value`, `redis_store['key']`, and - `del redis_store['key']` is now supported. Thanks, @ariscn\! - -## 0.2.0 (2015-04-15) - -- Made 0.1.0's deprecation warned changes final - -## 0.1.0 (2015-04-15) - -- **Deprecation:** Renamed `flask_redis.Redis` to - `flask_redis.FlaskRedis`. Using the old name still works, but emits - a deprecation warning, as it will be removed from the next version -- **Deprecation:** Setting a `REDIS_DATABASE` (or equivalent) now - emits a deprecation warning as it will be removed in the version in - favor of including the database number in `REDIS_URL` (or - equivalent) -- Added a `FlaskRedis.from_custom_provider(provider)` class method for - using any redis provider class that supports instantiation with a - `from_url` class method -- Added a `strict` parameter to `FlaskRedis` which expects a boolean - value and allows choosing between using `redis.StrictRedis` and - `redis.Redis` as the defualt provider. -- Made `FlaskRedis` register as a Flask extension through Flask's - extension API -- Rewrote test suite in py.test -- Got rid of the hacky attribute copying mechanism in favor of using - the `__getattr__` magic method to pass calls to the underlying - client - -## 0.0.6 (2014-04-09) - -- Improved Python 3 Support (Thanks underyx\!). -- Improved test cases. -- Improved configuration. -- Fixed up documentation. -- Removed un-used imports (Thanks underyx and lyschoening\!). - -## 0.0.5 (2014-02-17) - -- Improved suppot for the config prefix. - -## 0.0.4 (2014-02-17) - -- Added support for config_prefix, allowing multiple DBs. - -## 0.0.3 (2013-07-06) - -- Added TravisCI Testing for Flask 0.9/0.10. -- Added Badges to README. - -## 0.0.2 (2013-07-06) - -- Implemented a very simple test. -- Fixed some documentation issues. -- Included requirements.txt for testing. -- Included task file including some basic methods for tests. - -## 0.0.1 (2013-07-05) - -- Conception -- Initial Commit of Package to GitHub. - - -## Credits - -The `flask-redis` project is written and maintained -by [Bence Nagy (underyx)](https://underyx.me). - -The project was originally created by [Rhys Elsmore](https://rhys.io/), -who maintained it until the 0.0.6 release in 2014. -His work was licensed under the Apache 2 license. -The project has gone through a full rewrite since, -but his work was essential as inspiration. -Thanks, Rhys! - -A full list of contributors can be found on [GitHub's Contributors page](https://github.com/underyx/flask-redis/graphs/contributors) -or you can obtain it on your own by running `git shortlog -sn`. 
- - diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/RECORD b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/RECORD deleted file mode 100644 index a7ab623..0000000 --- a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/RECORD +++ /dev/null @@ -1,21 +0,0 @@ -flask_redis-0.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask_redis-0.4.0.dist-info/LICENSE.md,sha256=ihrxQP379a_T3yf35mL5icW5Y6MAAg36_OQgM8rp4AQ,1552 -flask_redis-0.4.0.dist-info/METADATA,sha256=YRcxGCSt4o_Z2c8nrAMnDcPUV5_Xw47b5zlwzTPpWcw,9173 -flask_redis-0.4.0.dist-info/RECORD,, -flask_redis-0.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask_redis-0.4.0.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -flask_redis-0.4.0.dist-info/top_level.txt,sha256=hEF5PFQyhTQHqYz-ZQ9uiS6Ef4u-oVYElxMiFpB-2KQ,17 -flask_redis/__init__.py,sha256=Qb3a6zbtvUZmbqGsES-XrHVnqGhwh7gOi9OrbGdpTOA,378 -flask_redis/__pycache__/__init__.cpython-312.pyc,, -flask_redis/__pycache__/client.cpython-312.pyc,, -flask_redis/client.py,sha256=EzWmisA_0980c8k7Sh1UnJfDv75LMoOafSPPIzXszog,1731 -test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -test/__pycache__/__init__.cpython-312.pyc,, -test/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -test/integration/__pycache__/__init__.cpython-312.pyc,, -test/integration/__pycache__/test_client.cpython-312.pyc,, -test/integration/test_client.py,sha256=nT1k1XEdwhja0DBYtSj9xA3geoST6CK4RADeRzea0Mg,2534 -test/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -test/unit/__pycache__/__init__.cpython-312.pyc,, -test/unit/__pycache__/test_client.cpython-312.pyc,, -test/unit/test_client.py,sha256=pJvGxyle3llqM-XGpVw3pFVMglWzcIsEg_hlDRzAjxU,364 diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/REQUESTED b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/WHEEL b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/WHEEL deleted file mode 100644 index 78e6f69..0000000 --- a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/top_level.txt b/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/top_level.txt deleted file mode 100644 index d1ed0a4..0000000 --- a/venv/Lib/site-packages/flask_redis-0.4.0.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -flask_redis -test diff --git a/venv/Lib/site-packages/flask_redis/__init__.py b/venv/Lib/site-packages/flask_redis/__init__.py deleted file mode 100644 index 19ac0ef..0000000 --- a/venv/Lib/site-packages/flask_redis/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from .client import FlaskRedis - - -__version__ = "0.4.0" - -__title__ = "flask-redis" -__description__ = "A nice way to use Redis in your Flask app" -__url__ = "https://github.com/underyx/flask-redis/" -__uri__ = __url__ - -__author__ = "Bence Nagy" -__email__ = "bence@underyx.me" - -__license__ = "Blue Oak License" -__copyright__ = "Copyright (c) 2019 Bence Nagy" - -__all__ = [FlaskRedis] diff --git a/venv/Lib/site-packages/flask_redis/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/flask_redis/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4b4a805..0000000 Binary files 
a/venv/Lib/site-packages/flask_redis/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_redis/__pycache__/client.cpython-312.pyc b/venv/Lib/site-packages/flask_redis/__pycache__/client.cpython-312.pyc deleted file mode 100644 index ca8e933..0000000 Binary files a/venv/Lib/site-packages/flask_redis/__pycache__/client.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_redis/client.py b/venv/Lib/site-packages/flask_redis/client.py deleted file mode 100644 index c33004f..0000000 --- a/venv/Lib/site-packages/flask_redis/client.py +++ /dev/null @@ -1,55 +0,0 @@ -try: - import redis -except ImportError: - # We can still allow custom provider-only usage without redis-py being installed - redis = None - - -class FlaskRedis(object): - def __init__(self, app=None, strict=True, config_prefix="REDIS", **kwargs): - self._redis_client = None - self.provider_class = redis.StrictRedis if strict else redis.Redis - self.provider_kwargs = kwargs - self.config_prefix = config_prefix - - if app is not None: - self.init_app(app) - - @classmethod - def from_custom_provider(cls, provider, app=None, **kwargs): - assert provider is not None, "your custom provider is None, come on" - - # We never pass the app parameter here, so we can call init_app - # ourselves later, after the provider class has been set - instance = cls(**kwargs) - - instance.provider_class = provider - if app is not None: - instance.init_app(app) - return instance - - def init_app(self, app, **kwargs): - redis_url = app.config.get( - "{0}_URL".format(self.config_prefix), "redis://localhost:6379/0" - ) - - self.provider_kwargs.update(kwargs) - self._redis_client = self.provider_class.from_url( - redis_url, **self.provider_kwargs - ) - - if not hasattr(app, "extensions"): - app.extensions = {} - app.extensions[self.config_prefix.lower()] = self - - def __getattr__(self, name): - return getattr(self._redis_client, name) - - def __getitem__(self, name): - return self._redis_client[name] - - def __setitem__(self, name, value): - self._redis_client[name] = value - - def __delitem__(self, name): - del self._redis_client[name] diff --git a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/INSTALLER b/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst b/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/METADATA b/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/METADATA deleted file mode 100644 index 92f239c..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/METADATA +++ /dev/null @@ -1,109 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-SQLAlchemy -Version: 3.1.1 -Summary: Add SQLAlchemy support to your Flask application. -Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Requires-Dist: flask>=2.2.5 -Requires-Dist: sqlalchemy>=2.0.16 -Project-URL: Changes, https://flask-sqlalchemy.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://flask-sqlalchemy.palletsprojects.com -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Issue Tracker, https://github.com/pallets-eco/flask-sqlalchemy/issues/ -Project-URL: Source Code, https://github.com/pallets-eco/flask-sqlalchemy/ - -Flask-SQLAlchemy -================ - -Flask-SQLAlchemy is an extension for `Flask`_ that adds support for -`SQLAlchemy`_ to your application. It aims to simplify using SQLAlchemy -with Flask by providing useful defaults and extra helpers that make it -easier to accomplish common tasks. - -.. _Flask: https://palletsprojects.com/p/flask/ -.. _SQLAlchemy: https://www.sqlalchemy.org - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Flask-SQLAlchemy - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. 
code-block:: python - - from flask import Flask - from flask_sqlalchemy import SQLAlchemy - from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column - - app = Flask(__name__) - app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.sqlite" - - class Base(DeclarativeBase): - pass - - db = SQLAlchemy(app, model_class=Base) - - class User(db.Model): - id: Mapped[int] = mapped_column(db.Integer, primary_key=True) - username: Mapped[str] = mapped_column(db.String, unique=True, nullable=False) - - with app.app_context(): - db.create_all() - - db.session.add(User(username="example")) - db.session.commit() - - users = db.session.execute(db.select(User)).scalars() - - -Contributing ------------- - -For guidance on setting up a development environment and how to make a -contribution to Flask-SQLAlchemy, see the `contributing guidelines`_. - -.. _contributing guidelines: https://github.com/pallets-eco/flask-sqlalchemy/blob/main/CONTRIBUTING.rst - - -Donate ------- - -The Pallets organization develops and supports Flask-SQLAlchemy and -other popular packages. In order to grow the community of contributors -and users, and allow the maintainers to devote more time to the -projects, `please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://flask-sqlalchemy.palletsprojects.com/ -- Changes: https://flask-sqlalchemy.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Flask-SQLAlchemy/ -- Source Code: https://github.com/pallets-eco/flask-sqlalchemy/ -- Issue Tracker: https://github.com/pallets-eco/flask-sqlalchemy/issues/ -- Website: https://palletsprojects.com/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - diff --git a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/RECORD b/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/RECORD deleted file mode 100644 index 95c47ac..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/RECORD +++ /dev/null @@ -1,27 +0,0 @@ -flask_sqlalchemy-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -flask_sqlalchemy-3.1.1.dist-info/METADATA,sha256=lBxR1akBt7n9XBjIVTL2OV52OhCfFrb-Mqtoe0DCbR8,3432 -flask_sqlalchemy-3.1.1.dist-info/RECORD,, -flask_sqlalchemy-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask_sqlalchemy-3.1.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -flask_sqlalchemy/__init__.py,sha256=he_w4qQQVS2Z1ms5GCTptDTXNOXBXw0n8zSuWCp8n6Y,653 -flask_sqlalchemy/__pycache__/__init__.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/cli.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/extension.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/model.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/pagination.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/query.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/record_queries.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/session.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/table.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/track_modifications.cpython-312.pyc,, -flask_sqlalchemy/cli.py,sha256=pg3QDxP36GW2qnwe_CpPtkRhPchyVSGM6zlBNWuNCFE,484 -flask_sqlalchemy/extension.py,sha256=71tP_kNtb5VgZdafy_OH1sWdZOA6PaT7cJqX7tKgZ-k,38261 -flask_sqlalchemy/model.py,sha256=_mSisC2Eni0TgTyFWeN_O4LIexTeP_sVTdxh03yMK50,11461 
-flask_sqlalchemy/pagination.py,sha256=JFpllrqkRkwacb8DAmQWaz9wsvQa0dypfSkhUDSC2ws,11119 -flask_sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask_sqlalchemy/query.py,sha256=Uls9qbmnpb9Vba43EDfsRP17eHJ0X4VG7SE22tH5R3g,3748 -flask_sqlalchemy/record_queries.py,sha256=ouS1ayj16h76LJprx13iYdoFZbm6m8OncrOgAVbG1Sk,3520 -flask_sqlalchemy/session.py,sha256=pBbtN8iDc8yuGVt0k18BvZHh2uEI7QPzZXO7eXrRi1g,3426 -flask_sqlalchemy/table.py,sha256=wAPOy8qwyAxpMwOIUJY4iMOultzz2W0D6xvBkQ7U2CE,859 -flask_sqlalchemy/track_modifications.py,sha256=yieyozj7IiVzwnAGZ-ZrgqrzjrUfG0kPrXBfW_hStSU,2755 diff --git a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/REQUESTED b/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/WHEEL b/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__init__.py b/venv/Lib/site-packages/flask_sqlalchemy/__init__.py deleted file mode 100644 index c2fa059..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .extension import SQLAlchemy - -__all__ = [ - "SQLAlchemy", -] - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Flask-SQLAlchemy 3.2. 
Use feature detection or" - " 'importlib.metadata.version(\"flask-sqlalchemy\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("flask-sqlalchemy") - - raise AttributeError(name) diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 5e3f543..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/cli.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/cli.cpython-312.pyc deleted file mode 100644 index 796dd70..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/cli.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/extension.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/extension.cpython-312.pyc deleted file mode 100644 index 62084f7..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/extension.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/model.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/model.cpython-312.pyc deleted file mode 100644 index 8321403..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/model.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/pagination.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/pagination.cpython-312.pyc deleted file mode 100644 index cfdcb87..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/pagination.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/query.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/query.cpython-312.pyc deleted file mode 100644 index 73a79ee..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/query.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/record_queries.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/record_queries.cpython-312.pyc deleted file mode 100644 index ed89bfe..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/record_queries.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/session.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/session.cpython-312.pyc deleted file mode 100644 index 586345e..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/session.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/table.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/table.cpython-312.pyc deleted file mode 100644 index 1582efa..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/table.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/track_modifications.cpython-312.pyc b/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/track_modifications.cpython-312.pyc deleted file mode 100644 index 6d63fae..0000000 Binary files a/venv/Lib/site-packages/flask_sqlalchemy/__pycache__/track_modifications.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/flask_sqlalchemy/cli.py b/venv/Lib/site-packages/flask_sqlalchemy/cli.py deleted file mode 100644 index d7d7e4b..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/cli.py +++ /dev/null @@ -1,16 +0,0 @@ -from __future__ import annotations - -import typing as t - -from flask import current_app - - -def add_models_to_shell() -> dict[str, t.Any]: - """Registered with :meth:`~flask.Flask.shell_context_processor` if - ``add_models_to_shell`` is enabled. Adds the ``db`` instance and all model classes - to ``flask shell``. - """ - db = current_app.extensions["sqlalchemy"] - out = {m.class_.__name__: m.class_ for m in db.Model._sa_registry.mappers} - out["db"] = db - return out diff --git a/venv/Lib/site-packages/flask_sqlalchemy/extension.py b/venv/Lib/site-packages/flask_sqlalchemy/extension.py deleted file mode 100644 index 43e1b9a..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/extension.py +++ /dev/null @@ -1,1008 +0,0 @@ -from __future__ import annotations - -import os -import types -import typing as t -import warnings -from weakref import WeakKeyDictionary - -import sqlalchemy as sa -import sqlalchemy.event as sa_event -import sqlalchemy.exc as sa_exc -import sqlalchemy.orm as sa_orm -from flask import abort -from flask import current_app -from flask import Flask -from flask import has_app_context - -from .model import _QueryProperty -from .model import BindMixin -from .model import DefaultMeta -from .model import DefaultMetaNoName -from .model import Model -from .model import NameMixin -from .pagination import Pagination -from .pagination import SelectPagination -from .query import Query -from .session import _app_ctx_id -from .session import Session -from .table import _Table - -_O = t.TypeVar("_O", bound=object) # Based on sqlalchemy.orm._typing.py - - -# Type accepted for model_class argument -_FSA_MCT = t.TypeVar( - "_FSA_MCT", - bound=t.Union[ - t.Type[Model], - sa_orm.DeclarativeMeta, - t.Type[sa_orm.DeclarativeBase], - t.Type[sa_orm.DeclarativeBaseNoMeta], - ], -) - - -# Type returned by make_declarative_base -class _FSAModel(Model): - metadata: sa.MetaData - - -def _get_2x_declarative_bases( - model_class: _FSA_MCT, -) -> list[t.Type[t.Union[sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta]]]: - return [ - b - for b in model_class.__bases__ - if issubclass(b, (sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta)) - ] - - -class SQLAlchemy: - """Integrates SQLAlchemy with Flask. This handles setting up one or more engines, - associating tables and models with specific engines, and cleaning up connections and - sessions after each request. - - Only the engine configuration is specific to each application, other things like - the model, table, metadata, and session are shared for all applications using that - extension instance. Call :meth:`init_app` to configure the extension on an - application. - - After creating the extension, create model classes by subclassing :attr:`Model`, and - table classes with :attr:`Table`. These can be accessed before :meth:`init_app` is - called, making it possible to define the models separately from the application. - - Accessing :attr:`session` and :attr:`engine` requires an active Flask application - context. This includes methods like :meth:`create_all` which use the engine. - - This class also provides access to names in SQLAlchemy's ``sqlalchemy`` and - ``sqlalchemy.orm`` modules. 
For example, you can use ``db.Column`` and - ``db.relationship`` instead of importing ``sqlalchemy.Column`` and - ``sqlalchemy.orm.relationship``. This can be convenient when defining models. - - :param app: Call :meth:`init_app` on this Flask application now. - :param metadata: Use this as the default :class:`sqlalchemy.schema.MetaData`. Useful - for setting a naming convention. - :param session_options: Arguments used by :attr:`session` to create each session - instance. A ``scopefunc`` key will be passed to the scoped session, not the - session instance. See :class:`sqlalchemy.orm.sessionmaker` for a list of - arguments. - :param query_class: Use this as the default query class for models and dynamic - relationships. The query interface is considered legacy in SQLAlchemy. - :param model_class: Use this as the model base class when creating the declarative - model class :attr:`Model`. Can also be a fully created declarative model class - for further customization. - :param engine_options: Default arguments used when creating every engine. These are - lower precedence than application config. See :func:`sqlalchemy.create_engine` - for a list of arguments. - :param add_models_to_shell: Add the ``db`` instance and all model classes to - ``flask shell``. - - .. versionchanged:: 3.1.0 - The ``metadata`` parameter can still be used with SQLAlchemy 1.x classes, - but is ignored when using SQLAlchemy 2.x style of declarative classes. - Instead, specify metadata on your Base class. - - .. versionchanged:: 3.1.0 - Added the ``disable_autonaming`` parameter. - - .. versionchanged:: 3.1.0 - Changed ``model_class`` parameter to accepta SQLAlchemy 2.x - declarative base subclass. - - .. versionchanged:: 3.0 - An active Flask application context is always required to access ``session`` and - ``engine``. - - .. versionchanged:: 3.0 - Separate ``metadata`` are used for each bind key. - - .. versionchanged:: 3.0 - The ``engine_options`` parameter is applied as defaults before per-engine - configuration. - - .. versionchanged:: 3.0 - The session class can be customized in ``session_options``. - - .. versionchanged:: 3.0 - Added the ``add_models_to_shell`` parameter. - - .. versionchanged:: 3.0 - Engines are created when calling ``init_app`` rather than the first time they - are accessed. - - .. versionchanged:: 3.0 - All parameters except ``app`` are keyword-only. - - .. versionchanged:: 3.0 - The extension instance is stored directly as ``app.extensions["sqlalchemy"]``. - - .. versionchanged:: 3.0 - Setup methods are renamed with a leading underscore. They are considered - internal interfaces which may change at any time. - - .. versionchanged:: 3.0 - Removed the ``use_native_unicode`` parameter and config. - - .. versionchanged:: 2.4 - Added the ``engine_options`` parameter. - - .. versionchanged:: 2.1 - Added the ``metadata``, ``query_class``, and ``model_class`` parameters. - - .. versionchanged:: 2.1 - Use the same query class across ``session``, ``Model.query`` and - ``Query``. - - .. versionchanged:: 0.16 - ``scopefunc`` is accepted in ``session_options``. - - .. versionchanged:: 0.10 - Added the ``session_options`` parameter. 
- """ - - def __init__( - self, - app: Flask | None = None, - *, - metadata: sa.MetaData | None = None, - session_options: dict[str, t.Any] | None = None, - query_class: type[Query] = Query, - model_class: _FSA_MCT = Model, # type: ignore[assignment] - engine_options: dict[str, t.Any] | None = None, - add_models_to_shell: bool = True, - disable_autonaming: bool = False, - ): - if session_options is None: - session_options = {} - - self.Query = query_class - """The default query class used by ``Model.query`` and ``lazy="dynamic"`` - relationships. - - .. warning:: - The query interface is considered legacy in SQLAlchemy. - - Customize this by passing the ``query_class`` parameter to the extension. - """ - - self.session = self._make_scoped_session(session_options) - """A :class:`sqlalchemy.orm.scoping.scoped_session` that creates instances of - :class:`.Session` scoped to the current Flask application context. The session - will be removed, returning the engine connection to the pool, when the - application context exits. - - Customize this by passing ``session_options`` to the extension. - - This requires that a Flask application context is active. - - .. versionchanged:: 3.0 - The session is scoped to the current app context. - """ - - self.metadatas: dict[str | None, sa.MetaData] = {} - """Map of bind keys to :class:`sqlalchemy.schema.MetaData` instances. The - ``None`` key refers to the default metadata, and is available as - :attr:`metadata`. - - Customize the default metadata by passing the ``metadata`` parameter to the - extension. This can be used to set a naming convention. When metadata for - another bind key is created, it copies the default's naming convention. - - .. versionadded:: 3.0 - """ - - if metadata is not None: - if len(_get_2x_declarative_bases(model_class)) > 0: - warnings.warn( - "When using SQLAlchemy 2.x style of declarative classes," - " the `metadata` should be an attribute of the base class." - "The metadata passed into SQLAlchemy() is ignored.", - DeprecationWarning, - stacklevel=2, - ) - else: - metadata.info["bind_key"] = None - self.metadatas[None] = metadata - - self.Table = self._make_table_class() - """A :class:`sqlalchemy.schema.Table` class that chooses a metadata - automatically. - - Unlike the base ``Table``, the ``metadata`` argument is not required. If it is - not given, it is selected based on the ``bind_key`` argument. - - :param bind_key: Used to select a different metadata. - :param args: Arguments passed to the base class. These are typically the table's - name, columns, and constraints. - :param kwargs: Arguments passed to the base class. - - .. versionchanged:: 3.0 - This is a subclass of SQLAlchemy's ``Table`` rather than a function. - """ - - self.Model = self._make_declarative_base( - model_class, disable_autonaming=disable_autonaming - ) - """A SQLAlchemy declarative model class. Subclass this to define database - models. - - If a model does not set ``__tablename__``, it will be generated by converting - the class name from ``CamelCase`` to ``snake_case``. It will not be generated - if the model looks like it uses single-table inheritance. - - If a model or parent class sets ``__bind_key__``, it will use that metadata and - database engine. Otherwise, it will use the default :attr:`metadata` and - :attr:`engine`. This is ignored if the model sets ``metadata`` or ``__table__``. - - For code using the SQLAlchemy 1.x API, customize this model by subclassing - :class:`.Model` and passing the ``model_class`` parameter to the extension. 
- A fully created declarative model class can be - passed as well, to use a custom metaclass. - - For code using the SQLAlchemy 2.x API, customize this model by subclassing - :class:`sqlalchemy.orm.DeclarativeBase` or - :class:`sqlalchemy.orm.DeclarativeBaseNoMeta` - and passing the ``model_class`` parameter to the extension. - """ - - if engine_options is None: - engine_options = {} - - self._engine_options = engine_options - self._app_engines: WeakKeyDictionary[Flask, dict[str | None, sa.engine.Engine]] - self._app_engines = WeakKeyDictionary() - self._add_models_to_shell = add_models_to_shell - - if app is not None: - self.init_app(app) - - def __repr__(self) -> str: - if not has_app_context(): - return f"<{type(self).__name__}>" - - message = f"{type(self).__name__} {self.engine.url}" - - if len(self.engines) > 1: - message = f"{message} +{len(self.engines) - 1}" - - return f"<{message}>" - - def init_app(self, app: Flask) -> None: - """Initialize a Flask application for use with this extension instance. This - must be called before accessing the database engine or session with the app. - - This sets default configuration values, then configures the extension on the - application and creates the engines for each bind key. Therefore, this must be - called after the application has been configured. Changes to application config - after this call will not be reflected. - - The following keys from ``app.config`` are used: - - - :data:`.SQLALCHEMY_DATABASE_URI` - - :data:`.SQLALCHEMY_ENGINE_OPTIONS` - - :data:`.SQLALCHEMY_ECHO` - - :data:`.SQLALCHEMY_BINDS` - - :data:`.SQLALCHEMY_RECORD_QUERIES` - - :data:`.SQLALCHEMY_TRACK_MODIFICATIONS` - - :param app: The Flask application to initialize. - """ - if "sqlalchemy" in app.extensions: - raise RuntimeError( - "A 'SQLAlchemy' instance has already been registered on this Flask app." - " Import and use that instance instead." - ) - - app.extensions["sqlalchemy"] = self - app.teardown_appcontext(self._teardown_session) - - if self._add_models_to_shell: - from .cli import add_models_to_shell - - app.shell_context_processor(add_models_to_shell) - - basic_uri: str | sa.engine.URL | None = app.config.setdefault( - "SQLALCHEMY_DATABASE_URI", None - ) - basic_engine_options = self._engine_options.copy() - basic_engine_options.update( - app.config.setdefault("SQLALCHEMY_ENGINE_OPTIONS", {}) - ) - echo: bool = app.config.setdefault("SQLALCHEMY_ECHO", False) - config_binds: dict[ - str | None, str | sa.engine.URL | dict[str, t.Any] - ] = app.config.setdefault("SQLALCHEMY_BINDS", {}) - engine_options: dict[str | None, dict[str, t.Any]] = {} - - # Build the engine config for each bind key. - for key, value in config_binds.items(): - engine_options[key] = self._engine_options.copy() - - if isinstance(value, (str, sa.engine.URL)): - engine_options[key]["url"] = value - else: - engine_options[key].update(value) - - # Build the engine config for the default bind key. - if basic_uri is not None: - basic_engine_options["url"] = basic_uri - - if "url" in basic_engine_options: - engine_options.setdefault(None, {}).update(basic_engine_options) - - if not engine_options: - raise RuntimeError( - "Either 'SQLALCHEMY_DATABASE_URI' or 'SQLALCHEMY_BINDS' must be set." - ) - - engines = self._app_engines.setdefault(app, {}) - - # Dispose existing engines in case init_app is called again. - if engines: - for engine in engines.values(): - engine.dispose() - - engines.clear() - - # Create the metadata and engine for each bind key. 
- for key, options in engine_options.items(): - self._make_metadata(key) - options.setdefault("echo", echo) - options.setdefault("echo_pool", echo) - self._apply_driver_defaults(options, app) - engines[key] = self._make_engine(key, options, app) - - if app.config.setdefault("SQLALCHEMY_RECORD_QUERIES", False): - from . import record_queries - - for engine in engines.values(): - record_queries._listen(engine) - - if app.config.setdefault("SQLALCHEMY_TRACK_MODIFICATIONS", False): - from . import track_modifications - - track_modifications._listen(self.session) - - def _make_scoped_session( - self, options: dict[str, t.Any] - ) -> sa_orm.scoped_session[Session]: - """Create a :class:`sqlalchemy.orm.scoping.scoped_session` around the factory - from :meth:`_make_session_factory`. The result is available as :attr:`session`. - - The scope function can be customized using the ``scopefunc`` key in the - ``session_options`` parameter to the extension. By default it uses the current - thread or greenlet id. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param options: The ``session_options`` parameter from ``__init__``. Keyword - arguments passed to the session factory. A ``scopefunc`` key is popped. - - .. versionchanged:: 3.0 - The session is scoped to the current app context. - - .. versionchanged:: 3.0 - Renamed from ``create_scoped_session``, this method is internal. - """ - scope = options.pop("scopefunc", _app_ctx_id) - factory = self._make_session_factory(options) - return sa_orm.scoped_session(factory, scope) - - def _make_session_factory( - self, options: dict[str, t.Any] - ) -> sa_orm.sessionmaker[Session]: - """Create the SQLAlchemy :class:`sqlalchemy.orm.sessionmaker` used by - :meth:`_make_scoped_session`. - - To customize, pass the ``session_options`` parameter to :class:`SQLAlchemy`. To - customize the session class, subclass :class:`.Session` and pass it as the - ``class_`` key. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param options: The ``session_options`` parameter from ``__init__``. Keyword - arguments passed to the session factory. - - .. versionchanged:: 3.0 - The session class can be customized. - - .. versionchanged:: 3.0 - Renamed from ``create_session``, this method is internal. - """ - options.setdefault("class_", Session) - options.setdefault("query_cls", self.Query) - return sa_orm.sessionmaker(db=self, **options) - - def _teardown_session(self, exc: BaseException | None) -> None: - """Remove the current session at the end of the request. - - :meta private: - - .. versionadded:: 3.0 - """ - self.session.remove() - - def _make_metadata(self, bind_key: str | None) -> sa.MetaData: - """Get or create a :class:`sqlalchemy.schema.MetaData` for the given bind key. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param bind_key: The name of the metadata being created. - - .. versionadded:: 3.0 - """ - if bind_key in self.metadatas: - return self.metadatas[bind_key] - - if bind_key is not None: - # Copy the naming convention from the default metadata. - naming_convention = self._make_metadata(None).naming_convention - else: - naming_convention = None - - # Set the bind key in info to be used by session.get_bind. 
- metadata = sa.MetaData( - naming_convention=naming_convention, info={"bind_key": bind_key} - ) - self.metadatas[bind_key] = metadata - return metadata - - def _make_table_class(self) -> type[_Table]: - """Create a SQLAlchemy :class:`sqlalchemy.schema.Table` class that chooses a - metadata automatically based on the ``bind_key``. The result is available as - :attr:`Table`. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - .. versionadded:: 3.0 - """ - - class Table(_Table): - def __new__( - cls, *args: t.Any, bind_key: str | None = None, **kwargs: t.Any - ) -> Table: - # If a metadata arg is passed, go directly to the base Table. Also do - # this for no args so the correct error is shown. - if not args or (len(args) >= 2 and isinstance(args[1], sa.MetaData)): - return super().__new__(cls, *args, **kwargs) - - metadata = self._make_metadata(bind_key) - return super().__new__(cls, *[args[0], metadata, *args[1:]], **kwargs) - - return Table - - def _make_declarative_base( - self, - model_class: _FSA_MCT, - disable_autonaming: bool = False, - ) -> t.Type[_FSAModel]: - """Create a SQLAlchemy declarative model class. The result is available as - :attr:`Model`. - - To customize, subclass :class:`.Model` and pass it as ``model_class`` to - :class:`SQLAlchemy`. To customize at the metaclass level, pass an already - created declarative model class as ``model_class``. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param model_class: A model base class, or an already created declarative model - class. - - :param disable_autonaming: Turns off automatic tablename generation in models. - - .. versionchanged:: 3.1.0 - Added support for passing SQLAlchemy 2.x base class as model class. - Added optional ``disable_autonaming`` parameter. - - .. versionchanged:: 3.0 - Renamed with a leading underscore, this method is internal. - - .. versionchanged:: 2.3 - ``model`` can be an already created declarative model class. - """ - model: t.Type[_FSAModel] - declarative_bases = _get_2x_declarative_bases(model_class) - if len(declarative_bases) > 1: - # raise error if more than one declarative base is found - raise ValueError( - "Only one declarative base can be passed to SQLAlchemy." - " Got: {}".format(model_class.__bases__) - ) - elif len(declarative_bases) == 1: - body = dict(model_class.__dict__) - body["__fsa__"] = self - mixin_classes = [BindMixin, NameMixin, Model] - if disable_autonaming: - mixin_classes.remove(NameMixin) - model = types.new_class( - "FlaskSQLAlchemyBase", - (*mixin_classes, *model_class.__bases__), - {"metaclass": type(declarative_bases[0])}, - lambda ns: ns.update(body), - ) - elif not isinstance(model_class, sa_orm.DeclarativeMeta): - metadata = self._make_metadata(None) - metaclass = DefaultMetaNoName if disable_autonaming else DefaultMeta - model = sa_orm.declarative_base( - metadata=metadata, cls=model_class, name="Model", metaclass=metaclass - ) - else: - model = model_class # type: ignore[assignment] - - if None not in self.metadatas: - # Use the model's metadata as the default metadata. - model.metadata.info["bind_key"] = None - self.metadatas[None] = model.metadata - else: - # Use the passed in default metadata as the model's metadata. 
- model.metadata = self.metadatas[None] - - model.query_class = self.Query - model.query = _QueryProperty() # type: ignore[assignment] - model.__fsa__ = self - return model - - def _apply_driver_defaults(self, options: dict[str, t.Any], app: Flask) -> None: - """Apply driver-specific configuration to an engine. - - SQLite in-memory databases use ``StaticPool`` and disable ``check_same_thread``. - File paths are relative to the app's :attr:`~flask.Flask.instance_path`, - which is created if it doesn't exist. - - MySQL sets ``charset="utf8mb4"``, and ``pool_timeout`` defaults to 2 hours. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param options: Arguments passed to the engine. - :param app: The application that the engine configuration belongs to. - - .. versionchanged:: 3.0 - SQLite paths are relative to ``app.instance_path``. It does not use - ``NullPool`` if ``pool_size`` is 0. Driver-level URIs are supported. - - .. versionchanged:: 3.0 - MySQL sets ``charset="utf8mb4". It does not set ``pool_size`` to 10. It - does not set ``pool_recycle`` if not using a queue pool. - - .. versionchanged:: 3.0 - Renamed from ``apply_driver_hacks``, this method is internal. It does not - return anything. - - .. versionchanged:: 2.5 - Returns ``(sa_url, options)``. - """ - url = sa.engine.make_url(options["url"]) - - if url.drivername in {"sqlite", "sqlite+pysqlite"}: - if url.database is None or url.database in {"", ":memory:"}: - options["poolclass"] = sa.pool.StaticPool - - if "connect_args" not in options: - options["connect_args"] = {} - - options["connect_args"]["check_same_thread"] = False - else: - # the url might look like sqlite:///file:path?uri=true - is_uri = url.query.get("uri", False) - - if is_uri: - db_str = url.database[5:] - else: - db_str = url.database - - if not os.path.isabs(db_str): - os.makedirs(app.instance_path, exist_ok=True) - db_str = os.path.join(app.instance_path, db_str) - - if is_uri: - db_str = f"file:{db_str}" - - options["url"] = url.set(database=db_str) - elif url.drivername.startswith("mysql"): - # set queue defaults only when using queue pool - if ( - "pool_class" not in options - or options["pool_class"] is sa.pool.QueuePool - ): - options.setdefault("pool_recycle", 7200) - - if "charset" not in url.query: - options["url"] = url.update_query_dict({"charset": "utf8mb4"}) - - def _make_engine( - self, bind_key: str | None, options: dict[str, t.Any], app: Flask - ) -> sa.engine.Engine: - """Create the :class:`sqlalchemy.engine.Engine` for the given bind key and app. - - To customize, use :data:`.SQLALCHEMY_ENGINE_OPTIONS` or - :data:`.SQLALCHEMY_BINDS` config. Pass ``engine_options`` to :class:`SQLAlchemy` - to set defaults for all engines. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param bind_key: The name of the engine being created. - :param options: Arguments passed to the engine. - :param app: The application that the engine configuration belongs to. - - .. versionchanged:: 3.0 - Renamed from ``create_engine``, this method is internal. - """ - return sa.engine_from_config(options, prefix="") - - @property - def metadata(self) -> sa.MetaData: - """The default metadata used by :attr:`Model` and :attr:`Table` if no bind key - is set. - """ - return self.metadatas[None] - - @property - def engines(self) -> t.Mapping[str | None, sa.engine.Engine]: - """Map of bind keys to :class:`sqlalchemy.engine.Engine` instances for current - application. 
The ``None`` key refers to the default engine, and is available as - :attr:`engine`. - - To customize, set the :data:`.SQLALCHEMY_BINDS` config, and set defaults by - passing the ``engine_options`` parameter to the extension. - - This requires that a Flask application context is active. - - .. versionadded:: 3.0 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - - if app not in self._app_engines: - raise RuntimeError( - "The current Flask app is not registered with this 'SQLAlchemy'" - " instance. Did you forget to call 'init_app', or did you create" - " multiple 'SQLAlchemy' instances?" - ) - - return self._app_engines[app] - - @property - def engine(self) -> sa.engine.Engine: - """The default :class:`~sqlalchemy.engine.Engine` for the current application, - used by :attr:`session` if the :attr:`Model` or :attr:`Table` being queried does - not set a bind key. - - To customize, set the :data:`.SQLALCHEMY_ENGINE_OPTIONS` config, and set - defaults by passing the ``engine_options`` parameter to the extension. - - This requires that a Flask application context is active. - """ - return self.engines[None] - - def get_engine( - self, bind_key: str | None = None, **kwargs: t.Any - ) -> sa.engine.Engine: - """Get the engine for the given bind key for the current application. - This requires that a Flask application context is active. - - :param bind_key: The name of the engine. - - .. deprecated:: 3.0 - Will be removed in Flask-SQLAlchemy 3.2. Use ``engines[key]`` instead. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - """ - warnings.warn( - "'get_engine' is deprecated and will be removed in Flask-SQLAlchemy" - " 3.2. Use 'engine' or 'engines[key]' instead. If you're using" - " Flask-Migrate or Alembic, you'll need to update your 'env.py' file.", - DeprecationWarning, - stacklevel=2, - ) - - if "bind" in kwargs: - bind_key = kwargs.pop("bind") - - return self.engines[bind_key] - - def get_or_404( - self, - entity: type[_O], - ident: t.Any, - *, - description: str | None = None, - **kwargs: t.Any, - ) -> _O: - """Like :meth:`session.get() ` but aborts with a - ``404 Not Found`` error instead of returning ``None``. - - :param entity: The model class to query. - :param ident: The primary key to query. - :param description: A custom message to show on the error page. - :param kwargs: Extra arguments passed to ``session.get()``. - - .. versionchanged:: 3.1 - Pass extra keyword arguments to ``session.get()``. - - .. versionadded:: 3.0 - """ - value = self.session.get(entity, ident, **kwargs) - - if value is None: - abort(404, description=description) - - return value - - def first_or_404( - self, statement: sa.sql.Select[t.Any], *, description: str | None = None - ) -> t.Any: - """Like :meth:`Result.scalar() `, but aborts - with a ``404 Not Found`` error instead of returning ``None``. - - :param statement: The ``select`` statement to execute. - :param description: A custom message to show on the error page. - - .. versionadded:: 3.0 - """ - value = self.session.execute(statement).scalar() - - if value is None: - abort(404, description=description) - - return value - - def one_or_404( - self, statement: sa.sql.Select[t.Any], *, description: str | None = None - ) -> t.Any: - """Like :meth:`Result.scalar_one() `, - but aborts with a ``404 Not Found`` error instead of raising ``NoResultFound`` - or ``MultipleResultsFound``. - - :param statement: The ``select`` statement to execute. 
- :param description: A custom message to show on the error page. - - .. versionadded:: 3.0 - """ - try: - return self.session.execute(statement).scalar_one() - except (sa_exc.NoResultFound, sa_exc.MultipleResultsFound): - abort(404, description=description) - - def paginate( - self, - select: sa.sql.Select[t.Any], - *, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = None, - error_out: bool = True, - count: bool = True, - ) -> Pagination: - """Apply an offset and limit to a select statment based on the current page and - number of items per page, returning a :class:`.Pagination` object. - - The statement should select a model class, like ``select(User)``. This applies - ``unique()`` and ``scalars()`` modifiers to the result, so compound selects will - not return the expected results. - - :param select: The ``select`` statement to paginate. - :param page: The current page, used to calculate the offset. Defaults to the - ``page`` query arg during a request, or 1 otherwise. - :param per_page: The maximum number of items on a page, used to calculate the - offset and limit. Defaults to the ``per_page`` query arg during a request, - or 20 otherwise. - :param max_per_page: The maximum allowed value for ``per_page``, to limit a - user-provided value. Use ``None`` for no limit. Defaults to 100. - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - :param count: Calculate the total number of values by issuing an extra count - query. For very complex queries this may be inaccurate or slow, so it can be - disabled and set manually if necessary. - - .. versionchanged:: 3.0 - The ``count`` query is more efficient. - - .. versionadded:: 3.0 - """ - return SelectPagination( - select=select, - session=self.session(), - page=page, - per_page=per_page, - max_per_page=max_per_page, - error_out=error_out, - count=count, - ) - - def _call_for_binds( - self, bind_key: str | None | list[str | None], op_name: str - ) -> None: - """Call a method on each metadata. - - :meta private: - - :param bind_key: A bind key or list of keys. Defaults to all binds. - :param op_name: The name of the method to call. - - .. versionchanged:: 3.0 - Renamed from ``_execute_for_all_tables``. - """ - if bind_key == "__all__": - keys: list[str | None] = list(self.metadatas) - elif bind_key is None or isinstance(bind_key, str): - keys = [bind_key] - else: - keys = bind_key - - for key in keys: - try: - engine = self.engines[key] - except KeyError: - message = f"Bind key '{key}' is not in 'SQLALCHEMY_BINDS' config." - - if key is None: - message = f"'SQLALCHEMY_DATABASE_URI' config is not set. {message}" - - raise sa_exc.UnboundExecutionError(message) from None - - metadata = self.metadatas[key] - getattr(metadata, op_name)(bind=engine) - - def create_all(self, bind_key: str | None | list[str | None] = "__all__") -> None: - """Create tables that do not exist in the database by calling - ``metadata.create_all()`` for all or some bind keys. This does not - update existing tables, use a migration library for that. - - This requires that a Flask application context is active. - - :param bind_key: A bind key or list of keys to create the tables for. Defaults - to all binds. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - - .. versionchanged:: 0.12 - Added the ``bind`` and ``app`` parameters. 
- """ - self._call_for_binds(bind_key, "create_all") - - def drop_all(self, bind_key: str | None | list[str | None] = "__all__") -> None: - """Drop tables by calling ``metadata.drop_all()`` for all or some bind keys. - - This requires that a Flask application context is active. - - :param bind_key: A bind key or list of keys to drop the tables from. Defaults to - all binds. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - - .. versionchanged:: 0.12 - Added the ``bind`` and ``app`` parameters. - """ - self._call_for_binds(bind_key, "drop_all") - - def reflect(self, bind_key: str | None | list[str | None] = "__all__") -> None: - """Load table definitions from the database by calling ``metadata.reflect()`` - for all or some bind keys. - - This requires that a Flask application context is active. - - :param bind_key: A bind key or list of keys to reflect the tables from. Defaults - to all binds. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - - .. versionchanged:: 0.12 - Added the ``bind`` and ``app`` parameters. - """ - self._call_for_binds(bind_key, "reflect") - - def _set_rel_query(self, kwargs: dict[str, t.Any]) -> None: - """Apply the extension's :attr:`Query` class as the default for relationships - and backrefs. - - :meta private: - """ - kwargs.setdefault("query_class", self.Query) - - if "backref" in kwargs: - backref = kwargs["backref"] - - if isinstance(backref, str): - backref = (backref, {}) - - backref[1].setdefault("query_class", self.Query) - - def relationship( - self, *args: t.Any, **kwargs: t.Any - ) -> sa_orm.RelationshipProperty[t.Any]: - """A :func:`sqlalchemy.orm.relationship` that applies this extension's - :attr:`Query` class for dynamic relationships and backrefs. - - .. versionchanged:: 3.0 - The :attr:`Query` class is set on ``backref``. - """ - self._set_rel_query(kwargs) - return sa_orm.relationship(*args, **kwargs) - - def dynamic_loader( - self, argument: t.Any, **kwargs: t.Any - ) -> sa_orm.RelationshipProperty[t.Any]: - """A :func:`sqlalchemy.orm.dynamic_loader` that applies this extension's - :attr:`Query` class for relationships and backrefs. - - .. versionchanged:: 3.0 - The :attr:`Query` class is set on ``backref``. - """ - self._set_rel_query(kwargs) - return sa_orm.dynamic_loader(argument, **kwargs) - - def _relation( - self, *args: t.Any, **kwargs: t.Any - ) -> sa_orm.RelationshipProperty[t.Any]: - """A :func:`sqlalchemy.orm.relationship` that applies this extension's - :attr:`Query` class for dynamic relationships and backrefs. - - SQLAlchemy 2.0 removes this name, use ``relationship`` instead. - - :meta private: - - .. versionchanged:: 3.0 - The :attr:`Query` class is set on ``backref``. 
- """ - self._set_rel_query(kwargs) - f = sa_orm.relationship - return f(*args, **kwargs) - - def __getattr__(self, name: str) -> t.Any: - if name == "relation": - return self._relation - - if name == "event": - return sa_event - - if name.startswith("_"): - raise AttributeError(name) - - for mod in (sa, sa_orm): - if hasattr(mod, name): - return getattr(mod, name) - - raise AttributeError(name) diff --git a/venv/Lib/site-packages/flask_sqlalchemy/model.py b/venv/Lib/site-packages/flask_sqlalchemy/model.py deleted file mode 100644 index c6f9e5a..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/model.py +++ /dev/null @@ -1,330 +0,0 @@ -from __future__ import annotations - -import re -import typing as t - -import sqlalchemy as sa -import sqlalchemy.orm as sa_orm - -from .query import Query - -if t.TYPE_CHECKING: - from .extension import SQLAlchemy - - -class _QueryProperty: - """A class property that creates a query object for a model. - - :meta private: - """ - - def __get__(self, obj: Model | None, cls: type[Model]) -> Query: - return cls.query_class( - cls, session=cls.__fsa__.session() # type: ignore[arg-type] - ) - - -class Model: - """The base class of the :attr:`.SQLAlchemy.Model` declarative model class. - - To define models, subclass :attr:`db.Model <.SQLAlchemy.Model>`, not this. To - customize ``db.Model``, subclass this and pass it as ``model_class`` to - :class:`.SQLAlchemy`. To customize ``db.Model`` at the metaclass level, pass an - already created declarative model class as ``model_class``. - """ - - __fsa__: t.ClassVar[SQLAlchemy] - """Internal reference to the extension object. - - :meta private: - """ - - query_class: t.ClassVar[type[Query]] = Query - """Query class used by :attr:`query`. Defaults to :attr:`.SQLAlchemy.Query`, which - defaults to :class:`.Query`. - """ - - query: t.ClassVar[Query] = _QueryProperty() # type: ignore[assignment] - """A SQLAlchemy query for a model. Equivalent to ``db.session.query(Model)``. Can be - customized per-model by overriding :attr:`query_class`. - - .. warning:: - The query interface is considered legacy in SQLAlchemy. Prefer using - ``session.execute(select())`` instead. - """ - - def __repr__(self) -> str: - state = sa.inspect(self) - assert state is not None - - if state.transient: - pk = f"(transient {id(self)})" - elif state.pending: - pk = f"(pending {id(self)})" - else: - pk = ", ".join(map(str, state.identity)) - - return f"<{type(self).__name__} {pk}>" - - -class BindMetaMixin(type): - """Metaclass mixin that sets a model's ``metadata`` based on its ``__bind_key__``. - - If the model sets ``metadata`` or ``__table__`` directly, ``__bind_key__`` is - ignored. If the ``metadata`` is the same as the parent model, it will not be set - directly on the child model. - """ - - __fsa__: SQLAlchemy - metadata: sa.MetaData - - def __init__( - cls, name: str, bases: tuple[type, ...], d: dict[str, t.Any], **kwargs: t.Any - ) -> None: - if not ("metadata" in cls.__dict__ or "__table__" in cls.__dict__): - bind_key = getattr(cls, "__bind_key__", None) - parent_metadata = getattr(cls, "metadata", None) - metadata = cls.__fsa__._make_metadata(bind_key) - - if metadata is not parent_metadata: - cls.metadata = metadata - - super().__init__(name, bases, d, **kwargs) - - -class BindMixin: - """DeclarativeBase mixin to set a model's ``metadata`` based on ``__bind_key__``. - - If no ``__bind_key__`` is specified, the model will use the default metadata - provided by ``DeclarativeBase`` or ``DeclarativeBaseNoMeta``. 
- If the model doesn't set ``metadata`` or ``__table__`` directly - and does set ``__bind_key__``, the model will use the metadata - for the specified bind key. - If the ``metadata`` is the same as the parent model, it will not be set - directly on the child model. - - .. versionchanged:: 3.1.0 - """ - - __fsa__: SQLAlchemy - metadata: sa.MetaData - - @classmethod - def __init_subclass__(cls: t.Type[BindMixin], **kwargs: t.Dict[str, t.Any]) -> None: - if not ("metadata" in cls.__dict__ or "__table__" in cls.__dict__) and hasattr( - cls, "__bind_key__" - ): - bind_key = getattr(cls, "__bind_key__", None) - parent_metadata = getattr(cls, "metadata", None) - metadata = cls.__fsa__._make_metadata(bind_key) - - if metadata is not parent_metadata: - cls.metadata = metadata - - super().__init_subclass__(**kwargs) - - -class NameMetaMixin(type): - """Metaclass mixin that sets a model's ``__tablename__`` by converting the - ``CamelCase`` class name to ``snake_case``. A name is set for non-abstract models - that do not otherwise define ``__tablename__``. If a model does not define a primary - key, it will not generate a name or ``__table__``, for single-table inheritance. - """ - - metadata: sa.MetaData - __tablename__: str - __table__: sa.Table - - def __init__( - cls, name: str, bases: tuple[type, ...], d: dict[str, t.Any], **kwargs: t.Any - ) -> None: - if should_set_tablename(cls): - cls.__tablename__ = camel_to_snake_case(cls.__name__) - - super().__init__(name, bases, d, **kwargs) - - # __table_cls__ has run. If no table was created, use the parent table. - if ( - "__tablename__" not in cls.__dict__ - and "__table__" in cls.__dict__ - and cls.__dict__["__table__"] is None - ): - del cls.__table__ - - def __table_cls__(cls, *args: t.Any, **kwargs: t.Any) -> sa.Table | None: - """This is called by SQLAlchemy during mapper setup. It determines the final - table object that the model will use. - - If no primary key is found, that indicates single-table inheritance, so no table - will be created and ``__tablename__`` will be unset. - """ - schema = kwargs.get("schema") - - if schema is None: - key = args[0] - else: - key = f"{schema}.{args[0]}" - - # Check if a table with this name already exists. Allows reflected tables to be - # applied to models by name. - if key in cls.metadata.tables: - return sa.Table(*args, **kwargs) - - # If a primary key is found, create a table for joined-table inheritance. - for arg in args: - if (isinstance(arg, sa.Column) and arg.primary_key) or isinstance( - arg, sa.PrimaryKeyConstraint - ): - return sa.Table(*args, **kwargs) - - # If no base classes define a table, return one that's missing a primary key - # so SQLAlchemy shows the correct error. - for base in cls.__mro__[1:-1]: - if "__table__" in base.__dict__: - break - else: - return sa.Table(*args, **kwargs) - - # Single-table inheritance, use the parent table name. __init__ will unset - # __table__ based on this. - if "__tablename__" in cls.__dict__: - del cls.__tablename__ - - return None - - -class NameMixin: - """DeclarativeBase mixin that sets a model's ``__tablename__`` by converting the - ``CamelCase`` class name to ``snake_case``. A name is set for non-abstract models - that do not otherwise define ``__tablename__``. If a model does not define a primary - key, it will not generate a name or ``__table__``, for single-table inheritance. - - .. 
versionchanged:: 3.1.0 - """ - - metadata: sa.MetaData - __tablename__: str - __table__: sa.Table - - @classmethod - def __init_subclass__(cls: t.Type[NameMixin], **kwargs: t.Dict[str, t.Any]) -> None: - if should_set_tablename(cls): - cls.__tablename__ = camel_to_snake_case(cls.__name__) - - super().__init_subclass__(**kwargs) - - # __table_cls__ has run. If no table was created, use the parent table. - if ( - "__tablename__" not in cls.__dict__ - and "__table__" in cls.__dict__ - and cls.__dict__["__table__"] is None - ): - del cls.__table__ - - @classmethod - def __table_cls__(cls, *args: t.Any, **kwargs: t.Any) -> sa.Table | None: - """This is called by SQLAlchemy during mapper setup. It determines the final - table object that the model will use. - - If no primary key is found, that indicates single-table inheritance, so no table - will be created and ``__tablename__`` will be unset. - """ - schema = kwargs.get("schema") - - if schema is None: - key = args[0] - else: - key = f"{schema}.{args[0]}" - - # Check if a table with this name already exists. Allows reflected tables to be - # applied to models by name. - if key in cls.metadata.tables: - return sa.Table(*args, **kwargs) - - # If a primary key is found, create a table for joined-table inheritance. - for arg in args: - if (isinstance(arg, sa.Column) and arg.primary_key) or isinstance( - arg, sa.PrimaryKeyConstraint - ): - return sa.Table(*args, **kwargs) - - # If no base classes define a table, return one that's missing a primary key - # so SQLAlchemy shows the correct error. - for base in cls.__mro__[1:-1]: - if "__table__" in base.__dict__: - break - else: - return sa.Table(*args, **kwargs) - - # Single-table inheritance, use the parent table name. __init__ will unset - # __table__ based on this. - if "__tablename__" in cls.__dict__: - del cls.__tablename__ - - return None - - -def should_set_tablename(cls: type) -> bool: - """Determine whether ``__tablename__`` should be generated for a model. - - - If no class in the MRO sets a name, one should be generated. - - If a declared attr is found, it should be used instead. - - If a name is found, it should be used if the class is a mixin, otherwise one - should be generated. - - Abstract models should not have one generated. - - Later, ``__table_cls__`` will determine if the model looks like single or - joined-table inheritance. If no primary key is found, the name will be unset. 
- """ - if ( - cls.__dict__.get("__abstract__", False) - or ( - not issubclass(cls, (sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta)) - and not any(isinstance(b, sa_orm.DeclarativeMeta) for b in cls.__mro__[1:]) - ) - or any( - (b is sa_orm.DeclarativeBase or b is sa_orm.DeclarativeBaseNoMeta) - for b in cls.__bases__ - ) - ): - return False - - for base in cls.__mro__: - if "__tablename__" not in base.__dict__: - continue - - if isinstance(base.__dict__["__tablename__"], sa_orm.declared_attr): - return False - - return not ( - base is cls - or base.__dict__.get("__abstract__", False) - or not ( - # SQLAlchemy 1.x - isinstance(base, sa_orm.DeclarativeMeta) - # 2.x: DeclarativeBas uses this as metaclass - or isinstance(base, sa_orm.decl_api.DeclarativeAttributeIntercept) - # 2.x: DeclarativeBaseNoMeta doesn't use a metaclass - or issubclass(base, sa_orm.DeclarativeBaseNoMeta) - ) - ) - - return True - - -def camel_to_snake_case(name: str) -> str: - """Convert a ``CamelCase`` name to ``snake_case``.""" - name = re.sub(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))", r"_\1", name) - return name.lower().lstrip("_") - - -class DefaultMeta(BindMetaMixin, NameMetaMixin, sa_orm.DeclarativeMeta): - """SQLAlchemy declarative metaclass that provides ``__bind_key__`` and - ``__tablename__`` support. - """ - - -class DefaultMetaNoName(BindMetaMixin, sa_orm.DeclarativeMeta): - """SQLAlchemy declarative metaclass that provides ``__bind_key__`` and - ``__tablename__`` support. - """ diff --git a/venv/Lib/site-packages/flask_sqlalchemy/pagination.py b/venv/Lib/site-packages/flask_sqlalchemy/pagination.py deleted file mode 100644 index 3d49d6e..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/pagination.py +++ /dev/null @@ -1,364 +0,0 @@ -from __future__ import annotations - -import typing as t -from math import ceil - -import sqlalchemy as sa -import sqlalchemy.orm as sa_orm -from flask import abort -from flask import request - - -class Pagination: - """Apply an offset and limit to the query based on the current page and number of - items per page. - - Don't create pagination objects manually. They are created by - :meth:`.SQLAlchemy.paginate` and :meth:`.Query.paginate`. - - This is a base class, a subclass must implement :meth:`_query_items` and - :meth:`_query_count`. Those methods will use arguments passed as ``kwargs`` to - perform the queries. - - :param page: The current page, used to calculate the offset. Defaults to the - ``page`` query arg during a request, or 1 otherwise. - :param per_page: The maximum number of items on a page, used to calculate the - offset and limit. Defaults to the ``per_page`` query arg during a request, - or 20 otherwise. - :param max_per_page: The maximum allowed value for ``per_page``, to limit a - user-provided value. Use ``None`` for no limit. Defaults to 100. - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - :param count: Calculate the total number of values by issuing an extra count - query. For very complex queries this may be inaccurate or slow, so it can be - disabled and set manually if necessary. - :param kwargs: Information about the query to paginate. Different subclasses will - require different arguments. - - .. versionchanged:: 3.0 - Iterating over a pagination object iterates over its items. - - .. versionchanged:: 3.0 - Creating instances manually is not a public API. 
- """ - - def __init__( - self, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = 100, - error_out: bool = True, - count: bool = True, - **kwargs: t.Any, - ) -> None: - self._query_args = kwargs - page, per_page = self._prepare_page_args( - page=page, - per_page=per_page, - max_per_page=max_per_page, - error_out=error_out, - ) - - self.page: int = page - """The current page.""" - - self.per_page: int = per_page - """The maximum number of items on a page.""" - - self.max_per_page: int | None = max_per_page - """The maximum allowed value for ``per_page``.""" - - items = self._query_items() - - if not items and page != 1 and error_out: - abort(404) - - self.items: list[t.Any] = items - """The items on the current page. Iterating over the pagination object is - equivalent to iterating over the items. - """ - - if count: - total = self._query_count() - else: - total = None - - self.total: int | None = total - """The total number of items across all pages.""" - - @staticmethod - def _prepare_page_args( - *, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = None, - error_out: bool = True, - ) -> tuple[int, int]: - if request: - if page is None: - try: - page = int(request.args.get("page", 1)) - except (TypeError, ValueError): - if error_out: - abort(404) - - page = 1 - - if per_page is None: - try: - per_page = int(request.args.get("per_page", 20)) - except (TypeError, ValueError): - if error_out: - abort(404) - - per_page = 20 - else: - if page is None: - page = 1 - - if per_page is None: - per_page = 20 - - if max_per_page is not None: - per_page = min(per_page, max_per_page) - - if page < 1: - if error_out: - abort(404) - else: - page = 1 - - if per_page < 1: - if error_out: - abort(404) - else: - per_page = 20 - - return page, per_page - - @property - def _query_offset(self) -> int: - """The index of the first item to query, passed to ``offset()``. - - :meta private: - - .. versionadded:: 3.0 - """ - return (self.page - 1) * self.per_page - - def _query_items(self) -> list[t.Any]: - """Execute the query to get the items on the current page. - - Uses init arguments stored in :attr:`_query_args`. - - :meta private: - - .. versionadded:: 3.0 - """ - raise NotImplementedError - - def _query_count(self) -> int: - """Execute the query to get the total number of items. - - Uses init arguments stored in :attr:`_query_args`. - - :meta private: - - .. versionadded:: 3.0 - """ - raise NotImplementedError - - @property - def first(self) -> int: - """The number of the first item on the page, starting from 1, or 0 if there are - no items. - - .. versionadded:: 3.0 - """ - if len(self.items) == 0: - return 0 - - return (self.page - 1) * self.per_page + 1 - - @property - def last(self) -> int: - """The number of the last item on the page, starting from 1, inclusive, or 0 if - there are no items. - - .. 
versionadded:: 3.0 - """ - first = self.first - return max(first, first + len(self.items) - 1) - - @property - def pages(self) -> int: - """The total number of pages.""" - if self.total == 0 or self.total is None: - return 0 - - return ceil(self.total / self.per_page) - - @property - def has_prev(self) -> bool: - """``True`` if this is not the first page.""" - return self.page > 1 - - @property - def prev_num(self) -> int | None: - """The previous page number, or ``None`` if this is the first page.""" - if not self.has_prev: - return None - - return self.page - 1 - - def prev(self, *, error_out: bool = False) -> Pagination: - """Query the :class:`Pagination` object for the previous page. - - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - """ - p = type(self)( - page=self.page - 1, - per_page=self.per_page, - error_out=error_out, - count=False, - **self._query_args, - ) - p.total = self.total - return p - - @property - def has_next(self) -> bool: - """``True`` if this is not the last page.""" - return self.page < self.pages - - @property - def next_num(self) -> int | None: - """The next page number, or ``None`` if this is the last page.""" - if not self.has_next: - return None - - return self.page + 1 - - def next(self, *, error_out: bool = False) -> Pagination: - """Query the :class:`Pagination` object for the next page. - - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - """ - p = type(self)( - page=self.page + 1, - per_page=self.per_page, - max_per_page=self.max_per_page, - error_out=error_out, - count=False, - **self._query_args, - ) - p.total = self.total - return p - - def iter_pages( - self, - *, - left_edge: int = 2, - left_current: int = 2, - right_current: int = 4, - right_edge: int = 2, - ) -> t.Iterator[int | None]: - """Yield page numbers for a pagination widget. Skipped pages between the edges - and middle are represented by a ``None``. - - For example, if there are 20 pages and the current page is 7, the following - values are yielded. - - .. code-block:: python - - 1, 2, None, 5, 6, 7, 8, 9, 10, 11, None, 19, 20 - - :param left_edge: How many pages to show from the first page. - :param left_current: How many pages to show left of the current page. - :param right_current: How many pages to show right of the current page. - :param right_edge: How many pages to show from the last page. - - .. versionchanged:: 3.0 - Improved efficiency of calculating what to yield. - - .. versionchanged:: 3.0 - ``right_current`` boundary is inclusive. - - .. versionchanged:: 3.0 - All parameters are keyword-only. - """ - pages_end = self.pages + 1 - - if pages_end == 1: - return - - left_end = min(1 + left_edge, pages_end) - yield from range(1, left_end) - - if left_end == pages_end: - return - - mid_start = max(left_end, self.page - left_current) - mid_end = min(self.page + right_current + 1, pages_end) - - if mid_start - left_end > 0: - yield None - - yield from range(mid_start, mid_end) - - if mid_end == pages_end: - return - - right_start = max(mid_end, pages_end - right_edge) - - if right_start - mid_end > 0: - yield None - - yield from range(right_start, pages_end) - - def __iter__(self) -> t.Iterator[t.Any]: - yield from self.items - - -class SelectPagination(Pagination): - """Returned by :meth:`.SQLAlchemy.paginate`. 
Takes ``select`` and ``session`` - arguments in addition to the :class:`Pagination` arguments. - - .. versionadded:: 3.0 - """ - - def _query_items(self) -> list[t.Any]: - select = self._query_args["select"] - select = select.limit(self.per_page).offset(self._query_offset) - session = self._query_args["session"] - return list(session.execute(select).unique().scalars()) - - def _query_count(self) -> int: - select = self._query_args["select"] - sub = select.options(sa_orm.lazyload("*")).order_by(None).subquery() - session = self._query_args["session"] - out = session.execute(sa.select(sa.func.count()).select_from(sub)).scalar() - return out # type: ignore[no-any-return] - - -class QueryPagination(Pagination): - """Returned by :meth:`.Query.paginate`. Takes a ``query`` argument in addition to - the :class:`Pagination` arguments. - - .. versionadded:: 3.0 - """ - - def _query_items(self) -> list[t.Any]: - query = self._query_args["query"] - out = query.limit(self.per_page).offset(self._query_offset).all() - return out # type: ignore[no-any-return] - - def _query_count(self) -> int: - # Query.count automatically disables eager loads - out = self._query_args["query"].order_by(None).count() - return out # type: ignore[no-any-return] diff --git a/venv/Lib/site-packages/flask_sqlalchemy/py.typed b/venv/Lib/site-packages/flask_sqlalchemy/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/flask_sqlalchemy/query.py b/venv/Lib/site-packages/flask_sqlalchemy/query.py deleted file mode 100644 index 35f927d..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/query.py +++ /dev/null @@ -1,105 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy.exc as sa_exc -import sqlalchemy.orm as sa_orm -from flask import abort - -from .pagination import Pagination -from .pagination import QueryPagination - - -class Query(sa_orm.Query): # type: ignore[type-arg] - """SQLAlchemy :class:`~sqlalchemy.orm.query.Query` subclass with some extra methods - useful for querying in a web application. - - This is the default query class for :attr:`.Model.query`. - - .. versionchanged:: 3.0 - Renamed to ``Query`` from ``BaseQuery``. - """ - - def get_or_404(self, ident: t.Any, description: str | None = None) -> t.Any: - """Like :meth:`~sqlalchemy.orm.Query.get` but aborts with a ``404 Not Found`` - error instead of returning ``None``. - - :param ident: The primary key to query. - :param description: A custom message to show on the error page. - """ - rv = self.get(ident) - - if rv is None: - abort(404, description=description) - - return rv - - def first_or_404(self, description: str | None = None) -> t.Any: - """Like :meth:`~sqlalchemy.orm.Query.first` but aborts with a ``404 Not Found`` - error instead of returning ``None``. - - :param description: A custom message to show on the error page. - """ - rv = self.first() - - if rv is None: - abort(404, description=description) - - return rv - - def one_or_404(self, description: str | None = None) -> t.Any: - """Like :meth:`~sqlalchemy.orm.Query.one` but aborts with a ``404 Not Found`` - error instead of raising ``NoResultFound`` or ``MultipleResultsFound``. - - :param description: A custom message to show on the error page. - - .. 
versionadded:: 3.0 - """ - try: - return self.one() - except (sa_exc.NoResultFound, sa_exc.MultipleResultsFound): - abort(404, description=description) - - def paginate( - self, - *, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = None, - error_out: bool = True, - count: bool = True, - ) -> Pagination: - """Apply an offset and limit to the query based on the current page and number - of items per page, returning a :class:`.Pagination` object. - - :param page: The current page, used to calculate the offset. Defaults to the - ``page`` query arg during a request, or 1 otherwise. - :param per_page: The maximum number of items on a page, used to calculate the - offset and limit. Defaults to the ``per_page`` query arg during a request, - or 20 otherwise. - :param max_per_page: The maximum allowed value for ``per_page``, to limit a - user-provided value. Use ``None`` for no limit. Defaults to 100. - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - :param count: Calculate the total number of values by issuing an extra count - query. For very complex queries this may be inaccurate or slow, so it can be - disabled and set manually if necessary. - - .. versionchanged:: 3.0 - All parameters are keyword-only. - - .. versionchanged:: 3.0 - The ``count`` query is more efficient. - - .. versionchanged:: 3.0 - ``max_per_page`` defaults to 100. - """ - return QueryPagination( - query=self, - page=page, - per_page=per_page, - max_per_page=max_per_page, - error_out=error_out, - count=count, - ) diff --git a/venv/Lib/site-packages/flask_sqlalchemy/record_queries.py b/venv/Lib/site-packages/flask_sqlalchemy/record_queries.py deleted file mode 100644 index e8273be..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/record_queries.py +++ /dev/null @@ -1,117 +0,0 @@ -from __future__ import annotations - -import dataclasses -import inspect -import typing as t -from time import perf_counter - -import sqlalchemy as sa -import sqlalchemy.event as sa_event -from flask import current_app -from flask import g -from flask import has_app_context - - -def get_recorded_queries() -> list[_QueryInfo]: - """Get the list of recorded query information for the current session. Queries are - recorded if the config :data:`.SQLALCHEMY_RECORD_QUERIES` is enabled. - - Each query info object has the following attributes: - - ``statement`` - The string of SQL generated by SQLAlchemy with parameter placeholders. - ``parameters`` - The parameters sent with the SQL statement. - ``start_time`` / ``end_time`` - Timing info about when the query started execution and when the results where - returned. Accuracy and value depends on the operating system. - ``duration`` - The time the query took in seconds. - ``location`` - A string description of where in your application code the query was executed. - This may not be possible to calculate, and the format is not stable. - - .. versionchanged:: 3.0 - Renamed from ``get_debug_queries``. - - .. versionchanged:: 3.0 - The info object is a dataclass instead of a tuple. - - .. versionchanged:: 3.0 - The info object attribute ``context`` is renamed to ``location``. - - .. versionchanged:: 3.0 - Not enabled automatically in debug or testing mode. - """ - return g.get("_sqlalchemy_queries", []) # type: ignore[no-any-return] - - -@dataclasses.dataclass -class _QueryInfo: - """Information about an executed query. 
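The removed query.py is what gives ``Model.query`` its ``get_or_404`` and keyword-only ``paginate`` helpers. A hedged sketch of how an application view might use them; the ``User`` model, the routes, and the in-memory SQLite URI are assumptions for illustration only.

.. code-block:: python

    from flask import Flask, jsonify
    from flask_sqlalchemy import SQLAlchemy

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"
    db = SQLAlchemy(app)

    class User(db.Model):
        id = db.Column(db.Integer, primary_key=True)
        name = db.Column(db.String(50))

    @app.route("/users/<int:user_id>")
    def show_user(user_id):
        # Aborts with 404 Not Found instead of returning None.
        user = User.query.get_or_404(user_id, description="No such user")
        return jsonify(id=user.id, name=user.name)

    @app.route("/users")
    def list_users():
        # page/per_page fall back to the request args; max_per_page caps user input.
        pg = User.query.paginate(page=None, per_page=None, max_per_page=100)
        return jsonify(items=[u.name for u in pg.items], total=pg.total, pages=pg.pages)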
Returned by :func:`get_recorded_queries`. - - .. versionchanged:: 3.0 - Renamed from ``_DebugQueryTuple``. - - .. versionchanged:: 3.0 - Changed to a dataclass instead of a tuple. - - .. versionchanged:: 3.0 - ``context`` is renamed to ``location``. - """ - - statement: str | None - parameters: t.Any - start_time: float - end_time: float - location: str - - @property - def duration(self) -> float: - return self.end_time - self.start_time - - -def _listen(engine: sa.engine.Engine) -> None: - sa_event.listen(engine, "before_cursor_execute", _record_start, named=True) - sa_event.listen(engine, "after_cursor_execute", _record_end, named=True) - - -def _record_start(context: sa.engine.ExecutionContext, **kwargs: t.Any) -> None: - if not has_app_context(): - return - - context._fsa_start_time = perf_counter() # type: ignore[attr-defined] - - -def _record_end(context: sa.engine.ExecutionContext, **kwargs: t.Any) -> None: - if not has_app_context(): - return - - if "_sqlalchemy_queries" not in g: - g._sqlalchemy_queries = [] - - import_top = current_app.import_name.partition(".")[0] - import_dot = f"{import_top}." - frame = inspect.currentframe() - - while frame: - name = frame.f_globals.get("__name__") - - if name and (name == import_top or name.startswith(import_dot)): - code = frame.f_code - location = f"{code.co_filename}:{frame.f_lineno} ({code.co_name})" - break - - frame = frame.f_back - else: - location = "" - - g._sqlalchemy_queries.append( - _QueryInfo( - statement=context.statement, - parameters=context.parameters, - start_time=context._fsa_start_time, # type: ignore[attr-defined] - end_time=perf_counter(), - location=location, - ) - ) diff --git a/venv/Lib/site-packages/flask_sqlalchemy/session.py b/venv/Lib/site-packages/flask_sqlalchemy/session.py deleted file mode 100644 index 631fffa..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/session.py +++ /dev/null @@ -1,111 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy as sa -import sqlalchemy.exc as sa_exc -import sqlalchemy.orm as sa_orm -from flask.globals import app_ctx - -if t.TYPE_CHECKING: - from .extension import SQLAlchemy - - -class Session(sa_orm.Session): - """A SQLAlchemy :class:`~sqlalchemy.orm.Session` class that chooses what engine to - use based on the bind key associated with the metadata associated with the thing - being queried. - - To customize ``db.session``, subclass this and pass it as the ``class_`` key in the - ``session_options`` to :class:`.SQLAlchemy`. - - .. versionchanged:: 3.0 - Renamed from ``SignallingSession``. - """ - - def __init__(self, db: SQLAlchemy, **kwargs: t.Any) -> None: - super().__init__(**kwargs) - self._db = db - self._model_changes: dict[object, tuple[t.Any, str]] = {} - - def get_bind( - self, - mapper: t.Any | None = None, - clause: t.Any | None = None, - bind: sa.engine.Engine | sa.engine.Connection | None = None, - **kwargs: t.Any, - ) -> sa.engine.Engine | sa.engine.Connection: - """Select an engine based on the ``bind_key`` of the metadata associated with - the model or table being queried. If no bind key is set, uses the default bind. - - .. versionchanged:: 3.0.3 - Fix finding the bind for a joined inheritance model. - - .. versionchanged:: 3.0 - The implementation more closely matches the base SQLAlchemy implementation. - - .. versionchanged:: 2.1 - Support joining an external transaction. 
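The removed record_queries.py only records anything when ``SQLALCHEMY_RECORD_QUERIES`` is enabled. A minimal sketch of reading the recorded info in an ``after_request`` hook; the app setup and the 0.5 s threshold are assumptions for illustration.

.. code-block:: python

    from flask import Flask
    from flask_sqlalchemy import SQLAlchemy
    from flask_sqlalchemy.record_queries import get_recorded_queries

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"
    app.config["SQLALCHEMY_RECORD_QUERIES"] = True  # enables recording per request
    db = SQLAlchemy(app)

    @app.after_request
    def log_slow_queries(response):
        # Each entry carries statement, parameters, duration and location.
        for info in get_recorded_queries():
            if info.duration >= 0.5:  # threshold in seconds, arbitrary
                app.logger.warning("slow query (%.3fs) at %s: %s",
                                   info.duration, info.location, info.statement)
        return response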
- """ - if bind is not None: - return bind - - engines = self._db.engines - - if mapper is not None: - try: - mapper = sa.inspect(mapper) - except sa_exc.NoInspectionAvailable as e: - if isinstance(mapper, type): - raise sa_orm.exc.UnmappedClassError(mapper) from e - - raise - - engine = _clause_to_engine(mapper.local_table, engines) - - if engine is not None: - return engine - - if clause is not None: - engine = _clause_to_engine(clause, engines) - - if engine is not None: - return engine - - if None in engines: - return engines[None] - - return super().get_bind(mapper=mapper, clause=clause, bind=bind, **kwargs) - - -def _clause_to_engine( - clause: sa.ClauseElement | None, - engines: t.Mapping[str | None, sa.engine.Engine], -) -> sa.engine.Engine | None: - """If the clause is a table, return the engine associated with the table's - metadata's bind key. - """ - table = None - - if clause is not None: - if isinstance(clause, sa.Table): - table = clause - elif isinstance(clause, sa.UpdateBase) and isinstance(clause.table, sa.Table): - table = clause.table - - if table is not None and "bind_key" in table.metadata.info: - key = table.metadata.info["bind_key"] - - if key not in engines: - raise sa_exc.UnboundExecutionError( - f"Bind key '{key}' is not in 'SQLALCHEMY_BINDS' config." - ) - - return engines[key] - - return None - - -def _app_ctx_id() -> int: - """Get the id of the current Flask application context for the session scope.""" - return id(app_ctx._get_current_object()) # type: ignore[attr-defined] diff --git a/venv/Lib/site-packages/flask_sqlalchemy/table.py b/venv/Lib/site-packages/flask_sqlalchemy/table.py deleted file mode 100644 index ab08a69..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/table.py +++ /dev/null @@ -1,39 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy as sa -import sqlalchemy.sql.schema as sa_sql_schema - - -class _Table(sa.Table): - @t.overload - def __init__( - self, - name: str, - *args: sa_sql_schema.SchemaItem, - bind_key: str | None = None, - **kwargs: t.Any, - ) -> None: - ... - - @t.overload - def __init__( - self, - name: str, - metadata: sa.MetaData, - *args: sa_sql_schema.SchemaItem, - **kwargs: t.Any, - ) -> None: - ... - - @t.overload - def __init__( - self, name: str, *args: sa_sql_schema.SchemaItem, **kwargs: t.Any - ) -> None: - ... - - def __init__( - self, name: str, *args: sa_sql_schema.SchemaItem, **kwargs: t.Any - ) -> None: - super().__init__(name, *args, **kwargs) # type: ignore[arg-type] diff --git a/venv/Lib/site-packages/flask_sqlalchemy/track_modifications.py b/venv/Lib/site-packages/flask_sqlalchemy/track_modifications.py deleted file mode 100644 index 7028b65..0000000 --- a/venv/Lib/site-packages/flask_sqlalchemy/track_modifications.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy as sa -import sqlalchemy.event as sa_event -import sqlalchemy.orm as sa_orm -from flask import current_app -from flask import has_app_context -from flask.signals import Namespace # type: ignore[attr-defined] - -if t.TYPE_CHECKING: - from .session import Session - -_signals = Namespace() - -models_committed = _signals.signal("models-committed") -"""This Blinker signal is sent after the session is committed if there were changed -models in the session. - -The sender is the application that emitted the changes. The receiver is passed the -``changes`` argument with a list of tuples in the form ``(instance, operation)``. 
-The operations are ``"insert"``, ``"update"``, and ``"delete"``. -""" - -before_models_committed = _signals.signal("before-models-committed") -"""This signal works exactly like :data:`models_committed` but is emitted before the -commit takes place. -""" - - -def _listen(session: sa_orm.scoped_session[Session]) -> None: - sa_event.listen(session, "before_flush", _record_ops, named=True) - sa_event.listen(session, "before_commit", _record_ops, named=True) - sa_event.listen(session, "before_commit", _before_commit) - sa_event.listen(session, "after_commit", _after_commit) - sa_event.listen(session, "after_rollback", _after_rollback) - - -def _record_ops(session: Session, **kwargs: t.Any) -> None: - if not has_app_context(): - return - - if not current_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]: - return - - for targets, operation in ( - (session.new, "insert"), - (session.dirty, "update"), - (session.deleted, "delete"), - ): - for target in targets: - state = sa.inspect(target) - key = state.identity_key if state.has_identity else id(target) - session._model_changes[key] = (target, operation) - - -def _before_commit(session: Session) -> None: - if not has_app_context(): - return - - app = current_app._get_current_object() # type: ignore[attr-defined] - - if not app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]: - return - - if session._model_changes: - changes = list(session._model_changes.values()) - before_models_committed.send(app, changes=changes) - - -def _after_commit(session: Session) -> None: - if not has_app_context(): - return - - app = current_app._get_current_object() # type: ignore[attr-defined] - - if not app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]: - return - - if session._model_changes: - changes = list(session._model_changes.values()) - models_committed.send(app, changes=changes) - session._model_changes.clear() - - -def _after_rollback(session: Session) -> None: - session._model_changes.clear() diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA deleted file mode 100644 index 30d6653..0000000 --- a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA +++ /dev/null @@ -1,672 +0,0 @@ -Metadata-Version: 2.4 -Name: frozenlist -Version: 1.8.0 -Summary: A list-like structure which implements collections.abc.MutableSequence -Home-page: https://github.com/aio-libs/frozenlist -Maintainer: aiohttp team -Maintainer-email: team@aiohttp.org -License: Apache-2.0 -Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org -Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org -Project-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions -Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist -Project-URL: Docs: Changelog, https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst#changelog -Project-URL: Docs: RTD, https://frozenlist.aio-libs.org -Project-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/frozenlist -Classifier: Development Status :: 5 - Production/Stable 
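The removed track_modifications.py emits the Blinker signals documented above only when ``SQLALCHEMY_TRACK_MODIFICATIONS`` is enabled. A hedged sketch of a receiver; the import path matches the removed module, while the app wiring and log message are illustrative.

.. code-block:: python

    from flask import Flask
    from flask_sqlalchemy import SQLAlchemy
    from flask_sqlalchemy.track_modifications import models_committed

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = True  # off by default
    db = SQLAlchemy(app)

    def on_models_committed(sender, changes):
        # changes is a list of (instance, operation) tuples;
        # operation is "insert", "update" or "delete".
        for instance, operation in changes:
            sender.logger.info("%s %r", operation, instance)

    # Connect the receiver for this application only.
    models_committed.connect(on_models_committed, app)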
-Classifier: Intended Audience :: Developers -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Cython -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=3.9 -Description-Content-Type: text/x-rst -License-File: LICENSE -Dynamic: license-file - -frozenlist -========== - -.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg - :target: https://github.com/aio-libs/frozenlist/actions - :alt: GitHub status for master branch - -.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg?flag=pytest - :target: https://codecov.io/gh/aio-libs/frozenlist?flags[]=pytest - :alt: codecov.io status for master branch - -.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white - :target: https://pypi.org/project/frozenlist - :alt: frozenlist @ PyPI - -.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest - :target: https://frozenlist.aio-libs.org - :alt: Read The Docs build status badge - -.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - :target: https://matrix.to/#/%23aio-libs:matrix.org - :alt: Matrix Room — #aio-libs:matrix.org - -.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - :target: https://matrix.to/#/%23aio-libs-space:matrix.org - :alt: Matrix Space — #aio-libs-space:matrix.org - -Introduction ------------- - -``frozenlist.FrozenList`` is a list-like structure which implements -``collections.abc.MutableSequence``. The list is *mutable* until ``FrozenList.freeze`` -is called, after which list modifications raise ``RuntimeError``: - - ->>> from frozenlist import FrozenList ->>> fl = FrozenList([17, 42]) ->>> fl.append('spam') ->>> fl.append('Vikings') ->>> fl - ->>> fl.freeze() ->>> fl - ->>> fl.frozen -True ->>> fl.append("Monty") -Traceback (most recent call last): - File "", line 1, in - File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append - self._check_frozen() - File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen - raise RuntimeError("Cannot modify frozen list.") -RuntimeError: Cannot modify frozen list. - - -FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError: - - ->>> fl = FrozenList([17, 42, 'spam']) ->>> hash(fl) -Traceback (most recent call last): - File "", line 1, in - File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__ - raise RuntimeError("Cannot hash unfrozen list.") -RuntimeError: Cannot hash unfrozen list. 
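A compact, runnable version of the ``FrozenList`` behaviour the README above demonstrates (mutable until frozen, then hashable and usable as a dict key):

.. code-block:: python

    from frozenlist import FrozenList

    fl = FrozenList([17, 42])
    fl.append("spam")          # mutable until freeze() is called
    assert not fl.frozen

    fl.freeze()
    assert fl.frozen
    try:
        fl.append("Monty")     # modifying a frozen list raises RuntimeError
    except RuntimeError as exc:
        print(exc)             # Cannot modify frozen list.

    lookup = {fl: "Vikings"}   # hashable only once frozen, so usable as a dict key
    assert lookup[fl] == "Vikings"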
->>> fl.freeze() ->>> hash(fl) -3713081631934410656 ->>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key ->>> dictionary -{: 'Vikings'} - - -Installation ------------- - -:: - - $ pip install frozenlist - - -Documentation -------------- - -https://frozenlist.aio-libs.org - -Communication channels ----------------------- - -We have a *Matrix Space* `#aio-libs-space:matrix.org -`_ which is -also accessible via Gitter. - -License -------- - -``frozenlist`` is offered under the Apache 2 license. - -Source code ------------ - -The project is hosted on GitHub_ - -Please file an issue in the `bug tracker -`_ if you have found a bug -or have some suggestions to improve the library. - -.. _GitHub: https://github.com/aio-libs/frozenlist - -========= -Changelog -========= - -.. - You should *NOT* be adding new change log entries to this file, this - file is managed by towncrier. You *may* edit previous change logs to - fix problems like typo corrections or such. - To add a new change log entry, please see - https://pip.pypa.io/en/latest/development/contributing/#news-entries - we named the news folder "changes". - - WARNING: Don't drop the next directive! - -.. towncrier release notes start - -v1.8.0 -====== - -*(2025-10-05)* - - -Contributor-facing changes --------------------------- - -- The ``reusable-cibuildwheel.yml`` workflow has been refactored to - be more generic and ``ci-cd.yml`` now holds all the configuration - toggles -- by `@webknjaz `__. - - *Related issues and pull requests on GitHub:* - `#668 `__. - -- When building wheels, the source distribution is now passed directly - to the ``cibuildwheel`` invocation -- by `@webknjaz `__. - - *Related issues and pull requests on GitHub:* - `#669 `__. - -- Builds and tests have been added to - ``ci-cd.yml`` for arm64 Windows wheels -- by `@finnagin `__. - - *Related issues and pull requests on GitHub:* - `#677 `__. - -- Started building wheels for CPython 3.14 -- by `@kumaraditya303 `__. - - *Related issues and pull requests on GitHub:* - `#681 `__, `#682 `__. - -- Removed ``--config-settings=pure-python=false`` from ``requirements/dev.txt``. - Developers on CPython still get accelerated builds by default. To explicitly build - a pure Python wheel, use ``pip install -e . --config-settings=pure-python=true`` - -- by `@bdraco `__. - - *Related issues and pull requests on GitHub:* - `#687 `__. - - ----- - - -v1.7.0 -====== - -*(2025-06-09)* - - -Features --------- - -- Added deepcopy support to FrozenList -- by `@bdraco `__. - - *Related issues and pull requests on GitHub:* - `#659 `__. - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Fixed an issue where ``frozenlist`` binary wheels would be built with debugging symbols and line tracing enabled, which significantly impacted performance. Line tracing is now disabled by default and can only be enabled explicitly -- by `@bdraco `__. - - This change ensures that production builds are optimized for performance. Developers who need line tracing for debugging purposes can still enable it by: - - 1. Setting the ``FROZENLIST_CYTHON_TRACING`` environment variable - 2. Using the ``--config-setting=with-cython-tracing=true`` option with pip - - *Related issues and pull requests on GitHub:* - `#660 `__. - -- Enabled ``PIP_CONSTRAINT`` environment variable in the build configuration to ensure the pinned Cython version from ``requirements/cython.txt`` is used during wheel builds. - - *Related issues and pull requests on GitHub:* - `#661 `__. 
- - ----- - - -v1.6.2 -====== - -*(2025-06-03)* - - -No significant changes. - - ----- - - -v1.6.1 -====== - -*(2025-06-02)* - - -Bug fixes ---------- - -- Correctly use ``cimport`` for including ``PyBool_FromLong`` -- by `@lysnikolaou `__. - - *Related issues and pull requests on GitHub:* - `#653 `__. - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Exclude ``_frozenlist.cpp`` from bdists/wheels -- by `@musicinmybrain `__. - - *Related issues and pull requests on GitHub:* - `#649 `__. - -- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou `__. - - *Related issues and pull requests on GitHub:* - `#654 `__. - - ----- - - -v1.6.0 -====== - -*(2025-04-17)* - - -Bug fixes ---------- - -- Stopped implicitly allowing the use of Cython pre-release versions when - building the distribution package -- by `@ajsanchezsanz `__ and - `@markgreene74 `__. - - *Related commits on GitHub:* - `41591f2 `__. - - -Features --------- - -- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou `__. - - *Related issues and pull requests on GitHub:* - `#618 `__. - -- Started building armv7l wheels -- by `@bdraco `__. - - *Related issues and pull requests on GitHub:* - `#642 `__. - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Stopped implicitly allowing the use of Cython pre-release versions when - building the distribution package -- by `@ajsanchezsanz `__ and - `@markgreene74 `__. - - *Related commits on GitHub:* - `41591f2 `__. - -- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou `__. - - *Related issues and pull requests on GitHub:* - `#618 `__. - -- The packaging metadata switched to including an SPDX license identifier introduced in `PEP 639 `__ -- by `@cdce8p `__. - - *Related issues and pull requests on GitHub:* - `#639 `__. - - -Contributor-facing changes --------------------------- - -- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem - dependencies using `re-actors/cache-python-deps`_ -- an action by - `@webknjaz `__ that takes into account ABI stability and the exact - version of Python runtime. - - .. _`re-actors/cache-python-deps`: - https://github.com/marketplace/actions/cache-python-deps - - *Related issues and pull requests on GitHub:* - `#633 `__. - -- Organized dependencies into test and lint dependencies so that no - unnecessary ones are installed during CI runs -- by `@lysnikolaou `__. - - *Related issues and pull requests on GitHub:* - `#636 `__. - - ----- - - -1.5.0 (2024-10-22) -================== - -Bug fixes ---------- - -- An incorrect signature of the ``__class_getitem__`` class method - has been fixed, adding a missing ``class_item`` argument under - Python 3.8 and older. - - This change also improves the code coverage of this method that - was previously missing -- by `@webknjaz `__. - - - *Related issues and pull requests on GitHub:* - `#567 `__, `#571 `__. - - -Improved documentation ----------------------- - -- Rendered issue, PR, and commit links now lead to - ``frozenlist``'s repo instead of ``yarl``'s repo. - - - *Related issues and pull requests on GitHub:* - `#573 `__. - -- On the ``Contributing docs`` page, - a link to the ``Towncrier philosophy`` has been fixed. - - - *Related issues and pull requests on GitHub:* - `#574 `__. 
- - -Packaging updates and notes for downstreams -------------------------------------------- - -- A name of a temporary building directory now reflects - that it's related to ``frozenlist``, not ``yarl``. - - - *Related issues and pull requests on GitHub:* - `#573 `__. - -- Declared Python 3.13 supported officially in the distribution package metadata. - - - *Related issues and pull requests on GitHub:* - `#595 `__. - - ----- - - -1.4.1 (2023-12-15) -================== - -Packaging updates and notes for downstreams -------------------------------------------- - -- Declared Python 3.12 and PyPy 3.8-3.10 supported officially - in the distribution package metadata. - - - *Related issues and pull requests on GitHub:* - `#553 `__. - -- Replaced the packaging is replaced from an old-fashioned ``setup.py`` to an - in-tree `PEP 517 `__ build backend -- by `@webknjaz `__. - - Whenever the end-users or downstream packagers need to build ``frozenlist`` - from source (a Git checkout or an sdist), they may pass a ``config_settings`` - flag ``pure-python``. If this flag is not set, a C-extension will be built - and included into the distribution. - - Here is how this can be done with ``pip``: - - .. code-block:: console - - $ python3 -m pip install . --config-settings=pure-python= - - This will also work with ``-e | --editable``. - - The same can be achieved via ``pypa/build``: - - .. code-block:: console - - $ python3 -m build --config-setting=pure-python= - - Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source - directly, as opposed to building an ``sdist`` and then building from it. - - - *Related issues and pull requests on GitHub:* - `#560 `__. - - -Contributor-facing changes --------------------------- - -- It is now possible to request line tracing in Cython builds using the - ``with-cython-tracing`` `PEP 517 `__ config setting - -- `@webknjaz `__. - - This can be used in CI and development environment to measure coverage - on Cython modules, but is not normally useful to the end-users or - downstream packagers. - - Here's a usage example: - - .. code-block:: console - - $ python3 -Im pip install . --config-settings=with-cython-tracing=true - - For editable installs, this setting is on by default. Otherwise, it's - off unless requested explicitly. - - The following produces C-files required for the Cython coverage - plugin to map the measurements back to the PYX-files: - - .. code-block:: console - - $ python -Im pip install -e . - - Alternatively, the ``FROZENLIST_CYTHON_TRACING=1`` environment variable - can be set to do the same as the `PEP 517 `__ config setting. - - - *Related issues and pull requests on GitHub:* - `#560 `__. - -- Coverage collection has been implemented for the Cython modules - -- by `@webknjaz `__. - - It will also be reported to Codecov from any non-release CI jobs. - - - *Related issues and pull requests on GitHub:* - `#561 `__. - -- A step-by-step ``Release Guide`` guide has - been added, describing how to release *frozenlist* -- by `@webknjaz `__. - - This is primarily targeting the maintainers. - - - *Related issues and pull requests on GitHub:* - `#563 `__. - -- Detailed ``Contributing Guidelines`` on - authoring the changelog fragments have been published in the - documentation -- by `@webknjaz `__. - - - *Related issues and pull requests on GitHub:* - `#564 `__. - - ----- - - -1.4.0 (2023-07-12) -================== - -The published source distribution package became buildable -under Python 3.12. 
- - ----- - - -Bugfixes --------- - -- Removed an unused ``typing.Tuple`` import - `#411 `_ - - -Deprecations and Removals -------------------------- - -- Dropped Python 3.7 support. - `#413 `_ - - -Misc ----- - -- `#410 `_, `#433 `_ - - ----- - - -1.3.3 (2022-11-08) -================== - -- Fixed CI runs when creating a new release, where new towncrier versions - fail when the current version section is already present. - - ----- - - -1.3.2 (2022-11-08) -================== - -Misc ----- - -- Updated the CI runs to better check for test results and to avoid deprecated syntax. `#327 `_ - - ----- - - -1.3.1 (2022-08-02) -================== - -The published source distribution package became buildable -under Python 3.11. - - ----- - - -1.3.0 (2022-01-18) -================== - -Bugfixes --------- - -- Do not install C sources with binary distributions. - `#250 `_ - - -Deprecations and Removals -------------------------- - -- Dropped Python 3.6 support - `#274 `_ - - ----- - - -1.2.0 (2021-10-16) -================== - -Features --------- - -- ``FrozenList`` now supports being used as a generic type as per PEP 585, e.g. ``frozen_int_list: FrozenList[int]`` (requires Python 3.9 or newer). - `#172 `_ -- Added support for Python 3.10. - `#227 `_ -- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. - `#227 `_ -- Started shipping platform-specific arm64 wheels for Apple Silicon. - `#227 `_ - - ----- - - -1.1.1 (2020-11-14) -================== - -Bugfixes --------- - -- Provide x86 Windows wheels. - `#169 `_ - - ----- - - -1.1.0 (2020-10-13) -================== - -Features --------- - -- Add support for hashing of a frozen list. - `#136 `_ - -- Support Python 3.8 and 3.9. - -- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on - Linux as well as ``x86_64``. - - ----- - - -1.0.0 (2019-11-09) -================== - -Deprecations and Removals -------------------------- - -- Dropped support for Python 3.5; only 3.6, 3.7 and 3.8 are supported going forward. 
- `#24 `_ diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD deleted file mode 100644 index 86e098c..0000000 --- a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD +++ /dev/null @@ -1,13 +0,0 @@ -frozenlist-1.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -frozenlist-1.8.0.dist-info/METADATA,sha256=Qnius9GPH4t-avW2dxupqPMnASpC1kUJ-dQIe1X3GpQ,21005 -frozenlist-1.8.0.dist-info/RECORD,, -frozenlist-1.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -frozenlist-1.8.0.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101 -frozenlist-1.8.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 -frozenlist-1.8.0.dist-info/top_level.txt,sha256=jivtxsPXA3nK3WBWW2LW5Mtu_GHt8UZA13NeCs2cKuA,11 -frozenlist/__init__.py,sha256=xAIE2u9ncAbjATGIPfno_OJfe8AQ-1h7z_uc73dYsEA,2108 -frozenlist/__init__.pyi,sha256=vMEoES1xGegPtVXoCi9XydEeHsyuIq-KdeXwP5PdsaA,1470 -frozenlist/__pycache__/__init__.cpython-312.pyc,, -frozenlist/_frozenlist.cp312-win_amd64.pyd,sha256=rAPOTWexA69W76WkNpVMbF5NoOr-yHijEpjyuM6cb5Y,69632 -frozenlist/_frozenlist.pyx,sha256=t-aGjuEiVt_MZPBJ0RnraavVmPBK6arz3i48ZvXuYsU,3708 -frozenlist/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/REQUESTED b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL deleted file mode 100644 index 10ac2c2..0000000 --- a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE deleted file mode 100644 index 7082a2d..0000000 --- a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2013-2019 Nikolay Kim and Andrew Svetlov - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt b/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt deleted file mode 100644 index 52f13fc..0000000 --- a/venv/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -frozenlist diff --git a/venv/Lib/site-packages/frozenlist/__init__.py b/venv/Lib/site-packages/frozenlist/__init__.py deleted file mode 100644 index 41c8595..0000000 --- a/venv/Lib/site-packages/frozenlist/__init__.py +++ /dev/null @@ -1,86 +0,0 @@ -import os -import types -from collections.abc import MutableSequence -from functools import total_ordering - -__version__ = "1.8.0" - -__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...] 
- - -NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool - - -@total_ordering -class FrozenList(MutableSequence): - __slots__ = ("_frozen", "_items") - __class_getitem__ = classmethod(types.GenericAlias) - - def __init__(self, items=None): - self._frozen = False - if items is not None: - items = list(items) - else: - items = [] - self._items = items - - @property - def frozen(self): - return self._frozen - - def freeze(self): - self._frozen = True - - def __getitem__(self, index): - return self._items[index] - - def __setitem__(self, index, value): - if self._frozen: - raise RuntimeError("Cannot modify frozen list.") - self._items[index] = value - - def __delitem__(self, index): - if self._frozen: - raise RuntimeError("Cannot modify frozen list.") - del self._items[index] - - def __len__(self): - return self._items.__len__() - - def __iter__(self): - return self._items.__iter__() - - def __reversed__(self): - return self._items.__reversed__() - - def __eq__(self, other): - return list(self) == other - - def __le__(self, other): - return list(self) <= other - - def insert(self, pos, item): - if self._frozen: - raise RuntimeError("Cannot modify frozen list.") - self._items.insert(pos, item) - - def __repr__(self): - return f"" - - def __hash__(self): - if self._frozen: - return hash(tuple(self)) - else: - raise RuntimeError("Cannot hash unfrozen list.") - - -PyFrozenList = FrozenList - - -if not NO_EXTENSIONS: - try: - from ._frozenlist import FrozenList as CFrozenList # type: ignore - except ImportError: # pragma: no cover - pass - else: - FrozenList = CFrozenList # type: ignore diff --git a/venv/Lib/site-packages/frozenlist/__init__.pyi b/venv/Lib/site-packages/frozenlist/__init__.pyi deleted file mode 100644 index ae803ef..0000000 --- a/venv/Lib/site-packages/frozenlist/__init__.pyi +++ /dev/null @@ -1,47 +0,0 @@ -from typing import ( - Generic, - Iterable, - Iterator, - List, - MutableSequence, - Optional, - TypeVar, - Union, - overload, -) - -_T = TypeVar("_T") -_Arg = Union[List[_T], Iterable[_T]] - -class FrozenList(MutableSequence[_T], Generic[_T]): - def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ... - @property - def frozen(self) -> bool: ... - def freeze(self) -> None: ... - @overload - def __getitem__(self, i: int) -> _T: ... - @overload - def __getitem__(self, s: slice) -> FrozenList[_T]: ... - @overload - def __setitem__(self, i: int, o: _T) -> None: ... - @overload - def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... - @overload - def __delitem__(self, i: int) -> None: ... - @overload - def __delitem__(self, i: slice) -> None: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[_T]: ... - def __reversed__(self) -> Iterator[_T]: ... - def __eq__(self, other: object) -> bool: ... - def __le__(self, other: FrozenList[_T]) -> bool: ... - def __ne__(self, other: object) -> bool: ... - def __lt__(self, other: FrozenList[_T]) -> bool: ... - def __ge__(self, other: FrozenList[_T]) -> bool: ... - def __gt__(self, other: FrozenList[_T]) -> bool: ... - def insert(self, pos: int, item: _T) -> None: ... - def __repr__(self) -> str: ... - def __hash__(self) -> int: ... 
- -# types for C accelerators are the same -CFrozenList = PyFrozenList = FrozenList diff --git a/venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 16be6ce..0000000 Binary files a/venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd b/venv/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd deleted file mode 100644 index 2be3e19..0000000 Binary files a/venv/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/frozenlist/_frozenlist.pyx b/venv/Lib/site-packages/frozenlist/_frozenlist.pyx deleted file mode 100644 index a82d8c8..0000000 --- a/venv/Lib/site-packages/frozenlist/_frozenlist.pyx +++ /dev/null @@ -1,148 +0,0 @@ -# cython: freethreading_compatible = True -# distutils: language = c++ - -from cpython.bool cimport PyBool_FromLong -from libcpp.atomic cimport atomic - -import copy -import types -from collections.abc import MutableSequence - - -cdef class FrozenList: - __class_getitem__ = classmethod(types.GenericAlias) - - cdef atomic[bint] _frozen - cdef list _items - - def __init__(self, items=None): - self._frozen.store(False) - if items is not None: - items = list(items) - else: - items = [] - self._items = items - - @property - def frozen(self): - return PyBool_FromLong(self._frozen.load()) - - cdef object _check_frozen(self): - if self._frozen.load(): - raise RuntimeError("Cannot modify frozen list.") - - cdef inline object _fast_len(self): - return len(self._items) - - def freeze(self): - self._frozen.store(True) - - def __getitem__(self, index): - return self._items[index] - - def __setitem__(self, index, value): - self._check_frozen() - self._items[index] = value - - def __delitem__(self, index): - self._check_frozen() - del self._items[index] - - def __len__(self): - return self._fast_len() - - def __iter__(self): - return self._items.__iter__() - - def __reversed__(self): - return self._items.__reversed__() - - def __richcmp__(self, other, op): - if op == 0: # < - return list(self) < other - if op == 1: # <= - return list(self) <= other - if op == 2: # == - return list(self) == other - if op == 3: # != - return list(self) != other - if op == 4: # > - return list(self) > other - if op == 5: # => - return list(self) >= other - - def insert(self, pos, item): - self._check_frozen() - self._items.insert(pos, item) - - def __contains__(self, item): - return item in self._items - - def __iadd__(self, items): - self._check_frozen() - self._items += list(items) - return self - - def index(self, item): - return self._items.index(item) - - def remove(self, item): - self._check_frozen() - self._items.remove(item) - - def clear(self): - self._check_frozen() - self._items.clear() - - def extend(self, items): - self._check_frozen() - self._items += list(items) - - def reverse(self): - self._check_frozen() - self._items.reverse() - - def pop(self, index=-1): - self._check_frozen() - return self._items.pop(index) - - def append(self, item): - self._check_frozen() - return self._items.append(item) - - def count(self, item): - return self._items.count(item) - - def __repr__(self): - return ''.format(self._frozen.load(), - self._items) - - def __hash__(self): - if self._frozen.load(): - return hash(tuple(self._items)) - else: - raise RuntimeError("Cannot hash unfrozen list.") - - def 
__deepcopy__(self, memo): - cdef FrozenList new_list - obj_id = id(self) - - # Return existing copy if already processed (circular reference) - if obj_id in memo: - return memo[obj_id] - - # Create new instance and register immediately - new_list = self.__class__([]) - memo[obj_id] = new_list - - # Deep copy items - new_list._items[:] = [copy.deepcopy(item, memo) for item in self._items] - - # Preserve frozen state - if self._frozen.load(): - new_list.freeze() - - return new_list - - -MutableSequence.register(FrozenList) diff --git a/venv/Lib/site-packages/frozenlist/py.typed b/venv/Lib/site-packages/frozenlist/py.typed deleted file mode 100644 index f5642f7..0000000 --- a/venv/Lib/site-packages/frozenlist/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/INSTALLER b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/METADATA b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/METADATA deleted file mode 100644 index 550a670..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/METADATA +++ /dev/null @@ -1,117 +0,0 @@ -Metadata-Version: 2.4 -Name: greenlet -Version: 3.3.0 -Summary: Lightweight in-process concurrent programming -Home-page: https://greenlet.readthedocs.io/ -Author: Alexey Borzenkov -Author-email: snaury@gmail.com -Maintainer: Jason Madden -Maintainer-email: jason@seecoresoftware.com -License: MIT AND Python-2.0 -Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues -Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ -Project-URL: Documentation, https://greenlet.readthedocs.io/ -Project-URL: Changes, https://greenlet.readthedocs.io/en/latest/changes.html -Keywords: greenlet coroutine concurrency threads cooperative -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: Programming Language :: C -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Operating System :: OS Independent -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.10 -Description-Content-Type: text/x-rst -License-File: LICENSE -License-File: LICENSE.PSF -Provides-Extra: docs -Requires-Dist: Sphinx; extra == "docs" -Requires-Dist: furo; extra == "docs" -Provides-Extra: test -Requires-Dist: objgraph; extra == "test" -Requires-Dist: psutil; extra == "test" -Requires-Dist: setuptools; extra == "test" -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: description-content-type -Dynamic: home-page -Dynamic: keywords -Dynamic: license -Dynamic: license-file -Dynamic: maintainer -Dynamic: maintainer-email -Dynamic: platform -Dynamic: project-url -Dynamic: provides-extra -Dynamic: requires-python -Dynamic: summary - -.. This file is included into docs/history.rst - - -Greenlets are lightweight coroutines for in-process concurrent -programming. 
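As a rough illustration of the explicit, cooperative switching model this deleted package description refers to, here is a minimal sketch using only the public greenlet API (greenlet.greenlet and switch). It is an illustrative example written for this note, not part of the removed files, and the printed strings are invented.

    from greenlet import greenlet

    def task1():
        print("task1: start")
        g2.switch()              # hand control to task2 explicitly
        print("task1: resumed")  # reached only when task2 switches back

    def task2():
        print("task2: running")
        g1.switch()              # resume task1 right after its g2.switch() call

    g1 = greenlet(task1)
    g2 = greenlet(task2)
    g1.switch()                  # run task1; the main greenlet resumes when task1 finishes

Nothing is scheduled implicitly: control moves only at the switch() calls, which is the "no implicit scheduling" property described below.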
- -The "greenlet" package is a spin-off of `Stackless`_, a version of -CPython that supports micro-threads called "tasklets". Tasklets run -pseudo-concurrently (typically in a single or a few OS-level threads) -and are synchronized with data exchanges on "channels". - -A "greenlet", on the other hand, is a still more primitive notion of -micro-thread with no implicit scheduling; coroutines, in other words. -This is useful when you want to control exactly when your code runs. -You can build custom scheduled micro-threads on top of greenlet; -however, it seems that greenlets are useful on their own as a way to -make advanced control flow structures. For example, we can recreate -generators; the difference with Python's own generators is that our -generators can call nested functions and the nested functions can -yield values too. (Additionally, you don't need a "yield" keyword. See -the example in `test_generator.py -`_). - -Greenlets are provided as a C extension module for the regular unmodified -interpreter. - -.. _`Stackless`: http://www.stackless.com - - -Who is using Greenlet? -====================== - -There are several libraries that use Greenlet as a more flexible -alternative to Python's built in coroutine support: - - - `Concurrence`_ - - `Eventlet`_ - - `Gevent`_ - -.. _Concurrence: http://opensource.hyves.org/concurrence/ -.. _Eventlet: http://eventlet.net/ -.. _Gevent: http://www.gevent.org/ - -Getting Greenlet -================ - -The easiest way to get Greenlet is to install it with pip:: - - pip install greenlet - - -Source code archives and binary distributions are available on the -python package index at https://pypi.org/project/greenlet - -The source code repository is hosted on github: -https://github.com/python-greenlet/greenlet - -Documentation is available on readthedocs.org: -https://greenlet.readthedocs.io diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/RECORD b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/RECORD deleted file mode 100644 index 309aebd..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/RECORD +++ /dev/null @@ -1,122 +0,0 @@ -../../include/site/python3.12/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 -greenlet-3.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -greenlet-3.3.0.dist-info/METADATA,sha256=z34cVbU89R99TLnlubPWZOm7mz57d-J2zYknh8orBdE,4239 -greenlet-3.3.0.dist-info/RECORD,, -greenlet-3.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -greenlet-3.3.0.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101 -greenlet-3.3.0.dist-info/licenses/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434 -greenlet-3.3.0.dist-info/licenses/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424 -greenlet-3.3.0.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9 -greenlet/CObjects.cpp,sha256=OPej1bWBgc4sRrTRQ2aFFML9pzDYKlKhlJSjsI0X_eU,3508 -greenlet/PyGreenlet.cpp,sha256=rEKSIiAJ_IT2k0uZbUbtOpRNUmUcpKFMLLR99dPkFRU,25314 -greenlet/PyGreenlet.hpp,sha256=2ZQlOxYNoy7QwD7mppFoOXe_At56NIsJ0eNsE_hoSsw,1463 -greenlet/PyGreenletUnswitchable.cpp,sha256=PQE0fSZa_IOyUM44IESHkJoD2KtGW3dkhkmZSYY3WHs,4375 -greenlet/PyModule.cpp,sha256=uBC2FOruNKOlcD8FmSI0CkTJkrmRU71aKYwnvY14LvU,8649 -greenlet/TBrokenGreenlet.cpp,sha256=smN26uC7ahAbNYiS10rtWPjCeTG4jevM8siA2sjJiXg,1021 -greenlet/TExceptionState.cpp,sha256=U7Ctw9fBdNraS0d174MoQW7bN-ae209Ta0JuiKpcpVI,1359 
-greenlet/TGreenlet.cpp,sha256=IM4cHsv1drEl35d7n8YOA_wR-R7oRvx5XhOJOK2PBB8,25732 -greenlet/TGreenlet.hpp,sha256=5HkQ_yzOPIySl7AL_1aO-z1WGeXxms8-OLygyWTap_Q,28700 -greenlet/TGreenletGlobals.cpp,sha256=YyEmDjKf1g32bsL-unIUScFLnnA1fzLWf2gOMd-D0Zw,3264 -greenlet/TMainGreenlet.cpp,sha256=pBqCkp_ck3Sv7TJ66g9QV1j7jZt378WpoBozVZ9Wh4M,3420 -greenlet/TPythonState.cpp,sha256=8CSu7xoB6Uliw7l-vRmgKbhPWxT2ByVmm9kfWu7QP3o,17155 -greenlet/TStackState.cpp,sha256=V444I8Jj9DhQz-9leVW_9dtiSRjaE1NMlgDG02Xxq-Y,7381 -greenlet/TThreadState.hpp,sha256=_ntdN33J1IBNucVsCctLtkspkvp3ud0PanVnSxLfj8k,19636 -greenlet/TThreadStateCreator.hpp,sha256=s-PCahFbp8mpTIJTOEDZWMKWWLPA2rC5clZX6tvxA4s,2620 -greenlet/TThreadStateDestroy.cpp,sha256=MShqLtCuUGaQIinFWe2b7Fz-GPmVVKAfV7FbGhb4GEc,8395 -greenlet/TUserGreenlet.cpp,sha256=uemg0lwKXtYB0yzmvyYdIIAsKnNkifXM1OJ2OlrFP1A,23553 -greenlet/__init__.py,sha256=2eWfnGH1UKF2uVvrBWDD0pDdH2aIgzTCs3COImZuiMw,1723 -greenlet/__pycache__/__init__.cpython-312.pyc,, -greenlet/_greenlet.cp312-win_amd64.pyd,sha256=u_88D3Da2h6473fXx9QbiZuTM9B55NVxc4CF1ai7Tc0,220672 -greenlet/greenlet.cpp,sha256=menzWlidfN2DfBJYBAcB8BpUOGK1XgMLukeCsERSp_c,11093 -greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 -greenlet/greenlet_allocator.hpp,sha256=n28rwj76RVSn7B5QDA00nL8OBjfFeiOM1QGrVrHhfsk,1835 -greenlet/greenlet_compiler_compat.hpp,sha256=nRxpLN9iNbnLVyFDeVmOwyeeNm6scQrOed1l7JQYMCM,4346 -greenlet/greenlet_cpython_compat.hpp,sha256=kJG6d_yDwwl3bSZOOFqM3ks1UzVIGcwbsTM2s8C6VYE,4149 -greenlet/greenlet_exceptions.hpp,sha256=06Bx81DtVaJTa6RtiMcV141b-XHv4ppEgVItkblcLWY,4503 -greenlet/greenlet_internal.hpp,sha256=Ajc-_09W4xWzm9XfyXHAeQAFUgKGKsnJwYsTCoNy3ns,2709 -greenlet/greenlet_msvc_compat.hpp,sha256=0MyaiyoCE_A6UROXZlMQRxRS17gfyh0d7NUppU3EVFc,2978 -greenlet/greenlet_refs.hpp,sha256=OnbA91yZf3QHH6-eJccvoNDAaN-pQBMMrclFU1Ot3J4,34436 -greenlet/greenlet_slp_switch.hpp,sha256=T1Y-w01yBBljePiHgUaWCs3XZSdtHSrtLvvkMXFDUN4,3298 -greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867 -greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -greenlet/platform/__pycache__/__init__.cpython-312.pyc,, -greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143 -greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307 -greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671 -greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748 -greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479 -greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892 -greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245 -greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746 -greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398 -greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331 -greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779 -greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928 -greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426 -greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860 
-greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815 -greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941 -greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759 -greenlet/platform/switch_ppc_macosx.h,sha256=Z6KN_ud0n6nC3ltJrNz2qtvER6vnRAVRNH9mdIDpMxY,2624 -greenlet/platform/switch_ppc_unix.h,sha256=-ZG7MSSPEA5N4qO9PQChtyEJ-Fm6qInhyZm_ZBHTtMg,2652 -greenlet/platform/switch_riscv_unix.h,sha256=606V6ACDf79Fz_WGItnkgbjIJ0pGg_sHmPyDxQYKK58,949 -greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763 -greenlet/platform/switch_sh_gcc.h,sha256=mcRJBTu-2UBf4kZtX601qofwuDuy-Y-hnxJtrcaB7do,901 -greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797 -greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509 -greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 -greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 -greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 -greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838 -greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059 -greenlet/slp_platformselect.h,sha256=hTb3GFdcPUYJTuu1MY93js7MZEax1_e5E-gflpi0RzI,3959 -greenlet/tests/__init__.py,sha256=EtTtQfpRDde0MhsdAM5Cm7LYIfS_HKUIFwquiH4Q7ac,9736 -greenlet/tests/__pycache__/__init__.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc,, -greenlet/tests/__pycache__/leakcheck.cpython-312.pyc,, -greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc,, -greenlet/tests/__pycache__/test_cpp.cpython-312.pyc,, -greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc,, -greenlet/tests/__pycache__/test_gc.cpython-312.pyc,, -greenlet/tests/__pycache__/test_generator.cpython-312.pyc,, -greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc,, -greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc,, -greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc,, -greenlet/tests/__pycache__/test_leaks.cpython-312.pyc,, -greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc,, -greenlet/tests/__pycache__/test_throw.cpython-312.pyc,, -greenlet/tests/__pycache__/test_tracing.cpython-312.pyc,, -greenlet/tests/__pycache__/test_version.cpython-312.pyc,, -greenlet/tests/__pycache__/test_weakref.cpython-312.pyc,, -greenlet/tests/_test_extension.c,sha256=DETtCa8cvPgQ2KrSQm9jlqZSlb_1x0o3axydmzgPohQ,6921 -greenlet/tests/_test_extension.cp312-win_amd64.pyd,sha256=3294tor4nMUAMk9AxTgW3whmWEif1DeEMrOUupBJpo8,14336 -greenlet/tests/_test_extension_cpp.cp312-win_amd64.pyd,sha256=s1xmtFdMsWKIatbtNMX9wIkrhIpG6oXb7vjsHoJODos,15872 -greenlet/tests/_test_extension_cpp.cpp,sha256=VbkGmOw9b6pnj5OsqQa7OC5aPfIBynx-aVQXXF9uWcE,6686 -greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263 
-greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985 -greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961 -greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524 -greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956 -greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285 -greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817 -greenlet/tests/leakcheck.py,sha256=f28zZf0MlMgcybbm_5YyIOR6HcDqP2f2k6LX-FxFGqA,12652 -greenlet/tests/test_contextvars.py,sha256=xutO-qZgKTwKsA9lAqTjIcTBEiQV4RpNKM-vO2_YCVU,10541 -greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736 -greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829 -greenlet/tests/test_gc.py,sha256=xrIreQr85eO8WlpHs6IWCa5C4ecIA6t2_IrkS76Fdjg,2922 -greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240 -greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718 -greenlet/tests/test_greenlet.py,sha256=oRrUAGEFb-GF8GNVEFsXYvzrayleF5qQDdA2QWeYEis,48439 -greenlet/tests/test_greenlet_trash.py,sha256=n2dBlQfOoEO1ODatFi8QdhboH3fB86YtqzcYMYOXxbw,7947 -greenlet/tests/test_leaks.py,sha256=OFSE870Zyql85HukfC_XYa2c4gDQBU889RV1AlLum74,18076 -greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446 -greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712 -greenlet/tests/test_tracing.py,sha256=NFD6Vcww8grBnFQFhCNdswwGetjLeLQ7vL2Qqw3LWBM,8591 -greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339 -greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883 diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/REQUESTED b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/WHEEL b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/WHEEL deleted file mode 100644 index 10ac2c2..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/licenses/LICENSE b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/licenses/LICENSE deleted file mode 100644 index b73a4a1..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -The following files are derived from Stackless Python and are subject to the -same license as Stackless Python: - - src/greenlet/slp_platformselect.h - files in src/greenlet/platform/ directory - -See LICENSE.PSF and http://www.stackless.com/ for details. 
- -Unless otherwise noted, the files in greenlet have been released under the -following MIT license: - -Copyright (c) Armin Rigo, Christian Tismer and contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/licenses/LICENSE.PSF b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/licenses/LICENSE.PSF deleted file mode 100644 index d3b509a..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/licenses/LICENSE.PSF +++ /dev/null @@ -1,47 +0,0 @@ -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011 Python Software Foundation; All Rights Reserved" are retained in Python -alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. 
Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/top_level.txt b/venv/Lib/site-packages/greenlet-3.3.0.dist-info/top_level.txt deleted file mode 100644 index 46725be..0000000 --- a/venv/Lib/site-packages/greenlet-3.3.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -greenlet diff --git a/venv/Lib/site-packages/greenlet/CObjects.cpp b/venv/Lib/site-packages/greenlet/CObjects.cpp deleted file mode 100644 index c135995..0000000 --- a/venv/Lib/site-packages/greenlet/CObjects.cpp +++ /dev/null @@ -1,157 +0,0 @@ -#ifndef COBJECTS_CPP -#define COBJECTS_CPP -/***************************************************************************** - * C interface - * - * These are exported using the CObject API - */ -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -#endif - -#include "greenlet_exceptions.hpp" - -#include "greenlet_internal.hpp" -#include "greenlet_refs.hpp" - - -#include "TThreadStateDestroy.cpp" - -#include "PyGreenlet.hpp" - -using greenlet::PyErrOccurred; -using greenlet::Require; - - - -extern "C" { -static PyGreenlet* -PyGreenlet_GetCurrent(void) -{ - return GET_THREAD_STATE().state().get_current().relinquish_ownership(); -} - -static int -PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) -{ - return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); -} - -static PyGreenlet* -PyGreenlet_New(PyObject* run, PyGreenlet* parent) -{ - using greenlet::refs::NewDictReference; - // In the past, we didn't use green_new and green_init, but that - // was a maintenance issue because we duplicated code. This way is - // much safer, but slightly slower. If that's a problem, we could - // refactor green_init to separate argument parsing from initialization. 
- OwnedGreenlet g = OwnedGreenlet::consuming(green_new(&PyGreenlet_Type, nullptr, nullptr)); - if (!g) { - return NULL; - } - - try { - NewDictReference kwargs; - if (run) { - kwargs.SetItem(mod_globs->str_run, run); - } - if (parent) { - kwargs.SetItem("parent", (PyObject*)parent); - } - - Require(green_init(g.borrow(), mod_globs->empty_tuple, kwargs.borrow())); - } - catch (const PyErrOccurred&) { - return nullptr; - } - - return g.relinquish_ownership(); -} - -static PyObject* -PyGreenlet_Switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return NULL; - } - - if (args == NULL) { - args = mod_globs->empty_tuple; - } - - if (kwargs == NULL || !PyDict_Check(kwargs)) { - kwargs = NULL; - } - - return green_switch(self, args, kwargs); -} - -static PyObject* -PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return nullptr; - } - try { - PyErrPieces err_pieces(typ, val, tb); - return internal_green_throw(self, err_pieces).relinquish_ownership(); - } - catch (const PyErrOccurred&) { - return nullptr; - } -} - - - -static int -Extern_PyGreenlet_MAIN(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return -1; - } - return self->pimpl->main(); -} - -static int -Extern_PyGreenlet_ACTIVE(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return -1; - } - return self->pimpl->active(); -} - -static int -Extern_PyGreenlet_STARTED(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return -1; - } - return self->pimpl->started(); -} - -static PyGreenlet* -Extern_PyGreenlet_GET_PARENT(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return NULL; - } - // This can return NULL even if there is no exception - return self->pimpl->parent().acquire(); -} -} // extern C. - -/** End C API ****************************************************************/ -#ifdef __clang__ -# pragma clang diagnostic pop -#endif - - -#endif diff --git a/venv/Lib/site-packages/greenlet/PyGreenlet.cpp b/venv/Lib/site-packages/greenlet/PyGreenlet.cpp deleted file mode 100644 index fd62241..0000000 --- a/venv/Lib/site-packages/greenlet/PyGreenlet.cpp +++ /dev/null @@ -1,774 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef PYGREENLET_CPP -#define PYGREENLET_CPP -/***************** -The Python slot functions for TGreenlet. 
- */ - - -#define PY_SSIZE_T_CLEAN -#include -#include "structmember.h" // PyMemberDef - -#include "greenlet_internal.hpp" -#include "TThreadStateDestroy.cpp" -#include "TGreenlet.hpp" -// #include "TUserGreenlet.cpp" -// #include "TMainGreenlet.cpp" -// #include "TBrokenGreenlet.cpp" - - -#include "greenlet_refs.hpp" -#include "greenlet_slp_switch.hpp" - -#include "greenlet_thread_support.hpp" -#include "TGreenlet.hpp" - -#include "TGreenletGlobals.cpp" -#include "TThreadStateDestroy.cpp" -#include "PyGreenlet.hpp" -// #include "TGreenlet.cpp" - -// #include "TExceptionState.cpp" -// #include "TPythonState.cpp" -// #include "TStackState.cpp" - -using greenlet::LockGuard; -using greenlet::LockInitError; -using greenlet::PyErrOccurred; -using greenlet::Require; - -using greenlet::g_handle_exit; -using greenlet::single_result; - -using greenlet::Greenlet; -using greenlet::UserGreenlet; -using greenlet::MainGreenlet; -using greenlet::BrokenGreenlet; -using greenlet::ThreadState; -using greenlet::PythonState; - - - -static PyGreenlet* -green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) -{ - PyGreenlet* o = - (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); - if (o) { - // Recall: borrowing or getting the current greenlet - // causes the "deleteme list" to get cleared. So constructing a greenlet - // can do things like cause other greenlets to get finalized. - UserGreenlet* c = new UserGreenlet(o, GET_THREAD_STATE().state().borrow_current()); - assert(Py_REFCNT(o) == 1); - // Also: This looks like a memory leak, but isn't. Constructing the - // C++ object assigns it to the pimpl pointer of the Python object (o); - // we'll need that later. - assert(c == o->pimpl); - } - return o; -} - - -// green_init is used in the tp_init slot. So it's important that -// it can be called directly from CPython. Thus, we don't use -// BorrowedGreenlet and BorrowedObject --- although in theory -// these should be binary layout compatible, that may not be -// guaranteed to be the case (32-bit linux ppc possibly). -static int -green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs) -{ - PyArgParseParam run; - PyArgParseParam nparent; - static const char* kwlist[] = { - "run", - "parent", - NULL - }; - - // recall: The O specifier does NOT increase the reference count. - if (!PyArg_ParseTupleAndKeywords( - args, kwargs, "|OO:green", (char**)kwlist, &run, &nparent)) { - return -1; - } - - if (run) { - if (green_setrun(self, run, NULL)) { - return -1; - } - } - if (nparent && !nparent.is_None()) { - return green_setparent(self, nparent, NULL); - } - return 0; -} - - - -static int -green_traverse(PyGreenlet* self, visitproc visit, void* arg) -{ - // We must only visit referenced objects, i.e. only objects - // Py_INCREF'ed by this greenlet (directly or indirectly): - // - // - stack_prev is not visited: holds previous stack pointer, but it's not - // referenced - // - frames are not visited as we don't strongly reference them; - // alive greenlets are not garbage collected - // anyway. This can be a problem, however, if this greenlet is - // never allowed to finish, and is referenced from the frame: we - // have an uncollectible cycle in that case. Note that the - // frame object itself is also frequently not even tracked by the GC - // starting with Python 3.7 (frames are allocated by the - // interpreter untracked, and only become tracked when their - // evaluation is finished if they have a refcount > 1). 
All of - // this is to say that we should probably strongly reference - // the frame object. Doing so, while always allowing GC on a - // greenlet, solves several leaks for us. - - Py_VISIT(self->dict); - if (!self->pimpl) { - // Hmm. I have seen this at interpreter shutdown time, - // I think. That's very odd because this doesn't go away until - // we're ``green_dealloc()``, at which point we shouldn't be - // traversed anymore. - return 0; - } - - return self->pimpl->tp_traverse(visit, arg); -} - -static int -green_is_gc(PyObject* _self) -{ - BorrowedGreenlet self(_self); - int result = 0; - /* Main greenlet can be garbage collected since it can only - become unreachable if the underlying thread exited. - Active greenlets --- including those that are suspended --- - cannot be garbage collected, however. - */ - if (self->main() || !self->active()) { - result = 1; - } - // The main greenlet pointer will eventually go away after the thread dies. - if (self->was_running_in_dead_thread()) { - // Our thread is dead! We can never run again. Might as well - // GC us. Note that if a tuple containing only us and other - // immutable objects had been scanned before this, when we - // would have returned 0, the tuple will take itself out of GC - // tracking and never be investigated again. So that could - // result in both us and the tuple leaking due to an - // unreachable/uncollectible reference. The same goes for - // dictionaries. - // - // It's not a great idea to be changing our GC state on the - // fly. - result = 1; - } - return result; -} - - -static int -green_clear(PyGreenlet* self) -{ - /* Greenlet is only cleared if it is about to be collected. - Since active greenlets are not garbage collectable, we can - be sure that, even if they are deallocated during clear, - nothing they reference is in unreachable or finalizers, - so even if it switches we are relatively safe. */ - // XXX: Are we responsible for clearing weakrefs here? - Py_CLEAR(self->dict); - return self->pimpl->tp_clear(); -} - -/** - * Returns 0 on failure (the object was resurrected) or 1 on success. - **/ -static int -_green_dealloc_kill_started_non_main_greenlet(BorrowedGreenlet self) -{ - /* Hacks hacks hacks copied from instance_dealloc() */ - /* Temporarily resurrect the greenlet. */ - assert(self.REFCNT() == 0); - Py_SET_REFCNT(self.borrow(), 1); - /* Save the current exception, if any. */ - PyErrPieces saved_err; - try { - // BY THE TIME WE GET HERE, the state may actually be going - // away - // if we're shutting down the interpreter and freeing thread - // entries, - // this could result in freeing greenlets that were leaked. So - // we can't try to read the state. - self->deallocing_greenlet_in_thread( - self->thread_state() - ? static_cast(GET_THREAD_STATE()) - : nullptr); - } - catch (const PyErrOccurred&) { - PyErr_WriteUnraisable(self.borrow_o()); - /* XXX what else should we do? */ - } - /* Check for no resurrection must be done while we keep - * our internal reference, otherwise PyFile_WriteObject - * causes recursion if using Py_INCREF/Py_DECREF - */ - if (self.REFCNT() == 1 && self->active()) { - /* Not resurrected, but still not dead! - XXX what else should we do? we complain. */ - PyObject* f = PySys_GetObject("stderr"); - Py_INCREF(self.borrow_o()); /* leak! */ - if (f != NULL) { - PyFile_WriteString("GreenletExit did not kill ", f); - PyFile_WriteObject(self.borrow_o(), f, 0); - PyFile_WriteString("\n", f); - } - } - /* Restore the saved exception. 
*/ - saved_err.PyErrRestore(); - /* Undo the temporary resurrection; can't use DECREF here, - * it would cause a recursive call. - */ - assert(self.REFCNT() > 0); - - Py_ssize_t refcnt = self.REFCNT() - 1; - Py_SET_REFCNT(self.borrow_o(), refcnt); - if (refcnt != 0) { - /* Resurrected! */ - _Py_NewReference(self.borrow_o()); - Py_SET_REFCNT(self.borrow_o(), refcnt); - /* Better to use tp_finalizer slot (PEP 442) - * and call ``PyObject_CallFinalizerFromDealloc``, - * but that's only supported in Python 3.4+; see - * Modules/_io/iobase.c for an example. - * TODO: We no longer run on anything that old, switch to finalizers. - * - * The following approach is copied from iobase.c in CPython 2.7. - * (along with much of this function in general). Here's their - * comment: - * - * When called from a heap type's dealloc, the type will be - * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). - * - * On free-threaded builds of CPython, the type is meant to be immortal - * so we probably shouldn't mess with this? See - * test_issue_245_reference_counting_subclass_no_threads - */ - if (PyType_HasFeature(self.TYPE(), Py_TPFLAGS_HEAPTYPE)) { - Py_INCREF(self.TYPE()); - } - - PyObject_GC_Track((PyObject*)self); - - GREENLET_Py_DEC_REFTOTAL; -#ifdef COUNT_ALLOCS - --Py_TYPE(self)->tp_frees; - --Py_TYPE(self)->tp_allocs; -#endif /* COUNT_ALLOCS */ - return 0; - } - return 1; -} - - -static void -green_dealloc(PyGreenlet* self) -{ - PyObject_GC_UnTrack(self); - BorrowedGreenlet me(self); - if (me->active() - && me->started() - && !me->main()) { - if (!_green_dealloc_kill_started_non_main_greenlet(me)) { - return; - } - } - - if (self->weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); - } - Py_CLEAR(self->dict); - - if (self->pimpl) { - // In case deleting this, which frees some memory, - // somehow winds up calling back into us. That's usually a - //bug in our code. - Greenlet* p = self->pimpl; - self->pimpl = nullptr; - delete p; - } - // and finally we're done. self is now invalid. 
- Py_TYPE(self)->tp_free((PyObject*)self); -} - - - -static OwnedObject -internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces) -{ - PyObject* result = nullptr; - err_pieces.PyErrRestore(); - assert(PyErr_Occurred()); - if (self->started() && !self->active()) { - /* dead greenlet: turn GreenletExit into a regular return */ - result = g_handle_exit(OwnedObject()).relinquish_ownership(); - } - self->args() <<= result; - - return single_result(self->g_switch()); -} - - - -PyDoc_STRVAR( - green_switch_doc, - "switch(*args, **kwargs)\n" - "\n" - "Switch execution to this greenlet.\n" - "\n" - "If this greenlet has never been run, then this greenlet\n" - "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" - "\n" - "If the greenlet is active (has been run, but was switch()'ed\n" - "out before leaving its run function), then this greenlet will\n" - "be resumed and the return value to its switch call will be\n" - "None if no arguments are given, the given argument if one\n" - "argument is given, or the args tuple and keyword args dict if\n" - "multiple arguments are given.\n" - "\n" - "If the greenlet is dead, or is the current greenlet then this\n" - "function will simply return the arguments using the same rules as\n" - "above.\n"); - -static PyObject* -green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) -{ - using greenlet::SwitchingArgs; - SwitchingArgs switch_args(OwnedObject::owning(args), OwnedObject::owning(kwargs)); - self->pimpl->may_switch_away(); - self->pimpl->args() <<= switch_args; - - // If we're switching out of a greenlet, and that switch is the - // last thing the greenlet does, the greenlet ought to be able to - // go ahead and die at that point. Currently, someone else must - // manually switch back to the greenlet so that we "fall off the - // end" and can perform cleanup. You'd think we'd be able to - // figure out that this is happening using the frame's ``f_lasti`` - // member, which is supposed to be an index into - // ``frame->f_code->co_code``, the bytecode string. However, in - // recent interpreters, ``f_lasti`` tends not to be updated thanks - // to things like the PREDICT() macros in ceval.c. So it doesn't - // really work to do that in many cases. For example, the Python - // code: - // def run(): - // greenlet.getcurrent().parent.switch() - // produces bytecode of len 16, with the actual call to switch() - // being at index 10 (in Python 3.10). However, the reported - // ``f_lasti`` we actually see is...5! (Which happens to be the - // second byte of the CALL_METHOD op for ``getcurrent()``). - - try { - //OwnedObject result = single_result(self->pimpl->g_switch()); - OwnedObject result(single_result(self->pimpl->g_switch())); -#ifndef NDEBUG - // Note that the current greenlet isn't necessarily self. If self - // finished, we went to one of its parents. - assert(!self->pimpl->args()); - - const BorrowedGreenlet& current = GET_THREAD_STATE().state().borrow_current(); - // It's possible it's never been switched to. - assert(!current->args()); -#endif - PyObject* p = result.relinquish_ownership(); - - if (!p && !PyErr_Occurred()) { - // This shouldn't be happening anymore, so the asserts - // are there for debug builds. Non-debug builds - // crash "gracefully" in this case, although there is an - // argument to be made for killing the process in all - // cases --- for this to be the case, our switches - // probably nested in an incorrect way, so the state is - // suspicious. 
Nothing should be corrupt though, just - // confused at the Python level. Letting this propagate is - // probably good enough. - assert(p || PyErr_Occurred()); - throw PyErrOccurred( - mod_globs->PyExc_GreenletError, - "Greenlet.switch() returned NULL without an exception set." - ); - } - return p; - } - catch(const PyErrOccurred&) { - return nullptr; - } -} - -PyDoc_STRVAR( - green_throw_doc, - "Switches execution to this greenlet, but immediately raises the\n" - "given exception in this greenlet. If no argument is provided, the " - "exception\n" - "defaults to `greenlet.GreenletExit`. The normal exception\n" - "propagation rules apply, as described for `switch`. Note that calling " - "this\n" - "method is almost equivalent to the following::\n" - "\n" - " def raiser():\n" - " raise typ, val, tb\n" - " g_raiser = greenlet(raiser, parent=g)\n" - " g_raiser.switch()\n" - "\n" - "except that this trick does not work for the\n" - "`greenlet.GreenletExit` exception, which would not propagate\n" - "from ``g_raiser`` to ``g``.\n"); - -static PyObject* -green_throw(PyGreenlet* self, PyObject* args) -{ - PyArgParseParam typ(mod_globs->PyExc_GreenletExit); - PyArgParseParam val; - PyArgParseParam tb; - - if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { - return nullptr; - } - - assert(typ.borrow() || val.borrow()); - - self->pimpl->may_switch_away(); - try { - // Both normalizing the error and the actual throw_greenlet - // could throw PyErrOccurred. - PyErrPieces err_pieces(typ.borrow(), val.borrow(), tb.borrow()); - - return internal_green_throw(self, err_pieces).relinquish_ownership(); - } - catch (const PyErrOccurred&) { - return nullptr; - } -} - -static int -green_bool(PyGreenlet* self) -{ - return self->pimpl->active(); -} - -/** - * CAUTION: Allocates memory, may run GC and arbitrary Python code. - */ -static PyObject* -green_getdict(PyGreenlet* self, void* UNUSED(context)) -{ - if (self->dict == NULL) { - self->dict = PyDict_New(); - if (self->dict == NULL) { - return NULL; - } - } - Py_INCREF(self->dict); - return self->dict; -} - -static int -green_setdict(PyGreenlet* self, PyObject* val, void* UNUSED(context)) -{ - PyObject* tmp; - - if (val == NULL) { - PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted"); - return -1; - } - if (!PyDict_Check(val)) { - PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary"); - return -1; - } - tmp = self->dict; - Py_INCREF(val); - self->dict = val; - Py_XDECREF(tmp); - return 0; -} - -static bool -_green_not_dead(BorrowedGreenlet self) -{ - // XXX: Where else should we do this? - // Probably on entry to most Python-facing functions? 
- if (self->was_running_in_dead_thread()) { - self->deactivate_and_free(); - return false; - } - return self->active() || !self->started(); -} - - -static PyObject* -green_getdead(PyGreenlet* self, void* UNUSED(context)) -{ - if (_green_not_dead(self)) { - Py_RETURN_FALSE; - } - else { - Py_RETURN_TRUE; - } -} - -static PyObject* -green_get_stack_saved(PyGreenlet* self, void* UNUSED(context)) -{ - return PyLong_FromSsize_t(self->pimpl->stack_saved()); -} - - -static PyObject* -green_getrun(PyGreenlet* self, void* UNUSED(context)) -{ - try { - OwnedObject result(BorrowedGreenlet(self)->run()); - return result.relinquish_ownership(); - } - catch(const PyErrOccurred&) { - return nullptr; - } -} - - -static int -green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)) -{ - try { - BorrowedGreenlet(self)->run(nrun); - return 0; - } - catch(const PyErrOccurred&) { - return -1; - } -} - -static PyObject* -green_getparent(PyGreenlet* self, void* UNUSED(context)) -{ - return BorrowedGreenlet(self)->parent().acquire_or_None(); -} - - -static int -green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)) -{ - try { - BorrowedGreenlet(self)->parent(nparent); - } - catch(const PyErrOccurred&) { - return -1; - } - return 0; -} - - -static PyObject* -green_getcontext(const PyGreenlet* self, void* UNUSED(context)) -{ - const Greenlet *const g = self->pimpl; - try { - OwnedObject result(g->context()); - return result.relinquish_ownership(); - } - catch(const PyErrOccurred&) { - return nullptr; - } -} - -static int -green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)) -{ - try { - BorrowedGreenlet(self)->context(nctx); - return 0; - } - catch(const PyErrOccurred&) { - return -1; - } -} - - -static PyObject* -green_getframe(PyGreenlet* self, void* UNUSED(context)) -{ - const PythonState::OwnedFrame& top_frame = BorrowedGreenlet(self)->top_frame(); - return top_frame.acquire_or_None(); -} - - -static PyObject* -green_getstate(PyGreenlet* self) -{ - PyErr_Format(PyExc_TypeError, - "cannot serialize '%s' object", - Py_TYPE(self)->tp_name); - return nullptr; -} - -static PyObject* -green_repr(PyGreenlet* _self) -{ - BorrowedGreenlet self(_self); - /* - Return a string like - - - The handling of greenlets across threads is not super good. - We mostly use the internal definitions of these terms, but they - generally should make sense to users as well. - */ - PyObject* result; - int never_started = !self->started() && !self->active(); - - const char* const tp_name = Py_TYPE(self)->tp_name; - - if (_green_not_dead(self)) { - /* XXX: The otid= is almost useless because you can't correlate it to - any thread identifier exposed to Python. We could use - PyThreadState_GET()->thread_id, but we'd need to save that in the - greenlet, or save the whole PyThreadState object itself. - - As it stands, its only useful for identifying greenlets from the same thread. - */ - const char* state_in_thread; - if (self->was_running_in_dead_thread()) { - // The thread it was running in is dead! - // This can happen, especially at interpreter shut down. - // It complicates debugging output because it may be - // impossible to access the current thread state at that - // time. Thus, don't access the current thread state. - state_in_thread = " (thread exited)"; - } - else { - state_in_thread = GET_THREAD_STATE().state().is_current(self) - ? " current" - : (self->started() ? 
" suspended" : ""); - } - result = PyUnicode_FromFormat( - "<%s object at %p (otid=%p)%s%s%s%s>", - tp_name, - self.borrow_o(), - self->thread_state(), - state_in_thread, - self->active() ? " active" : "", - never_started ? " pending" : " started", - self->main() ? " main" : "" - ); - } - else { - result = PyUnicode_FromFormat( - "<%s object at %p (otid=%p) %sdead>", - tp_name, - self.borrow_o(), - self->thread_state(), - self->was_running_in_dead_thread() - ? "(thread exited) " - : "" - ); - } - - return result; -} - - -static PyMethodDef green_methods[] = { - { - .ml_name="switch", - .ml_meth=reinterpret_cast(green_switch), - .ml_flags=METH_VARARGS | METH_KEYWORDS, - .ml_doc=green_switch_doc - }, - {.ml_name="throw", .ml_meth=(PyCFunction)green_throw, .ml_flags=METH_VARARGS, .ml_doc=green_throw_doc}, - {.ml_name="__getstate__", .ml_meth=(PyCFunction)green_getstate, .ml_flags=METH_NOARGS, .ml_doc=NULL}, - {.ml_name=NULL, .ml_meth=NULL} /* sentinel */ -}; - -static PyGetSetDef green_getsets[] = { - /* name, getter, setter, doc, context pointer */ - {.name="__dict__", .get=(getter)green_getdict, .set=(setter)green_setdict}, - {.name="run", .get=(getter)green_getrun, .set=(setter)green_setrun}, - {.name="parent", .get=(getter)green_getparent, .set=(setter)green_setparent}, - {.name="gr_frame", .get=(getter)green_getframe }, - { - .name="gr_context", - .get=(getter)green_getcontext, - .set=(setter)green_setcontext - }, - {.name="dead", .get=(getter)green_getdead}, - {.name="_stack_saved", .get=(getter)green_get_stack_saved}, - {.name=NULL} -}; - -static PyMemberDef green_members[] = { - {.name=NULL} -}; - -static PyNumberMethods green_as_number = { - .nb_bool=(inquiry)green_bool, -}; - - -PyTypeObject PyGreenlet_Type = { - .ob_base=PyVarObject_HEAD_INIT(NULL, 0) - .tp_name="greenlet.greenlet", /* tp_name */ - .tp_basicsize=sizeof(PyGreenlet), /* tp_basicsize */ - /* methods */ - .tp_dealloc=(destructor)green_dealloc, /* tp_dealloc */ - .tp_repr=(reprfunc)green_repr, /* tp_repr */ - .tp_as_number=&green_as_number, /* tp_as _number*/ - .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - .tp_doc="greenlet(run=None, parent=None) -> greenlet\n\n" - "Creates a new greenlet object (without running it).\n\n" - " - *run* -- The callable to invoke.\n" - " - *parent* -- The parent greenlet. The default is the current " - "greenlet.", /* tp_doc */ - .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ - .tp_clear=(inquiry)green_clear, /* tp_clear */ - .tp_weaklistoffset=offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */ - - .tp_methods=green_methods, /* tp_methods */ - .tp_members=green_members, /* tp_members */ - .tp_getset=green_getsets, /* tp_getset */ - .tp_dictoffset=offsetof(PyGreenlet, dict), /* tp_dictoffset */ - .tp_init=(initproc)green_init, /* tp_init */ - .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ - .tp_new=(newfunc)green_new, /* tp_new */ - .tp_free=PyObject_GC_Del, /* tp_free */ -#ifndef Py_GIL_DISABLED -/* - We may have been handling this wrong all along. - - It shows as a problem with the GIL disabled. In builds of 3.14 with - assertions enabled, we break the garbage collector if we *ever* - return false from this function. 
The docs say this is to distinguish - some objects that are collectable vs some that are not, specifically - giving the example of PyTypeObject as the only place this is done, - where it distinguishes between static types like this one (allocated - by the C runtime at load time) and dynamic heap types (created at - runtime as objects). With the GIL disabled, all allocations that are - potentially collectable go in the mimalloc heap, and the collector - asserts that tp_is_gc() is true for them as it walks through the - heap object by object. Since we set the Py_TPFLAGS_HAS_GC bit, we - are always allocated in that mimalloc heap, so we must always be - collectable. - - XXX: TODO: Could this be responsible for some apparent leaks, even - on GIL builds, at least in 3.14? See if we can catch an assertion - failure in the GC on regular 3.14 as well. - */ - .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ -#endif -}; - -#endif - -// Local Variables: -// flycheck-clang-include-path: ("/opt/local/Library/Frameworks/Python.framework/Versions/3.8/include/python3.8") -// End: diff --git a/venv/Lib/site-packages/greenlet/PyGreenlet.hpp b/venv/Lib/site-packages/greenlet/PyGreenlet.hpp deleted file mode 100644 index df6cd80..0000000 --- a/venv/Lib/site-packages/greenlet/PyGreenlet.hpp +++ /dev/null @@ -1,35 +0,0 @@ -#ifndef PYGREENLET_HPP -#define PYGREENLET_HPP - - -#include "greenlet.h" -#include "greenlet_compiler_compat.hpp" -#include "greenlet_refs.hpp" - - -using greenlet::refs::OwnedGreenlet; -using greenlet::refs::BorrowedGreenlet; -using greenlet::refs::BorrowedObject;; -using greenlet::refs::OwnedObject; -using greenlet::refs::PyErrPieces; - - -// XXX: These doesn't really belong here, it's not a Python slot. -static OwnedObject internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces); - -static PyGreenlet* green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)); -static int green_clear(PyGreenlet* self); -static int green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs); -static int green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)); -static int green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)); -static int green_traverse(PyGreenlet* self, visitproc visit, void* arg); -static void green_dealloc(PyGreenlet* self); -static PyObject* green_getparent(PyGreenlet* self, void* UNUSED(context)); - -static int green_is_gc(PyObject* self); -static PyObject* green_getdead(PyGreenlet* self, void* UNUSED(context)); -static PyObject* green_getrun(PyGreenlet* self, void* UNUSED(context)); -static int green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)); -static PyObject* green_getframe(PyGreenlet* self, void* UNUSED(context)); -static PyObject* green_repr(PyGreenlet* self); -#endif diff --git a/venv/Lib/site-packages/greenlet/PyGreenletUnswitchable.cpp b/venv/Lib/site-packages/greenlet/PyGreenletUnswitchable.cpp deleted file mode 100644 index 1b768ee..0000000 --- a/venv/Lib/site-packages/greenlet/PyGreenletUnswitchable.cpp +++ /dev/null @@ -1,147 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - Implementation of the Python slots for PyGreenletUnswitchable_Type -*/ -#ifndef PY_GREENLET_UNSWITCHABLE_CPP -#define PY_GREENLET_UNSWITCHABLE_CPP - - - -#define PY_SSIZE_T_CLEAN -#include -#include "structmember.h" // PyMemberDef - -#include "greenlet_internal.hpp" -// Code after this point can assume access to things declared in stdint.h, -// including the fixed-width types. 
This goes for the platform-specific switch functions -// as well. -#include "greenlet_refs.hpp" -#include "greenlet_slp_switch.hpp" - -#include "greenlet_thread_support.hpp" -#include "TGreenlet.hpp" - -#include "TGreenlet.cpp" -#include "TGreenletGlobals.cpp" -#include "TThreadStateDestroy.cpp" - - -using greenlet::LockGuard; -using greenlet::LockInitError; -using greenlet::PyErrOccurred; -using greenlet::Require; - -using greenlet::g_handle_exit; -using greenlet::single_result; - -using greenlet::Greenlet; -using greenlet::UserGreenlet; -using greenlet::MainGreenlet; -using greenlet::BrokenGreenlet; -using greenlet::ThreadState; -using greenlet::PythonState; - - -#include "PyGreenlet.hpp" - -static PyGreenlet* -green_unswitchable_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) -{ - PyGreenlet* o = - (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); - if (o) { - new BrokenGreenlet(o, GET_THREAD_STATE().state().borrow_current()); - assert(Py_REFCNT(o) == 1); - } - return o; -} - -static PyObject* -green_unswitchable_getforce(PyGreenlet* self, void* UNUSED(context)) -{ - BrokenGreenlet* broken = dynamic_cast(self->pimpl); - return PyBool_FromLong(broken->_force_switch_error); -} - -static int -green_unswitchable_setforce(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) -{ - if (!nforce) { - PyErr_SetString( - PyExc_AttributeError, - "Cannot delete force_switch_error" - ); - return -1; - } - BrokenGreenlet* broken = dynamic_cast(self->pimpl); - int is_true = PyObject_IsTrue(nforce); - if (is_true == -1) { - return -1; - } - broken->_force_switch_error = is_true; - return 0; -} - -static PyObject* -green_unswitchable_getforceslp(PyGreenlet* self, void* UNUSED(context)) -{ - BrokenGreenlet* broken = dynamic_cast(self->pimpl); - return PyBool_FromLong(broken->_force_slp_switch_error); -} - -static int -green_unswitchable_setforceslp(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) -{ - if (!nforce) { - PyErr_SetString( - PyExc_AttributeError, - "Cannot delete force_slp_switch_error" - ); - return -1; - } - BrokenGreenlet* broken = dynamic_cast(self->pimpl); - int is_true = PyObject_IsTrue(nforce); - if (is_true == -1) { - return -1; - } - broken->_force_slp_switch_error = is_true; - return 0; -} - -static PyGetSetDef green_unswitchable_getsets[] = { - /* name, getter, setter, doc, closure (context pointer) */ - { - .name="force_switch_error", - .get=(getter)green_unswitchable_getforce, - .set=(setter)green_unswitchable_setforce, - .doc=NULL - }, - { - .name="force_slp_switch_error", - .get=(getter)green_unswitchable_getforceslp, - .set=(setter)green_unswitchable_setforceslp, - .doc=nullptr - }, - {.name=nullptr} -}; - -PyTypeObject PyGreenletUnswitchable_Type = { - .ob_base=PyVarObject_HEAD_INIT(NULL, 0) - .tp_name="greenlet._greenlet.UnswitchableGreenlet", - .tp_dealloc= (destructor)green_dealloc, /* tp_dealloc */ - .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - .tp_doc="Undocumented internal class", /* tp_doc */ - .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ - .tp_clear=(inquiry)green_clear, /* tp_clear */ - - .tp_getset=green_unswitchable_getsets, /* tp_getset */ - .tp_base=&PyGreenlet_Type, /* tp_base */ - .tp_init=(initproc)green_init, /* tp_init */ - .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ - .tp_new=(newfunc)green_unswitchable_new, /* tp_new */ - .tp_free=PyObject_GC_Del, /* tp_free */ - .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ -}; - - -#endif diff --git 
a/venv/Lib/site-packages/greenlet/PyModule.cpp b/venv/Lib/site-packages/greenlet/PyModule.cpp deleted file mode 100644 index a999dc9..0000000 --- a/venv/Lib/site-packages/greenlet/PyModule.cpp +++ /dev/null @@ -1,292 +0,0 @@ -#ifndef PY_MODULE_CPP -#define PY_MODULE_CPP - -#include "greenlet_internal.hpp" - - -#include "TGreenletGlobals.cpp" -#include "TMainGreenlet.cpp" -#include "TThreadStateDestroy.cpp" - -using greenlet::LockGuard; -using greenlet::ThreadState; - -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -# pragma clang diagnostic ignored "-Wunused-variable" -#endif - -PyDoc_STRVAR(mod_getcurrent_doc, - "getcurrent() -> greenlet\n" - "\n" - "Returns the current greenlet (i.e. the one which called this " - "function).\n"); - -static PyObject* -mod_getcurrent(PyObject* UNUSED(module)) -{ - return GET_THREAD_STATE().state().get_current().relinquish_ownership_o(); -} - -PyDoc_STRVAR(mod_settrace_doc, - "settrace(callback) -> object\n" - "\n" - "Sets a new tracing function and returns the previous one.\n"); -static PyObject* -mod_settrace(PyObject* UNUSED(module), PyObject* args) -{ - PyArgParseParam tracefunc; - if (!PyArg_ParseTuple(args, "O", &tracefunc)) { - return NULL; - } - ThreadState& state = GET_THREAD_STATE(); - OwnedObject previous = state.get_tracefunc(); - if (!previous) { - previous = Py_None; - } - - state.set_tracefunc(tracefunc); - - return previous.relinquish_ownership(); -} - -PyDoc_STRVAR(mod_gettrace_doc, - "gettrace() -> object\n" - "\n" - "Returns the currently set tracing function, or None.\n"); - -static PyObject* -mod_gettrace(PyObject* UNUSED(module)) -{ - OwnedObject tracefunc = GET_THREAD_STATE().state().get_tracefunc(); - if (!tracefunc) { - tracefunc = Py_None; - } - return tracefunc.relinquish_ownership(); -} - - - -PyDoc_STRVAR(mod_set_thread_local_doc, - "set_thread_local(key, value) -> None\n" - "\n" - "Set a value in the current thread-local dictionary. Debugging only.\n"); - -static PyObject* -mod_set_thread_local(PyObject* UNUSED(module), PyObject* args) -{ - PyArgParseParam key; - PyArgParseParam value; - PyObject* result = NULL; - - if (PyArg_UnpackTuple(args, "set_thread_local", 2, 2, &key, &value)) { - if(PyDict_SetItem( - PyThreadState_GetDict(), // borrow - key, - value) == 0 ) { - // success - Py_INCREF(Py_None); - result = Py_None; - } - } - return result; -} - -PyDoc_STRVAR(mod_get_pending_cleanup_count_doc, - "get_pending_cleanup_count() -> Integer\n" - "\n" - "Get the number of greenlet cleanup operations pending. Testing only.\n"); - - -static PyObject* -mod_get_pending_cleanup_count(PyObject* UNUSED(module)) -{ - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - return PyLong_FromSize_t(mod_globs->thread_states_to_destroy.size()); -} - -PyDoc_STRVAR(mod_get_total_main_greenlets_doc, - "get_total_main_greenlets() -> Integer\n" - "\n" - "Quickly return the number of main greenlets that exist. Testing only.\n"); - -static PyObject* -mod_get_total_main_greenlets(PyObject* UNUSED(module)) -{ - return PyLong_FromSize_t(G_TOTAL_MAIN_GREENLETS); -} - - - -PyDoc_STRVAR(mod_get_clocks_used_doing_optional_cleanup_doc, - "get_clocks_used_doing_optional_cleanup() -> Integer\n" - "\n" - "Get the number of clock ticks the program has used doing optional " - "greenlet cleanup.\n" - "Beginning in greenlet 2.0, greenlet tries to find and dispose of greenlets\n" - "that leaked after a thread exited. 
This requires invoking Python's garbage collector,\n" - "which may have a performance cost proportional to the number of live objects.\n" - "This function returns the amount of processor time\n" - "greenlet has used to do this. In programs that run with very large amounts of live\n" - "objects, this metric can be used to decide whether the cost of doing this cleanup\n" - "is worth the memory leak being corrected. If not, you can disable the cleanup\n" - "using ``enable_optional_cleanup(False)``.\n" - "The units are arbitrary and can only be compared to themselves (similarly to ``time.clock()``);\n" - "for example, to see how it scales with your heap. You can attempt to convert them into seconds\n" - "by dividing by the value of CLOCKS_PER_SEC." - "If cleanup has been disabled, returns None." - "\n" - "This is an implementation specific, provisional API. It may be changed or removed\n" - "in the future.\n" - ".. versionadded:: 2.0" - ); -static PyObject* -mod_get_clocks_used_doing_optional_cleanup(PyObject* UNUSED(module)) -{ - std::clock_t clocks = ThreadState::clocks_used_doing_gc(); - - if (clocks == std::clock_t(-1)) { - Py_RETURN_NONE; - } - // This might not actually work on some implementations; clock_t - // is an opaque type. - return PyLong_FromSsize_t(clocks); -} - -PyDoc_STRVAR(mod_enable_optional_cleanup_doc, - "mod_enable_optional_cleanup(bool) -> None\n" - "\n" - "Enable or disable optional cleanup operations.\n" - "See ``get_clocks_used_doing_optional_cleanup()`` for details.\n" - ); -static PyObject* -mod_enable_optional_cleanup(PyObject* UNUSED(module), PyObject* flag) -{ - int is_true = PyObject_IsTrue(flag); - if (is_true == -1) { - return nullptr; - } - - if (is_true) { - std::clock_t clocks = ThreadState::clocks_used_doing_gc(); - // If we already have a value, we don't want to lose it. - if (clocks == std::clock_t(-1)) { - ThreadState::set_clocks_used_doing_gc(0); - } - } - else { - ThreadState::set_clocks_used_doing_gc(std::clock_t(-1)); - } - Py_RETURN_NONE; -} - - - - -#if !GREENLET_PY313 -PyDoc_STRVAR(mod_get_tstate_trash_delete_nesting_doc, - "get_tstate_trash_delete_nesting() -> Integer\n" - "\n" - "Return the 'trash can' nesting level. 
Testing only.\n"); -static PyObject* -mod_get_tstate_trash_delete_nesting(PyObject* UNUSED(module)) -{ - PyThreadState* tstate = PyThreadState_GET(); - -#if GREENLET_PY312 - return PyLong_FromLong(tstate->trash.delete_nesting); -#else - return PyLong_FromLong(tstate->trash_delete_nesting); -#endif -} -#endif - - - - -static PyMethodDef GreenMethods[] = { - { - .ml_name="getcurrent", - .ml_meth=(PyCFunction)mod_getcurrent, - .ml_flags=METH_NOARGS, - .ml_doc=mod_getcurrent_doc - }, - { - .ml_name="settrace", - .ml_meth=(PyCFunction)mod_settrace, - .ml_flags=METH_VARARGS, - .ml_doc=mod_settrace_doc - }, - { - .ml_name="gettrace", - .ml_meth=(PyCFunction)mod_gettrace, - .ml_flags=METH_NOARGS, - .ml_doc=mod_gettrace_doc - }, - { - .ml_name="set_thread_local", - .ml_meth=(PyCFunction)mod_set_thread_local, - .ml_flags=METH_VARARGS, - .ml_doc=mod_set_thread_local_doc - }, - { - .ml_name="get_pending_cleanup_count", - .ml_meth=(PyCFunction)mod_get_pending_cleanup_count, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_pending_cleanup_count_doc - }, - { - .ml_name="get_total_main_greenlets", - .ml_meth=(PyCFunction)mod_get_total_main_greenlets, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_total_main_greenlets_doc - }, - { - .ml_name="get_clocks_used_doing_optional_cleanup", - .ml_meth=(PyCFunction)mod_get_clocks_used_doing_optional_cleanup, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_clocks_used_doing_optional_cleanup_doc - }, - { - .ml_name="enable_optional_cleanup", - .ml_meth=(PyCFunction)mod_enable_optional_cleanup, - .ml_flags=METH_O, - .ml_doc=mod_enable_optional_cleanup_doc - }, -#if !GREENLET_PY313 - { - .ml_name="get_tstate_trash_delete_nesting", - .ml_meth=(PyCFunction)mod_get_tstate_trash_delete_nesting, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_tstate_trash_delete_nesting_doc - }, -#endif - {.ml_name=NULL, .ml_meth=NULL} /* Sentinel */ -}; - -static const char* const copy_on_greentype[] = { - "getcurrent", - "error", - "GreenletExit", - "settrace", - "gettrace", - NULL -}; - -static struct PyModuleDef greenlet_module_def = { - .m_base=PyModuleDef_HEAD_INIT, - .m_name="greenlet._greenlet", - .m_doc=NULL, - .m_size=-1, - .m_methods=GreenMethods, -}; - - -#endif - -#ifdef __clang__ -# pragma clang diagnostic pop -#elif defined(__GNUC__) -# pragma GCC diagnostic pop -#endif diff --git a/venv/Lib/site-packages/greenlet/TBrokenGreenlet.cpp b/venv/Lib/site-packages/greenlet/TBrokenGreenlet.cpp deleted file mode 100644 index 7e9ab5b..0000000 --- a/venv/Lib/site-packages/greenlet/TBrokenGreenlet.cpp +++ /dev/null @@ -1,45 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::UserGreenlet. 
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ - -#include "TGreenlet.hpp" - -namespace greenlet { - -void* BrokenGreenlet::operator new(size_t UNUSED(count)) -{ - return allocator.allocate(1); -} - - -void BrokenGreenlet::operator delete(void* ptr) -{ - return allocator.deallocate(static_cast(ptr), - 1); -} - -greenlet::PythonAllocator greenlet::BrokenGreenlet::allocator; - -bool -BrokenGreenlet::force_slp_switch_error() const noexcept -{ - return this->_force_slp_switch_error; -} - -UserGreenlet::switchstack_result_t BrokenGreenlet::g_switchstack(void) -{ - if (this->_force_switch_error) { - return switchstack_result_t(-1); - } - return UserGreenlet::g_switchstack(); -} - -}; //namespace greenlet diff --git a/venv/Lib/site-packages/greenlet/TExceptionState.cpp b/venv/Lib/site-packages/greenlet/TExceptionState.cpp deleted file mode 100644 index 08a94ae..0000000 --- a/venv/Lib/site-packages/greenlet/TExceptionState.cpp +++ /dev/null @@ -1,62 +0,0 @@ -#ifndef GREENLET_EXCEPTION_STATE_CPP -#define GREENLET_EXCEPTION_STATE_CPP - -#include -#include "TGreenlet.hpp" - -namespace greenlet { - - -ExceptionState::ExceptionState() -{ - this->clear(); -} - -void ExceptionState::operator<<(const PyThreadState *const tstate) noexcept -{ - this->exc_info = tstate->exc_info; - this->exc_state = tstate->exc_state; -} - -void ExceptionState::operator>>(PyThreadState *const tstate) noexcept -{ - tstate->exc_state = this->exc_state; - tstate->exc_info = - this->exc_info ? this->exc_info : &tstate->exc_state; - this->clear(); -} - -void ExceptionState::clear() noexcept -{ - this->exc_info = nullptr; - this->exc_state.exc_value = nullptr; -#if !GREENLET_PY311 - this->exc_state.exc_type = nullptr; - this->exc_state.exc_traceback = nullptr; -#endif - this->exc_state.previous_item = nullptr; -} - -int ExceptionState::tp_traverse(visitproc visit, void* arg) noexcept -{ - Py_VISIT(this->exc_state.exc_value); -#if !GREENLET_PY311 - Py_VISIT(this->exc_state.exc_type); - Py_VISIT(this->exc_state.exc_traceback); -#endif - return 0; -} - -void ExceptionState::tp_clear() noexcept -{ - Py_CLEAR(this->exc_state.exc_value); -#if !GREENLET_PY311 - Py_CLEAR(this->exc_state.exc_type); - Py_CLEAR(this->exc_state.exc_traceback); -#endif -} - - -}; // namespace greenlet - -#endif // GREENLET_EXCEPTION_STATE_CPP diff --git a/venv/Lib/site-packages/greenlet/TGreenlet.cpp b/venv/Lib/site-packages/greenlet/TGreenlet.cpp deleted file mode 100644 index d12722b..0000000 --- a/venv/Lib/site-packages/greenlet/TGreenlet.cpp +++ /dev/null @@ -1,719 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::Greenlet. - * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef TGREENLET_CPP -#define TGREENLET_CPP -#include "greenlet_internal.hpp" -#include "TGreenlet.hpp" - - -#include "TGreenletGlobals.cpp" -#include "TThreadStateDestroy.cpp" - -namespace greenlet { - -Greenlet::Greenlet(PyGreenlet* p) - : Greenlet(p, StackState()) -{ -} - -Greenlet::Greenlet(PyGreenlet* p, const StackState& initial_stack) - : _self(p), stack_state(initial_stack) -{ - assert(p->pimpl == nullptr); - p->pimpl = this; -} - -Greenlet::~Greenlet() -{ - // XXX: Can't do this. 
tp_clear is a virtual function, and by the - // time we're here, we've sliced off our child classes. - //this->tp_clear(); - this->_self->pimpl = nullptr; -} - -bool -Greenlet::force_slp_switch_error() const noexcept -{ - return false; -} - -void -Greenlet::release_args() -{ - this->switch_args.CLEAR(); -} - -/** - * CAUTION: This will allocate memory and may trigger garbage - * collection and arbitrary Python code. - */ -OwnedObject -Greenlet::throw_GreenletExit_during_dealloc(const ThreadState& UNUSED(current_thread_state)) -{ - // If we're killed because we lost all references in the - // middle of a switch, that's ok. Don't reset the args/kwargs, - // we still want to pass them to the parent. - PyErr_SetString(mod_globs->PyExc_GreenletExit, - "Killing the greenlet because all references have vanished."); - // To get here it had to have run before - return this->g_switch(); -} - -inline void -Greenlet::slp_restore_state() noexcept -{ -#ifdef SLP_BEFORE_RESTORE_STATE - SLP_BEFORE_RESTORE_STATE(); -#endif - this->stack_state.copy_heap_to_stack( - this->thread_state()->borrow_current()->stack_state); -} - - -inline int -Greenlet::slp_save_state(char *const stackref) noexcept -{ - // XXX: This used to happen in the middle, before saving, but - // after finding the next owner. Does that matter? This is - // only defined for Sparc/GCC where it flushes register - // windows to the stack (I think) -#ifdef SLP_BEFORE_SAVE_STATE - SLP_BEFORE_SAVE_STATE(); -#endif - return this->stack_state.copy_stack_to_heap(stackref, - this->thread_state()->borrow_current()->stack_state); -} - -/** - * CAUTION: This will allocate memory and may trigger garbage - * collection and arbitrary Python code. - */ -OwnedObject -Greenlet::on_switchstack_or_initialstub_failure( - Greenlet* target, - const Greenlet::switchstack_result_t& err, - const bool target_was_me, - const bool was_initial_stub) -{ - // If we get here, either g_initialstub() - // failed, or g_switchstack() failed. Either one of those - // cases SHOULD leave us in the original greenlet with a valid stack. - if (!PyErr_Occurred()) { - PyErr_SetString( - PyExc_SystemError, - was_initial_stub - ? "Failed to switch stacks into a greenlet for the first time." - : "Failed to switch stacks into a running greenlet."); - } - this->release_args(); - - if (target && !target_was_me) { - target->murder_in_place(); - } - - assert(!err.the_new_current_greenlet); - assert(!err.origin_greenlet); - return OwnedObject(); - -} - -OwnedGreenlet -Greenlet::g_switchstack_success() noexcept -{ - PyThreadState* tstate = PyThreadState_GET(); - // restore the saved state - this->python_state >> tstate; - this->exception_state >> tstate; - - // The thread state hasn't been changed yet. - ThreadState* thread_state = this->thread_state(); - OwnedGreenlet result(thread_state->get_current()); - thread_state->set_current(this->self()); - //assert(thread_state->borrow_current().borrow() == this->_self); - return result; -} - -Greenlet::switchstack_result_t -Greenlet::g_switchstack(void) -{ - // if any of these assertions fail, it's likely because we - // switched away and tried to switch back to us. Early stages of - // switching are not reentrant because we re-use ``this->args()``. 
- // Switching away would happen if we trigger a garbage collection - // (by just using some Python APIs that happen to allocate Python - // objects) and some garbage had weakref callbacks or __del__ that - // switches (people don't write code like that by hand, but with - // gevent it's possible without realizing it) - assert(this->args() || PyErr_Occurred()); - { /* save state */ - if (this->thread_state()->is_current(this->self())) { - // Hmm, nothing to do. - // TODO: Does this bypass trace events that are - // important? - return switchstack_result_t(0, - this, this->thread_state()->borrow_current()); - } - BorrowedGreenlet current = this->thread_state()->borrow_current(); - PyThreadState* tstate = PyThreadState_GET(); - - current->python_state << tstate; - current->exception_state << tstate; - this->python_state.will_switch_from(tstate); - switching_thread_state = this; - current->expose_frames(); - } - assert(this->args() || PyErr_Occurred()); - // If this is the first switch into a greenlet, this will - // return twice, once with 1 in the new greenlet, once with 0 - // in the origin. - int err; - if (this->force_slp_switch_error()) { - err = -1; - } - else { - err = slp_switch(); - } - - if (err < 0) { /* error */ - // Tested by - // test_greenlet.TestBrokenGreenlets.test_failed_to_slp_switch_into_running - // - // It's not clear if it's worth trying to clean up and - // continue here. Failing to switch stacks is a big deal which - // may not be recoverable (who knows what state the stack is in). - // Also, we've stolen references in preparation for calling - // ``g_switchstack_success()`` and we don't have a clean - // mechanism for backing that all out. - Py_FatalError("greenlet: Failed low-level slp_switch(). The stack is probably corrupt."); - } - - // No stack-based variables are valid anymore. - - // But the global is volatile so we can reload it without the - // compiler caching it from earlier. - Greenlet* greenlet_that_switched_in = switching_thread_state; // aka this - switching_thread_state = nullptr; - // except that no stack variables are valid, we would: - // assert(this == greenlet_that_switched_in); - - // switchstack success is where we restore the exception state, - // etc. It returns the origin greenlet because its convenient. - - OwnedGreenlet origin = greenlet_that_switched_in->g_switchstack_success(); - assert(greenlet_that_switched_in->args() || PyErr_Occurred()); - return switchstack_result_t(err, greenlet_that_switched_in, origin); -} - - -inline void -Greenlet::check_switch_allowed() const -{ - // TODO: Make this take a parameter of the current greenlet, - // or current main greenlet, to make the check for - // cross-thread switching cheaper. Surely somewhere up the - // call stack we've already accessed the thread local variable. - - // We expect to always have a main greenlet now; accessing the thread state - // created it. However, if we get here and cleanup has already - // begun because we're a greenlet that was running in a - // (now dead) thread, these invariants will not hold true. In - // fact, accessing `this->thread_state` may not even be possible. - - // If the thread this greenlet was running in is dead, - // we'll still have a reference to a main greenlet, but the - // thread state pointer we have is bogus. - // TODO: Give the objects an API to determine if they belong - // to a dead thread. 
- - const BorrowedMainGreenlet main_greenlet = this->find_main_greenlet_in_lineage(); - - if (!main_greenlet) { - throw PyErrOccurred(mod_globs->PyExc_GreenletError, - "cannot switch to a garbage collected greenlet"); - } - - if (!main_greenlet->thread_state()) { - throw PyErrOccurred(mod_globs->PyExc_GreenletError, - "cannot switch to a different thread (which happens to have exited)"); - } - - // The main greenlet we found was from the .parent lineage. - // That may or may not have any relationship to the main - // greenlet of the running thread. We can't actually access - // our this->thread_state members to try to check that, - // because it could be in the process of getting destroyed, - // but setting the main_greenlet->thread_state member to NULL - // may not be visible yet. So we need to check against the - // current thread state (once the cheaper checks are out of - // the way) - const BorrowedMainGreenlet current_main_greenlet = GET_THREAD_STATE().state().borrow_main_greenlet(); - if ( - // lineage main greenlet is not this thread's greenlet - current_main_greenlet != main_greenlet - || ( - // atteched to some thread - this->main_greenlet() - // XXX: Same condition as above. Was this supposed to be - // this->main_greenlet()? - && current_main_greenlet != main_greenlet) - // switching into a known dead thread (XXX: which, if we get here, - // is bad, because we just accessed the thread state, which is - // gone!) - || (!current_main_greenlet->thread_state())) { - // CAUTION: This may trigger memory allocations, gc, and - // arbitrary Python code. - throw PyErrOccurred( - mod_globs->PyExc_GreenletError, - "Cannot switch to a different thread\n\tCurrent: %R\n\tExpected: %R", - current_main_greenlet, main_greenlet); - } -} - -const OwnedObject -Greenlet::context() const -{ - using greenlet::PythonStateContext; - OwnedObject result; - - if (this->is_currently_running_in_some_thread()) { - /* Currently running greenlet: context is stored in the thread state, - not the greenlet object. */ - if (GET_THREAD_STATE().state().is_current(this->self())) { - result = PythonStateContext::context(PyThreadState_GET()); - } - else { - throw ValueError( - "cannot get context of a " - "greenlet that is running in a different thread"); - } - } - else { - /* Greenlet is not running: just return context. */ - result = this->python_state.context(); - } - if (!result) { - result = OwnedObject::None(); - } - return result; -} - - -void -Greenlet::context(BorrowedObject given) -{ - using greenlet::PythonStateContext; - if (!given) { - throw AttributeError("can't delete context attribute"); - } - if (given.is_None()) { - /* "Empty context" is stored as NULL, not None. */ - given = nullptr; - } - - //checks type, incrs refcnt - greenlet::refs::OwnedContext context(given); - PyThreadState* tstate = PyThreadState_GET(); - - if (this->is_currently_running_in_some_thread()) { - if (!GET_THREAD_STATE().state().is_current(this->self())) { - throw ValueError("cannot set context of a greenlet" - " that is running in a different thread"); - } - - /* Currently running greenlet: context is stored in the thread state, - not the greenlet object. */ - OwnedObject octx = OwnedObject::consuming(PythonStateContext::context(tstate)); - PythonStateContext::context(tstate, context.relinquish_ownership()); - } - else { - /* Greenlet is not running: just set context. Note that the - greenlet may be dead.*/ - this->python_state.context() = context; - } -} - -/** - * CAUTION: May invoke arbitrary Python code. 
- * - * Figure out what the result of ``greenlet.switch(arg, kwargs)`` - * should be and transfers ownership of it to the left-hand-side. - * - * If switch() was just passed an arg tuple, then we'll just return that. - * If only keyword arguments were passed, then we'll pass the keyword - * argument dict. Otherwise, we'll create a tuple of (args, kwargs) and - * return both. - * - * CAUTION: This may allocate a new tuple object, which may - * cause the Python garbage collector to run, which in turn may - * run arbitrary Python code that switches. - */ -OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept -{ - // Because this may invoke arbitrary Python code, which could - // result in switching back to us, we need to get the - // arguments locally on the stack. - assert(rhs); - OwnedObject args = rhs.args(); - OwnedObject kwargs = rhs.kwargs(); - rhs.CLEAR(); - // We shouldn't be called twice for the same switch. - assert(args || kwargs); - assert(!rhs); - - if (!kwargs) { - lhs = args; - } - else if (!PyDict_Size(kwargs.borrow())) { - lhs = args; - } - else if (!PySequence_Length(args.borrow())) { - lhs = kwargs; - } - else { - // PyTuple_Pack allocates memory, may GC, may run arbitrary - // Python code. - lhs = OwnedObject::consuming(PyTuple_Pack(2, args.borrow(), kwargs.borrow())); - } - return lhs; -} - -static OwnedObject -g_handle_exit(const OwnedObject& greenlet_result) -{ - if (!greenlet_result && mod_globs->PyExc_GreenletExit.PyExceptionMatches()) { - /* catch and ignore GreenletExit */ - PyErrFetchParam val; - PyErr_Fetch(PyErrFetchParam(), val, PyErrFetchParam()); - if (!val) { - return OwnedObject::None(); - } - return OwnedObject(val); - } - - if (greenlet_result) { - // package the result into a 1-tuple - // PyTuple_Pack increments the reference of its arguments, - // so we always need to decref the greenlet result; - // the owner will do that. - return OwnedObject::consuming(PyTuple_Pack(1, greenlet_result.borrow())); - } - - return OwnedObject(); -} - - - -/** - * May run arbitrary Python code. - */ -OwnedObject -Greenlet::g_switch_finish(const switchstack_result_t& err) -{ - assert(err.the_new_current_greenlet == this); - - ThreadState& state = *this->thread_state(); - // Because calling the trace function could do arbitrary things, - // including switching away from this greenlet and then maybe - // switching back, we need to capture the arguments now so that - // they don't change. - OwnedObject result; - if (this->args()) { - result <<= this->args(); - } - else { - assert(PyErr_Occurred()); - } - assert(!this->args()); - try { - // Our only caller handles the bad error case - assert(err.status >= 0); - assert(state.borrow_current() == this->self()); - if (OwnedObject tracefunc = state.get_tracefunc()) { - assert(result || PyErr_Occurred()); - g_calltrace(tracefunc, - result ? mod_globs->event_switch : mod_globs->event_throw, - err.origin_greenlet, - this->self()); - } - // The above could have invoked arbitrary Python code, but - // it couldn't switch back to this object and *also* - // throw an exception, so the args won't have changed. - - if (PyErr_Occurred()) { - // We get here if we fell of the end of the run() function - // raising an exception. The switch itself was - // successful, but the function raised. - // valgrind reports that memory allocated here can still - // be reached after a test run. 
- throw PyErrOccurred::from_current(); - } - return result; - } - catch (const PyErrOccurred&) { - /* Turn switch errors into switch throws */ - /* Turn trace errors into switch throws */ - this->release_args(); - throw; - } -} - -void -Greenlet::g_calltrace(const OwnedObject& tracefunc, - const greenlet::refs::ImmortalEventName& event, - const BorrowedGreenlet& origin, - const BorrowedGreenlet& target) -{ - PyErrPieces saved_exc; - try { - TracingGuard tracing_guard; - // TODO: We have saved the active exception (if any) that's - // about to be raised. In the 'throw' case, we could provide - // the exception to the tracefunction, which seems very helpful. - tracing_guard.CallTraceFunction(tracefunc, event, origin, target); - } - catch (const PyErrOccurred&) { - // In case of exceptions trace function is removed, - // and any existing exception is replaced with the tracing - // exception. - GET_THREAD_STATE().state().set_tracefunc(Py_None); - throw; - } - - saved_exc.PyErrRestore(); - assert( - (event == mod_globs->event_throw && PyErr_Occurred()) - || (event == mod_globs->event_switch && !PyErr_Occurred()) - ); -} - -void -Greenlet::murder_in_place() -{ - if (this->active()) { - assert(!this->is_currently_running_in_some_thread()); - this->deactivate_and_free(); - } -} - -inline void -Greenlet::deactivate_and_free() -{ - if (!this->active()) { - return; - } - // Throw away any saved stack. - this->stack_state = StackState(); - assert(!this->stack_state.active()); - // Throw away any Python references. - // We're holding a borrowed reference to the last - // frame we executed. Since we borrowed it, the - // normal traversal, clear, and dealloc functions - // ignore it, meaning it leaks. (The thread state - // object can't find it to clear it when that's - // deallocated either, because by definition if we - // got an object on this list, it wasn't - // running and the thread state doesn't have - // this frame.) - // So here, we *do* clear it. - this->python_state.tp_clear(true); -} - -bool -Greenlet::belongs_to_thread(const ThreadState* thread_state) const -{ - if (!this->thread_state() // not running anywhere, or thread - // exited - || !thread_state) { // same, or there is no thread state. - return false; - } - return true; -} - - -void -Greenlet::deallocing_greenlet_in_thread(const ThreadState* current_thread_state) -{ - /* Cannot raise an exception to kill the greenlet if - it is not running in the same thread! */ - if (this->belongs_to_thread(current_thread_state)) { - assert(current_thread_state); - // To get here it had to have run before - /* Send the greenlet a GreenletExit exception. */ - - // We don't care about the return value, only whether an - // exception happened. - this->throw_GreenletExit_during_dealloc(*current_thread_state); - return; - } - - // Not the same thread! Temporarily save the greenlet - // into its thread's deleteme list, *if* it exists. - // If that thread has already exited, and processed its pending - // cleanup, we'll never be able to clean everything up: we won't - // be able to raise an exception. - // That's mostly OK! Since we can't add it to a list, our refcount - // won't increase, and we'll go ahead with the DECREFs later. - - ThreadState *const thread_state = this->thread_state(); - if (thread_state) { - thread_state->delete_when_thread_running(this->self()); - } - else { - // The thread is dead, we can't raise an exception. - // We need to make it look non-active, though, so that dealloc - // finishes killing it. 
- this->deactivate_and_free(); - } - return; -} - - -int -Greenlet::tp_traverse(visitproc visit, void* arg) -{ - - int result; - if ((result = this->exception_state.tp_traverse(visit, arg)) != 0) { - return result; - } - //XXX: This is ugly. But so is handling everything having to do - //with the top frame. - bool visit_top_frame = this->was_running_in_dead_thread(); - // When true, the thread is dead. Our implicit weak reference to the - // frame is now all that's left; we consider ourselves to - // strongly own it now. - if ((result = this->python_state.tp_traverse(visit, arg, visit_top_frame)) != 0) { - return result; - } - return 0; -} - -int -Greenlet::tp_clear() -{ - bool own_top_frame = this->was_running_in_dead_thread(); - this->exception_state.tp_clear(); - this->python_state.tp_clear(own_top_frame); - return 0; -} - -bool Greenlet::is_currently_running_in_some_thread() const -{ - return this->stack_state.active() && !this->python_state.top_frame(); -} - -#if GREENLET_PY312 -void GREENLET_NOINLINE(Greenlet::expose_frames)() -{ - if (!this->python_state.top_frame()) { - return; - } - - _PyInterpreterFrame* last_complete_iframe = nullptr; - _PyInterpreterFrame* iframe = this->python_state.top_frame()->f_frame; - while (iframe) { - // We must make a copy before looking at the iframe contents, - // since iframe might point to a portion of the greenlet's C stack - // that was spilled when switching greenlets. - _PyInterpreterFrame iframe_copy; - this->stack_state.copy_from_stack(&iframe_copy, iframe, sizeof(*iframe)); - if (!_PyFrame_IsIncomplete(&iframe_copy)) { - // If the iframe were OWNED_BY_CSTACK then it would always be - // incomplete. Since it's not incomplete, it's not on the C stack - // and we can access it through the original `iframe` pointer - // directly. This is important since GetFrameObject might - // lazily _create_ the frame object and we don't want the - // interpreter to lose track of it. - assert(iframe_copy.owner != FRAME_OWNED_BY_CSTACK); - - // We really want to just write: - // PyFrameObject* frame = _PyFrame_GetFrameObject(iframe); - // but _PyFrame_GetFrameObject calls _PyFrame_MakeAndSetFrameObject - // which is not a visible symbol in libpython. The easiest - // way to get a public function to call it is using - // PyFrame_GetBack, which is defined as follows: - // assert(frame != NULL); - // assert(!_PyFrame_IsIncomplete(frame->f_frame)); - // PyFrameObject *back = frame->f_back; - // if (back == NULL) { - // _PyInterpreterFrame *prev = frame->f_frame->previous; - // prev = _PyFrame_GetFirstComplete(prev); - // if (prev) { - // back = _PyFrame_GetFrameObject(prev); - // } - // } - // return (PyFrameObject*)Py_XNewRef(back); - if (!iframe->frame_obj) { - PyFrameObject dummy_frame; - _PyInterpreterFrame dummy_iframe; - dummy_frame.f_back = nullptr; - dummy_frame.f_frame = &dummy_iframe; - // force the iframe to be considered complete without - // needing to check its code object: - dummy_iframe.owner = FRAME_OWNED_BY_GENERATOR; - dummy_iframe.previous = iframe; - assert(!_PyFrame_IsIncomplete(&dummy_iframe)); - // Drop the returned reference immediately; the iframe - // continues to hold a strong reference - Py_XDECREF(PyFrame_GetBack(&dummy_frame)); - assert(iframe->frame_obj); - } - - // This is a complete frame, so make the last one of those we saw - // point at it, bypassing any incomplete frames (which may have - // been on the C stack) in between the two. 
We're overwriting - // last_complete_iframe->previous and need that to be reversible, - // so we store the original previous ptr in the frame object - // (which we must have created on a previous iteration through - // this loop). The frame object has a bunch of storage that is - // only used when its iframe is OWNED_BY_FRAME_OBJECT, which only - // occurs when the frame object outlives the frame's execution, - // which can't have happened yet because the frame is currently - // executing as far as the interpreter is concerned. So, we can - // reuse it for our own purposes. - assert(iframe->owner == FRAME_OWNED_BY_THREAD - || iframe->owner == FRAME_OWNED_BY_GENERATOR); - if (last_complete_iframe) { - assert(last_complete_iframe->frame_obj); - memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], - &last_complete_iframe->previous, sizeof(void *)); - last_complete_iframe->previous = iframe; - } - last_complete_iframe = iframe; - } - // Frames that are OWNED_BY_FRAME_OBJECT are linked via the - // frame's f_back while all others are linked via the iframe's - // previous ptr. Since all the frames we traverse are running - // as far as the interpreter is concerned, we don't have to - // worry about the OWNED_BY_FRAME_OBJECT case. - iframe = iframe_copy.previous; - } - - // Give the outermost complete iframe a null previous pointer to - // account for any potential incomplete/C-stack iframes between it - // and the actual top-of-stack - if (last_complete_iframe) { - assert(last_complete_iframe->frame_obj); - memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], - &last_complete_iframe->previous, sizeof(void *)); - last_complete_iframe->previous = nullptr; - } -} -#else -void Greenlet::expose_frames() -{ - -} -#endif - -}; // namespace greenlet -#endif diff --git a/venv/Lib/site-packages/greenlet/TGreenlet.hpp b/venv/Lib/site-packages/greenlet/TGreenlet.hpp deleted file mode 100644 index 32330e9..0000000 --- a/venv/Lib/site-packages/greenlet/TGreenlet.hpp +++ /dev/null @@ -1,837 +0,0 @@ -#ifndef GREENLET_GREENLET_HPP -#define GREENLET_GREENLET_HPP -/* - * Declarations of the core data structures. -*/ - -#define PY_SSIZE_T_CLEAN -#include - -#include "greenlet_compiler_compat.hpp" -#include "greenlet_refs.hpp" -#include "greenlet_cpython_compat.hpp" -#include "greenlet_allocator.hpp" - -using greenlet::refs::OwnedObject; -using greenlet::refs::OwnedGreenlet; -using greenlet::refs::OwnedMainGreenlet; -using greenlet::refs::BorrowedGreenlet; - -#if PY_VERSION_HEX < 0x30B00A6 -# define _PyCFrame CFrame -# define _PyInterpreterFrame _interpreter_frame -#endif - -#if GREENLET_PY312 -# define Py_BUILD_CORE -# include "internal/pycore_frame.h" -#endif - -#if GREENLET_PY314 -# include "internal/pycore_interpframe_structs.h" -#if defined(_MSC_VER) || defined(__MINGW64__) -# include "greenlet_msvc_compat.hpp" -#else -# include "internal/pycore_interpframe.h" -#endif -#ifdef Py_GIL_DISABLED -# include "internal/pycore_tstate.h" -#endif -#endif - -// XXX: TODO: Work to remove all virtual functions -// for speed of calling and size of objects (no vtable). -// One pattern is the Curiously Recurring Template -namespace greenlet -{ - class ExceptionState - { - private: - G_NO_COPIES_OF_CLS(ExceptionState); - - // Even though these are borrowed objects, we actually own - // them, when they're not null. - // XXX: Express that in the API. 
- private: - _PyErr_StackItem* exc_info; - _PyErr_StackItem exc_state; - public: - ExceptionState(); - void operator<<(const PyThreadState *const tstate) noexcept; - void operator>>(PyThreadState* tstate) noexcept; - void clear() noexcept; - - int tp_traverse(visitproc visit, void* arg) noexcept; - void tp_clear() noexcept; - }; - - template - void operator<<(const PyThreadState *const tstate, T& exc); - - class PythonStateContext - { - protected: - greenlet::refs::OwnedContext _context; - public: - inline const greenlet::refs::OwnedContext& context() const - { - return this->_context; - } - inline greenlet::refs::OwnedContext& context() - { - return this->_context; - } - - inline void tp_clear() - { - this->_context.CLEAR(); - } - - template - inline static PyObject* context(T* tstate) - { - return tstate->context; - } - - template - inline static void context(T* tstate, PyObject* new_context) - { - tstate->context = new_context; - tstate->context_ver++; - } - }; - class SwitchingArgs; - class PythonState : public PythonStateContext - { - public: - typedef greenlet::refs::OwnedReference OwnedFrame; - private: - G_NO_COPIES_OF_CLS(PythonState); - // We own this if we're suspended (although currently we don't - // tp_traverse into it; that's a TODO). If we're running, it's - // empty. If we get deallocated and *still* have a frame, it - // won't be reachable from the place that normally decref's - // it, so we need to do it (hence owning it). - OwnedFrame _top_frame; -#if GREENLET_USE_CFRAME - _PyCFrame* cframe; - int use_tracing; -#endif -#if GREENLET_PY314 - int py_recursion_depth; - // I think this is only used by the JIT. At least, - // we only got errors not switching it when the JIT was enabled. - // Python/generated_cases.c.h:12469: _PyEval_EvalFrameDefault: - // Assertion `tstate->current_executor == NULL' failed. - // see https://github.com/python-greenlet/greenlet/issues/460 - PyObject* current_executor; - _PyStackRef* stackpointer; - #ifdef Py_GIL_DISABLED - _PyCStackRef* c_stack_refs; - #endif -#elif GREENLET_PY312 - int py_recursion_depth; - int c_recursion_depth; -#else - int recursion_depth; -#endif -#if GREENLET_PY313 - PyObject *delete_later; -#else - int trash_delete_nesting; -#endif -#if GREENLET_PY311 - _PyInterpreterFrame* current_frame; - _PyStackChunk* datastack_chunk; - PyObject** datastack_top; - PyObject** datastack_limit; -#endif - // The PyInterpreterFrame list on 3.12+ contains some entries that are - // on the C stack, which can't be directly accessed while a greenlet is - // suspended. In order to keep greenlet gr_frame introspection working, - // we adjust stack switching to rewrite the interpreter frame list - // to skip these C-stack frames; we call this "exposing" the greenlet's - // frames because it makes them valid to work with in Python. Then when - // the greenlet is resumed we need to remember to reverse the operation - // we did. The C-stack frames are "entry frames" which are a low-level - // interpreter detail; they're not needed for introspection, but do - // need to be present for the eval loop to work. - void unexpose_frames(); - - public: - - PythonState(); - // You can use this for testing whether we have a frame - // or not. It returns const so they can't modify it. 
- const OwnedFrame& top_frame() const noexcept; - - inline void operator<<(const PyThreadState *const tstate) noexcept; - inline void operator>>(PyThreadState* tstate) noexcept; - void clear() noexcept; - - int tp_traverse(visitproc visit, void* arg, bool visit_top_frame) noexcept; - void tp_clear(bool own_top_frame) noexcept; - void set_initial_state(const PyThreadState* const tstate) noexcept; -#if GREENLET_USE_CFRAME - void set_new_cframe(_PyCFrame& frame) noexcept; -#endif - - void may_switch_away() noexcept; - inline void will_switch_from(PyThreadState *const origin_tstate) noexcept; - void did_finish(PyThreadState* tstate) noexcept; - }; - - class StackState - { - // By having only plain C (POD) members, no virtual functions - // or bases, we get a trivial assignment operator generated - // for us. However, that's not safe since we do manage memory. - // So we declare an assignment operator that only works if we - // don't have any memory allocated. (We don't use - // std::shared_ptr for reference counting just to keep this - // object small) - private: - char* _stack_start; - char* stack_stop; - char* stack_copy; - intptr_t _stack_saved; - StackState* stack_prev; - inline int copy_stack_to_heap_up_to(const char* const stop) noexcept; - inline void free_stack_copy() noexcept; - - public: - /** - * Creates a started, but inactive, state, using *current* - * as the previous. - */ - StackState(void* mark, StackState& current); - /** - * Creates an inactive, unstarted, state. - */ - StackState(); - ~StackState(); - StackState(const StackState& other); - StackState& operator=(const StackState& other); - inline void copy_heap_to_stack(const StackState& current) noexcept; - inline int copy_stack_to_heap(char* const stackref, const StackState& current) noexcept; - inline bool started() const noexcept; - inline bool main() const noexcept; - inline bool active() const noexcept; - inline void set_active() noexcept; - inline void set_inactive() noexcept; - inline intptr_t stack_saved() const noexcept; - inline char* stack_start() const noexcept; - static inline StackState make_main() noexcept; -#ifdef GREENLET_USE_STDIO - friend std::ostream& operator<<(std::ostream& os, const StackState& s); -#endif - - // Fill in [dest, dest + n) with the values that would be at - // [src, src + n) while this greenlet is running. This is like memcpy - // except that if the greenlet is suspended it accounts for the portion - // of the greenlet's stack that was spilled to the heap. `src` may - // be on this greenlet's stack, or on the heap, but not on a different - // greenlet's stack. - void copy_from_stack(void* dest, const void* src, size_t n) const; - }; -#ifdef GREENLET_USE_STDIO - std::ostream& operator<<(std::ostream& os, const StackState& s); -#endif - - class SwitchingArgs - { - private: - G_NO_ASSIGNMENT_OF_CLS(SwitchingArgs); - // If args and kwargs are both false (NULL), this is a *throw*, not a - // switch. PyErr_... must have been called already. - OwnedObject _args; - OwnedObject _kwargs; - public: - - SwitchingArgs() - {} - - SwitchingArgs(const OwnedObject& args, const OwnedObject& kwargs) - : _args(args), - _kwargs(kwargs) - {} - - SwitchingArgs(const SwitchingArgs& other) - : _args(other._args), - _kwargs(other._kwargs) - {} - - const OwnedObject& args() - { - return this->_args; - } - - const OwnedObject& kwargs() - { - return this->_kwargs; - } - - /** - * Moves ownership from the argument to this object. 
- */ - SwitchingArgs& operator<<=(SwitchingArgs& other) - { - if (this != &other) { - this->_args = other._args; - this->_kwargs = other._kwargs; - other.CLEAR(); - } - return *this; - } - - /** - * Acquires ownership of the argument (consumes the reference). - */ - SwitchingArgs& operator<<=(PyObject* args) - { - this->_args = OwnedObject::consuming(args); - this->_kwargs.CLEAR(); - return *this; - } - - /** - * Acquires ownership of the argument. - * - * Sets the args to be the given value; clears the kwargs. - */ - SwitchingArgs& operator<<=(OwnedObject& args) - { - assert(&args != &this->_args); - this->_args = args; - this->_kwargs.CLEAR(); - args.CLEAR(); - - return *this; - } - - explicit operator bool() const noexcept - { - return this->_args || this->_kwargs; - } - - inline void CLEAR() - { - this->_args.CLEAR(); - this->_kwargs.CLEAR(); - } - - const std::string as_str() const noexcept - { - return PyUnicode_AsUTF8( - OwnedObject::consuming( - PyUnicode_FromFormat( - "SwitchingArgs(args=%R, kwargs=%R)", - this->_args.borrow(), - this->_kwargs.borrow() - ) - ).borrow() - ); - } - }; - - class ThreadState; - - class UserGreenlet; - class MainGreenlet; - - class Greenlet - { - private: - G_NO_COPIES_OF_CLS(Greenlet); - PyGreenlet* const _self; - private: - // XXX: Work to remove these. - friend class ThreadState; - friend class UserGreenlet; - friend class MainGreenlet; - protected: - ExceptionState exception_state; - SwitchingArgs switch_args; - StackState stack_state; - PythonState python_state; - Greenlet(PyGreenlet* p, const StackState& initial_state); - public: - // This constructor takes ownership of the PyGreenlet, by - // setting ``p->pimpl = this;``. - Greenlet(PyGreenlet* p); - virtual ~Greenlet(); - - const OwnedObject context() const; - - // You MUST call this _very_ early in the switching process to - // prepare anything that may need prepared. This might perform - // garbage collections or otherwise run arbitrary Python code. - // - // One specific use of it is for Python 3.11+, preventing - // running arbitrary code at unsafe times. See - // PythonState::may_switch_away(). - inline void may_switch_away() - { - this->python_state.may_switch_away(); - } - - inline void context(refs::BorrowedObject new_context); - - inline SwitchingArgs& args() - { - return this->switch_args; - } - - virtual const refs::BorrowedMainGreenlet main_greenlet() const = 0; - - inline intptr_t stack_saved() const noexcept - { - return this->stack_state.stack_saved(); - } - - // This is used by the macro SLP_SAVE_STATE to compute the - // difference in stack sizes. It might be nice to handle the - // computation ourself, but the type of the result - // varies by platform, so doing it in the macro is the - // simplest way. - inline const char* stack_start() const noexcept - { - return this->stack_state.stack_start(); - } - - virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); - virtual OwnedObject g_switch() = 0; - /** - * Force the greenlet to appear dead. Used when it's not - * possible to throw an exception into a greenlet anymore. - * - * This losses access to the thread state and the main greenlet. - */ - virtual void murder_in_place(); - - /** - * Called when somebody notices we were running in a dead - * thread to allow cleaning up resources (because we can't - * raise GreenletExit into it anymore). - * This is very similar to ``murder_in_place()``, except that - * it DOES NOT lose the main greenlet or thread state. 
- */ - inline void deactivate_and_free(); - - - // Called when some thread wants to deallocate a greenlet - // object. - // The thread may or may not be the same thread the greenlet - // was running in. - // The thread state will be null if the thread the greenlet - // was running in was known to have exited. - void deallocing_greenlet_in_thread(const ThreadState* current_state); - - // Must be called on 3.12+ before exposing a suspended greenlet's - // frames to user code. This rewrites the linked list of interpreter - // frames to skip the ones that are being stored on the C stack (which - // can't be safely accessed while the greenlet is suspended because - // that stack space might be hosting a different greenlet), and - // sets PythonState::frames_were_exposed so we remember to restore - // the original list before resuming the greenlet. The C-stack frames - // are a low-level interpreter implementation detail; while they're - // important to the bytecode eval loop, they're superfluous for - // introspection purposes. - void expose_frames(); - - - // TODO: Figure out how to make these non-public. - inline void slp_restore_state() noexcept; - inline int slp_save_state(char *const stackref) noexcept; - - inline bool is_currently_running_in_some_thread() const; - virtual bool belongs_to_thread(const ThreadState* state) const; - - inline bool started() const - { - return this->stack_state.started(); - } - inline bool active() const - { - return this->stack_state.active(); - } - inline bool main() const - { - return this->stack_state.main(); - } - virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const = 0; - - virtual const OwnedGreenlet parent() const = 0; - virtual void parent(const refs::BorrowedObject new_parent) = 0; - - inline const PythonState::OwnedFrame& top_frame() - { - return this->python_state.top_frame(); - } - - virtual const OwnedObject& run() const = 0; - virtual void run(const refs::BorrowedObject nrun) = 0; - - - virtual int tp_traverse(visitproc visit, void* arg); - virtual int tp_clear(); - - - // Return the thread state that the greenlet is running in, or - // null if the greenlet is not running or the thread is known - // to have exited. - virtual ThreadState* thread_state() const noexcept = 0; - - // Return true if the greenlet is known to have been running - // (active) in a thread that has now exited. - virtual bool was_running_in_dead_thread() const noexcept = 0; - - // Return a borrowed greenlet that is the Python object - // this object represents. - inline BorrowedGreenlet self() const noexcept - { - return BorrowedGreenlet(this->_self); - } - - // For testing. If this returns true, we should pretend that - // slp_switch() failed. - virtual bool force_slp_switch_error() const noexcept; - - protected: - inline void release_args(); - - // The functions that must not be inlined are declared virtual. - // We also mark them as protected, not private, so that the - // compiler is forced to call them through a function pointer. - // (A sufficiently smart compiler could directly call a private - // virtual function since it can never be overridden in a - // subclass). - - // Also TODO: Switch away from integer error codes and to enums, - // or throw exceptions when possible. 
- struct switchstack_result_t - { - int status; - Greenlet* the_new_current_greenlet; - OwnedGreenlet origin_greenlet; - - switchstack_result_t() - : status(0), - the_new_current_greenlet(nullptr) - {} - - switchstack_result_t(int err) - : status(err), - the_new_current_greenlet(nullptr) - {} - - switchstack_result_t(int err, Greenlet* state, OwnedGreenlet& origin) - : status(err), - the_new_current_greenlet(state), - origin_greenlet(origin) - { - } - - switchstack_result_t(int err, Greenlet* state, const BorrowedGreenlet& origin) - : status(err), - the_new_current_greenlet(state), - origin_greenlet(origin) - { - } - - switchstack_result_t(const switchstack_result_t& other) - : status(other.status), - the_new_current_greenlet(other.the_new_current_greenlet), - origin_greenlet(other.origin_greenlet) - {} - - switchstack_result_t& operator=(const switchstack_result_t& other) - { - this->status = other.status; - this->the_new_current_greenlet = other.the_new_current_greenlet; - this->origin_greenlet = other.origin_greenlet; - return *this; - } - }; - - OwnedObject on_switchstack_or_initialstub_failure( - Greenlet* target, - const switchstack_result_t& err, - const bool target_was_me=false, - const bool was_initial_stub=false); - - // Returns the previous greenlet we just switched away from. - virtual OwnedGreenlet g_switchstack_success() noexcept; - - - // Check the preconditions for switching to this greenlet; if they - // aren't met, throws PyErrOccurred. Most callers will want to - // catch this and clear the arguments - inline void check_switch_allowed() const; - class GreenletStartedWhileInPython : public std::runtime_error - { - public: - GreenletStartedWhileInPython() : std::runtime_error("") - {} - }; - - protected: - - - /** - Perform a stack switch into this greenlet. - - This temporarily sets the global variable - ``switching_thread_state`` to this greenlet; as soon as the - call to ``slp_switch`` completes, this is reset to NULL. - Consequently, this depends on the GIL. - - TODO: Adopt the stackman model and pass ``slp_switch`` a - callback function and context pointer; this eliminates the - need for global variables altogether. - - Because the stack switch happens in this function, this - function can't use its own stack (local) variables, set - before the switch, and then accessed after the switch. - - Further, you con't even access ``g_thread_state_global`` - before and after the switch from the global variable. - Because it is thread local some compilers cache it in a - register/on the stack, notably new versions of MSVC; this - breaks with strange crashes sometime later, because writing - to anything in ``g_thread_state_global`` after the switch - is actually writing to random memory. For this reason, we - call a non-inlined function to finish the operation. (XXX: - The ``/GT`` MSVC compiler argument probably fixes that.) - - It is very important that stack switch is 'atomic', i.e. no - calls into other Python code allowed (except very few that - are safe), because global variables are very fragile. (This - should no longer be the case with thread-local variables.) - - */ - // Made virtual to facilitate subclassing UserGreenlet for testing. 
- virtual switchstack_result_t g_switchstack(void); - -class TracingGuard -{ -private: - PyThreadState* tstate; -public: - TracingGuard() - : tstate(PyThreadState_GET()) - { - PyThreadState_EnterTracing(this->tstate); - } - - ~TracingGuard() - { - PyThreadState_LeaveTracing(this->tstate); - this->tstate = nullptr; - } - - inline void CallTraceFunction(const OwnedObject& tracefunc, - const greenlet::refs::ImmortalEventName& event, - const BorrowedGreenlet& origin, - const BorrowedGreenlet& target) - { - // TODO: This calls tracefunc(event, (origin, target)). Add a shortcut - // function for that that's specialized to avoid the Py_BuildValue - // string parsing, or start with just using "ON" format with PyTuple_Pack(2, - // origin, target). That seems like what the N format is meant - // for. - // XXX: Why does event not automatically cast back to a PyObject? - // It tries to call the "deleted constructor ImmortalEventName - // const" instead. - assert(tracefunc); - assert(event); - assert(origin); - assert(target); - greenlet::refs::NewReference retval( - PyObject_CallFunction( - tracefunc.borrow(), - "O(OO)", - event.borrow(), - origin.borrow(), - target.borrow() - )); - if (!retval) { - throw PyErrOccurred::from_current(); - } - } -}; - - static void - g_calltrace(const OwnedObject& tracefunc, - const greenlet::refs::ImmortalEventName& event, - const greenlet::refs::BorrowedGreenlet& origin, - const BorrowedGreenlet& target); - private: - OwnedObject g_switch_finish(const switchstack_result_t& err); - - }; - - class UserGreenlet : public Greenlet - { - private: - static greenlet::PythonAllocator allocator; - OwnedMainGreenlet _main_greenlet; - OwnedObject _run_callable; - OwnedGreenlet _parent; - public: - static void* operator new(size_t UNUSED(count)); - static void operator delete(void* ptr); - - UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent); - virtual ~UserGreenlet(); - - virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; - virtual bool was_running_in_dead_thread() const noexcept; - virtual ThreadState* thread_state() const noexcept; - virtual OwnedObject g_switch(); - virtual const OwnedObject& run() const - { - if (this->started() || !this->_run_callable) { - throw AttributeError("run"); - } - return this->_run_callable; - } - virtual void run(const refs::BorrowedObject nrun); - - virtual const OwnedGreenlet parent() const; - virtual void parent(const refs::BorrowedObject new_parent); - - virtual const refs::BorrowedMainGreenlet main_greenlet() const; - - virtual void murder_in_place(); - virtual bool belongs_to_thread(const ThreadState* state) const; - virtual int tp_traverse(visitproc visit, void* arg); - virtual int tp_clear(); - class ParentIsCurrentGuard - { - private: - OwnedGreenlet oldparent; - UserGreenlet* greenlet; - G_NO_COPIES_OF_CLS(ParentIsCurrentGuard); - public: - ParentIsCurrentGuard(UserGreenlet* p, const ThreadState& thread_state); - ~ParentIsCurrentGuard(); - }; - virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); - protected: - virtual switchstack_result_t g_initialstub(void* mark); - private: - // This function isn't meant to return. - // This accepts raw pointers and the ownership of them at the - // same time. The caller should use ``inner_bootstrap(origin.relinquish_ownership())``. 
- void inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run); - }; - - class BrokenGreenlet : public UserGreenlet - { - private: - static greenlet::PythonAllocator allocator; - public: - bool _force_switch_error = false; - bool _force_slp_switch_error = false; - - static void* operator new(size_t UNUSED(count)); - static void operator delete(void* ptr); - BrokenGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) - : UserGreenlet(p, the_parent) - {} - virtual ~BrokenGreenlet() - {} - - virtual switchstack_result_t g_switchstack(void); - virtual bool force_slp_switch_error() const noexcept; - - }; - - class MainGreenlet : public Greenlet - { - private: - static greenlet::PythonAllocator allocator; - refs::BorrowedMainGreenlet _self; - ThreadState* _thread_state; - G_NO_COPIES_OF_CLS(MainGreenlet); - public: - static void* operator new(size_t UNUSED(count)); - static void operator delete(void* ptr); - - MainGreenlet(refs::BorrowedMainGreenlet::PyType*, ThreadState*); - virtual ~MainGreenlet(); - - - virtual const OwnedObject& run() const; - virtual void run(const refs::BorrowedObject nrun); - - virtual const OwnedGreenlet parent() const; - virtual void parent(const refs::BorrowedObject new_parent); - - virtual const refs::BorrowedMainGreenlet main_greenlet() const; - - virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; - virtual bool was_running_in_dead_thread() const noexcept; - virtual ThreadState* thread_state() const noexcept; - void thread_state(ThreadState*) noexcept; - virtual OwnedObject g_switch(); - virtual int tp_traverse(visitproc visit, void* arg); - }; - - // Instantiate one on the stack to save the GC state, - // and then disable GC. When it goes out of scope, GC will be - // restored to its original state. Sadly, these APIs are only - // available on 3.10+; luckily, we only need them on 3.11+. -#if GREENLET_PY310 - class GCDisabledGuard - { - private: - int was_enabled = 0; - public: - GCDisabledGuard() - : was_enabled(PyGC_IsEnabled()) - { - PyGC_Disable(); - } - - ~GCDisabledGuard() - { - if (this->was_enabled) { - PyGC_Enable(); - } - } - }; -#endif - - OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept; - - //TODO: Greenlet::g_switch() should call this automatically on its - //return value. As it is, the module code is calling it. - static inline OwnedObject - single_result(const OwnedObject& results) - { - if (results - && PyTuple_Check(results.borrow()) - && PyTuple_GET_SIZE(results.borrow()) == 1) { - PyObject* result = PyTuple_GET_ITEM(results.borrow(), 0); - assert(result); - return OwnedObject::owning(result); - } - return results; - } - - - static OwnedObject - g_handle_exit(const OwnedObject& greenlet_result); - - - template - void operator<<(const PyThreadState *const lhs, T& rhs) - { - rhs.operator<<(lhs); - } - -} // namespace greenlet ; - -#endif diff --git a/venv/Lib/site-packages/greenlet/TGreenletGlobals.cpp b/venv/Lib/site-packages/greenlet/TGreenletGlobals.cpp deleted file mode 100644 index 0087d2f..0000000 --- a/venv/Lib/site-packages/greenlet/TGreenletGlobals.cpp +++ /dev/null @@ -1,94 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of GreenletGlobals. 
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_GREENLET_GLOBALS -#define T_GREENLET_GLOBALS - -#include "greenlet_refs.hpp" -#include "greenlet_exceptions.hpp" -#include "greenlet_thread_support.hpp" -#include "greenlet_internal.hpp" - -namespace greenlet { - -// This encapsulates what were previously module global "constants" -// established at init time. -// This is a step towards Python3 style module state that allows -// reloading. -// -// In an earlier iteration of this code, we used placement new to be -// able to allocate this object statically still, so that references -// to its members don't incur an extra pointer indirection. -// But under some scenarios, that could result in crashes at -// shutdown because apparently the destructor was getting run twice? -class GreenletGlobals -{ - -public: - const greenlet::refs::ImmortalEventName event_switch; - const greenlet::refs::ImmortalEventName event_throw; - const greenlet::refs::ImmortalException PyExc_GreenletError; - const greenlet::refs::ImmortalException PyExc_GreenletExit; - const greenlet::refs::ImmortalObject empty_tuple; - const greenlet::refs::ImmortalObject empty_dict; - const greenlet::refs::ImmortalString str_run; - Mutex* const thread_states_to_destroy_lock; - greenlet::cleanup_queue_t thread_states_to_destroy; - - GreenletGlobals() : - event_switch("switch"), - event_throw("throw"), - PyExc_GreenletError("greenlet.error"), - PyExc_GreenletExit("greenlet.GreenletExit", PyExc_BaseException), - empty_tuple(Require(PyTuple_New(0))), - empty_dict(Require(PyDict_New())), - str_run("run"), - thread_states_to_destroy_lock(new Mutex()) - {} - - ~GreenletGlobals() - { - // This object is (currently) effectively immortal, and not - // just because of those placement new tricks; if we try to - // deallocate the static object we allocated, and overwrote, - // we would be doing so at C++ teardown time, which is after - // the final Python GIL is released, and we can't use the API - // then. - // (The members will still be destructed, but they also don't - // do any deallocation.) - } - - void queue_to_destroy(ThreadState* ts) const - { - // we're currently accessed through a static const object, - // implicitly marking our members as const, so code can't just - // call push_back (or pop_back) without casting away the - // const. - // - // Do that for callers. - greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); - q.push_back(ts); - } - - ThreadState* take_next_to_destroy() const - { - greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); - ThreadState* result = q.back(); - q.pop_back(); - return result; - } -}; - -}; // namespace greenlet - -static const greenlet::GreenletGlobals* mod_globs; - -#endif // T_GREENLET_GLOBALS diff --git a/venv/Lib/site-packages/greenlet/TMainGreenlet.cpp b/venv/Lib/site-packages/greenlet/TMainGreenlet.cpp deleted file mode 100644 index ee01481..0000000 --- a/venv/Lib/site-packages/greenlet/TMainGreenlet.cpp +++ /dev/null @@ -1,160 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::MainGreenlet. 
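GreenletGlobals above exposes its cleanup queue through const methods and casts the const away internally, since the object is only ever reached through a static const pointer. A standalone sketch of the same queue pattern, written with mutable members instead of the const_cast (CleanupQueue and Cleanup are illustrative names, not greenlet types):

#include <mutex>
#include <vector>

struct Cleanup;  // stand-in for ThreadState

class CleanupQueue {
public:
    // Const methods so callers holding a `const CleanupQueue*` can still
    // queue and take work; the mutable members do the actual bookkeeping.
    void push(Cleanup* item) const {
        std::lock_guard<std::mutex> guard(lock_);
        items_.push_back(item);
    }
    Cleanup* pop() const {
        std::lock_guard<std::mutex> guard(lock_);
        if (items_.empty()) {
            return nullptr;
        }
        Cleanup* item = items_.back();
        items_.pop_back();
        return item;
    }
private:
    mutable std::mutex lock_;
    mutable std::vector<Cleanup*> items_;
};

Using mutable expresses the same intent (logically-const access with internal bookkeeping) without the cast; the original keeps the cast so the members themselves stay non-mutable.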
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_MAIN_GREENLET_CPP -#define T_MAIN_GREENLET_CPP - -#include "TGreenlet.hpp" - -#ifdef Py_GIL_DISABLED -#include -#endif - -// Incremented when we create a main greenlet, in a new thread, decremented -// when it is destroyed. -#ifdef Py_GIL_DISABLED -static std::atomic G_TOTAL_MAIN_GREENLETS(0); -#else -// Protected by the GIL. -static Py_ssize_t G_TOTAL_MAIN_GREENLETS; -#endif - -namespace greenlet { -greenlet::PythonAllocator MainGreenlet::allocator; - -void* MainGreenlet::operator new(size_t UNUSED(count)) -{ - return allocator.allocate(1); -} - - -void MainGreenlet::operator delete(void* ptr) -{ - return allocator.deallocate(static_cast(ptr), - 1); -} - - -MainGreenlet::MainGreenlet(PyGreenlet* p, ThreadState* state) - : Greenlet(p, StackState::make_main()), - _self(p), - _thread_state(state) -{ - G_TOTAL_MAIN_GREENLETS++; -} - -MainGreenlet::~MainGreenlet() -{ - G_TOTAL_MAIN_GREENLETS--; - this->tp_clear(); -} - -ThreadState* -MainGreenlet::thread_state() const noexcept -{ - return this->_thread_state; -} - -void -MainGreenlet::thread_state(ThreadState* t) noexcept -{ - assert(!t); - this->_thread_state = t; -} - - -const BorrowedMainGreenlet -MainGreenlet::main_greenlet() const -{ - return this->_self; -} - -BorrowedMainGreenlet -MainGreenlet::find_main_greenlet_in_lineage() const -{ - return BorrowedMainGreenlet(this->_self); -} - -bool -MainGreenlet::was_running_in_dead_thread() const noexcept -{ - return !this->_thread_state; -} - -OwnedObject -MainGreenlet::g_switch() -{ - try { - this->check_switch_allowed(); - } - catch (const PyErrOccurred&) { - this->release_args(); - throw; - } - - switchstack_result_t err = this->g_switchstack(); - if (err.status < 0) { - // XXX: This code path is untested, but it is shared - // with the UserGreenlet path that is tested. - return this->on_switchstack_or_initialstub_failure( - this, - err, - true, // target was me - false // was initial stub - ); - } - - return err.the_new_current_greenlet->g_switch_finish(err); -} - -int -MainGreenlet::tp_traverse(visitproc visit, void* arg) -{ - if (this->_thread_state) { - // we've already traversed main, (self), don't do it again. 
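MainGreenlet, like the other concrete greenlet classes in these files, overrides class-scoped operator new/delete so instances always come from a PythonAllocator pool, one object at a time. A generic, self-contained sketch of that pattern with std::allocator standing in for PythonAllocator (the Pooled type is illustrative only):

#include <memory>

class Pooled {
public:
    static void* operator new(std::size_t count);
    static void operator delete(void* ptr);
    int value = 0;
};

// A plain std::allocator stands in for greenlet's PythonAllocator here.
static std::allocator<Pooled> pooled_allocator;

void* Pooled::operator new(std::size_t /*count*/) {
    // Always exactly one object, mirroring the allocator.allocate(1) calls above.
    return pooled_allocator.allocate(1);
}

void Pooled::operator delete(void* ptr) {
    pooled_allocator.deallocate(static_cast<Pooled*>(ptr), 1);
}

int main() {
    Pooled* p = new Pooled();  // routed through Pooled::operator new
    p->value = 42;
    delete p;                  // routed through Pooled::operator delete
    return 0;
}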
- int result = this->_thread_state->tp_traverse(visit, arg, false); - if (result) { - return result; - } - } - return Greenlet::tp_traverse(visit, arg); -} - -const OwnedObject& -MainGreenlet::run() const -{ - throw AttributeError("Main greenlets do not have a run attribute."); -} - -void -MainGreenlet::run(const BorrowedObject UNUSED(nrun)) -{ - throw AttributeError("Main greenlets do not have a run attribute."); -} - -void -MainGreenlet::parent(const BorrowedObject raw_new_parent) -{ - if (!raw_new_parent) { - throw AttributeError("can't delete attribute"); - } - throw AttributeError("cannot set the parent of a main greenlet"); -} - -const OwnedGreenlet -MainGreenlet::parent() const -{ - return OwnedGreenlet(); // null becomes None -} - -}; // namespace greenlet - -#endif diff --git a/venv/Lib/site-packages/greenlet/TPythonState.cpp b/venv/Lib/site-packages/greenlet/TPythonState.cpp deleted file mode 100644 index 6375922..0000000 --- a/venv/Lib/site-packages/greenlet/TPythonState.cpp +++ /dev/null @@ -1,439 +0,0 @@ -#ifndef GREENLET_PYTHON_STATE_CPP -#define GREENLET_PYTHON_STATE_CPP - -#include -#include "TGreenlet.hpp" - -namespace greenlet { - -PythonState::PythonState() - : _top_frame() -#if GREENLET_USE_CFRAME - ,cframe(nullptr) - ,use_tracing(0) -#endif -#if GREENLET_PY314 - ,py_recursion_depth(0) - ,current_executor(nullptr) - ,stackpointer(nullptr) - #ifdef Py_GIL_DISABLED - ,c_stack_refs(nullptr) - #endif -#elif GREENLET_PY312 - ,py_recursion_depth(0) - ,c_recursion_depth(0) -#else - ,recursion_depth(0) -#endif -#if GREENLET_PY313 - ,delete_later(nullptr) -#else - ,trash_delete_nesting(0) -#endif -#if GREENLET_PY311 - ,current_frame(nullptr) - ,datastack_chunk(nullptr) - ,datastack_top(nullptr) - ,datastack_limit(nullptr) -#endif -{ -#if GREENLET_USE_CFRAME - /* - The PyThreadState->cframe pointer usually points to memory on - the stack, alloceted in a call into PyEval_EvalFrameDefault. - - Initially, before any evaluation begins, it points to the - initial PyThreadState object's ``root_cframe`` object, which is - statically allocated for the lifetime of the thread. - - A greenlet can last for longer than a call to - PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer - to be the current ``PyThreadState->cframe``; nor could we use - one from the greenlet parent for the same reason. Yet a further - no: we can't allocate one scoped to the greenlet and then - destroy it when the greenlet is deallocated, because inside the - interpreter the _PyCFrame objects form a linked list, and that too - can result in accessing memory beyond its dynamic lifetime (if - the greenlet doesn't actually finish before it dies, its entry - could still be in the list). - - Using the ``root_cframe`` is problematic, though, because its - members are never modified by the interpreter and are set to 0, - meaning that its ``use_tracing`` flag is never updated. We don't - want to modify that value in the ``root_cframe`` ourself: it - *shouldn't* matter much because we should probably never get - back to the point where that's the only cframe on the stack; - even if it did matter, the major consequence of an incorrect - value for ``use_tracing`` is that if its true the interpreter - does some extra work --- however, it's just good code hygiene. - - Our solution: before a greenlet runs, after its initial - creation, it uses the ``root_cframe`` just to have something to - put there. 
However, once the greenlet is actually switched to - for the first time, ``g_initialstub`` (which doesn't actually - "return" while the greenlet is running) stores a new _PyCFrame on - its local stack, and copies the appropriate values from the - currently running _PyCFrame; this is then made the _PyCFrame for the - newly-minted greenlet. ``g_initialstub`` then proceeds to call - ``glet.run()``, which results in ``PyEval_...`` adding the - _PyCFrame to the list. Switches continue as normal. Finally, when - the greenlet finishes, the call to ``glet.run()`` returns and - the _PyCFrame is taken out of the linked list and the stack value - is now unused and free to expire. - - XXX: I think we can do better. If we're deallocing in the same - thread, can't we traverse the list and unlink our frame? - Can we just keep a reference to the thread state in case we - dealloc in another thread? (Is that even possible if we're still - running and haven't returned from g_initialstub?) - */ - this->cframe = &PyThreadState_GET()->root_cframe; -#endif -} - - -inline void PythonState::may_switch_away() noexcept -{ -#if GREENLET_PY311 - // PyThreadState_GetFrame is probably going to have to allocate a - // new frame object. That may trigger garbage collection. Because - // we call this during the early phases of a switch (it doesn't - // matter to which greenlet, as this has a global effect), if a GC - // triggers a switch away, two things can happen, both bad: - // - We might not get switched back to, halting forward progress. - // this is pathological, but possible. - // - We might get switched back to with a different set of - // arguments or a throw instead of a switch. That would corrupt - // our state (specifically, PyErr_Occurred() and this->args() - // would no longer agree). - // - // Thus, when we call this API, we need to have GC disabled. - // This method serves as a bottleneck we call when maybe beginning - // a switch. In this way, it is always safe -- no risk of GC -- to - // use ``_GetFrame()`` whenever we need to, just as it was in - // <=3.10 (because subsequent calls will be cached and not - // allocate memory). - - GCDisabledGuard no_gc; - Py_XDECREF(PyThreadState_GetFrame(PyThreadState_GET())); -#endif -} - -void PythonState::operator<<(const PyThreadState *const tstate) noexcept -{ - this->_context.steal(tstate->context); -#if GREENLET_USE_CFRAME - /* - IMPORTANT: ``cframe`` is a pointer into the STACK. Thus, because - the call to ``slp_switch()`` changes the contents of the stack, - you cannot read from ``ts_current->cframe`` after that call and - necessarily get the same values you get from reading it here. - Anything you need to restore from now to then must be saved in a - global/threadlocal variable (because we can't use stack - variables here either). For things that need to persist across - the switch, use `will_switch_from`. 
- */ - this->cframe = tstate->cframe; - #if !GREENLET_PY312 - this->use_tracing = tstate->cframe->use_tracing; - #endif -#endif // GREENLET_USE_CFRAME -#if GREENLET_PY311 - #if GREENLET_PY314 - this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; - this->current_executor = tstate->current_executor; - #ifdef Py_GIL_DISABLED - this->c_stack_refs = ((_PyThreadStateImpl*)tstate)->c_stack_refs; - #endif - #elif GREENLET_PY312 - this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; - this->c_recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - #else // not 312 - this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; - #endif // GREENLET_PY312 - #if GREENLET_PY313 - this->current_frame = tstate->current_frame; - #elif GREENLET_USE_CFRAME - this->current_frame = tstate->cframe->current_frame; - #endif - this->datastack_chunk = tstate->datastack_chunk; - this->datastack_top = tstate->datastack_top; - this->datastack_limit = tstate->datastack_limit; - - PyFrameObject *frame = PyThreadState_GetFrame((PyThreadState *)tstate); - Py_XDECREF(frame); // PyThreadState_GetFrame gives us a new - // reference. - this->_top_frame.steal(frame); - #if GREENLET_PY314 - if (this->top_frame()) { - this->stackpointer = this->_top_frame->f_frame->stackpointer; - } - else { - this->stackpointer = nullptr; - } - #endif - #if GREENLET_PY313 - this->delete_later = Py_XNewRef(tstate->delete_later); - #elif GREENLET_PY312 - this->trash_delete_nesting = tstate->trash.delete_nesting; - #else // not 312 - this->trash_delete_nesting = tstate->trash_delete_nesting; - #endif // GREENLET_PY312 -#else // Not 311 - this->recursion_depth = tstate->recursion_depth; - this->_top_frame.steal(tstate->frame); - this->trash_delete_nesting = tstate->trash_delete_nesting; -#endif // GREENLET_PY311 -} - -#if GREENLET_PY312 -void GREENLET_NOINLINE(PythonState::unexpose_frames)() -{ - if (!this->top_frame()) { - return; - } - - // See GreenletState::expose_frames() and the comment on frames_were_exposed - // for more information about this logic. - _PyInterpreterFrame *iframe = this->_top_frame->f_frame; - while (iframe != nullptr) { - _PyInterpreterFrame *prev_exposed = iframe->previous; - assert(iframe->frame_obj); - memcpy(&iframe->previous, &iframe->frame_obj->_f_frame_data[0], - sizeof(void *)); - iframe = prev_exposed; - } -} -#else -void PythonState::unexpose_frames() -{} -#endif - -void PythonState::operator>>(PyThreadState *const tstate) noexcept -{ - tstate->context = this->_context.relinquish_ownership(); - /* Incrementing this value invalidates the contextvars cache, - which would otherwise remain valid across switches */ - tstate->context_ver++; -#if GREENLET_USE_CFRAME - tstate->cframe = this->cframe; - /* - If we were tracing, we need to keep tracing. - There should never be the possibility of hitting the - root_cframe here. See note above about why we can't - just copy this from ``origin->cframe->use_tracing``. 
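The save path in operator<< above records a depth computed as limit minus remaining, and the restore path in operator>> below recomputes remaining as limit minus that stored depth. A tiny standalone check of that round trip, using plain ints rather than the real thread-state fields:

#include <cassert>

int main() {
    const int limit = 1000;      // stand-in for py_recursion_limit
    int remaining = 713;         // stand-in for py_recursion_remaining

    int saved_depth = limit - remaining;   // what the save path stores
    remaining = 0;                         // pretend another greenlet ran meanwhile

    remaining = limit - saved_depth;       // what the restore path recomputes
    assert(remaining == 713);              // the depth survives the switch
    return 0;
}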
- */ - #if !GREENLET_PY312 - tstate->cframe->use_tracing = this->use_tracing; - #endif -#endif // GREENLET_USE_CFRAME -#if GREENLET_PY311 - #if GREENLET_PY314 - tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth; - tstate->current_executor = this->current_executor; - #ifdef Py_GIL_DISABLED - ((_PyThreadStateImpl*)tstate)->c_stack_refs = this->c_stack_refs; - #endif - this->unexpose_frames(); - #elif GREENLET_PY312 - tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth; - tstate->c_recursion_remaining = Py_C_RECURSION_LIMIT - this->c_recursion_depth; - this->unexpose_frames(); - #else // \/ 3.11 - tstate->recursion_remaining = tstate->recursion_limit - this->recursion_depth; - #endif // GREENLET_PY312 - #if GREENLET_PY313 - tstate->current_frame = this->current_frame; - #elif GREENLET_USE_CFRAME - tstate->cframe->current_frame = this->current_frame; - #endif - tstate->datastack_chunk = this->datastack_chunk; - tstate->datastack_top = this->datastack_top; - tstate->datastack_limit = this->datastack_limit; -#if GREENLET_PY314 && defined(Py_GIL_DISABLED) - if (this->top_frame()) { - this->_top_frame->f_frame->stackpointer = this->stackpointer; - } -#endif - this->_top_frame.relinquish_ownership(); - #if GREENLET_PY313 - Py_XDECREF(tstate->delete_later); - tstate->delete_later = this->delete_later; - Py_CLEAR(this->delete_later); - #elif GREENLET_PY312 - tstate->trash.delete_nesting = this->trash_delete_nesting; - #else // not 3.12 - tstate->trash_delete_nesting = this->trash_delete_nesting; - #endif // GREENLET_PY312 -#else // not 3.11 - tstate->frame = this->_top_frame.relinquish_ownership(); - tstate->recursion_depth = this->recursion_depth; - tstate->trash_delete_nesting = this->trash_delete_nesting; -#endif // GREENLET_PY311 -} - -inline void PythonState::will_switch_from(PyThreadState *const origin_tstate) noexcept -{ -#if GREENLET_USE_CFRAME && !GREENLET_PY312 - // The weird thing is, we don't actually save this for an - // effect on the current greenlet, it's saved for an - // effect on the target greenlet. That is, we want - // continuity of this setting across the greenlet switch. - this->use_tracing = origin_tstate->cframe->use_tracing; -#endif -} - -void PythonState::set_initial_state(const PyThreadState* const tstate) noexcept -{ - this->_top_frame = nullptr; -#if GREENLET_PY314 - this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; - this->current_executor = tstate->current_executor; -#elif GREENLET_PY312 - this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; - // XXX: TODO: Comment from a reviewer: - // Should this be ``Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining``? - // But to me it looks more like that might not be the right - // initialization either? - this->c_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; -#elif GREENLET_PY311 - this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; -#else - this->recursion_depth = tstate->recursion_depth; -#endif -} -// TODO: Better state management about when we own the top frame. -int PythonState::tp_traverse(visitproc visit, void* arg, bool own_top_frame) noexcept -{ - Py_VISIT(this->_context.borrow()); - if (own_top_frame) { - Py_VISIT(this->_top_frame.borrow()); - } -#if GREENLET_PY314 - // TODO: Should we be visiting the c_stack_refs objects? 
- // CPython uses a specific macro to do that which takes into - // account boxing and null values and then calls - // ``_PyGC_VisitStackRef``, but we don't have access to that, and - // we can't duplicate it ourself (because it compares - // ``visitproc`` to another function we can't access). - // The naive way of looping over c_stack_refs->ref and visiting - // those crashes the process (at least with GIL disabled). -#endif - return 0; -} - -void PythonState::tp_clear(bool own_top_frame) noexcept -{ - PythonStateContext::tp_clear(); - // If we get here owning a frame, - // we got dealloc'd without being finished. We may or may not be - // in the same thread. - if (own_top_frame) { - this->_top_frame.CLEAR(); - } -} - -#if GREENLET_USE_CFRAME -void PythonState::set_new_cframe(_PyCFrame& frame) noexcept -{ - frame = *PyThreadState_GET()->cframe; - /* Make the target greenlet refer to the stack value. */ - this->cframe = &frame; - /* - And restore the link to the previous frame so this one gets - unliked appropriately. - */ - this->cframe->previous = &PyThreadState_GET()->root_cframe; -} -#endif - -const PythonState::OwnedFrame& PythonState::top_frame() const noexcept -{ - return this->_top_frame; -} - -void PythonState::did_finish(PyThreadState* tstate) noexcept -{ -#if GREENLET_PY311 - // See https://github.com/gevent/gevent/issues/1924 and - // https://github.com/python-greenlet/greenlet/issues/328. In - // short, Python 3.11 allocates memory for frames as a sort of - // linked list that's kept as part of PyThreadState in the - // ``datastack_chunk`` member and friends. These are saved and - // restored as part of switching greenlets. - // - // When we initially switch to a greenlet, we set those to NULL. - // That causes the frame management code to treat this like a - // brand new thread and start a fresh list of chunks, beginning - // with a new "root" chunk. As we make calls in this greenlet, - // those chunks get added, and as calls return, they get popped. - // But the frame code (pystate.c) is careful to make sure that the - // root chunk never gets popped. - // - // Thus, when a greenlet exits for the last time, there will be at - // least a single root chunk that we must be responsible for - // deallocating. - // - // The complex part is that these chunks are allocated and freed - // using ``_PyObject_VirtualAlloc``/``Free``. Those aren't public - // functions, and they aren't exported for linking. It so happens - // that we know they are just thin wrappers around the Arena - // allocator, so we can use that directly to deallocate in a - // compatible way. - // - // CAUTION: Check this implementation detail on every major version. - // - // It might be nice to be able to do this in our destructor, but - // can we be sure that no one else is using that memory? Plus, as - // described below, our pointers may not even be valid anymore. As - // a special case, there is one time that we know we can do this, - // and that's from the destructor of the associated UserGreenlet - // (NOT main greenlet) - PyObjectArenaAllocator alloc; - _PyStackChunk* chunk = nullptr; - if (tstate) { - // We really did finish, we can never be switched to again. - chunk = tstate->datastack_chunk; - // Unfortunately, we can't do much sanity checking. Our - // this->datastack_chunk pointer is out of date (evaluation may - // have popped down through it already) so we can't verify that - // we deallocate it. I don't think we can even check datastack_top - // for the same reason. 
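The cleanup in did_finish that follows walks the leftover datastack chunks as a singly linked list and hands each one back to the arena allocator's free hook. A self-contained sketch of that walk-and-free loop, with a toy Chunk type and a plain function pointer standing in for PyObjectArenaAllocator.free (all names here are illustrative, not CPython's):

#include <cstddef>

struct Chunk {
    Chunk* previous;    // older chunk in the list, or nullptr
    std::size_t size;
};

// Stand-in for the arena allocator's free hook: (ctx, ptr, size).
typedef void (*free_hook)(void* ctx, void* ptr, std::size_t size);

static void free_chunks(Chunk* chunk, free_hook free_fn, void* ctx) {
    while (chunk) {
        Chunk* prev = chunk->previous;
        chunk->previous = nullptr;         // unlink before handing it back
        free_fn(ctx, chunk, chunk->size);
        chunk = prev;
    }
}

static void delete_chunk(void* /*ctx*/, void* ptr, std::size_t /*size*/) {
    delete static_cast<Chunk*>(ptr);
}

int main() {
    // Two heap "chunks" linked newest-to-oldest, like a datastack chain.
    Chunk* older = new Chunk{nullptr, sizeof(Chunk)};
    Chunk* newer = new Chunk{older, sizeof(Chunk)};
    free_chunks(newer, delete_chunk, nullptr);
    return 0;
}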
- - PyObject_GetArenaAllocator(&alloc); - tstate->datastack_chunk = nullptr; - tstate->datastack_limit = nullptr; - tstate->datastack_top = nullptr; - - } - else if (this->datastack_chunk) { - // The UserGreenlet (NOT the main greenlet!) is being deallocated. If we're - // still holding a stack chunk, it's garbage because we know - // we can never switch back to let cPython clean it up. - // Because the last time we got switched away from, and we - // haven't run since then, we know our chain is valid and can - // be dealloced. - chunk = this->datastack_chunk; - PyObject_GetArenaAllocator(&alloc); - } - - if (alloc.free && chunk) { - // In case the arena mechanism has been torn down already. - while (chunk) { - _PyStackChunk *prev = chunk->previous; - chunk->previous = nullptr; - alloc.free(alloc.ctx, chunk, chunk->size); - chunk = prev; - } - } - - this->datastack_chunk = nullptr; - this->datastack_limit = nullptr; - this->datastack_top = nullptr; -#endif -} - - -}; // namespace greenlet - -#endif // GREENLET_PYTHON_STATE_CPP diff --git a/venv/Lib/site-packages/greenlet/TStackState.cpp b/venv/Lib/site-packages/greenlet/TStackState.cpp deleted file mode 100644 index 9743ab5..0000000 --- a/venv/Lib/site-packages/greenlet/TStackState.cpp +++ /dev/null @@ -1,265 +0,0 @@ -#ifndef GREENLET_STACK_STATE_CPP -#define GREENLET_STACK_STATE_CPP - -#include "TGreenlet.hpp" - -namespace greenlet { - -#ifdef GREENLET_USE_STDIO -#include -using std::cerr; -using std::endl; - -std::ostream& operator<<(std::ostream& os, const StackState& s) -{ - os << "StackState(stack_start=" << (void*)s._stack_start - << ", stack_stop=" << (void*)s.stack_stop - << ", stack_copy=" << (void*)s.stack_copy - << ", stack_saved=" << s._stack_saved - << ", stack_prev=" << s.stack_prev - << ", addr=" << &s - << ")"; - return os; -} -#endif - -StackState::StackState(void* mark, StackState& current) - : _stack_start(nullptr), - stack_stop((char*)mark), - stack_copy(nullptr), - _stack_saved(0), - /* Skip a dying greenlet */ - stack_prev(current._stack_start - ? 
&current - : current.stack_prev) -{ -} - -StackState::StackState() - : _stack_start(nullptr), - stack_stop(nullptr), - stack_copy(nullptr), - _stack_saved(0), - stack_prev(nullptr) -{ -} - -StackState::StackState(const StackState& other) -// can't use a delegating constructor because of -// MSVC for Python 2.7 - : _stack_start(nullptr), - stack_stop(nullptr), - stack_copy(nullptr), - _stack_saved(0), - stack_prev(nullptr) -{ - this->operator=(other); -} - -StackState& StackState::operator=(const StackState& other) -{ - if (&other == this) { - return *this; - } - if (other._stack_saved) { - throw std::runtime_error("Refusing to steal memory."); - } - - //If we have memory allocated, dispose of it - this->free_stack_copy(); - - this->_stack_start = other._stack_start; - this->stack_stop = other.stack_stop; - this->stack_copy = other.stack_copy; - this->_stack_saved = other._stack_saved; - this->stack_prev = other.stack_prev; - return *this; -} - -inline void StackState::free_stack_copy() noexcept -{ - PyMem_Free(this->stack_copy); - this->stack_copy = nullptr; - this->_stack_saved = 0; -} - -inline void StackState::copy_heap_to_stack(const StackState& current) noexcept -{ - - /* Restore the heap copy back into the C stack */ - if (this->_stack_saved != 0) { - memcpy(this->_stack_start, this->stack_copy, this->_stack_saved); - this->free_stack_copy(); - } - StackState* owner = const_cast<StackState*>(&current); - if (!owner->_stack_start) { - owner = owner->stack_prev; /* greenlet is dying, skip it */ - } - while (owner && owner->stack_stop <= this->stack_stop) { - // cerr << "\tOwner: " << owner << endl; - owner = owner->stack_prev; /* find greenlet with more stack */ - } - this->stack_prev = owner; - // cerr << "\tFinished with: " << *this << endl; -} - -inline int StackState::copy_stack_to_heap_up_to(const char* const stop) noexcept -{ - /* Save more of g's stack into the heap -- at least up to 'stop' - g->stack_stop |________| - | | - | __ stop . . . . . - | | ==> . .
- |________| _______ - | | | | - | | | | - g->stack_start | | |_______| g->stack_copy - */ - intptr_t sz1 = this->_stack_saved; - intptr_t sz2 = stop - this->_stack_start; - assert(this->_stack_start); - if (sz2 > sz1) { - char* c = (char*)PyMem_Realloc(this->stack_copy, sz2); - if (!c) { - PyErr_NoMemory(); - return -1; - } - memcpy(c + sz1, this->_stack_start + sz1, sz2 - sz1); - this->stack_copy = c; - this->_stack_saved = sz2; - } - return 0; -} - -inline int StackState::copy_stack_to_heap(char* const stackref, - const StackState& current) noexcept -{ - /* must free all the C stack up to target_stop */ - const char* const target_stop = this->stack_stop; - - StackState* owner = const_cast<StackState*>(&current); - assert(owner->_stack_saved == 0); // everything is present on the stack - if (!owner->_stack_start) { - owner = owner->stack_prev; /* not saved if dying */ - } - else { - owner->_stack_start = stackref; - } - - while (owner->stack_stop < target_stop) { - /* ts_current is entierely within the area to free */ - if (owner->copy_stack_to_heap_up_to(owner->stack_stop)) { - return -1; /* XXX */ - } - owner = owner->stack_prev; - } - if (owner != this) { - if (owner->copy_stack_to_heap_up_to(target_stop)) { - return -1; /* XXX */ - } - } - return 0; -} - -inline bool StackState::started() const noexcept -{ - return this->stack_stop != nullptr; -} - -inline bool StackState::main() const noexcept -{ - return this->stack_stop == (char*)-1; -} - -inline bool StackState::active() const noexcept -{ - return this->_stack_start != nullptr; -} - -inline void StackState::set_active() noexcept -{ - assert(this->_stack_start == nullptr); - this->_stack_start = (char*)1; -} - -inline void StackState::set_inactive() noexcept -{ - this->_stack_start = nullptr; - // XXX: What if we still have memory out there? - // That case is actually triggered by - // test_issue251_issue252_explicit_reference_not_collectable (greenlet.tests.test_leaks.TestLeaks) - // and - // test_issue251_issue252_need_to_collect_in_background - // (greenlet.tests.test_leaks.TestLeaks) - // - // Those objects never get deallocated, so the destructor never - // runs. - // It *seems* safe to clean up the memory here? - if (this->_stack_saved) { - this->free_stack_copy(); - } -} - -inline intptr_t StackState::stack_saved() const noexcept -{ - return this->_stack_saved; -} - -inline char* StackState::stack_start() const noexcept -{ - return this->_stack_start; -} - - -inline StackState StackState::make_main() noexcept -{ - StackState s; - s._stack_start = (char*)1; - s.stack_stop = (char*)-1; - return s; -} - -StackState::~StackState() -{ - if (this->_stack_saved != 0) { - this->free_stack_copy(); - } -} - -void StackState::copy_from_stack(void* vdest, const void* vsrc, size_t n) const -{ - char* dest = static_cast<char*>(vdest); - const char* src = static_cast<const char*>(vsrc); - if (src + n <= this->_stack_start - || src >= this->_stack_start + this->_stack_saved - || this->_stack_saved == 0) { - // Nothing we're copying was spilled from the stack - memcpy(dest, src, n); - return; - } - - if (src < this->_stack_start) { - // Copy the part before the saved stack. - // We know src + n > _stack_start due to the test above.
- const size_t nbefore = this->_stack_start - src; - memcpy(dest, src, nbefore); - dest += nbefore; - src += nbefore; - n -= nbefore; - } - // We know src >= _stack_start after the before-copy, and - // src < _stack_start + _stack_saved due to the first if condition - size_t nspilled = std::min(n, this->_stack_start + this->_stack_saved - src); - memcpy(dest, this->stack_copy + (src - this->_stack_start), nspilled); - dest += nspilled; - src += nspilled; - n -= nspilled; - if (n > 0) { - // Copy the part after the saved stack - memcpy(dest, src, n); - } -} - -}; // namespace greenlet - -#endif // GREENLET_STACK_STATE_CPP diff --git a/venv/Lib/site-packages/greenlet/TThreadState.hpp b/venv/Lib/site-packages/greenlet/TThreadState.hpp deleted file mode 100644 index b3451a0..0000000 --- a/venv/Lib/site-packages/greenlet/TThreadState.hpp +++ /dev/null @@ -1,523 +0,0 @@ -#ifndef GREENLET_THREAD_STATE_HPP -#define GREENLET_THREAD_STATE_HPP - -#include -#include -#include - -#include "greenlet_internal.hpp" -#include "greenlet_refs.hpp" -#include "greenlet_thread_support.hpp" - -using greenlet::refs::BorrowedObject; -using greenlet::refs::BorrowedGreenlet; -using greenlet::refs::BorrowedMainGreenlet; -using greenlet::refs::OwnedMainGreenlet; -using greenlet::refs::OwnedObject; -using greenlet::refs::OwnedGreenlet; -using greenlet::refs::OwnedList; -using greenlet::refs::PyErrFetchParam; -using greenlet::refs::PyArgParseParam; -using greenlet::refs::ImmortalString; -using greenlet::refs::CreatedModule; -using greenlet::refs::PyErrPieces; -using greenlet::refs::NewReference; - -namespace greenlet { -/** - * Thread-local state of greenlets. - * - * Each native thread will get exactly one of these objects, - * automatically accessed through the best available thread-local - * mechanism the compiler supports (``thread_local`` for C++11 - * compilers or ``__thread``/``declspec(thread)`` for older GCC/clang - * or MSVC, respectively.) - * - * Previously, we kept thread-local state mostly in a bunch of - * ``static volatile`` variables in the main greenlet file.. This had - * the problem of requiring extra checks, loops, and great care - * accessing these variables if we potentially invoked any Python code - * that could release the GIL, because the state could change out from - * under us. Making the variables thread-local solves this problem. - * - * When we detected that a greenlet API accessing the current greenlet - * was invoked from a different thread than the greenlet belonged to, - * we stored a reference to the greenlet in the Python thread - * dictionary for the thread the greenlet belonged to. This could lead - * to memory leaks if the thread then exited (because of a reference - * cycle, as greenlets referred to the thread dictionary, and deleting - * non-current greenlets leaked their frame plus perhaps arguments on - * the C stack). If a thread exited while still having running - * greenlet objects (perhaps that had just switched back to the main - * greenlet), and did not invoke one of the greenlet APIs *in that - * thread, immediately before it exited, without some other thread - * then being invoked*, such a leak was guaranteed. - * - * This can be partly solved by using compiler thread-local variables - * instead of the Python thread dictionary, thus avoiding a cycle. - * - * To fully solve this problem, we need a reliable way to know that a - * thread is done and we should clean up the main greenlet. 
On POSIX, - * we can use the destructor function of ``pthread_key_create``, but - * there's nothing similar on Windows; a C++11 thread local object - * reliably invokes its destructor when the thread it belongs to exits - * (non-C++11 compilers offer ``__thread`` or ``declspec(thread)`` to - * create thread-local variables, but they can't hold C++ objects that - * invoke destructors; the C++11 version is the most portable solution - * I found). When the thread exits, we can drop references and - * otherwise manipulate greenlets and frames that we know can no - * longer be switched to. - * - * There are two small wrinkles. The first is that when the thread - * exits, it is too late to actually invoke Python APIs: the Python - * thread state is gone, and the GIL is released. To solve *this* - * problem, our destructor uses ``Py_AddPendingCall`` to transfer the - * destruction work to the main thread. - * - * The second is that once the thread exits, the thread local object - * is invalid and we can't even access a pointer to it, so we can't - * pass it to ``Py_AddPendingCall``. This is handled by actually using - * a second object that's thread local (ThreadStateCreator) and having - * it dynamically allocate this object so it can live until the - * pending call runs. - */ - - - -class ThreadState { -private: - // As of commit 08ad1dd7012b101db953f492e0021fb08634afad - // this class needed 56 bytes in o Py_DEBUG build - // on 64-bit macOS 11. - // Adding the vector takes us up to 80 bytes () - - /* Strong reference to the main greenlet */ - OwnedMainGreenlet main_greenlet; - - /* Strong reference to the current greenlet. */ - OwnedGreenlet current_greenlet; - - /* Strong reference to the trace function, if any. */ - OwnedObject tracefunc; - - typedef std::vector > deleteme_t; - /* A vector of raw PyGreenlet pointers representing things that need - deleted when this thread is running. The vector owns the - references, but you need to manually INCREF/DECREF as you use - them. We don't use a vector because we - make copy of this vector, and that would become O(n) as all the - refcounts are incremented in the copy. - */ - deleteme_t deleteme; - -#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED - void* exception_state; -#endif - -#ifdef Py_GIL_DISABLED - static std::atomic _clocks_used_doing_gc; -#else - static std::clock_t _clocks_used_doing_gc; -#endif - static ImmortalString get_referrers_name; - static PythonAllocator allocator; - - G_NO_COPIES_OF_CLS(ThreadState); - - - // Allocates a main greenlet for the thread state. If this fails, - // exits the process. Called only during constructing a ThreadState. 
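The long comment above boils down to one C++ mechanism: a thread_local object's destructor runs when its owning thread exits, and that destructor is where greenlet hands cleanup of the thread's main greenlet off to a pending call, since Python APIs can no longer be used directly at that point. Stripped of the Python machinery, the mechanism looks roughly like this (PerThreadCleanup is an illustrative stand-in for ThreadStateCreator):

#include <cstdio>
#include <thread>

struct PerThreadCleanup {
    ~PerThreadCleanup() {
        // In greenlet this is where cleanup gets queued for later,
        // because the Python thread state is already gone here.
        std::printf("thread %s is exiting\n", name);
    }
    const char* name = "unnamed";
};

static thread_local PerThreadCleanup per_thread_cleanup;

int main() {
    std::thread worker([] {
        per_thread_cleanup.name = "worker";   // touch it so it gets constructed
    });
    worker.join();                            // destructor already ran at thread exit
    per_thread_cleanup.name = "main";
    return 0;                                 // main's copy destructs at process exit
}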
- MainGreenlet* alloc_main() - { - PyGreenlet* gmain; - - /* create the main greenlet for this thread */ - gmain = reinterpret_cast(PyType_GenericAlloc(&PyGreenlet_Type, 0)); - if (gmain == NULL) { - throw PyFatalError("alloc_main failed to alloc"); //exits the process - } - - MainGreenlet* const main = new MainGreenlet(gmain, this); - - assert(Py_REFCNT(gmain) == 1); - assert(gmain->pimpl == main); - return main; - } - - -public: - static void* operator new(size_t UNUSED(count)) - { - return ThreadState::allocator.allocate(1); - } - - static void operator delete(void* ptr) - { - return ThreadState::allocator.deallocate(static_cast(ptr), - 1); - } - - static void init() - { - ThreadState::get_referrers_name = "get_referrers"; - ThreadState::set_clocks_used_doing_gc(0); - } - - ThreadState() - { - -#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED - this->exception_state = slp_get_exception_state(); -#endif - - // XXX: Potentially dangerous, exposing a not fully - // constructed object. - MainGreenlet* const main = this->alloc_main(); - this->main_greenlet = OwnedMainGreenlet::consuming( - main->self() - ); - assert(this->main_greenlet); - this->current_greenlet = main->self(); - // The main greenlet starts with 1 refs: The returned one. We - // then copied it to the current greenlet. - assert(this->main_greenlet.REFCNT() == 2); - } - - inline void restore_exception_state() - { -#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED - // It's probably important this be inlined and only call C - // functions to avoid adding an SEH frame. - slp_set_exception_state(this->exception_state); -#endif - } - - inline bool has_main_greenlet() const noexcept - { - return bool(this->main_greenlet); - } - - // Called from the ThreadStateCreator when we're in non-standard - // threading mode. In that case, there is an object in the Python - // thread state dictionary that points to us. The main greenlet - // also traverses into us, in which case it's crucial not to - // traverse back into the main greenlet. - int tp_traverse(visitproc visit, void* arg, bool traverse_main=true) - { - if (traverse_main) { - Py_VISIT(main_greenlet.borrow_o()); - } - if (traverse_main || current_greenlet != main_greenlet) { - Py_VISIT(current_greenlet.borrow_o()); - } - Py_VISIT(tracefunc.borrow()); - return 0; - } - - inline BorrowedMainGreenlet borrow_main_greenlet() const noexcept - { - assert(this->main_greenlet); - assert(this->main_greenlet.REFCNT() >= 2); - return this->main_greenlet; - }; - - inline OwnedMainGreenlet get_main_greenlet() const noexcept - { - return this->main_greenlet; - } - - /** - * In addition to returning a new reference to the currunt - * greenlet, this performs any maintenance needed. - */ - inline OwnedGreenlet get_current() - { - /* green_dealloc() cannot delete greenlets from other threads, so - it stores them in the thread dict; delete them now. */ - this->clear_deleteme_list(); - //assert(this->current_greenlet->main_greenlet == this->main_greenlet); - //assert(this->main_greenlet->main_greenlet == this->main_greenlet); - return this->current_greenlet; - } - - /** - * As for non-const get_current(); - */ - inline BorrowedGreenlet borrow_current() - { - this->clear_deleteme_list(); - return this->current_greenlet; - } - - /** - * Does no maintenance. 
- */ - inline OwnedGreenlet get_current() const - { - return this->current_greenlet; - } - - template - inline bool is_current(const refs::PyObjectPointer& obj) const - { - return this->current_greenlet.borrow_o() == obj.borrow_o(); - } - - inline void set_current(const OwnedGreenlet& target) - { - this->current_greenlet = target; - } - -private: - /** - * Deref and remove the greenlets from the deleteme list. Must be - * holding the GIL. - * - * If *murder* is true, then we must be called from a different - * thread than the one that these greenlets were running in. - * In that case, if the greenlet was actually running, we destroy - * the frame reference and otherwise make it appear dead before - * proceeding; otherwise, we would try (and fail) to raise an - * exception in it and wind up right back in this list. - */ - inline void clear_deleteme_list(const bool murder=false) - { - if (!this->deleteme.empty()) { - // It's possible we could add items to this list while - // running Python code if there's a thread switch, so we - // need to defensively copy it before that can happen. - deleteme_t copy = this->deleteme; - this->deleteme.clear(); // in case things come back on the list - for(deleteme_t::iterator it = copy.begin(), end = copy.end(); - it != end; - ++it ) { - PyGreenlet* to_del = *it; - if (murder) { - // Force each greenlet to appear dead; we can't raise an - // exception into it anymore anyway. - to_del->pimpl->murder_in_place(); - } - - // The only reference to these greenlets should be in - // this list, decreffing them should let them be - // deleted again, triggering calls to green_dealloc() - // in the correct thread (if we're not murdering). - // This may run arbitrary Python code and switch - // threads or greenlets! - Py_DECREF(to_del); - if (PyErr_Occurred()) { - PyErr_WriteUnraisable(nullptr); - PyErr_Clear(); - } - } - } - } - -public: - - /** - * Returns a new reference, or a false object. - */ - inline OwnedObject get_tracefunc() const - { - return tracefunc; - }; - - - inline void set_tracefunc(BorrowedObject tracefunc) - { - assert(tracefunc); - if (tracefunc == BorrowedObject(Py_None)) { - this->tracefunc.CLEAR(); - } - else { - this->tracefunc = tracefunc; - } - } - - /** - * Given a reference to a greenlet that some other thread - * attempted to delete (has a refcount of 0) store it for later - * deletion when the thread this state belongs to is current. - */ - inline void delete_when_thread_running(PyGreenlet* to_del) - { - Py_INCREF(to_del); - this->deleteme.push_back(to_del); - } - - /** - * Set to std::clock_t(-1) to disable. - */ - inline static std::clock_t clocks_used_doing_gc() - { -#ifdef Py_GIL_DISABLED - return ThreadState::_clocks_used_doing_gc.load(std::memory_order_relaxed); -#else - return ThreadState::_clocks_used_doing_gc; -#endif - } - - inline static void set_clocks_used_doing_gc(std::clock_t value) - { -#ifdef Py_GIL_DISABLED - ThreadState::_clocks_used_doing_gc.store(value, std::memory_order_relaxed); -#else - ThreadState::_clocks_used_doing_gc = value; -#endif - } - - inline static void add_clocks_used_doing_gc(std::clock_t value) - { -#ifdef Py_GIL_DISABLED - ThreadState::_clocks_used_doing_gc.fetch_add(value, std::memory_order_relaxed); -#else - ThreadState::_clocks_used_doing_gc += value; -#endif - } - - ~ThreadState() - { - if (!PyInterpreterState_Head()) { - // We shouldn't get here (our callers protect us) - // but if we do, all we can do is bail early. 
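clear_deleteme_list above snapshots the list and empties it before touching any element, because each Py_DECREF can run arbitrary Python code that re-enters and appends to the same list. The same drain pattern in plain C++, with a callback standing in for the DECREF side effects:

#include <vector>
#include <functional>

// Drain `items`, allowing process() to append new work to `items` while the
// drain is running; newly added items simply wait for the next drain.
static void drain(std::vector<int>& items, const std::function<void(int)>& process) {
    std::vector<int> copy = items;  // snapshot first
    items.clear();                  // so re-entrant pushes land on a fresh list
    for (int item : copy) {
        process(item);              // may push_back into `items` again
    }
}

int main() {
    std::vector<int> work = {1, 2, 3};
    drain(work, [&work](int item) {
        if (item == 2) {
            work.push_back(99);     // re-entrant append, like a DECREF side effect
        }
    });
    return work.size() == 1 ? 0 : 1;  // only the re-added item remains queued
}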
- return; - } - - // We should not have an "origin" greenlet; that only exists - // for the temporary time during a switch, which should not - // be in progress as the thread dies. - //assert(!this->switching_state.origin); - - this->tracefunc.CLEAR(); - - // Forcibly GC as much as we can. - this->clear_deleteme_list(true); - - // The pending call did this. - assert(this->main_greenlet->thread_state() == nullptr); - - // If the main greenlet is the current greenlet, - // then we "fell off the end" and the thread died. - // It's possible that there is some other greenlet that - // switched to us, leaving a reference to the main greenlet - // on the stack, somewhere uncollectible. Try to detect that. - if (this->current_greenlet == this->main_greenlet && this->current_greenlet) { - assert(this->current_greenlet->is_currently_running_in_some_thread()); - // Drop one reference we hold. - this->current_greenlet.CLEAR(); - assert(!this->current_greenlet); - // Only our reference to the main greenlet should be left, - // But hold onto the pointer in case we need to do extra cleanup. - PyGreenlet* old_main_greenlet = this->main_greenlet.borrow(); - Py_ssize_t cnt = this->main_greenlet.REFCNT(); - this->main_greenlet.CLEAR(); - if (ThreadState::clocks_used_doing_gc() != std::clock_t(-1) - && cnt == 2 && Py_REFCNT(old_main_greenlet) == 1) { - // Highly likely that the reference is somewhere on - // the stack, not reachable by GC. Verify. - // XXX: This is O(n) in the total number of objects. - // TODO: Add a way to disable this at runtime, and - // another way to report on it. - std::clock_t begin = std::clock(); - NewReference gc(PyImport_ImportModule("gc")); - if (gc) { - OwnedObject get_referrers = gc.PyRequireAttr(ThreadState::get_referrers_name); - OwnedList refs(get_referrers.PyCall(old_main_greenlet)); - if (refs && refs.empty()) { - assert(refs.REFCNT() == 1); - // We found nothing! So we left a dangling - // reference: Probably the last thing some - // other greenlet did was call - // 'getcurrent().parent.switch()' to switch - // back to us. Clean it up. This will be the - // case on CPython 3.7 and newer, as they use - // an internal calling conversion that avoids - // creating method objects and storing them on - // the stack. - Py_DECREF(old_main_greenlet); - } - else if (refs - && refs.size() == 1 - && PyCFunction_Check(refs.at(0)) - && Py_REFCNT(refs.at(0)) == 2) { - assert(refs.REFCNT() == 1); - // Ok, we found a C method that refers to the - // main greenlet, and its only referenced - // twice, once in the list we just created, - // once from...somewhere else. If we can't - // find where else, then this is a leak. - // This happens in older versions of CPython - // that create a bound method object somewhere - // on the stack that we'll never get back to. - if (PyCFunction_GetFunction(refs.at(0).borrow()) == (PyCFunction)green_switch) { - BorrowedObject function_w = refs.at(0); - refs.clear(); // destroy the reference - // from the list. - // back to one reference. Can *it* be - // found? - assert(function_w.REFCNT() == 1); - refs = get_referrers.PyCall(function_w); - if (refs && refs.empty()) { - // Nope, it can't be found so it won't - // ever be GC'd. Drop it. 
- Py_CLEAR(function_w); - } - } - } - std::clock_t end = std::clock(); - ThreadState::add_clocks_used_doing_gc(end - begin); - } - } - } - - // We need to make sure this greenlet appears to be dead, - // because otherwise deallocing it would fail to raise an - // exception in it (the thread is dead) and put it back in our - // deleteme list. - if (this->current_greenlet) { - this->current_greenlet->murder_in_place(); - this->current_greenlet.CLEAR(); - } - - if (this->main_greenlet) { - // Couldn't have been the main greenlet that was running - // when the thread exited (because we already cleared this - // pointer if it was). This shouldn't be possible? - - // If the main greenlet was current when the thread died (it - // should be, right?) then we cleared its self pointer above - // when we cleared the current greenlet's main greenlet pointer. - // assert(this->main_greenlet->main_greenlet == this->main_greenlet - // || !this->main_greenlet->main_greenlet); - // // self reference, probably gone - // this->main_greenlet->main_greenlet.CLEAR(); - - // This will actually go away when the ivar is destructed. - this->main_greenlet.CLEAR(); - } - - if (PyErr_Occurred()) { - PyErr_WriteUnraisable(NULL); - PyErr_Clear(); - } - - } - -}; - -ImmortalString ThreadState::get_referrers_name(nullptr); -PythonAllocator ThreadState::allocator; -#ifdef Py_GIL_DISABLED -std::atomic ThreadState::_clocks_used_doing_gc(0); -#else -std::clock_t ThreadState::_clocks_used_doing_gc(0); -#endif - - - - - -}; // namespace greenlet - -#endif diff --git a/venv/Lib/site-packages/greenlet/TThreadStateCreator.hpp b/venv/Lib/site-packages/greenlet/TThreadStateCreator.hpp deleted file mode 100644 index ebd33a3..0000000 --- a/venv/Lib/site-packages/greenlet/TThreadStateCreator.hpp +++ /dev/null @@ -1,102 +0,0 @@ -#ifndef GREENLET_THREAD_STATE_CREATOR_HPP -#define GREENLET_THREAD_STATE_CREATOR_HPP - -#include -#include - -#include "greenlet_internal.hpp" -#include "greenlet_refs.hpp" -#include "greenlet_thread_support.hpp" - -#include "TThreadState.hpp" - -namespace greenlet { - - -typedef void (*ThreadStateDestructor)(ThreadState* const); - -// Only one of these, auto created per thread as a thread_local. -// Constructing the state constructs the MainGreenlet. -template -class ThreadStateCreator -{ -private: - // Initialized to 1, and, if still 1, created on access. - // Set to 0 on destruction. - ThreadState* _state; - G_NO_COPIES_OF_CLS(ThreadStateCreator); - - inline bool has_initialized_state() const noexcept - { - return this->_state != (ThreadState*)1; - } - - inline bool has_state() const noexcept - { - return this->has_initialized_state() && this->_state != nullptr; - } - -public: - - ThreadStateCreator() : - _state((ThreadState*)1) - { - } - - ~ThreadStateCreator() - { - if (this->has_state()) { - Destructor(this->_state); - } - - this->_state = nullptr; - } - - inline ThreadState& state() - { - // The main greenlet will own this pointer when it is created, - // which will be right after this. The plan is to give every - // greenlet a pointer to the main greenlet for the thread it - // runs in; if we are doing something cross-thread, we need to - // access the pointer from the main greenlet. Deleting the - // thread, and hence the thread-local storage, will delete the - // state pointer in the main greenlet. 
- if (!this->has_initialized_state()) { - // XXX: Assuming allocation never fails - this->_state = new ThreadState; - // For non-standard threading, we need to store an object - // in the Python thread state dictionary so that it can be - // DECREF'd when the thread ends (ideally; the dict could - // last longer) and clean this object up. - } - if (!this->_state) { - throw std::runtime_error("Accessing state after destruction."); - } - return *this->_state; - } - - operator ThreadState&() - { - return this->state(); - } - - operator ThreadState*() - { - return &this->state(); - } - - inline int tp_traverse(visitproc visit, void* arg) - { - if (this->has_state()) { - return this->_state->tp_traverse(visit, arg); - } - return 0; - } - -}; - - - -}; // namespace greenlet - -#endif diff --git a/venv/Lib/site-packages/greenlet/TThreadStateDestroy.cpp b/venv/Lib/site-packages/greenlet/TThreadStateDestroy.cpp deleted file mode 100644 index ae0b9ae..0000000 --- a/venv/Lib/site-packages/greenlet/TThreadStateDestroy.cpp +++ /dev/null @@ -1,223 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of the ThreadState destructors. - * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_THREADSTATE_DESTROY -#define T_THREADSTATE_DESTROY - -#include "TGreenlet.hpp" - -#include "greenlet_thread_support.hpp" -#include "greenlet_compiler_compat.hpp" -#include "TGreenletGlobals.cpp" -#include "TThreadState.hpp" -#include "TThreadStateCreator.hpp" - -namespace greenlet { - -extern "C" { - -struct ThreadState_DestroyNoGIL -{ - /** - This function uses the same lock that the PendingCallback does - */ - static void - MarkGreenletDeadAndQueueCleanup(ThreadState* const state) - { -#if GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK - // One rare platform. - return; -#endif - // We are *NOT* holding the GIL. Our thread is in the middle - // of its death throes and the Python thread state is already - // gone so we can't use most Python APIs. One that is safe is - // ``Py_AddPendingCall``, unless the interpreter itself has - // been torn down. There is a limited number of calls that can - // be queued: 32 (NPENDINGCALLS) in CPython 3.10, so we - // coalesce these calls using our own queue. - - if (!MarkGreenletDeadIfNeeded(state)) { - // No state, or no greenlet - return; - } - - // XXX: Because we don't have the GIL, this is a race condition. - if (!PyInterpreterState_Head()) { - // We have to leak the thread state, if the - // interpreter has shut down when we're getting - // deallocated, we can't run the cleanup code that - // deleting it would imply. - return; - } - - AddToCleanupQueue(state); - - } - -private: - - // If the state has an allocated main greenlet: - // - mark the greenlet as dead by disassociating it from the state; - // - return 1 - // Otherwise, return 0. - static bool - MarkGreenletDeadIfNeeded(ThreadState* const state) - { - if (!state) { - return false; - } - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - if (state->has_main_greenlet()) { - // mark the thread as dead ASAP. - // this is racy! If we try to throw or switch to a - // greenlet from this thread from some other thread before - // we clear the state pointer, it won't realize the state - // is dead which can crash the process. 
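ThreadStateCreator above keeps a single raw pointer in three states: the sentinel (ThreadState*)1 meaning "not created yet", a real pointer once state() has run, and nullptr after destruction, which is why a late access throws. A compact illustration of that sentinel scheme with a toy State type (LazySlot is an invented name, not a greenlet class):

#include <stdexcept>

struct State { int value = 0; };

class LazySlot {
public:
    LazySlot() : state_(reinterpret_cast<State*>(1)) {}   // 1 == "not created yet"
    ~LazySlot() {
        if (initialized() && state_) { delete state_; }
        state_ = nullptr;                                  // nullptr == "destroyed"
    }
    State& get() {
        if (!initialized()) {
            state_ = new State();                          // created on first access
        }
        if (!state_) {
            throw std::runtime_error("Accessing state after destruction.");
        }
        return *state_;
    }
private:
    bool initialized() const { return state_ != reinterpret_cast<State*>(1); }
    State* state_;
};

int main() {
    LazySlot slot;
    slot.get().value = 7;
    return slot.get().value == 7 ? 0 : 1;
}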
- PyGreenlet* p(state->borrow_main_greenlet().borrow()); - assert(p->pimpl->thread_state() == state || p->pimpl->thread_state() == nullptr); - dynamic_cast(p->pimpl)->thread_state(nullptr); - return true; - } - return false; - } - - static void - AddToCleanupQueue(ThreadState* const state) - { - assert(state && state->has_main_greenlet()); - - // NOTE: Because we're not holding the GIL here, some other - // Python thread could run and call ``os.fork()``, which would - // be bad if that happened while we are holding the cleanup - // lock (it wouldn't function in the child process). - // Make a best effort to try to keep the duration we hold the - // lock short. - // TODO: On platforms that support it, use ``pthread_atfork`` to - // drop this lock. - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - - mod_globs->queue_to_destroy(state); - if (mod_globs->thread_states_to_destroy.size() == 1) { - // We added the first item to the queue. We need to schedule - // the cleanup. - - // A size greater than 1 means that we have already added the pending call, - // and in fact, it may be executing now. - // If it is executing, our lock makes sure that it will see the item we just added - // to the queue on its next iteration (after we release the lock) - // - // A size of 1 means there is no pending call, OR the pending call is - // currently executing, has dropped the lock, and is deleting the last item - // from the queue; its next iteration will go ahead and delete the item we just added. - // And the pending call we schedule here will have no work to do. - int result = AddPendingCall( - PendingCallback_DestroyQueue, - nullptr); - if (result < 0) { - // Hmm, what can we do here? - fprintf(stderr, - "greenlet: WARNING: failed in call to Py_AddPendingCall; " - "expect a memory leak.\n"); - } - } - } - - static int - PendingCallback_DestroyQueue(void* UNUSED(arg)) - { - // We're may or may not be holding the GIL here (depending on - // Py_GIL_DISABLED), so calls to ``os.fork()`` may or may not - // be possible. - while (1) { - ThreadState* to_destroy; - { - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - if (mod_globs->thread_states_to_destroy.empty()) { - break; - } - to_destroy = mod_globs->take_next_to_destroy(); - } - assert(to_destroy); - assert(to_destroy->has_main_greenlet()); - // Drop the lock while we do the actual deletion. - // This allows other calls to MarkGreenletDeadAndQueueCleanup - // to enter and add to our queue. - DestroyOne(to_destroy); - } - return 0; - } - - static void - DestroyOne(const ThreadState* const state) - { - // May or may not be holding the GIL (depending on Py_GIL_DISABLED). - // Passed a non-shared pointer to the actual thread state. - // state -> main greenlet - assert(state->has_main_greenlet()); - PyGreenlet* main(state->borrow_main_greenlet()); - // When we need to do cross-thread operations, we check this. - // A NULL value means the thread died some time ago. - // We do this here, rather than in a Python dealloc function - // for the greenlet, in case there's still a reference out - // there. - dynamic_cast(main->pimpl)->thread_state(nullptr); - - delete state; // Deleting this runs the destructor, DECREFs the main greenlet. - } - - - static int AddPendingCall(int (*func)(void*), void* arg) - { - // If the interpreter is in the middle of finalizing, we can't add a - // pending call. 
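AddToCleanupQueue above schedules a new pending call only when its own push takes the queue from empty to exactly one entry; any later pushes know a drain callback is either already queued or currently running and will pick them up. The coalescing rule in isolation, with a counter standing in for Py_AddPendingCall (all names here are illustrative):

#include <mutex>
#include <vector>

static std::mutex queue_lock;
static std::vector<int> pending_items;
static int callbacks_scheduled = 0;     // stand-in for Py_AddPendingCall bookkeeping

static void schedule_drain_callback() {
    ++callbacks_scheduled;              // real code would call Py_AddPendingCall here
}

static void enqueue(int item) {
    std::lock_guard<std::mutex> guard(queue_lock);
    pending_items.push_back(item);
    if (pending_items.size() == 1) {
        // Only the push that takes the queue from empty to one item
        // schedules the drain; later pushes piggy-back on it.
        schedule_drain_callback();
    }
}

int main() {
    enqueue(1);
    enqueue(2);
    enqueue(3);
    return callbacks_scheduled == 1 ? 0 : 1;   // one callback covers all three items
}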
Trying to do so will end up in a SIGSEGV, as - // Py_AddPendingCall will not be able to get the interpreter and will - // try to dereference a NULL pointer. It's possible this can still - // segfault if we happen to get context switched, and maybe we should - // just always implement our own AddPendingCall, but I'd like to see if - // this works first -#if GREENLET_PY313 - if (Py_IsFinalizing()) { -#else - if (_Py_IsFinalizing()) { -#endif -#ifdef GREENLET_DEBUG - // No need to log in the general case. Yes, we'll leak, - // but we're shutting down so it should be ok. - fprintf(stderr, - "greenlet: WARNING: Interpreter is finalizing. Ignoring " - "call to Py_AddPendingCall; \n"); -#endif - return 0; - } - return Py_AddPendingCall(func, arg); - } - - - - - -}; -}; - -}; // namespace greenlet - -// The intent when GET_THREAD_STATE() is needed multiple times in a -// function is to take a reference to its return value in a local -// variable, to avoid the thread-local indirection. On some platforms -// (macOS), accessing a thread-local involves a function call (plus an -// initial function call in each function that uses a thread local); -// in contrast, static volatile variables are at some pre-computed -// offset. -typedef greenlet::ThreadStateCreator ThreadStateCreator; -static thread_local ThreadStateCreator g_thread_state_global; -#define GET_THREAD_STATE() g_thread_state_global - -#endif //T_THREADSTATE_DESTROY diff --git a/venv/Lib/site-packages/greenlet/TUserGreenlet.cpp b/venv/Lib/site-packages/greenlet/TUserGreenlet.cpp deleted file mode 100644 index 73a8133..0000000 --- a/venv/Lib/site-packages/greenlet/TUserGreenlet.cpp +++ /dev/null @@ -1,662 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::UserGreenlet. - * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_USER_GREENLET_CPP -#define T_USER_GREENLET_CPP - -#include "greenlet_internal.hpp" -#include "TGreenlet.hpp" - -#include "TThreadStateDestroy.cpp" - - -namespace greenlet { -using greenlet::refs::BorrowedMainGreenlet; -greenlet::PythonAllocator UserGreenlet::allocator; - -void* UserGreenlet::operator new(size_t UNUSED(count)) -{ - return allocator.allocate(1); -} - - -void UserGreenlet::operator delete(void* ptr) -{ - return allocator.deallocate(static_cast(ptr), - 1); -} - - -UserGreenlet::UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) - : Greenlet(p), _parent(the_parent) -{ -} - -UserGreenlet::~UserGreenlet() -{ - // Python 3.11: If we don't clear out the raw frame datastack - // when deleting an unfinished greenlet, - // TestLeaks.test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main fails. - this->python_state.did_finish(nullptr); - this->tp_clear(); -} - - -const BorrowedMainGreenlet -UserGreenlet::main_greenlet() const -{ - return this->_main_greenlet; -} - - -BorrowedMainGreenlet -UserGreenlet::find_main_greenlet_in_lineage() const -{ - if (this->started()) { - assert(this->_main_greenlet); - return BorrowedMainGreenlet(this->_main_greenlet); - } - - if (!this->_parent) { - /* garbage collected greenlet in chain */ - // XXX: WHAT? - return BorrowedMainGreenlet(nullptr); - } - - return this->_parent->find_main_greenlet_in_lineage(); -} - - -/** - * CAUTION: This will allocate memory and may trigger garbage - * collection and arbitrary Python code. 
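The note just above about GET_THREAD_STATE() is why callers take a local reference first: on some platforms every access to a thread_local goes through a function call, so a hot path reads it once and then works through the reference. An illustrative sketch with a toy per-thread struct (ToyState is not a greenlet type):

#include <cstdint>

struct ToyState { std::uint64_t switches = 0; };

static thread_local ToyState toy_state;

static std::uint64_t bump_many(int n) {
    // Resolve the thread-local once instead of once per loop iteration.
    ToyState& state = toy_state;
    for (int i = 0; i < n; ++i) {
        ++state.switches;
    }
    return state.switches;
}

int main() {
    return bump_many(1000) == 1000 ? 0 : 1;
}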
- */ -OwnedObject -UserGreenlet::throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state) -{ - /* The dying greenlet cannot be a parent of ts_current - because the 'parent' field chain would hold a - reference */ - UserGreenlet::ParentIsCurrentGuard with_current_parent(this, current_thread_state); - - // We don't care about the return value, only whether an - // exception happened. Whether or not an exception happens, - // we need to restore the parent in case the greenlet gets - // resurrected. - return Greenlet::throw_GreenletExit_during_dealloc(current_thread_state); -} - -ThreadState* -UserGreenlet::thread_state() const noexcept -{ - // TODO: maybe make this throw, if the thread state isn't there? - // if (!this->main_greenlet) { - // throw std::runtime_error("No thread state"); // TODO: Better exception - // } - if (!this->_main_greenlet) { - return nullptr; - } - return this->_main_greenlet->thread_state(); -} - - -bool -UserGreenlet::was_running_in_dead_thread() const noexcept -{ - return this->_main_greenlet && !this->thread_state(); -} - -OwnedObject -UserGreenlet::g_switch() -{ - assert(this->args() || PyErr_Occurred()); - - try { - this->check_switch_allowed(); - } - catch (const PyErrOccurred&) { - this->release_args(); - throw; - } - - // Switching greenlets used to attempt to clean out ones that need - // deleted *if* we detected a thread switch. Should it still do - // that? - // An issue is that if we delete a greenlet from another thread, - // it gets queued to this thread, and ``kill_greenlet()`` switches - // back into the greenlet - - /* find the real target by ignoring dead greenlets, - and if necessary starting a greenlet. */ - switchstack_result_t err; - Greenlet* target = this; - // TODO: probably cleaner to handle the case where we do - // switch to ourself separately from the other cases. - // This can probably even further be simplified if we keep - // track of the switching_state we're going for and just call - // into g_switch() if it's not ourself. The main problem with that - // is that we would be using more stack space. - bool target_was_me = true; - bool was_initial_stub = false; - while (target) { - if (target->active()) { - if (!target_was_me) { - target->args() <<= this->args(); - assert(!this->args()); - } - err = target->g_switchstack(); - break; - } - if (!target->started()) { - // We never encounter a main greenlet that's not started. - assert(!target->main()); - UserGreenlet* real_target = static_cast(target); - assert(real_target); - void* dummymarker; - was_initial_stub = true; - if (!target_was_me) { - target->args() <<= this->args(); - assert(!this->args()); - } - try { - // This can only throw back to us while we're - // still in this greenlet. Once the new greenlet - // is bootstrapped, it has its own exception state. - err = real_target->g_initialstub(&dummymarker); - } - catch (const PyErrOccurred&) { - this->release_args(); - throw; - } - catch (const GreenletStartedWhileInPython&) { - // The greenlet was started sometime before this - // greenlet actually switched to it, i.e., - // "concurrent" calls to switch() or throw(). - // We need to retry the switch. - // Note that the current greenlet has been reset - // to this one (or we wouldn't be running!) - continue; - } - break; - } - - target = target->parent(); - target_was_me = false; - } - // The ``this`` pointer and all other stack or register based - // variables are invalid now, at least where things succeed - // above. - // But this one, probably not so much? 
It's not clear if it's - // safe to throw an exception at this point. - - if (err.status < 0) { - // If we get here, either g_initialstub() - // failed, or g_switchstack() failed. Either one of those - // cases SHOULD leave us in the original greenlet with a valid - // stack. - return this->on_switchstack_or_initialstub_failure(target, err, target_was_me, was_initial_stub); - } - - // err.the_new_current_greenlet would be the same as ``target``, - // if target wasn't probably corrupt. - return err.the_new_current_greenlet->g_switch_finish(err); -} - - - -Greenlet::switchstack_result_t -UserGreenlet::g_initialstub(void* mark) -{ - OwnedObject run; - - // We need to grab a reference to the current switch arguments - // in case we're entered concurrently during the call to - // GetAttr() and have to try again. - // We'll restore them when we return in that case. - // Scope them tightly to avoid ref leaks. - { - SwitchingArgs args(this->args()); - - /* save exception in case getattr clears it */ - PyErrPieces saved; - - /* - self.run is the object to call in the new greenlet. - This could run arbitrary python code and switch greenlets! - */ - run = this->self().PyRequireAttr(mod_globs->str_run); - /* restore saved exception */ - saved.PyErrRestore(); - - - /* recheck that it's safe to switch in case greenlet reparented anywhere above */ - this->check_switch_allowed(); - - /* by the time we got here another start could happen elsewhere, - * that means it should now be a regular switch. - * This can happen if the Python code is a subclass that implements - * __getattribute__ or __getattr__, or makes ``run`` a descriptor; - * all of those can run arbitrary code that switches back into - * this greenlet. - */ - if (this->stack_state.started()) { - // the successful switch cleared these out, we need to - // restore our version. They will be copied on up to the - // next target. - assert(!this->args()); - this->args() <<= args; - throw GreenletStartedWhileInPython(); - } - } - - // Sweet, if we got here, we have the go-ahead and will switch - // greenlets. - // Nothing we do from here on out should allow for a thread or - // greenlet switch: No arbitrary calls to Python, including - // decref'ing - -#if GREENLET_USE_CFRAME - /* OK, we need it, we're about to switch greenlets, save the state. */ - /* - See green_new(). This is a stack-allocated variable used - while *self* is in PyObject_Call(). - We want to defer copying the state info until we're sure - we need it and are in a stable place to do so. - */ - _PyCFrame trace_info; - - this->python_state.set_new_cframe(trace_info); -#endif - /* start the greenlet */ - ThreadState& thread_state = GET_THREAD_STATE().state(); - this->stack_state = StackState(mark, - thread_state.borrow_current()->stack_state); - this->python_state.set_initial_state(PyThreadState_GET()); - this->exception_state.clear(); - this->_main_greenlet = thread_state.get_main_greenlet(); - - /* perform the initial switch */ - switchstack_result_t err = this->g_switchstack(); - /* returns twice! - The 1st time with ``err == 1``: we are in the new greenlet. - This one owns a greenlet that used to be current. - The 2nd time with ``err <= 0``: back in the caller's - greenlet; this happens if the child finishes or switches - explicitly to us. Either way, the ``err`` variable is - created twice at the same memory location, but possibly - having different ``origin`` values. Note that it's not - constructed for the second time until the switch actually happens. 
- */ - if (err.status == 1) { - // In the new greenlet. - - // This never returns! Calling inner_bootstrap steals - // the contents of our run object within this stack frame, so - // it is not valid to do anything with it. - try { - this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(), - run.relinquish_ownership()); - } - // Getting a C++ exception here isn't good. It's probably a - // bug in the underlying greenlet, meaning it's probably a - // C++ extension. We're going to abort anyway, but try to - // display some nice information *if* possible. Some obscure - // platforms don't properly support this (old 32-bit Arm, see see - // https://github.com/python-greenlet/greenlet/issues/385); that's not - // great, but should usually be OK because, as mentioned above, we're - // terminating anyway. - // - // The catching is tested by - // ``test_cpp.CPPTests.test_unhandled_exception_in_greenlet_aborts``. - // - // PyErrOccurred can theoretically be thrown by - // inner_bootstrap() -> g_switch_finish(), but that should - // never make it back to here. It is a std::exception and - // would be caught if it is. - catch (const std::exception& e) { - std::string base = "greenlet: Unhandled C++ exception: "; - base += e.what(); - Py_FatalError(base.c_str()); - } - catch (...) { - // Some compilers/runtimes use exceptions internally. - // It appears that GCC on Linux with libstdc++ throws an - // exception internally at process shutdown time to unwind - // stacks and clean up resources. Depending on exactly - // where we are when the process exits, that could result - // in an unknown exception getting here. If we - // Py_FatalError() or abort() here, we interfere with - // orderly process shutdown. Throwing the exception on up - // is the right thing to do. - // - // gevent's ``examples/dns_mass_resolve.py`` demonstrates this. -#ifndef NDEBUG - fprintf(stderr, - "greenlet: inner_bootstrap threw unknown exception; " - "is the process terminating?\n"); -#endif - throw; - } - Py_FatalError("greenlet: inner_bootstrap returned with no exception.\n"); - } - - - // In contrast, notice that we're keeping the origin greenlet - // around as an owned reference; we need it to call the trace - // function for the switch back into the parent. It was only - // captured at the time the switch actually happened, though, - // so we haven't been keeping an extra reference around this - // whole time. - - /* back in the parent */ - if (err.status < 0) { - /* start failed badly, restore greenlet state */ - this->stack_state = StackState(); - this->_main_greenlet.CLEAR(); - // CAUTION: This may run arbitrary Python code. - run.CLEAR(); // inner_bootstrap didn't run, we own the reference. - } - - // In the success case, the spawned code (inner_bootstrap) will - // take care of decrefing this, so we relinquish ownership so as - // to not double-decref. - - run.relinquish_ownership(); - - return err; -} - - -void -UserGreenlet::inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run) -{ - // The arguments here would be another great place for move. - // As it is, we take them as a reference so that when we clear - // them we clear what's on the stack above us. Do that NOW, and - // without using a C++ RAII object, - // so there's no way that exiting the parent frame can clear it, - // or we clear it unexpectedly. This arises in the context of the - // interpreter shutting down. 
See https://github.com/python-greenlet/greenlet/issues/325 - //PyObject* run = _run.relinquish_ownership(); - - /* in the new greenlet */ - assert(this->thread_state()->borrow_current() == BorrowedGreenlet(this->_self)); - // C++ exceptions cannot propagate to the parent greenlet from - // here. (TODO: Do we need a catch(...) clause, perhaps on the - // function itself? ALl we could do is terminate the program.) - // NOTE: On 32-bit Windows, the call chain is extremely - // important here in ways that are subtle, having to do with - // the depth of the SEH list. The call to restore it MUST NOT - // add a new SEH handler to the list, or we'll restore it to - // the wrong thing. - this->thread_state()->restore_exception_state(); - /* stack variables from above are no good and also will not unwind! */ - // EXCEPT: That can't be true, we access run, among others, here. - - this->stack_state.set_active(); /* running */ - - // We're about to possibly run Python code again, which - // could switch back/away to/from us, so we need to grab the - // arguments locally. - SwitchingArgs args; - args <<= this->args(); - assert(!this->args()); - - // XXX: We could clear this much earlier, right? - // Or would that introduce the possibility of running Python - // code when we don't want to? - // CAUTION: This may run arbitrary Python code. - this->_run_callable.CLEAR(); - - - // The first switch we need to manually call the trace - // function here instead of in g_switch_finish, because we - // never return there. - if (OwnedObject tracefunc = this->thread_state()->get_tracefunc()) { - OwnedGreenlet trace_origin; - trace_origin = origin_greenlet; - try { - g_calltrace(tracefunc, - args ? mod_globs->event_switch : mod_globs->event_throw, - trace_origin, - this->_self); - } - catch (const PyErrOccurred&) { - /* Turn trace errors into switch throws */ - args.CLEAR(); - } - } - - // We no longer need the origin, it was only here for - // tracing. - // We may never actually exit this stack frame so we need - // to explicitly clear it. - // This could run Python code and switch. - Py_CLEAR(origin_greenlet); - - OwnedObject result; - if (!args) { - /* pending exception */ - result = NULL; - } - else { - /* call g.run(*args, **kwargs) */ - // This could result in further switches - try { - //result = run.PyCall(args.args(), args.kwargs()); - // CAUTION: Just invoking this, before the function even - // runs, may cause memory allocations, which may trigger - // GC, which may run arbitrary Python code. - result = OwnedObject::consuming(PyObject_Call(run, args.args().borrow(), args.kwargs().borrow())); - } - catch (...) { - // Unhandled C++ exception! - - // If we declare ourselves as noexcept, if we don't catch - // this here, most platforms will just abort() the - // process. But on 64-bit Windows with older versions of - // the C runtime, this can actually corrupt memory and - // just return. We see this when compiling with the - // Windows 7.0 SDK targeting Windows Server 2008, but not - // when using the Appveyor Visual Studio 2019 image. So - // this currently only affects Python 2.7 on Windows 64. - // That is, the tests pass and the runtime aborts - // everywhere else. - // - // However, if we catch it and try to continue with a - // Python error, then all Windows 64 bit platforms corrupt - // memory. So all we can do is manually abort, hopefully - // with a good error message. 
(Note that the above was - // tested WITHOUT the `/EHr` switch being used at compile - // time, so MSVC may have "optimized" out important - // checking. Using that switch, we may be in a better - // place in terms of memory corruption.) But sometimes it - // can't be caught here at all, which is confusing but not - // terribly surprising; so again, the G_NOEXCEPT_WIN32 - // plus "/EHr". - // - // Hopefully the basic C stdlib is still functional enough - // for us to at least print an error. - // - // It gets more complicated than that, though, on some - // platforms, specifically at least Linux/gcc/libstdc++. They use - // an exception to unwind the stack when a background - // thread exits. (See comments about noexcept.) So this - // may not actually represent anything untoward. On those - // platforms we allow throws of this to propagate, or - // attempt to anyway. -# if defined(WIN32) || defined(_WIN32) - Py_FatalError( - "greenlet: Unhandled C++ exception from a greenlet run function. " - "Because memory is likely corrupted, terminating process."); - std::abort(); -#else - throw; -#endif - } - } - // These lines may run arbitrary code - args.CLEAR(); - Py_CLEAR(run); - - if (!result - && mod_globs->PyExc_GreenletExit.PyExceptionMatches() - && (this->args())) { - // This can happen, for example, if our only reference - // goes away after we switch back to the parent. - // See test_dealloc_switch_args_not_lost - PyErrPieces clear_error; - result <<= this->args(); - result = single_result(result); - } - this->release_args(); - this->python_state.did_finish(PyThreadState_GET()); - - result = g_handle_exit(result); - assert(this->thread_state()->borrow_current() == this->_self); - - /* jump back to parent */ - this->stack_state.set_inactive(); /* dead */ - - - // TODO: Can we decref some things here? Release our main greenlet - // and maybe parent? - for (Greenlet* parent = this->_parent; - parent; - parent = parent->parent()) { - // We need to somewhere consume a reference to - // the result; in most cases we'll never have control - // back in this stack frame again. Calling - // green_switch actually adds another reference! - // This would probably be clearer with a specific API - // to hand results to the parent. - parent->args() <<= result; - assert(!result); - // The parent greenlet now owns the result; in the - // typical case we'll never get back here to assign to - // result and thus release the reference. - try { - result = parent->g_switch(); - } - catch (const PyErrOccurred&) { - // Ignore, keep passing the error on up. - } - - /* Return here means switch to parent failed, - * in which case we throw *current* exception - * to the next parent in chain. - */ - assert(!result); - } - /* We ran out of parents, cannot continue */ - PyErr_WriteUnraisable(this->self().borrow_o()); - Py_FatalError("greenlet: ran out of parent greenlets while propagating exception; " - "cannot continue"); - std::abort(); -} - -void -UserGreenlet::run(const BorrowedObject nrun) -{ - if (this->started()) { - throw AttributeError( - "run cannot be set " - "after the start of the greenlet"); - } - this->_run_callable = nrun; -} - -const OwnedGreenlet -UserGreenlet::parent() const -{ - return this->_parent; -} - -void -UserGreenlet::parent(const BorrowedObject raw_new_parent) -{ - if (!raw_new_parent) { - throw AttributeError("can't delete attribute"); - } - - BorrowedMainGreenlet main_greenlet_of_new_parent; - BorrowedGreenlet new_parent(raw_new_parent.borrow()); // could - // throw - // TypeError! 
- for (BorrowedGreenlet p = new_parent; p; p = p->parent()) { - if (p == this->self()) { - throw ValueError("cyclic parent chain"); - } - main_greenlet_of_new_parent = p->main_greenlet(); - } - - if (!main_greenlet_of_new_parent) { - throw ValueError("parent must not be garbage collected"); - } - - if (this->started() - && this->_main_greenlet != main_greenlet_of_new_parent) { - throw ValueError("parent cannot be on a different thread"); - } - - this->_parent = new_parent; -} - -void -UserGreenlet::murder_in_place() -{ - this->_main_greenlet.CLEAR(); - Greenlet::murder_in_place(); -} - -bool -UserGreenlet::belongs_to_thread(const ThreadState* thread_state) const -{ - return Greenlet::belongs_to_thread(thread_state) && this->_main_greenlet == thread_state->borrow_main_greenlet(); -} - - -int -UserGreenlet::tp_traverse(visitproc visit, void* arg) -{ - Py_VISIT(this->_parent.borrow_o()); - Py_VISIT(this->_main_greenlet.borrow_o()); - Py_VISIT(this->_run_callable.borrow_o()); - - return Greenlet::tp_traverse(visit, arg); -} - -int -UserGreenlet::tp_clear() -{ - Greenlet::tp_clear(); - this->_parent.CLEAR(); - this->_main_greenlet.CLEAR(); - this->_run_callable.CLEAR(); - return 0; -} - -UserGreenlet::ParentIsCurrentGuard::ParentIsCurrentGuard(UserGreenlet* p, - const ThreadState& thread_state) - : oldparent(p->_parent), - greenlet(p) -{ - p->_parent = thread_state.get_current(); -} - -UserGreenlet::ParentIsCurrentGuard::~ParentIsCurrentGuard() -{ - this->greenlet->_parent = oldparent; - oldparent.CLEAR(); -} - -}; //namespace greenlet -#endif diff --git a/venv/Lib/site-packages/greenlet/__init__.py b/venv/Lib/site-packages/greenlet/__init__.py deleted file mode 100644 index 61d991e..0000000 --- a/venv/Lib/site-packages/greenlet/__init__.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -""" -The root of the greenlet package. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -__all__ = [ - '__version__', - '_C_API', - - 'GreenletExit', - 'error', - - 'getcurrent', - 'greenlet', - - 'gettrace', - 'settrace', -] - -# pylint:disable=no-name-in-module - -### -# Metadata -### -__version__ = '3.3.0' -from ._greenlet import _C_API # pylint:disable=no-name-in-module - -### -# Exceptions -### -from ._greenlet import GreenletExit -from ._greenlet import error - -### -# greenlets -### -from ._greenlet import getcurrent -from ._greenlet import greenlet - -### -# tracing -### -try: - from ._greenlet import gettrace - from ._greenlet import settrace -except ImportError: - # Tracing wasn't supported. - # XXX: The option to disable it was removed in 1.0, - # so this branch should be dead code. - pass - -### -# Constants -# These constants aren't documented and aren't recommended. -# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS -# is the same as ``sys.version_info[:2] >= 3.7`` -### -from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import -from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import -from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import - -# Controlling the use of the gc module. Provisional API for this greenlet -# implementation in 2.0. -from ._greenlet import CLOCKS_PER_SEC # pylint:disable=unused-import -from ._greenlet import enable_optional_cleanup # pylint:disable=unused-import -from ._greenlet import get_clocks_used_doing_optional_cleanup # pylint:disable=unused-import - -# Other APIS in the _greenlet module are for test support. 
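For orientation: the exports removed in the greenlet/__init__.py hunk above (greenlet, getcurrent, GreenletExit, gettrace/settrace) are the public Python surface of the switching, result-propagation and parent-validation logic implemented by the UserGreenlet C++ code deleted earlier in this diff. The sketch below is illustrative only and is not part of the change set; it assumes the published greenlet package API, and the names worker/main_gl are hypothetical.

# Minimal sketch, assuming the published greenlet API (not part of this diff).
import greenlet

def worker(x):
    # Runs on its own C stack. Switching back hands a value to the parent,
    # and the parent's next switch() into us becomes our switch() result.
    main_gl = greenlet.getcurrent().parent
    y = main_gl.switch(x + 1)
    return y * 2          # return value of run() is delivered to the parent

main_gl = greenlet.getcurrent()
g = greenlet.greenlet(worker)      # parent defaults to the creating greenlet

print(g.switch(10))   # 11 -> worker switched back to main with x + 1
print(g.switch(5))    # 10 -> worker finished; its return value reaches main

# Reparenting is validated (see UserGreenlet::parent above):
try:
    g.parent = g      # a greenlet may not appear in its own parent chain
except ValueError as e:
    print(e)          # "cyclic parent chain"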
diff --git a/venv/Lib/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 2fabdc6..0000000 Binary files a/venv/Lib/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/_greenlet.cp312-win_amd64.pyd b/venv/Lib/site-packages/greenlet/_greenlet.cp312-win_amd64.pyd deleted file mode 100644 index 5583ab9..0000000 Binary files a/venv/Lib/site-packages/greenlet/_greenlet.cp312-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/greenlet.cpp b/venv/Lib/site-packages/greenlet/greenlet.cpp deleted file mode 100644 index 7722bd0..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet.cpp +++ /dev/null @@ -1,323 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/* Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#include -#include -#include -#include - - -#define PY_SSIZE_T_CLEAN -#include -#include "structmember.h" // PyMemberDef - -#include "greenlet_internal.hpp" -// Code after this point can assume access to things declared in stdint.h, -// including the fixed-width types. This goes for the platform-specific switch functions -// as well. -#include "greenlet_refs.hpp" -#include "greenlet_slp_switch.hpp" - -#include "greenlet_thread_support.hpp" -#include "TGreenlet.hpp" - -#include "TGreenletGlobals.cpp" - -#include "TGreenlet.cpp" -#include "TMainGreenlet.cpp" -#include "TUserGreenlet.cpp" -#include "TBrokenGreenlet.cpp" -#include "TExceptionState.cpp" -#include "TPythonState.cpp" -#include "TStackState.cpp" - -#include "TThreadState.hpp" -#include "TThreadStateCreator.hpp" -#include "TThreadStateDestroy.cpp" - -#include "PyGreenlet.cpp" -#include "PyGreenletUnswitchable.cpp" -#include "CObjects.cpp" - -using greenlet::LockGuard; -using greenlet::LockInitError; -using greenlet::PyErrOccurred; -using greenlet::Require; - -using greenlet::g_handle_exit; -using greenlet::single_result; - -using greenlet::Greenlet; -using greenlet::UserGreenlet; -using greenlet::MainGreenlet; -using greenlet::BrokenGreenlet; -using greenlet::ThreadState; -using greenlet::PythonState; - - - -// ******* Implementation of things from included files -template -greenlet::refs::_BorrowedGreenlet& greenlet::refs::_BorrowedGreenlet::operator=(const greenlet::refs::BorrowedObject& other) -{ - this->_set_raw_pointer(static_cast(other)); - return *this; -} - -template -inline greenlet::refs::_BorrowedGreenlet::operator Greenlet*() const noexcept -{ - if (!this->p) { - return nullptr; - } - return reinterpret_cast(this->p)->pimpl; -} - -template -greenlet::refs::_BorrowedGreenlet::_BorrowedGreenlet(const BorrowedObject& p) - : BorrowedReference(nullptr) -{ - - this->_set_raw_pointer(p.borrow()); -} - -template -inline greenlet::refs::_OwnedGreenlet::operator Greenlet*() const noexcept -{ - if (!this->p) { - return nullptr; - } - return reinterpret_cast(this->p)->pimpl; -} - - - -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wmissing-field-initializers" -# pragma clang diagnostic ignored "-Wwritable-strings" -#elif defined(__GNUC__) -# pragma GCC diagnostic push -// warning: ISO C++ forbids converting a string constant to ‘char*’ -// (The python APIs aren't const correct and accept writable char*) -# pragma GCC diagnostic 
ignored "-Wwrite-strings" -#endif - - -/*********************************************************** - -A PyGreenlet is a range of C stack addresses that must be -saved and restored in such a way that the full range of the -stack contains valid data when we switch to it. - -Stack layout for a greenlet: - - | ^^^ | - | older data | - | | - stack_stop . |_______________| - . | | - . | greenlet data | - . | in stack | - . * |_______________| . . _____________ stack_copy + stack_saved - . | | | | - . | data | |greenlet data| - . | unrelated | | saved | - . | to | | in heap | - stack_start . | this | . . |_____________| stack_copy - | greenlet | - | | - | newer data | - | vvv | - - -Note that a greenlet's stack data is typically partly at its correct -place in the stack, and partly saved away in the heap, but always in -the above configuration: two blocks, the more recent one in the heap -and the older one still in the stack (either block may be empty). - -Greenlets are chained: each points to the previous greenlet, which is -the one that owns the data currently in the C stack above my -stack_stop. The currently running greenlet is the first element of -this chain. The main (initial) greenlet is the last one. Greenlets -whose stack is entirely in the heap can be skipped from the chain. - -The chain is not related to execution order, but only to the order -in which bits of C stack happen to belong to greenlets at a particular -point in time. - -The main greenlet doesn't have a stack_stop: it is responsible for the -complete rest of the C stack, and we don't know where it begins. We -use (char*) -1, the largest possible address. - -States: - stack_stop == NULL && stack_start == NULL: did not start yet - stack_stop != NULL && stack_start == NULL: already finished - stack_stop != NULL && stack_start != NULL: active - -The running greenlet's stack_start is undefined but not NULL. - - ***********************************************************/ - - - - -/***********************************************************/ - -/* Some functions must not be inlined: - * slp_restore_state, when inlined into slp_switch might cause - it to restore stack over its own local variables - * slp_save_state, when inlined would add its own local - variables to the saved stack, wasting space - * slp_switch, cannot be inlined for obvious reasons - * g_initialstub, when inlined would receive a pointer into its - own stack frame, leading to incomplete stack save/restore - -g_initialstub is a member function and declared virtual so that the -compiler always calls it through a vtable. - -slp_save_state and slp_restore_state are also member functions. They -are called from trampoline functions that themselves are declared as -not eligible for inlining. 
-*/ - -extern "C" { -static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref) -{ - return switching_thread_state->slp_save_state(stackref); -} -static void GREENLET_NOINLINE(slp_restore_state_trampoline)() -{ - switching_thread_state->slp_restore_state(); -} -} - - -/***********************************************************/ - - -#include "PyModule.cpp" - - - -static PyObject* -greenlet_internal_mod_init() noexcept -{ - static void* _PyGreenlet_API[PyGreenlet_API_pointers]; - - try { - CreatedModule m(greenlet_module_def); - - Require(PyType_Ready(&PyGreenlet_Type)); - Require(PyType_Ready(&PyGreenletUnswitchable_Type)); - - mod_globs = new greenlet::GreenletGlobals; - ThreadState::init(); - - m.PyAddObject("greenlet", PyGreenlet_Type); - m.PyAddObject("UnswitchableGreenlet", PyGreenletUnswitchable_Type); - m.PyAddObject("error", mod_globs->PyExc_GreenletError); - m.PyAddObject("GreenletExit", mod_globs->PyExc_GreenletExit); - - m.PyAddObject("GREENLET_USE_GC", 1); - m.PyAddObject("GREENLET_USE_TRACING", 1); - m.PyAddObject("GREENLET_USE_CONTEXT_VARS", 1L); - m.PyAddObject("GREENLET_USE_STANDARD_THREADING", 1L); - - OwnedObject clocks_per_sec = OwnedObject::consuming(PyLong_FromSsize_t(CLOCKS_PER_SEC)); - m.PyAddObject("CLOCKS_PER_SEC", clocks_per_sec); - - /* also publish module-level data as attributes of the greentype. */ - // XXX: This is weird, and enables a strange pattern of - // confusing the class greenlet with the module greenlet; with - // the exception of (possibly) ``getcurrent()``, this - // shouldn't be encouraged so don't add new items here. - for (const char* const* p = copy_on_greentype; *p; p++) { - OwnedObject o = m.PyRequireAttr(*p); - PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o.borrow()); - } - - /* - * Expose C API - */ - - /* types */ - _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type; - - /* exceptions */ - _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)mod_globs->PyExc_GreenletError; - _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)mod_globs->PyExc_GreenletExit; - - /* methods */ - _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New; - _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent; - _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw; - _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch; - _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent; - - /* Previously macros, but now need to be functions externally. */ - _PyGreenlet_API[PyGreenlet_MAIN_NUM] = (void*)Extern_PyGreenlet_MAIN; - _PyGreenlet_API[PyGreenlet_STARTED_NUM] = (void*)Extern_PyGreenlet_STARTED; - _PyGreenlet_API[PyGreenlet_ACTIVE_NUM] = (void*)Extern_PyGreenlet_ACTIVE; - _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM] = (void*)Extern_PyGreenlet_GET_PARENT; - - /* XXX: Note that our module name is ``greenlet._greenlet``, but for - backwards compatibility with existing C code, we need the _C_API to - be directly in greenlet. 
- */ - const NewReference c_api_object(Require( - PyCapsule_New( - (void*)_PyGreenlet_API, - "greenlet._C_API", - NULL))); - m.PyAddObject("_C_API", c_api_object); - assert(c_api_object.REFCNT() == 2); - - // cerr << "Sizes:" - // << "\n\tGreenlet : " << sizeof(Greenlet) - // << "\n\tUserGreenlet : " << sizeof(UserGreenlet) - // << "\n\tMainGreenlet : " << sizeof(MainGreenlet) - // << "\n\tExceptionState : " << sizeof(greenlet::ExceptionState) - // << "\n\tPythonState : " << sizeof(greenlet::PythonState) - // << "\n\tStackState : " << sizeof(greenlet::StackState) - // << "\n\tSwitchingArgs : " << sizeof(greenlet::SwitchingArgs) - // << "\n\tOwnedObject : " << sizeof(greenlet::refs::OwnedObject) - // << "\n\tBorrowedObject : " << sizeof(greenlet::refs::BorrowedObject) - // << "\n\tPyGreenlet : " << sizeof(PyGreenlet) - // << endl; - -#ifdef Py_GIL_DISABLED - PyUnstable_Module_SetGIL(m.borrow(), Py_MOD_GIL_NOT_USED); -#endif - return m.borrow(); // But really it's the main reference. - } - catch (const LockInitError& e) { - PyErr_SetString(PyExc_MemoryError, e.what()); - return NULL; - } - catch (const PyErrOccurred&) { - return NULL; - } - -} - -extern "C" { - -PyMODINIT_FUNC -PyInit__greenlet(void) -{ - return greenlet_internal_mod_init(); -} - -}; // extern C - -#ifdef __clang__ -# pragma clang diagnostic pop -#elif defined(__GNUC__) -# pragma GCC diagnostic pop -#endif diff --git a/venv/Lib/site-packages/greenlet/greenlet.h b/venv/Lib/site-packages/greenlet/greenlet.h deleted file mode 100644 index d02a16e..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet.h +++ /dev/null @@ -1,164 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ - -/* Greenlet object interface */ - -#ifndef Py_GREENLETOBJECT_H -#define Py_GREENLETOBJECT_H - - -#include - -#ifdef __cplusplus -extern "C" { -#endif - -/* This is deprecated and undocumented. It does not change. 
*/ -#define GREENLET_VERSION "1.0.0" - -#ifndef GREENLET_MODULE -#define implementation_ptr_t void* -#endif - -typedef struct _greenlet { - PyObject_HEAD - PyObject* weakreflist; - PyObject* dict; - implementation_ptr_t pimpl; -} PyGreenlet; - -#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) - - -/* C API functions */ - -/* Total number of symbols that are exported */ -#define PyGreenlet_API_pointers 12 - -#define PyGreenlet_Type_NUM 0 -#define PyExc_GreenletError_NUM 1 -#define PyExc_GreenletExit_NUM 2 - -#define PyGreenlet_New_NUM 3 -#define PyGreenlet_GetCurrent_NUM 4 -#define PyGreenlet_Throw_NUM 5 -#define PyGreenlet_Switch_NUM 6 -#define PyGreenlet_SetParent_NUM 7 - -#define PyGreenlet_MAIN_NUM 8 -#define PyGreenlet_STARTED_NUM 9 -#define PyGreenlet_ACTIVE_NUM 10 -#define PyGreenlet_GET_PARENT_NUM 11 - -#ifndef GREENLET_MODULE -/* This section is used by modules that uses the greenlet C API */ -static void** _PyGreenlet_API = NULL; - -# define PyGreenlet_Type \ - (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) - -# define PyExc_GreenletError \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) - -# define PyExc_GreenletExit \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) - -/* - * PyGreenlet_New(PyObject *args) - * - * greenlet.greenlet(run, parent=None) - */ -# define PyGreenlet_New \ - (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ - _PyGreenlet_API[PyGreenlet_New_NUM]) - -/* - * PyGreenlet_GetCurrent(void) - * - * greenlet.getcurrent() - */ -# define PyGreenlet_GetCurrent \ - (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) - -/* - * PyGreenlet_Throw( - * PyGreenlet *greenlet, - * PyObject *typ, - * PyObject *val, - * PyObject *tb) - * - * g.throw(...) - */ -# define PyGreenlet_Throw \ - (*(PyObject * (*)(PyGreenlet * self, \ - PyObject * typ, \ - PyObject * val, \ - PyObject * tb)) \ - _PyGreenlet_API[PyGreenlet_Throw_NUM]) - -/* - * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) - * - * g.switch(*args, **kwargs) - */ -# define PyGreenlet_Switch \ - (*(PyObject * \ - (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ - _PyGreenlet_API[PyGreenlet_Switch_NUM]) - -/* - * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) - * - * g.parent = new_parent - */ -# define PyGreenlet_SetParent \ - (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ - _PyGreenlet_API[PyGreenlet_SetParent_NUM]) - -/* - * PyGreenlet_GetParent(PyObject* greenlet) - * - * return greenlet.parent; - * - * This could return NULL even if there is no exception active. - * If it does not return NULL, you are responsible for decrementing the - * reference count. - */ -# define PyGreenlet_GetParent \ - (*(PyGreenlet* (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) - -/* - * deprecated, undocumented alias. - */ -# define PyGreenlet_GET_PARENT PyGreenlet_GetParent - -# define PyGreenlet_MAIN \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_MAIN_NUM]) - -# define PyGreenlet_STARTED \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_STARTED_NUM]) - -# define PyGreenlet_ACTIVE \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) - - - - -/* Macro that imports greenlet and initializes C API */ -/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we - keep the older definition to be sure older code that might have a copy of - the header still works. 
*/ -# define PyGreenlet_Import() \ - { \ - _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ - } - -#endif /* GREENLET_MODULE */ - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/Lib/site-packages/greenlet/greenlet_allocator.hpp b/venv/Lib/site-packages/greenlet/greenlet_allocator.hpp deleted file mode 100644 index 1cd9223..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet_allocator.hpp +++ /dev/null @@ -1,76 +0,0 @@ -#ifndef GREENLET_ALLOCATOR_HPP -#define GREENLET_ALLOCATOR_HPP - -#define PY_SSIZE_T_CLEAN -#include -#include -#include "greenlet_compiler_compat.hpp" -#include "greenlet_cpython_compat.hpp" - - -namespace greenlet -{ - // This allocator is stateless; all instances are identical. - // It can *ONLY* be used when we're sure we're holding the GIL - // (Python's allocators require the GIL). - template - struct PythonAllocator : public std::allocator { - - PythonAllocator(const PythonAllocator& UNUSED(other)) - : std::allocator() - { - } - - PythonAllocator(const std::allocator other) - : std::allocator(other) - {} - - template - PythonAllocator(const std::allocator& other) - : std::allocator(other) - { - } - - PythonAllocator() : std::allocator() {} - - T* allocate(size_t number_objects, const void* UNUSED(hint)=0) - { - void* p; - if (number_objects == 1) { -#ifdef Py_GIL_DISABLED - p = PyMem_Malloc(sizeof(T) * number_objects); -#else - p = PyObject_Malloc(sizeof(T)); -#endif - } - else { - p = PyMem_Malloc(sizeof(T) * number_objects); - } - return static_cast(p); - } - - void deallocate(T* t, size_t n) - { - void* p = t; - if (n == 1) { -#ifdef Py_GIL_DISABLED - PyMem_Free(p); -#else - PyObject_Free(p); -#endif - } - else { - PyMem_Free(p); - } - } - // This member is deprecated in C++17 and removed in C++20 - template< class U > - struct rebind { - typedef PythonAllocator other; - }; - - }; - -} - -#endif diff --git a/venv/Lib/site-packages/greenlet/greenlet_compiler_compat.hpp b/venv/Lib/site-packages/greenlet/greenlet_compiler_compat.hpp deleted file mode 100644 index af24bd8..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet_compiler_compat.hpp +++ /dev/null @@ -1,98 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef GREENLET_COMPILER_COMPAT_HPP -#define GREENLET_COMPILER_COMPAT_HPP - -/** - * Definitions to aid with compatibility with different compilers. - * - * .. caution:: Use extreme care with noexcept. - * Some compilers and runtimes, specifically gcc/libgcc/libstdc++ on - * Linux, implement stack unwinding by throwing an uncatchable - * exception, one that specifically does not appear to be an active - * exception to the rest of the runtime. If this happens while we're in a noexcept function, - * we have violated our dynamic exception contract, and so the runtime - * will call std::terminate(), which kills the process with the - * unhelpful message "terminate called without an active exception". - * - * This has happened in this scenario: A background thread is running - * a greenlet that has made a native call and released the GIL. - * Meanwhile, the main thread finishes and starts shutting down the - * interpreter. When the background thread is scheduled again and - * attempts to obtain the GIL, it notices that the interpreter is - * exiting and calls ``pthread_exit()``. This in turn starts to unwind - * the stack by throwing that exception. But we had the ``PyCall`` - * functions annotated as noexcept, so the runtime terminated us. 
- * - * #2 0x00007fab26fec2b7 in std::terminate() () from /lib/x86_64-linux-gnu/libstdc++.so.6 - * #3 0x00007fab26febb3c in __gxx_personality_v0 () from /lib/x86_64-linux-gnu/libstdc++.so.6 - * #4 0x00007fab26f34de6 in ?? () from /lib/x86_64-linux-gnu/libgcc_s.so.1 - * #6 0x00007fab276a34c6 in __GI___pthread_unwind at ./nptl/unwind.c:130 - * #7 0x00007fab2769bd3a in __do_cancel () at ../sysdeps/nptl/pthreadP.h:280 - * #8 __GI___pthread_exit (value=value@entry=0x0) at ./nptl/pthread_exit.c:36 - * #9 0x000000000052e567 in PyThread_exit_thread () at ../Python/thread_pthread.h:370 - * #10 0x00000000004d60b5 in take_gil at ../Python/ceval_gil.h:224 - * #11 0x00000000004d65f9 in PyEval_RestoreThread at ../Python/ceval.c:467 - * #12 0x000000000060cce3 in setipaddr at ../Modules/socketmodule.c:1203 - * #13 0x00000000006101cd in socket_gethostbyname - */ - -#include - -# define G_NO_COPIES_OF_CLS(Cls) private: \ - Cls(const Cls& other) = delete; \ - Cls& operator=(const Cls& other) = delete - -# define G_NO_ASSIGNMENT_OF_CLS(Cls) private: \ - Cls& operator=(const Cls& other) = delete - -# define G_NO_COPY_CONSTRUCTOR_OF_CLS(Cls) private: \ - Cls(const Cls& other) = delete; - - -// CAUTION: MSVC is stupidly picky: -// -// "The compiler ignores, without warning, any __declspec keywords -// placed after * or & and in front of the variable identifier in a -// declaration." -// (https://docs.microsoft.com/en-us/cpp/cpp/declspec?view=msvc-160) -// -// So pointer return types must be handled differently (because of the -// trailing *), or you get inscrutable compiler warnings like "error -// C2059: syntax error: ''" -// -// In C++ 11, there is a standard syntax for attributes, and -// GCC defines an attribute to use with this: [[gnu:noinline]]. -// In the future, this is expected to become standard. - -#if defined(__GNUC__) || defined(__clang__) -/* We used to check for GCC 4+ or 3.4+, but those compilers are - laughably out of date. Just assume they support it. */ -# define GREENLET_NOINLINE(name) __attribute__((noinline)) name -# define GREENLET_NOINLINE_P(rtype, name) rtype __attribute__((noinline)) name -# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) -#elif defined(_MSC_VER) -/* We used to check for && (_MSC_VER >= 1300) but that's also out of date. */ -# define GREENLET_NOINLINE(name) __declspec(noinline) name -# define GREENLET_NOINLINE_P(rtype, name) __declspec(noinline) rtype name -# define UNUSED(x) UNUSED_ ## x -#endif - -#if defined(_MSC_VER) -# define G_NOEXCEPT_WIN32 noexcept -#else -# define G_NOEXCEPT_WIN32 -#endif - -#if defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) -// 32-bit PPC/MacOSX. Only known to be tested on unreleased versions -// of macOS 10.6 using a macports build gcc 14. It appears that -// running C++ destructors of thread-local variables is broken. - -// See https://github.com/python-greenlet/greenlet/pull/419 -# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 1 -#else -# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 0 -#endif - - -#endif diff --git a/venv/Lib/site-packages/greenlet/greenlet_cpython_compat.hpp b/venv/Lib/site-packages/greenlet/greenlet_cpython_compat.hpp deleted file mode 100644 index a3b3850..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet_cpython_compat.hpp +++ /dev/null @@ -1,150 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef GREENLET_CPYTHON_COMPAT_H -#define GREENLET_CPYTHON_COMPAT_H - -/** - * Helpers for compatibility with multiple versions of CPython. 
- */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" - - -#if PY_VERSION_HEX >= 0x30A00B1 -# define GREENLET_PY310 1 -#else -# define GREENLET_PY310 0 -#endif - -/* -Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. -See https://github.com/python/cpython/pull/25276 -We have to save and restore this as well. - -Python 3.13 removed PyThreadState.cframe (GH-108035). -*/ -#if GREENLET_PY310 && PY_VERSION_HEX < 0x30D0000 -# define GREENLET_USE_CFRAME 1 -#else -# define GREENLET_USE_CFRAME 0 -#endif - - -#if PY_VERSION_HEX >= 0x30B00A4 -/* -Greenlet won't compile on anything older than Python 3.11 alpha 4 (see -https://bugs.python.org/issue46090). Summary of breaking internal changes: -- Python 3.11 alpha 1 changed how frame objects are represented internally. - - https://github.com/python/cpython/pull/30122 -- Python 3.11 alpha 3 changed how recursion limits are stored. - - https://github.com/python/cpython/pull/29524 -- Python 3.11 alpha 4 changed how exception state is stored. It also includes a - change to help greenlet save and restore the interpreter frame "data stack". - - https://github.com/python/cpython/pull/30122 - - https://github.com/python/cpython/pull/30234 -*/ -# define GREENLET_PY311 1 -#else -# define GREENLET_PY311 0 -#endif - - -#if PY_VERSION_HEX >= 0x30C0000 -# define GREENLET_PY312 1 -#else -# define GREENLET_PY312 0 -#endif - -#if PY_VERSION_HEX >= 0x30D0000 -# define GREENLET_PY313 1 -#else -# define GREENLET_PY313 0 -#endif - -#if PY_VERSION_HEX >= 0x30E0000 -# define GREENLET_PY314 1 -#else -# define GREENLET_PY314 0 -#endif - -#ifndef Py_SET_REFCNT -/* Py_REFCNT and Py_SIZE macros are converted to functions -https://bugs.python.org/issue39573 */ -# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) -#endif - -#ifdef _Py_DEC_REFTOTAL -# define GREENLET_Py_DEC_REFTOTAL _Py_DEC_REFTOTAL -#else -/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: - https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 - - The symbol we use to replace it was removed by at least 3.12. -*/ -# ifdef Py_REF_DEBUG -# if GREENLET_PY312 -# define GREENLET_Py_DEC_REFTOTAL -# else -# define GREENLET_Py_DEC_REFTOTAL _Py_RefTotal-- -# endif -# else -# define GREENLET_Py_DEC_REFTOTAL -# endif -#endif -// Define these flags like Cython does if we're on an old version. 
-#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif - -#ifndef Py_TPFLAGS_HAVE_VERSION_TAG - #define Py_TPFLAGS_HAVE_VERSION_TAG 0 -#endif - -#define G_TPFLAGS_DEFAULT Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VERSION_TAG | Py_TPFLAGS_CHECKTYPES | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_HAVE_GC - - -#if PY_VERSION_HEX < 0x03090000 -// The official version only became available in 3.9 -# define PyObject_GC_IsTracked(o) _PyObject_GC_IS_TRACKED(o) -#endif - - -// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 -#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) -static inline void PyThreadState_EnterTracing(PyThreadState *tstate) -{ - tstate->tracing++; -#if PY_VERSION_HEX >= 0x030A00A1 - tstate->cframe->use_tracing = 0; -#else - tstate->use_tracing = 0; -#endif -} -#endif - -// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 -#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) -static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) -{ - tstate->tracing--; - int use_tracing = (tstate->c_tracefunc != NULL - || tstate->c_profilefunc != NULL); -#if PY_VERSION_HEX >= 0x030A00A1 - tstate->cframe->use_tracing = use_tracing; -#else - tstate->use_tracing = use_tracing; -#endif -} -#endif - -#if !defined(Py_C_RECURSION_LIMIT) && defined(C_RECURSION_LIMIT) -# define Py_C_RECURSION_LIMIT C_RECURSION_LIMIT -#endif - -#endif /* GREENLET_CPYTHON_COMPAT_H */ diff --git a/venv/Lib/site-packages/greenlet/greenlet_exceptions.hpp b/venv/Lib/site-packages/greenlet/greenlet_exceptions.hpp deleted file mode 100644 index 617f07c..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet_exceptions.hpp +++ /dev/null @@ -1,171 +0,0 @@ -#ifndef GREENLET_EXCEPTIONS_HPP -#define GREENLET_EXCEPTIONS_HPP - -#define PY_SSIZE_T_CLEAN -#include -#include -#include - -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -#endif - -namespace greenlet { - - class PyErrOccurred : public std::runtime_error - { - public: - - // CAUTION: In debug builds, may run arbitrary Python code. - static const PyErrOccurred - from_current() - { - assert(PyErr_Occurred()); -#ifndef NDEBUG - // This is not exception safe, and - // not necessarily safe in general (what if it switches?) - // But we only do this in debug mode, where we are in - // tight control of what exceptions are getting raised and - // can prevent those issues. - - // You can't call PyObject_Str with a pending exception. - PyObject* typ; - PyObject* val; - PyObject* tb; - - PyErr_Fetch(&typ, &val, &tb); - PyObject* typs = PyObject_Str(typ); - PyObject* vals = PyObject_Str(val ? 
val : typ); - const char* typ_msg = PyUnicode_AsUTF8(typs); - const char* val_msg = PyUnicode_AsUTF8(vals); - PyErr_Restore(typ, val, tb); - - std::string msg(typ_msg); - msg += ": "; - msg += val_msg; - PyErrOccurred ex(msg); - Py_XDECREF(typs); - Py_XDECREF(vals); - - return ex; -#else - return PyErrOccurred(); -#endif - } - - PyErrOccurred() : std::runtime_error("") - { - assert(PyErr_Occurred()); - } - - PyErrOccurred(const std::string& msg) : std::runtime_error(msg) - { - assert(PyErr_Occurred()); - } - - PyErrOccurred(PyObject* exc_kind, const char* const msg) - : std::runtime_error(msg) - { - PyErr_SetString(exc_kind, msg); - } - - PyErrOccurred(PyObject* exc_kind, const std::string msg) - : std::runtime_error(msg) - { - // This copies the c_str, so we don't have any lifetime - // issues to worry about. - PyErr_SetString(exc_kind, msg.c_str()); - } - - PyErrOccurred(PyObject* exc_kind, - const std::string msg, //This is the format - //string; that's not - //usually safe! - - PyObject* borrowed_obj_one, PyObject* borrowed_obj_two) - : std::runtime_error(msg) - { - - //This is designed specifically for the - //``check_switch_allowed`` function. - - // PyObject_Str and PyObject_Repr are safe to call with - // NULL pointers; they return the string "" in that - // case. - // This function always returns null. - PyErr_Format(exc_kind, - msg.c_str(), - borrowed_obj_one, borrowed_obj_two); - } - }; - - class TypeError : public PyErrOccurred - { - public: - TypeError(const char* const what) - : PyErrOccurred(PyExc_TypeError, what) - { - } - TypeError(const std::string what) - : PyErrOccurred(PyExc_TypeError, what) - { - } - }; - - class ValueError : public PyErrOccurred - { - public: - ValueError(const char* const what) - : PyErrOccurred(PyExc_ValueError, what) - { - } - }; - - class AttributeError : public PyErrOccurred - { - public: - AttributeError(const char* const what) - : PyErrOccurred(PyExc_AttributeError, what) - { - } - }; - - /** - * Calls `Py_FatalError` when constructed, so you can't actually - * throw this. It just makes static analysis easier. - */ - class PyFatalError : public std::runtime_error - { - public: - PyFatalError(const char* const msg) - : std::runtime_error(msg) - { - Py_FatalError(msg); - } - }; - - static inline PyObject* - Require(PyObject* p, const std::string& msg="") - { - if (!p) { - throw PyErrOccurred(msg); - } - return p; - }; - - static inline void - Require(const int retval) - { - if (retval < 0) { - throw PyErrOccurred(); - } - }; - - -}; -#ifdef __clang__ -# pragma clang diagnostic pop -#endif - -#endif diff --git a/venv/Lib/site-packages/greenlet/greenlet_internal.hpp b/venv/Lib/site-packages/greenlet/greenlet_internal.hpp deleted file mode 100644 index f2b15d5..0000000 --- a/venv/Lib/site-packages/greenlet/greenlet_internal.hpp +++ /dev/null @@ -1,107 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef GREENLET_INTERNAL_H -#define GREENLET_INTERNAL_H -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -#endif - -/** - * Implementation helpers. - * - * C++ templates and inline functions should go here. 
[omitted: deletion hunks for the vendored greenlet package committed inside the virtualenv — venv/Lib/site-packages/greenlet/greenlet_internal.hpp, greenlet_msvc_compat.hpp, greenlet_refs.hpp, greenlet_slp_switch.hpp, greenlet_thread_support.hpp, platform/__init__.py and its __pycache__, platform/setup_switch_x64_masm.cmd, and the per-platform switch headers and assembly stubs (aarch64, alpha, amd64, arm32, arm32-ios, arm64 masm/msvc, csky, loongarch64, m68k, mips, ppc/ppc64 for aix/linux/macosx, …). These are the greenlet library's own sources, removed along with the rest of the venv directory; no project code is touched in this range.]
- * These features are highly experimental und not - * essential yet. - */ diff --git a/venv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h b/venv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h deleted file mode 100644 index bb18808..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h +++ /dev/null @@ -1,82 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 04-Oct-02 Gustavo Niemeyer - * Ported from MacOS version. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 3 - -/* !!!!WARNING!!!! need to add "r31" in the next line if this header file - * is meant to be compiled non-dynamically! - */ -#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "cr2", "cr3", "cr4" -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ ("mr %0, 1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "mr 11, %0\n" - "add 1, 1, 11\n" - "add 30, 30, 11\n" - : /* no outputs */ - : "r" (stsizediff) - : "11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. 
- */ diff --git a/venv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h b/venv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h deleted file mode 100644 index 8761122..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h +++ /dev/null @@ -1,41 +0,0 @@ -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "s1", "s2", "s3", "s4", "s5", \ - "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ - "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ - "fs10", "fs11" - -static int -slp_switch(void) -{ - int ret; - long fp; - long *stackref, stsizediff; - - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("mv %0, fp" : "=r" (fp) : ); - __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "add sp, sp, %0\n\t" - "add fp, fp, %0\n\t" - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); -#if __riscv_xlen == 32 - __asm__ volatile ("lw fp, %0" : : "m" (fp)); -#else - __asm__ volatile ("ld fp, %0" : : "m" (fp)); -#endif - __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); - return ret; -} - -#endif diff --git a/venv/Lib/site-packages/greenlet/platform/switch_s390_unix.h b/venv/Lib/site-packages/greenlet/platform/switch_s390_unix.h deleted file mode 100644 index 9199367..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_s390_unix.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 25-Jan-12 Alexey Borzenkov - * Fixed Linux/S390 port to work correctly with - * different optimization options both on 31-bit - * and 64-bit. Thanks to Stefan Raabe for lots - * of testing. - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 06-Oct-02 Gustavo Niemeyer - * Ported to Linux/S390. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#ifdef __s390x__ -#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ -#else -#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ -#endif - -/* Technically, r11-r13 also need saving, but function prolog starts - with stm(g) and since there are so many saved registers already - it won't be optimized, resulting in all r6-r15 being saved */ -#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ - "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ - "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" - -static int -slp_switch(void) -{ - int ret; - long *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); -#ifdef __s390x__ - __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); -#else - __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); -#endif - { - SLP_SAVE_STATE(stackref, stsizediff); -/* N.B. - r11 may be used as the frame pointer, and in that case it cannot be - clobbered and needs offsetting just like the stack pointer (but in cases - where frame pointer isn't used we might clobber it accidentally). What's - scary is that r11 is 2nd (and even 1st when GOT is used) callee saved - register that gcc would chose for surviving function calls. However, - since r6-r10 are clobbered above, their cost for reuse is reduced, so - gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), - making it relatively safe to offset in all cases. 
:) */ - __asm__ volatile ( -#ifdef __s390x__ - "agr 15, %0\n\t" - "agr 11, %0" -#else - "ar 15, %0\n\t" - "ar 11, %0" -#endif - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); - return ret; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/venv/Lib/site-packages/greenlet/platform/switch_sh_gcc.h b/venv/Lib/site-packages/greenlet/platform/switch_sh_gcc.h deleted file mode 100644 index 5ecc3b3..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_sh_gcc.h +++ /dev/null @@ -1,36 +0,0 @@ -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 -#define REGS_TO_SAVE "r8", "r9", "r10", "r11", "r13", \ - "fr12", "fr13", "fr14", "fr15" - -// r12 Global context pointer, GP -// r14 Frame pointer, FP -// r15 Stack pointer, SP - -static int -slp_switch(void) -{ - int err; - void* fp; - int *stackref, stsizediff; - __asm__ volatile("" : : : REGS_TO_SAVE); - __asm__ volatile("mov.l r14, %0" : "=m"(fp) : :); - __asm__("mov r15, %0" : "=r"(stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile( - "add %0, r15\n" - "add %0, r14\n" - : /* no outputs */ - : "r"(stsizediff)); - SLP_RESTORE_STATE(); - __asm__ volatile("mov r0, %0" : "=r"(err) : :); - } - __asm__ volatile("mov.l %0, r14" : : "m"(fp) :); - __asm__ volatile("" : : : REGS_TO_SAVE); - return err; -} - -#endif diff --git a/venv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/venv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h deleted file mode 100644 index 96990c3..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h +++ /dev/null @@ -1,92 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 16-May-15 Alexey Borzenkov - * Move stack spilling code inside save/restore functions - * 30-Aug-13 Floris Bruynooghe - Clean the register windows again before returning. - This does not clobber the PIC register as it leaves - the current window intact and is required for multi- - threaded code to work correctly. - * 08-Mar-11 Floris Bruynooghe - * No need to set return value register explicitly - * before the stack and framepointer are adjusted - * as none of the other registers are influenced by - * this. Also don't needlessly clean the windows - * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that - * clobbers the gcc PIC register (%l7). - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. 
- * 15-Sep-02 Gerd Woetzel - * added support for SunOS sparc with gcc - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - - -#define STACK_MAGIC 0 - - -#if defined(__sparcv9) -#define SLP_FLUSHW __asm__ volatile ("flushw") -#else -#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ -#endif - -/* On sparc we need to spill register windows inside save/restore functions */ -#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW -#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW - - -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - - /* Put current stack pointer into stackref. - * Register spilling is done in save/restore. - */ - __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); - - { - /* Thou shalt put SLP_SAVE_STATE into a local block */ - /* Copy the current stack onto the heap */ - SLP_SAVE_STATE(stackref, stsizediff); - - /* Increment stack and frame pointer by stsizediff */ - __asm__ volatile ( - "add %0, %%sp, %%sp\n\t" - "add %0, %%fp, %%fp" - : : "r" (stsizediff)); - - /* Copy new stack from it's save store on the heap */ - SLP_RESTORE_STATE(); - - __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); - return err; - } -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/venv/Lib/site-packages/greenlet/platform/switch_x32_unix.h b/venv/Lib/site-packages/greenlet/platform/switch_x32_unix.h deleted file mode 100644 index 893369c..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_x32_unix.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 17-Aug-12 Fantix King - * Ported from amd64. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "r12", "r13", "r14", "r15" - - -static int -slp_switch(void) -{ - void* ebp; - void* ebx; - unsigned int csr; - unsigned short cw; - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("fstcw %0" : "=m" (cw)); - __asm__ volatile ("stmxcsr %0" : "=m" (csr)); - __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); - __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); - __asm__ ("movl %%esp, %0" : "=g" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addl %0, %%esp\n" - "addl %0, %%ebp\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); - __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); - __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); - __asm__ volatile ("fldcw %0" : : "m" (cw)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. 
- */ diff --git a/venv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm b/venv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm deleted file mode 100644 index f5c72a2..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm +++ /dev/null @@ -1,111 +0,0 @@ -; -; stack switching code for MASM on x641 -; Kristjan Valur Jonsson, sept 2005 -; - - -;prototypes for our calls -slp_save_state_asm PROTO -slp_restore_state_asm PROTO - - -pushxmm MACRO reg - sub rsp, 16 - .allocstack 16 - movaps [rsp], reg ; faster than movups, but we must be aligned - ; .savexmm128 reg, offset (don't know what offset is, no documentation) -ENDM -popxmm MACRO reg - movaps reg, [rsp] ; faster than movups, but we must be aligned - add rsp, 16 -ENDM - -pushreg MACRO reg - push reg - .pushreg reg -ENDM -popreg MACRO reg - pop reg -ENDM - - -.code -slp_switch PROC FRAME - ;realign stack to 16 bytes after return address push, makes the following faster - sub rsp,8 - .allocstack 8 - - pushxmm xmm15 - pushxmm xmm14 - pushxmm xmm13 - pushxmm xmm12 - pushxmm xmm11 - pushxmm xmm10 - pushxmm xmm9 - pushxmm xmm8 - pushxmm xmm7 - pushxmm xmm6 - - pushreg r15 - pushreg r14 - pushreg r13 - pushreg r12 - - pushreg rbp - pushreg rbx - pushreg rdi - pushreg rsi - - sub rsp, 10h ;allocate the singlefunction argument (must be multiple of 16) - .allocstack 10h -.endprolog - - lea rcx, [rsp+10h] ;load stack base that we are saving - call slp_save_state_asm ;pass stackpointer, return offset in eax - cmp rax, 1 - je EXIT1 - cmp rax, -1 - je EXIT2 - ;actual stack switch: - add rsp, rax - call slp_restore_state_asm - xor rax, rax ;return 0 - -EXIT: - - add rsp, 10h - popreg rsi - popreg rdi - popreg rbx - popreg rbp - - popreg r12 - popreg r13 - popreg r14 - popreg r15 - - popxmm xmm6 - popxmm xmm7 - popxmm xmm8 - popxmm xmm9 - popxmm xmm10 - popxmm xmm11 - popxmm xmm12 - popxmm xmm13 - popxmm xmm14 - popxmm xmm15 - - add rsp, 8 - ret - -EXIT1: - mov rax, 1 - jmp EXIT - -EXIT2: - sar rax, 1 - jmp EXIT - -slp_switch ENDP - -END \ No newline at end of file diff --git a/venv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj b/venv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj deleted file mode 100644 index 64e3e6b..0000000 Binary files a/venv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h b/venv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h deleted file mode 100644 index 601ea56..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h +++ /dev/null @@ -1,60 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 26-Sep-02 Christian Tismer - * again as a result of virtualized stack access, - * the compiler used less registers. Needed to - * explicit mention registers in order to get them saved. - * Thanks to Jeff Senn for pointing this out and help. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 01-Mar-02 Christian Tismer - * Initial final version after lots of iterations for i386. 
- */ - -/* Avoid alloca redefined warning on mingw64 */ -#ifndef alloca -#define alloca _alloca -#endif - -#define STACK_REFPLUS 1 -#define STACK_MAGIC 0 - -/* Use the generic support for an external assembly language slp_switch function. */ -#define EXTERNAL_ASM - -#ifdef SLP_EVAL -/* This always uses the external masm assembly file. */ -#endif - -/* - * further self-processing support - */ - -/* we have IsBadReadPtr available, so we can peek at objects */ -/* -#define STACKLESS_SPY - -#ifdef IMPLEMENT_STACKLESSMODULE -#include "Windows.h" -#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) - -static int IS_ON_STACK(void*p) -{ - int stackref; - intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; - return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; -} - -#endif -*/ \ No newline at end of file diff --git a/venv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h b/venv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h deleted file mode 100644 index 0f3a59f..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h +++ /dev/null @@ -1,326 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 26-Sep-02 Christian Tismer - * again as a result of virtualized stack access, - * the compiler used less registers. Needed to - * explicit mention registers in order to get them saved. - * Thanks to Jeff Senn for pointing this out and help. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 01-Mar-02 Christian Tismer - * Initial final version after lots of iterations for i386. - */ - -#define alloca _alloca - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 - -/* Some magic to quell warnings and keep slp_switch() from crashing when built - with VC90. Disable global optimizations, and the warning: frame pointer - register 'ebp' modified by inline assembly code. - - We used to just disable global optimizations ("g") but upstream stackless - Python, as well as stackman, turn off all optimizations. - -References: -https://github.com/stackless-dev/stackman/blob/dbc72fe5207a2055e658c819fdeab9731dee78b9/stackman/platforms/switch_x86_msvc.h -https://github.com/stackless-dev/stackless/blob/main-slp/Stackless/platf/switch_x86_msvc.h -*/ -#define WIN32_LEAN_AND_MEAN -#include - -#pragma optimize("", off) /* so that autos are stored on the stack */ -#pragma warning(disable:4731) -#pragma warning(disable:4733) /* disable warning about modifying FS[0] */ - -/** - * Most modern compilers and environments handle C++ exceptions without any - * special help from us. MSVC on 32-bit windows is an exception. There, C++ - * exceptions are dealt with using Windows' Structured Exception Handling - * (SEH). - * - * SEH is implemented as a singly linked list of nodes. The - * head of this list is stored in the Thread Information Block, which itself - * is pointed to from the FS register. It's the first field in the structure, - * or offset 0, so we can access it using assembly FS:[0], or the compiler - * intrinsics and field offset information from the headers (as we do below). 
- * Somewhat unusually, the tail of the list doesn't have prev == NULL, it has - * prev == 0xFFFFFFFF. - * - * SEH was designed for C, and traditionally uses the MSVC compiler - * intrinsincs __try{}/__except{}. It is also utilized for C++ exceptions by - * MSVC; there, every throw of a C++ exception raises a SEH error with the - * ExceptionCode 0xE06D7363; the SEH handler list is then traversed to - * deal with the exception. - * - * If the SEH list is corrupt, then when a C++ exception is thrown the program - * will abruptly exit with exit code 1. This does not use std::terminate(), so - * std::set_terminate() is useless to debug this. - * - * The SEH list is closely tied to the call stack; entering a function that - * uses __try{} or most C++ functions will push a new handler onto the front - * of the list. Returning from the function will remove the handler. Saving - * and restoring the head node of the SEH list (FS:[0]) per-greenlet is NOT - * ENOUGH to make SEH or exceptions work. - * - * Stack switching breaks SEH because the call stack no longer necessarily - * matches the SEH list. For example, given greenlet A that switches to - * greenlet B, at the moment of entering greenlet B, we will have any SEH - * handlers from greenlet A on the SEH list; greenlet B can then add its own - * handlers to the SEH list. When greenlet B switches back to greenlet A, - * greenlet B's handlers would still be on the SEH stack, but when switch() - * returns control to greenlet A, we have replaced the contents of the stack - * in memory, so all the address that greenlet B added to the SEH list are now - * invalid: part of the call stack has been unwound, but the SEH list was out - * of sync with the call stack. The net effect is that exception handling - * stops working. - * - * Thus, when switching greenlets, we need to be sure that the SEH list - * matches the effective call stack, "cutting out" any handlers that were - * pushed by the greenlet that switched out and which are no longer valid. - * - * The easiest way to do this is to capture the SEH list at the time the main - * greenlet for a thread is created, and, when initially starting a greenlet, - * start a new SEH list for it, which contains nothing but the handler - * established for the new greenlet itself, with the tail being the handlers - * for the main greenlet. If we then save and restore the SEH per-greenlet, - * they won't interfere with each others SEH lists. (No greenlet can unwind - * the call stack past the handlers established by the main greenlet). - * - * By observation, a new thread starts with three SEH handlers on the list. By - * the time we get around to creating the main greenlet, though, there can be - * many more, established by transient calls that lead to the creation of the - * main greenlet. Therefore, 3 is a magic constant telling us when to perform - * the initial slice. - * - * All of this can be debugged using a vectored exception handler, which - * operates independently of the SEH handler list, and is called first. - * Walking the SEH list at key points can also be helpful. 
- * - * References: - * https://en.wikipedia.org/wiki/Win32_Thread_Information_Block - * https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 - * https://docs.microsoft.com/en-us/cpp/cpp/try-except-statement?view=msvc-160 - * https://docs.microsoft.com/en-us/cpp/cpp/structured-exception-handling-c-cpp?view=msvc-160 - * https://docs.microsoft.com/en-us/windows/win32/debug/structured-exception-handling - * https://docs.microsoft.com/en-us/windows/win32/debug/using-a-vectored-exception-handler - * https://bytepointer.com/resources/pietrek_crash_course_depths_of_win32_seh.htm - */ -#define GREENLET_NEEDS_EXCEPTION_STATE_SAVED - - -typedef struct _GExceptionRegistration { - struct _GExceptionRegistration* prev; - void* handler_f; -} GExceptionRegistration; - -static void -slp_set_exception_state(const void *const seh_state) -{ - // Because the stack from from which we do this is ALSO a handler, and - // that one we want to keep, we need to relink the current SEH handler - // frame to point to this one, cutting out the middle men, as it were. - // - // Entering a try block doesn't change the SEH frame, but entering a - // function containing a try block does. - GExceptionRegistration* current_seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - current_seh_state->prev = (GExceptionRegistration*)seh_state; -} - - -static GExceptionRegistration* -x86_slp_get_third_oldest_handler() -{ - GExceptionRegistration* a = NULL; /* Closest to the top */ - GExceptionRegistration* b = NULL; /* second */ - GExceptionRegistration* c = NULL; - GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - a = b = c = seh_state; - - while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { - if ((void*)seh_state->prev < (void*)100) { - fprintf(stderr, "\tERROR: Broken SEH chain.\n"); - return NULL; - } - a = b; - b = c; - c = seh_state; - - seh_state = seh_state->prev; - } - return a ? a : (b ? b : c); -} - - -static void* -slp_get_exception_state() -{ - // XXX: There appear to be three SEH handlers on the stack already at the - // start of the thread. Is that a guarantee? Almost certainly not. Yet in - // all observed cases it has been three. This is consistent with - // faulthandler off or on, and optimizations off or on. It may not be - // consistent with other operating system versions, though: we only have - // CI on one or two versions (don't ask what there are). - // In theory we could capture the number of handlers on the chain when - // PyInit__greenlet is called: there are probably only the default - // handlers at that point (unless we're embedded and people have used - // __try/__except or a C++ handler)? - return x86_slp_get_third_oldest_handler(); -} - -static int -slp_switch(void) -{ - /* MASM syntax is typically reversed from other assemblers. - It is usually - */ - int *stackref, stsizediff; - /* store the structured exception state for this stack */ - DWORD seh_state = __readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - __asm mov stackref, esp; - /* modify EBX, ESI and EDI in order to get them preserved */ - __asm mov ebx, ebx; - __asm xchg esi, edi; - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm { - mov eax, stsizediff - add esp, eax - add ebp, eax - } - SLP_RESTORE_STATE(); - } - __writefsdword(FIELD_OFFSET(NT_TIB, ExceptionList), seh_state); - return 0; -} - -/* re-enable ebp warning and global optimizations. 
*/ -#pragma optimize("", on) -#pragma warning(default:4731) -#pragma warning(default:4733) /* disable warning about modifying FS[0] */ - - -#endif - -/* - * further self-processing support - */ - -/* we have IsBadReadPtr available, so we can peek at objects */ -#define STACKLESS_SPY - -#ifdef GREENLET_DEBUG - -#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) - -static int IS_ON_STACK(void*p) -{ - int stackref; - int stackbase = ((int)&stackref) & 0xfffff000; - return (int)p >= stackbase && (int)p < stackbase + 0x00100000; -} - -static void -x86_slp_show_seh_chain() -{ - GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - fprintf(stderr, "====== SEH Chain ======\n"); - while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { - fprintf(stderr, "\tSEH_chain addr: %p handler: %p prev: %p\n", - seh_state, - seh_state->handler_f, seh_state->prev); - if ((void*)seh_state->prev < (void*)100) { - fprintf(stderr, "\tERROR: Broken chain.\n"); - break; - } - seh_state = seh_state->prev; - } - fprintf(stderr, "====== End SEH Chain ======\n"); - fflush(NULL); - return; -} - -//addVectoredExceptionHandler constants: -//CALL_FIRST means call this exception handler first; -//CALL_LAST means call this exception handler last -#define CALL_FIRST 1 -#define CALL_LAST 0 - -LONG WINAPI -GreenletVectorHandler(PEXCEPTION_POINTERS ExceptionInfo) -{ - // We get one of these for every C++ exception, with code - // E06D7363 - // This is a special value that means "C++ exception from MSVC" - // https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 - // - // Install in the module init function with: - // AddVectoredExceptionHandler(CALL_FIRST, GreenletVectorHandler); - PEXCEPTION_RECORD ExceptionRecord = ExceptionInfo->ExceptionRecord; - - fprintf(stderr, - "GOT VECTORED EXCEPTION:\n" - "\tExceptionCode : %p\n" - "\tExceptionFlags : %p\n" - "\tExceptionAddr : %p\n" - "\tNumberparams : %ld\n", - ExceptionRecord->ExceptionCode, - ExceptionRecord->ExceptionFlags, - ExceptionRecord->ExceptionAddress, - ExceptionRecord->NumberParameters - ); - if (ExceptionRecord->ExceptionFlags & 1) { - fprintf(stderr, "\t\tEH_NONCONTINUABLE\n" ); - } - if (ExceptionRecord->ExceptionFlags & 2) { - fprintf(stderr, "\t\tEH_UNWINDING\n" ); - } - if (ExceptionRecord->ExceptionFlags & 4) { - fprintf(stderr, "\t\tEH_EXIT_UNWIND\n" ); - } - if (ExceptionRecord->ExceptionFlags & 8) { - fprintf(stderr, "\t\tEH_STACK_INVALID\n" ); - } - if (ExceptionRecord->ExceptionFlags & 0x10) { - fprintf(stderr, "\t\tEH_NESTED_CALL\n" ); - } - if (ExceptionRecord->ExceptionFlags & 0x20) { - fprintf(stderr, "\t\tEH_TARGET_UNWIND\n" ); - } - if (ExceptionRecord->ExceptionFlags & 0x40) { - fprintf(stderr, "\t\tEH_COLLIDED_UNWIND\n" ); - } - fprintf(stderr, "\n"); - fflush(NULL); - for(DWORD i = 0; i < ExceptionRecord->NumberParameters; i++) { - fprintf(stderr, "\t\t\tParam %ld: %lX\n", i, ExceptionRecord->ExceptionInformation[i]); - } - - if (ExceptionRecord->NumberParameters == 3) { - fprintf(stderr, "\tAbout to traverse SEH chain\n"); - // C++ Exception records have 3 params. 
- x86_slp_show_seh_chain(); - } - - return EXCEPTION_CONTINUE_SEARCH; -} - - - - -#endif diff --git a/venv/Lib/site-packages/greenlet/platform/switch_x86_unix.h b/venv/Lib/site-packages/greenlet/platform/switch_x86_unix.h deleted file mode 100644 index 493fa6b..0000000 --- a/venv/Lib/site-packages/greenlet/platform/switch_x86_unix.h +++ /dev/null @@ -1,105 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 3-May-13 Ralf Schmitt - * Add support for strange GCC caller-save decisions - * (ported from switch_aarch64_gcc.h) - * 19-Aug-11 Alexey Borzenkov - * Correctly save ebp, ebx and cw - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'ebx' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for spark - * 31-Avr-02 Armin Rigo - * Added ebx, esi and edi register-saves. - * 01-Mar-02 Samual M. Rushing - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -/* #define STACK_MAGIC 3 */ -/* the above works fine with gcc 2.96, but 2.95.3 wants this */ -#define STACK_MAGIC 0 - -#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) -# define ATTR_NOCLONE __attribute__((noclone)) -#else -# define ATTR_NOCLONE -#endif - -static int -slp_switch(void) -{ - int err; -#ifdef _WIN32 - void *seh; -#endif - void *ebp, *ebx; - unsigned short cw; - int *stackref, stsizediff; - __asm__ volatile ("" : : : "esi", "edi"); - __asm__ volatile ("fstcw %0" : "=m" (cw)); - __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); - __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); -#ifdef _WIN32 - __asm__ volatile ( - "movl %%fs:0x0, %%eax\n" - "movl %%eax, %0\n" - : "=m" (seh) - : - : "eax"); -#endif - __asm__ ("movl %%esp, %0" : "=g" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addl %0, %%esp\n" - "addl %0, %%ebp\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); - } -#ifdef _WIN32 - __asm__ volatile ( - "movl %0, %%eax\n" - "movl %%eax, %%fs:0x0\n" - : - : "m" (seh) - : "eax"); -#endif - __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); - __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); - __asm__ volatile ("fldcw %0" : : "m" (cw)); - __asm__ volatile ("" : : : "esi", "edi"); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. 
- */ diff --git a/venv/Lib/site-packages/greenlet/slp_platformselect.h b/venv/Lib/site-packages/greenlet/slp_platformselect.h deleted file mode 100644 index d9b7d0a..0000000 --- a/venv/Lib/site-packages/greenlet/slp_platformselect.h +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Platform Selection for Stackless Python - */ -#ifdef __cplusplus -extern "C" { -#endif - -#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER) -# include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */ -#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__) -# include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */ -#elif defined(MS_WIN64) && defined(_M_ARM64) -# include "platform/switch_arm64_msvc.h" /* MS Visual Studio on ARM64 */ -#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__) -# include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */ -#elif defined(__GNUC__) && defined(__amd64__) -# include "platform/switch_amd64_unix.h" /* gcc on amd64 */ -#elif defined(__GNUC__) && defined(__i386__) -# include "platform/switch_x86_unix.h" /* gcc on X86 */ -#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__)) -# include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */ -#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__)) -# include "platform/switch_ppc_linux.h" /* gcc on PowerPC */ -#elif defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) -# include "platform/switch_ppc_macosx.h" /* Apple MacOS X on 32-bit PowerPC */ -#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX) -# include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */ -#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX) -# include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */ -#elif defined(__GNUC__) && defined(__powerpc__) && defined(__NetBSD__) -#include "platform/switch_ppc_unix.h" /* gcc on NetBSD/powerpc */ -#elif defined(__GNUC__) && defined(sparc) -# include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */ -#elif defined(__GNUC__) && defined(__sparc__) -# include "platform/switch_sparc_sun_gcc.h" /* NetBSD sparc with gcc */ -#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun) -# include "platform/switch_sparc_sun_gcc.h" /* SunStudio on amd64 */ -#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun) -# include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */ -#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun) -# include "platform/switch_x86_unix.h" /* SunStudio on x86 */ -#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__) -# include "platform/switch_s390_unix.h" /* Linux/S390 */ -#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__) -# include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */ -#elif defined(__GNUC__) && defined(__arm__) -# ifdef __APPLE__ -# include -# endif -# if TARGET_OS_IPHONE -# include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */ -# else -# include "platform/switch_arm32_gcc.h" /* gcc using arm32 */ -# endif -#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__) -# include "platform/switch_mips_unix.h" /* Linux/MIPS */ -#elif defined(__GNUC__) && defined(__aarch64__) -# include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */ -#elif defined(__GNUC__) && defined(__mc68000__) -# include "platform/switch_m68k_gcc.h" /* gcc on m68k */ -#elif defined(__GNUC__) && 
defined(__csky__) -#include "platform/switch_csky_gcc.h" /* gcc on csky */ -# elif defined(__GNUC__) && defined(__riscv) -# include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ -#elif defined(__GNUC__) && defined(__alpha__) -# include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ -#elif defined(MS_WIN32) && defined(__llvm__) && defined(__aarch64__) -# include "platform/switch_aarch64_gcc.h" /* LLVM Aarch64 ABI for Windows */ -#elif defined(__GNUC__) && defined(__loongarch64) && defined(__linux__) -# include "platform/switch_loongarch64_linux.h" /* LoongArch64 */ -#elif defined(__GNUC__) && defined(__sh__) -# include "platform/switch_sh_gcc.h" /* SuperH */ -#endif - -#ifdef __cplusplus -}; -#endif diff --git a/venv/Lib/site-packages/greenlet/tests/__init__.py b/venv/Lib/site-packages/greenlet/tests/__init__.py deleted file mode 100644 index 1861360..0000000 --- a/venv/Lib/site-packages/greenlet/tests/__init__.py +++ /dev/null @@ -1,248 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tests for greenlet. - -""" -import os -import sys -import sysconfig -import unittest - -from gc import collect -from gc import get_objects -from threading import active_count as active_thread_count -from time import sleep -from time import time - -import psutil - -from greenlet import greenlet as RawGreenlet -from greenlet import getcurrent - -from greenlet._greenlet import get_pending_cleanup_count -from greenlet._greenlet import get_total_main_greenlets - -from . import leakcheck - -PY312 = sys.version_info[:2] >= (3, 12) -PY313 = sys.version_info[:2] >= (3, 13) -# XXX: First tested on 3.14a7. Revisit all uses of this on later versions to ensure they -# are still valid. -PY314 = sys.version_info[:2] >= (3, 14) - -WIN = sys.platform.startswith("win") -RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') -RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS -RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') -RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR -RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') - -# Is the current interpreter free-threaded?) Note that this -# isn't the same as whether the GIL is enabled, this is the build-time -# value. Certain CPython details, like the garbage collector, -# work very differently on potentially-free-threaded builds than -# standard builds. -RUNNING_ON_FREETHREAD_BUILD = bool(sysconfig.get_config_var("Py_GIL_DISABLED")) - -class TestCaseMetaClass(type): - # wrap each test method with - # a) leak checks - def __new__(cls, classname, bases, classDict): - # pylint and pep8 fight over what this should be called (mcs or cls). - # pylint gets it right, but we can't scope disable pep8, so we go with - # its convention. - # pylint: disable=bad-mcs-classmethod-argument - check_totalrefcount = True - - # Python 3: must copy, we mutate the classDict. Interestingly enough, - # it doesn't actually error out, but under 3.6 we wind up wrapping - # and re-wrapping the same items over and over and over. 
- for key, value in list(classDict.items()): - if key.startswith('test') and callable(value): - classDict.pop(key) - if check_totalrefcount: - value = leakcheck.wrap_refcount(value) - classDict[key] = value - return type.__new__(cls, classname, bases, classDict) - - -class TestCase(unittest.TestCase, metaclass=TestCaseMetaClass): - - cleanup_attempt_sleep_duration = 0.001 - cleanup_max_sleep_seconds = 1 - - def wait_for_pending_cleanups(self, - initial_active_threads=None, - initial_main_greenlets=None): - initial_active_threads = initial_active_threads or self.threads_before_test - initial_main_greenlets = initial_main_greenlets or self.main_greenlets_before_test - sleep_time = self.cleanup_attempt_sleep_duration - # NOTE: This is racy! A Python-level thread object may be dead - # and gone, but the C thread may not yet have fired its - # destructors and added to the queue. There's no particular - # way to know that's about to happen. We try to watch the - # Python threads to make sure they, at least, have gone away. - # Counting the main greenlets, which we can easily do deterministically, - # also helps. - - # Always sleep at least once to let other threads run - sleep(sleep_time) - quit_after = time() + self.cleanup_max_sleep_seconds - # TODO: We could add an API that calls us back when a particular main greenlet is deleted? - # It would have to drop the GIL - while ( - get_pending_cleanup_count() - or active_thread_count() > initial_active_threads - or (not self.expect_greenlet_leak - and get_total_main_greenlets() > initial_main_greenlets)): - sleep(sleep_time) - if time() > quit_after: - print("Time limit exceeded.") - print("Threads: Waiting for only", initial_active_threads, - "-->", active_thread_count()) - print("MGlets : Waiting for only", initial_main_greenlets, - "-->", get_total_main_greenlets()) - break - collect() - - def count_objects(self, kind=list, exact_kind=True): - # pylint:disable=unidiomatic-typecheck - # Collect the garbage. - for _ in range(3): - collect() - if exact_kind: - return sum( - 1 - for x in get_objects() - if type(x) is kind - ) - # instances - return sum( - 1 - for x in get_objects() - if isinstance(x, kind) - ) - - greenlets_before_test = 0 - threads_before_test = 0 - main_greenlets_before_test = 0 - expect_greenlet_leak = False - - def count_greenlets(self): - """ - Find all the greenlets and subclasses tracked by the GC. - """ - return self.count_objects(RawGreenlet, False) - - def setUp(self): - # Ensure the main greenlet exists, otherwise the first test - # gets a false positive leak - super().setUp() - getcurrent() - self.threads_before_test = active_thread_count() - self.main_greenlets_before_test = get_total_main_greenlets() - self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) - self.greenlets_before_test = self.count_greenlets() - - def tearDown(self): - if getattr(self, 'skipTearDown', False): - return - - self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) - super().tearDown() - - def get_expected_returncodes_for_aborted_process(self): - import signal - # The child should be aborted in an unusual way. 
On POSIX - # platforms, this is done with abort() and signal.SIGABRT, - # which is reflected in a negative return value; however, on - # Windows, even though we observe the child print "Fatal - # Python error: Aborted" and in older versions of the C - # runtime "This application has requested the Runtime to - # terminate it in an unusual way," it always has an exit code - # of 3. This is interesting because 3 is the error code for - # ERROR_PATH_NOT_FOUND; BUT: the C runtime abort() function - # also uses this code. - # - # If we link to the static C library on Windows, the error - # code changes to '0xc0000409' (hex(3221226505)), which - # apparently is STATUS_STACK_BUFFER_OVERRUN; but "What this - # means is that nowadays when you get a - # STATUS_STACK_BUFFER_OVERRUN, it doesn’t actually mean that - # there is a stack buffer overrun. It just means that the - # application decided to terminate itself with great haste." - # - # - # On windows, we've also seen '0xc0000005' (hex(3221225477)). - # That's "Access Violation" - # - # See - # https://devblogs.microsoft.com/oldnewthing/20110519-00/?p=10623 - # and - # https://docs.microsoft.com/en-us/previous-versions/k089yyh0(v=vs.140)?redirectedfrom=MSDN - # and - # https://devblogs.microsoft.com/oldnewthing/20190108-00/?p=100655 - expected_exit = ( - -signal.SIGABRT, - # But beginning on Python 3.11, the faulthandler - # that prints the C backtraces sometimes segfaults after - # reporting the exception but before printing the stack. - # This has only been seen on linux/gcc. - -signal.SIGSEGV, - ) if not WIN else ( - 3, - 0xc0000409, - 0xc0000005, - ) - return expected_exit - - def get_process_uss(self): - """ - Return the current process's USS in bytes. - - uss is available on Linux, macOS, Windows. Also known as - "Unique Set Size", this is the memory which is unique to a - process and which would be freed if the process was terminated - right now. - - If this is not supported by ``psutil``, this raises the - :exc:`unittest.SkipTest` exception. - """ - try: - return psutil.Process().memory_full_info().uss - except AttributeError as e: - raise unittest.SkipTest("uss not supported") from e - - def run_script(self, script_name, show_output=True): - import subprocess - script = os.path.join( - os.path.dirname(__file__), - script_name, - ) - - try: - return subprocess.check_output([sys.executable, script], - encoding='utf-8', - stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as ex: - if show_output: - print('-----') - print('Failed to run script', script) - print('~~~~~') - print(ex.output) - print('------') - raise - - - def assertScriptRaises(self, script_name, exitcodes=None): - import subprocess - with self.assertRaises(subprocess.CalledProcessError) as exc: - output = self.run_script(script_name, show_output=False) - __traceback_info__ = output - # We're going to fail the assertion if we get here, at least - # preserve the output in the traceback. 
- - if exitcodes is None: - exitcodes = self.get_expected_returncodes_for_aborted_process() - self.assertIn(exc.exception.returncode, exitcodes) - return exc.exception diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a6a69a7..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc deleted file mode 100644 index 8f1a5cc..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc deleted file mode 100644 index 69ea980..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc deleted file mode 100644 index de35f7d..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc deleted file mode 100644 index 74b7850..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc deleted file mode 100644 index 575467f..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc deleted file mode 100644 index 2597e12..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc deleted file mode 100644 index 2c95f59..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc deleted file mode 100644 index 3517424..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc 
b/venv/Lib/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc deleted file mode 100644 index 9b15055..0000000 Binary files a/venv/Lib/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc b/venv/Lib/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc deleted file mode 100644 index
[venv/Lib/site-packages/greenlet/tests/ — the committed virtualenv copy of greenlet's test suite is deleted in this part of the diff: the remaining __pycache__/*.cpython-312.pyc caches, the _test_extension.cp312-win_amd64.pyd and _test_extension_cpp.cp312-win_amd64.pyd binaries, the C/C++ test-extension sources _test_extension.c and _test_extension_cpp.cpp, and the test modules fail_clearing_run_switches.py, fail_cpp_exception.py, fail_initialstub_already_started.py, fail_slp_switch.py, fail_switch_three_greenlets.py, fail_switch_three_greenlets2.py, fail_switch_two_greenlets.py, leakcheck.py, test_contextvars.py, test_cpp.py, test_extension_interface.py, test_gc.py, test_generator.py, test_generator_nested.py, and test_greenlet.py (all 100644 -> /dev/null).]
- self.assertRaises(SomeError, g2.switch) - self.assertEqual(seen, [SomeError]) - - value = g2.switch() - self.assertEqual(value, ()) - self.assertEqual(seen, [SomeError]) - - value = g2.switch(25) - self.assertEqual(value, 25) - self.assertEqual(seen, [SomeError]) - - - def test_send_exception(self): - seen = [] - g1 = RawGreenlet(fmain) - g1.switch(seen) - self.assertRaises(KeyError, send_exception, g1, KeyError) - self.assertEqual(seen, [KeyError]) - - def test_dealloc(self): - seen = [] - g1 = RawGreenlet(fmain) - g2 = RawGreenlet(fmain) - g1.switch(seen) - g2.switch(seen) - self.assertEqual(seen, []) - del g1 - gc.collect() - self.assertEqual(seen, [greenlet.GreenletExit]) - del g2 - gc.collect() - self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) - - def test_dealloc_catches_GreenletExit_throws_other(self): - def run(): - try: - greenlet.getcurrent().parent.switch() - except greenlet.GreenletExit: - raise SomeError from None - - g = RawGreenlet(run) - g.switch() - # Destroying the only reference to the greenlet causes it - # to get GreenletExit; when it in turn raises, even though we're the parent - # we don't get the exception, it just gets printed. - # When we run on 3.8 only, we can use sys.unraisablehook - oldstderr = sys.stderr - from io import StringIO - stderr = sys.stderr = StringIO() - try: - del g - finally: - sys.stderr = oldstderr - - v = stderr.getvalue() - self.assertIn("Exception", v) - self.assertIn('ignored', v) - self.assertIn("SomeError", v) - - - @unittest.skipIf( - PY313 and RUNNING_ON_MANYLINUX, - "Sometimes flaky (getting one GreenletExit in the second list)" - # Probably due to funky timing interactions? - # TODO: FIXME Make that work. - ) - - def test_dealloc_other_thread(self): - seen = [] - someref = [] - - bg_glet_created_running_and_no_longer_ref_in_bg = threading.Event() - fg_ref_released = threading.Event() - bg_should_be_clear = threading.Event() - ok_to_exit_bg_thread = threading.Event() - - def f(): - g1 = RawGreenlet(fmain) - g1.switch(seen) - someref.append(g1) - del g1 - gc.collect() - bg_glet_created_running_and_no_longer_ref_in_bg.set() - fg_ref_released.wait(3) - - RawGreenlet() # trigger release - bg_should_be_clear.set() - ok_to_exit_bg_thread.wait(3) - RawGreenlet() # One more time - - t = threading.Thread(target=f) - t.start() - bg_glet_created_running_and_no_longer_ref_in_bg.wait(10) - - self.assertEqual(seen, []) - self.assertEqual(len(someref), 1) - del someref[:] - if not RUNNING_ON_FREETHREAD_BUILD: - # The free-threaded GC is very different. In 3.14rc1, - # the free-threaded GC traverses ``g1``, realizes it is - # not referenced from anywhere else IT cares about, - # calls ``tp_clear`` and then ``green_dealloc``. This causes - # the greenlet to lose its reference to the main greenlet and thread - # in which it was running, which means we can no longer throw an - # exception into it, preventing the rest of this test from working. - # Standard 3.14 traverses the object but doesn't ``tp_clear`` or - # ``green_dealloc`` it. - gc.collect() - # g1 is not released immediately because it's from another thread; - # switching back to that thread will allocate a greenlet and thus - # trigger deletion actions. 
- self.assertEqual(seen, []) - fg_ref_released.set() - bg_should_be_clear.wait(3) - try: - self.assertEqual(seen, [greenlet.GreenletExit]) - finally: - ok_to_exit_bg_thread.set() - t.join(10) - del seen[:] - del someref[:] - - def test_frame(self): - def f1(): - f = sys._getframe(0) # pylint:disable=protected-access - self.assertEqual(f.f_back, None) - greenlet.getcurrent().parent.switch(f) - return "meaning of life" - g = RawGreenlet(f1) - frame = g.switch() - self.assertTrue(frame is g.gr_frame) - self.assertTrue(g) - - from_g = g.switch() - self.assertFalse(g) - self.assertEqual(from_g, 'meaning of life') - self.assertEqual(g.gr_frame, None) - - def test_thread_bug(self): - def runner(x): - g = RawGreenlet(lambda: time.sleep(x)) - g.switch() - t1 = threading.Thread(target=runner, args=(0.2,)) - t2 = threading.Thread(target=runner, args=(0.3,)) - t1.start() - t2.start() - t1.join(10) - t2.join(10) - - def test_switch_kwargs(self): - def run(a, b): - self.assertEqual(a, 4) - self.assertEqual(b, 2) - return 42 - x = RawGreenlet(run).switch(a=4, b=2) - self.assertEqual(x, 42) - - def test_switch_kwargs_to_parent(self): - def run(x): - greenlet.getcurrent().parent.switch(x=x) - greenlet.getcurrent().parent.switch(2, x=3) - return x, x ** 2 - g = RawGreenlet(run) - self.assertEqual({'x': 3}, g.switch(3)) - self.assertEqual(((2,), {'x': 3}), g.switch()) - self.assertEqual((3, 9), g.switch()) - - def test_switch_to_another_thread(self): - data = {} - created_event = threading.Event() - done_event = threading.Event() - - def run(): - data['g'] = RawGreenlet(lambda: None) - created_event.set() - done_event.wait(10) - thread = threading.Thread(target=run) - thread.start() - created_event.wait(10) - with self.assertRaises(greenlet.error): - data['g'].switch() - done_event.set() - thread.join(10) - # XXX: Should handle this automatically - data.clear() - - def test_exc_state(self): - def f(): - try: - raise ValueError('fun') - except: # pylint:disable=bare-except - exc_info = sys.exc_info() - RawGreenlet(h).switch() - self.assertEqual(exc_info, sys.exc_info()) - - def h(): - self.assertEqual(sys.exc_info(), (None, None, None)) - - RawGreenlet(f).switch() - - def test_instance_dict(self): - def f(): - greenlet.getcurrent().test = 42 - def deldict(g): - del g.__dict__ - def setdict(g, value): - g.__dict__ = value - g = RawGreenlet(f) - self.assertEqual(g.__dict__, {}) - g.switch() - self.assertEqual(g.test, 42) - self.assertEqual(g.__dict__, {'test': 42}) - g.__dict__ = g.__dict__ - self.assertEqual(g.__dict__, {'test': 42}) - self.assertRaises(TypeError, deldict, g) - self.assertRaises(TypeError, setdict, g, 42) - - def test_running_greenlet_has_no_run(self): - has_run = [] - def func(): - has_run.append( - hasattr(greenlet.getcurrent(), 'run') - ) - - g = RawGreenlet(func) - g.switch() - self.assertEqual(has_run, [False]) - - def test_deepcopy(self): - import copy - self.assertRaises(TypeError, copy.copy, RawGreenlet()) - self.assertRaises(TypeError, copy.deepcopy, RawGreenlet()) - - def test_parent_restored_on_kill(self): - hub = RawGreenlet(lambda: None) - main = greenlet.getcurrent() - result = [] - def worker(): - try: - # Wait to be killed by going back to the test. 
- main.switch() - except greenlet.GreenletExit: - # Resurrect and switch to parent - result.append(greenlet.getcurrent().parent) - result.append(greenlet.getcurrent()) - hub.switch() - g = RawGreenlet(worker, parent=hub) - g.switch() - # delete the only reference, thereby raising GreenletExit - del g - self.assertTrue(result) - self.assertIs(result[0], main) - self.assertIs(result[1].parent, hub) - # Delete them, thereby breaking the cycle between the greenlet - # and the frame, which otherwise would never be collectable - # XXX: We should be able to automatically fix this. - del result[:] - hub = None - main = None - - def test_parent_return_failure(self): - # No run causes AttributeError on switch - g1 = RawGreenlet() - # Greenlet that implicitly switches to parent - g2 = RawGreenlet(lambda: None, parent=g1) - # AttributeError should propagate to us, no fatal errors - with self.assertRaises(AttributeError): - g2.switch() - - def test_throw_exception_not_lost(self): - class mygreenlet(RawGreenlet): - def __getattribute__(self, name): - try: - raise Exception # pylint:disable=broad-exception-raised - except: # pylint:disable=bare-except - pass - return RawGreenlet.__getattribute__(self, name) - g = mygreenlet(lambda: None) - self.assertRaises(SomeError, g.throw, SomeError()) - - @fails_leakcheck - def _do_test_throw_to_dead_thread_doesnt_crash(self, wait_for_cleanup=False): - result = [] - def worker(): - greenlet.getcurrent().parent.switch() - - def creator(): - g = RawGreenlet(worker) - g.switch() - result.append(g) - if wait_for_cleanup: - # Let this greenlet eventually be cleaned up. - g.switch() - greenlet.getcurrent() - t = threading.Thread(target=creator) - t.start() - t.join(10) - del t - # But, depending on the operating system, the thread - # deallocator may not actually have run yet! So we can't be - # sure about the error message unless we wait. - if wait_for_cleanup: - self.wait_for_pending_cleanups() - with self.assertRaises(greenlet.error) as exc: - result[0].throw(SomeError) - - if not wait_for_cleanup: - s = str(exc.exception) - self.assertTrue( - s == "cannot switch to a different thread (which happens to have exited)" - or 'Cannot switch' in s - ) - else: - self.assertEqual( - str(exc.exception), - "cannot switch to a different thread (which happens to have exited)", - ) - - if hasattr(result[0].gr_frame, 'clear'): - # The frame is actually executing (it thinks), we can't clear it. - with self.assertRaises(RuntimeError): - result[0].gr_frame.clear() - # Unfortunately, this doesn't actually clear the references, they're in the - # fast local array. - if not wait_for_cleanup: - # f_locals has no clear method in Python 3.13 - if hasattr(result[0].gr_frame.f_locals, 'clear'): - result[0].gr_frame.f_locals.clear() - else: - self.assertIsNone(result[0].gr_frame) - - del creator - worker = None - del result[:] - # XXX: we ought to be able to automatically fix this. 
- # See issue 252 - self.expect_greenlet_leak = True # direct us not to wait for it to go away - - @fails_leakcheck - def test_throw_to_dead_thread_doesnt_crash(self): - self._do_test_throw_to_dead_thread_doesnt_crash() - - def test_throw_to_dead_thread_doesnt_crash_wait(self): - self._do_test_throw_to_dead_thread_doesnt_crash(True) - - @fails_leakcheck - def test_recursive_startup(self): - class convoluted(RawGreenlet): - def __init__(self): - RawGreenlet.__init__(self) - self.count = 0 - def __getattribute__(self, name): - if name == 'run' and self.count == 0: - self.count = 1 - self.switch(43) - return RawGreenlet.__getattribute__(self, name) - def run(self, value): - while True: - self.parent.switch(value) - g = convoluted() - self.assertEqual(g.switch(42), 43) - # Exits the running greenlet, otherwise it leaks - # XXX: We should be able to automatically fix this - #g.throw(greenlet.GreenletExit) - #del g - self.expect_greenlet_leak = True - - def test_threaded_updatecurrent(self): - # released when main thread should execute - lock1 = threading.Lock() - lock1.acquire() - # released when another thread should execute - lock2 = threading.Lock() - lock2.acquire() - class finalized(object): - def __del__(self): - # happens while in green_updatecurrent() in main greenlet - # should be very careful not to accidentally call it again - # at the same time we must make sure another thread executes - lock2.release() - lock1.acquire() - # now ts_current belongs to another thread - def deallocator(): - greenlet.getcurrent().parent.switch() - def fthread(): - lock2.acquire() - greenlet.getcurrent() - del g[0] - lock1.release() - lock2.acquire() - greenlet.getcurrent() - lock1.release() - main = greenlet.getcurrent() - g = [RawGreenlet(deallocator)] - g[0].bomb = finalized() - g[0].switch() - t = threading.Thread(target=fthread) - t.start() - # let another thread grab ts_current and deallocate g[0] - lock2.release() - lock1.acquire() - # this is the corner stone - # getcurrent() will notice that ts_current belongs to another thread - # and start the update process, which would notice that g[0] should - # be deallocated, and that will execute an object's finalizer. Now, - # that object will let another thread run so it can grab ts_current - # again, which would likely crash the interpreter if there's no - # check for this case at the end of green_updatecurrent(). This test - # passes if getcurrent() returns correct result, but it's likely - # to randomly crash if it's not anyway. - self.assertEqual(greenlet.getcurrent(), main) - # wait for another thread to complete, just in case - t.join(10) - - def test_dealloc_switch_args_not_lost(self): - seen = [] - def worker(): - # wait for the value - value = greenlet.getcurrent().parent.switch() - # delete all references to ourself - del worker[0] - initiator.parent = greenlet.getcurrent().parent - # switch to main with the value, but because - # ts_current is the last reference to us we - # return here immediately, where we resurrect ourself. 
- try: - greenlet.getcurrent().parent.switch(value) - finally: - seen.append(greenlet.getcurrent()) - def initiator(): - return 42 # implicitly falls thru to parent - - worker = [RawGreenlet(worker)] - - worker[0].switch() # prime worker - initiator = RawGreenlet(initiator, worker[0]) - value = initiator.switch() - self.assertTrue(seen) - self.assertEqual(value, 42) - - def test_tuple_subclass(self): - # The point of this test is to see what happens when a custom - # tuple subclass is used as an object passed directly to the C - # function ``green_switch``; part of ``green_switch`` checks - # the ``len()`` of the ``args`` tuple, and that can call back - # into Python. Here, when it calls back into Python, we - # recursively enter ``green_switch`` again. - - # This test is really only relevant on Python 2. The builtin - # `apply` function directly passes the given args tuple object - # to the underlying function, whereas the Python 3 version - # unpacks and repacks into an actual tuple. This could still - # happen using the C API on Python 3 though. We should write a - # builtin version of apply() ourself. - def _apply(func, a, k): - func(*a, **k) - - class mytuple(tuple): - def __len__(self): - greenlet.getcurrent().switch() - return tuple.__len__(self) - args = mytuple() - kwargs = dict(a=42) - def switchapply(): - _apply(greenlet.getcurrent().parent.switch, args, kwargs) - g = RawGreenlet(switchapply) - self.assertEqual(g.switch(), kwargs) - - def test_abstract_subclasses(self): - AbstractSubclass = ABCMeta( - 'AbstractSubclass', - (RawGreenlet,), - {'run': abstractmethod(lambda self: None)}) - - class BadSubclass(AbstractSubclass): - pass - - class GoodSubclass(AbstractSubclass): - def run(self): - pass - - GoodSubclass() # should not raise - self.assertRaises(TypeError, BadSubclass) - - def test_implicit_parent_with_threads(self): - if not gc.isenabled(): - return # cannot test with disabled gc - N = gc.get_threshold()[0] - if N < 50: - return # cannot test with such a small N - def attempt(): - lock1 = threading.Lock() - lock1.acquire() - lock2 = threading.Lock() - lock2.acquire() - recycled = [False] - def another_thread(): - lock1.acquire() # wait for gc - greenlet.getcurrent() # update ts_current - lock2.release() # release gc - t = threading.Thread(target=another_thread) - t.start() - class gc_callback(object): - def __del__(self): - lock1.release() - lock2.acquire() - recycled[0] = True - class garbage(object): - def __init__(self): - self.cycle = self - self.callback = gc_callback() - l = [] - x = range(N*2) - current = greenlet.getcurrent() - g = garbage() - for _ in x: - g = None # lose reference to garbage - if recycled[0]: - # gc callback called prematurely - t.join(10) - return False - last = RawGreenlet() - if recycled[0]: - break # yes! gc called in green_new - l.append(last) # increase allocation counter - else: - # gc callback not called when expected - gc.collect() - if recycled[0]: - t.join(10) - return False - self.assertEqual(last.parent, current) - for g in l: - self.assertEqual(g.parent, current) - return True - for _ in range(5): - if attempt(): - break - - def test_issue_245_reference_counting_subclass_no_threads(self): - # https://github.com/python-greenlet/greenlet/issues/245 - # Before the fix, this crashed pretty reliably on - # Python 3.10, at least on macOS; but much less reliably on other - # interpreters (memory layout must have changed). - # The threaded test crashed more reliably on more interpreters. 
- from greenlet import getcurrent - from greenlet import GreenletExit - - class Greenlet(RawGreenlet): - pass - - initial_refs = sys.getrefcount(Greenlet) - # This has to be an instance variable because - # Python 2 raises a SyntaxError if we delete a local - # variable referenced in an inner scope. - self.glets = [] # pylint:disable=attribute-defined-outside-init - - def greenlet_main(): - try: - getcurrent().parent.switch() - except GreenletExit: - self.glets.append(getcurrent()) - - # Before the - for _ in range(10): - Greenlet(greenlet_main).switch() - - del self.glets - if RUNNING_ON_FREETHREAD_BUILD: - # Free-threaded builds make types immortal, which gives us - # weird numbers here, and we actually do APPEAR to end - # up with one more reference than we started with, at least on 3.14. - # If we change the code in green_dealloc to avoid increffing the type - # (which fixed this initial bug), then our leakchecks find other objects - # that have leaked, including a tuple, a dict, and a type. So that's not the - # right solution. Instead we change the test: - # XXX: FIXME: Is there a better way? - self.assertGreaterEqual(sys.getrefcount(Greenlet), initial_refs) - else: - self.assertEqual(sys.getrefcount(Greenlet), initial_refs) - - @unittest.skipIf( - PY313 and RUNNING_ON_MANYLINUX, - "The manylinux images appear to hang on this test on 3.13rc2" - # Or perhaps I just got tired of waiting for the 450s timeout. - # Still, it shouldn't take anywhere near that long. Does not reproduce in - # Ubuntu images, on macOS or Windows. - ) - def test_issue_245_reference_counting_subclass_threads(self): - # https://github.com/python-greenlet/greenlet/issues/245 - from threading import Thread - from threading import Event - - from greenlet import getcurrent - - class MyGreenlet(RawGreenlet): - pass - - glets = [] - ref_cleared = Event() - - def greenlet_main(): - getcurrent().parent.switch() - - def thread_main(greenlet_running_event): - mine = MyGreenlet(greenlet_main) - glets.append(mine) - # The greenlets being deleted must be active - mine.switch() - # Don't keep any reference to it in this thread - del mine - # Let main know we published our greenlet. - greenlet_running_event.set() - # Wait for main to let us know the references are - # gone and the greenlet objects no longer reachable - ref_cleared.wait(10) - # The creating thread must call getcurrent() (or a few other - # greenlet APIs) because that's when the thread-local list of dead - # greenlets gets cleared. - getcurrent() - - # We start with 3 references to the subclass: - # - This module - # - Its __mro__ - # - The __subclassess__ attribute of greenlet - # - (If we call gc.get_referents(), we find four entries, including - # some other tuple ``(greenlet)`` that I'm not sure about but must be part - # of the machinery.) - # - # On Python 3.10 it's often enough to just run 3 threads; on Python 2.7, - # more threads are needed, and the results are still - # non-deterministic. Presumably the memory layouts are different - initial_refs = sys.getrefcount(MyGreenlet) - thread_ready_events = [] - thread_count = initial_refs + 45 - if RUNNING_ON_FREETHREAD_BUILD: - # types are immortal, so this is a HUGE number most likely, - # and we can't create that many threads. 
- thread_count = 50 - for _ in range(thread_count): - event = Event() - thread = Thread(target=thread_main, args=(event,)) - thread_ready_events.append(event) - thread.start() - - - for done_event in thread_ready_events: - done_event.wait(10) - - - del glets[:] - ref_cleared.set() - # Let any other thread run; it will crash the interpreter - # if not fixed (or silently corrupt memory and we possibly crash - # later). - self.wait_for_pending_cleanups() - self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs) - - def test_falling_off_end_switches_to_unstarted_parent_raises_error(self): - def no_args(): - return 13 - - parent_never_started = RawGreenlet(no_args) - - def leaf(): - return 42 - - child = RawGreenlet(leaf, parent_never_started) - - # Because the run function takes to arguments - with self.assertRaises(TypeError): - child.switch() - - def test_falling_off_end_switches_to_unstarted_parent_works(self): - def one_arg(x): - return (x, 24) - - parent_never_started = RawGreenlet(one_arg) - - def leaf(): - return 42 - - child = RawGreenlet(leaf, parent_never_started) - - result = child.switch() - self.assertEqual(result, (42, 24)) - - def test_switch_to_dead_greenlet_with_unstarted_perverse_parent(self): - class Parent(RawGreenlet): - def __getattribute__(self, name): - if name == 'run': - raise SomeError - - - parent_never_started = Parent() - seen = [] - child = RawGreenlet(lambda: seen.append(42), parent_never_started) - # Because we automatically start the parent when the child is - # finished - with self.assertRaises(SomeError): - child.switch() - - self.assertEqual(seen, [42]) - - with self.assertRaises(SomeError): - child.switch() - self.assertEqual(seen, [42]) - - def test_switch_to_dead_greenlet_reparent(self): - seen = [] - parent_never_started = RawGreenlet(lambda: seen.append(24)) - child = RawGreenlet(lambda: seen.append(42)) - - child.switch() - self.assertEqual(seen, [42]) - - child.parent = parent_never_started - # This actually is the same as switching to the parent. - result = child.switch() - self.assertIsNone(result) - self.assertEqual(seen, [42, 24]) - - def test_can_access_f_back_of_suspended_greenlet(self): - # This tests our frame rewriting to work around Python 3.12+ having - # some interpreter frames on the C stack. It will crash in the absence - # of that logic. - main = greenlet.getcurrent() - - def outer(): - inner() - - def inner(): - main.switch(sys._getframe(0)) - - hub = RawGreenlet(outer) - # start it - hub.switch() - - # start another greenlet to make sure we aren't relying on - # anything in `hub` still being on the C stack - unrelated = RawGreenlet(lambda: None) - unrelated.switch() - - # now it is suspended - self.assertIsNotNone(hub.gr_frame) - self.assertEqual(hub.gr_frame.f_code.co_name, "inner") - self.assertIsNotNone(hub.gr_frame.f_back) - self.assertEqual(hub.gr_frame.f_back.f_code.co_name, "outer") - # The next line is what would crash - self.assertIsNone(hub.gr_frame.f_back.f_back) - - def test_get_stack_with_nested_c_calls(self): - from functools import partial - from . import _test_extension_cpp - - def recurse(v): - if v > 0: - return v * _test_extension_cpp.test_call(partial(recurse, v - 1)) - return greenlet.getcurrent().parent.switch() - - gr = RawGreenlet(recurse) - gr.switch(5) - frame = gr.gr_frame - for i in range(5): - self.assertEqual(frame.f_locals["v"], i) - frame = frame.f_back - self.assertEqual(frame.f_locals["v"], 5) - self.assertIsNone(frame.f_back) - self.assertEqual(gr.switch(10), 1200) # 1200 = 5! 
* 10 - - def test_frames_always_exposed(self): - # On Python 3.12 this will crash if we don't set the - # gr_frames_always_exposed attribute. More background: - # https://github.com/python-greenlet/greenlet/issues/388 - main = greenlet.getcurrent() - - def outer(): - inner(sys._getframe(0)) - - def inner(frame): - main.switch(frame) - - gr = RawGreenlet(outer) - frame = gr.switch() - - # Do something else to clobber the part of the C stack used by `gr`, - # so we can't skate by on "it just happened to still be there" - unrelated = RawGreenlet(lambda: None) - unrelated.switch() - - self.assertEqual(frame.f_code.co_name, "outer") - # The next line crashes on 3.12 if we haven't exposed the frames. - self.assertIsNone(frame.f_back) - - -class TestGreenletSetParentErrors(TestCase): - def test_threaded_reparent(self): - data = {} - created_event = threading.Event() - done_event = threading.Event() - - def run(): - data['g'] = RawGreenlet(lambda: None) - created_event.set() - done_event.wait(10) - - def blank(): - greenlet.getcurrent().parent.switch() - - thread = threading.Thread(target=run) - thread.start() - created_event.wait(10) - g = RawGreenlet(blank) - g.switch() - with self.assertRaises(ValueError) as exc: - g.parent = data['g'] - done_event.set() - thread.join(10) - - self.assertEqual(str(exc.exception), "parent cannot be on a different thread") - - def test_unexpected_reparenting(self): - another = [] - def worker(): - g = RawGreenlet(lambda: None) - another.append(g) - g.switch() - t = threading.Thread(target=worker) - t.start() - t.join(10) - # The first time we switch (running g_initialstub(), which is - # when we look up the run attribute) we attempt to change the - # parent to one from another thread (which also happens to be - # dead). ``g_initialstub()`` should detect this and raise a - # greenlet error. - # - # EXCEPT: With the fix for #252, this is actually detected - # sooner, when setting the parent itself. Prior to that fix, - # the main greenlet from the background thread kept a valid - # value for ``run_info``, and appeared to be a valid parent - # until we actually started the greenlet. But now that it's - # cleared, this test is catching whether ``green_setparent`` - # can detect the dead thread. - # - # Further refactoring once again changes this back to a greenlet.error - # - # We need to wait for the cleanup to happen, but we're - # deliberately leaking a main greenlet here. - self.wait_for_pending_cleanups(initial_main_greenlets=self.main_greenlets_before_test + 1) - - class convoluted(RawGreenlet): - def __getattribute__(self, name): - if name == 'run': - self.parent = another[0] # pylint:disable=attribute-defined-outside-init - return RawGreenlet.__getattribute__(self, name) - g = convoluted(lambda: None) - with self.assertRaises(greenlet.error) as exc: - g.switch() - self.assertEqual(str(exc.exception), - "cannot switch to a different thread (which happens to have exited)") - del another[:] - - def test_unexpected_reparenting_thread_running(self): - # Like ``test_unexpected_reparenting``, except the background thread is - # actually still alive. 
- another = [] - switched_to_greenlet = threading.Event() - keep_main_alive = threading.Event() - def worker(): - g = RawGreenlet(lambda: None) - another.append(g) - g.switch() - switched_to_greenlet.set() - keep_main_alive.wait(10) - class convoluted(RawGreenlet): - def __getattribute__(self, name): - if name == 'run': - self.parent = another[0] # pylint:disable=attribute-defined-outside-init - return RawGreenlet.__getattribute__(self, name) - - t = threading.Thread(target=worker) - t.start() - - switched_to_greenlet.wait(10) - try: - g = convoluted(lambda: None) - - with self.assertRaises(greenlet.error) as exc: - g.switch() - self.assertIn("Cannot switch to a different thread", str(exc.exception)) - self.assertIn("Expected", str(exc.exception)) - self.assertIn("Current", str(exc.exception)) - finally: - keep_main_alive.set() - t.join(10) - # XXX: Should handle this automatically. - del another[:] - - def test_cannot_delete_parent(self): - worker = RawGreenlet(lambda: None) - self.assertIs(worker.parent, greenlet.getcurrent()) - - with self.assertRaises(AttributeError) as exc: - del worker.parent - self.assertEqual(str(exc.exception), "can't delete attribute") - - def test_cannot_delete_parent_of_main(self): - with self.assertRaises(AttributeError) as exc: - del greenlet.getcurrent().parent - self.assertEqual(str(exc.exception), "can't delete attribute") - - - def test_main_greenlet_parent_is_none(self): - # assuming we're in a main greenlet here. - self.assertIsNone(greenlet.getcurrent().parent) - - def test_set_parent_wrong_types(self): - def bg(): - # Go back to main. - greenlet.getcurrent().parent.switch() - - def check(glet): - for p in None, 1, self, "42": - with self.assertRaises(TypeError) as exc: - glet.parent = p - - self.assertEqual( - str(exc.exception), - "GreenletChecker: Expected any type of greenlet, not " + type(p).__name__) - - # First, not running - g = RawGreenlet(bg) - self.assertFalse(g) - check(g) - - # Then when running. - g.switch() - self.assertTrue(g) - check(g) - - # Let it finish - g.switch() - - - def test_trivial_cycle(self): - glet = RawGreenlet(lambda: None) - with self.assertRaises(ValueError) as exc: - glet.parent = glet - self.assertEqual(str(exc.exception), "cyclic parent chain") - - def test_trivial_cycle_main(self): - # This used to produce a ValueError, but we catch it earlier than that now. 
- with self.assertRaises(AttributeError) as exc: - greenlet.getcurrent().parent = greenlet.getcurrent() - self.assertEqual(str(exc.exception), "cannot set the parent of a main greenlet") - - def test_deeper_cycle(self): - g1 = RawGreenlet(lambda: None) - g2 = RawGreenlet(lambda: None) - g3 = RawGreenlet(lambda: None) - - g1.parent = g2 - g2.parent = g3 - with self.assertRaises(ValueError) as exc: - g3.parent = g1 - self.assertEqual(str(exc.exception), "cyclic parent chain") - - -class TestRepr(TestCase): - - if not hasattr(TestCase, 'assertEndsWith'): # Added in 3.14 - def assertEndsWith(self, s, suffix, msg=None): - self.assertTrue(s.endswith(suffix), (s, suffix, msg)) - - def test_main_while_running(self): - r = repr(greenlet.getcurrent()) - self.assertEndsWith(r, " current active started main>") - - def test_main_in_background(self): - main = greenlet.getcurrent() - def run(): - return repr(main) - - g = RawGreenlet(run) - r = g.switch() - self.assertEndsWith(r, ' suspended active started main>') - - def test_initial(self): - r = repr(RawGreenlet()) - self.assertEndsWith(r, ' pending>') - - def test_main_from_other_thread(self): - main = greenlet.getcurrent() - - class T(threading.Thread): - original_main = thread_main = None - main_glet = None - def run(self): - self.original_main = repr(main) - self.main_glet = greenlet.getcurrent() - self.thread_main = repr(self.main_glet) - - t = T() - t.start() - t.join(10) - - self.assertEndsWith(t.original_main, ' suspended active started main>') - self.assertEndsWith(t.thread_main, ' current active started main>') - # give the machinery time to notice the death of the thread, - # and clean it up. Note that we don't use - # ``expect_greenlet_leak`` or wait_for_pending_cleanups, - # because at this point we know we have an extra greenlet - # still reachable. - for _ in range(3): - time.sleep(0.001) - - # In the past, main greenlets, even from dead threads, never - # really appear dead. We have fixed that, and we also report - # that the thread is dead in the repr. (Do this multiple times - # to make sure that we don't self-modify and forget our state - # in the C++ code). - for _ in range(3): - self.assertTrue(t.main_glet.dead) - r = repr(t.main_glet) - self.assertEndsWith(r, ' (thread exited) dead>') - - def test_dead(self): - g = RawGreenlet(lambda: None) - g.switch() - self.assertEndsWith(repr(g), ' dead>') - self.assertNotIn('suspended', repr(g)) - self.assertNotIn('started', repr(g)) - self.assertNotIn('active', repr(g)) - - def test_formatting_produces_native_str(self): - # https://github.com/python-greenlet/greenlet/issues/218 - # %s formatting on Python 2 was producing unicode, not str. - - g_dead = RawGreenlet(lambda: None) - g_not_started = RawGreenlet(lambda: None) - g_cur = greenlet.getcurrent() - - for g in g_dead, g_not_started, g_cur: - - self.assertIsInstance( - '%s' % (g,), - str - ) - self.assertIsInstance( - '%r' % (g,), - str, - ) - - -class TestMainGreenlet(TestCase): - # Tests some implementation details, and relies on some - # implementation details. 
- - def _check_current_is_main(self): - # implementation detail - assert 'main' in repr(greenlet.getcurrent()) - - t = type(greenlet.getcurrent()) - assert 'main' not in repr(t) - return t - - def test_main_greenlet_type_can_be_subclassed(self): - main_type = self._check_current_is_main() - subclass = type('subclass', (main_type,), {}) - self.assertIsNotNone(subclass) - - def test_main_greenlet_is_greenlet(self): - self._check_current_is_main() - self.assertIsInstance(greenlet.getcurrent(), RawGreenlet) - - - -class TestBrokenGreenlets(TestCase): - # Tests for things that used to, or still do, terminate the interpreter. - # This often means doing unsavory things. - - def test_failed_to_initialstub(self): - def func(): - raise AssertionError("Never get here") - - - g = greenlet._greenlet.UnswitchableGreenlet(func) - g.force_switch_error = True - - with self.assertRaisesRegex(SystemError, - "Failed to switch stacks into a greenlet for the first time."): - g.switch() - - def test_failed_to_switch_into_running(self): - runs = [] - def func(): - runs.append(1) - greenlet.getcurrent().parent.switch() - runs.append(2) - greenlet.getcurrent().parent.switch() - runs.append(3) # pragma: no cover - - g = greenlet._greenlet.UnswitchableGreenlet(func) - g.switch() - self.assertEqual(runs, [1]) - g.switch() - self.assertEqual(runs, [1, 2]) - g.force_switch_error = True - - with self.assertRaisesRegex(SystemError, - "Failed to switch stacks into a running greenlet."): - g.switch() - - # If we stopped here, we would fail the leakcheck, because we've left - # the ``inner_bootstrap()`` C frame and its descendents hanging around, - # which have a bunch of Python references. They'll never get cleaned up - # if we don't let the greenlet finish. - g.force_switch_error = False - g.switch() - self.assertEqual(runs, [1, 2, 3]) - - def test_failed_to_slp_switch_into_running(self): - ex = self.assertScriptRaises('fail_slp_switch.py') - - self.assertIn('fail_slp_switch is running', ex.output) - self.assertIn(ex.returncode, self.get_expected_returncodes_for_aborted_process()) - - def test_reentrant_switch_two_greenlets(self): - # Before we started capturing the arguments in g_switch_finish, this could crash. - output = self.run_script('fail_switch_two_greenlets.py') - self.assertIn('In g1_run', output) - self.assertIn('TRACE', output) - self.assertIn('LEAVE TRACE', output) - self.assertIn('Falling off end of main', output) - self.assertIn('Falling off end of g1_run', output) - self.assertIn('Falling off end of g2', output) - - def test_reentrant_switch_three_greenlets(self): - # On debug builds of greenlet, this used to crash with an assertion error; - # on non-debug versions, it ran fine (which it should not do!). - # Now it always crashes correctly with a TypeError - ex = self.assertScriptRaises('fail_switch_three_greenlets.py', exitcodes=(1,)) - - self.assertIn('TypeError', ex.output) - self.assertIn('positional arguments', ex.output) - - def test_reentrant_switch_three_greenlets2(self): - # This actually passed on debug and non-debug builds. It - # should probably have been triggering some debug assertions - # but it didn't. - # - # I think the fixes for the above test also kicked in here. 
- output = self.run_script('fail_switch_three_greenlets2.py') - self.assertIn( - "RESULTS: [('trace', 'switch'), " - "('trace', 'switch'), ('g2 arg', 'g2 from tracefunc'), " - "('trace', 'switch'), ('main g1', 'from g2_run'), ('trace', 'switch'), " - "('g1 arg', 'g1 from main'), ('trace', 'switch'), ('main g2', 'from g1_run'), " - "('trace', 'switch'), ('g1 from parent', 'g1 from main 2'), ('trace', 'switch'), " - "('main g1.2', 'g1 done'), ('trace', 'switch'), ('g2 from parent', ()), " - "('trace', 'switch'), ('main g2.2', 'g2 done')]", - output - ) - - def test_reentrant_switch_GreenletAlreadyStartedInPython(self): - output = self.run_script('fail_initialstub_already_started.py') - - self.assertIn( - "RESULTS: ['Begin C', 'Switch to b from B.__getattribute__ in C', " - "('Begin B', ()), '_B_run switching to main', ('main from c', 'From B'), " - "'B.__getattribute__ back from main in C', ('Begin A', (None,)), " - "('A dead?', True, 'B dead?', True, 'C dead?', False), " - "'C done', ('main from c.2', None)]", - output - ) - - def test_reentrant_switch_run_callable_has_del(self): - output = self.run_script('fail_clearing_run_switches.py') - self.assertIn( - "RESULTS [" - "('G.__getattribute__', 'run'), ('RunCallable', '__del__'), " - "('main: g.switch()', 'from RunCallable'), ('run_func', 'enter')" - "]", - output - ) - -class TestModule(TestCase): - - @unittest.skipUnless(hasattr(sys, '_is_gil_enabled'), - "Needs 3.13 and above for sys._is_gil_enabled") - def test_no_gil_on_free_threaded(self): - - if RUNNING_ON_FREETHREAD_BUILD: - self.assertFalse(sys._is_gil_enabled()) - else: - self.assertTrue(sys._is_gil_enabled()) - -if __name__ == '__main__': - unittest.main() diff --git a/venv/Lib/site-packages/greenlet/tests/test_greenlet_trash.py b/venv/Lib/site-packages/greenlet/tests/test_greenlet_trash.py deleted file mode 100644 index c1fc137..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_greenlet_trash.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tests for greenlets interacting with the CPython trash can API. - -The CPython trash can API is not designed to be re-entered from a -single thread. But this can happen using greenlets, if something -during the object deallocation process switches greenlets, and this second -greenlet then causes the trash can to get entered again. Here, we do this -very explicitly, but in other cases (like gevent) it could be arbitrarily more -complicated: for example, a weakref callback might try to acquire a lock that's -already held by another greenlet; that would allow a greenlet switch to occur. - -See https://github.com/gevent/gevent/issues/1909 - -This test is fragile and relies on details of the CPython -implementation (like most of the rest of this package): - - - We enter the trashcan and deferred deallocation after - ``_PyTrash_UNWIND_LEVEL`` calls. This constant, defined in - CPython's object.c, is generally 50. That's basically how many objects are required to - get us into the deferred deallocation situation. - - - The test fails by hitting an ``assert()`` in object.c; if the - build didn't enable assert, then we don't catch this. - - - If the test fails in that way, the interpreter crashes. 
-""" -from __future__ import print_function, absolute_import, division - -import unittest - - -class TestTrashCanReEnter(unittest.TestCase): - - def test_it(self): - try: - # pylint:disable-next=no-name-in-module - from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=unused-import - except ImportError: - import sys - # Python 3.13 has not "trash delete nesting" anymore (but "delete later") - assert sys.version_info[:2] >= (3, 13) - self.skipTest("get_tstate_trash_delete_nesting is not available.") - - # Try several times to trigger it, because it isn't 100% - # reliable. - for _ in range(10): - self.check_it() - - def check_it(self): # pylint:disable=too-many-statements - import greenlet - from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=no-name-in-module - main = greenlet.getcurrent() - - assert get_tstate_trash_delete_nesting() == 0 - - # We expect to be in deferred deallocation after this many - # deallocations have occurred. TODO: I wish we had a better way to do - # this --- that was before get_tstate_trash_delete_nesting; perhaps - # we can use that API to do better? - TRASH_UNWIND_LEVEL = 50 - # How many objects to put in a container; it's the container that - # queues objects for deferred deallocation. - OBJECTS_PER_CONTAINER = 500 - - class Dealloc: # define the class here because we alter class variables each time we run. - """ - An object with a ``__del__`` method. When it starts getting deallocated - from a deferred trash can run, it switches greenlets, allocates more objects - which then also go in the trash can. If we don't save state appropriately, - nesting gets out of order and we can crash the interpreter. - """ - - #: Has our deallocation actually run and switched greenlets? - #: When it does, this will be set to the current greenlet. This should - #: be happening in the main greenlet, so we check that down below. - SPAWNED = False - - #: Has the background greenlet run? - BG_RAN = False - - BG_GLET = None - - #: How many of these things have ever been allocated. - CREATED = 0 - - #: How many of these things have ever been deallocated. - DESTROYED = 0 - - #: How many were destroyed not in the main greenlet. There should always - #: be some. - #: If the test is broken or things change in the trashcan implementation, - #: this may not be correct. - DESTROYED_BG = 0 - - def __init__(self, sequence_number): - """ - :param sequence_number: The ordinal of this object during - one particular creation run. This is used to detect (guess, really) - when we have entered the trash can's deferred deallocation. - """ - self.i = sequence_number - Dealloc.CREATED += 1 - - def __del__(self): - if self.i == TRASH_UNWIND_LEVEL and not self.SPAWNED: - Dealloc.SPAWNED = greenlet.getcurrent() - other = Dealloc.BG_GLET = greenlet.greenlet(background_greenlet) - x = other.switch() - assert x == 42 - # It's important that we don't switch back to the greenlet, - # we leave it hanging there in an incomplete state. But we don't let it - # get collected, either. If we complete it now, while we're still - # in the scope of the initial trash can, things work out and we - # don't see the problem. We need this greenlet to complete - # at some point in the future, after we've exited this trash can invocation. 
- del other - elif self.i == 40 and greenlet.getcurrent() is not main: - Dealloc.BG_RAN = True - try: - main.switch(42) - except greenlet.GreenletExit as ex: - # We expect this; all references to us go away - # while we're still running, and we need to finish deleting - # ourself. - Dealloc.BG_RAN = type(ex) - del ex - - # Record the fact that we're dead last of all. This ensures that - # we actually get returned too. - Dealloc.DESTROYED += 1 - if greenlet.getcurrent() is not main: - Dealloc.DESTROYED_BG += 1 - - - def background_greenlet(): - # We direct through a second function, instead of - # directly calling ``make_some()``, so that we have complete - # control over when these objects are destroyed: we need them - # to be destroyed in the context of the background greenlet - t = make_some() - del t # Triggere deletion. - - def make_some(): - t = () - i = OBJECTS_PER_CONTAINER - while i: - # Nest the tuples; it's the recursion that gets us - # into trash. - t = (Dealloc(i), t) - i -= 1 - return t - - - some = make_some() - self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER) - self.assertEqual(Dealloc.DESTROYED, 0) - - # If we're going to crash, it should be on the following line. - # We only crash if ``assert()`` is enabled, of course. - del some - - # For non-debug builds of CPython, we won't crash. The best we can do is check - # the nesting level explicitly. - self.assertEqual(0, get_tstate_trash_delete_nesting()) - - # Discard this, raising GreenletExit into where it is waiting. - Dealloc.BG_GLET = None - # The same nesting level maintains. - self.assertEqual(0, get_tstate_trash_delete_nesting()) - - # We definitely cleaned some up in the background - self.assertGreater(Dealloc.DESTROYED_BG, 0) - - # Make sure all the cleanups happened. - self.assertIs(Dealloc.SPAWNED, main) - self.assertTrue(Dealloc.BG_RAN) - self.assertEqual(Dealloc.BG_RAN, greenlet.GreenletExit) - self.assertEqual(Dealloc.CREATED, Dealloc.DESTROYED ) - self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER * 2) - - import gc - gc.collect() - - -if __name__ == '__main__': - unittest.main() diff --git a/venv/Lib/site-packages/greenlet/tests/test_leaks.py b/venv/Lib/site-packages/greenlet/tests/test_leaks.py deleted file mode 100644 index e09da7d..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_leaks.py +++ /dev/null @@ -1,457 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Testing scenarios that may have leaked. -""" -from __future__ import print_function, absolute_import, division - -import sys -import gc - -import time -import weakref -import threading - - -import greenlet -from . import TestCase -from . import PY314 -from . 
import RUNNING_ON_FREETHREAD_BUILD -from .leakcheck import fails_leakcheck -from .leakcheck import ignores_leakcheck -from .leakcheck import RUNNING_ON_MANYLINUX - - -# pylint:disable=protected-access - -assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0 - -class HasFinalizerTracksInstances(object): - EXTANT_INSTANCES = set() - def __init__(self, msg): - self.msg = sys.intern(msg) - self.EXTANT_INSTANCES.add(id(self)) - def __del__(self): - self.EXTANT_INSTANCES.remove(id(self)) - def __repr__(self): - return "" % ( - id(self), self.msg - ) - @classmethod - def reset(cls): - cls.EXTANT_INSTANCES.clear() - - -def fails_leakcheck_except_on_free_thraded(func): - if RUNNING_ON_FREETHREAD_BUILD: - # These all seem to pass on free threading because - # of the changes to the garbage collector - return func - return fails_leakcheck(func) - - -class TestLeaks(TestCase): - - def test_arg_refs(self): - args = ('a', 'b', 'c') - refcount_before = sys.getrefcount(args) - # pylint:disable=unnecessary-lambda - g = greenlet.greenlet( - lambda *args: greenlet.getcurrent().parent.switch(*args)) - for _ in range(100): - g.switch(*args) - self.assertEqual(sys.getrefcount(args), refcount_before) - - def test_kwarg_refs(self): - kwargs = {} - self.assertEqual(sys.getrefcount(kwargs), 2 if not PY314 else 1) - # pylint:disable=unnecessary-lambda - g = greenlet.greenlet( - lambda **gkwargs: greenlet.getcurrent().parent.switch(**gkwargs)) - for _ in range(100): - g.switch(**kwargs) - # Python 3.14 elides reference counting operations - # in some cases. See https://github.com/python/cpython/pull/130708 - self.assertEqual(sys.getrefcount(kwargs), 2 if not PY314 else 1) - - - @staticmethod - def __recycle_threads(): - # By introducing a thread that does sleep we allow other threads, - # that have triggered their __block condition, but did not have a - # chance to deallocate their thread state yet, to finally do so. - # The way it works is by requiring a GIL switch (different thread), - # which does a GIL release (sleep), which might do a GIL switch - # to finished threads and allow them to clean up. 
- def worker(): - time.sleep(0.001) - t = threading.Thread(target=worker) - t.start() - time.sleep(0.001) - t.join(10) - - def test_threaded_leak(self): - gg = [] - def worker(): - # only main greenlet present - gg.append(weakref.ref(greenlet.getcurrent())) - for _ in range(2): - t = threading.Thread(target=worker) - t.start() - t.join(10) - del t - greenlet.getcurrent() # update ts_current - self.__recycle_threads() - greenlet.getcurrent() # update ts_current - gc.collect() - greenlet.getcurrent() # update ts_current - for g in gg: - self.assertIsNone(g()) - - def test_threaded_adv_leak(self): - gg = [] - def worker(): - # main and additional *finished* greenlets - ll = greenlet.getcurrent().ll = [] - def additional(): - ll.append(greenlet.getcurrent()) - for _ in range(2): - greenlet.greenlet(additional).switch() - gg.append(weakref.ref(greenlet.getcurrent())) - for _ in range(2): - t = threading.Thread(target=worker) - t.start() - t.join(10) - del t - greenlet.getcurrent() # update ts_current - self.__recycle_threads() - greenlet.getcurrent() # update ts_current - gc.collect() - greenlet.getcurrent() # update ts_current - for g in gg: - self.assertIsNone(g()) - - def assertClocksUsed(self): - used = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() - self.assertGreaterEqual(used, 0) - # we don't lose the value - greenlet._greenlet.enable_optional_cleanup(True) - used2 = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() - self.assertEqual(used, used2) - self.assertGreater(greenlet._greenlet.CLOCKS_PER_SEC, 1) - - def _check_issue251(self, - manually_collect_background=True, - explicit_reference_to_switch=False): - # See https://github.com/python-greenlet/greenlet/issues/251 - # Killing a greenlet (probably not the main one) - # in one thread from another thread would - # result in leaking a list (the ts_delkey list). - # We no longer use lists to hold that stuff, though. - - # For the test to be valid, even empty lists have to be tracked by the - # GC - - assert gc.is_tracked([]) - HasFinalizerTracksInstances.reset() - greenlet.getcurrent() - greenlets_before = self.count_objects(greenlet.greenlet, exact_kind=False) - - background_glet_running = threading.Event() - background_glet_killed = threading.Event() - background_greenlets = [] - - # XXX: Switching this to a greenlet subclass that overrides - # run results in all callers failing the leaktest; that - # greenlet instance is leaked. There's a bound method for - # run() living on the stack of the greenlet in g_initialstub, - # and since we don't manually switch back to the background - # greenlet to let it "fall off the end" and exit the - # g_initialstub function, it never gets cleaned up. Making the - # garbage collector aware of this bound method (making it an - # attribute of the greenlet structure and traversing into it) - # doesn't help, for some reason. - def background_greenlet(): - # Throw control back to the main greenlet. 
- jd = HasFinalizerTracksInstances("DELETING STACK OBJECT") - greenlet._greenlet.set_thread_local( - 'test_leaks_key', - HasFinalizerTracksInstances("DELETING THREAD STATE")) - # Explicitly keeping 'switch' in a local variable - # breaks this test in all versions - if explicit_reference_to_switch: - s = greenlet.getcurrent().parent.switch - s([jd]) - else: - greenlet.getcurrent().parent.switch([jd]) - - bg_main_wrefs = [] - - def background_thread(): - glet = greenlet.greenlet(background_greenlet) - bg_main_wrefs.append(weakref.ref(glet.parent)) - - background_greenlets.append(glet) - glet.switch() # Be sure it's active. - # Control is ours again. - del glet # Delete one reference from the thread it runs in. - background_glet_running.set() - background_glet_killed.wait(10) - - # To trigger the background collection of the dead - # greenlet, thus clearing out the contents of the list, we - # need to run some APIs. See issue 252. - if manually_collect_background: - greenlet.getcurrent() - - - t = threading.Thread(target=background_thread) - t.start() - background_glet_running.wait(10) - greenlet.getcurrent() - lists_before = self.count_objects(list, exact_kind=True) - - assert len(background_greenlets) == 1 - self.assertFalse(background_greenlets[0].dead) - # Delete the last reference to the background greenlet - # from a different thread. This puts it in the background thread's - # ts_delkey list. - del background_greenlets[:] - background_glet_killed.set() - - # Now wait for the background thread to die. - t.join(10) - del t - # As part of the fix for 252, we need to cycle the ceval.c - # interpreter loop to be sure it has had a chance to process - # the pending call. - self.wait_for_pending_cleanups() - - lists_after = self.count_objects(list, exact_kind=True) - greenlets_after = self.count_objects(greenlet.greenlet, exact_kind=False) - - # On 2.7, we observe that lists_after is smaller than - # lists_before. No idea what lists got cleaned up. All the - # Python 3 versions match exactly. - self.assertLessEqual(lists_after, lists_before) - # On versions after 3.6, we've successfully cleaned up the - # greenlet references thanks to the internal "vectorcall" - # protocol; prior to that, there is a reference path through - # the ``greenlet.switch`` method still on the stack that we - # can't reach to clean up. The C code goes through terrific - # lengths to clean that up. - if not explicit_reference_to_switch \ - and greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: - # If cleanup was disabled, though, we may not find it. - self.assertEqual(greenlets_after, greenlets_before) - if manually_collect_background: - # TODO: Figure out how to make this work! - # The one on the stack is still leaking somehow - # in the non-manually-collect state. - self.assertEqual(HasFinalizerTracksInstances.EXTANT_INSTANCES, set()) - else: - # The explicit reference prevents us from collecting it - # and it isn't always found by the GC either for some - # reason. The entire frame is leaked somehow, on some - # platforms (e.g., MacPorts builds of Python (all - # versions!)), but not on other platforms (the linux and - # windows builds on GitHub actions and Appveyor). So we'd - # like to write a test that proves that the main greenlet - # sticks around, and we can on my machine (macOS 11.6, - # MacPorts builds of everything) but we can't write that - # same test on other platforms. However, hopefully iteration - # done by leakcheck will find it. 
- pass - - if greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: - self.assertClocksUsed() - - def test_issue251_killing_cross_thread_leaks_list(self): - self._check_issue251() - - def test_issue251_with_cleanup_disabled(self): - greenlet._greenlet.enable_optional_cleanup(False) - try: - self._check_issue251() - finally: - greenlet._greenlet.enable_optional_cleanup(True) - - @fails_leakcheck_except_on_free_thraded - def test_issue251_issue252_need_to_collect_in_background(self): - # Between greenlet 1.1.2 and the next version, this was still - # failing because the leak of the list still exists when we - # don't call a greenlet API before exiting the thread. The - # proximate cause is that neither of the two greenlets from - # the background thread are actually being destroyed, even - # though the GC is in fact visiting both objects. It's not - # clear where that leak is? For some reason the thread-local - # dict holding it isn't being cleaned up. - # - # The leak, I think, is in the CPYthon internal function that - # calls into green_switch(). The argument tuple is still on - # the C stack somewhere and can't be reached? That doesn't - # make sense, because the tuple should be collectable when - # this object goes away. - # - # Note that this test sometimes spuriously passes on Linux, - # for some reason, but I've never seen it pass on macOS. - self._check_issue251(manually_collect_background=False) - - @fails_leakcheck_except_on_free_thraded - def test_issue251_issue252_need_to_collect_in_background_cleanup_disabled(self): - self.expect_greenlet_leak = True - greenlet._greenlet.enable_optional_cleanup(False) - try: - self._check_issue251(manually_collect_background=False) - finally: - greenlet._greenlet.enable_optional_cleanup(True) - - @fails_leakcheck_except_on_free_thraded - def test_issue251_issue252_explicit_reference_not_collectable(self): - self._check_issue251( - manually_collect_background=False, - explicit_reference_to_switch=True) - - UNTRACK_ATTEMPTS = 100 - - def _only_test_some_versions(self): - # We're only looking for this problem specifically on 3.11, - # and this set of tests is relatively fragile, depending on - # OS and memory management details. So we want to run it on 3.11+ - # (obviously) but not every older 3.x version in order to reduce - # false negatives. At the moment, those false results seem to have - # resolved, so we are actually running this on 3.8+ - assert sys.version_info[0] >= 3 - if sys.version_info[:2] < (3, 8): - self.skipTest('Only observed on 3.11') - if RUNNING_ON_MANYLINUX: - self.skipTest("Slow and not worth repeating here") - - @ignores_leakcheck - # Because we're just trying to track raw memory, not objects, and running - # the leakcheck makes an already slow test slower. - def test_untracked_memory_doesnt_increase(self): - # See https://github.com/gevent/gevent/issues/1924 - # and https://github.com/python-greenlet/greenlet/issues/328 - self._only_test_some_versions() - def f(): - return 1 - - ITER = 10000 - def run_it(): - for _ in range(ITER): - greenlet.greenlet(f).switch() - - # Establish baseline - for _ in range(3): - run_it() - - # uss: (Linux, macOS, Windows): aka "Unique Set Size", this is - # the memory which is unique to a process and which would be - # freed if the process was terminated right now. 
- uss_before = self.get_process_uss() - - for count in range(self.UNTRACK_ATTEMPTS): - uss_before = max(uss_before, self.get_process_uss()) - run_it() - - uss_after = self.get_process_uss() - if uss_after <= uss_before and count > 1: - break - - self.assertLessEqual(uss_after, uss_before) - - def _check_untracked_memory_thread(self, deallocate_in_thread=True): - self._only_test_some_versions() - # Like the above test, but what if there are a bunch of - # unfinished greenlets in a thread that dies? - # Does it matter if we deallocate in the thread or not? - EXIT_COUNT = [0] - - def f(): - try: - greenlet.getcurrent().parent.switch() - except greenlet.GreenletExit: - EXIT_COUNT[0] += 1 - raise - return 1 - - ITER = 10000 - def run_it(): - glets = [] - for _ in range(ITER): - # Greenlet starts, switches back to us. - # We keep a strong reference to the greenlet though so it doesn't - # get a GreenletExit exception. - g = greenlet.greenlet(f) - glets.append(g) - g.switch() - - return glets - - test = self - - class ThreadFunc: - uss_before = uss_after = 0 - glets = () - ITER = 2 - def __call__(self): - self.uss_before = test.get_process_uss() - - for _ in range(self.ITER): - self.glets += tuple(run_it()) - - for g in self.glets: - test.assertIn('suspended active', str(g)) - # Drop them. - if deallocate_in_thread: - self.glets = () - self.uss_after = test.get_process_uss() - - # Establish baseline - uss_before = uss_after = None - for count in range(self.UNTRACK_ATTEMPTS): - EXIT_COUNT[0] = 0 - thread_func = ThreadFunc() - t = threading.Thread(target=thread_func) - t.start() - t.join(30) - self.assertFalse(t.is_alive()) - - if uss_before is None: - uss_before = thread_func.uss_before - - uss_before = max(uss_before, thread_func.uss_before) - if deallocate_in_thread: - self.assertEqual(thread_func.glets, ()) - self.assertEqual(EXIT_COUNT[0], ITER * thread_func.ITER) - - del thread_func # Deallocate the greenlets; but this won't raise into them - del t - if not deallocate_in_thread: - self.assertEqual(EXIT_COUNT[0], 0) - if deallocate_in_thread: - self.wait_for_pending_cleanups() - - uss_after = self.get_process_uss() - # See if we achieve a non-growth state at some point. Break when we do. - if uss_after <= uss_before and count > 1: - break - - self.wait_for_pending_cleanups() - uss_after = self.get_process_uss() - self.assertLessEqual(uss_after, uss_before, "after attempts %d" % (count,)) - - @ignores_leakcheck - # Because we're just trying to track raw memory, not objects, and running - # the leakcheck makes an already slow test slower. - def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_thread(self): - self._check_untracked_memory_thread(deallocate_in_thread=True) - - @ignores_leakcheck - # Because the main greenlets from the background threads do not exit in a timely fashion, - # we fail the object-based leakchecks. - def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main(self): - self._check_untracked_memory_thread(deallocate_in_thread=False) - -if __name__ == '__main__': - __import__('unittest').main() diff --git a/venv/Lib/site-packages/greenlet/tests/test_stack_saved.py b/venv/Lib/site-packages/greenlet/tests/test_stack_saved.py deleted file mode 100644 index b362bf9..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_stack_saved.py +++ /dev/null @@ -1,19 +0,0 @@ -import greenlet -from . 
import TestCase - - -class Test(TestCase): - - def test_stack_saved(self): - main = greenlet.getcurrent() - self.assertEqual(main._stack_saved, 0) - - def func(): - main.switch(main._stack_saved) - - g = greenlet.greenlet(func) - x = g.switch() - self.assertGreater(x, 0) - self.assertGreater(g._stack_saved, 0) - g.switch() - self.assertEqual(g._stack_saved, 0) diff --git a/venv/Lib/site-packages/greenlet/tests/test_throw.py b/venv/Lib/site-packages/greenlet/tests/test_throw.py deleted file mode 100644 index f4f9a14..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_throw.py +++ /dev/null @@ -1,128 +0,0 @@ -import sys - - -from greenlet import greenlet -from . import TestCase - -def switch(*args): - return greenlet.getcurrent().parent.switch(*args) - - -class ThrowTests(TestCase): - def test_class(self): - def f(): - try: - switch("ok") - except RuntimeError: - switch("ok") - return - switch("fail") - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw(RuntimeError) - self.assertEqual(res, "ok") - - def test_val(self): - def f(): - try: - switch("ok") - except RuntimeError: - val = sys.exc_info()[1] - if str(val) == "ciao": - switch("ok") - return - switch("fail") - - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw(RuntimeError("ciao")) - self.assertEqual(res, "ok") - - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw(RuntimeError, "ciao") - self.assertEqual(res, "ok") - - def test_kill(self): - def f(): - switch("ok") - switch("fail") - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw() - self.assertTrue(isinstance(res, greenlet.GreenletExit)) - self.assertTrue(g.dead) - res = g.throw() # immediately eaten by the already-dead greenlet - self.assertTrue(isinstance(res, greenlet.GreenletExit)) - - def test_throw_goes_to_original_parent(self): - main = greenlet.getcurrent() - - def f1(): - try: - main.switch("f1 ready to catch") - except IndexError: - return "caught" - return "normal exit" - - def f2(): - main.switch("from f2") - - g1 = greenlet(f1) - g2 = greenlet(f2, parent=g1) - with self.assertRaises(IndexError): - g2.throw(IndexError) - self.assertTrue(g2.dead) - self.assertTrue(g1.dead) - - g1 = greenlet(f1) - g2 = greenlet(f2, parent=g1) - res = g1.switch() - self.assertEqual(res, "f1 ready to catch") - res = g2.throw(IndexError) - self.assertEqual(res, "caught") - self.assertTrue(g2.dead) - self.assertTrue(g1.dead) - - g1 = greenlet(f1) - g2 = greenlet(f2, parent=g1) - res = g1.switch() - self.assertEqual(res, "f1 ready to catch") - res = g2.switch() - self.assertEqual(res, "from f2") - res = g2.throw(IndexError) - self.assertEqual(res, "caught") - self.assertTrue(g2.dead) - self.assertTrue(g1.dead) - - def test_non_traceback_param(self): - with self.assertRaises(TypeError) as exc: - greenlet.getcurrent().throw( - Exception, - Exception(), - self - ) - self.assertEqual(str(exc.exception), - "throw() third argument must be a traceback object") - - def test_instance_of_wrong_type(self): - with self.assertRaises(TypeError) as exc: - greenlet.getcurrent().throw( - Exception(), - BaseException() - ) - - self.assertEqual(str(exc.exception), - "instance exception may not have a separate value") - - def test_not_throwable(self): - with self.assertRaises(TypeError) as exc: - greenlet.getcurrent().throw( - "abc" - ) - self.assertEqual(str(exc.exception), - "exceptions must be classes, or instances, not str") diff --git 
a/venv/Lib/site-packages/greenlet/tests/test_tracing.py b/venv/Lib/site-packages/greenlet/tests/test_tracing.py deleted file mode 100644 index 235fbcd..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_tracing.py +++ /dev/null @@ -1,299 +0,0 @@ -from __future__ import print_function -import sys -import sysconfig -import greenlet -import unittest - -from . import TestCase -from . import PY312 - -# https://discuss.python.org/t/cpython-3-12-greenlet-and-tracing-profiling-how-to-not-crash-and-get-correct-results/33144/2 -# When build variables are available, OPT is the best way of detecting -# the build with assertions enabled. Otherwise, fallback to detecting PyDEBUG -# build. -ASSERTION_BUILD_PY312 = ( - PY312 and ( - "-DNDEBUG" not in sysconfig.get_config_var("OPT").split() - if sysconfig.get_config_var("OPT") is not None - else hasattr(sys, 'gettotalrefcount') - ), - "Broken on assertion-enabled builds of Python 3.12" -) - -class SomeError(Exception): - pass - -class GreenletTracer(object): - oldtrace = None - - def __init__(self, error_on_trace=False): - self.actions = [] - self.error_on_trace = error_on_trace - - def __call__(self, *args): - self.actions.append(args) - if self.error_on_trace: - raise SomeError - - def __enter__(self): - self.oldtrace = greenlet.settrace(self) - return self.actions - - def __exit__(self, *args): - greenlet.settrace(self.oldtrace) - - -class TestGreenletTracing(TestCase): - """ - Tests of ``greenlet.settrace()`` - """ - - def test_a_greenlet_tracing(self): - main = greenlet.getcurrent() - def dummy(): - pass - def dummyexc(): - raise SomeError() - - with GreenletTracer() as actions: - g1 = greenlet.greenlet(dummy) - g1.switch() - g2 = greenlet.greenlet(dummyexc) - self.assertRaises(SomeError, g2.switch) - - self.assertEqual(actions, [ - ('switch', (main, g1)), - ('switch', (g1, main)), - ('switch', (main, g2)), - ('throw', (g2, main)), - ]) - - def test_b_exception_disables_tracing(self): - main = greenlet.getcurrent() - def dummy(): - main.switch() - g = greenlet.greenlet(dummy) - g.switch() - with GreenletTracer(error_on_trace=True) as actions: - self.assertRaises(SomeError, g.switch) - self.assertEqual(greenlet.gettrace(), None) - - self.assertEqual(actions, [ - ('switch', (main, g)), - ]) - - def test_set_same_tracer_twice(self): - # https://github.com/python-greenlet/greenlet/issues/332 - # Our logic in asserting that the tracefunction should - # gain a reference was incorrect if the same tracefunction was set - # twice. - tracer = GreenletTracer() - with tracer: - greenlet.settrace(tracer) - - -class PythonTracer(object): - oldtrace = None - - def __init__(self): - self.actions = [] - - def __call__(self, frame, event, arg): - # Record the co_name so we have an idea what function we're in. - self.actions.append((event, frame.f_code.co_name)) - - def __enter__(self): - self.oldtrace = sys.setprofile(self) - return self.actions - - def __exit__(self, *args): - sys.setprofile(self.oldtrace) - -def tpt_callback(): - return 42 - -class TestPythonTracing(TestCase): - """ - Tests of the interaction of ``sys.settrace()`` - with greenlet facilities. - - NOTE: Most of this is probably CPython specific. 
- """ - - maxDiff = None - - def test_trace_events_trivial(self): - with PythonTracer() as actions: - tpt_callback() - # If we use the sys.settrace instead of setprofile, we get - # this: - - # self.assertEqual(actions, [ - # ('call', 'tpt_callback'), - # ('call', '__exit__'), - # ]) - - self.assertEqual(actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - def _trace_switch(self, glet): - with PythonTracer() as actions: - glet.switch() - return actions - - def _check_trace_events_func_already_set(self, glet): - actions = self._trace_switch(glet) - self.assertEqual(actions, [ - ('return', '__enter__'), - ('c_call', '_trace_switch'), - ('call', 'run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('return', 'run'), - ('c_return', '_trace_switch'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - def test_trace_events_into_greenlet_func_already_set(self): - def run(): - return tpt_callback() - - self._check_trace_events_func_already_set(greenlet.greenlet(run)) - - def test_trace_events_into_greenlet_subclass_already_set(self): - class X(greenlet.greenlet): - def run(self): - return tpt_callback() - self._check_trace_events_func_already_set(X()) - - def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer): - g.switch() - tpt_callback() - tracer.__exit__() - self.assertEqual(tracer.actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('return', 'run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - - def test_trace_events_from_greenlet_func_sets_profiler(self): - tracer = PythonTracer() - def run(): - tracer.__enter__() - return tpt_callback() - - self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run), - tracer) - - def test_trace_events_from_greenlet_subclass_sets_profiler(self): - tracer = PythonTracer() - class X(greenlet.greenlet): - def run(self): - tracer.__enter__() - return tpt_callback() - - self._check_trace_events_from_greenlet_sets_profiler(X(), tracer) - - @unittest.skipIf(*ASSERTION_BUILD_PY312) - def test_trace_events_multiple_greenlets_switching(self): - tracer = PythonTracer() - - g1 = None - g2 = None - - def g1_run(): - tracer.__enter__() - tpt_callback() - g2.switch() - tpt_callback() - return 42 - - def g2_run(): - tpt_callback() - tracer.__exit__() - tpt_callback() - g1.switch() - - g1 = greenlet.greenlet(g1_run) - g2 = greenlet.greenlet(g2_run) - - x = g1.switch() - self.assertEqual(x, 42) - tpt_callback() # ensure not in the trace - self.assertEqual(tracer.actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('c_call', 'g1_run'), - ('call', 'g2_run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - @unittest.skipIf(*ASSERTION_BUILD_PY312) - def test_trace_events_multiple_greenlets_switching_siblings(self): - # Like the first version, but get both greenlets running first - # as "siblings" and then establish the tracing. 
- tracer = PythonTracer() - - g1 = None - g2 = None - - def g1_run(): - greenlet.getcurrent().parent.switch() - tracer.__enter__() - tpt_callback() - g2.switch() - tpt_callback() - return 42 - - def g2_run(): - greenlet.getcurrent().parent.switch() - - tpt_callback() - tracer.__exit__() - tpt_callback() - g1.switch() - - g1 = greenlet.greenlet(g1_run) - g2 = greenlet.greenlet(g2_run) - - # Start g1 - g1.switch() - # And it immediately returns control to us. - # Start g2 - g2.switch() - # Which also returns. Now kick of the real part of the - # test. - x = g1.switch() - self.assertEqual(x, 42) - - tpt_callback() # ensure not in the trace - self.assertEqual(tracer.actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('c_call', 'g1_run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - -if __name__ == '__main__': - unittest.main() diff --git a/venv/Lib/site-packages/greenlet/tests/test_version.py b/venv/Lib/site-packages/greenlet/tests/test_version.py deleted file mode 100644 index 96c17cf..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_version.py +++ /dev/null @@ -1,41 +0,0 @@ -#! /usr/bin/env python -from __future__ import absolute_import -from __future__ import print_function - -import sys -import os -from unittest import TestCase as NonLeakingTestCase - -import greenlet - -# No reason to run this multiple times under leakchecks, -# it doesn't do anything. -class VersionTests(NonLeakingTestCase): - def test_version(self): - def find_dominating_file(name): - if os.path.exists(name): - return name - - tried = [] - here = os.path.abspath(os.path.dirname(__file__)) - for i in range(10): - up = ['..'] * i - path = [here] + up + [name] - fname = os.path.join(*path) - fname = os.path.abspath(fname) - tried.append(fname) - if os.path.exists(fname): - return fname - raise AssertionError("Could not find file " + name + "; checked " + str(tried)) - - try: - setup_py = find_dominating_file('setup.py') - except AssertionError as e: - self.skipTest("Unable to find setup.py; must be out of tree. " + str(e)) - - - invoke_setup = "%s %s --version" % (sys.executable, setup_py) - with os.popen(invoke_setup) as f: - sversion = f.read().strip() - - self.assertEqual(sversion, greenlet.__version__) diff --git a/venv/Lib/site-packages/greenlet/tests/test_weakref.py b/venv/Lib/site-packages/greenlet/tests/test_weakref.py deleted file mode 100644 index 05a38a7..0000000 --- a/venv/Lib/site-packages/greenlet/tests/test_weakref.py +++ /dev/null @@ -1,35 +0,0 @@ -import gc -import weakref - - -import greenlet -from . 
import TestCase - -class WeakRefTests(TestCase): - def test_dead_weakref(self): - def _dead_greenlet(): - g = greenlet.greenlet(lambda: None) - g.switch() - return g - o = weakref.ref(_dead_greenlet()) - gc.collect() - self.assertEqual(o(), None) - - def test_inactive_weakref(self): - o = weakref.ref(greenlet.greenlet()) - gc.collect() - self.assertEqual(o(), None) - - def test_dealloc_weakref(self): - seen = [] - def worker(): - try: - greenlet.getcurrent().parent.switch() - finally: - seen.append(g()) - g = greenlet.greenlet(worker) - g.switch() - g2 = greenlet.greenlet(lambda: None, g) - g = weakref.ref(g2) - g2 = None - self.assertEqual(seen, [None]) diff --git a/venv/Lib/site-packages/idna-3.11.dist-info/INSTALLER b/venv/Lib/site-packages/idna-3.11.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/idna-3.11.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/idna-3.11.dist-info/METADATA b/venv/Lib/site-packages/idna-3.11.dist-info/METADATA deleted file mode 100644 index 7a4a4b7..0000000 --- a/venv/Lib/site-packages/idna-3.11.dist-info/METADATA +++ /dev/null @@ -1,209 +0,0 @@ -Metadata-Version: 2.4 -Name: idna -Version: 3.11 -Summary: Internationalized Domain Names in Applications (IDNA) -Author-email: Kim Davies -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3.14 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -License-File: LICENSE.md -Requires-Dist: ruff >= 0.6.2 ; extra == "all" -Requires-Dist: mypy >= 1.11.2 ; extra == "all" -Requires-Dist: pytest >= 8.3.2 ; extra == "all" -Requires-Dist: flake8 >= 7.1.1 ; extra == "all" -Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst -Project-URL: Issue tracker, https://github.com/kjd/idna/issues -Project-URL: Source, https://github.com/kjd/idna -Provides-Extra: all - -Internationalized Domain Names in Applications (IDNA) -===================================================== - -Support for `Internationalized Domain Names in -Applications (IDNA) `_ -and `Unicode IDNA Compatibility Processing -`_. - -The latest versions of these standards supplied here provide -more comprehensive language coverage and reduce the potential of -allowing domains with known security vulnerabilities. This library -is a suitable replacement for the “encodings.idna” -module that comes with the Python standard library, but which -only supports an older superseded IDNA specification from 2003. - -Basic functions are simply executed: - -.. 
code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - - -Installation ------------- - -This package is available for installation from PyPI via the -typical mechanisms, such as: - -.. code-block:: bash - - $ python3 -m pip install idna - - -Usage ------ - -For typical usage, the ``encode`` and ``decode`` functions will take a -domain name argument and perform a conversion to ASCII compatible encoding -(known as A-labels), or to Unicode strings (known as U-labels) -respectively. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - -Conversions can be applied at a per-label basis using the ``ulabel`` or -``alabel`` functions if necessary: - -.. code-block:: pycon - - >>> idna.alabel('测试') - b'xn--0zwm56d' - - -Compatibility Mapping (UTS #46) -+++++++++++++++++++++++++++++++ - -This library provides support for `Unicode IDNA Compatibility -Processing `_ which normalizes input from -different potential ways a user may input a domain prior to performing the IDNA -conversion operations. This functionality, known as a -`mapping `_, is considered by the -specification to be a local user-interface issue distinct from IDNA -conversion functionality. - -For example, “Königsgäßchen” is not a permissible label as *LATIN -CAPITAL LETTER K* is not allowed (nor are capital letters in general). -UTS 46 will convert this into lower case prior to applying the IDNA -conversion. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('Königsgäßchen') - ... - idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed - >>> idna.encode('Königsgäßchen', uts46=True) - b'xn--knigsgchen-b4a3dun' - >>> print(idna.decode('xn--knigsgchen-b4a3dun')) - königsgäßchen - - -Exceptions ----------- - -All errors raised during the conversion following the specification -should raise an exception derived from the ``idna.IDNAError`` base -class. - -More specific exceptions that may be generated as ``idna.IDNABidiError`` -when the error reflects an illegal combination of left-to-right and -right-to-left characters in a label; ``idna.InvalidCodepoint`` when -a specific codepoint is an illegal character in an IDN label (i.e. -INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is -illegal based on its position in the string (i.e. it is CONTEXTO or CONTEXTJ -but the contextual requirements are not satisfied.) - -Building and Diagnostics ------------------------- - -The IDNA and UTS 46 functionality relies upon pre-calculated lookup -tables for performance. These tables are derived from computing against -eligibility criteria in the respective standards using the command-line -script ``tools/idna-data``. - -This tool will fetch relevant codepoint data from the Unicode repository -and perform the required calculations to identify eligibility. There are -three main modes: - -* ``idna-data make-libdata``. Generates ``idnadata.py`` and - ``uts46data.py``, the pre-calculated lookup tables used for IDNA and - UTS 46 conversions. Implementers who wish to track this library against - a different Unicode version may use this tool to manually generate a - different version of the ``idnadata.py`` and ``uts46data.py`` files. - -* ``idna-data make-table``. Generate a table of the IDNA disposition - (e.g. 
PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix - B.1 of RFC 5892 and the pre-computed tables published by `IANA - `_. - -* ``idna-data U+0061``. Prints debugging output on the various - properties associated with an individual Unicode codepoint (in this - case, U+0061), that are used to assess the IDNA and UTS 46 status of a - codepoint. This is helpful in debugging or analysis. - -The tool accepts a number of arguments, described using ``idna-data --h``. Most notably, the ``--version`` argument allows the specification -of the version of Unicode to be used in computing the table data. For -example, ``idna-data --version 9.0.0 make-libdata`` will generate -library data against Unicode 9.0.0. - - -Additional Notes ----------------- - -* **Packages**. The latest tagged release version is published in the - `Python Package Index `_. - -* **Version support**. This library supports Python 3.8 and higher. - As this library serves as a low-level toolkit for a variety of - applications, many of which strive for broad compatibility with older - Python versions, there is no rush to remove older interpreter support. - Support for older versions are likely to be removed from new releases - as automated tests can no longer easily be run, i.e. once the Python - version is officially end-of-life. - -* **Testing**. The library has a test suite based on each rule of the - IDNA specification, as well as tests that are provided as part of the - Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing - `_. - -* **Emoji**. It is an occasional request to support emoji domains in - this library. Encoding of symbols like emoji is expressly prohibited by - the technical standard IDNA 2008 and emoji domains are broadly phased - out across the domain industry due to associated security risks. For - now, applications that need to support these non-compliant labels - may wish to consider trying the encode/decode operation in this library - first, and then falling back to using `encodings.idna`. See `the Github - project `_ for more discussion. - -* **Transitional processing**. Unicode 16.0.0 removed transitional - processing so the `transitional` argument for the encode() method - no longer has any effect and will be removed at a later date. 
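A short round-trip sketch combining the pieces above (the values are the ones shown earlier in this document; ``InvalidCodepoint`` derives from ``idna.IDNAError`` as described in the Exceptions section):

.. code-block:: pycon

    >>> import idna
    >>> try:
    ...     idna.encode('Königsgäßchen')          # capital letters are rejected without UTS 46 mapping
    ... except idna.IDNAError as exc:
    ...     print(type(exc).__name__)
    ...
    InvalidCodepoint
    >>> idna.encode('Königsgäßchen', uts46=True)  # UTS 46 lower-cases the input before conversion
    b'xn--knigsgchen-b4a3dun'
    >>> idna.decode('xn--knigsgchen-b4a3dun')
    'königsgäßchen'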
- diff --git a/venv/Lib/site-packages/idna-3.11.dist-info/RECORD b/venv/Lib/site-packages/idna-3.11.dist-info/RECORD deleted file mode 100644 index 6019b09..0000000 --- a/venv/Lib/site-packages/idna-3.11.dist-info/RECORD +++ /dev/null @@ -1,23 +0,0 @@ -idna-3.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -idna-3.11.dist-info/METADATA,sha256=fCwSww9SuiN8TIHllFSASUQCW55hAs8dzKnr9RaEEbA,8378 -idna-3.11.dist-info/RECORD,, -idna-3.11.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna-3.11.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -idna-3.11.dist-info/licenses/LICENSE.md,sha256=t6M2q_OwThgOwGXN0W5wXQeeHMehT5EKpukYfza5zYc,1541 -idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 -idna/__pycache__/__init__.cpython-312.pyc,, -idna/__pycache__/codec.cpython-312.pyc,, -idna/__pycache__/compat.cpython-312.pyc,, -idna/__pycache__/core.cpython-312.pyc,, -idna/__pycache__/idnadata.cpython-312.pyc,, -idna/__pycache__/intranges.cpython-312.pyc,, -idna/__pycache__/package_data.cpython-312.pyc,, -idna/__pycache__/uts46data.cpython-312.pyc,, -idna/codec.py,sha256=M2SGWN7cs_6B32QmKTyTN6xQGZeYQgQ2wiX3_DR6loE,3438 -idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 -idna/core.py,sha256=P26_XVycuMTZ1R2mNK1ZREVzM5mvTzdabBXfyZVU1Lc,13246 -idna/idnadata.py,sha256=SG8jhaGE53iiD6B49pt2pwTv_UvClciWE-N54oR2p4U,79623 -idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 -idna/package_data.py,sha256=_CUavOxobnbyNG2FLyHoN8QHP3QM9W1tKuw7eq9QwBk,21 -idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna/uts46data.py,sha256=H9J35VkD0F9L9mKOqjeNGd2A-Va6FlPoz6Jz4K7h-ps,243725 diff --git a/venv/Lib/site-packages/idna-3.11.dist-info/REQUESTED b/venv/Lib/site-packages/idna-3.11.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/idna-3.11.dist-info/WHEEL b/venv/Lib/site-packages/idna-3.11.dist-info/WHEEL deleted file mode 100644 index d8b9936..0000000 --- a/venv/Lib/site-packages/idna-3.11.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md b/venv/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md deleted file mode 100644 index 256ba90..0000000 --- a/venv/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,31 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2013-2025, Kim Davies and contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/idna/__init__.py b/venv/Lib/site-packages/idna/__init__.py deleted file mode 100644 index cfdc030..0000000 --- a/venv/Lib/site-packages/idna/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -from .core import ( - IDNABidiError, - IDNAError, - InvalidCodepoint, - InvalidCodepointContext, - alabel, - check_bidi, - check_hyphen_ok, - check_initial_combiner, - check_label, - check_nfc, - decode, - encode, - ulabel, - uts46_remap, - valid_contextj, - valid_contexto, - valid_label_length, - valid_string_length, -) -from .intranges import intranges_contain -from .package_data import __version__ - -__all__ = [ - "__version__", - "IDNABidiError", - "IDNAError", - "InvalidCodepoint", - "InvalidCodepointContext", - "alabel", - "check_bidi", - "check_hyphen_ok", - "check_initial_combiner", - "check_label", - "check_nfc", - "decode", - "encode", - "intranges_contain", - "ulabel", - "uts46_remap", - "valid_contextj", - "valid_contexto", - "valid_label_length", - "valid_string_length", -] diff --git a/venv/Lib/site-packages/idna/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index f877ce7..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/__pycache__/codec.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/codec.cpython-312.pyc deleted file mode 100644 index 3a7df1c..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/codec.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/__pycache__/compat.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/compat.cpython-312.pyc deleted file mode 100644 index 1b2a045..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/compat.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/__pycache__/core.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/core.cpython-312.pyc deleted file mode 100644 index ed5a4ec..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-312.pyc deleted file mode 100644 index d96fd7b..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/__pycache__/intranges.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/intranges.cpython-312.pyc deleted file mode 100644 index 4557b6e..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/intranges.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/idna/__pycache__/package_data.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/package_data.cpython-312.pyc deleted file mode 100644 index 87fbc8a..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/package_data.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/__pycache__/uts46data.cpython-312.pyc b/venv/Lib/site-packages/idna/__pycache__/uts46data.cpython-312.pyc deleted file mode 100644 index 1f98bb9..0000000 Binary files a/venv/Lib/site-packages/idna/__pycache__/uts46data.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/idna/codec.py b/venv/Lib/site-packages/idna/codec.py deleted file mode 100644 index cbc2e4f..0000000 --- a/venv/Lib/site-packages/idna/codec.py +++ /dev/null @@ -1,122 +0,0 @@ -import codecs -import re -from typing import Any, Optional, Tuple - -from .core import IDNAError, alabel, decode, encode, ulabel - -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class Codec(codecs.Codec): - def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - return encode(data), len(data) - - def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return "", 0 - - return decode(data), len(data) - - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - labels = _unicode_dots_re.split(data) - trailing_dot = b"" - if labels: - if not labels[-1]: - trailing_dot = b"." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = b"." - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result_bytes = b".".join(result) + trailing_dot - size += len(trailing_dot) - return result_bytes, size - - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return ("", 0) - - if not isinstance(data, str): - data = str(data, "ascii") - - labels = _unicode_dots_re.split(data) - trailing_dot = "" - if labels: - if not labels[-1]: - trailing_dot = "." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = "." 
- - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result_str = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result_str, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - - -class StreamReader(Codec, codecs.StreamReader): - pass - - -def search_function(name: str) -> Optional[codecs.CodecInfo]: - if name != "idna2008": - return None - return codecs.CodecInfo( - name=name, - encode=Codec().encode, - decode=Codec().decode, # type: ignore - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) - - -codecs.register(search_function) diff --git a/venv/Lib/site-packages/idna/compat.py b/venv/Lib/site-packages/idna/compat.py deleted file mode 100644 index 1df9f2a..0000000 --- a/venv/Lib/site-packages/idna/compat.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any, Union - -from .core import decode, encode - - -def ToASCII(label: str) -> bytes: - return encode(label) - - -def ToUnicode(label: Union[bytes, bytearray]) -> str: - return decode(label) - - -def nameprep(s: Any) -> None: - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/Lib/site-packages/idna/core.py b/venv/Lib/site-packages/idna/core.py deleted file mode 100644 index 8177bf7..0000000 --- a/venv/Lib/site-packages/idna/core.py +++ /dev/null @@ -1,437 +0,0 @@ -import bisect -import re -import unicodedata -from typing import Optional, Union - -from . import idnadata -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b"xn--" -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class IDNAError(UnicodeError): - """Base exception for all IDNA-encoding related problems""" - - pass - - -class IDNABidiError(IDNAError): - """Exception when bidirectional requirements are not satisfied""" - - pass - - -class InvalidCodepoint(IDNAError): - """Exception when a disallowed or unallocated codepoint is used""" - - pass - - -class InvalidCodepointContext(IDNAError): - """Exception when the codepoint is not valid in the context it is used""" - - pass - - -def _combining_class(cp: int) -> int: - v = unicodedata.combining(chr(cp)) - if v == 0: - if not unicodedata.name(chr(cp)): - raise ValueError("Unknown character in unicodedata") - return v - - -def _is_script(cp: str, script: str) -> bool: - return intranges_contain(ord(cp), idnadata.scripts[script]) - - -def _punycode(s: str) -> bytes: - return s.encode("punycode") - - -def _unot(s: int) -> str: - return "U+{:04X}".format(s) - - -def valid_label_length(label: Union[bytes, str]) -> bool: - if len(label) > 63: - return False - return True - - -def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label: str, check_ltr: bool = False) -> bool: - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == "": - # String likely comes from a newer version of Unicode - raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) - if direction in ["R", "AL", "AN"]: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in 
["R", "AL"]: - rtl = True - elif direction == "L": - rtl = False - else: - raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) - - valid_ending = False - number_type: Optional[str] = None - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if direction not in [ - "R", - "AL", - "AN", - "EN", - "ES", - "CS", - "ET", - "ON", - "BN", - "NSM", - ]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) - # Bidi rule 3 - if direction in ["R", "AL", "EN", "AN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - # Bidi rule 4 - if direction in ["AN", "EN"]: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError("Can not mix numeral types in a right-to-left label") - else: - # Bidi rule 5 - if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) - # Bidi rule 6 - if direction in ["L", "EN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - - if not valid_ending: - raise IDNABidiError("Label ends with illegal codepoint directionality") - - return True - - -def check_initial_combiner(label: str) -> bool: - if unicodedata.category(label[0])[0] == "M": - raise IDNAError("Label begins with an illegal combining character") - return True - - -def check_hyphen_ok(label: str) -> bool: - if label[2:4] == "--": - raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") - if label[0] == "-" or label[-1] == "-": - raise IDNAError("Label must not start or end with a hyphen") - return True - - -def check_nfc(label: str) -> None: - if unicodedata.normalize("NFC", label) != label: - raise IDNAError("Label must be in Normalization Form C") - - -def valid_contextj(label: str, pos: int) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x200C: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos - 1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("L"), ord("D")]: - ok = True - break - else: - break - - if not ok: - return False - - ok = False - for i in range(pos + 1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("R"), ord("D")]: - ok = True - break - else: - break - return ok - - if cp_value == 0x200D: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - return False - - -def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x00B7: - if 0 < pos < len(label) - 1: - if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label) - 1 and len(label) > 1: - return _is_script(label[pos + 1], "Greek") - return False - - elif cp_value == 0x05F3 or cp_value == 0x05F4: - if pos > 0: - return _is_script(label[pos - 1], "Hebrew") - return False - - elif cp_value == 0x30FB: - for cp in label: - if cp == "\u30fb": - continue - if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): - return True - return 
False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6F0 <= ord(cp) <= 0x06F9: - return False - return True - - elif 0x6F0 <= cp_value <= 0x6F9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - return False - - -def check_label(label: Union[str, bytes, bytearray]) -> None: - if isinstance(label, (bytes, bytearray)): - label = label.decode("utf-8") - if len(label) == 0: - raise IDNAError("Empty Label") - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for pos, cp in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext( - "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - except ValueError: - raise IDNAError( - "Unknown codepoint adjacent to joiner {} at position {} in {}".format( - _unot(cp_value), pos + 1, repr(label) - ) - ) - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): - if not valid_contexto(label, pos): - raise InvalidCodepointContext( - "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - else: - raise InvalidCodepoint( - "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) - ) - - check_bidi(label) - - -def alabel(label: str) -> bytes: - try: - label_bytes = label.encode("ascii") - ulabel(label_bytes) - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - return label_bytes - except UnicodeEncodeError: - pass - - check_label(label) - label_bytes = _alabel_prefix + _punycode(label) - - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - - return label_bytes - - -def ulabel(label: Union[str, bytes, bytearray]) -> str: - if not isinstance(label, (bytes, bytearray)): - try: - label_bytes = label.encode("ascii") - except UnicodeEncodeError: - check_label(label) - return label - else: - label_bytes = bytes(label) - - label_bytes = label_bytes.lower() - if label_bytes.startswith(_alabel_prefix): - label_bytes = label_bytes[len(_alabel_prefix) :] - if not label_bytes: - raise IDNAError("Malformed A-label, no Punycode eligible content found") - if label_bytes.decode("ascii")[-1] == "-": - raise IDNAError("A-label must not end with a hyphen") - else: - check_label(label_bytes) - return label_bytes.decode("ascii") - - try: - label = label_bytes.decode("punycode") - except UnicodeError: - raise IDNAError("Invalid A-label") - check_label(label) - return label - - -def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - - output = "" - - for pos, char in enumerate(domain): - code_point = ord(char) - try: - uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement: Optional[str] = None - if len(uts46row) == 3: - replacement = uts46row[2] - if ( - status == "V" - or (status == "D" and not transitional) - or (status == "3" and not std3_rules and replacement is None) - ): - output += char - elif replacement is not None and ( - status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) - ): - output += replacement - elif status != 
"I": - raise IndexError() - except IndexError: - raise InvalidCodepoint( - "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) - ) - - return unicodedata.normalize("NFC", output) - - -def encode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, - transitional: bool = False, -) -> bytes: - if not isinstance(s, str): - try: - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("should pass a unicode string to the function rather than a byte string.") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split(".") - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if labels[-1] == "": - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append(b"") - s = b".".join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError("Domain too long") - return s - - -def decode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, -) -> str: - try: - if not isinstance(s, str): - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("Invalid ASCII in A-label") - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - labels = s.split(".") - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append("") - return ".".join(result) diff --git a/venv/Lib/site-packages/idna/idnadata.py b/venv/Lib/site-packages/idna/idnadata.py deleted file mode 100644 index ded47ca..0000000 --- a/venv/Lib/site-packages/idna/idnadata.py +++ /dev/null @@ -1,4309 +0,0 @@ -# This file is automatically generated by tools/idna-data - -__version__ = "16.0.0" - -scripts = { - "Greek": ( - 0x37000000374, - 0x37500000378, - 0x37A0000037E, - 0x37F00000380, - 0x38400000385, - 0x38600000387, - 0x3880000038B, - 0x38C0000038D, - 0x38E000003A2, - 0x3A3000003E2, - 0x3F000000400, - 0x1D2600001D2B, - 0x1D5D00001D62, - 0x1D6600001D6B, - 0x1DBF00001DC0, - 0x1F0000001F16, - 0x1F1800001F1E, - 0x1F2000001F46, - 0x1F4800001F4E, - 0x1F5000001F58, - 0x1F5900001F5A, - 0x1F5B00001F5C, - 0x1F5D00001F5E, - 0x1F5F00001F7E, - 0x1F8000001FB5, - 0x1FB600001FC5, - 0x1FC600001FD4, - 0x1FD600001FDC, - 0x1FDD00001FF0, - 0x1FF200001FF5, - 0x1FF600001FFF, - 0x212600002127, - 0xAB650000AB66, - 0x101400001018F, - 0x101A0000101A1, - 0x1D2000001D246, - ), - "Han": ( - 0x2E8000002E9A, - 0x2E9B00002EF4, - 0x2F0000002FD6, - 0x300500003006, - 0x300700003008, - 0x30210000302A, - 0x30380000303C, - 0x340000004DC0, - 0x4E000000A000, - 0xF9000000FA6E, - 0xFA700000FADA, - 0x16FE200016FE4, - 0x16FF000016FF2, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x2F8000002FA1E, - 0x300000003134B, - 0x31350000323B0, - ), - "Hebrew": ( - 0x591000005C8, - 0x5D0000005EB, - 0x5EF000005F5, - 0xFB1D0000FB37, - 0xFB380000FB3D, - 0xFB3E0000FB3F, - 0xFB400000FB42, - 0xFB430000FB45, - 0xFB460000FB50, - ), - "Hiragana": ( - 0x304100003097, - 0x309D000030A0, - 
[... remainder of the deleted venv/Lib/site-packages/idna/idnadata.py omitted: the tail of the auto-generated "scripts" ranges (ending with the "Katakana" entry), the full "joining_types" map (codepoint -> Unicode Joining_Type, stored as the ASCII code of "C", "D", "L", "R" or "T"), and the "codepoint_classes" ranges for "PVALID", "CONTEXTJ" and "CONTEXTO", each range packed as a single integer (start << 32) | end — several thousand deleted data lines ...]
diff --git a/venv/Lib/site-packages/idna/intranges.py b/venv/Lib/site-packages/idna/intranges.py
deleted file mode 100644
index 7bfaa8d..0000000
--- a/venv/Lib/site-packages/idna/intranges.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Given a list of integers, made up of (hopefully) a small number of long runs
-of consecutive integers, compute a representation of the form
-((start1, end1), (start2, end2) ...). Then answer the question "was x present
-in the original list?" in time O(log(# runs)).
-"""
-
-import bisect
-from typing import List, Tuple
-
-
-def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
-    """Represent a list of integers as a sequence of ranges:
-    ((start_0, end_0), (start_1, end_1), ...), such that the original
-    integers are exactly those x such that start_i <= x < end_i for some i.
-
-    Ranges are encoded as single integers (start << 32 | end), not as tuples.
-    """
-
-    sorted_list = sorted(list_)
-    ranges = []
-    last_write = -1
-    for i in range(len(sorted_list)):
-        if i + 1 < len(sorted_list):
-            if sorted_list[i] == sorted_list[i + 1] - 1:
-                continue
-        current_range = sorted_list[last_write + 1 : i + 1]
-        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
-        last_write = i
-
-    return tuple(ranges)
-
-
-def _encode_range(start: int, end: int) -> int:
-    return (start << 32) | end
-
-
-def _decode_range(r: int) -> Tuple[int, int]:
-    return (r >> 32), (r & ((1 << 32) - 1))
-
-
-def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
-    """Determine if `int_` falls into one of the ranges in `ranges`."""
-    tuple_ = _encode_range(int_, 0)
-    pos = bisect.bisect_left(ranges, tuple_)
-    # we could be immediately ahead of a tuple (start, end)
-    # with start < int_ <= end
-    if pos > 0:
-        left, right = _decode_range(ranges[pos - 1])
-        if left <= int_ < right:
-            return True
-    # or we could be immediately behind a tuple (int_, end)
-    if pos < len(ranges):
-        left, _ = _decode_range(ranges[pos])
-        if left == int_:
-            return True
-    return False
diff --git a/venv/Lib/site-packages/idna/package_data.py b/venv/Lib/site-packages/idna/package_data.py
deleted file mode 100644
index 7272c8d..0000000
--- a/venv/Lib/site-packages/idna/package_data.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "3.11"
diff --git a/venv/Lib/site-packages/idna/py.typed b/venv/Lib/site-packages/idna/py.typed
deleted file mode 100644
index e69de29..0000000
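The intranges.py module removed above encodes each run of consecutive codepoints as a single integer (start << 32) | end and answers membership queries with bisect in O(log(number of runs)); the packed values in the deleted idnadata.py tables (e.g. 0x2D0000002E for "-", 0x300000003A for "0"–"9") follow the same scheme. A minimal usage sketch, assuming the idna distribution is installed as a regular dependency rather than vendored under venv/ (which is presumably the point of these deletions):

    # Illustrative only: these helpers live in the module deleted above,
    # importable as idna.intranges from the published idna package.
    from idna.intranges import intranges_from_list, intranges_contain

    # Collapse a sparse membership list into packed (start << 32 | end) runs.
    ranges = intranges_from_list([0x2D, 0x30, 0x31, 0x32, 0x33])
    # -> (0x2D0000002E, 0x3000000034): one run for "-", one for "0123"

    assert intranges_contain(0x31, ranges)      # "1" falls inside a run
    assert not intranges_contain(0x2F, ranges)  # "/" falls between runs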
(0x12, "V"), - (0x13, "V"), - (0x14, "V"), - (0x15, "V"), - (0x16, "V"), - (0x17, "V"), - (0x18, "V"), - (0x19, "V"), - (0x1A, "V"), - (0x1B, "V"), - (0x1C, "V"), - (0x1D, "V"), - (0x1E, "V"), - (0x1F, "V"), - (0x20, "V"), - (0x21, "V"), - (0x22, "V"), - (0x23, "V"), - (0x24, "V"), - (0x25, "V"), - (0x26, "V"), - (0x27, "V"), - (0x28, "V"), - (0x29, "V"), - (0x2A, "V"), - (0x2B, "V"), - (0x2C, "V"), - (0x2D, "V"), - (0x2E, "V"), - (0x2F, "V"), - (0x30, "V"), - (0x31, "V"), - (0x32, "V"), - (0x33, "V"), - (0x34, "V"), - (0x35, "V"), - (0x36, "V"), - (0x37, "V"), - (0x38, "V"), - (0x39, "V"), - (0x3A, "V"), - (0x3B, "V"), - (0x3C, "V"), - (0x3D, "V"), - (0x3E, "V"), - (0x3F, "V"), - (0x40, "V"), - (0x41, "M", "a"), - (0x42, "M", "b"), - (0x43, "M", "c"), - (0x44, "M", "d"), - (0x45, "M", "e"), - (0x46, "M", "f"), - (0x47, "M", "g"), - (0x48, "M", "h"), - (0x49, "M", "i"), - (0x4A, "M", "j"), - (0x4B, "M", "k"), - (0x4C, "M", "l"), - (0x4D, "M", "m"), - (0x4E, "M", "n"), - (0x4F, "M", "o"), - (0x50, "M", "p"), - (0x51, "M", "q"), - (0x52, "M", "r"), - (0x53, "M", "s"), - (0x54, "M", "t"), - (0x55, "M", "u"), - (0x56, "M", "v"), - (0x57, "M", "w"), - (0x58, "M", "x"), - (0x59, "M", "y"), - (0x5A, "M", "z"), - (0x5B, "V"), - (0x5C, "V"), - (0x5D, "V"), - (0x5E, "V"), - (0x5F, "V"), - (0x60, "V"), - (0x61, "V"), - (0x62, "V"), - (0x63, "V"), - ] - - -def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x64, "V"), - (0x65, "V"), - (0x66, "V"), - (0x67, "V"), - (0x68, "V"), - (0x69, "V"), - (0x6A, "V"), - (0x6B, "V"), - (0x6C, "V"), - (0x6D, "V"), - (0x6E, "V"), - (0x6F, "V"), - (0x70, "V"), - (0x71, "V"), - (0x72, "V"), - (0x73, "V"), - (0x74, "V"), - (0x75, "V"), - (0x76, "V"), - (0x77, "V"), - (0x78, "V"), - (0x79, "V"), - (0x7A, "V"), - (0x7B, "V"), - (0x7C, "V"), - (0x7D, "V"), - (0x7E, "V"), - (0x7F, "V"), - (0x80, "X"), - (0x81, "X"), - (0x82, "X"), - (0x83, "X"), - (0x84, "X"), - (0x85, "X"), - (0x86, "X"), - (0x87, "X"), - (0x88, "X"), - (0x89, "X"), - (0x8A, "X"), - (0x8B, "X"), - (0x8C, "X"), - (0x8D, "X"), - (0x8E, "X"), - (0x8F, "X"), - (0x90, "X"), - (0x91, "X"), - (0x92, "X"), - (0x93, "X"), - (0x94, "X"), - (0x95, "X"), - (0x96, "X"), - (0x97, "X"), - (0x98, "X"), - (0x99, "X"), - (0x9A, "X"), - (0x9B, "X"), - (0x9C, "X"), - (0x9D, "X"), - (0x9E, "X"), - (0x9F, "X"), - (0xA0, "M", " "), - (0xA1, "V"), - (0xA2, "V"), - (0xA3, "V"), - (0xA4, "V"), - (0xA5, "V"), - (0xA6, "V"), - (0xA7, "V"), - (0xA8, "M", " ̈"), - (0xA9, "V"), - (0xAA, "M", "a"), - (0xAB, "V"), - (0xAC, "V"), - (0xAD, "I"), - (0xAE, "V"), - (0xAF, "M", " ̄"), - (0xB0, "V"), - (0xB1, "V"), - (0xB2, "M", "2"), - (0xB3, "M", "3"), - (0xB4, "M", " ́"), - (0xB5, "M", "μ"), - (0xB6, "V"), - (0xB7, "V"), - (0xB8, "M", " ̧"), - (0xB9, "M", "1"), - (0xBA, "M", "o"), - (0xBB, "V"), - (0xBC, "M", "1⁄4"), - (0xBD, "M", "1⁄2"), - (0xBE, "M", "3⁄4"), - (0xBF, "V"), - (0xC0, "M", "à"), - (0xC1, "M", "á"), - (0xC2, "M", "â"), - (0xC3, "M", "ã"), - (0xC4, "M", "ä"), - (0xC5, "M", "å"), - (0xC6, "M", "æ"), - (0xC7, "M", "ç"), - ] - - -def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC8, "M", "è"), - (0xC9, "M", "é"), - (0xCA, "M", "ê"), - (0xCB, "M", "ë"), - (0xCC, "M", "ì"), - (0xCD, "M", "í"), - (0xCE, "M", "î"), - (0xCF, "M", "ï"), - (0xD0, "M", "ð"), - (0xD1, "M", "ñ"), - (0xD2, "M", "ò"), - (0xD3, "M", "ó"), - (0xD4, "M", "ô"), - (0xD5, "M", "õ"), - (0xD6, "M", "ö"), - (0xD7, "V"), - (0xD8, "M", "ø"), - (0xD9, "M", "ù"), - (0xDA, "M", "ú"), - (0xDB, "M", "û"), - 
(0xDC, "M", "ü"), - (0xDD, "M", "ý"), - (0xDE, "M", "þ"), - (0xDF, "D", "ss"), - (0xE0, "V"), - (0xE1, "V"), - (0xE2, "V"), - (0xE3, "V"), - (0xE4, "V"), - (0xE5, "V"), - (0xE6, "V"), - (0xE7, "V"), - (0xE8, "V"), - (0xE9, "V"), - (0xEA, "V"), - (0xEB, "V"), - (0xEC, "V"), - (0xED, "V"), - (0xEE, "V"), - (0xEF, "V"), - (0xF0, "V"), - (0xF1, "V"), - (0xF2, "V"), - (0xF3, "V"), - (0xF4, "V"), - (0xF5, "V"), - (0xF6, "V"), - (0xF7, "V"), - (0xF8, "V"), - (0xF9, "V"), - (0xFA, "V"), - (0xFB, "V"), - (0xFC, "V"), - (0xFD, "V"), - (0xFE, "V"), - (0xFF, "V"), - (0x100, "M", "ā"), - (0x101, "V"), - (0x102, "M", "ă"), - (0x103, "V"), - (0x104, "M", "ą"), - (0x105, "V"), - (0x106, "M", "ć"), - (0x107, "V"), - (0x108, "M", "ĉ"), - (0x109, "V"), - (0x10A, "M", "ċ"), - (0x10B, "V"), - (0x10C, "M", "č"), - (0x10D, "V"), - (0x10E, "M", "ď"), - (0x10F, "V"), - (0x110, "M", "đ"), - (0x111, "V"), - (0x112, "M", "ē"), - (0x113, "V"), - (0x114, "M", "ĕ"), - (0x115, "V"), - (0x116, "M", "ė"), - (0x117, "V"), - (0x118, "M", "ę"), - (0x119, "V"), - (0x11A, "M", "ě"), - (0x11B, "V"), - (0x11C, "M", "ĝ"), - (0x11D, "V"), - (0x11E, "M", "ğ"), - (0x11F, "V"), - (0x120, "M", "ġ"), - (0x121, "V"), - (0x122, "M", "ģ"), - (0x123, "V"), - (0x124, "M", "ĥ"), - (0x125, "V"), - (0x126, "M", "ħ"), - (0x127, "V"), - (0x128, "M", "ĩ"), - (0x129, "V"), - (0x12A, "M", "ī"), - (0x12B, "V"), - ] - - -def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x12C, "M", "ĭ"), - (0x12D, "V"), - (0x12E, "M", "į"), - (0x12F, "V"), - (0x130, "M", "i̇"), - (0x131, "V"), - (0x132, "M", "ij"), - (0x134, "M", "ĵ"), - (0x135, "V"), - (0x136, "M", "ķ"), - (0x137, "V"), - (0x139, "M", "ĺ"), - (0x13A, "V"), - (0x13B, "M", "ļ"), - (0x13C, "V"), - (0x13D, "M", "ľ"), - (0x13E, "V"), - (0x13F, "M", "l·"), - (0x141, "M", "ł"), - (0x142, "V"), - (0x143, "M", "ń"), - (0x144, "V"), - (0x145, "M", "ņ"), - (0x146, "V"), - (0x147, "M", "ň"), - (0x148, "V"), - (0x149, "M", "ʼn"), - (0x14A, "M", "ŋ"), - (0x14B, "V"), - (0x14C, "M", "ō"), - (0x14D, "V"), - (0x14E, "M", "ŏ"), - (0x14F, "V"), - (0x150, "M", "ő"), - (0x151, "V"), - (0x152, "M", "œ"), - (0x153, "V"), - (0x154, "M", "ŕ"), - (0x155, "V"), - (0x156, "M", "ŗ"), - (0x157, "V"), - (0x158, "M", "ř"), - (0x159, "V"), - (0x15A, "M", "ś"), - (0x15B, "V"), - (0x15C, "M", "ŝ"), - (0x15D, "V"), - (0x15E, "M", "ş"), - (0x15F, "V"), - (0x160, "M", "š"), - (0x161, "V"), - (0x162, "M", "ţ"), - (0x163, "V"), - (0x164, "M", "ť"), - (0x165, "V"), - (0x166, "M", "ŧ"), - (0x167, "V"), - (0x168, "M", "ũ"), - (0x169, "V"), - (0x16A, "M", "ū"), - (0x16B, "V"), - (0x16C, "M", "ŭ"), - (0x16D, "V"), - (0x16E, "M", "ů"), - (0x16F, "V"), - (0x170, "M", "ű"), - (0x171, "V"), - (0x172, "M", "ų"), - (0x173, "V"), - (0x174, "M", "ŵ"), - (0x175, "V"), - (0x176, "M", "ŷ"), - (0x177, "V"), - (0x178, "M", "ÿ"), - (0x179, "M", "ź"), - (0x17A, "V"), - (0x17B, "M", "ż"), - (0x17C, "V"), - (0x17D, "M", "ž"), - (0x17E, "V"), - (0x17F, "M", "s"), - (0x180, "V"), - (0x181, "M", "ɓ"), - (0x182, "M", "ƃ"), - (0x183, "V"), - (0x184, "M", "ƅ"), - (0x185, "V"), - (0x186, "M", "ɔ"), - (0x187, "M", "ƈ"), - (0x188, "V"), - (0x189, "M", "ɖ"), - (0x18A, "M", "ɗ"), - (0x18B, "M", "ƌ"), - (0x18C, "V"), - (0x18E, "M", "ǝ"), - (0x18F, "M", "ə"), - (0x190, "M", "ɛ"), - (0x191, "M", "ƒ"), - (0x192, "V"), - (0x193, "M", "ɠ"), - ] - - -def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x194, "M", "ɣ"), - (0x195, "V"), - (0x196, "M", "ɩ"), - (0x197, "M", "ɨ"), - (0x198, "M", "ƙ"), - (0x199, "V"), - 
(0x19C, "M", "ɯ"), - (0x19D, "M", "ɲ"), - (0x19E, "V"), - (0x19F, "M", "ɵ"), - (0x1A0, "M", "ơ"), - (0x1A1, "V"), - (0x1A2, "M", "ƣ"), - (0x1A3, "V"), - (0x1A4, "M", "ƥ"), - (0x1A5, "V"), - (0x1A6, "M", "ʀ"), - (0x1A7, "M", "ƨ"), - (0x1A8, "V"), - (0x1A9, "M", "ʃ"), - (0x1AA, "V"), - (0x1AC, "M", "ƭ"), - (0x1AD, "V"), - (0x1AE, "M", "ʈ"), - (0x1AF, "M", "ư"), - (0x1B0, "V"), - (0x1B1, "M", "ʊ"), - (0x1B2, "M", "ʋ"), - (0x1B3, "M", "ƴ"), - (0x1B4, "V"), - (0x1B5, "M", "ƶ"), - (0x1B6, "V"), - (0x1B7, "M", "ʒ"), - (0x1B8, "M", "ƹ"), - (0x1B9, "V"), - (0x1BC, "M", "ƽ"), - (0x1BD, "V"), - (0x1C4, "M", "dž"), - (0x1C7, "M", "lj"), - (0x1CA, "M", "nj"), - (0x1CD, "M", "ǎ"), - (0x1CE, "V"), - (0x1CF, "M", "ǐ"), - (0x1D0, "V"), - (0x1D1, "M", "ǒ"), - (0x1D2, "V"), - (0x1D3, "M", "ǔ"), - (0x1D4, "V"), - (0x1D5, "M", "ǖ"), - (0x1D6, "V"), - (0x1D7, "M", "ǘ"), - (0x1D8, "V"), - (0x1D9, "M", "ǚ"), - (0x1DA, "V"), - (0x1DB, "M", "ǜ"), - (0x1DC, "V"), - (0x1DE, "M", "ǟ"), - (0x1DF, "V"), - (0x1E0, "M", "ǡ"), - (0x1E1, "V"), - (0x1E2, "M", "ǣ"), - (0x1E3, "V"), - (0x1E4, "M", "ǥ"), - (0x1E5, "V"), - (0x1E6, "M", "ǧ"), - (0x1E7, "V"), - (0x1E8, "M", "ǩ"), - (0x1E9, "V"), - (0x1EA, "M", "ǫ"), - (0x1EB, "V"), - (0x1EC, "M", "ǭ"), - (0x1ED, "V"), - (0x1EE, "M", "ǯ"), - (0x1EF, "V"), - (0x1F1, "M", "dz"), - (0x1F4, "M", "ǵ"), - (0x1F5, "V"), - (0x1F6, "M", "ƕ"), - (0x1F7, "M", "ƿ"), - (0x1F8, "M", "ǹ"), - (0x1F9, "V"), - (0x1FA, "M", "ǻ"), - (0x1FB, "V"), - (0x1FC, "M", "ǽ"), - (0x1FD, "V"), - (0x1FE, "M", "ǿ"), - (0x1FF, "V"), - (0x200, "M", "ȁ"), - (0x201, "V"), - (0x202, "M", "ȃ"), - (0x203, "V"), - (0x204, "M", "ȅ"), - (0x205, "V"), - (0x206, "M", "ȇ"), - (0x207, "V"), - (0x208, "M", "ȉ"), - (0x209, "V"), - (0x20A, "M", "ȋ"), - (0x20B, "V"), - (0x20C, "M", "ȍ"), - ] - - -def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x20D, "V"), - (0x20E, "M", "ȏ"), - (0x20F, "V"), - (0x210, "M", "ȑ"), - (0x211, "V"), - (0x212, "M", "ȓ"), - (0x213, "V"), - (0x214, "M", "ȕ"), - (0x215, "V"), - (0x216, "M", "ȗ"), - (0x217, "V"), - (0x218, "M", "ș"), - (0x219, "V"), - (0x21A, "M", "ț"), - (0x21B, "V"), - (0x21C, "M", "ȝ"), - (0x21D, "V"), - (0x21E, "M", "ȟ"), - (0x21F, "V"), - (0x220, "M", "ƞ"), - (0x221, "V"), - (0x222, "M", "ȣ"), - (0x223, "V"), - (0x224, "M", "ȥ"), - (0x225, "V"), - (0x226, "M", "ȧ"), - (0x227, "V"), - (0x228, "M", "ȩ"), - (0x229, "V"), - (0x22A, "M", "ȫ"), - (0x22B, "V"), - (0x22C, "M", "ȭ"), - (0x22D, "V"), - (0x22E, "M", "ȯ"), - (0x22F, "V"), - (0x230, "M", "ȱ"), - (0x231, "V"), - (0x232, "M", "ȳ"), - (0x233, "V"), - (0x23A, "M", "ⱥ"), - (0x23B, "M", "ȼ"), - (0x23C, "V"), - (0x23D, "M", "ƚ"), - (0x23E, "M", "ⱦ"), - (0x23F, "V"), - (0x241, "M", "ɂ"), - (0x242, "V"), - (0x243, "M", "ƀ"), - (0x244, "M", "ʉ"), - (0x245, "M", "ʌ"), - (0x246, "M", "ɇ"), - (0x247, "V"), - (0x248, "M", "ɉ"), - (0x249, "V"), - (0x24A, "M", "ɋ"), - (0x24B, "V"), - (0x24C, "M", "ɍ"), - (0x24D, "V"), - (0x24E, "M", "ɏ"), - (0x24F, "V"), - (0x2B0, "M", "h"), - (0x2B1, "M", "ɦ"), - (0x2B2, "M", "j"), - (0x2B3, "M", "r"), - (0x2B4, "M", "ɹ"), - (0x2B5, "M", "ɻ"), - (0x2B6, "M", "ʁ"), - (0x2B7, "M", "w"), - (0x2B8, "M", "y"), - (0x2B9, "V"), - (0x2D8, "M", " ̆"), - (0x2D9, "M", " ̇"), - (0x2DA, "M", " ̊"), - (0x2DB, "M", " ̨"), - (0x2DC, "M", " ̃"), - (0x2DD, "M", " ̋"), - (0x2DE, "V"), - (0x2E0, "M", "ɣ"), - (0x2E1, "M", "l"), - (0x2E2, "M", "s"), - (0x2E3, "M", "x"), - (0x2E4, "M", "ʕ"), - (0x2E5, "V"), - (0x340, "M", "̀"), - (0x341, "M", "́"), - (0x342, "V"), - (0x343, "M", "̓"), - (0x344, "M", 
"̈́"), - (0x345, "M", "ι"), - (0x346, "V"), - (0x34F, "I"), - (0x350, "V"), - (0x370, "M", "ͱ"), - (0x371, "V"), - (0x372, "M", "ͳ"), - (0x373, "V"), - (0x374, "M", "ʹ"), - (0x375, "V"), - (0x376, "M", "ͷ"), - (0x377, "V"), - ] - - -def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x378, "X"), - (0x37A, "M", " ι"), - (0x37B, "V"), - (0x37E, "M", ";"), - (0x37F, "M", "ϳ"), - (0x380, "X"), - (0x384, "M", " ́"), - (0x385, "M", " ̈́"), - (0x386, "M", "ά"), - (0x387, "M", "·"), - (0x388, "M", "έ"), - (0x389, "M", "ή"), - (0x38A, "M", "ί"), - (0x38B, "X"), - (0x38C, "M", "ό"), - (0x38D, "X"), - (0x38E, "M", "ύ"), - (0x38F, "M", "ώ"), - (0x390, "V"), - (0x391, "M", "α"), - (0x392, "M", "β"), - (0x393, "M", "γ"), - (0x394, "M", "δ"), - (0x395, "M", "ε"), - (0x396, "M", "ζ"), - (0x397, "M", "η"), - (0x398, "M", "θ"), - (0x399, "M", "ι"), - (0x39A, "M", "κ"), - (0x39B, "M", "λ"), - (0x39C, "M", "μ"), - (0x39D, "M", "ν"), - (0x39E, "M", "ξ"), - (0x39F, "M", "ο"), - (0x3A0, "M", "π"), - (0x3A1, "M", "ρ"), - (0x3A2, "X"), - (0x3A3, "M", "σ"), - (0x3A4, "M", "τ"), - (0x3A5, "M", "υ"), - (0x3A6, "M", "φ"), - (0x3A7, "M", "χ"), - (0x3A8, "M", "ψ"), - (0x3A9, "M", "ω"), - (0x3AA, "M", "ϊ"), - (0x3AB, "M", "ϋ"), - (0x3AC, "V"), - (0x3C2, "D", "σ"), - (0x3C3, "V"), - (0x3CF, "M", "ϗ"), - (0x3D0, "M", "β"), - (0x3D1, "M", "θ"), - (0x3D2, "M", "υ"), - (0x3D3, "M", "ύ"), - (0x3D4, "M", "ϋ"), - (0x3D5, "M", "φ"), - (0x3D6, "M", "π"), - (0x3D7, "V"), - (0x3D8, "M", "ϙ"), - (0x3D9, "V"), - (0x3DA, "M", "ϛ"), - (0x3DB, "V"), - (0x3DC, "M", "ϝ"), - (0x3DD, "V"), - (0x3DE, "M", "ϟ"), - (0x3DF, "V"), - (0x3E0, "M", "ϡ"), - (0x3E1, "V"), - (0x3E2, "M", "ϣ"), - (0x3E3, "V"), - (0x3E4, "M", "ϥ"), - (0x3E5, "V"), - (0x3E6, "M", "ϧ"), - (0x3E7, "V"), - (0x3E8, "M", "ϩ"), - (0x3E9, "V"), - (0x3EA, "M", "ϫ"), - (0x3EB, "V"), - (0x3EC, "M", "ϭ"), - (0x3ED, "V"), - (0x3EE, "M", "ϯ"), - (0x3EF, "V"), - (0x3F0, "M", "κ"), - (0x3F1, "M", "ρ"), - (0x3F2, "M", "σ"), - (0x3F3, "V"), - (0x3F4, "M", "θ"), - (0x3F5, "M", "ε"), - (0x3F6, "V"), - (0x3F7, "M", "ϸ"), - (0x3F8, "V"), - (0x3F9, "M", "σ"), - (0x3FA, "M", "ϻ"), - (0x3FB, "V"), - (0x3FD, "M", "ͻ"), - (0x3FE, "M", "ͼ"), - (0x3FF, "M", "ͽ"), - (0x400, "M", "ѐ"), - (0x401, "M", "ё"), - (0x402, "M", "ђ"), - ] - - -def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x403, "M", "ѓ"), - (0x404, "M", "є"), - (0x405, "M", "ѕ"), - (0x406, "M", "і"), - (0x407, "M", "ї"), - (0x408, "M", "ј"), - (0x409, "M", "љ"), - (0x40A, "M", "њ"), - (0x40B, "M", "ћ"), - (0x40C, "M", "ќ"), - (0x40D, "M", "ѝ"), - (0x40E, "M", "ў"), - (0x40F, "M", "џ"), - (0x410, "M", "а"), - (0x411, "M", "б"), - (0x412, "M", "в"), - (0x413, "M", "г"), - (0x414, "M", "д"), - (0x415, "M", "е"), - (0x416, "M", "ж"), - (0x417, "M", "з"), - (0x418, "M", "и"), - (0x419, "M", "й"), - (0x41A, "M", "к"), - (0x41B, "M", "л"), - (0x41C, "M", "м"), - (0x41D, "M", "н"), - (0x41E, "M", "о"), - (0x41F, "M", "п"), - (0x420, "M", "р"), - (0x421, "M", "с"), - (0x422, "M", "т"), - (0x423, "M", "у"), - (0x424, "M", "ф"), - (0x425, "M", "х"), - (0x426, "M", "ц"), - (0x427, "M", "ч"), - (0x428, "M", "ш"), - (0x429, "M", "щ"), - (0x42A, "M", "ъ"), - (0x42B, "M", "ы"), - (0x42C, "M", "ь"), - (0x42D, "M", "э"), - (0x42E, "M", "ю"), - (0x42F, "M", "я"), - (0x430, "V"), - (0x460, "M", "ѡ"), - (0x461, "V"), - (0x462, "M", "ѣ"), - (0x463, "V"), - (0x464, "M", "ѥ"), - (0x465, "V"), - (0x466, "M", "ѧ"), - (0x467, "V"), - (0x468, "M", "ѩ"), - (0x469, "V"), - (0x46A, "M", "ѫ"), - (0x46B, 
"V"), - (0x46C, "M", "ѭ"), - (0x46D, "V"), - (0x46E, "M", "ѯ"), - (0x46F, "V"), - (0x470, "M", "ѱ"), - (0x471, "V"), - (0x472, "M", "ѳ"), - (0x473, "V"), - (0x474, "M", "ѵ"), - (0x475, "V"), - (0x476, "M", "ѷ"), - (0x477, "V"), - (0x478, "M", "ѹ"), - (0x479, "V"), - (0x47A, "M", "ѻ"), - (0x47B, "V"), - (0x47C, "M", "ѽ"), - (0x47D, "V"), - (0x47E, "M", "ѿ"), - (0x47F, "V"), - (0x480, "M", "ҁ"), - (0x481, "V"), - (0x48A, "M", "ҋ"), - (0x48B, "V"), - (0x48C, "M", "ҍ"), - (0x48D, "V"), - (0x48E, "M", "ҏ"), - (0x48F, "V"), - (0x490, "M", "ґ"), - (0x491, "V"), - (0x492, "M", "ғ"), - (0x493, "V"), - (0x494, "M", "ҕ"), - (0x495, "V"), - (0x496, "M", "җ"), - (0x497, "V"), - (0x498, "M", "ҙ"), - (0x499, "V"), - (0x49A, "M", "қ"), - (0x49B, "V"), - (0x49C, "M", "ҝ"), - (0x49D, "V"), - ] - - -def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x49E, "M", "ҟ"), - (0x49F, "V"), - (0x4A0, "M", "ҡ"), - (0x4A1, "V"), - (0x4A2, "M", "ң"), - (0x4A3, "V"), - (0x4A4, "M", "ҥ"), - (0x4A5, "V"), - (0x4A6, "M", "ҧ"), - (0x4A7, "V"), - (0x4A8, "M", "ҩ"), - (0x4A9, "V"), - (0x4AA, "M", "ҫ"), - (0x4AB, "V"), - (0x4AC, "M", "ҭ"), - (0x4AD, "V"), - (0x4AE, "M", "ү"), - (0x4AF, "V"), - (0x4B0, "M", "ұ"), - (0x4B1, "V"), - (0x4B2, "M", "ҳ"), - (0x4B3, "V"), - (0x4B4, "M", "ҵ"), - (0x4B5, "V"), - (0x4B6, "M", "ҷ"), - (0x4B7, "V"), - (0x4B8, "M", "ҹ"), - (0x4B9, "V"), - (0x4BA, "M", "һ"), - (0x4BB, "V"), - (0x4BC, "M", "ҽ"), - (0x4BD, "V"), - (0x4BE, "M", "ҿ"), - (0x4BF, "V"), - (0x4C0, "M", "ӏ"), - (0x4C1, "M", "ӂ"), - (0x4C2, "V"), - (0x4C3, "M", "ӄ"), - (0x4C4, "V"), - (0x4C5, "M", "ӆ"), - (0x4C6, "V"), - (0x4C7, "M", "ӈ"), - (0x4C8, "V"), - (0x4C9, "M", "ӊ"), - (0x4CA, "V"), - (0x4CB, "M", "ӌ"), - (0x4CC, "V"), - (0x4CD, "M", "ӎ"), - (0x4CE, "V"), - (0x4D0, "M", "ӑ"), - (0x4D1, "V"), - (0x4D2, "M", "ӓ"), - (0x4D3, "V"), - (0x4D4, "M", "ӕ"), - (0x4D5, "V"), - (0x4D6, "M", "ӗ"), - (0x4D7, "V"), - (0x4D8, "M", "ә"), - (0x4D9, "V"), - (0x4DA, "M", "ӛ"), - (0x4DB, "V"), - (0x4DC, "M", "ӝ"), - (0x4DD, "V"), - (0x4DE, "M", "ӟ"), - (0x4DF, "V"), - (0x4E0, "M", "ӡ"), - (0x4E1, "V"), - (0x4E2, "M", "ӣ"), - (0x4E3, "V"), - (0x4E4, "M", "ӥ"), - (0x4E5, "V"), - (0x4E6, "M", "ӧ"), - (0x4E7, "V"), - (0x4E8, "M", "ө"), - (0x4E9, "V"), - (0x4EA, "M", "ӫ"), - (0x4EB, "V"), - (0x4EC, "M", "ӭ"), - (0x4ED, "V"), - (0x4EE, "M", "ӯ"), - (0x4EF, "V"), - (0x4F0, "M", "ӱ"), - (0x4F1, "V"), - (0x4F2, "M", "ӳ"), - (0x4F3, "V"), - (0x4F4, "M", "ӵ"), - (0x4F5, "V"), - (0x4F6, "M", "ӷ"), - (0x4F7, "V"), - (0x4F8, "M", "ӹ"), - (0x4F9, "V"), - (0x4FA, "M", "ӻ"), - (0x4FB, "V"), - (0x4FC, "M", "ӽ"), - (0x4FD, "V"), - (0x4FE, "M", "ӿ"), - (0x4FF, "V"), - (0x500, "M", "ԁ"), - (0x501, "V"), - (0x502, "M", "ԃ"), - ] - - -def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x503, "V"), - (0x504, "M", "ԅ"), - (0x505, "V"), - (0x506, "M", "ԇ"), - (0x507, "V"), - (0x508, "M", "ԉ"), - (0x509, "V"), - (0x50A, "M", "ԋ"), - (0x50B, "V"), - (0x50C, "M", "ԍ"), - (0x50D, "V"), - (0x50E, "M", "ԏ"), - (0x50F, "V"), - (0x510, "M", "ԑ"), - (0x511, "V"), - (0x512, "M", "ԓ"), - (0x513, "V"), - (0x514, "M", "ԕ"), - (0x515, "V"), - (0x516, "M", "ԗ"), - (0x517, "V"), - (0x518, "M", "ԙ"), - (0x519, "V"), - (0x51A, "M", "ԛ"), - (0x51B, "V"), - (0x51C, "M", "ԝ"), - (0x51D, "V"), - (0x51E, "M", "ԟ"), - (0x51F, "V"), - (0x520, "M", "ԡ"), - (0x521, "V"), - (0x522, "M", "ԣ"), - (0x523, "V"), - (0x524, "M", "ԥ"), - (0x525, "V"), - (0x526, "M", "ԧ"), - (0x527, "V"), - (0x528, "M", "ԩ"), - (0x529, "V"), - (0x52A, "M", "ԫ"), 
- (0x52B, "V"), - (0x52C, "M", "ԭ"), - (0x52D, "V"), - (0x52E, "M", "ԯ"), - (0x52F, "V"), - (0x530, "X"), - (0x531, "M", "ա"), - (0x532, "M", "բ"), - (0x533, "M", "գ"), - (0x534, "M", "դ"), - (0x535, "M", "ե"), - (0x536, "M", "զ"), - (0x537, "M", "է"), - (0x538, "M", "ը"), - (0x539, "M", "թ"), - (0x53A, "M", "ժ"), - (0x53B, "M", "ի"), - (0x53C, "M", "լ"), - (0x53D, "M", "խ"), - (0x53E, "M", "ծ"), - (0x53F, "M", "կ"), - (0x540, "M", "հ"), - (0x541, "M", "ձ"), - (0x542, "M", "ղ"), - (0x543, "M", "ճ"), - (0x544, "M", "մ"), - (0x545, "M", "յ"), - (0x546, "M", "ն"), - (0x547, "M", "շ"), - (0x548, "M", "ո"), - (0x549, "M", "չ"), - (0x54A, "M", "պ"), - (0x54B, "M", "ջ"), - (0x54C, "M", "ռ"), - (0x54D, "M", "ս"), - (0x54E, "M", "վ"), - (0x54F, "M", "տ"), - (0x550, "M", "ր"), - (0x551, "M", "ց"), - (0x552, "M", "ւ"), - (0x553, "M", "փ"), - (0x554, "M", "ք"), - (0x555, "M", "օ"), - (0x556, "M", "ֆ"), - (0x557, "X"), - (0x559, "V"), - (0x587, "M", "եւ"), - (0x588, "V"), - (0x58B, "X"), - (0x58D, "V"), - (0x590, "X"), - (0x591, "V"), - (0x5C8, "X"), - (0x5D0, "V"), - (0x5EB, "X"), - (0x5EF, "V"), - (0x5F5, "X"), - (0x606, "V"), - (0x61C, "X"), - (0x61D, "V"), - ] - - -def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x675, "M", "اٴ"), - (0x676, "M", "وٴ"), - (0x677, "M", "ۇٴ"), - (0x678, "M", "يٴ"), - (0x679, "V"), - (0x6DD, "X"), - (0x6DE, "V"), - (0x70E, "X"), - (0x710, "V"), - (0x74B, "X"), - (0x74D, "V"), - (0x7B2, "X"), - (0x7C0, "V"), - (0x7FB, "X"), - (0x7FD, "V"), - (0x82E, "X"), - (0x830, "V"), - (0x83F, "X"), - (0x840, "V"), - (0x85C, "X"), - (0x85E, "V"), - (0x85F, "X"), - (0x860, "V"), - (0x86B, "X"), - (0x870, "V"), - (0x88F, "X"), - (0x897, "V"), - (0x8E2, "X"), - (0x8E3, "V"), - (0x958, "M", "क़"), - (0x959, "M", "ख़"), - (0x95A, "M", "ग़"), - (0x95B, "M", "ज़"), - (0x95C, "M", "ड़"), - (0x95D, "M", "ढ़"), - (0x95E, "M", "फ़"), - (0x95F, "M", "य़"), - (0x960, "V"), - (0x984, "X"), - (0x985, "V"), - (0x98D, "X"), - (0x98F, "V"), - (0x991, "X"), - (0x993, "V"), - (0x9A9, "X"), - (0x9AA, "V"), - (0x9B1, "X"), - (0x9B2, "V"), - (0x9B3, "X"), - (0x9B6, "V"), - (0x9BA, "X"), - (0x9BC, "V"), - (0x9C5, "X"), - (0x9C7, "V"), - (0x9C9, "X"), - (0x9CB, "V"), - (0x9CF, "X"), - (0x9D7, "V"), - (0x9D8, "X"), - (0x9DC, "M", "ড়"), - (0x9DD, "M", "ঢ়"), - (0x9DE, "X"), - (0x9DF, "M", "য়"), - (0x9E0, "V"), - (0x9E4, "X"), - (0x9E6, "V"), - (0x9FF, "X"), - (0xA01, "V"), - (0xA04, "X"), - (0xA05, "V"), - (0xA0B, "X"), - (0xA0F, "V"), - (0xA11, "X"), - (0xA13, "V"), - (0xA29, "X"), - (0xA2A, "V"), - (0xA31, "X"), - (0xA32, "V"), - (0xA33, "M", "ਲ਼"), - (0xA34, "X"), - (0xA35, "V"), - (0xA36, "M", "ਸ਼"), - (0xA37, "X"), - (0xA38, "V"), - (0xA3A, "X"), - (0xA3C, "V"), - (0xA3D, "X"), - (0xA3E, "V"), - (0xA43, "X"), - (0xA47, "V"), - (0xA49, "X"), - (0xA4B, "V"), - (0xA4E, "X"), - (0xA51, "V"), - (0xA52, "X"), - (0xA59, "M", "ਖ਼"), - (0xA5A, "M", "ਗ਼"), - (0xA5B, "M", "ਜ਼"), - (0xA5C, "V"), - (0xA5D, "X"), - ] - - -def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA5E, "M", "ਫ਼"), - (0xA5F, "X"), - (0xA66, "V"), - (0xA77, "X"), - (0xA81, "V"), - (0xA84, "X"), - (0xA85, "V"), - (0xA8E, "X"), - (0xA8F, "V"), - (0xA92, "X"), - (0xA93, "V"), - (0xAA9, "X"), - (0xAAA, "V"), - (0xAB1, "X"), - (0xAB2, "V"), - (0xAB4, "X"), - (0xAB5, "V"), - (0xABA, "X"), - (0xABC, "V"), - (0xAC6, "X"), - (0xAC7, "V"), - (0xACA, "X"), - (0xACB, "V"), - (0xACE, "X"), - (0xAD0, "V"), - (0xAD1, "X"), - (0xAE0, "V"), - (0xAE4, "X"), - (0xAE6, "V"), - (0xAF2, "X"), - 
(0xAF9, "V"), - (0xB00, "X"), - (0xB01, "V"), - (0xB04, "X"), - (0xB05, "V"), - (0xB0D, "X"), - (0xB0F, "V"), - (0xB11, "X"), - (0xB13, "V"), - (0xB29, "X"), - (0xB2A, "V"), - (0xB31, "X"), - (0xB32, "V"), - (0xB34, "X"), - (0xB35, "V"), - (0xB3A, "X"), - (0xB3C, "V"), - (0xB45, "X"), - (0xB47, "V"), - (0xB49, "X"), - (0xB4B, "V"), - (0xB4E, "X"), - (0xB55, "V"), - (0xB58, "X"), - (0xB5C, "M", "ଡ଼"), - (0xB5D, "M", "ଢ଼"), - (0xB5E, "X"), - (0xB5F, "V"), - (0xB64, "X"), - (0xB66, "V"), - (0xB78, "X"), - (0xB82, "V"), - (0xB84, "X"), - (0xB85, "V"), - (0xB8B, "X"), - (0xB8E, "V"), - (0xB91, "X"), - (0xB92, "V"), - (0xB96, "X"), - (0xB99, "V"), - (0xB9B, "X"), - (0xB9C, "V"), - (0xB9D, "X"), - (0xB9E, "V"), - (0xBA0, "X"), - (0xBA3, "V"), - (0xBA5, "X"), - (0xBA8, "V"), - (0xBAB, "X"), - (0xBAE, "V"), - (0xBBA, "X"), - (0xBBE, "V"), - (0xBC3, "X"), - (0xBC6, "V"), - (0xBC9, "X"), - (0xBCA, "V"), - (0xBCE, "X"), - (0xBD0, "V"), - (0xBD1, "X"), - (0xBD7, "V"), - (0xBD8, "X"), - (0xBE6, "V"), - (0xBFB, "X"), - (0xC00, "V"), - (0xC0D, "X"), - (0xC0E, "V"), - (0xC11, "X"), - (0xC12, "V"), - (0xC29, "X"), - (0xC2A, "V"), - ] - - -def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC3A, "X"), - (0xC3C, "V"), - (0xC45, "X"), - (0xC46, "V"), - (0xC49, "X"), - (0xC4A, "V"), - (0xC4E, "X"), - (0xC55, "V"), - (0xC57, "X"), - (0xC58, "V"), - (0xC5B, "X"), - (0xC5D, "V"), - (0xC5E, "X"), - (0xC60, "V"), - (0xC64, "X"), - (0xC66, "V"), - (0xC70, "X"), - (0xC77, "V"), - (0xC8D, "X"), - (0xC8E, "V"), - (0xC91, "X"), - (0xC92, "V"), - (0xCA9, "X"), - (0xCAA, "V"), - (0xCB4, "X"), - (0xCB5, "V"), - (0xCBA, "X"), - (0xCBC, "V"), - (0xCC5, "X"), - (0xCC6, "V"), - (0xCC9, "X"), - (0xCCA, "V"), - (0xCCE, "X"), - (0xCD5, "V"), - (0xCD7, "X"), - (0xCDD, "V"), - (0xCDF, "X"), - (0xCE0, "V"), - (0xCE4, "X"), - (0xCE6, "V"), - (0xCF0, "X"), - (0xCF1, "V"), - (0xCF4, "X"), - (0xD00, "V"), - (0xD0D, "X"), - (0xD0E, "V"), - (0xD11, "X"), - (0xD12, "V"), - (0xD45, "X"), - (0xD46, "V"), - (0xD49, "X"), - (0xD4A, "V"), - (0xD50, "X"), - (0xD54, "V"), - (0xD64, "X"), - (0xD66, "V"), - (0xD80, "X"), - (0xD81, "V"), - (0xD84, "X"), - (0xD85, "V"), - (0xD97, "X"), - (0xD9A, "V"), - (0xDB2, "X"), - (0xDB3, "V"), - (0xDBC, "X"), - (0xDBD, "V"), - (0xDBE, "X"), - (0xDC0, "V"), - (0xDC7, "X"), - (0xDCA, "V"), - (0xDCB, "X"), - (0xDCF, "V"), - (0xDD5, "X"), - (0xDD6, "V"), - (0xDD7, "X"), - (0xDD8, "V"), - (0xDE0, "X"), - (0xDE6, "V"), - (0xDF0, "X"), - (0xDF2, "V"), - (0xDF5, "X"), - (0xE01, "V"), - (0xE33, "M", "ํา"), - (0xE34, "V"), - (0xE3B, "X"), - (0xE3F, "V"), - (0xE5C, "X"), - (0xE81, "V"), - (0xE83, "X"), - (0xE84, "V"), - (0xE85, "X"), - (0xE86, "V"), - (0xE8B, "X"), - (0xE8C, "V"), - (0xEA4, "X"), - (0xEA5, "V"), - (0xEA6, "X"), - (0xEA7, "V"), - (0xEB3, "M", "ໍາ"), - (0xEB4, "V"), - ] - - -def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xEBE, "X"), - (0xEC0, "V"), - (0xEC5, "X"), - (0xEC6, "V"), - (0xEC7, "X"), - (0xEC8, "V"), - (0xECF, "X"), - (0xED0, "V"), - (0xEDA, "X"), - (0xEDC, "M", "ຫນ"), - (0xEDD, "M", "ຫມ"), - (0xEDE, "V"), - (0xEE0, "X"), - (0xF00, "V"), - (0xF0C, "M", "་"), - (0xF0D, "V"), - (0xF43, "M", "གྷ"), - (0xF44, "V"), - (0xF48, "X"), - (0xF49, "V"), - (0xF4D, "M", "ཌྷ"), - (0xF4E, "V"), - (0xF52, "M", "དྷ"), - (0xF53, "V"), - (0xF57, "M", "བྷ"), - (0xF58, "V"), - (0xF5C, "M", "ཛྷ"), - (0xF5D, "V"), - (0xF69, "M", "ཀྵ"), - (0xF6A, "V"), - (0xF6D, "X"), - (0xF71, "V"), - (0xF73, "M", "ཱི"), - (0xF74, "V"), - (0xF75, "M", "ཱུ"), - (0xF76, 
"M", "ྲྀ"), - (0xF77, "M", "ྲཱྀ"), - (0xF78, "M", "ླྀ"), - (0xF79, "M", "ླཱྀ"), - (0xF7A, "V"), - (0xF81, "M", "ཱྀ"), - (0xF82, "V"), - (0xF93, "M", "ྒྷ"), - (0xF94, "V"), - (0xF98, "X"), - (0xF99, "V"), - (0xF9D, "M", "ྜྷ"), - (0xF9E, "V"), - (0xFA2, "M", "ྡྷ"), - (0xFA3, "V"), - (0xFA7, "M", "ྦྷ"), - (0xFA8, "V"), - (0xFAC, "M", "ྫྷ"), - (0xFAD, "V"), - (0xFB9, "M", "ྐྵ"), - (0xFBA, "V"), - (0xFBD, "X"), - (0xFBE, "V"), - (0xFCD, "X"), - (0xFCE, "V"), - (0xFDB, "X"), - (0x1000, "V"), - (0x10A0, "M", "ⴀ"), - (0x10A1, "M", "ⴁ"), - (0x10A2, "M", "ⴂ"), - (0x10A3, "M", "ⴃ"), - (0x10A4, "M", "ⴄ"), - (0x10A5, "M", "ⴅ"), - (0x10A6, "M", "ⴆ"), - (0x10A7, "M", "ⴇ"), - (0x10A8, "M", "ⴈ"), - (0x10A9, "M", "ⴉ"), - (0x10AA, "M", "ⴊ"), - (0x10AB, "M", "ⴋ"), - (0x10AC, "M", "ⴌ"), - (0x10AD, "M", "ⴍ"), - (0x10AE, "M", "ⴎ"), - (0x10AF, "M", "ⴏ"), - (0x10B0, "M", "ⴐ"), - (0x10B1, "M", "ⴑ"), - (0x10B2, "M", "ⴒ"), - (0x10B3, "M", "ⴓ"), - (0x10B4, "M", "ⴔ"), - (0x10B5, "M", "ⴕ"), - (0x10B6, "M", "ⴖ"), - (0x10B7, "M", "ⴗ"), - (0x10B8, "M", "ⴘ"), - (0x10B9, "M", "ⴙ"), - (0x10BA, "M", "ⴚ"), - (0x10BB, "M", "ⴛ"), - (0x10BC, "M", "ⴜ"), - (0x10BD, "M", "ⴝ"), - (0x10BE, "M", "ⴞ"), - (0x10BF, "M", "ⴟ"), - (0x10C0, "M", "ⴠ"), - (0x10C1, "M", "ⴡ"), - (0x10C2, "M", "ⴢ"), - (0x10C3, "M", "ⴣ"), - (0x10C4, "M", "ⴤ"), - (0x10C5, "M", "ⴥ"), - ] - - -def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10C6, "X"), - (0x10C7, "M", "ⴧ"), - (0x10C8, "X"), - (0x10CD, "M", "ⴭ"), - (0x10CE, "X"), - (0x10D0, "V"), - (0x10FC, "M", "ნ"), - (0x10FD, "V"), - (0x115F, "I"), - (0x1161, "V"), - (0x1249, "X"), - (0x124A, "V"), - (0x124E, "X"), - (0x1250, "V"), - (0x1257, "X"), - (0x1258, "V"), - (0x1259, "X"), - (0x125A, "V"), - (0x125E, "X"), - (0x1260, "V"), - (0x1289, "X"), - (0x128A, "V"), - (0x128E, "X"), - (0x1290, "V"), - (0x12B1, "X"), - (0x12B2, "V"), - (0x12B6, "X"), - (0x12B8, "V"), - (0x12BF, "X"), - (0x12C0, "V"), - (0x12C1, "X"), - (0x12C2, "V"), - (0x12C6, "X"), - (0x12C8, "V"), - (0x12D7, "X"), - (0x12D8, "V"), - (0x1311, "X"), - (0x1312, "V"), - (0x1316, "X"), - (0x1318, "V"), - (0x135B, "X"), - (0x135D, "V"), - (0x137D, "X"), - (0x1380, "V"), - (0x139A, "X"), - (0x13A0, "V"), - (0x13F6, "X"), - (0x13F8, "M", "Ᏸ"), - (0x13F9, "M", "Ᏹ"), - (0x13FA, "M", "Ᏺ"), - (0x13FB, "M", "Ᏻ"), - (0x13FC, "M", "Ᏼ"), - (0x13FD, "M", "Ᏽ"), - (0x13FE, "X"), - (0x1400, "V"), - (0x1680, "X"), - (0x1681, "V"), - (0x169D, "X"), - (0x16A0, "V"), - (0x16F9, "X"), - (0x1700, "V"), - (0x1716, "X"), - (0x171F, "V"), - (0x1737, "X"), - (0x1740, "V"), - (0x1754, "X"), - (0x1760, "V"), - (0x176D, "X"), - (0x176E, "V"), - (0x1771, "X"), - (0x1772, "V"), - (0x1774, "X"), - (0x1780, "V"), - (0x17B4, "I"), - (0x17B6, "V"), - (0x17DE, "X"), - (0x17E0, "V"), - (0x17EA, "X"), - (0x17F0, "V"), - (0x17FA, "X"), - (0x1800, "V"), - (0x180B, "I"), - (0x1810, "V"), - (0x181A, "X"), - (0x1820, "V"), - (0x1879, "X"), - (0x1880, "V"), - (0x18AB, "X"), - (0x18B0, "V"), - (0x18F6, "X"), - (0x1900, "V"), - (0x191F, "X"), - (0x1920, "V"), - (0x192C, "X"), - (0x1930, "V"), - (0x193C, "X"), - (0x1940, "V"), - (0x1941, "X"), - (0x1944, "V"), - (0x196E, "X"), - ] - - -def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1970, "V"), - (0x1975, "X"), - (0x1980, "V"), - (0x19AC, "X"), - (0x19B0, "V"), - (0x19CA, "X"), - (0x19D0, "V"), - (0x19DB, "X"), - (0x19DE, "V"), - (0x1A1C, "X"), - (0x1A1E, "V"), - (0x1A5F, "X"), - (0x1A60, "V"), - (0x1A7D, "X"), - (0x1A7F, "V"), - (0x1A8A, "X"), - (0x1A90, "V"), - (0x1A9A, 
"X"), - (0x1AA0, "V"), - (0x1AAE, "X"), - (0x1AB0, "V"), - (0x1ACF, "X"), - (0x1B00, "V"), - (0x1B4D, "X"), - (0x1B4E, "V"), - (0x1BF4, "X"), - (0x1BFC, "V"), - (0x1C38, "X"), - (0x1C3B, "V"), - (0x1C4A, "X"), - (0x1C4D, "V"), - (0x1C80, "M", "в"), - (0x1C81, "M", "д"), - (0x1C82, "M", "о"), - (0x1C83, "M", "с"), - (0x1C84, "M", "т"), - (0x1C86, "M", "ъ"), - (0x1C87, "M", "ѣ"), - (0x1C88, "M", "ꙋ"), - (0x1C89, "M", "ᲊ"), - (0x1C8A, "V"), - (0x1C8B, "X"), - (0x1C90, "M", "ა"), - (0x1C91, "M", "ბ"), - (0x1C92, "M", "გ"), - (0x1C93, "M", "დ"), - (0x1C94, "M", "ე"), - (0x1C95, "M", "ვ"), - (0x1C96, "M", "ზ"), - (0x1C97, "M", "თ"), - (0x1C98, "M", "ი"), - (0x1C99, "M", "კ"), - (0x1C9A, "M", "ლ"), - (0x1C9B, "M", "მ"), - (0x1C9C, "M", "ნ"), - (0x1C9D, "M", "ო"), - (0x1C9E, "M", "პ"), - (0x1C9F, "M", "ჟ"), - (0x1CA0, "M", "რ"), - (0x1CA1, "M", "ს"), - (0x1CA2, "M", "ტ"), - (0x1CA3, "M", "უ"), - (0x1CA4, "M", "ფ"), - (0x1CA5, "M", "ქ"), - (0x1CA6, "M", "ღ"), - (0x1CA7, "M", "ყ"), - (0x1CA8, "M", "შ"), - (0x1CA9, "M", "ჩ"), - (0x1CAA, "M", "ც"), - (0x1CAB, "M", "ძ"), - (0x1CAC, "M", "წ"), - (0x1CAD, "M", "ჭ"), - (0x1CAE, "M", "ხ"), - (0x1CAF, "M", "ჯ"), - (0x1CB0, "M", "ჰ"), - (0x1CB1, "M", "ჱ"), - (0x1CB2, "M", "ჲ"), - (0x1CB3, "M", "ჳ"), - (0x1CB4, "M", "ჴ"), - (0x1CB5, "M", "ჵ"), - (0x1CB6, "M", "ჶ"), - (0x1CB7, "M", "ჷ"), - (0x1CB8, "M", "ჸ"), - (0x1CB9, "M", "ჹ"), - (0x1CBA, "M", "ჺ"), - (0x1CBB, "X"), - (0x1CBD, "M", "ჽ"), - (0x1CBE, "M", "ჾ"), - (0x1CBF, "M", "ჿ"), - (0x1CC0, "V"), - (0x1CC8, "X"), - (0x1CD0, "V"), - (0x1CFB, "X"), - (0x1D00, "V"), - (0x1D2C, "M", "a"), - (0x1D2D, "M", "æ"), - (0x1D2E, "M", "b"), - (0x1D2F, "V"), - (0x1D30, "M", "d"), - (0x1D31, "M", "e"), - ] - - -def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D32, "M", "ǝ"), - (0x1D33, "M", "g"), - (0x1D34, "M", "h"), - (0x1D35, "M", "i"), - (0x1D36, "M", "j"), - (0x1D37, "M", "k"), - (0x1D38, "M", "l"), - (0x1D39, "M", "m"), - (0x1D3A, "M", "n"), - (0x1D3B, "V"), - (0x1D3C, "M", "o"), - (0x1D3D, "M", "ȣ"), - (0x1D3E, "M", "p"), - (0x1D3F, "M", "r"), - (0x1D40, "M", "t"), - (0x1D41, "M", "u"), - (0x1D42, "M", "w"), - (0x1D43, "M", "a"), - (0x1D44, "M", "ɐ"), - (0x1D45, "M", "ɑ"), - (0x1D46, "M", "ᴂ"), - (0x1D47, "M", "b"), - (0x1D48, "M", "d"), - (0x1D49, "M", "e"), - (0x1D4A, "M", "ə"), - (0x1D4B, "M", "ɛ"), - (0x1D4C, "M", "ɜ"), - (0x1D4D, "M", "g"), - (0x1D4E, "V"), - (0x1D4F, "M", "k"), - (0x1D50, "M", "m"), - (0x1D51, "M", "ŋ"), - (0x1D52, "M", "o"), - (0x1D53, "M", "ɔ"), - (0x1D54, "M", "ᴖ"), - (0x1D55, "M", "ᴗ"), - (0x1D56, "M", "p"), - (0x1D57, "M", "t"), - (0x1D58, "M", "u"), - (0x1D59, "M", "ᴝ"), - (0x1D5A, "M", "ɯ"), - (0x1D5B, "M", "v"), - (0x1D5C, "M", "ᴥ"), - (0x1D5D, "M", "β"), - (0x1D5E, "M", "γ"), - (0x1D5F, "M", "δ"), - (0x1D60, "M", "φ"), - (0x1D61, "M", "χ"), - (0x1D62, "M", "i"), - (0x1D63, "M", "r"), - (0x1D64, "M", "u"), - (0x1D65, "M", "v"), - (0x1D66, "M", "β"), - (0x1D67, "M", "γ"), - (0x1D68, "M", "ρ"), - (0x1D69, "M", "φ"), - (0x1D6A, "M", "χ"), - (0x1D6B, "V"), - (0x1D78, "M", "н"), - (0x1D79, "V"), - (0x1D9B, "M", "ɒ"), - (0x1D9C, "M", "c"), - (0x1D9D, "M", "ɕ"), - (0x1D9E, "M", "ð"), - (0x1D9F, "M", "ɜ"), - (0x1DA0, "M", "f"), - (0x1DA1, "M", "ɟ"), - (0x1DA2, "M", "ɡ"), - (0x1DA3, "M", "ɥ"), - (0x1DA4, "M", "ɨ"), - (0x1DA5, "M", "ɩ"), - (0x1DA6, "M", "ɪ"), - (0x1DA7, "M", "ᵻ"), - (0x1DA8, "M", "ʝ"), - (0x1DA9, "M", "ɭ"), - (0x1DAA, "M", "ᶅ"), - (0x1DAB, "M", "ʟ"), - (0x1DAC, "M", "ɱ"), - (0x1DAD, "M", "ɰ"), - (0x1DAE, "M", "ɲ"), - (0x1DAF, "M", "ɳ"), - 
(0x1DB0, "M", "ɴ"), - (0x1DB1, "M", "ɵ"), - (0x1DB2, "M", "ɸ"), - (0x1DB3, "M", "ʂ"), - (0x1DB4, "M", "ʃ"), - (0x1DB5, "M", "ƫ"), - (0x1DB6, "M", "ʉ"), - (0x1DB7, "M", "ʊ"), - (0x1DB8, "M", "ᴜ"), - (0x1DB9, "M", "ʋ"), - (0x1DBA, "M", "ʌ"), - (0x1DBB, "M", "z"), - (0x1DBC, "M", "ʐ"), - (0x1DBD, "M", "ʑ"), - (0x1DBE, "M", "ʒ"), - (0x1DBF, "M", "θ"), - (0x1DC0, "V"), - (0x1E00, "M", "ḁ"), - (0x1E01, "V"), - ] - - -def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E02, "M", "ḃ"), - (0x1E03, "V"), - (0x1E04, "M", "ḅ"), - (0x1E05, "V"), - (0x1E06, "M", "ḇ"), - (0x1E07, "V"), - (0x1E08, "M", "ḉ"), - (0x1E09, "V"), - (0x1E0A, "M", "ḋ"), - (0x1E0B, "V"), - (0x1E0C, "M", "ḍ"), - (0x1E0D, "V"), - (0x1E0E, "M", "ḏ"), - (0x1E0F, "V"), - (0x1E10, "M", "ḑ"), - (0x1E11, "V"), - (0x1E12, "M", "ḓ"), - (0x1E13, "V"), - (0x1E14, "M", "ḕ"), - (0x1E15, "V"), - (0x1E16, "M", "ḗ"), - (0x1E17, "V"), - (0x1E18, "M", "ḙ"), - (0x1E19, "V"), - (0x1E1A, "M", "ḛ"), - (0x1E1B, "V"), - (0x1E1C, "M", "ḝ"), - (0x1E1D, "V"), - (0x1E1E, "M", "ḟ"), - (0x1E1F, "V"), - (0x1E20, "M", "ḡ"), - (0x1E21, "V"), - (0x1E22, "M", "ḣ"), - (0x1E23, "V"), - (0x1E24, "M", "ḥ"), - (0x1E25, "V"), - (0x1E26, "M", "ḧ"), - (0x1E27, "V"), - (0x1E28, "M", "ḩ"), - (0x1E29, "V"), - (0x1E2A, "M", "ḫ"), - (0x1E2B, "V"), - (0x1E2C, "M", "ḭ"), - (0x1E2D, "V"), - (0x1E2E, "M", "ḯ"), - (0x1E2F, "V"), - (0x1E30, "M", "ḱ"), - (0x1E31, "V"), - (0x1E32, "M", "ḳ"), - (0x1E33, "V"), - (0x1E34, "M", "ḵ"), - (0x1E35, "V"), - (0x1E36, "M", "ḷ"), - (0x1E37, "V"), - (0x1E38, "M", "ḹ"), - (0x1E39, "V"), - (0x1E3A, "M", "ḻ"), - (0x1E3B, "V"), - (0x1E3C, "M", "ḽ"), - (0x1E3D, "V"), - (0x1E3E, "M", "ḿ"), - (0x1E3F, "V"), - (0x1E40, "M", "ṁ"), - (0x1E41, "V"), - (0x1E42, "M", "ṃ"), - (0x1E43, "V"), - (0x1E44, "M", "ṅ"), - (0x1E45, "V"), - (0x1E46, "M", "ṇ"), - (0x1E47, "V"), - (0x1E48, "M", "ṉ"), - (0x1E49, "V"), - (0x1E4A, "M", "ṋ"), - (0x1E4B, "V"), - (0x1E4C, "M", "ṍ"), - (0x1E4D, "V"), - (0x1E4E, "M", "ṏ"), - (0x1E4F, "V"), - (0x1E50, "M", "ṑ"), - (0x1E51, "V"), - (0x1E52, "M", "ṓ"), - (0x1E53, "V"), - (0x1E54, "M", "ṕ"), - (0x1E55, "V"), - (0x1E56, "M", "ṗ"), - (0x1E57, "V"), - (0x1E58, "M", "ṙ"), - (0x1E59, "V"), - (0x1E5A, "M", "ṛ"), - (0x1E5B, "V"), - (0x1E5C, "M", "ṝ"), - (0x1E5D, "V"), - (0x1E5E, "M", "ṟ"), - (0x1E5F, "V"), - (0x1E60, "M", "ṡ"), - (0x1E61, "V"), - (0x1E62, "M", "ṣ"), - (0x1E63, "V"), - (0x1E64, "M", "ṥ"), - (0x1E65, "V"), - ] - - -def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E66, "M", "ṧ"), - (0x1E67, "V"), - (0x1E68, "M", "ṩ"), - (0x1E69, "V"), - (0x1E6A, "M", "ṫ"), - (0x1E6B, "V"), - (0x1E6C, "M", "ṭ"), - (0x1E6D, "V"), - (0x1E6E, "M", "ṯ"), - (0x1E6F, "V"), - (0x1E70, "M", "ṱ"), - (0x1E71, "V"), - (0x1E72, "M", "ṳ"), - (0x1E73, "V"), - (0x1E74, "M", "ṵ"), - (0x1E75, "V"), - (0x1E76, "M", "ṷ"), - (0x1E77, "V"), - (0x1E78, "M", "ṹ"), - (0x1E79, "V"), - (0x1E7A, "M", "ṻ"), - (0x1E7B, "V"), - (0x1E7C, "M", "ṽ"), - (0x1E7D, "V"), - (0x1E7E, "M", "ṿ"), - (0x1E7F, "V"), - (0x1E80, "M", "ẁ"), - (0x1E81, "V"), - (0x1E82, "M", "ẃ"), - (0x1E83, "V"), - (0x1E84, "M", "ẅ"), - (0x1E85, "V"), - (0x1E86, "M", "ẇ"), - (0x1E87, "V"), - (0x1E88, "M", "ẉ"), - (0x1E89, "V"), - (0x1E8A, "M", "ẋ"), - (0x1E8B, "V"), - (0x1E8C, "M", "ẍ"), - (0x1E8D, "V"), - (0x1E8E, "M", "ẏ"), - (0x1E8F, "V"), - (0x1E90, "M", "ẑ"), - (0x1E91, "V"), - (0x1E92, "M", "ẓ"), - (0x1E93, "V"), - (0x1E94, "M", "ẕ"), - (0x1E95, "V"), - (0x1E9A, "M", "aʾ"), - (0x1E9B, "M", "ṡ"), - (0x1E9C, "V"), - (0x1E9E, "M", "ß"), 
- (0x1E9F, "V"), - (0x1EA0, "M", "ạ"), - (0x1EA1, "V"), - (0x1EA2, "M", "ả"), - (0x1EA3, "V"), - (0x1EA4, "M", "ấ"), - (0x1EA5, "V"), - (0x1EA6, "M", "ầ"), - (0x1EA7, "V"), - (0x1EA8, "M", "ẩ"), - (0x1EA9, "V"), - (0x1EAA, "M", "ẫ"), - (0x1EAB, "V"), - (0x1EAC, "M", "ậ"), - (0x1EAD, "V"), - (0x1EAE, "M", "ắ"), - (0x1EAF, "V"), - (0x1EB0, "M", "ằ"), - (0x1EB1, "V"), - (0x1EB2, "M", "ẳ"), - (0x1EB3, "V"), - (0x1EB4, "M", "ẵ"), - (0x1EB5, "V"), - (0x1EB6, "M", "ặ"), - (0x1EB7, "V"), - (0x1EB8, "M", "ẹ"), - (0x1EB9, "V"), - (0x1EBA, "M", "ẻ"), - (0x1EBB, "V"), - (0x1EBC, "M", "ẽ"), - (0x1EBD, "V"), - (0x1EBE, "M", "ế"), - (0x1EBF, "V"), - (0x1EC0, "M", "ề"), - (0x1EC1, "V"), - (0x1EC2, "M", "ể"), - (0x1EC3, "V"), - (0x1EC4, "M", "ễ"), - (0x1EC5, "V"), - (0x1EC6, "M", "ệ"), - (0x1EC7, "V"), - (0x1EC8, "M", "ỉ"), - (0x1EC9, "V"), - (0x1ECA, "M", "ị"), - (0x1ECB, "V"), - (0x1ECC, "M", "ọ"), - (0x1ECD, "V"), - (0x1ECE, "M", "ỏ"), - ] - - -def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1ECF, "V"), - (0x1ED0, "M", "ố"), - (0x1ED1, "V"), - (0x1ED2, "M", "ồ"), - (0x1ED3, "V"), - (0x1ED4, "M", "ổ"), - (0x1ED5, "V"), - (0x1ED6, "M", "ỗ"), - (0x1ED7, "V"), - (0x1ED8, "M", "ộ"), - (0x1ED9, "V"), - (0x1EDA, "M", "ớ"), - (0x1EDB, "V"), - (0x1EDC, "M", "ờ"), - (0x1EDD, "V"), - (0x1EDE, "M", "ở"), - (0x1EDF, "V"), - (0x1EE0, "M", "ỡ"), - (0x1EE1, "V"), - (0x1EE2, "M", "ợ"), - (0x1EE3, "V"), - (0x1EE4, "M", "ụ"), - (0x1EE5, "V"), - (0x1EE6, "M", "ủ"), - (0x1EE7, "V"), - (0x1EE8, "M", "ứ"), - (0x1EE9, "V"), - (0x1EEA, "M", "ừ"), - (0x1EEB, "V"), - (0x1EEC, "M", "ử"), - (0x1EED, "V"), - (0x1EEE, "M", "ữ"), - (0x1EEF, "V"), - (0x1EF0, "M", "ự"), - (0x1EF1, "V"), - (0x1EF2, "M", "ỳ"), - (0x1EF3, "V"), - (0x1EF4, "M", "ỵ"), - (0x1EF5, "V"), - (0x1EF6, "M", "ỷ"), - (0x1EF7, "V"), - (0x1EF8, "M", "ỹ"), - (0x1EF9, "V"), - (0x1EFA, "M", "ỻ"), - (0x1EFB, "V"), - (0x1EFC, "M", "ỽ"), - (0x1EFD, "V"), - (0x1EFE, "M", "ỿ"), - (0x1EFF, "V"), - (0x1F08, "M", "ἀ"), - (0x1F09, "M", "ἁ"), - (0x1F0A, "M", "ἂ"), - (0x1F0B, "M", "ἃ"), - (0x1F0C, "M", "ἄ"), - (0x1F0D, "M", "ἅ"), - (0x1F0E, "M", "ἆ"), - (0x1F0F, "M", "ἇ"), - (0x1F10, "V"), - (0x1F16, "X"), - (0x1F18, "M", "ἐ"), - (0x1F19, "M", "ἑ"), - (0x1F1A, "M", "ἒ"), - (0x1F1B, "M", "ἓ"), - (0x1F1C, "M", "ἔ"), - (0x1F1D, "M", "ἕ"), - (0x1F1E, "X"), - (0x1F20, "V"), - (0x1F28, "M", "ἠ"), - (0x1F29, "M", "ἡ"), - (0x1F2A, "M", "ἢ"), - (0x1F2B, "M", "ἣ"), - (0x1F2C, "M", "ἤ"), - (0x1F2D, "M", "ἥ"), - (0x1F2E, "M", "ἦ"), - (0x1F2F, "M", "ἧ"), - (0x1F30, "V"), - (0x1F38, "M", "ἰ"), - (0x1F39, "M", "ἱ"), - (0x1F3A, "M", "ἲ"), - (0x1F3B, "M", "ἳ"), - (0x1F3C, "M", "ἴ"), - (0x1F3D, "M", "ἵ"), - (0x1F3E, "M", "ἶ"), - (0x1F3F, "M", "ἷ"), - (0x1F40, "V"), - (0x1F46, "X"), - (0x1F48, "M", "ὀ"), - (0x1F49, "M", "ὁ"), - (0x1F4A, "M", "ὂ"), - (0x1F4B, "M", "ὃ"), - (0x1F4C, "M", "ὄ"), - (0x1F4D, "M", "ὅ"), - (0x1F4E, "X"), - (0x1F50, "V"), - (0x1F58, "X"), - (0x1F59, "M", "ὑ"), - (0x1F5A, "X"), - (0x1F5B, "M", "ὓ"), - (0x1F5C, "X"), - (0x1F5D, "M", "ὕ"), - ] - - -def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F5E, "X"), - (0x1F5F, "M", "ὗ"), - (0x1F60, "V"), - (0x1F68, "M", "ὠ"), - (0x1F69, "M", "ὡ"), - (0x1F6A, "M", "ὢ"), - (0x1F6B, "M", "ὣ"), - (0x1F6C, "M", "ὤ"), - (0x1F6D, "M", "ὥ"), - (0x1F6E, "M", "ὦ"), - (0x1F6F, "M", "ὧ"), - (0x1F70, "V"), - (0x1F71, "M", "ά"), - (0x1F72, "V"), - (0x1F73, "M", "έ"), - (0x1F74, "V"), - (0x1F75, "M", "ή"), - (0x1F76, "V"), - (0x1F77, "M", "ί"), - (0x1F78, "V"), - (0x1F79, "M", 
"ό"), - (0x1F7A, "V"), - (0x1F7B, "M", "ύ"), - (0x1F7C, "V"), - (0x1F7D, "M", "ώ"), - (0x1F7E, "X"), - (0x1F80, "M", "ἀι"), - (0x1F81, "M", "ἁι"), - (0x1F82, "M", "ἂι"), - (0x1F83, "M", "ἃι"), - (0x1F84, "M", "ἄι"), - (0x1F85, "M", "ἅι"), - (0x1F86, "M", "ἆι"), - (0x1F87, "M", "ἇι"), - (0x1F88, "M", "ἀι"), - (0x1F89, "M", "ἁι"), - (0x1F8A, "M", "ἂι"), - (0x1F8B, "M", "ἃι"), - (0x1F8C, "M", "ἄι"), - (0x1F8D, "M", "ἅι"), - (0x1F8E, "M", "ἆι"), - (0x1F8F, "M", "ἇι"), - (0x1F90, "M", "ἠι"), - (0x1F91, "M", "ἡι"), - (0x1F92, "M", "ἢι"), - (0x1F93, "M", "ἣι"), - (0x1F94, "M", "ἤι"), - (0x1F95, "M", "ἥι"), - (0x1F96, "M", "ἦι"), - (0x1F97, "M", "ἧι"), - (0x1F98, "M", "ἠι"), - (0x1F99, "M", "ἡι"), - (0x1F9A, "M", "ἢι"), - (0x1F9B, "M", "ἣι"), - (0x1F9C, "M", "ἤι"), - (0x1F9D, "M", "ἥι"), - (0x1F9E, "M", "ἦι"), - (0x1F9F, "M", "ἧι"), - (0x1FA0, "M", "ὠι"), - (0x1FA1, "M", "ὡι"), - (0x1FA2, "M", "ὢι"), - (0x1FA3, "M", "ὣι"), - (0x1FA4, "M", "ὤι"), - (0x1FA5, "M", "ὥι"), - (0x1FA6, "M", "ὦι"), - (0x1FA7, "M", "ὧι"), - (0x1FA8, "M", "ὠι"), - (0x1FA9, "M", "ὡι"), - (0x1FAA, "M", "ὢι"), - (0x1FAB, "M", "ὣι"), - (0x1FAC, "M", "ὤι"), - (0x1FAD, "M", "ὥι"), - (0x1FAE, "M", "ὦι"), - (0x1FAF, "M", "ὧι"), - (0x1FB0, "V"), - (0x1FB2, "M", "ὰι"), - (0x1FB3, "M", "αι"), - (0x1FB4, "M", "άι"), - (0x1FB5, "X"), - (0x1FB6, "V"), - (0x1FB7, "M", "ᾶι"), - (0x1FB8, "M", "ᾰ"), - (0x1FB9, "M", "ᾱ"), - (0x1FBA, "M", "ὰ"), - (0x1FBB, "M", "ά"), - (0x1FBC, "M", "αι"), - (0x1FBD, "M", " ̓"), - (0x1FBE, "M", "ι"), - (0x1FBF, "M", " ̓"), - (0x1FC0, "M", " ͂"), - (0x1FC1, "M", " ̈͂"), - (0x1FC2, "M", "ὴι"), - (0x1FC3, "M", "ηι"), - (0x1FC4, "M", "ήι"), - (0x1FC5, "X"), - (0x1FC6, "V"), - (0x1FC7, "M", "ῆι"), - (0x1FC8, "M", "ὲ"), - (0x1FC9, "M", "έ"), - (0x1FCA, "M", "ὴ"), - ] - - -def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FCB, "M", "ή"), - (0x1FCC, "M", "ηι"), - (0x1FCD, "M", " ̓̀"), - (0x1FCE, "M", " ̓́"), - (0x1FCF, "M", " ̓͂"), - (0x1FD0, "V"), - (0x1FD3, "M", "ΐ"), - (0x1FD4, "X"), - (0x1FD6, "V"), - (0x1FD8, "M", "ῐ"), - (0x1FD9, "M", "ῑ"), - (0x1FDA, "M", "ὶ"), - (0x1FDB, "M", "ί"), - (0x1FDC, "X"), - (0x1FDD, "M", " ̔̀"), - (0x1FDE, "M", " ̔́"), - (0x1FDF, "M", " ̔͂"), - (0x1FE0, "V"), - (0x1FE3, "M", "ΰ"), - (0x1FE4, "V"), - (0x1FE8, "M", "ῠ"), - (0x1FE9, "M", "ῡ"), - (0x1FEA, "M", "ὺ"), - (0x1FEB, "M", "ύ"), - (0x1FEC, "M", "ῥ"), - (0x1FED, "M", " ̈̀"), - (0x1FEE, "M", " ̈́"), - (0x1FEF, "M", "`"), - (0x1FF0, "X"), - (0x1FF2, "M", "ὼι"), - (0x1FF3, "M", "ωι"), - (0x1FF4, "M", "ώι"), - (0x1FF5, "X"), - (0x1FF6, "V"), - (0x1FF7, "M", "ῶι"), - (0x1FF8, "M", "ὸ"), - (0x1FF9, "M", "ό"), - (0x1FFA, "M", "ὼ"), - (0x1FFB, "M", "ώ"), - (0x1FFC, "M", "ωι"), - (0x1FFD, "M", " ́"), - (0x1FFE, "M", " ̔"), - (0x1FFF, "X"), - (0x2000, "M", " "), - (0x200B, "I"), - (0x200C, "D", ""), - (0x200E, "X"), - (0x2010, "V"), - (0x2011, "M", "‐"), - (0x2012, "V"), - (0x2017, "M", " ̳"), - (0x2018, "V"), - (0x2024, "X"), - (0x2027, "V"), - (0x2028, "X"), - (0x202F, "M", " "), - (0x2030, "V"), - (0x2033, "M", "′′"), - (0x2034, "M", "′′′"), - (0x2035, "V"), - (0x2036, "M", "‵‵"), - (0x2037, "M", "‵‵‵"), - (0x2038, "V"), - (0x203C, "M", "!!"), - (0x203D, "V"), - (0x203E, "M", " ̅"), - (0x203F, "V"), - (0x2047, "M", "??"), - (0x2048, "M", "?!"), - (0x2049, "M", "!?"), - (0x204A, "V"), - (0x2057, "M", "′′′′"), - (0x2058, "V"), - (0x205F, "M", " "), - (0x2060, "I"), - (0x2065, "X"), - (0x206A, "I"), - (0x2070, "M", "0"), - (0x2071, "M", "i"), - (0x2072, "X"), - (0x2074, "M", "4"), - (0x2075, "M", "5"), - 
(0x2076, "M", "6"), - (0x2077, "M", "7"), - (0x2078, "M", "8"), - (0x2079, "M", "9"), - (0x207A, "M", "+"), - (0x207B, "M", "−"), - (0x207C, "M", "="), - (0x207D, "M", "("), - (0x207E, "M", ")"), - (0x207F, "M", "n"), - (0x2080, "M", "0"), - (0x2081, "M", "1"), - (0x2082, "M", "2"), - (0x2083, "M", "3"), - (0x2084, "M", "4"), - (0x2085, "M", "5"), - (0x2086, "M", "6"), - (0x2087, "M", "7"), - ] - - -def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2088, "M", "8"), - (0x2089, "M", "9"), - (0x208A, "M", "+"), - (0x208B, "M", "−"), - (0x208C, "M", "="), - (0x208D, "M", "("), - (0x208E, "M", ")"), - (0x208F, "X"), - (0x2090, "M", "a"), - (0x2091, "M", "e"), - (0x2092, "M", "o"), - (0x2093, "M", "x"), - (0x2094, "M", "ə"), - (0x2095, "M", "h"), - (0x2096, "M", "k"), - (0x2097, "M", "l"), - (0x2098, "M", "m"), - (0x2099, "M", "n"), - (0x209A, "M", "p"), - (0x209B, "M", "s"), - (0x209C, "M", "t"), - (0x209D, "X"), - (0x20A0, "V"), - (0x20A8, "M", "rs"), - (0x20A9, "V"), - (0x20C1, "X"), - (0x20D0, "V"), - (0x20F1, "X"), - (0x2100, "M", "a/c"), - (0x2101, "M", "a/s"), - (0x2102, "M", "c"), - (0x2103, "M", "°c"), - (0x2104, "V"), - (0x2105, "M", "c/o"), - (0x2106, "M", "c/u"), - (0x2107, "M", "ɛ"), - (0x2108, "V"), - (0x2109, "M", "°f"), - (0x210A, "M", "g"), - (0x210B, "M", "h"), - (0x210F, "M", "ħ"), - (0x2110, "M", "i"), - (0x2112, "M", "l"), - (0x2114, "V"), - (0x2115, "M", "n"), - (0x2116, "M", "no"), - (0x2117, "V"), - (0x2119, "M", "p"), - (0x211A, "M", "q"), - (0x211B, "M", "r"), - (0x211E, "V"), - (0x2120, "M", "sm"), - (0x2121, "M", "tel"), - (0x2122, "M", "tm"), - (0x2123, "V"), - (0x2124, "M", "z"), - (0x2125, "V"), - (0x2126, "M", "ω"), - (0x2127, "V"), - (0x2128, "M", "z"), - (0x2129, "V"), - (0x212A, "M", "k"), - (0x212B, "M", "å"), - (0x212C, "M", "b"), - (0x212D, "M", "c"), - (0x212E, "V"), - (0x212F, "M", "e"), - (0x2131, "M", "f"), - (0x2132, "M", "ⅎ"), - (0x2133, "M", "m"), - (0x2134, "M", "o"), - (0x2135, "M", "א"), - (0x2136, "M", "ב"), - (0x2137, "M", "ג"), - (0x2138, "M", "ד"), - (0x2139, "M", "i"), - (0x213A, "V"), - (0x213B, "M", "fax"), - (0x213C, "M", "π"), - (0x213D, "M", "γ"), - (0x213F, "M", "π"), - (0x2140, "M", "∑"), - (0x2141, "V"), - (0x2145, "M", "d"), - (0x2147, "M", "e"), - (0x2148, "M", "i"), - (0x2149, "M", "j"), - (0x214A, "V"), - (0x2150, "M", "1⁄7"), - (0x2151, "M", "1⁄9"), - (0x2152, "M", "1⁄10"), - (0x2153, "M", "1⁄3"), - (0x2154, "M", "2⁄3"), - (0x2155, "M", "1⁄5"), - (0x2156, "M", "2⁄5"), - (0x2157, "M", "3⁄5"), - (0x2158, "M", "4⁄5"), - (0x2159, "M", "1⁄6"), - (0x215A, "M", "5⁄6"), - (0x215B, "M", "1⁄8"), - ] - - -def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x215C, "M", "3⁄8"), - (0x215D, "M", "5⁄8"), - (0x215E, "M", "7⁄8"), - (0x215F, "M", "1⁄"), - (0x2160, "M", "i"), - (0x2161, "M", "ii"), - (0x2162, "M", "iii"), - (0x2163, "M", "iv"), - (0x2164, "M", "v"), - (0x2165, "M", "vi"), - (0x2166, "M", "vii"), - (0x2167, "M", "viii"), - (0x2168, "M", "ix"), - (0x2169, "M", "x"), - (0x216A, "M", "xi"), - (0x216B, "M", "xii"), - (0x216C, "M", "l"), - (0x216D, "M", "c"), - (0x216E, "M", "d"), - (0x216F, "M", "m"), - (0x2170, "M", "i"), - (0x2171, "M", "ii"), - (0x2172, "M", "iii"), - (0x2173, "M", "iv"), - (0x2174, "M", "v"), - (0x2175, "M", "vi"), - (0x2176, "M", "vii"), - (0x2177, "M", "viii"), - (0x2178, "M", "ix"), - (0x2179, "M", "x"), - (0x217A, "M", "xi"), - (0x217B, "M", "xii"), - (0x217C, "M", "l"), - (0x217D, "M", "c"), - (0x217E, "M", "d"), - (0x217F, "M", "m"), - (0x2180, 
"V"), - (0x2183, "M", "ↄ"), - (0x2184, "V"), - (0x2189, "M", "0⁄3"), - (0x218A, "V"), - (0x218C, "X"), - (0x2190, "V"), - (0x222C, "M", "∫∫"), - (0x222D, "M", "∫∫∫"), - (0x222E, "V"), - (0x222F, "M", "∮∮"), - (0x2230, "M", "∮∮∮"), - (0x2231, "V"), - (0x2329, "M", "〈"), - (0x232A, "M", "〉"), - (0x232B, "V"), - (0x242A, "X"), - (0x2440, "V"), - (0x244B, "X"), - (0x2460, "M", "1"), - (0x2461, "M", "2"), - (0x2462, "M", "3"), - (0x2463, "M", "4"), - (0x2464, "M", "5"), - (0x2465, "M", "6"), - (0x2466, "M", "7"), - (0x2467, "M", "8"), - (0x2468, "M", "9"), - (0x2469, "M", "10"), - (0x246A, "M", "11"), - (0x246B, "M", "12"), - (0x246C, "M", "13"), - (0x246D, "M", "14"), - (0x246E, "M", "15"), - (0x246F, "M", "16"), - (0x2470, "M", "17"), - (0x2471, "M", "18"), - (0x2472, "M", "19"), - (0x2473, "M", "20"), - (0x2474, "M", "(1)"), - (0x2475, "M", "(2)"), - (0x2476, "M", "(3)"), - (0x2477, "M", "(4)"), - (0x2478, "M", "(5)"), - (0x2479, "M", "(6)"), - (0x247A, "M", "(7)"), - (0x247B, "M", "(8)"), - (0x247C, "M", "(9)"), - (0x247D, "M", "(10)"), - (0x247E, "M", "(11)"), - (0x247F, "M", "(12)"), - (0x2480, "M", "(13)"), - (0x2481, "M", "(14)"), - (0x2482, "M", "(15)"), - (0x2483, "M", "(16)"), - (0x2484, "M", "(17)"), - (0x2485, "M", "(18)"), - (0x2486, "M", "(19)"), - (0x2487, "M", "(20)"), - (0x2488, "X"), - (0x249C, "M", "(a)"), - (0x249D, "M", "(b)"), - (0x249E, "M", "(c)"), - (0x249F, "M", "(d)"), - ] - - -def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x24A0, "M", "(e)"), - (0x24A1, "M", "(f)"), - (0x24A2, "M", "(g)"), - (0x24A3, "M", "(h)"), - (0x24A4, "M", "(i)"), - (0x24A5, "M", "(j)"), - (0x24A6, "M", "(k)"), - (0x24A7, "M", "(l)"), - (0x24A8, "M", "(m)"), - (0x24A9, "M", "(n)"), - (0x24AA, "M", "(o)"), - (0x24AB, "M", "(p)"), - (0x24AC, "M", "(q)"), - (0x24AD, "M", "(r)"), - (0x24AE, "M", "(s)"), - (0x24AF, "M", "(t)"), - (0x24B0, "M", "(u)"), - (0x24B1, "M", "(v)"), - (0x24B2, "M", "(w)"), - (0x24B3, "M", "(x)"), - (0x24B4, "M", "(y)"), - (0x24B5, "M", "(z)"), - (0x24B6, "M", "a"), - (0x24B7, "M", "b"), - (0x24B8, "M", "c"), - (0x24B9, "M", "d"), - (0x24BA, "M", "e"), - (0x24BB, "M", "f"), - (0x24BC, "M", "g"), - (0x24BD, "M", "h"), - (0x24BE, "M", "i"), - (0x24BF, "M", "j"), - (0x24C0, "M", "k"), - (0x24C1, "M", "l"), - (0x24C2, "M", "m"), - (0x24C3, "M", "n"), - (0x24C4, "M", "o"), - (0x24C5, "M", "p"), - (0x24C6, "M", "q"), - (0x24C7, "M", "r"), - (0x24C8, "M", "s"), - (0x24C9, "M", "t"), - (0x24CA, "M", "u"), - (0x24CB, "M", "v"), - (0x24CC, "M", "w"), - (0x24CD, "M", "x"), - (0x24CE, "M", "y"), - (0x24CF, "M", "z"), - (0x24D0, "M", "a"), - (0x24D1, "M", "b"), - (0x24D2, "M", "c"), - (0x24D3, "M", "d"), - (0x24D4, "M", "e"), - (0x24D5, "M", "f"), - (0x24D6, "M", "g"), - (0x24D7, "M", "h"), - (0x24D8, "M", "i"), - (0x24D9, "M", "j"), - (0x24DA, "M", "k"), - (0x24DB, "M", "l"), - (0x24DC, "M", "m"), - (0x24DD, "M", "n"), - (0x24DE, "M", "o"), - (0x24DF, "M", "p"), - (0x24E0, "M", "q"), - (0x24E1, "M", "r"), - (0x24E2, "M", "s"), - (0x24E3, "M", "t"), - (0x24E4, "M", "u"), - (0x24E5, "M", "v"), - (0x24E6, "M", "w"), - (0x24E7, "M", "x"), - (0x24E8, "M", "y"), - (0x24E9, "M", "z"), - (0x24EA, "M", "0"), - (0x24EB, "V"), - (0x2A0C, "M", "∫∫∫∫"), - (0x2A0D, "V"), - (0x2A74, "M", "::="), - (0x2A75, "M", "=="), - (0x2A76, "M", "==="), - (0x2A77, "V"), - (0x2ADC, "M", "⫝̸"), - (0x2ADD, "V"), - (0x2B74, "X"), - (0x2B76, "V"), - (0x2B96, "X"), - (0x2B97, "V"), - (0x2C00, "M", "ⰰ"), - (0x2C01, "M", "ⰱ"), - (0x2C02, "M", "ⰲ"), - (0x2C03, "M", "ⰳ"), - (0x2C04, 
"M", "ⰴ"), - (0x2C05, "M", "ⰵ"), - (0x2C06, "M", "ⰶ"), - (0x2C07, "M", "ⰷ"), - (0x2C08, "M", "ⰸ"), - (0x2C09, "M", "ⰹ"), - (0x2C0A, "M", "ⰺ"), - (0x2C0B, "M", "ⰻ"), - ] - - -def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2C0C, "M", "ⰼ"), - (0x2C0D, "M", "ⰽ"), - (0x2C0E, "M", "ⰾ"), - (0x2C0F, "M", "ⰿ"), - (0x2C10, "M", "ⱀ"), - (0x2C11, "M", "ⱁ"), - (0x2C12, "M", "ⱂ"), - (0x2C13, "M", "ⱃ"), - (0x2C14, "M", "ⱄ"), - (0x2C15, "M", "ⱅ"), - (0x2C16, "M", "ⱆ"), - (0x2C17, "M", "ⱇ"), - (0x2C18, "M", "ⱈ"), - (0x2C19, "M", "ⱉ"), - (0x2C1A, "M", "ⱊ"), - (0x2C1B, "M", "ⱋ"), - (0x2C1C, "M", "ⱌ"), - (0x2C1D, "M", "ⱍ"), - (0x2C1E, "M", "ⱎ"), - (0x2C1F, "M", "ⱏ"), - (0x2C20, "M", "ⱐ"), - (0x2C21, "M", "ⱑ"), - (0x2C22, "M", "ⱒ"), - (0x2C23, "M", "ⱓ"), - (0x2C24, "M", "ⱔ"), - (0x2C25, "M", "ⱕ"), - (0x2C26, "M", "ⱖ"), - (0x2C27, "M", "ⱗ"), - (0x2C28, "M", "ⱘ"), - (0x2C29, "M", "ⱙ"), - (0x2C2A, "M", "ⱚ"), - (0x2C2B, "M", "ⱛ"), - (0x2C2C, "M", "ⱜ"), - (0x2C2D, "M", "ⱝ"), - (0x2C2E, "M", "ⱞ"), - (0x2C2F, "M", "ⱟ"), - (0x2C30, "V"), - (0x2C60, "M", "ⱡ"), - (0x2C61, "V"), - (0x2C62, "M", "ɫ"), - (0x2C63, "M", "ᵽ"), - (0x2C64, "M", "ɽ"), - (0x2C65, "V"), - (0x2C67, "M", "ⱨ"), - (0x2C68, "V"), - (0x2C69, "M", "ⱪ"), - (0x2C6A, "V"), - (0x2C6B, "M", "ⱬ"), - (0x2C6C, "V"), - (0x2C6D, "M", "ɑ"), - (0x2C6E, "M", "ɱ"), - (0x2C6F, "M", "ɐ"), - (0x2C70, "M", "ɒ"), - (0x2C71, "V"), - (0x2C72, "M", "ⱳ"), - (0x2C73, "V"), - (0x2C75, "M", "ⱶ"), - (0x2C76, "V"), - (0x2C7C, "M", "j"), - (0x2C7D, "M", "v"), - (0x2C7E, "M", "ȿ"), - (0x2C7F, "M", "ɀ"), - (0x2C80, "M", "ⲁ"), - (0x2C81, "V"), - (0x2C82, "M", "ⲃ"), - (0x2C83, "V"), - (0x2C84, "M", "ⲅ"), - (0x2C85, "V"), - (0x2C86, "M", "ⲇ"), - (0x2C87, "V"), - (0x2C88, "M", "ⲉ"), - (0x2C89, "V"), - (0x2C8A, "M", "ⲋ"), - (0x2C8B, "V"), - (0x2C8C, "M", "ⲍ"), - (0x2C8D, "V"), - (0x2C8E, "M", "ⲏ"), - (0x2C8F, "V"), - (0x2C90, "M", "ⲑ"), - (0x2C91, "V"), - (0x2C92, "M", "ⲓ"), - (0x2C93, "V"), - (0x2C94, "M", "ⲕ"), - (0x2C95, "V"), - (0x2C96, "M", "ⲗ"), - (0x2C97, "V"), - (0x2C98, "M", "ⲙ"), - (0x2C99, "V"), - (0x2C9A, "M", "ⲛ"), - (0x2C9B, "V"), - (0x2C9C, "M", "ⲝ"), - (0x2C9D, "V"), - (0x2C9E, "M", "ⲟ"), - (0x2C9F, "V"), - (0x2CA0, "M", "ⲡ"), - (0x2CA1, "V"), - (0x2CA2, "M", "ⲣ"), - (0x2CA3, "V"), - (0x2CA4, "M", "ⲥ"), - (0x2CA5, "V"), - ] - - -def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2CA6, "M", "ⲧ"), - (0x2CA7, "V"), - (0x2CA8, "M", "ⲩ"), - (0x2CA9, "V"), - (0x2CAA, "M", "ⲫ"), - (0x2CAB, "V"), - (0x2CAC, "M", "ⲭ"), - (0x2CAD, "V"), - (0x2CAE, "M", "ⲯ"), - (0x2CAF, "V"), - (0x2CB0, "M", "ⲱ"), - (0x2CB1, "V"), - (0x2CB2, "M", "ⲳ"), - (0x2CB3, "V"), - (0x2CB4, "M", "ⲵ"), - (0x2CB5, "V"), - (0x2CB6, "M", "ⲷ"), - (0x2CB7, "V"), - (0x2CB8, "M", "ⲹ"), - (0x2CB9, "V"), - (0x2CBA, "M", "ⲻ"), - (0x2CBB, "V"), - (0x2CBC, "M", "ⲽ"), - (0x2CBD, "V"), - (0x2CBE, "M", "ⲿ"), - (0x2CBF, "V"), - (0x2CC0, "M", "ⳁ"), - (0x2CC1, "V"), - (0x2CC2, "M", "ⳃ"), - (0x2CC3, "V"), - (0x2CC4, "M", "ⳅ"), - (0x2CC5, "V"), - (0x2CC6, "M", "ⳇ"), - (0x2CC7, "V"), - (0x2CC8, "M", "ⳉ"), - (0x2CC9, "V"), - (0x2CCA, "M", "ⳋ"), - (0x2CCB, "V"), - (0x2CCC, "M", "ⳍ"), - (0x2CCD, "V"), - (0x2CCE, "M", "ⳏ"), - (0x2CCF, "V"), - (0x2CD0, "M", "ⳑ"), - (0x2CD1, "V"), - (0x2CD2, "M", "ⳓ"), - (0x2CD3, "V"), - (0x2CD4, "M", "ⳕ"), - (0x2CD5, "V"), - (0x2CD6, "M", "ⳗ"), - (0x2CD7, "V"), - (0x2CD8, "M", "ⳙ"), - (0x2CD9, "V"), - (0x2CDA, "M", "ⳛ"), - (0x2CDB, "V"), - (0x2CDC, "M", "ⳝ"), - (0x2CDD, "V"), - (0x2CDE, "M", "ⳟ"), - (0x2CDF, "V"), - (0x2CE0, "M", 
"ⳡ"), - (0x2CE1, "V"), - (0x2CE2, "M", "ⳣ"), - (0x2CE3, "V"), - (0x2CEB, "M", "ⳬ"), - (0x2CEC, "V"), - (0x2CED, "M", "ⳮ"), - (0x2CEE, "V"), - (0x2CF2, "M", "ⳳ"), - (0x2CF3, "V"), - (0x2CF4, "X"), - (0x2CF9, "V"), - (0x2D26, "X"), - (0x2D27, "V"), - (0x2D28, "X"), - (0x2D2D, "V"), - (0x2D2E, "X"), - (0x2D30, "V"), - (0x2D68, "X"), - (0x2D6F, "M", "ⵡ"), - (0x2D70, "V"), - (0x2D71, "X"), - (0x2D7F, "V"), - (0x2D97, "X"), - (0x2DA0, "V"), - (0x2DA7, "X"), - (0x2DA8, "V"), - (0x2DAF, "X"), - (0x2DB0, "V"), - (0x2DB7, "X"), - (0x2DB8, "V"), - (0x2DBF, "X"), - (0x2DC0, "V"), - (0x2DC7, "X"), - (0x2DC8, "V"), - (0x2DCF, "X"), - (0x2DD0, "V"), - (0x2DD7, "X"), - (0x2DD8, "V"), - (0x2DDF, "X"), - (0x2DE0, "V"), - (0x2E5E, "X"), - ] - - -def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2E80, "V"), - (0x2E9A, "X"), - (0x2E9B, "V"), - (0x2E9F, "M", "母"), - (0x2EA0, "V"), - (0x2EF3, "M", "龟"), - (0x2EF4, "X"), - (0x2F00, "M", "一"), - (0x2F01, "M", "丨"), - (0x2F02, "M", "丶"), - (0x2F03, "M", "丿"), - (0x2F04, "M", "乙"), - (0x2F05, "M", "亅"), - (0x2F06, "M", "二"), - (0x2F07, "M", "亠"), - (0x2F08, "M", "人"), - (0x2F09, "M", "儿"), - (0x2F0A, "M", "入"), - (0x2F0B, "M", "八"), - (0x2F0C, "M", "冂"), - (0x2F0D, "M", "冖"), - (0x2F0E, "M", "冫"), - (0x2F0F, "M", "几"), - (0x2F10, "M", "凵"), - (0x2F11, "M", "刀"), - (0x2F12, "M", "力"), - (0x2F13, "M", "勹"), - (0x2F14, "M", "匕"), - (0x2F15, "M", "匚"), - (0x2F16, "M", "匸"), - (0x2F17, "M", "十"), - (0x2F18, "M", "卜"), - (0x2F19, "M", "卩"), - (0x2F1A, "M", "厂"), - (0x2F1B, "M", "厶"), - (0x2F1C, "M", "又"), - (0x2F1D, "M", "口"), - (0x2F1E, "M", "囗"), - (0x2F1F, "M", "土"), - (0x2F20, "M", "士"), - (0x2F21, "M", "夂"), - (0x2F22, "M", "夊"), - (0x2F23, "M", "夕"), - (0x2F24, "M", "大"), - (0x2F25, "M", "女"), - (0x2F26, "M", "子"), - (0x2F27, "M", "宀"), - (0x2F28, "M", "寸"), - (0x2F29, "M", "小"), - (0x2F2A, "M", "尢"), - (0x2F2B, "M", "尸"), - (0x2F2C, "M", "屮"), - (0x2F2D, "M", "山"), - (0x2F2E, "M", "巛"), - (0x2F2F, "M", "工"), - (0x2F30, "M", "己"), - (0x2F31, "M", "巾"), - (0x2F32, "M", "干"), - (0x2F33, "M", "幺"), - (0x2F34, "M", "广"), - (0x2F35, "M", "廴"), - (0x2F36, "M", "廾"), - (0x2F37, "M", "弋"), - (0x2F38, "M", "弓"), - (0x2F39, "M", "彐"), - (0x2F3A, "M", "彡"), - (0x2F3B, "M", "彳"), - (0x2F3C, "M", "心"), - (0x2F3D, "M", "戈"), - (0x2F3E, "M", "戶"), - (0x2F3F, "M", "手"), - (0x2F40, "M", "支"), - (0x2F41, "M", "攴"), - (0x2F42, "M", "文"), - (0x2F43, "M", "斗"), - (0x2F44, "M", "斤"), - (0x2F45, "M", "方"), - (0x2F46, "M", "无"), - (0x2F47, "M", "日"), - (0x2F48, "M", "曰"), - (0x2F49, "M", "月"), - (0x2F4A, "M", "木"), - (0x2F4B, "M", "欠"), - (0x2F4C, "M", "止"), - (0x2F4D, "M", "歹"), - (0x2F4E, "M", "殳"), - (0x2F4F, "M", "毋"), - (0x2F50, "M", "比"), - (0x2F51, "M", "毛"), - (0x2F52, "M", "氏"), - (0x2F53, "M", "气"), - (0x2F54, "M", "水"), - (0x2F55, "M", "火"), - (0x2F56, "M", "爪"), - (0x2F57, "M", "父"), - (0x2F58, "M", "爻"), - (0x2F59, "M", "爿"), - (0x2F5A, "M", "片"), - (0x2F5B, "M", "牙"), - (0x2F5C, "M", "牛"), - ] - - -def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F5D, "M", "犬"), - (0x2F5E, "M", "玄"), - (0x2F5F, "M", "玉"), - (0x2F60, "M", "瓜"), - (0x2F61, "M", "瓦"), - (0x2F62, "M", "甘"), - (0x2F63, "M", "生"), - (0x2F64, "M", "用"), - (0x2F65, "M", "田"), - (0x2F66, "M", "疋"), - (0x2F67, "M", "疒"), - (0x2F68, "M", "癶"), - (0x2F69, "M", "白"), - (0x2F6A, "M", "皮"), - (0x2F6B, "M", "皿"), - (0x2F6C, "M", "目"), - (0x2F6D, "M", "矛"), - (0x2F6E, "M", "矢"), - (0x2F6F, "M", "石"), - (0x2F70, "M", "示"), - (0x2F71, "M", "禸"), - (0x2F72, 
"M", "禾"), - (0x2F73, "M", "穴"), - (0x2F74, "M", "立"), - (0x2F75, "M", "竹"), - (0x2F76, "M", "米"), - (0x2F77, "M", "糸"), - (0x2F78, "M", "缶"), - (0x2F79, "M", "网"), - (0x2F7A, "M", "羊"), - (0x2F7B, "M", "羽"), - (0x2F7C, "M", "老"), - (0x2F7D, "M", "而"), - (0x2F7E, "M", "耒"), - (0x2F7F, "M", "耳"), - (0x2F80, "M", "聿"), - (0x2F81, "M", "肉"), - (0x2F82, "M", "臣"), - (0x2F83, "M", "自"), - (0x2F84, "M", "至"), - (0x2F85, "M", "臼"), - (0x2F86, "M", "舌"), - (0x2F87, "M", "舛"), - (0x2F88, "M", "舟"), - (0x2F89, "M", "艮"), - (0x2F8A, "M", "色"), - (0x2F8B, "M", "艸"), - (0x2F8C, "M", "虍"), - (0x2F8D, "M", "虫"), - (0x2F8E, "M", "血"), - (0x2F8F, "M", "行"), - (0x2F90, "M", "衣"), - (0x2F91, "M", "襾"), - (0x2F92, "M", "見"), - (0x2F93, "M", "角"), - (0x2F94, "M", "言"), - (0x2F95, "M", "谷"), - (0x2F96, "M", "豆"), - (0x2F97, "M", "豕"), - (0x2F98, "M", "豸"), - (0x2F99, "M", "貝"), - (0x2F9A, "M", "赤"), - (0x2F9B, "M", "走"), - (0x2F9C, "M", "足"), - (0x2F9D, "M", "身"), - (0x2F9E, "M", "車"), - (0x2F9F, "M", "辛"), - (0x2FA0, "M", "辰"), - (0x2FA1, "M", "辵"), - (0x2FA2, "M", "邑"), - (0x2FA3, "M", "酉"), - (0x2FA4, "M", "釆"), - (0x2FA5, "M", "里"), - (0x2FA6, "M", "金"), - (0x2FA7, "M", "長"), - (0x2FA8, "M", "門"), - (0x2FA9, "M", "阜"), - (0x2FAA, "M", "隶"), - (0x2FAB, "M", "隹"), - (0x2FAC, "M", "雨"), - (0x2FAD, "M", "靑"), - (0x2FAE, "M", "非"), - (0x2FAF, "M", "面"), - (0x2FB0, "M", "革"), - (0x2FB1, "M", "韋"), - (0x2FB2, "M", "韭"), - (0x2FB3, "M", "音"), - (0x2FB4, "M", "頁"), - (0x2FB5, "M", "風"), - (0x2FB6, "M", "飛"), - (0x2FB7, "M", "食"), - (0x2FB8, "M", "首"), - (0x2FB9, "M", "香"), - (0x2FBA, "M", "馬"), - (0x2FBB, "M", "骨"), - (0x2FBC, "M", "高"), - (0x2FBD, "M", "髟"), - (0x2FBE, "M", "鬥"), - (0x2FBF, "M", "鬯"), - (0x2FC0, "M", "鬲"), - ] - - -def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2FC1, "M", "鬼"), - (0x2FC2, "M", "魚"), - (0x2FC3, "M", "鳥"), - (0x2FC4, "M", "鹵"), - (0x2FC5, "M", "鹿"), - (0x2FC6, "M", "麥"), - (0x2FC7, "M", "麻"), - (0x2FC8, "M", "黃"), - (0x2FC9, "M", "黍"), - (0x2FCA, "M", "黑"), - (0x2FCB, "M", "黹"), - (0x2FCC, "M", "黽"), - (0x2FCD, "M", "鼎"), - (0x2FCE, "M", "鼓"), - (0x2FCF, "M", "鼠"), - (0x2FD0, "M", "鼻"), - (0x2FD1, "M", "齊"), - (0x2FD2, "M", "齒"), - (0x2FD3, "M", "龍"), - (0x2FD4, "M", "龜"), - (0x2FD5, "M", "龠"), - (0x2FD6, "X"), - (0x3000, "M", " "), - (0x3001, "V"), - (0x3002, "M", "."), - (0x3003, "V"), - (0x3036, "M", "〒"), - (0x3037, "V"), - (0x3038, "M", "十"), - (0x3039, "M", "卄"), - (0x303A, "M", "卅"), - (0x303B, "V"), - (0x3040, "X"), - (0x3041, "V"), - (0x3097, "X"), - (0x3099, "V"), - (0x309B, "M", " ゙"), - (0x309C, "M", " ゚"), - (0x309D, "V"), - (0x309F, "M", "より"), - (0x30A0, "V"), - (0x30FF, "M", "コト"), - (0x3100, "X"), - (0x3105, "V"), - (0x3130, "X"), - (0x3131, "M", "ᄀ"), - (0x3132, "M", "ᄁ"), - (0x3133, "M", "ᆪ"), - (0x3134, "M", "ᄂ"), - (0x3135, "M", "ᆬ"), - (0x3136, "M", "ᆭ"), - (0x3137, "M", "ᄃ"), - (0x3138, "M", "ᄄ"), - (0x3139, "M", "ᄅ"), - (0x313A, "M", "ᆰ"), - (0x313B, "M", "ᆱ"), - (0x313C, "M", "ᆲ"), - (0x313D, "M", "ᆳ"), - (0x313E, "M", "ᆴ"), - (0x313F, "M", "ᆵ"), - (0x3140, "M", "ᄚ"), - (0x3141, "M", "ᄆ"), - (0x3142, "M", "ᄇ"), - (0x3143, "M", "ᄈ"), - (0x3144, "M", "ᄡ"), - (0x3145, "M", "ᄉ"), - (0x3146, "M", "ᄊ"), - (0x3147, "M", "ᄋ"), - (0x3148, "M", "ᄌ"), - (0x3149, "M", "ᄍ"), - (0x314A, "M", "ᄎ"), - (0x314B, "M", "ᄏ"), - (0x314C, "M", "ᄐ"), - (0x314D, "M", "ᄑ"), - (0x314E, "M", "ᄒ"), - (0x314F, "M", "ᅡ"), - (0x3150, "M", "ᅢ"), - (0x3151, "M", "ᅣ"), - (0x3152, "M", "ᅤ"), - (0x3153, "M", "ᅥ"), - (0x3154, "M", "ᅦ"), - (0x3155, "M", "ᅧ"), 
- (0x3156, "M", "ᅨ"), - (0x3157, "M", "ᅩ"), - (0x3158, "M", "ᅪ"), - (0x3159, "M", "ᅫ"), - (0x315A, "M", "ᅬ"), - (0x315B, "M", "ᅭ"), - (0x315C, "M", "ᅮ"), - (0x315D, "M", "ᅯ"), - (0x315E, "M", "ᅰ"), - (0x315F, "M", "ᅱ"), - (0x3160, "M", "ᅲ"), - (0x3161, "M", "ᅳ"), - (0x3162, "M", "ᅴ"), - (0x3163, "M", "ᅵ"), - (0x3164, "I"), - (0x3165, "M", "ᄔ"), - (0x3166, "M", "ᄕ"), - (0x3167, "M", "ᇇ"), - ] - - -def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3168, "M", "ᇈ"), - (0x3169, "M", "ᇌ"), - (0x316A, "M", "ᇎ"), - (0x316B, "M", "ᇓ"), - (0x316C, "M", "ᇗ"), - (0x316D, "M", "ᇙ"), - (0x316E, "M", "ᄜ"), - (0x316F, "M", "ᇝ"), - (0x3170, "M", "ᇟ"), - (0x3171, "M", "ᄝ"), - (0x3172, "M", "ᄞ"), - (0x3173, "M", "ᄠ"), - (0x3174, "M", "ᄢ"), - (0x3175, "M", "ᄣ"), - (0x3176, "M", "ᄧ"), - (0x3177, "M", "ᄩ"), - (0x3178, "M", "ᄫ"), - (0x3179, "M", "ᄬ"), - (0x317A, "M", "ᄭ"), - (0x317B, "M", "ᄮ"), - (0x317C, "M", "ᄯ"), - (0x317D, "M", "ᄲ"), - (0x317E, "M", "ᄶ"), - (0x317F, "M", "ᅀ"), - (0x3180, "M", "ᅇ"), - (0x3181, "M", "ᅌ"), - (0x3182, "M", "ᇱ"), - (0x3183, "M", "ᇲ"), - (0x3184, "M", "ᅗ"), - (0x3185, "M", "ᅘ"), - (0x3186, "M", "ᅙ"), - (0x3187, "M", "ᆄ"), - (0x3188, "M", "ᆅ"), - (0x3189, "M", "ᆈ"), - (0x318A, "M", "ᆑ"), - (0x318B, "M", "ᆒ"), - (0x318C, "M", "ᆔ"), - (0x318D, "M", "ᆞ"), - (0x318E, "M", "ᆡ"), - (0x318F, "X"), - (0x3190, "V"), - (0x3192, "M", "一"), - (0x3193, "M", "二"), - (0x3194, "M", "三"), - (0x3195, "M", "四"), - (0x3196, "M", "上"), - (0x3197, "M", "中"), - (0x3198, "M", "下"), - (0x3199, "M", "甲"), - (0x319A, "M", "乙"), - (0x319B, "M", "丙"), - (0x319C, "M", "丁"), - (0x319D, "M", "天"), - (0x319E, "M", "地"), - (0x319F, "M", "人"), - (0x31A0, "V"), - (0x31E6, "X"), - (0x31F0, "V"), - (0x3200, "M", "(ᄀ)"), - (0x3201, "M", "(ᄂ)"), - (0x3202, "M", "(ᄃ)"), - (0x3203, "M", "(ᄅ)"), - (0x3204, "M", "(ᄆ)"), - (0x3205, "M", "(ᄇ)"), - (0x3206, "M", "(ᄉ)"), - (0x3207, "M", "(ᄋ)"), - (0x3208, "M", "(ᄌ)"), - (0x3209, "M", "(ᄎ)"), - (0x320A, "M", "(ᄏ)"), - (0x320B, "M", "(ᄐ)"), - (0x320C, "M", "(ᄑ)"), - (0x320D, "M", "(ᄒ)"), - (0x320E, "M", "(가)"), - (0x320F, "M", "(나)"), - (0x3210, "M", "(다)"), - (0x3211, "M", "(라)"), - (0x3212, "M", "(마)"), - (0x3213, "M", "(바)"), - (0x3214, "M", "(사)"), - (0x3215, "M", "(아)"), - (0x3216, "M", "(자)"), - (0x3217, "M", "(차)"), - (0x3218, "M", "(카)"), - (0x3219, "M", "(타)"), - (0x321A, "M", "(파)"), - (0x321B, "M", "(하)"), - (0x321C, "M", "(주)"), - (0x321D, "M", "(오전)"), - (0x321E, "M", "(오후)"), - (0x321F, "X"), - (0x3220, "M", "(一)"), - (0x3221, "M", "(二)"), - (0x3222, "M", "(三)"), - (0x3223, "M", "(四)"), - (0x3224, "M", "(五)"), - (0x3225, "M", "(六)"), - (0x3226, "M", "(七)"), - (0x3227, "M", "(八)"), - (0x3228, "M", "(九)"), - (0x3229, "M", "(十)"), - ] - - -def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x322A, "M", "(月)"), - (0x322B, "M", "(火)"), - (0x322C, "M", "(水)"), - (0x322D, "M", "(木)"), - (0x322E, "M", "(金)"), - (0x322F, "M", "(土)"), - (0x3230, "M", "(日)"), - (0x3231, "M", "(株)"), - (0x3232, "M", "(有)"), - (0x3233, "M", "(社)"), - (0x3234, "M", "(名)"), - (0x3235, "M", "(特)"), - (0x3236, "M", "(財)"), - (0x3237, "M", "(祝)"), - (0x3238, "M", "(労)"), - (0x3239, "M", "(代)"), - (0x323A, "M", "(呼)"), - (0x323B, "M", "(学)"), - (0x323C, "M", "(監)"), - (0x323D, "M", "(企)"), - (0x323E, "M", "(資)"), - (0x323F, "M", "(協)"), - (0x3240, "M", "(祭)"), - (0x3241, "M", "(休)"), - (0x3242, "M", "(自)"), - (0x3243, "M", "(至)"), - (0x3244, "M", "問"), - (0x3245, "M", "幼"), - (0x3246, "M", "文"), - (0x3247, "M", "箏"), - (0x3248, "V"), - 
(0x3250, "M", "pte"), - (0x3251, "M", "21"), - (0x3252, "M", "22"), - (0x3253, "M", "23"), - (0x3254, "M", "24"), - (0x3255, "M", "25"), - (0x3256, "M", "26"), - (0x3257, "M", "27"), - (0x3258, "M", "28"), - (0x3259, "M", "29"), - (0x325A, "M", "30"), - (0x325B, "M", "31"), - (0x325C, "M", "32"), - (0x325D, "M", "33"), - (0x325E, "M", "34"), - (0x325F, "M", "35"), - (0x3260, "M", "ᄀ"), - (0x3261, "M", "ᄂ"), - (0x3262, "M", "ᄃ"), - (0x3263, "M", "ᄅ"), - (0x3264, "M", "ᄆ"), - (0x3265, "M", "ᄇ"), - (0x3266, "M", "ᄉ"), - (0x3267, "M", "ᄋ"), - (0x3268, "M", "ᄌ"), - (0x3269, "M", "ᄎ"), - (0x326A, "M", "ᄏ"), - (0x326B, "M", "ᄐ"), - (0x326C, "M", "ᄑ"), - (0x326D, "M", "ᄒ"), - (0x326E, "M", "가"), - (0x326F, "M", "나"), - (0x3270, "M", "다"), - (0x3271, "M", "라"), - (0x3272, "M", "마"), - (0x3273, "M", "바"), - (0x3274, "M", "사"), - (0x3275, "M", "아"), - (0x3276, "M", "자"), - (0x3277, "M", "차"), - (0x3278, "M", "카"), - (0x3279, "M", "타"), - (0x327A, "M", "파"), - (0x327B, "M", "하"), - (0x327C, "M", "참고"), - (0x327D, "M", "주의"), - (0x327E, "M", "우"), - (0x327F, "V"), - (0x3280, "M", "一"), - (0x3281, "M", "二"), - (0x3282, "M", "三"), - (0x3283, "M", "四"), - (0x3284, "M", "五"), - (0x3285, "M", "六"), - (0x3286, "M", "七"), - (0x3287, "M", "八"), - (0x3288, "M", "九"), - (0x3289, "M", "十"), - (0x328A, "M", "月"), - (0x328B, "M", "火"), - (0x328C, "M", "水"), - (0x328D, "M", "木"), - (0x328E, "M", "金"), - (0x328F, "M", "土"), - (0x3290, "M", "日"), - (0x3291, "M", "株"), - (0x3292, "M", "有"), - (0x3293, "M", "社"), - (0x3294, "M", "名"), - ] - - -def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3295, "M", "特"), - (0x3296, "M", "財"), - (0x3297, "M", "祝"), - (0x3298, "M", "労"), - (0x3299, "M", "秘"), - (0x329A, "M", "男"), - (0x329B, "M", "女"), - (0x329C, "M", "適"), - (0x329D, "M", "優"), - (0x329E, "M", "印"), - (0x329F, "M", "注"), - (0x32A0, "M", "項"), - (0x32A1, "M", "休"), - (0x32A2, "M", "写"), - (0x32A3, "M", "正"), - (0x32A4, "M", "上"), - (0x32A5, "M", "中"), - (0x32A6, "M", "下"), - (0x32A7, "M", "左"), - (0x32A8, "M", "右"), - (0x32A9, "M", "医"), - (0x32AA, "M", "宗"), - (0x32AB, "M", "学"), - (0x32AC, "M", "監"), - (0x32AD, "M", "企"), - (0x32AE, "M", "資"), - (0x32AF, "M", "協"), - (0x32B0, "M", "夜"), - (0x32B1, "M", "36"), - (0x32B2, "M", "37"), - (0x32B3, "M", "38"), - (0x32B4, "M", "39"), - (0x32B5, "M", "40"), - (0x32B6, "M", "41"), - (0x32B7, "M", "42"), - (0x32B8, "M", "43"), - (0x32B9, "M", "44"), - (0x32BA, "M", "45"), - (0x32BB, "M", "46"), - (0x32BC, "M", "47"), - (0x32BD, "M", "48"), - (0x32BE, "M", "49"), - (0x32BF, "M", "50"), - (0x32C0, "M", "1月"), - (0x32C1, "M", "2月"), - (0x32C2, "M", "3月"), - (0x32C3, "M", "4月"), - (0x32C4, "M", "5月"), - (0x32C5, "M", "6月"), - (0x32C6, "M", "7月"), - (0x32C7, "M", "8月"), - (0x32C8, "M", "9月"), - (0x32C9, "M", "10月"), - (0x32CA, "M", "11月"), - (0x32CB, "M", "12月"), - (0x32CC, "M", "hg"), - (0x32CD, "M", "erg"), - (0x32CE, "M", "ev"), - (0x32CF, "M", "ltd"), - (0x32D0, "M", "ア"), - (0x32D1, "M", "イ"), - (0x32D2, "M", "ウ"), - (0x32D3, "M", "エ"), - (0x32D4, "M", "オ"), - (0x32D5, "M", "カ"), - (0x32D6, "M", "キ"), - (0x32D7, "M", "ク"), - (0x32D8, "M", "ケ"), - (0x32D9, "M", "コ"), - (0x32DA, "M", "サ"), - (0x32DB, "M", "シ"), - (0x32DC, "M", "ス"), - (0x32DD, "M", "セ"), - (0x32DE, "M", "ソ"), - (0x32DF, "M", "タ"), - (0x32E0, "M", "チ"), - (0x32E1, "M", "ツ"), - (0x32E2, "M", "テ"), - (0x32E3, "M", "ト"), - (0x32E4, "M", "ナ"), - (0x32E5, "M", "ニ"), - (0x32E6, "M", "ヌ"), - (0x32E7, "M", "ネ"), - (0x32E8, "M", "ノ"), - (0x32E9, "M", "ハ"), - (0x32EA, "M", "ヒ"), - 
(0x32EB, "M", "フ"), - (0x32EC, "M", "ヘ"), - (0x32ED, "M", "ホ"), - (0x32EE, "M", "マ"), - (0x32EF, "M", "ミ"), - (0x32F0, "M", "ム"), - (0x32F1, "M", "メ"), - (0x32F2, "M", "モ"), - (0x32F3, "M", "ヤ"), - (0x32F4, "M", "ユ"), - (0x32F5, "M", "ヨ"), - (0x32F6, "M", "ラ"), - (0x32F7, "M", "リ"), - (0x32F8, "M", "ル"), - ] - - -def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x32F9, "M", "レ"), - (0x32FA, "M", "ロ"), - (0x32FB, "M", "ワ"), - (0x32FC, "M", "ヰ"), - (0x32FD, "M", "ヱ"), - (0x32FE, "M", "ヲ"), - (0x32FF, "M", "令和"), - (0x3300, "M", "アパート"), - (0x3301, "M", "アルファ"), - (0x3302, "M", "アンペア"), - (0x3303, "M", "アール"), - (0x3304, "M", "イニング"), - (0x3305, "M", "インチ"), - (0x3306, "M", "ウォン"), - (0x3307, "M", "エスクード"), - (0x3308, "M", "エーカー"), - (0x3309, "M", "オンス"), - (0x330A, "M", "オーム"), - (0x330B, "M", "カイリ"), - (0x330C, "M", "カラット"), - (0x330D, "M", "カロリー"), - (0x330E, "M", "ガロン"), - (0x330F, "M", "ガンマ"), - (0x3310, "M", "ギガ"), - (0x3311, "M", "ギニー"), - (0x3312, "M", "キュリー"), - (0x3313, "M", "ギルダー"), - (0x3314, "M", "キロ"), - (0x3315, "M", "キログラム"), - (0x3316, "M", "キロメートル"), - (0x3317, "M", "キロワット"), - (0x3318, "M", "グラム"), - (0x3319, "M", "グラムトン"), - (0x331A, "M", "クルゼイロ"), - (0x331B, "M", "クローネ"), - (0x331C, "M", "ケース"), - (0x331D, "M", "コルナ"), - (0x331E, "M", "コーポ"), - (0x331F, "M", "サイクル"), - (0x3320, "M", "サンチーム"), - (0x3321, "M", "シリング"), - (0x3322, "M", "センチ"), - (0x3323, "M", "セント"), - (0x3324, "M", "ダース"), - (0x3325, "M", "デシ"), - (0x3326, "M", "ドル"), - (0x3327, "M", "トン"), - (0x3328, "M", "ナノ"), - (0x3329, "M", "ノット"), - (0x332A, "M", "ハイツ"), - (0x332B, "M", "パーセント"), - (0x332C, "M", "パーツ"), - (0x332D, "M", "バーレル"), - (0x332E, "M", "ピアストル"), - (0x332F, "M", "ピクル"), - (0x3330, "M", "ピコ"), - (0x3331, "M", "ビル"), - (0x3332, "M", "ファラッド"), - (0x3333, "M", "フィート"), - (0x3334, "M", "ブッシェル"), - (0x3335, "M", "フラン"), - (0x3336, "M", "ヘクタール"), - (0x3337, "M", "ペソ"), - (0x3338, "M", "ペニヒ"), - (0x3339, "M", "ヘルツ"), - (0x333A, "M", "ペンス"), - (0x333B, "M", "ページ"), - (0x333C, "M", "ベータ"), - (0x333D, "M", "ポイント"), - (0x333E, "M", "ボルト"), - (0x333F, "M", "ホン"), - (0x3340, "M", "ポンド"), - (0x3341, "M", "ホール"), - (0x3342, "M", "ホーン"), - (0x3343, "M", "マイクロ"), - (0x3344, "M", "マイル"), - (0x3345, "M", "マッハ"), - (0x3346, "M", "マルク"), - (0x3347, "M", "マンション"), - (0x3348, "M", "ミクロン"), - (0x3349, "M", "ミリ"), - (0x334A, "M", "ミリバール"), - (0x334B, "M", "メガ"), - (0x334C, "M", "メガトン"), - (0x334D, "M", "メートル"), - (0x334E, "M", "ヤード"), - (0x334F, "M", "ヤール"), - (0x3350, "M", "ユアン"), - (0x3351, "M", "リットル"), - (0x3352, "M", "リラ"), - (0x3353, "M", "ルピー"), - (0x3354, "M", "ルーブル"), - (0x3355, "M", "レム"), - (0x3356, "M", "レントゲン"), - (0x3357, "M", "ワット"), - (0x3358, "M", "0点"), - (0x3359, "M", "1点"), - (0x335A, "M", "2点"), - (0x335B, "M", "3点"), - (0x335C, "M", "4点"), - ] - - -def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x335D, "M", "5点"), - (0x335E, "M", "6点"), - (0x335F, "M", "7点"), - (0x3360, "M", "8点"), - (0x3361, "M", "9点"), - (0x3362, "M", "10点"), - (0x3363, "M", "11点"), - (0x3364, "M", "12点"), - (0x3365, "M", "13点"), - (0x3366, "M", "14点"), - (0x3367, "M", "15点"), - (0x3368, "M", "16点"), - (0x3369, "M", "17点"), - (0x336A, "M", "18点"), - (0x336B, "M", "19点"), - (0x336C, "M", "20点"), - (0x336D, "M", "21点"), - (0x336E, "M", "22点"), - (0x336F, "M", "23点"), - (0x3370, "M", "24点"), - (0x3371, "M", "hpa"), - (0x3372, "M", "da"), - (0x3373, "M", "au"), - (0x3374, "M", "bar"), - (0x3375, "M", "ov"), - (0x3376, "M", "pc"), - (0x3377, "M", "dm"), - (0x3378, 
"M", "dm2"), - (0x3379, "M", "dm3"), - (0x337A, "M", "iu"), - (0x337B, "M", "平成"), - (0x337C, "M", "昭和"), - (0x337D, "M", "大正"), - (0x337E, "M", "明治"), - (0x337F, "M", "株式会社"), - (0x3380, "M", "pa"), - (0x3381, "M", "na"), - (0x3382, "M", "μa"), - (0x3383, "M", "ma"), - (0x3384, "M", "ka"), - (0x3385, "M", "kb"), - (0x3386, "M", "mb"), - (0x3387, "M", "gb"), - (0x3388, "M", "cal"), - (0x3389, "M", "kcal"), - (0x338A, "M", "pf"), - (0x338B, "M", "nf"), - (0x338C, "M", "μf"), - (0x338D, "M", "μg"), - (0x338E, "M", "mg"), - (0x338F, "M", "kg"), - (0x3390, "M", "hz"), - (0x3391, "M", "khz"), - (0x3392, "M", "mhz"), - (0x3393, "M", "ghz"), - (0x3394, "M", "thz"), - (0x3395, "M", "μl"), - (0x3396, "M", "ml"), - (0x3397, "M", "dl"), - (0x3398, "M", "kl"), - (0x3399, "M", "fm"), - (0x339A, "M", "nm"), - (0x339B, "M", "μm"), - (0x339C, "M", "mm"), - (0x339D, "M", "cm"), - (0x339E, "M", "km"), - (0x339F, "M", "mm2"), - (0x33A0, "M", "cm2"), - (0x33A1, "M", "m2"), - (0x33A2, "M", "km2"), - (0x33A3, "M", "mm3"), - (0x33A4, "M", "cm3"), - (0x33A5, "M", "m3"), - (0x33A6, "M", "km3"), - (0x33A7, "M", "m∕s"), - (0x33A8, "M", "m∕s2"), - (0x33A9, "M", "pa"), - (0x33AA, "M", "kpa"), - (0x33AB, "M", "mpa"), - (0x33AC, "M", "gpa"), - (0x33AD, "M", "rad"), - (0x33AE, "M", "rad∕s"), - (0x33AF, "M", "rad∕s2"), - (0x33B0, "M", "ps"), - (0x33B1, "M", "ns"), - (0x33B2, "M", "μs"), - (0x33B3, "M", "ms"), - (0x33B4, "M", "pv"), - (0x33B5, "M", "nv"), - (0x33B6, "M", "μv"), - (0x33B7, "M", "mv"), - (0x33B8, "M", "kv"), - (0x33B9, "M", "mv"), - (0x33BA, "M", "pw"), - (0x33BB, "M", "nw"), - (0x33BC, "M", "μw"), - (0x33BD, "M", "mw"), - (0x33BE, "M", "kw"), - (0x33BF, "M", "mw"), - (0x33C0, "M", "kω"), - ] - - -def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x33C1, "M", "mω"), - (0x33C2, "X"), - (0x33C3, "M", "bq"), - (0x33C4, "M", "cc"), - (0x33C5, "M", "cd"), - (0x33C6, "M", "c∕kg"), - (0x33C7, "X"), - (0x33C8, "M", "db"), - (0x33C9, "M", "gy"), - (0x33CA, "M", "ha"), - (0x33CB, "M", "hp"), - (0x33CC, "M", "in"), - (0x33CD, "M", "kk"), - (0x33CE, "M", "km"), - (0x33CF, "M", "kt"), - (0x33D0, "M", "lm"), - (0x33D1, "M", "ln"), - (0x33D2, "M", "log"), - (0x33D3, "M", "lx"), - (0x33D4, "M", "mb"), - (0x33D5, "M", "mil"), - (0x33D6, "M", "mol"), - (0x33D7, "M", "ph"), - (0x33D8, "X"), - (0x33D9, "M", "ppm"), - (0x33DA, "M", "pr"), - (0x33DB, "M", "sr"), - (0x33DC, "M", "sv"), - (0x33DD, "M", "wb"), - (0x33DE, "M", "v∕m"), - (0x33DF, "M", "a∕m"), - (0x33E0, "M", "1日"), - (0x33E1, "M", "2日"), - (0x33E2, "M", "3日"), - (0x33E3, "M", "4日"), - (0x33E4, "M", "5日"), - (0x33E5, "M", "6日"), - (0x33E6, "M", "7日"), - (0x33E7, "M", "8日"), - (0x33E8, "M", "9日"), - (0x33E9, "M", "10日"), - (0x33EA, "M", "11日"), - (0x33EB, "M", "12日"), - (0x33EC, "M", "13日"), - (0x33ED, "M", "14日"), - (0x33EE, "M", "15日"), - (0x33EF, "M", "16日"), - (0x33F0, "M", "17日"), - (0x33F1, "M", "18日"), - (0x33F2, "M", "19日"), - (0x33F3, "M", "20日"), - (0x33F4, "M", "21日"), - (0x33F5, "M", "22日"), - (0x33F6, "M", "23日"), - (0x33F7, "M", "24日"), - (0x33F8, "M", "25日"), - (0x33F9, "M", "26日"), - (0x33FA, "M", "27日"), - (0x33FB, "M", "28日"), - (0x33FC, "M", "29日"), - (0x33FD, "M", "30日"), - (0x33FE, "M", "31日"), - (0x33FF, "M", "gal"), - (0x3400, "V"), - (0xA48D, "X"), - (0xA490, "V"), - (0xA4C7, "X"), - (0xA4D0, "V"), - (0xA62C, "X"), - (0xA640, "M", "ꙁ"), - (0xA641, "V"), - (0xA642, "M", "ꙃ"), - (0xA643, "V"), - (0xA644, "M", "ꙅ"), - (0xA645, "V"), - (0xA646, "M", "ꙇ"), - (0xA647, "V"), - (0xA648, "M", "ꙉ"), - (0xA649, "V"), - 
(0xA64A, "M", "ꙋ"), - (0xA64B, "V"), - (0xA64C, "M", "ꙍ"), - (0xA64D, "V"), - (0xA64E, "M", "ꙏ"), - (0xA64F, "V"), - (0xA650, "M", "ꙑ"), - (0xA651, "V"), - (0xA652, "M", "ꙓ"), - (0xA653, "V"), - (0xA654, "M", "ꙕ"), - (0xA655, "V"), - (0xA656, "M", "ꙗ"), - (0xA657, "V"), - (0xA658, "M", "ꙙ"), - (0xA659, "V"), - (0xA65A, "M", "ꙛ"), - (0xA65B, "V"), - (0xA65C, "M", "ꙝ"), - (0xA65D, "V"), - (0xA65E, "M", "ꙟ"), - ] - - -def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA65F, "V"), - (0xA660, "M", "ꙡ"), - (0xA661, "V"), - (0xA662, "M", "ꙣ"), - (0xA663, "V"), - (0xA664, "M", "ꙥ"), - (0xA665, "V"), - (0xA666, "M", "ꙧ"), - (0xA667, "V"), - (0xA668, "M", "ꙩ"), - (0xA669, "V"), - (0xA66A, "M", "ꙫ"), - (0xA66B, "V"), - (0xA66C, "M", "ꙭ"), - (0xA66D, "V"), - (0xA680, "M", "ꚁ"), - (0xA681, "V"), - (0xA682, "M", "ꚃ"), - (0xA683, "V"), - (0xA684, "M", "ꚅ"), - (0xA685, "V"), - (0xA686, "M", "ꚇ"), - (0xA687, "V"), - (0xA688, "M", "ꚉ"), - (0xA689, "V"), - (0xA68A, "M", "ꚋ"), - (0xA68B, "V"), - (0xA68C, "M", "ꚍ"), - (0xA68D, "V"), - (0xA68E, "M", "ꚏ"), - (0xA68F, "V"), - (0xA690, "M", "ꚑ"), - (0xA691, "V"), - (0xA692, "M", "ꚓ"), - (0xA693, "V"), - (0xA694, "M", "ꚕ"), - (0xA695, "V"), - (0xA696, "M", "ꚗ"), - (0xA697, "V"), - (0xA698, "M", "ꚙ"), - (0xA699, "V"), - (0xA69A, "M", "ꚛ"), - (0xA69B, "V"), - (0xA69C, "M", "ъ"), - (0xA69D, "M", "ь"), - (0xA69E, "V"), - (0xA6F8, "X"), - (0xA700, "V"), - (0xA722, "M", "ꜣ"), - (0xA723, "V"), - (0xA724, "M", "ꜥ"), - (0xA725, "V"), - (0xA726, "M", "ꜧ"), - (0xA727, "V"), - (0xA728, "M", "ꜩ"), - (0xA729, "V"), - (0xA72A, "M", "ꜫ"), - (0xA72B, "V"), - (0xA72C, "M", "ꜭ"), - (0xA72D, "V"), - (0xA72E, "M", "ꜯ"), - (0xA72F, "V"), - (0xA732, "M", "ꜳ"), - (0xA733, "V"), - (0xA734, "M", "ꜵ"), - (0xA735, "V"), - (0xA736, "M", "ꜷ"), - (0xA737, "V"), - (0xA738, "M", "ꜹ"), - (0xA739, "V"), - (0xA73A, "M", "ꜻ"), - (0xA73B, "V"), - (0xA73C, "M", "ꜽ"), - (0xA73D, "V"), - (0xA73E, "M", "ꜿ"), - (0xA73F, "V"), - (0xA740, "M", "ꝁ"), - (0xA741, "V"), - (0xA742, "M", "ꝃ"), - (0xA743, "V"), - (0xA744, "M", "ꝅ"), - (0xA745, "V"), - (0xA746, "M", "ꝇ"), - (0xA747, "V"), - (0xA748, "M", "ꝉ"), - (0xA749, "V"), - (0xA74A, "M", "ꝋ"), - (0xA74B, "V"), - (0xA74C, "M", "ꝍ"), - (0xA74D, "V"), - (0xA74E, "M", "ꝏ"), - (0xA74F, "V"), - (0xA750, "M", "ꝑ"), - (0xA751, "V"), - (0xA752, "M", "ꝓ"), - (0xA753, "V"), - (0xA754, "M", "ꝕ"), - (0xA755, "V"), - (0xA756, "M", "ꝗ"), - (0xA757, "V"), - ] - - -def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA758, "M", "ꝙ"), - (0xA759, "V"), - (0xA75A, "M", "ꝛ"), - (0xA75B, "V"), - (0xA75C, "M", "ꝝ"), - (0xA75D, "V"), - (0xA75E, "M", "ꝟ"), - (0xA75F, "V"), - (0xA760, "M", "ꝡ"), - (0xA761, "V"), - (0xA762, "M", "ꝣ"), - (0xA763, "V"), - (0xA764, "M", "ꝥ"), - (0xA765, "V"), - (0xA766, "M", "ꝧ"), - (0xA767, "V"), - (0xA768, "M", "ꝩ"), - (0xA769, "V"), - (0xA76A, "M", "ꝫ"), - (0xA76B, "V"), - (0xA76C, "M", "ꝭ"), - (0xA76D, "V"), - (0xA76E, "M", "ꝯ"), - (0xA76F, "V"), - (0xA770, "M", "ꝯ"), - (0xA771, "V"), - (0xA779, "M", "ꝺ"), - (0xA77A, "V"), - (0xA77B, "M", "ꝼ"), - (0xA77C, "V"), - (0xA77D, "M", "ᵹ"), - (0xA77E, "M", "ꝿ"), - (0xA77F, "V"), - (0xA780, "M", "ꞁ"), - (0xA781, "V"), - (0xA782, "M", "ꞃ"), - (0xA783, "V"), - (0xA784, "M", "ꞅ"), - (0xA785, "V"), - (0xA786, "M", "ꞇ"), - (0xA787, "V"), - (0xA78B, "M", "ꞌ"), - (0xA78C, "V"), - (0xA78D, "M", "ɥ"), - (0xA78E, "V"), - (0xA790, "M", "ꞑ"), - (0xA791, "V"), - (0xA792, "M", "ꞓ"), - (0xA793, "V"), - (0xA796, "M", "ꞗ"), - (0xA797, "V"), - (0xA798, "M", "ꞙ"), - 
(0xA799, "V"), - (0xA79A, "M", "ꞛ"), - (0xA79B, "V"), - (0xA79C, "M", "ꞝ"), - (0xA79D, "V"), - (0xA79E, "M", "ꞟ"), - (0xA79F, "V"), - (0xA7A0, "M", "ꞡ"), - (0xA7A1, "V"), - (0xA7A2, "M", "ꞣ"), - (0xA7A3, "V"), - (0xA7A4, "M", "ꞥ"), - (0xA7A5, "V"), - (0xA7A6, "M", "ꞧ"), - (0xA7A7, "V"), - (0xA7A8, "M", "ꞩ"), - (0xA7A9, "V"), - (0xA7AA, "M", "ɦ"), - (0xA7AB, "M", "ɜ"), - (0xA7AC, "M", "ɡ"), - (0xA7AD, "M", "ɬ"), - (0xA7AE, "M", "ɪ"), - (0xA7AF, "V"), - (0xA7B0, "M", "ʞ"), - (0xA7B1, "M", "ʇ"), - (0xA7B2, "M", "ʝ"), - (0xA7B3, "M", "ꭓ"), - (0xA7B4, "M", "ꞵ"), - (0xA7B5, "V"), - (0xA7B6, "M", "ꞷ"), - (0xA7B7, "V"), - (0xA7B8, "M", "ꞹ"), - (0xA7B9, "V"), - (0xA7BA, "M", "ꞻ"), - (0xA7BB, "V"), - (0xA7BC, "M", "ꞽ"), - (0xA7BD, "V"), - (0xA7BE, "M", "ꞿ"), - (0xA7BF, "V"), - (0xA7C0, "M", "ꟁ"), - (0xA7C1, "V"), - (0xA7C2, "M", "ꟃ"), - (0xA7C3, "V"), - (0xA7C4, "M", "ꞔ"), - (0xA7C5, "M", "ʂ"), - (0xA7C6, "M", "ᶎ"), - (0xA7C7, "M", "ꟈ"), - (0xA7C8, "V"), - ] - - -def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA7C9, "M", "ꟊ"), - (0xA7CA, "V"), - (0xA7CB, "M", "ɤ"), - (0xA7CC, "M", "ꟍ"), - (0xA7CD, "V"), - (0xA7CE, "X"), - (0xA7D0, "M", "ꟑ"), - (0xA7D1, "V"), - (0xA7D2, "X"), - (0xA7D3, "V"), - (0xA7D4, "X"), - (0xA7D5, "V"), - (0xA7D6, "M", "ꟗ"), - (0xA7D7, "V"), - (0xA7D8, "M", "ꟙ"), - (0xA7D9, "V"), - (0xA7DA, "M", "ꟛ"), - (0xA7DB, "V"), - (0xA7DC, "M", "ƛ"), - (0xA7DD, "X"), - (0xA7F2, "M", "c"), - (0xA7F3, "M", "f"), - (0xA7F4, "M", "q"), - (0xA7F5, "M", "ꟶ"), - (0xA7F6, "V"), - (0xA7F8, "M", "ħ"), - (0xA7F9, "M", "œ"), - (0xA7FA, "V"), - (0xA82D, "X"), - (0xA830, "V"), - (0xA83A, "X"), - (0xA840, "V"), - (0xA878, "X"), - (0xA880, "V"), - (0xA8C6, "X"), - (0xA8CE, "V"), - (0xA8DA, "X"), - (0xA8E0, "V"), - (0xA954, "X"), - (0xA95F, "V"), - (0xA97D, "X"), - (0xA980, "V"), - (0xA9CE, "X"), - (0xA9CF, "V"), - (0xA9DA, "X"), - (0xA9DE, "V"), - (0xA9FF, "X"), - (0xAA00, "V"), - (0xAA37, "X"), - (0xAA40, "V"), - (0xAA4E, "X"), - (0xAA50, "V"), - (0xAA5A, "X"), - (0xAA5C, "V"), - (0xAAC3, "X"), - (0xAADB, "V"), - (0xAAF7, "X"), - (0xAB01, "V"), - (0xAB07, "X"), - (0xAB09, "V"), - (0xAB0F, "X"), - (0xAB11, "V"), - (0xAB17, "X"), - (0xAB20, "V"), - (0xAB27, "X"), - (0xAB28, "V"), - (0xAB2F, "X"), - (0xAB30, "V"), - (0xAB5C, "M", "ꜧ"), - (0xAB5D, "M", "ꬷ"), - (0xAB5E, "M", "ɫ"), - (0xAB5F, "M", "ꭒ"), - (0xAB60, "V"), - (0xAB69, "M", "ʍ"), - (0xAB6A, "V"), - (0xAB6C, "X"), - (0xAB70, "M", "Ꭰ"), - (0xAB71, "M", "Ꭱ"), - (0xAB72, "M", "Ꭲ"), - (0xAB73, "M", "Ꭳ"), - (0xAB74, "M", "Ꭴ"), - (0xAB75, "M", "Ꭵ"), - (0xAB76, "M", "Ꭶ"), - (0xAB77, "M", "Ꭷ"), - (0xAB78, "M", "Ꭸ"), - (0xAB79, "M", "Ꭹ"), - (0xAB7A, "M", "Ꭺ"), - (0xAB7B, "M", "Ꭻ"), - (0xAB7C, "M", "Ꭼ"), - (0xAB7D, "M", "Ꭽ"), - (0xAB7E, "M", "Ꭾ"), - (0xAB7F, "M", "Ꭿ"), - (0xAB80, "M", "Ꮀ"), - (0xAB81, "M", "Ꮁ"), - (0xAB82, "M", "Ꮂ"), - (0xAB83, "M", "Ꮃ"), - (0xAB84, "M", "Ꮄ"), - (0xAB85, "M", "Ꮅ"), - (0xAB86, "M", "Ꮆ"), - (0xAB87, "M", "Ꮇ"), - ] - - -def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xAB88, "M", "Ꮈ"), - (0xAB89, "M", "Ꮉ"), - (0xAB8A, "M", "Ꮊ"), - (0xAB8B, "M", "Ꮋ"), - (0xAB8C, "M", "Ꮌ"), - (0xAB8D, "M", "Ꮍ"), - (0xAB8E, "M", "Ꮎ"), - (0xAB8F, "M", "Ꮏ"), - (0xAB90, "M", "Ꮐ"), - (0xAB91, "M", "Ꮑ"), - (0xAB92, "M", "Ꮒ"), - (0xAB93, "M", "Ꮓ"), - (0xAB94, "M", "Ꮔ"), - (0xAB95, "M", "Ꮕ"), - (0xAB96, "M", "Ꮖ"), - (0xAB97, "M", "Ꮗ"), - (0xAB98, "M", "Ꮘ"), - (0xAB99, "M", "Ꮙ"), - (0xAB9A, "M", "Ꮚ"), - (0xAB9B, "M", "Ꮛ"), - (0xAB9C, "M", "Ꮜ"), - (0xAB9D, "M", "Ꮝ"), - (0xAB9E, "M", 
"Ꮞ"), - (0xAB9F, "M", "Ꮟ"), - (0xABA0, "M", "Ꮠ"), - (0xABA1, "M", "Ꮡ"), - (0xABA2, "M", "Ꮢ"), - (0xABA3, "M", "Ꮣ"), - (0xABA4, "M", "Ꮤ"), - (0xABA5, "M", "Ꮥ"), - (0xABA6, "M", "Ꮦ"), - (0xABA7, "M", "Ꮧ"), - (0xABA8, "M", "Ꮨ"), - (0xABA9, "M", "Ꮩ"), - (0xABAA, "M", "Ꮪ"), - (0xABAB, "M", "Ꮫ"), - (0xABAC, "M", "Ꮬ"), - (0xABAD, "M", "Ꮭ"), - (0xABAE, "M", "Ꮮ"), - (0xABAF, "M", "Ꮯ"), - (0xABB0, "M", "Ꮰ"), - (0xABB1, "M", "Ꮱ"), - (0xABB2, "M", "Ꮲ"), - (0xABB3, "M", "Ꮳ"), - (0xABB4, "M", "Ꮴ"), - (0xABB5, "M", "Ꮵ"), - (0xABB6, "M", "Ꮶ"), - (0xABB7, "M", "Ꮷ"), - (0xABB8, "M", "Ꮸ"), - (0xABB9, "M", "Ꮹ"), - (0xABBA, "M", "Ꮺ"), - (0xABBB, "M", "Ꮻ"), - (0xABBC, "M", "Ꮼ"), - (0xABBD, "M", "Ꮽ"), - (0xABBE, "M", "Ꮾ"), - (0xABBF, "M", "Ꮿ"), - (0xABC0, "V"), - (0xABEE, "X"), - (0xABF0, "V"), - (0xABFA, "X"), - (0xAC00, "V"), - (0xD7A4, "X"), - (0xD7B0, "V"), - (0xD7C7, "X"), - (0xD7CB, "V"), - (0xD7FC, "X"), - (0xF900, "M", "豈"), - (0xF901, "M", "更"), - (0xF902, "M", "車"), - (0xF903, "M", "賈"), - (0xF904, "M", "滑"), - (0xF905, "M", "串"), - (0xF906, "M", "句"), - (0xF907, "M", "龜"), - (0xF909, "M", "契"), - (0xF90A, "M", "金"), - (0xF90B, "M", "喇"), - (0xF90C, "M", "奈"), - (0xF90D, "M", "懶"), - (0xF90E, "M", "癩"), - (0xF90F, "M", "羅"), - (0xF910, "M", "蘿"), - (0xF911, "M", "螺"), - (0xF912, "M", "裸"), - (0xF913, "M", "邏"), - (0xF914, "M", "樂"), - (0xF915, "M", "洛"), - (0xF916, "M", "烙"), - (0xF917, "M", "珞"), - (0xF918, "M", "落"), - (0xF919, "M", "酪"), - (0xF91A, "M", "駱"), - (0xF91B, "M", "亂"), - (0xF91C, "M", "卵"), - (0xF91D, "M", "欄"), - (0xF91E, "M", "爛"), - (0xF91F, "M", "蘭"), - (0xF920, "M", "鸞"), - (0xF921, "M", "嵐"), - (0xF922, "M", "濫"), - ] - - -def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF923, "M", "藍"), - (0xF924, "M", "襤"), - (0xF925, "M", "拉"), - (0xF926, "M", "臘"), - (0xF927, "M", "蠟"), - (0xF928, "M", "廊"), - (0xF929, "M", "朗"), - (0xF92A, "M", "浪"), - (0xF92B, "M", "狼"), - (0xF92C, "M", "郎"), - (0xF92D, "M", "來"), - (0xF92E, "M", "冷"), - (0xF92F, "M", "勞"), - (0xF930, "M", "擄"), - (0xF931, "M", "櫓"), - (0xF932, "M", "爐"), - (0xF933, "M", "盧"), - (0xF934, "M", "老"), - (0xF935, "M", "蘆"), - (0xF936, "M", "虜"), - (0xF937, "M", "路"), - (0xF938, "M", "露"), - (0xF939, "M", "魯"), - (0xF93A, "M", "鷺"), - (0xF93B, "M", "碌"), - (0xF93C, "M", "祿"), - (0xF93D, "M", "綠"), - (0xF93E, "M", "菉"), - (0xF93F, "M", "錄"), - (0xF940, "M", "鹿"), - (0xF941, "M", "論"), - (0xF942, "M", "壟"), - (0xF943, "M", "弄"), - (0xF944, "M", "籠"), - (0xF945, "M", "聾"), - (0xF946, "M", "牢"), - (0xF947, "M", "磊"), - (0xF948, "M", "賂"), - (0xF949, "M", "雷"), - (0xF94A, "M", "壘"), - (0xF94B, "M", "屢"), - (0xF94C, "M", "樓"), - (0xF94D, "M", "淚"), - (0xF94E, "M", "漏"), - (0xF94F, "M", "累"), - (0xF950, "M", "縷"), - (0xF951, "M", "陋"), - (0xF952, "M", "勒"), - (0xF953, "M", "肋"), - (0xF954, "M", "凜"), - (0xF955, "M", "凌"), - (0xF956, "M", "稜"), - (0xF957, "M", "綾"), - (0xF958, "M", "菱"), - (0xF959, "M", "陵"), - (0xF95A, "M", "讀"), - (0xF95B, "M", "拏"), - (0xF95C, "M", "樂"), - (0xF95D, "M", "諾"), - (0xF95E, "M", "丹"), - (0xF95F, "M", "寧"), - (0xF960, "M", "怒"), - (0xF961, "M", "率"), - (0xF962, "M", "異"), - (0xF963, "M", "北"), - (0xF964, "M", "磻"), - (0xF965, "M", "便"), - (0xF966, "M", "復"), - (0xF967, "M", "不"), - (0xF968, "M", "泌"), - (0xF969, "M", "數"), - (0xF96A, "M", "索"), - (0xF96B, "M", "參"), - (0xF96C, "M", "塞"), - (0xF96D, "M", "省"), - (0xF96E, "M", "葉"), - (0xF96F, "M", "說"), - (0xF970, "M", "殺"), - (0xF971, "M", "辰"), - (0xF972, "M", "沈"), - (0xF973, "M", "拾"), - (0xF974, "M", "若"), - (0xF975, 
"M", "掠"), - (0xF976, "M", "略"), - (0xF977, "M", "亮"), - (0xF978, "M", "兩"), - (0xF979, "M", "凉"), - (0xF97A, "M", "梁"), - (0xF97B, "M", "糧"), - (0xF97C, "M", "良"), - (0xF97D, "M", "諒"), - (0xF97E, "M", "量"), - (0xF97F, "M", "勵"), - (0xF980, "M", "呂"), - (0xF981, "M", "女"), - (0xF982, "M", "廬"), - (0xF983, "M", "旅"), - (0xF984, "M", "濾"), - (0xF985, "M", "礪"), - (0xF986, "M", "閭"), - ] - - -def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF987, "M", "驪"), - (0xF988, "M", "麗"), - (0xF989, "M", "黎"), - (0xF98A, "M", "力"), - (0xF98B, "M", "曆"), - (0xF98C, "M", "歷"), - (0xF98D, "M", "轢"), - (0xF98E, "M", "年"), - (0xF98F, "M", "憐"), - (0xF990, "M", "戀"), - (0xF991, "M", "撚"), - (0xF992, "M", "漣"), - (0xF993, "M", "煉"), - (0xF994, "M", "璉"), - (0xF995, "M", "秊"), - (0xF996, "M", "練"), - (0xF997, "M", "聯"), - (0xF998, "M", "輦"), - (0xF999, "M", "蓮"), - (0xF99A, "M", "連"), - (0xF99B, "M", "鍊"), - (0xF99C, "M", "列"), - (0xF99D, "M", "劣"), - (0xF99E, "M", "咽"), - (0xF99F, "M", "烈"), - (0xF9A0, "M", "裂"), - (0xF9A1, "M", "說"), - (0xF9A2, "M", "廉"), - (0xF9A3, "M", "念"), - (0xF9A4, "M", "捻"), - (0xF9A5, "M", "殮"), - (0xF9A6, "M", "簾"), - (0xF9A7, "M", "獵"), - (0xF9A8, "M", "令"), - (0xF9A9, "M", "囹"), - (0xF9AA, "M", "寧"), - (0xF9AB, "M", "嶺"), - (0xF9AC, "M", "怜"), - (0xF9AD, "M", "玲"), - (0xF9AE, "M", "瑩"), - (0xF9AF, "M", "羚"), - (0xF9B0, "M", "聆"), - (0xF9B1, "M", "鈴"), - (0xF9B2, "M", "零"), - (0xF9B3, "M", "靈"), - (0xF9B4, "M", "領"), - (0xF9B5, "M", "例"), - (0xF9B6, "M", "禮"), - (0xF9B7, "M", "醴"), - (0xF9B8, "M", "隸"), - (0xF9B9, "M", "惡"), - (0xF9BA, "M", "了"), - (0xF9BB, "M", "僚"), - (0xF9BC, "M", "寮"), - (0xF9BD, "M", "尿"), - (0xF9BE, "M", "料"), - (0xF9BF, "M", "樂"), - (0xF9C0, "M", "燎"), - (0xF9C1, "M", "療"), - (0xF9C2, "M", "蓼"), - (0xF9C3, "M", "遼"), - (0xF9C4, "M", "龍"), - (0xF9C5, "M", "暈"), - (0xF9C6, "M", "阮"), - (0xF9C7, "M", "劉"), - (0xF9C8, "M", "杻"), - (0xF9C9, "M", "柳"), - (0xF9CA, "M", "流"), - (0xF9CB, "M", "溜"), - (0xF9CC, "M", "琉"), - (0xF9CD, "M", "留"), - (0xF9CE, "M", "硫"), - (0xF9CF, "M", "紐"), - (0xF9D0, "M", "類"), - (0xF9D1, "M", "六"), - (0xF9D2, "M", "戮"), - (0xF9D3, "M", "陸"), - (0xF9D4, "M", "倫"), - (0xF9D5, "M", "崙"), - (0xF9D6, "M", "淪"), - (0xF9D7, "M", "輪"), - (0xF9D8, "M", "律"), - (0xF9D9, "M", "慄"), - (0xF9DA, "M", "栗"), - (0xF9DB, "M", "率"), - (0xF9DC, "M", "隆"), - (0xF9DD, "M", "利"), - (0xF9DE, "M", "吏"), - (0xF9DF, "M", "履"), - (0xF9E0, "M", "易"), - (0xF9E1, "M", "李"), - (0xF9E2, "M", "梨"), - (0xF9E3, "M", "泥"), - (0xF9E4, "M", "理"), - (0xF9E5, "M", "痢"), - (0xF9E6, "M", "罹"), - (0xF9E7, "M", "裏"), - (0xF9E8, "M", "裡"), - (0xF9E9, "M", "里"), - (0xF9EA, "M", "離"), - ] - - -def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF9EB, "M", "匿"), - (0xF9EC, "M", "溺"), - (0xF9ED, "M", "吝"), - (0xF9EE, "M", "燐"), - (0xF9EF, "M", "璘"), - (0xF9F0, "M", "藺"), - (0xF9F1, "M", "隣"), - (0xF9F2, "M", "鱗"), - (0xF9F3, "M", "麟"), - (0xF9F4, "M", "林"), - (0xF9F5, "M", "淋"), - (0xF9F6, "M", "臨"), - (0xF9F7, "M", "立"), - (0xF9F8, "M", "笠"), - (0xF9F9, "M", "粒"), - (0xF9FA, "M", "狀"), - (0xF9FB, "M", "炙"), - (0xF9FC, "M", "識"), - (0xF9FD, "M", "什"), - (0xF9FE, "M", "茶"), - (0xF9FF, "M", "刺"), - (0xFA00, "M", "切"), - (0xFA01, "M", "度"), - (0xFA02, "M", "拓"), - (0xFA03, "M", "糖"), - (0xFA04, "M", "宅"), - (0xFA05, "M", "洞"), - (0xFA06, "M", "暴"), - (0xFA07, "M", "輻"), - (0xFA08, "M", "行"), - (0xFA09, "M", "降"), - (0xFA0A, "M", "見"), - (0xFA0B, "M", "廓"), - (0xFA0C, "M", "兀"), - (0xFA0D, "M", "嗀"), - (0xFA0E, "V"), - 
(0xFA10, "M", "塚"), - (0xFA11, "V"), - (0xFA12, "M", "晴"), - (0xFA13, "V"), - (0xFA15, "M", "凞"), - (0xFA16, "M", "猪"), - (0xFA17, "M", "益"), - (0xFA18, "M", "礼"), - (0xFA19, "M", "神"), - (0xFA1A, "M", "祥"), - (0xFA1B, "M", "福"), - (0xFA1C, "M", "靖"), - (0xFA1D, "M", "精"), - (0xFA1E, "M", "羽"), - (0xFA1F, "V"), - (0xFA20, "M", "蘒"), - (0xFA21, "V"), - (0xFA22, "M", "諸"), - (0xFA23, "V"), - (0xFA25, "M", "逸"), - (0xFA26, "M", "都"), - (0xFA27, "V"), - (0xFA2A, "M", "飯"), - (0xFA2B, "M", "飼"), - (0xFA2C, "M", "館"), - (0xFA2D, "M", "鶴"), - (0xFA2E, "M", "郞"), - (0xFA2F, "M", "隷"), - (0xFA30, "M", "侮"), - (0xFA31, "M", "僧"), - (0xFA32, "M", "免"), - (0xFA33, "M", "勉"), - (0xFA34, "M", "勤"), - (0xFA35, "M", "卑"), - (0xFA36, "M", "喝"), - (0xFA37, "M", "嘆"), - (0xFA38, "M", "器"), - (0xFA39, "M", "塀"), - (0xFA3A, "M", "墨"), - (0xFA3B, "M", "層"), - (0xFA3C, "M", "屮"), - (0xFA3D, "M", "悔"), - (0xFA3E, "M", "慨"), - (0xFA3F, "M", "憎"), - (0xFA40, "M", "懲"), - (0xFA41, "M", "敏"), - (0xFA42, "M", "既"), - (0xFA43, "M", "暑"), - (0xFA44, "M", "梅"), - (0xFA45, "M", "海"), - (0xFA46, "M", "渚"), - (0xFA47, "M", "漢"), - (0xFA48, "M", "煮"), - (0xFA49, "M", "爫"), - (0xFA4A, "M", "琢"), - (0xFA4B, "M", "碑"), - (0xFA4C, "M", "社"), - (0xFA4D, "M", "祉"), - (0xFA4E, "M", "祈"), - (0xFA4F, "M", "祐"), - (0xFA50, "M", "祖"), - (0xFA51, "M", "祝"), - (0xFA52, "M", "禍"), - (0xFA53, "M", "禎"), - ] - - -def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA54, "M", "穀"), - (0xFA55, "M", "突"), - (0xFA56, "M", "節"), - (0xFA57, "M", "練"), - (0xFA58, "M", "縉"), - (0xFA59, "M", "繁"), - (0xFA5A, "M", "署"), - (0xFA5B, "M", "者"), - (0xFA5C, "M", "臭"), - (0xFA5D, "M", "艹"), - (0xFA5F, "M", "著"), - (0xFA60, "M", "褐"), - (0xFA61, "M", "視"), - (0xFA62, "M", "謁"), - (0xFA63, "M", "謹"), - (0xFA64, "M", "賓"), - (0xFA65, "M", "贈"), - (0xFA66, "M", "辶"), - (0xFA67, "M", "逸"), - (0xFA68, "M", "難"), - (0xFA69, "M", "響"), - (0xFA6A, "M", "頻"), - (0xFA6B, "M", "恵"), - (0xFA6C, "M", "𤋮"), - (0xFA6D, "M", "舘"), - (0xFA6E, "X"), - (0xFA70, "M", "並"), - (0xFA71, "M", "况"), - (0xFA72, "M", "全"), - (0xFA73, "M", "侀"), - (0xFA74, "M", "充"), - (0xFA75, "M", "冀"), - (0xFA76, "M", "勇"), - (0xFA77, "M", "勺"), - (0xFA78, "M", "喝"), - (0xFA79, "M", "啕"), - (0xFA7A, "M", "喙"), - (0xFA7B, "M", "嗢"), - (0xFA7C, "M", "塚"), - (0xFA7D, "M", "墳"), - (0xFA7E, "M", "奄"), - (0xFA7F, "M", "奔"), - (0xFA80, "M", "婢"), - (0xFA81, "M", "嬨"), - (0xFA82, "M", "廒"), - (0xFA83, "M", "廙"), - (0xFA84, "M", "彩"), - (0xFA85, "M", "徭"), - (0xFA86, "M", "惘"), - (0xFA87, "M", "慎"), - (0xFA88, "M", "愈"), - (0xFA89, "M", "憎"), - (0xFA8A, "M", "慠"), - (0xFA8B, "M", "懲"), - (0xFA8C, "M", "戴"), - (0xFA8D, "M", "揄"), - (0xFA8E, "M", "搜"), - (0xFA8F, "M", "摒"), - (0xFA90, "M", "敖"), - (0xFA91, "M", "晴"), - (0xFA92, "M", "朗"), - (0xFA93, "M", "望"), - (0xFA94, "M", "杖"), - (0xFA95, "M", "歹"), - (0xFA96, "M", "殺"), - (0xFA97, "M", "流"), - (0xFA98, "M", "滛"), - (0xFA99, "M", "滋"), - (0xFA9A, "M", "漢"), - (0xFA9B, "M", "瀞"), - (0xFA9C, "M", "煮"), - (0xFA9D, "M", "瞧"), - (0xFA9E, "M", "爵"), - (0xFA9F, "M", "犯"), - (0xFAA0, "M", "猪"), - (0xFAA1, "M", "瑱"), - (0xFAA2, "M", "甆"), - (0xFAA3, "M", "画"), - (0xFAA4, "M", "瘝"), - (0xFAA5, "M", "瘟"), - (0xFAA6, "M", "益"), - (0xFAA7, "M", "盛"), - (0xFAA8, "M", "直"), - (0xFAA9, "M", "睊"), - (0xFAAA, "M", "着"), - (0xFAAB, "M", "磌"), - (0xFAAC, "M", "窱"), - (0xFAAD, "M", "節"), - (0xFAAE, "M", "类"), - (0xFAAF, "M", "絛"), - (0xFAB0, "M", "練"), - (0xFAB1, "M", "缾"), - (0xFAB2, "M", "者"), - (0xFAB3, "M", "荒"), - (0xFAB4, "M", "華"), - 
(0xFAB5, "M", "蝹"), - (0xFAB6, "M", "襁"), - (0xFAB7, "M", "覆"), - (0xFAB8, "M", "視"), - (0xFAB9, "M", "調"), - ] - - -def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFABA, "M", "諸"), - (0xFABB, "M", "請"), - (0xFABC, "M", "謁"), - (0xFABD, "M", "諾"), - (0xFABE, "M", "諭"), - (0xFABF, "M", "謹"), - (0xFAC0, "M", "變"), - (0xFAC1, "M", "贈"), - (0xFAC2, "M", "輸"), - (0xFAC3, "M", "遲"), - (0xFAC4, "M", "醙"), - (0xFAC5, "M", "鉶"), - (0xFAC6, "M", "陼"), - (0xFAC7, "M", "難"), - (0xFAC8, "M", "靖"), - (0xFAC9, "M", "韛"), - (0xFACA, "M", "響"), - (0xFACB, "M", "頋"), - (0xFACC, "M", "頻"), - (0xFACD, "M", "鬒"), - (0xFACE, "M", "龜"), - (0xFACF, "M", "𢡊"), - (0xFAD0, "M", "𢡄"), - (0xFAD1, "M", "𣏕"), - (0xFAD2, "M", "㮝"), - (0xFAD3, "M", "䀘"), - (0xFAD4, "M", "䀹"), - (0xFAD5, "M", "𥉉"), - (0xFAD6, "M", "𥳐"), - (0xFAD7, "M", "𧻓"), - (0xFAD8, "M", "齃"), - (0xFAD9, "M", "龎"), - (0xFADA, "X"), - (0xFB00, "M", "ff"), - (0xFB01, "M", "fi"), - (0xFB02, "M", "fl"), - (0xFB03, "M", "ffi"), - (0xFB04, "M", "ffl"), - (0xFB05, "M", "st"), - (0xFB07, "X"), - (0xFB13, "M", "մն"), - (0xFB14, "M", "մե"), - (0xFB15, "M", "մի"), - (0xFB16, "M", "վն"), - (0xFB17, "M", "մխ"), - (0xFB18, "X"), - (0xFB1D, "M", "יִ"), - (0xFB1E, "V"), - (0xFB1F, "M", "ײַ"), - (0xFB20, "M", "ע"), - (0xFB21, "M", "א"), - (0xFB22, "M", "ד"), - (0xFB23, "M", "ה"), - (0xFB24, "M", "כ"), - (0xFB25, "M", "ל"), - (0xFB26, "M", "ם"), - (0xFB27, "M", "ר"), - (0xFB28, "M", "ת"), - (0xFB29, "M", "+"), - (0xFB2A, "M", "שׁ"), - (0xFB2B, "M", "שׂ"), - (0xFB2C, "M", "שּׁ"), - (0xFB2D, "M", "שּׂ"), - (0xFB2E, "M", "אַ"), - (0xFB2F, "M", "אָ"), - (0xFB30, "M", "אּ"), - (0xFB31, "M", "בּ"), - (0xFB32, "M", "גּ"), - (0xFB33, "M", "דּ"), - (0xFB34, "M", "הּ"), - (0xFB35, "M", "וּ"), - (0xFB36, "M", "זּ"), - (0xFB37, "X"), - (0xFB38, "M", "טּ"), - (0xFB39, "M", "יּ"), - (0xFB3A, "M", "ךּ"), - (0xFB3B, "M", "כּ"), - (0xFB3C, "M", "לּ"), - (0xFB3D, "X"), - (0xFB3E, "M", "מּ"), - (0xFB3F, "X"), - (0xFB40, "M", "נּ"), - (0xFB41, "M", "סּ"), - (0xFB42, "X"), - (0xFB43, "M", "ףּ"), - (0xFB44, "M", "פּ"), - (0xFB45, "X"), - (0xFB46, "M", "צּ"), - (0xFB47, "M", "קּ"), - (0xFB48, "M", "רּ"), - (0xFB49, "M", "שּ"), - (0xFB4A, "M", "תּ"), - (0xFB4B, "M", "וֹ"), - (0xFB4C, "M", "בֿ"), - (0xFB4D, "M", "כֿ"), - (0xFB4E, "M", "פֿ"), - (0xFB4F, "M", "אל"), - (0xFB50, "M", "ٱ"), - (0xFB52, "M", "ٻ"), - (0xFB56, "M", "پ"), - ] - - -def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFB5A, "M", "ڀ"), - (0xFB5E, "M", "ٺ"), - (0xFB62, "M", "ٿ"), - (0xFB66, "M", "ٹ"), - (0xFB6A, "M", "ڤ"), - (0xFB6E, "M", "ڦ"), - (0xFB72, "M", "ڄ"), - (0xFB76, "M", "ڃ"), - (0xFB7A, "M", "چ"), - (0xFB7E, "M", "ڇ"), - (0xFB82, "M", "ڍ"), - (0xFB84, "M", "ڌ"), - (0xFB86, "M", "ڎ"), - (0xFB88, "M", "ڈ"), - (0xFB8A, "M", "ژ"), - (0xFB8C, "M", "ڑ"), - (0xFB8E, "M", "ک"), - (0xFB92, "M", "گ"), - (0xFB96, "M", "ڳ"), - (0xFB9A, "M", "ڱ"), - (0xFB9E, "M", "ں"), - (0xFBA0, "M", "ڻ"), - (0xFBA4, "M", "ۀ"), - (0xFBA6, "M", "ہ"), - (0xFBAA, "M", "ھ"), - (0xFBAE, "M", "ے"), - (0xFBB0, "M", "ۓ"), - (0xFBB2, "V"), - (0xFBC3, "X"), - (0xFBD3, "M", "ڭ"), - (0xFBD7, "M", "ۇ"), - (0xFBD9, "M", "ۆ"), - (0xFBDB, "M", "ۈ"), - (0xFBDD, "M", "ۇٴ"), - (0xFBDE, "M", "ۋ"), - (0xFBE0, "M", "ۅ"), - (0xFBE2, "M", "ۉ"), - (0xFBE4, "M", "ې"), - (0xFBE8, "M", "ى"), - (0xFBEA, "M", "ئا"), - (0xFBEC, "M", "ئە"), - (0xFBEE, "M", "ئو"), - (0xFBF0, "M", "ئۇ"), - (0xFBF2, "M", "ئۆ"), - (0xFBF4, "M", "ئۈ"), - (0xFBF6, "M", "ئې"), - (0xFBF9, "M", "ئى"), - (0xFBFC, "M", "ی"), - 
(0xFC00, "M", "ئج"), - (0xFC01, "M", "ئح"), - (0xFC02, "M", "ئم"), - (0xFC03, "M", "ئى"), - (0xFC04, "M", "ئي"), - (0xFC05, "M", "بج"), - (0xFC06, "M", "بح"), - (0xFC07, "M", "بخ"), - (0xFC08, "M", "بم"), - (0xFC09, "M", "بى"), - (0xFC0A, "M", "بي"), - (0xFC0B, "M", "تج"), - (0xFC0C, "M", "تح"), - (0xFC0D, "M", "تخ"), - (0xFC0E, "M", "تم"), - (0xFC0F, "M", "تى"), - (0xFC10, "M", "تي"), - (0xFC11, "M", "ثج"), - (0xFC12, "M", "ثم"), - (0xFC13, "M", "ثى"), - (0xFC14, "M", "ثي"), - (0xFC15, "M", "جح"), - (0xFC16, "M", "جم"), - (0xFC17, "M", "حج"), - (0xFC18, "M", "حم"), - (0xFC19, "M", "خج"), - (0xFC1A, "M", "خح"), - (0xFC1B, "M", "خم"), - (0xFC1C, "M", "سج"), - (0xFC1D, "M", "سح"), - (0xFC1E, "M", "سخ"), - (0xFC1F, "M", "سم"), - (0xFC20, "M", "صح"), - (0xFC21, "M", "صم"), - (0xFC22, "M", "ضج"), - (0xFC23, "M", "ضح"), - (0xFC24, "M", "ضخ"), - (0xFC25, "M", "ضم"), - (0xFC26, "M", "طح"), - (0xFC27, "M", "طم"), - (0xFC28, "M", "ظم"), - (0xFC29, "M", "عج"), - (0xFC2A, "M", "عم"), - (0xFC2B, "M", "غج"), - (0xFC2C, "M", "غم"), - (0xFC2D, "M", "فج"), - (0xFC2E, "M", "فح"), - (0xFC2F, "M", "فخ"), - (0xFC30, "M", "فم"), - (0xFC31, "M", "فى"), - (0xFC32, "M", "في"), - (0xFC33, "M", "قح"), - ] - - -def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFC34, "M", "قم"), - (0xFC35, "M", "قى"), - (0xFC36, "M", "قي"), - (0xFC37, "M", "كا"), - (0xFC38, "M", "كج"), - (0xFC39, "M", "كح"), - (0xFC3A, "M", "كخ"), - (0xFC3B, "M", "كل"), - (0xFC3C, "M", "كم"), - (0xFC3D, "M", "كى"), - (0xFC3E, "M", "كي"), - (0xFC3F, "M", "لج"), - (0xFC40, "M", "لح"), - (0xFC41, "M", "لخ"), - (0xFC42, "M", "لم"), - (0xFC43, "M", "لى"), - (0xFC44, "M", "لي"), - (0xFC45, "M", "مج"), - (0xFC46, "M", "مح"), - (0xFC47, "M", "مخ"), - (0xFC48, "M", "مم"), - (0xFC49, "M", "مى"), - (0xFC4A, "M", "مي"), - (0xFC4B, "M", "نج"), - (0xFC4C, "M", "نح"), - (0xFC4D, "M", "نخ"), - (0xFC4E, "M", "نم"), - (0xFC4F, "M", "نى"), - (0xFC50, "M", "ني"), - (0xFC51, "M", "هج"), - (0xFC52, "M", "هم"), - (0xFC53, "M", "هى"), - (0xFC54, "M", "هي"), - (0xFC55, "M", "يج"), - (0xFC56, "M", "يح"), - (0xFC57, "M", "يخ"), - (0xFC58, "M", "يم"), - (0xFC59, "M", "يى"), - (0xFC5A, "M", "يي"), - (0xFC5B, "M", "ذٰ"), - (0xFC5C, "M", "رٰ"), - (0xFC5D, "M", "ىٰ"), - (0xFC5E, "M", " ٌّ"), - (0xFC5F, "M", " ٍّ"), - (0xFC60, "M", " َّ"), - (0xFC61, "M", " ُّ"), - (0xFC62, "M", " ِّ"), - (0xFC63, "M", " ّٰ"), - (0xFC64, "M", "ئر"), - (0xFC65, "M", "ئز"), - (0xFC66, "M", "ئم"), - (0xFC67, "M", "ئن"), - (0xFC68, "M", "ئى"), - (0xFC69, "M", "ئي"), - (0xFC6A, "M", "بر"), - (0xFC6B, "M", "بز"), - (0xFC6C, "M", "بم"), - (0xFC6D, "M", "بن"), - (0xFC6E, "M", "بى"), - (0xFC6F, "M", "بي"), - (0xFC70, "M", "تر"), - (0xFC71, "M", "تز"), - (0xFC72, "M", "تم"), - (0xFC73, "M", "تن"), - (0xFC74, "M", "تى"), - (0xFC75, "M", "تي"), - (0xFC76, "M", "ثر"), - (0xFC77, "M", "ثز"), - (0xFC78, "M", "ثم"), - (0xFC79, "M", "ثن"), - (0xFC7A, "M", "ثى"), - (0xFC7B, "M", "ثي"), - (0xFC7C, "M", "فى"), - (0xFC7D, "M", "في"), - (0xFC7E, "M", "قى"), - (0xFC7F, "M", "قي"), - (0xFC80, "M", "كا"), - (0xFC81, "M", "كل"), - (0xFC82, "M", "كم"), - (0xFC83, "M", "كى"), - (0xFC84, "M", "كي"), - (0xFC85, "M", "لم"), - (0xFC86, "M", "لى"), - (0xFC87, "M", "لي"), - (0xFC88, "M", "ما"), - (0xFC89, "M", "مم"), - (0xFC8A, "M", "نر"), - (0xFC8B, "M", "نز"), - (0xFC8C, "M", "نم"), - (0xFC8D, "M", "نن"), - (0xFC8E, "M", "نى"), - (0xFC8F, "M", "ني"), - (0xFC90, "M", "ىٰ"), - (0xFC91, "M", "ير"), - (0xFC92, "M", "يز"), - (0xFC93, "M", "يم"), - (0xFC94, "M", "ين"), - (0xFC95, "M", "يى"), - (0xFC96, 
"M", "يي"), - (0xFC97, "M", "ئج"), - ] - - -def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFC98, "M", "ئح"), - (0xFC99, "M", "ئخ"), - (0xFC9A, "M", "ئم"), - (0xFC9B, "M", "ئه"), - (0xFC9C, "M", "بج"), - (0xFC9D, "M", "بح"), - (0xFC9E, "M", "بخ"), - (0xFC9F, "M", "بم"), - (0xFCA0, "M", "به"), - (0xFCA1, "M", "تج"), - (0xFCA2, "M", "تح"), - (0xFCA3, "M", "تخ"), - (0xFCA4, "M", "تم"), - (0xFCA5, "M", "ته"), - (0xFCA6, "M", "ثم"), - (0xFCA7, "M", "جح"), - (0xFCA8, "M", "جم"), - (0xFCA9, "M", "حج"), - (0xFCAA, "M", "حم"), - (0xFCAB, "M", "خج"), - (0xFCAC, "M", "خم"), - (0xFCAD, "M", "سج"), - (0xFCAE, "M", "سح"), - (0xFCAF, "M", "سخ"), - (0xFCB0, "M", "سم"), - (0xFCB1, "M", "صح"), - (0xFCB2, "M", "صخ"), - (0xFCB3, "M", "صم"), - (0xFCB4, "M", "ضج"), - (0xFCB5, "M", "ضح"), - (0xFCB6, "M", "ضخ"), - (0xFCB7, "M", "ضم"), - (0xFCB8, "M", "طح"), - (0xFCB9, "M", "ظم"), - (0xFCBA, "M", "عج"), - (0xFCBB, "M", "عم"), - (0xFCBC, "M", "غج"), - (0xFCBD, "M", "غم"), - (0xFCBE, "M", "فج"), - (0xFCBF, "M", "فح"), - (0xFCC0, "M", "فخ"), - (0xFCC1, "M", "فم"), - (0xFCC2, "M", "قح"), - (0xFCC3, "M", "قم"), - (0xFCC4, "M", "كج"), - (0xFCC5, "M", "كح"), - (0xFCC6, "M", "كخ"), - (0xFCC7, "M", "كل"), - (0xFCC8, "M", "كم"), - (0xFCC9, "M", "لج"), - (0xFCCA, "M", "لح"), - (0xFCCB, "M", "لخ"), - (0xFCCC, "M", "لم"), - (0xFCCD, "M", "له"), - (0xFCCE, "M", "مج"), - (0xFCCF, "M", "مح"), - (0xFCD0, "M", "مخ"), - (0xFCD1, "M", "مم"), - (0xFCD2, "M", "نج"), - (0xFCD3, "M", "نح"), - (0xFCD4, "M", "نخ"), - (0xFCD5, "M", "نم"), - (0xFCD6, "M", "نه"), - (0xFCD7, "M", "هج"), - (0xFCD8, "M", "هم"), - (0xFCD9, "M", "هٰ"), - (0xFCDA, "M", "يج"), - (0xFCDB, "M", "يح"), - (0xFCDC, "M", "يخ"), - (0xFCDD, "M", "يم"), - (0xFCDE, "M", "يه"), - (0xFCDF, "M", "ئم"), - (0xFCE0, "M", "ئه"), - (0xFCE1, "M", "بم"), - (0xFCE2, "M", "به"), - (0xFCE3, "M", "تم"), - (0xFCE4, "M", "ته"), - (0xFCE5, "M", "ثم"), - (0xFCE6, "M", "ثه"), - (0xFCE7, "M", "سم"), - (0xFCE8, "M", "سه"), - (0xFCE9, "M", "شم"), - (0xFCEA, "M", "شه"), - (0xFCEB, "M", "كل"), - (0xFCEC, "M", "كم"), - (0xFCED, "M", "لم"), - (0xFCEE, "M", "نم"), - (0xFCEF, "M", "نه"), - (0xFCF0, "M", "يم"), - (0xFCF1, "M", "يه"), - (0xFCF2, "M", "ـَّ"), - (0xFCF3, "M", "ـُّ"), - (0xFCF4, "M", "ـِّ"), - (0xFCF5, "M", "طى"), - (0xFCF6, "M", "طي"), - (0xFCF7, "M", "عى"), - (0xFCF8, "M", "عي"), - (0xFCF9, "M", "غى"), - (0xFCFA, "M", "غي"), - (0xFCFB, "M", "سى"), - ] - - -def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFCFC, "M", "سي"), - (0xFCFD, "M", "شى"), - (0xFCFE, "M", "شي"), - (0xFCFF, "M", "حى"), - (0xFD00, "M", "حي"), - (0xFD01, "M", "جى"), - (0xFD02, "M", "جي"), - (0xFD03, "M", "خى"), - (0xFD04, "M", "خي"), - (0xFD05, "M", "صى"), - (0xFD06, "M", "صي"), - (0xFD07, "M", "ضى"), - (0xFD08, "M", "ضي"), - (0xFD09, "M", "شج"), - (0xFD0A, "M", "شح"), - (0xFD0B, "M", "شخ"), - (0xFD0C, "M", "شم"), - (0xFD0D, "M", "شر"), - (0xFD0E, "M", "سر"), - (0xFD0F, "M", "صر"), - (0xFD10, "M", "ضر"), - (0xFD11, "M", "طى"), - (0xFD12, "M", "طي"), - (0xFD13, "M", "عى"), - (0xFD14, "M", "عي"), - (0xFD15, "M", "غى"), - (0xFD16, "M", "غي"), - (0xFD17, "M", "سى"), - (0xFD18, "M", "سي"), - (0xFD19, "M", "شى"), - (0xFD1A, "M", "شي"), - (0xFD1B, "M", "حى"), - (0xFD1C, "M", "حي"), - (0xFD1D, "M", "جى"), - (0xFD1E, "M", "جي"), - (0xFD1F, "M", "خى"), - (0xFD20, "M", "خي"), - (0xFD21, "M", "صى"), - (0xFD22, "M", "صي"), - (0xFD23, "M", "ضى"), - (0xFD24, "M", "ضي"), - (0xFD25, "M", "شج"), - (0xFD26, "M", "شح"), - (0xFD27, "M", "شخ"), - (0xFD28, "M", "شم"), - 
(0xFD29, "M", "شر"), - (0xFD2A, "M", "سر"), - (0xFD2B, "M", "صر"), - (0xFD2C, "M", "ضر"), - (0xFD2D, "M", "شج"), - (0xFD2E, "M", "شح"), - (0xFD2F, "M", "شخ"), - (0xFD30, "M", "شم"), - (0xFD31, "M", "سه"), - (0xFD32, "M", "شه"), - (0xFD33, "M", "طم"), - (0xFD34, "M", "سج"), - (0xFD35, "M", "سح"), - (0xFD36, "M", "سخ"), - (0xFD37, "M", "شج"), - (0xFD38, "M", "شح"), - (0xFD39, "M", "شخ"), - (0xFD3A, "M", "طم"), - (0xFD3B, "M", "ظم"), - (0xFD3C, "M", "اً"), - (0xFD3E, "V"), - (0xFD50, "M", "تجم"), - (0xFD51, "M", "تحج"), - (0xFD53, "M", "تحم"), - (0xFD54, "M", "تخم"), - (0xFD55, "M", "تمج"), - (0xFD56, "M", "تمح"), - (0xFD57, "M", "تمخ"), - (0xFD58, "M", "جمح"), - (0xFD5A, "M", "حمي"), - (0xFD5B, "M", "حمى"), - (0xFD5C, "M", "سحج"), - (0xFD5D, "M", "سجح"), - (0xFD5E, "M", "سجى"), - (0xFD5F, "M", "سمح"), - (0xFD61, "M", "سمج"), - (0xFD62, "M", "سمم"), - (0xFD64, "M", "صحح"), - (0xFD66, "M", "صمم"), - (0xFD67, "M", "شحم"), - (0xFD69, "M", "شجي"), - (0xFD6A, "M", "شمخ"), - (0xFD6C, "M", "شمم"), - (0xFD6E, "M", "ضحى"), - (0xFD6F, "M", "ضخم"), - (0xFD71, "M", "طمح"), - (0xFD73, "M", "طمم"), - (0xFD74, "M", "طمي"), - (0xFD75, "M", "عجم"), - (0xFD76, "M", "عمم"), - (0xFD78, "M", "عمى"), - (0xFD79, "M", "غمم"), - (0xFD7A, "M", "غمي"), - (0xFD7B, "M", "غمى"), - (0xFD7C, "M", "فخم"), - ] - - -def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFD7E, "M", "قمح"), - (0xFD7F, "M", "قمم"), - (0xFD80, "M", "لحم"), - (0xFD81, "M", "لحي"), - (0xFD82, "M", "لحى"), - (0xFD83, "M", "لجج"), - (0xFD85, "M", "لخم"), - (0xFD87, "M", "لمح"), - (0xFD89, "M", "محج"), - (0xFD8A, "M", "محم"), - (0xFD8B, "M", "محي"), - (0xFD8C, "M", "مجح"), - (0xFD8D, "M", "مجم"), - (0xFD8E, "M", "مخج"), - (0xFD8F, "M", "مخم"), - (0xFD90, "X"), - (0xFD92, "M", "مجخ"), - (0xFD93, "M", "همج"), - (0xFD94, "M", "همم"), - (0xFD95, "M", "نحم"), - (0xFD96, "M", "نحى"), - (0xFD97, "M", "نجم"), - (0xFD99, "M", "نجى"), - (0xFD9A, "M", "نمي"), - (0xFD9B, "M", "نمى"), - (0xFD9C, "M", "يمم"), - (0xFD9E, "M", "بخي"), - (0xFD9F, "M", "تجي"), - (0xFDA0, "M", "تجى"), - (0xFDA1, "M", "تخي"), - (0xFDA2, "M", "تخى"), - (0xFDA3, "M", "تمي"), - (0xFDA4, "M", "تمى"), - (0xFDA5, "M", "جمي"), - (0xFDA6, "M", "جحى"), - (0xFDA7, "M", "جمى"), - (0xFDA8, "M", "سخى"), - (0xFDA9, "M", "صحي"), - (0xFDAA, "M", "شحي"), - (0xFDAB, "M", "ضحي"), - (0xFDAC, "M", "لجي"), - (0xFDAD, "M", "لمي"), - (0xFDAE, "M", "يحي"), - (0xFDAF, "M", "يجي"), - (0xFDB0, "M", "يمي"), - (0xFDB1, "M", "ممي"), - (0xFDB2, "M", "قمي"), - (0xFDB3, "M", "نحي"), - (0xFDB4, "M", "قمح"), - (0xFDB5, "M", "لحم"), - (0xFDB6, "M", "عمي"), - (0xFDB7, "M", "كمي"), - (0xFDB8, "M", "نجح"), - (0xFDB9, "M", "مخي"), - (0xFDBA, "M", "لجم"), - (0xFDBB, "M", "كمم"), - (0xFDBC, "M", "لجم"), - (0xFDBD, "M", "نجح"), - (0xFDBE, "M", "جحي"), - (0xFDBF, "M", "حجي"), - (0xFDC0, "M", "مجي"), - (0xFDC1, "M", "فمي"), - (0xFDC2, "M", "بحي"), - (0xFDC3, "M", "كمم"), - (0xFDC4, "M", "عجم"), - (0xFDC5, "M", "صمم"), - (0xFDC6, "M", "سخي"), - (0xFDC7, "M", "نجي"), - (0xFDC8, "X"), - (0xFDCF, "V"), - (0xFDD0, "X"), - (0xFDF0, "M", "صلے"), - (0xFDF1, "M", "قلے"), - (0xFDF2, "M", "الله"), - (0xFDF3, "M", "اكبر"), - (0xFDF4, "M", "محمد"), - (0xFDF5, "M", "صلعم"), - (0xFDF6, "M", "رسول"), - (0xFDF7, "M", "عليه"), - (0xFDF8, "M", "وسلم"), - (0xFDF9, "M", "صلى"), - (0xFDFA, "M", "صلى الله عليه وسلم"), - (0xFDFB, "M", "جل جلاله"), - (0xFDFC, "M", "ریال"), - (0xFDFD, "V"), - (0xFE00, "I"), - (0xFE10, "M", ","), - (0xFE11, "M", "、"), - (0xFE12, "X"), - (0xFE13, "M", ":"), - (0xFE14, "M", ";"), - (0xFE15, "M", 
"!"), - (0xFE16, "M", "?"), - (0xFE17, "M", "〖"), - (0xFE18, "M", "〗"), - (0xFE19, "X"), - (0xFE20, "V"), - (0xFE30, "X"), - (0xFE31, "M", "—"), - (0xFE32, "M", "–"), - ] - - -def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFE33, "M", "_"), - (0xFE35, "M", "("), - (0xFE36, "M", ")"), - (0xFE37, "M", "{"), - (0xFE38, "M", "}"), - (0xFE39, "M", "〔"), - (0xFE3A, "M", "〕"), - (0xFE3B, "M", "【"), - (0xFE3C, "M", "】"), - (0xFE3D, "M", "《"), - (0xFE3E, "M", "》"), - (0xFE3F, "M", "〈"), - (0xFE40, "M", "〉"), - (0xFE41, "M", "「"), - (0xFE42, "M", "」"), - (0xFE43, "M", "『"), - (0xFE44, "M", "』"), - (0xFE45, "V"), - (0xFE47, "M", "["), - (0xFE48, "M", "]"), - (0xFE49, "M", " ̅"), - (0xFE4D, "M", "_"), - (0xFE50, "M", ","), - (0xFE51, "M", "、"), - (0xFE52, "X"), - (0xFE54, "M", ";"), - (0xFE55, "M", ":"), - (0xFE56, "M", "?"), - (0xFE57, "M", "!"), - (0xFE58, "M", "—"), - (0xFE59, "M", "("), - (0xFE5A, "M", ")"), - (0xFE5B, "M", "{"), - (0xFE5C, "M", "}"), - (0xFE5D, "M", "〔"), - (0xFE5E, "M", "〕"), - (0xFE5F, "M", "#"), - (0xFE60, "M", "&"), - (0xFE61, "M", "*"), - (0xFE62, "M", "+"), - (0xFE63, "M", "-"), - (0xFE64, "M", "<"), - (0xFE65, "M", ">"), - (0xFE66, "M", "="), - (0xFE67, "X"), - (0xFE68, "M", "\\"), - (0xFE69, "M", "$"), - (0xFE6A, "M", "%"), - (0xFE6B, "M", "@"), - (0xFE6C, "X"), - (0xFE70, "M", " ً"), - (0xFE71, "M", "ـً"), - (0xFE72, "M", " ٌ"), - (0xFE73, "V"), - (0xFE74, "M", " ٍ"), - (0xFE75, "X"), - (0xFE76, "M", " َ"), - (0xFE77, "M", "ـَ"), - (0xFE78, "M", " ُ"), - (0xFE79, "M", "ـُ"), - (0xFE7A, "M", " ِ"), - (0xFE7B, "M", "ـِ"), - (0xFE7C, "M", " ّ"), - (0xFE7D, "M", "ـّ"), - (0xFE7E, "M", " ْ"), - (0xFE7F, "M", "ـْ"), - (0xFE80, "M", "ء"), - (0xFE81, "M", "آ"), - (0xFE83, "M", "أ"), - (0xFE85, "M", "ؤ"), - (0xFE87, "M", "إ"), - (0xFE89, "M", "ئ"), - (0xFE8D, "M", "ا"), - (0xFE8F, "M", "ب"), - (0xFE93, "M", "ة"), - (0xFE95, "M", "ت"), - (0xFE99, "M", "ث"), - (0xFE9D, "M", "ج"), - (0xFEA1, "M", "ح"), - (0xFEA5, "M", "خ"), - (0xFEA9, "M", "د"), - (0xFEAB, "M", "ذ"), - (0xFEAD, "M", "ر"), - (0xFEAF, "M", "ز"), - (0xFEB1, "M", "س"), - (0xFEB5, "M", "ش"), - (0xFEB9, "M", "ص"), - (0xFEBD, "M", "ض"), - (0xFEC1, "M", "ط"), - (0xFEC5, "M", "ظ"), - (0xFEC9, "M", "ع"), - (0xFECD, "M", "غ"), - (0xFED1, "M", "ف"), - (0xFED5, "M", "ق"), - (0xFED9, "M", "ك"), - (0xFEDD, "M", "ل"), - (0xFEE1, "M", "م"), - (0xFEE5, "M", "ن"), - (0xFEE9, "M", "ه"), - (0xFEED, "M", "و"), - ] - - -def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFEEF, "M", "ى"), - (0xFEF1, "M", "ي"), - (0xFEF5, "M", "لآ"), - (0xFEF7, "M", "لأ"), - (0xFEF9, "M", "لإ"), - (0xFEFB, "M", "لا"), - (0xFEFD, "X"), - (0xFEFF, "I"), - (0xFF00, "X"), - (0xFF01, "M", "!"), - (0xFF02, "M", '"'), - (0xFF03, "M", "#"), - (0xFF04, "M", "$"), - (0xFF05, "M", "%"), - (0xFF06, "M", "&"), - (0xFF07, "M", "'"), - (0xFF08, "M", "("), - (0xFF09, "M", ")"), - (0xFF0A, "M", "*"), - (0xFF0B, "M", "+"), - (0xFF0C, "M", ","), - (0xFF0D, "M", "-"), - (0xFF0E, "M", "."), - (0xFF0F, "M", "/"), - (0xFF10, "M", "0"), - (0xFF11, "M", "1"), - (0xFF12, "M", "2"), - (0xFF13, "M", "3"), - (0xFF14, "M", "4"), - (0xFF15, "M", "5"), - (0xFF16, "M", "6"), - (0xFF17, "M", "7"), - (0xFF18, "M", "8"), - (0xFF19, "M", "9"), - (0xFF1A, "M", ":"), - (0xFF1B, "M", ";"), - (0xFF1C, "M", "<"), - (0xFF1D, "M", "="), - (0xFF1E, "M", ">"), - (0xFF1F, "M", "?"), - (0xFF20, "M", "@"), - (0xFF21, "M", "a"), - (0xFF22, "M", "b"), - (0xFF23, "M", "c"), - (0xFF24, "M", "d"), - (0xFF25, "M", "e"), - (0xFF26, "M", "f"), 
- (0xFF27, "M", "g"), - (0xFF28, "M", "h"), - (0xFF29, "M", "i"), - (0xFF2A, "M", "j"), - (0xFF2B, "M", "k"), - (0xFF2C, "M", "l"), - (0xFF2D, "M", "m"), - (0xFF2E, "M", "n"), - (0xFF2F, "M", "o"), - (0xFF30, "M", "p"), - (0xFF31, "M", "q"), - (0xFF32, "M", "r"), - (0xFF33, "M", "s"), - (0xFF34, "M", "t"), - (0xFF35, "M", "u"), - (0xFF36, "M", "v"), - (0xFF37, "M", "w"), - (0xFF38, "M", "x"), - (0xFF39, "M", "y"), - (0xFF3A, "M", "z"), - (0xFF3B, "M", "["), - (0xFF3C, "M", "\\"), - (0xFF3D, "M", "]"), - (0xFF3E, "M", "^"), - (0xFF3F, "M", "_"), - (0xFF40, "M", "`"), - (0xFF41, "M", "a"), - (0xFF42, "M", "b"), - (0xFF43, "M", "c"), - (0xFF44, "M", "d"), - (0xFF45, "M", "e"), - (0xFF46, "M", "f"), - (0xFF47, "M", "g"), - (0xFF48, "M", "h"), - (0xFF49, "M", "i"), - (0xFF4A, "M", "j"), - (0xFF4B, "M", "k"), - (0xFF4C, "M", "l"), - (0xFF4D, "M", "m"), - (0xFF4E, "M", "n"), - (0xFF4F, "M", "o"), - (0xFF50, "M", "p"), - (0xFF51, "M", "q"), - (0xFF52, "M", "r"), - (0xFF53, "M", "s"), - (0xFF54, "M", "t"), - (0xFF55, "M", "u"), - (0xFF56, "M", "v"), - (0xFF57, "M", "w"), - (0xFF58, "M", "x"), - (0xFF59, "M", "y"), - (0xFF5A, "M", "z"), - (0xFF5B, "M", "{"), - ] - - -def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF5C, "M", "|"), - (0xFF5D, "M", "}"), - (0xFF5E, "M", "~"), - (0xFF5F, "M", "⦅"), - (0xFF60, "M", "⦆"), - (0xFF61, "M", "."), - (0xFF62, "M", "「"), - (0xFF63, "M", "」"), - (0xFF64, "M", "、"), - (0xFF65, "M", "・"), - (0xFF66, "M", "ヲ"), - (0xFF67, "M", "ァ"), - (0xFF68, "M", "ィ"), - (0xFF69, "M", "ゥ"), - (0xFF6A, "M", "ェ"), - (0xFF6B, "M", "ォ"), - (0xFF6C, "M", "ャ"), - (0xFF6D, "M", "ュ"), - (0xFF6E, "M", "ョ"), - (0xFF6F, "M", "ッ"), - (0xFF70, "M", "ー"), - (0xFF71, "M", "ア"), - (0xFF72, "M", "イ"), - (0xFF73, "M", "ウ"), - (0xFF74, "M", "エ"), - (0xFF75, "M", "オ"), - (0xFF76, "M", "カ"), - (0xFF77, "M", "キ"), - (0xFF78, "M", "ク"), - (0xFF79, "M", "ケ"), - (0xFF7A, "M", "コ"), - (0xFF7B, "M", "サ"), - (0xFF7C, "M", "シ"), - (0xFF7D, "M", "ス"), - (0xFF7E, "M", "セ"), - (0xFF7F, "M", "ソ"), - (0xFF80, "M", "タ"), - (0xFF81, "M", "チ"), - (0xFF82, "M", "ツ"), - (0xFF83, "M", "テ"), - (0xFF84, "M", "ト"), - (0xFF85, "M", "ナ"), - (0xFF86, "M", "ニ"), - (0xFF87, "M", "ヌ"), - (0xFF88, "M", "ネ"), - (0xFF89, "M", "ノ"), - (0xFF8A, "M", "ハ"), - (0xFF8B, "M", "ヒ"), - (0xFF8C, "M", "フ"), - (0xFF8D, "M", "ヘ"), - (0xFF8E, "M", "ホ"), - (0xFF8F, "M", "マ"), - (0xFF90, "M", "ミ"), - (0xFF91, "M", "ム"), - (0xFF92, "M", "メ"), - (0xFF93, "M", "モ"), - (0xFF94, "M", "ヤ"), - (0xFF95, "M", "ユ"), - (0xFF96, "M", "ヨ"), - (0xFF97, "M", "ラ"), - (0xFF98, "M", "リ"), - (0xFF99, "M", "ル"), - (0xFF9A, "M", "レ"), - (0xFF9B, "M", "ロ"), - (0xFF9C, "M", "ワ"), - (0xFF9D, "M", "ン"), - (0xFF9E, "M", "゙"), - (0xFF9F, "M", "゚"), - (0xFFA0, "I"), - (0xFFA1, "M", "ᄀ"), - (0xFFA2, "M", "ᄁ"), - (0xFFA3, "M", "ᆪ"), - (0xFFA4, "M", "ᄂ"), - (0xFFA5, "M", "ᆬ"), - (0xFFA6, "M", "ᆭ"), - (0xFFA7, "M", "ᄃ"), - (0xFFA8, "M", "ᄄ"), - (0xFFA9, "M", "ᄅ"), - (0xFFAA, "M", "ᆰ"), - (0xFFAB, "M", "ᆱ"), - (0xFFAC, "M", "ᆲ"), - (0xFFAD, "M", "ᆳ"), - (0xFFAE, "M", "ᆴ"), - (0xFFAF, "M", "ᆵ"), - (0xFFB0, "M", "ᄚ"), - (0xFFB1, "M", "ᄆ"), - (0xFFB2, "M", "ᄇ"), - (0xFFB3, "M", "ᄈ"), - (0xFFB4, "M", "ᄡ"), - (0xFFB5, "M", "ᄉ"), - (0xFFB6, "M", "ᄊ"), - (0xFFB7, "M", "ᄋ"), - (0xFFB8, "M", "ᄌ"), - (0xFFB9, "M", "ᄍ"), - (0xFFBA, "M", "ᄎ"), - (0xFFBB, "M", "ᄏ"), - (0xFFBC, "M", "ᄐ"), - (0xFFBD, "M", "ᄑ"), - (0xFFBE, "M", "ᄒ"), - (0xFFBF, "X"), - ] - - -def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFFC2, "M", 
"ᅡ"), - (0xFFC3, "M", "ᅢ"), - (0xFFC4, "M", "ᅣ"), - (0xFFC5, "M", "ᅤ"), - (0xFFC6, "M", "ᅥ"), - (0xFFC7, "M", "ᅦ"), - (0xFFC8, "X"), - (0xFFCA, "M", "ᅧ"), - (0xFFCB, "M", "ᅨ"), - (0xFFCC, "M", "ᅩ"), - (0xFFCD, "M", "ᅪ"), - (0xFFCE, "M", "ᅫ"), - (0xFFCF, "M", "ᅬ"), - (0xFFD0, "X"), - (0xFFD2, "M", "ᅭ"), - (0xFFD3, "M", "ᅮ"), - (0xFFD4, "M", "ᅯ"), - (0xFFD5, "M", "ᅰ"), - (0xFFD6, "M", "ᅱ"), - (0xFFD7, "M", "ᅲ"), - (0xFFD8, "X"), - (0xFFDA, "M", "ᅳ"), - (0xFFDB, "M", "ᅴ"), - (0xFFDC, "M", "ᅵ"), - (0xFFDD, "X"), - (0xFFE0, "M", "¢"), - (0xFFE1, "M", "£"), - (0xFFE2, "M", "¬"), - (0xFFE3, "M", " ̄"), - (0xFFE4, "M", "¦"), - (0xFFE5, "M", "¥"), - (0xFFE6, "M", "₩"), - (0xFFE7, "X"), - (0xFFE8, "M", "│"), - (0xFFE9, "M", "←"), - (0xFFEA, "M", "↑"), - (0xFFEB, "M", "→"), - (0xFFEC, "M", "↓"), - (0xFFED, "M", "■"), - (0xFFEE, "M", "○"), - (0xFFEF, "X"), - (0x10000, "V"), - (0x1000C, "X"), - (0x1000D, "V"), - (0x10027, "X"), - (0x10028, "V"), - (0x1003B, "X"), - (0x1003C, "V"), - (0x1003E, "X"), - (0x1003F, "V"), - (0x1004E, "X"), - (0x10050, "V"), - (0x1005E, "X"), - (0x10080, "V"), - (0x100FB, "X"), - (0x10100, "V"), - (0x10103, "X"), - (0x10107, "V"), - (0x10134, "X"), - (0x10137, "V"), - (0x1018F, "X"), - (0x10190, "V"), - (0x1019D, "X"), - (0x101A0, "V"), - (0x101A1, "X"), - (0x101D0, "V"), - (0x101FE, "X"), - (0x10280, "V"), - (0x1029D, "X"), - (0x102A0, "V"), - (0x102D1, "X"), - (0x102E0, "V"), - (0x102FC, "X"), - (0x10300, "V"), - (0x10324, "X"), - (0x1032D, "V"), - (0x1034B, "X"), - (0x10350, "V"), - (0x1037B, "X"), - (0x10380, "V"), - (0x1039E, "X"), - (0x1039F, "V"), - (0x103C4, "X"), - (0x103C8, "V"), - (0x103D6, "X"), - (0x10400, "M", "𐐨"), - (0x10401, "M", "𐐩"), - (0x10402, "M", "𐐪"), - (0x10403, "M", "𐐫"), - (0x10404, "M", "𐐬"), - (0x10405, "M", "𐐭"), - (0x10406, "M", "𐐮"), - (0x10407, "M", "𐐯"), - (0x10408, "M", "𐐰"), - (0x10409, "M", "𐐱"), - (0x1040A, "M", "𐐲"), - (0x1040B, "M", "𐐳"), - (0x1040C, "M", "𐐴"), - (0x1040D, "M", "𐐵"), - (0x1040E, "M", "𐐶"), - ] - - -def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1040F, "M", "𐐷"), - (0x10410, "M", "𐐸"), - (0x10411, "M", "𐐹"), - (0x10412, "M", "𐐺"), - (0x10413, "M", "𐐻"), - (0x10414, "M", "𐐼"), - (0x10415, "M", "𐐽"), - (0x10416, "M", "𐐾"), - (0x10417, "M", "𐐿"), - (0x10418, "M", "𐑀"), - (0x10419, "M", "𐑁"), - (0x1041A, "M", "𐑂"), - (0x1041B, "M", "𐑃"), - (0x1041C, "M", "𐑄"), - (0x1041D, "M", "𐑅"), - (0x1041E, "M", "𐑆"), - (0x1041F, "M", "𐑇"), - (0x10420, "M", "𐑈"), - (0x10421, "M", "𐑉"), - (0x10422, "M", "𐑊"), - (0x10423, "M", "𐑋"), - (0x10424, "M", "𐑌"), - (0x10425, "M", "𐑍"), - (0x10426, "M", "𐑎"), - (0x10427, "M", "𐑏"), - (0x10428, "V"), - (0x1049E, "X"), - (0x104A0, "V"), - (0x104AA, "X"), - (0x104B0, "M", "𐓘"), - (0x104B1, "M", "𐓙"), - (0x104B2, "M", "𐓚"), - (0x104B3, "M", "𐓛"), - (0x104B4, "M", "𐓜"), - (0x104B5, "M", "𐓝"), - (0x104B6, "M", "𐓞"), - (0x104B7, "M", "𐓟"), - (0x104B8, "M", "𐓠"), - (0x104B9, "M", "𐓡"), - (0x104BA, "M", "𐓢"), - (0x104BB, "M", "𐓣"), - (0x104BC, "M", "𐓤"), - (0x104BD, "M", "𐓥"), - (0x104BE, "M", "𐓦"), - (0x104BF, "M", "𐓧"), - (0x104C0, "M", "𐓨"), - (0x104C1, "M", "𐓩"), - (0x104C2, "M", "𐓪"), - (0x104C3, "M", "𐓫"), - (0x104C4, "M", "𐓬"), - (0x104C5, "M", "𐓭"), - (0x104C6, "M", "𐓮"), - (0x104C7, "M", "𐓯"), - (0x104C8, "M", "𐓰"), - (0x104C9, "M", "𐓱"), - (0x104CA, "M", "𐓲"), - (0x104CB, "M", "𐓳"), - (0x104CC, "M", "𐓴"), - (0x104CD, "M", "𐓵"), - (0x104CE, "M", "𐓶"), - (0x104CF, "M", "𐓷"), - (0x104D0, "M", "𐓸"), - (0x104D1, "M", "𐓹"), - (0x104D2, "M", "𐓺"), - (0x104D3, "M", 
"𐓻"), - (0x104D4, "X"), - (0x104D8, "V"), - (0x104FC, "X"), - (0x10500, "V"), - (0x10528, "X"), - (0x10530, "V"), - (0x10564, "X"), - (0x1056F, "V"), - (0x10570, "M", "𐖗"), - (0x10571, "M", "𐖘"), - (0x10572, "M", "𐖙"), - (0x10573, "M", "𐖚"), - (0x10574, "M", "𐖛"), - (0x10575, "M", "𐖜"), - (0x10576, "M", "𐖝"), - (0x10577, "M", "𐖞"), - (0x10578, "M", "𐖟"), - (0x10579, "M", "𐖠"), - (0x1057A, "M", "𐖡"), - (0x1057B, "X"), - (0x1057C, "M", "𐖣"), - (0x1057D, "M", "𐖤"), - (0x1057E, "M", "𐖥"), - (0x1057F, "M", "𐖦"), - (0x10580, "M", "𐖧"), - (0x10581, "M", "𐖨"), - (0x10582, "M", "𐖩"), - (0x10583, "M", "𐖪"), - (0x10584, "M", "𐖫"), - (0x10585, "M", "𐖬"), - (0x10586, "M", "𐖭"), - (0x10587, "M", "𐖮"), - (0x10588, "M", "𐖯"), - (0x10589, "M", "𐖰"), - (0x1058A, "M", "𐖱"), - ] - - -def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1058B, "X"), - (0x1058C, "M", "𐖳"), - (0x1058D, "M", "𐖴"), - (0x1058E, "M", "𐖵"), - (0x1058F, "M", "𐖶"), - (0x10590, "M", "𐖷"), - (0x10591, "M", "𐖸"), - (0x10592, "M", "𐖹"), - (0x10593, "X"), - (0x10594, "M", "𐖻"), - (0x10595, "M", "𐖼"), - (0x10596, "X"), - (0x10597, "V"), - (0x105A2, "X"), - (0x105A3, "V"), - (0x105B2, "X"), - (0x105B3, "V"), - (0x105BA, "X"), - (0x105BB, "V"), - (0x105BD, "X"), - (0x105C0, "V"), - (0x105F4, "X"), - (0x10600, "V"), - (0x10737, "X"), - (0x10740, "V"), - (0x10756, "X"), - (0x10760, "V"), - (0x10768, "X"), - (0x10780, "V"), - (0x10781, "M", "ː"), - (0x10782, "M", "ˑ"), - (0x10783, "M", "æ"), - (0x10784, "M", "ʙ"), - (0x10785, "M", "ɓ"), - (0x10786, "X"), - (0x10787, "M", "ʣ"), - (0x10788, "M", "ꭦ"), - (0x10789, "M", "ʥ"), - (0x1078A, "M", "ʤ"), - (0x1078B, "M", "ɖ"), - (0x1078C, "M", "ɗ"), - (0x1078D, "M", "ᶑ"), - (0x1078E, "M", "ɘ"), - (0x1078F, "M", "ɞ"), - (0x10790, "M", "ʩ"), - (0x10791, "M", "ɤ"), - (0x10792, "M", "ɢ"), - (0x10793, "M", "ɠ"), - (0x10794, "M", "ʛ"), - (0x10795, "M", "ħ"), - (0x10796, "M", "ʜ"), - (0x10797, "M", "ɧ"), - (0x10798, "M", "ʄ"), - (0x10799, "M", "ʪ"), - (0x1079A, "M", "ʫ"), - (0x1079B, "M", "ɬ"), - (0x1079C, "M", "𝼄"), - (0x1079D, "M", "ꞎ"), - (0x1079E, "M", "ɮ"), - (0x1079F, "M", "𝼅"), - (0x107A0, "M", "ʎ"), - (0x107A1, "M", "𝼆"), - (0x107A2, "M", "ø"), - (0x107A3, "M", "ɶ"), - (0x107A4, "M", "ɷ"), - (0x107A5, "M", "q"), - (0x107A6, "M", "ɺ"), - (0x107A7, "M", "𝼈"), - (0x107A8, "M", "ɽ"), - (0x107A9, "M", "ɾ"), - (0x107AA, "M", "ʀ"), - (0x107AB, "M", "ʨ"), - (0x107AC, "M", "ʦ"), - (0x107AD, "M", "ꭧ"), - (0x107AE, "M", "ʧ"), - (0x107AF, "M", "ʈ"), - (0x107B0, "M", "ⱱ"), - (0x107B1, "X"), - (0x107B2, "M", "ʏ"), - (0x107B3, "M", "ʡ"), - (0x107B4, "M", "ʢ"), - (0x107B5, "M", "ʘ"), - (0x107B6, "M", "ǀ"), - (0x107B7, "M", "ǁ"), - (0x107B8, "M", "ǂ"), - (0x107B9, "M", "𝼊"), - (0x107BA, "M", "𝼞"), - (0x107BB, "X"), - (0x10800, "V"), - (0x10806, "X"), - (0x10808, "V"), - (0x10809, "X"), - (0x1080A, "V"), - (0x10836, "X"), - (0x10837, "V"), - (0x10839, "X"), - (0x1083C, "V"), - (0x1083D, "X"), - (0x1083F, "V"), - (0x10856, "X"), - ] - - -def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10857, "V"), - (0x1089F, "X"), - (0x108A7, "V"), - (0x108B0, "X"), - (0x108E0, "V"), - (0x108F3, "X"), - (0x108F4, "V"), - (0x108F6, "X"), - (0x108FB, "V"), - (0x1091C, "X"), - (0x1091F, "V"), - (0x1093A, "X"), - (0x1093F, "V"), - (0x10940, "X"), - (0x10980, "V"), - (0x109B8, "X"), - (0x109BC, "V"), - (0x109D0, "X"), - (0x109D2, "V"), - (0x10A04, "X"), - (0x10A05, "V"), - (0x10A07, "X"), - (0x10A0C, "V"), - (0x10A14, "X"), - (0x10A15, "V"), - (0x10A18, "X"), - (0x10A19, "V"), 
- (0x10A36, "X"), - (0x10A38, "V"), - (0x10A3B, "X"), - (0x10A3F, "V"), - (0x10A49, "X"), - (0x10A50, "V"), - (0x10A59, "X"), - (0x10A60, "V"), - (0x10AA0, "X"), - (0x10AC0, "V"), - (0x10AE7, "X"), - (0x10AEB, "V"), - (0x10AF7, "X"), - (0x10B00, "V"), - (0x10B36, "X"), - (0x10B39, "V"), - (0x10B56, "X"), - (0x10B58, "V"), - (0x10B73, "X"), - (0x10B78, "V"), - (0x10B92, "X"), - (0x10B99, "V"), - (0x10B9D, "X"), - (0x10BA9, "V"), - (0x10BB0, "X"), - (0x10C00, "V"), - (0x10C49, "X"), - (0x10C80, "M", "𐳀"), - (0x10C81, "M", "𐳁"), - (0x10C82, "M", "𐳂"), - (0x10C83, "M", "𐳃"), - (0x10C84, "M", "𐳄"), - (0x10C85, "M", "𐳅"), - (0x10C86, "M", "𐳆"), - (0x10C87, "M", "𐳇"), - (0x10C88, "M", "𐳈"), - (0x10C89, "M", "𐳉"), - (0x10C8A, "M", "𐳊"), - (0x10C8B, "M", "𐳋"), - (0x10C8C, "M", "𐳌"), - (0x10C8D, "M", "𐳍"), - (0x10C8E, "M", "𐳎"), - (0x10C8F, "M", "𐳏"), - (0x10C90, "M", "𐳐"), - (0x10C91, "M", "𐳑"), - (0x10C92, "M", "𐳒"), - (0x10C93, "M", "𐳓"), - (0x10C94, "M", "𐳔"), - (0x10C95, "M", "𐳕"), - (0x10C96, "M", "𐳖"), - (0x10C97, "M", "𐳗"), - (0x10C98, "M", "𐳘"), - (0x10C99, "M", "𐳙"), - (0x10C9A, "M", "𐳚"), - (0x10C9B, "M", "𐳛"), - (0x10C9C, "M", "𐳜"), - (0x10C9D, "M", "𐳝"), - (0x10C9E, "M", "𐳞"), - (0x10C9F, "M", "𐳟"), - (0x10CA0, "M", "𐳠"), - (0x10CA1, "M", "𐳡"), - (0x10CA2, "M", "𐳢"), - (0x10CA3, "M", "𐳣"), - (0x10CA4, "M", "𐳤"), - (0x10CA5, "M", "𐳥"), - (0x10CA6, "M", "𐳦"), - (0x10CA7, "M", "𐳧"), - (0x10CA8, "M", "𐳨"), - (0x10CA9, "M", "𐳩"), - (0x10CAA, "M", "𐳪"), - (0x10CAB, "M", "𐳫"), - (0x10CAC, "M", "𐳬"), - (0x10CAD, "M", "𐳭"), - ] - - -def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10CAE, "M", "𐳮"), - (0x10CAF, "M", "𐳯"), - (0x10CB0, "M", "𐳰"), - (0x10CB1, "M", "𐳱"), - (0x10CB2, "M", "𐳲"), - (0x10CB3, "X"), - (0x10CC0, "V"), - (0x10CF3, "X"), - (0x10CFA, "V"), - (0x10D28, "X"), - (0x10D30, "V"), - (0x10D3A, "X"), - (0x10D40, "V"), - (0x10D50, "M", "𐵰"), - (0x10D51, "M", "𐵱"), - (0x10D52, "M", "𐵲"), - (0x10D53, "M", "𐵳"), - (0x10D54, "M", "𐵴"), - (0x10D55, "M", "𐵵"), - (0x10D56, "M", "𐵶"), - (0x10D57, "M", "𐵷"), - (0x10D58, "M", "𐵸"), - (0x10D59, "M", "𐵹"), - (0x10D5A, "M", "𐵺"), - (0x10D5B, "M", "𐵻"), - (0x10D5C, "M", "𐵼"), - (0x10D5D, "M", "𐵽"), - (0x10D5E, "M", "𐵾"), - (0x10D5F, "M", "𐵿"), - (0x10D60, "M", "𐶀"), - (0x10D61, "M", "𐶁"), - (0x10D62, "M", "𐶂"), - (0x10D63, "M", "𐶃"), - (0x10D64, "M", "𐶄"), - (0x10D65, "M", "𐶅"), - (0x10D66, "X"), - (0x10D69, "V"), - (0x10D86, "X"), - (0x10D8E, "V"), - (0x10D90, "X"), - (0x10E60, "V"), - (0x10E7F, "X"), - (0x10E80, "V"), - (0x10EAA, "X"), - (0x10EAB, "V"), - (0x10EAE, "X"), - (0x10EB0, "V"), - (0x10EB2, "X"), - (0x10EC2, "V"), - (0x10EC5, "X"), - (0x10EFC, "V"), - (0x10F28, "X"), - (0x10F30, "V"), - (0x10F5A, "X"), - (0x10F70, "V"), - (0x10F8A, "X"), - (0x10FB0, "V"), - (0x10FCC, "X"), - (0x10FE0, "V"), - (0x10FF7, "X"), - (0x11000, "V"), - (0x1104E, "X"), - (0x11052, "V"), - (0x11076, "X"), - (0x1107F, "V"), - (0x110BD, "X"), - (0x110BE, "V"), - (0x110C3, "X"), - (0x110D0, "V"), - (0x110E9, "X"), - (0x110F0, "V"), - (0x110FA, "X"), - (0x11100, "V"), - (0x11135, "X"), - (0x11136, "V"), - (0x11148, "X"), - (0x11150, "V"), - (0x11177, "X"), - (0x11180, "V"), - (0x111E0, "X"), - (0x111E1, "V"), - (0x111F5, "X"), - (0x11200, "V"), - (0x11212, "X"), - (0x11213, "V"), - (0x11242, "X"), - (0x11280, "V"), - (0x11287, "X"), - (0x11288, "V"), - (0x11289, "X"), - (0x1128A, "V"), - (0x1128E, "X"), - (0x1128F, "V"), - (0x1129E, "X"), - (0x1129F, "V"), - (0x112AA, "X"), - (0x112B0, "V"), - (0x112EB, "X"), - (0x112F0, "V"), - 
(0x112FA, "X"), - ] - - -def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x11300, "V"), - (0x11304, "X"), - (0x11305, "V"), - (0x1130D, "X"), - (0x1130F, "V"), - (0x11311, "X"), - (0x11313, "V"), - (0x11329, "X"), - (0x1132A, "V"), - (0x11331, "X"), - (0x11332, "V"), - (0x11334, "X"), - (0x11335, "V"), - (0x1133A, "X"), - (0x1133B, "V"), - (0x11345, "X"), - (0x11347, "V"), - (0x11349, "X"), - (0x1134B, "V"), - (0x1134E, "X"), - (0x11350, "V"), - (0x11351, "X"), - (0x11357, "V"), - (0x11358, "X"), - (0x1135D, "V"), - (0x11364, "X"), - (0x11366, "V"), - (0x1136D, "X"), - (0x11370, "V"), - (0x11375, "X"), - (0x11380, "V"), - (0x1138A, "X"), - (0x1138B, "V"), - (0x1138C, "X"), - (0x1138E, "V"), - (0x1138F, "X"), - (0x11390, "V"), - (0x113B6, "X"), - (0x113B7, "V"), - (0x113C1, "X"), - (0x113C2, "V"), - (0x113C3, "X"), - (0x113C5, "V"), - (0x113C6, "X"), - (0x113C7, "V"), - (0x113CB, "X"), - (0x113CC, "V"), - (0x113D6, "X"), - (0x113D7, "V"), - (0x113D9, "X"), - (0x113E1, "V"), - (0x113E3, "X"), - (0x11400, "V"), - (0x1145C, "X"), - (0x1145D, "V"), - (0x11462, "X"), - (0x11480, "V"), - (0x114C8, "X"), - (0x114D0, "V"), - (0x114DA, "X"), - (0x11580, "V"), - (0x115B6, "X"), - (0x115B8, "V"), - (0x115DE, "X"), - (0x11600, "V"), - (0x11645, "X"), - (0x11650, "V"), - (0x1165A, "X"), - (0x11660, "V"), - (0x1166D, "X"), - (0x11680, "V"), - (0x116BA, "X"), - (0x116C0, "V"), - (0x116CA, "X"), - (0x116D0, "V"), - (0x116E4, "X"), - (0x11700, "V"), - (0x1171B, "X"), - (0x1171D, "V"), - (0x1172C, "X"), - (0x11730, "V"), - (0x11747, "X"), - (0x11800, "V"), - (0x1183C, "X"), - (0x118A0, "M", "𑣀"), - (0x118A1, "M", "𑣁"), - (0x118A2, "M", "𑣂"), - (0x118A3, "M", "𑣃"), - (0x118A4, "M", "𑣄"), - (0x118A5, "M", "𑣅"), - (0x118A6, "M", "𑣆"), - (0x118A7, "M", "𑣇"), - (0x118A8, "M", "𑣈"), - (0x118A9, "M", "𑣉"), - (0x118AA, "M", "𑣊"), - (0x118AB, "M", "𑣋"), - (0x118AC, "M", "𑣌"), - (0x118AD, "M", "𑣍"), - (0x118AE, "M", "𑣎"), - (0x118AF, "M", "𑣏"), - ] - - -def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x118B0, "M", "𑣐"), - (0x118B1, "M", "𑣑"), - (0x118B2, "M", "𑣒"), - (0x118B3, "M", "𑣓"), - (0x118B4, "M", "𑣔"), - (0x118B5, "M", "𑣕"), - (0x118B6, "M", "𑣖"), - (0x118B7, "M", "𑣗"), - (0x118B8, "M", "𑣘"), - (0x118B9, "M", "𑣙"), - (0x118BA, "M", "𑣚"), - (0x118BB, "M", "𑣛"), - (0x118BC, "M", "𑣜"), - (0x118BD, "M", "𑣝"), - (0x118BE, "M", "𑣞"), - (0x118BF, "M", "𑣟"), - (0x118C0, "V"), - (0x118F3, "X"), - (0x118FF, "V"), - (0x11907, "X"), - (0x11909, "V"), - (0x1190A, "X"), - (0x1190C, "V"), - (0x11914, "X"), - (0x11915, "V"), - (0x11917, "X"), - (0x11918, "V"), - (0x11936, "X"), - (0x11937, "V"), - (0x11939, "X"), - (0x1193B, "V"), - (0x11947, "X"), - (0x11950, "V"), - (0x1195A, "X"), - (0x119A0, "V"), - (0x119A8, "X"), - (0x119AA, "V"), - (0x119D8, "X"), - (0x119DA, "V"), - (0x119E5, "X"), - (0x11A00, "V"), - (0x11A48, "X"), - (0x11A50, "V"), - (0x11AA3, "X"), - (0x11AB0, "V"), - (0x11AF9, "X"), - (0x11B00, "V"), - (0x11B0A, "X"), - (0x11BC0, "V"), - (0x11BE2, "X"), - (0x11BF0, "V"), - (0x11BFA, "X"), - (0x11C00, "V"), - (0x11C09, "X"), - (0x11C0A, "V"), - (0x11C37, "X"), - (0x11C38, "V"), - (0x11C46, "X"), - (0x11C50, "V"), - (0x11C6D, "X"), - (0x11C70, "V"), - (0x11C90, "X"), - (0x11C92, "V"), - (0x11CA8, "X"), - (0x11CA9, "V"), - (0x11CB7, "X"), - (0x11D00, "V"), - (0x11D07, "X"), - (0x11D08, "V"), - (0x11D0A, "X"), - (0x11D0B, "V"), - (0x11D37, "X"), - (0x11D3A, "V"), - (0x11D3B, "X"), - (0x11D3C, "V"), - (0x11D3E, "X"), - (0x11D3F, "V"), - (0x11D48, 
"X"), - (0x11D50, "V"), - (0x11D5A, "X"), - (0x11D60, "V"), - (0x11D66, "X"), - (0x11D67, "V"), - (0x11D69, "X"), - (0x11D6A, "V"), - (0x11D8F, "X"), - (0x11D90, "V"), - (0x11D92, "X"), - (0x11D93, "V"), - (0x11D99, "X"), - (0x11DA0, "V"), - (0x11DAA, "X"), - (0x11EE0, "V"), - (0x11EF9, "X"), - (0x11F00, "V"), - (0x11F11, "X"), - (0x11F12, "V"), - (0x11F3B, "X"), - (0x11F3E, "V"), - (0x11F5B, "X"), - ] - - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x11FB0, "V"), - (0x11FB1, "X"), - (0x11FC0, "V"), - (0x11FF2, "X"), - (0x11FFF, "V"), - (0x1239A, "X"), - (0x12400, "V"), - (0x1246F, "X"), - (0x12470, "V"), - (0x12475, "X"), - (0x12480, "V"), - (0x12544, "X"), - (0x12F90, "V"), - (0x12FF3, "X"), - (0x13000, "V"), - (0x13430, "X"), - (0x13440, "V"), - (0x13456, "X"), - (0x13460, "V"), - (0x143FB, "X"), - (0x14400, "V"), - (0x14647, "X"), - (0x16100, "V"), - (0x1613A, "X"), - (0x16800, "V"), - (0x16A39, "X"), - (0x16A40, "V"), - (0x16A5F, "X"), - (0x16A60, "V"), - (0x16A6A, "X"), - (0x16A6E, "V"), - (0x16ABF, "X"), - (0x16AC0, "V"), - (0x16ACA, "X"), - (0x16AD0, "V"), - (0x16AEE, "X"), - (0x16AF0, "V"), - (0x16AF6, "X"), - (0x16B00, "V"), - (0x16B46, "X"), - (0x16B50, "V"), - (0x16B5A, "X"), - (0x16B5B, "V"), - (0x16B62, "X"), - (0x16B63, "V"), - (0x16B78, "X"), - (0x16B7D, "V"), - (0x16B90, "X"), - (0x16D40, "V"), - (0x16D7A, "X"), - (0x16E40, "M", "𖹠"), - (0x16E41, "M", "𖹡"), - (0x16E42, "M", "𖹢"), - (0x16E43, "M", "𖹣"), - (0x16E44, "M", "𖹤"), - (0x16E45, "M", "𖹥"), - (0x16E46, "M", "𖹦"), - (0x16E47, "M", "𖹧"), - (0x16E48, "M", "𖹨"), - (0x16E49, "M", "𖹩"), - (0x16E4A, "M", "𖹪"), - (0x16E4B, "M", "𖹫"), - (0x16E4C, "M", "𖹬"), - (0x16E4D, "M", "𖹭"), - (0x16E4E, "M", "𖹮"), - (0x16E4F, "M", "𖹯"), - (0x16E50, "M", "𖹰"), - (0x16E51, "M", "𖹱"), - (0x16E52, "M", "𖹲"), - (0x16E53, "M", "𖹳"), - (0x16E54, "M", "𖹴"), - (0x16E55, "M", "𖹵"), - (0x16E56, "M", "𖹶"), - (0x16E57, "M", "𖹷"), - (0x16E58, "M", "𖹸"), - (0x16E59, "M", "𖹹"), - (0x16E5A, "M", "𖹺"), - (0x16E5B, "M", "𖹻"), - (0x16E5C, "M", "𖹼"), - (0x16E5D, "M", "𖹽"), - (0x16E5E, "M", "𖹾"), - (0x16E5F, "M", "𖹿"), - (0x16E60, "V"), - (0x16E9B, "X"), - (0x16F00, "V"), - (0x16F4B, "X"), - (0x16F4F, "V"), - (0x16F88, "X"), - (0x16F8F, "V"), - (0x16FA0, "X"), - (0x16FE0, "V"), - (0x16FE5, "X"), - (0x16FF0, "V"), - (0x16FF2, "X"), - (0x17000, "V"), - (0x187F8, "X"), - (0x18800, "V"), - (0x18CD6, "X"), - (0x18CFF, "V"), - (0x18D09, "X"), - ] - - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1AFF0, "V"), - (0x1AFF4, "X"), - (0x1AFF5, "V"), - (0x1AFFC, "X"), - (0x1AFFD, "V"), - (0x1AFFF, "X"), - (0x1B000, "V"), - (0x1B123, "X"), - (0x1B132, "V"), - (0x1B133, "X"), - (0x1B150, "V"), - (0x1B153, "X"), - (0x1B155, "V"), - (0x1B156, "X"), - (0x1B164, "V"), - (0x1B168, "X"), - (0x1B170, "V"), - (0x1B2FC, "X"), - (0x1BC00, "V"), - (0x1BC6B, "X"), - (0x1BC70, "V"), - (0x1BC7D, "X"), - (0x1BC80, "V"), - (0x1BC89, "X"), - (0x1BC90, "V"), - (0x1BC9A, "X"), - (0x1BC9C, "V"), - (0x1BCA0, "I"), - (0x1BCA4, "X"), - (0x1CC00, "V"), - (0x1CCD6, "M", "a"), - (0x1CCD7, "M", "b"), - (0x1CCD8, "M", "c"), - (0x1CCD9, "M", "d"), - (0x1CCDA, "M", "e"), - (0x1CCDB, "M", "f"), - (0x1CCDC, "M", "g"), - (0x1CCDD, "M", "h"), - (0x1CCDE, "M", "i"), - (0x1CCDF, "M", "j"), - (0x1CCE0, "M", "k"), - (0x1CCE1, "M", "l"), - (0x1CCE2, "M", "m"), - (0x1CCE3, "M", "n"), - (0x1CCE4, "M", "o"), - (0x1CCE5, "M", "p"), - (0x1CCE6, "M", "q"), - (0x1CCE7, "M", "r"), - (0x1CCE8, "M", "s"), - (0x1CCE9, "M", "t"), - (0x1CCEA, 
"M", "u"), - (0x1CCEB, "M", "v"), - (0x1CCEC, "M", "w"), - (0x1CCED, "M", "x"), - (0x1CCEE, "M", "y"), - (0x1CCEF, "M", "z"), - (0x1CCF0, "M", "0"), - (0x1CCF1, "M", "1"), - (0x1CCF2, "M", "2"), - (0x1CCF3, "M", "3"), - (0x1CCF4, "M", "4"), - (0x1CCF5, "M", "5"), - (0x1CCF6, "M", "6"), - (0x1CCF7, "M", "7"), - (0x1CCF8, "M", "8"), - (0x1CCF9, "M", "9"), - (0x1CCFA, "X"), - (0x1CD00, "V"), - (0x1CEB4, "X"), - (0x1CF00, "V"), - (0x1CF2E, "X"), - (0x1CF30, "V"), - (0x1CF47, "X"), - (0x1CF50, "V"), - (0x1CFC4, "X"), - (0x1D000, "V"), - (0x1D0F6, "X"), - (0x1D100, "V"), - (0x1D127, "X"), - (0x1D129, "V"), - (0x1D15E, "M", "𝅗𝅥"), - (0x1D15F, "M", "𝅘𝅥"), - (0x1D160, "M", "𝅘𝅥𝅮"), - (0x1D161, "M", "𝅘𝅥𝅯"), - (0x1D162, "M", "𝅘𝅥𝅰"), - (0x1D163, "M", "𝅘𝅥𝅱"), - (0x1D164, "M", "𝅘𝅥𝅲"), - (0x1D165, "V"), - (0x1D173, "I"), - (0x1D17B, "V"), - (0x1D1BB, "M", "𝆹𝅥"), - (0x1D1BC, "M", "𝆺𝅥"), - (0x1D1BD, "M", "𝆹𝅥𝅮"), - (0x1D1BE, "M", "𝆺𝅥𝅮"), - (0x1D1BF, "M", "𝆹𝅥𝅯"), - (0x1D1C0, "M", "𝆺𝅥𝅯"), - (0x1D1C1, "V"), - (0x1D1EB, "X"), - (0x1D200, "V"), - (0x1D246, "X"), - ] - - -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D2C0, "V"), - (0x1D2D4, "X"), - (0x1D2E0, "V"), - (0x1D2F4, "X"), - (0x1D300, "V"), - (0x1D357, "X"), - (0x1D360, "V"), - (0x1D379, "X"), - (0x1D400, "M", "a"), - (0x1D401, "M", "b"), - (0x1D402, "M", "c"), - (0x1D403, "M", "d"), - (0x1D404, "M", "e"), - (0x1D405, "M", "f"), - (0x1D406, "M", "g"), - (0x1D407, "M", "h"), - (0x1D408, "M", "i"), - (0x1D409, "M", "j"), - (0x1D40A, "M", "k"), - (0x1D40B, "M", "l"), - (0x1D40C, "M", "m"), - (0x1D40D, "M", "n"), - (0x1D40E, "M", "o"), - (0x1D40F, "M", "p"), - (0x1D410, "M", "q"), - (0x1D411, "M", "r"), - (0x1D412, "M", "s"), - (0x1D413, "M", "t"), - (0x1D414, "M", "u"), - (0x1D415, "M", "v"), - (0x1D416, "M", "w"), - (0x1D417, "M", "x"), - (0x1D418, "M", "y"), - (0x1D419, "M", "z"), - (0x1D41A, "M", "a"), - (0x1D41B, "M", "b"), - (0x1D41C, "M", "c"), - (0x1D41D, "M", "d"), - (0x1D41E, "M", "e"), - (0x1D41F, "M", "f"), - (0x1D420, "M", "g"), - (0x1D421, "M", "h"), - (0x1D422, "M", "i"), - (0x1D423, "M", "j"), - (0x1D424, "M", "k"), - (0x1D425, "M", "l"), - (0x1D426, "M", "m"), - (0x1D427, "M", "n"), - (0x1D428, "M", "o"), - (0x1D429, "M", "p"), - (0x1D42A, "M", "q"), - (0x1D42B, "M", "r"), - (0x1D42C, "M", "s"), - (0x1D42D, "M", "t"), - (0x1D42E, "M", "u"), - (0x1D42F, "M", "v"), - (0x1D430, "M", "w"), - (0x1D431, "M", "x"), - (0x1D432, "M", "y"), - (0x1D433, "M", "z"), - (0x1D434, "M", "a"), - (0x1D435, "M", "b"), - (0x1D436, "M", "c"), - (0x1D437, "M", "d"), - (0x1D438, "M", "e"), - (0x1D439, "M", "f"), - (0x1D43A, "M", "g"), - (0x1D43B, "M", "h"), - (0x1D43C, "M", "i"), - (0x1D43D, "M", "j"), - (0x1D43E, "M", "k"), - (0x1D43F, "M", "l"), - (0x1D440, "M", "m"), - (0x1D441, "M", "n"), - (0x1D442, "M", "o"), - (0x1D443, "M", "p"), - (0x1D444, "M", "q"), - (0x1D445, "M", "r"), - (0x1D446, "M", "s"), - (0x1D447, "M", "t"), - (0x1D448, "M", "u"), - (0x1D449, "M", "v"), - (0x1D44A, "M", "w"), - (0x1D44B, "M", "x"), - (0x1D44C, "M", "y"), - (0x1D44D, "M", "z"), - (0x1D44E, "M", "a"), - (0x1D44F, "M", "b"), - (0x1D450, "M", "c"), - (0x1D451, "M", "d"), - (0x1D452, "M", "e"), - (0x1D453, "M", "f"), - (0x1D454, "M", "g"), - (0x1D455, "X"), - (0x1D456, "M", "i"), - (0x1D457, "M", "j"), - (0x1D458, "M", "k"), - (0x1D459, "M", "l"), - (0x1D45A, "M", "m"), - (0x1D45B, "M", "n"), - ] - - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D45C, "M", "o"), - (0x1D45D, "M", "p"), - (0x1D45E, "M", 
"q"), - (0x1D45F, "M", "r"), - (0x1D460, "M", "s"), - (0x1D461, "M", "t"), - (0x1D462, "M", "u"), - (0x1D463, "M", "v"), - (0x1D464, "M", "w"), - (0x1D465, "M", "x"), - (0x1D466, "M", "y"), - (0x1D467, "M", "z"), - (0x1D468, "M", "a"), - (0x1D469, "M", "b"), - (0x1D46A, "M", "c"), - (0x1D46B, "M", "d"), - (0x1D46C, "M", "e"), - (0x1D46D, "M", "f"), - (0x1D46E, "M", "g"), - (0x1D46F, "M", "h"), - (0x1D470, "M", "i"), - (0x1D471, "M", "j"), - (0x1D472, "M", "k"), - (0x1D473, "M", "l"), - (0x1D474, "M", "m"), - (0x1D475, "M", "n"), - (0x1D476, "M", "o"), - (0x1D477, "M", "p"), - (0x1D478, "M", "q"), - (0x1D479, "M", "r"), - (0x1D47A, "M", "s"), - (0x1D47B, "M", "t"), - (0x1D47C, "M", "u"), - (0x1D47D, "M", "v"), - (0x1D47E, "M", "w"), - (0x1D47F, "M", "x"), - (0x1D480, "M", "y"), - (0x1D481, "M", "z"), - (0x1D482, "M", "a"), - (0x1D483, "M", "b"), - (0x1D484, "M", "c"), - (0x1D485, "M", "d"), - (0x1D486, "M", "e"), - (0x1D487, "M", "f"), - (0x1D488, "M", "g"), - (0x1D489, "M", "h"), - (0x1D48A, "M", "i"), - (0x1D48B, "M", "j"), - (0x1D48C, "M", "k"), - (0x1D48D, "M", "l"), - (0x1D48E, "M", "m"), - (0x1D48F, "M", "n"), - (0x1D490, "M", "o"), - (0x1D491, "M", "p"), - (0x1D492, "M", "q"), - (0x1D493, "M", "r"), - (0x1D494, "M", "s"), - (0x1D495, "M", "t"), - (0x1D496, "M", "u"), - (0x1D497, "M", "v"), - (0x1D498, "M", "w"), - (0x1D499, "M", "x"), - (0x1D49A, "M", "y"), - (0x1D49B, "M", "z"), - (0x1D49C, "M", "a"), - (0x1D49D, "X"), - (0x1D49E, "M", "c"), - (0x1D49F, "M", "d"), - (0x1D4A0, "X"), - (0x1D4A2, "M", "g"), - (0x1D4A3, "X"), - (0x1D4A5, "M", "j"), - (0x1D4A6, "M", "k"), - (0x1D4A7, "X"), - (0x1D4A9, "M", "n"), - (0x1D4AA, "M", "o"), - (0x1D4AB, "M", "p"), - (0x1D4AC, "M", "q"), - (0x1D4AD, "X"), - (0x1D4AE, "M", "s"), - (0x1D4AF, "M", "t"), - (0x1D4B0, "M", "u"), - (0x1D4B1, "M", "v"), - (0x1D4B2, "M", "w"), - (0x1D4B3, "M", "x"), - (0x1D4B4, "M", "y"), - (0x1D4B5, "M", "z"), - (0x1D4B6, "M", "a"), - (0x1D4B7, "M", "b"), - (0x1D4B8, "M", "c"), - (0x1D4B9, "M", "d"), - (0x1D4BA, "X"), - (0x1D4BB, "M", "f"), - (0x1D4BC, "X"), - (0x1D4BD, "M", "h"), - (0x1D4BE, "M", "i"), - (0x1D4BF, "M", "j"), - (0x1D4C0, "M", "k"), - (0x1D4C1, "M", "l"), - (0x1D4C2, "M", "m"), - ] - - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D4C3, "M", "n"), - (0x1D4C4, "X"), - (0x1D4C5, "M", "p"), - (0x1D4C6, "M", "q"), - (0x1D4C7, "M", "r"), - (0x1D4C8, "M", "s"), - (0x1D4C9, "M", "t"), - (0x1D4CA, "M", "u"), - (0x1D4CB, "M", "v"), - (0x1D4CC, "M", "w"), - (0x1D4CD, "M", "x"), - (0x1D4CE, "M", "y"), - (0x1D4CF, "M", "z"), - (0x1D4D0, "M", "a"), - (0x1D4D1, "M", "b"), - (0x1D4D2, "M", "c"), - (0x1D4D3, "M", "d"), - (0x1D4D4, "M", "e"), - (0x1D4D5, "M", "f"), - (0x1D4D6, "M", "g"), - (0x1D4D7, "M", "h"), - (0x1D4D8, "M", "i"), - (0x1D4D9, "M", "j"), - (0x1D4DA, "M", "k"), - (0x1D4DB, "M", "l"), - (0x1D4DC, "M", "m"), - (0x1D4DD, "M", "n"), - (0x1D4DE, "M", "o"), - (0x1D4DF, "M", "p"), - (0x1D4E0, "M", "q"), - (0x1D4E1, "M", "r"), - (0x1D4E2, "M", "s"), - (0x1D4E3, "M", "t"), - (0x1D4E4, "M", "u"), - (0x1D4E5, "M", "v"), - (0x1D4E6, "M", "w"), - (0x1D4E7, "M", "x"), - (0x1D4E8, "M", "y"), - (0x1D4E9, "M", "z"), - (0x1D4EA, "M", "a"), - (0x1D4EB, "M", "b"), - (0x1D4EC, "M", "c"), - (0x1D4ED, "M", "d"), - (0x1D4EE, "M", "e"), - (0x1D4EF, "M", "f"), - (0x1D4F0, "M", "g"), - (0x1D4F1, "M", "h"), - (0x1D4F2, "M", "i"), - (0x1D4F3, "M", "j"), - (0x1D4F4, "M", "k"), - (0x1D4F5, "M", "l"), - (0x1D4F6, "M", "m"), - (0x1D4F7, "M", "n"), - (0x1D4F8, "M", "o"), - (0x1D4F9, "M", "p"), - 
(0x1D4FA, "M", "q"), - (0x1D4FB, "M", "r"), - (0x1D4FC, "M", "s"), - (0x1D4FD, "M", "t"), - (0x1D4FE, "M", "u"), - (0x1D4FF, "M", "v"), - (0x1D500, "M", "w"), - (0x1D501, "M", "x"), - (0x1D502, "M", "y"), - (0x1D503, "M", "z"), - (0x1D504, "M", "a"), - (0x1D505, "M", "b"), - (0x1D506, "X"), - (0x1D507, "M", "d"), - (0x1D508, "M", "e"), - (0x1D509, "M", "f"), - (0x1D50A, "M", "g"), - (0x1D50B, "X"), - (0x1D50D, "M", "j"), - (0x1D50E, "M", "k"), - (0x1D50F, "M", "l"), - (0x1D510, "M", "m"), - (0x1D511, "M", "n"), - (0x1D512, "M", "o"), - (0x1D513, "M", "p"), - (0x1D514, "M", "q"), - (0x1D515, "X"), - (0x1D516, "M", "s"), - (0x1D517, "M", "t"), - (0x1D518, "M", "u"), - (0x1D519, "M", "v"), - (0x1D51A, "M", "w"), - (0x1D51B, "M", "x"), - (0x1D51C, "M", "y"), - (0x1D51D, "X"), - (0x1D51E, "M", "a"), - (0x1D51F, "M", "b"), - (0x1D520, "M", "c"), - (0x1D521, "M", "d"), - (0x1D522, "M", "e"), - (0x1D523, "M", "f"), - (0x1D524, "M", "g"), - (0x1D525, "M", "h"), - (0x1D526, "M", "i"), - (0x1D527, "M", "j"), - ] - - -def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D528, "M", "k"), - (0x1D529, "M", "l"), - (0x1D52A, "M", "m"), - (0x1D52B, "M", "n"), - (0x1D52C, "M", "o"), - (0x1D52D, "M", "p"), - (0x1D52E, "M", "q"), - (0x1D52F, "M", "r"), - (0x1D530, "M", "s"), - (0x1D531, "M", "t"), - (0x1D532, "M", "u"), - (0x1D533, "M", "v"), - (0x1D534, "M", "w"), - (0x1D535, "M", "x"), - (0x1D536, "M", "y"), - (0x1D537, "M", "z"), - (0x1D538, "M", "a"), - (0x1D539, "M", "b"), - (0x1D53A, "X"), - (0x1D53B, "M", "d"), - (0x1D53C, "M", "e"), - (0x1D53D, "M", "f"), - (0x1D53E, "M", "g"), - (0x1D53F, "X"), - (0x1D540, "M", "i"), - (0x1D541, "M", "j"), - (0x1D542, "M", "k"), - (0x1D543, "M", "l"), - (0x1D544, "M", "m"), - (0x1D545, "X"), - (0x1D546, "M", "o"), - (0x1D547, "X"), - (0x1D54A, "M", "s"), - (0x1D54B, "M", "t"), - (0x1D54C, "M", "u"), - (0x1D54D, "M", "v"), - (0x1D54E, "M", "w"), - (0x1D54F, "M", "x"), - (0x1D550, "M", "y"), - (0x1D551, "X"), - (0x1D552, "M", "a"), - (0x1D553, "M", "b"), - (0x1D554, "M", "c"), - (0x1D555, "M", "d"), - (0x1D556, "M", "e"), - (0x1D557, "M", "f"), - (0x1D558, "M", "g"), - (0x1D559, "M", "h"), - (0x1D55A, "M", "i"), - (0x1D55B, "M", "j"), - (0x1D55C, "M", "k"), - (0x1D55D, "M", "l"), - (0x1D55E, "M", "m"), - (0x1D55F, "M", "n"), - (0x1D560, "M", "o"), - (0x1D561, "M", "p"), - (0x1D562, "M", "q"), - (0x1D563, "M", "r"), - (0x1D564, "M", "s"), - (0x1D565, "M", "t"), - (0x1D566, "M", "u"), - (0x1D567, "M", "v"), - (0x1D568, "M", "w"), - (0x1D569, "M", "x"), - (0x1D56A, "M", "y"), - (0x1D56B, "M", "z"), - (0x1D56C, "M", "a"), - (0x1D56D, "M", "b"), - (0x1D56E, "M", "c"), - (0x1D56F, "M", "d"), - (0x1D570, "M", "e"), - (0x1D571, "M", "f"), - (0x1D572, "M", "g"), - (0x1D573, "M", "h"), - (0x1D574, "M", "i"), - (0x1D575, "M", "j"), - (0x1D576, "M", "k"), - (0x1D577, "M", "l"), - (0x1D578, "M", "m"), - (0x1D579, "M", "n"), - (0x1D57A, "M", "o"), - (0x1D57B, "M", "p"), - (0x1D57C, "M", "q"), - (0x1D57D, "M", "r"), - (0x1D57E, "M", "s"), - (0x1D57F, "M", "t"), - (0x1D580, "M", "u"), - (0x1D581, "M", "v"), - (0x1D582, "M", "w"), - (0x1D583, "M", "x"), - (0x1D584, "M", "y"), - (0x1D585, "M", "z"), - (0x1D586, "M", "a"), - (0x1D587, "M", "b"), - (0x1D588, "M", "c"), - (0x1D589, "M", "d"), - (0x1D58A, "M", "e"), - (0x1D58B, "M", "f"), - (0x1D58C, "M", "g"), - (0x1D58D, "M", "h"), - ] - - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D58E, "M", "i"), - (0x1D58F, "M", "j"), - (0x1D590, "M", "k"), - (0x1D591, "M", 
"l"), - (0x1D592, "M", "m"), - (0x1D593, "M", "n"), - (0x1D594, "M", "o"), - (0x1D595, "M", "p"), - (0x1D596, "M", "q"), - (0x1D597, "M", "r"), - (0x1D598, "M", "s"), - (0x1D599, "M", "t"), - (0x1D59A, "M", "u"), - (0x1D59B, "M", "v"), - (0x1D59C, "M", "w"), - (0x1D59D, "M", "x"), - (0x1D59E, "M", "y"), - (0x1D59F, "M", "z"), - (0x1D5A0, "M", "a"), - (0x1D5A1, "M", "b"), - (0x1D5A2, "M", "c"), - (0x1D5A3, "M", "d"), - (0x1D5A4, "M", "e"), - (0x1D5A5, "M", "f"), - (0x1D5A6, "M", "g"), - (0x1D5A7, "M", "h"), - (0x1D5A8, "M", "i"), - (0x1D5A9, "M", "j"), - (0x1D5AA, "M", "k"), - (0x1D5AB, "M", "l"), - (0x1D5AC, "M", "m"), - (0x1D5AD, "M", "n"), - (0x1D5AE, "M", "o"), - (0x1D5AF, "M", "p"), - (0x1D5B0, "M", "q"), - (0x1D5B1, "M", "r"), - (0x1D5B2, "M", "s"), - (0x1D5B3, "M", "t"), - (0x1D5B4, "M", "u"), - (0x1D5B5, "M", "v"), - (0x1D5B6, "M", "w"), - (0x1D5B7, "M", "x"), - (0x1D5B8, "M", "y"), - (0x1D5B9, "M", "z"), - (0x1D5BA, "M", "a"), - (0x1D5BB, "M", "b"), - (0x1D5BC, "M", "c"), - (0x1D5BD, "M", "d"), - (0x1D5BE, "M", "e"), - (0x1D5BF, "M", "f"), - (0x1D5C0, "M", "g"), - (0x1D5C1, "M", "h"), - (0x1D5C2, "M", "i"), - (0x1D5C3, "M", "j"), - (0x1D5C4, "M", "k"), - (0x1D5C5, "M", "l"), - (0x1D5C6, "M", "m"), - (0x1D5C7, "M", "n"), - (0x1D5C8, "M", "o"), - (0x1D5C9, "M", "p"), - (0x1D5CA, "M", "q"), - (0x1D5CB, "M", "r"), - (0x1D5CC, "M", "s"), - (0x1D5CD, "M", "t"), - (0x1D5CE, "M", "u"), - (0x1D5CF, "M", "v"), - (0x1D5D0, "M", "w"), - (0x1D5D1, "M", "x"), - (0x1D5D2, "M", "y"), - (0x1D5D3, "M", "z"), - (0x1D5D4, "M", "a"), - (0x1D5D5, "M", "b"), - (0x1D5D6, "M", "c"), - (0x1D5D7, "M", "d"), - (0x1D5D8, "M", "e"), - (0x1D5D9, "M", "f"), - (0x1D5DA, "M", "g"), - (0x1D5DB, "M", "h"), - (0x1D5DC, "M", "i"), - (0x1D5DD, "M", "j"), - (0x1D5DE, "M", "k"), - (0x1D5DF, "M", "l"), - (0x1D5E0, "M", "m"), - (0x1D5E1, "M", "n"), - (0x1D5E2, "M", "o"), - (0x1D5E3, "M", "p"), - (0x1D5E4, "M", "q"), - (0x1D5E5, "M", "r"), - (0x1D5E6, "M", "s"), - (0x1D5E7, "M", "t"), - (0x1D5E8, "M", "u"), - (0x1D5E9, "M", "v"), - (0x1D5EA, "M", "w"), - (0x1D5EB, "M", "x"), - (0x1D5EC, "M", "y"), - (0x1D5ED, "M", "z"), - (0x1D5EE, "M", "a"), - (0x1D5EF, "M", "b"), - (0x1D5F0, "M", "c"), - (0x1D5F1, "M", "d"), - ] - - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D5F2, "M", "e"), - (0x1D5F3, "M", "f"), - (0x1D5F4, "M", "g"), - (0x1D5F5, "M", "h"), - (0x1D5F6, "M", "i"), - (0x1D5F7, "M", "j"), - (0x1D5F8, "M", "k"), - (0x1D5F9, "M", "l"), - (0x1D5FA, "M", "m"), - (0x1D5FB, "M", "n"), - (0x1D5FC, "M", "o"), - (0x1D5FD, "M", "p"), - (0x1D5FE, "M", "q"), - (0x1D5FF, "M", "r"), - (0x1D600, "M", "s"), - (0x1D601, "M", "t"), - (0x1D602, "M", "u"), - (0x1D603, "M", "v"), - (0x1D604, "M", "w"), - (0x1D605, "M", "x"), - (0x1D606, "M", "y"), - (0x1D607, "M", "z"), - (0x1D608, "M", "a"), - (0x1D609, "M", "b"), - (0x1D60A, "M", "c"), - (0x1D60B, "M", "d"), - (0x1D60C, "M", "e"), - (0x1D60D, "M", "f"), - (0x1D60E, "M", "g"), - (0x1D60F, "M", "h"), - (0x1D610, "M", "i"), - (0x1D611, "M", "j"), - (0x1D612, "M", "k"), - (0x1D613, "M", "l"), - (0x1D614, "M", "m"), - (0x1D615, "M", "n"), - (0x1D616, "M", "o"), - (0x1D617, "M", "p"), - (0x1D618, "M", "q"), - (0x1D619, "M", "r"), - (0x1D61A, "M", "s"), - (0x1D61B, "M", "t"), - (0x1D61C, "M", "u"), - (0x1D61D, "M", "v"), - (0x1D61E, "M", "w"), - (0x1D61F, "M", "x"), - (0x1D620, "M", "y"), - (0x1D621, "M", "z"), - (0x1D622, "M", "a"), - (0x1D623, "M", "b"), - (0x1D624, "M", "c"), - (0x1D625, "M", "d"), - (0x1D626, "M", "e"), - (0x1D627, "M", "f"), - 
(0x1D628, "M", "g"), - (0x1D629, "M", "h"), - (0x1D62A, "M", "i"), - (0x1D62B, "M", "j"), - (0x1D62C, "M", "k"), - (0x1D62D, "M", "l"), - (0x1D62E, "M", "m"), - (0x1D62F, "M", "n"), - (0x1D630, "M", "o"), - (0x1D631, "M", "p"), - (0x1D632, "M", "q"), - (0x1D633, "M", "r"), - (0x1D634, "M", "s"), - (0x1D635, "M", "t"), - (0x1D636, "M", "u"), - (0x1D637, "M", "v"), - (0x1D638, "M", "w"), - (0x1D639, "M", "x"), - (0x1D63A, "M", "y"), - (0x1D63B, "M", "z"), - (0x1D63C, "M", "a"), - (0x1D63D, "M", "b"), - (0x1D63E, "M", "c"), - (0x1D63F, "M", "d"), - (0x1D640, "M", "e"), - (0x1D641, "M", "f"), - (0x1D642, "M", "g"), - (0x1D643, "M", "h"), - (0x1D644, "M", "i"), - (0x1D645, "M", "j"), - (0x1D646, "M", "k"), - (0x1D647, "M", "l"), - (0x1D648, "M", "m"), - (0x1D649, "M", "n"), - (0x1D64A, "M", "o"), - (0x1D64B, "M", "p"), - (0x1D64C, "M", "q"), - (0x1D64D, "M", "r"), - (0x1D64E, "M", "s"), - (0x1D64F, "M", "t"), - (0x1D650, "M", "u"), - (0x1D651, "M", "v"), - (0x1D652, "M", "w"), - (0x1D653, "M", "x"), - (0x1D654, "M", "y"), - (0x1D655, "M", "z"), - ] - - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D656, "M", "a"), - (0x1D657, "M", "b"), - (0x1D658, "M", "c"), - (0x1D659, "M", "d"), - (0x1D65A, "M", "e"), - (0x1D65B, "M", "f"), - (0x1D65C, "M", "g"), - (0x1D65D, "M", "h"), - (0x1D65E, "M", "i"), - (0x1D65F, "M", "j"), - (0x1D660, "M", "k"), - (0x1D661, "M", "l"), - (0x1D662, "M", "m"), - (0x1D663, "M", "n"), - (0x1D664, "M", "o"), - (0x1D665, "M", "p"), - (0x1D666, "M", "q"), - (0x1D667, "M", "r"), - (0x1D668, "M", "s"), - (0x1D669, "M", "t"), - (0x1D66A, "M", "u"), - (0x1D66B, "M", "v"), - (0x1D66C, "M", "w"), - (0x1D66D, "M", "x"), - (0x1D66E, "M", "y"), - (0x1D66F, "M", "z"), - (0x1D670, "M", "a"), - (0x1D671, "M", "b"), - (0x1D672, "M", "c"), - (0x1D673, "M", "d"), - (0x1D674, "M", "e"), - (0x1D675, "M", "f"), - (0x1D676, "M", "g"), - (0x1D677, "M", "h"), - (0x1D678, "M", "i"), - (0x1D679, "M", "j"), - (0x1D67A, "M", "k"), - (0x1D67B, "M", "l"), - (0x1D67C, "M", "m"), - (0x1D67D, "M", "n"), - (0x1D67E, "M", "o"), - (0x1D67F, "M", "p"), - (0x1D680, "M", "q"), - (0x1D681, "M", "r"), - (0x1D682, "M", "s"), - (0x1D683, "M", "t"), - (0x1D684, "M", "u"), - (0x1D685, "M", "v"), - (0x1D686, "M", "w"), - (0x1D687, "M", "x"), - (0x1D688, "M", "y"), - (0x1D689, "M", "z"), - (0x1D68A, "M", "a"), - (0x1D68B, "M", "b"), - (0x1D68C, "M", "c"), - (0x1D68D, "M", "d"), - (0x1D68E, "M", "e"), - (0x1D68F, "M", "f"), - (0x1D690, "M", "g"), - (0x1D691, "M", "h"), - (0x1D692, "M", "i"), - (0x1D693, "M", "j"), - (0x1D694, "M", "k"), - (0x1D695, "M", "l"), - (0x1D696, "M", "m"), - (0x1D697, "M", "n"), - (0x1D698, "M", "o"), - (0x1D699, "M", "p"), - (0x1D69A, "M", "q"), - (0x1D69B, "M", "r"), - (0x1D69C, "M", "s"), - (0x1D69D, "M", "t"), - (0x1D69E, "M", "u"), - (0x1D69F, "M", "v"), - (0x1D6A0, "M", "w"), - (0x1D6A1, "M", "x"), - (0x1D6A2, "M", "y"), - (0x1D6A3, "M", "z"), - (0x1D6A4, "M", "ı"), - (0x1D6A5, "M", "ȷ"), - (0x1D6A6, "X"), - (0x1D6A8, "M", "α"), - (0x1D6A9, "M", "β"), - (0x1D6AA, "M", "γ"), - (0x1D6AB, "M", "δ"), - (0x1D6AC, "M", "ε"), - (0x1D6AD, "M", "ζ"), - (0x1D6AE, "M", "η"), - (0x1D6AF, "M", "θ"), - (0x1D6B0, "M", "ι"), - (0x1D6B1, "M", "κ"), - (0x1D6B2, "M", "λ"), - (0x1D6B3, "M", "μ"), - (0x1D6B4, "M", "ν"), - (0x1D6B5, "M", "ξ"), - (0x1D6B6, "M", "ο"), - (0x1D6B7, "M", "π"), - (0x1D6B8, "M", "ρ"), - (0x1D6B9, "M", "θ"), - (0x1D6BA, "M", "σ"), - ] - - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D6BB, "M", "τ"), - 
(0x1D6BC, "M", "υ"), - (0x1D6BD, "M", "φ"), - (0x1D6BE, "M", "χ"), - (0x1D6BF, "M", "ψ"), - (0x1D6C0, "M", "ω"), - (0x1D6C1, "M", "∇"), - (0x1D6C2, "M", "α"), - (0x1D6C3, "M", "β"), - (0x1D6C4, "M", "γ"), - (0x1D6C5, "M", "δ"), - (0x1D6C6, "M", "ε"), - (0x1D6C7, "M", "ζ"), - (0x1D6C8, "M", "η"), - (0x1D6C9, "M", "θ"), - (0x1D6CA, "M", "ι"), - (0x1D6CB, "M", "κ"), - (0x1D6CC, "M", "λ"), - (0x1D6CD, "M", "μ"), - (0x1D6CE, "M", "ν"), - (0x1D6CF, "M", "ξ"), - (0x1D6D0, "M", "ο"), - (0x1D6D1, "M", "π"), - (0x1D6D2, "M", "ρ"), - (0x1D6D3, "M", "σ"), - (0x1D6D5, "M", "τ"), - (0x1D6D6, "M", "υ"), - (0x1D6D7, "M", "φ"), - (0x1D6D8, "M", "χ"), - (0x1D6D9, "M", "ψ"), - (0x1D6DA, "M", "ω"), - (0x1D6DB, "M", "∂"), - (0x1D6DC, "M", "ε"), - (0x1D6DD, "M", "θ"), - (0x1D6DE, "M", "κ"), - (0x1D6DF, "M", "φ"), - (0x1D6E0, "M", "ρ"), - (0x1D6E1, "M", "π"), - (0x1D6E2, "M", "α"), - (0x1D6E3, "M", "β"), - (0x1D6E4, "M", "γ"), - (0x1D6E5, "M", "δ"), - (0x1D6E6, "M", "ε"), - (0x1D6E7, "M", "ζ"), - (0x1D6E8, "M", "η"), - (0x1D6E9, "M", "θ"), - (0x1D6EA, "M", "ι"), - (0x1D6EB, "M", "κ"), - (0x1D6EC, "M", "λ"), - (0x1D6ED, "M", "μ"), - (0x1D6EE, "M", "ν"), - (0x1D6EF, "M", "ξ"), - (0x1D6F0, "M", "ο"), - (0x1D6F1, "M", "π"), - (0x1D6F2, "M", "ρ"), - (0x1D6F3, "M", "θ"), - (0x1D6F4, "M", "σ"), - (0x1D6F5, "M", "τ"), - (0x1D6F6, "M", "υ"), - (0x1D6F7, "M", "φ"), - (0x1D6F8, "M", "χ"), - (0x1D6F9, "M", "ψ"), - (0x1D6FA, "M", "ω"), - (0x1D6FB, "M", "∇"), - (0x1D6FC, "M", "α"), - (0x1D6FD, "M", "β"), - (0x1D6FE, "M", "γ"), - (0x1D6FF, "M", "δ"), - (0x1D700, "M", "ε"), - (0x1D701, "M", "ζ"), - (0x1D702, "M", "η"), - (0x1D703, "M", "θ"), - (0x1D704, "M", "ι"), - (0x1D705, "M", "κ"), - (0x1D706, "M", "λ"), - (0x1D707, "M", "μ"), - (0x1D708, "M", "ν"), - (0x1D709, "M", "ξ"), - (0x1D70A, "M", "ο"), - (0x1D70B, "M", "π"), - (0x1D70C, "M", "ρ"), - (0x1D70D, "M", "σ"), - (0x1D70F, "M", "τ"), - (0x1D710, "M", "υ"), - (0x1D711, "M", "φ"), - (0x1D712, "M", "χ"), - (0x1D713, "M", "ψ"), - (0x1D714, "M", "ω"), - (0x1D715, "M", "∂"), - (0x1D716, "M", "ε"), - (0x1D717, "M", "θ"), - (0x1D718, "M", "κ"), - (0x1D719, "M", "φ"), - (0x1D71A, "M", "ρ"), - (0x1D71B, "M", "π"), - (0x1D71C, "M", "α"), - (0x1D71D, "M", "β"), - (0x1D71E, "M", "γ"), - (0x1D71F, "M", "δ"), - (0x1D720, "M", "ε"), - ] - - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D721, "M", "ζ"), - (0x1D722, "M", "η"), - (0x1D723, "M", "θ"), - (0x1D724, "M", "ι"), - (0x1D725, "M", "κ"), - (0x1D726, "M", "λ"), - (0x1D727, "M", "μ"), - (0x1D728, "M", "ν"), - (0x1D729, "M", "ξ"), - (0x1D72A, "M", "ο"), - (0x1D72B, "M", "π"), - (0x1D72C, "M", "ρ"), - (0x1D72D, "M", "θ"), - (0x1D72E, "M", "σ"), - (0x1D72F, "M", "τ"), - (0x1D730, "M", "υ"), - (0x1D731, "M", "φ"), - (0x1D732, "M", "χ"), - (0x1D733, "M", "ψ"), - (0x1D734, "M", "ω"), - (0x1D735, "M", "∇"), - (0x1D736, "M", "α"), - (0x1D737, "M", "β"), - (0x1D738, "M", "γ"), - (0x1D739, "M", "δ"), - (0x1D73A, "M", "ε"), - (0x1D73B, "M", "ζ"), - (0x1D73C, "M", "η"), - (0x1D73D, "M", "θ"), - (0x1D73E, "M", "ι"), - (0x1D73F, "M", "κ"), - (0x1D740, "M", "λ"), - (0x1D741, "M", "μ"), - (0x1D742, "M", "ν"), - (0x1D743, "M", "ξ"), - (0x1D744, "M", "ο"), - (0x1D745, "M", "π"), - (0x1D746, "M", "ρ"), - (0x1D747, "M", "σ"), - (0x1D749, "M", "τ"), - (0x1D74A, "M", "υ"), - (0x1D74B, "M", "φ"), - (0x1D74C, "M", "χ"), - (0x1D74D, "M", "ψ"), - (0x1D74E, "M", "ω"), - (0x1D74F, "M", "∂"), - (0x1D750, "M", "ε"), - (0x1D751, "M", "θ"), - (0x1D752, "M", "κ"), - (0x1D753, "M", "φ"), - (0x1D754, "M", "ρ"), - (0x1D755, "M", 
"π"), - (0x1D756, "M", "α"), - (0x1D757, "M", "β"), - (0x1D758, "M", "γ"), - (0x1D759, "M", "δ"), - (0x1D75A, "M", "ε"), - (0x1D75B, "M", "ζ"), - (0x1D75C, "M", "η"), - (0x1D75D, "M", "θ"), - (0x1D75E, "M", "ι"), - (0x1D75F, "M", "κ"), - (0x1D760, "M", "λ"), - (0x1D761, "M", "μ"), - (0x1D762, "M", "ν"), - (0x1D763, "M", "ξ"), - (0x1D764, "M", "ο"), - (0x1D765, "M", "π"), - (0x1D766, "M", "ρ"), - (0x1D767, "M", "θ"), - (0x1D768, "M", "σ"), - (0x1D769, "M", "τ"), - (0x1D76A, "M", "υ"), - (0x1D76B, "M", "φ"), - (0x1D76C, "M", "χ"), - (0x1D76D, "M", "ψ"), - (0x1D76E, "M", "ω"), - (0x1D76F, "M", "∇"), - (0x1D770, "M", "α"), - (0x1D771, "M", "β"), - (0x1D772, "M", "γ"), - (0x1D773, "M", "δ"), - (0x1D774, "M", "ε"), - (0x1D775, "M", "ζ"), - (0x1D776, "M", "η"), - (0x1D777, "M", "θ"), - (0x1D778, "M", "ι"), - (0x1D779, "M", "κ"), - (0x1D77A, "M", "λ"), - (0x1D77B, "M", "μ"), - (0x1D77C, "M", "ν"), - (0x1D77D, "M", "ξ"), - (0x1D77E, "M", "ο"), - (0x1D77F, "M", "π"), - (0x1D780, "M", "ρ"), - (0x1D781, "M", "σ"), - (0x1D783, "M", "τ"), - (0x1D784, "M", "υ"), - (0x1D785, "M", "φ"), - (0x1D786, "M", "χ"), - ] - - -def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D787, "M", "ψ"), - (0x1D788, "M", "ω"), - (0x1D789, "M", "∂"), - (0x1D78A, "M", "ε"), - (0x1D78B, "M", "θ"), - (0x1D78C, "M", "κ"), - (0x1D78D, "M", "φ"), - (0x1D78E, "M", "ρ"), - (0x1D78F, "M", "π"), - (0x1D790, "M", "α"), - (0x1D791, "M", "β"), - (0x1D792, "M", "γ"), - (0x1D793, "M", "δ"), - (0x1D794, "M", "ε"), - (0x1D795, "M", "ζ"), - (0x1D796, "M", "η"), - (0x1D797, "M", "θ"), - (0x1D798, "M", "ι"), - (0x1D799, "M", "κ"), - (0x1D79A, "M", "λ"), - (0x1D79B, "M", "μ"), - (0x1D79C, "M", "ν"), - (0x1D79D, "M", "ξ"), - (0x1D79E, "M", "ο"), - (0x1D79F, "M", "π"), - (0x1D7A0, "M", "ρ"), - (0x1D7A1, "M", "θ"), - (0x1D7A2, "M", "σ"), - (0x1D7A3, "M", "τ"), - (0x1D7A4, "M", "υ"), - (0x1D7A5, "M", "φ"), - (0x1D7A6, "M", "χ"), - (0x1D7A7, "M", "ψ"), - (0x1D7A8, "M", "ω"), - (0x1D7A9, "M", "∇"), - (0x1D7AA, "M", "α"), - (0x1D7AB, "M", "β"), - (0x1D7AC, "M", "γ"), - (0x1D7AD, "M", "δ"), - (0x1D7AE, "M", "ε"), - (0x1D7AF, "M", "ζ"), - (0x1D7B0, "M", "η"), - (0x1D7B1, "M", "θ"), - (0x1D7B2, "M", "ι"), - (0x1D7B3, "M", "κ"), - (0x1D7B4, "M", "λ"), - (0x1D7B5, "M", "μ"), - (0x1D7B6, "M", "ν"), - (0x1D7B7, "M", "ξ"), - (0x1D7B8, "M", "ο"), - (0x1D7B9, "M", "π"), - (0x1D7BA, "M", "ρ"), - (0x1D7BB, "M", "σ"), - (0x1D7BD, "M", "τ"), - (0x1D7BE, "M", "υ"), - (0x1D7BF, "M", "φ"), - (0x1D7C0, "M", "χ"), - (0x1D7C1, "M", "ψ"), - (0x1D7C2, "M", "ω"), - (0x1D7C3, "M", "∂"), - (0x1D7C4, "M", "ε"), - (0x1D7C5, "M", "θ"), - (0x1D7C6, "M", "κ"), - (0x1D7C7, "M", "φ"), - (0x1D7C8, "M", "ρ"), - (0x1D7C9, "M", "π"), - (0x1D7CA, "M", "ϝ"), - (0x1D7CC, "X"), - (0x1D7CE, "M", "0"), - (0x1D7CF, "M", "1"), - (0x1D7D0, "M", "2"), - (0x1D7D1, "M", "3"), - (0x1D7D2, "M", "4"), - (0x1D7D3, "M", "5"), - (0x1D7D4, "M", "6"), - (0x1D7D5, "M", "7"), - (0x1D7D6, "M", "8"), - (0x1D7D7, "M", "9"), - (0x1D7D8, "M", "0"), - (0x1D7D9, "M", "1"), - (0x1D7DA, "M", "2"), - (0x1D7DB, "M", "3"), - (0x1D7DC, "M", "4"), - (0x1D7DD, "M", "5"), - (0x1D7DE, "M", "6"), - (0x1D7DF, "M", "7"), - (0x1D7E0, "M", "8"), - (0x1D7E1, "M", "9"), - (0x1D7E2, "M", "0"), - (0x1D7E3, "M", "1"), - (0x1D7E4, "M", "2"), - (0x1D7E5, "M", "3"), - (0x1D7E6, "M", "4"), - (0x1D7E7, "M", "5"), - (0x1D7E8, "M", "6"), - (0x1D7E9, "M", "7"), - (0x1D7EA, "M", "8"), - (0x1D7EB, "M", "9"), - (0x1D7EC, "M", "0"), - (0x1D7ED, "M", "1"), - ] - - -def _seg_72() -> List[Union[Tuple[int, str], 
Tuple[int, str, str]]]: - return [ - (0x1D7EE, "M", "2"), - (0x1D7EF, "M", "3"), - (0x1D7F0, "M", "4"), - (0x1D7F1, "M", "5"), - (0x1D7F2, "M", "6"), - (0x1D7F3, "M", "7"), - (0x1D7F4, "M", "8"), - (0x1D7F5, "M", "9"), - (0x1D7F6, "M", "0"), - (0x1D7F7, "M", "1"), - (0x1D7F8, "M", "2"), - (0x1D7F9, "M", "3"), - (0x1D7FA, "M", "4"), - (0x1D7FB, "M", "5"), - (0x1D7FC, "M", "6"), - (0x1D7FD, "M", "7"), - (0x1D7FE, "M", "8"), - (0x1D7FF, "M", "9"), - (0x1D800, "V"), - (0x1DA8C, "X"), - (0x1DA9B, "V"), - (0x1DAA0, "X"), - (0x1DAA1, "V"), - (0x1DAB0, "X"), - (0x1DF00, "V"), - (0x1DF1F, "X"), - (0x1DF25, "V"), - (0x1DF2B, "X"), - (0x1E000, "V"), - (0x1E007, "X"), - (0x1E008, "V"), - (0x1E019, "X"), - (0x1E01B, "V"), - (0x1E022, "X"), - (0x1E023, "V"), - (0x1E025, "X"), - (0x1E026, "V"), - (0x1E02B, "X"), - (0x1E030, "M", "а"), - (0x1E031, "M", "б"), - (0x1E032, "M", "в"), - (0x1E033, "M", "г"), - (0x1E034, "M", "д"), - (0x1E035, "M", "е"), - (0x1E036, "M", "ж"), - (0x1E037, "M", "з"), - (0x1E038, "M", "и"), - (0x1E039, "M", "к"), - (0x1E03A, "M", "л"), - (0x1E03B, "M", "м"), - (0x1E03C, "M", "о"), - (0x1E03D, "M", "п"), - (0x1E03E, "M", "р"), - (0x1E03F, "M", "с"), - (0x1E040, "M", "т"), - (0x1E041, "M", "у"), - (0x1E042, "M", "ф"), - (0x1E043, "M", "х"), - (0x1E044, "M", "ц"), - (0x1E045, "M", "ч"), - (0x1E046, "M", "ш"), - (0x1E047, "M", "ы"), - (0x1E048, "M", "э"), - (0x1E049, "M", "ю"), - (0x1E04A, "M", "ꚉ"), - (0x1E04B, "M", "ә"), - (0x1E04C, "M", "і"), - (0x1E04D, "M", "ј"), - (0x1E04E, "M", "ө"), - (0x1E04F, "M", "ү"), - (0x1E050, "M", "ӏ"), - (0x1E051, "M", "а"), - (0x1E052, "M", "б"), - (0x1E053, "M", "в"), - (0x1E054, "M", "г"), - (0x1E055, "M", "д"), - (0x1E056, "M", "е"), - (0x1E057, "M", "ж"), - (0x1E058, "M", "з"), - (0x1E059, "M", "и"), - (0x1E05A, "M", "к"), - (0x1E05B, "M", "л"), - (0x1E05C, "M", "о"), - (0x1E05D, "M", "п"), - (0x1E05E, "M", "с"), - (0x1E05F, "M", "у"), - (0x1E060, "M", "ф"), - (0x1E061, "M", "х"), - (0x1E062, "M", "ц"), - (0x1E063, "M", "ч"), - (0x1E064, "M", "ш"), - (0x1E065, "M", "ъ"), - (0x1E066, "M", "ы"), - (0x1E067, "M", "ґ"), - (0x1E068, "M", "і"), - (0x1E069, "M", "ѕ"), - (0x1E06A, "M", "џ"), - (0x1E06B, "M", "ҫ"), - (0x1E06C, "M", "ꙑ"), - (0x1E06D, "M", "ұ"), - ] - - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E06E, "X"), - (0x1E08F, "V"), - (0x1E090, "X"), - (0x1E100, "V"), - (0x1E12D, "X"), - (0x1E130, "V"), - (0x1E13E, "X"), - (0x1E140, "V"), - (0x1E14A, "X"), - (0x1E14E, "V"), - (0x1E150, "X"), - (0x1E290, "V"), - (0x1E2AF, "X"), - (0x1E2C0, "V"), - (0x1E2FA, "X"), - (0x1E2FF, "V"), - (0x1E300, "X"), - (0x1E4D0, "V"), - (0x1E4FA, "X"), - (0x1E5D0, "V"), - (0x1E5FB, "X"), - (0x1E5FF, "V"), - (0x1E600, "X"), - (0x1E7E0, "V"), - (0x1E7E7, "X"), - (0x1E7E8, "V"), - (0x1E7EC, "X"), - (0x1E7ED, "V"), - (0x1E7EF, "X"), - (0x1E7F0, "V"), - (0x1E7FF, "X"), - (0x1E800, "V"), - (0x1E8C5, "X"), - (0x1E8C7, "V"), - (0x1E8D7, "X"), - (0x1E900, "M", "𞤢"), - (0x1E901, "M", "𞤣"), - (0x1E902, "M", "𞤤"), - (0x1E903, "M", "𞤥"), - (0x1E904, "M", "𞤦"), - (0x1E905, "M", "𞤧"), - (0x1E906, "M", "𞤨"), - (0x1E907, "M", "𞤩"), - (0x1E908, "M", "𞤪"), - (0x1E909, "M", "𞤫"), - (0x1E90A, "M", "𞤬"), - (0x1E90B, "M", "𞤭"), - (0x1E90C, "M", "𞤮"), - (0x1E90D, "M", "𞤯"), - (0x1E90E, "M", "𞤰"), - (0x1E90F, "M", "𞤱"), - (0x1E910, "M", "𞤲"), - (0x1E911, "M", "𞤳"), - (0x1E912, "M", "𞤴"), - (0x1E913, "M", "𞤵"), - (0x1E914, "M", "𞤶"), - (0x1E915, "M", "𞤷"), - (0x1E916, "M", "𞤸"), - (0x1E917, "M", "𞤹"), - (0x1E918, "M", "𞤺"), - (0x1E919, "M", "𞤻"), - 
(0x1E91A, "M", "𞤼"), - (0x1E91B, "M", "𞤽"), - (0x1E91C, "M", "𞤾"), - (0x1E91D, "M", "𞤿"), - (0x1E91E, "M", "𞥀"), - (0x1E91F, "M", "𞥁"), - (0x1E920, "M", "𞥂"), - (0x1E921, "M", "𞥃"), - (0x1E922, "V"), - (0x1E94C, "X"), - (0x1E950, "V"), - (0x1E95A, "X"), - (0x1E95E, "V"), - (0x1E960, "X"), - (0x1EC71, "V"), - (0x1ECB5, "X"), - (0x1ED01, "V"), - (0x1ED3E, "X"), - (0x1EE00, "M", "ا"), - (0x1EE01, "M", "ب"), - (0x1EE02, "M", "ج"), - (0x1EE03, "M", "د"), - (0x1EE04, "X"), - (0x1EE05, "M", "و"), - (0x1EE06, "M", "ز"), - (0x1EE07, "M", "ح"), - (0x1EE08, "M", "ط"), - (0x1EE09, "M", "ي"), - (0x1EE0A, "M", "ك"), - (0x1EE0B, "M", "ل"), - (0x1EE0C, "M", "م"), - (0x1EE0D, "M", "ن"), - (0x1EE0E, "M", "س"), - (0x1EE0F, "M", "ع"), - (0x1EE10, "M", "ف"), - (0x1EE11, "M", "ص"), - (0x1EE12, "M", "ق"), - (0x1EE13, "M", "ر"), - (0x1EE14, "M", "ش"), - ] - - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EE15, "M", "ت"), - (0x1EE16, "M", "ث"), - (0x1EE17, "M", "خ"), - (0x1EE18, "M", "ذ"), - (0x1EE19, "M", "ض"), - (0x1EE1A, "M", "ظ"), - (0x1EE1B, "M", "غ"), - (0x1EE1C, "M", "ٮ"), - (0x1EE1D, "M", "ں"), - (0x1EE1E, "M", "ڡ"), - (0x1EE1F, "M", "ٯ"), - (0x1EE20, "X"), - (0x1EE21, "M", "ب"), - (0x1EE22, "M", "ج"), - (0x1EE23, "X"), - (0x1EE24, "M", "ه"), - (0x1EE25, "X"), - (0x1EE27, "M", "ح"), - (0x1EE28, "X"), - (0x1EE29, "M", "ي"), - (0x1EE2A, "M", "ك"), - (0x1EE2B, "M", "ل"), - (0x1EE2C, "M", "م"), - (0x1EE2D, "M", "ن"), - (0x1EE2E, "M", "س"), - (0x1EE2F, "M", "ع"), - (0x1EE30, "M", "ف"), - (0x1EE31, "M", "ص"), - (0x1EE32, "M", "ق"), - (0x1EE33, "X"), - (0x1EE34, "M", "ش"), - (0x1EE35, "M", "ت"), - (0x1EE36, "M", "ث"), - (0x1EE37, "M", "خ"), - (0x1EE38, "X"), - (0x1EE39, "M", "ض"), - (0x1EE3A, "X"), - (0x1EE3B, "M", "غ"), - (0x1EE3C, "X"), - (0x1EE42, "M", "ج"), - (0x1EE43, "X"), - (0x1EE47, "M", "ح"), - (0x1EE48, "X"), - (0x1EE49, "M", "ي"), - (0x1EE4A, "X"), - (0x1EE4B, "M", "ل"), - (0x1EE4C, "X"), - (0x1EE4D, "M", "ن"), - (0x1EE4E, "M", "س"), - (0x1EE4F, "M", "ع"), - (0x1EE50, "X"), - (0x1EE51, "M", "ص"), - (0x1EE52, "M", "ق"), - (0x1EE53, "X"), - (0x1EE54, "M", "ش"), - (0x1EE55, "X"), - (0x1EE57, "M", "خ"), - (0x1EE58, "X"), - (0x1EE59, "M", "ض"), - (0x1EE5A, "X"), - (0x1EE5B, "M", "غ"), - (0x1EE5C, "X"), - (0x1EE5D, "M", "ں"), - (0x1EE5E, "X"), - (0x1EE5F, "M", "ٯ"), - (0x1EE60, "X"), - (0x1EE61, "M", "ب"), - (0x1EE62, "M", "ج"), - (0x1EE63, "X"), - (0x1EE64, "M", "ه"), - (0x1EE65, "X"), - (0x1EE67, "M", "ح"), - (0x1EE68, "M", "ط"), - (0x1EE69, "M", "ي"), - (0x1EE6A, "M", "ك"), - (0x1EE6B, "X"), - (0x1EE6C, "M", "م"), - (0x1EE6D, "M", "ن"), - (0x1EE6E, "M", "س"), - (0x1EE6F, "M", "ع"), - (0x1EE70, "M", "ف"), - (0x1EE71, "M", "ص"), - (0x1EE72, "M", "ق"), - (0x1EE73, "X"), - (0x1EE74, "M", "ش"), - (0x1EE75, "M", "ت"), - (0x1EE76, "M", "ث"), - (0x1EE77, "M", "خ"), - (0x1EE78, "X"), - (0x1EE79, "M", "ض"), - (0x1EE7A, "M", "ظ"), - (0x1EE7B, "M", "غ"), - (0x1EE7C, "M", "ٮ"), - (0x1EE7D, "X"), - (0x1EE7E, "M", "ڡ"), - (0x1EE7F, "X"), - (0x1EE80, "M", "ا"), - (0x1EE81, "M", "ب"), - (0x1EE82, "M", "ج"), - (0x1EE83, "M", "د"), - ] - - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EE84, "M", "ه"), - (0x1EE85, "M", "و"), - (0x1EE86, "M", "ز"), - (0x1EE87, "M", "ح"), - (0x1EE88, "M", "ط"), - (0x1EE89, "M", "ي"), - (0x1EE8A, "X"), - (0x1EE8B, "M", "ل"), - (0x1EE8C, "M", "م"), - (0x1EE8D, "M", "ن"), - (0x1EE8E, "M", "س"), - (0x1EE8F, "M", "ع"), - (0x1EE90, "M", "ف"), - (0x1EE91, "M", "ص"), - (0x1EE92, "M", "ق"), - (0x1EE93, "M", "ر"), - 
(0x1EE94, "M", "ش"), - (0x1EE95, "M", "ت"), - (0x1EE96, "M", "ث"), - (0x1EE97, "M", "خ"), - (0x1EE98, "M", "ذ"), - (0x1EE99, "M", "ض"), - (0x1EE9A, "M", "ظ"), - (0x1EE9B, "M", "غ"), - (0x1EE9C, "X"), - (0x1EEA1, "M", "ب"), - (0x1EEA2, "M", "ج"), - (0x1EEA3, "M", "د"), - (0x1EEA4, "X"), - (0x1EEA5, "M", "و"), - (0x1EEA6, "M", "ز"), - (0x1EEA7, "M", "ح"), - (0x1EEA8, "M", "ط"), - (0x1EEA9, "M", "ي"), - (0x1EEAA, "X"), - (0x1EEAB, "M", "ل"), - (0x1EEAC, "M", "م"), - (0x1EEAD, "M", "ن"), - (0x1EEAE, "M", "س"), - (0x1EEAF, "M", "ع"), - (0x1EEB0, "M", "ف"), - (0x1EEB1, "M", "ص"), - (0x1EEB2, "M", "ق"), - (0x1EEB3, "M", "ر"), - (0x1EEB4, "M", "ش"), - (0x1EEB5, "M", "ت"), - (0x1EEB6, "M", "ث"), - (0x1EEB7, "M", "خ"), - (0x1EEB8, "M", "ذ"), - (0x1EEB9, "M", "ض"), - (0x1EEBA, "M", "ظ"), - (0x1EEBB, "M", "غ"), - (0x1EEBC, "X"), - (0x1EEF0, "V"), - (0x1EEF2, "X"), - (0x1F000, "V"), - (0x1F02C, "X"), - (0x1F030, "V"), - (0x1F094, "X"), - (0x1F0A0, "V"), - (0x1F0AF, "X"), - (0x1F0B1, "V"), - (0x1F0C0, "X"), - (0x1F0C1, "V"), - (0x1F0D0, "X"), - (0x1F0D1, "V"), - (0x1F0F6, "X"), - (0x1F101, "M", "0,"), - (0x1F102, "M", "1,"), - (0x1F103, "M", "2,"), - (0x1F104, "M", "3,"), - (0x1F105, "M", "4,"), - (0x1F106, "M", "5,"), - (0x1F107, "M", "6,"), - (0x1F108, "M", "7,"), - (0x1F109, "M", "8,"), - (0x1F10A, "M", "9,"), - (0x1F10B, "V"), - (0x1F110, "M", "(a)"), - (0x1F111, "M", "(b)"), - (0x1F112, "M", "(c)"), - (0x1F113, "M", "(d)"), - (0x1F114, "M", "(e)"), - (0x1F115, "M", "(f)"), - (0x1F116, "M", "(g)"), - (0x1F117, "M", "(h)"), - (0x1F118, "M", "(i)"), - (0x1F119, "M", "(j)"), - (0x1F11A, "M", "(k)"), - (0x1F11B, "M", "(l)"), - (0x1F11C, "M", "(m)"), - (0x1F11D, "M", "(n)"), - (0x1F11E, "M", "(o)"), - (0x1F11F, "M", "(p)"), - (0x1F120, "M", "(q)"), - (0x1F121, "M", "(r)"), - (0x1F122, "M", "(s)"), - (0x1F123, "M", "(t)"), - (0x1F124, "M", "(u)"), - (0x1F125, "M", "(v)"), - ] - - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F126, "M", "(w)"), - (0x1F127, "M", "(x)"), - (0x1F128, "M", "(y)"), - (0x1F129, "M", "(z)"), - (0x1F12A, "M", "〔s〕"), - (0x1F12B, "M", "c"), - (0x1F12C, "M", "r"), - (0x1F12D, "M", "cd"), - (0x1F12E, "M", "wz"), - (0x1F12F, "V"), - (0x1F130, "M", "a"), - (0x1F131, "M", "b"), - (0x1F132, "M", "c"), - (0x1F133, "M", "d"), - (0x1F134, "M", "e"), - (0x1F135, "M", "f"), - (0x1F136, "M", "g"), - (0x1F137, "M", "h"), - (0x1F138, "M", "i"), - (0x1F139, "M", "j"), - (0x1F13A, "M", "k"), - (0x1F13B, "M", "l"), - (0x1F13C, "M", "m"), - (0x1F13D, "M", "n"), - (0x1F13E, "M", "o"), - (0x1F13F, "M", "p"), - (0x1F140, "M", "q"), - (0x1F141, "M", "r"), - (0x1F142, "M", "s"), - (0x1F143, "M", "t"), - (0x1F144, "M", "u"), - (0x1F145, "M", "v"), - (0x1F146, "M", "w"), - (0x1F147, "M", "x"), - (0x1F148, "M", "y"), - (0x1F149, "M", "z"), - (0x1F14A, "M", "hv"), - (0x1F14B, "M", "mv"), - (0x1F14C, "M", "sd"), - (0x1F14D, "M", "ss"), - (0x1F14E, "M", "ppv"), - (0x1F14F, "M", "wc"), - (0x1F150, "V"), - (0x1F16A, "M", "mc"), - (0x1F16B, "M", "md"), - (0x1F16C, "M", "mr"), - (0x1F16D, "V"), - (0x1F190, "M", "dj"), - (0x1F191, "V"), - (0x1F1AE, "X"), - (0x1F1E6, "V"), - (0x1F200, "M", "ほか"), - (0x1F201, "M", "ココ"), - (0x1F202, "M", "サ"), - (0x1F203, "X"), - (0x1F210, "M", "手"), - (0x1F211, "M", "字"), - (0x1F212, "M", "双"), - (0x1F213, "M", "デ"), - (0x1F214, "M", "二"), - (0x1F215, "M", "多"), - (0x1F216, "M", "解"), - (0x1F217, "M", "天"), - (0x1F218, "M", "交"), - (0x1F219, "M", "映"), - (0x1F21A, "M", "無"), - (0x1F21B, "M", "料"), - (0x1F21C, "M", "前"), - (0x1F21D, "M", "後"), 
- (0x1F21E, "M", "再"), - (0x1F21F, "M", "新"), - (0x1F220, "M", "初"), - (0x1F221, "M", "終"), - (0x1F222, "M", "生"), - (0x1F223, "M", "販"), - (0x1F224, "M", "声"), - (0x1F225, "M", "吹"), - (0x1F226, "M", "演"), - (0x1F227, "M", "投"), - (0x1F228, "M", "捕"), - (0x1F229, "M", "一"), - (0x1F22A, "M", "三"), - (0x1F22B, "M", "遊"), - (0x1F22C, "M", "左"), - (0x1F22D, "M", "中"), - (0x1F22E, "M", "右"), - (0x1F22F, "M", "指"), - (0x1F230, "M", "走"), - (0x1F231, "M", "打"), - (0x1F232, "M", "禁"), - (0x1F233, "M", "空"), - (0x1F234, "M", "合"), - (0x1F235, "M", "満"), - (0x1F236, "M", "有"), - (0x1F237, "M", "月"), - (0x1F238, "M", "申"), - (0x1F239, "M", "割"), - (0x1F23A, "M", "営"), - (0x1F23B, "M", "配"), - (0x1F23C, "X"), - ] - - -def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F240, "M", "〔本〕"), - (0x1F241, "M", "〔三〕"), - (0x1F242, "M", "〔二〕"), - (0x1F243, "M", "〔安〕"), - (0x1F244, "M", "〔点〕"), - (0x1F245, "M", "〔打〕"), - (0x1F246, "M", "〔盗〕"), - (0x1F247, "M", "〔勝〕"), - (0x1F248, "M", "〔敗〕"), - (0x1F249, "X"), - (0x1F250, "M", "得"), - (0x1F251, "M", "可"), - (0x1F252, "X"), - (0x1F260, "V"), - (0x1F266, "X"), - (0x1F300, "V"), - (0x1F6D8, "X"), - (0x1F6DC, "V"), - (0x1F6ED, "X"), - (0x1F6F0, "V"), - (0x1F6FD, "X"), - (0x1F700, "V"), - (0x1F777, "X"), - (0x1F77B, "V"), - (0x1F7DA, "X"), - (0x1F7E0, "V"), - (0x1F7EC, "X"), - (0x1F7F0, "V"), - (0x1F7F1, "X"), - (0x1F800, "V"), - (0x1F80C, "X"), - (0x1F810, "V"), - (0x1F848, "X"), - (0x1F850, "V"), - (0x1F85A, "X"), - (0x1F860, "V"), - (0x1F888, "X"), - (0x1F890, "V"), - (0x1F8AE, "X"), - (0x1F8B0, "V"), - (0x1F8BC, "X"), - (0x1F8C0, "V"), - (0x1F8C2, "X"), - (0x1F900, "V"), - (0x1FA54, "X"), - (0x1FA60, "V"), - (0x1FA6E, "X"), - (0x1FA70, "V"), - (0x1FA7D, "X"), - (0x1FA80, "V"), - (0x1FA8A, "X"), - (0x1FA8F, "V"), - (0x1FAC7, "X"), - (0x1FACE, "V"), - (0x1FADD, "X"), - (0x1FADF, "V"), - (0x1FAEA, "X"), - (0x1FAF0, "V"), - (0x1FAF9, "X"), - (0x1FB00, "V"), - (0x1FB93, "X"), - (0x1FB94, "V"), - (0x1FBF0, "M", "0"), - (0x1FBF1, "M", "1"), - (0x1FBF2, "M", "2"), - (0x1FBF3, "M", "3"), - (0x1FBF4, "M", "4"), - (0x1FBF5, "M", "5"), - (0x1FBF6, "M", "6"), - (0x1FBF7, "M", "7"), - (0x1FBF8, "M", "8"), - (0x1FBF9, "M", "9"), - (0x1FBFA, "X"), - (0x20000, "V"), - (0x2A6E0, "X"), - (0x2A700, "V"), - (0x2B73A, "X"), - (0x2B740, "V"), - (0x2B81E, "X"), - (0x2B820, "V"), - (0x2CEA2, "X"), - (0x2CEB0, "V"), - (0x2EBE1, "X"), - (0x2EBF0, "V"), - (0x2EE5E, "X"), - (0x2F800, "M", "丽"), - (0x2F801, "M", "丸"), - (0x2F802, "M", "乁"), - (0x2F803, "M", "𠄢"), - (0x2F804, "M", "你"), - (0x2F805, "M", "侮"), - (0x2F806, "M", "侻"), - (0x2F807, "M", "倂"), - (0x2F808, "M", "偺"), - (0x2F809, "M", "備"), - (0x2F80A, "M", "僧"), - (0x2F80B, "M", "像"), - (0x2F80C, "M", "㒞"), - (0x2F80D, "M", "𠘺"), - (0x2F80E, "M", "免"), - ] - - -def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F80F, "M", "兔"), - (0x2F810, "M", "兤"), - (0x2F811, "M", "具"), - (0x2F812, "M", "𠔜"), - (0x2F813, "M", "㒹"), - (0x2F814, "M", "內"), - (0x2F815, "M", "再"), - (0x2F816, "M", "𠕋"), - (0x2F817, "M", "冗"), - (0x2F818, "M", "冤"), - (0x2F819, "M", "仌"), - (0x2F81A, "M", "冬"), - (0x2F81B, "M", "况"), - (0x2F81C, "M", "𩇟"), - (0x2F81D, "M", "凵"), - (0x2F81E, "M", "刃"), - (0x2F81F, "M", "㓟"), - (0x2F820, "M", "刻"), - (0x2F821, "M", "剆"), - (0x2F822, "M", "割"), - (0x2F823, "M", "剷"), - (0x2F824, "M", "㔕"), - (0x2F825, "M", "勇"), - (0x2F826, "M", "勉"), - (0x2F827, "M", "勤"), - (0x2F828, "M", "勺"), - (0x2F829, "M", "包"), - (0x2F82A, "M", "匆"), - (0x2F82B, "M", "北"), - 
(0x2F82C, "M", "卉"), - (0x2F82D, "M", "卑"), - (0x2F82E, "M", "博"), - (0x2F82F, "M", "即"), - (0x2F830, "M", "卽"), - (0x2F831, "M", "卿"), - (0x2F834, "M", "𠨬"), - (0x2F835, "M", "灰"), - (0x2F836, "M", "及"), - (0x2F837, "M", "叟"), - (0x2F838, "M", "𠭣"), - (0x2F839, "M", "叫"), - (0x2F83A, "M", "叱"), - (0x2F83B, "M", "吆"), - (0x2F83C, "M", "咞"), - (0x2F83D, "M", "吸"), - (0x2F83E, "M", "呈"), - (0x2F83F, "M", "周"), - (0x2F840, "M", "咢"), - (0x2F841, "M", "哶"), - (0x2F842, "M", "唐"), - (0x2F843, "M", "啓"), - (0x2F844, "M", "啣"), - (0x2F845, "M", "善"), - (0x2F847, "M", "喙"), - (0x2F848, "M", "喫"), - (0x2F849, "M", "喳"), - (0x2F84A, "M", "嗂"), - (0x2F84B, "M", "圖"), - (0x2F84C, "M", "嘆"), - (0x2F84D, "M", "圗"), - (0x2F84E, "M", "噑"), - (0x2F84F, "M", "噴"), - (0x2F850, "M", "切"), - (0x2F851, "M", "壮"), - (0x2F852, "M", "城"), - (0x2F853, "M", "埴"), - (0x2F854, "M", "堍"), - (0x2F855, "M", "型"), - (0x2F856, "M", "堲"), - (0x2F857, "M", "報"), - (0x2F858, "M", "墬"), - (0x2F859, "M", "𡓤"), - (0x2F85A, "M", "売"), - (0x2F85B, "M", "壷"), - (0x2F85C, "M", "夆"), - (0x2F85D, "M", "多"), - (0x2F85E, "M", "夢"), - (0x2F85F, "M", "奢"), - (0x2F860, "M", "𡚨"), - (0x2F861, "M", "𡛪"), - (0x2F862, "M", "姬"), - (0x2F863, "M", "娛"), - (0x2F864, "M", "娧"), - (0x2F865, "M", "姘"), - (0x2F866, "M", "婦"), - (0x2F867, "M", "㛮"), - (0x2F868, "M", "㛼"), - (0x2F869, "M", "嬈"), - (0x2F86A, "M", "嬾"), - (0x2F86C, "M", "𡧈"), - (0x2F86D, "M", "寃"), - (0x2F86E, "M", "寘"), - (0x2F86F, "M", "寧"), - (0x2F870, "M", "寳"), - (0x2F871, "M", "𡬘"), - (0x2F872, "M", "寿"), - (0x2F873, "M", "将"), - (0x2F874, "M", "当"), - (0x2F875, "M", "尢"), - (0x2F876, "M", "㞁"), - ] - - -def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F877, "M", "屠"), - (0x2F878, "M", "屮"), - (0x2F879, "M", "峀"), - (0x2F87A, "M", "岍"), - (0x2F87B, "M", "𡷤"), - (0x2F87C, "M", "嵃"), - (0x2F87D, "M", "𡷦"), - (0x2F87E, "M", "嵮"), - (0x2F87F, "M", "嵫"), - (0x2F880, "M", "嵼"), - (0x2F881, "M", "巡"), - (0x2F882, "M", "巢"), - (0x2F883, "M", "㠯"), - (0x2F884, "M", "巽"), - (0x2F885, "M", "帨"), - (0x2F886, "M", "帽"), - (0x2F887, "M", "幩"), - (0x2F888, "M", "㡢"), - (0x2F889, "M", "𢆃"), - (0x2F88A, "M", "㡼"), - (0x2F88B, "M", "庰"), - (0x2F88C, "M", "庳"), - (0x2F88D, "M", "庶"), - (0x2F88E, "M", "廊"), - (0x2F88F, "M", "𪎒"), - (0x2F890, "M", "廾"), - (0x2F891, "M", "𢌱"), - (0x2F893, "M", "舁"), - (0x2F894, "M", "弢"), - (0x2F896, "M", "㣇"), - (0x2F897, "M", "𣊸"), - (0x2F898, "M", "𦇚"), - (0x2F899, "M", "形"), - (0x2F89A, "M", "彫"), - (0x2F89B, "M", "㣣"), - (0x2F89C, "M", "徚"), - (0x2F89D, "M", "忍"), - (0x2F89E, "M", "志"), - (0x2F89F, "M", "忹"), - (0x2F8A0, "M", "悁"), - (0x2F8A1, "M", "㤺"), - (0x2F8A2, "M", "㤜"), - (0x2F8A3, "M", "悔"), - (0x2F8A4, "M", "𢛔"), - (0x2F8A5, "M", "惇"), - (0x2F8A6, "M", "慈"), - (0x2F8A7, "M", "慌"), - (0x2F8A8, "M", "慎"), - (0x2F8A9, "M", "慌"), - (0x2F8AA, "M", "慺"), - (0x2F8AB, "M", "憎"), - (0x2F8AC, "M", "憲"), - (0x2F8AD, "M", "憤"), - (0x2F8AE, "M", "憯"), - (0x2F8AF, "M", "懞"), - (0x2F8B0, "M", "懲"), - (0x2F8B1, "M", "懶"), - (0x2F8B2, "M", "成"), - (0x2F8B3, "M", "戛"), - (0x2F8B4, "M", "扝"), - (0x2F8B5, "M", "抱"), - (0x2F8B6, "M", "拔"), - (0x2F8B7, "M", "捐"), - (0x2F8B8, "M", "𢬌"), - (0x2F8B9, "M", "挽"), - (0x2F8BA, "M", "拼"), - (0x2F8BB, "M", "捨"), - (0x2F8BC, "M", "掃"), - (0x2F8BD, "M", "揤"), - (0x2F8BE, "M", "𢯱"), - (0x2F8BF, "M", "搢"), - (0x2F8C0, "M", "揅"), - (0x2F8C1, "M", "掩"), - (0x2F8C2, "M", "㨮"), - (0x2F8C3, "M", "摩"), - (0x2F8C4, "M", "摾"), - (0x2F8C5, "M", "撝"), - (0x2F8C6, "M", "摷"), - (0x2F8C7, "M", "㩬"), - (0x2F8C8, "M", 
"敏"), - (0x2F8C9, "M", "敬"), - (0x2F8CA, "M", "𣀊"), - (0x2F8CB, "M", "旣"), - (0x2F8CC, "M", "書"), - (0x2F8CD, "M", "晉"), - (0x2F8CE, "M", "㬙"), - (0x2F8CF, "M", "暑"), - (0x2F8D0, "M", "㬈"), - (0x2F8D1, "M", "㫤"), - (0x2F8D2, "M", "冒"), - (0x2F8D3, "M", "冕"), - (0x2F8D4, "M", "最"), - (0x2F8D5, "M", "暜"), - (0x2F8D6, "M", "肭"), - (0x2F8D7, "M", "䏙"), - (0x2F8D8, "M", "朗"), - (0x2F8D9, "M", "望"), - (0x2F8DA, "M", "朡"), - (0x2F8DB, "M", "杞"), - (0x2F8DC, "M", "杓"), - ] - - -def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F8DD, "M", "𣏃"), - (0x2F8DE, "M", "㭉"), - (0x2F8DF, "M", "柺"), - (0x2F8E0, "M", "枅"), - (0x2F8E1, "M", "桒"), - (0x2F8E2, "M", "梅"), - (0x2F8E3, "M", "𣑭"), - (0x2F8E4, "M", "梎"), - (0x2F8E5, "M", "栟"), - (0x2F8E6, "M", "椔"), - (0x2F8E7, "M", "㮝"), - (0x2F8E8, "M", "楂"), - (0x2F8E9, "M", "榣"), - (0x2F8EA, "M", "槪"), - (0x2F8EB, "M", "檨"), - (0x2F8EC, "M", "𣚣"), - (0x2F8ED, "M", "櫛"), - (0x2F8EE, "M", "㰘"), - (0x2F8EF, "M", "次"), - (0x2F8F0, "M", "𣢧"), - (0x2F8F1, "M", "歔"), - (0x2F8F2, "M", "㱎"), - (0x2F8F3, "M", "歲"), - (0x2F8F4, "M", "殟"), - (0x2F8F5, "M", "殺"), - (0x2F8F6, "M", "殻"), - (0x2F8F7, "M", "𣪍"), - (0x2F8F8, "M", "𡴋"), - (0x2F8F9, "M", "𣫺"), - (0x2F8FA, "M", "汎"), - (0x2F8FB, "M", "𣲼"), - (0x2F8FC, "M", "沿"), - (0x2F8FD, "M", "泍"), - (0x2F8FE, "M", "汧"), - (0x2F8FF, "M", "洖"), - (0x2F900, "M", "派"), - (0x2F901, "M", "海"), - (0x2F902, "M", "流"), - (0x2F903, "M", "浩"), - (0x2F904, "M", "浸"), - (0x2F905, "M", "涅"), - (0x2F906, "M", "𣴞"), - (0x2F907, "M", "洴"), - (0x2F908, "M", "港"), - (0x2F909, "M", "湮"), - (0x2F90A, "M", "㴳"), - (0x2F90B, "M", "滋"), - (0x2F90C, "M", "滇"), - (0x2F90D, "M", "𣻑"), - (0x2F90E, "M", "淹"), - (0x2F90F, "M", "潮"), - (0x2F910, "M", "𣽞"), - (0x2F911, "M", "𣾎"), - (0x2F912, "M", "濆"), - (0x2F913, "M", "瀹"), - (0x2F914, "M", "瀞"), - (0x2F915, "M", "瀛"), - (0x2F916, "M", "㶖"), - (0x2F917, "M", "灊"), - (0x2F918, "M", "災"), - (0x2F919, "M", "灷"), - (0x2F91A, "M", "炭"), - (0x2F91B, "M", "𠔥"), - (0x2F91C, "M", "煅"), - (0x2F91D, "M", "𤉣"), - (0x2F91E, "M", "熜"), - (0x2F91F, "M", "𤎫"), - (0x2F920, "M", "爨"), - (0x2F921, "M", "爵"), - (0x2F922, "M", "牐"), - (0x2F923, "M", "𤘈"), - (0x2F924, "M", "犀"), - (0x2F925, "M", "犕"), - (0x2F926, "M", "𤜵"), - (0x2F927, "M", "𤠔"), - (0x2F928, "M", "獺"), - (0x2F929, "M", "王"), - (0x2F92A, "M", "㺬"), - (0x2F92B, "M", "玥"), - (0x2F92C, "M", "㺸"), - (0x2F92E, "M", "瑇"), - (0x2F92F, "M", "瑜"), - (0x2F930, "M", "瑱"), - (0x2F931, "M", "璅"), - (0x2F932, "M", "瓊"), - (0x2F933, "M", "㼛"), - (0x2F934, "M", "甤"), - (0x2F935, "M", "𤰶"), - (0x2F936, "M", "甾"), - (0x2F937, "M", "𤲒"), - (0x2F938, "M", "異"), - (0x2F939, "M", "𢆟"), - (0x2F93A, "M", "瘐"), - (0x2F93B, "M", "𤾡"), - (0x2F93C, "M", "𤾸"), - (0x2F93D, "M", "𥁄"), - (0x2F93E, "M", "㿼"), - (0x2F93F, "M", "䀈"), - (0x2F940, "M", "直"), - (0x2F941, "M", "𥃳"), - ] - - -def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F942, "M", "𥃲"), - (0x2F943, "M", "𥄙"), - (0x2F944, "M", "𥄳"), - (0x2F945, "M", "眞"), - (0x2F946, "M", "真"), - (0x2F948, "M", "睊"), - (0x2F949, "M", "䀹"), - (0x2F94A, "M", "瞋"), - (0x2F94B, "M", "䁆"), - (0x2F94C, "M", "䂖"), - (0x2F94D, "M", "𥐝"), - (0x2F94E, "M", "硎"), - (0x2F94F, "M", "碌"), - (0x2F950, "M", "磌"), - (0x2F951, "M", "䃣"), - (0x2F952, "M", "𥘦"), - (0x2F953, "M", "祖"), - (0x2F954, "M", "𥚚"), - (0x2F955, "M", "𥛅"), - (0x2F956, "M", "福"), - (0x2F957, "M", "秫"), - (0x2F958, "M", "䄯"), - (0x2F959, "M", "穀"), - (0x2F95A, "M", "穊"), - (0x2F95B, "M", "穏"), - (0x2F95C, "M", "𥥼"), - (0x2F95D, 
"M", "𥪧"), - (0x2F95F, "M", "竮"), - (0x2F960, "M", "䈂"), - (0x2F961, "M", "𥮫"), - (0x2F962, "M", "篆"), - (0x2F963, "M", "築"), - (0x2F964, "M", "䈧"), - (0x2F965, "M", "𥲀"), - (0x2F966, "M", "糒"), - (0x2F967, "M", "䊠"), - (0x2F968, "M", "糨"), - (0x2F969, "M", "糣"), - (0x2F96A, "M", "紀"), - (0x2F96B, "M", "𥾆"), - (0x2F96C, "M", "絣"), - (0x2F96D, "M", "䌁"), - (0x2F96E, "M", "緇"), - (0x2F96F, "M", "縂"), - (0x2F970, "M", "繅"), - (0x2F971, "M", "䌴"), - (0x2F972, "M", "𦈨"), - (0x2F973, "M", "𦉇"), - (0x2F974, "M", "䍙"), - (0x2F975, "M", "𦋙"), - (0x2F976, "M", "罺"), - (0x2F977, "M", "𦌾"), - (0x2F978, "M", "羕"), - (0x2F979, "M", "翺"), - (0x2F97A, "M", "者"), - (0x2F97B, "M", "𦓚"), - (0x2F97C, "M", "𦔣"), - (0x2F97D, "M", "聠"), - (0x2F97E, "M", "𦖨"), - (0x2F97F, "M", "聰"), - (0x2F980, "M", "𣍟"), - (0x2F981, "M", "䏕"), - (0x2F982, "M", "育"), - (0x2F983, "M", "脃"), - (0x2F984, "M", "䐋"), - (0x2F985, "M", "脾"), - (0x2F986, "M", "媵"), - (0x2F987, "M", "𦞧"), - (0x2F988, "M", "𦞵"), - (0x2F989, "M", "𣎓"), - (0x2F98A, "M", "𣎜"), - (0x2F98B, "M", "舁"), - (0x2F98C, "M", "舄"), - (0x2F98D, "M", "辞"), - (0x2F98E, "M", "䑫"), - (0x2F98F, "M", "芑"), - (0x2F990, "M", "芋"), - (0x2F991, "M", "芝"), - (0x2F992, "M", "劳"), - (0x2F993, "M", "花"), - (0x2F994, "M", "芳"), - (0x2F995, "M", "芽"), - (0x2F996, "M", "苦"), - (0x2F997, "M", "𦬼"), - (0x2F998, "M", "若"), - (0x2F999, "M", "茝"), - (0x2F99A, "M", "荣"), - (0x2F99B, "M", "莭"), - (0x2F99C, "M", "茣"), - (0x2F99D, "M", "莽"), - (0x2F99E, "M", "菧"), - (0x2F99F, "M", "著"), - (0x2F9A0, "M", "荓"), - (0x2F9A1, "M", "菊"), - (0x2F9A2, "M", "菌"), - (0x2F9A3, "M", "菜"), - (0x2F9A4, "M", "𦰶"), - (0x2F9A5, "M", "𦵫"), - (0x2F9A6, "M", "𦳕"), - (0x2F9A7, "M", "䔫"), - ] - - -def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F9A8, "M", "蓱"), - (0x2F9A9, "M", "蓳"), - (0x2F9AA, "M", "蔖"), - (0x2F9AB, "M", "𧏊"), - (0x2F9AC, "M", "蕤"), - (0x2F9AD, "M", "𦼬"), - (0x2F9AE, "M", "䕝"), - (0x2F9AF, "M", "䕡"), - (0x2F9B0, "M", "𦾱"), - (0x2F9B1, "M", "𧃒"), - (0x2F9B2, "M", "䕫"), - (0x2F9B3, "M", "虐"), - (0x2F9B4, "M", "虜"), - (0x2F9B5, "M", "虧"), - (0x2F9B6, "M", "虩"), - (0x2F9B7, "M", "蚩"), - (0x2F9B8, "M", "蚈"), - (0x2F9B9, "M", "蜎"), - (0x2F9BA, "M", "蛢"), - (0x2F9BB, "M", "蝹"), - (0x2F9BC, "M", "蜨"), - (0x2F9BD, "M", "蝫"), - (0x2F9BE, "M", "螆"), - (0x2F9BF, "M", "䗗"), - (0x2F9C0, "M", "蟡"), - (0x2F9C1, "M", "蠁"), - (0x2F9C2, "M", "䗹"), - (0x2F9C3, "M", "衠"), - (0x2F9C4, "M", "衣"), - (0x2F9C5, "M", "𧙧"), - (0x2F9C6, "M", "裗"), - (0x2F9C7, "M", "裞"), - (0x2F9C8, "M", "䘵"), - (0x2F9C9, "M", "裺"), - (0x2F9CA, "M", "㒻"), - (0x2F9CB, "M", "𧢮"), - (0x2F9CC, "M", "𧥦"), - (0x2F9CD, "M", "䚾"), - (0x2F9CE, "M", "䛇"), - (0x2F9CF, "M", "誠"), - (0x2F9D0, "M", "諭"), - (0x2F9D1, "M", "變"), - (0x2F9D2, "M", "豕"), - (0x2F9D3, "M", "𧲨"), - (0x2F9D4, "M", "貫"), - (0x2F9D5, "M", "賁"), - (0x2F9D6, "M", "贛"), - (0x2F9D7, "M", "起"), - (0x2F9D8, "M", "𧼯"), - (0x2F9D9, "M", "𠠄"), - (0x2F9DA, "M", "跋"), - (0x2F9DB, "M", "趼"), - (0x2F9DC, "M", "跰"), - (0x2F9DD, "M", "𠣞"), - (0x2F9DE, "M", "軔"), - (0x2F9DF, "M", "輸"), - (0x2F9E0, "M", "𨗒"), - (0x2F9E1, "M", "𨗭"), - (0x2F9E2, "M", "邔"), - (0x2F9E3, "M", "郱"), - (0x2F9E4, "M", "鄑"), - (0x2F9E5, "M", "𨜮"), - (0x2F9E6, "M", "鄛"), - (0x2F9E7, "M", "鈸"), - (0x2F9E8, "M", "鋗"), - (0x2F9E9, "M", "鋘"), - (0x2F9EA, "M", "鉼"), - (0x2F9EB, "M", "鏹"), - (0x2F9EC, "M", "鐕"), - (0x2F9ED, "M", "𨯺"), - (0x2F9EE, "M", "開"), - (0x2F9EF, "M", "䦕"), - (0x2F9F0, "M", "閷"), - (0x2F9F1, "M", "𨵷"), - (0x2F9F2, "M", "䧦"), - (0x2F9F3, "M", "雃"), - (0x2F9F4, "M", "嶲"), - 
(0x2F9F5, "M", "霣"), - (0x2F9F6, "M", "𩅅"), - (0x2F9F7, "M", "𩈚"), - (0x2F9F8, "M", "䩮"), - (0x2F9F9, "M", "䩶"), - (0x2F9FA, "M", "韠"), - (0x2F9FB, "M", "𩐊"), - (0x2F9FC, "M", "䪲"), - (0x2F9FD, "M", "𩒖"), - (0x2F9FE, "M", "頋"), - (0x2FA00, "M", "頩"), - (0x2FA01, "M", "𩖶"), - (0x2FA02, "M", "飢"), - (0x2FA03, "M", "䬳"), - (0x2FA04, "M", "餩"), - (0x2FA05, "M", "馧"), - (0x2FA06, "M", "駂"), - (0x2FA07, "M", "駾"), - (0x2FA08, "M", "䯎"), - (0x2FA09, "M", "𩬰"), - (0x2FA0A, "M", "鬒"), - (0x2FA0B, "M", "鱀"), - (0x2FA0C, "M", "鳽"), - ] - - -def _seg_83() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2FA0D, "M", "䳎"), - (0x2FA0E, "M", "䳭"), - (0x2FA0F, "M", "鵧"), - (0x2FA10, "M", "𪃎"), - (0x2FA11, "M", "䳸"), - (0x2FA12, "M", "𪄅"), - (0x2FA13, "M", "𪈎"), - (0x2FA14, "M", "𪊑"), - (0x2FA15, "M", "麻"), - (0x2FA16, "M", "䵖"), - (0x2FA17, "M", "黹"), - (0x2FA18, "M", "黾"), - (0x2FA19, "M", "鼅"), - (0x2FA1A, "M", "鼏"), - (0x2FA1B, "M", "鼖"), - (0x2FA1C, "M", "鼻"), - (0x2FA1D, "M", "𪘀"), - (0x2FA1E, "X"), - (0x30000, "V"), - (0x3134B, "X"), - (0x31350, "V"), - (0x323B0, "X"), - (0xE0100, "I"), - (0xE01F0, "X"), - ] - - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() - + _seg_79() - + _seg_80() - + _seg_81() - + _seg_82() - + _seg_83() -) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER b/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt b/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt deleted file mode 100644 index 7b190ca..0000000 --- a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2011 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/METADATA b/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/METADATA deleted file mode 100644 index ddf5464..0000000 --- a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: itsdangerous -Version: 2.2.0 -Summary: Safely pass data to untrusted environments and back. -Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/itsdangerous/ - -# ItsDangerous - -... so better sign this - -Various helpers to pass data to untrusted environments and to get it -back safe and sound. Data is cryptographically signed to ensure that a -token has not been tampered with. - -It's possible to customize how data is serialized. Data is compressed as -needed. A timestamp can be added and verified automatically while -loading a token. - - -## A Simple Example - -Here's how you could generate a token for transmitting a user's id and -name between web requests. - -```python -from itsdangerous import URLSafeSerializer -auth_s = URLSafeSerializer("secret key", "auth") -token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) - -print(token) -# eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg - -data = auth_s.loads(token) -print(data["name"]) -# itsdangerous -``` - - -## Donate - -The Pallets organization develops and supports ItsDangerous and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -[please donate today][]. 
- -[please donate today]: https://palletsprojects.com/donate - diff --git a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/RECORD b/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/RECORD deleted file mode 100644 index b73b08a..0000000 --- a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/RECORD +++ /dev/null @@ -1,23 +0,0 @@ -itsdangerous-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -itsdangerous-2.2.0.dist-info/LICENSE.txt,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 -itsdangerous-2.2.0.dist-info/METADATA,sha256=0rk0-1ZwihuU5DnwJVwPWoEI4yWOyCexih3JyZHblhE,1924 -itsdangerous-2.2.0.dist-info/RECORD,, -itsdangerous-2.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous-2.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -itsdangerous/__init__.py,sha256=4SK75sCe29xbRgQE1ZQtMHnKUuZYAf3bSpZOrff1IAY,1427 -itsdangerous/__pycache__/__init__.cpython-312.pyc,, -itsdangerous/__pycache__/_json.cpython-312.pyc,, -itsdangerous/__pycache__/encoding.cpython-312.pyc,, -itsdangerous/__pycache__/exc.cpython-312.pyc,, -itsdangerous/__pycache__/serializer.cpython-312.pyc,, -itsdangerous/__pycache__/signer.cpython-312.pyc,, -itsdangerous/__pycache__/timed.cpython-312.pyc,, -itsdangerous/__pycache__/url_safe.cpython-312.pyc,, -itsdangerous/_json.py,sha256=wPQGmge2yZ9328EHKF6gadGeyGYCJQKxtU-iLKE6UnA,473 -itsdangerous/encoding.py,sha256=wwTz5q_3zLcaAdunk6_vSoStwGqYWe307Zl_U87aRFM,1409 -itsdangerous/exc.py,sha256=Rr3exo0MRFEcPZltwecyK16VV1bE2K9_F1-d-ljcUn4,3201 -itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous/serializer.py,sha256=PmdwADLqkSyQLZ0jOKAgDsAW4k_H0TlA71Ei3z0C5aI,15601 -itsdangerous/signer.py,sha256=YO0CV7NBvHA6j549REHJFUjUojw2pHqwcUpQnU7yNYQ,9647 -itsdangerous/timed.py,sha256=6RvDMqNumGMxf0-HlpaZdN9PUQQmRvrQGplKhxuivUs,8083 -itsdangerous/url_safe.py,sha256=az4e5fXi_vs-YbWj8YZwn4wiVKfeD--GEKRT5Ueu4P4,2505 diff --git a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/REQUESTED b/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/WHEEL b/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/venv/Lib/site-packages/itsdangerous-2.2.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/itsdangerous/__init__.py b/venv/Lib/site-packages/itsdangerous/__init__.py deleted file mode 100644 index ea55256..0000000 --- a/venv/Lib/site-packages/itsdangerous/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .encoding import base64_decode as base64_decode -from .encoding import base64_encode as base64_encode -from .encoding import want_bytes as want_bytes -from .exc import BadData as BadData -from .exc import BadHeader as BadHeader -from .exc import BadPayload as BadPayload -from .exc import BadSignature as BadSignature -from .exc import BadTimeSignature as BadTimeSignature -from .exc import SignatureExpired as SignatureExpired -from .serializer import Serializer as Serializer -from .signer import HMACAlgorithm as HMACAlgorithm -from .signer import NoneAlgorithm as NoneAlgorithm -from .signer import Signer as Signer -from .timed import TimedSerializer as TimedSerializer -from .timed import TimestampSigner as 
TimestampSigner -from .url_safe import URLSafeSerializer as URLSafeSerializer -from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " ItsDangerous 2.3. Use feature detection or" - " 'importlib.metadata.version(\"itsdangerous\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("itsdangerous") - - raise AttributeError(name) diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4186732..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc deleted file mode 100644 index a072c6b..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc deleted file mode 100644 index eaa6596..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc deleted file mode 100644 index b042a13..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc deleted file mode 100644 index 5fc796d..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc deleted file mode 100644 index 5ab6ff4..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc deleted file mode 100644 index eee0036..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc b/venv/Lib/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc deleted file mode 100644 index 1f565b7..0000000 Binary files a/venv/Lib/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/itsdangerous/_json.py b/venv/Lib/site-packages/itsdangerous/_json.py deleted file mode 100644 index fc23fea..0000000 --- a/venv/Lib/site-packages/itsdangerous/_json.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - - -class _CompactJSON: - """Wrapper around json module that strips whitespace.""" - - @staticmethod - def loads(payload: str | bytes) -> 
t.Any: - return _json.loads(payload) - - @staticmethod - def dumps(obj: t.Any, **kwargs: t.Any) -> str: - kwargs.setdefault("ensure_ascii", False) - kwargs.setdefault("separators", (",", ":")) - return _json.dumps(obj, **kwargs) diff --git a/venv/Lib/site-packages/itsdangerous/encoding.py b/venv/Lib/site-packages/itsdangerous/encoding.py deleted file mode 100644 index f5ca80f..0000000 --- a/venv/Lib/site-packages/itsdangerous/encoding.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -import base64 -import string -import struct -import typing as t - -from .exc import BadData - - -def want_bytes( - s: str | bytes, encoding: str = "utf-8", errors: str = "strict" -) -> bytes: - if isinstance(s, str): - s = s.encode(encoding, errors) - - return s - - -def base64_encode(string: str | bytes) -> bytes: - """Base64 encode a string of bytes or text. The resulting bytes are - safe to use in URLs. - """ - string = want_bytes(string) - return base64.urlsafe_b64encode(string).rstrip(b"=") - - -def base64_decode(string: str | bytes) -> bytes: - """Base64 decode a URL-safe string of bytes or text. The result is - bytes. - """ - string = want_bytes(string, encoding="ascii", errors="ignore") - string += b"=" * (-len(string) % 4) - - try: - return base64.urlsafe_b64decode(string) - except (TypeError, ValueError) as e: - raise BadData("Invalid base64-encoded data") from e - - -# The alphabet used by base64.urlsafe_* -_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") - -_int64_struct = struct.Struct(">Q") -_int_to_bytes = _int64_struct.pack -_bytes_to_int = t.cast("t.Callable[[bytes], tuple[int]]", _int64_struct.unpack) - - -def int_to_bytes(num: int) -> bytes: - return _int_to_bytes(num).lstrip(b"\x00") - - -def bytes_to_int(bytestr: bytes) -> int: - return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/venv/Lib/site-packages/itsdangerous/exc.py b/venv/Lib/site-packages/itsdangerous/exc.py deleted file mode 100644 index a75adcd..0000000 --- a/venv/Lib/site-packages/itsdangerous/exc.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import annotations - -import typing as t -from datetime import datetime - - -class BadData(Exception): - """Raised if bad data of any sort was encountered. This is the base - for all exceptions that ItsDangerous defines. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str): - super().__init__(message) - self.message = message - - def __str__(self) -> str: - return self.message - - -class BadSignature(BadData): - """Raised if a signature does not match.""" - - def __init__(self, message: str, payload: t.Any | None = None): - super().__init__(message) - - #: The payload that failed the signature test. In some - #: situations you might still want to inspect this, even if - #: you know it was tampered with. - #: - #: .. versionadded:: 0.14 - self.payload: t.Any | None = payload - - -class BadTimeSignature(BadSignature): - """Raised if a time-based signature is invalid. This is a subclass - of :class:`BadSignature`. - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - date_signed: datetime | None = None, - ): - super().__init__(message, payload) - - #: If the signature expired this exposes the date of when the - #: signature was created. This can be helpful in order to - #: tell the user how long a link has been gone stale. - #: - #: .. versionchanged:: 2.0 - #: The datetime value is timezone-aware rather than naive. - #: - #: .. 
versionadded:: 0.14 - self.date_signed = date_signed - - -class SignatureExpired(BadTimeSignature): - """Raised if a signature timestamp is older than ``max_age``. This - is a subclass of :exc:`BadTimeSignature`. - """ - - -class BadHeader(BadSignature): - """Raised if a signed header is invalid in some form. This only - happens for serializers that have a header that goes with the - signature. - - .. versionadded:: 0.24 - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - header: t.Any | None = None, - original_error: Exception | None = None, - ): - super().__init__(message, payload) - - #: If the header is actually available but just malformed it - #: might be stored here. - self.header: t.Any | None = header - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error - - -class BadPayload(BadData): - """Raised if a payload is invalid. This could happen if the payload - is loaded despite an invalid signature, or if there is a mismatch - between the serializer and deserializer. The original exception - that occurred during loading is stored on as :attr:`original_error`. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str, original_error: Exception | None = None): - super().__init__(message) - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error diff --git a/venv/Lib/site-packages/itsdangerous/py.typed b/venv/Lib/site-packages/itsdangerous/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/itsdangerous/serializer.py b/venv/Lib/site-packages/itsdangerous/serializer.py deleted file mode 100644 index 5ddf387..0000000 --- a/venv/Lib/site-packages/itsdangerous/serializer.py +++ /dev/null @@ -1,406 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import json -import typing as t - -from .encoding import want_bytes -from .exc import BadPayload -from .exc import BadSignature -from .signer import _make_keys_list -from .signer import Signer - -if t.TYPE_CHECKING: - import typing_extensions as te - - # This should be either be str or bytes. To avoid having to specify the - # bound type, it falls back to a union if structural matching fails. - _TSerialized = te.TypeVar( - "_TSerialized", bound=t.Union[str, bytes], default=t.Union[str, bytes] - ) -else: - # Still available at runtime on Python < 3.13, but without the default. - _TSerialized = t.TypeVar("_TSerialized", bound=t.Union[str, bytes]) - - -class _PDataSerializer(t.Protocol[_TSerialized]): - def loads(self, payload: _TSerialized, /) -> t.Any: ... - # A signature with additional arguments is not handled correctly by type - # checkers right now, so an overload is used below for serializers that - # don't match this strict protocol. - def dumps(self, obj: t.Any, /) -> _TSerialized: ... - - -# Use TypeIs once it's available in typing_extensions or 3.13. -def is_text_serializer( - serializer: _PDataSerializer[t.Any], -) -> te.TypeGuard[_PDataSerializer[str]]: - """Checks whether a serializer generates text or binary.""" - return isinstance(serializer.dumps({}), str) - - -class Serializer(t.Generic[_TSerialized]): - """A serializer wraps a :class:`~itsdangerous.signer.Signer` to - enable serializing and securely signing data other than bytes. It - can unsign to verify that the data hasn't been changed. 
- - The serializer provides :meth:`dumps` and :meth:`loads`, similar to - :mod:`json`, and by default uses :mod:`json` internally to serialize - the data to bytes. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param serializer: An object that provides ``dumps`` and ``loads`` - methods for serializing data to a string. Defaults to - :attr:`default_serializer`, which defaults to :mod:`json`. - :param serializer_kwargs: Keyword arguments to pass when calling - ``serializer.dumps``. - :param signer: A ``Signer`` class to instantiate when signing data. - Defaults to :attr:`default_signer`, which defaults to - :class:`~itsdangerous.signer.Signer`. - :param signer_kwargs: Keyword arguments to pass when instantiating - the ``Signer`` class. - :param fallback_signers: List of signer parameters to try when - unsigning with the default signer fails. Each item can be a dict - of ``signer_kwargs``, a ``Signer`` class, or a tuple of - ``(signer, signer_kwargs)``. Defaults to - :attr:`default_fallback_signers`. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 2.0 - Removed the default SHA-512 fallback signer from - ``default_fallback_signers``. - - .. versionchanged:: 1.1 - Added support for ``fallback_signers`` and configured a default - SHA-512 fallback. This fallback is for users who used the yanked - 1.0.0 release which defaulted to SHA-512. - - .. versionchanged:: 0.14 - The ``signer`` and ``signer_kwargs`` parameters were added to - the constructor. - """ - - #: The default serialization module to use to serialize data to a - #: string internally. The default is :mod:`json`, but can be changed - #: to any object that provides ``dumps`` and ``loads`` methods. - default_serializer: _PDataSerializer[t.Any] = json - - #: The default ``Signer`` class to instantiate when signing data. - #: The default is :class:`itsdangerous.signer.Signer`. - default_signer: type[Signer] = Signer - - #: The default fallback signers to try when unsigning fails. - default_fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = [] - - # Serializer[str] if no data serializer is provided, or if it returns str. - @t.overload - def __init__( - self: Serializer[str], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: None | _PDataSerializer[str] = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer positional argument. 
- @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer keyword argument. - @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a positional argument. If the strict signature of - # _PDataSerializer doesn't match, fall back to a union, requiring the user - # to specify the type. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a keyword argument. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: t.Any | None = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. 
- self.secret_keys: list[bytes] = _make_keys_list(secret_key) - - if salt is not None: - salt = want_bytes(salt) - # if salt is None then the signer's default is used - - self.salt = salt - - if serializer is None: - serializer = self.default_serializer - - self.serializer: _PDataSerializer[_TSerialized] = serializer - self.is_text_serializer: bool = is_text_serializer(serializer) - - if signer is None: - signer = self.default_signer - - self.signer: type[Signer] = signer - self.signer_kwargs: dict[str, t.Any] = signer_kwargs or {} - - if fallback_signers is None: - fallback_signers = list(self.default_fallback_signers) - - self.fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = fallback_signers - self.serializer_kwargs: dict[str, t.Any] = serializer_kwargs or {} - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def load_payload( - self, payload: bytes, serializer: _PDataSerializer[t.Any] | None = None - ) -> t.Any: - """Loads the encoded object. This function raises - :class:`.BadPayload` if the payload is not valid. The - ``serializer`` parameter can be used to override the serializer - stored on the class. The encoded ``payload`` should always be - bytes. - """ - if serializer is None: - use_serializer = self.serializer - is_text = self.is_text_serializer - else: - use_serializer = serializer - is_text = is_text_serializer(serializer) - - try: - if is_text: - return use_serializer.loads(payload.decode("utf-8")) # type: ignore[arg-type] - - return use_serializer.loads(payload) # type: ignore[arg-type] - except Exception as e: - raise BadPayload( - "Could not load the payload because an exception" - " occurred on unserializing the data.", - original_error=e, - ) from e - - def dump_payload(self, obj: t.Any) -> bytes: - """Dumps the encoded object. The return value is always bytes. - If the internal serializer returns text, the value will be - encoded as UTF-8. - """ - return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) - - def make_signer(self, salt: str | bytes | None = None) -> Signer: - """Creates a new instance of the signer to be used. The default - implementation uses the :class:`.Signer` base class. - """ - if salt is None: - salt = self.salt - - return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) - - def iter_unsigners(self, salt: str | bytes | None = None) -> cabc.Iterator[Signer]: - """Iterates over all signers to be tried for unsigning. Starts - with the configured signer, then constructs each signer - specified in ``fallback_signers``. - """ - if salt is None: - salt = self.salt - - yield self.make_signer(salt) - - for fallback in self.fallback_signers: - if isinstance(fallback, dict): - kwargs = fallback - fallback = self.signer - elif isinstance(fallback, tuple): - fallback, kwargs = fallback - else: - kwargs = self.signer_kwargs - - for secret_key in self.secret_keys: - yield fallback(secret_key, salt=salt, **kwargs) - - def dumps(self, obj: t.Any, salt: str | bytes | None = None) -> _TSerialized: - """Returns a signed string serialized with the internal - serializer. The return value can be either a byte or unicode - string depending on the format of the internal serializer. 
- """ - payload = want_bytes(self.dump_payload(obj)) - rv = self.make_signer(salt).sign(payload) - - if self.is_text_serializer: - return rv.decode("utf-8") # type: ignore[return-value] - - return rv # type: ignore[return-value] - - def dump(self, obj: t.Any, f: t.IO[t.Any], salt: str | bytes | None = None) -> None: - """Like :meth:`dumps` but dumps into a file. The file handle has - to be compatible with what the internal serializer expects. - """ - f.write(self.dumps(obj, salt)) - - def loads( - self, s: str | bytes, salt: str | bytes | None = None, **kwargs: t.Any - ) -> t.Any: - """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the - signature validation fails. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - return self.load_payload(signer.unsign(s)) - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def load(self, f: t.IO[t.Any], salt: str | bytes | None = None) -> t.Any: - """Like :meth:`loads` but loads from a file.""" - return self.loads(f.read(), salt) - - def loads_unsafe( - self, s: str | bytes, salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads` but without verifying the signature. This - is potentially very dangerous to use depending on how your - serializer works. The return value is ``(signature_valid, - payload)`` instead of just the payload. The first item will be a - boolean that indicates if the signature is valid. This function - never fails. - - Use it for debugging only and if you know that your serializer - module is not exploitable (for example, do not use it with a - pickle serializer). - - .. versionadded:: 0.15 - """ - return self._loads_unsafe_impl(s, salt) - - def _loads_unsafe_impl( - self, - s: str | bytes, - salt: str | bytes | None, - load_kwargs: dict[str, t.Any] | None = None, - load_payload_kwargs: dict[str, t.Any] | None = None, - ) -> tuple[bool, t.Any]: - """Low level helper function to implement :meth:`loads_unsafe` - in serializer subclasses. - """ - if load_kwargs is None: - load_kwargs = {} - - try: - return True, self.loads(s, salt=salt, **load_kwargs) - except BadSignature as e: - if e.payload is None: - return False, None - - if load_payload_kwargs is None: - load_payload_kwargs = {} - - try: - return ( - False, - self.load_payload(e.payload, **load_payload_kwargs), - ) - except BadPayload: - return False, None - - def load_unsafe( - self, f: t.IO[t.Any], salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads_unsafe` but loads from a file. - - .. versionadded:: 0.15 - """ - return self.loads_unsafe(f.read(), salt=salt) diff --git a/venv/Lib/site-packages/itsdangerous/signer.py b/venv/Lib/site-packages/itsdangerous/signer.py deleted file mode 100644 index e324dc0..0000000 --- a/venv/Lib/site-packages/itsdangerous/signer.py +++ /dev/null @@ -1,266 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import hashlib -import hmac -import typing as t - -from .encoding import _base64_alphabet -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import want_bytes -from .exc import BadSignature - - -class SigningAlgorithm: - """Subclasses must implement :meth:`get_signature` to provide - signature generation functionality. 
- """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - """Returns the signature for the given key and value.""" - raise NotImplementedError() - - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: - """Verifies the given signature matches the expected - signature. - """ - return hmac.compare_digest(sig, self.get_signature(key, value)) - - -class NoneAlgorithm(SigningAlgorithm): - """Provides an algorithm that does not perform any signing and - returns an empty signature. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - return b"" - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class HMACAlgorithm(SigningAlgorithm): - """Provides signature generation using HMACs.""" - - #: The digest method to use with the MAC algorithm. This defaults to - #: SHA1, but can be changed to any other function in the hashlib - #: module. - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - def __init__(self, digest_method: t.Any = None): - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - def get_signature(self, key: bytes, value: bytes) -> bytes: - mac = hmac.new(key, msg=value, digestmod=self.digest_method) - return mac.digest() - - -def _make_keys_list( - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], -) -> list[bytes]: - if isinstance(secret_key, (str, bytes)): - return [want_bytes(secret_key)] - - return [want_bytes(s) for s in secret_key] # pyright: ignore - - -class Signer: - """A signer securely signs bytes, then unsigns them to verify that - the value hasn't been changed. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param sep: Separator between the signature and value. - :param key_derivation: How to derive the signing key from the secret - key and salt. Possible values are ``concat``, ``django-concat``, - or ``hmac``. Defaults to :attr:`default_key_derivation`, which - defaults to ``django-concat``. - :param digest_method: Hash function to use when generating the HMAC - signature. Defaults to :attr:`default_digest_method`, which - defaults to :func:`hashlib.sha1`. Note that the security of the - hash alone doesn't apply when used intermediately in HMAC. - :param algorithm: A :class:`SigningAlgorithm` instance to use - instead of building a default :class:`HMACAlgorithm` with the - ``digest_method``. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 0.18 - ``algorithm`` was added as an argument to the class constructor. - - .. versionchanged:: 0.14 - ``key_derivation`` and ``digest_method`` were added as arguments - to the class constructor. - """ - - #: The default digest method to use for the signer. 
The default is - #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or - #: compatible object. Note that the security of the hash alone - #: doesn't apply when used intermediately in HMAC. - #: - #: .. versionadded:: 0.14 - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - #: The default scheme to use to derive the signing key from the - #: secret key and salt. The default is ``django-concat``. Possible - #: values are ``concat``, ``django-concat``, and ``hmac``. - #: - #: .. versionadded:: 0.14 - default_key_derivation: str = "django-concat" - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous.Signer", - sep: str | bytes = b".", - key_derivation: str | None = None, - digest_method: t.Any | None = None, - algorithm: SigningAlgorithm | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: list[bytes] = _make_keys_list(secret_key) - self.sep: bytes = want_bytes(sep) - - if self.sep in _base64_alphabet: - raise ValueError( - "The given separator cannot be used because it may be" - " contained in the signature itself. ASCII letters," - " digits, and '-_=' must not be used." - ) - - if salt is not None: - salt = want_bytes(salt) - else: - salt = b"itsdangerous.Signer" - - self.salt = salt - - if key_derivation is None: - key_derivation = self.default_key_derivation - - self.key_derivation: str = key_derivation - - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - if algorithm is None: - algorithm = HMACAlgorithm(self.digest_method) - - self.algorithm: SigningAlgorithm = algorithm - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def derive_key(self, secret_key: str | bytes | None = None) -> bytes: - """This method is called to derive the key. The default key - derivation choices can be overridden here. Key derivation is not - intended to be used as a security method to make a complex key - out of a short password. Instead you should use large random - secret keys. - - :param secret_key: A specific secret key to derive from. - Defaults to the last item in :attr:`secret_keys`. - - .. versionchanged:: 2.0 - Added the ``secret_key`` parameter. 
- """ - if secret_key is None: - secret_key = self.secret_keys[-1] - else: - secret_key = want_bytes(secret_key) - - if self.key_derivation == "concat": - return t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) - elif self.key_derivation == "django-concat": - return t.cast( - bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() - ) - elif self.key_derivation == "hmac": - mac = hmac.new(secret_key, digestmod=self.digest_method) - mac.update(self.salt) - return mac.digest() - elif self.key_derivation == "none": - return secret_key - else: - raise TypeError("Unknown key derivation method") - - def get_signature(self, value: str | bytes) -> bytes: - """Returns the signature for the given value.""" - value = want_bytes(value) - key = self.derive_key() - sig = self.algorithm.get_signature(key, value) - return base64_encode(sig) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string.""" - value = want_bytes(value) - return value + self.sep + self.get_signature(value) - - def verify_signature(self, value: str | bytes, sig: str | bytes) -> bool: - """Verifies the signature for the given value.""" - try: - sig = base64_decode(sig) - except Exception: - return False - - value = want_bytes(value) - - for secret_key in reversed(self.secret_keys): - key = self.derive_key(secret_key) - - if self.algorithm.verify_signature(key, value, sig): - return True - - return False - - def unsign(self, signed_value: str | bytes) -> bytes: - """Unsigns the given string.""" - signed_value = want_bytes(signed_value) - - if self.sep not in signed_value: - raise BadSignature(f"No {self.sep!r} found in value") - - value, sig = signed_value.rsplit(self.sep, 1) - - if self.verify_signature(value, sig): - return value - - raise BadSignature(f"Signature {sig!r} does not match", payload=value) - - def validate(self, signed_value: str | bytes) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid. - """ - try: - self.unsign(signed_value) - return True - except BadSignature: - return False diff --git a/venv/Lib/site-packages/itsdangerous/timed.py b/venv/Lib/site-packages/itsdangerous/timed.py deleted file mode 100644 index 7384375..0000000 --- a/venv/Lib/site-packages/itsdangerous/timed.py +++ /dev/null @@ -1,228 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import time -import typing as t -from datetime import datetime -from datetime import timezone - -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import bytes_to_int -from .encoding import int_to_bytes -from .encoding import want_bytes -from .exc import BadSignature -from .exc import BadTimeSignature -from .exc import SignatureExpired -from .serializer import _TSerialized -from .serializer import Serializer -from .signer import Signer - - -class TimestampSigner(Signer): - """Works like the regular :class:`.Signer` but also records the time - of the signing and can be used to expire signatures. The - :meth:`unsign` method can raise :exc:`.SignatureExpired` if the - unsigning failed because the signature is expired. - """ - - def get_timestamp(self) -> int: - """Returns the current timestamp. The function must return an - integer. - """ - return int(time.time()) - - def timestamp_to_datetime(self, ts: int) -> datetime: - """Convert the timestamp from :meth:`get_timestamp` into an - aware :class`datetime.datetime` in UTC. - - .. 
versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - return datetime.fromtimestamp(ts, tz=timezone.utc) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string and also attaches time information.""" - value = want_bytes(value) - timestamp = base64_encode(int_to_bytes(self.get_timestamp())) - sep = want_bytes(self.sep) - value = value + sep + timestamp - return value + sep + self.get_signature(value) - - # Ignore overlapping signatures check, return_timestamp is the only - # parameter that affects the return type. - - @t.overload - def unsign( # type: ignore[overload-overlap] - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[False] = False, - ) -> bytes: ... - - @t.overload - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[True] = True, - ) -> tuple[bytes, datetime]: ... - - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - ) -> tuple[bytes, datetime] | bytes: - """Works like the regular :meth:`.Signer.unsign` but can also - validate the time. See the base docstring of the class for - the general behavior. If ``return_timestamp`` is ``True`` the - timestamp of the signature will be returned as an aware - :class:`datetime.datetime` object in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - try: - result = super().unsign(signed_value) - sig_error = None - except BadSignature as e: - sig_error = e - result = e.payload or b"" - - sep = want_bytes(self.sep) - - # If there is no timestamp in the result there is something - # seriously wrong. In case there was a signature error, we raise - # that one directly, otherwise we have a weird situation in - # which we shouldn't have come except someone uses a time-based - # serializer on non-timestamp data, so catch that. - if sep not in result: - if sig_error: - raise sig_error - - raise BadTimeSignature("timestamp missing", payload=result) - - value, ts_bytes = result.rsplit(sep, 1) - ts_int: int | None = None - ts_dt: datetime | None = None - - try: - ts_int = bytes_to_int(base64_decode(ts_bytes)) - except Exception: - pass - - # Signature is *not* okay. Raise a proper error now that we have - # split the value and the timestamp. - if sig_error is not None: - if ts_int is not None: - try: - ts_dt = self.timestamp_to_datetime(ts_int) - except (ValueError, OSError, OverflowError) as exc: - # Windows raises OSError - # 32-bit raises OverflowError - raise BadTimeSignature( - "Malformed timestamp", payload=value - ) from exc - - raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) - - # Signature was okay but the timestamp is actually not there or - # malformed. Should not happen, but we handle it anyway. 
- if ts_int is None: - raise BadTimeSignature("Malformed timestamp", payload=value) - - # Check timestamp is not older than max_age - if max_age is not None: - age = self.get_timestamp() - ts_int - - if age > max_age: - raise SignatureExpired( - f"Signature age {age} > {max_age} seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if age < 0: - raise SignatureExpired( - f"Signature age {age} < 0 seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if return_timestamp: - return value, self.timestamp_to_datetime(ts_int) - - return value - - def validate(self, signed_value: str | bytes, max_age: int | None = None) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid.""" - try: - self.unsign(signed_value, max_age=max_age) - return True - except BadSignature: - return False - - -class TimedSerializer(Serializer[_TSerialized]): - """Uses :class:`TimestampSigner` instead of the default - :class:`.Signer`. - """ - - default_signer: type[TimestampSigner] = TimestampSigner - - def iter_unsigners( - self, salt: str | bytes | None = None - ) -> cabc.Iterator[TimestampSigner]: - return t.cast("cabc.Iterator[TimestampSigner]", super().iter_unsigners(salt)) - - # TODO: Signature is incompatible because parameters were added - # before salt. - - def loads( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - salt: str | bytes | None = None, - ) -> t.Any: - """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the - signature validation fails. If a ``max_age`` is provided it will - ensure the signature is not older than that time in seconds. In - case the signature is outdated, :exc:`.SignatureExpired` is - raised. All arguments are forwarded to the signer's - :meth:`~TimestampSigner.unsign` method. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - base64d, timestamp = signer.unsign( - s, max_age=max_age, return_timestamp=True - ) - payload = self.load_payload(base64d) - - if return_timestamp: - return payload, timestamp - - return payload - except SignatureExpired: - # The signature was unsigned successfully but was - # expired. Do not try the next signer. - raise - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def loads_unsafe( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - salt: str | bytes | None = None, - ) -> tuple[bool, t.Any]: - return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/venv/Lib/site-packages/itsdangerous/url_safe.py b/venv/Lib/site-packages/itsdangerous/url_safe.py deleted file mode 100644 index 56a0793..0000000 --- a/venv/Lib/site-packages/itsdangerous/url_safe.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations - -import typing as t -import zlib - -from ._json import _CompactJSON -from .encoding import base64_decode -from .encoding import base64_encode -from .exc import BadPayload -from .serializer import _PDataSerializer -from .serializer import Serializer -from .timed import TimedSerializer - - -class URLSafeSerializerMixin(Serializer[str]): - """Mixed in with a regular serializer it will attempt to zlib - compress the string to make it shorter if necessary. It will also - base64 encode the string so that it can safely be placed in a URL. 
- """ - - default_serializer: _PDataSerializer[str] = _CompactJSON - - def load_payload( - self, - payload: bytes, - *args: t.Any, - serializer: t.Any | None = None, - **kwargs: t.Any, - ) -> t.Any: - decompress = False - - if payload.startswith(b"."): - payload = payload[1:] - decompress = True - - try: - json = base64_decode(payload) - except Exception as e: - raise BadPayload( - "Could not base64 decode the payload because of an exception", - original_error=e, - ) from e - - if decompress: - try: - json = zlib.decompress(json) - except Exception as e: - raise BadPayload( - "Could not zlib decompress the payload before decoding the payload", - original_error=e, - ) from e - - return super().load_payload(json, *args, **kwargs) - - def dump_payload(self, obj: t.Any) -> bytes: - json = super().dump_payload(obj) - is_compressed = False - compressed = zlib.compress(json) - - if len(compressed) < (len(json) - 1): - json = compressed - is_compressed = True - - base64d = base64_encode(json) - - if is_compressed: - base64d = b"." + base64d - - return base64d - - -class URLSafeSerializer(URLSafeSerializerMixin, Serializer[str]): - """Works like :class:`.Serializer` but dumps and loads into a URL - safe string consisting of the upper and lowercase character of the - alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ - - -class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer[str]): - """Works like :class:`.TimedSerializer` but dumps and loads into a - URL safe string consisting of the upper and lowercase character of - the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/INSTALLER b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/METADATA b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/METADATA deleted file mode 100644 index ffef2ff..0000000 --- a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/METADATA +++ /dev/null @@ -1,84 +0,0 @@ -Metadata-Version: 2.4 -Name: Jinja2 -Version: 3.1.6 -Summary: A very fast and expressive template engine. -Maintainer-email: Pallets -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: MarkupSafe>=2.0 -Requires-Dist: Babel>=2.7 ; extra == "i18n" -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/jinja/ -Provides-Extra: i18n - -# Jinja - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. -- Define and import macros within templates. 
-- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -## In A Nutshell - -```jinja -{% extends "base.html" %} -{% block title %}Members{% endblock %} -{% block content %} - -{% endblock %} -``` - -## Donate - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/RECORD b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/RECORD deleted file mode 100644 index b035d60..0000000 --- a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -jinja2-3.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jinja2-3.1.6.dist-info/METADATA,sha256=aMVUj7Z8QTKhOJjZsx7FDGvqKr3ZFdkh8hQ1XDpkmcg,2871 -jinja2-3.1.6.dist-info/RECORD,, -jinja2-3.1.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2-3.1.6.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82 -jinja2-3.1.6.dist-info/entry_points.txt,sha256=OL85gYU1eD8cuPlikifFngXpeBjaxl6rIJ8KkC_3r-I,58 -jinja2-3.1.6.dist-info/licenses/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -jinja2/__init__.py,sha256=xxepO9i7DHsqkQrgBEduLtfoz2QCuT6_gbL4XSN1hbU,1928 -jinja2/__pycache__/__init__.cpython-312.pyc,, -jinja2/__pycache__/_identifier.cpython-312.pyc,, -jinja2/__pycache__/async_utils.cpython-312.pyc,, -jinja2/__pycache__/bccache.cpython-312.pyc,, -jinja2/__pycache__/compiler.cpython-312.pyc,, -jinja2/__pycache__/constants.cpython-312.pyc,, -jinja2/__pycache__/debug.cpython-312.pyc,, -jinja2/__pycache__/defaults.cpython-312.pyc,, -jinja2/__pycache__/environment.cpython-312.pyc,, -jinja2/__pycache__/exceptions.cpython-312.pyc,, -jinja2/__pycache__/ext.cpython-312.pyc,, -jinja2/__pycache__/filters.cpython-312.pyc,, -jinja2/__pycache__/idtracking.cpython-312.pyc,, -jinja2/__pycache__/lexer.cpython-312.pyc,, -jinja2/__pycache__/loaders.cpython-312.pyc,, -jinja2/__pycache__/meta.cpython-312.pyc,, -jinja2/__pycache__/nativetypes.cpython-312.pyc,, -jinja2/__pycache__/nodes.cpython-312.pyc,, -jinja2/__pycache__/optimizer.cpython-312.pyc,, -jinja2/__pycache__/parser.cpython-312.pyc,, -jinja2/__pycache__/runtime.cpython-312.pyc,, -jinja2/__pycache__/sandbox.cpython-312.pyc,, -jinja2/__pycache__/tests.cpython-312.pyc,, -jinja2/__pycache__/utils.cpython-312.pyc,, -jinja2/__pycache__/visitor.cpython-312.pyc,, -jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 
-jinja2/async_utils.py,sha256=vK-PdsuorOMnWSnEkT3iUJRIkTnYgO2T6MnGxDgHI5o,2834 -jinja2/bccache.py,sha256=gh0qs9rulnXo0PhX5jTJy2UHzI8wFnQ63o_vw7nhzRg,14061 -jinja2/compiler.py,sha256=9RpCQl5X88BHllJiPsHPh295Hh0uApvwFJNQuutULeM,74131 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=CnHqCDHd-BVGvti_8ZsTolnXNhA3ECsY-6n_2pwU8Hw,6297 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 -jinja2/environment.py,sha256=9nhrP7Ch-NbGX00wvyr4yy-uhNHq2OCc60ggGrni_fk,61513 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=5PF5eHfh8mXAIxXHHRB2xXbXohi8pE3nHSOxa66uS7E,31875 -jinja2/filters.py,sha256=PQ_Egd9n9jSgtnGQYyF4K5j2nYwhUIulhPnyimkdr-k,55212 -jinja2/idtracking.py,sha256=-ll5lIp73pML3ErUYiIJj7tdmWxcH_IlDv3yA_hiZYo,10555 -jinja2/lexer.py,sha256=LYiYio6br-Tep9nPcupWXsPEtjluw3p1mU-lNBVRUfk,29786 -jinja2/loaders.py,sha256=wIrnxjvcbqh5VwW28NSkfotiDq8qNCxIOSFbGUiSLB4,24055 -jinja2/meta.py,sha256=OTDPkaFvU2Hgvx-6akz7154F8BIWaRmvJcBFvwopHww,4397 -jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 -jinja2/nodes.py,sha256=m1Duzcr6qhZI8JQ6VyJgUNinjAf5bQzijSmDnMsvUx8,34579 -jinja2/optimizer.py,sha256=rJnCRlQ7pZsEEmMhsQDgC_pKyDHxP5TPS6zVPGsgcu8,1651 -jinja2/parser.py,sha256=lLOFy3sEmHc5IaEHRiH1sQVnId2moUQzhyeJZTtdY30,40383 -jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=gDk-GvdriJXqgsGbHgrcKTP0Yp6zPXzhzrIpCFH3jAU,34249 -jinja2/sandbox.py,sha256=Mw2aitlY2I8la7FYhcX2YG9BtUYcLnD0Gh3d29cDWrY,15009 -jinja2/tests.py,sha256=VLsBhVFnWg-PxSBz1MhRnNWgP1ovXk3neO1FLQMeC9Q,5926 -jinja2/utils.py,sha256=rRp3o9e7ZKS4fyrWRbELyLcpuGVTFcnooaOa1qx_FIk,24129 -jinja2/visitor.py,sha256=EcnL1PIwf_4RVCOMxsRNuR8AXHbS1qfAdMOE2ngKJz4,3557 diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/REQUESTED b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/WHEEL b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/WHEEL deleted file mode 100644 index 23d2d7e..0000000 --- a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.11.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/entry_points.txt b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/entry_points.txt deleted file mode 100644 index abc3eae..0000000 --- a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[babel.extractors] -jinja2=jinja2.ext:babel_extract[i18n] - diff --git a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt b/venv/Lib/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt deleted file mode 100644 index c37cae4..0000000 --- a/venv/Lib/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/jinja2/__init__.py b/venv/Lib/site-packages/jinja2/__init__.py deleted file mode 100644 index 1a423a3..0000000 --- a/venv/Lib/site-packages/jinja2/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. -""" - -from .bccache import BytecodeCache as BytecodeCache -from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache -from .environment import Environment as Environment -from .environment import Template as Template -from .exceptions import TemplateAssertionError as TemplateAssertionError -from .exceptions import TemplateError as TemplateError -from .exceptions import TemplateNotFound as TemplateNotFound -from .exceptions import TemplateRuntimeError as TemplateRuntimeError -from .exceptions import TemplatesNotFound as TemplatesNotFound -from .exceptions import TemplateSyntaxError as TemplateSyntaxError -from .exceptions import UndefinedError as UndefinedError -from .loaders import BaseLoader as BaseLoader -from .loaders import ChoiceLoader as ChoiceLoader -from .loaders import DictLoader as DictLoader -from .loaders import FileSystemLoader as FileSystemLoader -from .loaders import FunctionLoader as FunctionLoader -from .loaders import ModuleLoader as ModuleLoader -from .loaders import PackageLoader as PackageLoader -from .loaders import PrefixLoader as PrefixLoader -from .runtime import ChainableUndefined as ChainableUndefined -from .runtime import DebugUndefined as DebugUndefined -from .runtime import make_logging_undefined as make_logging_undefined -from .runtime import StrictUndefined as StrictUndefined -from .runtime import Undefined as Undefined -from .utils import clear_caches as clear_caches -from .utils import is_undefined as is_undefined -from .utils import pass_context as pass_context -from .utils import pass_environment as pass_environment -from .utils import pass_eval_context as pass_eval_context -from .utils import select_autoescape as select_autoescape - -__version__ = "3.1.6" diff --git a/venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d6cc2c6..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc 
b/venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc deleted file mode 100644 index 3c0de03..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc deleted file mode 100644 index c415b2f..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc deleted file mode 100644 index c63ff86..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc deleted file mode 100644 index 0f06507..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/constants.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/constants.cpython-312.pyc deleted file mode 100644 index 708854a..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/constants.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/debug.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/debug.cpython-312.pyc deleted file mode 100644 index 4401413..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/debug.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc deleted file mode 100644 index 4dc8f93..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-312.pyc deleted file mode 100644 index b91187c..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 70195a4..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/ext.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/ext.cpython-312.pyc deleted file mode 100644 index 446b97b..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/ext.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-312.pyc deleted file mode 100644 index 2c5ca01..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc deleted file mode 100644 index 6a81ece..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc deleted file mode 100644 index ac02d8b..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc deleted file mode 100644 index 4928608..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/meta.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/meta.cpython-312.pyc deleted file mode 100644 index 88080c6..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/meta.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc deleted file mode 100644 index 4ff574d..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc deleted file mode 100644 index bdfc42e..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc deleted file mode 100644 index 6780429..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-312.pyc deleted file mode 100644 index 55c9ab5..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc deleted file mode 100644 index 33e7add..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc deleted file mode 100644 index 1d4ee5d..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-312.pyc deleted file mode 100644 index 19efa6a..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index e0d7936..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc b/venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc deleted file mode 100644 index bb29a50..0000000 Binary files a/venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/jinja2/_identifier.py b/venv/Lib/site-packages/jinja2/_identifier.py deleted file mode 100644 index 928c150..0000000 --- a/venv/Lib/site-packages/jinja2/_identifier.py +++ /dev/null @@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/venv/Lib/site-packages/jinja2/async_utils.py b/venv/Lib/site-packages/jinja2/async_utils.py deleted file mode 100644 index f0c1402..0000000 --- a/venv/Lib/site-packages/jinja2/async_utils.py +++ /dev/null @@ -1,99 +0,0 @@ -import inspect -import typing as t -from functools import WRAPPER_ASSIGNMENTS -from functools import wraps - -from .utils import _PassArg -from .utils import pass_eval_context - -if t.TYPE_CHECKING: - import typing_extensions as te - -V = t.TypeVar("V") - - -def async_variant(normal_func): # type: ignore - def decorator(async_func): # type: ignore - pass_arg = _PassArg.from_obj(normal_func) - need_eval_context = pass_arg is None - - if pass_arg is _PassArg.environment: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].is_async) - - else: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].environment.is_async) - - # Take the doc and annotations from the sync function, but the - # name from the async function. Pallets-Sphinx-Themes - # build_function_directive expects __wrapped__ to point to the - # sync function. 
- async_func_attrs = ("__module__", "__name__", "__qualname__") - normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) - - @wraps(normal_func, assigned=normal_func_attrs) - @wraps(async_func, assigned=async_func_attrs, updated=()) - def wrapper(*args, **kwargs): # type: ignore - b = is_async(args) - - if need_eval_context: - args = args[1:] - - if b: - return async_func(*args, **kwargs) - - return normal_func(*args, **kwargs) - - if need_eval_context: - wrapper = pass_eval_context(wrapper) - - wrapper.jinja_async_variant = True # type: ignore[attr-defined] - return wrapper - - return decorator - - -_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} - - -async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": - # Avoid a costly call to isawaitable - if type(value) in _common_primitives: - return t.cast("V", value) - - if inspect.isawaitable(value): - return await t.cast("t.Awaitable[V]", value) - - return value - - -class _IteratorToAsyncIterator(t.Generic[V]): - def __init__(self, iterator: "t.Iterator[V]"): - self._iterator = iterator - - def __aiter__(self) -> "te.Self": - return self - - async def __anext__(self) -> V: - try: - return next(self._iterator) - except StopIteration as e: - raise StopAsyncIteration(e.value) from e - - -def auto_aiter( - iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> "t.AsyncIterator[V]": - if hasattr(iterable, "__aiter__"): - return iterable.__aiter__() - else: - return _IteratorToAsyncIterator(iter(iterable)) - - -async def auto_to_list( - value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> t.List["V"]: - return [x async for x in auto_aiter(value)] diff --git a/venv/Lib/site-packages/jinja2/bccache.py b/venv/Lib/site-packages/jinja2/bccache.py deleted file mode 100644 index ada8b09..0000000 --- a/venv/Lib/site-packages/jinja2/bccache.py +++ /dev/null @@ -1,408 +0,0 @@ -"""The optional bytecode cache system. This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" - -import errno -import fnmatch -import marshal -import os -import pickle -import stat -import sys -import tempfile -import typing as t -from hashlib import sha1 -from io import BytesIO -from types import CodeType - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - - class _MemcachedClient(te.Protocol): - def get(self, key: str) -> bytes: ... - - def set( - self, key: str, value: bytes, timeout: t.Optional[int] = None - ) -> None: ... - - -bc_version = 5 -# Magic bytes to identify Jinja bytecode cache files. Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket: - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. 
- """ - - def __init__(self, environment: "Environment", key: str, checksum: str) -> None: - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self) -> None: - """Resets the bucket (unloads the bytecode).""" - self.code: t.Optional[CodeType] = None - - def load_bytecode(self, f: t.BinaryIO) -> None: - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal.load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f: t.IO[bytes]) -> None: - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal.dump(self.code, f) - - def bytecode_from_string(self, string: bytes) -> None: - """Load bytecode from bytes.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self) -> bytes: - """Return the bytecode as bytes.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache: - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self) -> None: - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. 
- """ - - def get_cache_key( - self, name: str, filename: t.Optional[t.Union[str]] = None - ) -> str: - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - - if filename is not None: - hash.update(f"|{filename}".encode()) - - return hash.hexdigest() - - def get_source_checksum(self, source: str) -> str: - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket( - self, - environment: "Environment", - name: str, - filename: t.Optional[str], - source: str, - ) -> Bucket: - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket: Bucket) -> None: - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__( - self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" - ) -> None: - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self) -> str: - def _unsafe_dir() -> "te.NoReturn": - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." - ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = f"_jinja2-cache-{os.getuid()}" - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket: Bucket) -> str: - return os.path.join(self.directory, self.pattern % (bucket.key,)) - - def load_bytecode(self, bucket: Bucket) -> None: - filename = self._get_cache_filename(bucket) - - # Don't test for existence before opening the file, since the - # file could disappear after the test before the open. 
- try: - f = open(filename, "rb") - except (FileNotFoundError, IsADirectoryError, PermissionError): - # PermissionError can occur on Windows when an operation is - # in progress, such as calling clear(). - return - - with f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket: Bucket) -> None: - # Write to a temporary file, then rename to the real name after - # writing. This avoids another process reading the file before - # it is fully written. - name = self._get_cache_filename(bucket) - f = tempfile.NamedTemporaryFile( - mode="wb", - dir=os.path.dirname(name), - prefix=os.path.basename(name), - suffix=".tmp", - delete=False, - ) - - def remove_silent() -> None: - try: - os.remove(f.name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - pass - - try: - with f: - bucket.write_bytecode(f) - except BaseException: - remove_silent() - raise - - try: - os.replace(f.name, name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - remove_silent() - except BaseException: - remove_silent() - raise - - def clear(self) -> None: - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - - files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) - for filename in files: - try: - remove(os.path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only text. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. 
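Note on the minimal client interface described above: any object with matching `get(key)` and `set(key, value[, timeout])` methods can back `MemcachedBytecodeCache`. A hedged sketch using an assumed in-process dict-backed client (the class name and storage are illustrative only):

```python
# Hedged sketch: an in-process stand-in satisfying the minimal client
# interface described above (get returns None on a miss; set accepts an
# optional timeout). The class name and dict storage are assumptions.
from jinja2 import Environment
from jinja2.bccache import MemcachedBytecodeCache


class DictClient:
    def __init__(self):
        self._data = {}

    def get(self, key):
        return self._data.get(key)  # None when the key is absent

    def set(self, key, value, timeout=None):
        self._data[key] = value  # timeout ignored in this toy client


env = Environment(
    bytecode_cache=MemcachedBytecodeCache(DictClient(), prefix="jinja2/bytecode/")
)
```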
- """ - - def __init__( - self, - client: "_MemcachedClient", - prefix: str = "jinja2/bytecode/", - timeout: t.Optional[int] = None, - ignore_memcache_errors: bool = True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket: Bucket) -> None: - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - else: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket: Bucket) -> None: - key = self.prefix + bucket.key - value = bucket.bytecode_to_string() - - try: - if self.timeout is not None: - self.client.set(key, value, self.timeout) - else: - self.client.set(key, value) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/venv/Lib/site-packages/jinja2/compiler.py b/venv/Lib/site-packages/jinja2/compiler.py deleted file mode 100644 index a4ff6a1..0000000 --- a/venv/Lib/site-packages/jinja2/compiler.py +++ /dev/null @@ -1,1998 +0,0 @@ -"""Compiles nodes from the parser into Python code.""" - -import typing as t -from contextlib import contextmanager -from functools import update_wrapper -from io import StringIO -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . import nodes -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import _PassArg -from .utils import concat -from .visitor import NodeVisitor - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - - -def optimizeconst(f: F) -> F: - def new_func( - self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any - ) -> t.Any: - # Only optimize if the frame is not volatile - if self.optimizer is not None and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - - if new_node != node: - return self.visit(new_node, frame) - - return f(self, node, frame, **kwargs) - - return update_wrapper(new_func, f) # type: ignore[return-value] - - -def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_binops # type: ignore - ): - self.write(f"environment.call_binop(context, {op!r}, ") - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(f" {op} ") - self.visit(node.right, frame) - - self.write(")") - - return visitor - - -def _make_unop( - op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_unops # type: ignore - ): - self.write(f"environment.call_unop(context, {op!r}, ") - 
self.visit(node.node, frame) - else: - self.write("(" + op) - self.visit(node.node, frame) - - self.write(")") - - return visitor - - -def generate( - node: nodes.Template, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, -) -> t.Optional[str]: - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - - if stream is None: - return generator.stream.getvalue() # type: ignore - - return None - - -def has_safe_repr(value: t.Any) -> bool: - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - - if type(value) in {bool, int, float, complex, range, str, Markup}: - return True - - if type(value) in {tuple, list, set, frozenset}: - return all(has_safe_repr(v) for v in value) - - if type(value) is dict: # noqa E721 - return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - - return False - - -def find_undeclared( - nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef: - def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame: - """Holds compile time information for us.""" - - def __init__( - self, - eval_ctx: EvalContext, - parent: t.Optional["Frame"] = None, - level: t.Optional[int] = None, - ) -> None: - self.eval_ctx = eval_ctx - - # the parent of this frame - self.parent = parent - - if parent is None: - self.symbols = Symbols(level=level) - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = False - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer: t.Optional[str] = None - - # the name of the block we're in, otherwise None. - self.block: t.Optional[str] = None - - else: - self.symbols = Symbols(parent.symbols, level=level) - self.require_output_check = parent.require_output_check - self.buffer = parent.buffer - self.block = parent.block - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # variables set inside of loops and blocks should not affect outer frames, - # but they still needs to be kept track of as part of the active context. - self.loop_frame = False - self.block_frame = False - - # track whether the frame is being used in an if-statement or conditional - # expression as it determines which errors should be raised during runtime - # or compile time. 
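Note on `generate()` above: it emits the Python source for a template's node tree, and the public way to inspect that output is `Environment.compile(source, raw=True)`, which returns the generated module source as a string. A small sketch with an arbitrary example template:

```python
# Sketch: inspect the Python source that the code generator above emits.
# Environment.compile(..., raw=True) returns the generated module source
# instead of a code object; the template string here is just an example.
from jinja2 import Environment

env = Environment()
py_source = env.compile("Hello {{ name }}!", name="hello", raw=True)
print(py_source)  # generated root() render function, debug_info, etc.
```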
- self.soft_frame = False - - def copy(self) -> "te.Self": - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated: bool = False) -> "Frame": - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self) -> "te.Self": - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements and conditional - expressions. - """ - rv = self.copy() - rv.rootlevel = False - rv.soft_frame = True - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self) -> None: - self.filters: t.Set[str] = set() - self.tests: t.Set[str] = set() - - def visit_Filter(self, node: nodes.Filter) -> None: - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node: nodes.Test) -> None: - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names: t.Iterable[str]) -> None: - self.names = set(names) - self.undeclared: t.Set[str] = set() - - def visit_Name(self, node: nodes.Name) -> None: - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, - ) -> None: - if stream is None: - stream = StringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimizer: t.Optional[Optimizer] = None - - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases: t.Dict[str, str] = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks: t.Dict[str, nodes.Block] = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. In this case we - # can safely assume that we're a child template and do some - # more optimizations. 
- self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests: t.Dict[str, str] = {} - self.filters: t.Dict[str, str] = {} - - # the debug information - self.debug_info: t.List[t.Tuple[int, int]] = [] - self._write_debug_info: t.Optional[int] = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. - self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack: t.List[t.Set[str]] = [] - - # Tracks parameter definition blocks - self._param_def_block: t.List[t.Set[str]] = [] - - # Tracks the current context. - self._context_reference_stack = ["context"] - - @property - def optimized(self) -> bool: - return self.optimizer is not None - - # -- Various compilation helpers - - def fail(self, msg: str, lineno: int) -> "te.NoReturn": - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self) -> str: - """Get a new unique identifier.""" - self._last_identifier += 1 - return f"t_{self._last_identifier}" - - def buffer(self, frame: Frame) -> None: - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline(f"{frame.buffer} = []") - - def return_buffer_contents( - self, frame: Frame, force_unescaped: bool = False - ) -> None: - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline(f"return Markup(concat({frame.buffer}))") - self.outdent() - self.writeline("else:") - self.indent() - self.writeline(f"return concat({frame.buffer})") - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline(f"return Markup(concat({frame.buffer}))") - return - self.writeline(f"return concat({frame.buffer})") - - def indent(self) -> None: - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step: int = 1) -> None: - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline(f"{frame.buffer}.append(", node) - - def end_write(self, frame: Frame) -> None: - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write( - self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None - ) -> None: - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. 
- """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x: str) -> None: - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline( - self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 - ) -> None: - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature( - self, - node: t.Union[nodes.Call, nodes.Filter, nodes.Test], - frame: Frame, - extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> None: - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = any( - is_python_keyword(t.cast(str, k)) - for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) - ) - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f", {key}={value}") - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write(f"{kwarg.key!r}: ") - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f"{key!r}: {value}, ") - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: - """Find all filter and test names used in the template and - assign them to variables in the compiled namespace. Checking - that the names are registered with the environment is done when - compiling the Filter and Test nodes. If the node is in an If or - CondExpr node, the check is done at runtime instead. - - .. versionchanged:: 3.0 - Filters and tests in If and CondExpr nodes are checked at - runtime instead of compile time. 
- """ - visitor = DependencyFinderVisitor() - - for node in nodes: - visitor.visit(node) - - for id_map, names, dependency in ( - (self.filters, visitor.filters, "filters"), - ( - self.tests, - visitor.tests, - "tests", - ), - ): - for name in sorted(names): - if name not in id_map: - id_map[name] = self.temporary_identifier() - - # add check during runtime that dependencies used inside of executed - # blocks are defined, as this step may be skipped during compile time - self.writeline("try:") - self.indent() - self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") - self.outdent() - self.writeline("except KeyError:") - self.indent() - self.writeline("@internalcode") - self.writeline(f"def {id_map[name]}(*unused):") - self.indent() - self.writeline( - f'raise TemplateRuntimeError("No {dependency[:-1]}' - f' named {name!r} found.")' - ) - self.outdent() - self.outdent() - - def enter_frame(self, frame: Frame) -> None: - undefs = [] - for target, (action, param) in frame.symbols.loads.items(): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") - elif action == VAR_LOAD_ALIAS: - self.writeline(f"{target} = {param}") - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load instruction") - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: - if not with_python_scope: - undefs = [] - for target in frame.symbols.loads: - undefs.append(target) - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: - return async_value if self.environment.is_async else sync_value - - def func(self, name: str) -> str: - return f"{self.choose_async()}def {name}" - - def macro_body( - self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame - ) -> t.Tuple[Frame, MacroRef]: - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline(f"if {ref} is missing:") - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - f'{ref} = undefined("parameter {arg.name!r} was not provided",' - f" name={arg.name!r})" - ) - else: - self.writeline(f"{ref} = ") - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - f"Macro(environment, macro, {name!r}, ({arg_tuple})," - f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," - f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" - ) - - def position(self, node: nodes.Node) -> str: - """Return a human readable position for the node.""" - rv = f"line {node.lineno}" - if self.name is not None: - rv = f"{rv} in {self.name!r}" - return rv - - def dump_local_context(self, frame: Frame) -> str: - items_kv = ", ".join( - f"{name!r}: {target}" - for name, target in frame.symbols.dump_stores().items() - ) - return f"{{{items_kv}}}" - - def write_commons(self) -> None: - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - self.writeline("concat = environment.concat") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame: Frame) -> None: - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self) -> None: - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target: str) -> None: - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target: str) -> None: - self._context_reference_stack.append(target) - - def pop_context_reference(self) -> None: - self._context_reference_stack.pop() - - def get_context_ref(self) -> str: - return self._context_reference_stack[-1] - - def get_resolve_func(self) -> str: - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return f"{target}.resolve" - - def derive_context(self, frame: Frame) -> str: - return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - - def parameter_is_undeclared(self, target: str) -> bool: - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self) -> None: - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame: Frame) -> None: - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if ( - not frame.block_frame - and not frame.loop_frame - and not frame.toplevel - or not vars - ): - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - if frame.loop_frame: - self.writeline(f"_loop_vars[{name!r}] = {ref}") - return - if frame.block_frame: - self.writeline(f"_block_vars[{name!r}] = {ref}") - return - self.writeline(f"context.vars[{name!r}] = {ref}") - else: - if frame.loop_frame: - self.writeline("_loop_vars.update({") - elif frame.block_frame: - self.writeline("_block_vars.update({") - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(sorted(vars)): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write(f"{name!r}: {ref}") - self.write("})") - if not frame.block_frame and not frame.loop_frame and public_names: - if len(public_names) == 1: - self.writeline(f"context.exported_vars.add({public_names[0]!r})") - else: - names_str = ", ".join(map(repr, sorted(public_names))) - self.writeline(f"context.exported_vars.update(({names_str}))") - - # -- Statement Visitors - - def visit_Template( - self, node: nodes.Template, frame: t.Optional[Frame] = None - ) -> None: - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import async_exported - from .runtime import exported - - if self.environment.is_async: - exported_names = sorted(exported + async_exported) - else: - exported_names = sorted(exported) - - self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = "" if self.defer_init else ", environment=environment" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. 
- have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail(f"block {block.name!r} defined twice", block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." in imp: - module, obj = imp.rsplit(".", 1) - self.writeline(f"from {module} import {obj} as {alias}") - else: - self.writeline(f"import {imp} as {alias}") - - # add the load name - self.writeline(f"name = {self.name!r}") - - # generate the root render function. - self.writeline( - f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline("agen = parent_template.root_render_func(context)") - self.writeline("try:") - self.indent() - self.writeline("async for event in agen:") - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent() - self.writeline("finally: await agen.aclose()") - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in self.blocks.items(): - self.writeline( - f"{self.func('block_' + name)}(context, missing=missing{envenv}):", - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - block_frame.block_frame = True - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline(f"{ref} = context.super({name!r}, block_{name})") - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.writeline("_block_vars = {}") - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) - self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) - debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) - self.writeline(f"debug_info = {debug_kv_str!r}") - - def visit_Block(self, node: nodes.Block, frame: Frame) -> None: - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if node.required: - self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) - self.indent() - self.writeline( - f'raise TemplateRuntimeError("Required block {node.name!r} not found")', - node, - ) - self.outdent() - - if not self.environment.is_async and frame.buffer is None: - self.writeline( - f"yield from context.blocks[{node.name!r}][0]({context})", node - ) - else: - self.writeline(f"gen = context.blocks[{node.name!r}][0]({context})") - self.writeline("try:") - self.indent() - self.writeline( - f"{self.choose_async()}for event in gen:", - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - self.outdent() - self.writeline( - f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" - ) - - self.outdent(level) - - def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: - """Calls the extender.""" - if not frame.toplevel: - self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline('raise TemplateRuntimeError("extended multiple times")') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. 
- if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - self.writeline("for name, parent_block in parent_template.blocks.items():") - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node: nodes.Include, frame: Frame) -> None: - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, str): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = "select_template" - - self.writeline(f"template = environment.{func_name}(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - def loop_body() -> None: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.with_context: - self.writeline( - f"gen = template.root_render_func(" - "template.new_context(context.get_all(), True," - f" {self.dump_local_context(frame)}))" - ) - self.writeline("try:") - self.indent() - self.writeline(f"{self.choose_async()}for event in gen:") - loop_body() - self.outdent() - self.writeline( - f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" - ) - elif self.environment.is_async: - self.writeline( - "for event in (await template._get_default_module_async())" - "._body_stream:" - ) - loop_body() - else: - self.writeline("yield from template._get_default_module()._body_stream") - - if node.ignore_missing: - self.outdent() - - def _import_common( - self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame - ) -> None: - self.write(f"{self.choose_async('await ')}environment.get_template(") - self.visit(node.template, frame) - self.write(f", {self.name!r}).") - - if node.with_context: - f_name = f"make_module{self.choose_async('_async')}" - self.write( - f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" - ) - else: - self.write(f"_get_default_module{self.choose_async('_async')}(context)") - - def visit_Import(self, node: nodes.Import, frame: Frame) -> None: - """Visit regular imports.""" - self.writeline(f"{frame.symbols.ref(node.target)} = ", node) - if frame.toplevel: - self.write(f"context.vars[{node.target!r}] = ") - - self._import_common(node, frame) - - if frame.toplevel and not node.target.startswith("_"): - self.writeline(f"context.exported_vars.discard({node.target!r})") - - def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: - """Visit named imports.""" - self.newline(node) - self.write("included_template = ") - self._import_common(node, frame) - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - 
self.writeline( - f"{frame.symbols.ref(alias)} =" - f" getattr(included_template, {name!r}, missing)" - ) - self.writeline(f"if {frame.symbols.ref(alias)} is missing:") - self.indent() - # The position will contain the template name, and will be formatted - # into a string that will be compiled into an f-string. Curly braces - # in the name must be replaced with escapes so that they will not be - # executed as part of the f-string. - position = self.position(node).replace("{", "{{").replace("}", "}}") - message = ( - "the template {included_template.__name__!r}" - f" (imported on {position})" - f" does not export the requested name {name!r}" - ) - self.writeline( - f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") - else: - names_kv = ", ".join( - f"{name!r}: {frame.symbols.ref(name)}" for name in var_names - ) - self.writeline(f"context.vars.update({{{names_kv}}})") - if discarded_names: - if len(discarded_names) == 1: - self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") - else: - names_str = ", ".join(map(repr, discarded_names)) - self.writeline( - f"context.exported_vars.difference_update(({names_str}))" - ) - - def visit_For(self, node: nodes.For, frame: Frame) -> None: - loop_frame = frame.inner() - loop_frame.loop_frame = True - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body if the body is a scoped block. - extended_loop = ( - node.recursive - or "loop" - in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) - or any(block.scoped for block in node.find_all(nodes.Block)) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.choose_async("async for ", "for ")) - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.choose_async("auto_aiter(fiter)", "fiter")) - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline(f"{loop_ref} = missing") - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline(f"{iteration_indicator} = 1") - - self.writeline(self.choose_async("async for ", "for "), node) - self.visit(node.target, loop_frame) - if extended_loop: - self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") - else: - self.write(" in ") - - if node.test: - self.write(f"{loop_filter_func}(") - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(", undefined):" if extended_loop else ":") - - self.indent() - self.enter_frame(loop_frame) - - self.writeline("_loop_vars = {}") - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline(f"{iteration_indicator} = 0") - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline(f"if {iteration_indicator}:") - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - self.write(f"{self.choose_async('await ')}loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - # at the end of the iteration, clear any assignments made in the - # loop from the top level - if self._assign_stack: - self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - - def visit_If(self, node: nodes.If, frame: Frame) -> None: - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write(f"context.exported_vars.add({node.name!r})") - self.writeline(f"context.vars[{node.name!r}] = ") - self.write(f"{frame.symbols.ref(node.name)} = ") - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node: nodes.With, frame: Frame) -> None: - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in zip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: - self.newline(node) - self.visit(node.node, frame) - - class _FinalizeInfo(t.NamedTuple): - const: t.Optional[t.Callable[..., str]] - src: t.Optional[str] - - @staticmethod - def _default_finalize(value: t.Any) -> t.Any: - """The default finalize function if the environment isn't - configured with one. Or, if the environment has one, this is - called on that function's output for constants. - """ - return str(value) - - _finalize: t.Optional[_FinalizeInfo] = None - - def _make_finalize(self) -> _FinalizeInfo: - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. - """ - if self._finalize is not None: - return self._finalize - - finalize: t.Optional[t.Callable[..., t.Any]] - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(env_finalize) # type: ignore - ) - finalize = None - - if pass_arg is None: - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(value)) - - else: - src = f"{src}{pass_arg}, " - - if pass_arg == "environment": - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> str: - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. 
- """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return str(const) - - return finalize.const(const) # type: ignore - - def _output_child_pre( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code before visiting a child of an - ``Output`` node. - """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else str)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("str(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node: nodes.Output, frame: Frame) -> None: - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline(f"{frame.buffer}.append(") - else: - self.writeline(f"{frame.buffer}.extend((") - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. - val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: - self.push_assign_tracking() - - # ``a.b`` is allowed for assignment, and is parsed as an NSRef. However, - # it is only valid if it references a Namespace object. Emit a check for - # that for each ref here, before assignment code is emitted. This can't - # be done in visit_NSRef as the ref could be in the middle of a tuple. 
- seen_refs: t.Set[str] = set() - - for nsref in node.find_all(nodes.NSRef): - if nsref.name in seen_refs: - # Only emit the check for each reference once, in case the same - # ref is used multiple times in a tuple, `ns.a, ns.b = c, d`. - continue - - seen_refs.add(nsref.name) - ref = frame.symbols.ref(nsref.name) - self.writeline(f"if not isinstance({ref}, Namespace):") - self.indent() - self.writeline( - "raise TemplateRuntimeError" - '("cannot assign attribute on non-namespace object")' - ) - self.outdent() - - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write(f"concat({block_frame.buffer})") - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node: nodes.Name, frame: Frame) -> None: - if node.ctx == "store" and ( - frame.toplevel or frame.loop_frame or frame.block_frame - ): - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" - ) - return - - self.write(ref) - - def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: - # NSRef is a dotted assignment target a.b=c, but uses a[b]=c internally. - # visit_Assign emits code to validate that each ref is to a Namespace - # object only. That can't be emitted here as the ref could be in the - # middle of a tuple assignment. 
- ref = frame.symbols.ref(node.name) - self.writeline(f"{ref}[{node.attr!r}]") - - def visit_Const(self, node: nodes.Const, frame: Frame) -> None: - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" - ) - - def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(",)" if idx == 0 else ")") - - def visit_List(self, node: nodes.List, frame: Frame) -> None: - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - visit_Add = _make_binop("+") - visit_Sub = _make_binop("-") - visit_Mul = _make_binop("*") - visit_Div = _make_binop("/") - visit_FloorDiv = _make_binop("//") - visit_Pow = _make_binop("**") - visit_Mod = _make_binop("%") - visit_And = _make_binop("and") - visit_Or = _make_binop("or") - visit_Pos = _make_unop("+") - visit_Neg = _make_unop("-") - visit_Not = _make_unop("not ") - - @optimizeconst - def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: - if frame.eval_ctx.volatile: - func_name = "(markup_join if context.eval_ctx.volatile else str_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "str_join" - self.write(f"{func_name}((") - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: - self.write(f" {operators[node.op]} ") - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(f", {node.attr!r})") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: - # slices bypass the environment getitem method. 
- if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @contextmanager - def _filter_test_common( - self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool - ) -> t.Iterator[None]: - if self.environment.is_async: - self.write("(await auto_await(") - - if is_filter: - self.write(f"{self.filters[node.name]}(") - func = self.environment.filters.get(node.name) - else: - self.write(f"{self.tests[node.name]}(") - func = self.environment.tests.get(node.name) - - # When inside an If or CondExpr frame, allow the filter to be - # undefined at compile time and only raise an error if it's - # actually called at runtime. See pull_dependencies. - if func is None and not frame.soft_frame: - type_name = "filter" if is_filter else "test" - self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(func) # type: ignore - ) - - if pass_arg is not None: - self.write(f"{pass_arg}, ") - - # Back to the visitor function to handle visiting the target of - # the filter or test. 
- yield - - self.signature(node, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: - with self._filter_test_common(node, frame, True): - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - f"(Markup(concat({frame.buffer}))" - f" if context.eval_ctx.autoescape else concat({frame.buffer}))" - ) - elif frame.eval_ctx.autoescape: - self.write(f"Markup(concat({frame.buffer}))") - else: - self.write(f"concat({frame.buffer})") - - @optimizeconst - def visit_Test(self, node: nodes.Test, frame: Frame) -> None: - with self._filter_test_common(node, frame, False): - self.visit(node.node, frame) - - @optimizeconst - def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: - frame = frame.soft() - - def write_expr2() -> None: - if node.expr2 is not None: - self.visit(node.expr2, frame) - return - - self.write( - f'cond_expr_undefined("the inline if-expression on' - f" {self.position(node)} evaluated to false and no else" - f' section was defined.")' - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call( - self, node: nodes.Call, frame: Frame, forward_caller: bool = False - ) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = {"caller": "caller"} if forward_caller else None - loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} - block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} - if extra_kwargs: - extra_kwargs.update(loop_kwargs, **block_kwargs) - elif loop_kwargs or block_kwargs: - extra_kwargs = dict(loop_kwargs, **block_kwargs) - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write("))") - - def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape( - self, node: nodes.MarkSafeIfAutoescape, frame: Frame - ) -> None: - self.write("(Markup if context.eval_ctx.autoescape else identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute( - self, node: nodes.EnvironmentAttribute, frame: Frame - ) -> None: - self.write("environment." 
+ node.name) - - def visit_ExtensionAttribute( - self, node: nodes.ExtensionAttribute, frame: Frame - ) -> None: - self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - - def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: - self.write(node.name) - - def visit_ContextReference( - self, node: nodes.ContextReference, frame: Frame - ) -> None: - self.write("context") - - def visit_DerivedContextReference( - self, node: nodes.DerivedContextReference, frame: Frame - ) -> None: - self.write(self.derive_context(frame)) - - def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: - self.writeline("continue", node) - - def visit_Break(self, node: nodes.Break, frame: Frame) -> None: - self.writeline("break", node) - - def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: - ctx = self.temporary_identifier() - self.writeline(f"{ctx} = {self.derive_context(frame)}") - self.writeline(f"{ctx}.vars = ") - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier( - self, node: nodes.EvalContextModifier, frame: Frame - ) -> None: - for keyword in node.options: - self.writeline(f"context.eval_ctx.{keyword.key} = ") - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier( - self, node: nodes.ScopedEvalContextModifier, frame: Frame - ) -> None: - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/venv/Lib/site-packages/jinja2/constants.py b/venv/Lib/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/venv/Lib/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie -mollis montes morbi mus nam 
nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/venv/Lib/site-packages/jinja2/debug.py b/venv/Lib/site-packages/jinja2/debug.py deleted file mode 100644 index eeeeee7..0000000 --- a/venv/Lib/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: - from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: The original exception with the rewritten traceback. - """ - _, exc_value, tb = sys.exc_info() - exc_value = t.cast(BaseException, exc_value) - tb = t.cast(TracebackType, tb) - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - # Remove the old traceback, otherwise the frames from the - # compiler still show up. - exc_value.with_traceback(None) - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. - if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb.tb_next = tb_next - tb_next = tb - - return exc_value.with_traceback(tb_next) - - -def fake_traceback( # type: ignore - exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. - - :param exc_value: The original exception to be re-raised to create - the new traceback. 
- :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code: CodeType = compile( - "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" - ) - - # Build a new code object that points to the template file and - # replaces the location with a block name. - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = f"block {function[6:]!r}" - - if sys.version_info >= (3, 8): - code = code.replace(co_name=location) - else: - code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - code.co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - location, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars, - ) - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx: t.Optional[Context] = real_locals.get("context") - - if ctx is not None: - data: t.Dict[str, t.Any] = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. - local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth_str, name = name.split("_", 2) - depth = int(depth_str) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. 
- for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data diff --git a/venv/Lib/site-packages/jinja2/defaults.py b/venv/Lib/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/venv/Lib/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: - import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "urlize.extra_schemes": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/venv/Lib/site-packages/jinja2/environment.py b/venv/Lib/site-packages/jinja2/environment.py deleted file mode 100644 index 0fc6e5b..0000000 --- a/venv/Lib/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1672 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" - -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS # type: ignore[attr-defined] -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS # type: ignore[attr-defined] -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .bccache import BytecodeCache - from .ext import Extension - from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: - """Return a new spontaneous environment. A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - env = cls(*args) - env.shared = True - return env - - -def create_cache( - size: int, -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Return the cache class for the given size.""" - if size == 0: - return None - - if size < 0: - return {} - - return LRUCache(size) # type: ignore - - -def copy_cache( - cache: t.Optional[t.MutableMapping[t.Any, t.Any]], -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Create an empty copy of the given cache.""" - if cache is None: - return None - - if type(cache) is dict: # noqa E721 - return {} - - return LRUCache(cache.capacity) # type: ignore - - -def load_extensions( - environment: "Environment", - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated extensions. - """ - result = {} - - for extension in extensions: - if isinstance(extension, str): - extension = t.cast(t.Type["Extension"], import_string(extension)) - - result[extension.identifier] = extension(environment) - - return result - - -def _environment_config_check(environment: _env_bound) -> _env_bound: - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
- assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different." - assert environment.newline_sequence in { - "\r", - "\r\n", - "\n", - }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." - return environment - - -class Environment: - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. 
Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which - allows using async functions and generators. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to: t.Optional["Environment"] = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - - concat = "".join - - #: the context class that is used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. - context_class: t.Type[Context] = Context - - template_class: t.Type["Template"] - - def __init__( - self, - block_start_string: str = BLOCK_START_STRING, - block_end_string: str = BLOCK_END_STRING, - variable_start_string: str = VARIABLE_START_STRING, - variable_end_string: str = VARIABLE_END_STRING, - comment_start_string: str = COMMENT_START_STRING, - comment_end_string: str = COMMENT_END_STRING, - line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, - line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, - trim_blocks: bool = TRIM_BLOCKS, - lstrip_blocks: bool = LSTRIP_BLOCKS, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, - keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), - optimized: bool = True, - undefined: t.Type[Undefined] = Undefined, - finalize: t.Optional[t.Callable[..., t.Any]] = None, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, - loader: t.Optional["BaseLoader"] = None, - cache_size: int = 400, - auto_reload: bool = True, - bytecode_cache: t.Optional["BytecodeCache"] = None, - enable_async: bool = False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. 
However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined: t.Type[Undefined] = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.is_async = enable_async - _environment_config_check(self) - - def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes: t.Any) -> None: - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in attributes.items(): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string: str = missing, - block_end_string: str = missing, - variable_start_string: str = missing, - variable_end_string: str = missing, - comment_start_string: str = missing, - comment_end_string: str = missing, - line_statement_prefix: t.Optional[str] = missing, - line_comment_prefix: t.Optional[str] = missing, - trim_blocks: bool = missing, - lstrip_blocks: bool = missing, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, - keep_trailing_newline: bool = missing, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, - optimized: bool = missing, - undefined: t.Type[Undefined] = missing, - finalize: t.Optional[t.Callable[..., t.Any]] = missing, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, - loader: t.Optional["BaseLoader"] = missing, - cache_size: int = missing, - auto_reload: bool = missing, - bytecode_cache: t.Optional["BytecodeCache"] = missing, - enable_async: bool = missing, - ) -> "te.Self": - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. 
An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. - - .. versionchanged:: 3.1.5 - ``enable_async`` is applied correctly. - - .. versionchanged:: 3.1.2 - Added the ``newline_sequence``, ``keep_trailing_newline``, - and ``enable_async`` parameters to match ``__init__``. - """ - args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"], args["enable_async"] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in args.items(): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in self.extensions.items(): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - if enable_async is not missing: - rv.is_async = enable_async - - return _environment_config_check(rv) - - @property - def lexer(self) -> Lexer: - """The lexer for this environment.""" - return get_lexer(self) - - def iter_extensions(self) -> t.Iterator["Extension"]: - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - - def getitem( - self, obj: t.Any, argument: t.Union[str, t.Any] - ) -> t.Union[t.Any, Undefined]: - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, str): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj: t.Any, attribute: str) -> t.Any: - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a string. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def _filter_test_common( - self, - name: t.Union[str, Undefined], - value: t.Any, - args: t.Optional[t.Sequence[t.Any]], - kwargs: t.Optional[t.Mapping[str, t.Any]], - context: t.Optional[Context], - eval_ctx: t.Optional[EvalContext], - is_filter: bool, - ) -> t.Any: - if is_filter: - env_map = self.filters - type_name = "filter" - else: - env_map = self.tests - type_name = "test" - - func = env_map.get(name) # type: ignore - - if func is None: - msg = f"No {type_name} named {name!r}." - - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = f"{msg} ({e}; did you forget to quote the callable name?)" - - raise TemplateRuntimeError(msg) - - args = [value, *(args if args is not None else ())] - kwargs = kwargs if kwargs is not None else {} - pass_arg = _PassArg.from_obj(func) - - if pass_arg is _PassArg.context: - if context is None: - raise TemplateRuntimeError( - f"Attempted to invoke a context {type_name} without context." 
- ) - - args.insert(0, context) - elif pass_arg is _PassArg.eval_context: - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - - args.insert(0, eval_ctx) - elif pass_arg is _PassArg.environment: - args.insert(0, self) - - return func(*args, **kwargs) - - def call_filter( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a filter on a value the same way the compiler does. - - This might return a coroutine if the filter is running from an - environment in async mode and the filter supports async - execution. It's your responsibility to await this if needed. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, True - ) - - def call_test( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a test on a value the same way the compiler does. - - This might return a coroutine if the test is running from an - environment in async mode and the test supports async execution. - It's your responsibility to await this if needed. - - .. versionchanged:: 3.0 - Tests support ``@pass_context``, etc. decorators. Added - the ``context`` and ``eval_ctx`` parameters. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, False - ) - - @internalcode - def parse( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> nodes.Template: - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def _parse( - self, source: str, name: t.Optional[str], filename: t.Optional[str] - ) -> nodes.Template: - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, filename).parse() - - def lex( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> t.Iterator[t.Tuple[int, str, str]]: - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = str(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def preprocess( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> str: - """Preprocesses the source with all extensions. 
This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce( - lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), - str(source), - ) - - def _tokenize( - self, - source: str, - name: t.Optional[str], - filename: t.Optional[str] = None, - state: t.Optional[str] = None, - ) -> TokenStream: - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) # type: ignore - - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - - return stream - - def _generate( - self, - source: nodes.Template, - name: t.Optional[str], - filename: t.Optional[str], - defer_init: bool = False, - ) -> str: - """Internal hook that can be overridden to hook a different generate - method in. - - .. versionadded:: 2.5 - """ - return generate( # type: ignore - source, - self, - name, - filename, - defer_init=defer_init, - optimized=self.optimized, - ) - - def _compile(self, source: str, filename: str) -> CodeType: - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, "exec") - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[False]" = False, - defer_init: bool = False, - ) -> CodeType: ... - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[True]" = ..., - defer_init: bool = False, - ) -> str: ... - - @internalcode - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: bool = False, - defer_init: bool = False, - ) -> t.Union[str, CodeType]: - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, str): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, defer_init=defer_init) - if raw: - return source - if filename is None: - filename = "